]> git.saurik.com Git - apple/xnu.git/blob - bsd/net/zlib.c
373826df333a5b916b885379ce857647ec61a08e
[apple/xnu.git] / bsd / net / zlib.c
1 /*
2 * Copyright (c) 2006 Apple Computer, Inc. All Rights Reserved.
3 *
4 * @APPLE_LICENSE_OSREFERENCE_HEADER_START@
5 *
6 * This file contains Original Code and/or Modifications of Original Code
7 * as defined in and that are subject to the Apple Public Source License
8 * Version 2.0 (the 'License'). You may not use this file except in
9 * compliance with the License. The rights granted to you under the
10 * License may not be used to create, or enable the creation or
11 * redistribution of, unlawful or unlicensed copies of an Apple operating
12 * system, or to circumvent, violate, or enable the circumvention or
13 * violation of, any terms of an Apple operating system software license
14 * agreement.
15 *
16 * Please obtain a copy of the License at
17 * http://www.opensource.apple.com/apsl/ and read it before using this
18 * file.
19 *
20 * The Original Code and all software distributed under the License are
21 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
22 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
23 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
24 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
25 * Please see the License for the specific language governing rights and
26 * limitations under the License.
27 *
28 * @APPLE_LICENSE_OSREFERENCE_HEADER_END@
29 */
30 /*
31 * This file is derived from various .h and .c files from the zlib-1.0.4
32 * distribution by Jean-loup Gailly and Mark Adler, with some additions
33 * by Paul Mackerras to aid in implementing Deflate compression and
34 * decompression for PPP packets. See zlib.h for conditions of
35 * distribution and use.
36 *
37 * Changes that have been made include:
38 * - added Z_PACKET_FLUSH (see zlib.h for details)
39 * - added inflateIncomp and deflateOutputPending
40 * - allow strm->next_out to be NULL, meaning discard the output
41 *
42 * $FreeBSD: src/sys/net/zlib.c,v 1.10 1999/12/29 04:38:38 peter Exp $
43 */
44
45 #define NO_DUMMY_DECL
46 #define NO_ZCFUNCS
47 #define MY_ZCALLOC
48
49 /* +++ zutil.h */
50 /* zutil.h -- internal interface and configuration of the compression library
51 * Copyright (C) 1995-2002 Jean-loup Gailly.
52 * For conditions of distribution and use, see copyright notice in zlib.h
53 */
54
55 /* WARNING: this file should *not* be used by applications. It is
56 part of the implementation of the compression library and is
57 subject to change. Applications should only use zlib.h.
58 */
59
60 /* @(#) $Id: zlib.c,v 1.10 2004/07/29 19:17:20 lindak Exp $ */
61
62 #ifndef _Z_UTIL_H
63 #define _Z_UTIL_H
64
65 #ifdef KERNEL
66 #include <net/zlib.h>
67 #else
68 #include "zlib.h"
69 #endif
70
71 #ifdef KERNEL
72 /* Assume this is a *BSD or SVR4 kernel */
73 #include <sys/types.h>
74 #include <sys/time.h>
75 #include <sys/systm.h>
76 # define HAVE_MEMCPY
77 # define memcpy(d, s, n) bcopy((s), (d), (n))
78 # define memset(d, v, n) bzero((d), (n))
79 # define memcmp bcmp
80
81 #else
82 #if defined(__KERNEL__)
83 /* Assume this is a Linux kernel */
84 #include <linux/string.h>
85 #define HAVE_MEMCPY
86
87 #else /* not kernel */
88 #ifdef STDC
89 # include <stddef.h>
90 # include <string.h>
91 # include <stdlib.h>
92 #endif
93 #ifdef NO_ERRNO_H
94 extern int errno;
95 #else
96 # include <errno.h>
97 #endif
98 #endif /* __KERNEL__ */
99 #endif /* KERNEL */
100
101 #ifndef local
102 # define local static
103 #endif
104 /* compile with -Dlocal if your debugger can't find static symbols */
105
106 typedef unsigned char uch;
107 typedef uch FAR uchf;
108 typedef unsigned short ush;
109 typedef ush FAR ushf;
110 typedef unsigned long ulg;
111
112 extern const char *z_errmsg[10]; /* indexed by 2-zlib_error */
113 /* (size given to avoid silly warnings with Visual C++) */
114
115 #define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)]
116
117 #define ERR_RETURN(strm,err) \
118 return (strm->msg = (char*)ERR_MSG(err), (err))
119 /* To be used only when the state is known to be valid */
120
121 /* common constants */
122
123 #ifndef DEF_WBITS
124 # define DEF_WBITS MAX_WBITS
125 #endif
126 /* default windowBits for decompression. MAX_WBITS is for compression only */
127
128 #if MAX_MEM_LEVEL >= 8
129 # define DEF_MEM_LEVEL 8
130 #else
131 # define DEF_MEM_LEVEL MAX_MEM_LEVEL
132 #endif
133 /* default memLevel */
134
135 #define STORED_BLOCK 0
136 #define STATIC_TREES 1
137 #define DYN_TREES 2
138 /* The three kinds of block type */
139
140 #define MIN_MATCH 3
141 #define MAX_MATCH 258
142 /* The minimum and maximum match lengths */
143
144 #define PRESET_DICT 0x20 /* preset dictionary flag in zlib header */
145
146 /* target dependencies */
147
148 #ifdef MSDOS
149 # define OS_CODE 0x00
150 # if defined(__TURBOC__) || defined(__BORLANDC__)
151 # if(__STDC__ == 1) && (defined(__LARGE__) || defined(__COMPACT__))
152 /* Allow compilation with ANSI keywords only enabled */
153 void _Cdecl farfree( void *block );
154 void *_Cdecl farmalloc( unsigned long nbytes );
155 # else
156 # include <alloc.h>
157 # endif
158 # else /* MSC or DJGPP */
159 # include <malloc.h>
160 # endif
161 #endif
162
163 #ifdef OS2
164 # define OS_CODE 0x06
165 #endif
166
167 #ifdef WIN32 /* Window 95 & Windows NT */
168 # define OS_CODE 0x0b
169 #endif
170
171 #if defined(VAXC) || defined(VMS)
172 # define OS_CODE 0x02
173 # define F_OPEN(name, mode) \
174 fopen((name), (mode), "mbc=60", "ctx=stm", "rfm=fix", "mrs=512")
175 #endif
176
177 #ifdef AMIGA
178 # define OS_CODE 0x01
179 #endif
180
181 #if defined(ATARI) || defined(atarist)
182 # define OS_CODE 0x05
183 #endif
184
185 #if defined(MACOS) || defined(TARGET_OS_MAC)
186 # define OS_CODE 0x07
187 # if defined(__MWERKS__) && __dest_os != __be_os && __dest_os != __win32_os
188 # include <unix.h> /* for fdopen */
189 # else
190 # ifndef fdopen
191 # define fdopen(fd,mode) NULL /* No fdopen() */
192 # endif
193 # endif
194 #endif
195
196 #ifdef __50SERIES /* Prime/PRIMOS */
197 # define OS_CODE 0x0F
198 #endif
199
200 #ifdef TOPS20
201 # define OS_CODE 0x0a
202 #endif
203
204 #if defined(_BEOS_) || defined(RISCOS)
205 # define fdopen(fd,mode) NULL /* No fdopen() */
206 #endif
207
208 #if (defined(_MSC_VER) && (_MSC_VER > 600))
209 # define fdopen(fd,type) _fdopen(fd,type)
210 #endif
211
212
213 /* Common defaults */
214
215 #ifndef OS_CODE
216 # define OS_CODE 0x03 /* assume Unix */
217 #endif
218
219 #ifndef F_OPEN
220 # define F_OPEN(name, mode) fopen((name), (mode))
221 #endif
222
223 /* functions */
224
225 #ifdef HAVE_STRERROR
226 extern char *strerror OF((int));
227 # define zstrerror(errnum) strerror(errnum)
228 #else
229 # define zstrerror(errnum) ""
230 #endif
231
232 #if defined(pyr)
233 # define NO_MEMCPY
234 #endif
235 #if defined(SMALL_MEDIUM) && !defined(_MSC_VER) && !defined(__SC__)
236 /* Use our own functions for small and medium model with MSC <= 5.0.
237 * You may have to use the same strategy for Borland C (untested).
238 * The __SC__ check is for Symantec.
239 */
240 # define NO_MEMCPY
241 #endif
242 #if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
243 # define HAVE_MEMCPY
244 #endif
245 #ifdef HAVE_MEMCPY
246 # ifdef SMALL_MEDIUM /* MSDOS small or medium model */
247 # define zmemcpy _fmemcpy
248 # define zmemcmp _fmemcmp
249 # define zmemzero(dest, len) _fmemset(dest, 0, len)
250 # else
251 # define zmemcpy memcpy
252 # define zmemcmp memcmp
253 # define zmemzero(dest, len) memset(dest, 0, len)
254 # endif
255 #else
256 extern void zmemcpy OF((Bytef* dest, const Bytef* source, uInt len));
257 extern int zmemcmp OF((const Bytef* s1, const Bytef* s2, uInt len));
258 extern void zmemzero OF((Bytef* dest, uInt len));
259 #endif
260
261 /* Diagnostic functions */
262 #ifdef DEBUG_ZLIB
263 # include <stdio.h>
264 extern int z_verbose;
265 extern void z_error OF((char *m));
266 # define Assert(cond,msg) {if(!(cond)) z_error(msg);}
267 # define Trace(x) {if (z_verbose>=0) fprintf x ;}
268 # define Tracev(x) {if (z_verbose>0) fprintf x ;}
269 # define Tracevv(x) {if (z_verbose>1) fprintf x ;}
270 # define Tracec(c,x) {if (z_verbose>0 && (c)) fprintf x ;}
271 # define Tracecv(c,x) {if (z_verbose>1 && (c)) fprintf x ;}
272 #else
273 # define Assert(cond,msg)
274 # define Trace(x)
275 # define Tracev(x)
276 # define Tracevv(x)
277 # define Tracec(c,x)
278 # define Tracecv(c,x)
279 #endif
280
281
282 typedef uLong (ZEXPORT *check_func) OF((uLong check, const Bytef *buf,
283 uInt len));
284 voidpf zcalloc OF((voidpf opaque, unsigned items, unsigned size));
285 void zcfree OF((voidpf opaque, voidpf ptr));
286
287 #define ZALLOC(strm, items, size) \
288 (*((strm)->zalloc))((strm)->opaque, (items), (size))
289 #define ZFREE(strm, addr) (*((strm)->zfree))((strm)->opaque, (voidpf)(addr))
290 #define TRY_FREE(s, p) {if (p) ZFREE(s, p);}
291
292 #endif /* _Z_UTIL_H */
293 /* --- zutil.h */
294
295 /* +++ deflate.h */
296 /* deflate.h -- internal compression state
297 * Copyright (C) 1995-2002 Jean-loup Gailly
298 * For conditions of distribution and use, see copyright notice in zlib.h
299 */
300
301 /* WARNING: this file should *not* be used by applications. It is
302 part of the implementation of the compression library and is
303 subject to change. Applications should only use zlib.h.
304 */
305
306 /* @(#) $Id: zlib.c,v 1.10 2004/07/29 19:17:20 lindak Exp $ */
307
308 #ifndef _DEFLATE_H
309 #define _DEFLATE_H
310
311 /* #include "zutil.h" */
312
313 /* ===========================================================================
314 * Internal compression state.
315 */
316
317 #define LENGTH_CODES 29
318 /* number of length codes, not counting the special END_BLOCK code */
319
320 #define LITERALS 256
321 /* number of literal bytes 0..255 */
322
323 #define L_CODES (LITERALS+1+LENGTH_CODES)
324 /* number of Literal or Length codes, including the END_BLOCK code */
325
326 #define D_CODES 30
327 /* number of distance codes */
328
329 #define BL_CODES 19
330 /* number of codes used to transfer the bit lengths */
331
332 #define HEAP_SIZE (2*L_CODES+1)
333 /* maximum heap size */
334
335 #define MAX_BITS 15
336 /* All codes must not exceed MAX_BITS bits */
337
338 #define INIT_STATE 42
339 #define BUSY_STATE 113
340 #define FINISH_STATE 666
341 /* Stream status */
342
343
344 /* Data structure describing a single value and its code string. */
typedef struct ct_data_s {
    union {
        ush  freq;       /* frequency count (while the tree is being built) */
        ush  code;       /* bit string (once codes are assigned) — NOTE(review):
                          * dual use inferred from the union; confirm in trees.c */
    } fc;
    union {
        ush  dad;        /* father node in Huffman tree */
        ush  len;        /* length of bit string */
    } dl;
} FAR ct_data;           /* 4 bytes per tree node; see Freq/Code/Dad/Len macros */
355
356 #define Freq fc.freq
357 #define Code fc.code
358 #define Dad dl.dad
359 #define Len dl.len
360
/* Opaque forward declaration; the full definition is not in this chunk. */
typedef struct static_tree_desc_s  static_tree_desc;

/* Pairs a dynamic (per-block) tree with its fixed static counterpart. */
typedef struct tree_desc_s {
    ct_data *dyn_tree;           /* the dynamic tree */
    int     max_code;            /* largest code with non zero frequency */
    static_tree_desc *stat_desc; /* the corresponding static tree */
} FAR tree_desc;
368
369 typedef ush Pos;
370 typedef Pos FAR Posf;
371 typedef unsigned IPos;
372
373 /* A Pos is an index in the character window. We use short instead of int to
374 * save space in the various tables. IPos is used only for parameter passing.
375 */
376
/* Complete per-stream compression state, allocated by deflateInit2_() and
 * reached through strm->state.  Field order and sizes are relied on by the
 * rest of this file; do not reorder.
 */
typedef struct deflate_state {
    z_streamp strm;      /* pointer back to this zlib stream */
    int   status;        /* INIT_STATE, BUSY_STATE or FINISH_STATE (see above) */
    Bytef *pending_buf;  /* output still pending */
    ulg   pending_buf_size; /* size of pending_buf */
    Bytef *pending_out;  /* next pending byte to output to the stream */
    int   pending;       /* nb of bytes in the pending buffer */
    int   noheader;      /* suppress zlib header and adler32 */
    Byte  data_type;     /* UNKNOWN, BINARY or ASCII */
    Byte  method;        /* STORED (for zip only) or DEFLATED */
    int   last_flush;    /* value of flush param for previous deflate call */

    /* used by deflate.c: */

    uInt  w_size;        /* LZ77 window size (32K by default) */
    uInt  w_bits;        /* log2(w_size)  (8..16) */
    uInt  w_mask;        /* w_size - 1 */

    Bytef *window;
    /* Sliding window. Input bytes are read into the second half of the window,
     * and move to the first half later to keep a dictionary of at least wSize
     * bytes. With this organization, matches are limited to a distance of
     * wSize-MAX_MATCH bytes, but this ensures that IO is always
     * performed with a length multiple of the block size. Also, it limits
     * the window size to 64K, which is quite useful on MSDOS.
     * To do: use the user input buffer as sliding window.
     */

    ulg window_size;
    /* Actual size of window: 2*wSize, except when the user input buffer
     * is directly used as sliding window.
     */

    Posf *prev;
    /* Link to older string with same hash index. To limit the size of this
     * array to 64K, this link is maintained only for the last 32K strings.
     * An index in this array is thus a window index modulo 32K.
     */

    Posf *head; /* Heads of the hash chains or NIL. */

    uInt  ins_h;          /* hash index of string to be inserted */
    uInt  hash_size;      /* number of elements in hash table */
    uInt  hash_bits;      /* log2(hash_size) */
    uInt  hash_mask;      /* hash_size-1 */

    uInt  hash_shift;
    /* Number of bits by which ins_h must be shifted at each input
     * step. It must be such that after MIN_MATCH steps, the oldest
     * byte no longer takes part in the hash key, that is:
     *   hash_shift * MIN_MATCH >= hash_bits
     */

    long block_start;
    /* Window position at the beginning of the current output block. Gets
     * negative when the window is moved backwards.
     */

    uInt match_length;           /* length of best match */
    IPos prev_match;             /* previous match */
    int match_available;         /* set if previous match exists */
    uInt strstart;               /* start of string to insert */
    uInt match_start;            /* start of matching string */
    uInt lookahead;              /* number of valid bytes ahead in window */

    uInt prev_length;
    /* Length of the best match at previous step. Matches not greater than this
     * are discarded. This is used in the lazy match evaluation.
     */

    uInt max_chain_length;
    /* To speed up deflation, hash chains are never searched beyond this
     * length.  A higher limit improves compression ratio but degrades the
     * speed.
     */

    uInt max_lazy_match;
    /* Attempt to find a better match only when the current match is strictly
     * smaller than this value. This mechanism is used only for compression
     * levels >= 4.
     */
#   define max_insert_length  max_lazy_match
    /* Insert new strings in the hash table only if the match length is not
     * greater than this length. This saves time but degrades compression.
     * max_insert_length is used only for compression levels <= 3.
     */

    int level;    /* compression level (1..9) */
    int strategy; /* favor or force Huffman coding*/

    uInt good_match;
    /* Use a faster search when the previous match is longer than this */

    int nice_match; /* Stop searching when current match exceeds this */

    /* used by trees.c: */
    /* Didn't use ct_data typedef below to suppress compiler warning */
    struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
    struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
    struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */

    struct tree_desc_s l_desc;               /* desc. for literal tree */
    struct tree_desc_s d_desc;               /* desc. for distance tree */
    struct tree_desc_s bl_desc;              /* desc. for bit length tree */

    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    int heap[2*L_CODES+1];      /* heap used to build the Huffman trees */
    int heap_len;               /* number of elements in the heap */
    int heap_max;               /* element of largest frequency */
    /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
     * The same heap array is used to build all trees.
     */

    uch depth[2*L_CODES+1];
    /* Depth of each subtree used as tie breaker for trees of equal frequency
     */

    uchf *l_buf;          /* buffer for literals or lengths */

    uInt  lit_bufsize;
    /* Size of match buffer for literals/lengths.  There are 4 reasons for
     * limiting lit_bufsize to 64K:
     *   - frequencies can be kept in 16 bit counters
     *   - if compression is not successful for the first block, all input
     *     data is still in the window so we can still emit a stored block even
     *     when input comes from standard input.  (This can also be done for
     *     all blocks if lit_bufsize is not greater than 32K.)
     *   - if compression is not successful for a file smaller than 64K, we can
     *     even emit a stored file instead of a stored block (saving 5 bytes).
     *     This is applicable only for zip (not gzip or zlib).
     *   - creating new Huffman trees less frequently may not provide fast
     *     adaptation to changes in the input data statistics. (Take for
     *     example a binary file with poorly compressible code followed by
     *     a highly compressible string table.) Smaller buffer sizes give
     *     fast adaptation but have of course the overhead of transmitting
     *     trees more frequently.
     *   - I can't count above 4
     */

    uInt last_lit;      /* running index in l_buf */

    ushf *d_buf;
    /* Buffer for distances. To simplify the code, d_buf and l_buf have
     * the same number of elements. To use different lengths, an extra flag
     * array would be necessary.
     */

    ulg opt_len;        /* bit length of current block with optimal trees */
    ulg static_len;     /* bit length of current block with static trees */
    uInt matches;       /* number of string matches in current block */
    int last_eob_len;   /* bit length of EOB code for last block */

#ifdef DEBUG_ZLIB
    ulg compressed_len; /* total bit length of compressed file mod 2^32 */
    ulg bits_sent;      /* bit length of compressed data sent mod 2^32 */
#endif

    ush bi_buf;
    /* Output buffer. bits are inserted starting at the bottom (least
     * significant bits).
     */
    int bi_valid;
    /* Number of valid bits in bi_buf.  All bits above the last valid bit
     * are always zero.
     */

} FAR deflate_state;
546
547 /* Output a byte on the stream.
548 * IN assertion: there is enough room in pending_buf.
549 */
550 #define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}
551
552
553 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
554 /* Minimum amount of lookahead, except at the end of the input file.
555 * See deflate.c for comments about the MIN_MATCH+1.
556 */
557
558 #define MAX_DIST(s) ((s)->w_size-MIN_LOOKAHEAD)
559 /* In order to simplify the code, particularly on 16 bit machines, match
560 * distances are limited to MAX_DIST instead of WSIZE.
561 */
562
563 /* in trees.c */
564 void _tr_init OF((deflate_state *s));
565 int _tr_tally OF((deflate_state *s, unsigned dist, unsigned lc));
566 void _tr_flush_block OF((deflate_state *s, charf *buf, ulg stored_len,
567 int eof));
568 void _tr_align OF((deflate_state *s));
569 void _tr_stored_block OF((deflate_state *s, charf *buf, ulg stored_len,
570 int eof));
571
572 #define d_code(dist) \
573 ((dist) < 256 ? _dist_code[dist] : _dist_code[256+((dist)>>7)])
574 /* Mapping from a distance to a distance code. dist is the distance - 1 and
575 * must not have side effects. _dist_code[256] and _dist_code[257] are never
576 * used.
577 */
578
579 #ifndef DEBUG_ZLIB
580 /* Inline versions of _tr_tally for speed: */
581
582 #if defined(GEN_TREES_H) || !defined(STDC)
583 extern uch _length_code[];
584 extern uch _dist_code[];
585 #else
586 extern const uch _length_code[];
587 extern const uch _dist_code[];
588 #endif
589
590 # define _tr_tally_lit(s, c, flush) \
591 { uch cc = (c); \
592 s->d_buf[s->last_lit] = 0; \
593 s->l_buf[s->last_lit++] = cc; \
594 s->dyn_ltree[cc].Freq++; \
595 flush = (s->last_lit == s->lit_bufsize-1); \
596 }
597 # define _tr_tally_dist(s, distance, length, flush) \
598 { uch len = (length); \
599 ush dist = (distance); \
600 s->d_buf[s->last_lit] = dist; \
601 s->l_buf[s->last_lit++] = len; \
602 dist--; \
603 s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
604 s->dyn_dtree[d_code(dist)].Freq++; \
605 flush = (s->last_lit == s->lit_bufsize-1); \
606 }
607 #else
608 # define _tr_tally_lit(s, c, flush) flush = _tr_tally(s, 0, c)
609 # define _tr_tally_dist(s, distance, length, flush) \
610 flush = _tr_tally(s, distance, length)
611 #endif
612
613 #endif
614 /* --- deflate.h */
615
616 /* +++ deflate.c */
617 /* deflate.c -- compress data using the deflation algorithm
618 * Copyright (C) 1995-2002 Jean-loup Gailly.
619 * For conditions of distribution and use, see copyright notice in zlib.h
620 */
621
622 /*
623 * ALGORITHM
624 *
625 * The "deflation" process depends on being able to identify portions
626 * of the input text which are identical to earlier input (within a
627 * sliding window trailing behind the input currently being processed).
628 *
629 * The most straightforward technique turns out to be the fastest for
630 * most input files: try all possible matches and select the longest.
631 * The key feature of this algorithm is that insertions into the string
632 * dictionary are very simple and thus fast, and deletions are avoided
633 * completely. Insertions are performed at each input character, whereas
634 * string matches are performed only when the previous match ends. So it
635 * is preferable to spend more time in matches to allow very fast string
636 * insertions and avoid deletions. The matching algorithm for small
637 * strings is inspired from that of Rabin & Karp. A brute force approach
638 * is used to find longer strings when a small match has been found.
639 * A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
640 * (by Leonid Broukhis).
641 * A previous version of this file used a more sophisticated algorithm
642 * (by Fiala and Greene) which is guaranteed to run in linear amortized
643 * time, but has a larger average cost, uses more memory and is patented.
644 * However the F&G algorithm may be faster for some highly redundant
645 * files if the parameter max_chain_length (described below) is too large.
646 *
647 * ACKNOWLEDGEMENTS
648 *
649 * The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
650 * I found it in 'freeze' written by Leonid Broukhis.
651 * Thanks to many people for bug reports and testing.
652 *
653 * REFERENCES
654 *
655 * Deutsch, L.P.,"DEFLATE Compressed Data Format Specification".
656 * Available in ftp://ds.internic.net/rfc/rfc1951.txt
657 *
658 * A description of the Rabin and Karp algorithm is given in the book
659 * "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
660 *
661 * Fiala,E.R., and Greene,D.H.
662 * Data Compression with Finite Windows, Comm.ACM, 32,4 (1989) 490-595
663 *
664 */
665
666 /* @(#) $Id: zlib.c,v 1.10 2004/07/29 19:17:20 lindak Exp $ */
667
668 /* #include "deflate.h" */
669
670 const char deflate_copyright[] =
671 " deflate 1.1.4 Copyright 1995-2002 Jean-loup Gailly ";
672 /*
673 If you use the zlib library in a product, an acknowledgment is welcome
674 in the documentation of your product. If for some reason you cannot
675 include such an acknowledgment, I would appreciate that you keep this
676 copyright string in the executable of your product.
677 */
678
679 /* ===========================================================================
680 * Function prototypes.
681 */
682 typedef enum {
683 need_more, /* block not completed, need more input or more output */
684 block_done, /* block flush performed */
685 finish_started, /* finish started, need only more output at next deflate */
686 finish_done /* finish done, accept no more input or output */
687 } block_state;
688
689 typedef block_state (*compress_func) OF((deflate_state *s, int flush));
690 /* Compression function. Returns the block state after the call. */
691
692 local void fill_window OF((deflate_state *s));
693 local block_state deflate_stored OF((deflate_state *s, int flush));
694 local block_state deflate_fast OF((deflate_state *s, int flush));
695 local block_state deflate_slow OF((deflate_state *s, int flush));
696 local void lm_init OF((deflate_state *s));
697 local void putShortMSB OF((deflate_state *s, uInt b));
698 local void flush_pending OF((z_streamp strm));
699 local int read_buf OF((z_streamp strm, Bytef *buf, unsigned size));
700 #ifdef ASMV
701 void match_init OF((void)); /* asm code initialization */
702 uInt longest_match OF((deflate_state *s, IPos cur_match));
703 #else
704 local uInt longest_match OF((deflate_state *s, IPos cur_match));
705 #endif
706
707 #ifdef DEBUG_ZLIB
708 local void check_match OF((deflate_state *s, IPos start, IPos match,
709 int length));
710 #endif
711
712 /* ===========================================================================
713 * Local data
714 */
715
716 #define NIL 0
717 /* Tail of hash chains */
718
719 #ifndef TOO_FAR
720 # define TOO_FAR 4096
721 #endif
722 /* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
723
724 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
725 /* Minimum amount of lookahead, except at the end of the input file.
726 * See deflate.c for comments about the MIN_MATCH+1.
727 */
728
729 /* Values for max_lazy_match, good_match and max_chain_length, depending on
730 * the desired pack level (0..9). The values given below have been tuned to
731 * exclude worst case performance for pathological files. Better values may be
732 * found for specific files.
733 */
/* Per-level tuning parameters; selected by deflate via s->level. */
typedef struct config_s {
   ush good_length; /* reduce lazy search above this match length */
   ush max_lazy;    /* do not perform lazy search above this match length */
   ush nice_length; /* quit search above this match length */
   ush max_chain;   /* never search a hash chain beyond this many links */
   compress_func func; /* block-compression routine used for this level */
} config;

/* Indexed by compression level 0..9.  The values were tuned by the zlib
 * authors to avoid pathological worst cases; do not change casually. */
local const config configuration_table[10] = {
/*      good lazy nice chain */
/* 0 */ {0,    0,  0,    0, deflate_stored},  /* store only */
/* 1 */ {4,    4,  8,    4, deflate_fast}, /* maximum speed, no lazy matches */
/* 2 */ {4,    5, 16,    8, deflate_fast},
/* 3 */ {4,    6, 32,   32, deflate_fast},

/* 4 */ {4,    4, 16,   16, deflate_slow},  /* lazy matches */
/* 5 */ {8,   16, 32,   32, deflate_slow},
/* 6 */ {8,   16, 128, 128, deflate_slow},
/* 7 */ {8,   32, 128, 256, deflate_slow},
/* 8 */ {32, 128, 258, 1024, deflate_slow},
/* 9 */ {32, 258, 258, 4096, deflate_slow}}; /* maximum compression */
755
756 /* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
757 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
758 * meaning.
759 */
760
761 #define EQUAL 0
762 /* result of memcmp for equal strings */
763
764 #ifndef NO_DUMMY_DECL
765 struct static_tree_desc_s {int dummy;}; /* for buggy compilers */
766 #endif
767
768 /* ===========================================================================
769 * Update a hash value with the given input byte
770 * IN assertion: all calls to UPDATE_HASH are made with consecutive
771 * input characters, so that a running hash key can be computed from the
772 * previous key instead of complete recalculation each time.
773 */
774 #define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)
775
776
777 /* ===========================================================================
778 * Insert string str in the dictionary and set match_head to the previous head
779 * of the hash chain (the most recent string with same hash key). Return
780 * the previous length of the hash chain.
781 * If this file is compiled with -DFASTEST, the compression level is forced
782 * to 1, and no hash chains are maintained.
783 * IN assertion: all calls to INSERT_STRING are made with consecutive
784 * input characters and the first MIN_MATCH bytes of str are valid
785 * (except for the last MIN_MATCH-1 bytes of the input file).
786 */
787 #ifdef FASTEST
788 #define INSERT_STRING(s, str, match_head) \
789 (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
790 match_head = s->head[s->ins_h], \
791 s->head[s->ins_h] = (Pos)(str))
792 #else
793 #define INSERT_STRING(s, str, match_head) \
794 (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
795 s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
796 s->head[s->ins_h] = (Pos)(str))
797 #endif
798
799 /* ===========================================================================
800 * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
801 * prev[] will be initialized on the fly.
802 */
803 #define CLEAR_HASH(s) \
804 s->head[s->hash_size-1] = NIL; \
805 zmemzero((Bytef *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head));
806
807 /* ========================================================================= */
808 int ZEXPORT deflateInit_(strm, level, version, stream_size)
809 z_streamp strm;
810 int level;
811 const char *version;
812 int stream_size;
813 {
814 return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
815 Z_DEFAULT_STRATEGY, version, stream_size);
816 /* To do: ignore strm->next_in if we use it as window */
817 }
818
819 /* ========================================================================= */
/* =========================================================================
 * Full-parameter initializer for a deflate stream: validates the caller's
 * zlib version and parameters, installs default allocators when needed,
 * allocates the compression state plus all working buffers, and finishes
 * by calling deflateReset().  Returns Z_OK on success, otherwise
 * Z_VERSION_ERROR, Z_STREAM_ERROR or Z_MEM_ERROR.
 */
int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
                          version, stream_size)
    z_streamp strm;
    int  level;
    int  method;
    int  windowBits;
    int  memLevel;
    int  strategy;
    const char *version;
    int stream_size;
{
    deflate_state *s;
    int noheader = 0;   /* nonzero: emit raw deflate, no zlib header/adler32 */
    static const char* my_version = ZLIB_VERSION;

    ushf *overlay;
    /* We overlay pending_buf and d_buf+l_buf. This works since the average
     * output size for (length,distance) codes is <= 24 bits.
     */

    /* Reject callers compiled against an incompatible zlib: the major
     * version digit and the size of z_stream must both match. */
    if (version == Z_NULL || version[0] != my_version[0] ||
        stream_size != sizeof(z_stream)) {
        return Z_VERSION_ERROR;
    }
    if (strm == Z_NULL) return Z_STREAM_ERROR;

    strm->msg = Z_NULL;
#ifndef NO_ZCFUNCS
    /* Supply the built-in allocators when the caller provided none. */
    if (strm->zalloc == Z_NULL) {
        strm->zalloc = zcalloc;
        strm->opaque = (voidpf)0;
    }
    if (strm->zfree == Z_NULL) strm->zfree = zcfree;
#endif

    if (level == Z_DEFAULT_COMPRESSION) level = 6;
#ifdef FASTEST
    level = 1;
#endif

    if (windowBits < 0) { /* undocumented feature: suppress zlib header */
        noheader = 1;
        windowBits = -windowBits;
    }
    if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != Z_DEFLATED ||
        windowBits < 9 || windowBits > 15 || level < 0 || level > 9 ||
        strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
        return Z_STREAM_ERROR;
    }
    s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state));
    if (s == Z_NULL) return Z_MEM_ERROR;
    strm->state = (struct internal_state FAR *)s;
    s->strm = strm;

    s->noheader = noheader;
    s->w_bits = windowBits;
    s->w_size = 1 << s->w_bits;
    s->w_mask = s->w_size - 1;

    s->hash_bits = memLevel + 7;
    s->hash_size = 1 << s->hash_bits;
    s->hash_mask = s->hash_size - 1;
    /* Chosen so that after MIN_MATCH shifts the oldest byte no longer
     * contributes to the running hash key (see UPDATE_HASH). */
    s->hash_shift = ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);

    s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
    s->prev   = (Posf *)  ZALLOC(strm, s->w_size, sizeof(Pos));
    s->head   = (Posf *)  ZALLOC(strm, s->hash_size, sizeof(Pos));

    s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */

    /* One allocation of lit_bufsize*(sizeof(ush)+2) bytes backs three
     * views: pending_buf covers the whole region, d_buf starts at byte
     * offset lit_bufsize (2*lit_bufsize bytes of ush entries), and l_buf
     * starts at byte offset 3*lit_bufsize (lit_bufsize bytes). */
    overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
    s->pending_buf = (uchf *) overlay;
    s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L);

    if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
        s->pending_buf == Z_NULL) {
        /* deflateEnd releases whichever buffers were allocated above. */
        strm->msg = (char*)ERR_MSG(Z_MEM_ERROR);
        deflateEnd (strm);
        return Z_MEM_ERROR;
    }
    s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
    s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;

    s->level = level;
    s->strategy = strategy;
    s->method = (Byte)method;

    return deflateReset(strm);
}
909
/* ========================================================================= */
/*
 * Preload the sliding window with a caller-supplied dictionary so that
 * early input can match against it. Must be called after deflateInit but
 * before the first deflate() call (status must still be INIT_STATE).
 *
 * strm       - initialized deflate stream
 * dictionary - bytes to preload (only the last MAX_DIST(s) bytes are kept)
 * dictLength - length of dictionary in bytes
 *
 * Returns Z_OK on success, Z_STREAM_ERROR on a bad stream/arguments.
 * Side effect: strm->adler is updated over the WHOLE dictionary, even the
 * part that is discarded, so the decompressor can identify it.
 */
int ZEXPORT deflateSetDictionary (strm, dictionary, dictLength)
    z_streamp strm;
    const Bytef *dictionary;
    uInt dictLength;
{
    deflate_state *s;
    uInt length = dictLength;
    uInt n;
    IPos hash_head = 0;

    if (strm == Z_NULL || strm->state == Z_NULL || dictionary == Z_NULL ||
        ((deflate_state*)strm->state)->status != INIT_STATE) return Z_STREAM_ERROR;

    s = (deflate_state*)strm->state;
    /* Checksum covers all of dictLength, not just the retained tail. */
    strm->adler = adler32(strm->adler, dictionary, dictLength);

    /* Too short to ever produce a match: nothing useful to insert. */
    if (length < MIN_MATCH) return Z_OK;
    if (length > MAX_DIST(s)) {
        length = MAX_DIST(s);
#ifndef USE_DICT_HEAD
        dictionary += dictLength - length; /* use the tail of the dictionary */
#endif
    }
    zmemcpy(s->window, dictionary, length);
    s->strstart = length;
    s->block_start = (long)length;

    /* Insert all strings in the hash table (except for the last two bytes).
     * s->lookahead stays null, so s->ins_h will be recomputed at the next
     * call of fill_window.
     */
    s->ins_h = s->window[0];
    UPDATE_HASH(s, s->ins_h, s->window[1]);
    for (n = 0; n <= length - MIN_MATCH; n++) {
        INSERT_STRING(s, n, hash_head);
    }
    if (hash_head) hash_head = 0; /* to make compiler happy */
    return Z_OK;
}
950
951 /* ========================================================================= */
952 int ZEXPORT deflateReset (strm)
953 z_streamp strm;
954 {
955 deflate_state *s;
956
957 if (strm == Z_NULL || strm->state == Z_NULL ||
958 strm->zalloc == Z_NULL || strm->zfree == Z_NULL) return Z_STREAM_ERROR;
959
960 strm->total_in = strm->total_out = 0;
961 strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
962 strm->data_type = Z_UNKNOWN;
963
964 s = (deflate_state *)strm->state;
965 s->pending = 0;
966 s->pending_out = s->pending_buf;
967
968 if (s->noheader < 0) {
969 s->noheader = 0; /* was set to -1 by deflate(..., Z_FINISH); */
970 }
971 s->status = s->noheader ? BUSY_STATE : INIT_STATE;
972 strm->adler = 1;
973 s->last_flush = Z_NO_FLUSH;
974
975 _tr_init(s);
976 lm_init(s);
977
978 return Z_OK;
979 }
980
981 /* ========================================================================= */
982 int ZEXPORT deflateParams(strm, level, strategy)
983 z_streamp strm;
984 int level;
985 int strategy;
986 {
987 deflate_state *s;
988 compress_func func;
989 int err = Z_OK;
990
991 if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
992 s = (deflate_state*)strm->state;
993
994 if (level == Z_DEFAULT_COMPRESSION) {
995 level = 6;
996 }
997 if (level < 0 || level > 9 || strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
998 return Z_STREAM_ERROR;
999 }
1000 func = configuration_table[s->level].func;
1001
1002 if (func != configuration_table[level].func && strm->total_in != 0) {
1003 /* Flush the last buffer: */
1004 err = deflate(strm, Z_PARTIAL_FLUSH);
1005 }
1006 if (s->level != level) {
1007 s->level = level;
1008 s->max_lazy_match = configuration_table[level].max_lazy;
1009 s->good_match = configuration_table[level].good_length;
1010 s->nice_match = configuration_table[level].nice_length;
1011 s->max_chain_length = configuration_table[level].max_chain;
1012 }
1013 s->strategy = strategy;
1014 return err;
1015 }
1016
1017 /* =========================================================================
1018 * Put a short in the pending buffer. The 16-bit value is put in MSB order.
1019 * IN assertion: the stream state is correct and there is enough room in
1020 * pending_buf.
1021 */
1022 local void putShortMSB (s, b)
1023 deflate_state *s;
1024 uInt b;
1025 {
1026 put_byte(s, (Byte)(b >> 8));
1027 put_byte(s, (Byte)(b & 0xff));
1028 }
1029
1030 /* =========================================================================
1031 * Flush as much pending output as possible. All deflate() output goes
1032 * through this function so some applications may wish to modify it
1033 * to avoid allocating a large strm->next_out buffer and copying into it.
1034 * (See also read_buf()).
1035 */
1036 local void flush_pending(strm)
1037 z_streamp strm;
1038 {
1039 deflate_state* s = (deflate_state*)strm->state;
1040 unsigned len = s->pending;
1041
1042 if (len > strm->avail_out) len = strm->avail_out;
1043 if (len == 0) return;
1044
1045 zmemcpy(strm->next_out, s->pending_out, len);
1046 strm->next_out += len;
1047 s->pending_out += len;
1048 strm->total_out += len;
1049 strm->avail_out -= len;
1050 s->pending -= len;
1051 if (s->pending == 0) {
1052 s->pending_out = s->pending_buf;
1053 }
1054 }
1055
/* ========================================================================= */
/*
 * Main compression entry point: consume input from strm->next_in and
 * produce compressed output in strm->next_out, honoring the flush mode
 * (Z_NO_FLUSH .. Z_FINISH). Emits the zlib header on the first call,
 * and the adler32 trailer once Z_FINISH completes.
 *
 * Returns Z_OK while more work remains, Z_STREAM_END when the stream is
 * finished and fully flushed, Z_STREAM_ERROR / Z_BUF_ERROR on misuse.
 */
int ZEXPORT deflate (strm, flush)
    z_streamp strm;
    int flush;
{
    int old_flush; /* value of flush param for previous deflate call */
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL ||
        flush > Z_FINISH || flush < 0) {
        return Z_STREAM_ERROR;
    }
    s = (deflate_state*)strm->state;

    /* next_out must be valid; next_in may be NULL only if no input is
     * claimed; no more compression is allowed after FINISH started. */
    if (strm->next_out == Z_NULL ||
        (strm->next_in == Z_NULL && strm->avail_in != 0) ||
        (s->status == FINISH_STATE && flush != Z_FINISH)) {
        ERR_RETURN(strm, Z_STREAM_ERROR);
    }
    if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);

    s->strm = strm; /* just in case */
    old_flush = s->last_flush;
    s->last_flush = flush;

    /* Write the zlib header */
    if (s->status == INIT_STATE) {

        uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
        uInt level_flags = (s->level-1) >> 1;

        if (level_flags > 3) level_flags = 3;
        header |= (level_flags << 6);
        if (s->strstart != 0) header |= PRESET_DICT;
        /* Make the 16-bit header a multiple of 31 (FCHECK field). */
        header += 31 - (header % 31);

        s->status = BUSY_STATE;
        putShortMSB(s, header);

        /* Save the adler32 of the preset dictionary: */
        if (s->strstart != 0) {
            putShortMSB(s, (uInt)(strm->adler >> 16));
            putShortMSB(s, (uInt)(strm->adler & 0xffff));
        }
        /* Restart the checksum for the actual data. */
        strm->adler = 1L;
    }

    /* Flush as much pending output as possible */
    if (s->pending != 0) {
        flush_pending(strm);
        if (strm->avail_out == 0) {
            /* Since avail_out is 0, deflate will be called again with
             * more output space, but possibly with both pending and
             * avail_in equal to zero. There won't be anything to do,
             * but this is not an error situation so make sure we
             * return OK instead of BUF_ERROR at next call of deflate:
             */
            s->last_flush = -1;
            return Z_OK;
        }

    /* Make sure there is something to do and avoid duplicate consecutive
     * flushes. For repeated and useless calls with Z_FINISH, we keep
     * returning Z_STREAM_END instead of Z_BUFF_ERROR.
     */
    } else if (strm->avail_in == 0 && flush <= old_flush &&
               flush != Z_FINISH) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* User must not provide more input after the first FINISH: */
    if (s->status == FINISH_STATE && strm->avail_in != 0) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* Start a new block or continue the current one.
     */
    if (strm->avail_in != 0 || s->lookahead != 0 ||
        (flush != Z_NO_FLUSH && s->status != FINISH_STATE)) {
        block_state bstate;

        /* Dispatch to deflate_stored/deflate_fast/deflate_slow by level. */
        bstate = (*(configuration_table[s->level].func))(s, flush);

        if (bstate == finish_started || bstate == finish_done) {
            s->status = FINISH_STATE;
        }
        if (bstate == need_more || bstate == finish_started) {
            if (strm->avail_out == 0) {
                s->last_flush = -1; /* avoid BUF_ERROR next call, see above */
            }
            return Z_OK;
            /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
             * of deflate should use the same flush parameter to make sure
             * that the flush is complete. So we don't have to output an
             * empty block here, this will be done at next call. This also
             * ensures that for a very small output buffer, we emit at most
             * one empty block.
             */
        }
        if (bstate == block_done) {
            if (flush == Z_PARTIAL_FLUSH) {
                _tr_align(s);
            } else { /* FULL_FLUSH or SYNC_FLUSH */
                _tr_stored_block(s, (char*)0, 0L, 0);
                /* For a full flush, this empty block will be recognized
                 * as a special marker by inflate_sync().
                 */
                if (flush == Z_FULL_FLUSH) {
                    CLEAR_HASH(s);             /* forget history */
                }
            }
            flush_pending(strm);
            if (strm->avail_out == 0) {
              s->last_flush = -1; /* avoid BUF_ERROR at next call, see above */
              return Z_OK;
            }
        }
    }
    Assert(strm->avail_out > 0, "bug2");

    if (flush != Z_FINISH) return Z_OK;
    if (s->noheader) return Z_STREAM_END;

    /* Write the zlib trailer (adler32) */
    putShortMSB(s, (uInt)(strm->adler >> 16));
    putShortMSB(s, (uInt)(strm->adler & 0xffff));
    flush_pending(strm);
    /* If avail_out is zero, the application will call deflate again
     * to flush the rest.
     */
    s->noheader = -1; /* write the trailer only once! */
    return s->pending != 0 ? Z_OK : Z_STREAM_END;
}
1189
1190 /* ========================================================================= */
1191 int ZEXPORT deflateEnd (strm)
1192 z_streamp strm;
1193 {
1194 deflate_state* s;
1195 int status;
1196
1197 if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
1198
1199 s = (deflate_state*)strm->state;
1200 status = s->status;
1201 if (status != INIT_STATE && status != BUSY_STATE &&
1202 status != FINISH_STATE) {
1203 return Z_STREAM_ERROR;
1204 }
1205
1206 /* Deallocate in reverse order of allocations: */
1207 TRY_FREE(strm, s->pending_buf);
1208 TRY_FREE(strm, s->head);
1209 TRY_FREE(strm, s->prev);
1210 TRY_FREE(strm, s->window);
1211
1212 ZFREE(strm, s);
1213 strm->state = Z_NULL;
1214
1215 return status == BUSY_STATE ? Z_DATA_ERROR : Z_OK;
1216 }
1217
/* =========================================================================
 * Copy the source state to the destination state.
 * To simplify the source, this is not supported for 16-bit MSDOS (which
 * doesn't have enough memory anyway to duplicate compression states).
 *
 * Allocates a fresh deflate_state plus window/prev/head/pending buffers
 * for dest, then deep-copies their contents from source. Returns Z_OK,
 * Z_MEM_ERROR if any allocation fails (dest is cleaned up via deflateEnd),
 * or Z_STREAM_ERROR on invalid arguments.
 */
int ZEXPORT deflateCopy (dest, source)
    z_streamp dest;
    z_streamp source;
{
#ifdef MAXSEG_64K
    return Z_STREAM_ERROR;
#else
    deflate_state *ds;
    deflate_state *ss;
    ushf *overlay;


    if (source == Z_NULL || dest == Z_NULL || source->state == Z_NULL) {
        return Z_STREAM_ERROR;
    }

    ss = (deflate_state*)source->state;

    /* Shallow-copy the public stream struct first. */
    *dest = *source;

    ds = (deflate_state *) ZALLOC(dest, 1, sizeof(deflate_state));
    if (ds == Z_NULL) return Z_MEM_ERROR;
    dest->state = (struct internal_state FAR *) ds;
    /* Shallow-copy the internal state; buffer pointers are fixed up below. */
    *ds = *ss;
    ds->strm = dest;

    ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
    ds->prev = (Posf *) ZALLOC(dest, ds->w_size, sizeof(Pos));
    ds->head = (Posf *) ZALLOC(dest, ds->hash_size, sizeof(Pos));
    overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof(ush)+2);
    ds->pending_buf = (uchf *) overlay;

    if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
        ds->pending_buf == Z_NULL) {
        deflateEnd (dest);
        return Z_MEM_ERROR;
    }
    /* following zmemcpy do not work for 16-bit MSDOS */
    zmemcpy(ds->window, ss->window, ds->w_size * 2 * sizeof(Byte));
    zmemcpy(ds->prev, ss->prev, ds->w_size * sizeof(Pos));
    zmemcpy(ds->head, ss->head, ds->hash_size * sizeof(Pos));
    zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);

    /* Rebase the interior pointers so they point into dest's buffers at
     * the same offsets they had in source's buffers.
     */
    ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
    ds->d_buf = overlay + ds->lit_bufsize/sizeof(ush);
    ds->l_buf = ds->pending_buf + (1+sizeof(ush))*ds->lit_bufsize;

    /* The tree descriptors must reference dest's own tree arrays. */
    ds->l_desc.dyn_tree = ds->dyn_ltree;
    ds->d_desc.dyn_tree = ds->dyn_dtree;
    ds->bl_desc.dyn_tree = ds->bl_tree;

    return Z_OK;
#endif
}
1277
1278 /* ===========================================================================
1279 * Read a new buffer from the current input stream, update the adler32
1280 * and total number of bytes read. All deflate() input goes through
1281 * this function so some applications may wish to modify it to avoid
1282 * allocating a large strm->next_in buffer and copying from it.
1283 * (See also flush_pending()).
1284 */
1285 local int read_buf(strm, buf, size)
1286 z_streamp strm;
1287 Bytef *buf;
1288 unsigned size;
1289 {
1290 unsigned len = strm->avail_in;
1291
1292 if (len > size) len = size;
1293 if (len == 0) return 0;
1294
1295 strm->avail_in -= len;
1296
1297 if (!((deflate_state*)strm->state)->noheader) {
1298 strm->adler = adler32(strm->adler, strm->next_in, len);
1299 }
1300 zmemcpy(buf, strm->next_in, len);
1301 strm->next_in += len;
1302 strm->total_in += len;
1303
1304 return (int)len;
1305 }
1306
1307 /* ===========================================================================
1308 * Initialize the "longest match" routines for a new zlib stream
1309 */
1310 local void lm_init (s)
1311 deflate_state *s;
1312 {
1313 s->window_size = (ulg)2L*s->w_size;
1314
1315 CLEAR_HASH(s);
1316
1317 /* Set the default configuration parameters:
1318 */
1319 s->max_lazy_match = configuration_table[s->level].max_lazy;
1320 s->good_match = configuration_table[s->level].good_length;
1321 s->nice_match = configuration_table[s->level].nice_length;
1322 s->max_chain_length = configuration_table[s->level].max_chain;
1323
1324 s->strstart = 0;
1325 s->block_start = 0L;
1326 s->lookahead = 0;
1327 s->match_length = s->prev_length = MIN_MATCH-1;
1328 s->match_available = 0;
1329 s->ins_h = 0;
1330 #ifdef ASMV
1331 match_init(); /* initialize the asm code */
1332 #endif
1333 }
1334
/* ===========================================================================
 * Set match_start to the longest match starting at the given string and
 * return its length. Matches shorter or equal to prev_length are discarded,
 * in which case the result is equal to prev_length and match_start is
 * garbage.
 * IN assertions: cur_match is the head of the hash chain for the current
 * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 * OUT assertion: the match length is not greater than s->lookahead.
 */
#ifndef ASMV
/* For 80x86 and 680x0, an optimized version will be provided in match.asm or
 * match.S. The code will be functionally equivalent.
 */
#ifndef FASTEST
local uInt longest_match(s, cur_match)
    deflate_state *s;
    IPos cur_match;                             /* current match */
{
    unsigned chain_length = s->max_chain_length;/* max hash chain length */
    register Bytef *scan = s->window + s->strstart; /* current string */
    register Bytef *match;                       /* matched string */
    register int len;                           /* length of current match */
    int best_len = s->prev_length;              /* best match length so far */
    int nice_match = s->nice_match;             /* stop if match long enough */
    IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
        s->strstart - (IPos)MAX_DIST(s) : NIL;
    /* Stop when cur_match becomes <= limit. To simplify the code,
     * we prevent matches with the string of window index 0.
     */
    Posf *prev = s->prev;
    uInt wmask = s->w_mask;

#ifdef UNALIGNED_OK
    /* Compare two bytes at a time. Note: this is not always beneficial.
     * Try with and without -DUNALIGNED_OK to check.
     */
    register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
    register ush scan_start = *(ushf*)scan;
    register ush scan_end   = *(ushf*)(scan+best_len-1);
#else
    /* scan_end1/scan_end cache the two bytes a longer match must extend
     * past, giving a cheap one-compare rejection of most chain entries. */
    register Bytef *strend = s->window + s->strstart + MAX_MATCH;
    register Byte scan_end1  = scan[best_len-1];
    register Byte scan_end   = scan[best_len];
#endif

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    /* Do not waste too much time if we already have a good match: */
    if (s->prev_length >= s->good_match) {
        chain_length >>= 2;
    }
    /* Do not look for matches beyond the end of the input. This is necessary
     * to make deflate deterministic.
     */
    if ((uInt)nice_match > s->lookahead) nice_match = s->lookahead;

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    do {
        Assert(cur_match < s->strstart, "no future");
        match = s->window + cur_match;

        /* Skip to next match if the match length cannot increase
         * or if the match length is less than 2:
         */
#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
        /* This code assumes sizeof(unsigned short) == 2. Do not use
         * UNALIGNED_OK if your compiler uses a different size.
         */
        if (*(ushf*)(match+best_len-1) != scan_end ||
            *(ushf*)match != scan_start) continue;

        /* It is not necessary to compare scan[2] and match[2] since they are
         * always equal when the other bytes match, given that the hash keys
         * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
         * strstart+3, +5, ... up to strstart+257. We check for insufficient
         * lookahead only every 4th comparison; the 128th check will be made
         * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
         * necessary to put more guard bytes at the end of the window, or
         * to check more often for insufficient lookahead.
         */
        Assert(scan[2] == match[2], "scan[2]?");
        scan++, match++;
        do {
        } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 scan < strend);
        /* The funny "do {}" generates better code on most compilers */

        /* Here, scan <= window+strstart+257 */
        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
        if (*scan == *match) scan++;

        len = (MAX_MATCH - 1) - (int)(strend-scan);
        scan = strend - (MAX_MATCH-1);

#else /* UNALIGNED_OK */

        if (match[best_len]   != scan_end  ||
            match[best_len-1] != scan_end1 ||
            *match            != *scan     ||
            *++match          != scan[1])      continue;

        /* The check at best_len-1 can be removed because it will be made
         * again later. (This heuristic is not always a win.)
         * It is not necessary to compare scan[2] and match[2] since they
         * are always equal when the other bytes match, given that
         * the hash keys are equal and that HASH_BITS >= 8.
         */
        scan += 2, match++;
        Assert(*scan == *match, "match[2]?");

        /* We check for insufficient lookahead only every 8th comparison;
         * the 256th check will be made at strstart+258.
         */
        do {
        } while (*++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 scan < strend);

        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

        len = MAX_MATCH - (int)(strend - scan);
        scan = strend - MAX_MATCH;

#endif /* UNALIGNED_OK */

        if (len > best_len) {
            s->match_start = cur_match;
            best_len = len;
            if (len >= nice_match) break;
#ifdef UNALIGNED_OK
            scan_end = *(ushf*)(scan+best_len-1);
#else
            scan_end1  = scan[best_len-1];
            scan_end   = scan[best_len];
#endif
        }
    } while ((cur_match = prev[cur_match & wmask]) > limit
             && --chain_length != 0);

    /* Never report more than the available lookahead. */
    if ((uInt)best_len <= s->lookahead) return (uInt)best_len;
    return s->lookahead;
}
1486
#else /* FASTEST */
/* ---------------------------------------------------------------------------
 * Optimized version for level == 1 only: accept the first candidate match
 * on the hash chain instead of walking the chain for the best one.
 * Returns the match length (setting s->match_start), or MIN_MATCH-1 when
 * the candidate does not yield a usable match.
 */
local uInt longest_match(s, cur_match)
    deflate_state *s;
    IPos cur_match;                             /* current match */
{
    register Bytef *scan = s->window + s->strstart; /* current string */
    register Bytef *match;                       /* matched string */
    register int len;                           /* length of current match */
    register Bytef *strend = s->window + s->strstart + MAX_MATCH;

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    Assert(cur_match < s->strstart, "no future");

    match = s->window + cur_match;

    /* Return failure if the match length is less than 2:
     */
    if (match[0] != scan[0] || match[1] != scan[1]) return MIN_MATCH-1;

    /* The check at best_len-1 can be removed because it will be made
     * again later. (This heuristic is not always a win.)
     * It is not necessary to compare scan[2] and match[2] since they
     * are always equal when the other bytes match, given that
     * the hash keys are equal and that HASH_BITS >= 8.
     */
    scan += 2, match += 2;
    Assert(*scan == *match, "match[2]?");

    /* We check for insufficient lookahead only every 8th comparison;
     * the 256th check will be made at strstart+258.
     */
    do {
    } while (*++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             *++scan == *++match && *++scan == *++match &&
             scan < strend);

    Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

    len = MAX_MATCH - (int)(strend - scan);

    if (len < MIN_MATCH) return MIN_MATCH - 1;

    s->match_start = cur_match;
    /* Never report more than the available lookahead. */
    return len <= s->lookahead ? len : s->lookahead;
}
#endif /* FASTEST */
#endif /* ASMV */
1545
#ifdef DEBUG_ZLIB
/* ===========================================================================
 * Check that the match at match_start is indeed a match.
 * Debug-only sanity check: compares the matched window span byte-for-byte
 * and aborts via z_error() on a mismatch. Compiled out (to a no-op macro)
 * unless DEBUG_ZLIB is defined.
 */
local void check_match(s, start, match, length)
    deflate_state *s;
    IPos start, match;
    int length;
{
    /* check that the match is indeed a match */
    if (zmemcmp(s->window + match,
                s->window + start, length) != EQUAL) {
        fprintf(stderr, " start %u, match %u, length %d\n",
		start, match, length);
        do {
	    fprintf(stderr, "%c%c", s->window[match++], s->window[start++]);
	} while (--length != 0);
        z_error("invalid match");
    }
    if (z_verbose > 1) {
        fprintf(stderr,"\\[%d,%d]", start-match, length);
        do { putc(s->window[start++], stderr); } while (--length != 0);
    }
}
#else
# define check_match(s, start, match, length)
#endif
1573
/* ===========================================================================
 * Fill the window when the lookahead becomes insufficient.
 * Updates strstart and lookahead.
 *
 * IN assertion: lookahead < MIN_LOOKAHEAD
 * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
 *    At least one byte has been read, or avail_in == 0; reads are
 *    performed for at least two bytes (required for the zip translate_eol
 *    option -- not supported here).
 *
 * When the window fills up (strstart >= wsize+MAX_DIST), the upper half
 * is slid down to the lower half and the hash/prev tables are rebased so
 * stored positions remain valid.
 */
local void fill_window(s)
    deflate_state *s;
{
    register unsigned n, m;
    register Posf *p;
    unsigned more;    /* Amount of free space at the end of the window. */
    uInt wsize = s->w_size;

    do {
        more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);

        /* Deal with !@#$% 64K limit: */
        if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
            more = wsize;

        } else if (more == (unsigned)(-1)) {
            /* Very unlikely, but possible on 16 bit machine if strstart == 0
             * and lookahead == 1 (input done one byte at time)
             */
            more--;

        /* If the window is almost full and there is insufficient lookahead,
         * move the upper half to the lower one to make room in the upper half.
         */
        } else if (s->strstart >= wsize+MAX_DIST(s)) {

            zmemcpy(s->window, s->window+wsize, (unsigned)wsize);
            s->match_start -= wsize;
            s->strstart    -= wsize; /* we now have strstart >= MAX_DIST */
            s->block_start -= (long) wsize;

            /* Slide the hash table (could be avoided with 32 bit values
               at the expense of memory usage). We slide even when level == 0
               to keep the hash table consistent if we switch back to level > 0
               later. (Using level 0 permanently is not an optimal usage of
               zlib, so we don't care about this pathological case.)
             */
            n = s->hash_size;
            p = &s->head[n];
            do {
                m = *--p;
                /* Entries older than the slid-out half become NIL. */
                *p = (Pos)(m >= wsize ? m-wsize : NIL);
            } while (--n);

            n = wsize;
#ifndef FASTEST
            p = &s->prev[n];
            do {
                m = *--p;
                *p = (Pos)(m >= wsize ? m-wsize : NIL);
                /* If n is not on any hash chain, prev[n] is garbage but
                 * its value will never be used.
                 */
            } while (--n);
#endif
            more += wsize;
        }
        if (s->strm->avail_in == 0) return;

        /* If there was no sliding:
         *    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
         *    more == window_size - lookahead - strstart
         * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
         * => more >= window_size - 2*WSIZE + 2
         * In the BIG_MEM or MMAP case (not yet supported),
         *   window_size == input_size + MIN_LOOKAHEAD  &&
         *   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
         * Otherwise, window_size == 2*WSIZE so more >= 2.
         * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
         */
        Assert(more >= 2, "more < 2");

        n = read_buf(s->strm, s->window + s->strstart + s->lookahead, more);
        s->lookahead += n;

        /* Initialize the hash value now that we have some input: */
        if (s->lookahead >= MIN_MATCH) {
            s->ins_h = s->window[s->strstart];
            UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
#if MIN_MATCH != 3
            Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
        }
        /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
         * but this is not important since only literal bytes will be emitted.
         */

    } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
}
1673
/* ===========================================================================
 * Flush the current block, with given end-of-file flag.
 * IN assertion: strstart is set to the end of the current match.
 * Emits everything between block_start and strstart via _tr_flush_block
 * (a NULL buffer pointer if block_start went negative after a slide),
 * then pushes the pending output toward the application.
 */
#define FLUSH_BLOCK_ONLY(s, eof) { \
   _tr_flush_block(s, (s->block_start >= 0L ? \
                   (charf *)&s->window[(unsigned)s->block_start] : \
                   (charf *)Z_NULL), \
		(ulg)((long)s->strstart - s->block_start), \
		(eof)); \
   s->block_start = s->strstart; \
   flush_pending(s->strm); \
   Tracev((stderr,"[FLUSH]")); \
}

/* Same but force premature exit if necessary.
 * NOTE: expands to a `return` from the enclosing compress function when
 * the output buffer fills up.
 */
#define FLUSH_BLOCK(s, eof) { \
   FLUSH_BLOCK_ONLY(s, eof); \
   if (s->strm->avail_out == 0) return (eof) ? finish_started : need_more; \
}
1694
/* ===========================================================================
 * Copy without compression as much as possible from the input stream, return
 * the current block state.
 * This function does not insert new strings in the dictionary since
 * uncompressible data is probably not useful. This function is used
 * only for the level=0 compression option.
 * NOTE: this function should be optimized to avoid extra copying from
 * window to pending_buf.
 */
local block_state deflate_stored(s, flush)
    deflate_state *s;
    int flush;
{
    /* Stored blocks are limited to 0xffff bytes, pending_buf is limited
     * to pending_buf_size, and each stored block has a 5 byte header:
     */
    ulg max_block_size = 0xffff;
    ulg max_start;

    if (max_block_size > s->pending_buf_size - 5) {
        max_block_size = s->pending_buf_size - 5;
    }

    /* Copy as much as possible from input to output: */
    for (;;) {
        /* Fill the window as much as possible: */
        if (s->lookahead <= 1) {

            Assert(s->strstart < s->w_size+MAX_DIST(s) ||
		   s->block_start >= (long)s->w_size, "slide too late");

            fill_window(s);
            if (s->lookahead == 0 && flush == Z_NO_FLUSH) return need_more;

            if (s->lookahead == 0) break; /* flush the current block */
        }
	Assert(s->block_start >= 0L, "block gone");

	/* Everything read so far becomes part of the current stored block. */
	s->strstart += s->lookahead;
	s->lookahead = 0;

	/* Emit a stored block if pending_buf will be full: */
 	max_start = s->block_start + max_block_size;
        if (s->strstart == 0 || (ulg)s->strstart >= max_start) {
	    /* strstart == 0 is possible when wraparound on 16-bit machine */
	    s->lookahead = (uInt)(s->strstart - max_start);
	    s->strstart = (uInt)max_start;
	    /* NOTE: FLUSH_BLOCK may return from this function if the
	     * output buffer fills up. */
	    FLUSH_BLOCK(s, 0);
	}
	/* Flush if we may have to slide, otherwise block_start may become
         * negative and the data will be gone:
         */
        if (s->strstart - (uInt)s->block_start >= MAX_DIST(s)) {
            FLUSH_BLOCK(s, 0);
	}
    }
    FLUSH_BLOCK(s, flush == Z_FINISH);
    return flush == Z_FINISH ? finish_done : block_done;
}
1754
/* ===========================================================================
 * Compress as much as possible from the input stream, return the current
 * block state.
 * This function does not perform lazy evaluation of matches and inserts
 * new strings in the dictionary only for unmatched strings or for short
 * matches. It is used only for the fast compression options.
 */
local block_state deflate_fast(s, flush)
    deflate_state *s;
    int flush;
{
    IPos hash_head = NIL; /* head of the hash chain */
    int bflush;           /* set if current block must be flushed */

    for (;;) {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need MAX_MATCH bytes
         * for the next match, plus MIN_MATCH bytes to insert the
         * string following the next match.
         */
        if (s->lookahead < MIN_LOOKAHEAD) {
            fill_window(s);
            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
	        return need_more;
	    }
            if (s->lookahead == 0) break; /* flush the current block */
        }

        /* Insert the string window[strstart .. strstart+2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        if (s->lookahead >= MIN_MATCH) {
            INSERT_STRING(s, s->strstart, hash_head);
        }

        /* Find the longest match, discarding those <= prev_length.
         * At this point we have always match_length < MIN_MATCH
         */
        if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
            /* To simplify the code, we prevent matches with the string
             * of window index 0 (in particular we have to avoid a match
             * of the string with itself at the start of the input file).
             */
            if (s->strategy != Z_HUFFMAN_ONLY) {
                s->match_length = longest_match (s, hash_head);
            }
            /* longest_match() sets match_start */
        }
        if (s->match_length >= MIN_MATCH) {
            check_match(s, s->strstart, s->match_start, s->match_length);

            /* Emit a (distance, length) pair; bflush is set when the
             * literal/length buffer fills and the block must be flushed. */
            _tr_tally_dist(s, s->strstart - s->match_start,
                           s->match_length - MIN_MATCH, bflush);

            s->lookahead -= s->match_length;

            /* Insert new strings in the hash table only if the match length
             * is not too large. This saves time but degrades compression.
             */
#ifndef FASTEST
            if (s->match_length <= s->max_insert_length &&
                s->lookahead >= MIN_MATCH) {
                s->match_length--; /* string at strstart already in hash table */
                do {
                    s->strstart++;
                    INSERT_STRING(s, s->strstart, hash_head);
                    /* strstart never exceeds WSIZE-MAX_MATCH, so there are
                     * always MIN_MATCH bytes ahead.
                     */
                } while (--s->match_length != 0);
                s->strstart++;
            } else
#endif
	    {
                s->strstart += s->match_length;
                s->match_length = 0;
                s->ins_h = s->window[s->strstart];
                UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
#if MIN_MATCH != 3
                Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
                /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
                 * matter since it will be recomputed at next deflate call.
                 */
            }
        } else {
            /* No match, output a literal byte */
            Tracevv((stderr,"%c", s->window[s->strstart]));
            _tr_tally_lit (s, s->window[s->strstart], bflush);
            s->lookahead--;
            s->strstart++;
        }
        if (bflush) FLUSH_BLOCK(s, 0);
    }
    FLUSH_BLOCK(s, flush == Z_FINISH);
    return flush == Z_FINISH ? finish_done : block_done;
}
1852
1853 /* ===========================================================================
1854 * Same as above, but achieves better compression. We use a lazy
1855 * evaluation for matches: a match is finally adopted only if there is
1856 * no better match at the next window position.
1857 */
local block_state deflate_slow(s, flush)
    deflate_state *s;
    int flush;            /* current flush mode (Z_NO_FLUSH .. Z_FINISH) */
{
    IPos hash_head = NIL; /* head of hash chain */
    int bflush;           /* set if current block must be flushed;
                           * written by the _tr_tally_* macros below */

    /* Process the input block. */
    for (;;) {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need MAX_MATCH bytes
         * for the next match, plus MIN_MATCH bytes to insert the
         * string following the next match.
         */
        if (s->lookahead < MIN_LOOKAHEAD) {
            fill_window(s);
            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
                return need_more; /* not enough input; resume on next call */
            }
            if (s->lookahead == 0) break; /* flush the current block */
        }

        /* Insert the string window[strstart .. strstart+2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        if (s->lookahead >= MIN_MATCH) {
            INSERT_STRING(s, s->strstart, hash_head);
        }

        /* Find the longest match, discarding those <= prev_length.
         * The previous iteration's match becomes the candidate to beat.
         */
        s->prev_length = s->match_length, s->prev_match = s->match_start;
        s->match_length = MIN_MATCH-1;

        if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
            s->strstart - hash_head <= MAX_DIST(s)) {
            /* To simplify the code, we prevent matches with the string
             * of window index 0 (in particular we have to avoid a match
             * of the string with itself at the start of the input file).
             */
            if (s->strategy != Z_HUFFMAN_ONLY) {
                s->match_length = longest_match (s, hash_head);
            }
            /* longest_match() sets match_start */

            /* Discard short matches that are too far back (expensive to
             * encode) or any short match in Z_FILTERED mode. */
            if (s->match_length <= 5 && (s->strategy == Z_FILTERED ||
                 (s->match_length == MIN_MATCH &&
                  s->strstart - s->match_start > TOO_FAR))) {

                /* If prev_match is also MIN_MATCH, match_start is garbage
                 * but we will ignore the current match anyway.
                 */
                s->match_length = MIN_MATCH-1;
            }
        }
        /* If there was a match at the previous step and the current
         * match is not better, output the previous match:
         */
        if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {
            uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
            /* Do not insert strings in hash table beyond this. */

            check_match(s, s->strstart-1, s->prev_match, s->prev_length);

            _tr_tally_dist(s, s->strstart -1 - s->prev_match,
                           s->prev_length - MIN_MATCH, bflush);

            /* Insert in hash table all strings up to the end of the match.
             * strstart-1 and strstart are already inserted. If there is not
             * enough lookahead, the last two strings are not inserted in
             * the hash table.
             */
            s->lookahead -= s->prev_length-1;
            s->prev_length -= 2;
            do {
                if (++s->strstart <= max_insert) {
                    INSERT_STRING(s, s->strstart, hash_head);
                }
            } while (--s->prev_length != 0);
            s->match_available = 0;
            s->match_length = MIN_MATCH-1;
            s->strstart++;

            if (bflush) FLUSH_BLOCK(s, 0);

        } else if (s->match_available) {
            /* If there was no match at the previous position, output a
             * single literal. If there was a match but the current match
             * is longer, truncate the previous match to a single literal.
             */
            Tracevv((stderr,"%c", s->window[s->strstart-1]));
            _tr_tally_lit(s, s->window[s->strstart-1], bflush);
            if (bflush) {
                FLUSH_BLOCK_ONLY(s, 0);
            }
            s->strstart++;
            s->lookahead--;
            if (s->strm->avail_out == 0) return need_more;
        } else {
            /* There is no previous match to compare with, wait for
             * the next step to decide.
             */
            s->match_available = 1;
            s->strstart++;
            s->lookahead--;
        }
    }
    Assert (flush != Z_NO_FLUSH, "no flush?");
    /* Emit any literal still deferred from the last iteration. */
    if (s->match_available) {
        Tracevv((stderr,"%c", s->window[s->strstart-1]));
        _tr_tally_lit(s, s->window[s->strstart-1], bflush);
        s->match_available = 0;
    }
    FLUSH_BLOCK(s, flush == Z_FINISH);
    return flush == Z_FINISH ? finish_done : block_done;
}
1974 /* --- deflate.c */
1975
1976 /* +++ trees.c */
1977 /* trees.c -- output deflated data using Huffman coding
1978 * Copyright (C) 1995-2002 Jean-loup Gailly
1979 * For conditions of distribution and use, see copyright notice in zlib.h
1980 */
1981
1982 /*
1983 * ALGORITHM
1984 *
1985 * The "deflation" process uses several Huffman trees. The more
1986 * common source values are represented by shorter bit sequences.
1987 *
1988 * Each code tree is stored in a compressed form which is itself
1989 * a Huffman encoding of the lengths of all the code strings (in
1990 * ascending order by source values). The actual code strings are
1991 * reconstructed from the lengths in the inflate process, as described
1992 * in the deflate specification.
1993 *
1994 * REFERENCES
1995 *
1996 * Deutsch, L.P.,"'Deflate' Compressed Data Format Specification".
1997 * Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc
1998 *
1999 * Storer, James A.
2000 * Data Compression: Methods and Theory, pp. 49-50.
2001 * Computer Science Press, 1988. ISBN 0-7167-8156-5.
2002 *
2003 * Sedgewick, R.
2004 * Algorithms, p290.
2005 * Addison-Wesley, 1983. ISBN 0-201-06672-6.
2006 */
2007
2008 /* @(#) $Id: zlib.c,v 1.10 2004/07/29 19:17:20 lindak Exp $ */
2009
2010 /* #define GEN_TREES_H */
2011
2012 /* #include "deflate.h" */
2013
2014 #ifdef DEBUG_ZLIB
2015 # include <ctype.h>
2016 #endif
2017
2018 /* ===========================================================================
2019 * Constants
2020 */
2021
#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define END_BLOCK 256
/* end of block literal code */

#define REP_3_6      16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */

#define REPZ_3_10    17
/* repeat a zero length 3-10 times  (3 bits of repeat count) */

#define REPZ_11_138  18
/* repeat a zero length 11-138 times  (7 bits of repeat count) */

local const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */
   = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};

local const int extra_dbits[D_CODES] /* extra bits for each distance code */
   = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};

local const int extra_blbits[BL_CODES]/* extra bits for each bit length code */
   = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};

local const uch bl_order[BL_CODES]
   = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};
/* The lengths of the bit length codes are sent in order of decreasing
 * probability, to avoid transmitting the lengths for unused bit length codes.
 */

#define Buf_size (8 * 2*sizeof(char))
/* Number of bits used within bi_buf. (bi_buf might be implemented on
 * more than 16 bits on some systems.)  Evaluates to 16, since
 * sizeof(char) == 1 by definition.
 */
2056
2057 /* ===========================================================================
2058 * Local data. These are initialized only once.
2059 */
2060
2061 #define DIST_CODE_LEN 512 /* see definition of array dist_code below */
2062
2063 #if defined(GEN_TREES_H) || !defined(STDC)
2064 /* non ANSI compilers may not accept trees.h */
2065
2066 local ct_data *static_ltree = Z_NULL;
2067 /* The static literal tree. Since the bit lengths are imposed, there is no
2068 * need for the L_CODES extra codes used during heap construction. However
2069 * The codes 286 and 287 are needed to build a canonical tree (see _tr_init
2070 * below).
2071 */
2072
2073 local ct_data *static_dtree = Z_NULL;
2074 /* The static distance tree. (Actually a trivial tree since all codes use
2075 * 5 bits.)
2076 */
2077
2078 uch *_dist_code = Z_NULL;
2079 /* Distance codes. The first 256 values correspond to the distances
2080 * 3 .. 258, the last 256 values correspond to the top 8 bits of
2081 * the 15 bit distances.
2082 */
2083
2084 uch *_length_code = Z_NULL;
2085 /* length code for each normalized match length (0 == MIN_MATCH) */
2086
2087 local int *base_length = Z_NULL;
2088 /* First normalized length for each code (0 = MIN_MATCH) */
2089
2090 local int *base_dist = Z_NULL;
2091 /* First normalized distance for each code (0 = distance of 1) */
2092
2093 #else
2094 /* +++ trees.h */
2095 /* header created automatically with -DGEN_TREES_H */
2096
2097 local const ct_data static_ltree[L_CODES+2] = {
2098 {{ 12},{ 8}}, {{140},{ 8}}, {{ 76},{ 8}}, {{204},{ 8}}, {{ 44},{ 8}},
2099 {{172},{ 8}}, {{108},{ 8}}, {{236},{ 8}}, {{ 28},{ 8}}, {{156},{ 8}},
2100 {{ 92},{ 8}}, {{220},{ 8}}, {{ 60},{ 8}}, {{188},{ 8}}, {{124},{ 8}},
2101 {{252},{ 8}}, {{ 2},{ 8}}, {{130},{ 8}}, {{ 66},{ 8}}, {{194},{ 8}},
2102 {{ 34},{ 8}}, {{162},{ 8}}, {{ 98},{ 8}}, {{226},{ 8}}, {{ 18},{ 8}},
2103 {{146},{ 8}}, {{ 82},{ 8}}, {{210},{ 8}}, {{ 50},{ 8}}, {{178},{ 8}},
2104 {{114},{ 8}}, {{242},{ 8}}, {{ 10},{ 8}}, {{138},{ 8}}, {{ 74},{ 8}},
2105 {{202},{ 8}}, {{ 42},{ 8}}, {{170},{ 8}}, {{106},{ 8}}, {{234},{ 8}},
2106 {{ 26},{ 8}}, {{154},{ 8}}, {{ 90},{ 8}}, {{218},{ 8}}, {{ 58},{ 8}},
2107 {{186},{ 8}}, {{122},{ 8}}, {{250},{ 8}}, {{ 6},{ 8}}, {{134},{ 8}},
2108 {{ 70},{ 8}}, {{198},{ 8}}, {{ 38},{ 8}}, {{166},{ 8}}, {{102},{ 8}},
2109 {{230},{ 8}}, {{ 22},{ 8}}, {{150},{ 8}}, {{ 86},{ 8}}, {{214},{ 8}},
2110 {{ 54},{ 8}}, {{182},{ 8}}, {{118},{ 8}}, {{246},{ 8}}, {{ 14},{ 8}},
2111 {{142},{ 8}}, {{ 78},{ 8}}, {{206},{ 8}}, {{ 46},{ 8}}, {{174},{ 8}},
2112 {{110},{ 8}}, {{238},{ 8}}, {{ 30},{ 8}}, {{158},{ 8}}, {{ 94},{ 8}},
2113 {{222},{ 8}}, {{ 62},{ 8}}, {{190},{ 8}}, {{126},{ 8}}, {{254},{ 8}},
2114 {{ 1},{ 8}}, {{129},{ 8}}, {{ 65},{ 8}}, {{193},{ 8}}, {{ 33},{ 8}},
2115 {{161},{ 8}}, {{ 97},{ 8}}, {{225},{ 8}}, {{ 17},{ 8}}, {{145},{ 8}},
2116 {{ 81},{ 8}}, {{209},{ 8}}, {{ 49},{ 8}}, {{177},{ 8}}, {{113},{ 8}},
2117 {{241},{ 8}}, {{ 9},{ 8}}, {{137},{ 8}}, {{ 73},{ 8}}, {{201},{ 8}},
2118 {{ 41},{ 8}}, {{169},{ 8}}, {{105},{ 8}}, {{233},{ 8}}, {{ 25},{ 8}},
2119 {{153},{ 8}}, {{ 89},{ 8}}, {{217},{ 8}}, {{ 57},{ 8}}, {{185},{ 8}},
2120 {{121},{ 8}}, {{249},{ 8}}, {{ 5},{ 8}}, {{133},{ 8}}, {{ 69},{ 8}},
2121 {{197},{ 8}}, {{ 37},{ 8}}, {{165},{ 8}}, {{101},{ 8}}, {{229},{ 8}},
2122 {{ 21},{ 8}}, {{149},{ 8}}, {{ 85},{ 8}}, {{213},{ 8}}, {{ 53},{ 8}},
2123 {{181},{ 8}}, {{117},{ 8}}, {{245},{ 8}}, {{ 13},{ 8}}, {{141},{ 8}},
2124 {{ 77},{ 8}}, {{205},{ 8}}, {{ 45},{ 8}}, {{173},{ 8}}, {{109},{ 8}},
2125 {{237},{ 8}}, {{ 29},{ 8}}, {{157},{ 8}}, {{ 93},{ 8}}, {{221},{ 8}},
2126 {{ 61},{ 8}}, {{189},{ 8}}, {{125},{ 8}}, {{253},{ 8}}, {{ 19},{ 9}},
2127 {{275},{ 9}}, {{147},{ 9}}, {{403},{ 9}}, {{ 83},{ 9}}, {{339},{ 9}},
2128 {{211},{ 9}}, {{467},{ 9}}, {{ 51},{ 9}}, {{307},{ 9}}, {{179},{ 9}},
2129 {{435},{ 9}}, {{115},{ 9}}, {{371},{ 9}}, {{243},{ 9}}, {{499},{ 9}},
2130 {{ 11},{ 9}}, {{267},{ 9}}, {{139},{ 9}}, {{395},{ 9}}, {{ 75},{ 9}},
2131 {{331},{ 9}}, {{203},{ 9}}, {{459},{ 9}}, {{ 43},{ 9}}, {{299},{ 9}},
2132 {{171},{ 9}}, {{427},{ 9}}, {{107},{ 9}}, {{363},{ 9}}, {{235},{ 9}},
2133 {{491},{ 9}}, {{ 27},{ 9}}, {{283},{ 9}}, {{155},{ 9}}, {{411},{ 9}},
2134 {{ 91},{ 9}}, {{347},{ 9}}, {{219},{ 9}}, {{475},{ 9}}, {{ 59},{ 9}},
2135 {{315},{ 9}}, {{187},{ 9}}, {{443},{ 9}}, {{123},{ 9}}, {{379},{ 9}},
2136 {{251},{ 9}}, {{507},{ 9}}, {{ 7},{ 9}}, {{263},{ 9}}, {{135},{ 9}},
2137 {{391},{ 9}}, {{ 71},{ 9}}, {{327},{ 9}}, {{199},{ 9}}, {{455},{ 9}},
2138 {{ 39},{ 9}}, {{295},{ 9}}, {{167},{ 9}}, {{423},{ 9}}, {{103},{ 9}},
2139 {{359},{ 9}}, {{231},{ 9}}, {{487},{ 9}}, {{ 23},{ 9}}, {{279},{ 9}},
2140 {{151},{ 9}}, {{407},{ 9}}, {{ 87},{ 9}}, {{343},{ 9}}, {{215},{ 9}},
2141 {{471},{ 9}}, {{ 55},{ 9}}, {{311},{ 9}}, {{183},{ 9}}, {{439},{ 9}},
2142 {{119},{ 9}}, {{375},{ 9}}, {{247},{ 9}}, {{503},{ 9}}, {{ 15},{ 9}},
2143 {{271},{ 9}}, {{143},{ 9}}, {{399},{ 9}}, {{ 79},{ 9}}, {{335},{ 9}},
2144 {{207},{ 9}}, {{463},{ 9}}, {{ 47},{ 9}}, {{303},{ 9}}, {{175},{ 9}},
2145 {{431},{ 9}}, {{111},{ 9}}, {{367},{ 9}}, {{239},{ 9}}, {{495},{ 9}},
2146 {{ 31},{ 9}}, {{287},{ 9}}, {{159},{ 9}}, {{415},{ 9}}, {{ 95},{ 9}},
2147 {{351},{ 9}}, {{223},{ 9}}, {{479},{ 9}}, {{ 63},{ 9}}, {{319},{ 9}},
2148 {{191},{ 9}}, {{447},{ 9}}, {{127},{ 9}}, {{383},{ 9}}, {{255},{ 9}},
2149 {{511},{ 9}}, {{ 0},{ 7}}, {{ 64},{ 7}}, {{ 32},{ 7}}, {{ 96},{ 7}},
2150 {{ 16},{ 7}}, {{ 80},{ 7}}, {{ 48},{ 7}}, {{112},{ 7}}, {{ 8},{ 7}},
2151 {{ 72},{ 7}}, {{ 40},{ 7}}, {{104},{ 7}}, {{ 24},{ 7}}, {{ 88},{ 7}},
2152 {{ 56},{ 7}}, {{120},{ 7}}, {{ 4},{ 7}}, {{ 68},{ 7}}, {{ 36},{ 7}},
2153 {{100},{ 7}}, {{ 20},{ 7}}, {{ 84},{ 7}}, {{ 52},{ 7}}, {{116},{ 7}},
2154 {{ 3},{ 8}}, {{131},{ 8}}, {{ 67},{ 8}}, {{195},{ 8}}, {{ 35},{ 8}},
2155 {{163},{ 8}}, {{ 99},{ 8}}, {{227},{ 8}}
2156 };
2157
2158 local const ct_data static_dtree[D_CODES] = {
2159 {{ 0},{ 5}}, {{16},{ 5}}, {{ 8},{ 5}}, {{24},{ 5}}, {{ 4},{ 5}},
2160 {{20},{ 5}}, {{12},{ 5}}, {{28},{ 5}}, {{ 2},{ 5}}, {{18},{ 5}},
2161 {{10},{ 5}}, {{26},{ 5}}, {{ 6},{ 5}}, {{22},{ 5}}, {{14},{ 5}},
2162 {{30},{ 5}}, {{ 1},{ 5}}, {{17},{ 5}}, {{ 9},{ 5}}, {{25},{ 5}},
2163 {{ 5},{ 5}}, {{21},{ 5}}, {{13},{ 5}}, {{29},{ 5}}, {{ 3},{ 5}},
2164 {{19},{ 5}}, {{11},{ 5}}, {{27},{ 5}}, {{ 7},{ 5}}, {{23},{ 5}}
2165 };
2166
2167 const uch _dist_code[DIST_CODE_LEN] = {
2168 0, 1, 2, 3, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8,
2169 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10,
2170 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11,
2171 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
2172 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13,
2173 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13,
2174 13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
2175 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
2176 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14,
2177 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15,
2178 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
2179 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
2180 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 0, 0, 16, 17,
2181 18, 18, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22,
2182 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
2183 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
2184 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
2185 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27,
2186 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
2187 27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
2188 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
2189 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28,
2190 28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
2191 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
2192 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29,
2193 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29
2194 };
2195
2196 const uch _length_code[MAX_MATCH-MIN_MATCH+1]= {
2197 0, 1, 2, 3, 4, 5, 6, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 12, 12,
2198 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16,
2199 17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19,
2200 19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20,
2201 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22,
2202 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23,
2203 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
2204 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
2205 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25,
2206 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26,
2207 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26,
2208 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27,
2209 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28
2210 };
2211
2212 local const int base_length[LENGTH_CODES] = {
2213 0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56,
2214 64, 80, 96, 112, 128, 160, 192, 224, 0
2215 };
2216
2217 local const int base_dist[D_CODES] = {
2218 0, 1, 2, 3, 4, 6, 8, 12, 16, 24,
2219 32, 48, 64, 96, 128, 192, 256, 384, 512, 768,
2220 1024, 1536, 2048, 3072, 4096, 6144, 8192, 12288, 16384, 24576
2221 };
2222
2223 /* --- trees.h */
2224 #endif /* GEN_TREES_H */
2225
/* Read-only description of one static (fixed) Huffman tree; one instance
 * each for the literal/length, distance and bit-length trees below. */
struct static_tree_desc_s {
    const ct_data *static_tree;  /* static tree or NULL */
    const intf *extra_bits;      /* extra bits for each code or NULL */
    int     extra_base;          /* base index for extra_bits */
    int     elems;               /* max number of elements in the tree */
    int     max_length;          /* max bit length for the codes */
};
2233
2234 local static_tree_desc static_l_desc =
2235 {NULL, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};
2236
2237 local static_tree_desc static_d_desc =
2238 {NULL, extra_dbits, 0, D_CODES, MAX_BITS};
2239
2240 local static_tree_desc static_bl_desc =
2241 {(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS};
2242
2243 /* ===========================================================================
2244 * Local (static) routines in this file.
2245 */
2246
2247 local int tr_static_init OF((z_streamp z));
2248 local void init_block OF((deflate_state *s));
2249 local void pqdownheap OF((deflate_state *s, ct_data *tree, int k));
2250 local void gen_bitlen OF((deflate_state *s, tree_desc *desc));
2251 local void gen_codes OF((ct_data *tree, int max_code, ushf *bl_count));
2252 local void build_tree OF((deflate_state *s, tree_desc *desc));
2253 local void scan_tree OF((deflate_state *s, ct_data *tree, int max_code));
2254 local void send_tree OF((deflate_state *s, ct_data *tree, int max_code));
2255 local int build_bl_tree OF((deflate_state *s));
2256 local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
2257 int blcodes));
2258 local void compress_block OF((deflate_state *s, ct_data *ltree,
2259 ct_data *dtree));
2260 local void set_data_type OF((deflate_state *s));
2261 local unsigned bi_reverse OF((unsigned value, int length));
2262 local void bi_windup OF((deflate_state *s));
2263 local void bi_flush OF((deflate_state *s));
2264 local void copy_block OF((deflate_state *s, charf *buf, unsigned len,
2265 int header));
2266
2267 #ifdef GEN_TREES_H
2268 local void gen_trees_header OF((void));
2269 #endif
2270
2271 #ifndef DEBUG_ZLIB
2272 # define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len)
2273 /* Send a code of the given tree. c and tree must not have side effects */
2274
2275 #else /* DEBUG_ZLIB */
2276 # define send_code(s, c, tree) \
2277 { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \
2278 send_bits(s, tree[c].Code, tree[c].Len); }
2279 #endif
2280
/* ===========================================================================
 * Output a short LSB first on the stream.
 * IN assertion: there is enough room in pendingBuf.
 */
#define put_short(s, w) { \
    put_byte(s, (uch)((w) & 0xff)); \
    put_byte(s, (uch)((ush)(w) >> 8)); \
}

/* ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
 * The bits are accumulated LSB-first in s->bi_buf and spilled to the
 * pending buffer via put_short() whenever 16 bits are full.
 */
#ifdef DEBUG_ZLIB
local void send_bits      OF((deflate_state *s, int value, int length));

local void send_bits(s, value, length)
    deflate_state *s;
    int value;  /* value to send */
    int length; /* number of bits */
{
    Tracevv((stderr," l %2d v %4x ", length, value));
    Assert(length > 0 && length <= 15, "invalid length");
    s->bits_sent += (ulg)length;

    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
     * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
     * unused bits in value.
     */
    if (s->bi_valid > (int)Buf_size - length) {
        s->bi_buf |= (value << s->bi_valid);
        put_short(s, s->bi_buf);
        s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);
        s->bi_valid += length - Buf_size;
    } else {
        s->bi_buf |= value << s->bi_valid;
        s->bi_valid += length;
    }
}
#else /* !DEBUG_ZLIB */

/* Non-debug variant of send_bits(), as a macro for speed; mirrors the
 * function above.  `length' is evaluated once into `len'; `value' is
 * evaluated once in whichever branch is taken. */
#define send_bits(s, value, length) \
{ int len = length;\
  if (s->bi_valid > (int)Buf_size - len) {\
    int val = value;\
    s->bi_buf |= (val << s->bi_valid);\
    put_short(s, s->bi_buf);\
    s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\
    s->bi_valid += len - Buf_size;\
  } else {\
    s->bi_buf |= (value) << s->bi_valid;\
    s->bi_valid += len;\
  }\
}
#endif /* DEBUG_ZLIB */
2336
2337
#ifndef MAX
/* Larger of a and b.  The arguments are evaluated more than once, so they
 * must not have side effects.  Each use is fully parenthesized so that
 * low-precedence argument expressions (e.g. MAX(x, y | z)) group
 * correctly; the previous `(a >= b ? a : b)' form misparsed them. */
#define MAX(a,b) (((a) >= (b)) ? (a) : (b))
#endif
2342
/* In this build the former file-scope static tables are gathered into one
 * heap-allocated struct, filled in lazily by tr_static_init(). */
typedef struct {
    ct_data static_ltree[L_CODES+2];          /* static literal/length tree */
    ct_data static_dtree[D_CODES];            /* static distance tree */
    uch _dist_code[DIST_CODE_LEN];            /* distance -> dist-code map */
    uch _length_code[MAX_MATCH-MIN_MATCH+1];  /* match length -> length-code map */
    int base_length[LENGTH_CODES];            /* first length of each length code */
    int base_dist[D_CODES];                   /* first distance of each dist code */
} __used_to_be_static;

/* Single shared instance, allocated by tr_static_init().
 * NOTE(review): the double-underscore name is in the implementation-reserved
 * namespace; kept as-is since other code in this file references it. */
static __used_to_be_static *static_storage = Z_NULL;
2353
2354 /* ===========================================================================
2355 * Initialize the various 'constant' tables.
2356 */
/* Builds the length/distance code tables and the static Huffman trees in
 * static_storage (allocated from stream z's allocator).  Returns Z_OK on
 * success or Z_MEM_ERROR if the allocation fails.  Idempotent after the
 * first successful call. */
local int tr_static_init(
    z_streamp z)
{
#if defined(GEN_TREES_H) || !defined(STDC)
    static int static_init_done = 0;
    int n;        /* iterates over tree elements */
    int bits;     /* bit counter */
    int length;   /* length value */
    int code;     /* code value */
    int dist;     /* distance index */
    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    /* NOTE(review): static_init_done and static_storage are read/written
     * without synchronization; presumably callers serialize -- confirm. */
    if (static_init_done) return Z_OK;

    /* allocate storage for static structures */
    if (static_storage == Z_NULL) {
        static_storage = (__used_to_be_static*)ZALLOC(z, 1, sizeof(__used_to_be_static));
        if (static_storage == Z_NULL)
            return Z_MEM_ERROR;
    }

    /* Point the former file-scope tables at the freshly allocated storage. */
    static_ltree = static_storage->static_ltree;
    static_dtree = static_storage->static_dtree;
    _dist_code = static_storage->_dist_code;
    _length_code = static_storage->_length_code;
    base_length = static_storage->base_length;
    base_dist = static_storage->base_dist;

    /* For some embedded targets, global variables are not initialized: */
    static_l_desc.static_tree = static_ltree;
    static_l_desc.extra_bits = extra_lbits;
    static_d_desc.static_tree = static_dtree;
    static_d_desc.extra_bits = extra_dbits;
    static_bl_desc.extra_bits = extra_blbits;

    /* Initialize the mapping length (0..255) -> length code (0..28) */
    length = 0;
    for (code = 0; code < LENGTH_CODES-1; code++) {
        base_length[code] = length;
        for (n = 0; n < (1<<extra_lbits[code]); n++) {
            _length_code[length++] = (uch)code;
        }
    }
    Assert (length == 256, "tr_static_init: length != 256");
    /* Note that the length 255 (match length 258) can be represented
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
    _length_code[length-1] = (uch)code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0 ; code < 16; code++) {
        base_dist[code] = dist;
        for (n = 0; n < (1<<extra_dbits[code]); n++) {
            _dist_code[dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: dist != 256");
    dist >>= 7; /* from now on, all distances are divided by 128 */
    for ( ; code < D_CODES; code++) {
        base_dist[code] = dist << 7;
        for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {
            _dist_code[256 + dist++] = (uch)code;
        }
    }
    Assert (dist == 256, "tr_static_init: 256+dist != 512");

    /* Construct the codes of the static literal tree */
    for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;
    n = 0;
    while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;
    while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;
    while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;
    while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;
    /* Codes 286 and 287 do not exist, but we must include them in the
     * tree construction to get a canonical Huffman tree (longest code
     * all ones)
     */
    gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);

    /* The static distance tree is trivial: */
    for (n = 0; n < D_CODES; n++) {
        static_dtree[n].Len = 5;
        static_dtree[n].Code = bi_reverse((unsigned)n, 5);
    }
    static_init_done = 1;

#  ifdef GEN_TREES_H
    gen_trees_header();
#  endif
#endif /* defined(GEN_TREES_H) || !defined(STDC) */
    return Z_OK;
}
2452
2453 /* ===========================================================================
 * Generate the file trees.h describing the static trees.
2455 */
2456 #ifdef GEN_TREES_H
2457 # ifndef DEBUG_ZLIB
2458 # include <stdio.h>
2459 # endif
2460
2461 # define SEPARATOR(i, last, width) \
2462 ((i) == (last)? "\n};\n\n" : \
2463 ((i) % (width) == (width)-1 ? ",\n" : ", "))
2464
2465 void gen_trees_header()
2466 {
2467 FILE *header = fopen("trees.h", "w");
2468 int i;
2469
2470 Assert (header != NULL, "Can't open trees.h");
2471 fprintf(header,
2472 "/* header created automatically with -DGEN_TREES_H */\n\n");
2473
2474 fprintf(header, "local const ct_data static_ltree[L_CODES+2] = {\n");
2475 for (i = 0; i < L_CODES+2; i++) {
2476 fprintf(header, "{{%3u},{%3u}}%s", static_ltree[i].Code,
2477 static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5));
2478 }
2479
2480 fprintf(header, "local const ct_data static_dtree[D_CODES] = {\n");
2481 for (i = 0; i < D_CODES; i++) {
2482 fprintf(header, "{{%2u},{%2u}}%s", static_dtree[i].Code,
2483 static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5));
2484 }
2485
2486 fprintf(header, "const uch _dist_code[DIST_CODE_LEN] = {\n");
2487 for (i = 0; i < DIST_CODE_LEN; i++) {
2488 fprintf(header, "%2u%s", _dist_code[i],
2489 SEPARATOR(i, DIST_CODE_LEN-1, 20));
2490 }
2491
2492 fprintf(header, "const uch _length_code[MAX_MATCH-MIN_MATCH+1]= {\n");
2493 for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) {
2494 fprintf(header, "%2u%s", _length_code[i],
2495 SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20));
2496 }
2497
2498 fprintf(header, "local const int base_length[LENGTH_CODES] = {\n");
2499 for (i = 0; i < LENGTH_CODES; i++) {
2500 fprintf(header, "%1u%s", base_length[i],
2501 SEPARATOR(i, LENGTH_CODES-1, 20));
2502 }
2503
2504 fprintf(header, "local const int base_dist[D_CODES] = {\n");
2505 for (i = 0; i < D_CODES; i++) {
2506 fprintf(header, "%5u%s", base_dist[i],
2507 SEPARATOR(i, D_CODES-1, 10));
2508 }
2509
2510 fclose(header);
2511 }
2512 #endif /* GEN_TREES_H */
2513
2514 /* ===========================================================================
2515 * Initialize the tree data structures for a new zlib stream.
2516 */
2517 void _tr_init(s)
2518 deflate_state *s;
2519 {
2520 tr_static_init(s->strm);
2521
2522 s->l_desc.dyn_tree = s->dyn_ltree;
2523 s->l_desc.stat_desc = &static_l_desc;
2524
2525 s->d_desc.dyn_tree = s->dyn_dtree;
2526 s->d_desc.stat_desc = &static_d_desc;
2527
2528 s->bl_desc.dyn_tree = s->bl_tree;
2529 s->bl_desc.stat_desc = &static_bl_desc;
2530
2531 s->bi_buf = 0;
2532 s->bi_valid = 0;
2533 s->last_eob_len = 8; /* enough lookahead for inflate */
2534 #ifdef DEBUG_ZLIB
2535 s->compressed_len = 0L;
2536 s->bits_sent = 0L;
2537 #endif
2538
2539 /* Initialize the first block of the first file: */
2540 init_block(s);
2541 }
2542
2543 /* ===========================================================================
2544 * Initialize a new block.
2545 */
2546 local void init_block(s)
2547 deflate_state *s;
2548 {
2549 int n; /* iterates over tree elements */
2550
2551 /* Initialize the trees. */
2552 for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;
2553 for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;
2554 for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
2555
2556 s->dyn_ltree[END_BLOCK].Freq = 1;
2557 s->opt_len = s->static_len = 0L;
2558 s->last_lit = s->matches = 0;
2559 }
2560
2561 #define SMALLEST 1
2562 /* Index within the heap array of least frequent node in the Huffman tree */
2563
2564
2565 /* ===========================================================================
2566 * Remove the smallest element from the heap and recreate the heap with
2567 * one less element. Updates heap and heap_len.
2568 */
2569 #define pqremove(s, tree, top) \
2570 {\
2571 top = s->heap[SMALLEST]; \
2572 s->heap[SMALLEST] = s->heap[s->heap_len--]; \
2573 pqdownheap(s, tree, SMALLEST); \
2574 }
2575
2576 /* ===========================================================================
 * Compares two subtrees, using the tree depth as tie breaker when
2578 * the subtrees have equal frequency. This minimizes the worst case length.
2579 */
2580 #define smaller(tree, n, m, depth) \
2581 (tree[n].Freq < tree[m].Freq || \
2582 (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))
2583
2584 /* ===========================================================================
2585 * Restore the heap property by moving down the tree starting at node k,
2586 * exchanging a node with the smallest of its two sons if necessary, stopping
2587 * when the heap property is re-established (each father smaller than its
2588 * two sons).
2589 */
2590 local void pqdownheap(s, tree, k)
2591 deflate_state *s;
2592 ct_data *tree; /* the tree to restore */
2593 int k; /* node to move down */
2594 {
2595 int v = s->heap[k];
2596 int j = k << 1; /* left son of k */
2597 while (j <= s->heap_len) {
2598 /* Set j to the smallest of the two sons: */
2599 if (j < s->heap_len &&
2600 smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {
2601 j++;
2602 }
2603 /* Exit if v is smaller than both sons */
2604 if (smaller(tree, v, s->heap[j], s->depth)) break;
2605
2606 /* Exchange v with the smallest son */
2607 s->heap[k] = s->heap[j]; k = j;
2608
2609 /* And continue down the tree, setting j to the left son of k */
2610 j <<= 1;
2611 }
2612 s->heap[k] = v;
2613 }
2614
/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 * above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 * array bl_count contains the frequencies for each bit length.
 * The length opt_len is updated; static_len is also updated if stree is
 * not null.
 */
local void gen_bitlen(s, desc)
    deflate_state *s;
    tree_desc *desc;    /* the tree descriptor */
{
    ct_data *tree        = desc->dyn_tree;
    int max_code         = desc->max_code;
    const ct_data *stree = desc->stat_desc->static_tree;
    const intf *extra    = desc->stat_desc->extra_bits;
    int base             = desc->stat_desc->extra_base;
    int max_length       = desc->stat_desc->max_length;
    int h;              /* heap index */
    int n, m;           /* iterate over the tree elements */
    int bits;           /* bit length */
    int xbits;          /* extra bits */
    ush f;              /* frequency */
    int overflow = 0;   /* number of elements with bit length too large */

    for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;

    /* In a first pass, compute the optimal bit lengths (which may
     * overflow in the case of the bit length tree).
     */
    tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */

    /* heap[heap_max..HEAP_SIZE-1] holds the nodes sorted by increasing
     * frequency; a node's depth is one more than its father's, clamped
     * to max_length (clamping is counted in overflow and fixed below).
     */
    for (h = s->heap_max+1; h < HEAP_SIZE; h++) {
        n = s->heap[h];
        bits = tree[tree[n].Dad].Len + 1;
        if (bits > max_length) bits = max_length, overflow++;
        tree[n].Len = (ush)bits;
        /* We overwrite tree[n].Dad which is no longer needed */

        if (n > max_code) continue; /* not a leaf node */

        s->bl_count[bits]++;
        xbits = 0;
        if (n >= base) xbits = extra[n-base];
        f = tree[n].Freq;
        s->opt_len += (ulg)f * (bits + xbits);
        if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);
    }
    if (overflow == 0) return;

    Trace((stderr,"\nbit length overflow\n"));
    /* This happens for example on obj2 and pic of the Calgary corpus */

    /* Find the first bit length which could increase: */
    do {
        bits = max_length-1;
        while (s->bl_count[bits] == 0) bits--;
        s->bl_count[bits]--;      /* move one leaf down the tree */
        s->bl_count[bits+1] += 2; /* move one overflow item as its brother */
        s->bl_count[max_length]--;
        /* The brother of the overflow item also moves one step up,
         * but this does not affect bl_count[max_length]
         */
        overflow -= 2;
    } while (overflow > 0);

    /* Now recompute all bit lengths, scanning in increasing frequency.
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
     * lengths instead of fixing only the wrong ones. This idea is taken
     * from 'ar' written by Haruhiko Okumura.)
     */
    for (bits = max_length; bits != 0; bits--) {
        n = s->bl_count[bits];
        while (n != 0) {
            m = s->heap[--h];
            if (m > max_code) continue; /* internal node: no code emitted */
            if (tree[m].Len != (unsigned) bits) {
                Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));
                /* adjust opt_len for the changed leaf length */
                s->opt_len += ((long)bits - (long)tree[m].Len)
                              *(long)tree[m].Freq;
                tree[m].Len = (ush)bits;
            }
            n--;
        }
    }
}
2703
2704 /* ===========================================================================
2705 * Generate the codes for a given tree and bit counts (which need not be
2706 * optimal).
2707 * IN assertion: the array bl_count contains the bit length statistics for
2708 * the given tree and the field len is set for all tree elements.
2709 * OUT assertion: the field code is set for all tree elements of non
2710 * zero code length.
2711 */
2712 local void gen_codes (tree, max_code, bl_count)
2713 ct_data *tree; /* the tree to decorate */
2714 int max_code; /* largest code with non zero frequency */
2715 ushf *bl_count; /* number of codes at each bit length */
2716 {
2717 ush next_code[MAX_BITS+1]; /* next code value for each bit length */
2718 ush code = 0; /* running code value */
2719 int bits; /* bit index */
2720 int n; /* code index */
2721
2722 /* The distribution counts are first used to generate the code values
2723 * without bit reversal.
2724 */
2725 for (bits = 1; bits <= MAX_BITS; bits++) {
2726 next_code[bits] = code = (code + bl_count[bits-1]) << 1;
2727 }
2728 /* Check that the bit counts in bl_count are consistent. The last code
2729 * must be all ones.
2730 */
2731 Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
2732 "inconsistent bit counts");
2733 Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
2734
2735 for (n = 0; n <= max_code; n++) {
2736 int len = tree[n].Len;
2737 if (len == 0) continue;
2738 /* Now reverse the bits */
2739 tree[n].Code = bi_reverse(next_code[len]++, len);
2740
2741 Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
2742 n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
2743 }
2744 }
2745
/* ===========================================================================
 * Construct one Huffman tree and assigns the code bit strings and lengths.
 * Update the total bit length for the current block.
 * IN assertion: the field freq is set for all tree elements.
 * OUT assertions: the fields len and code are set to the optimal bit length
 * and corresponding code. The length opt_len is updated; static_len is
 * also updated if stree is not null. The field max_code is set.
 */
local void build_tree(s, desc)
    deflate_state *s;
    tree_desc *desc; /* the tree descriptor */
{
    ct_data *tree         = desc->dyn_tree;
    const ct_data *stree  = desc->stat_desc->static_tree;
    int elems             = desc->stat_desc->elems;
    int n, m;          /* iterate over heap elements */
    int max_code = -1; /* largest code with non zero frequency */
    int node;          /* new node being created */

    /* Construct the initial heap, with least frequent element in
     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
     * heap[0] is not used.
     */
    s->heap_len = 0, s->heap_max = HEAP_SIZE;

    for (n = 0; n < elems; n++) {
        if (tree[n].Freq != 0) {
            s->heap[++(s->heap_len)] = max_code = n;
            s->depth[n] = 0;
        } else {
            tree[n].Len = 0;
        }
    }

    /* The pkzip format requires that at least one distance code exists,
     * and that at least one bit should be sent even if there is only one
     * possible code. So to avoid special checks later on we force at least
     * two codes of non zero frequency.
     */
    while (s->heap_len < 2) {
        node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);
        tree[node].Freq = 1;
        s->depth[node] = 0;
        /* the forced code costs one bit; compensate the block lengths */
        s->opt_len--; if (stree) s->static_len -= stree[node].Len;
        /* node is 0 or 1 so it does not have extra bits */
    }
    desc->max_code = max_code;

    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
     * establish sub-heaps of increasing lengths:
     */
    for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);

    /* Construct the Huffman tree by repeatedly combining the least two
     * frequent nodes.
     */
    node = elems;              /* next internal node of the tree */
    do {
        pqremove(s, tree, n);  /* n = node of least frequency */
        m = s->heap[SMALLEST]; /* m = node of next least frequency */

        s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */
        s->heap[--(s->heap_max)] = m;

        /* Create a new node father of n and m */
        tree[node].Freq = tree[n].Freq + tree[m].Freq;
        s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1);
        tree[n].Dad = tree[m].Dad = (ush)node;
#ifdef DUMP_BL_TREE
        if (tree == s->bl_tree) {
            fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)",
                    node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
        }
#endif
        /* and insert the new node in the heap */
        s->heap[SMALLEST] = node++;
        pqdownheap(s, tree, SMALLEST);

    } while (s->heap_len >= 2);

    /* move the last remaining node into the sorted region as well */
    s->heap[--(s->heap_max)] = s->heap[SMALLEST];

    /* At this point, the fields freq and dad are set. We can now
     * generate the bit lengths.
     */
    gen_bitlen(s, (tree_desc *)desc);

    /* The field len is now set, we can generate the bit codes */
    gen_codes ((ct_data *)tree, max_code, s->bl_count);
}
2836
/* ===========================================================================
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree.
 */
local void scan_tree (s, tree, max_code)
    deflate_state *s;
    ct_data *tree;   /* the tree to be scanned */
    int max_code;    /* and its largest code of non zero frequency */
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    if (nextlen == 0) max_count = 138, min_count = 3;
    tree[max_code+1].Len = (ush)0xffff; /* guard */

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        /* keep accumulating a run of identical lengths up to the limit */
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            /* run too short for a repeat code: count each length itself */
            s->bl_tree[curlen].Freq += count;
        } else if (curlen != 0) {
            if (curlen != prevlen) s->bl_tree[curlen].Freq++;
            s->bl_tree[REP_3_6].Freq++;     /* repeat prev length 3-6 times */
        } else if (count <= 10) {
            s->bl_tree[REPZ_3_10].Freq++;   /* repeat zero 3-10 times */
        } else {
            s->bl_tree[REPZ_11_138].Freq++; /* repeat zero 11-138 times */
        }
        count = 0; prevlen = curlen;
        /* pick the repeat-count limits appropriate for the next run */
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}
2881
/* ===========================================================================
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree. Mirrors the run-length logic of scan_tree, but emits the codes
 * instead of counting frequencies.
 */
local void send_tree (s, tree, max_code)
    deflate_state *s;
    ct_data *tree;   /* the tree to be scanned */
    int max_code;    /* and its largest code of non zero frequency */
{
    int n;                     /* iterates over all tree elements */
    int prevlen = -1;          /* last emitted length */
    int curlen;                /* length of current code */
    int nextlen = tree[0].Len; /* length of next code */
    int count = 0;             /* repeat count of the current code */
    int max_count = 7;         /* max repeat count */
    int min_count = 4;         /* min repeat count */

    /* tree[max_code+1].Len = -1; */ /* guard already set */
    if (nextlen == 0) max_count = 138, min_count = 3;

    for (n = 0; n <= max_code; n++) {
        curlen = nextlen; nextlen = tree[n+1].Len;
        /* keep accumulating a run of identical lengths up to the limit */
        if (++count < max_count && curlen == nextlen) {
            continue;
        } else if (count < min_count) {
            /* run too short for a repeat code: send each length itself */
            do { send_code(s, curlen, s->bl_tree); } while (--count != 0);

        } else if (curlen != 0) {
            if (curlen != prevlen) {
                send_code(s, curlen, s->bl_tree); count--;
            }
            Assert(count >= 3 && count <= 6, " 3_6?");
            /* REP_3_6 carries the count-3 in 2 extra bits */
            send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);

        } else if (count <= 10) {
            /* REPZ_3_10 carries the count-3 in 3 extra bits */
            send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);

        } else {
            /* REPZ_11_138 carries the count-11 in 7 extra bits */
            send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);
        }
        count = 0; prevlen = curlen;
        /* pick the repeat-count limits appropriate for the next run */
        if (nextlen == 0) {
            max_count = 138, min_count = 3;
        } else if (curlen == nextlen) {
            max_count = 6, min_count = 3;
        } else {
            max_count = 7, min_count = 4;
        }
    }
}
2932
2933 /* ===========================================================================
2934 * Construct the Huffman tree for the bit lengths and return the index in
2935 * bl_order of the last bit length code to send.
2936 */
2937 local int build_bl_tree(s)
2938 deflate_state *s;
2939 {
2940 int max_blindex; /* index of last bit length code of non zero freq */
2941
2942 /* Determine the bit length frequencies for literal and distance trees */
2943 scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);
2944 scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);
2945
2946 /* Build the bit length tree: */
2947 build_tree(s, (tree_desc *)(&(s->bl_desc)));
2948 /* opt_len now includes the length of the tree representations, except
2949 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
2950 */
2951
2952 /* Determine the number of bit length codes to send. The pkzip format
2953 * requires that at least 4 bit length codes be sent. (appnote.txt says
2954 * 3 but the actual value used is 4.)
2955 */
2956 for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
2957 if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;
2958 }
2959 /* Update opt_len to include the bit length tree and counts */
2960 s->opt_len += 3*(max_blindex+1) + 5+5+4;
2961 Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
2962 s->opt_len, s->static_len));
2963
2964 return max_blindex;
2965 }
2966
2967 /* ===========================================================================
2968 * Send the header for a block using dynamic Huffman trees: the counts, the
2969 * lengths of the bit length codes, the literal tree and the distance tree.
2970 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
2971 */
2972 local void send_all_trees(s, lcodes, dcodes, blcodes)
2973 deflate_state *s;
2974 int lcodes, dcodes, blcodes; /* number of codes for each tree */
2975 {
2976 int rank; /* index in bl_order */
2977
2978 Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
2979 Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
2980 "too many codes");
2981 Tracev((stderr, "\nbl counts: "));
2982 send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */
2983 send_bits(s, dcodes-1, 5);
2984 send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */
2985 for (rank = 0; rank < blcodes; rank++) {
2986 Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
2987 send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);
2988 }
2989 Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));
2990
2991 send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */
2992 Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));
2993
2994 send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */
2995 Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));
2996 }
2997
2998 /* ===========================================================================
2999 * Send a stored block
3000 */
3001 void _tr_stored_block(s, buf, stored_len, eof)
3002 deflate_state *s;
3003 charf *buf; /* input block */
3004 ulg stored_len; /* length of input block */
3005 int eof; /* true if this is the last block for a file */
3006 {
3007 send_bits(s, (STORED_BLOCK<<1)+eof, 3); /* send block type */
3008 #ifdef DEBUG_ZLIB
3009 s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;
3010 s->compressed_len += (stored_len + 4) << 3;
3011 #endif
3012 copy_block(s, buf, (unsigned)stored_len, 1); /* with header */
3013 }
3014
3015 /* ===========================================================================
3016 * Send one empty static block to give enough lookahead for inflate.
3017 * This takes 10 bits, of which 7 may remain in the bit buffer.
3018 * The current inflate code requires 9 bits of lookahead. If the
3019 * last two codes for the previous block (real code plus EOB) were coded
3020 * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode
3021 * the last real code. In this case we send two empty static blocks instead
3022 * of one. (There are no problems if the previous block is stored or fixed.)
3023 * To simplify the code, we assume the worst case of last real code encoded
3024 * on one bit only.
3025 */
3026 void _tr_align(s)
3027 deflate_state *s;
3028 {
3029 send_bits(s, STATIC_TREES<<1, 3);
3030 send_code(s, END_BLOCK, static_ltree);
3031 #ifdef DEBUG_ZLIB
3032 s->compressed_len += 10L; /* 3 for block type, 7 for EOB */
3033 #endif
3034 bi_flush(s);
3035 /* Of the 10 bits for the empty block, we have already sent
3036 * (10 - bi_valid) bits. The lookahead for the last real code (before
3037 * the EOB of the previous block) was thus at least one plus the length
3038 * of the EOB plus what we have just sent of the empty static block.
3039 */
3040 if (1 + s->last_eob_len + 10 - s->bi_valid < 9) {
3041 send_bits(s, STATIC_TREES<<1, 3);
3042 send_code(s, END_BLOCK, static_ltree);
3043 #ifdef DEBUG_ZLIB
3044 s->compressed_len += 10L;
3045 #endif
3046 bi_flush(s);
3047 }
3048 s->last_eob_len = 7;
3049 }
3050
/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and output the encoded block to the zip file.
 */
void _tr_flush_block(s, buf, stored_len, eof)
    deflate_state *s;
    charf *buf;       /* input block, or NULL if too old */
    ulg stored_len;   /* length of input block */
    int eof;          /* true if this is the last block for a file */
{
    ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
    int max_blindex = 0;  /* index of last bit length code of non zero freq */

    /* Build the Huffman trees unless a stored block is forced */
    if (s->level > 0) {

        /* Check if the file is ascii or binary */
        if (s->data_type == Z_UNKNOWN) set_data_type(s);

        /* Construct the literal and distance trees */
        build_tree(s, (tree_desc *)(&(s->l_desc)));
        Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));

        build_tree(s, (tree_desc *)(&(s->d_desc)));
        Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
                s->static_len));
        /* At this point, opt_len and static_len are the total bit lengths of
         * the compressed block data, excluding the tree representations.
         */

        /* Build the bit length tree for the above two trees, and get the index
         * in bl_order of the last bit length code to send.
         */
        max_blindex = build_bl_tree(s);

        /* Determine the best encoding. Compute first the block length in bytes*/
        opt_lenb = (s->opt_len+3+7)>>3;
        static_lenb = (s->static_len+3+7)>>3;

        Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
                opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
                s->last_lit));

        /* prefer static trees on a tie: smaller header, same data size */
        if (static_lenb <= opt_lenb) opt_lenb = static_lenb;

    } else {
        Assert(buf != (char*)0, "lost buf");
        opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
    }

#ifdef FORCE_STORED
    if (buf != (char*)0) { /* force stored block */
#else
    if (stored_len+4 <= opt_lenb && buf != (char*)0) {
                       /* 4: two words for the lengths */
#endif
        /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
         * Otherwise we can't have processed more than WSIZE input bytes since
         * the last block flush, because compression would have been
         * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
         * transform a block into a stored block.
         */
        _tr_stored_block(s, buf, stored_len, eof);

#ifdef FORCE_STATIC
    } else if (static_lenb >= 0) { /* force static trees */
#else
    } else if (static_lenb == opt_lenb) {
#endif
        send_bits(s, (STATIC_TREES<<1)+eof, 3);
        compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree);
#ifdef DEBUG_ZLIB
        s->compressed_len += 3 + s->static_len;
#endif
    } else {
        send_bits(s, (DYN_TREES<<1)+eof, 3);
        send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,
                       max_blindex+1);
        compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree);
#ifdef DEBUG_ZLIB
        s->compressed_len += 3 + s->opt_len;
#endif
    }
    Assert (s->compressed_len == s->bits_sent, "bad compressed size");
    /* The above check is made mod 2^32, for files larger than 512 MB
     * and uLong implemented on 32 bits.
     */
    init_block(s);

    if (eof) {
        bi_windup(s);
#ifdef DEBUG_ZLIB
        s->compressed_len += 7;  /* align on byte boundary */
#endif
    }
    Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
           s->compressed_len-7*eof));
}
3150
/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
int _tr_tally (s, dist, lc)
    deflate_state *s;
    unsigned dist;  /* distance of matched string */
    unsigned lc;    /* match length-MIN_MATCH or unmatched char (if dist==0) */
{
    /* record the (distance, length/literal) pair in the pending buffers */
    s->d_buf[s->last_lit] = (ush)dist;
    s->l_buf[s->last_lit++] = (uch)lc;
    if (dist == 0) {
        /* lc is the unmatched char */
        s->dyn_ltree[lc].Freq++;
    } else {
        s->matches++;
        /* Here, lc is the match length - MIN_MATCH */
        dist--;             /* dist = match distance - 1 */
        Assert((ush)dist < (ush)MAX_DIST(s) &&
               (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
               (ush)d_code(dist) < (ush)D_CODES,  "_tr_tally: bad match");

        s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++;
        s->dyn_dtree[d_code(dist)].Freq++;
    }

#ifdef TRUNCATE_BLOCK
    /* Try to guess if it is profitable to stop the current block here */
    if ((s->last_lit & 0x1fff) == 0 && s->level > 2) {
        /* Compute an upper bound for the compressed length */
        ulg out_length = (ulg)s->last_lit*8L;
        ulg in_length = (ulg)((long)s->strstart - s->block_start);
        int dcode;
        for (dcode = 0; dcode < D_CODES; dcode++) {
            out_length += (ulg)s->dyn_dtree[dcode].Freq *
                (5L+extra_dbits[dcode]);
        }
        out_length >>= 3;
        Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
               s->last_lit, in_length, out_length,
               100L - out_length*100L/in_length));
        if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;
    }
#endif
    return (s->last_lit == s->lit_bufsize-1);
    /* We avoid equality with lit_bufsize because of wraparound at 64K
     * on 16 bit machines and because stored blocks are restricted to
     * 64K-1 bytes.
     */
}
3201
/* ===========================================================================
 * Send the block data compressed using the given Huffman trees
 */
local void compress_block(s, ltree, dtree)
    deflate_state *s;
    ct_data *ltree; /* literal tree */
    ct_data *dtree; /* distance tree */
{
    unsigned dist;      /* distance of matched string */
    int lc;             /* match length or unmatched char (if dist == 0) */
    unsigned lx = 0;    /* running index in l_buf */
    unsigned code;      /* the code to send */
    int extra;          /* number of extra bits to send */

    /* replay the (distance, length/literal) pairs recorded by _tr_tally */
    if (s->last_lit != 0) do {
        dist = s->d_buf[lx];
        lc = s->l_buf[lx++];
        if (dist == 0) {
            send_code(s, lc, ltree); /* send a literal byte */
            Tracecv(isgraph(lc), (stderr," '%c' ", lc));
        } else {
            /* Here, lc is the match length - MIN_MATCH */
            code = _length_code[lc];
            send_code(s, code+LITERALS+1, ltree); /* send the length code */
            extra = extra_lbits[code];
            if (extra != 0) {
                lc -= base_length[code];
                send_bits(s, lc, extra);       /* send the extra length bits */
            }
            dist--; /* dist is now the match distance - 1 */
            code = d_code(dist);
            Assert (code < D_CODES, "bad d_code");

            send_code(s, code, dtree);       /* send the distance code */
            extra = extra_dbits[code];
            if (extra != 0) {
                dist -= base_dist[code];
                send_bits(s, dist, extra);   /* send the extra distance bits */
            }
        } /* literal or match pair ? */

        /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */
        Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow");

    } while (lx < s->last_lit);

    send_code(s, END_BLOCK, ltree);
    /* remember the EOB length for _tr_align's lookahead computation */
    s->last_eob_len = ltree[END_BLOCK].Len;
}
3251
3252 /* ===========================================================================
3253 * Set the data type to ASCII or BINARY, using a crude approximation:
3254 * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.
3255 * IN assertion: the fields freq of dyn_ltree are set and the total of all
3256 * frequencies does not exceed 64K (to fit in an int on 16 bit machines).
3257 */
3258 local void set_data_type(s)
3259 deflate_state *s;
3260 {
3261 int n = 0;
3262 unsigned ascii_freq = 0;
3263 unsigned bin_freq = 0;
3264 while (n < 7) bin_freq += s->dyn_ltree[n++].Freq;
3265 while (n < 128) ascii_freq += s->dyn_ltree[n++].Freq;
3266 while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq;
3267 s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? Z_BINARY : Z_ASCII);
3268 }
3269
/* ===========================================================================
 * Reverse the first len bits of a code, using straightforward code (a faster
 * method would use a table)
 * IN assertion: 1 <= len <= 15
 */
static unsigned bi_reverse(unsigned code, int len)
{
    unsigned reversed = 0;

    /* shift bits out of code LSB-first and into reversed MSB-first */
    while (len-- > 0) {
        reversed = (reversed << 1) | (code & 1);
        code >>= 1;
    }
    return reversed;
}
3286
3287 /* ===========================================================================
3288 * Flush the bit buffer, keeping at most 7 bits in it.
3289 */
3290 local void bi_flush(s)
3291 deflate_state *s;
3292 {
3293 if (s->bi_valid == 16) {
3294 put_short(s, s->bi_buf);
3295 s->bi_buf = 0;
3296 s->bi_valid = 0;
3297 } else if (s->bi_valid >= 8) {
3298 put_byte(s, (Byte)s->bi_buf);
3299 s->bi_buf >>= 8;
3300 s->bi_valid -= 8;
3301 }
3302 }
3303
3304 /* ===========================================================================
3305 * Flush the bit buffer and align the output on a byte boundary
3306 */
3307 local void bi_windup(s)
3308 deflate_state *s;
3309 {
3310 if (s->bi_valid > 8) {
3311 put_short(s, s->bi_buf);
3312 } else if (s->bi_valid > 0) {
3313 put_byte(s, (Byte)s->bi_buf);
3314 }
3315 s->bi_buf = 0;
3316 s->bi_valid = 0;
3317 #ifdef DEBUG_ZLIB
3318 s->bits_sent = (s->bits_sent+7) & ~7;
3319 #endif
3320 }
3321
3322 /* ===========================================================================
3323 * Copy a stored block, storing first the length and its
3324 * one's complement if requested.
3325 */
3326 local void copy_block(s, buf, len, header)
3327 deflate_state *s;
3328 charf *buf; /* the input data */
3329 unsigned len; /* its length */
3330 int header; /* true if block header must be written */
3331 {
3332 bi_windup(s); /* align on byte boundary */
3333 s->last_eob_len = 8; /* enough lookahead for inflate */
3334
3335 if (header) {
3336 put_short(s, (ush)len);
3337 put_short(s, (ush)~len);
3338 #ifdef DEBUG_ZLIB
3339 s->bits_sent += 2*16;
3340 #endif
3341 }
3342 #ifdef DEBUG_ZLIB
3343 s->bits_sent += (ulg)len<<3;
3344 #endif
3345 while (len--) {
3346 put_byte(s, *buf++);
3347 }
3348 }
3349 /* --- trees.c */
3350
3351 /* +++ inflate.c */
3352 /* inflate.c -- zlib interface to inflate modules
3353 * Copyright (C) 1995-2002 Mark Adler
3354 * For conditions of distribution and use, see copyright notice in zlib.h
3355 */
3356
3357 /* #include "zutil.h" */
3358
3359 /* +++ infblock.h */
3360 /* infblock.h -- header to use infblock.c
3361 * Copyright (C) 1995-2002 Mark Adler
3362 * For conditions of distribution and use, see copyright notice in zlib.h
3363 */
3364
3365 /* WARNING: this file should *not* be used by applications. It is
3366 part of the implementation of the compression library and is
3367 subject to change. Applications should only use zlib.h.
3368 */
3369
3370 struct inflate_blocks_state;
3371 typedef struct inflate_blocks_state FAR inflate_blocks_statef;
3372
3373 extern inflate_blocks_statef * inflate_blocks_new OF((
3374 z_streamp z,
3375 check_func c, /* check function */
3376 uInt w)); /* window size */
3377
3378 extern int inflate_blocks OF((
3379 inflate_blocks_statef *,
3380 z_streamp ,
3381 int)); /* initial return code */
3382
3383 extern void inflate_blocks_reset OF((
3384 inflate_blocks_statef *,
3385 z_streamp ,
3386 uLongf *)); /* check value on output */
3387
3388 extern int inflate_blocks_free OF((
3389 inflate_blocks_statef *,
3390 z_streamp));
3391
3392 extern void inflate_set_dictionary OF((
3393 inflate_blocks_statef *s,
3394 const Bytef *d, /* dictionary */
3395 uInt n)); /* dictionary length */
3396
3397 extern int inflate_blocks_sync_point OF((
3398 inflate_blocks_statef *s));
3399 /* --- infblock.h */
3400
#ifndef NO_DUMMY_DECL
struct inflate_blocks_state {int dummy;}; /* for buggy compilers */
#endif

/* inflate private state (hung off z_stream.state) */
typedef struct inflate_state {

  /* mode: states progress top-to-bottom as header, blocks and trailing
   * check bytes are consumed; BAD is terminal except via inflateSync */
  enum {
      METHOD,   /* waiting for method byte */
      FLAG,     /* waiting for flag byte */
      DICT4,    /* four dictionary check bytes to go */
      DICT3,    /* three dictionary check bytes to go */
      DICT2,    /* two dictionary check bytes to go */
      DICT1,    /* one dictionary check byte to go */
      DICT0,    /* waiting for inflateSetDictionary */
      BLOCKS,   /* decompressing blocks */
      CHECK4,   /* four check bytes to go */
      CHECK3,   /* three check bytes to go */
      CHECK2,   /* two check bytes to go */
      CHECK1,   /* one check byte to go */
      DONE,     /* finished check, done */
      BAD}      /* got an error--stay here */
    mode;               /* current inflate mode */

  /* mode dependent information (discriminated by mode above) */
  union {
    uInt method;        /* if FLAGS, method byte */
    struct {
      uLong was;                /* computed check value */
      uLong need;               /* stream check value */
    } check;            /* if CHECK, check values to compare */
    uInt marker;        /* if BAD, inflateSync's marker bytes count */
  } sub;        /* submode */

  /* mode independent information */
  int  nowrap;          /* flag for no wrapper */
  uInt wbits;           /* log2(window size)  (8..15, defaults to 15) */
  inflate_blocks_statef
    *blocks;            /* current inflate_blocks state */

}inflate_state;
3443
3444
3445 int ZEXPORT inflateReset(z)
3446 z_streamp z;
3447 {
3448 inflate_state* s;
3449 if (z == Z_NULL || z->state == Z_NULL)
3450 return Z_STREAM_ERROR;
3451
3452 s = (inflate_state*)z->state;
3453 z->total_in = z->total_out = 0;
3454 z->msg = Z_NULL;
3455 s->mode = s->nowrap ? BLOCKS : METHOD;
3456 inflate_blocks_reset(s->blocks, z, Z_NULL);
3457 Tracev((stderr, "inflate: reset\n"));
3458 return Z_OK;
3459 }
3460
3461
3462 int ZEXPORT inflateEnd(z)
3463 z_streamp z;
3464 {
3465 if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL)
3466 return Z_STREAM_ERROR;
3467 if (((inflate_state*)z->state)->blocks != Z_NULL)
3468 inflate_blocks_free(((inflate_state*)z->state)->blocks, z);
3469 ZFREE(z, z->state);
3470 z->state = Z_NULL;
3471 Tracev((stderr, "inflate: end\n"));
3472 return Z_OK;
3473 }
3474
3475
3476 int ZEXPORT inflateInit2_(z, w, version, stream_size)
3477 z_streamp z;
3478 int w;
3479 const char *version;
3480 int stream_size;
3481 {
3482 inflate_state* s;
3483 if (version == Z_NULL || version[0] != ZLIB_VERSION[0] ||
3484 stream_size != sizeof(z_stream))
3485 return Z_VERSION_ERROR;
3486
3487 /* initialize state */
3488 if (z == Z_NULL)
3489 return Z_STREAM_ERROR;
3490 z->msg = Z_NULL;
3491 #ifndef NO_ZCFUNCS
3492 if (z->zalloc == Z_NULL)
3493 {
3494 z->zalloc = zcalloc;
3495 z->opaque = (voidpf)0;
3496 }
3497 if (z->zfree == Z_NULL) z->zfree = zcfree;
3498 #endif
3499 if ((z->state = (struct internal_state FAR *)
3500 ZALLOC(z,1,sizeof(struct inflate_state))) == Z_NULL)
3501 return Z_MEM_ERROR;
3502 s = (inflate_state*)z->state;
3503 s->blocks = Z_NULL;
3504
3505 /* handle undocumented nowrap option (no zlib header or check) */
3506 s->nowrap = 0;
3507 if (w < 0)
3508 {
3509 w = - w;
3510 s->nowrap = 1;
3511 }
3512
3513 /* set window size */
3514 if (w < 8 || w > 15)
3515 {
3516 inflateEnd(z);
3517 return Z_STREAM_ERROR;
3518 }
3519 s->wbits = (uInt)w;
3520
3521 /* create inflate_blocks state */
3522 if ((s->blocks =
3523 inflate_blocks_new(z, s->nowrap ? Z_NULL : adler32, (uInt)1 << w))
3524 == Z_NULL)
3525 {
3526 inflateEnd(z);
3527 return Z_MEM_ERROR;
3528 }
3529 Tracev((stderr, "inflate: allocated\n"));
3530
3531 /* reset state */
3532 inflateReset(z);
3533 return Z_OK;
3534 }
3535
3536
/* Convenience initializer: identical to inflateInit2_ with the
   default window size (DEF_WBITS, i.e. a full 32K window). */
int ZEXPORT inflateInit_(z, version, stream_size)
z_streamp z;
const char *version;
int stream_size;
{
  return inflateInit2_(z, DEF_WBITS, version, stream_size);
}
3544
3545
3546 #define NEEDBYTE {if(z->avail_in==0)return r;r=f;}
3547 #define NEXTBYTE (z->avail_in--,z->total_in++,*z->next_in++)
3548
/* Decompress as much input as possible into the output buffer.
   f is the flush mode; under Z_FINISH a stall is reported as
   Z_BUF_ERROR rather than Z_OK.  The zlib header (method byte,
   flags), optional preset-dictionary id, the compressed blocks, and
   the trailing adler32 check are consumed by a state machine in which
   each case deliberately falls through to the next once its bytes
   have been read.  NEEDBYTE returns early when input is exhausted;
   NEXTBYTE consumes one input byte and updates the counters.
   Returns Z_OK / Z_STREAM_END / Z_NEED_DICT or an error code. */
int ZEXPORT inflate(z, f)
z_streamp z;
int f;
{
  int r;
  uInt b;
  inflate_state* s;

  if (z == Z_NULL || z->state == Z_NULL || z->next_in == Z_NULL)
    return Z_STREAM_ERROR;
  f = f == Z_FINISH ? Z_BUF_ERROR : Z_OK;
  r = Z_BUF_ERROR;
  s = (inflate_state*)z->state;
  while (1) switch (s->mode)
  {
    case METHOD:
      NEEDBYTE
      /* low nibble must be the deflate method id */
      if (((s->sub.method = NEXTBYTE) & 0xf) != Z_DEFLATED)
      {
        s->mode = BAD;
        z->msg = (char*)"unknown compression method";
        s->sub.marker = 5;       /* can't try inflateSync */
        break;
      }
      /* high nibble is log2(window)-8; must fit our window */
      if ((s->sub.method >> 4) + 8 > s->wbits)
      {
        s->mode = BAD;
        z->msg = (char*)"invalid window size";
        s->sub.marker = 5;       /* can't try inflateSync */
        break;
      }
      s->mode = FLAG;
      /* fall through */
    case FLAG:
      NEEDBYTE
      b = NEXTBYTE;
      /* header pair must be a multiple of 31 (RFC 1950 FCHECK) */
      if (((s->sub.method << 8) + b) % 31)
      {
        s->mode = BAD;
        z->msg = (char*)"incorrect header check";
        s->sub.marker = 5;       /* can't try inflateSync */
        break;
      }
      Tracev((stderr, "inflate: zlib header ok\n"));
      if (!(b & PRESET_DICT))
      {
        s->mode = BLOCKS;
        break;
      }
      s->mode = DICT4;
      /* fall through: read 4-byte preset dictionary id, MSB first */
    case DICT4:
      NEEDBYTE
      s->sub.check.need = (uLong)NEXTBYTE << 24;
      s->mode = DICT3;
      /* fall through */
    case DICT3:
      NEEDBYTE
      s->sub.check.need += (uLong)NEXTBYTE << 16;
      s->mode = DICT2;
      /* fall through */
    case DICT2:
      NEEDBYTE
      s->sub.check.need += (uLong)NEXTBYTE << 8;
      s->mode = DICT1;
      /* fall through */
    case DICT1:
      NEEDBYTE
      s->sub.check.need += (uLong)NEXTBYTE;
      z->adler = s->sub.check.need;
      s->mode = DICT0;
      /* caller must now supply the dictionary via inflateSetDictionary */
      return Z_NEED_DICT;
    case DICT0:
      /* re-entered without a dictionary having been set */
      s->mode = BAD;
      z->msg = (char*)"need dictionary";
      s->sub.marker = 0;       /* can try inflateSync */
      return Z_STREAM_ERROR;
    case BLOCKS:
      r = inflate_blocks(s->blocks, z, r);
      if (r == Z_DATA_ERROR)
      {
        s->mode = BAD;
        s->sub.marker = 0;       /* can try inflateSync */
        break;
      }
      if (r == Z_OK)
        r = f;
      if (r != Z_STREAM_END)
        return r;
      r = f;
      /* blocks done: capture the computed check value into sub.check.was */
      inflate_blocks_reset(s->blocks, z, &s->sub.check.was);
      if (s->nowrap)
      {
        s->mode = DONE;
        break;
      }
      s->mode = CHECK4;
      /* fall through: read 4-byte adler32 trailer, MSB first */
    case CHECK4:
      NEEDBYTE
      s->sub.check.need = (uLong)NEXTBYTE << 24;
      s->mode = CHECK3;
      /* fall through */
    case CHECK3:
      NEEDBYTE
      s->sub.check.need += (uLong)NEXTBYTE << 16;
      s->mode = CHECK2;
      /* fall through */
    case CHECK2:
      NEEDBYTE
      s->sub.check.need += (uLong)NEXTBYTE << 8;
      s->mode = CHECK1;
      /* fall through */
    case CHECK1:
      NEEDBYTE
      s->sub.check.need += (uLong)NEXTBYTE;

      /* compare the stored check against the one we computed */
      if (s->sub.check.was != s->sub.check.need)
      {
        s->mode = BAD;
        z->msg = (char*)"incorrect data check";
        s->sub.marker = 5;       /* can't try inflateSync */
        break;
      }
      Tracev((stderr, "inflate: zlib check ok\n"));
      s->mode = DONE;
      /* fall through */
    case DONE:
      return Z_STREAM_END;
    case BAD:
      return Z_DATA_ERROR;
    default:
      return Z_STREAM_ERROR;
  }
#ifdef NEED_DUMMY_RETURN
  return Z_STREAM_ERROR; /* Some dumb compilers complain without this */
#endif
}
3677
3678
/* Supply the preset dictionary requested by a Z_NEED_DICT return.
   Only legal in mode DICT0 (i.e. right after inflate returned
   Z_NEED_DICT).  The dictionary's adler32 must match the id read
   from the stream; on success the (tail of the) dictionary is
   loaded into the sliding window and decoding resumes at BLOCKS. */
int ZEXPORT inflateSetDictionary(z, dictionary, dictLength)
z_streamp z;
const Bytef *dictionary;
uInt dictLength;
{
  uInt length = dictLength;
  inflate_state* s;

  if (z == Z_NULL || z->state == Z_NULL || ((inflate_state*)z->state)->mode != DICT0)
    return Z_STREAM_ERROR;
  s = (inflate_state*)z->state;

  /* verify against the id stashed in z->adler by the DICT states */
  if (adler32(1L, dictionary, dictLength) != z->adler) return Z_DATA_ERROR;
  z->adler = 1L;

  /* only the last window-full of the dictionary can matter */
  if (length >= ((uInt)1<<s->wbits))
  {
    length = (1<<s->wbits)-1;
    dictionary += dictLength - length;
  }
  inflate_set_dictionary(s->blocks, dictionary, length);
  s->mode = BLOCKS;
  return Z_OK;
}
3703
3704
/* Skip input until a full flush point (the 00 00 FF FF byte pattern
   emitted by an empty stored block) is found, then reset the stream
   so decoding can restart there.  The partial-match count survives
   across calls in sub.marker, so the pattern may straddle input
   buffers.  Returns Z_OK at a sync point, Z_BUF_ERROR with no input,
   or Z_DATA_ERROR if the pattern was not found. */
int ZEXPORT inflateSync(z)
z_streamp z;
{
  uInt n;       /* number of bytes to look at */
  Bytef *p;     /* pointer to bytes */
  uInt m;       /* number of marker bytes found in a row */
  uLong r, w;   /* temporaries to save total_in and total_out */
  inflate_state* s;

  /* set up */
  if (z == Z_NULL || z->state == Z_NULL)
    return Z_STREAM_ERROR;
  s = (inflate_state*)z->state;
  if (s->mode != BAD)
  {
    s->mode = BAD;
    s->sub.marker = 0;
  }
  if ((n = z->avail_in) == 0)
    return Z_BUF_ERROR;
  p = z->next_in;
  m = s->sub.marker;

  /* search */
  while (n && m < 4)
  {
    static const Byte mark[4] = {0, 0, 0xff, 0xff};
    if (*p == mark[m])
      m++;
    else if (*p)
      m = 0;
    else
      /* a zero byte that breaks the match can still start a new
         match, since the marker begins with two zero bytes */
      m = 4 - m;
    p++, n--;
  }

  /* restore */
  z->total_in += p - z->next_in;
  z->next_in = p;
  z->avail_in = n;
  s->sub.marker = m;

  /* return no joy or set up to restart on a new block */
  if (m != 4)
    return Z_DATA_ERROR;
  /* preserve the running totals across the reset */
  r = z->total_in; w = z->total_out;
  inflateReset(z);
  z->total_in = r; z->total_out = w;
  s->mode = BLOCKS;
  return Z_OK;
}
3756
3757
3758 /* Returns true if inflate is currently at the end of a block generated
3759 * by Z_SYNC_FLUSH or Z_FULL_FLUSH. This function is used by one PPP
3760 * implementation to provide an additional safety check. PPP uses Z_SYNC_FLUSH
3761 * but removes the length bytes of the resulting empty stored block. When
3762 * decompressing, PPP checks that at the end of input packet, inflate is
3763 * waiting for these length bytes.
3764 */
3765 int ZEXPORT inflateSyncPoint(z)
3766 z_streamp z;
3767 {
3768 if (z == Z_NULL || z->state == Z_NULL || ((inflate_state*)z->state)->blocks == Z_NULL)
3769 return Z_STREAM_ERROR;
3770 return inflate_blocks_sync_point(((inflate_state*)z->state)->blocks);
3771 }
3772 #undef NEEDBYTE
3773 #undef NEXTBYTE
3774 /* --- inflate.c */
3775
3776 /* +++ infblock.c */
3777 /* infblock.c -- interpret and process block types to last block
3778 * Copyright (C) 1995-2002 Mark Adler
3779 * For conditions of distribution and use, see copyright notice in zlib.h
3780 */
3781
3782 /* #include "zutil.h" */
3783 /* #include "infblock.h" */
3784
3785 /* +++ inftrees.h */
3786 /* inftrees.h -- header to use inftrees.c
3787 * Copyright (C) 1995-2002 Mark Adler
3788 * For conditions of distribution and use, see copyright notice in zlib.h
3789 */
3790
3791 /* WARNING: this file should *not* be used by applications. It is
3792 part of the implementation of the compression library and is
3793 subject to change. Applications should only use zlib.h.
3794 */
3795
3796 /* Huffman code lookup table entry--this entry is four bytes for machines
3797 that have 16-bit pointers (e.g. PC's in the small or medium model). */
3798
/* One Huffman decoding table entry.  Exop encodes either the number
   of extra bits to read or a special operation; base holds a literal
   value, a length/distance base, or a sub-table offset accordingly. */
typedef struct inflate_huft_s FAR inflate_huft;

struct inflate_huft_s {
  union {
    struct {
      Byte Exop;        /* number of extra bits or operation */
      Byte Bits;        /* number of bits in this code or subcode */
    } what;
    uInt pad;           /* pad structure to a power of 2 (4 bytes for */
  } word;               /* 16-bit, 8 bytes for 32-bit int's) */
  uInt base;            /* literal, length base, distance base,
                           or table offset */
};
3812
3813 /* Maximum size of dynamic tree. The maximum found in a long but non-
3814 exhaustive search was 1004 huft structures (850 for length/literals
3815 and 154 for distances, the latter actually the result of an
3816 exhaustive search). The actual maximum is not known, but the
3817 value below is more than safe. */
3818 #define MANY 1440
3819
3820 extern int inflate_trees_bits OF((
3821 uIntf *, /* 19 code lengths */
3822 uIntf *, /* bits tree desired/actual depth */
3823 inflate_huft * FAR *, /* bits tree result */
3824 inflate_huft *, /* space for trees */
3825 z_streamp)); /* for messages */
3826
3827 extern int inflate_trees_dynamic OF((
3828 uInt, /* number of literal/length codes */
3829 uInt, /* number of distance codes */
3830 uIntf *, /* that many (total) code lengths */
3831 uIntf *, /* literal desired/actual bit depth */
3832 uIntf *, /* distance desired/actual bit depth */
3833 inflate_huft * FAR *, /* literal/length tree result */
3834 inflate_huft * FAR *, /* distance tree result */
3835 inflate_huft *, /* space for trees */
3836 z_streamp)); /* for messages */
3837
3838 extern int inflate_trees_fixed OF((
3839 uIntf *, /* literal desired/actual bit depth */
3840 uIntf *, /* distance desired/actual bit depth */
3841 inflate_huft * FAR *, /* literal/length tree result */
3842 inflate_huft * FAR *, /* distance tree result */
3843 z_streamp)); /* for memory allocation */
3844 /* --- inftrees.h */
3845
3846 /* +++ infcodes.h */
3847 /* infcodes.h -- header to use infcodes.c
3848 * Copyright (C) 1995-2002 Mark Adler
3849 * For conditions of distribution and use, see copyright notice in zlib.h
3850 */
3851
3852 /* WARNING: this file should *not* be used by applications. It is
3853 part of the implementation of the compression library and is
3854 subject to change. Applications should only use zlib.h.
3855 */
3856
3857 struct inflate_codes_state;
3858 typedef struct inflate_codes_state FAR inflate_codes_statef;
3859
3860 extern inflate_codes_statef *inflate_codes_new OF((
3861 uInt, uInt,
3862 inflate_huft *, inflate_huft *,
3863 z_streamp ));
3864
3865 extern int inflate_codes OF((
3866 inflate_blocks_statef *,
3867 z_streamp ,
3868 int));
3869
3870 extern void inflate_codes_free OF((
3871 inflate_codes_statef *,
3872 z_streamp ));
3873
3874 /* --- infcodes.h */
3875
3876 /* +++ infutil.h */
3877 /* infutil.h -- types and macros common to blocks and codes
3878 * Copyright (C) 1995-2002 Mark Adler
3879 * For conditions of distribution and use, see copyright notice in zlib.h
3880 */
3881
3882 /* WARNING: this file should *not* be used by applications. It is
3883 part of the implementation of the compression library and is
3884 subject to change. Applications should only use zlib.h.
3885 */
3886
3887 #ifndef _INFUTIL_H
3888 #define _INFUTIL_H
3889
/* Progress of the block decoder; modes advance from TYPE toward
   DONEB (or BADB) as each piece of a deflate block is consumed. */
typedef enum {
      TYPE,     /* get type bits (3, including end bit) */
      LENS,     /* get lengths for stored */
      STORED,   /* processing stored block */
      TABLE,    /* get table lengths */
      BTREE,    /* get bit lengths tree for a dynamic block */
      DTREE,    /* get length, distance trees for a dynamic block */
      CODES,    /* processing fixed or dynamic block */
      DRY,      /* output remaining window bytes */
      DONEB,    /* finished last block, done */
      BADB}     /* got a data error--stuck here */
inflate_block_mode;
3902
3903 /* inflate blocks semi-private state */
struct inflate_blocks_state {

  /* mode */
  inflate_block_mode  mode;     /* current inflate_block mode */

  /* mode dependent information (overlaid: only one member is live,
     selected by the current mode) */
  union {
    uInt left;          /* if STORED, bytes left to copy */
    struct {
      uInt table;               /* table lengths (14 bits) */
      uInt index;               /* index into blens (or border) */
      uIntf *blens;             /* bit lengths of codes */
      uInt bb;                  /* bit length tree depth */
      inflate_huft *tb;         /* bit length decoding tree */
    } trees;            /* if DTREE, decoding info for trees */
    struct {
      inflate_codes_statef
         *codes;
    } decode;           /* if CODES, current state */
  } sub;                /* submode */
  uInt last;            /* true if this block is the last block */

  /* mode independent information */
  uInt bitk;            /* bits in bit buffer */
  uLong bitb;           /* bit buffer */
  inflate_huft *hufts;  /* single malloc for tree space */
  /* window..end is a circular output buffer; read/write chase each
     other around it (see WAVAIL/WRAP macros) */
  Bytef *window;        /* sliding window */
  Bytef *end;           /* one byte after sliding window */
  Bytef *read;          /* window read pointer */
  Bytef *write;         /* window write pointer */
  check_func checkfn;   /* check function */
  uLong check;          /* check on output */

};
3938
3939
3940 /* defines for inflate input/output */
3941 /* update pointers and return */
3942 #define UPDBITS {s->bitb=b;s->bitk=k;}
3943 #define UPDIN {z->avail_in=n;z->total_in+=p-z->next_in;z->next_in=p;}
3944 #define UPDOUT {s->write=q;}
3945 #define UPDATE {UPDBITS UPDIN UPDOUT}
3946 #define LEAVE {UPDATE return inflate_flush(s,z,r);}
3947 /* get bytes and bits */
3948 #define LOADIN {p=z->next_in;n=z->avail_in;b=s->bitb;k=s->bitk;}
3949 #define NEEDBYTE {if(n)r=Z_OK;else LEAVE}
3950 #define NEXTBYTE (n--,*p++)
3951 #define NEEDBITS(j) {while(k<(j)){NEEDBYTE;b|=((uLong)NEXTBYTE)<<k;k+=8;}}
3952 #define DUMPBITS(j) {b>>=(j);k-=(j);}
3953 /* output bytes */
3954 #define WAVAIL (uInt)(q<s->read?s->read-q-1:s->end-q)
3955 #define LOADOUT {q=s->write;m=(uInt)WAVAIL;}
3956 #define WRAP {if(q==s->end&&s->read!=s->window){q=s->window;m=(uInt)WAVAIL;}}
3957 #define FLUSH {UPDOUT r=inflate_flush(s,z,r); LOADOUT}
3958 #define NEEDOUT {if(m==0){WRAP if(m==0){FLUSH WRAP if(m==0) LEAVE}}r=Z_OK;}
3959 #define OUTBYTE(a) {*q++=(Byte)(a);m--;}
3960 /* load local pointers */
3961 #define LOAD {LOADIN LOADOUT}
3962
3963 /* masks for lower bits (size given to avoid silly warnings with Visual C++) */
3964 extern uInt inflate_mask[17];
3965
3966 /* copy as much as possible from the sliding window to the output area */
3967 extern int inflate_flush OF((
3968 inflate_blocks_statef *,
3969 z_streamp ,
3970 int));
3971
3972 #ifndef NO_DUMMY_DECL
3973 struct internal_state {int dummy;}; /* for buggy compilers */
3974 #endif
3975
3976 #endif
3977 /* --- infutil.h */
3978
3979 #ifndef NO_DUMMY_DECL
3980 struct inflate_codes_state {int dummy;}; /* for buggy compilers */
3981 #endif
3982
3983 /* simplify the use of the inflate_huft type with some defines */
3984 #define exop word.what.Exop
3985 #define bits word.what.Bits
3986
3987 /* Table for deflate from PKZIP's appnote.txt. */
3988 local const uInt border[] = { /* Order of the bit length code lengths */
3989 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};
3990
3991 /*
3992 Notes beyond the 1.93a appnote.txt:
3993
3994 1. Distance pointers never point before the beginning of the output
3995 stream.
3996 2. Distance pointers can point back across blocks, up to 32k away.
3997 3. There is an implied maximum of 7 bits for the bit length table and
3998 15 bits for the actual data.
3999 4. If only one code exists, then it is encoded using one bit. (Zero
4000 would be more efficient, but perhaps a little confusing.) If two
4001 codes exist, they are coded using one bit each (0 and 1).
4002 5. There is no way of sending zero distance codes--a dummy must be
4003 sent if there are none. (History: a pre 2.0 version of PKZIP would
4004 store blocks with no distance codes, but this was discovered to be
4005 too harsh a criterion.) Valid only for 1.93a. 2.04c does allow
4006 zero distance codes, which is sent as one code of zero bits in
4007 length.
4008 6. There are up to 286 literal/length codes. Code 256 represents the
4009 end-of-block. Note however that the static length tree defines
4010 288 codes just to fill out the Huffman codes. Codes 286 and 287
4011 cannot be used though, since there is no length base or extra bits
defined for them. Similarly, there are up to 30 distance codes.
4013 However, static trees define 32 codes (all 5 bits) to fill out the
4014 Huffman codes, but the last two had better not show up in the data.
4015 7. Unzip can check dynamic Huffman blocks for complete code sets.
4016 The exception is that a single code would not be complete (see #4).
4017 8. The five bits following the block type is really the number of
4018 literal codes sent minus 257.
4019 9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits
4020 (1+6+6). Therefore, to output three times the length, you output
4021 three codes (1+1+1), whereas to output four times the same length,
4022 you only need two codes (1+3). Hmm.
4023 10. In the tree reconstruction algorithm, Code = Code + Increment
4024 only if BitLength(i) is not zero. (Pretty obvious.)
4025 11. Correction: 4 Bits: # of Bit Length codes - 4 (4 - 19)
4026 12. Note: length code 284 can represent 227-258, but length code 285
4027 really is 258. The last length deserves its own, short code
4028 since it gets used a lot in very redundant files. The length
4029 258 is special since 258 - 3 (the min match length) is 255.
4030 13. The literal/length and distance code bit lengths are read as a
4031 single stream of lengths. It is possible (and advantageous) for
4032 a repeat code (16, 17, or 18) to go across the boundary between
4033 the two sets of lengths.
4034 */
4035
4036
4037 void inflate_blocks_reset(s, z, c)
4038 inflate_blocks_statef *s;
4039 z_streamp z;
4040 uLongf *c;
4041 {
4042 if (c != Z_NULL)
4043 *c = s->check;
4044 if (s->mode == BTREE || s->mode == DTREE)
4045 ZFREE(z, s->sub.trees.blens);
4046 if (s->mode == CODES)
4047 inflate_codes_free(s->sub.decode.codes, z);
4048 s->mode = TYPE;
4049 s->bitk = 0;
4050 s->bitb = 0;
4051 s->read = s->write = s->window;
4052 if (s->checkfn != Z_NULL)
4053 z->adler = s->check = (*s->checkfn)(0L, (const Bytef *)Z_NULL, 0);
4054 Tracev((stderr, "inflate: blocks reset\n"));
4055 }
4056
4057
4058 inflate_blocks_statef *inflate_blocks_new(z, c, w)
4059 z_streamp z;
4060 check_func c;
4061 uInt w;
4062 {
4063 inflate_blocks_statef *s;
4064
4065 if ((s = (inflate_blocks_statef *)ZALLOC
4066 (z,1,sizeof(struct inflate_blocks_state))) == Z_NULL)
4067 return s;
4068 if ((s->hufts =
4069 (inflate_huft *)ZALLOC(z, sizeof(inflate_huft), MANY)) == Z_NULL)
4070 {
4071 ZFREE(z, s);
4072 return Z_NULL;
4073 }
4074 if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL)
4075 {
4076 ZFREE(z, s->hufts);
4077 ZFREE(z, s);
4078 return Z_NULL;
4079 }
4080 s->end = s->window + w;
4081 s->checkfn = c;
4082 s->mode = TYPE;
4083 Tracev((stderr, "inflate: blocks allocated\n"));
4084 inflate_blocks_reset(s, z, Z_NULL);
4085 return s;
4086 }
4087
4088
/* Decode deflate blocks from the input stream into the sliding
   window, flushing window contents to the output as space allows.
   r is the result code to return when stalled.  The stream state is
   copied into locals with LOAD; every exit path goes through LEAVE
   (or inflate_flush) which writes the locals back.  NEEDBITS/LEAVE
   return early when input or output is exhausted, so each case must
   be resumable; cases fall through once their data is consumed. */
int inflate_blocks(s, z, r)
inflate_blocks_statef *s;
z_streamp z;
int r;
{
  uInt t;               /* temporary storage */
  uLong b;              /* bit buffer */
  uInt k;               /* bits in bit buffer */
  Bytef *p;             /* input data pointer */
  uInt n;               /* bytes available there */
  Bytef *q;             /* output window write pointer */
  uInt m;               /* bytes to end of window or read pointer */

  /* copy input/output information to locals (UPDATE macro restores) */
  LOAD

  /* process input based on current state */
  while (1) switch (s->mode)
  {
    case TYPE:
      /* three header bits: one "last block" bit, two type bits */
      NEEDBITS(3)
      t = (uInt)b & 7;
      s->last = t & 1;
      switch (t >> 1)
      {
        case 0:                         /* stored */
          Tracev((stderr, "inflate: stored block%s\n",
                 s->last ? " (last)" : ""));
          DUMPBITS(3)
          t = k & 7;                    /* go to byte boundary */
          DUMPBITS(t)
          s->mode = LENS;               /* get length of stored block */
          break;
        case 1:                         /* fixed */
          Tracev((stderr, "inflate: fixed codes block%s\n",
                 s->last ? " (last)" : ""));
          {
            uInt bl, bd;
            inflate_huft *tl, *td;

            inflate_trees_fixed(&bl, &bd, &tl, &td, z);
            s->sub.decode.codes = inflate_codes_new(bl, bd, tl, td, z);
            if (s->sub.decode.codes == Z_NULL)
            {
              r = Z_MEM_ERROR;
              LEAVE
            }
          }
          DUMPBITS(3)
          s->mode = CODES;
          break;
        case 2:                         /* dynamic */
          Tracev((stderr, "inflate: dynamic codes block%s\n",
                 s->last ? " (last)" : ""));
          DUMPBITS(3)
          s->mode = TABLE;
          break;
        case 3:                         /* illegal */
          DUMPBITS(3)
          s->mode = BADB;
          z->msg = (char*)"invalid block type";
          r = Z_DATA_ERROR;
          LEAVE
      }
      break;
    case LENS:
      /* stored block: 16-bit length plus its one's complement */
      NEEDBITS(32)
      if ((((~b) >> 16) & 0xffff) != (b & 0xffff))
      {
        s->mode = BADB;
        z->msg = (char*)"invalid stored block lengths";
        r = Z_DATA_ERROR;
        LEAVE
      }
      s->sub.left = (uInt)b & 0xffff;
      b = k = 0;                      /* dump bits */
      Tracev((stderr, "inflate: stored length %u\n", s->sub.left));
      s->mode = s->sub.left ? STORED : (s->last ? DRY : TYPE);
      break;
    case STORED:
      if (n == 0)
        LEAVE
      NEEDOUT
      /* copy min(bytes left, input avail, window space) at once */
      t = s->sub.left;
      if (t > n) t = n;
      if (t > m) t = m;
      zmemcpy(q, p, t);
      p += t; n -= t;
      q += t; m -= t;
      if ((s->sub.left -= t) != 0)
        break;
      Tracev((stderr, "inflate: stored end, %lu total out\n",
              z->total_out + (q >= s->read ? q - s->read :
              (s->end - s->read) + (q - s->window))));
      s->mode = s->last ? DRY : TYPE;
      break;
    case TABLE:
      /* dynamic block: 5 bits hlit, 5 bits hdist, 4 bits hclen */
      NEEDBITS(14)
      s->sub.trees.table = t = (uInt)b & 0x3fff;
#ifndef PKZIP_BUG_WORKAROUND
      if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29)
      {
        s->mode = BADB;
        z->msg = (char*)"too many length or distance symbols";
        r = Z_DATA_ERROR;
        LEAVE
      }
#endif
      t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f);
      if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, sizeof(uInt))) == Z_NULL)
      {
        r = Z_MEM_ERROR;
        LEAVE
      }
      DUMPBITS(14)
      s->sub.trees.index = 0;
      Tracev((stderr, "inflate: table sizes ok\n"));
      s->mode = BTREE;
      /* fall through */
    case BTREE:
      /* read 3-bit code lengths for the bit-length tree, in the
         permuted "border" order; unsent entries default to zero */
      while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10))
      {
        NEEDBITS(3)
        s->sub.trees.blens[border[s->sub.trees.index++]] = (uInt)b & 7;
        DUMPBITS(3)
      }
      while (s->sub.trees.index < 19)
        s->sub.trees.blens[border[s->sub.trees.index++]] = 0;
      s->sub.trees.bb = 7;
      t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb,
                             &s->sub.trees.tb, s->hufts, z);
      if (t != Z_OK)
      {
        r = t;
        if (r == Z_DATA_ERROR)
        {
          ZFREE(z, s->sub.trees.blens);
          s->mode = BADB;
        }
        LEAVE
      }
      s->sub.trees.index = 0;
      Tracev((stderr, "inflate: bits tree ok\n"));
      s->mode = DTREE;
      /* fall through */
    case DTREE:
      /* decode the literal/length and distance code lengths as one
         combined sequence, using the bit-length tree; codes 16..18
         repeat a previous length or a run of zeros */
      while (t = s->sub.trees.table,
             s->sub.trees.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f))
      {
        inflate_huft *h;
        uInt i, j, c;

        t = s->sub.trees.bb;
        NEEDBITS(t)
        h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]);
        t = h->bits;
        c = h->base;
        if (c < 16)
        {
          DUMPBITS(t)
          s->sub.trees.blens[s->sub.trees.index++] = c;
        }
        else /* c == 16..18 */
        {
          i = c == 18 ? 7 : c - 14;
          j = c == 18 ? 11 : 3;
          NEEDBITS(t + i)
          DUMPBITS(t)
          j += (uInt)b & inflate_mask[i];
          DUMPBITS(i)
          i = s->sub.trees.index;
          t = s->sub.trees.table;
          /* repeat must stay in range, and code 16 needs a prior
             length to copy */
          if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) ||
              (c == 16 && i < 1))
          {
            ZFREE(z, s->sub.trees.blens);
            s->mode = BADB;
            z->msg = (char*)"invalid bit length repeat";
            r = Z_DATA_ERROR;
            LEAVE
          }
          c = c == 16 ? s->sub.trees.blens[i - 1] : 0;
          do {
            s->sub.trees.blens[i++] = c;
          } while (--j);
          s->sub.trees.index = i;
        }
      }
      s->sub.trees.tb = Z_NULL;
      {
        uInt bl, bd;
        inflate_huft *tl, *td;
        inflate_codes_statef *c;

        bl = 9;         /* must be <= 9 for lookahead assumptions */
        bd = 6;         /* must be <= 9 for lookahead assumptions */
        t = s->sub.trees.table;
        t = inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f),
                                  s->sub.trees.blens, &bl, &bd, &tl, &td,
                                  s->hufts, z);
        if (t != Z_OK)
        {
          if (t == (uInt)Z_DATA_ERROR)
          {
            ZFREE(z, s->sub.trees.blens);
            s->mode = BADB;
          }
          r = t;
          LEAVE
        }
        Tracev((stderr, "inflate: trees ok\n"));
        if ((c = inflate_codes_new(bl, bd, tl, td, z)) == Z_NULL)
        {
          r = Z_MEM_ERROR;
          LEAVE
        }
        s->sub.decode.codes = c;
      }
      ZFREE(z, s->sub.trees.blens);
      s->mode = CODES;
      /* fall through */
    case CODES:
      /* hand the stream to the codes decoder until the block ends */
      UPDATE
      if ((r = inflate_codes(s, z, r)) != Z_STREAM_END)
        return inflate_flush(s, z, r);
      r = Z_OK;
      inflate_codes_free(s->sub.decode.codes, z);
      LOAD
      Tracev((stderr, "inflate: codes end, %lu total out\n",
              z->total_out + (q >= s->read ? q - s->read :
              (s->end - s->read) + (q - s->window))));
      if (!s->last)
      {
        s->mode = TYPE;
        break;
      }
      s->mode = DRY;
      /* fall through */
    case DRY:
      /* drain whatever is still buffered in the window */
      FLUSH
      if (s->read != s->write)
        LEAVE
      s->mode = DONEB;
      /* fall through */
    case DONEB:
      r = Z_STREAM_END;
      LEAVE
    case BADB:
      r = Z_DATA_ERROR;
      LEAVE
    default:
      r = Z_STREAM_ERROR;
      LEAVE
  }
}
4339
4340
4341 int inflate_blocks_free(s, z)
4342 inflate_blocks_statef *s;
4343 z_streamp z;
4344 {
4345 inflate_blocks_reset(s, z, Z_NULL);
4346 ZFREE(z, s->window);
4347 ZFREE(z, s->hufts);
4348 ZFREE(z, s);
4349 Tracev((stderr, "inflate: blocks freed\n"));
4350 return Z_OK;
4351 }
4352
4353
4354 void inflate_set_dictionary(s, d, n)
4355 inflate_blocks_statef *s;
4356 const Bytef *d;
4357 uInt n;
4358 {
4359 zmemcpy(s->window, d, n);
4360 s->read = s->write = s->window + n;
4361 }
4362
4363
4364 /* Returns true if inflate is currently at the end of a block generated
4365 * by Z_SYNC_FLUSH or Z_FULL_FLUSH.
4366 * IN assertion: s != Z_NULL
4367 */
4368 int inflate_blocks_sync_point(s)
4369 inflate_blocks_statef *s;
4370 {
4371 return s->mode == LENS;
4372 }
4373 /* --- infblock.c */
4374
4375 /* +++ inftrees.c */
4376 /* inftrees.c -- generate Huffman trees for efficient decoding
4377 * Copyright (C) 1995-2002 Mark Adler
4378 * For conditions of distribution and use, see copyright notice in zlib.h
4379 */
4380
4381 /* #include "zutil.h" */
4382 /* #include "inftrees.h" */
4383
4384 #if !defined(BUILDFIXED) && !defined(STDC)
4385 # define BUILDFIXED /* non ANSI compilers may not accept inffixed.h */
4386 #endif
4387
4388 const char inflate_copyright[] =
4389 " inflate 1.1.4 Copyright 1995-2002 Mark Adler ";
4390 /*
4391 If you use the zlib library in a product, an acknowledgment is welcome
4392 in the documentation of your product. If for some reason you cannot
4393 include such an acknowledgment, I would appreciate that you keep this
4394 copyright string in the executable of your product.
4395 */
4396
4397 #ifndef NO_DUMMY_DECL
4398 struct internal_state {int dummy;}; /* for buggy compilers */
4399 #endif
4400
4401 /* simplify the use of the inflate_huft type with some defines */
4402 #define exop word.what.Exop
4403 #define bits word.what.Bits
4404
4405
4406 local int huft_build OF((
4407 uIntf *, /* code lengths in bits */
4408 uInt, /* number of codes */
4409 uInt, /* number of "simple" codes */
4410 const uIntf *, /* list of base values for non-simple codes */
4411 const uIntf *, /* list of extra bits for non-simple codes */
4412 inflate_huft * FAR*,/* result: starting table */
4413 uIntf *, /* maximum lookup bits (returns actual) */
4414 inflate_huft *, /* space for trees */
4415 uInt *, /* hufts used in space */
4416 uIntf * )); /* space for values */
4417
4418 /* Tables for deflate from PKZIP's appnote.txt. */
4419 local const uInt cplens[31] = { /* Copy lengths for literal codes 257..285 */
4420 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
4421 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};
4422 /* see note #13 above about 258 */
4423 local const uInt cplext[31] = { /* Extra bits for literal codes 257..285 */
4424 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,
4425 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 112, 112}; /* 112==invalid */
4426 local const uInt cpdist[30] = { /* Copy offsets for distance codes 0..29 */
4427 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
4428 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
4429 8193, 12289, 16385, 24577};
4430 local const uInt cpdext[30] = { /* Extra bits for distance codes */
4431 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,
4432 7, 7, 8, 8, 9, 9, 10, 10, 11, 11,
4433 12, 12, 13, 13};
4434
4435 /*
4436 Huffman code decoding is performed using a multi-level table lookup.
4437 The fastest way to decode is to simply build a lookup table whose
4438 size is determined by the longest code. However, the time it takes
4439 to build this table can also be a factor if the data being decoded
4440 is not very long. The most common codes are necessarily the
4441 shortest codes, so those codes dominate the decoding time, and hence
4442 the speed. The idea is you can have a shorter table that decodes the
4443 shorter, more probable codes, and then point to subsidiary tables for
4444 the longer codes. The time it costs to decode the longer codes is
4445 then traded against the time it takes to make longer tables.
4446
The results of this trade are in the variables lbits and dbits
4448 below. lbits is the number of bits the first level table for literal/
4449 length codes can decode in one step, and dbits is the same thing for
4450 the distance codes. Subsequent tables are also less than or equal to
4451 those sizes. These values may be adjusted either when all of the
4452 codes are shorter than that, in which case the longest code length in
4453 bits is used, or when the shortest code is *longer* than the requested
4454 table size, in which case the length of the shortest code in bits is
4455 used.
4456
4457 There are two different values for the two tables, since they code a
4458 different number of possibilities each. The literal/length table
4459 codes 286 possible values, or in a flat code, a little over eight
4460 bits. The distance table codes 30 possible values, or a little less
4461 than five bits, flat. The optimum values for speed end up being
4462 about one bit more than those, so lbits is 8+1 and dbits is 5+1.
4463 The optimum values may differ though from machine to machine, and
4464 possibly even between compilers. Your mileage may vary.
4465 */
4466
4467
4468 /* If BMAX needs to be larger than 16, then h and x[] should be uLong. */
4469 #define BMAX 15 /* maximum bit length of any code */
4470
/*
 * Build a set of linked Huffman decode tables from a list of code lengths.
 *
 * The tables are carved out of the caller-supplied array hp (at most MANY
 * hufts total, tracked through *hn).  The root table indexes *m bits at a
 * time (bounded below by the shortest code and above by the longest code);
 * sub-tables handle codes longer than *m bits.  *m is updated to the bit
 * width actually used for the root table.
 */
local int huft_build(b, n, s, d, e, t, m, hp, hn, v)
uIntf *b;               /* code lengths in bits (all assumed <= BMAX) */
uInt n;                 /* number of codes (assumed <= 288) */
uInt s;                 /* number of simple-valued codes (0..s-1) */
const uIntf *d;         /* list of base values for non-simple codes */
const uIntf *e;         /* list of extra bits for non-simple codes */
inflate_huft * FAR *t;  /* result: starting table */
uIntf *m;               /* maximum lookup bits, returns actual */
inflate_huft *hp;       /* space for trees */
uInt *hn;               /* hufts used in space */
uIntf *v;               /* working area: values in order of bit length */
/* Given a list of code lengths and a maximum table size, make a set of
   tables to decode that set of codes.  Return Z_OK on success, Z_BUF_ERROR
   if the given code set is incomplete (the tables are still built in this
   case), or Z_DATA_ERROR if the input is invalid. */
{

  uInt a;                       /* counter for codes of length k */
  uInt c[BMAX+1];               /* bit length count table */
  uInt f;                       /* i repeats in table every f entries */
  int g;                        /* maximum code length */
  int h;                        /* table level */
  register uInt i;              /* counter, current code */
  register uInt j;              /* counter */
  register int k;               /* number of bits in current code */
  int l;                        /* bits per table (returned in m) */
  uInt mask;                    /* (1 << w) - 1, to avoid cc -O bug on HP */
  register uIntf *p;            /* pointer into c[], b[], or v[] */
  inflate_huft *q;              /* points to current table */
  struct inflate_huft_s r;      /* table entry for structure assignment */
  inflate_huft *u[BMAX];        /* table stack */
  register int w;               /* bits before this table == (l * h) */
  uInt x[BMAX+1];               /* bit offsets, then code stack */
  uIntf *xp;                    /* pointer into x */
  int y;                        /* number of dummy codes added */
  uInt z;                       /* number of entries in current table */


  /* Generate counts for each bit length */
  p = c;
#define C0 *p++ = 0;
#define C2 C0 C0 C0 C0
#define C4 C2 C2 C2 C2
  C4                            /* clear c[]--assume BMAX+1 is 16 */
  p = b;  i = n;
  do {
    c[*p++]++;                  /* assume all entries <= BMAX */
  } while (--i);
  if (c[0] == n)                /* null input--all zero length codes */
  {
    *t = (inflate_huft *)Z_NULL;
    *m = 0;
    return Z_OK;
  }


  /* Find minimum and maximum length, bound *m by those */
  l = *m;
  for (j = 1; j <= BMAX; j++)
    if (c[j])
      break;
  k = j;                        /* minimum code length */
  if ((uInt)l < j)
    l = j;
  for (i = BMAX; i; i--)
    if (c[i])
      break;
  g = i;                        /* maximum code length */
  if ((uInt)l > i)
    l = i;
  *m = l;


  /* Adjust last length count to fill out codes, if needed */
  /* y < 0 here means the lengths oversubscribe the code space: invalid. */
  for (y = 1 << j; j < i; j++, y <<= 1)
    if ((y -= c[j]) < 0)
      return Z_DATA_ERROR;
  if ((y -= c[i]) < 0)
    return Z_DATA_ERROR;
  c[i] += y;                    /* y > 0 left over == incomplete code set */


  /* Generate starting offsets into the value table for each length */
  x[1] = j = 0;
  p = c + 1;  xp = x + 2;
  while (--i) {                 /* note that i == g from above */
    *xp++ = (j += *p++);
  }


  /* Make a table of values in order of bit lengths */
  p = b;  i = 0;
  do {
    if ((j = *p++) != 0)
      v[x[j]++] = i;
  } while (++i < n);
  n = x[g];                     /* set n to length of v */


  /* Generate the Huffman codes and for each, make the table entries */
  x[0] = i = 0;                 /* first Huffman code is zero */
  p = v;                        /* grab values in bit order */
  h = -1;                       /* no tables yet--level -1 */
  w = -l;                       /* bits decoded == (l * h) */
  u[0] = (inflate_huft *)Z_NULL;        /* just to keep compilers happy */
  q = (inflate_huft *)Z_NULL;   /* ditto */
  z = 0;                        /* ditto */

  /* go through the bit lengths (k already is bits in shortest code) */
  for (; k <= g; k++)
  {
    a = c[k];
    while (a--)
    {
      /* here i is the Huffman code of length k bits for value *p */
      /* make tables up to required level */
      while (k > w + l)
      {
        h++;
        w += l;                 /* previous table always l bits */

        /* compute minimum size table less than or equal to l bits */
        z = g - w;
        z = z > (uInt)l ? l : z;        /* table size upper limit */
        if ((f = 1 << (j = k - w)) > a + 1)     /* try a k-w bit table */
        {                       /* too few codes for k-w bit table */
          f -= a + 1;           /* deduct codes from patterns left */
          xp = c + k;
          if (j < z)
            while (++j < z)     /* try smaller tables up to z bits */
            {
              if ((f <<= 1) <= *++xp)
                break;          /* enough codes to use up j bits */
              f -= *xp;         /* else deduct codes from patterns */
            }
        }
        z = 1 << j;             /* table entries for j-bit table */

        /* allocate new table */
        if (*hn + z > MANY)     /* (note: doesn't matter for fixed) */
          return Z_DATA_ERROR;  /* overflow of MANY */
        u[h] = q = hp + *hn;
        *hn += z;

        /* connect to last table, if there is one */
        if (h)
        {
          x[h] = i;             /* save pattern for backing up */
          r.bits = (Byte)l;     /* bits to dump before this table */
          r.exop = (Byte)j;     /* bits in this table */
          j = i >> (w - l);
          r.base = (uInt)(q - u[h-1] - j);      /* offset to this table */
          u[h-1][j] = r;        /* connect to last table */
        }
        else
          *t = q;               /* first table is returned result */
      }

      /* set up table entry in r */
      r.bits = (Byte)(k - w);
      if (p >= v + n)
        r.exop = 128 + 64;      /* out of values--invalid code */
      else if (*p < s)
      {
        r.exop = (Byte)(*p < 256 ? 0 : 32 + 64);        /* 256 is end-of-block */
        r.base = *p++;          /* simple code is just the value */
      }
      else
      {
        r.exop = (Byte)(e[*p - s] + 16 + 64);   /* non-simple--look up in lists */
        r.base = d[*p++ - s];
      }

      /* fill code-like entries with r */
      f = 1 << (k - w);
      for (j = i >> w; j < z; j += f)
        q[j] = r;

      /* backwards increment the k-bit code i */
      /* (codes are stored bit-reversed, so increment the high bit first) */
      for (j = 1 << (k - 1); i & j; j >>= 1)
        i ^= j;
      i ^= j;

      /* backup over finished tables */
      mask = (1 << w) - 1;      /* needed on HP, cc -O bug */
      while ((i & mask) != x[h])
      {
        h--;                    /* don't need to update q */
        w -= l;
        mask = (1 << w) - 1;
      }
    }
  }


  /* Return Z_BUF_ERROR if we were given an incomplete table */
  /* (exception: a single code of one bit, g == 1, is tolerated) */
  return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK;
}
4669
4670
4671 int inflate_trees_bits(c, bb, tb, hp, z)
4672 uIntf *c; /* 19 code lengths */
4673 uIntf *bb; /* bits tree desired/actual depth */
4674 inflate_huft * FAR *tb; /* bits tree result */
4675 inflate_huft *hp; /* space for trees */
4676 z_streamp z; /* for messages */
4677 {
4678 int r;
4679 uInt hn = 0; /* hufts used in space */
4680 uIntf *v; /* work area for huft_build */
4681
4682 if ((v = (uIntf*)ZALLOC(z, 19, sizeof(uInt))) == Z_NULL)
4683 return Z_MEM_ERROR;
4684 r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL,
4685 tb, bb, hp, &hn, v);
4686 if (r == Z_DATA_ERROR)
4687 z->msg = (char*)"oversubscribed dynamic bit lengths tree";
4688 else if (r == Z_BUF_ERROR || *bb == 0)
4689 {
4690 z->msg = (char*)"incomplete dynamic bit lengths tree";
4691 r = Z_DATA_ERROR;
4692 }
4693 ZFREE(z, v);
4694 return r;
4695 }
4696
4697
/*
 * Build the literal/length and distance decode tables for a dynamic block.
 * Both tables are carved out of hp; on any error the scratch area v is
 * freed and an explanatory message is placed in z->msg.
 *
 * NOTE: the braces below are deliberately interleaved with the
 * PKZIP_BUG_WORKAROUND conditional -- the #else arm supplies the closing
 * structure for the non-workaround build.  Do not re-indent or "fix" the
 * brace balance without checking both preprocessor arms.
 */
int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, hp, z)
uInt nl;                /* number of literal/length codes */
uInt nd;                /* number of distance codes */
uIntf *c;               /* that many (total) code lengths */
uIntf *bl;              /* literal desired/actual bit depth */
uIntf *bd;              /* distance desired/actual bit depth */
inflate_huft * FAR *tl; /* literal/length tree result */
inflate_huft * FAR *td; /* distance tree result */
inflate_huft *hp;       /* space for trees */
z_streamp z;            /* for messages */
{
  int r;
  uInt hn = 0;          /* hufts used in space */
  uIntf *v;             /* work area for huft_build */

  /* allocate work area */
  if ((v = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL)
    return Z_MEM_ERROR;

  /* build literal/length tree */
  r = huft_build(c, nl, 257, cplens, cplext, tl, bl, hp, &hn, v);
  if (r != Z_OK || *bl == 0)
  {
    if (r == Z_DATA_ERROR)
      z->msg = (char*)"oversubscribed literal/length tree";
    else if (r != Z_MEM_ERROR)
    {
      z->msg = (char*)"incomplete literal/length tree";
      r = Z_DATA_ERROR;
    }
    ZFREE(z, v);
    return r;
  }

  /* build distance tree */
  r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, hp, &hn, v);
  if (r != Z_OK || (*bd == 0 && nl > 257))
  {
    if (r == Z_DATA_ERROR)
      z->msg = (char*)"oversubscribed distance tree";
    else if (r == Z_BUF_ERROR) {
#ifdef PKZIP_BUG_WORKAROUND
      /* PKZIP 2.04c sometimes emits an incomplete distance tree for a
         valid stream; tolerate it.  NOTE(review): in this arm an
         oversubscribed tree (Z_DATA_ERROR) also falls through to the
         final "return Z_OK" below with only z->msg set -- this matches
         upstream zlib 1.1.x but looks intentional-but-surprising;
         confirm before changing. */
      r = Z_OK;
    }
#else
      z->msg = (char*)"incomplete distance tree";
      r = Z_DATA_ERROR;
    }
    else if (r != Z_MEM_ERROR)
    {
      z->msg = (char*)"empty distance tree with lengths";
      r = Z_DATA_ERROR;
    }
    ZFREE(z, v);
    return r;
#endif
  }

  /* done */
  ZFREE(z, v);
  return Z_OK;
}
4760
4761
/* build fixed tables only once--keep them here */
#ifdef BUILDFIXED
/* Lazily-built decode tables for fixed (static Huffman) blocks; see
   inflate_trees_fixed().  When BUILDFIXED is not defined, precomputed
   tables from inffixed.h are compiled in instead (the #else arm). */
local int fixed_built = 0;              /* set once the tables exist */
#define FIXEDH 544 /* number of hufts used by fixed tables */
local inflate_huft *fixed_mem = NULL;   /* backing store, never freed */
local uInt fixed_bl;                    /* root bits of literal table */
local uInt fixed_bd;                    /* root bits of distance table */
local inflate_huft *fixed_tl;           /* literal/length table */
local inflate_huft *fixed_td;           /* distance table */
4771 #else
4772 /* +++ inffixed.h */
4773 /* inffixed.h -- table for decoding fixed codes
4774 * Generated automatically by the maketree.c program
4775 */
4776
4777 /* WARNING: this file should *not* be used by applications. It is
4778 part of the implementation of the compression library and is
4779 subject to change. Applications should only use zlib.h.
4780 */
4781
4782 local uInt fixed_bl = 9;
4783 local uInt fixed_bd = 5;
4784 local inflate_huft fixed_tl[] = {
4785 {{{96,7}},256}, {{{0,8}},80}, {{{0,8}},16}, {{{84,8}},115},
4786 {{{82,7}},31}, {{{0,8}},112}, {{{0,8}},48}, {{{0,9}},192},
4787 {{{80,7}},10}, {{{0,8}},96}, {{{0,8}},32}, {{{0,9}},160},
4788 {{{0,8}},0}, {{{0,8}},128}, {{{0,8}},64}, {{{0,9}},224},
4789 {{{80,7}},6}, {{{0,8}},88}, {{{0,8}},24}, {{{0,9}},144},
4790 {{{83,7}},59}, {{{0,8}},120}, {{{0,8}},56}, {{{0,9}},208},
4791 {{{81,7}},17}, {{{0,8}},104}, {{{0,8}},40}, {{{0,9}},176},
4792 {{{0,8}},8}, {{{0,8}},136}, {{{0,8}},72}, {{{0,9}},240},
4793 {{{80,7}},4}, {{{0,8}},84}, {{{0,8}},20}, {{{85,8}},227},
4794 {{{83,7}},43}, {{{0,8}},116}, {{{0,8}},52}, {{{0,9}},200},
4795 {{{81,7}},13}, {{{0,8}},100}, {{{0,8}},36}, {{{0,9}},168},
4796 {{{0,8}},4}, {{{0,8}},132}, {{{0,8}},68}, {{{0,9}},232},
4797 {{{80,7}},8}, {{{0,8}},92}, {{{0,8}},28}, {{{0,9}},152},
4798 {{{84,7}},83}, {{{0,8}},124}, {{{0,8}},60}, {{{0,9}},216},
4799 {{{82,7}},23}, {{{0,8}},108}, {{{0,8}},44}, {{{0,9}},184},
4800 {{{0,8}},12}, {{{0,8}},140}, {{{0,8}},76}, {{{0,9}},248},
4801 {{{80,7}},3}, {{{0,8}},82}, {{{0,8}},18}, {{{85,8}},163},
4802 {{{83,7}},35}, {{{0,8}},114}, {{{0,8}},50}, {{{0,9}},196},
4803 {{{81,7}},11}, {{{0,8}},98}, {{{0,8}},34}, {{{0,9}},164},
4804 {{{0,8}},2}, {{{0,8}},130}, {{{0,8}},66}, {{{0,9}},228},
4805 {{{80,7}},7}, {{{0,8}},90}, {{{0,8}},26}, {{{0,9}},148},
4806 {{{84,7}},67}, {{{0,8}},122}, {{{0,8}},58}, {{{0,9}},212},
4807 {{{82,7}},19}, {{{0,8}},106}, {{{0,8}},42}, {{{0,9}},180},
4808 {{{0,8}},10}, {{{0,8}},138}, {{{0,8}},74}, {{{0,9}},244},
4809 {{{80,7}},5}, {{{0,8}},86}, {{{0,8}},22}, {{{192,8}},0},
4810 {{{83,7}},51}, {{{0,8}},118}, {{{0,8}},54}, {{{0,9}},204},
4811 {{{81,7}},15}, {{{0,8}},102}, {{{0,8}},38}, {{{0,9}},172},
4812 {{{0,8}},6}, {{{0,8}},134}, {{{0,8}},70}, {{{0,9}},236},
4813 {{{80,7}},9}, {{{0,8}},94}, {{{0,8}},30}, {{{0,9}},156},
4814 {{{84,7}},99}, {{{0,8}},126}, {{{0,8}},62}, {{{0,9}},220},
4815 {{{82,7}},27}, {{{0,8}},110}, {{{0,8}},46}, {{{0,9}},188},
4816 {{{0,8}},14}, {{{0,8}},142}, {{{0,8}},78}, {{{0,9}},252},
4817 {{{96,7}},256}, {{{0,8}},81}, {{{0,8}},17}, {{{85,8}},131},
4818 {{{82,7}},31}, {{{0,8}},113}, {{{0,8}},49}, {{{0,9}},194},
4819 {{{80,7}},10}, {{{0,8}},97}, {{{0,8}},33}, {{{0,9}},162},
4820 {{{0,8}},1}, {{{0,8}},129}, {{{0,8}},65}, {{{0,9}},226},
4821 {{{80,7}},6}, {{{0,8}},89}, {{{0,8}},25}, {{{0,9}},146},
4822 {{{83,7}},59}, {{{0,8}},121}, {{{0,8}},57}, {{{0,9}},210},
4823 {{{81,7}},17}, {{{0,8}},105}, {{{0,8}},41}, {{{0,9}},178},
4824 {{{0,8}},9}, {{{0,8}},137}, {{{0,8}},73}, {{{0,9}},242},
4825 {{{80,7}},4}, {{{0,8}},85}, {{{0,8}},21}, {{{80,8}},258},
4826 {{{83,7}},43}, {{{0,8}},117}, {{{0,8}},53}, {{{0,9}},202},
4827 {{{81,7}},13}, {{{0,8}},101}, {{{0,8}},37}, {{{0,9}},170},
4828 {{{0,8}},5}, {{{0,8}},133}, {{{0,8}},69}, {{{0,9}},234},
4829 {{{80,7}},8}, {{{0,8}},93}, {{{0,8}},29}, {{{0,9}},154},
4830 {{{84,7}},83}, {{{0,8}},125}, {{{0,8}},61}, {{{0,9}},218},
4831 {{{82,7}},23}, {{{0,8}},109}, {{{0,8}},45}, {{{0,9}},186},
4832 {{{0,8}},13}, {{{0,8}},141}, {{{0,8}},77}, {{{0,9}},250},
4833 {{{80,7}},3}, {{{0,8}},83}, {{{0,8}},19}, {{{85,8}},195},
4834 {{{83,7}},35}, {{{0,8}},115}, {{{0,8}},51}, {{{0,9}},198},
4835 {{{81,7}},11}, {{{0,8}},99}, {{{0,8}},35}, {{{0,9}},166},
4836 {{{0,8}},3}, {{{0,8}},131}, {{{0,8}},67}, {{{0,9}},230},
4837 {{{80,7}},7}, {{{0,8}},91}, {{{0,8}},27}, {{{0,9}},150},
4838 {{{84,7}},67}, {{{0,8}},123}, {{{0,8}},59}, {{{0,9}},214},
4839 {{{82,7}},19}, {{{0,8}},107}, {{{0,8}},43}, {{{0,9}},182},
4840 {{{0,8}},11}, {{{0,8}},139}, {{{0,8}},75}, {{{0,9}},246},
4841 {{{80,7}},5}, {{{0,8}},87}, {{{0,8}},23}, {{{192,8}},0},
4842 {{{83,7}},51}, {{{0,8}},119}, {{{0,8}},55}, {{{0,9}},206},
4843 {{{81,7}},15}, {{{0,8}},103}, {{{0,8}},39}, {{{0,9}},174},
4844 {{{0,8}},7}, {{{0,8}},135}, {{{0,8}},71}, {{{0,9}},238},
4845 {{{80,7}},9}, {{{0,8}},95}, {{{0,8}},31}, {{{0,9}},158},
4846 {{{84,7}},99}, {{{0,8}},127}, {{{0,8}},63}, {{{0,9}},222},
4847 {{{82,7}},27}, {{{0,8}},111}, {{{0,8}},47}, {{{0,9}},190},
4848 {{{0,8}},15}, {{{0,8}},143}, {{{0,8}},79}, {{{0,9}},254},
4849 {{{96,7}},256}, {{{0,8}},80}, {{{0,8}},16}, {{{84,8}},115},
4850 {{{82,7}},31}, {{{0,8}},112}, {{{0,8}},48}, {{{0,9}},193},
4851 {{{80,7}},10}, {{{0,8}},96}, {{{0,8}},32}, {{{0,9}},161},
4852 {{{0,8}},0}, {{{0,8}},128}, {{{0,8}},64}, {{{0,9}},225},
4853 {{{80,7}},6}, {{{0,8}},88}, {{{0,8}},24}, {{{0,9}},145},
4854 {{{83,7}},59}, {{{0,8}},120}, {{{0,8}},56}, {{{0,9}},209},
4855 {{{81,7}},17}, {{{0,8}},104}, {{{0,8}},40}, {{{0,9}},177},
4856 {{{0,8}},8}, {{{0,8}},136}, {{{0,8}},72}, {{{0,9}},241},
4857 {{{80,7}},4}, {{{0,8}},84}, {{{0,8}},20}, {{{85,8}},227},
4858 {{{83,7}},43}, {{{0,8}},116}, {{{0,8}},52}, {{{0,9}},201},
4859 {{{81,7}},13}, {{{0,8}},100}, {{{0,8}},36}, {{{0,9}},169},
4860 {{{0,8}},4}, {{{0,8}},132}, {{{0,8}},68}, {{{0,9}},233},
4861 {{{80,7}},8}, {{{0,8}},92}, {{{0,8}},28}, {{{0,9}},153},
4862 {{{84,7}},83}, {{{0,8}},124}, {{{0,8}},60}, {{{0,9}},217},
4863 {{{82,7}},23}, {{{0,8}},108}, {{{0,8}},44}, {{{0,9}},185},
4864 {{{0,8}},12}, {{{0,8}},140}, {{{0,8}},76}, {{{0,9}},249},
4865 {{{80,7}},3}, {{{0,8}},82}, {{{0,8}},18}, {{{85,8}},163},
4866 {{{83,7}},35}, {{{0,8}},114}, {{{0,8}},50}, {{{0,9}},197},
4867 {{{81,7}},11}, {{{0,8}},98}, {{{0,8}},34}, {{{0,9}},165},
4868 {{{0,8}},2}, {{{0,8}},130}, {{{0,8}},66}, {{{0,9}},229},
4869 {{{80,7}},7}, {{{0,8}},90}, {{{0,8}},26}, {{{0,9}},149},
4870 {{{84,7}},67}, {{{0,8}},122}, {{{0,8}},58}, {{{0,9}},213},
4871 {{{82,7}},19}, {{{0,8}},106}, {{{0,8}},42}, {{{0,9}},181},
4872 {{{0,8}},10}, {{{0,8}},138}, {{{0,8}},74}, {{{0,9}},245},
4873 {{{80,7}},5}, {{{0,8}},86}, {{{0,8}},22}, {{{192,8}},0},
4874 {{{83,7}},51}, {{{0,8}},118}, {{{0,8}},54}, {{{0,9}},205},
4875 {{{81,7}},15}, {{{0,8}},102}, {{{0,8}},38}, {{{0,9}},173},
4876 {{{0,8}},6}, {{{0,8}},134}, {{{0,8}},70}, {{{0,9}},237},
4877 {{{80,7}},9}, {{{0,8}},94}, {{{0,8}},30}, {{{0,9}},157},
4878 {{{84,7}},99}, {{{0,8}},126}, {{{0,8}},62}, {{{0,9}},221},
4879 {{{82,7}},27}, {{{0,8}},110}, {{{0,8}},46}, {{{0,9}},189},
4880 {{{0,8}},14}, {{{0,8}},142}, {{{0,8}},78}, {{{0,9}},253},
4881 {{{96,7}},256}, {{{0,8}},81}, {{{0,8}},17}, {{{85,8}},131},
4882 {{{82,7}},31}, {{{0,8}},113}, {{{0,8}},49}, {{{0,9}},195},
4883 {{{80,7}},10}, {{{0,8}},97}, {{{0,8}},33}, {{{0,9}},163},
4884 {{{0,8}},1}, {{{0,8}},129}, {{{0,8}},65}, {{{0,9}},227},
4885 {{{80,7}},6}, {{{0,8}},89}, {{{0,8}},25}, {{{0,9}},147},
4886 {{{83,7}},59}, {{{0,8}},121}, {{{0,8}},57}, {{{0,9}},211},
4887 {{{81,7}},17}, {{{0,8}},105}, {{{0,8}},41}, {{{0,9}},179},
4888 {{{0,8}},9}, {{{0,8}},137}, {{{0,8}},73}, {{{0,9}},243},
4889 {{{80,7}},4}, {{{0,8}},85}, {{{0,8}},21}, {{{80,8}},258},
4890 {{{83,7}},43}, {{{0,8}},117}, {{{0,8}},53}, {{{0,9}},203},
4891 {{{81,7}},13}, {{{0,8}},101}, {{{0,8}},37}, {{{0,9}},171},
4892 {{{0,8}},5}, {{{0,8}},133}, {{{0,8}},69}, {{{0,9}},235},
4893 {{{80,7}},8}, {{{0,8}},93}, {{{0,8}},29}, {{{0,9}},155},
4894 {{{84,7}},83}, {{{0,8}},125}, {{{0,8}},61}, {{{0,9}},219},
4895 {{{82,7}},23}, {{{0,8}},109}, {{{0,8}},45}, {{{0,9}},187},
4896 {{{0,8}},13}, {{{0,8}},141}, {{{0,8}},77}, {{{0,9}},251},
4897 {{{80,7}},3}, {{{0,8}},83}, {{{0,8}},19}, {{{85,8}},195},
4898 {{{83,7}},35}, {{{0,8}},115}, {{{0,8}},51}, {{{0,9}},199},
4899 {{{81,7}},11}, {{{0,8}},99}, {{{0,8}},35}, {{{0,9}},167},
4900 {{{0,8}},3}, {{{0,8}},131}, {{{0,8}},67}, {{{0,9}},231},
4901 {{{80,7}},7}, {{{0,8}},91}, {{{0,8}},27}, {{{0,9}},151},
4902 {{{84,7}},67}, {{{0,8}},123}, {{{0,8}},59}, {{{0,9}},215},
4903 {{{82,7}},19}, {{{0,8}},107}, {{{0,8}},43}, {{{0,9}},183},
4904 {{{0,8}},11}, {{{0,8}},139}, {{{0,8}},75}, {{{0,9}},247},
4905 {{{80,7}},5}, {{{0,8}},87}, {{{0,8}},23}, {{{192,8}},0},
4906 {{{83,7}},51}, {{{0,8}},119}, {{{0,8}},55}, {{{0,9}},207},
4907 {{{81,7}},15}, {{{0,8}},103}, {{{0,8}},39}, {{{0,9}},175},
4908 {{{0,8}},7}, {{{0,8}},135}, {{{0,8}},71}, {{{0,9}},239},
4909 {{{80,7}},9}, {{{0,8}},95}, {{{0,8}},31}, {{{0,9}},159},
4910 {{{84,7}},99}, {{{0,8}},127}, {{{0,8}},63}, {{{0,9}},223},
4911 {{{82,7}},27}, {{{0,8}},111}, {{{0,8}},47}, {{{0,9}},191},
4912 {{{0,8}},15}, {{{0,8}},143}, {{{0,8}},79}, {{{0,9}},255}
4913 };
4914 local inflate_huft fixed_td[] = {
4915 {{{80,5}},1}, {{{87,5}},257}, {{{83,5}},17}, {{{91,5}},4097},
4916 {{{81,5}},5}, {{{89,5}},1025}, {{{85,5}},65}, {{{93,5}},16385},
4917 {{{80,5}},3}, {{{88,5}},513}, {{{84,5}},33}, {{{92,5}},8193},
4918 {{{82,5}},9}, {{{90,5}},2049}, {{{86,5}},129}, {{{192,5}},24577},
4919 {{{80,5}},2}, {{{87,5}},385}, {{{83,5}},25}, {{{91,5}},6145},
4920 {{{81,5}},7}, {{{89,5}},1537}, {{{85,5}},97}, {{{93,5}},24577},
4921 {{{80,5}},4}, {{{88,5}},769}, {{{84,5}},49}, {{{92,5}},12289},
4922 {{{82,5}},13}, {{{90,5}},3073}, {{{86,5}},193}, {{{192,5}},24577}
4923 };
4924 /* --- inffixed.h */
4925 #endif
4926
4927
/*
 * Return the decode tables for a fixed (static Huffman) block.
 * With BUILDFIXED the tables are built on first use and cached in the
 * fixed_* globals; otherwise the precompiled inffixed.h tables are used.
 * Always succeeds (Z_OK) except for allocation failure during the
 * one-time build.
 *
 * NOTE(review): the first-use build is not guarded by any lock here;
 * presumably callers serialize the first inflate -- confirm for
 * multi-threaded kernel use.
 */
int inflate_trees_fixed(bl, bd, tl, td, z)
uIntf *bl;               /* literal desired/actual bit depth */
uIntf *bd;               /* distance desired/actual bit depth */
inflate_huft * FAR *tl;  /* literal/length tree result */
inflate_huft * FAR *td;  /* distance tree result */
z_streamp z;             /* for memory allocation */
{
#ifdef BUILDFIXED
  /* build fixed tables if not already */
  if (!fixed_built)
  {
    int k;               /* temporary variable */
    uInt f = 0;          /* number of hufts used in fixed_mem */
    uIntf *c;            /* length list for huft_build */
    uIntf *v;            /* work area for huft_build */

    /* allocate memory */
    if ((c = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL)
      return Z_MEM_ERROR;
    if ((v = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL)
    {
      ZFREE(z, c);
      return Z_MEM_ERROR;
    }

    /* fixed_mem is intentionally kept for the life of the process */
    if ((fixed_mem = (inflate_huft*)ZALLOC(z, FIXEDH, sizeof(inflate_huft))) == Z_NULL)
    {
      ZFREE(z, c);
      ZFREE(z, v);
      return Z_MEM_ERROR;
    }

    /* literal table: RFC 1951 fixed literal/length code lengths */
    for (k = 0; k < 144; k++)
      c[k] = 8;
    for (; k < 256; k++)
      c[k] = 9;
    for (; k < 280; k++)
      c[k] = 7;
    for (; k < 288; k++)
      c[k] = 8;
    fixed_bl = 9;
    /* return value ignored: the fixed code set is complete by construction */
    huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl,
               fixed_mem, &f, v);

    /* distance table: all 30 distance codes are 5 bits */
    for (k = 0; k < 30; k++)
      c[k] = 5;
    fixed_bd = 5;
    huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd,
               fixed_mem, &f, v);

    /* done */
    ZFREE(z, v);
    ZFREE(z, c);
    fixed_built = 1;
  }
#endif
  *bl = fixed_bl;
  *bd = fixed_bd;
  *tl = fixed_tl;
  *td = fixed_td;
  return Z_OK;
}
4992 /* --- inftrees.c */
4993
4994 /* +++ infcodes.c */
4995 /* infcodes.c -- process literals and length/distance pairs
4996 * Copyright (C) 1995-2002 Mark Adler
4997 * For conditions of distribution and use, see copyright notice in zlib.h
4998 */
4999
5000 /* #include "zutil.h" */
5001 /* #include "inftrees.h" */
5002 /* #include "infblock.h" */
5003 /* #include "infcodes.h" */
5004 /* #include "infutil.h" */
5005
5006 /* +++ inffast.h */
5007 /* inffast.h -- header to use inffast.c
5008 * Copyright (C) 1995-2002 Mark Adler
5009 * For conditions of distribution and use, see copyright notice in zlib.h
5010 */
5011
5012 /* WARNING: this file should *not* be used by applications. It is
5013 part of the implementation of the compression library and is
5014 subject to change. Applications should only use zlib.h.
5015 */
5016
5017 extern int inflate_fast OF((
5018 uInt,
5019 uInt,
5020 inflate_huft *,
5021 inflate_huft *,
5022 inflate_blocks_statef *,
5023 z_streamp ));
5024 /* --- inffast.h */
5025
5026 /* simplify the use of the inflate_huft type with some defines */
5027 #define exop word.what.Exop
5028 #define bits word.what.Bits
5029
/* State machine for inflate_codes(): each value names what the decoder is
   waiting on before it can advance -- "i:" = more input bits, "o:" = output
   window space, "x:" = nothing (internal transition). */
typedef enum {        /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
      START,    /* x: set up for LEN */
      LEN,      /* i: get length/literal/eob next */
      LENEXT,   /* i: getting length extra (have base) */
      DIST,     /* i: get distance next */
      DISTEXT,  /* i: getting distance extra */
      COPY,     /* o: copying bytes in window, waiting for space */
      LIT,      /* o: got literal, waiting for output space */
      WASH,     /* o: got eob, possibly still output waiting */
      END,      /* x: got eob and all data flushed */
      BADCODE}  /* x: got error */
inflate_codes_mode;
5042
5043 /* inflate codes private state */
5044 struct inflate_codes_state {
5045
5046 /* mode */
5047 inflate_codes_mode mode; /* current inflate_codes mode */
5048
5049 /* mode dependent information */
5050 uInt len;
5051 union {
5052 struct {
5053 inflate_huft *tree; /* pointer into tree */
5054 uInt need; /* bits needed */
5055 } code; /* if LEN or DIST, where in tree */
5056 uInt lit; /* if LIT, literal */
5057 struct {
5058 uInt get; /* bits to get for extra */
5059 uInt dist; /* distance back to copy from */
5060 } copy; /* if EXT or COPY, where and how much */
5061 } sub; /* submode */
5062
5063 /* mode independent information */
5064 Byte lbits; /* ltree bits decoded per branch */
5065 Byte dbits; /* dtree bits decoder per branch */
5066 inflate_huft *ltree; /* literal/length/eob tree */
5067 inflate_huft *dtree; /* distance tree */
5068
5069 };
5070
5071
5072 inflate_codes_statef *inflate_codes_new(bl, bd, tl, td, z)
5073 uInt bl, bd;
5074 inflate_huft *tl;
5075 inflate_huft *td; /* need separate declaration for Borland C++ */
5076 z_streamp z;
5077 {
5078 inflate_codes_statef *c;
5079
5080 if ((c = (inflate_codes_statef *)
5081 ZALLOC(z,1,sizeof(struct inflate_codes_state))) != Z_NULL)
5082 {
5083 c->mode = START;
5084 c->lbits = (Byte)bl;
5085 c->dbits = (Byte)bd;
5086 c->ltree = tl;
5087 c->dtree = td;
5088 Tracev((stderr, "inflate: codes new\n"));
5089 }
5090 return c;
5091 }
5092
5093
/*
 * Decode literals and length/distance pairs for the current block until
 * input or output space runs out, the block ends, or an error occurs.
 * r is the return value carried in from the caller and updated here.
 *
 * NOTE: NEEDBITS, NEEDOUT and LEAVE are macros that can return from this
 * function (after UPDATE restores z/s from the locals), and several case
 * labels fall through intentionally -- the switch is re-entered with the
 * saved c->mode on the next call.
 */
int inflate_codes(s, z, r)
inflate_blocks_statef *s;
z_streamp z;
int r;
{
  uInt j;               /* temporary storage */
  inflate_huft *t;      /* temporary pointer */
  uInt e;               /* extra bits or operation */
  uLong b;              /* bit buffer */
  uInt k;               /* bits in bit buffer */
  Bytef *p;             /* input data pointer */
  uInt n;               /* bytes available there */
  Bytef *q;             /* output window write pointer */
  uInt m;               /* bytes to end of window or read pointer */
  Bytef *f;             /* pointer to copy strings from */
  inflate_codes_statef *c = s->sub.decode.codes;  /* codes state */

  /* copy input/output information to locals (UPDATE macro restores) */
  LOAD

  /* process input and output based on current state */
  while (1) switch (c->mode)
  {             /* waiting for "i:"=input, "o:"=output, "x:"=nothing */
    case START:         /* x: set up for LEN */
#ifndef SLOW
      /* enough window and input for the unchecked fast path? */
      if (m >= 258 && n >= 10)
      {
        UPDATE
        r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z);
        LOAD
        if (r != Z_OK)
        {
          c->mode = r == Z_STREAM_END ? WASH : BADCODE;
          break;
        }
      }
#endif /* !SLOW */
      c->sub.code.need = c->lbits;
      c->sub.code.tree = c->ltree;
      c->mode = LEN;
      /* fallthrough */
    case LEN:           /* i: get length/literal/eob next */
      j = c->sub.code.need;
      NEEDBITS(j)
      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
      DUMPBITS(t->bits)
      e = (uInt)(t->exop);
      if (e == 0)               /* literal */
      {
        c->sub.lit = t->base;
        Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                 "inflate: literal '%c'\n" :
                 "inflate: literal 0x%02x\n", t->base));
        c->mode = LIT;
        break;
      }
      if (e & 16)               /* length */
      {
        c->sub.copy.get = e & 15;
        c->len = t->base;
        c->mode = LENEXT;
        break;
      }
      if ((e & 64) == 0)        /* next table */
      {
        c->sub.code.need = e;
        c->sub.code.tree = t + t->base;
        break;
      }
      if (e & 32)               /* end of block */
      {
        Tracevv((stderr, "inflate: end of block\n"));
        c->mode = WASH;
        break;
      }
      c->mode = BADCODE;        /* invalid code */
      z->msg = (char*)"invalid literal/length code";
      r = Z_DATA_ERROR;
      LEAVE
    case LENEXT:        /* i: getting length extra (have base) */
      j = c->sub.copy.get;
      NEEDBITS(j)
      c->len += (uInt)b & inflate_mask[j];
      DUMPBITS(j)
      c->sub.code.need = c->dbits;
      c->sub.code.tree = c->dtree;
      Tracevv((stderr, "inflate: length %u\n", c->len));
      c->mode = DIST;
      /* fallthrough */
    case DIST:          /* i: get distance next */
      j = c->sub.code.need;
      NEEDBITS(j)
      t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);
      DUMPBITS(t->bits)
      e = (uInt)(t->exop);
      if (e & 16)               /* distance */
      {
        c->sub.copy.get = e & 15;
        c->sub.copy.dist = t->base;
        c->mode = DISTEXT;
        break;
      }
      if ((e & 64) == 0)        /* next table */
      {
        c->sub.code.need = e;
        c->sub.code.tree = t + t->base;
        break;
      }
      c->mode = BADCODE;        /* invalid code */
      z->msg = (char*)"invalid distance code";
      r = Z_DATA_ERROR;
      LEAVE
    case DISTEXT:       /* i: getting distance extra */
      j = c->sub.copy.get;
      NEEDBITS(j)
      c->sub.copy.dist += (uInt)b & inflate_mask[j];
      DUMPBITS(j)
      Tracevv((stderr, "inflate: distance %u\n", c->sub.copy.dist));
      c->mode = COPY;
      /* fallthrough */
    case COPY:          /* o: copying bytes in window, waiting for space */
      f = q - c->sub.copy.dist;
      while (f < s->window)             /* modulo window size-"while" instead */
        f += s->end - s->window;        /* of "if" handles invalid distances */
      while (c->len)
      {
        NEEDOUT
        OUTBYTE(*f++)
        if (f == s->end)
          f = s->window;
        c->len--;
      }
      c->mode = START;
      break;
    case LIT:           /* o: got literal, waiting for output space */
      NEEDOUT
      OUTBYTE(c->sub.lit)
      c->mode = START;
      break;
    case WASH:          /* o: got eob, possibly more output */
      if (k > 7)        /* return unused byte, if any */
      {
        Assert(k < 16, "inflate_codes grabbed too many bytes")
        k -= 8;
        n++;
        p--;            /* can always return one */
      }
      FLUSH
      if (s->read != s->write)
        LEAVE
      c->mode = END;
      /* fallthrough */
    case END:
      r = Z_STREAM_END;
      LEAVE
    case BADCODE:       /* x: got error */
      r = Z_DATA_ERROR;
      LEAVE
    default:
      r = Z_STREAM_ERROR;
      LEAVE
  }
#ifdef NEED_DUMMY_RETURN
  return Z_STREAM_ERROR;  /* Some dumb compilers complain without this */
#endif
}
5256
5257
5258 void inflate_codes_free(c, z)
5259 inflate_codes_statef *c;
5260 z_streamp z;
5261 {
5262 ZFREE(z, c);
5263 Tracev((stderr, "inflate: codes free\n"));
5264 }
5265 /* --- infcodes.c */
5266
5267 /* +++ infutil.c */
5268 /* inflate_util.c -- data and routines common to blocks and codes
5269 * Copyright (C) 1995-2002 Mark Adler
5270 * For conditions of distribution and use, see copyright notice in zlib.h
5271 */
5272
5273 /* #include "zutil.h" */
5274 /* #include "infblock.h" */
5275 /* #include "inftrees.h" */
5276 /* #include "infcodes.h" */
5277 /* #include "infutil.h" */
5278
#ifndef NO_DUMMY_DECL
/* NOTE(review): this amalgamated file already defines the real
   struct inflate_codes_state above; this dummy redefinition can only
   compile if NO_DUMMY_DECL is defined for this build -- confirm it is
   set in the surrounding headers. */
struct inflate_codes_state {int dummy;}; /* for buggy compilers */
#endif

/* And'ing with mask[n] masks the lower n bits */
/* inflate_mask[n] == (1 << n) - 1 for n in 0..16 */
uInt inflate_mask[17] = {
    0x0000,
    0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,
    0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff
};
5289
5290
/* copy as much as possible from the sliding window to the output area */
/*
 * Drains [s->read, s->write) of the circular window into z->next_out,
 * limited by z->avail_out, possibly in two pieces when the valid region
 * wraps past s->end.  Updates avail_out/total_out, the running check
 * value (if any), z->next_out and s->read.  Returns r unchanged except
 * that Z_BUF_ERROR is promoted to Z_OK when any bytes were moved.
 */
int inflate_flush(s, z, r)
inflate_blocks_statef *s;
z_streamp z;
int r;
{
  uInt n;               /* bytes to copy in current piece */
  Bytef *p;             /* local copy of z->next_out */
  Bytef *q;             /* local copy of s->read */

  /* local copies of source and destination pointers */
  p = z->next_out;
  q = s->read;

  /* compute number of bytes to copy as far as end of window */
  /* (if write pointer has wrapped past read, copy up to s->end first) */
  n = (uInt)((q <= s->write ? s->write : s->end) - q);
  if (n > z->avail_out) n = z->avail_out;
  if (n && r == Z_BUF_ERROR) r = Z_OK;

  /* update counters */
  z->avail_out -= n;
  z->total_out += n;

  /* update check information */
  if (s->checkfn != Z_NULL)
    z->adler = s->check = (*s->checkfn)(s->check, q, n);

  /* copy as far as end of window */
  zmemcpy(p, q, n);
  p += n;
  q += n;

  /* see if more to copy at beginning of window */
  if (q == s->end)
  {
    /* wrap pointers */
    q = s->window;
    if (s->write == s->end)
      s->write = s->window;

    /* compute bytes to copy */
    n = (uInt)(s->write - q);
    if (n > z->avail_out) n = z->avail_out;
    if (n && r == Z_BUF_ERROR) r = Z_OK;

    /* update counters */
    z->avail_out -= n;
    z->total_out += n;

    /* update check information */
    if (s->checkfn != Z_NULL)
      z->adler = s->check = (*s->checkfn)(s->check, q, n);

    /* copy */
    zmemcpy(p, q, n);
    p += n;
    q += n;
  }

  /* update pointers */
  z->next_out = p;
  s->read = q;

  /* done */
  return r;
}
5357 /* --- infutil.c */
5358
5359 /* +++ inffast.c */
5360 /* inffast.c -- process literals and length/distance pairs fast
5361 * Copyright (C) 1995-2002 Mark Adler
5362 * For conditions of distribution and use, see copyright notice in zlib.h
5363 */
5364
5365 /* #include "zutil.h" */
5366 /* #include "inftrees.h" */
5367 /* #include "infblock.h" */
5368 /* #include "infcodes.h" */
5369 /* #include "infutil.h" */
5370 /* #include "inffast.h" */
5371
5372 #ifndef NO_DUMMY_DECL
5373 struct inflate_codes_state {int dummy;}; /* for buggy compilers */
5374 #endif
5375
5376 /* simplify the use of the inflate_huft type with some defines */
5377 #define exop word.what.Exop
5378 #define bits word.what.Bits
5379
5380 /* macros for bit input with no checking and for returning unused bytes */
5381 #define GRABBITS(j) {while(k<(j)){b|=((uLong)NEXTBYTE)<<k;k+=8;}}
5382 #define UNGRAB {c=z->avail_in-n;c=(k>>3)<c?k>>3:c;n+=c;p-=c;k-=c<<3;}
5383
5384 /* Called with number of bytes left to write in window at least 258
5385 (the maximum string length) and number of input bytes available
5386 at least ten. The ten bytes are six bytes for the longest length/
5387 distance pair plus four bytes for overloading the bit buffer. */
5388
/* inflate_fast() -- decode literal/length and distance codes as fast as
 * possible using the precomputed Huffman tables tl (literal/length) and td
 * (distance), whose root tables are indexed with bl and bd bits.  s is the
 * inflate-blocks state holding the sliding window; z is the zlib stream.
 * Returns Z_OK when input or window space drops below the fast-loop
 * thresholds, Z_STREAM_END on the end-of-block code, or Z_DATA_ERROR (with
 * z->msg set) on an invalid code.  Relies on the caller's guarantee stated
 * above: at least 258 bytes of window space and 10 bytes of input.
 */
int inflate_fast(bl, bd, tl, td, s, z)
uInt bl, bd;
inflate_huft *tl;
inflate_huft *td; /* need separate declaration for Borland C++ */
inflate_blocks_statef *s;
z_streamp z;
{
  inflate_huft *t;      /* temporary pointer */
  uInt e;               /* extra bits or operation */
  uLong b;              /* bit buffer */
  uInt k;               /* bits in bit buffer */
  Bytef *p;             /* input data pointer */
  uInt n;               /* bytes available there */
  Bytef *q;             /* output window write pointer */
  uInt m;               /* bytes to end of window or read pointer */
  uInt ml;              /* mask for literal/length tree */
  uInt md;              /* mask for distance tree */
  uInt c;               /* bytes to copy */
  uInt d;               /* distance back to copy from */
  Bytef *r;             /* copy source pointer */

  /* load input, output, bit values */
  LOAD

  /* initialize masks */
  ml = inflate_mask[bl];
  md = inflate_mask[bd];

  /* do until not enough input or output space for fast loop */
  do {                          /* assume called with m >= 258 && n >= 10 */
    /* get literal/length code */
    GRABBITS(20)                /* max bits for literal/length code */
    /* exop == 0 means the table entry is a literal byte: emit it directly */
    if ((e = (t = tl + ((uInt)b & ml))->exop) == 0)
    {
      DUMPBITS(t->bits)
      Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                "inflate:         * literal '%c'\n" :
                "inflate:         * literal 0x%02x\n", t->base));
      *q++ = (Byte)t->base;
      m--;
      continue;
    }
    do {
      DUMPBITS(t->bits)
      if (e & 16)               /* length base + extra bits follow */
      {
        /* get extra bits for length */
        e &= 15;
        c = t->base + ((uInt)b & inflate_mask[e]);
        DUMPBITS(e)
        Tracevv((stderr, "inflate:         * length %u\n", c));

        /* decode distance base of block to copy */
        GRABBITS(15);           /* max bits for distance code */
        e = (t = td + ((uInt)b & md))->exop;
        do {
          DUMPBITS(t->bits)
          if (e & 16)           /* distance base + extra bits follow */
          {
            /* get extra bits to add to distance base */
            e &= 15;
            GRABBITS(e)         /* get extra bits (up to 13) */
            d = t->base + ((uInt)b & inflate_mask[e]);
            DUMPBITS(e)
            Tracevv((stderr, "inflate:         * distance %u\n", d));

            /* do the copy */
            m -= c;
            r = q - d;
            if (r < s->window)                  /* wrap if needed */
            {
              do {
                r += s->end - s->window;        /* force pointer in window */
              } while (r < s->window);          /* covers invalid distances */
              e = s->end - r;                   /* bytes before window end */
              if (c > e)
              {
                c -= e;                         /* wrapped copy */
                do {
                    *q++ = *r++;
                } while (--e);
                r = s->window;                  /* continue from window start */
                do {
                    *q++ = *r++;
                } while (--c);
              }
              else                              /* normal copy */
              {
                *q++ = *r++;  c--;
                *q++ = *r++;  c--;
                do {
                    *q++ = *r++;
                } while (--c);
              }
            }
            else                                /* normal copy */
            {
              *q++ = *r++;  c--;
              *q++ = *r++;  c--;
              do {
                  *q++ = *r++;
              } while (--c);
            }
            break;
          }
          else if ((e & 64) == 0)               /* second-level table lookup */
          {
            t += t->base;
            e = (t += ((uInt)b & inflate_mask[e]))->exop;
          }
          else
          {
            z->msg = (char*)"invalid distance code";
            UNGRAB
            UPDATE
            return Z_DATA_ERROR;
          }
        } while (1);
        break;
      }
      if ((e & 64) == 0)        /* second-level literal/length table lookup */
      {
        t += t->base;
        if ((e = (t += ((uInt)b & inflate_mask[e]))->exop) == 0)
        {
          DUMPBITS(t->bits)
          Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?
                    "inflate:         * literal '%c'\n" :
                    "inflate:         * literal 0x%02x\n", t->base));
          *q++ = (Byte)t->base;
          m--;
          break;
        }
      }
      else if (e & 32)          /* end-of-block code */
      {
        Tracevv((stderr, "inflate:         * end of block\n"));
        UNGRAB
        UPDATE
        return Z_STREAM_END;
      }
      else
      {
        z->msg = (char*)"invalid literal/length code";
        UNGRAB
        UPDATE
        return Z_DATA_ERROR;
      }
    } while (1);
  } while (m >= 258 && n >= 10);

  /* not enough input or output--restore pointers and return */
  UNGRAB
  UPDATE
  return Z_OK;
}
5545 /* --- inffast.c */
5546
5547 /* +++ zutil.c */
5548 /* zutil.c -- target dependent utility functions for the compression library
5549 * Copyright (C) 1995-2002 Jean-loup Gailly.
5550 * For conditions of distribution and use, see copyright notice in zlib.h
5551 */
5552
5553 /* @(#) $Id: zlib.c,v 1.10 2004/07/29 19:17:20 lindak Exp $ */
5554
5555 /* #include "zutil.h" */
5556
#ifndef NO_DUMMY_DECL
/* Placeholder definition so old compilers accept pointers to the otherwise
   opaque internal_state type. */
struct internal_state      {int dummy;}; /* for buggy compilers */
#endif

#ifndef STDC
/* Pre-ANSI environments may lack a prototype for exit(); declare it. */
extern void exit OF((int));
#endif
5564
/* Error message table, one entry per zlib return code.  Looked up via the
 * ERR_MSG macro (defined elsewhere in this file); NOTE(review): ordering
 * suggests index (2 - err) selects the message for code err -- confirm
 * against the ERR_MSG definition.  Entry order must not change. */
const char *z_errmsg[10] = {
"need dictionary",     /* Z_NEED_DICT       2  */
"stream end",          /* Z_STREAM_END      1  */
"",                    /* Z_OK              0  */
"file error",          /* Z_ERRNO         (-1) */
"stream error",        /* Z_STREAM_ERROR  (-2) */
"data error",          /* Z_DATA_ERROR    (-3) */
"insufficient memory", /* Z_MEM_ERROR     (-4) */
"buffer error",        /* Z_BUF_ERROR     (-5) */
"incompatible version",/* Z_VERSION_ERROR (-6) */
""};
5576
5577
/* Return the version string of the zlib code compiled into this library. */
const char * ZEXPORT zlibVersion()
{
    return ZLIB_VERSION;
}
5582
#ifdef DEBUG_ZLIB

#  ifndef verbose
#    define verbose 0
#  endif
/* Trace verbosity level used by the Trace* macros; debug builds only. */
int z_verbose = verbose;

/* Report an internal error message m on stderr and terminate.  Only
   compiled in DEBUG_ZLIB builds (used by the Assert/Trace machinery). */
void z_error (m)
    char *m;
{
    fprintf(stderr, "%s\n", m);
    exit(1);
}
#endif
5597
5598 /* exported to allow conversion of error code to string for compress() and
5599 * uncompress()
5600 */
/* Return the human-readable message string for zlib error code err. */
const char * ZEXPORT zError(err)
    int err;
{
    /* ERR_MSG is defined elsewhere (zutil.h section); NOTE(review): assumed
       to index the z_errmsg[] table above -- confirm against the macro. */
    return ERR_MSG(err);
}
5606
5607
5608 #ifndef HAVE_MEMCPY
5609
5610 void zmemcpy(dest, source, len)
5611 Bytef* dest;
5612 const Bytef* source;
5613 uInt len;
5614 {
5615 if (len == 0) return;
5616 do {
5617 *dest++ = *source++; /* ??? to be unrolled */
5618 } while (--len != 0);
5619 }
5620
5621 int zmemcmp(s1, s2, len)
5622 const Bytef* s1;
5623 const Bytef* s2;
5624 uInt len;
5625 {
5626 uInt j;
5627
5628 for (j = 0; j < len; j++) {
5629 if (s1[j] != s2[j]) return 2*(s1[j] > s2[j])-1;
5630 }
5631 return 0;
5632 }
5633
5634 void zmemzero(dest, len)
5635 Bytef* dest;
5636 uInt len;
5637 {
5638 if (len == 0) return;
5639 do {
5640 *dest++ = 0; /* ??? to be unrolled */
5641 } while (--len != 0);
5642 }
5643 #endif
5644
5645 #ifdef __TURBOC__
5646 #if (defined( __BORLANDC__) || !defined(SMALL_MEDIUM)) && !defined(__32BIT__)
5647 /* Small and medium model in Turbo C are for now limited to near allocation
5648 * with reduced MAX_WBITS and MAX_MEM_LEVEL
5649 */
5650 # define MY_ZCALLOC
5651
5652 /* Turbo C malloc() does not allow dynamic allocation of 64K bytes
5653 * and farmalloc(64K) returns a pointer with an offset of 8, so we
5654 * must fix the pointer. Warning: the pointer must be put back to its
5655 * original form in order to free it, use zcfree().
5656 */
5657
#define MAX_PTR 10
/* 10*64K = 640K */

local int next_ptr = 0;    /* number of table[] entries currently in use */

typedef struct ptr_table_s {
    voidpf org_ptr;    /* pointer exactly as returned by farmalloc() */
    voidpf new_ptr;    /* normalized seg:0 pointer handed to the caller */
} ptr_table;

local ptr_table table[MAX_PTR];
/* This table is used to remember the original form of pointers
 * to large buffers (64K). Such pointers are normalized with a zero offset.
 * Since MSDOS is not a preemptive multitasking OS, this table is not
 * protected from concurrent access. This hack doesn't work anyway on
 * a protected system like OS/2. Use Microsoft C instead.
 */
5675
/* Turbo C allocator: allocate items*size bytes with farmalloc().  Requests
 * of 65520 bytes or more get 16 extra bytes and are normalized to a seg:0
 * far pointer; the original pointer is remembered in table[] so zcfree()
 * can release it.  Returns NULL when farmalloc() fails or table[] is full.
 */
voidpf zcalloc (voidpf opaque, unsigned items, unsigned size)
{
    voidpf buf = opaque; /* just to make some compilers happy */
    ulg bsize = (ulg)items*size;

    /* If we allocate less than 65520 bytes, we assume that farmalloc
     * will return a usable pointer which doesn't have to be normalized.
     */
    if (bsize < 65520L) {
        buf = farmalloc(bsize);
        /* non-zero offset word means the pointer is already usable as-is */
        if (*(ush*)&buf != 0) return buf;
    } else {
        buf = farmalloc(bsize + 16L);
    }
    if (buf == NULL || next_ptr >= MAX_PTR) return NULL;
    table[next_ptr].org_ptr = buf;

    /* Normalize the pointer to seg:0 */
    *((ush*)&buf+1) += ((ush)((uch*)buf-0) + 15) >> 4;
    *(ush*)&buf = 0;
    table[next_ptr++].new_ptr = buf;
    return buf;
}
5699
/* Turbo C deallocator: release a pointer obtained from zcalloc() above.
 * Pointers with a non-zero offset word (< 64K objects) were returned by
 * farmalloc() directly; normalized seg:0 pointers are looked up in table[]
 * to recover the original farmalloc() pointer, and the table is compacted.
 */
void zcfree (voidpf opaque, voidpf ptr)
{
    int n;
    if (*(ush*)&ptr != 0) { /* object < 64K */
        farfree(ptr);
        return;
    }
    /* Find the original pointer */
    for (n = 0; n < next_ptr; n++) {
        if (ptr != table[n].new_ptr) continue;

        farfree(table[n].org_ptr);
        /* shift remaining entries down to keep the table dense */
        while (++n < next_ptr) {
            table[n-1] = table[n];
        }
        next_ptr--;
        return;
    }
    ptr = opaque; /* just to make some compilers happy */
    Assert(0, "zcfree: ptr not found");
}
5721 #endif
5722 #endif /* __TURBOC__ */
5723
5724
#if defined(M_I86) && !defined(__32BIT__)
/* Microsoft C in 16-bit mode */

# define MY_ZCALLOC

#if (!defined(_MSC_VER) || (_MSC_VER <= 600))
#  define _halloc  halloc
#  define _hfree   hfree
#endif

/* 16-bit MSC allocator: huge-model allocation of items elements of size
 * bytes via _halloc().  opaque is the user allocation context; unused. */
voidpf zcalloc (voidpf opaque, unsigned items, unsigned size)
{
    if (opaque) opaque = 0; /* to make compiler happy */
    return _halloc((long)items, size);
}

/* 16-bit MSC deallocator matching the zcalloc() above. */
void zcfree (voidpf opaque, voidpf ptr)
{
    if (opaque) opaque = 0; /* to make compiler happy */
    _hfree(ptr);
}

#endif /* MSC */
5748
5749
5750 #ifndef MY_ZCALLOC /* Any system without a special alloc function */
5751
5752 #ifndef STDC
5753 extern voidp calloc OF((uInt items, uInt size));
5754 extern void free OF((voidpf ptr));
5755 #endif
5756
5757 voidpf zcalloc (opaque, items, size)
5758 voidpf opaque;
5759 unsigned items;
5760 unsigned size;
5761 {
5762 if (opaque) items += size - size; /* make compiler happy */
5763 return (voidpf)calloc(items, size);
5764 }
5765
5766 void zcfree (opaque, ptr)
5767 voidpf opaque;
5768 voidpf ptr;
5769 {
5770 _FREE(ptr);
5771 if (opaque) return; /* make compiler happy */
5772 }
5773
5774 #endif /* MY_ZCALLOC */
5775 /* --- zutil.c */
5776
5777 /* +++ adler32.c */
5778 /* adler32.c -- compute the Adler-32 checksum of a data stream
5779 * Copyright (C) 1995-2002 Mark Adler
5780 * For conditions of distribution and use, see copyright notice in zlib.h
5781 */
5782
5783 /* @(#) $Id: zlib.c,v 1.10 2004/07/29 19:17:20 lindak Exp $ */
5784
5785 /* #include "zlib.h" */
5786
#define BASE 65521L /* largest prime smaller than 65536 */
#define NMAX 5552
/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */

/* DO1..DO16 unroll the inner checksum loop of adler32() below; they update
   the local accumulators s1 (byte sum) and s2 (sum of sums) in-place. */
#define DO1(buf,i)  {s1 += buf[i]; s2 += s1;}
#define DO2(buf,i)  DO1(buf,i); DO1(buf,i+1);
#define DO4(buf,i)  DO2(buf,i); DO2(buf,i+2);
#define DO8(buf,i)  DO4(buf,i); DO4(buf,i+4);
#define DO16(buf)   DO8(buf,0); DO8(buf,8);
5796
5797 /* ========================================================================= */
5798 uLong ZEXPORT adler32(adler, buf, len)
5799 uLong adler;
5800 const Bytef *buf;
5801 uInt len;
5802 {
5803 unsigned long s1 = adler & 0xffff;
5804 unsigned long s2 = (adler >> 16) & 0xffff;
5805 int k;
5806
5807 if (buf == Z_NULL) return 1L;
5808
5809 while (len > 0) {
5810 k = len < NMAX ? len : NMAX;
5811 len -= k;
5812 while (k >= 16) {
5813 DO16(buf);
5814 buf += 16;
5815 k -= 16;
5816 }
5817 if (k != 0) do {
5818 s1 += *buf++;
5819 s2 += s1;
5820 } while (--k);
5821 s1 %= BASE;
5822 s2 %= BASE;
5823 }
5824 return (s2 << 16) | s1;
5825 }
5826 /* --- adler32.c */