/*
 * Copyright (c) 2000 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. The rights granted to you under the License
 * may not be used to create, or enable the creation or redistribution of,
 * unlawful or unlicensed copies of an Apple operating system, or to
 * circumvent, violate, or enable the circumvention or violation of, any
 * terms of an Apple operating system software license agreement.
 *
 * Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_OSREFERENCE_LICENSE_HEADER_END@
 */
/*
 * This file is derived from various .h and .c files from the zlib-1.0.4
 * distribution by Jean-loup Gailly and Mark Adler, with some additions
 * by Paul Mackerras to aid in implementing Deflate compression and
 * decompression for PPP packets. See zlib.h for conditions of
 * distribution and use.
 *
 * Changes that have been made include:
 * - added Z_PACKET_FLUSH (see zlib.h for details)
 * - added inflateIncomp and deflateOutputPending
 * - allow strm->next_out to be NULL, meaning discard the output
 *
 * $FreeBSD: src/sys/net/zlib.c,v 1.10 1999/12/29 04:38:38 peter Exp $
 */

#define NO_DUMMY_DECL
#define NO_ZCFUNCS
#define MY_ZCALLOC

/* +++ zutil.h */
/* zutil.h -- internal interface and configuration of the compression library
 * Copyright (C) 1995-2002 Jean-loup Gailly.
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* WARNING: this file should *not* be used by applications. It is
   part of the implementation of the compression library and is
   subject to change. Applications should only use zlib.h.
 */

/* @(#) $Id: zlib.c,v 1.10.874.1 2005/06/24 01:47:11 lindak Exp $ */

#ifndef _Z_UTIL_H
#define _Z_UTIL_H

#ifdef KERNEL
#include <net/zlib.h>
#else
#include "zlib.h"
#endif

#ifdef KERNEL
/* Assume this is a *BSD or SVR4 kernel */
#include <sys/types.h>
#include <sys/time.h>
#include <sys/systm.h>
#  define HAVE_MEMCPY
#  define memcpy(d, s, n)   bcopy((s), (d), (n))
#  define memset(d, v, n)   bzero((d), (n))
#  define memcmp            bcmp

#else
#if defined(__KERNEL__)
/* Assume this is a Linux kernel */
#include <linux/string.h>
#define HAVE_MEMCPY

#else /* not kernel */
#ifdef STDC
#  include <stddef.h>
#  include <string.h>
#  include <stdlib.h>
#endif
#ifdef NO_ERRNO_H
    extern int errno;
#else
#   include <errno.h>
#endif
#endif /* __KERNEL__ */
#endif /* KERNEL */

#ifndef local
#  define local static
#endif
/* compile with -Dlocal if your debugger can't find static symbols */

typedef unsigned char  uch;
typedef uch FAR uchf;
typedef unsigned short ush;
typedef ush FAR ushf;
typedef unsigned long  ulg;

extern const char *z_errmsg[10]; /* indexed by 2-zlib_error */
/* (size given to avoid silly warnings with Visual C++) */

#define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)]

#define ERR_RETURN(strm,err) \
  return (strm->msg = (char*)ERR_MSG(err), (err))
/* To be used only when the state is known to be valid */
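
/* Illustrative expansion (added for clarity; not in the original sources):
 * a call such as ERR_RETURN(strm, Z_BUF_ERROR) becomes
 *     return (strm->msg = (char*)ERR_MSG(Z_BUF_ERROR), (Z_BUF_ERROR));
 * i.e. the matching message from z_errmsg[] is recorded in the stream and
 * the error code is returned in a single statement.
 */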

        /* common constants */

#ifndef DEF_WBITS
#  define DEF_WBITS MAX_WBITS
#endif
/* default windowBits for decompression. MAX_WBITS is for compression only */

#if MAX_MEM_LEVEL >= 8
#  define DEF_MEM_LEVEL 8
#else
#  define DEF_MEM_LEVEL  MAX_MEM_LEVEL
#endif
/* default memLevel */

#define STORED_BLOCK 0
#define STATIC_TREES 1
#define DYN_TREES    2
/* The three kinds of block type */

#define MIN_MATCH  3
#define MAX_MATCH  258
/* The minimum and maximum match lengths */

#define PRESET_DICT 0x20 /* preset dictionary flag in zlib header */

        /* target dependencies */

#ifdef MSDOS
#  define OS_CODE  0x00
#  if defined(__TURBOC__) || defined(__BORLANDC__)
#    if(__STDC__ == 1) && (defined(__LARGE__) || defined(__COMPACT__))
       /* Allow compilation with ANSI keywords only enabled */
       void _Cdecl farfree( void *block );
       void *_Cdecl farmalloc( unsigned long nbytes );
#    else
#     include <alloc.h>
#    endif
#  else /* MSC or DJGPP */
#    include <malloc.h>
#  endif
#endif

#ifdef OS2
#  define OS_CODE  0x06
#endif

#ifdef WIN32 /* Windows 95 & Windows NT */
#  define OS_CODE  0x0b
#endif

#if defined(VAXC) || defined(VMS)
#  define OS_CODE  0x02
#  define F_OPEN(name, mode) \
     fopen((name), (mode), "mbc=60", "ctx=stm", "rfm=fix", "mrs=512")
#endif

#ifdef AMIGA
#  define OS_CODE  0x01
#endif

#if defined(ATARI) || defined(atarist)
#  define OS_CODE  0x05
#endif

#if defined(MACOS) || defined(TARGET_OS_MAC)
#  define OS_CODE  0x07
#  if defined(__MWERKS__) && __dest_os != __be_os && __dest_os != __win32_os
#    include <unix.h> /* for fdopen */
#  else
#    ifndef fdopen
#      define fdopen(fd,mode) NULL /* No fdopen() */
#    endif
#  endif
#endif

#ifdef __50SERIES /* Prime/PRIMOS */
#  define OS_CODE  0x0F
#endif

#ifdef TOPS20
#  define OS_CODE  0x0a
#endif

#if defined(_BEOS_) || defined(RISCOS)
#  define fdopen(fd,mode) NULL /* No fdopen() */
#endif

#if (defined(_MSC_VER) && (_MSC_VER > 600))
#  define fdopen(fd,type)  _fdopen(fd,type)
#endif


        /* Common defaults */

#ifndef OS_CODE
#  define OS_CODE  0x03  /* assume Unix */
#endif

#ifndef F_OPEN
#  define F_OPEN(name, mode) fopen((name), (mode))
#endif

         /* functions */

#ifdef HAVE_STRERROR
   extern char *strerror OF((int));
#  define zstrerror(errnum) strerror(errnum)
#else
#  define zstrerror(errnum) ""
#endif

#if defined(pyr)
#  define NO_MEMCPY
#endif
#if defined(SMALL_MEDIUM) && !defined(_MSC_VER) && !defined(__SC__)
 /* Use our own functions for small and medium model with MSC <= 5.0.
  * You may have to use the same strategy for Borland C (untested).
  * The __SC__ check is for Symantec.
  */
#  define NO_MEMCPY
#endif
#if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)
#  define HAVE_MEMCPY
#endif
#ifdef HAVE_MEMCPY
#  ifdef SMALL_MEDIUM /* MSDOS small or medium model */
#    define zmemcpy _fmemcpy
#    define zmemcmp _fmemcmp
#    define zmemzero(dest, len) _fmemset(dest, 0, len)
#  else
#    define zmemcpy memcpy
#    define zmemcmp memcmp
#    define zmemzero(dest, len) memset(dest, 0, len)
#  endif
#else
   extern void zmemcpy  OF((Bytef* dest, const Bytef* source, uInt len));
   extern int  zmemcmp  OF((const Bytef* s1, const Bytef* s2, uInt len));
   extern void zmemzero OF((Bytef* dest, uInt len));
#endif

/* Diagnostic functions */
#ifdef DEBUG_ZLIB
#  include <stdio.h>
   extern int z_verbose;
   extern void z_error    OF((char *m));
#  define Assert(cond,msg) {if(!(cond)) z_error(msg);}
#  define Trace(x) {if (z_verbose>=0) fprintf x ;}
#  define Tracev(x) {if (z_verbose>0) fprintf x ;}
#  define Tracevv(x) {if (z_verbose>1) fprintf x ;}
#  define Tracec(c,x) {if (z_verbose>0 && (c)) fprintf x ;}
#  define Tracecv(c,x) {if (z_verbose>1 && (c)) fprintf x ;}
#else
#  define Assert(cond,msg)
#  define Trace(x)
#  define Tracev(x)
#  define Tracevv(x)
#  define Tracec(c,x)
#  define Tracecv(c,x)
#endif


typedef uLong (ZEXPORT *check_func) OF((uLong check, const Bytef *buf,
                                        uInt len));
voidpf zcalloc OF((voidpf opaque, unsigned items, unsigned size));
void   zcfree  OF((voidpf opaque, voidpf ptr));

#define ZALLOC(strm, items, size) \
           (*((strm)->zalloc))((strm)->opaque, (items), (size))
#define ZFREE(strm, addr)  (*((strm)->zfree))((strm)->opaque, (voidpf)(addr))
#define TRY_FREE(s, p) {if (p) ZFREE(s, p);}
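
/* Illustrative expansion (added; not part of the original zlib sources):
 * a call inside the library such as
 *     s->head = (Posf *) ZALLOC(strm, s->hash_size, sizeof(Pos));
 * expands to
 *     (*strm->zalloc)(strm->opaque, s->hash_size, sizeof(Pos))
 * so every allocation and free is routed through the zalloc/zfree callbacks
 * installed in the z_stream.
 */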

#endif /* _Z_UTIL_H */
/* --- zutil.h */

/* +++ deflate.h */
/* deflate.h -- internal compression state
 * Copyright (C) 1995-2002 Jean-loup Gailly
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/* WARNING: this file should *not* be used by applications. It is
   part of the implementation of the compression library and is
   subject to change. Applications should only use zlib.h.
 */

/* @(#) $Id: zlib.c,v 1.10.874.1 2005/06/24 01:47:11 lindak Exp $ */

#ifndef _DEFLATE_H
#define _DEFLATE_H

/* #include "zutil.h" */

/* ===========================================================================
 * Internal compression state.
 */

#define LENGTH_CODES 29
/* number of length codes, not counting the special END_BLOCK code */

#define LITERALS  256
/* number of literal bytes 0..255 */

#define L_CODES (LITERALS+1+LENGTH_CODES)
/* number of Literal or Length codes, including the END_BLOCK code */

#define D_CODES   30
/* number of distance codes */

#define BL_CODES  19
/* number of codes used to transfer the bit lengths */

#define HEAP_SIZE (2*L_CODES+1)
/* maximum heap size */

#define MAX_BITS 15
/* All codes must not exceed MAX_BITS bits */

#define INIT_STATE    42
#define BUSY_STATE   113
#define FINISH_STATE 666
/* Stream status */


/* Data structure describing a single value and its code string. */
typedef struct ct_data_s {
    union {
        ush  freq;       /* frequency count */
        ush  code;       /* bit string */
    } fc;
    union {
        ush  dad;        /* father node in Huffman tree */
        ush  len;        /* length of bit string */
    } dl;
} FAR ct_data;

#define Freq fc.freq
#define Code fc.code
#define Dad  dl.dad
#define Len  dl.len

typedef struct static_tree_desc_s  static_tree_desc;

typedef struct tree_desc_s {
    ct_data *dyn_tree;           /* the dynamic tree */
    int     max_code;            /* largest code with non zero frequency */
    static_tree_desc *stat_desc; /* the corresponding static tree */
} FAR tree_desc;

typedef ush Pos;
typedef Pos FAR Posf;
typedef unsigned IPos;

/* A Pos is an index in the character window. We use short instead of int to
 * save space in the various tables. IPos is used only for parameter passing.
 */

typedef struct deflate_state {
    z_streamp strm;      /* pointer back to this zlib stream */
    int   status;        /* as the name implies */
    Bytef *pending_buf;  /* output still pending */
    ulg   pending_buf_size; /* size of pending_buf */
    Bytef *pending_out;  /* next pending byte to output to the stream */
    int   pending;       /* nb of bytes in the pending buffer */
    int   noheader;      /* suppress zlib header and adler32 */
    Byte  data_type;     /* UNKNOWN, BINARY or ASCII */
    Byte  method;        /* STORED (for zip only) or DEFLATED */
    int   last_flush;    /* value of flush param for previous deflate call */

                /* used by deflate.c: */

    uInt  w_size;        /* LZ77 window size (32K by default) */
    uInt  w_bits;        /* log2(w_size)  (8..16) */
    uInt  w_mask;        /* w_size - 1 */

    Bytef *window;
    /* Sliding window. Input bytes are read into the second half of the window,
     * and move to the first half later to keep a dictionary of at least wSize
     * bytes. With this organization, matches are limited to a distance of
     * wSize-MAX_MATCH bytes, but this ensures that IO is always
     * performed with a length multiple of the block size. Also, it limits
     * the window size to 64K, which is quite useful on MSDOS.
     * To do: use the user input buffer as sliding window.
     */

    ulg window_size;
    /* Actual size of window: 2*wSize, except when the user input buffer
     * is directly used as sliding window.
     */

    Posf *prev;
    /* Link to older string with same hash index. To limit the size of this
     * array to 64K, this link is maintained only for the last 32K strings.
     * An index in this array is thus a window index modulo 32K.
     */

    Posf *head; /* Heads of the hash chains or NIL. */

    uInt  ins_h;          /* hash index of string to be inserted */
    uInt  hash_size;      /* number of elements in hash table */
    uInt  hash_bits;      /* log2(hash_size) */
    uInt  hash_mask;      /* hash_size-1 */

    uInt  hash_shift;
    /* Number of bits by which ins_h must be shifted at each input
     * step. It must be such that after MIN_MATCH steps, the oldest
     * byte no longer takes part in the hash key, that is:
     *   hash_shift * MIN_MATCH >= hash_bits
     */

    long block_start;
    /* Window position at the beginning of the current output block. Gets
     * negative when the window is moved backwards.
     */

    uInt match_length;           /* length of best match */
    IPos prev_match;             /* previous match */
    int match_available;         /* set if previous match exists */
    uInt strstart;               /* start of string to insert */
    uInt match_start;            /* start of matching string */
    uInt lookahead;              /* number of valid bytes ahead in window */

    uInt prev_length;
    /* Length of the best match at previous step. Matches not greater than this
     * are discarded. This is used in the lazy match evaluation.
     */

    uInt max_chain_length;
    /* To speed up deflation, hash chains are never searched beyond this
     * length.  A higher limit improves compression ratio but degrades the
     * speed.
     */

    uInt max_lazy_match;
    /* Attempt to find a better match only when the current match is strictly
     * smaller than this value. This mechanism is used only for compression
     * levels >= 4.
     */
#   define max_insert_length  max_lazy_match
    /* Insert new strings in the hash table only if the match length is not
     * greater than this length. This saves time but degrades compression.
     * max_insert_length is used only for compression levels <= 3.
     */

    int level;    /* compression level (1..9) */
    int strategy; /* favor or force Huffman coding*/

    uInt good_match;
    /* Use a faster search when the previous match is longer than this */

    int nice_match; /* Stop searching when current match exceeds this */

                /* used by trees.c: */
    /* Didn't use ct_data typedef below to suppress compiler warning */
    struct ct_data_s dyn_ltree[HEAP_SIZE];   /* literal and length tree */
    struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */
    struct ct_data_s bl_tree[2*BL_CODES+1];  /* Huffman tree for bit lengths */

    struct tree_desc_s l_desc;               /* desc. for literal tree */
    struct tree_desc_s d_desc;               /* desc. for distance tree */
    struct tree_desc_s bl_desc;              /* desc. for bit length tree */

    ush bl_count[MAX_BITS+1];
    /* number of codes at each bit length for an optimal tree */

    int heap[2*L_CODES+1];      /* heap used to build the Huffman trees */
    int heap_len;               /* number of elements in the heap */
    int heap_max;               /* element of largest frequency */
    /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
     * The same heap array is used to build all trees.
     */

    uch depth[2*L_CODES+1];
    /* Depth of each subtree used as tie breaker for trees of equal frequency
     */

    uchf *l_buf;          /* buffer for literals or lengths */

    uInt  lit_bufsize;
    /* Size of match buffer for literals/lengths.  There are 4 reasons for
     * limiting lit_bufsize to 64K:
     *   - frequencies can be kept in 16 bit counters
     *   - if compression is not successful for the first block, all input
     *     data is still in the window so we can still emit a stored block even
     *     when input comes from standard input.  (This can also be done for
     *     all blocks if lit_bufsize is not greater than 32K.)
     *   - if compression is not successful for a file smaller than 64K, we can
     *     even emit a stored file instead of a stored block (saving 5 bytes).
     *     This is applicable only for zip (not gzip or zlib).
     *   - creating new Huffman trees less frequently may not provide fast
     *     adaptation to changes in the input data statistics. (Take for
     *     example a binary file with poorly compressible code followed by
     *     a highly compressible string table.) Smaller buffer sizes give
     *     fast adaptation but have of course the overhead of transmitting
     *     trees more frequently.
     *   - I can't count above 4
     */

    uInt last_lit;      /* running index in l_buf */

    ushf *d_buf;
    /* Buffer for distances. To simplify the code, d_buf and l_buf have
     * the same number of elements. To use different lengths, an extra flag
     * array would be necessary.
     */

    ulg opt_len;        /* bit length of current block with optimal trees */
    ulg static_len;     /* bit length of current block with static trees */
    uInt matches;       /* number of string matches in current block */
    int last_eob_len;   /* bit length of EOB code for last block */

#ifdef DEBUG_ZLIB
    ulg compressed_len; /* total bit length of compressed file mod 2^32 */
    ulg bits_sent;      /* bit length of compressed data sent mod 2^32 */
#endif

    ush bi_buf;
    /* Output buffer. bits are inserted starting at the bottom (least
     * significant bits).
     */
    int bi_valid;
    /* Number of valid bits in bi_buf.  All bits above the last valid bit
     * are always zero.
     */

} FAR deflate_state;

/* Output a byte on the stream.
 * IN assertion: there is enough room in pending_buf.
 */
#define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}


#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/* Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

#define MAX_DIST(s)  ((s)->w_size-MIN_LOOKAHEAD)
/* In order to simplify the code, particularly on 16 bit machines, match
 * distances are limited to MAX_DIST instead of WSIZE.
 */
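
/* Worked example (illustrative, not in the original sources): with the usual
 * windowBits = MAX_WBITS = 15 (the value deflateInit_() passes below),
 * w_size = 1<<15 = 32768.  MIN_LOOKAHEAD = 258 + 3 + 1 = 262, so
 * MAX_DIST(s) = 32768 - 262 = 32506; no match emitted by this deflate refers
 * further back than that, even though the DEFLATE format itself allows
 * distances up to 32768.
 */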

/* in trees.c */
void _tr_init         OF((deflate_state *s));
int  _tr_tally        OF((deflate_state *s, unsigned dist, unsigned lc));
void _tr_flush_block  OF((deflate_state *s, charf *buf, ulg stored_len,
                          int eof));
void _tr_align        OF((deflate_state *s));
void _tr_stored_block OF((deflate_state *s, charf *buf, ulg stored_len,
                          int eof));

#define d_code(dist) \
   ((dist) < 256 ? _dist_code[dist] : _dist_code[256+((dist)>>7)])
/* Mapping from a distance to a distance code. dist is the distance - 1 and
 * must not have side effects. _dist_code[256] and _dist_code[257] are never
 * used.
 */
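
/* Illustrative note (added; not from the original sources): for a short
 * match the macro indexes _dist_code directly.  E.g. an actual distance of 5
 * is passed in as dist = 4, and _dist_code[4] is DEFLATE distance code 4
 * (which covers distances 5..6).  Distances whose dist value is 256 or more
 * fall into the coarser second half of the table, indexed by 256 + (dist>>7).
 */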

#ifndef DEBUG_ZLIB
/* Inline versions of _tr_tally for speed: */

#if defined(GEN_TREES_H) || !defined(STDC)
  extern uch _length_code[];
  extern uch _dist_code[];
#else
  extern const uch _length_code[];
  extern const uch _dist_code[];
#endif

# define _tr_tally_lit(s, c, flush) \
  { uch cc = (c); \
    s->d_buf[s->last_lit] = 0; \
    s->l_buf[s->last_lit++] = cc; \
    s->dyn_ltree[cc].Freq++; \
    flush = (s->last_lit == s->lit_bufsize-1); \
  }
# define _tr_tally_dist(s, distance, length, flush) \
  { uch len = (length); \
    ush dist = (distance); \
    s->d_buf[s->last_lit] = dist; \
    s->l_buf[s->last_lit++] = len; \
    dist--; \
    s->dyn_ltree[_length_code[len]+LITERALS+1].Freq++; \
    s->dyn_dtree[d_code(dist)].Freq++; \
    flush = (s->last_lit == s->lit_bufsize-1); \
  }
#else
# define _tr_tally_lit(s, c, flush) flush = _tr_tally(s, 0, c)
# define _tr_tally_dist(s, distance, length, flush) \
              flush = _tr_tally(s, distance, length)
#endif

#endif
/* --- deflate.h */

/* +++ deflate.c */
/* deflate.c -- compress data using the deflation algorithm
 * Copyright (C) 1995-2002 Jean-loup Gailly.
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 *  ALGORITHM
 *
 *      The "deflation" process depends on being able to identify portions
 *      of the input text which are identical to earlier input (within a
 *      sliding window trailing behind the input currently being processed).
 *
 *      The most straightforward technique turns out to be the fastest for
 *      most input files: try all possible matches and select the longest.
 *      The key feature of this algorithm is that insertions into the string
 *      dictionary are very simple and thus fast, and deletions are avoided
 *      completely. Insertions are performed at each input character, whereas
 *      string matches are performed only when the previous match ends. So it
 *      is preferable to spend more time in matches to allow very fast string
 *      insertions and avoid deletions. The matching algorithm for small
 *      strings is inspired from that of Rabin & Karp. A brute force approach
 *      is used to find longer strings when a small match has been found.
 *      A similar algorithm is used in comic (by Jan-Mark Wams) and freeze
 *      (by Leonid Broukhis).
 *         A previous version of this file used a more sophisticated algorithm
 *      (by Fiala and Greene) which is guaranteed to run in linear amortized
 *      time, but has a larger average cost, uses more memory and is patented.
 *      However the F&G algorithm may be faster for some highly redundant
 *      files if the parameter max_chain_length (described below) is too large.
 *
 *  ACKNOWLEDGEMENTS
 *
 *      The idea of lazy evaluation of matches is due to Jan-Mark Wams, and
 *      I found it in 'freeze' written by Leonid Broukhis.
 *      Thanks to many people for bug reports and testing.
 *
 *  REFERENCES
 *
 *      Deutsch, L.P.,"DEFLATE Compressed Data Format Specification".
 *      Available in ftp://ds.internic.net/rfc/rfc1951.txt
 *
 *      A description of the Rabin and Karp algorithm is given in the book
 *         "Algorithms" by R. Sedgewick, Addison-Wesley, p252.
 *
 *      Fiala,E.R., and Greene,D.H.
 *         Data Compression with Finite Windows, Comm.ACM, 32,4 (1989) 490-595
 *
 */

/* @(#) $Id: zlib.c,v 1.10.874.1 2005/06/24 01:47:11 lindak Exp $ */

/* #include "deflate.h" */

const char deflate_copyright[] =
   " deflate 1.1.4 Copyright 1995-2002 Jean-loup Gailly ";
/*
  If you use the zlib library in a product, an acknowledgment is welcome
  in the documentation of your product. If for some reason you cannot
  include such an acknowledgment, I would appreciate that you keep this
  copyright string in the executable of your product.
 */

/* ===========================================================================
 *  Function prototypes.
 */
typedef enum {
    need_more,      /* block not completed, need more input or more output */
    block_done,     /* block flush performed */
    finish_started, /* finish started, need only more output at next deflate */
    finish_done     /* finish done, accept no more input or output */
} block_state;

typedef block_state (*compress_func) OF((deflate_state *s, int flush));
/* Compression function. Returns the block state after the call. */

local void fill_window    OF((deflate_state *s));
local block_state deflate_stored OF((deflate_state *s, int flush));
local block_state deflate_fast   OF((deflate_state *s, int flush));
local block_state deflate_slow   OF((deflate_state *s, int flush));
local void lm_init        OF((deflate_state *s));
local void putShortMSB    OF((deflate_state *s, uInt b));
local void flush_pending  OF((z_streamp strm));
local int read_buf        OF((z_streamp strm, Bytef *buf, unsigned size));
#ifdef ASMV
      void match_init OF((void)); /* asm code initialization */
      uInt longest_match  OF((deflate_state *s, IPos cur_match));
#else
local uInt longest_match  OF((deflate_state *s, IPos cur_match));
#endif

#ifdef DEBUG_ZLIB
local  void check_match OF((deflate_state *s, IPos start, IPos match,
                            int length));
#endif

/* ===========================================================================
 * Local data
 */

#define NIL 0
/* Tail of hash chains */

#ifndef TOO_FAR
#  define TOO_FAR 4096
#endif
/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */

#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/* Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

/* Values for max_lazy_match, good_match and max_chain_length, depending on
 * the desired pack level (0..9). The values given below have been tuned to
 * exclude worst case performance for pathological files. Better values may be
 * found for specific files.
 */
typedef struct config_s {
   ush good_length; /* reduce lazy search above this match length */
   ush max_lazy;    /* do not perform lazy search above this match length */
   ush nice_length; /* quit search above this match length */
   ush max_chain;
   compress_func func;
} config;

local const config configuration_table[10] = {
/*      good lazy nice chain */
/* 0 */ {0,    0,  0,    0, deflate_stored},  /* store only */
/* 1 */ {4,    4,  8,    4, deflate_fast}, /* maximum speed, no lazy matches */
/* 2 */ {4,    5, 16,    8, deflate_fast},
/* 3 */ {4,    6, 32,   32, deflate_fast},

/* 4 */ {4,    4, 16,   16, deflate_slow},  /* lazy matches */
/* 5 */ {8,   16, 32,   32, deflate_slow},
/* 6 */ {8,   16, 128, 128, deflate_slow},
/* 7 */ {8,   32, 128, 256, deflate_slow},
/* 8 */ {32, 128, 258, 1024, deflate_slow},
/* 9 */ {32, 258, 258, 4096, deflate_slow}}; /* maximum compression */

/* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
 * meaning.
 */
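
/* Illustrative reading of the table above (added; not part of the original
 * sources): at the default compression level 6, deflate uses deflate_slow()
 * with good_length = 8, max_lazy = 16, nice_length = 128 and max_chain = 128,
 * i.e. lazy matching with hash chains searched at most 128 links deep;
 * level 1 instead uses deflate_fast() with a chain limit of only 4.
 */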

#define EQUAL 0
/* result of memcmp for equal strings */

#ifndef NO_DUMMY_DECL
struct static_tree_desc_s {int dummy;}; /* for buggy compilers */
#endif

/* ===========================================================================
 * Update a hash value with the given input byte
 * IN  assertion: all calls to UPDATE_HASH are made with consecutive
 *    input characters, so that a running hash key can be computed from the
 *    previous key instead of complete recalculation each time.
 */
#define UPDATE_HASH(s,h,c) (h = (((h)<<s->hash_shift) ^ (c)) & s->hash_mask)


/* ===========================================================================
 * Insert string str in the dictionary and set match_head to the previous head
 * of the hash chain (the most recent string with same hash key). Return
 * the previous length of the hash chain.
 * If this file is compiled with -DFASTEST, the compression level is forced
 * to 1, and no hash chains are maintained.
 * IN  assertion: all calls to INSERT_STRING are made with consecutive
 *    input characters and the first MIN_MATCH bytes of str are valid
 *    (except for the last MIN_MATCH-1 bytes of the input file).
 */
#ifdef FASTEST
#define INSERT_STRING(s, str, match_head) \
   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
    match_head = s->head[s->ins_h], \
    s->head[s->ins_h] = (Pos)(str))
#else
#define INSERT_STRING(s, str, match_head) \
   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
    s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
    s->head[s->ins_h] = (Pos)(str))
#endif
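
/* Worked example (illustrative, not in the original sources): with the
 * default memLevel 8, hash_bits = 8 + 7 = 15 and hash_shift =
 * (15 + 3 - 1) / 3 = 5 (see deflateInit2_() below).  Each UPDATE_HASH step
 * shifts the running key left by 5 bits, so after MIN_MATCH = 3 steps the
 * oldest byte has been shifted by 15 bits and is removed entirely by
 * hash_mask, leaving a key that depends only on the 3 most recent bytes.
 */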

/* ===========================================================================
 * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
 * prev[] will be initialized on the fly.
 */
#define CLEAR_HASH(s) \
    s->head[s->hash_size-1] = NIL; \
    zmemzero((Bytef *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head));

/* ========================================================================= */
int ZEXPORT deflateInit_(strm, level, version, stream_size)
    z_streamp strm;
    int level;
    const char *version;
    int stream_size;
{
    return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
                         Z_DEFAULT_STRATEGY, version, stream_size);
    /* To do: ignore strm->next_in if we use it as window */
}

/* ========================================================================= */
int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
                          version, stream_size)
    z_streamp strm;
    int  level;
    int  method;
    int  windowBits;
    int  memLevel;
    int  strategy;
    const char *version;
    int stream_size;
{
    deflate_state *s;
    int noheader = 0;
    static const char* my_version = ZLIB_VERSION;

    ushf *overlay;
    /* We overlay pending_buf and d_buf+l_buf. This works since the average
     * output size for (length,distance) codes is <= 24 bits.
     */

    if (version == Z_NULL || version[0] != my_version[0] ||
        stream_size != sizeof(z_stream)) {
        return Z_VERSION_ERROR;
    }
    if (strm == Z_NULL) return Z_STREAM_ERROR;

    strm->msg = Z_NULL;
#ifndef NO_ZCFUNCS
    if (strm->zalloc == Z_NULL) {
        strm->zalloc = zcalloc;
        strm->opaque = (voidpf)0;
    }
    if (strm->zfree == Z_NULL) strm->zfree = zcfree;
#endif

    if (level == Z_DEFAULT_COMPRESSION) level = 6;
#ifdef FASTEST
    level = 1;
#endif

    if (windowBits < 0) { /* undocumented feature: suppress zlib header */
        noheader = 1;
        windowBits = -windowBits;
    }
    if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != Z_DEFLATED ||
        windowBits < 9 || windowBits > 15 || level < 0 || level > 9 ||
        strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
        return Z_STREAM_ERROR;
    }
    s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state));
    if (s == Z_NULL) return Z_MEM_ERROR;
    strm->state = (struct internal_state FAR *)s;
    s->strm = strm;

    s->noheader = noheader;
    s->w_bits = windowBits;
    s->w_size = 1 << s->w_bits;
    s->w_mask = s->w_size - 1;

    s->hash_bits = memLevel + 7;
    s->hash_size = 1 << s->hash_bits;
    s->hash_mask = s->hash_size - 1;
    s->hash_shift =  ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);

    s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
    s->prev   = (Posf *)  ZALLOC(strm, s->w_size, sizeof(Pos));
    s->head   = (Posf *)  ZALLOC(strm, s->hash_size, sizeof(Pos));

    s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */

    overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
    s->pending_buf = (uchf *) overlay;
    s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L);

    if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
        s->pending_buf == Z_NULL) {
        strm->msg = (char*)ERR_MSG(Z_MEM_ERROR);
        deflateEnd (strm);
        return Z_MEM_ERROR;
    }
    s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
    s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;

    s->level = level;
    s->strategy = strategy;
    s->method = (Byte)method;

    return deflateReset(strm);
}
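
/* Minimal usage sketch (illustrative only; not part of this file).  It
 * assumes the ordinary zlib.h API, where deflateInit() is a convenience
 * macro that calls deflateInit_() with ZLIB_VERSION and sizeof(z_stream),
 * and that dst is large enough to hold the whole compressed stream:
 *
 *     z_stream zs;
 *     zs.zalloc = Z_NULL;
 *     zs.zfree  = Z_NULL;
 *     zs.opaque = (voidpf)0;
 *     if (deflateInit(&zs, Z_DEFAULT_COMPRESSION) != Z_OK)
 *         return;
 *     zs.next_in  = src;  zs.avail_in  = src_len;
 *     zs.next_out = dst;  zs.avail_out = dst_len;
 *     if (deflate(&zs, Z_FINISH) != Z_STREAM_END)
 *         handle the error, or supply more output space and call again;
 *     deflateEnd(&zs);
 *
 * Real callers must check every return value and, when avail_out reaches
 * zero before Z_STREAM_END, provide a fresh output buffer and call
 * deflate(&zs, Z_FINISH) again.
 */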

/* ========================================================================= */
int ZEXPORT deflateSetDictionary (strm, dictionary, dictLength)
    z_streamp strm;
    const Bytef *dictionary;
    uInt  dictLength;
{
    deflate_state *s;
    uInt length = dictLength;
    uInt n;
    IPos hash_head = 0;

    if (strm == Z_NULL || strm->state == Z_NULL || dictionary == Z_NULL ||
        ((deflate_state*)strm->state)->status != INIT_STATE) return Z_STREAM_ERROR;

    s = (deflate_state*)strm->state;
    strm->adler = adler32(strm->adler, dictionary, dictLength);

    if (length < MIN_MATCH) return Z_OK;
    if (length > MAX_DIST(s)) {
        length = MAX_DIST(s);
#ifndef USE_DICT_HEAD
        dictionary += dictLength - length; /* use the tail of the dictionary */
#endif
    }
    zmemcpy(s->window, dictionary, length);
    s->strstart = length;
    s->block_start = (long)length;

    /* Insert all strings in the hash table (except for the last two bytes).
     * s->lookahead stays null, so s->ins_h will be recomputed at the next
     * call of fill_window.
     */
    s->ins_h = s->window[0];
    UPDATE_HASH(s, s->ins_h, s->window[1]);
    for (n = 0; n <= length - MIN_MATCH; n++) {
        INSERT_STRING(s, n, hash_head);
    }
    if (hash_head) hash_head = 0;  /* to make compiler happy */
    return Z_OK;
}
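
/* Note (illustrative, not from the original sources): when a dictionary has
 * been installed this way, deflate() below sets the PRESET_DICT flag in the
 * zlib header and emits the dictionary's Adler-32 checksum, so the
 * decompressing side is expected to see Z_NEED_DICT from inflate() and
 * supply the same bytes via inflateSetDictionary() before inflation can
 * proceed.
 */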

/* ========================================================================= */
int ZEXPORT deflateReset (strm)
    z_streamp strm;
{
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL ||
        strm->zalloc == Z_NULL || strm->zfree == Z_NULL) return Z_STREAM_ERROR;

    strm->total_in = strm->total_out = 0;
    strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
    strm->data_type = Z_UNKNOWN;

    s = (deflate_state *)strm->state;
    s->pending = 0;
    s->pending_out = s->pending_buf;

    if (s->noheader < 0) {
        s->noheader = 0; /* was set to -1 by deflate(..., Z_FINISH); */
    }
    s->status = s->noheader ? BUSY_STATE : INIT_STATE;
    strm->adler = 1;
    s->last_flush = Z_NO_FLUSH;

    _tr_init(s);
    lm_init(s);

    return Z_OK;
}

/* ========================================================================= */
int ZEXPORT deflateParams(strm, level, strategy)
    z_streamp strm;
    int level;
    int strategy;
{
    deflate_state *s;
    compress_func func;
    int err = Z_OK;

    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
    s = (deflate_state*)strm->state;

    if (level == Z_DEFAULT_COMPRESSION) {
        level = 6;
    }
    if (level < 0 || level > 9 || strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
        return Z_STREAM_ERROR;
    }
    func = configuration_table[s->level].func;

    if (func != configuration_table[level].func && strm->total_in != 0) {
        /* Flush the last buffer: */
        err = deflate(strm, Z_PARTIAL_FLUSH);
    }
    if (s->level != level) {
        s->level = level;
        s->max_lazy_match   = configuration_table[level].max_lazy;
        s->good_match       = configuration_table[level].good_length;
        s->nice_match       = configuration_table[level].nice_length;
        s->max_chain_length = configuration_table[level].max_chain;
    }
    s->strategy = strategy;
    return err;
}

/* =========================================================================
 * Put a short in the pending buffer. The 16-bit value is put in MSB order.
 * IN assertion: the stream state is correct and there is enough room in
 * pending_buf.
 */
local void putShortMSB (s, b)
    deflate_state *s;
    uInt b;
{
    put_byte(s, (Byte)(b >> 8));
    put_byte(s, (Byte)(b & 0xff));
}

/* =========================================================================
 * Flush as much pending output as possible. All deflate() output goes
 * through this function so some applications may wish to modify it
 * to avoid allocating a large strm->next_out buffer and copying into it.
 * (See also read_buf()).
 */
local void flush_pending(strm)
    z_streamp strm;
{
    deflate_state* s = (deflate_state*)strm->state;
    unsigned len = s->pending;

    if (len > strm->avail_out) len = strm->avail_out;
    if (len == 0) return;

    zmemcpy(strm->next_out, s->pending_out, len);
    strm->next_out  += len;
    s->pending_out  += len;
    strm->total_out += len;
    strm->avail_out -= len;
    s->pending      -= len;
    if (s->pending == 0) {
        s->pending_out = s->pending_buf;
    }
}

/* ========================================================================= */
int ZEXPORT deflate (strm, flush)
    z_streamp strm;
    int flush;
{
    int old_flush; /* value of flush param for previous deflate call */
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL ||
        flush > Z_FINISH || flush < 0) {
        return Z_STREAM_ERROR;
    }
    s = (deflate_state*)strm->state;

    if (strm->next_out == Z_NULL ||
        (strm->next_in == Z_NULL && strm->avail_in != 0) ||
        (s->status == FINISH_STATE && flush != Z_FINISH)) {
        ERR_RETURN(strm, Z_STREAM_ERROR);
    }
    if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);

    s->strm = strm; /* just in case */
    old_flush = s->last_flush;
    s->last_flush = flush;

    /* Write the zlib header */
    if (s->status == INIT_STATE) {

        uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
        uInt level_flags = (s->level-1) >> 1;

        if (level_flags > 3) level_flags = 3;
        header |= (level_flags << 6);
        if (s->strstart != 0) header |= PRESET_DICT;
        header += 31 - (header % 31);

        s->status = BUSY_STATE;
        putShortMSB(s, header);

        /* Save the adler32 of the preset dictionary: */
        if (s->strstart != 0) {
            putShortMSB(s, (uInt)(strm->adler >> 16));
            putShortMSB(s, (uInt)(strm->adler & 0xffff));
        }
        strm->adler = 1L;
    }
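
    /* Worked example (illustrative, not in the original sources): with the
     * defaults w_bits = 15 and level = 6, and no preset dictionary,
     *   header      = (8 + ((15-8)<<4)) << 8 = 0x7800
     *   level_flags = (6-1)>>1 = 2, so header |= 2<<6   ->  0x7880
     *   0x7880 % 31 = 3, so header += 31 - 3            ->  0x789C
     * and putShortMSB() emits the familiar zlib header bytes 0x78 0x9C,
     * making the 16-bit value divisible by 31 as RFC 1950 requires.
     */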

    /* Flush as much pending output as possible */
    if (s->pending != 0) {
        flush_pending(strm);
        if (strm->avail_out == 0) {
            /* Since avail_out is 0, deflate will be called again with
             * more output space, but possibly with both pending and
             * avail_in equal to zero. There won't be anything to do,
             * but this is not an error situation so make sure we
             * return OK instead of BUF_ERROR at next call of deflate:
             */
            s->last_flush = -1;
            return Z_OK;
        }

    /* Make sure there is something to do and avoid duplicate consecutive
     * flushes. For repeated and useless calls with Z_FINISH, we keep
     * returning Z_STREAM_END instead of Z_BUF_ERROR.
     */
    } else if (strm->avail_in == 0 && flush <= old_flush &&
               flush != Z_FINISH) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* User must not provide more input after the first FINISH: */
    if (s->status == FINISH_STATE && strm->avail_in != 0) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* Start a new block or continue the current one.
     */
    if (strm->avail_in != 0 || s->lookahead != 0 ||
        (flush != Z_NO_FLUSH && s->status != FINISH_STATE)) {
        block_state bstate;

        bstate = (*(configuration_table[s->level].func))(s, flush);

        if (bstate == finish_started || bstate == finish_done) {
            s->status = FINISH_STATE;
        }
        if (bstate == need_more || bstate == finish_started) {
            if (strm->avail_out == 0) {
                s->last_flush = -1; /* avoid BUF_ERROR next call, see above */
            }
            return Z_OK;
            /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
             * of deflate should use the same flush parameter to make sure
             * that the flush is complete. So we don't have to output an
             * empty block here, this will be done at next call. This also
             * ensures that for a very small output buffer, we emit at most
             * one empty block.
             */
        }
        if (bstate == block_done) {
            if (flush == Z_PARTIAL_FLUSH) {
                _tr_align(s);
            } else { /* FULL_FLUSH or SYNC_FLUSH */
                _tr_stored_block(s, (char*)0, 0L, 0);
                /* For a full flush, this empty block will be recognized
                 * as a special marker by inflate_sync().
                 */
                if (flush == Z_FULL_FLUSH) {
                    CLEAR_HASH(s);             /* forget history */
                }
            }
            flush_pending(strm);
            if (strm->avail_out == 0) {
              s->last_flush = -1; /* avoid BUF_ERROR at next call, see above */
              return Z_OK;
            }
        }
    }
    Assert(strm->avail_out > 0, "bug2");

    if (flush != Z_FINISH) return Z_OK;
    if (s->noheader) return Z_STREAM_END;

    /* Write the zlib trailer (adler32) */
    putShortMSB(s, (uInt)(strm->adler >> 16));
    putShortMSB(s, (uInt)(strm->adler & 0xffff));
    flush_pending(strm);
    /* If avail_out is zero, the application will call deflate again
     * to flush the rest.
     */
    s->noheader = -1; /* write the trailer only once! */
    return s->pending != 0 ? Z_OK : Z_STREAM_END;
}

/* ========================================================================= */
int ZEXPORT deflateEnd (strm)
    z_streamp strm;
{
    deflate_state* s;
    int status;

    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;

    s = (deflate_state*)strm->state;
    status = s->status;
    if (status != INIT_STATE && status != BUSY_STATE &&
        status != FINISH_STATE) {
        return Z_STREAM_ERROR;
    }

    /* Deallocate in reverse order of allocations: */
    TRY_FREE(strm, s->pending_buf);
    TRY_FREE(strm, s->head);
    TRY_FREE(strm, s->prev);
    TRY_FREE(strm, s->window);

    ZFREE(strm, s);
    strm->state = Z_NULL;

    return status == BUSY_STATE ? Z_DATA_ERROR : Z_OK;
}

/* =========================================================================
 * Copy the source state to the destination state.
 * To simplify the source, this is not supported for 16-bit MSDOS (which
 * doesn't have enough memory anyway to duplicate compression states).
 */
int ZEXPORT deflateCopy (dest, source)
    z_streamp dest;
    z_streamp source;
{
#ifdef MAXSEG_64K
    return Z_STREAM_ERROR;
#else
    deflate_state *ds;
    deflate_state *ss;
    ushf *overlay;


    if (source == Z_NULL || dest == Z_NULL || source->state == Z_NULL) {
        return Z_STREAM_ERROR;
    }

    ss = (deflate_state*)source->state;

    *dest = *source;

    ds = (deflate_state *) ZALLOC(dest, 1, sizeof(deflate_state));
    if (ds == Z_NULL) return Z_MEM_ERROR;
    dest->state = (struct internal_state FAR *) ds;
    *ds = *ss;
    ds->strm = dest;

    ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
    ds->prev   = (Posf *)  ZALLOC(dest, ds->w_size, sizeof(Pos));
    ds->head   = (Posf *)  ZALLOC(dest, ds->hash_size, sizeof(Pos));
    overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof(ush)+2);
    ds->pending_buf = (uchf *) overlay;

    if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
        ds->pending_buf == Z_NULL) {
        deflateEnd (dest);
        return Z_MEM_ERROR;
    }
    /* the following zmemcpy calls do not work for 16-bit MSDOS */
    zmemcpy(ds->window, ss->window, ds->w_size * 2 * sizeof(Byte));
    zmemcpy(ds->prev, ss->prev, ds->w_size * sizeof(Pos));
    zmemcpy(ds->head, ss->head, ds->hash_size * sizeof(Pos));
    zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);

    ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
    ds->d_buf = overlay + ds->lit_bufsize/sizeof(ush);
    ds->l_buf = ds->pending_buf + (1+sizeof(ush))*ds->lit_bufsize;

    ds->l_desc.dyn_tree = ds->dyn_ltree;
    ds->d_desc.dyn_tree = ds->dyn_dtree;
    ds->bl_desc.dyn_tree = ds->bl_tree;

    return Z_OK;
#endif
}

/* ===========================================================================
 * Read a new buffer from the current input stream, update the adler32
 * and total number of bytes read.  All deflate() input goes through
 * this function so some applications may wish to modify it to avoid
 * allocating a large strm->next_in buffer and copying from it.
 * (See also flush_pending()).
 */
local int read_buf(strm, buf, size)
    z_streamp strm;
    Bytef *buf;
    unsigned size;
{
    unsigned len = strm->avail_in;

    if (len > size) len = size;
    if (len == 0) return 0;

    strm->avail_in  -= len;

    if (!((deflate_state*)strm->state)->noheader) {
        strm->adler = adler32(strm->adler, strm->next_in, len);
    }
    zmemcpy(buf, strm->next_in, len);
    strm->next_in  += len;
    strm->total_in += len;

    return (int)len;
}

/* ===========================================================================
 * Initialize the "longest match" routines for a new zlib stream
 */
local void lm_init (s)
    deflate_state *s;
{
    s->window_size = (ulg)2L*s->w_size;

    CLEAR_HASH(s);

    /* Set the default configuration parameters:
     */
    s->max_lazy_match   = configuration_table[s->level].max_lazy;
    s->good_match       = configuration_table[s->level].good_length;
    s->nice_match       = configuration_table[s->level].nice_length;
    s->max_chain_length = configuration_table[s->level].max_chain;

    s->strstart = 0;
    s->block_start = 0L;
    s->lookahead = 0;
    s->match_length = s->prev_length = MIN_MATCH-1;
    s->match_available = 0;
    s->ins_h = 0;
#ifdef ASMV
    match_init(); /* initialize the asm code */
#endif
}

/* ===========================================================================
 * Set match_start to the longest match starting at the given string and
 * return its length. Matches shorter or equal to prev_length are discarded,
 * in which case the result is equal to prev_length and match_start is
 * garbage.
 * IN assertions: cur_match is the head of the hash chain for the current
 *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 * OUT assertion: the match length is not greater than s->lookahead.
 */
#ifndef ASMV
/* For 80x86 and 680x0, an optimized version will be provided in match.asm or
 * match.S. The code will be functionally equivalent.
 */
#ifndef FASTEST
local uInt longest_match(s, cur_match)
    deflate_state *s;
    IPos cur_match;                             /* current match */
{
    unsigned chain_length = s->max_chain_length;/* max hash chain length */
    register Bytef *scan = s->window + s->strstart; /* current string */
    register Bytef *match;                      /* matched string */
    register int len;                           /* length of current match */
    int best_len = s->prev_length;              /* best match length so far */
    int nice_match = s->nice_match;             /* stop if match long enough */
    IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
        s->strstart - (IPos)MAX_DIST(s) : NIL;
    /* Stop when cur_match becomes <= limit. To simplify the code,
     * we prevent matches with the string of window index 0.
     */
    Posf *prev = s->prev;
    uInt wmask = s->w_mask;

#ifdef UNALIGNED_OK
    /* Compare two bytes at a time. Note: this is not always beneficial.
     * Try with and without -DUNALIGNED_OK to check.
     */
    register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
    register ush scan_start = *(ushf*)scan;
    register ush scan_end   = *(ushf*)(scan+best_len-1);
#else
    register Bytef *strend = s->window + s->strstart + MAX_MATCH;
    register Byte scan_end1  = scan[best_len-1];
    register Byte scan_end   = scan[best_len];
#endif

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    /* Do not waste too much time if we already have a good match: */
    if (s->prev_length >= s->good_match) {
        chain_length >>= 2;
    }
    /* Do not look for matches beyond the end of the input. This is necessary
     * to make deflate deterministic.
     */
    if ((uInt)nice_match > s->lookahead) nice_match = s->lookahead;

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    do {
        Assert(cur_match < s->strstart, "no future");
        match = s->window + cur_match;

        /* Skip to next match if the match length cannot increase
         * or if the match length is less than 2:
         */
#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
        /* This code assumes sizeof(unsigned short) == 2. Do not use
         * UNALIGNED_OK if your compiler uses a different size.
         */
        if (*(ushf*)(match+best_len-1) != scan_end ||
            *(ushf*)match != scan_start) continue;

        /* It is not necessary to compare scan[2] and match[2] since they are
         * always equal when the other bytes match, given that the hash keys
         * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
         * strstart+3, +5, ... up to strstart+257. We check for insufficient
         * lookahead only every 4th comparison; the 128th check will be made
         * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
         * necessary to put more guard bytes at the end of the window, or
         * to check more often for insufficient lookahead.
         */
        Assert(scan[2] == match[2], "scan[2]?");
        scan++, match++;
        do {
        } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 scan < strend);
        /* The funny "do {}" generates better code on most compilers */

        /* Here, scan <= window+strstart+257 */
        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
        if (*scan == *match) scan++;

        len = (MAX_MATCH - 1) - (int)(strend-scan);
        scan = strend - (MAX_MATCH-1);

#else /* UNALIGNED_OK */

        if (match[best_len]   != scan_end  ||
            match[best_len-1] != scan_end1 ||
            *match            != *scan     ||
            *++match          != scan[1])      continue;

        /* The check at best_len-1 can be removed because it will be made
1442 | * again later. (This heuristic is not always a win.) | |
1443 | * It is not necessary to compare scan[2] and match[2] since they | |
1444 | * are always equal when the other bytes match, given that | |
1445 | * the hash keys are equal and that HASH_BITS >= 8. | |
1446 | */ | |
1447 | scan += 2, match++; | |
1448 | Assert(*scan == *match, "match[2]?"); | |
1449 | ||
1450 | /* We check for insufficient lookahead only every 8th comparison; | |
1451 | * the 256th check will be made at strstart+258. | |
1452 | */ | |
1453 | do { | |
1454 | } while (*++scan == *++match && *++scan == *++match && | |
1455 | *++scan == *++match && *++scan == *++match && | |
1456 | *++scan == *++match && *++scan == *++match && | |
1457 | *++scan == *++match && *++scan == *++match && | |
1458 | scan < strend); | |
1459 | ||
1460 | Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); | |
1461 | ||
1462 | len = MAX_MATCH - (int)(strend - scan); | |
1463 | scan = strend - MAX_MATCH; | |
1464 | ||
1465 | #endif /* UNALIGNED_OK */ | |
1466 | ||
1467 | if (len > best_len) { | |
1468 | s->match_start = cur_match; | |
1469 | best_len = len; | |
1470 | if (len >= nice_match) break; | |
1471 | #ifdef UNALIGNED_OK | |
1472 | scan_end = *(ushf*)(scan+best_len-1); | |
1473 | #else | |
1474 | scan_end1 = scan[best_len-1]; | |
1475 | scan_end = scan[best_len]; | |
1476 | #endif | |
1477 | } | |
1478 | } while ((cur_match = prev[cur_match & wmask]) > limit | |
1479 | && --chain_length != 0); | |
1480 | ||
9bccf70c | 1481 | if ((uInt)best_len <= s->lookahead) return (uInt)best_len; |
1c79356b A |
1482 | return s->lookahead; |
1483 | } | |
9bccf70c A |
1484 | |
1485 | #else /* FASTEST */ | |
1486 | /* --------------------------------------------------------------------------- | |
1487 | * Optimized version for level == 1 only | |
1488 | */ | |
1489 | local uInt longest_match(s, cur_match) | |
1490 | deflate_state *s; | |
1491 | IPos cur_match; /* current match */ | |
1492 | { | |
1493 | register Bytef *scan = s->window + s->strstart; /* current string */ | |
1494 | register Bytef *match; /* matched string */ | |
1495 | register int len; /* length of current match */ | |
1496 | register Bytef *strend = s->window + s->strstart + MAX_MATCH; | |
1497 | ||
1498 | /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16. | |
1499 | * It is easy to get rid of this optimization if necessary. | |
1500 | */ | |
1501 | Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever"); | |
1502 | ||
1503 | Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead"); | |
1504 | ||
1505 | Assert(cur_match < s->strstart, "no future"); | |
1506 | ||
1507 | match = s->window + cur_match; | |
1508 | ||
1509 | /* Return failure if the match length is less than 2: | |
1510 | */ | |
1511 | if (match[0] != scan[0] || match[1] != scan[1]) return MIN_MATCH-1; | |
1512 | ||
1513 | /* The check at best_len-1 can be removed because it will be made | |
1514 | * again later. (This heuristic is not always a win.) | |
1515 | * It is not necessary to compare scan[2] and match[2] since they | |
1516 | * are always equal when the other bytes match, given that | |
1517 | * the hash keys are equal and that HASH_BITS >= 8. | |
1518 | */ | |
1519 | scan += 2, match += 2; | |
1520 | Assert(*scan == *match, "match[2]?"); | |
1521 | ||
1522 | /* We check for insufficient lookahead only every 8th comparison; | |
1523 | * the 256th check will be made at strstart+258. | |
1524 | */ | |
1525 | do { | |
1526 | } while (*++scan == *++match && *++scan == *++match && | |
1527 | *++scan == *++match && *++scan == *++match && | |
1528 | *++scan == *++match && *++scan == *++match && | |
1529 | *++scan == *++match && *++scan == *++match && | |
1530 | scan < strend); | |
1531 | ||
1532 | Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan"); | |
1533 | ||
1534 | len = MAX_MATCH - (int)(strend - scan); | |
1535 | ||
1536 | if (len < MIN_MATCH) return MIN_MATCH - 1; | |
1537 | ||
1538 | s->match_start = cur_match; | |
1539 | return len <= s->lookahead ? len : s->lookahead; | |
1540 | } | |
1541 | #endif /* FASTEST */ | |
1c79356b A |
1542 | #endif /* ASMV */ |
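/*
 * Illustrative sketch, not part of the zlib sources: a brute-force
 * counterpart of longest_match() on a plain byte window.  It tries every
 * earlier position instead of walking the head[]/prev[] hash chain, and it
 * omits the chain_length / good_match / nice_match heuristics, the MAX_DIST
 * distance limit and the MAX_MATCH cap, so it only shows what the optimized
 * routine computes.  All names here are made up.
 */
#include <stddef.h>

static size_t longest_match_ref(const unsigned char *win, size_t strstart,
                                size_t lookahead, size_t *match_start)
{
    size_t best_len = 0, cur;

    for (cur = 0; cur < strstart; cur++) {
        size_t len = 0;
        /* Matches may run into the current string (overlapping copies). */
        while (len < lookahead && win[cur + len] == win[strstart + len])
            len++;
        if (len > best_len) {
            best_len = len;
            *match_start = cur;
        }
    }
    return best_len;
}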
1543 | ||
1544 | #ifdef DEBUG_ZLIB | |
1545 | /* =========================================================================== | |
1546 | * Check that the match at match_start is indeed a match. | |
1547 | */ | |
1548 | local void check_match(s, start, match, length) | |
1549 | deflate_state *s; | |
1550 | IPos start, match; | |
1551 | int length; | |
1552 | { | |
1553 | /* check that the match is indeed a match */ | |
9bccf70c A |
1554 | if (zmemcmp(s->window + match, |
1555 | s->window + start, length) != EQUAL) { | |
1c79356b A |
1556 | fprintf(stderr, " start %u, match %u, length %d\n", |
1557 | start, match, length); | |
1558 | do { | |
1559 | fprintf(stderr, "%c%c", s->window[match++], s->window[start++]); | |
1560 | } while (--length != 0); | |
1561 | z_error("invalid match"); | |
1562 | } | |
1563 | if (z_verbose > 1) { | |
1564 | fprintf(stderr,"\\[%d,%d]", start-match, length); | |
1565 | do { putc(s->window[start++], stderr); } while (--length != 0); | |
1566 | } | |
1567 | } | |
1568 | #else | |
1569 | # define check_match(s, start, match, length) | |
1570 | #endif | |
1571 | ||
1572 | /* =========================================================================== | |
1573 | * Fill the window when the lookahead becomes insufficient. | |
1574 | * Updates strstart and lookahead. | |
1575 | * | |
1576 | * IN assertion: lookahead < MIN_LOOKAHEAD | |
1577 | * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD | |
1578 | * At least one byte has been read, or avail_in == 0; reads are | |
1579 | * performed for at least two bytes (required for the zip translate_eol | |
1580 | * option -- not supported here). | |
1581 | */ | |
1582 | local void fill_window(s) | |
1583 | deflate_state *s; | |
1584 | { | |
1585 | register unsigned n, m; | |
1586 | register Posf *p; | |
1587 | unsigned more; /* Amount of free space at the end of the window. */ | |
1588 | uInt wsize = s->w_size; | |
1589 | ||
1590 | do { | |
1591 | more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart); | |
1592 | ||
1593 | /* Deal with !@#$% 64K limit: */ | |
1594 | if (more == 0 && s->strstart == 0 && s->lookahead == 0) { | |
1595 | more = wsize; | |
1596 | ||
1597 | } else if (more == (unsigned)(-1)) { | |
1598 | /* Very unlikely, but possible on a 16-bit machine if strstart == 0 | |
1599 | * and lookahead == 1 (input done one byte at a time) | |
1600 | */ | |
1601 | more--; | |
1602 | ||
1603 | /* If the window is almost full and there is insufficient lookahead, | |
1604 | * move the upper half to the lower one to make room in the upper half. | |
1605 | */ | |
1606 | } else if (s->strstart >= wsize+MAX_DIST(s)) { | |
1607 | ||
9bccf70c | 1608 | zmemcpy(s->window, s->window+wsize, (unsigned)wsize); |
1c79356b A |
1609 | s->match_start -= wsize; |
1610 | s->strstart -= wsize; /* we now have strstart >= MAX_DIST */ | |
1611 | s->block_start -= (long) wsize; | |
1612 | ||
1613 | /* Slide the hash table (could be avoided with 32 bit values | |
1614 | at the expense of memory usage). We slide even when level == 0 | |
1615 | to keep the hash table consistent if we switch back to level > 0 | |
1616 | later. (Using level 0 permanently is not an optimal usage of | |
1617 | zlib, so we don't care about this pathological case.) | |
1618 | */ | |
9bccf70c A |
1619 | n = s->hash_size; |
1620 | p = &s->head[n]; | |
1621 | do { | |
1622 | m = *--p; | |
1623 | *p = (Pos)(m >= wsize ? m-wsize : NIL); | |
1624 | } while (--n); | |
1625 | ||
1626 | n = wsize; | |
1627 | #ifndef FASTEST | |
1628 | p = &s->prev[n]; | |
1629 | do { | |
1630 | m = *--p; | |
1631 | *p = (Pos)(m >= wsize ? m-wsize : NIL); | |
1632 | /* If n is not on any hash chain, prev[n] is garbage but | |
1633 | * its value will never be used. | |
1634 | */ | |
1635 | } while (--n); | |
1636 | #endif | |
1c79356b A |
1637 | more += wsize; |
1638 | } | |
1639 | if (s->strm->avail_in == 0) return; | |
1640 | ||
1641 | /* If there was no sliding: | |
1642 | * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 && | |
1643 | * more == window_size - lookahead - strstart | |
1644 | * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1) | |
1645 | * => more >= window_size - 2*WSIZE + 2 | |
1646 | * In the BIG_MEM or MMAP case (not yet supported), | |
1647 | * window_size == input_size + MIN_LOOKAHEAD && | |
1648 | * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD. | |
1649 | * Otherwise, window_size == 2*WSIZE so more >= 2. | |
1650 | * If there was sliding, more >= WSIZE. So in all cases, more >= 2. | |
1651 | */ | |
1652 | Assert(more >= 2, "more < 2"); | |
1653 | ||
9bccf70c | 1654 | n = read_buf(s->strm, s->window + s->strstart + s->lookahead, more); |
1c79356b A |
1655 | s->lookahead += n; |
1656 | ||
1657 | /* Initialize the hash value now that we have some input: */ | |
1658 | if (s->lookahead >= MIN_MATCH) { | |
1659 | s->ins_h = s->window[s->strstart]; | |
1660 | UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]); | |
1661 | #if MIN_MATCH != 3 | |
1662 | Call UPDATE_HASH() MIN_MATCH-3 more times | |
1663 | #endif | |
1664 | } | |
1665 | /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage, | |
1666 | * but this is not important since only literal bytes will be emitted. | |
1667 | */ | |
1668 | ||
1669 | } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0); | |
1670 | } | |
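/*
 * Illustrative sketch, not part of the zlib sources: the hash-table slide
 * performed above.  When the window moves down by wsize bytes, every
 * position stored in head[] and prev[] must be rebased by wsize; positions
 * that would become negative are reset to NIL (0), meaning "no string at
 * this slot".  The helper name is made up.
 */
static void slide_hash_sketch(unsigned short *tab, unsigned n, unsigned wsize)
{
    while (n-- > 0) {
        unsigned m = tab[n];
        tab[n] = (unsigned short)(m >= wsize ? m - wsize : 0 /* NIL */);
    }
}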
1671 | ||
1672 | /* =========================================================================== | |
1673 | * Flush the current block, with given end-of-file flag. | |
1674 | * IN assertion: strstart is set to the end of the current match. | |
1675 | */ | |
1676 | #define FLUSH_BLOCK_ONLY(s, eof) { \ | |
1677 | _tr_flush_block(s, (s->block_start >= 0L ? \ | |
1678 | (charf *)&s->window[(unsigned)s->block_start] : \ | |
1679 | (charf *)Z_NULL), \ | |
1680 | (ulg)((long)s->strstart - s->block_start), \ | |
1681 | (eof)); \ | |
1682 | s->block_start = s->strstart; \ | |
1683 | flush_pending(s->strm); \ | |
1684 | Tracev((stderr,"[FLUSH]")); \ | |
1685 | } | |
1686 | ||
1687 | /* Same but force premature exit if necessary. */ | |
1688 | #define FLUSH_BLOCK(s, eof) { \ | |
1689 | FLUSH_BLOCK_ONLY(s, eof); \ | |
1690 | if (s->strm->avail_out == 0) return (eof) ? finish_started : need_more; \ | |
1691 | } | |
1692 | ||
1693 | /* =========================================================================== | |
1694 | * Copy without compression as much as possible from the input stream, return | |
1695 | * the current block state. | |
1696 | * This function does not insert new strings in the dictionary since | |
1697 | * incompressible data is probably not useful. This function is used | |
1698 | * only for the level=0 compression option. | |
1699 | * NOTE: this function should be optimized to avoid extra copying from | |
1700 | * window to pending_buf. | |
1701 | */ | |
1702 | local block_state deflate_stored(s, flush) | |
1703 | deflate_state *s; | |
1704 | int flush; | |
1705 | { | |
1706 | /* Stored blocks are limited to 0xffff bytes, pending_buf is limited | |
1707 | * to pending_buf_size, and each stored block has a 5 byte header: | |
1708 | */ | |
1709 | ulg max_block_size = 0xffff; | |
1710 | ulg max_start; | |
1711 | ||
1712 | if (max_block_size > s->pending_buf_size - 5) { | |
1713 | max_block_size = s->pending_buf_size - 5; | |
1714 | } | |
1715 | ||
1716 | /* Copy as much as possible from input to output: */ | |
1717 | for (;;) { | |
1718 | /* Fill the window as much as possible: */ | |
1719 | if (s->lookahead <= 1) { | |
1720 | ||
1721 | Assert(s->strstart < s->w_size+MAX_DIST(s) || | |
1722 | s->block_start >= (long)s->w_size, "slide too late"); | |
1723 | ||
1724 | fill_window(s); | |
1725 | if (s->lookahead == 0 && flush == Z_NO_FLUSH) return need_more; | |
1726 | ||
1727 | if (s->lookahead == 0) break; /* flush the current block */ | |
1728 | } | |
1729 | Assert(s->block_start >= 0L, "block gone"); | |
1730 | ||
1731 | s->strstart += s->lookahead; | |
1732 | s->lookahead = 0; | |
1733 | ||
1734 | /* Emit a stored block if pending_buf will be full: */ | |
1735 | max_start = s->block_start + max_block_size; | |
1736 | if (s->strstart == 0 || (ulg)s->strstart >= max_start) { | |
1737 | /* strstart == 0 is possible after wraparound on a 16-bit machine */ | |
1738 | s->lookahead = (uInt)(s->strstart - max_start); | |
1739 | s->strstart = (uInt)max_start; | |
1740 | FLUSH_BLOCK(s, 0); | |
1741 | } | |
1742 | /* Flush if we may have to slide, otherwise block_start may become | |
1743 | * negative and the data will be gone: | |
1744 | */ | |
1745 | if (s->strstart - (uInt)s->block_start >= MAX_DIST(s)) { | |
1746 | FLUSH_BLOCK(s, 0); | |
1747 | } | |
1748 | } | |
1749 | FLUSH_BLOCK(s, flush == Z_FINISH); | |
1750 | return flush == Z_FINISH ? finish_done : block_done; | |
1751 | } | |
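/*
 * Illustrative sketch, not part of the zlib sources: the "5 byte header"
 * budgeted for in deflate_stored().  A stored block consists of the 3-bit
 * block header (BFINAL + BTYPE 00, written by the bit routines and padded
 * to a byte boundary), then LEN and NLEN = ~LEN as 16-bit little-endian
 * values, then the raw bytes.  Only the byte-aligned part is sketched here,
 * and the function name is made up.
 */
#include <string.h>

static size_t stored_block_payload(unsigned char *out,
                                   const unsigned char *in,
                                   unsigned len)              /* len <= 0xffff */
{
    out[0] = (unsigned char)(len & 0xff);            /* LEN, LSB first          */
    out[1] = (unsigned char)((len >> 8) & 0xff);
    out[2] = (unsigned char)(~len & 0xff);           /* NLEN = one's complement */
    out[3] = (unsigned char)((~len >> 8) & 0xff);
    memcpy(out + 4, in, len);                        /* uncompressed data       */
    return (size_t)len + 4;
}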
1752 | ||
1753 | /* =========================================================================== | |
1754 | * Compress as much as possible from the input stream, return the current | |
1755 | * block state. | |
1756 | * This function does not perform lazy evaluation of matches and inserts | |
1757 | * new strings in the dictionary only for unmatched strings or for short | |
1758 | * matches. It is used only for the fast compression options. | |
1759 | */ | |
1760 | local block_state deflate_fast(s, flush) | |
1761 | deflate_state *s; | |
1762 | int flush; | |
1763 | { | |
1764 | IPos hash_head = NIL; /* head of the hash chain */ | |
1765 | int bflush; /* set if current block must be flushed */ | |
1766 | ||
1767 | for (;;) { | |
1768 | /* Make sure that we always have enough lookahead, except | |
1769 | * at the end of the input file. We need MAX_MATCH bytes | |
1770 | * for the next match, plus MIN_MATCH bytes to insert the | |
1771 | * string following the next match. | |
1772 | */ | |
1773 | if (s->lookahead < MIN_LOOKAHEAD) { | |
1774 | fill_window(s); | |
1775 | if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) { | |
1776 | return need_more; | |
1777 | } | |
1778 | if (s->lookahead == 0) break; /* flush the current block */ | |
1779 | } | |
1780 | ||
1781 | /* Insert the string window[strstart .. strstart+2] in the | |
1782 | * dictionary, and set hash_head to the head of the hash chain: | |
1783 | */ | |
1784 | if (s->lookahead >= MIN_MATCH) { | |
1785 | INSERT_STRING(s, s->strstart, hash_head); | |
1786 | } | |
1787 | ||
1788 | /* Find the longest match, discarding those <= prev_length. | |
1789 | * At this point we have always match_length < MIN_MATCH | |
1790 | */ | |
1791 | if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) { | |
1792 | /* To simplify the code, we prevent matches with the string | |
1793 | * of window index 0 (in particular we have to avoid a match | |
1794 | * of the string with itself at the start of the input file). | |
1795 | */ | |
1796 | if (s->strategy != Z_HUFFMAN_ONLY) { | |
1797 | s->match_length = longest_match (s, hash_head); | |
1798 | } | |
1799 | /* longest_match() sets match_start */ | |
1800 | } | |
1801 | if (s->match_length >= MIN_MATCH) { | |
1802 | check_match(s, s->strstart, s->match_start, s->match_length); | |
1803 | ||
9bccf70c A |
1804 | _tr_tally_dist(s, s->strstart - s->match_start, |
1805 | s->match_length - MIN_MATCH, bflush); | |
1c79356b A |
1806 | |
1807 | s->lookahead -= s->match_length; | |
1808 | ||
1809 | /* Insert new strings in the hash table only if the match length | |
1810 | * is not too large. This saves time but degrades compression. | |
1811 | */ | |
9bccf70c | 1812 | #ifndef FASTEST |
1c79356b A |
1813 | if (s->match_length <= s->max_insert_length && |
1814 | s->lookahead >= MIN_MATCH) { | |
1815 | s->match_length--; /* string at strstart already in hash table */ | |
1816 | do { | |
1817 | s->strstart++; | |
1818 | INSERT_STRING(s, s->strstart, hash_head); | |
1819 | /* strstart never exceeds WSIZE-MAX_MATCH, so there are | |
1820 | * always MIN_MATCH bytes ahead. | |
1821 | */ | |
1822 | } while (--s->match_length != 0); | |
1823 | s->strstart++; | |
9bccf70c A |
1824 | } else |
1825 | #endif | |
1826 | { | |
1c79356b A |
1827 | s->strstart += s->match_length; |
1828 | s->match_length = 0; | |
1829 | s->ins_h = s->window[s->strstart]; | |
1830 | UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]); | |
1831 | #if MIN_MATCH != 3 | |
1832 | Call UPDATE_HASH() MIN_MATCH-3 more times | |
1833 | #endif | |
1834 | /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not | |
1835 | * matter since it will be recomputed at next deflate call. | |
1836 | */ | |
1837 | } | |
1838 | } else { | |
1839 | /* No match, output a literal byte */ | |
1840 | Tracevv((stderr,"%c", s->window[s->strstart])); | |
9bccf70c | 1841 | _tr_tally_lit (s, s->window[s->strstart], bflush); |
1c79356b A |
1842 | s->lookahead--; |
1843 | s->strstart++; | |
1844 | } | |
1845 | if (bflush) FLUSH_BLOCK(s, 0); | |
1846 | } | |
1847 | FLUSH_BLOCK(s, flush == Z_FINISH); | |
1848 | return flush == Z_FINISH ? finish_done : block_done; | |
1849 | } | |
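/*
 * Illustrative sketch, not part of the zlib sources: the head[]/prev[]
 * chains that INSERT_STRING maintains for the loops above.  head[h] holds
 * the most recent window position whose 3-byte string hashes to h, and
 * prev[] links each position to the previous one with the same hash --
 * exactly the chain longest_match() walks.  The real UPDATE_HASH computes
 * the hash incrementally with a rolling shift; the mixing below is just for
 * the sketch, and all names are made up.
 */
#define SKETCH_HASH_BITS 8
#define SKETCH_HASH_MASK ((1u << SKETCH_HASH_BITS) - 1)

static unsigned hash3_sketch(const unsigned char *p)
{
    return ((unsigned)p[0] * 131u + (unsigned)p[1] * 31u + p[2]) & SKETCH_HASH_MASK;
}

static void insert_string_sketch(unsigned short *head, unsigned short *prev,
                                 const unsigned char *win, unsigned pos,
                                 unsigned wmask)
{
    unsigned h = hash3_sketch(win + pos);
    prev[pos & wmask] = head[h];      /* remember the previous occurrence (0 == NIL) */
    head[h] = (unsigned short)pos;    /* this position becomes the chain head        */
}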
1850 | ||
1851 | /* =========================================================================== | |
1852 | * Same as above, but achieves better compression. We use a lazy | |
1853 | * evaluation for matches: a match is finally adopted only if there is | |
1854 | * no better match at the next window position. | |
1855 | */ | |
1856 | local block_state deflate_slow(s, flush) | |
1857 | deflate_state *s; | |
1858 | int flush; | |
1859 | { | |
1860 | IPos hash_head = NIL; /* head of hash chain */ | |
1861 | int bflush; /* set if current block must be flushed */ | |
1862 | ||
1863 | /* Process the input block. */ | |
1864 | for (;;) { | |
1865 | /* Make sure that we always have enough lookahead, except | |
1866 | * at the end of the input file. We need MAX_MATCH bytes | |
1867 | * for the next match, plus MIN_MATCH bytes to insert the | |
1868 | * string following the next match. | |
1869 | */ | |
1870 | if (s->lookahead < MIN_LOOKAHEAD) { | |
1871 | fill_window(s); | |
1872 | if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) { | |
1873 | return need_more; | |
1874 | } | |
1875 | if (s->lookahead == 0) break; /* flush the current block */ | |
1876 | } | |
1877 | ||
1878 | /* Insert the string window[strstart .. strstart+2] in the | |
1879 | * dictionary, and set hash_head to the head of the hash chain: | |
1880 | */ | |
1881 | if (s->lookahead >= MIN_MATCH) { | |
1882 | INSERT_STRING(s, s->strstart, hash_head); | |
1883 | } | |
1884 | ||
1885 | /* Find the longest match, discarding those <= prev_length. | |
1886 | */ | |
1887 | s->prev_length = s->match_length, s->prev_match = s->match_start; | |
1888 | s->match_length = MIN_MATCH-1; | |
1889 | ||
1890 | if (hash_head != NIL && s->prev_length < s->max_lazy_match && | |
1891 | s->strstart - hash_head <= MAX_DIST(s)) { | |
1892 | /* To simplify the code, we prevent matches with the string | |
1893 | * of window index 0 (in particular we have to avoid a match | |
1894 | * of the string with itself at the start of the input file). | |
1895 | */ | |
1896 | if (s->strategy != Z_HUFFMAN_ONLY) { | |
1897 | s->match_length = longest_match (s, hash_head); | |
1898 | } | |
1899 | /* longest_match() sets match_start */ | |
1900 | ||
1901 | if (s->match_length <= 5 && (s->strategy == Z_FILTERED || | |
1902 | (s->match_length == MIN_MATCH && | |
1903 | s->strstart - s->match_start > TOO_FAR))) { | |
1904 | ||
1905 | /* If prev_match is also MIN_MATCH, match_start is garbage | |
1906 | * but we will ignore the current match anyway. | |
1907 | */ | |
1908 | s->match_length = MIN_MATCH-1; | |
1909 | } | |
1910 | } | |
1911 | /* If there was a match at the previous step and the current | |
1912 | * match is not better, output the previous match: | |
1913 | */ | |
1914 | if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) { | |
1915 | uInt max_insert = s->strstart + s->lookahead - MIN_MATCH; | |
1916 | /* Do not insert strings in hash table beyond this. */ | |
1917 | ||
1918 | check_match(s, s->strstart-1, s->prev_match, s->prev_length); | |
1919 | ||
9bccf70c A |
1920 | _tr_tally_dist(s, s->strstart -1 - s->prev_match, |
1921 | s->prev_length - MIN_MATCH, bflush); | |
1c79356b A |
1922 | |
1923 | /* Insert in hash table all strings up to the end of the match. | |
1924 | * strstart-1 and strstart are already inserted. If there is not | |
1925 | * enough lookahead, the last two strings are not inserted in | |
1926 | * the hash table. | |
1927 | */ | |
1928 | s->lookahead -= s->prev_length-1; | |
1929 | s->prev_length -= 2; | |
1930 | do { | |
1931 | if (++s->strstart <= max_insert) { | |
1932 | INSERT_STRING(s, s->strstart, hash_head); | |
1933 | } | |
1934 | } while (--s->prev_length != 0); | |
1935 | s->match_available = 0; | |
1936 | s->match_length = MIN_MATCH-1; | |
1937 | s->strstart++; | |
1938 | ||
1939 | if (bflush) FLUSH_BLOCK(s, 0); | |
1940 | ||
1941 | } else if (s->match_available) { | |
1942 | /* If there was no match at the previous position, output a | |
1943 | * single literal. If there was a match but the current match | |
1944 | * is longer, truncate the previous match to a single literal. | |
1945 | */ | |
1946 | Tracevv((stderr,"%c", s->window[s->strstart-1])); | |
9bccf70c A |
1947 | _tr_tally_lit(s, s->window[s->strstart-1], bflush); |
1948 | if (bflush) { | |
1c79356b A |
1949 | FLUSH_BLOCK_ONLY(s, 0); |
1950 | } | |
1951 | s->strstart++; | |
1952 | s->lookahead--; | |
1953 | if (s->strm->avail_out == 0) return need_more; | |
1954 | } else { | |
1955 | /* There is no previous match to compare with, wait for | |
1956 | * the next step to decide. | |
1957 | */ | |
1958 | s->match_available = 1; | |
1959 | s->strstart++; | |
1960 | s->lookahead--; | |
1961 | } | |
1962 | } | |
1963 | Assert (flush != Z_NO_FLUSH, "no flush?"); | |
1964 | if (s->match_available) { | |
1965 | Tracevv((stderr,"%c", s->window[s->strstart-1])); | |
9bccf70c | 1966 | _tr_tally_lit(s, s->window[s->strstart-1], bflush); |
1c79356b A |
1967 | s->match_available = 0; |
1968 | } | |
1969 | FLUSH_BLOCK(s, flush == Z_FINISH); | |
1970 | return flush == Z_FINISH ? finish_done : block_done; | |
1971 | } | |
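/*
 * Illustrative sketch, not part of the zlib sources: the lazy-evaluation
 * rule applied above, reduced to the single comparison that decides whether
 * the match found at position i is kept or deferred.  The helper name is
 * made up; it mirrors the test on prev_length and match_length in
 * deflate_slow().
 */
static int keep_previous_match(unsigned prev_length, unsigned match_length)
{
    /* Non-zero: emit the match found at i.  Zero: emit the byte at i as a
     * literal and keep the strictly longer match found at i+1 pending. */
    return prev_length >= 3 /* MIN_MATCH */ && match_length <= prev_length;
}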
1972 | /* --- deflate.c */ | |
1973 | ||
1974 | /* +++ trees.c */ | |
1975 | /* trees.c -- output deflated data using Huffman coding | |
9bccf70c | 1976 | * Copyright (C) 1995-2002 Jean-loup Gailly |
1c79356b A |
1977 | * For conditions of distribution and use, see copyright notice in zlib.h |
1978 | */ | |
1979 | ||
1980 | /* | |
1981 | * ALGORITHM | |
1982 | * | |
1983 | * The "deflation" process uses several Huffman trees. The more | |
1984 | * common source values are represented by shorter bit sequences. | |
1985 | * | |
1986 | * Each code tree is stored in a compressed form which is itself | |
1987 | * a Huffman encoding of the lengths of all the code strings (in | |
1988 | * ascending order by source values). The actual code strings are | |
1989 | * reconstructed from the lengths in the inflate process, as described | |
1990 | * in the deflate specification. | |
1991 | * | |
1992 | * REFERENCES | |
1993 | * | |
1994 | * Deutsch, L.P.,"'Deflate' Compressed Data Format Specification". | |
1995 | * Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc | |
1996 | * | |
1997 | * Storer, James A. | |
1998 | * Data Compression: Methods and Theory, pp. 49-50. | |
1999 | * Computer Science Press, 1988. ISBN 0-7167-8156-5. | |
2000 | * | |
2001 | * Sedgewick, R. | |
2002 | * Algorithms, p290. | |
2003 | * Addison-Wesley, 1983. ISBN 0-201-06672-6. | |
2004 | */ | |
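/*
 * Illustrative sketch, not part of the zlib sources: how canonical Huffman
 * codes are rebuilt from code lengths alone, the scheme the comment above
 * refers to (and the one gen_codes() applies further down).  Codes of equal
 * length are consecutive integers, and the first code of each length is
 * derived from the counts of the shorter lengths.  All names are made up;
 * for example, lengths {2,1,3,3} yield the codes {10, 0, 110, 111}.
 */
#define SKETCH_MAX_BITS 15

static void codes_from_lengths(const int *len, int n, unsigned *code)
{
    unsigned bl_count[SKETCH_MAX_BITS + 1] = {0};
    unsigned next_code[SKETCH_MAX_BITS + 1];
    unsigned c = 0;
    int bits, i;

    for (i = 0; i < n; i++) bl_count[len[i]]++;   /* histogram; 0 <= len[i] <= 15 */
    bl_count[0] = 0;                              /* length 0 = symbol unused     */
    for (bits = 1; bits <= SKETCH_MAX_BITS; bits++) {
        c = (c + bl_count[bits - 1]) << 1;        /* first code of this length    */
        next_code[bits] = c;
    }
    for (i = 0; i < n; i++)                       /* hand out consecutive codes   */
        if (len[i] != 0) code[i] = next_code[len[i]]++;
}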
2005 | ||
4452a7af | 2006 | /* @(#) $Id: zlib.c,v 1.10.874.1 2005/06/24 01:47:11 lindak Exp $ */ |
9bccf70c A |
2007 | |
2008 | /* #define GEN_TREES_H */ | |
1c79356b A |
2009 | |
2010 | /* #include "deflate.h" */ | |
2011 | ||
2012 | #ifdef DEBUG_ZLIB | |
2013 | # include <ctype.h> | |
2014 | #endif | |
2015 | ||
2016 | /* =========================================================================== | |
2017 | * Constants | |
2018 | */ | |
2019 | ||
2020 | #define MAX_BL_BITS 7 | |
2021 | /* Bit length codes must not exceed MAX_BL_BITS bits */ | |
2022 | ||
2023 | #define END_BLOCK 256 | |
2024 | /* end of block literal code */ | |
2025 | ||
2026 | #define REP_3_6 16 | |
2027 | /* repeat previous bit length 3-6 times (2 bits of repeat count) */ | |
2028 | ||
2029 | #define REPZ_3_10 17 | |
2030 | /* repeat a zero length 3-10 times (3 bits of repeat count) */ | |
2031 | ||
2032 | #define REPZ_11_138 18 | |
2033 | /* repeat a zero length 11-138 times (7 bits of repeat count) */ | |
2034 | ||
9bccf70c | 2035 | local const int extra_lbits[LENGTH_CODES] /* extra bits for each length code */ |
1c79356b A |
2036 | = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0}; |
2037 | ||
9bccf70c | 2038 | local const int extra_dbits[D_CODES] /* extra bits for each distance code */ |
1c79356b A |
2039 | = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13}; |
2040 | ||
9bccf70c | 2041 | local const int extra_blbits[BL_CODES]/* extra bits for each bit length code */ |
1c79356b A |
2042 | = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7}; |
2043 | ||
9bccf70c | 2044 | local const uch bl_order[BL_CODES] |
1c79356b A |
2045 | = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15}; |
2046 | /* The lengths of the bit length codes are sent in order of decreasing | |
2047 | * probability, to avoid transmitting the lengths for unused bit length codes. | |
2048 | */ | |
2049 | ||
2050 | #define Buf_size (8 * 2*sizeof(char)) | |
2051 | /* Number of bits used within bi_buf. (bi_buf might be implemented on | |
2052 | * more than 16 bits on some systems.) | |
2053 | */ | |
2054 | ||
2055 | /* =========================================================================== | |
2056 | * Local data. These are initialized only once. | |
2057 | */ | |
2058 | ||
9bccf70c A |
2059 | #define DIST_CODE_LEN 512 /* see definition of array dist_code below */ |
2060 | ||
2061 | #if defined(GEN_TREES_H) || !defined(STDC) | |
2062 | /* non ANSI compilers may not accept trees.h */ | |
2063 | ||
55e303ae | 2064 | local ct_data *static_ltree = Z_NULL; |
1c79356b A |
2065 | /* The static literal tree. Since the bit lengths are imposed, there is no |
2066 | * need for the L_CODES extra codes used during heap construction. However | |
2067 | * The codes 286 and 287 are needed to build a canonical tree (see _tr_init | |
2068 | * below). | |
2069 | */ | |
2070 | ||
55e303ae | 2071 | local ct_data *static_dtree = Z_NULL; |
1c79356b A |
2072 | /* The static distance tree. (Actually a trivial tree since all codes use |
2073 | * 5 bits.) | |
2074 | */ | |
2075 | ||
55e303ae | 2076 | uch *_dist_code = Z_NULL; |
9bccf70c | 2077 | /* Distance codes. The first 256 values correspond to the distances |
1c79356b A |
2078 | * 3 .. 258, the last 256 values correspond to the top 8 bits of |
2079 | * the 15 bit distances. | |
2080 | */ | |
2081 | ||
55e303ae | 2082 | uch *_length_code = Z_NULL; |
1c79356b A |
2083 | /* length code for each normalized match length (0 == MIN_MATCH) */ |
2084 | ||
55e303ae | 2085 | local int *base_length = Z_NULL; |
1c79356b A |
2086 | /* First normalized length for each code (0 = MIN_MATCH) */ |
2087 | ||
55e303ae | 2088 | local int *base_dist = Z_NULL; |
1c79356b A |
2089 | /* First normalized distance for each code (0 = distance of 1) */ |
2090 | ||
9bccf70c A |
2091 | #else |
2092 | /* +++ trees.h */ | |
2093 | /* header created automatically with -DGEN_TREES_H */ | |
2094 | ||
2095 | local const ct_data static_ltree[L_CODES+2] = { | |
2096 | {{ 12},{ 8}}, {{140},{ 8}}, {{ 76},{ 8}}, {{204},{ 8}}, {{ 44},{ 8}}, | |
2097 | {{172},{ 8}}, {{108},{ 8}}, {{236},{ 8}}, {{ 28},{ 8}}, {{156},{ 8}}, | |
2098 | {{ 92},{ 8}}, {{220},{ 8}}, {{ 60},{ 8}}, {{188},{ 8}}, {{124},{ 8}}, | |
2099 | {{252},{ 8}}, {{ 2},{ 8}}, {{130},{ 8}}, {{ 66},{ 8}}, {{194},{ 8}}, | |
2100 | {{ 34},{ 8}}, {{162},{ 8}}, {{ 98},{ 8}}, {{226},{ 8}}, {{ 18},{ 8}}, | |
2101 | {{146},{ 8}}, {{ 82},{ 8}}, {{210},{ 8}}, {{ 50},{ 8}}, {{178},{ 8}}, | |
2102 | {{114},{ 8}}, {{242},{ 8}}, {{ 10},{ 8}}, {{138},{ 8}}, {{ 74},{ 8}}, | |
2103 | {{202},{ 8}}, {{ 42},{ 8}}, {{170},{ 8}}, {{106},{ 8}}, {{234},{ 8}}, | |
2104 | {{ 26},{ 8}}, {{154},{ 8}}, {{ 90},{ 8}}, {{218},{ 8}}, {{ 58},{ 8}}, | |
2105 | {{186},{ 8}}, {{122},{ 8}}, {{250},{ 8}}, {{ 6},{ 8}}, {{134},{ 8}}, | |
2106 | {{ 70},{ 8}}, {{198},{ 8}}, {{ 38},{ 8}}, {{166},{ 8}}, {{102},{ 8}}, | |
2107 | {{230},{ 8}}, {{ 22},{ 8}}, {{150},{ 8}}, {{ 86},{ 8}}, {{214},{ 8}}, | |
2108 | {{ 54},{ 8}}, {{182},{ 8}}, {{118},{ 8}}, {{246},{ 8}}, {{ 14},{ 8}}, | |
2109 | {{142},{ 8}}, {{ 78},{ 8}}, {{206},{ 8}}, {{ 46},{ 8}}, {{174},{ 8}}, | |
2110 | {{110},{ 8}}, {{238},{ 8}}, {{ 30},{ 8}}, {{158},{ 8}}, {{ 94},{ 8}}, | |
2111 | {{222},{ 8}}, {{ 62},{ 8}}, {{190},{ 8}}, {{126},{ 8}}, {{254},{ 8}}, | |
2112 | {{ 1},{ 8}}, {{129},{ 8}}, {{ 65},{ 8}}, {{193},{ 8}}, {{ 33},{ 8}}, | |
2113 | {{161},{ 8}}, {{ 97},{ 8}}, {{225},{ 8}}, {{ 17},{ 8}}, {{145},{ 8}}, | |
2114 | {{ 81},{ 8}}, {{209},{ 8}}, {{ 49},{ 8}}, {{177},{ 8}}, {{113},{ 8}}, | |
2115 | {{241},{ 8}}, {{ 9},{ 8}}, {{137},{ 8}}, {{ 73},{ 8}}, {{201},{ 8}}, | |
2116 | {{ 41},{ 8}}, {{169},{ 8}}, {{105},{ 8}}, {{233},{ 8}}, {{ 25},{ 8}}, | |
2117 | {{153},{ 8}}, {{ 89},{ 8}}, {{217},{ 8}}, {{ 57},{ 8}}, {{185},{ 8}}, | |
2118 | {{121},{ 8}}, {{249},{ 8}}, {{ 5},{ 8}}, {{133},{ 8}}, {{ 69},{ 8}}, | |
2119 | {{197},{ 8}}, {{ 37},{ 8}}, {{165},{ 8}}, {{101},{ 8}}, {{229},{ 8}}, | |
2120 | {{ 21},{ 8}}, {{149},{ 8}}, {{ 85},{ 8}}, {{213},{ 8}}, {{ 53},{ 8}}, | |
2121 | {{181},{ 8}}, {{117},{ 8}}, {{245},{ 8}}, {{ 13},{ 8}}, {{141},{ 8}}, | |
2122 | {{ 77},{ 8}}, {{205},{ 8}}, {{ 45},{ 8}}, {{173},{ 8}}, {{109},{ 8}}, | |
2123 | {{237},{ 8}}, {{ 29},{ 8}}, {{157},{ 8}}, {{ 93},{ 8}}, {{221},{ 8}}, | |
2124 | {{ 61},{ 8}}, {{189},{ 8}}, {{125},{ 8}}, {{253},{ 8}}, {{ 19},{ 9}}, | |
2125 | {{275},{ 9}}, {{147},{ 9}}, {{403},{ 9}}, {{ 83},{ 9}}, {{339},{ 9}}, | |
2126 | {{211},{ 9}}, {{467},{ 9}}, {{ 51},{ 9}}, {{307},{ 9}}, {{179},{ 9}}, | |
2127 | {{435},{ 9}}, {{115},{ 9}}, {{371},{ 9}}, {{243},{ 9}}, {{499},{ 9}}, | |
2128 | {{ 11},{ 9}}, {{267},{ 9}}, {{139},{ 9}}, {{395},{ 9}}, {{ 75},{ 9}}, | |
2129 | {{331},{ 9}}, {{203},{ 9}}, {{459},{ 9}}, {{ 43},{ 9}}, {{299},{ 9}}, | |
2130 | {{171},{ 9}}, {{427},{ 9}}, {{107},{ 9}}, {{363},{ 9}}, {{235},{ 9}}, | |
2131 | {{491},{ 9}}, {{ 27},{ 9}}, {{283},{ 9}}, {{155},{ 9}}, {{411},{ 9}}, | |
2132 | {{ 91},{ 9}}, {{347},{ 9}}, {{219},{ 9}}, {{475},{ 9}}, {{ 59},{ 9}}, | |
2133 | {{315},{ 9}}, {{187},{ 9}}, {{443},{ 9}}, {{123},{ 9}}, {{379},{ 9}}, | |
2134 | {{251},{ 9}}, {{507},{ 9}}, {{ 7},{ 9}}, {{263},{ 9}}, {{135},{ 9}}, | |
2135 | {{391},{ 9}}, {{ 71},{ 9}}, {{327},{ 9}}, {{199},{ 9}}, {{455},{ 9}}, | |
2136 | {{ 39},{ 9}}, {{295},{ 9}}, {{167},{ 9}}, {{423},{ 9}}, {{103},{ 9}}, | |
2137 | {{359},{ 9}}, {{231},{ 9}}, {{487},{ 9}}, {{ 23},{ 9}}, {{279},{ 9}}, | |
2138 | {{151},{ 9}}, {{407},{ 9}}, {{ 87},{ 9}}, {{343},{ 9}}, {{215},{ 9}}, | |
2139 | {{471},{ 9}}, {{ 55},{ 9}}, {{311},{ 9}}, {{183},{ 9}}, {{439},{ 9}}, | |
2140 | {{119},{ 9}}, {{375},{ 9}}, {{247},{ 9}}, {{503},{ 9}}, {{ 15},{ 9}}, | |
2141 | {{271},{ 9}}, {{143},{ 9}}, {{399},{ 9}}, {{ 79},{ 9}}, {{335},{ 9}}, | |
2142 | {{207},{ 9}}, {{463},{ 9}}, {{ 47},{ 9}}, {{303},{ 9}}, {{175},{ 9}}, | |
2143 | {{431},{ 9}}, {{111},{ 9}}, {{367},{ 9}}, {{239},{ 9}}, {{495},{ 9}}, | |
2144 | {{ 31},{ 9}}, {{287},{ 9}}, {{159},{ 9}}, {{415},{ 9}}, {{ 95},{ 9}}, | |
2145 | {{351},{ 9}}, {{223},{ 9}}, {{479},{ 9}}, {{ 63},{ 9}}, {{319},{ 9}}, | |
2146 | {{191},{ 9}}, {{447},{ 9}}, {{127},{ 9}}, {{383},{ 9}}, {{255},{ 9}}, | |
2147 | {{511},{ 9}}, {{ 0},{ 7}}, {{ 64},{ 7}}, {{ 32},{ 7}}, {{ 96},{ 7}}, | |
2148 | {{ 16},{ 7}}, {{ 80},{ 7}}, {{ 48},{ 7}}, {{112},{ 7}}, {{ 8},{ 7}}, | |
2149 | {{ 72},{ 7}}, {{ 40},{ 7}}, {{104},{ 7}}, {{ 24},{ 7}}, {{ 88},{ 7}}, | |
2150 | {{ 56},{ 7}}, {{120},{ 7}}, {{ 4},{ 7}}, {{ 68},{ 7}}, {{ 36},{ 7}}, | |
2151 | {{100},{ 7}}, {{ 20},{ 7}}, {{ 84},{ 7}}, {{ 52},{ 7}}, {{116},{ 7}}, | |
2152 | {{ 3},{ 8}}, {{131},{ 8}}, {{ 67},{ 8}}, {{195},{ 8}}, {{ 35},{ 8}}, | |
2153 | {{163},{ 8}}, {{ 99},{ 8}}, {{227},{ 8}} | |
2154 | }; | |
2155 | ||
2156 | local const ct_data static_dtree[D_CODES] = { | |
2157 | {{ 0},{ 5}}, {{16},{ 5}}, {{ 8},{ 5}}, {{24},{ 5}}, {{ 4},{ 5}}, | |
2158 | {{20},{ 5}}, {{12},{ 5}}, {{28},{ 5}}, {{ 2},{ 5}}, {{18},{ 5}}, | |
2159 | {{10},{ 5}}, {{26},{ 5}}, {{ 6},{ 5}}, {{22},{ 5}}, {{14},{ 5}}, | |
2160 | {{30},{ 5}}, {{ 1},{ 5}}, {{17},{ 5}}, {{ 9},{ 5}}, {{25},{ 5}}, | |
2161 | {{ 5},{ 5}}, {{21},{ 5}}, {{13},{ 5}}, {{29},{ 5}}, {{ 3},{ 5}}, | |
2162 | {{19},{ 5}}, {{11},{ 5}}, {{27},{ 5}}, {{ 7},{ 5}}, {{23},{ 5}} | |
2163 | }; | |
2164 | ||
2165 | const uch _dist_code[DIST_CODE_LEN] = { | |
2166 | 0, 1, 2, 3, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8, | |
2167 | 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 10, | |
2168 | 10, 10, 10, 10, 10, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, | |
2169 | 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, | |
2170 | 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13, | |
2171 | 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, | |
2172 | 13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, | |
2173 | 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, | |
2174 | 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, | |
2175 | 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15, | |
2176 | 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, | |
2177 | 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, | |
2178 | 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 0, 0, 16, 17, | |
2179 | 18, 18, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 22, | |
2180 | 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, | |
2181 | 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, | |
2182 | 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, | |
2183 | 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, | |
2184 | 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, | |
2185 | 27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, | |
2186 | 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, | |
2187 | 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, | |
2188 | 28, 28, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, | |
2189 | 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, | |
2190 | 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, | |
2191 | 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29 | |
2192 | }; | |
2193 | ||
2194 | const uch _length_code[MAX_MATCH-MIN_MATCH+1]= { | |
2195 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 12, 12, | |
2196 | 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 16, 16, | |
2197 | 17, 17, 17, 17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, | |
2198 | 19, 19, 19, 19, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, | |
2199 | 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22, | |
2200 | 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, 23, 23, 23, 23, | |
2201 | 23, 23, 23, 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, | |
2202 | 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, | |
2203 | 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, | |
2204 | 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, 26, 26, | |
2205 | 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, | |
2206 | 26, 26, 26, 26, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, | |
2207 | 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 28 | |
2208 | }; | |
2209 | ||
2210 | local const int base_length[LENGTH_CODES] = { | |
2211 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56, | |
2212 | 64, 80, 96, 112, 128, 160, 192, 224, 0 | |
2213 | }; | |
2214 | ||
2215 | local const int base_dist[D_CODES] = { | |
2216 | 0, 1, 2, 3, 4, 6, 8, 12, 16, 24, | |
2217 | 32, 48, 64, 96, 128, 192, 256, 384, 512, 768, | |
2218 | 1024, 1536, 2048, 3072, 4096, 6144, 8192, 12288, 16384, 24576 | |
2219 | }; | |
2220 | ||
2221 | /* --- trees.h */ | |
2222 | #endif /* GEN_TREES_H */ | |
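/*
 * Illustrative sketch, not part of the zlib sources: the Code values in the
 * tables above are stored bit-reversed, because send_bits() emits the least
 * significant bit first.  For instance, RFC 1951 gives literal 0 the 8-bit
 * fixed code 0x30 (00110000); reversing those 8 bits gives 00001100 = 12,
 * which is the first static_ltree entry, {{ 12},{ 8}}.  The helper name is
 * made up (the library's own bi_reverse() performs the same job).
 */
static unsigned reverse_bits_sketch(unsigned code, int len)
{
    unsigned res = 0;
    do {
        res |= code & 1;
        code >>= 1, res <<= 1;
    } while (--len > 0);
    return res >> 1;           /* reverse_bits_sketch(0x30, 8) == 12 */
}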
2223 | ||
1c79356b | 2224 | struct static_tree_desc_s { |
9bccf70c A |
2225 | const ct_data *static_tree; /* static tree or NULL */ |
2226 | const intf *extra_bits; /* extra bits for each code or NULL */ | |
1c79356b A |
2227 | int extra_base; /* base index for extra_bits */ |
2228 | int elems; /* max number of elements in the tree */ | |
2229 | int max_length; /* max bit length for the codes */ | |
2230 | }; | |
2231 | ||
2232 | local static_tree_desc static_l_desc = | |
55e303ae | 2233 | {NULL, extra_lbits, LITERALS+1, L_CODES, MAX_BITS}; |
1c79356b A |
2234 | |
2235 | local static_tree_desc static_d_desc = | |
55e303ae | 2236 | {NULL, extra_dbits, 0, D_CODES, MAX_BITS}; |
1c79356b A |
2237 | |
2238 | local static_tree_desc static_bl_desc = | |
9bccf70c | 2239 | {(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS}; |
1c79356b A |
2240 | |
2241 | /* =========================================================================== | |
2242 | * Local (static) routines in this file. | |
2243 | */ | |
2244 | ||
55e303ae | 2245 | local int tr_static_init OF((z_streamp z)); |
1c79356b A |
2246 | local void init_block OF((deflate_state *s)); |
2247 | local void pqdownheap OF((deflate_state *s, ct_data *tree, int k)); | |
2248 | local void gen_bitlen OF((deflate_state *s, tree_desc *desc)); | |
2249 | local void gen_codes OF((ct_data *tree, int max_code, ushf *bl_count)); | |
2250 | local void build_tree OF((deflate_state *s, tree_desc *desc)); | |
2251 | local void scan_tree OF((deflate_state *s, ct_data *tree, int max_code)); | |
2252 | local void send_tree OF((deflate_state *s, ct_data *tree, int max_code)); | |
2253 | local int build_bl_tree OF((deflate_state *s)); | |
2254 | local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes, | |
2255 | int blcodes)); | |
2256 | local void compress_block OF((deflate_state *s, ct_data *ltree, | |
2257 | ct_data *dtree)); | |
2258 | local void set_data_type OF((deflate_state *s)); | |
2259 | local unsigned bi_reverse OF((unsigned value, int length)); | |
2260 | local void bi_windup OF((deflate_state *s)); | |
2261 | local void bi_flush OF((deflate_state *s)); | |
2262 | local void copy_block OF((deflate_state *s, charf *buf, unsigned len, | |
2263 | int header)); | |
2264 | ||
9bccf70c A |
2265 | #ifdef GEN_TREES_H |
2266 | local void gen_trees_header OF((void)); | |
2267 | #endif | |
2268 | ||
1c79356b A |
2269 | #ifndef DEBUG_ZLIB |
2270 | # define send_code(s, c, tree) send_bits(s, tree[c].Code, tree[c].Len) | |
2271 | /* Send a code of the given tree. c and tree must not have side effects */ | |
2272 | ||
2273 | #else /* DEBUG_ZLIB */ | |
2274 | # define send_code(s, c, tree) \ | |
9bccf70c | 2275 | { if (z_verbose>2) fprintf(stderr,"\ncd %3d ",(c)); \ |
1c79356b A |
2276 | send_bits(s, tree[c].Code, tree[c].Len); } |
2277 | #endif | |
2278 | ||
1c79356b A |
2279 | /* =========================================================================== |
2280 | * Output a short LSB first on the stream. | |
2281 | * IN assertion: there is enough room in pendingBuf. | |
2282 | */ | |
2283 | #define put_short(s, w) { \ | |
2284 | put_byte(s, (uch)((w) & 0xff)); \ | |
2285 | put_byte(s, (uch)((ush)(w) >> 8)); \ | |
2286 | } | |
2287 | ||
2288 | /* =========================================================================== | |
2289 | * Send a value on a given number of bits. | |
2290 | * IN assertion: length <= 16 and value fits in length bits. | |
2291 | */ | |
2292 | #ifdef DEBUG_ZLIB | |
2293 | local void send_bits OF((deflate_state *s, int value, int length)); | |
2294 | ||
2295 | local void send_bits(s, value, length) | |
2296 | deflate_state *s; | |
2297 | int value; /* value to send */ | |
2298 | int length; /* number of bits */ | |
2299 | { | |
2300 | Tracevv((stderr," l %2d v %4x ", length, value)); | |
2301 | Assert(length > 0 && length <= 15, "invalid length"); | |
2302 | s->bits_sent += (ulg)length; | |
2303 | ||
2304 | /* If not enough room in bi_buf, use (valid) bits from bi_buf and | |
2305 | * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid)) | |
2306 | * unused bits in value. | |
2307 | */ | |
2308 | if (s->bi_valid > (int)Buf_size - length) { | |
2309 | s->bi_buf |= (value << s->bi_valid); | |
2310 | put_short(s, s->bi_buf); | |
2311 | s->bi_buf = (ush)value >> (Buf_size - s->bi_valid); | |
2312 | s->bi_valid += length - Buf_size; | |
2313 | } else { | |
2314 | s->bi_buf |= value << s->bi_valid; | |
2315 | s->bi_valid += length; | |
2316 | } | |
2317 | } | |
2318 | #else /* !DEBUG_ZLIB */ | |
2319 | ||
2320 | #define send_bits(s, value, length) \ | |
2321 | { int len = length;\ | |
2322 | if (s->bi_valid > (int)Buf_size - len) {\ | |
2323 | int val = value;\ | |
2324 | s->bi_buf |= (val << s->bi_valid);\ | |
2325 | put_short(s, s->bi_buf);\ | |
2326 | s->bi_buf = (ush)val >> (Buf_size - s->bi_valid);\ | |
2327 | s->bi_valid += len - Buf_size;\ | |
2328 | } else {\ | |
2329 | s->bi_buf |= (value) << s->bi_valid;\ | |
2330 | s->bi_valid += len;\ | |
2331 | }\ | |
2332 | } | |
2333 | #endif /* DEBUG_ZLIB */ | |
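/*
 * Illustrative sketch, not part of the zlib sources: the bit buffer behind
 * send_bits()/put_short() above.  Values are packed least-significant bit
 * first into a 16-bit accumulator; when fewer than `length` bits are free,
 * the full accumulator is written out low byte first and the leftover bits
 * start the next one.  The final flush of a partly filled buffer (cf.
 * bi_windup()) is omitted, and all names here are made up.
 */
#include <stdio.h>

struct bitwriter_sketch { unsigned buf; int valid; };  /* start as {0, 0} */

static void put_bits_sketch(struct bitwriter_sketch *w,
                            unsigned value, int length, FILE *out)
{
    if (w->valid > 16 - length) {
        w->buf |= value << w->valid;
        putc(w->buf & 0xff, out);            /* low byte first, like put_short() */
        putc((w->buf >> 8) & 0xff, out);
        w->buf = value >> (16 - w->valid);   /* bits that did not fit            */
        w->valid += length - 16;
    } else {
        w->buf |= value << w->valid;
        w->valid += length;
    }
}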
2334 | ||
2335 | ||
9bccf70c | 2336 | #ifndef MAX |
1c79356b | 2337 | #define MAX(a,b) (a >= b ? a : b) |
9bccf70c | 2338 | #endif |
1c79356b A |
2339 | /* the arguments must not have side effects */ |
2340 | ||
55e303ae A |
2341 | typedef struct { |
2342 | ct_data static_ltree[L_CODES+2]; | |
2343 | ct_data static_dtree[D_CODES]; | |
2344 | uch _dist_code[DIST_CODE_LEN]; | |
2345 | uch _length_code[MAX_MATCH-MIN_MATCH+1]; | |
2346 | int base_length[LENGTH_CODES]; | |
2347 | int base_dist[D_CODES]; | |
2348 | } __used_to_be_static; | |
2349 | ||
2350 | static __used_to_be_static *static_storage = Z_NULL; | |
2351 | ||
1c79356b | 2352 | /* =========================================================================== |
9bccf70c | 2353 | * Initialize the various 'constant' tables. |
1c79356b | 2354 | */ |
55e303ae A |
2355 | local int tr_static_init( |
2356 | z_streamp z) | |
1c79356b | 2357 | { |
9bccf70c | 2358 | #if defined(GEN_TREES_H) || !defined(STDC) |
1c79356b A |
2359 | static int static_init_done = 0; |
2360 | int n; /* iterates over tree elements */ | |
2361 | int bits; /* bit counter */ | |
2362 | int length; /* length value */ | |
2363 | int code; /* code value */ | |
2364 | int dist; /* distance index */ | |
2365 | ush bl_count[MAX_BITS+1]; | |
2366 | /* number of codes at each bit length for an optimal tree */ | |
2367 | ||
91447636 | 2368 | if (static_init_done) return Z_OK; |
55e303ae A |
2369 | |
2370 | /* allocate storage for static structures */ | |
2371 | if (static_storage == Z_NULL) { | |
2372 | static_storage = (__used_to_be_static*)ZALLOC(z, 1, sizeof(__used_to_be_static)); | |
2373 | if (static_storage == Z_NULL) | |
2374 | return Z_MEM_ERROR; | |
2375 | } | |
2376 | ||
2377 | static_ltree = static_storage->static_ltree; | |
2378 | static_dtree = static_storage->static_dtree; | |
2379 | _dist_code = static_storage->_dist_code; | |
2380 | _length_code = static_storage->_length_code; | |
2381 | base_length = static_storage->base_length; | |
2382 | base_dist = static_storage->base_dist; | |
2383 | ||
9bccf70c A |
2384 | /* For some embedded targets, global variables are not initialized: */ |
2385 | static_l_desc.static_tree = static_ltree; | |
2386 | static_l_desc.extra_bits = extra_lbits; | |
2387 | static_d_desc.static_tree = static_dtree; | |
2388 | static_d_desc.extra_bits = extra_dbits; | |
2389 | static_bl_desc.extra_bits = extra_blbits; | |
2390 | ||
1c79356b A |
2391 | /* Initialize the mapping length (0..255) -> length code (0..28) */ |
2392 | length = 0; | |
2393 | for (code = 0; code < LENGTH_CODES-1; code++) { | |
2394 | base_length[code] = length; | |
2395 | for (n = 0; n < (1<<extra_lbits[code]); n++) { | |
9bccf70c | 2396 | _length_code[length++] = (uch)code; |
1c79356b A |
2397 | } |
2398 | } | |
2399 | Assert (length == 256, "tr_static_init: length != 256"); | |
2400 | /* Note that the length 255 (match length 258) can be represented | |
2401 | * in two different ways: code 284 + 5 bits or code 285, so we | |
2402 | * overwrite length_code[255] to use the best encoding: | |
2403 | */ | |
9bccf70c | 2404 | _length_code[length-1] = (uch)code; |
1c79356b A |
2405 | |
2406 | /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ | |
2407 | dist = 0; | |
2408 | for (code = 0 ; code < 16; code++) { | |
2409 | base_dist[code] = dist; | |
2410 | for (n = 0; n < (1<<extra_dbits[code]); n++) { | |
9bccf70c | 2411 | _dist_code[dist++] = (uch)code; |
1c79356b A |
2412 | } |
2413 | } | |
2414 | Assert (dist == 256, "tr_static_init: dist != 256"); | |
2415 | dist >>= 7; /* from now on, all distances are divided by 128 */ | |
2416 | for ( ; code < D_CODES; code++) { | |
2417 | base_dist[code] = dist << 7; | |
2418 | for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) { | |
9bccf70c | 2419 | _dist_code[256 + dist++] = (uch)code; |
1c79356b A |
2420 | } |
2421 | } | |
2422 | Assert (dist == 256, "tr_static_init: 256+dist != 512"); | |
2423 | ||
2424 | /* Construct the codes of the static literal tree */ | |
2425 | for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0; | |
2426 | n = 0; | |
2427 | while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++; | |
2428 | while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++; | |
2429 | while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++; | |
2430 | while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++; | |
2431 | /* Codes 286 and 287 do not exist, but we must include them in the | |
2432 | * tree construction to get a canonical Huffman tree (longest code | |
2433 | * all ones) | |
2434 | */ | |
2435 | gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count); | |
2436 | ||
2437 | /* The static distance tree is trivial: */ | |
2438 | for (n = 0; n < D_CODES; n++) { | |
2439 | static_dtree[n].Len = 5; | |
2440 | static_dtree[n].Code = bi_reverse((unsigned)n, 5); | |
2441 | } | |
2442 | static_init_done = 1; | |
9bccf70c A |
2443 | |
2444 | # ifdef GEN_TREES_H | |
2445 | gen_trees_header(); | |
2446 | # endif | |
2447 | #endif /* defined(GEN_TREES_H) || !defined(STDC) */ | |
91447636 | 2448 | return Z_OK; |
9bccf70c A |
2449 | } |
2450 | ||
2451 | /* =========================================================================== | |
2452 | * Generate the file trees.h describing the static trees. | |
2453 | */ | |
2454 | #ifdef GEN_TREES_H | |
2455 | # ifndef DEBUG_ZLIB | |
2456 | # include <stdio.h> | |
2457 | # endif | |
2458 | ||
2459 | # define SEPARATOR(i, last, width) \ | |
2460 | ((i) == (last)? "\n};\n\n" : \ | |
2461 | ((i) % (width) == (width)-1 ? ",\n" : ", ")) | |
2462 | ||
2463 | void gen_trees_header() | |
2464 | { | |
2465 | FILE *header = fopen("trees.h", "w"); | |
2466 | int i; | |
2467 | ||
2468 | Assert (header != NULL, "Can't open trees.h"); | |
2469 | fprintf(header, | |
2470 | "/* header created automatically with -DGEN_TREES_H */\n\n"); | |
2471 | ||
2472 | fprintf(header, "local const ct_data static_ltree[L_CODES+2] = {\n"); | |
2473 | for (i = 0; i < L_CODES+2; i++) { | |
2474 | fprintf(header, "{{%3u},{%3u}}%s", static_ltree[i].Code, | |
2475 | static_ltree[i].Len, SEPARATOR(i, L_CODES+1, 5)); | |
2476 | } | |
2477 | ||
2478 | fprintf(header, "local const ct_data static_dtree[D_CODES] = {\n"); | |
2479 | for (i = 0; i < D_CODES; i++) { | |
2480 | fprintf(header, "{{%2u},{%2u}}%s", static_dtree[i].Code, | |
2481 | static_dtree[i].Len, SEPARATOR(i, D_CODES-1, 5)); | |
2482 | } | |
2483 | ||
2484 | fprintf(header, "const uch _dist_code[DIST_CODE_LEN] = {\n"); | |
2485 | for (i = 0; i < DIST_CODE_LEN; i++) { | |
2486 | fprintf(header, "%2u%s", _dist_code[i], | |
2487 | SEPARATOR(i, DIST_CODE_LEN-1, 20)); | |
2488 | } | |
2489 | ||
2490 | fprintf(header, "const uch _length_code[MAX_MATCH-MIN_MATCH+1]= {\n"); | |
2491 | for (i = 0; i < MAX_MATCH-MIN_MATCH+1; i++) { | |
2492 | fprintf(header, "%2u%s", _length_code[i], | |
2493 | SEPARATOR(i, MAX_MATCH-MIN_MATCH, 20)); | |
2494 | } | |
2495 | ||
2496 | fprintf(header, "local const int base_length[LENGTH_CODES] = {\n"); | |
2497 | for (i = 0; i < LENGTH_CODES; i++) { | |
2498 | fprintf(header, "%1u%s", base_length[i], | |
2499 | SEPARATOR(i, LENGTH_CODES-1, 20)); | |
2500 | } | |
2501 | ||
2502 | fprintf(header, "local const int base_dist[D_CODES] = {\n"); | |
2503 | for (i = 0; i < D_CODES; i++) { | |
2504 | fprintf(header, "%5u%s", base_dist[i], | |
2505 | SEPARATOR(i, D_CODES-1, 10)); | |
2506 | } | |
2507 | ||
2508 | fclose(header); | |
1c79356b | 2509 | } |
9bccf70c | 2510 | #endif /* GEN_TREES_H */ |
1c79356b A |
2511 | |
2512 | /* =========================================================================== | |
2513 | * Initialize the tree data structures for a new zlib stream. | |
2514 | */ | |
2515 | void _tr_init(s) | |
2516 | deflate_state *s; | |
2517 | { | |
55e303ae | 2518 | tr_static_init(s->strm); |
1c79356b | 2519 | |
1c79356b A |
2520 | s->l_desc.dyn_tree = s->dyn_ltree; |
2521 | s->l_desc.stat_desc = &static_l_desc; | |
2522 | ||
2523 | s->d_desc.dyn_tree = s->dyn_dtree; | |
2524 | s->d_desc.stat_desc = &static_d_desc; | |
2525 | ||
2526 | s->bl_desc.dyn_tree = s->bl_tree; | |
2527 | s->bl_desc.stat_desc = &static_bl_desc; | |
2528 | ||
2529 | s->bi_buf = 0; | |
2530 | s->bi_valid = 0; | |
2531 | s->last_eob_len = 8; /* enough lookahead for inflate */ | |
2532 | #ifdef DEBUG_ZLIB | |
9bccf70c | 2533 | s->compressed_len = 0L; |
1c79356b A |
2534 | s->bits_sent = 0L; |
2535 | #endif | |
2536 | ||
2537 | /* Initialize the first block of the first file: */ | |
2538 | init_block(s); | |
2539 | } | |
2540 | ||
2541 | /* =========================================================================== | |
2542 | * Initialize a new block. | |
2543 | */ | |
2544 | local void init_block(s) | |
2545 | deflate_state *s; | |
2546 | { | |
2547 | int n; /* iterates over tree elements */ | |
2548 | ||
2549 | /* Initialize the trees. */ | |
2550 | for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0; | |
2551 | for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0; | |
2552 | for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0; | |
2553 | ||
2554 | s->dyn_ltree[END_BLOCK].Freq = 1; | |
2555 | s->opt_len = s->static_len = 0L; | |
2556 | s->last_lit = s->matches = 0; | |
2557 | } | |
2558 | ||
2559 | #define SMALLEST 1 | |
2560 | /* Index within the heap array of least frequent node in the Huffman tree */ | |
2561 | ||
2562 | ||
2563 | /* =========================================================================== | |
2564 | * Remove the smallest element from the heap and recreate the heap with | |
2565 | * one less element. Updates heap and heap_len. | |
2566 | */ | |
2567 | #define pqremove(s, tree, top) \ | |
2568 | {\ | |
2569 | top = s->heap[SMALLEST]; \ | |
2570 | s->heap[SMALLEST] = s->heap[s->heap_len--]; \ | |
2571 | pqdownheap(s, tree, SMALLEST); \ | |
2572 | } | |
2573 | ||
2574 | /* =========================================================================== | |
2575 | * Compares two subtrees, using the tree depth as tie breaker when | |
2576 | * the subtrees have equal frequency. This minimizes the worst case length. | |
2577 | */ | |
2578 | #define smaller(tree, n, m, depth) \ | |
2579 | (tree[n].Freq < tree[m].Freq || \ | |
2580 | (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m])) | |
2581 | ||
2582 | /* =========================================================================== | |
2583 | * Restore the heap property by moving down the tree starting at node k, | |
2584 | * exchanging a node with the smallest of its two sons if necessary, stopping | |
2585 | * when the heap property is re-established (each father smaller than its | |
2586 | * two sons). | |
2587 | */ | |
2588 | local void pqdownheap(s, tree, k) | |
2589 | deflate_state *s; | |
2590 | ct_data *tree; /* the tree to restore */ | |
2591 | int k; /* node to move down */ | |
2592 | { | |
2593 | int v = s->heap[k]; | |
2594 | int j = k << 1; /* left son of k */ | |
2595 | while (j <= s->heap_len) { | |
2596 | /* Set j to the smallest of the two sons: */ | |
2597 | if (j < s->heap_len && | |
2598 | smaller(tree, s->heap[j+1], s->heap[j], s->depth)) { | |
2599 | j++; | |
2600 | } | |
2601 | /* Exit if v is smaller than both sons */ | |
2602 | if (smaller(tree, v, s->heap[j], s->depth)) break; | |
2603 | ||
2604 | /* Exchange v with the smallest son */ | |
2605 | s->heap[k] = s->heap[j]; k = j; | |
2606 | ||
2607 | /* And continue down the tree, setting j to the left son of k */ | |
2608 | j <<= 1; | |
2609 | } | |
2610 | s->heap[k] = v; | |
2611 | } | |
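/*
 * [Editorial illustration -- not part of the original zlib sources.  The
 * ZLIB_EXAMPLES guard is hypothetical and never defined, so this sketch is
 * not compiled.]  The heap above is 1-based: heap[SMALLEST] (heap[1]) holds
 * the least frequent node and the sons of heap[n] are heap[2*n] and
 * heap[2*n+1].  The stripped-down sift-down below shows the same motion as
 * pqdownheap(), keyed on frequency only (no depth tie-break).
 */
#ifdef ZLIB_EXAMPLES
local void example_siftdown(freq, heap, heap_len, k)
    int *freq;          /* freq[node] = frequency of that node */
    int *heap;          /* 1-based array of node indices */
    int heap_len;       /* number of nodes currently in the heap */
    int k;              /* index to sift down from */
{
    int v = heap[k];
    int j = k << 1;                     /* left son of k */
    while (j <= heap_len) {
        /* set j to the smaller of the two sons */
        if (j < heap_len && freq[heap[j+1]] < freq[heap[j]]) j++;
        /* stop once v is no larger than both sons */
        if (freq[v] <= freq[heap[j]]) break;
        heap[k] = heap[j]; k = j;       /* move the smaller son up */
        j <<= 1;
    }
    heap[k] = v;
}
#endif /* ZLIB_EXAMPLES */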
2612 | ||
2613 | /* =========================================================================== | |
2614 | * Compute the optimal bit lengths for a tree and update the total bit length | |
2615 | * for the current block. | |
2616 | * IN assertion: the fields freq and dad are set, heap[heap_max] and | |
2617 | * above are the tree nodes sorted by increasing frequency. | |
2618 | * OUT assertions: the field len is set to the optimal bit length, the | |
2619 | * array bl_count contains the frequencies for each bit length. | |
2620 | * The length opt_len is updated; static_len is also updated if stree is | |
2621 | * not null. | |
2622 | */ | |
2623 | local void gen_bitlen(s, desc) | |
2624 | deflate_state *s; | |
2625 | tree_desc *desc; /* the tree descriptor */ | |
2626 | { | |
9bccf70c A |
2627 | ct_data *tree = desc->dyn_tree; |
2628 | int max_code = desc->max_code; | |
2629 | const ct_data *stree = desc->stat_desc->static_tree; | |
2630 | const intf *extra = desc->stat_desc->extra_bits; | |
2631 | int base = desc->stat_desc->extra_base; | |
2632 | int max_length = desc->stat_desc->max_length; | |
1c79356b A |
2633 | int h; /* heap index */ |
2634 | int n, m; /* iterate over the tree elements */ | |
2635 | int bits; /* bit length */ | |
2636 | int xbits; /* extra bits */ | |
2637 | ush f; /* frequency */ | |
2638 | int overflow = 0; /* number of elements with bit length too large */ | |
2639 | ||
2640 | for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0; | |
2641 | ||
2642 | /* In a first pass, compute the optimal bit lengths (which may | |
2643 | * overflow in the case of the bit length tree). | |
2644 | */ | |
2645 | tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ | |
2646 | ||
2647 | for (h = s->heap_max+1; h < HEAP_SIZE; h++) { | |
2648 | n = s->heap[h]; | |
2649 | bits = tree[tree[n].Dad].Len + 1; | |
2650 | if (bits > max_length) bits = max_length, overflow++; | |
2651 | tree[n].Len = (ush)bits; | |
2652 | /* We overwrite tree[n].Dad which is no longer needed */ | |
2653 | ||
2654 | if (n > max_code) continue; /* not a leaf node */ | |
2655 | ||
2656 | s->bl_count[bits]++; | |
2657 | xbits = 0; | |
2658 | if (n >= base) xbits = extra[n-base]; | |
2659 | f = tree[n].Freq; | |
2660 | s->opt_len += (ulg)f * (bits + xbits); | |
2661 | if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits); | |
2662 | } | |
2663 | if (overflow == 0) return; | |
2664 | ||
2665 | Trace((stderr,"\nbit length overflow\n")); | |
2666 | /* This happens for example on obj2 and pic of the Calgary corpus */ | |
2667 | ||
2668 | /* Find the first bit length which could increase: */ | |
2669 | do { | |
2670 | bits = max_length-1; | |
2671 | while (s->bl_count[bits] == 0) bits--; | |
2672 | s->bl_count[bits]--; /* move one leaf down the tree */ | |
2673 | s->bl_count[bits+1] += 2; /* move one overflow item as its brother */ | |
2674 | s->bl_count[max_length]--; | |
2675 | /* The brother of the overflow item also moves one step up, | |
2676 | * but this does not affect bl_count[max_length] | |
2677 | */ | |
2678 | overflow -= 2; | |
2679 | } while (overflow > 0); | |
2680 | ||
2681 | /* Now recompute all bit lengths, scanning in increasing frequency. | |
2682 | * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all | |
2683 | * lengths instead of fixing only the wrong ones. This idea is taken | |
2684 | * from 'ar' written by Haruhiko Okumura.) | |
2685 | */ | |
2686 | for (bits = max_length; bits != 0; bits--) { | |
2687 | n = s->bl_count[bits]; | |
2688 | while (n != 0) { | |
2689 | m = s->heap[--h]; | |
2690 | if (m > max_code) continue; | |
2691 | if (tree[m].Len != (unsigned) bits) { | |
2692 | Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits)); | |
2693 | s->opt_len += ((long)bits - (long)tree[m].Len) | |
2694 | *(long)tree[m].Freq; | |
2695 | tree[m].Len = (ush)bits; | |
2696 | } | |
2697 | n--; | |
2698 | } | |
2699 | } | |
2700 | } | |
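/*
 * [Editorial illustration -- not part of the original zlib sources; the
 * ZLIB_EXAMPLES guard is hypothetical.]  The repair loop above restores the
 * Kraft inequality after codes longer than max_length have been clamped:
 * each pass moves one leaf from length `bits' down to bits+1 and re-assigns
 * one over-long leaf (counted at max_length) to bits+1 as its brother, i.e.
 * bl_count[bits]--, bl_count[bits+1] += 2, bl_count[max_length]--.  A length
 * histogram is feasible exactly when the check below holds.
 */
#ifdef ZLIB_EXAMPLES
local int example_kraft_ok(bl_count, max_bits)
    ushf *bl_count;     /* bl_count[len] = number of codes of that length */
    int max_bits;       /* longest permitted code length */
{
    ulg sum = 0;
    int len;
    /* sum of 2^(max_bits-len) over all codes must not exceed 2^max_bits */
    for (len = 1; len <= max_bits; len++)
        sum += (ulg)bl_count[len] << (max_bits - len);
    return sum <= ((ulg)1 << max_bits);
}
#endif /* ZLIB_EXAMPLES */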
2701 | ||
2702 | /* =========================================================================== | |
2703 | * Generate the codes for a given tree and bit counts (which need not be | |
2704 | * optimal). | |
2705 | * IN assertion: the array bl_count contains the bit length statistics for | |
2706 | * the given tree and the field len is set for all tree elements. | |
2707 | * OUT assertion: the field code is set for all tree elements of non | |
2708 | * zero code length. | |
2709 | */ | |
2710 | local void gen_codes (tree, max_code, bl_count) | |
2711 | ct_data *tree; /* the tree to decorate */ | |
2712 | int max_code; /* largest code with non zero frequency */ | |
2713 | ushf *bl_count; /* number of codes at each bit length */ | |
2714 | { | |
2715 | ush next_code[MAX_BITS+1]; /* next code value for each bit length */ | |
2716 | ush code = 0; /* running code value */ | |
2717 | int bits; /* bit index */ | |
2718 | int n; /* code index */ | |
2719 | ||
2720 | /* The distribution counts are first used to generate the code values | |
2721 | * without bit reversal. | |
2722 | */ | |
2723 | for (bits = 1; bits <= MAX_BITS; bits++) { | |
2724 | next_code[bits] = code = (code + bl_count[bits-1]) << 1; | |
2725 | } | |
2726 | /* Check that the bit counts in bl_count are consistent. The last code | |
2727 | * must be all ones. | |
2728 | */ | |
2729 | Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1, | |
2730 | "inconsistent bit counts"); | |
2731 | Tracev((stderr,"\ngen_codes: max_code %d ", max_code)); | |
2732 | ||
2733 | for (n = 0; n <= max_code; n++) { | |
2734 | int len = tree[n].Len; | |
2735 | if (len == 0) continue; | |
2736 | /* Now reverse the bits */ | |
2737 | tree[n].Code = bi_reverse(next_code[len]++, len); | |
2738 | ||
2739 | Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ", | |
2740 | n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1)); | |
2741 | } | |
2742 | } | |
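/*
 * [Editorial worked example -- not part of the original zlib sources.]
 * With lengths {B:1, A:2, C:3, D:3} the histogram is bl_count[1]=1,
 * bl_count[2]=1, bl_count[3]=2, so the loop above computes next_code[1]=0,
 * next_code[2]=(0+1)<<1=2 (binary 10) and next_code[3]=(2+1)<<1=6 (binary
 * 110).  The canonical codes are therefore B=0, A=10, C=110, D=111; each is
 * then bit-reversed (0, 01, 011, 111) because deflate emits Huffman codes
 * least-significant bit first.
 */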
2743 | ||
2744 | /* =========================================================================== | |
2745 | * Construct one Huffman tree and assign the code bit strings and lengths. | |
2746 | * Update the total bit length for the current block. | |
2747 | * IN assertion: the field freq is set for all tree elements. | |
2748 | * OUT assertions: the fields len and code are set to the optimal bit length | |
2749 | * and corresponding code. The length opt_len is updated; static_len is | |
2750 | * also updated if stree is not null. The field max_code is set. | |
2751 | */ | |
2752 | local void build_tree(s, desc) | |
2753 | deflate_state *s; | |
2754 | tree_desc *desc; /* the tree descriptor */ | |
2755 | { | |
9bccf70c A |
2756 | ct_data *tree = desc->dyn_tree; |
2757 | const ct_data *stree = desc->stat_desc->static_tree; | |
2758 | int elems = desc->stat_desc->elems; | |
1c79356b A |
2759 | int n, m; /* iterate over heap elements */ |
2760 | int max_code = -1; /* largest code with non zero frequency */ | |
2761 | int node; /* new node being created */ | |
2762 | ||
2763 | /* Construct the initial heap, with least frequent element in | |
2764 | * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. | |
2765 | * heap[0] is not used. | |
2766 | */ | |
2767 | s->heap_len = 0, s->heap_max = HEAP_SIZE; | |
2768 | ||
2769 | for (n = 0; n < elems; n++) { | |
2770 | if (tree[n].Freq != 0) { | |
2771 | s->heap[++(s->heap_len)] = max_code = n; | |
2772 | s->depth[n] = 0; | |
2773 | } else { | |
2774 | tree[n].Len = 0; | |
2775 | } | |
2776 | } | |
2777 | ||
2778 | /* The pkzip format requires that at least one distance code exists, | |
2779 | * and that at least one bit should be sent even if there is only one | |
2780 | * possible code. So to avoid special checks later on we force at least | |
2781 | * two codes of non zero frequency. | |
2782 | */ | |
2783 | while (s->heap_len < 2) { | |
2784 | node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0); | |
2785 | tree[node].Freq = 1; | |
2786 | s->depth[node] = 0; | |
2787 | s->opt_len--; if (stree) s->static_len -= stree[node].Len; | |
2788 | /* node is 0 or 1 so it does not have extra bits */ | |
2789 | } | |
2790 | desc->max_code = max_code; | |
2791 | ||
2792 | /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, | |
2793 | * establish sub-heaps of increasing lengths: | |
2794 | */ | |
2795 | for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n); | |
2796 | ||
2797 | /* Construct the Huffman tree by repeatedly combining the least two | |
2798 | * frequent nodes. | |
2799 | */ | |
2800 | node = elems; /* next internal node of the tree */ | |
2801 | do { | |
2802 | pqremove(s, tree, n); /* n = node of least frequency */ | |
2803 | m = s->heap[SMALLEST]; /* m = node of next least frequency */ | |
2804 | ||
2805 | s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */ | |
2806 | s->heap[--(s->heap_max)] = m; | |
2807 | ||
2808 | /* Create a new node father of n and m */ | |
2809 | tree[node].Freq = tree[n].Freq + tree[m].Freq; | |
2810 | s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1); | |
2811 | tree[n].Dad = tree[m].Dad = (ush)node; | |
2812 | #ifdef DUMP_BL_TREE | |
2813 | if (tree == s->bl_tree) { | |
2814 | fprintf(stderr,"\nnode %d(%d), sons %d(%d) %d(%d)", | |
2815 | node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq); | |
2816 | } | |
2817 | #endif | |
2818 | /* and insert the new node in the heap */ | |
2819 | s->heap[SMALLEST] = node++; | |
2820 | pqdownheap(s, tree, SMALLEST); | |
2821 | ||
2822 | } while (s->heap_len >= 2); | |
2823 | ||
2824 | s->heap[--(s->heap_max)] = s->heap[SMALLEST]; | |
2825 | ||
2826 | /* At this point, the fields freq and dad are set. We can now | |
2827 | * generate the bit lengths. | |
2828 | */ | |
2829 | gen_bitlen(s, (tree_desc *)desc); | |
2830 | ||
2831 | /* The field len is now set, we can generate the bit codes */ | |
2832 | gen_codes ((ct_data *)tree, max_code, s->bl_count); | |
2833 | } | |
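/*
 * [Editorial worked example -- not part of the original zlib sources.]
 * For frequencies A:5, B:2, C:1, D:1 the loop above first combines C and D
 * into an internal node of frequency 2, then that node with B (frequency 4),
 * then the result with A (frequency 9).  gen_bitlen() then derives the code
 * lengths A=1, B=2, C=3, D=3, for a total of 5*1 + 2*2 + 1*3 + 1*3 = 15 bits
 * of code data.
 */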
2834 | ||
2835 | /* =========================================================================== | |
2836 | * Scan a literal or distance tree to determine the frequencies of the codes | |
2837 | * in the bit length tree. | |
2838 | */ | |
2839 | local void scan_tree (s, tree, max_code) | |
2840 | deflate_state *s; | |
2841 | ct_data *tree; /* the tree to be scanned */ | |
2842 | int max_code; /* and its largest code of non zero frequency */ | |
2843 | { | |
2844 | int n; /* iterates over all tree elements */ | |
2845 | int prevlen = -1; /* last emitted length */ | |
2846 | int curlen; /* length of current code */ | |
2847 | int nextlen = tree[0].Len; /* length of next code */ | |
2848 | int count = 0; /* repeat count of the current code */ | |
2849 | int max_count = 7; /* max repeat count */ | |
2850 | int min_count = 4; /* min repeat count */ | |
2851 | ||
2852 | if (nextlen == 0) max_count = 138, min_count = 3; | |
2853 | tree[max_code+1].Len = (ush)0xffff; /* guard */ | |
2854 | ||
2855 | for (n = 0; n <= max_code; n++) { | |
2856 | curlen = nextlen; nextlen = tree[n+1].Len; | |
2857 | if (++count < max_count && curlen == nextlen) { | |
2858 | continue; | |
2859 | } else if (count < min_count) { | |
2860 | s->bl_tree[curlen].Freq += count; | |
2861 | } else if (curlen != 0) { | |
2862 | if (curlen != prevlen) s->bl_tree[curlen].Freq++; | |
2863 | s->bl_tree[REP_3_6].Freq++; | |
2864 | } else if (count <= 10) { | |
2865 | s->bl_tree[REPZ_3_10].Freq++; | |
2866 | } else { | |
2867 | s->bl_tree[REPZ_11_138].Freq++; | |
2868 | } | |
2869 | count = 0; prevlen = curlen; | |
2870 | if (nextlen == 0) { | |
2871 | max_count = 138, min_count = 3; | |
2872 | } else if (curlen == nextlen) { | |
2873 | max_count = 6, min_count = 3; | |
2874 | } else { | |
2875 | max_count = 7, min_count = 4; | |
2876 | } | |
2877 | } | |
2878 | } | |
2879 | ||
2880 | /* =========================================================================== | |
2881 | * Send a literal or distance tree in compressed form, using the codes in | |
2882 | * bl_tree. | |
2883 | */ | |
2884 | local void send_tree (s, tree, max_code) | |
2885 | deflate_state *s; | |
2886 | ct_data *tree; /* the tree to be scanned */ | |
2887 | int max_code; /* and its largest code of non zero frequency */ | |
2888 | { | |
2889 | int n; /* iterates over all tree elements */ | |
2890 | int prevlen = -1; /* last emitted length */ | |
2891 | int curlen; /* length of current code */ | |
2892 | int nextlen = tree[0].Len; /* length of next code */ | |
2893 | int count = 0; /* repeat count of the current code */ | |
2894 | int max_count = 7; /* max repeat count */ | |
2895 | int min_count = 4; /* min repeat count */ | |
2896 | ||
2897 | /* tree[max_code+1].Len = -1; */ /* guard already set */ | |
2898 | if (nextlen == 0) max_count = 138, min_count = 3; | |
2899 | ||
2900 | for (n = 0; n <= max_code; n++) { | |
2901 | curlen = nextlen; nextlen = tree[n+1].Len; | |
2902 | if (++count < max_count && curlen == nextlen) { | |
2903 | continue; | |
2904 | } else if (count < min_count) { | |
2905 | do { send_code(s, curlen, s->bl_tree); } while (--count != 0); | |
2906 | ||
2907 | } else if (curlen != 0) { | |
2908 | if (curlen != prevlen) { | |
2909 | send_code(s, curlen, s->bl_tree); count--; | |
2910 | } | |
2911 | Assert(count >= 3 && count <= 6, " 3_6?"); | |
2912 | send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2); | |
2913 | ||
2914 | } else if (count <= 10) { | |
2915 | send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3); | |
2916 | ||
2917 | } else { | |
2918 | send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7); | |
2919 | } | |
2920 | count = 0; prevlen = curlen; | |
2921 | if (nextlen == 0) { | |
2922 | max_count = 138, min_count = 3; | |
2923 | } else if (curlen == nextlen) { | |
2924 | max_count = 6, min_count = 3; | |
2925 | } else { | |
2926 | max_count = 7, min_count = 4; | |
2927 | } | |
2928 | } | |
2929 | } | |
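/*
 * [Editorial worked example -- not part of the original zlib sources.]
 * scan_tree() and send_tree() run-length encode the code lengths with the
 * three repeat symbols.  For the length sequence 4,4,4,4,4 followed by
 * thirteen 0s and a single 6, send_tree() emits: the code for 4 once, then
 * REP_3_6 with 2 extra bits holding 4-3=1 (four more 4s), then REPZ_11_138
 * with 7 extra bits holding 13-11=2 (the zero run), then the code for 6.
 */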
2930 | ||
2931 | /* =========================================================================== | |
2932 | * Construct the Huffman tree for the bit lengths and return the index in | |
2933 | * bl_order of the last bit length code to send. | |
2934 | */ | |
2935 | local int build_bl_tree(s) | |
2936 | deflate_state *s; | |
2937 | { | |
2938 | int max_blindex; /* index of last bit length code of non zero freq */ | |
2939 | ||
2940 | /* Determine the bit length frequencies for literal and distance trees */ | |
2941 | scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code); | |
2942 | scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code); | |
2943 | ||
2944 | /* Build the bit length tree: */ | |
2945 | build_tree(s, (tree_desc *)(&(s->bl_desc))); | |
2946 | /* opt_len now includes the length of the tree representations, except | |
2947 | * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. | |
2948 | */ | |
2949 | ||
2950 | /* Determine the number of bit length codes to send. The pkzip format | |
2951 | * requires that at least 4 bit length codes be sent. (appnote.txt says | |
2952 | * 3 but the actual value used is 4.) | |
2953 | */ | |
2954 | for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) { | |
2955 | if (s->bl_tree[bl_order[max_blindex]].Len != 0) break; | |
2956 | } | |
2957 | /* Update opt_len to include the bit length tree and counts */ | |
2958 | s->opt_len += 3*(max_blindex+1) + 5+5+4; | |
2959 | Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", | |
2960 | s->opt_len, s->static_len)); | |
2961 | ||
2962 | return max_blindex; | |
2963 | } | |
2964 | ||
2965 | /* =========================================================================== | |
2966 | * Send the header for a block using dynamic Huffman trees: the counts, the | |
2967 | * lengths of the bit length codes, the literal tree and the distance tree. | |
2968 | * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. | |
2969 | */ | |
2970 | local void send_all_trees(s, lcodes, dcodes, blcodes) | |
2971 | deflate_state *s; | |
2972 | int lcodes, dcodes, blcodes; /* number of codes for each tree */ | |
2973 | { | |
2974 | int rank; /* index in bl_order */ | |
2975 | ||
2976 | Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); | |
2977 | Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, | |
2978 | "too many codes"); | |
2979 | Tracev((stderr, "\nbl counts: ")); | |
2980 | send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */ | |
2981 | send_bits(s, dcodes-1, 5); | |
2982 | send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */ | |
2983 | for (rank = 0; rank < blcodes; rank++) { | |
2984 | Tracev((stderr, "\nbl code %2d ", bl_order[rank])); | |
2985 | send_bits(s, s->bl_tree[bl_order[rank]].Len, 3); | |
2986 | } | |
2987 | Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent)); | |
2988 | ||
2989 | send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */ | |
2990 | Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent)); | |
2991 | ||
2992 | send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */ | |
2993 | Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent)); | |
2994 | } | |
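/*
 * [Editorial illustration -- not part of the original zlib sources; the
 * ZLIB_EXAMPLES guard is hypothetical.]  The dynamic-block header sent above
 * is: 5 bits HLIT (lcodes-257), 5 bits HDIST (dcodes-1), 4 bits HCLEN
 * (blcodes-4), then 3 bits per transmitted bit-length code in bl_order.
 * The fixed part of that cost is the same 3*blcodes + 5+5+4 bits that
 * build_bl_tree() folds into s->opt_len.
 */
#ifdef ZLIB_EXAMPLES
local ulg example_tree_header_bits(blcodes)
    int blcodes;        /* number of bit length codes actually sent */
{
    return 3 * (ulg)blcodes + 5 + 5 + 4;
}
#endif /* ZLIB_EXAMPLES */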
2995 | ||
2996 | /* =========================================================================== | |
2997 | * Send a stored block | |
2998 | */ | |
2999 | void _tr_stored_block(s, buf, stored_len, eof) | |
3000 | deflate_state *s; | |
3001 | charf *buf; /* input block */ | |
3002 | ulg stored_len; /* length of input block */ | |
3003 | int eof; /* true if this is the last block for a file */ | |
3004 | { | |
3005 | send_bits(s, (STORED_BLOCK<<1)+eof, 3); /* send block type */ | |
9bccf70c | 3006 | #ifdef DEBUG_ZLIB |
1c79356b A |
3007 | s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L; |
3008 | s->compressed_len += (stored_len + 4) << 3; | |
9bccf70c | 3009 | #endif |
1c79356b A |
3010 | copy_block(s, buf, (unsigned)stored_len, 1); /* with header */ |
3011 | } | |
3012 | ||
1c79356b A |
3013 | /* =========================================================================== |
3014 | * Send one empty static block to give enough lookahead for inflate. | |
3015 | * This takes 10 bits, of which 7 may remain in the bit buffer. | |
3016 | * The current inflate code requires 9 bits of lookahead. If the | |
3017 | * last two codes for the previous block (real code plus EOB) were coded | |
3018 | * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode | |
3019 | * the last real code. In this case we send two empty static blocks instead | |
3020 | * of one. (There are no problems if the previous block is stored or fixed.) | |
3021 | * To simplify the code, we assume the worst case of last real code encoded | |
3022 | * on one bit only. | |
3023 | */ | |
3024 | void _tr_align(s) | |
3025 | deflate_state *s; | |
3026 | { | |
3027 | send_bits(s, STATIC_TREES<<1, 3); | |
3028 | send_code(s, END_BLOCK, static_ltree); | |
9bccf70c | 3029 | #ifdef DEBUG_ZLIB |
1c79356b | 3030 | s->compressed_len += 10L; /* 3 for block type, 7 for EOB */ |
9bccf70c | 3031 | #endif |
1c79356b A |
3032 | bi_flush(s); |
3033 | /* Of the 10 bits for the empty block, we have already sent | |
3034 | * (10 - bi_valid) bits. The lookahead for the last real code (before | |
3035 | * the EOB of the previous block) was thus at least one plus the length | |
3036 | * of the EOB plus what we have just sent of the empty static block. | |
3037 | */ | |
3038 | if (1 + s->last_eob_len + 10 - s->bi_valid < 9) { | |
3039 | send_bits(s, STATIC_TREES<<1, 3); | |
3040 | send_code(s, END_BLOCK, static_ltree); | |
9bccf70c | 3041 | #ifdef DEBUG_ZLIB |
1c79356b | 3042 | s->compressed_len += 10L; |
9bccf70c | 3043 | #endif |
1c79356b A |
3044 | bi_flush(s); |
3045 | } | |
3046 | s->last_eob_len = 7; | |
3047 | } | |
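/*
 * [Editorial worked example -- not part of the original zlib sources.]
 * Instantiating the check above: with the worst-case assumption
 * last_eob_len = 1 and bi_valid = 7 after the flush, the decoder lookahead
 * is 1 + 1 + 10 - 7 = 5 bits, less than the 9 bits inflate needs, so a
 * second empty static block is sent.  With bi_valid = 2 the lookahead is
 * 1 + 1 + 10 - 2 = 10 bits and one block is enough.
 */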
3048 | ||
3049 | /* =========================================================================== | |
3050 | * Determine the best encoding for the current block: dynamic trees, static | |
9bccf70c | 3051 | * trees or store, and output the encoded block to the zip file. |
1c79356b | 3052 | */ |
9bccf70c | 3053 | void _tr_flush_block(s, buf, stored_len, eof) |
1c79356b A |
3054 | deflate_state *s; |
3055 | charf *buf; /* input block, or NULL if too old */ | |
3056 | ulg stored_len; /* length of input block */ | |
3057 | int eof; /* true if this is the last block for a file */ | |
3058 | { | |
3059 | ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */ | |
3060 | int max_blindex = 0; /* index of last bit length code of non zero freq */ | |
3061 | ||
3062 | /* Build the Huffman trees unless a stored block is forced */ | |
3063 | if (s->level > 0) { | |
3064 | ||
3065 | /* Check if the file is ascii or binary */ | |
3066 | if (s->data_type == Z_UNKNOWN) set_data_type(s); | |
3067 | ||
3068 | /* Construct the literal and distance trees */ | |
3069 | build_tree(s, (tree_desc *)(&(s->l_desc))); | |
3070 | Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len, | |
3071 | s->static_len)); | |
3072 | ||
3073 | build_tree(s, (tree_desc *)(&(s->d_desc))); | |
3074 | Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len, | |
3075 | s->static_len)); | |
3076 | /* At this point, opt_len and static_len are the total bit lengths of | |
3077 | * the compressed block data, excluding the tree representations. | |
3078 | */ | |
3079 | ||
3080 | /* Build the bit length tree for the above two trees, and get the index | |
3081 | * in bl_order of the last bit length code to send. | |
3082 | */ | |
3083 | max_blindex = build_bl_tree(s); | |
3084 | ||
3085 | /* Determine the best encoding. Compute first the block length in bytes*/ | |
3086 | opt_lenb = (s->opt_len+3+7)>>3; | |
3087 | static_lenb = (s->static_len+3+7)>>3; | |
3088 | ||
3089 | Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ", | |
3090 | opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len, | |
3091 | s->last_lit)); | |
3092 | ||
3093 | if (static_lenb <= opt_lenb) opt_lenb = static_lenb; | |
3094 | ||
3095 | } else { | |
3096 | Assert(buf != (char*)0, "lost buf"); | |
3097 | opt_lenb = static_lenb = stored_len + 5; /* force a stored block */ | |
3098 | } | |
3099 | ||
1c79356b A |
3100 | #ifdef FORCE_STORED |
3101 | if (buf != (char*)0) { /* force stored block */ | |
3102 | #else | |
3103 | if (stored_len+4 <= opt_lenb && buf != (char*)0) { | |
3104 | /* 4: two words for the lengths */ | |
3105 | #endif | |
3106 | /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. | |
3107 | * Otherwise we can't have processed more than WSIZE input bytes since | |
3108 | * the last block flush, because compression would have been | |
3109 | * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to | |
3110 | * transform a block into a stored block. | |
3111 | */ | |
3112 | _tr_stored_block(s, buf, stored_len, eof); | |
3113 | ||
3114 | #ifdef FORCE_STATIC | |
3115 | } else if (static_lenb >= 0) { /* force static trees */ | |
3116 | #else | |
3117 | } else if (static_lenb == opt_lenb) { | |
3118 | #endif | |
3119 | send_bits(s, (STATIC_TREES<<1)+eof, 3); | |
3120 | compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree); | |
9bccf70c | 3121 | #ifdef DEBUG_ZLIB |
1c79356b | 3122 | s->compressed_len += 3 + s->static_len; |
9bccf70c | 3123 | #endif |
1c79356b A |
3124 | } else { |
3125 | send_bits(s, (DYN_TREES<<1)+eof, 3); | |
3126 | send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1, | |
3127 | max_blindex+1); | |
3128 | compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree); | |
9bccf70c | 3129 | #ifdef DEBUG_ZLIB |
1c79356b | 3130 | s->compressed_len += 3 + s->opt_len; |
9bccf70c | 3131 | #endif |
1c79356b A |
3132 | } |
3133 | Assert (s->compressed_len == s->bits_sent, "bad compressed size"); | |
9bccf70c A |
3134 | /* The above check is made mod 2^32, for files larger than 512 MB |
3135 | * and uLong implemented on 32 bits. | |
3136 | */ | |
1c79356b A |
3137 | init_block(s); |
3138 | ||
3139 | if (eof) { | |
3140 | bi_windup(s); | |
9bccf70c | 3141 | #ifdef DEBUG_ZLIB |
1c79356b | 3142 | s->compressed_len += 7; /* align on byte boundary */ |
9bccf70c | 3143 | #endif |
1c79356b A |
3144 | } |
3145 | Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3, | |
3146 | s->compressed_len-7*eof)); | |
1c79356b A |
3147 | } |
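/*
 * [Editorial worked example -- not part of the original zlib sources.]
 * The choice above is made on whole-block byte counts.  For instance, with
 * opt_lenb = 1000, static_lenb = 1200 and stored_len = 3000, the dynamic
 * trees win (1000 is smallest).  If instead stored_len were 900, then
 * stored_len+4 = 904 <= opt_lenb and the block would be emitted stored,
 * provided the original bytes are still available (buf != NULL).
 */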
3148 | ||
3149 | /* =========================================================================== | |
3150 | * Save the match info and tally the frequency counts. Return true if | |
3151 | * the current block must be flushed. | |
3152 | */ | |
3153 | int _tr_tally (s, dist, lc) | |
3154 | deflate_state *s; | |
3155 | unsigned dist; /* distance of matched string */ | |
3156 | unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */ | |
3157 | { | |
3158 | s->d_buf[s->last_lit] = (ush)dist; | |
3159 | s->l_buf[s->last_lit++] = (uch)lc; | |
3160 | if (dist == 0) { | |
3161 | /* lc is the unmatched char */ | |
3162 | s->dyn_ltree[lc].Freq++; | |
3163 | } else { | |
3164 | s->matches++; | |
3165 | /* Here, lc is the match length - MIN_MATCH */ | |
3166 | dist--; /* dist = match distance - 1 */ | |
3167 | Assert((ush)dist < (ush)MAX_DIST(s) && | |
3168 | (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) && | |
3169 | (ush)d_code(dist) < (ush)D_CODES, "_tr_tally: bad match"); | |
3170 | ||
9bccf70c | 3171 | s->dyn_ltree[_length_code[lc]+LITERALS+1].Freq++; |
1c79356b A |
3172 | s->dyn_dtree[d_code(dist)].Freq++; |
3173 | } | |
3174 | ||
9bccf70c | 3175 | #ifdef TRUNCATE_BLOCK |
1c79356b | 3176 | /* Try to guess if it is profitable to stop the current block here */ |
9bccf70c | 3177 | if ((s->last_lit & 0x1fff) == 0 && s->level > 2) { |
1c79356b A |
3178 | /* Compute an upper bound for the compressed length */ |
3179 | ulg out_length = (ulg)s->last_lit*8L; | |
3180 | ulg in_length = (ulg)((long)s->strstart - s->block_start); | |
3181 | int dcode; | |
3182 | for (dcode = 0; dcode < D_CODES; dcode++) { | |
3183 | out_length += (ulg)s->dyn_dtree[dcode].Freq * | |
3184 | (5L+extra_dbits[dcode]); | |
3185 | } | |
3186 | out_length >>= 3; | |
3187 | Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ", | |
3188 | s->last_lit, in_length, out_length, | |
3189 | 100L - out_length*100L/in_length)); | |
3190 | if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1; | |
3191 | } | |
9bccf70c | 3192 | #endif |
1c79356b A |
3193 | return (s->last_lit == s->lit_bufsize-1); |
3194 | /* We avoid equality with lit_bufsize because of wraparound at 64K | |
3195 | * on 16 bit machines and because stored blocks are restricted to | |
3196 | * 64K-1 bytes. | |
3197 | */ | |
3198 | } | |
3199 | ||
3200 | /* =========================================================================== | |
3201 | * Send the block data compressed using the given Huffman trees | |
3202 | */ | |
3203 | local void compress_block(s, ltree, dtree) | |
3204 | deflate_state *s; | |
3205 | ct_data *ltree; /* literal tree */ | |
3206 | ct_data *dtree; /* distance tree */ | |
3207 | { | |
3208 | unsigned dist; /* distance of matched string */ | |
3209 | int lc; /* match length or unmatched char (if dist == 0) */ | |
3210 | unsigned lx = 0; /* running index in l_buf */ | |
3211 | unsigned code; /* the code to send */ | |
3212 | int extra; /* number of extra bits to send */ | |
3213 | ||
3214 | if (s->last_lit != 0) do { | |
3215 | dist = s->d_buf[lx]; | |
3216 | lc = s->l_buf[lx++]; | |
3217 | if (dist == 0) { | |
3218 | send_code(s, lc, ltree); /* send a literal byte */ | |
3219 | Tracecv(isgraph(lc), (stderr," '%c' ", lc)); | |
3220 | } else { | |
3221 | /* Here, lc is the match length - MIN_MATCH */ | |
9bccf70c | 3222 | code = _length_code[lc]; |
1c79356b A |
3223 | send_code(s, code+LITERALS+1, ltree); /* send the length code */ |
3224 | extra = extra_lbits[code]; | |
3225 | if (extra != 0) { | |
3226 | lc -= base_length[code]; | |
3227 | send_bits(s, lc, extra); /* send the extra length bits */ | |
3228 | } | |
3229 | dist--; /* dist is now the match distance - 1 */ | |
3230 | code = d_code(dist); | |
3231 | Assert (code < D_CODES, "bad d_code"); | |
3232 | ||
3233 | send_code(s, code, dtree); /* send the distance code */ | |
3234 | extra = extra_dbits[code]; | |
3235 | if (extra != 0) { | |
3236 | dist -= base_dist[code]; | |
3237 | send_bits(s, dist, extra); /* send the extra distance bits */ | |
3238 | } | |
3239 | } /* literal or match pair ? */ | |
3240 | ||
3241 | /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */ | |
3242 | Assert(s->pending < s->lit_bufsize + 2*lx, "pendingBuf overflow"); | |
3243 | ||
3244 | } while (lx < s->last_lit); | |
3245 | ||
3246 | send_code(s, END_BLOCK, ltree); | |
3247 | s->last_eob_len = ltree[END_BLOCK].Len; | |
3248 | } | |
3249 | ||
3250 | /* =========================================================================== | |
3251 | * Set the data type to ASCII or BINARY, using a crude approximation: | |
3252 | * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise. | |
3253 | * IN assertion: the fields freq of dyn_ltree are set and the total of all | |
3254 | * frequencies does not exceed 64K (to fit in an int on 16 bit machines). | |
3255 | */ | |
3256 | local void set_data_type(s) | |
3257 | deflate_state *s; | |
3258 | { | |
3259 | int n = 0; | |
3260 | unsigned ascii_freq = 0; | |
3261 | unsigned bin_freq = 0; | |
3262 | while (n < 7) bin_freq += s->dyn_ltree[n++].Freq; | |
3263 | while (n < 128) ascii_freq += s->dyn_ltree[n++].Freq; | |
3264 | while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq; | |
3265 | s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? Z_BINARY : Z_ASCII); | |
3266 | } | |
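/*
 * [Editorial worked example -- not part of the original zlib sources.]
 * The test above marks the data binary when bin_freq > ascii_freq/4, i.e.
 * when more than 1/5 of the counted literals are "binary" bytes.  For
 * example, bin_freq = 100 and ascii_freq = 350 gives 100 > 87 (about 22%
 * binary bytes), so data_type = Z_BINARY; bin_freq = 50 against the same
 * ascii_freq stays Z_ASCII.
 */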
3267 | ||
3268 | /* =========================================================================== | |
3269 | * Reverse the first len bits of a code, using straightforward code (a faster | |
3270 | * method would use a table) | |
3271 | * IN assertion: 1 <= len <= 15 | |
3272 | */ | |
3273 | local unsigned bi_reverse(code, len) | |
3274 | unsigned code; /* the value to invert */ | |
3275 | int len; /* its bit length */ | |
3276 | { | |
3277 | register unsigned res = 0; | |
3278 | do { | |
3279 | res |= code & 1; | |
3280 | code >>= 1, res <<= 1; | |
3281 | } while (--len > 0); | |
3282 | return res >> 1; | |
3283 | } | |
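/*
 * [Editorial worked example -- not part of the original zlib sources.]
 * bi_reverse(0x6, 3) walks 110 -> 011 and returns 0x3: deflate builds
 * canonical codes most-significant bit first but transmits them
 * least-significant bit first, so every code is reversed once, in
 * gen_codes(), before being sent.
 */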
3284 | ||
3285 | /* =========================================================================== | |
3286 | * Flush the bit buffer, keeping at most 7 bits in it. | |
3287 | */ | |
3288 | local void bi_flush(s) | |
3289 | deflate_state *s; | |
3290 | { | |
3291 | if (s->bi_valid == 16) { | |
3292 | put_short(s, s->bi_buf); | |
3293 | s->bi_buf = 0; | |
3294 | s->bi_valid = 0; | |
3295 | } else if (s->bi_valid >= 8) { | |
3296 | put_byte(s, (Byte)s->bi_buf); | |
3297 | s->bi_buf >>= 8; | |
3298 | s->bi_valid -= 8; | |
3299 | } | |
3300 | } | |
3301 | ||
3302 | /* =========================================================================== | |
3303 | * Flush the bit buffer and align the output on a byte boundary | |
3304 | */ | |
3305 | local void bi_windup(s) | |
3306 | deflate_state *s; | |
3307 | { | |
3308 | if (s->bi_valid > 8) { | |
3309 | put_short(s, s->bi_buf); | |
3310 | } else if (s->bi_valid > 0) { | |
3311 | put_byte(s, (Byte)s->bi_buf); | |
3312 | } | |
3313 | s->bi_buf = 0; | |
3314 | s->bi_valid = 0; | |
3315 | #ifdef DEBUG_ZLIB | |
3316 | s->bits_sent = (s->bits_sent+7) & ~7; | |
3317 | #endif | |
3318 | } | |
3319 | ||
3320 | /* =========================================================================== | |
3321 | * Copy a stored block, storing first the length and its | |
3322 | * one's complement if requested. | |
3323 | */ | |
3324 | local void copy_block(s, buf, len, header) | |
3325 | deflate_state *s; | |
3326 | charf *buf; /* the input data */ | |
3327 | unsigned len; /* its length */ | |
3328 | int header; /* true if block header must be written */ | |
3329 | { | |
3330 | bi_windup(s); /* align on byte boundary */ | |
3331 | s->last_eob_len = 8; /* enough lookahead for inflate */ | |
3332 | ||
3333 | if (header) { | |
3334 | put_short(s, (ush)len); | |
3335 | put_short(s, (ush)~len); | |
3336 | #ifdef DEBUG_ZLIB | |
3337 | s->bits_sent += 2*16; | |
3338 | #endif | |
3339 | } | |
3340 | #ifdef DEBUG_ZLIB | |
3341 | s->bits_sent += (ulg)len<<3; | |
3342 | #endif | |
9bccf70c A |
3343 | while (len--) { |
3344 | put_byte(s, *buf++); | |
3345 | } | |
1c79356b A |
3346 | } |
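/*
 * [Editorial worked example -- not part of the original zlib sources.]
 * When header is true, copy_block() writes the stored-block length fields
 * required by the deflate format: LEN and NLEN (its one's complement) as
 * two little-endian 16-bit words.  For len = 5 the bytes emitted before the
 * data are 05 00 FA FF, since ~0x0005 == 0xFFFA.
 */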
3347 | /* --- trees.c */ | |
3348 | ||
3349 | /* +++ inflate.c */ | |
3350 | /* inflate.c -- zlib interface to inflate modules | |
9bccf70c | 3351 | * Copyright (C) 1995-2002 Mark Adler |
1c79356b A |
3352 | * For conditions of distribution and use, see copyright notice in zlib.h |
3353 | */ | |
3354 | ||
3355 | /* #include "zutil.h" */ | |
3356 | ||
3357 | /* +++ infblock.h */ | |
3358 | /* infblock.h -- header to use infblock.c | |
9bccf70c | 3359 | * Copyright (C) 1995-2002 Mark Adler |
1c79356b A |
3360 | * For conditions of distribution and use, see copyright notice in zlib.h |
3361 | */ | |
3362 | ||
3363 | /* WARNING: this file should *not* be used by applications. It is | |
3364 | part of the implementation of the compression library and is | |
3365 | subject to change. Applications should only use zlib.h. | |
3366 | */ | |
3367 | ||
3368 | struct inflate_blocks_state; | |
3369 | typedef struct inflate_blocks_state FAR inflate_blocks_statef; | |
3370 | ||
3371 | extern inflate_blocks_statef * inflate_blocks_new OF(( | |
3372 | z_streamp z, | |
3373 | check_func c, /* check function */ | |
3374 | uInt w)); /* window size */ | |
3375 | ||
3376 | extern int inflate_blocks OF(( | |
3377 | inflate_blocks_statef *, | |
3378 | z_streamp , | |
3379 | int)); /* initial return code */ | |
3380 | ||
3381 | extern void inflate_blocks_reset OF(( | |
3382 | inflate_blocks_statef *, | |
3383 | z_streamp , | |
3384 | uLongf *)); /* check value on output */ | |
3385 | ||
3386 | extern int inflate_blocks_free OF(( | |
3387 | inflate_blocks_statef *, | |
9bccf70c | 3388 | z_streamp)); |
1c79356b A |
3389 | |
3390 | extern void inflate_set_dictionary OF(( | |
3391 | inflate_blocks_statef *s, | |
3392 | const Bytef *d, /* dictionary */ | |
3393 | uInt n)); /* dictionary length */ | |
3394 | ||
9bccf70c A |
3395 | extern int inflate_blocks_sync_point OF(( |
3396 | inflate_blocks_statef *s)); | |
1c79356b A |
3397 | /* --- infblock.h */ |
3398 | ||
3399 | #ifndef NO_DUMMY_DECL | |
3400 | struct inflate_blocks_state {int dummy;}; /* for buggy compilers */ | |
3401 | #endif | |
3402 | ||
3403 | /* inflate private state */ | |
9bccf70c | 3404 | typedef struct inflate_state { |
1c79356b A |
3405 | |
3406 | /* mode */ | |
3407 | enum { | |
3408 | METHOD, /* waiting for method byte */ | |
3409 | FLAG, /* waiting for flag byte */ | |
3410 | DICT4, /* four dictionary check bytes to go */ | |
3411 | DICT3, /* three dictionary check bytes to go */ | |
3412 | DICT2, /* two dictionary check bytes to go */ | |
3413 | DICT1, /* one dictionary check byte to go */ | |
3414 | DICT0, /* waiting for inflateSetDictionary */ | |
3415 | BLOCKS, /* decompressing blocks */ | |
3416 | CHECK4, /* four check bytes to go */ | |
3417 | CHECK3, /* three check bytes to go */ | |
3418 | CHECK2, /* two check bytes to go */ | |
3419 | CHECK1, /* one check byte to go */ | |
3420 | DONE, /* finished check, done */ | |
3421 | BAD} /* got an error--stay here */ | |
3422 | mode; /* current inflate mode */ | |
3423 | ||
3424 | /* mode dependent information */ | |
3425 | union { | |
3426 | uInt method; /* if FLAGS, method byte */ | |
3427 | struct { | |
3428 | uLong was; /* computed check value */ | |
3429 | uLong need; /* stream check value */ | |
3430 | } check; /* if CHECK, check values to compare */ | |
3431 | uInt marker; /* if BAD, inflateSync's marker bytes count */ | |
3432 | } sub; /* submode */ | |
3433 | ||
3434 | /* mode independent information */ | |
3435 | int nowrap; /* flag for no wrapper */ | |
3436 | uInt wbits; /* log2(window size) (8..15, defaults to 15) */ | |
3437 | inflate_blocks_statef | |
3438 | *blocks; /* current inflate_blocks state */ | |
3439 | ||
9bccf70c | 3440 | }inflate_state; |
1c79356b A |
3441 | |
3442 | ||
9bccf70c | 3443 | int ZEXPORT inflateReset(z) |
1c79356b A |
3444 | z_streamp z; |
3445 | { | |
9bccf70c A |
3446 | inflate_state* s; |
3447 | if (z == Z_NULL || z->state == Z_NULL) | |
3448 | return Z_STREAM_ERROR; | |
3449 | ||
3450 | s = (inflate_state*)z->state; | |
3451 | z->total_in = z->total_out = 0; | |
3452 | z->msg = Z_NULL; | |
3453 | s->mode = s->nowrap ? BLOCKS : METHOD; | |
3454 | inflate_blocks_reset(s->blocks, z, Z_NULL); | |
3455 | Tracev((stderr, "inflate: reset\n")); | |
3456 | return Z_OK; | |
1c79356b A |
3457 | } |
3458 | ||
3459 | ||
9bccf70c | 3460 | int ZEXPORT inflateEnd(z) |
1c79356b A |
3461 | z_streamp z; |
3462 | { | |
1c79356b A |
3463 | if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL) |
3464 | return Z_STREAM_ERROR; | |
9bccf70c A |
3465 | if (((inflate_state*)z->state)->blocks != Z_NULL) |
3466 | inflate_blocks_free(((inflate_state*)z->state)->blocks, z); | |
1c79356b A |
3467 | ZFREE(z, z->state); |
3468 | z->state = Z_NULL; | |
9bccf70c | 3469 | Tracev((stderr, "inflate: end\n")); |
1c79356b A |
3470 | return Z_OK; |
3471 | } | |
3472 | ||
3473 | ||
9bccf70c | 3474 | int ZEXPORT inflateInit2_(z, w, version, stream_size) |
1c79356b A |
3475 | z_streamp z; |
3476 | int w; | |
3477 | const char *version; | |
3478 | int stream_size; | |
3479 | { | |
9bccf70c | 3480 | inflate_state* s; |
1c79356b A |
3481 | if (version == Z_NULL || version[0] != ZLIB_VERSION[0] || |
3482 | stream_size != sizeof(z_stream)) | |
3483 | return Z_VERSION_ERROR; | |
3484 | ||
3485 | /* initialize state */ | |
3486 | if (z == Z_NULL) | |
3487 | return Z_STREAM_ERROR; | |
3488 | z->msg = Z_NULL; | |
3489 | #ifndef NO_ZCFUNCS | |
3490 | if (z->zalloc == Z_NULL) | |
3491 | { | |
3492 | z->zalloc = zcalloc; | |
3493 | z->opaque = (voidpf)0; | |
3494 | } | |
3495 | if (z->zfree == Z_NULL) z->zfree = zcfree; | |
3496 | #endif | |
3497 | if ((z->state = (struct internal_state FAR *) | |
9bccf70c | 3498 | ZALLOC(z,1,sizeof(struct inflate_state))) == Z_NULL) |
1c79356b | 3499 | return Z_MEM_ERROR; |
9bccf70c A |
3500 | s = (inflate_state*)z->state; |
3501 | s->blocks = Z_NULL; | |
1c79356b A |
3502 | |
3503 | /* handle undocumented nowrap option (no zlib header or check) */ | |
9bccf70c | 3504 | s->nowrap = 0; |
1c79356b A |
3505 | if (w < 0) |
3506 | { | |
3507 | w = - w; | |
9bccf70c | 3508 | s->nowrap = 1; |
1c79356b A |
3509 | } |
3510 | ||
3511 | /* set window size */ | |
3512 | if (w < 8 || w > 15) | |
3513 | { | |
3514 | inflateEnd(z); | |
3515 | return Z_STREAM_ERROR; | |
3516 | } | |
9bccf70c | 3517 | s->wbits = (uInt)w; |
1c79356b A |
3518 | |
3519 | /* create inflate_blocks state */ | |
9bccf70c A |
3520 | if ((s->blocks = |
3521 | inflate_blocks_new(z, s->nowrap ? Z_NULL : adler32, (uInt)1 << w)) | |
1c79356b A |
3522 | == Z_NULL) |
3523 | { | |
3524 | inflateEnd(z); | |
3525 | return Z_MEM_ERROR; | |
3526 | } | |
9bccf70c | 3527 | Tracev((stderr, "inflate: allocated\n")); |
1c79356b A |
3528 | |
3529 | /* reset state */ | |
3530 | inflateReset(z); | |
3531 | return Z_OK; | |
3532 | } | |
3533 | ||
3534 | ||
9bccf70c | 3535 | int ZEXPORT inflateInit_(z, version, stream_size) |
1c79356b A |
3536 | z_streamp z; |
3537 | const char *version; | |
3538 | int stream_size; | |
3539 | { | |
3540 | return inflateInit2_(z, DEF_WBITS, version, stream_size); | |
3541 | } | |
3542 | ||
3543 | ||
9bccf70c | 3544 | #define NEEDBYTE {if(z->avail_in==0)return r;r=f;} |
1c79356b A |
3545 | #define NEXTBYTE (z->avail_in--,z->total_in++,*z->next_in++) |
3546 | ||
9bccf70c | 3547 | int ZEXPORT inflate(z, f) |
1c79356b A |
3548 | z_streamp z; |
3549 | int f; | |
3550 | { | |
3551 | int r; | |
3552 | uInt b; | |
9bccf70c | 3553 | inflate_state* s; |
1c79356b | 3554 | |
9bccf70c | 3555 | if (z == Z_NULL || z->state == Z_NULL || z->next_in == Z_NULL) |
1c79356b | 3556 | return Z_STREAM_ERROR; |
9bccf70c | 3557 | f = f == Z_FINISH ? Z_BUF_ERROR : Z_OK; |
1c79356b | 3558 | r = Z_BUF_ERROR; |
9bccf70c A |
3559 | s = (inflate_state*)z->state; |
3560 | while (1) switch (s->mode) | |
1c79356b A |
3561 | { |
3562 | case METHOD: | |
3563 | NEEDBYTE | |
9bccf70c | 3564 | if (((s->sub.method = NEXTBYTE) & 0xf) != Z_DEFLATED) |
1c79356b | 3565 | { |
9bccf70c | 3566 | s->mode = BAD; |
1c79356b | 3567 | z->msg = (char*)"unknown compression method"; |
9bccf70c | 3568 | s->sub.marker = 5; /* can't try inflateSync */ |
1c79356b A |
3569 | break; |
3570 | } | |
9bccf70c | 3571 | if ((s->sub.method >> 4) + 8 > s->wbits) |
1c79356b | 3572 | { |
9bccf70c | 3573 | s->mode = BAD; |
1c79356b | 3574 | z->msg = (char*)"invalid window size"; |
9bccf70c | 3575 | s->sub.marker = 5; /* can't try inflateSync */ |
1c79356b A |
3576 | break; |
3577 | } | |
9bccf70c | 3578 | s->mode = FLAG; |
1c79356b A |
3579 | case FLAG: |
3580 | NEEDBYTE | |
3581 | b = NEXTBYTE; | |
9bccf70c | 3582 | if (((s->sub.method << 8) + b) % 31) |
1c79356b | 3583 | { |
9bccf70c | 3584 | s->mode = BAD; |
1c79356b | 3585 | z->msg = (char*)"incorrect header check"; |
9bccf70c | 3586 | s->sub.marker = 5; /* can't try inflateSync */ |
1c79356b A |
3587 | break; |
3588 | } | |
9bccf70c | 3589 | Tracev((stderr, "inflate: zlib header ok\n")); |
1c79356b A |
3590 | if (!(b & PRESET_DICT)) |
3591 | { | |
9bccf70c A |
3592 | s->mode = BLOCKS; |
3593 | break; | |
1c79356b | 3594 | } |
9bccf70c | 3595 | s->mode = DICT4; |
1c79356b A |
3596 | case DICT4: |
3597 | NEEDBYTE | |
9bccf70c A |
3598 | s->sub.check.need = (uLong)NEXTBYTE << 24; |
3599 | s->mode = DICT3; | |
1c79356b A |
3600 | case DICT3: |
3601 | NEEDBYTE | |
9bccf70c A |
3602 | s->sub.check.need += (uLong)NEXTBYTE << 16; |
3603 | s->mode = DICT2; | |
1c79356b A |
3604 | case DICT2: |
3605 | NEEDBYTE | |
9bccf70c A |
3606 | s->sub.check.need += (uLong)NEXTBYTE << 8; |
3607 | s->mode = DICT1; | |
1c79356b A |
3608 | case DICT1: |
3609 | NEEDBYTE | |
9bccf70c A |
3610 | s->sub.check.need += (uLong)NEXTBYTE; |
3611 | z->adler = s->sub.check.need; | |
3612 | s->mode = DICT0; | |
1c79356b A |
3613 | return Z_NEED_DICT; |
3614 | case DICT0: | |
9bccf70c | 3615 | s->mode = BAD; |
1c79356b | 3616 | z->msg = (char*)"need dictionary"; |
9bccf70c | 3617 | s->sub.marker = 0; /* can try inflateSync */ |
1c79356b A |
3618 | return Z_STREAM_ERROR; |
3619 | case BLOCKS: | |
9bccf70c | 3620 | r = inflate_blocks(s->blocks, z, r); |
1c79356b A |
3621 | if (r == Z_DATA_ERROR) |
3622 | { | |
9bccf70c A |
3623 | s->mode = BAD; |
3624 | s->sub.marker = 0; /* can try inflateSync */ | |
1c79356b A |
3625 | break; |
3626 | } | |
9bccf70c A |
3627 | if (r == Z_OK) |
3628 | r = f; | |
1c79356b A |
3629 | if (r != Z_STREAM_END) |
3630 | return r; | |
9bccf70c A |
3631 | r = f; |
3632 | inflate_blocks_reset(s->blocks, z, &s->sub.check.was); | |
3633 | if (s->nowrap) | |
1c79356b | 3634 | { |
9bccf70c | 3635 | s->mode = DONE; |
1c79356b A |
3636 | break; |
3637 | } | |
9bccf70c | 3638 | s->mode = CHECK4; |
1c79356b A |
3639 | case CHECK4: |
3640 | NEEDBYTE | |
9bccf70c A |
3641 | s->sub.check.need = (uLong)NEXTBYTE << 24; |
3642 | s->mode = CHECK3; | |
1c79356b A |
3643 | case CHECK3: |
3644 | NEEDBYTE | |
9bccf70c A |
3645 | s->sub.check.need += (uLong)NEXTBYTE << 16; |
3646 | s->mode = CHECK2; | |
1c79356b A |
3647 | case CHECK2: |
3648 | NEEDBYTE | |
9bccf70c A |
3649 | s->sub.check.need += (uLong)NEXTBYTE << 8; |
3650 | s->mode = CHECK1; | |
1c79356b A |
3651 | case CHECK1: |
3652 | NEEDBYTE | |
9bccf70c | 3653 | s->sub.check.need += (uLong)NEXTBYTE; |
1c79356b | 3654 | |
9bccf70c | 3655 | if (s->sub.check.was != s->sub.check.need) |
1c79356b | 3656 | { |
9bccf70c | 3657 | s->mode = BAD; |
1c79356b | 3658 | z->msg = (char*)"incorrect data check"; |
9bccf70c | 3659 | s->sub.marker = 5; /* can't try inflateSync */ |
1c79356b A |
3660 | break; |
3661 | } | |
9bccf70c A |
3662 | Tracev((stderr, "inflate: zlib check ok\n")); |
3663 | s->mode = DONE; | |
1c79356b A |
3664 | case DONE: |
3665 | return Z_STREAM_END; | |
3666 | case BAD: | |
3667 | return Z_DATA_ERROR; | |
3668 | default: | |
3669 | return Z_STREAM_ERROR; | |
3670 | } | |
9bccf70c A |
3671 | #ifdef NEED_DUMMY_RETURN |
3672 | return Z_STREAM_ERROR; /* Some dumb compilers complain without this */ | |
3673 | #endif | |
1c79356b A |
3674 | } |
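/*
 * [Editorial illustration -- not part of the original zlib sources; the
 * ZLIB_EXAMPLES guard is hypothetical.]  The METHOD and FLAG states above
 * validate the two-byte zlib header.  For the common header 0x78 0x9C:
 * 0x789C == 31*996 so the check passes, the method nibble is 8 (deflate)
 * and the window size is 1 << ((0x78 >> 4) + 8) == 32K.
 */
#ifdef ZLIB_EXAMPLES
local int example_zlib_header_ok(cmf, flg, wbits)
    uInt cmf;           /* first header byte (method and log2 window) */
    uInt flg;           /* second header byte (check bits, dict flag) */
    uInt wbits;         /* largest window the decoder accepts (8..15) */
{
    if ((cmf & 0xf) != Z_DEFLATED) return 0;    /* unknown method */
    if ((cmf >> 4) + 8 > wbits) return 0;       /* window too large */
    return ((cmf << 8) + flg) % 31 == 0;        /* header check */
}
#endif /* ZLIB_EXAMPLES */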
3675 | ||
3676 | ||
9bccf70c | 3677 | int ZEXPORT inflateSetDictionary(z, dictionary, dictLength) |
1c79356b A |
3678 | z_streamp z; |
3679 | const Bytef *dictionary; | |
3680 | uInt dictLength; | |
3681 | { | |
3682 | uInt length = dictLength; | |
9bccf70c | 3683 | inflate_state* s; |
1c79356b | 3684 | |
9bccf70c | 3685 | if (z == Z_NULL || z->state == Z_NULL || ((inflate_state*)z->state)->mode != DICT0) |
1c79356b | 3686 | return Z_STREAM_ERROR; |
9bccf70c | 3687 | s = (inflate_state*)z->state; |
1c79356b A |
3688 | |
3689 | if (adler32(1L, dictionary, dictLength) != z->adler) return Z_DATA_ERROR; | |
3690 | z->adler = 1L; | |
3691 | ||
9bccf70c | 3692 | if (length >= ((uInt)1<<s->wbits)) |
1c79356b | 3693 | { |
9bccf70c | 3694 | length = (1<<s->wbits)-1; |
1c79356b A |
3695 | dictionary += dictLength - length; |
3696 | } | |
9bccf70c A |
3697 | inflate_set_dictionary(s->blocks, dictionary, length); |
3698 | s->mode = BLOCKS; | |
1c79356b A |
3699 | return Z_OK; |
3700 | } | |
3701 | ||
1c79356b | 3702 | |
9bccf70c | 3703 | int ZEXPORT inflateSync(z) |
1c79356b A |
3704 | z_streamp z; |
3705 | { | |
3706 | uInt n; /* number of bytes to look at */ | |
3707 | Bytef *p; /* pointer to bytes */ | |
3708 | uInt m; /* number of marker bytes found in a row */ | |
3709 | uLong r, w; /* temporaries to save total_in and total_out */ | |
9bccf70c | 3710 | inflate_state* s; |
1c79356b A |
3711 | |
3712 | /* set up */ | |
3713 | if (z == Z_NULL || z->state == Z_NULL) | |
3714 | return Z_STREAM_ERROR; | |
9bccf70c A |
3715 | s = (inflate_state*)z->state; |
3716 | if (s->mode != BAD) | |
1c79356b | 3717 | { |
9bccf70c A |
3718 | s->mode = BAD; |
3719 | s->sub.marker = 0; | |
1c79356b A |
3720 | } |
3721 | if ((n = z->avail_in) == 0) | |
3722 | return Z_BUF_ERROR; | |
3723 | p = z->next_in; | |
9bccf70c | 3724 | m = s->sub.marker; |
1c79356b A |
3725 | |
3726 | /* search */ | |
3727 | while (n && m < 4) | |
3728 | { | |
9bccf70c A |
3729 | static const Byte mark[4] = {0, 0, 0xff, 0xff}; |
3730 | if (*p == mark[m]) | |
1c79356b A |
3731 | m++; |
3732 | else if (*p) | |
3733 | m = 0; | |
3734 | else | |
3735 | m = 4 - m; | |
3736 | p++, n--; | |
3737 | } | |
3738 | ||
3739 | /* restore */ | |
3740 | z->total_in += p - z->next_in; | |
3741 | z->next_in = p; | |
3742 | z->avail_in = n; | |
9bccf70c | 3743 | s->sub.marker = m; |
1c79356b A |
3744 | |
3745 | /* return no joy or set up to restart on a new block */ | |
3746 | if (m != 4) | |
3747 | return Z_DATA_ERROR; | |
3748 | r = z->total_in; w = z->total_out; | |
3749 | inflateReset(z); | |
3750 | z->total_in = r; z->total_out = w; | |
9bccf70c | 3751 | s->mode = BLOCKS; |
1c79356b A |
3752 | return Z_OK; |
3753 | } | |
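/*
 * [Editorial note -- not part of the original zlib sources.]  The marker
 * searched for above, 00 00 FF FF, is the LEN/NLEN tail of the empty stored
 * block that Z_SYNC_FLUSH or Z_FULL_FLUSH produces.  The `m = 4 - m' branch
 * handles overlapping partial matches: in the byte stream 00 00 00 FF FF,
 * the third zero leaves m at 2 instead of resetting it, so the marker is
 * still found.
 */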
3754 | ||
9bccf70c A |
3755 | |
3756 | /* Returns true if inflate is currently at the end of a block generated | |
3757 | * by Z_SYNC_FLUSH or Z_FULL_FLUSH. This function is used by one PPP | |
3758 | * implementation to provide an additional safety check. PPP uses Z_SYNC_FLUSH | |
3759 | * but removes the length bytes of the resulting empty stored block. When | |
3760 | decompressing, PPP checks that at the end of an input packet, inflate is | |
3761 | * waiting for these length bytes. | |
3762 | */ | |
3763 | int ZEXPORT inflateSyncPoint(z) | |
3764 | z_streamp z; | |
3765 | { | |
3766 | if (z == Z_NULL || z->state == Z_NULL || ((inflate_state*)z->state)->blocks == Z_NULL) | |
3767 | return Z_STREAM_ERROR; | |
3768 | return inflate_blocks_sync_point(((inflate_state*)z->state)->blocks); | |
3769 | } | |
1c79356b A |
3770 | #undef NEEDBYTE |
3771 | #undef NEXTBYTE | |
3772 | /* --- inflate.c */ | |
3773 | ||
3774 | /* +++ infblock.c */ | |
3775 | /* infblock.c -- interpret and process block types to last block | |
9bccf70c | 3776 | * Copyright (C) 1995-2002 Mark Adler |
1c79356b A |
3777 | * For conditions of distribution and use, see copyright notice in zlib.h |
3778 | */ | |
3779 | ||
3780 | /* #include "zutil.h" */ | |
3781 | /* #include "infblock.h" */ | |
3782 | ||
3783 | /* +++ inftrees.h */ | |
3784 | /* inftrees.h -- header to use inftrees.c | |
9bccf70c | 3785 | * Copyright (C) 1995-2002 Mark Adler |
1c79356b A |
3786 | * For conditions of distribution and use, see copyright notice in zlib.h |
3787 | */ | |
3788 | ||
3789 | /* WARNING: this file should *not* be used by applications. It is | |
3790 | part of the implementation of the compression library and is | |
3791 | subject to change. Applications should only use zlib.h. | |
3792 | */ | |
3793 | ||
3794 | /* Huffman code lookup table entry--this entry is four bytes for machines | |
3795 | that have 16-bit pointers (e.g. PC's in the small or medium model). */ | |
3796 | ||
3797 | typedef struct inflate_huft_s FAR inflate_huft; | |
3798 | ||
3799 | struct inflate_huft_s { | |
3800 | union { | |
3801 | struct { | |
3802 | Byte Exop; /* number of extra bits or operation */ | |
3803 | Byte Bits; /* number of bits in this code or subcode */ | |
3804 | } what; | |
9bccf70c A |
3805 | uInt pad; /* pad structure to a power of 2 (4 bytes for */ |
3806 | } word; /* 16-bit, 8 bytes for 32-bit int's) */ | |
3807 | uInt base; /* literal, length base, distance base, | |
3808 | or table offset */ | |
1c79356b A |
3809 | }; |
3810 | ||
9bccf70c A |
3811 | /* Maximum size of dynamic tree. The maximum found in a long but non- |
3812 | exhaustive search was 1004 huft structures (850 for length/literals | |
3813 | and 154 for distances, the latter actually the result of an | |
3814 | exhaustive search). The actual maximum is not known, but the | |
3815 | value below is more than safe. */ | |
3816 | #define MANY 1440 | |
1c79356b A |
3817 | |
3818 | extern int inflate_trees_bits OF(( | |
3819 | uIntf *, /* 19 code lengths */ | |
3820 | uIntf *, /* bits tree desired/actual depth */ | |
3821 | inflate_huft * FAR *, /* bits tree result */ | |
9bccf70c A |
3822 | inflate_huft *, /* space for trees */ |
3823 | z_streamp)); /* for messages */ | |
1c79356b A |
3824 | |
3825 | extern int inflate_trees_dynamic OF(( | |
3826 | uInt, /* number of literal/length codes */ | |
3827 | uInt, /* number of distance codes */ | |
3828 | uIntf *, /* that many (total) code lengths */ | |
3829 | uIntf *, /* literal desired/actual bit depth */ | |
3830 | uIntf *, /* distance desired/actual bit depth */ | |
3831 | inflate_huft * FAR *, /* literal/length tree result */ | |
3832 | inflate_huft * FAR *, /* distance tree result */ | |
9bccf70c A |
3833 | inflate_huft *, /* space for trees */ |
3834 | z_streamp)); /* for messages */ | |
1c79356b A |
3835 | |
3836 | extern int inflate_trees_fixed OF(( | |
3837 | uIntf *, /* literal desired/actual bit depth */ | |
3838 | uIntf *, /* distance desired/actual bit depth */ | |
3839 | inflate_huft * FAR *, /* literal/length tree result */ | |
9bccf70c A |
3840 | inflate_huft * FAR *, /* distance tree result */ |
3841 | z_streamp)); /* for memory allocation */ | |
1c79356b A |
3842 | /* --- inftrees.h */ |
3843 | ||
3844 | /* +++ infcodes.h */ | |
3845 | /* infcodes.h -- header to use infcodes.c | |
9bccf70c | 3846 | * Copyright (C) 1995-2002 Mark Adler |
1c79356b A |
3847 | * For conditions of distribution and use, see copyright notice in zlib.h |
3848 | */ | |
3849 | ||
3850 | /* WARNING: this file should *not* be used by applications. It is | |
3851 | part of the implementation of the compression library and is | |
3852 | subject to change. Applications should only use zlib.h. | |
3853 | */ | |
3854 | ||
3855 | struct inflate_codes_state; | |
3856 | typedef struct inflate_codes_state FAR inflate_codes_statef; | |
3857 | ||
3858 | extern inflate_codes_statef *inflate_codes_new OF(( | |
3859 | uInt, uInt, | |
3860 | inflate_huft *, inflate_huft *, | |
3861 | z_streamp )); | |
3862 | ||
3863 | extern int inflate_codes OF(( | |
3864 | inflate_blocks_statef *, | |
3865 | z_streamp , | |
3866 | int)); | |
3867 | ||
3868 | extern void inflate_codes_free OF(( | |
3869 | inflate_codes_statef *, | |
3870 | z_streamp )); | |
3871 | ||
3872 | /* --- infcodes.h */ | |
3873 | ||
3874 | /* +++ infutil.h */ | |
3875 | /* infutil.h -- types and macros common to blocks and codes | |
9bccf70c | 3876 | * Copyright (C) 1995-2002 Mark Adler |
1c79356b A |
3877 | * For conditions of distribution and use, see copyright notice in zlib.h |
3878 | */ | |
3879 | ||
3880 | /* WARNING: this file should *not* be used by applications. It is | |
3881 | part of the implementation of the compression library and is | |
3882 | subject to change. Applications should only use zlib.h. | |
3883 | */ | |
3884 | ||
3885 | #ifndef _INFUTIL_H | |
3886 | #define _INFUTIL_H | |
3887 | ||
3888 | typedef enum { | |
3889 | TYPE, /* get type bits (3, including end bit) */ | |
3890 | LENS, /* get lengths for stored */ | |
3891 | STORED, /* processing stored block */ | |
3892 | TABLE, /* get table lengths */ | |
3893 | BTREE, /* get bit lengths tree for a dynamic block */ | |
3894 | DTREE, /* get length, distance trees for a dynamic block */ | |
3895 | CODES, /* processing fixed or dynamic block */ | |
3896 | DRY, /* output remaining window bytes */ | |
9bccf70c A |
3897 | DONEB, /* finished last block, done */ |
3898 | BADB} /* got a data error--stuck here */ | |
1c79356b A |
3899 | inflate_block_mode; |
3900 | ||
3901 | /* inflate blocks semi-private state */ | |
3902 | struct inflate_blocks_state { | |
3903 | ||
3904 | /* mode */ | |
3905 | inflate_block_mode mode; /* current inflate_block mode */ | |
3906 | ||
3907 | /* mode dependent information */ | |
3908 | union { | |
3909 | uInt left; /* if STORED, bytes left to copy */ | |
3910 | struct { | |
3911 | uInt table; /* table lengths (14 bits) */ | |
3912 | uInt index; /* index into blens (or border) */ | |
3913 | uIntf *blens; /* bit lengths of codes */ | |
3914 | uInt bb; /* bit length tree depth */ | |
3915 | inflate_huft *tb; /* bit length decoding tree */ | |
3916 | } trees; /* if DTREE, decoding info for trees */ | |
3917 | struct { | |
1c79356b A |
3918 | inflate_codes_statef |
3919 | *codes; | |
3920 | } decode; /* if CODES, current state */ | |
3921 | } sub; /* submode */ | |
3922 | uInt last; /* true if this block is the last block */ | |
3923 | ||
3924 | /* mode independent information */ | |
3925 | uInt bitk; /* bits in bit buffer */ | |
3926 | uLong bitb; /* bit buffer */ | |
9bccf70c | 3927 | inflate_huft *hufts; /* single malloc for tree space */ |
1c79356b A |
3928 | Bytef *window; /* sliding window */ |
3929 | Bytef *end; /* one byte after sliding window */ | |
3930 | Bytef *read; /* window read pointer */ | |
3931 | Bytef *write; /* window write pointer */ | |
3932 | check_func checkfn; /* check function */ | |
3933 | uLong check; /* check on output */ | |
3934 | ||
3935 | }; | |
3936 | ||
3937 | ||
3938 | /* defines for inflate input/output */ | |
3939 | /* update pointers and return */ | |
3940 | #define UPDBITS {s->bitb=b;s->bitk=k;} | |
3941 | #define UPDIN {z->avail_in=n;z->total_in+=p-z->next_in;z->next_in=p;} | |
3942 | #define UPDOUT {s->write=q;} | |
3943 | #define UPDATE {UPDBITS UPDIN UPDOUT} | |
3944 | #define LEAVE {UPDATE return inflate_flush(s,z,r);} | |
3945 | /* get bytes and bits */ | |
3946 | #define LOADIN {p=z->next_in;n=z->avail_in;b=s->bitb;k=s->bitk;} | |
3947 | #define NEEDBYTE {if(n)r=Z_OK;else LEAVE} | |
3948 | #define NEXTBYTE (n--,*p++) | |
3949 | #define NEEDBITS(j) {while(k<(j)){NEEDBYTE;b|=((uLong)NEXTBYTE)<<k;k+=8;}} | |
3950 | #define DUMPBITS(j) {b>>=(j);k-=(j);} | |
3951 | /* output bytes */ | |
3952 | #define WAVAIL (uInt)(q<s->read?s->read-q-1:s->end-q) | |
3953 | #define LOADOUT {q=s->write;m=(uInt)WAVAIL;} | |
9bccf70c | 3954 | #define WRAP {if(q==s->end&&s->read!=s->window){q=s->window;m=(uInt)WAVAIL;}} |
1c79356b | 3955 | #define FLUSH {UPDOUT r=inflate_flush(s,z,r); LOADOUT} |
9bccf70c | 3956 | #define NEEDOUT {if(m==0){WRAP if(m==0){FLUSH WRAP if(m==0) LEAVE}}r=Z_OK;} |
1c79356b A |
3957 | #define OUTBYTE(a) {*q++=(Byte)(a);m--;} |
3958 | /* load local pointers */ | |
3959 | #define LOAD {LOADIN LOADOUT} | |
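/*
 * [Editorial note -- not part of the original zlib sources.]  The inflate
 * bit buffer built by these macros is least-significant-bit first: NEXTBYTE
 * is shifted left by the current bit count k and OR-ed into b, and codes
 * are consumed from the low end.  For example, after loading the single
 * byte 0x05, the 3-bit deflate block header is b & 7 == 5: last-block flag
 * 1 (bit 0) and block type 2, a dynamic Huffman block (bits 1-2).
 */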
3960 | ||
3961 | /* masks for lower bits (size given to avoid silly warnings with Visual C++) */ | |
3962 | extern uInt inflate_mask[17]; | |
3963 | ||
3964 | /* copy as much as possible from the sliding window to the output area */ | |
3965 | extern int inflate_flush OF(( | |
3966 | inflate_blocks_statef *, | |
3967 | z_streamp , | |
3968 | int)); | |
3969 | ||
3970 | #ifndef NO_DUMMY_DECL | |
3971 | struct internal_state {int dummy;}; /* for buggy compilers */ | |
3972 | #endif | |
3973 | ||
3974 | #endif | |
3975 | /* --- infutil.h */ | |
3976 | ||
3977 | #ifndef NO_DUMMY_DECL | |
3978 | struct inflate_codes_state {int dummy;}; /* for buggy compilers */ | |
3979 | #endif | |
3980 | ||
9bccf70c A |
3981 | /* simplify the use of the inflate_huft type with some defines */ |
3982 | #define exop word.what.Exop | |
3983 | #define bits word.what.Bits | |
3984 | ||
1c79356b A |
3985 | /* Table for deflate from PKZIP's appnote.txt. */ |
3986 | local const uInt border[] = { /* Order of the bit length code lengths */ | |
3987 | 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15}; | |
3988 | ||
3989 | /* | |
3990 | Notes beyond the 1.93a appnote.txt: | |
3991 | ||
3992 | 1. Distance pointers never point before the beginning of the output | |
3993 | stream. | |
3994 | 2. Distance pointers can point back across blocks, up to 32k away. | |
3995 | 3. There is an implied maximum of 7 bits for the bit length table and | |
3996 | 15 bits for the actual data. | |
3997 | 4. If only one code exists, then it is encoded using one bit. (Zero | |
3998 | would be more efficient, but perhaps a little confusing.) If two | |
3999 | codes exist, they are coded using one bit each (0 and 1). | |
4000 | 5. There is no way of sending zero distance codes--a dummy must be | |
4001 | sent if there are none. (History: a pre 2.0 version of PKZIP would | |
4002 | store blocks with no distance codes, but this was discovered to be | |
4003 | too harsh a criterion.) Valid only for 1.93a. 2.04c does allow | |
4004 | zero distance codes, which are sent as one code of zero bits in | |
4005 | length. | |
4006 | 6. There are up to 286 literal/length codes. Code 256 represents the | |
4007 | end-of-block. Note however that the static length tree defines | |
4008 | 288 codes just to fill out the Huffman codes. Codes 286 and 287 | |
4009 | cannot be used though, since there is no length base or extra bits | |
4010 | defined for them. Similarly, there are up to 30 distance codes. | |
4011 | However, static trees define 32 codes (all 5 bits) to fill out the | |
4012 | Huffman codes, but the last two had better not show up in the data. | |
4013 | 7. Unzip can check dynamic Huffman blocks for complete code sets. | |
4014 | The exception is that a single code would not be complete (see #4). | |
4015 | 8. The five bits following the block type are really the number of | |
4016 | literal codes sent minus 257. | |
4017 | 9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits | |
4018 | (1+6+6). Therefore, to output three times the length, you output | |
4019 | three codes (1+1+1), whereas to output four times the same length, | |
4020 | you only need two codes (1+3). Hmm. | |
4021 | 10. In the tree reconstruction algorithm, Code = Code + Increment | |
4022 | only if BitLength(i) is not zero. (Pretty obvious.) | |
4023 | 11. Correction: 4 Bits: # of Bit Length codes - 4 (4 - 19) | |
4024 | 12. Note: length code 284 can represent 227-258, but length code 285 | |
4025 | really is 258. The last length deserves its own, short code | |
4026 | since it gets used a lot in very redundant files. The length | |
4027 | 258 is special since 258 - 3 (the min match length) is 255. | |
4028 | 13. The literal/length and distance code bit lengths are read as a | |
4029 | single stream of lengths. It is possible (and advantageous) for | |
4030 | a repeat code (16, 17, or 18) to go across the boundary between | |
4031 | the two sets of lengths. | |
4032 | */ | |
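/*
 * Editorial sketch (guarded out of the build): the TYPE and LENS states
 * below consume exactly the bits described in the notes above -- a 1-bit
 * "last block" flag, a 2-bit block type, and, for stored blocks, a
 * byte-aligned 16-bit LEN followed by its one's complement NLEN.  The
 * sample bytes encode a final stored block holding "abc"; everything
 * here is illustrative, not zlib API.
 */
#if 0
#include <stdio.h>

int main(void)
{
    const unsigned char blk[] = { 0x01, 0x03, 0x00, 0xfc, 0xff, 'a', 'b', 'c' };
    unsigned hdr   = blk[0];
    unsigned last  = hdr & 1;           /* s->last = t & 1                     */
    unsigned btype = (hdr >> 1) & 3;    /* 0 stored, 1 fixed, 2 dynamic, 3 bad */
    unsigned len   = blk[1] | (blk[2] << 8);
    unsigned nlen  = blk[3] | (blk[4] << 8);

    printf("last=%u type=%u len=%u complement %s\n",
           last, btype, len,
           (len ^ 0xffffu) == nlen ? "ok" : "bad");  /* the LENS check */
    return 0;
}
#endif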
4033 | ||
4034 | ||
4035 | void inflate_blocks_reset(s, z, c) | |
4036 | inflate_blocks_statef *s; | |
4037 | z_streamp z; | |
4038 | uLongf *c; | |
4039 | { | |
9bccf70c | 4040 | if (c != Z_NULL) |
1c79356b A |
4041 | *c = s->check; |
4042 | if (s->mode == BTREE || s->mode == DTREE) | |
4043 | ZFREE(z, s->sub.trees.blens); | |
4044 | if (s->mode == CODES) | |
1c79356b | 4045 | inflate_codes_free(s->sub.decode.codes, z); |
1c79356b A |
4046 | s->mode = TYPE; |
4047 | s->bitk = 0; | |
4048 | s->bitb = 0; | |
4049 | s->read = s->write = s->window; | |
4050 | if (s->checkfn != Z_NULL) | |
9bccf70c A |
4051 | z->adler = s->check = (*s->checkfn)(0L, (const Bytef *)Z_NULL, 0); |
4052 | Tracev((stderr, "inflate: blocks reset\n")); | |
1c79356b A |
4053 | } |
4054 | ||
4055 | ||
4056 | inflate_blocks_statef *inflate_blocks_new(z, c, w) | |
4057 | z_streamp z; | |
4058 | check_func c; | |
4059 | uInt w; | |
4060 | { | |
4061 | inflate_blocks_statef *s; | |
4062 | ||
4063 | if ((s = (inflate_blocks_statef *)ZALLOC | |
4064 | (z,1,sizeof(struct inflate_blocks_state))) == Z_NULL) | |
4065 | return s; | |
9bccf70c A |
4066 | if ((s->hufts = |
4067 | (inflate_huft *)ZALLOC(z, sizeof(inflate_huft), MANY)) == Z_NULL) | |
4068 | { | |
4069 | ZFREE(z, s); | |
4070 | return Z_NULL; | |
4071 | } | |
1c79356b A |
4072 | if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL) |
4073 | { | |
9bccf70c | 4074 | ZFREE(z, s->hufts); |
1c79356b A |
4075 | ZFREE(z, s); |
4076 | return Z_NULL; | |
4077 | } | |
4078 | s->end = s->window + w; | |
4079 | s->checkfn = c; | |
4080 | s->mode = TYPE; | |
9bccf70c A |
4081 | Tracev((stderr, "inflate: blocks allocated\n")); |
4082 | inflate_blocks_reset(s, z, Z_NULL); | |
1c79356b A |
4083 | return s; |
4084 | } | |
4085 | ||
4086 | ||
1c79356b A |
4087 | int inflate_blocks(s, z, r) |
4088 | inflate_blocks_statef *s; | |
4089 | z_streamp z; | |
4090 | int r; | |
4091 | { | |
4092 | uInt t; /* temporary storage */ | |
4093 | uLong b; /* bit buffer */ | |
4094 | uInt k; /* bits in bit buffer */ | |
4095 | Bytef *p; /* input data pointer */ | |
4096 | uInt n; /* bytes available there */ | |
4097 | Bytef *q; /* output window write pointer */ | |
4098 | uInt m; /* bytes to end of window or read pointer */ | |
4099 | ||
4100 | /* copy input/output information to locals (UPDATE macro restores) */ | |
4101 | LOAD | |
4102 | ||
4103 | /* process input based on current state */ | |
4104 | while (1) switch (s->mode) | |
4105 | { | |
4106 | case TYPE: | |
4107 | NEEDBITS(3) | |
4108 | t = (uInt)b & 7; | |
4109 | s->last = t & 1; | |
4110 | switch (t >> 1) | |
4111 | { | |
4112 | case 0: /* stored */ | |
9bccf70c | 4113 | Tracev((stderr, "inflate: stored block%s\n", |
1c79356b A |
4114 | s->last ? " (last)" : "")); |
4115 | DUMPBITS(3) | |
4116 | t = k & 7; /* go to byte boundary */ | |
4117 | DUMPBITS(t) | |
4118 | s->mode = LENS; /* get length of stored block */ | |
4119 | break; | |
4120 | case 1: /* fixed */ | |
9bccf70c | 4121 | Tracev((stderr, "inflate: fixed codes block%s\n", |
1c79356b A |
4122 | s->last ? " (last)" : "")); |
4123 | { | |
4124 | uInt bl, bd; | |
4125 | inflate_huft *tl, *td; | |
4126 | ||
9bccf70c | 4127 | inflate_trees_fixed(&bl, &bd, &tl, &td, z); |
1c79356b A |
4128 | s->sub.decode.codes = inflate_codes_new(bl, bd, tl, td, z); |
4129 | if (s->sub.decode.codes == Z_NULL) | |
4130 | { | |
4131 | r = Z_MEM_ERROR; | |
4132 | LEAVE | |
4133 | } | |
1c79356b A |
4134 | } |
4135 | DUMPBITS(3) | |
4136 | s->mode = CODES; | |
4137 | break; | |
4138 | case 2: /* dynamic */ | |
9bccf70c | 4139 | Tracev((stderr, "inflate: dynamic codes block%s\n", |
1c79356b A |
4140 | s->last ? " (last)" : "")); |
4141 | DUMPBITS(3) | |
4142 | s->mode = TABLE; | |
4143 | break; | |
4144 | case 3: /* illegal */ | |
4145 | DUMPBITS(3) | |
4146 | s->mode = BADB; | |
4147 | z->msg = (char*)"invalid block type"; | |
4148 | r = Z_DATA_ERROR; | |
4149 | LEAVE | |
4150 | } | |
4151 | break; | |
4152 | case LENS: | |
4153 | NEEDBITS(32) | |
4154 | if ((((~b) >> 16) & 0xffff) != (b & 0xffff)) | |
4155 | { | |
4156 | s->mode = BADB; | |
4157 | z->msg = (char*)"invalid stored block lengths"; | |
4158 | r = Z_DATA_ERROR; | |
4159 | LEAVE | |
4160 | } | |
4161 | s->sub.left = (uInt)b & 0xffff; | |
4162 | b = k = 0; /* dump bits */ | |
4163 | Tracev((stderr, "inflate: stored length %u\n", s->sub.left)); | |
4164 | s->mode = s->sub.left ? STORED : (s->last ? DRY : TYPE); | |
4165 | break; | |
4166 | case STORED: | |
4167 | if (n == 0) | |
4168 | LEAVE | |
4169 | NEEDOUT | |
4170 | t = s->sub.left; | |
4171 | if (t > n) t = n; | |
4172 | if (t > m) t = m; | |
4173 | zmemcpy(q, p, t); | |
4174 | p += t; n -= t; | |
4175 | q += t; m -= t; | |
4176 | if ((s->sub.left -= t) != 0) | |
4177 | break; | |
4178 | Tracev((stderr, "inflate: stored end, %lu total out\n", | |
4179 | z->total_out + (q >= s->read ? q - s->read : | |
4180 | (s->end - s->read) + (q - s->window)))); | |
4181 | s->mode = s->last ? DRY : TYPE; | |
4182 | break; | |
4183 | case TABLE: | |
4184 | NEEDBITS(14) | |
4185 | s->sub.trees.table = t = (uInt)b & 0x3fff; | |
4186 | #ifndef PKZIP_BUG_WORKAROUND | |
4187 | if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29) | |
4188 | { | |
4189 | s->mode = BADB; | |
4190 | z->msg = (char*)"too many length or distance symbols"; | |
4191 | r = Z_DATA_ERROR; | |
4192 | LEAVE | |
4193 | } | |
4194 | #endif | |
4195 | t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f); | |
1c79356b A |
4196 | if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, sizeof(uInt))) == Z_NULL) |
4197 | { | |
4198 | r = Z_MEM_ERROR; | |
4199 | LEAVE | |
4200 | } | |
4201 | DUMPBITS(14) | |
4202 | s->sub.trees.index = 0; | |
4203 | Tracev((stderr, "inflate: table sizes ok\n")); | |
4204 | s->mode = BTREE; | |
4205 | case BTREE: | |
4206 | while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10)) | |
4207 | { | |
4208 | NEEDBITS(3) | |
4209 | s->sub.trees.blens[border[s->sub.trees.index++]] = (uInt)b & 7; | |
4210 | DUMPBITS(3) | |
4211 | } | |
4212 | while (s->sub.trees.index < 19) | |
4213 | s->sub.trees.blens[border[s->sub.trees.index++]] = 0; | |
4214 | s->sub.trees.bb = 7; | |
4215 | t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb, | |
9bccf70c | 4216 | &s->sub.trees.tb, s->hufts, z); |
1c79356b A |
4217 | if (t != Z_OK) |
4218 | { | |
1c79356b A |
4219 | r = t; |
4220 | if (r == Z_DATA_ERROR) | |
9bccf70c A |
4221 | { |
4222 | ZFREE(z, s->sub.trees.blens); | |
1c79356b | 4223 | s->mode = BADB; |
9bccf70c | 4224 | } |
1c79356b A |
4225 | LEAVE |
4226 | } | |
4227 | s->sub.trees.index = 0; | |
4228 | Tracev((stderr, "inflate: bits tree ok\n")); | |
4229 | s->mode = DTREE; | |
4230 | case DTREE: | |
4231 | while (t = s->sub.trees.table, | |
4232 | s->sub.trees.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f)) | |
4233 | { | |
4234 | inflate_huft *h; | |
4235 | uInt i, j, c; | |
4236 | ||
4237 | t = s->sub.trees.bb; | |
4238 | NEEDBITS(t) | |
4239 | h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]); | |
9bccf70c A |
4240 | t = h->bits; |
4241 | c = h->base; | |
1c79356b A |
4242 | if (c < 16) |
4243 | { | |
4244 | DUMPBITS(t) | |
4245 | s->sub.trees.blens[s->sub.trees.index++] = c; | |
4246 | } | |
4247 | else /* c == 16..18 */ | |
4248 | { | |
4249 | i = c == 18 ? 7 : c - 14; | |
4250 | j = c == 18 ? 11 : 3; | |
4251 | NEEDBITS(t + i) | |
4252 | DUMPBITS(t) | |
4253 | j += (uInt)b & inflate_mask[i]; | |
4254 | DUMPBITS(i) | |
4255 | i = s->sub.trees.index; | |
4256 | t = s->sub.trees.table; | |
4257 | if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) || | |
4258 | (c == 16 && i < 1)) | |
4259 | { | |
1c79356b A |
4260 | ZFREE(z, s->sub.trees.blens); |
4261 | s->mode = BADB; | |
4262 | z->msg = (char*)"invalid bit length repeat"; | |
4263 | r = Z_DATA_ERROR; | |
4264 | LEAVE | |
4265 | } | |
4266 | c = c == 16 ? s->sub.trees.blens[i - 1] : 0; | |
4267 | do { | |
4268 | s->sub.trees.blens[i++] = c; | |
4269 | } while (--j); | |
4270 | s->sub.trees.index = i; | |
4271 | } | |
4272 | } | |
1c79356b A |
4273 | s->sub.trees.tb = Z_NULL; |
4274 | { | |
4275 | uInt bl, bd; | |
4276 | inflate_huft *tl, *td; | |
4277 | inflate_codes_statef *c; | |
4278 | ||
4279 | bl = 9; /* must be <= 9 for lookahead assumptions */ | |
4280 | bd = 6; /* must be <= 9 for lookahead assumptions */ | |
4281 | t = s->sub.trees.table; | |
1c79356b | 4282 | t = inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f), |
9bccf70c A |
4283 | s->sub.trees.blens, &bl, &bd, &tl, &td, |
4284 | s->hufts, z); | |
1c79356b A |
4285 | if (t != Z_OK) |
4286 | { | |
4287 | if (t == (uInt)Z_DATA_ERROR) | |
9bccf70c A |
4288 | { |
4289 | ZFREE(z, s->sub.trees.blens); | |
1c79356b | 4290 | s->mode = BADB; |
9bccf70c | 4291 | } |
1c79356b A |
4292 | r = t; |
4293 | LEAVE | |
4294 | } | |
9bccf70c | 4295 | Tracev((stderr, "inflate: trees ok\n")); |
1c79356b A |
4296 | if ((c = inflate_codes_new(bl, bd, tl, td, z)) == Z_NULL) |
4297 | { | |
1c79356b A |
4298 | r = Z_MEM_ERROR; |
4299 | LEAVE | |
4300 | } | |
4301 | s->sub.decode.codes = c; | |
1c79356b | 4302 | } |
9bccf70c | 4303 | ZFREE(z, s->sub.trees.blens); |
1c79356b A |
4304 | s->mode = CODES; |
4305 | case CODES: | |
4306 | UPDATE | |
4307 | if ((r = inflate_codes(s, z, r)) != Z_STREAM_END) | |
4308 | return inflate_flush(s, z, r); | |
4309 | r = Z_OK; | |
4310 | inflate_codes_free(s->sub.decode.codes, z); | |
1c79356b A |
4311 | LOAD |
4312 | Tracev((stderr, "inflate: codes end, %lu total out\n", | |
4313 | z->total_out + (q >= s->read ? q - s->read : | |
4314 | (s->end - s->read) + (q - s->window)))); | |
4315 | if (!s->last) | |
4316 | { | |
4317 | s->mode = TYPE; | |
4318 | break; | |
4319 | } | |
1c79356b A |
4320 | s->mode = DRY; |
4321 | case DRY: | |
4322 | FLUSH | |
4323 | if (s->read != s->write) | |
4324 | LEAVE | |
4325 | s->mode = DONEB; | |
4326 | case DONEB: | |
4327 | r = Z_STREAM_END; | |
4328 | LEAVE | |
4329 | case BADB: | |
4330 | r = Z_DATA_ERROR; | |
4331 | LEAVE | |
4332 | default: | |
4333 | r = Z_STREAM_ERROR; | |
4334 | LEAVE | |
4335 | } | |
4336 | } | |
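/*
 * Editorial sketch (guarded out of the build): how the 14 bits saved in
 * sub.trees.table by the TABLE state above break down.  The deflate
 * format packs, least significant bits first, 5 bits of HLIT
 * (literal/length codes - 257), 5 bits of HDIST (distance codes - 1)
 * and 4 bits of HCLEN (bit-length codes - 4); the header value used
 * here is made up for illustration.
 */
#if 0
#include <stdio.h>

int main(void)
{
    unsigned t = 0x08a2;                       /* hypothetical 14-bit header */
    unsigned nlit  = 257 + (t & 0x1f);         /* literal/length codes: 259  */
    unsigned ndist = 1 + ((t >> 5) & 0x1f);    /* distance codes:       6    */
    unsigned nclen = 4 + ((t >> 10) & 0xf);    /* bit-length codes:     6    */

    /* BTREE reads nclen 3-bit lengths in "border" order; DTREE then
       reads nlit + ndist code lengths through that bit-length tree */
    printf("nlit=%u ndist=%u nclen=%u total lengths=%u\n",
           nlit, ndist, nclen, nlit + ndist);
    return 0;
}
#endif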
4337 | ||
4338 | ||
9bccf70c | 4339 | int inflate_blocks_free(s, z) |
1c79356b A |
4340 | inflate_blocks_statef *s; |
4341 | z_streamp z; | |
1c79356b | 4342 | { |
9bccf70c | 4343 | inflate_blocks_reset(s, z, Z_NULL); |
1c79356b | 4344 | ZFREE(z, s->window); |
9bccf70c | 4345 | ZFREE(z, s->hufts); |
1c79356b | 4346 | ZFREE(z, s); |
9bccf70c | 4347 | Tracev((stderr, "inflate: blocks freed\n")); |
1c79356b A |
4348 | return Z_OK; |
4349 | } | |
4350 | ||
4351 | ||
4352 | void inflate_set_dictionary(s, d, n) | |
4353 | inflate_blocks_statef *s; | |
4354 | const Bytef *d; | |
4355 | uInt n; | |
4356 | { | |
9bccf70c | 4357 | zmemcpy(s->window, d, n); |
1c79356b A |
4358 | s->read = s->write = s->window + n; |
4359 | } | |
4360 | ||
1c79356b | 4361 | |
9bccf70c A |
4362 | /* Returns true if inflate is currently at the end of a block generated |
4363 | * by Z_SYNC_FLUSH or Z_FULL_FLUSH. | |
4364 | * IN assertion: s != Z_NULL | |
1c79356b | 4365 | */ |
9bccf70c A |
4366 | int inflate_blocks_sync_point(s) |
4367 | inflate_blocks_statef *s; | |
1c79356b | 4368 | { |
9bccf70c | 4369 | return s->mode == LENS; |
1c79356b A |
4370 | } |
4371 | /* --- infblock.c */ | |
4372 | ||
4373 | /* +++ inftrees.c */ | |
4374 | /* inftrees.c -- generate Huffman trees for efficient decoding | |
9bccf70c | 4375 | * Copyright (C) 1995-2002 Mark Adler |
1c79356b A |
4376 | * For conditions of distribution and use, see copyright notice in zlib.h |
4377 | */ | |
4378 | ||
4379 | /* #include "zutil.h" */ | |
4380 | /* #include "inftrees.h" */ | |
4381 | ||
9bccf70c A |
4382 | #if !defined(BUILDFIXED) && !defined(STDC) |
4383 | # define BUILDFIXED /* non ANSI compilers may not accept inffixed.h */ | |
4384 | #endif | |
4385 | ||
4386 | const char inflate_copyright[] = | |
4387 | " inflate 1.1.4 Copyright 1995-2002 Mark Adler "; | |
1c79356b A |
4388 | /* |
4389 | If you use the zlib library in a product, an acknowledgment is welcome | |
4390 | in the documentation of your product. If for some reason you cannot | |
4391 | include such an acknowledgment, I would appreciate that you keep this | |
4392 | copyright string in the executable of your product. | |
4393 | */ | |
9bccf70c | 4394 | |
1c79356b A |
4395 | #ifndef NO_DUMMY_DECL |
4396 | struct internal_state {int dummy;}; /* for buggy compilers */ | |
4397 | #endif | |
4398 | ||
4399 | /* simplify the use of the inflate_huft type with some defines */ | |
1c79356b A |
4400 | #define exop word.what.Exop |
4401 | #define bits word.what.Bits | |
4402 | ||
4403 | ||
4404 | local int huft_build OF(( | |
4405 | uIntf *, /* code lengths in bits */ | |
4406 | uInt, /* number of codes */ | |
4407 | uInt, /* number of "simple" codes */ | |
4408 | const uIntf *, /* list of base values for non-simple codes */ | |
4409 | const uIntf *, /* list of extra bits for non-simple codes */ | |
4410 | inflate_huft * FAR*,/* result: starting table */ | |
4411 | uIntf *, /* maximum lookup bits (returns actual) */ | |
9bccf70c A |
4412 | inflate_huft *, /* space for trees */ |
4413 | uInt *, /* hufts used in space */ | |
4414 | uIntf * )); /* space for values */ | |
1c79356b A |
4415 | |
4416 | /* Tables for deflate from PKZIP's appnote.txt. */ | |
4417 | local const uInt cplens[31] = { /* Copy lengths for literal codes 257..285 */ | |
4418 | 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, | |
4419 | 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0}; | |
4420 | /* see note #13 above about 258 */ | |
4421 | local const uInt cplext[31] = { /* Extra bits for literal codes 257..285 */ | |
4422 | 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, | |
4423 | 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 112, 112}; /* 112==invalid */ | |
4424 | local const uInt cpdist[30] = { /* Copy offsets for distance codes 0..29 */ | |
4425 | 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, | |
4426 | 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, | |
4427 | 8193, 12289, 16385, 24577}; | |
4428 | local const uInt cpdext[30] = { /* Extra bits for distance codes */ | |
4429 | 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, | |
4430 | 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, | |
4431 | 12, 12, 13, 13}; | |
4432 | ||
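/*
 * Editorial sketch (guarded out of the build): how the cplens/cplext
 * tables above are used.  A decoded length symbol in 257..285 selects a
 * base length and a count of extra bits; the extra bits read from the
 * stream are added to the base (cpdist/cpdext work the same way for
 * distances).  The trimmed local arrays and the sample symbol are
 * illustrative only.
 */
#if 0
#include <stdio.h>

int main(void)
{
    static const unsigned base[]  = { 3, 4, 5, 6, 7, 8, 9, 10, 11, 13 };
    static const unsigned extra[] = { 0, 0, 0, 0, 0, 0, 0,  0,  1,  1 };

    unsigned symbol    = 266;              /* hypothetical decoded symbol  */
    unsigned index     = symbol - 257;     /* 9                            */
    unsigned extrabits = 1;                /* value read from the stream   */
    unsigned length    = base[index] + extrabits;   /* 13 + 1 = 14         */

    printf("symbol %u -> length %u (%u extra bit)\n",
           symbol, length, extra[index]);
    return 0;
}
#endif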
4433 | /* | |
4434 | Huffman code decoding is performed using a multi-level table lookup. | |
4435 | The fastest way to decode is to simply build a lookup table whose | |
4436 | size is determined by the longest code. However, the time it takes | |
4437 | to build this table can also be a factor if the data being decoded | |
4438 | is not very long. The most common codes are necessarily the | |
4439 | shortest codes, so those codes dominate the decoding time, and hence | |
4440 | the speed. The idea is you can have a shorter table that decodes the | |
4441 | shorter, more probable codes, and then point to subsidiary tables for | |
4442 | the longer codes. The time it costs to decode the longer codes is | |
4443 | then traded against the time it takes to make longer tables. | |
4444 | ||
4445 | The results of this trade-off are in the variables lbits and dbits | |
4446 | below. lbits is the number of bits the first level table for literal/ | |
4447 | length codes can decode in one step, and dbits is the same thing for | |
4448 | the distance codes. Subsequent tables are also less than or equal to | |
4449 | those sizes. These values may be adjusted either when all of the | |
4450 | codes are shorter than that, in which case the longest code length in | |
4451 | bits is used, or when the shortest code is *longer* than the requested | |
4452 | table size, in which case the length of the shortest code in bits is | |
4453 | used. | |
4454 | ||
4455 | There are two different values for the two tables, since they code a | |
4456 | different number of possibilities each. The literal/length table | |
4457 | codes 286 possible values, or in a flat code, a little over eight | |
4458 | bits. The distance table codes 30 possible values, or a little less | |
4459 | than five bits, flat. The optimum values for speed end up being | |
4460 | about one bit more than those, so lbits is 8+1 and dbits is 5+1. | |
4461 | The optimum values may differ though from machine to machine, and | |
4462 | possibly even between compilers. Your mileage may vary. | |
4463 | */ | |
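/*
 * Editorial sketch (guarded out of the build): the single-level "flat
 * table" case described above, for a toy alphabet A,B,C with code
 * lengths 1,2,2 (canonical codes A=0, B=10, C=11).  The table is
 * indexed by the low two bits of the bit buffer, first bit received in
 * the lowest position, so the 1-bit code A fills two of the four slots.
 * huft_build generalizes this and chains sub-tables for longer codes.
 */
#if 0
#include <stdio.h>

struct entry { char symbol; unsigned bits; };   /* value, bits to drop */

int main(void)
{
    /* index:                    00       01       10       11        */
    struct entry table[4] = { {'A',1}, {'B',2}, {'A',1}, {'C',2} };

    unsigned long hold = 0x6;   /* queued bits 0,1,1: decodes A then C */
    unsigned bits = 3;

    while (bits) {
        struct entry e = table[hold & 3];
        printf("%c ", e.symbol);
        hold >>= e.bits;        /* the DUMPBITS step */
        bits -= e.bits;
    }
    putchar('\n');
    return 0;
}
#endif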
4464 | ||
4465 | ||
4466 | /* If BMAX needs to be larger than 16, then h and x[] should be uLong. */ | |
4467 | #define BMAX 15 /* maximum bit length of any code */ | |
1c79356b | 4468 | |
9bccf70c | 4469 | local int huft_build(b, n, s, d, e, t, m, hp, hn, v) |
1c79356b | 4470 | uIntf *b; /* code lengths in bits (all assumed <= BMAX) */ |
9bccf70c | 4471 | uInt n; /* number of codes (assumed <= 288) */ |
1c79356b A |
4472 | uInt s; /* number of simple-valued codes (0..s-1) */ |
4473 | const uIntf *d; /* list of base values for non-simple codes */ | |
4474 | const uIntf *e; /* list of extra bits for non-simple codes */ | |
4475 | inflate_huft * FAR *t; /* result: starting table */ | |
4476 | uIntf *m; /* maximum lookup bits, returns actual */ | |
9bccf70c A |
4477 | inflate_huft *hp; /* space for trees */ |
4478 | uInt *hn; /* hufts used in space */ | |
4479 | uIntf *v; /* working area: values in order of bit length */ | |
1c79356b A |
4480 | /* Given a list of code lengths and a maximum table size, make a set of |
4481 | tables to decode that set of codes. Return Z_OK on success, Z_BUF_ERROR | |
4482 | if the given code set is incomplete (the tables are still built in this | |
9bccf70c | 4483 | case), or Z_DATA_ERROR if the input is invalid. */ |
1c79356b A |
4484 | { |
4485 | ||
4486 | uInt a; /* counter for codes of length k */ | |
4487 | uInt c[BMAX+1]; /* bit length count table */ | |
4488 | uInt f; /* i repeats in table every f entries */ | |
4489 | int g; /* maximum code length */ | |
4490 | int h; /* table level */ | |
4491 | register uInt i; /* counter, current code */ | |
4492 | register uInt j; /* counter */ | |
4493 | register int k; /* number of bits in current code */ | |
4494 | int l; /* bits per table (returned in m) */ | |
9bccf70c | 4495 | uInt mask; /* (1 << w) - 1, to avoid cc -O bug on HP */ |
1c79356b A |
4496 | register uIntf *p; /* pointer into c[], b[], or v[] */ |
4497 | inflate_huft *q; /* points to current table */ | |
4498 | struct inflate_huft_s r; /* table entry for structure assignment */ | |
4499 | inflate_huft *u[BMAX]; /* table stack */ | |
1c79356b A |
4500 | register int w; /* bits before this table == (l * h) */ |
4501 | uInt x[BMAX+1]; /* bit offsets, then code stack */ | |
4502 | uIntf *xp; /* pointer into x */ | |
4503 | int y; /* number of dummy codes added */ | |
4504 | uInt z; /* number of entries in current table */ | |
4505 | ||
4506 | ||
4507 | /* Generate counts for each bit length */ | |
4508 | p = c; | |
4509 | #define C0 *p++ = 0; | |
4510 | #define C2 C0 C0 C0 C0 | |
4511 | #define C4 C2 C2 C2 C2 | |
4512 | C4 /* clear c[]--assume BMAX+1 is 16 */ | |
4513 | p = b; i = n; | |
4514 | do { | |
4515 | c[*p++]++; /* assume all entries <= BMAX */ | |
4516 | } while (--i); | |
4517 | if (c[0] == n) /* null input--all zero length codes */ | |
4518 | { | |
4519 | *t = (inflate_huft *)Z_NULL; | |
4520 | *m = 0; | |
4521 | return Z_OK; | |
4522 | } | |
4523 | ||
4524 | ||
4525 | /* Find minimum and maximum length, bound *m by those */ | |
4526 | l = *m; | |
4527 | for (j = 1; j <= BMAX; j++) | |
4528 | if (c[j]) | |
4529 | break; | |
4530 | k = j; /* minimum code length */ | |
4531 | if ((uInt)l < j) | |
4532 | l = j; | |
4533 | for (i = BMAX; i; i--) | |
4534 | if (c[i]) | |
4535 | break; | |
4536 | g = i; /* maximum code length */ | |
4537 | if ((uInt)l > i) | |
4538 | l = i; | |
4539 | *m = l; | |
4540 | ||
4541 | ||
4542 | /* Adjust last length count to fill out codes, if needed */ | |
4543 | for (y = 1 << j; j < i; j++, y <<= 1) | |
4544 | if ((y -= c[j]) < 0) | |
4545 | return Z_DATA_ERROR; | |
4546 | if ((y -= c[i]) < 0) | |
4547 | return Z_DATA_ERROR; | |
4548 | c[i] += y; | |
4549 | ||
4550 | ||
4551 | /* Generate starting offsets into the value table for each length */ | |
4552 | x[1] = j = 0; | |
4553 | p = c + 1; xp = x + 2; | |
4554 | while (--i) { /* note that i == g from above */ | |
4555 | *xp++ = (j += *p++); | |
4556 | } | |
4557 | ||
4558 | ||
4559 | /* Make a table of values in order of bit lengths */ | |
4560 | p = b; i = 0; | |
4561 | do { | |
4562 | if ((j = *p++) != 0) | |
4563 | v[x[j]++] = i; | |
4564 | } while (++i < n); | |
9bccf70c | 4565 | n = x[g]; /* set n to length of v */ |
1c79356b A |
4566 | |
4567 | ||
4568 | /* Generate the Huffman codes and for each, make the table entries */ | |
4569 | x[0] = i = 0; /* first Huffman code is zero */ | |
4570 | p = v; /* grab values in bit order */ | |
4571 | h = -1; /* no tables yet--level -1 */ | |
4572 | w = -l; /* bits decoded == (l * h) */ | |
4573 | u[0] = (inflate_huft *)Z_NULL; /* just to keep compilers happy */ | |
4574 | q = (inflate_huft *)Z_NULL; /* ditto */ | |
4575 | z = 0; /* ditto */ | |
4576 | ||
4577 | /* go through the bit lengths (k already is bits in shortest code) */ | |
4578 | for (; k <= g; k++) | |
4579 | { | |
4580 | a = c[k]; | |
4581 | while (a--) | |
4582 | { | |
4583 | /* here i is the Huffman code of length k bits for value *p */ | |
4584 | /* make tables up to required level */ | |
4585 | while (k > w + l) | |
4586 | { | |
4587 | h++; | |
4588 | w += l; /* previous table always l bits */ | |
4589 | ||
4590 | /* compute minimum size table less than or equal to l bits */ | |
4591 | z = g - w; | |
4592 | z = z > (uInt)l ? l : z; /* table size upper limit */ | |
4593 | if ((f = 1 << (j = k - w)) > a + 1) /* try a k-w bit table */ | |
4594 | { /* too few codes for k-w bit table */ | |
4595 | f -= a + 1; /* deduct codes from patterns left */ | |
4596 | xp = c + k; | |
4597 | if (j < z) | |
4598 | while (++j < z) /* try smaller tables up to z bits */ | |
4599 | { | |
4600 | if ((f <<= 1) <= *++xp) | |
4601 | break; /* enough codes to use up j bits */ | |
4602 | f -= *xp; /* else deduct codes from patterns */ | |
4603 | } | |
4604 | } | |
4605 | z = 1 << j; /* table entries for j-bit table */ | |
4606 | ||
9bccf70c A |
4607 | /* allocate new table */ |
4608 | if (*hn + z > MANY) /* (note: doesn't matter for fixed) */ | |
4609 | return Z_DATA_ERROR; /* overflow of MANY */ | |
4610 | u[h] = q = hp + *hn; | |
4611 | *hn += z; | |
1c79356b A |
4612 | |
4613 | /* connect to last table, if there is one */ | |
4614 | if (h) | |
4615 | { | |
4616 | x[h] = i; /* save pattern for backing up */ | |
4617 | r.bits = (Byte)l; /* bits to dump before this table */ | |
4618 | r.exop = (Byte)j; /* bits in this table */ | |
9bccf70c A |
4619 | j = i >> (w - l); |
4620 | r.base = (uInt)(q - u[h-1] - j); /* offset to this table */ | |
1c79356b A |
4621 | u[h-1][j] = r; /* connect to last table */ |
4622 | } | |
9bccf70c A |
4623 | else |
4624 | *t = q; /* first table is returned result */ | |
1c79356b A |
4625 | } |
4626 | ||
4627 | /* set up table entry in r */ | |
4628 | r.bits = (Byte)(k - w); | |
4629 | if (p >= v + n) | |
4630 | r.exop = 128 + 64; /* out of values--invalid code */ | |
4631 | else if (*p < s) | |
4632 | { | |
4633 | r.exop = (Byte)(*p < 256 ? 0 : 32 + 64); /* 256 is end-of-block */ | |
4634 | r.base = *p++; /* simple code is just the value */ | |
4635 | } | |
4636 | else | |
4637 | { | |
4638 | r.exop = (Byte)(e[*p - s] + 16 + 64);/* non-simple--look up in lists */ | |
4639 | r.base = d[*p++ - s]; | |
4640 | } | |
4641 | ||
4642 | /* fill code-like entries with r */ | |
4643 | f = 1 << (k - w); | |
4644 | for (j = i >> w; j < z; j += f) | |
4645 | q[j] = r; | |
4646 | ||
4647 | /* backwards increment the k-bit code i */ | |
4648 | for (j = 1 << (k - 1); i & j; j >>= 1) | |
4649 | i ^= j; | |
4650 | i ^= j; | |
4651 | ||
4652 | /* backup over finished tables */ | |
9bccf70c A |
4653 | mask = (1 << w) - 1; /* needed on HP, cc -O bug */ |
4654 | while ((i & mask) != x[h]) | |
1c79356b A |
4655 | { |
4656 | h--; /* don't need to update q */ | |
4657 | w -= l; | |
9bccf70c | 4658 | mask = (1 << w) - 1; |
1c79356b A |
4659 | } |
4660 | } | |
4661 | } | |
4662 | ||
4663 | ||
4664 | /* Return Z_BUF_ERROR if we were given an incomplete table */ | |
4665 | return y != 0 && g != 1 ? Z_BUF_ERROR : Z_OK; | |
4666 | } | |
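/*
 * Editorial sketch (guarded out of the build): the first steps of
 * huft_build above in isolation -- count the codes of each bit length,
 * turn the counts into starting offsets, then drop the symbol values
 * into one array sorted by code length.  The six example lengths form a
 * complete code and are illustrative only.
 */
#if 0
#include <stdio.h>

#define MAXBITS 15

int main(void)
{
    const unsigned lens[6] = { 3, 3, 3, 3, 2, 2 };  /* code length per symbol */
    unsigned count[MAXBITS + 1] = { 0 };
    unsigned offs[MAXBITS + 2] = { 0 };
    unsigned sorted[6];
    unsigned i;

    for (i = 0; i < 6; i++)             /* like the c[*p++]++ loop            */
        count[lens[i]]++;
    for (i = 1; i <= MAXBITS; i++)      /* like building the x[] offsets      */
        offs[i + 1] = offs[i] + count[i];
    for (i = 0; i < 6; i++)             /* like filling v[] with values       */
        sorted[offs[lens[i]]++] = i;

    for (i = 0; i < 6; i++)             /* prints 4 5 0 1 2 3: shortest first */
        printf("%u ", sorted[i]);
    putchar('\n');
    return 0;
}
#endif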
4667 | ||
4668 | ||
9bccf70c | 4669 | int inflate_trees_bits(c, bb, tb, hp, z) |
1c79356b A |
4670 | uIntf *c; /* 19 code lengths */ |
4671 | uIntf *bb; /* bits tree desired/actual depth */ | |
4672 | inflate_huft * FAR *tb; /* bits tree result */ | |
9bccf70c A |
4673 | inflate_huft *hp; /* space for trees */ |
4674 | z_streamp z; /* for messages */ | |
1c79356b A |
4675 | { |
4676 | int r; | |
9bccf70c A |
4677 | uInt hn = 0; /* hufts used in space */ |
4678 | uIntf *v; /* work area for huft_build */ | |
1c79356b | 4679 | |
9bccf70c A |
4680 | if ((v = (uIntf*)ZALLOC(z, 19, sizeof(uInt))) == Z_NULL) |
4681 | return Z_MEM_ERROR; | |
4682 | r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL, | |
4683 | tb, bb, hp, &hn, v); | |
1c79356b A |
4684 | if (r == Z_DATA_ERROR) |
4685 | z->msg = (char*)"oversubscribed dynamic bit lengths tree"; | |
4686 | else if (r == Z_BUF_ERROR || *bb == 0) | |
4687 | { | |
1c79356b A |
4688 | z->msg = (char*)"incomplete dynamic bit lengths tree"; |
4689 | r = Z_DATA_ERROR; | |
4690 | } | |
9bccf70c | 4691 | ZFREE(z, v); |
1c79356b A |
4692 | return r; |
4693 | } | |
4694 | ||
4695 | ||
9bccf70c | 4696 | int inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, hp, z) |
1c79356b A |
4697 | uInt nl; /* number of literal/length codes */ |
4698 | uInt nd; /* number of distance codes */ | |
4699 | uIntf *c; /* that many (total) code lengths */ | |
4700 | uIntf *bl; /* literal desired/actual bit depth */ | |
4701 | uIntf *bd; /* distance desired/actual bit depth */ | |
4702 | inflate_huft * FAR *tl; /* literal/length tree result */ | |
4703 | inflate_huft * FAR *td; /* distance tree result */ | |
9bccf70c A |
4704 | inflate_huft *hp; /* space for trees */ |
4705 | z_streamp z; /* for messages */ | |
1c79356b A |
4706 | { |
4707 | int r; | |
9bccf70c A |
4708 | uInt hn = 0; /* hufts used in space */ |
4709 | uIntf *v; /* work area for huft_build */ | |
4710 | ||
4711 | /* allocate work area */ | |
4712 | if ((v = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL) | |
4713 | return Z_MEM_ERROR; | |
1c79356b A |
4714 | |
4715 | /* build literal/length tree */ | |
9bccf70c | 4716 | r = huft_build(c, nl, 257, cplens, cplext, tl, bl, hp, &hn, v); |
1c79356b A |
4717 | if (r != Z_OK || *bl == 0) |
4718 | { | |
4719 | if (r == Z_DATA_ERROR) | |
4720 | z->msg = (char*)"oversubscribed literal/length tree"; | |
4721 | else if (r != Z_MEM_ERROR) | |
4722 | { | |
1c79356b A |
4723 | z->msg = (char*)"incomplete literal/length tree"; |
4724 | r = Z_DATA_ERROR; | |
4725 | } | |
9bccf70c | 4726 | ZFREE(z, v); |
1c79356b A |
4727 | return r; |
4728 | } | |
4729 | ||
4730 | /* build distance tree */ | |
9bccf70c | 4731 | r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, hp, &hn, v); |
1c79356b A |
4732 | if (r != Z_OK || (*bd == 0 && nl > 257)) |
4733 | { | |
4734 | if (r == Z_DATA_ERROR) | |
4735 | z->msg = (char*)"oversubscribed distance tree"; | |
4736 | else if (r == Z_BUF_ERROR) { | |
4737 | #ifdef PKZIP_BUG_WORKAROUND | |
4738 | r = Z_OK; | |
4739 | } | |
4740 | #else | |
1c79356b A |
4741 | z->msg = (char*)"incomplete distance tree"; |
4742 | r = Z_DATA_ERROR; | |
4743 | } | |
4744 | else if (r != Z_MEM_ERROR) | |
4745 | { | |
4746 | z->msg = (char*)"empty distance tree with lengths"; | |
4747 | r = Z_DATA_ERROR; | |
4748 | } | |
9bccf70c | 4749 | ZFREE(z, v); |
1c79356b A |
4750 | return r; |
4751 | #endif | |
4752 | } | |
4753 | ||
4754 | /* done */ | |
9bccf70c | 4755 | ZFREE(z, v); |
1c79356b A |
4756 | return Z_OK; |
4757 | } | |
4758 | ||
4759 | ||
4760 | /* build fixed tables only once--keep them here */ | |
9bccf70c | 4761 | #ifdef BUILDFIXED |
1c79356b | 4762 | local int fixed_built = 0; |
9bccf70c | 4763 | #define FIXEDH 544 /* number of hufts used by fixed tables */ |
55e303ae | 4764 | local inflate_huft *fixed_mem = NULL; |
1c79356b A |
4765 | local uInt fixed_bl; |
4766 | local uInt fixed_bd; | |
4767 | local inflate_huft *fixed_tl; | |
4768 | local inflate_huft *fixed_td; | |
9bccf70c A |
4769 | #else |
4770 | /* +++ inffixed.h */ | |
4771 | /* inffixed.h -- table for decoding fixed codes | |
4772 | * Generated automatically by the maketree.c program | |
4773 | */ | |
1c79356b | 4774 | |
9bccf70c A |
4775 | /* WARNING: this file should *not* be used by applications. It is |
4776 | part of the implementation of the compression library and is | |
4777 | subject to change. Applications should only use zlib.h. | |
4778 | */ | |
1c79356b | 4779 | |
9bccf70c A |
4780 | local uInt fixed_bl = 9; |
4781 | local uInt fixed_bd = 5; | |
4782 | local inflate_huft fixed_tl[] = { | |
4783 | {{{96,7}},256}, {{{0,8}},80}, {{{0,8}},16}, {{{84,8}},115}, | |
4784 | {{{82,7}},31}, {{{0,8}},112}, {{{0,8}},48}, {{{0,9}},192}, | |
4785 | {{{80,7}},10}, {{{0,8}},96}, {{{0,8}},32}, {{{0,9}},160}, | |
4786 | {{{0,8}},0}, {{{0,8}},128}, {{{0,8}},64}, {{{0,9}},224}, | |
4787 | {{{80,7}},6}, {{{0,8}},88}, {{{0,8}},24}, {{{0,9}},144}, | |
4788 | {{{83,7}},59}, {{{0,8}},120}, {{{0,8}},56}, {{{0,9}},208}, | |
4789 | {{{81,7}},17}, {{{0,8}},104}, {{{0,8}},40}, {{{0,9}},176}, | |
4790 | {{{0,8}},8}, {{{0,8}},136}, {{{0,8}},72}, {{{0,9}},240}, | |
4791 | {{{80,7}},4}, {{{0,8}},84}, {{{0,8}},20}, {{{85,8}},227}, | |
4792 | {{{83,7}},43}, {{{0,8}},116}, {{{0,8}},52}, {{{0,9}},200}, | |
4793 | {{{81,7}},13}, {{{0,8}},100}, {{{0,8}},36}, {{{0,9}},168}, | |
4794 | {{{0,8}},4}, {{{0,8}},132}, {{{0,8}},68}, {{{0,9}},232}, | |
4795 | {{{80,7}},8}, {{{0,8}},92}, {{{0,8}},28}, {{{0,9}},152}, | |
4796 | {{{84,7}},83}, {{{0,8}},124}, {{{0,8}},60}, {{{0,9}},216}, | |
4797 | {{{82,7}},23}, {{{0,8}},108}, {{{0,8}},44}, {{{0,9}},184}, | |
4798 | {{{0,8}},12}, {{{0,8}},140}, {{{0,8}},76}, {{{0,9}},248}, | |
4799 | {{{80,7}},3}, {{{0,8}},82}, {{{0,8}},18}, {{{85,8}},163}, | |
4800 | {{{83,7}},35}, {{{0,8}},114}, {{{0,8}},50}, {{{0,9}},196}, | |
4801 | {{{81,7}},11}, {{{0,8}},98}, {{{0,8}},34}, {{{0,9}},164}, | |
4802 | {{{0,8}},2}, {{{0,8}},130}, {{{0,8}},66}, {{{0,9}},228}, | |
4803 | {{{80,7}},7}, {{{0,8}},90}, {{{0,8}},26}, {{{0,9}},148}, | |
4804 | {{{84,7}},67}, {{{0,8}},122}, {{{0,8}},58}, {{{0,9}},212}, | |
4805 | {{{82,7}},19}, {{{0,8}},106}, {{{0,8}},42}, {{{0,9}},180}, | |
4806 | {{{0,8}},10}, {{{0,8}},138}, {{{0,8}},74}, {{{0,9}},244}, | |
4807 | {{{80,7}},5}, {{{0,8}},86}, {{{0,8}},22}, {{{192,8}},0}, | |
4808 | {{{83,7}},51}, {{{0,8}},118}, {{{0,8}},54}, {{{0,9}},204}, | |
4809 | {{{81,7}},15}, {{{0,8}},102}, {{{0,8}},38}, {{{0,9}},172}, | |
4810 | {{{0,8}},6}, {{{0,8}},134}, {{{0,8}},70}, {{{0,9}},236}, | |
4811 | {{{80,7}},9}, {{{0,8}},94}, {{{0,8}},30}, {{{0,9}},156}, | |
4812 | {{{84,7}},99}, {{{0,8}},126}, {{{0,8}},62}, {{{0,9}},220}, | |
4813 | {{{82,7}},27}, {{{0,8}},110}, {{{0,8}},46}, {{{0,9}},188}, | |
4814 | {{{0,8}},14}, {{{0,8}},142}, {{{0,8}},78}, {{{0,9}},252}, | |
4815 | {{{96,7}},256}, {{{0,8}},81}, {{{0,8}},17}, {{{85,8}},131}, | |
4816 | {{{82,7}},31}, {{{0,8}},113}, {{{0,8}},49}, {{{0,9}},194}, | |
4817 | {{{80,7}},10}, {{{0,8}},97}, {{{0,8}},33}, {{{0,9}},162}, | |
4818 | {{{0,8}},1}, {{{0,8}},129}, {{{0,8}},65}, {{{0,9}},226}, | |
4819 | {{{80,7}},6}, {{{0,8}},89}, {{{0,8}},25}, {{{0,9}},146}, | |
4820 | {{{83,7}},59}, {{{0,8}},121}, {{{0,8}},57}, {{{0,9}},210}, | |
4821 | {{{81,7}},17}, {{{0,8}},105}, {{{0,8}},41}, {{{0,9}},178}, | |
4822 | {{{0,8}},9}, {{{0,8}},137}, {{{0,8}},73}, {{{0,9}},242}, | |
4823 | {{{80,7}},4}, {{{0,8}},85}, {{{0,8}},21}, {{{80,8}},258}, | |
4824 | {{{83,7}},43}, {{{0,8}},117}, {{{0,8}},53}, {{{0,9}},202}, | |
4825 | {{{81,7}},13}, {{{0,8}},101}, {{{0,8}},37}, {{{0,9}},170}, | |
4826 | {{{0,8}},5}, {{{0,8}},133}, {{{0,8}},69}, {{{0,9}},234}, | |
4827 | {{{80,7}},8}, {{{0,8}},93}, {{{0,8}},29}, {{{0,9}},154}, | |
4828 | {{{84,7}},83}, {{{0,8}},125}, {{{0,8}},61}, {{{0,9}},218}, | |
4829 | {{{82,7}},23}, {{{0,8}},109}, {{{0,8}},45}, {{{0,9}},186}, | |
4830 | {{{0,8}},13}, {{{0,8}},141}, {{{0,8}},77}, {{{0,9}},250}, | |
4831 | {{{80,7}},3}, {{{0,8}},83}, {{{0,8}},19}, {{{85,8}},195}, | |
4832 | {{{83,7}},35}, {{{0,8}},115}, {{{0,8}},51}, {{{0,9}},198}, | |
4833 | {{{81,7}},11}, {{{0,8}},99}, {{{0,8}},35}, {{{0,9}},166}, | |
4834 | {{{0,8}},3}, {{{0,8}},131}, {{{0,8}},67}, {{{0,9}},230}, | |
4835 | {{{80,7}},7}, {{{0,8}},91}, {{{0,8}},27}, {{{0,9}},150}, | |
4836 | {{{84,7}},67}, {{{0,8}},123}, {{{0,8}},59}, {{{0,9}},214}, | |
4837 | {{{82,7}},19}, {{{0,8}},107}, {{{0,8}},43}, {{{0,9}},182}, | |
4838 | {{{0,8}},11}, {{{0,8}},139}, {{{0,8}},75}, {{{0,9}},246}, | |
4839 | {{{80,7}},5}, {{{0,8}},87}, {{{0,8}},23}, {{{192,8}},0}, | |
4840 | {{{83,7}},51}, {{{0,8}},119}, {{{0,8}},55}, {{{0,9}},206}, | |
4841 | {{{81,7}},15}, {{{0,8}},103}, {{{0,8}},39}, {{{0,9}},174}, | |
4842 | {{{0,8}},7}, {{{0,8}},135}, {{{0,8}},71}, {{{0,9}},238}, | |
4843 | {{{80,7}},9}, {{{0,8}},95}, {{{0,8}},31}, {{{0,9}},158}, | |
4844 | {{{84,7}},99}, {{{0,8}},127}, {{{0,8}},63}, {{{0,9}},222}, | |
4845 | {{{82,7}},27}, {{{0,8}},111}, {{{0,8}},47}, {{{0,9}},190}, | |
4846 | {{{0,8}},15}, {{{0,8}},143}, {{{0,8}},79}, {{{0,9}},254}, | |
4847 | {{{96,7}},256}, {{{0,8}},80}, {{{0,8}},16}, {{{84,8}},115}, | |
4848 | {{{82,7}},31}, {{{0,8}},112}, {{{0,8}},48}, {{{0,9}},193}, | |
4849 | {{{80,7}},10}, {{{0,8}},96}, {{{0,8}},32}, {{{0,9}},161}, | |
4850 | {{{0,8}},0}, {{{0,8}},128}, {{{0,8}},64}, {{{0,9}},225}, | |
4851 | {{{80,7}},6}, {{{0,8}},88}, {{{0,8}},24}, {{{0,9}},145}, | |
4852 | {{{83,7}},59}, {{{0,8}},120}, {{{0,8}},56}, {{{0,9}},209}, | |
4853 | {{{81,7}},17}, {{{0,8}},104}, {{{0,8}},40}, {{{0,9}},177}, | |
4854 | {{{0,8}},8}, {{{0,8}},136}, {{{0,8}},72}, {{{0,9}},241}, | |
4855 | {{{80,7}},4}, {{{0,8}},84}, {{{0,8}},20}, {{{85,8}},227}, | |
4856 | {{{83,7}},43}, {{{0,8}},116}, {{{0,8}},52}, {{{0,9}},201}, | |
4857 | {{{81,7}},13}, {{{0,8}},100}, {{{0,8}},36}, {{{0,9}},169}, | |
4858 | {{{0,8}},4}, {{{0,8}},132}, {{{0,8}},68}, {{{0,9}},233}, | |
4859 | {{{80,7}},8}, {{{0,8}},92}, {{{0,8}},28}, {{{0,9}},153}, | |
4860 | {{{84,7}},83}, {{{0,8}},124}, {{{0,8}},60}, {{{0,9}},217}, | |
4861 | {{{82,7}},23}, {{{0,8}},108}, {{{0,8}},44}, {{{0,9}},185}, | |
4862 | {{{0,8}},12}, {{{0,8}},140}, {{{0,8}},76}, {{{0,9}},249}, | |
4863 | {{{80,7}},3}, {{{0,8}},82}, {{{0,8}},18}, {{{85,8}},163}, | |
4864 | {{{83,7}},35}, {{{0,8}},114}, {{{0,8}},50}, {{{0,9}},197}, | |
4865 | {{{81,7}},11}, {{{0,8}},98}, {{{0,8}},34}, {{{0,9}},165}, | |
4866 | {{{0,8}},2}, {{{0,8}},130}, {{{0,8}},66}, {{{0,9}},229}, | |
4867 | {{{80,7}},7}, {{{0,8}},90}, {{{0,8}},26}, {{{0,9}},149}, | |
4868 | {{{84,7}},67}, {{{0,8}},122}, {{{0,8}},58}, {{{0,9}},213}, | |
4869 | {{{82,7}},19}, {{{0,8}},106}, {{{0,8}},42}, {{{0,9}},181}, | |
4870 | {{{0,8}},10}, {{{0,8}},138}, {{{0,8}},74}, {{{0,9}},245}, | |
4871 | {{{80,7}},5}, {{{0,8}},86}, {{{0,8}},22}, {{{192,8}},0}, | |
4872 | {{{83,7}},51}, {{{0,8}},118}, {{{0,8}},54}, {{{0,9}},205}, | |
4873 | {{{81,7}},15}, {{{0,8}},102}, {{{0,8}},38}, {{{0,9}},173}, | |
4874 | {{{0,8}},6}, {{{0,8}},134}, {{{0,8}},70}, {{{0,9}},237}, | |
4875 | {{{80,7}},9}, {{{0,8}},94}, {{{0,8}},30}, {{{0,9}},157}, | |
4876 | {{{84,7}},99}, {{{0,8}},126}, {{{0,8}},62}, {{{0,9}},221}, | |
4877 | {{{82,7}},27}, {{{0,8}},110}, {{{0,8}},46}, {{{0,9}},189}, | |
4878 | {{{0,8}},14}, {{{0,8}},142}, {{{0,8}},78}, {{{0,9}},253}, | |
4879 | {{{96,7}},256}, {{{0,8}},81}, {{{0,8}},17}, {{{85,8}},131}, | |
4880 | {{{82,7}},31}, {{{0,8}},113}, {{{0,8}},49}, {{{0,9}},195}, | |
4881 | {{{80,7}},10}, {{{0,8}},97}, {{{0,8}},33}, {{{0,9}},163}, | |
4882 | {{{0,8}},1}, {{{0,8}},129}, {{{0,8}},65}, {{{0,9}},227}, | |
4883 | {{{80,7}},6}, {{{0,8}},89}, {{{0,8}},25}, {{{0,9}},147}, | |
4884 | {{{83,7}},59}, {{{0,8}},121}, {{{0,8}},57}, {{{0,9}},211}, | |
4885 | {{{81,7}},17}, {{{0,8}},105}, {{{0,8}},41}, {{{0,9}},179}, | |
4886 | {{{0,8}},9}, {{{0,8}},137}, {{{0,8}},73}, {{{0,9}},243}, | |
4887 | {{{80,7}},4}, {{{0,8}},85}, {{{0,8}},21}, {{{80,8}},258}, | |
4888 | {{{83,7}},43}, {{{0,8}},117}, {{{0,8}},53}, {{{0,9}},203}, | |
4889 | {{{81,7}},13}, {{{0,8}},101}, {{{0,8}},37}, {{{0,9}},171}, | |
4890 | {{{0,8}},5}, {{{0,8}},133}, {{{0,8}},69}, {{{0,9}},235}, | |
4891 | {{{80,7}},8}, {{{0,8}},93}, {{{0,8}},29}, {{{0,9}},155}, | |
4892 | {{{84,7}},83}, {{{0,8}},125}, {{{0,8}},61}, {{{0,9}},219}, | |
4893 | {{{82,7}},23}, {{{0,8}},109}, {{{0,8}},45}, {{{0,9}},187}, | |
4894 | {{{0,8}},13}, {{{0,8}},141}, {{{0,8}},77}, {{{0,9}},251}, | |
4895 | {{{80,7}},3}, {{{0,8}},83}, {{{0,8}},19}, {{{85,8}},195}, | |
4896 | {{{83,7}},35}, {{{0,8}},115}, {{{0,8}},51}, {{{0,9}},199}, | |
4897 | {{{81,7}},11}, {{{0,8}},99}, {{{0,8}},35}, {{{0,9}},167}, | |
4898 | {{{0,8}},3}, {{{0,8}},131}, {{{0,8}},67}, {{{0,9}},231}, | |
4899 | {{{80,7}},7}, {{{0,8}},91}, {{{0,8}},27}, {{{0,9}},151}, | |
4900 | {{{84,7}},67}, {{{0,8}},123}, {{{0,8}},59}, {{{0,9}},215}, | |
4901 | {{{82,7}},19}, {{{0,8}},107}, {{{0,8}},43}, {{{0,9}},183}, | |
4902 | {{{0,8}},11}, {{{0,8}},139}, {{{0,8}},75}, {{{0,9}},247}, | |
4903 | {{{80,7}},5}, {{{0,8}},87}, {{{0,8}},23}, {{{192,8}},0}, | |
4904 | {{{83,7}},51}, {{{0,8}},119}, {{{0,8}},55}, {{{0,9}},207}, | |
4905 | {{{81,7}},15}, {{{0,8}},103}, {{{0,8}},39}, {{{0,9}},175}, | |
4906 | {{{0,8}},7}, {{{0,8}},135}, {{{0,8}},71}, {{{0,9}},239}, | |
4907 | {{{80,7}},9}, {{{0,8}},95}, {{{0,8}},31}, {{{0,9}},159}, | |
4908 | {{{84,7}},99}, {{{0,8}},127}, {{{0,8}},63}, {{{0,9}},223}, | |
4909 | {{{82,7}},27}, {{{0,8}},111}, {{{0,8}},47}, {{{0,9}},191}, | |
4910 | {{{0,8}},15}, {{{0,8}},143}, {{{0,8}},79}, {{{0,9}},255} | |
4911 | }; | |
4912 | local inflate_huft fixed_td[] = { | |
4913 | {{{80,5}},1}, {{{87,5}},257}, {{{83,5}},17}, {{{91,5}},4097}, | |
4914 | {{{81,5}},5}, {{{89,5}},1025}, {{{85,5}},65}, {{{93,5}},16385}, | |
4915 | {{{80,5}},3}, {{{88,5}},513}, {{{84,5}},33}, {{{92,5}},8193}, | |
4916 | {{{82,5}},9}, {{{90,5}},2049}, {{{86,5}},129}, {{{192,5}},24577}, | |
4917 | {{{80,5}},2}, {{{87,5}},385}, {{{83,5}},25}, {{{91,5}},6145}, | |
4918 | {{{81,5}},7}, {{{89,5}},1537}, {{{85,5}},97}, {{{93,5}},24577}, | |
4919 | {{{80,5}},4}, {{{88,5}},769}, {{{84,5}},49}, {{{92,5}},12289}, | |
4920 | {{{82,5}},13}, {{{90,5}},3073}, {{{86,5}},193}, {{{192,5}},24577} | |
4921 | }; | |
4922 | /* --- inffixed.h */ | |
4923 | #endif | |
1c79356b A |
4924 | |
4925 | ||
9bccf70c | 4926 | int inflate_trees_fixed(bl, bd, tl, td, z) |
1c79356b A |
4927 | uIntf *bl; /* literal desired/actual bit depth */ |
4928 | uIntf *bd; /* distance desired/actual bit depth */ | |
4929 | inflate_huft * FAR *tl; /* literal/length tree result */ | |
4930 | inflate_huft * FAR *td; /* distance tree result */ | |
9bccf70c | 4931 | z_streamp z; /* for memory allocation */ |
1c79356b | 4932 | { |
9bccf70c A |
4933 | #ifdef BUILDFIXED |
4934 | /* build fixed tables if not already */ | |
1c79356b A |
4935 | if (!fixed_built) |
4936 | { | |
4937 | int k; /* temporary variable */ | |
9bccf70c A |
4938 | uInt f = 0; /* number of hufts used in fixed_mem */ |
4939 | uIntf *c; /* length list for huft_build */ | |
4940 | uIntf *v; /* work area for huft_build */ | |
4941 | ||
4942 | /* allocate memory */ | |
4943 | if ((c = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL) | |
4944 | return Z_MEM_ERROR; | |
4945 | if ((v = (uIntf*)ZALLOC(z, 288, sizeof(uInt))) == Z_NULL) | |
4946 | { | |
4947 | ZFREE(z, c); | |
4948 | return Z_MEM_ERROR; | |
4949 | } | |
55e303ae A |
4950 | |
4951 | if ((fixed_mem = (inflate_huft*)ZALLOC(z, FIXEDH, sizeof(inflate_huft))) == Z_NULL) | |
4952 | { | |
4953 | ZFREE(z, c); | |
4954 | ZFREE(z, v); | |
4955 | return Z_MEM_ERROR; | |
4956 | } | |
1c79356b A |
4957 | |
4958 | /* literal table */ | |
4959 | for (k = 0; k < 144; k++) | |
4960 | c[k] = 8; | |
4961 | for (; k < 256; k++) | |
4962 | c[k] = 9; | |
4963 | for (; k < 280; k++) | |
4964 | c[k] = 7; | |
4965 | for (; k < 288; k++) | |
4966 | c[k] = 8; | |
9bccf70c A |
4967 | fixed_bl = 9; |
4968 | huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl, | |
4969 | fixed_mem, &f, v); | |
1c79356b A |
4970 | |
4971 | /* distance table */ | |
4972 | for (k = 0; k < 30; k++) | |
4973 | c[k] = 5; | |
4974 | fixed_bd = 5; | |
9bccf70c A |
4975 | huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd, |
4976 | fixed_mem, &f, v); | |
1c79356b A |
4977 | |
4978 | /* done */ | |
9bccf70c A |
4979 | ZFREE(z, v); |
4980 | ZFREE(z, c); | |
1c79356b A |
4981 | fixed_built = 1; |
4982 | } | |
9bccf70c | 4983 | #endif |
1c79356b A |
4984 | *bl = fixed_bl; |
4985 | *bd = fixed_bd; | |
4986 | *tl = fixed_tl; | |
4987 | *td = fixed_td; | |
4988 | return Z_OK; | |
4989 | } | |
1c79356b A |
4990 | /* --- inftrees.c */ |
4991 | ||
4992 | /* +++ infcodes.c */ | |
4993 | /* infcodes.c -- process literals and length/distance pairs | |
9bccf70c | 4994 | * Copyright (C) 1995-2002 Mark Adler |
1c79356b A |
4995 | * For conditions of distribution and use, see copyright notice in zlib.h |
4996 | */ | |
4997 | ||
4998 | /* #include "zutil.h" */ | |
4999 | /* #include "inftrees.h" */ | |
5000 | /* #include "infblock.h" */ | |
5001 | /* #include "infcodes.h" */ | |
5002 | /* #include "infutil.h" */ | |
5003 | ||
5004 | /* +++ inffast.h */ | |
5005 | /* inffast.h -- header to use inffast.c | |
9bccf70c | 5006 | * Copyright (C) 1995-2002 Mark Adler |
1c79356b A |
5007 | * For conditions of distribution and use, see copyright notice in zlib.h |
5008 | */ | |
5009 | ||
5010 | /* WARNING: this file should *not* be used by applications. It is | |
5011 | part of the implementation of the compression library and is | |
5012 | subject to change. Applications should only use zlib.h. | |
5013 | */ | |
5014 | ||
5015 | extern int inflate_fast OF(( | |
5016 | uInt, | |
5017 | uInt, | |
5018 | inflate_huft *, | |
5019 | inflate_huft *, | |
5020 | inflate_blocks_statef *, | |
5021 | z_streamp )); | |
5022 | /* --- inffast.h */ | |
5023 | ||
5024 | /* simplify the use of the inflate_huft type with some defines */ | |
1c79356b A |
5025 | #define exop word.what.Exop |
5026 | #define bits word.what.Bits | |
5027 | ||
9bccf70c | 5028 | typedef enum { /* waiting for "i:"=input, "o:"=output, "x:"=nothing */ |
1c79356b A |
5029 | START, /* x: set up for LEN */ |
5030 | LEN, /* i: get length/literal/eob next */ | |
5031 | LENEXT, /* i: getting length extra (have base) */ | |
5032 | DIST, /* i: get distance next */ | |
5033 | DISTEXT, /* i: getting distance extra */ | |
5034 | COPY, /* o: copying bytes in window, waiting for space */ | |
5035 | LIT, /* o: got literal, waiting for output space */ | |
5036 | WASH, /* o: got eob, possibly still output waiting */ | |
5037 | END, /* x: got eob and all data flushed */ | |
5038 | BADCODE} /* x: got error */ | |
9bccf70c A |
5039 | inflate_codes_mode; |
5040 | ||
5041 | /* inflate codes private state */ | |
5042 | struct inflate_codes_state { | |
5043 | ||
5044 | /* mode */ | |
5045 | inflate_codes_mode mode; /* current inflate_codes mode */ | |
1c79356b A |
5046 | |
5047 | /* mode dependent information */ | |
5048 | uInt len; | |
5049 | union { | |
5050 | struct { | |
5051 | inflate_huft *tree; /* pointer into tree */ | |
5052 | uInt need; /* bits needed */ | |
5053 | } code; /* if LEN or DIST, where in tree */ | |
5054 | uInt lit; /* if LIT, literal */ | |
5055 | struct { | |
5056 | uInt get; /* bits to get for extra */ | |
5057 | uInt dist; /* distance back to copy from */ | |
5058 | } copy; /* if EXT or COPY, where and how much */ | |
5059 | } sub; /* submode */ | |
5060 | ||
5061 | /* mode independent information */ | |
5062 | Byte lbits; /* ltree bits decoded per branch */ | |
5063 | Byte dbits; /* dtree bits decoded per branch */ | |
5064 | inflate_huft *ltree; /* literal/length/eob tree */ | |
5065 | inflate_huft *dtree; /* distance tree */ | |
5066 | ||
5067 | }; | |
5068 | ||
5069 | ||
5070 | inflate_codes_statef *inflate_codes_new(bl, bd, tl, td, z) | |
5071 | uInt bl, bd; | |
5072 | inflate_huft *tl; | |
5073 | inflate_huft *td; /* need separate declaration for Borland C++ */ | |
5074 | z_streamp z; | |
5075 | { | |
5076 | inflate_codes_statef *c; | |
5077 | ||
5078 | if ((c = (inflate_codes_statef *) | |
5079 | ZALLOC(z,1,sizeof(struct inflate_codes_state))) != Z_NULL) | |
5080 | { | |
5081 | c->mode = START; | |
5082 | c->lbits = (Byte)bl; | |
5083 | c->dbits = (Byte)bd; | |
5084 | c->ltree = tl; | |
5085 | c->dtree = td; | |
5086 | Tracev((stderr, "inflate: codes new\n")); | |
5087 | } | |
5088 | return c; | |
5089 | } | |
5090 | ||
5091 | ||
5092 | int inflate_codes(s, z, r) | |
5093 | inflate_blocks_statef *s; | |
5094 | z_streamp z; | |
5095 | int r; | |
5096 | { | |
5097 | uInt j; /* temporary storage */ | |
5098 | inflate_huft *t; /* temporary pointer */ | |
5099 | uInt e; /* extra bits or operation */ | |
5100 | uLong b; /* bit buffer */ | |
5101 | uInt k; /* bits in bit buffer */ | |
5102 | Bytef *p; /* input data pointer */ | |
5103 | uInt n; /* bytes available there */ | |
5104 | Bytef *q; /* output window write pointer */ | |
5105 | uInt m; /* bytes to end of window or read pointer */ | |
5106 | Bytef *f; /* pointer to copy strings from */ | |
5107 | inflate_codes_statef *c = s->sub.decode.codes; /* codes state */ | |
5108 | ||
5109 | /* copy input/output information to locals (UPDATE macro restores) */ | |
5110 | LOAD | |
5111 | ||
5112 | /* process input and output based on current state */ | |
5113 | while (1) switch (c->mode) | |
5114 | { /* waiting for "i:"=input, "o:"=output, "x:"=nothing */ | |
5115 | case START: /* x: set up for LEN */ | |
5116 | #ifndef SLOW | |
5117 | if (m >= 258 && n >= 10) | |
5118 | { | |
5119 | UPDATE | |
5120 | r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z); | |
5121 | LOAD | |
5122 | if (r != Z_OK) | |
5123 | { | |
5124 | c->mode = r == Z_STREAM_END ? WASH : BADCODE; | |
5125 | break; | |
5126 | } | |
5127 | } | |
5128 | #endif /* !SLOW */ | |
5129 | c->sub.code.need = c->lbits; | |
5130 | c->sub.code.tree = c->ltree; | |
5131 | c->mode = LEN; | |
5132 | case LEN: /* i: get length/literal/eob next */ | |
5133 | j = c->sub.code.need; | |
5134 | NEEDBITS(j) | |
5135 | t = c->sub.code.tree + ((uInt)b & inflate_mask[j]); | |
5136 | DUMPBITS(t->bits) | |
5137 | e = (uInt)(t->exop); | |
5138 | if (e == 0) /* literal */ | |
5139 | { | |
5140 | c->sub.lit = t->base; | |
5141 | Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ? | |
5142 | "inflate: literal '%c'\n" : | |
5143 | "inflate: literal 0x%02x\n", t->base)); | |
5144 | c->mode = LIT; | |
5145 | break; | |
5146 | } | |
5147 | if (e & 16) /* length */ | |
5148 | { | |
5149 | c->sub.copy.get = e & 15; | |
5150 | c->len = t->base; | |
5151 | c->mode = LENEXT; | |
5152 | break; | |
5153 | } | |
5154 | if ((e & 64) == 0) /* next table */ | |
5155 | { | |
5156 | c->sub.code.need = e; | |
9bccf70c | 5157 | c->sub.code.tree = t + t->base; |
1c79356b A |
5158 | break; |
5159 | } | |
5160 | if (e & 32) /* end of block */ | |
5161 | { | |
5162 | Tracevv((stderr, "inflate: end of block\n")); | |
5163 | c->mode = WASH; | |
5164 | break; | |
5165 | } | |
5166 | c->mode = BADCODE; /* invalid code */ | |
5167 | z->msg = (char*)"invalid literal/length code"; | |
5168 | r = Z_DATA_ERROR; | |
5169 | LEAVE | |
5170 | case LENEXT: /* i: getting length extra (have base) */ | |
5171 | j = c->sub.copy.get; | |
5172 | NEEDBITS(j) | |
5173 | c->len += (uInt)b & inflate_mask[j]; | |
5174 | DUMPBITS(j) | |
5175 | c->sub.code.need = c->dbits; | |
5176 | c->sub.code.tree = c->dtree; | |
5177 | Tracevv((stderr, "inflate: length %u\n", c->len)); | |
5178 | c->mode = DIST; | |
5179 | case DIST: /* i: get distance next */ | |
5180 | j = c->sub.code.need; | |
5181 | NEEDBITS(j) | |
5182 | t = c->sub.code.tree + ((uInt)b & inflate_mask[j]); | |
5183 | DUMPBITS(t->bits) | |
5184 | e = (uInt)(t->exop); | |
5185 | if (e & 16) /* distance */ | |
5186 | { | |
5187 | c->sub.copy.get = e & 15; | |
5188 | c->sub.copy.dist = t->base; | |
5189 | c->mode = DISTEXT; | |
5190 | break; | |
5191 | } | |
5192 | if ((e & 64) == 0) /* next table */ | |
5193 | { | |
5194 | c->sub.code.need = e; | |
9bccf70c | 5195 | c->sub.code.tree = t + t->base; |
1c79356b A |
5196 | break; |
5197 | } | |
5198 | c->mode = BADCODE; /* invalid code */ | |
5199 | z->msg = (char*)"invalid distance code"; | |
5200 | r = Z_DATA_ERROR; | |
5201 | LEAVE | |
5202 | case DISTEXT: /* i: getting distance extra */ | |
5203 | j = c->sub.copy.get; | |
5204 | NEEDBITS(j) | |
5205 | c->sub.copy.dist += (uInt)b & inflate_mask[j]; | |
5206 | DUMPBITS(j) | |
5207 | Tracevv((stderr, "inflate: distance %u\n", c->sub.copy.dist)); | |
5208 | c->mode = COPY; | |
5209 | case COPY: /* o: copying bytes in window, waiting for space */ | |
1c79356b | 5210 | f = q - c->sub.copy.dist; |
9bccf70c A |
5211 | while (f < s->window) /* modulo window size-"while" instead */ |
5212 | f += s->end - s->window; /* of "if" handles invalid distances */ | |
1c79356b A |
5213 | while (c->len) |
5214 | { | |
5215 | NEEDOUT | |
5216 | OUTBYTE(*f++) | |
5217 | if (f == s->end) | |
5218 | f = s->window; | |
5219 | c->len--; | |
5220 | } | |
5221 | c->mode = START; | |
5222 | break; | |
5223 | case LIT: /* o: got literal, waiting for output space */ | |
5224 | NEEDOUT | |
5225 | OUTBYTE(c->sub.lit) | |
5226 | c->mode = START; | |
5227 | break; | |
5228 | case WASH: /* o: got eob, possibly more output */ | |
9bccf70c A |
5229 | if (k > 7) /* return unused byte, if any */ |
5230 | { | |
5231 | Assert(k < 16, "inflate_codes grabbed too many bytes") | |
5232 | k -= 8; | |
5233 | n++; | |
5234 | p--; /* can always return one */ | |
5235 | } | |
1c79356b A |
5236 | FLUSH |
5237 | if (s->read != s->write) | |
5238 | LEAVE | |
5239 | c->mode = END; | |
5240 | case END: | |
5241 | r = Z_STREAM_END; | |
5242 | LEAVE | |
5243 | case BADCODE: /* x: got error */ | |
5244 | r = Z_DATA_ERROR; | |
5245 | LEAVE | |
5246 | default: | |
5247 | r = Z_STREAM_ERROR; | |
5248 | LEAVE | |
5249 | } | |
9bccf70c A |
5250 | #ifdef NEED_DUMMY_RETURN |
5251 | return Z_STREAM_ERROR; /* Some dumb compilers complain without this */ | |
5252 | #endif | |
1c79356b A |
5253 | } |
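/*
 * Editorial sketch (guarded out of the build): the COPY state above in
 * miniature -- copy `len` bytes from `dist` bytes back inside a
 * circular window, one byte at a time so that overlapping copies
 * (dist < len) repeat freshly written output, wrapping both pointers at
 * the window end.  The window size and contents are made up.
 */
#if 0
#include <stdio.h>
#include <string.h>

int main(void)
{
    unsigned char window[16];
    unsigned char *end = window + sizeof(window);
    unsigned char *q = window + 3;              /* write pointer          */
    unsigned char *f;
    unsigned len = 6, dist = 3;

    memcpy(window, "abc", 3);                   /* output produced so far */

    f = q - dist;
    while (f < window)                          /* modulo the window size */
        f += end - window;
    while (len--) {
        *q++ = *f++;                            /* the OUTBYTE step       */
        if (f == end)
            f = window;
        if (q == end)
            q = window;
    }

    window[9] = '\0';
    printf("%s\n", (char *)window);             /* abcabcabc              */
    return 0;
}
#endif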
5254 | ||
5255 | ||
5256 | void inflate_codes_free(c, z) | |
5257 | inflate_codes_statef *c; | |
5258 | z_streamp z; | |
5259 | { | |
5260 | ZFREE(z, c); | |
5261 | Tracev((stderr, "inflate: codes free\n")); | |
5262 | } | |
5263 | /* --- infcodes.c */ | |
5264 | ||
5265 | /* +++ infutil.c */ | |
5266 | /* inflate_util.c -- data and routines common to blocks and codes | |
9bccf70c | 5267 | * Copyright (C) 1995-2002 Mark Adler |
1c79356b A |
5268 | * For conditions of distribution and use, see copyright notice in zlib.h |
5269 | */ | |
5270 | ||
5271 | /* #include "zutil.h" */ | |
5272 | /* #include "infblock.h" */ | |
5273 | /* #include "inftrees.h" */ | |
5274 | /* #include "infcodes.h" */ | |
5275 | /* #include "infutil.h" */ | |
5276 | ||
5277 | #ifndef NO_DUMMY_DECL | |
5278 | struct inflate_codes_state {int dummy;}; /* for buggy compilers */ | |
5279 | #endif | |
5280 | ||
5281 | /* And'ing with mask[n] masks the lower n bits */ | |
5282 | uInt inflate_mask[17] = { | |
5283 | 0x0000, | |
5284 | 0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff, | |
5285 | 0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff | |
5286 | }; | |
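/*
 * Editorial note: each inflate_mask[n] above equals (1 << n) - 1, so the
 * decode loops can mask off the low n bits of the bit buffer with a
 * simple table lookup.  A quick self-check, guarded out of the build:
 */
#if 0
#include <assert.h>

static void check_inflate_mask(void)
{
    unsigned n;
    for (n = 0; n <= 16; n++)
        assert(inflate_mask[n] == (1u << n) - 1u);
}
#endif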
5287 | ||
5288 | ||
5289 | /* copy as much as possible from the sliding window to the output area */ | |
5290 | int inflate_flush(s, z, r) | |
5291 | inflate_blocks_statef *s; | |
5292 | z_streamp z; | |
5293 | int r; | |
5294 | { | |
5295 | uInt n; | |
5296 | Bytef *p; | |
5297 | Bytef *q; | |
5298 | ||
5299 | /* local copies of source and destination pointers */ | |
5300 | p = z->next_out; | |
5301 | q = s->read; | |
5302 | ||
5303 | /* compute number of bytes to copy as far as end of window */ | |
5304 | n = (uInt)((q <= s->write ? s->write : s->end) - q); | |
5305 | if (n > z->avail_out) n = z->avail_out; | |
5306 | if (n && r == Z_BUF_ERROR) r = Z_OK; | |
5307 | ||
5308 | /* update counters */ | |
5309 | z->avail_out -= n; | |
5310 | z->total_out += n; | |
5311 | ||
5312 | /* update check information */ | |
5313 | if (s->checkfn != Z_NULL) | |
5314 | z->adler = s->check = (*s->checkfn)(s->check, q, n); | |
5315 | ||
5316 | /* copy as far as end of window */ | |
9bccf70c A |
5317 | zmemcpy(p, q, n); |
5318 | p += n; | |
1c79356b A |
5319 | q += n; |
5320 | ||
5321 | /* see if more to copy at beginning of window */ | |
5322 | if (q == s->end) | |
5323 | { | |
5324 | /* wrap pointers */ | |
5325 | q = s->window; | |
5326 | if (s->write == s->end) | |
5327 | s->write = s->window; | |
5328 | ||
5329 | /* compute bytes to copy */ | |
5330 | n = (uInt)(s->write - q); | |
5331 | if (n > z->avail_out) n = z->avail_out; | |
5332 | if (n && r == Z_BUF_ERROR) r = Z_OK; | |
5333 | ||
5334 | /* update counters */ | |
5335 | z->avail_out -= n; | |
5336 | z->total_out += n; | |
5337 | ||
5338 | /* update check information */ | |
5339 | if (s->checkfn != Z_NULL) | |
5340 | z->adler = s->check = (*s->checkfn)(s->check, q, n); | |
5341 | ||
5342 | /* copy */ | |
9bccf70c A |
5343 | zmemcpy(p, q, n); |
5344 | p += n; | |
1c79356b A |
5345 | q += n; |
5346 | } | |
5347 | ||
5348 | /* update pointers */ | |
5349 | z->next_out = p; | |
5350 | s->read = q; | |
5351 | ||
5352 | /* done */ | |
5353 | return r; | |
5354 | } | |
5355 | /* --- infutil.c */ | |
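/* A minimal sketch of the window-flush idea in inflate_flush() above
 * (illustrative only: the name ring_flush and its arguments are
 * hypothetical, and the checksum update, avail_out limiting and
 * Z_BUF_ERROR handling are omitted).  Data between the read and write
 * positions of a circular window is emitted in at most two passes, one up
 * to the end of the window and one from its start.
 */
#if 0   /* example, not compiled; memcpy() needs <string.h> outside the kernel */
static unsigned ring_flush(const unsigned char *win, unsigned size,
                           unsigned read, unsigned write,
                           unsigned char *out)
{
    unsigned n;

    if (read <= write) {                  /* valid data is contiguous */
        memcpy(out, win + read, write - read);
        n = write - read;
    } else {                              /* valid data wraps past the end */
        memcpy(out, win + read, size - read);
        n = size - read;
        memcpy(out + n, win, write);      /* continue from the window start */
        n += write;
    }
    return n;                             /* bytes written to out */
}
#endif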
5356 | ||
5357 | /* +++ inffast.c */ | |
5358 | /* inffast.c -- process literals and length/distance pairs fast | |
9bccf70c | 5359 | * Copyright (C) 1995-2002 Mark Adler |
1c79356b A |
5360 | * For conditions of distribution and use, see copyright notice in zlib.h |
5361 | */ | |
5362 | ||
5363 | /* #include "zutil.h" */ | |
5364 | /* #include "inftrees.h" */ | |
5365 | /* #include "infblock.h" */ | |
5366 | /* #include "infcodes.h" */ | |
5367 | /* #include "infutil.h" */ | |
5368 | /* #include "inffast.h" */ | |
5369 | ||
5370 | #ifndef NO_DUMMY_DECL | |
5371 | struct inflate_codes_state {int dummy;}; /* for buggy compilers */ | |
5372 | #endif | |
5373 | ||
5374 | /* simplify the use of the inflate_huft type with some defines */ | |
1c79356b A |
5375 | #define exop word.what.Exop |
5376 | #define bits word.what.Bits | |
5377 | ||
5378 | /* macros for bit input with no checking and for returning unused bytes */ | |
5379 | #define GRABBITS(j) {while(k<(j)){b|=((uLong)NEXTBYTE)<<k;k+=8;}} | |
9bccf70c | 5380 | #define UNGRAB {c=z->avail_in-n;c=(k>>3)<c?k>>3:c;n+=c;p-=c;k-=c<<3;} |
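/* Reading of the two macros above (descriptive note, not original text):
 * GRABBITS(j) tops up the bit buffer b until it holds at least j bits,
 * pulling one whole byte from the input per iteration via NEXTBYTE and
 * adding 8 to the bit count k each time.  UNGRAB undoes the over-read
 * before returning: k>>3 is the number of complete unused bytes still
 * sitting in b, clamped to the number of bytes already consumed from the
 * current input buffer (z->avail_in - n); n, p and k are then adjusted so
 * the caller sees those bytes as unconsumed.
 */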
1c79356b A |
5381 | |
5382 | /* Called with number of bytes left to write in window at least 258 | |
5383 | (the maximum string length) and number of input bytes available | |
5384 | at least ten. The ten bytes are six bytes for the longest length/ | |
5385 | distance pair plus four bytes for overloading the bit buffer. */ | |
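/* Breakdown of the "six plus four" above (derived from the deflate format,
 * not part of the original comment): a length/distance pair needs at most
 * 15 bits for the length code + 5 extra bits + 15 bits for the distance
 * code + 13 extra bits = 48 bits = 6 bytes, and GRABBITS may pull in up to
 * 4 further bytes to keep the bit buffer topped up.  258 is the maximum
 * match length the deflate format can encode.
 */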
5386 | ||
5387 | int inflate_fast(bl, bd, tl, td, s, z) | |
5388 | uInt bl, bd; | |
5389 | inflate_huft *tl; | |
5390 | inflate_huft *td; /* need separate declaration for Borland C++ */ | |
5391 | inflate_blocks_statef *s; | |
5392 | z_streamp z; | |
5393 | { | |
5394 | inflate_huft *t; /* temporary pointer */ | |
5395 | uInt e; /* extra bits or operation */ | |
5396 | uLong b; /* bit buffer */ | |
5397 | uInt k; /* bits in bit buffer */ | |
5398 | Bytef *p; /* input data pointer */ | |
5399 | uInt n; /* bytes available there */ | |
5400 | Bytef *q; /* output window write pointer */ | |
5401 | uInt m; /* bytes to end of window or read pointer */ | |
5402 | uInt ml; /* mask for literal/length tree */ | |
5403 | uInt md; /* mask for distance tree */ | |
5404 | uInt c; /* bytes to copy */ | |
5405 | uInt d; /* distance back to copy from */ | |
5406 | Bytef *r; /* copy source pointer */ | |
5407 | ||
5408 | /* load input, output, bit values */ | |
5409 | LOAD | |
5410 | ||
5411 | /* initialize masks */ | |
5412 | ml = inflate_mask[bl]; | |
5413 | md = inflate_mask[bd]; | |
5414 | ||
5415 | /* do until not enough input or output space for fast loop */ | |
5416 | do { /* assume called with m >= 258 && n >= 10 */ | |
5417 | /* get literal/length code */ | |
5418 | GRABBITS(20) /* max bits for literal/length code */ | |
5419 | if ((e = (t = tl + ((uInt)b & ml))->exop) == 0) | |
5420 | { | |
5421 | DUMPBITS(t->bits) | |
5422 | Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ? | |
5423 | "inflate: * literal '%c'\n" : | |
5424 | "inflate: * literal 0x%02x\n", t->base)); | |
5425 | *q++ = (Byte)t->base; | |
5426 | m--; | |
5427 | continue; | |
5428 | } | |
5429 | do { | |
5430 | DUMPBITS(t->bits) | |
5431 | if (e & 16) | |
5432 | { | |
5433 | /* get extra bits for length */ | |
5434 | e &= 15; | |
5435 | c = t->base + ((uInt)b & inflate_mask[e]); | |
5436 | DUMPBITS(e) | |
5437 | Tracevv((stderr, "inflate: * length %u\n", c)); | |
5438 | ||
5439 | /* decode distance base of block to copy */ | |
5440 | GRABBITS(15); /* max bits for distance code */ | |
5441 | e = (t = td + ((uInt)b & md))->exop; | |
5442 | do { | |
5443 | DUMPBITS(t->bits) | |
5444 | if (e & 16) | |
5445 | { | |
5446 | /* get extra bits to add to distance base */ | |
5447 | e &= 15; | |
5448 | GRABBITS(e) /* get extra bits (up to 13) */ | |
5449 | d = t->base + ((uInt)b & inflate_mask[e]); | |
5450 | DUMPBITS(e) | |
5451 | Tracevv((stderr, "inflate: * distance %u\n", d)); | |
5452 | ||
5453 | /* do the copy */ | |
5454 | m -= c; | |
9bccf70c A |
5455 | r = q - d; |
5456 | if (r < s->window) /* wrap if needed */ | |
1c79356b | 5457 | { |
9bccf70c A |
5458 | do { |
5459 | r += s->end - s->window; /* force pointer in window */ | |
5460 | } while (r < s->window); /* covers invalid distances */ | |
5461 | e = s->end - r; | |
5462 | if (c > e) | |
1c79356b | 5463 | { |
9bccf70c | 5464 | c -= e; /* wrapped copy */ |
1c79356b | 5465 | do { |
9bccf70c | 5466 | *q++ = *r++; |
1c79356b | 5467 | } while (--e); |
9bccf70c A |
5468 | r = s->window; |
5469 | do { | |
5470 | *q++ = *r++; | |
5471 | } while (--c); | |
5472 | } | |
5473 | else /* normal copy */ | |
5474 | { | |
5475 | *q++ = *r++; c--; | |
5476 | *q++ = *r++; c--; | |
5477 | do { | |
5478 | *q++ = *r++; | |
5479 | } while (--c); | |
1c79356b A |
5480 | } |
5481 | } | |
9bccf70c A |
5482 | else /* normal copy */ |
5483 | { | |
5484 | *q++ = *r++; c--; | |
5485 | *q++ = *r++; c--; | |
5486 | do { | |
5487 | *q++ = *r++; | |
5488 | } while (--c); | |
5489 | } | |
1c79356b A |
5490 | break; |
5491 | } | |
5492 | else if ((e & 64) == 0) | |
9bccf70c A |
5493 | { |
5494 | t += t->base; | |
5495 | e = (t += ((uInt)b & inflate_mask[e]))->exop; | |
5496 | } | |
1c79356b A |
5497 | else |
5498 | { | |
5499 | z->msg = (char*)"invalid distance code"; | |
5500 | UNGRAB | |
5501 | UPDATE | |
5502 | return Z_DATA_ERROR; | |
5503 | } | |
5504 | } while (1); | |
5505 | break; | |
5506 | } | |
5507 | if ((e & 64) == 0) | |
5508 | { | |
9bccf70c A |
5509 | t += t->base; |
5510 | if ((e = (t += ((uInt)b & inflate_mask[e]))->exop) == 0) | |
1c79356b A |
5511 | { |
5512 | DUMPBITS(t->bits) | |
5513 | Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ? | |
5514 | "inflate: * literal '%c'\n" : | |
5515 | "inflate: * literal 0x%02x\n", t->base)); | |
5516 | *q++ = (Byte)t->base; | |
5517 | m--; | |
5518 | break; | |
5519 | } | |
5520 | } | |
5521 | else if (e & 32) | |
5522 | { | |
5523 | Tracevv((stderr, "inflate: * end of block\n")); | |
5524 | UNGRAB | |
5525 | UPDATE | |
5526 | return Z_STREAM_END; | |
5527 | } | |
5528 | else | |
5529 | { | |
5530 | z->msg = (char*)"invalid literal/length code"; | |
5531 | UNGRAB | |
5532 | UPDATE | |
5533 | return Z_DATA_ERROR; | |
5534 | } | |
5535 | } while (1); | |
5536 | } while (m >= 258 && n >= 10); | |
5537 | ||
5538 | /* not enough input or output--restore pointers and return */ | |
5539 | UNGRAB | |
5540 | UPDATE | |
5541 | return Z_OK; | |
5542 | } | |
5543 | /* --- inffast.c */ | |
5544 | ||
5545 | /* +++ zutil.c */ | |
5546 | /* zutil.c -- target dependent utility functions for the compression library | |
9bccf70c | 5547 | * Copyright (C) 1995-2002 Jean-loup Gailly. |
1c79356b A |
5548 | * For conditions of distribution and use, see copyright notice in zlib.h |
5549 | */ | |
5550 | ||
4452a7af | 5551 | /* @(#) $Id: zlib.c,v 1.10.874.1 2005/06/24 01:47:11 lindak Exp $ */ |
1c79356b A |
5552 | |
5553 | /* #include "zutil.h" */ | |
5554 | ||
5555 | #ifndef NO_DUMMY_DECL | |
5556 | struct internal_state {int dummy;}; /* for buggy compilers */ | |
5557 | #endif | |
5558 | ||
5559 | #ifndef STDC | |
5560 | extern void exit OF((int)); | |
5561 | #endif | |
5562 | ||
9bccf70c | 5563 | const char *z_errmsg[10] = { |
1c79356b A |
5564 | "need dictionary", /* Z_NEED_DICT 2 */ |
5565 | "stream end", /* Z_STREAM_END 1 */ | |
5566 | "", /* Z_OK 0 */ | |
5567 | "file error", /* Z_ERRNO (-1) */ | |
5568 | "stream error", /* Z_STREAM_ERROR (-2) */ | |
5569 | "data error", /* Z_DATA_ERROR (-3) */ | |
5570 | "insufficient memory", /* Z_MEM_ERROR (-4) */ | |
5571 | "buffer error", /* Z_BUF_ERROR (-5) */ | |
5572 | "incompatible version",/* Z_VERSION_ERROR (-6) */ | |
5573 | ""}; | |
5574 | ||
5575 | ||
9bccf70c | 5576 | const char * ZEXPORT zlibVersion() |
1c79356b A |
5577 | { |
5578 | return ZLIB_VERSION; | |
5579 | } | |
5580 | ||
5581 | #ifdef DEBUG_ZLIB | |
9bccf70c A |
5582 | |
5583 | # ifndef verbose | |
5584 | # define verbose 0 | |
5585 | # endif | |
5586 | int z_verbose = verbose; | |
5587 | ||
1c79356b A |
5588 | void z_error (m) |
5589 | char *m; | |
5590 | { | |
5591 | fprintf(stderr, "%s\n", m); | |
5592 | exit(1); | |
5593 | } | |
5594 | #endif | |
5595 | ||
9bccf70c A |
5596 | /* exported to allow conversion of error code to string for compress() and |
5597 | * uncompress() | |
5598 | */ | |
5599 | const char * ZEXPORT zError(err) | |
5600 | int err; | |
5601 | { | |
5602 | return ERR_MSG(err); | |
5603 | } | |
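/* Indexing note (assumes the standard zutil.h definition of ERR_MSG(err)
 * as z_errmsg[Z_NEED_DICT-(err)]): the table above is ordered so that an
 * error code err maps to index 2 - err, e.g.
 *     zError(Z_DATA_ERROR)  ->  z_errmsg[2 - (-3)]  ->  "data error"
 */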
5604 | ||
5605 | ||
1c79356b A |
5606 | #ifndef HAVE_MEMCPY |
5607 | ||
5608 | void zmemcpy(dest, source, len) | |
5609 | Bytef* dest; | |
9bccf70c | 5610 | const Bytef* source; |
1c79356b A |
5611 | uInt len; |
5612 | { | |
5613 | if (len == 0) return; | |
5614 | do { | |
5615 | *dest++ = *source++; /* ??? to be unrolled */ | |
5616 | } while (--len != 0); | |
5617 | } | |
5618 | ||
5619 | int zmemcmp(s1, s2, len) | |
9bccf70c A |
5620 | const Bytef* s1; |
5621 | const Bytef* s2; | |
1c79356b A |
5622 | uInt len; |
5623 | { | |
5624 | uInt j; | |
5625 | ||
5626 | for (j = 0; j < len; j++) { | |
5627 | if (s1[j] != s2[j]) return 2*(s1[j] > s2[j])-1; | |
5628 | } | |
5629 | return 0; | |
5630 | } | |
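/* Note on zmemcmp() above: like memcmp(), only the sign of the result is
 * meaningful; 2*(s1[j] > s2[j]) - 1 evaluates to +1 or -1 at the first
 * mismatching byte, and 0 is returned when the buffers are equal.
 */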
5631 | ||
5632 | void zmemzero(dest, len) | |
5633 | Bytef* dest; | |
5634 | uInt len; | |
5635 | { | |
5636 | if (len == 0) return; | |
5637 | do { | |
5638 | *dest++ = 0; /* ??? to be unrolled */ | |
5639 | } while (--len != 0); | |
5640 | } | |
5641 | #endif | |
5642 | ||
5643 | #ifdef __TURBOC__ | |
5644 | #if (defined( __BORLANDC__) || !defined(SMALL_MEDIUM)) && !defined(__32BIT__) | |
5645 | /* Small and medium memory models in Turbo C are for now limited to near allocation | |
5646 | * with reduced MAX_WBITS and MAX_MEM_LEVEL | |
5647 | */ | |
5648 | # define MY_ZCALLOC | |
5649 | ||
5650 | /* Turbo C malloc() does not allow dynamic allocation of 64K bytes | |
5651 | * and farmalloc(64K) returns a pointer with an offset of 8, so we | |
5652 | * must fix the pointer. Warning: the pointer must be put back to its | |
5653 | * original form in order to free it; use zcfree(). | |
5654 | */ | |
5655 | ||
5656 | #define MAX_PTR 10 | |
5657 | /* 10*64K = 640K */ | |
5658 | ||
5659 | local int next_ptr = 0; | |
5660 | ||
5661 | typedef struct ptr_table_s { | |
5662 | voidpf org_ptr; | |
5663 | voidpf new_ptr; | |
5664 | } ptr_table; | |
5665 | ||
5666 | local ptr_table table[MAX_PTR]; | |
5667 | /* This table is used to remember the original form of pointers | |
5668 | * to large buffers (64K). Such pointers are normalized with a zero offset. | |
5669 | * Since MSDOS is not a preemptive multitasking OS, this table is not | |
5670 | * protected from concurrent access. This hack doesn't work anyway on | |
5671 | * a protected system like OS/2. Use Microsoft C instead. | |
5672 | */ | |
5673 | ||
5674 | voidpf zcalloc (voidpf opaque, unsigned items, unsigned size) | |
5675 | { | |
5676 | voidpf buf = opaque; /* just to make some compilers happy */ | |
5677 | ulg bsize = (ulg)items*size; | |
5678 | ||
5679 | /* If we allocate less than 65520 bytes, we assume that farmalloc | |
5680 | * will return a usable pointer which doesn't have to be normalized. | |
5681 | */ | |
5682 | if (bsize < 65520L) { | |
5683 | buf = farmalloc(bsize); | |
5684 | if (*(ush*)&buf != 0) return buf; | |
5685 | } else { | |
5686 | buf = farmalloc(bsize + 16L); | |
5687 | } | |
5688 | if (buf == NULL || next_ptr >= MAX_PTR) return NULL; | |
5689 | table[next_ptr].org_ptr = buf; | |
5690 | ||
5691 | /* Normalize the pointer to seg:0 */ | |
5692 | *((ush*)&buf+1) += ((ush)((uch*)buf-0) + 15) >> 4; | |
5693 | *(ush*)&buf = 0; | |
5694 | table[next_ptr++].new_ptr = buf; | |
5695 | return buf; | |
5696 | } | |
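/* Worked example of the normalization above (the segment value 0x1234 is
 * purely illustrative): if farmalloc(bsize + 16) returns the far pointer
 * 0x1234:0x0008, the code adds (0x0008 + 15) >> 4 == 1 to the segment word
 * and zeroes the offset, giving 0x1235:0x0000, the first paragraph
 * (16-byte) boundary at or after the original address; this is why 16
 * extra bytes are requested.
 */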
5697 | ||
5698 | void zcfree (voidpf opaque, voidpf ptr) | |
5699 | { | |
5700 | int n; | |
5701 | if (*(ush*)&ptr != 0) { /* object < 64K */ | |
5702 | farfree(ptr); | |
5703 | return; | |
5704 | } | |
5705 | /* Find the original pointer */ | |
5706 | for (n = 0; n < next_ptr; n++) { | |
5707 | if (ptr != table[n].new_ptr) continue; | |
5708 | ||
5709 | farfree(table[n].org_ptr); | |
5710 | while (++n < next_ptr) { | |
5711 | table[n-1] = table[n]; | |
5712 | } | |
5713 | next_ptr--; | |
5714 | return; | |
5715 | } | |
5716 | ptr = opaque; /* just to make some compilers happy */ | |
5717 | Assert(0, "zcfree: ptr not found"); | |
5718 | } | |
5719 | #endif | |
5720 | #endif /* __TURBOC__ */ | |
5721 | ||
5722 | ||
5723 | #if defined(M_I86) && !defined(__32BIT__) | |
5724 | /* Microsoft C in 16-bit mode */ | |
5725 | ||
5726 | # define MY_ZCALLOC | |
5727 | ||
9bccf70c | 5728 | #if (!defined(_MSC_VER) || (_MSC_VER <= 600)) |
1c79356b A |
5729 | # define _halloc halloc |
5730 | # define _hfree hfree | |
5731 | #endif | |
5732 | ||
5733 | voidpf zcalloc (voidpf opaque, unsigned items, unsigned size) | |
5734 | { | |
5735 | if (opaque) opaque = 0; /* to make compiler happy */ | |
5736 | return _halloc((long)items, size); | |
5737 | } | |
5738 | ||
5739 | void zcfree (voidpf opaque, voidpf ptr) | |
5740 | { | |
5741 | if (opaque) opaque = 0; /* to make compiler happy */ | |
5742 | _hfree(ptr); | |
5743 | } | |
5744 | ||
5745 | #endif /* MSC */ | |
5746 | ||
5747 | ||
5748 | #ifndef MY_ZCALLOC /* Any system without a special alloc function */ | |
5749 | ||
5750 | #ifndef STDC | |
5751 | extern voidp calloc OF((uInt items, uInt size)); | |
5752 | extern void free OF((voidpf ptr)); | |
5753 | #endif | |
5754 | ||
5755 | voidpf zcalloc (opaque, items, size) | |
5756 | voidpf opaque; | |
5757 | unsigned items; | |
5758 | unsigned size; | |
5759 | { | |
5760 | if (opaque) items += size - size; /* make compiler happy */ | |
5761 | return (voidpf)calloc(items, size); | |
5762 | } | |
5763 | ||
5764 | void zcfree (opaque, ptr) | |
5765 | voidpf opaque; | |
5766 | voidpf ptr; | |
5767 | { | |
5768 | _FREE(ptr); | |
5769 | if (opaque) return; /* make compiler happy */ | |
5770 | } | |
5771 | ||
5772 | #endif /* MY_ZCALLOC */ | |
5773 | /* --- zutil.c */ | |
5774 | ||
5775 | /* +++ adler32.c */ | |
5776 | /* adler32.c -- compute the Adler-32 checksum of a data stream | |
9bccf70c | 5777 | * Copyright (C) 1995-2002 Mark Adler |
1c79356b A |
5778 | * For conditions of distribution and use, see copyright notice in zlib.h |
5779 | */ | |
5780 | ||
4452a7af | 5781 | /* @(#) $Id: zlib.c,v 1.10.874.1 2005/06/24 01:47:11 lindak Exp $ */ |
1c79356b A |
5782 | |
5783 | /* #include "zlib.h" */ | |
5784 | ||
5785 | #define BASE 65521L /* largest prime smaller than 65536 */ | |
5786 | #define NMAX 5552 | |
5787 | /* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */ | |
5788 | ||
5789 | #define DO1(buf,i) {s1 += buf[i]; s2 += s1;} | |
5790 | #define DO2(buf,i) DO1(buf,i); DO1(buf,i+1); | |
5791 | #define DO4(buf,i) DO2(buf,i); DO2(buf,i+2); | |
5792 | #define DO8(buf,i) DO4(buf,i); DO4(buf,i+4); | |
5793 | #define DO16(buf) DO8(buf,0); DO8(buf,8); | |
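/* Note on the macros above (descriptive, not original text): DO16 unrolls
 * sixteen s1/s2 updates per loop pass, and NMAX is chosen so the sums can
 * be accumulated for up to 5552 bytes before the comparatively expensive
 * "% BASE" reductions in adler32() below are needed, without overflowing
 * an unsigned 32-bit accumulator.
 */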
5794 | ||
5795 | /* ========================================================================= */ | |
9bccf70c | 5796 | uLong ZEXPORT adler32(adler, buf, len) |
1c79356b A |
5797 | uLong adler; |
5798 | const Bytef *buf; | |
5799 | uInt len; | |
5800 | { | |
5801 | unsigned long s1 = adler & 0xffff; | |
5802 | unsigned long s2 = (adler >> 16) & 0xffff; | |
5803 | int k; | |
5804 | ||
5805 | if (buf == Z_NULL) return 1L; | |
5806 | ||
5807 | while (len > 0) { | |
5808 | k = len < NMAX ? len : NMAX; | |
5809 | len -= k; | |
5810 | while (k >= 16) { | |
5811 | DO16(buf); | |
5812 | buf += 16; | |
5813 | k -= 16; | |
5814 | } | |
5815 | if (k != 0) do { | |
5816 | s1 += *buf++; | |
5817 | s2 += s1; | |
5818 | } while (--k); | |
5819 | s1 %= BASE; | |
5820 | s2 %= BASE; | |
5821 | } | |
5822 | return (s2 << 16) | s1; | |
5823 | } | |
5824 | /* --- adler32.c */ |
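/* Worked example (computed from the definition above, starting from the
 * conventional initial value adler == 1): for the three bytes "abc"
 * (0x61 0x62 0x63),
 *     s1: 1 -> 98 -> 196 -> 295     (running byte sum, reduced mod 65521)
 *     s2: 0 -> 98 -> 294 -> 589     (running sum of s1, reduced mod 65521)
 * so adler32(1L, (const Bytef *)"abc", 3) returns
 * (589 << 16) | 295 == 0x024d0127.
 */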