source: MondoRescue/branches/2.2.5/mindi-busybox/archival/gzip.c@2142

Last change on this file since 2142 was 1765, checked in by Bruno Cornec, 16 years ago

Update to busybox 1.7.2

File size: 63.0 KB
[821]1/* vi: set sw=4 ts=4: */
2/*
3 * Gzip implementation for busybox
4 *
5 * Based on GNU gzip Copyright (C) 1992-1993 Jean-loup Gailly.
6 *
7 * Originally adjusted for busybox by Charles P. Wright <cpw@unix.asb.com>
[1765]8 * "this is a stripped down version of gzip I put into busybox, it does
9 * only standard in to standard out with -9 compression. It also requires
10 * the zcat module for some important functions."
[821]11 *
12 * Adjusted further by Erik Andersen <andersen@codepoet.org> to support
13 * files as well as stdin/stdout, and to generally behave itself wrt
14 * command line handling.
15 *
16 * Licensed under GPLv2 or later, see file LICENSE in this tarball for details.
17 */
18
[1765]19/* big objects in bss:
20 * 00000020 b bl_count
21 * 00000074 b base_length
22 * 00000078 b base_dist
23 * 00000078 b static_dtree
24 * 0000009c b bl_tree
25 * 000000f4 b dyn_dtree
26 * 00000100 b length_code
27 * 00000200 b dist_code
28 * 0000023d b depth
29 * 00000400 b flag_buf
30 * 0000047a b heap
31 * 00000480 b static_ltree
32 * 000008f4 b dyn_ltree
33 */
[821]34
[1765]35/* TODO: full support for -v for DESKTOP
36 * "/usr/bin/gzip -v a bogus aa" should say:
37a: 85.1% -- replaced with a.gz
38gzip: bogus: No such file or directory
39aa: 85.1% -- replaced with aa.gz
40*/
[821]41
[1765]42#include "libbb.h"
[821]43
44
[1765]45/* ===========================================================================
46 */
47//#define DEBUG 1
48/* Diagnostic functions */
49#ifdef DEBUG
50# define Assert(cond,msg) { if (!(cond)) bb_error_msg(msg); }
51# define Trace(x) fprintf x
52# define Tracev(x) {if (verbose) fprintf x; }
53# define Tracevv(x) {if (verbose > 1) fprintf x; }
54# define Tracec(c,x) {if (verbose && (c)) fprintf x; }
55# define Tracecv(c,x) {if (verbose > 1 && (c)) fprintf x; }
56#else
57# define Assert(cond,msg)
58# define Trace(x)
59# define Tracev(x)
60# define Tracevv(x)
61# define Tracec(c,x)
62# define Tracecv(c,x)
63#endif
[821]64
[1765]65
66/* ===========================================================================
[821]67 */
[1765]68#define SMALL_MEM
[821]69
70#ifndef INBUFSIZ
71# ifdef SMALL_MEM
72# define INBUFSIZ 0x2000 /* input buffer size */
73# else
74# define INBUFSIZ 0x8000 /* input buffer size */
75# endif
76#endif
77
78#ifndef OUTBUFSIZ
79# ifdef SMALL_MEM
80# define OUTBUFSIZ 8192 /* output buffer size */
81# else
82# define OUTBUFSIZ 16384 /* output buffer size */
83# endif
84#endif
85
86#ifndef DIST_BUFSIZE
87# ifdef SMALL_MEM
88# define DIST_BUFSIZE 0x2000 /* buffer for distances, see trees.c */
89# else
90# define DIST_BUFSIZE 0x8000 /* buffer for distances, see trees.c */
91# endif
92#endif
93
94/* gzip flag byte */
95#define ASCII_FLAG 0x01 /* bit 0 set: file probably ascii text */
96#define CONTINUATION 0x02 /* bit 1 set: continuation of multi-part gzip file */
97#define EXTRA_FIELD 0x04 /* bit 2 set: extra field present */
98#define ORIG_NAME 0x08 /* bit 3 set: original file name present */
99#define COMMENT 0x10 /* bit 4 set: file comment present */
100#define RESERVED 0xC0 /* bit 6,7: reserved */
101
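/* [Editor's note: illustrative sketch, not part of the original file.]
 * The flag bits above land in byte 3 of the 10-byte gzip member header
 * defined by RFC 1952. A minimal header for deflate output with no
 * optional fields could be laid out as below; emit_gzip_header_example()
 * and its arguments are hypothetical names, not busybox API.
 */
#if 0
static void emit_gzip_header_example(unsigned char hdr[10], unsigned long mtime)
{
	hdr[0] = 0x1f;           /* ID1: gzip magic */
	hdr[1] = 0x8b;           /* ID2: gzip magic */
	hdr[2] = 8;              /* CM: deflate */
	hdr[3] = 0;              /* FLG: none of the flag bits above set */
	hdr[4] = mtime;          /* MTIME, least significant byte first */
	hdr[5] = mtime >> 8;
	hdr[6] = mtime >> 16;
	hdr[7] = mtime >> 24;
	hdr[8] = 2;              /* XFL: 2 = slowest/best compression (-9) */
	hdr[9] = 3;              /* OS: 3 = Unix */
}
#endif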
102/* internal file attribute */
103#define UNKNOWN 0xffff
104#define BINARY 0
105#define ASCII 1
106
107#ifndef WSIZE
[1765]108# define WSIZE 0x8000 /* window size--must be a power of two, and */
109#endif /* at least 32K for zip's deflate method */
[821]110
111#define MIN_MATCH 3
112#define MAX_MATCH 258
113/* The minimum and maximum match lengths */
114
115#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
116/* Minimum amount of lookahead, except at the end of the input file.
117 * See deflate.c for comments about the MIN_MATCH+1.
118 */
119
120#define MAX_DIST (WSIZE-MIN_LOOKAHEAD)
121/* In order to simplify the code, particularly on 16 bit machines, match
122 * distances are limited to MAX_DIST instead of WSIZE.
123 */
124
[1765]125#ifndef MAX_PATH_LEN
126# define MAX_PATH_LEN 1024 /* max pathname length */
[821]127#endif
128
129#define seekable() 0 /* force sequential output */
130#define translate_eol 0 /* no option -a yet */
131
132#ifndef BITS
133# define BITS 16
134#endif
135#define INIT_BITS 9 /* Initial number of bits per code */
136
137#define BIT_MASK 0x1f /* Mask for 'number of compression bits' */
138/* Mask 0x20 is reserved to mean a fourth header byte, and 0x40 is free.
139 * It's a pity that old uncompress does not check bit 0x20. That makes
140 * extension of the format actually undesirable because old compress
141 * would just crash on the new format instead of giving a meaningful
142 * error message. It does check the number of bits, but it's more
143 * helpful to say "unsupported format, get a new version" than
144 * "can only handle 16 bits".
145 */
146
[1765]147#ifdef MAX_EXT_CHARS
148# define MAX_SUFFIX MAX_EXT_CHARS
149#else
150# define MAX_SUFFIX 30
151#endif
[821]152
[1765]153
154/* ===========================================================================
155 * Compile with MEDIUM_MEM to reduce the memory requirements or
156 * with SMALL_MEM to use as little memory as possible. Use BIG_MEM if the
157 * entire input file can be held in memory (not possible on 16 bit systems).
158 * Warning: defining these symbols affects HASH_BITS (see below) and thus
159 * affects the compression ratio. The compressed output
160 * is still correct, and might even be smaller in some cases.
[821]161 */
162
[1765]163#ifdef SMALL_MEM
164# define HASH_BITS 13 /* Number of bits used to hash strings */
[821]165#endif
[1765]166#ifdef MEDIUM_MEM
167# define HASH_BITS 14
[821]168#endif
[1765]169#ifndef HASH_BITS
170# define HASH_BITS 15
171 /* For portability to 16 bit machines, do not use values above 15. */
[821]172#endif
173
[1765]174#define HASH_SIZE (unsigned)(1<<HASH_BITS)
175#define HASH_MASK (HASH_SIZE-1)
176#define WMASK (WSIZE-1)
177/* HASH_SIZE and WSIZE must be powers of two */
178#ifndef TOO_FAR
179# define TOO_FAR 4096
[821]180#endif
[1765]181/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
[821]182
183
[1765]184/* ===========================================================================
185 * These types are not really 'char', 'short' and 'long'
186 */
187typedef uint8_t uch;
188typedef uint16_t ush;
189typedef uint32_t ulg;
190typedef int32_t lng;
[821]191
[1765]192typedef ush Pos;
193typedef unsigned IPos;
194/* A Pos is an index in the character window. We use short instead of int to
195 * save space in the various tables. IPos is used only for parameter passing.
196 */
[821]197
[1765]198enum {
199 WINDOW_SIZE = 2 * WSIZE,
200/* window size, 2*WSIZE except for MMAP or BIG_MEM, where it is the
201 * input file length plus MIN_LOOKAHEAD.
202 */
[821]203
[1765]204 max_chain_length = 4096,
205/* To speed up deflation, hash chains are never searched beyond this length.
206 * A higher limit improves compression ratio but degrades the speed.
207 */
[821]208
[1765]209 max_lazy_match = 258,
210/* Attempt to find a better match only when the current match is strictly
211 * smaller than this value. This mechanism is used only for compression
212 * levels >= 4.
213 */
[821]214
[1765]215 max_insert_length = max_lazy_match,
216/* Insert new strings in the hash table only if the match length
217 * is not greater than this length. This saves time but degrades compression.
218 * max_insert_length is used only for compression levels <= 3.
219 */
[821]220
[1765]221 good_match = 32,
222/* Use a faster search when the previous match is longer than this */
223
224/* Values for max_lazy_match, good_match and max_chain_length, depending on
225 * the desired pack level (0..9). The values given below have been tuned to
226 * exclude worst case performance for pathological files. Better values may be
227 * found for specific files.
[821]228 */
229
[1765]230 nice_match = 258, /* Stop searching when current match exceeds this */
231/* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
232 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
233 * meaning.
[821]234 */
[1765]235};
[821]236
[1765]237
238struct globals {
239
240 lng block_start;
241
242/* window position at the beginning of the current output block. Gets
243 * negative when the window is moved backwards.
[821]244 */
[1765]245 unsigned ins_h; /* hash index of string to be inserted */
[821]246
[1765]247#define H_SHIFT ((HASH_BITS+MIN_MATCH-1) / MIN_MATCH)
248/* Number of bits by which ins_h and del_h must be shifted at each
249 * input step. It must be such that after MIN_MATCH steps, the oldest
250 * byte no longer takes part in the hash key, that is:
251 * H_SHIFT * MIN_MATCH >= HASH_BITS
252 */
[821]253
[1765]254 unsigned prev_length;
255
256/* Length of the best match at previous step. Matches not greater than this
257 * are discarded. This is used in the lazy match evaluation.
[821]258 */
259
[1765]260 unsigned strstart; /* start of string to insert */
261 unsigned match_start; /* start of matching string */
262 unsigned lookahead; /* number of valid bytes ahead in window */
[821]263
[1765]264/* ===========================================================================
[821]265 */
[1765]266#define DECLARE(type, array, size) \
267 type * array
268#define ALLOC(type, array, size) \
269 array = xzalloc((size_t)(((size)+1L)/2) * 2*sizeof(type));
270#define FREE(array) \
271 do { free(array); array = NULL; } while (0)
[821]272
[1765]273 /* global buffers */
[821]274
[1765]275 /* buffer for literals or lengths */
276 /* DECLARE(uch, l_buf, LIT_BUFSIZE); */
277 DECLARE(uch, l_buf, INBUFSIZ);
278
279 DECLARE(ush, d_buf, DIST_BUFSIZE);
280 DECLARE(uch, outbuf, OUTBUFSIZ);
281
282/* Sliding window. Input bytes are read into the second half of the window,
283 * and move to the first half later to keep a dictionary of at least WSIZE
284 * bytes. With this organization, matches are limited to a distance of
285 * WSIZE-MAX_MATCH bytes, but this ensures that IO is always
286 * performed with a length multiple of the block size. Also, it limits
287 * the window size to 64K, which is quite useful on MSDOS.
288 * To do: limit the window size to WSIZE+BSZ if SMALL_MEM (the code would
289 * be less efficient).
[821]290 */
[1765]291 DECLARE(uch, window, 2L * WSIZE);
[821]292
[1765]293/* Link to older string with same hash index. To limit the size of this
294 * array to 64K, this link is maintained only for the last 32K strings.
295 * An index in this array is thus a window index modulo 32K.
296 */
297 /* DECLARE(Pos, prev, WSIZE); */
298 DECLARE(ush, prev, 1L << BITS);
299
300/* Heads of the hash chains or 0. */
301 /* DECLARE(Pos, head, 1<<HASH_BITS); */
302#define head (G1.prev + WSIZE) /* hash head (see deflate.c) */
303
304/* number of input bytes */
305 ulg isize; /* only 32 bits stored in .gz file */
306
307/* bbox always use stdin/stdout */
308#define ifd STDIN_FILENO /* input file descriptor */
309#define ofd STDOUT_FILENO /* output file descriptor */
310
311#ifdef DEBUG
312 unsigned insize; /* valid bytes in l_buf */
313#endif
314 unsigned outcnt; /* bytes in output buffer */
315
316 smallint eofile; /* flag set at end of input file */
317
[821]318/* ===========================================================================
319 * Local data used by the "bit string" routines.
320 */
321
[1765]322 unsigned short bi_buf;
[821]323
324/* Output buffer. bits are inserted starting at the bottom (least significant
325 * bits).
326 */
327
[1765]328#undef BUF_SIZE
329#define BUF_SIZE (8 * sizeof(G1.bi_buf))
[821]330/* Number of bits used within bi_buf. (bi_buf might be implemented on
331 * more than 16 bits on some systems.)
332 */
333
[1765]334 int bi_valid;
[821]335
336/* Current input function. Set to mem_read for in-memory compression */
337
338#ifdef DEBUG
[1765]339 ulg bits_sent; /* bit length of the compressed data */
[821]340#endif
341
[1765]342 uint32_t *crc_32_tab;
343 uint32_t crc; /* shift register contents */
344};
345
346#define G1 (*(ptr_to_globals - 1))
347
348
[821]349/* ===========================================================================
[1765]350 * Write the output buffer outbuf[0..outcnt-1] to the output fd and reset outcnt.
351 * (used for the compressed data only)
[821]352 */
[1765]353static void flush_outbuf(void)
[821]354{
[1765]355 if (G1.outcnt == 0)
356 return;
357
358 xwrite(ofd, (char *) G1.outbuf, G1.outcnt);
359 G1.outcnt = 0;
360}
361
362
363/* ===========================================================================
364 */
365/* put_8bit is used for the compressed output */
366#define put_8bit(c) \
367do { \
368 G1.outbuf[G1.outcnt++] = (c); \
369 if (G1.outcnt == OUTBUFSIZ) flush_outbuf(); \
370} while (0)
371
372/* Output a 16 bit value, lsb first */
373static void put_16bit(ush w)
374{
375 if (G1.outcnt < OUTBUFSIZ - 2) {
376 G1.outbuf[G1.outcnt++] = w;
377 G1.outbuf[G1.outcnt++] = w >> 8;
378 } else {
379 put_8bit(w);
380 put_8bit(w >> 8);
381 }
382}
383
384static void put_32bit(ulg n)
385{
386 put_16bit(n);
387 put_16bit(n >> 16);
388}
389
390/* ===========================================================================
391 * Clear input and output buffers
392 */
393static void clear_bufs(void)
394{
395 G1.outcnt = 0;
[821]396#ifdef DEBUG
[1765]397 G1.insize = 0;
[821]398#endif
[1765]399 G1.isize = 0;
400}
[821]401
[1765]402
403/* ===========================================================================
404 * Run a set of bytes through the crc shift register and update G1.crc.
405 * Return the current crc. (The NULL-pointer "reinitialize" case of the
406 * original gzip updcrc() is not implemented in this stripped-down copy.)
407 */
408static uint32_t updcrc(uch * s, unsigned n)
409{
410 uint32_t c = G1.crc;
411 while (n) {
412 c = G1.crc_32_tab[(uch)(c ^ *s++)] ^ (c >> 8);
413 n--;
[821]414 }
[1765]415 G1.crc = c;
416 return c;
[821]417}
418
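/* [Editor's note: illustrative sketch, not part of the original file.]
 * updcrc() above is the classic byte-at-a-time, bit-reflected CRC-32
 * (polynomial 0xEDB88320). G1.crc_32_tab itself is supplied by libbb; the
 * stand-alone sketch below, with hypothetical names, shows how an
 * equivalent table is generated and consumed, assuming the register starts
 * at 0xffffffff and is inverted once at the end.
 */
#if 0
static uint32_t crc32_table_example[256];

static void crc32_fill_example(void)
{
	uint32_t c;
	int n, k;

	for (n = 0; n < 256; n++) {
		c = (uint32_t) n;
		for (k = 0; k < 8; k++)     /* 8 shift/xor steps per table entry */
			c = (c & 1) ? 0xedb88320 ^ (c >> 1) : c >> 1;
		crc32_table_example[n] = c;
	}
}

static uint32_t crc32_update_example(uint32_t crc, const unsigned char *s, unsigned n)
{
	while (n--)  /* same recurrence as updcrc() above */
		crc = crc32_table_example[(unsigned char)(crc ^ *s++)] ^ (crc >> 8);
	return crc;
}
#endif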
[1765]419
[821]420/* ===========================================================================
[1765]421 * Read a new buffer from the current input file, perform end-of-line
422 * translation, and update the crc and input file size.
423 * IN assertion: size >= 2 (for end-of-line translation)
424 */
425static unsigned file_read(void *buf, unsigned size)
426{
427 unsigned len;
428
429 Assert(G1.insize == 0, "l_buf not empty");
430
431 len = safe_read(ifd, buf, size);
432 if (len == (unsigned)(-1) || len == 0)
433 return len;
434
435 updcrc(buf, len);
436 G1.isize += len;
437 return len;
438}
439
440
441/* ===========================================================================
[821]442 * Send a value on a given number of bits.
443 * IN assertion: length <= 16 and value fits in length bits.
444 */
445static void send_bits(int value, int length)
446{
447#ifdef DEBUG
448 Tracev((stderr, " l %2d v %4x ", length, value));
449 Assert(length > 0 && length <= 15, "invalid length");
[1765]450 G1.bits_sent += length;
[821]451#endif
452 /* If not enough room in bi_buf, use (valid) bits from bi_buf and
453 * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
454 * unused bits in value.
455 */
[1765]456 if (G1.bi_valid > (int) BUF_SIZE - length) {
457 G1.bi_buf |= (value << G1.bi_valid);
458 put_16bit(G1.bi_buf);
459 G1.bi_buf = (ush) value >> (BUF_SIZE - G1.bi_valid);
460 G1.bi_valid += length - BUF_SIZE;
[821]461 } else {
[1765]462 G1.bi_buf |= value << G1.bi_valid;
463 G1.bi_valid += length;
[821]464 }
465}
466
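/* [Editor's note: illustrative sketch, not part of the original file.]
 * send_bits() packs codes least-significant-bit first: a new value is OR-ed
 * in above the bi_valid bits already buffered, and full 16-bit chunks are
 * written low byte first. The simplified stand-alone model below (all names
 * hypothetical, no bounds checking) flushes byte by byte but produces the
 * same bit order.
 */
#if 0
struct bit_packer_example {
	unsigned acc;            /* bit accumulator, like G1.bi_buf */
	int valid;               /* bits currently held, like G1.bi_valid */
	unsigned char out[64];   /* packed output bytes */
	unsigned outlen;
};

static void pack_bits_example(struct bit_packer_example *p, unsigned value, int length)
{
	p->acc |= value << p->valid;      /* append above the buffered bits */
	p->valid += length;
	while (p->valid >= 8) {           /* emit completed bytes, LSB first */
		p->out[p->outlen++] = (unsigned char) p->acc;
		p->acc >>= 8;
		p->valid -= 8;
	}
}
#endif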
[1765]467
[821]468/* ===========================================================================
469 * Reverse the first len bits of a code, using straightforward code (a faster
470 * method would use a table)
471 * IN assertion: 1 <= len <= 15
472 */
473static unsigned bi_reverse(unsigned code, int len)
474{
[1765]475 unsigned res = 0;
[821]476
[1765]477 while (1) {
[821]478 res |= code & 1;
[1765]479 if (--len <= 0) return res;
480 code >>= 1;
481 res <<= 1;
482 }
[821]483}
484
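/* [Editor's note: illustrative sketch, not part of the original file.]
 * The "faster method" the comment above alludes to: reverse each byte with
 * a 256-entry table, stitch the two reversed bytes together, then drop the
 * bits beyond len. rev8_example[] is a hypothetical table that would be
 * filled once at startup (rev8_example[b] = b with its 8 bits mirrored).
 */
#if 0
static unsigned char rev8_example[256];

static unsigned bi_reverse_by_table_example(unsigned code, int len)
{
	/* valid for 1 <= len <= 15 and code < (1 << len), as in bi_reverse() */
	unsigned r = ((unsigned) rev8_example[code & 0xff] << 8)
	           | rev8_example[(code >> 8) & 0xff];

	return r >> (16 - len);   /* keep only the len reversed bits */
}
#endif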
[1765]485
[821]486/* ===========================================================================
487 * Write out any remaining bits in an incomplete byte.
488 */
489static void bi_windup(void)
490{
[1765]491 if (G1.bi_valid > 8) {
492 put_16bit(G1.bi_buf);
493 } else if (G1.bi_valid > 0) {
494 put_8bit(G1.bi_buf);
[821]495 }
[1765]496 G1.bi_buf = 0;
497 G1.bi_valid = 0;
[821]498#ifdef DEBUG
[1765]499 G1.bits_sent = (G1.bits_sent + 7) & ~7;
[821]500#endif
501}
502
[1765]503
[821]504/* ===========================================================================
505 * Copy a stored block to the zip file, storing first the length and its
506 * one's complement if requested.
507 */
508static void copy_block(char *buf, unsigned len, int header)
509{
510 bi_windup(); /* align on byte boundary */
511
512 if (header) {
[1765]513 put_16bit(len);
514 put_16bit(~len);
[821]515#ifdef DEBUG
[1765]516 G1.bits_sent += 2 * 16;
[821]517#endif
518 }
519#ifdef DEBUG
[1765]520 G1.bits_sent += (ulg) len << 3;
[821]521#endif
522 while (len--) {
[1765]523 put_8bit(*buf++);
[821]524 }
525}
526
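/* [Editor's note: illustrative sketch, not part of the original file.]
 * When copy_block() is called with header != 0 it emits the RFC 1951
 * stored-block payload: LEN (little-endian), NLEN (its one's complement),
 * then the raw bytes. The hypothetical helper below builds one complete,
 * final stored block into a buffer, assuming the stream is byte-aligned
 * and len <= 0xffff.
 */
#if 0
static unsigned stored_block_example(unsigned char *out,
		const unsigned char *src, unsigned len)
{
	unsigned n = 0;

	out[n++] = 0x01;                  /* BFINAL=1, BTYPE=00 (stored), rest is padding */
	out[n++] = len & 0xff;            /* LEN, low byte first */
	out[n++] = (len >> 8) & 0xff;
	out[n++] = ~len & 0xff;           /* NLEN = one's complement of LEN */
	out[n++] = (~len >> 8) & 0xff;
	memcpy(out + n, src, len);        /* literal bytes, copied verbatim */
	return n + len;
}
#endif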
527
528/* ===========================================================================
[1765]529 * Fill the window when the lookahead becomes insufficient.
530 * Updates strstart and lookahead, and sets eofile if end of input file.
531 * IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0
532 * OUT assertions: at least one byte has been read, or eofile is set;
533 * file reads are performed for at least two bytes (required for the
534 * translate_eol option).
[821]535 */
[1765]536static void fill_window(void)
[821]537{
[1765]538 unsigned n, m;
539 unsigned more = WINDOW_SIZE - G1.lookahead - G1.strstart;
540 /* Amount of free space at the end of the window. */
[821]541
[1765]542 /* If the window is almost full and there is insufficient lookahead,
543 * move the upper half to the lower one to make room in the upper half.
544 */
545 if (more == (unsigned) -1) {
546 /* Very unlikely, but possible on a 16 bit machine if strstart == 0
547 * and lookahead == 1 (input done one byte at a time)
548 */
549 more--;
550 } else if (G1.strstart >= WSIZE + MAX_DIST) {
551 /* By the IN assertion, the window is not empty so we can't confuse
552 * more == 0 with more == 64K on a 16 bit machine.
553 */
554 Assert(WINDOW_SIZE == 2 * WSIZE, "no sliding with BIG_MEM");
[821]555
[1765]556 memcpy(G1.window, G1.window + WSIZE, WSIZE);
557 G1.match_start -= WSIZE;
558 G1.strstart -= WSIZE; /* we now have strstart >= MAX_DIST: */
[821]559
[1765]560 G1.block_start -= WSIZE;
[821]561
[1765]562 for (n = 0; n < HASH_SIZE; n++) {
563 m = head[n];
564 head[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
565 }
566 for (n = 0; n < WSIZE; n++) {
567 m = G1.prev[n];
568 G1.prev[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
569 /* If n is not on any hash chain, prev[n] is garbage but
570 * its value will never be used.
571 */
572 }
573 more += WSIZE;
[821]574 }
[1765]575 /* At this point, more >= 2 */
576 if (!G1.eofile) {
577 n = file_read(G1.window + G1.strstart + G1.lookahead, more);
578 if (n == 0 || n == (unsigned) -1) {
579 G1.eofile = 1;
580 } else {
581 G1.lookahead += n;
582 }
583 }
[821]584}
585
[1765]586
[821]587/* ===========================================================================
588 * Set match_start to the longest match starting at the given string and
589 * return its length. Matches shorter or equal to prev_length are discarded,
590 * in which case the result is equal to prev_length and match_start is
591 * garbage.
592 * IN assertions: cur_match is the head of the hash chain for the current
593 * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
594 */
595
596/* For MSDOS, OS/2 and 386 Unix, an optimized version is in match.asm or
597 * match.s. The code is functionally equivalent, so you can use the C version
598 * if desired.
599 */
600static int longest_match(IPos cur_match)
601{
602 unsigned chain_length = max_chain_length; /* max hash chain length */
[1765]603 uch *scan = G1.window + G1.strstart; /* current string */
604 uch *match; /* matched string */
605 int len; /* length of current match */
606 int best_len = G1.prev_length; /* best match length so far */
607 IPos limit = G1.strstart > (IPos) MAX_DIST ? G1.strstart - (IPos) MAX_DIST : 0;
[821]608 /* Stop when cur_match becomes <= limit. To simplify the code,
609 * we prevent matches with the string of window index 0.
610 */
611
612/* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
613 * It is easy to get rid of this optimization if necessary.
614 */
615#if HASH_BITS < 8 || MAX_MATCH != 258
616# error Code too clever
617#endif
[1765]618 uch *strend = G1.window + G1.strstart + MAX_MATCH;
619 uch scan_end1 = scan[best_len - 1];
620 uch scan_end = scan[best_len];
[821]621
622 /* Do not waste too much time if we already have a good match: */
[1765]623 if (G1.prev_length >= good_match) {
[821]624 chain_length >>= 2;
625 }
[1765]626 Assert(G1.strstart <= WINDOW_SIZE - MIN_LOOKAHEAD, "insufficient lookahead");
[821]627
628 do {
[1765]629 Assert(cur_match < G1.strstart, "no future");
630 match = G1.window + cur_match;
[821]631
632 /* Skip to next match if the match length cannot increase
633 * or if the match length is less than 2:
634 */
635 if (match[best_len] != scan_end ||
636 match[best_len - 1] != scan_end1 ||
637 *match != *scan || *++match != scan[1])
638 continue;
639
640 /* The check at best_len-1 can be removed because it will be made
641 * again later. (This heuristic is not always a win.)
642 * It is not necessary to compare scan[2] and match[2] since they
643 * are always equal when the other bytes match, given that
644 * the hash keys are equal and that HASH_BITS >= 8.
645 */
646 scan += 2, match++;
647
648 /* We check for insufficient lookahead only every 8th comparison;
649 * the 256th check will be made at strstart+258.
650 */
651 do {
652 } while (*++scan == *++match && *++scan == *++match &&
653 *++scan == *++match && *++scan == *++match &&
654 *++scan == *++match && *++scan == *++match &&
655 *++scan == *++match && *++scan == *++match && scan < strend);
656
657 len = MAX_MATCH - (int) (strend - scan);
658 scan = strend - MAX_MATCH;
659
660 if (len > best_len) {
[1765]661 G1.match_start = cur_match;
[821]662 best_len = len;
663 if (len >= nice_match)
664 break;
665 scan_end1 = scan[best_len - 1];
666 scan_end = scan[best_len];
667 }
[1765]668 } while ((cur_match = G1.prev[cur_match & WMASK]) > limit
[821]669 && --chain_length != 0);
670
671 return best_len;
672}
673
[1765]674
[821]675#ifdef DEBUG
676/* ===========================================================================
677 * Check that the match at match_start is indeed a match.
678 */
679static void check_match(IPos start, IPos match, int length)
680{
681 /* check that the match is indeed a match */
[1765]682 if (memcmp(G1.window + match, G1.window + start, length) != 0) {
[821]683 bb_error_msg(" start %d, match %d, length %d", start, match, length);
684 bb_error_msg("invalid match");
685 }
686 if (verbose > 1) {
687 bb_error_msg("\\[%d,%d]", start - match, length);
688 do {
[1765]689 putc(G1.window[start++], stderr);
[821]690 } while (--length != 0);
691 }
692}
693#else
[1765]694# define check_match(start, match, length) ((void)0)
[821]695#endif
696
697
698/* trees.c -- output deflated data using Huffman coding
699 * Copyright (C) 1992-1993 Jean-loup Gailly
700 * This is free software; you can redistribute it and/or modify it under the
701 * terms of the GNU General Public License, see the file COPYING.
702 */
703
[1765]704/* PURPOSE
[821]705 * Encode various sets of source values using variable-length
706 * binary code trees.
707 *
708 * DISCUSSION
709 * The PKZIP "deflation" process uses several Huffman trees. The more
710 * common source values are represented by shorter bit sequences.
711 *
712 * Each code tree is stored in the ZIP file in a compressed form
713 * which is itself a Huffman encoding of the lengths of
714 * all the code strings (in ascending order by source values).
715 * The actual code strings are reconstructed from the lengths in
716 * the UNZIP process, as described in the "application note"
717 * (APPNOTE.TXT) distributed as part of PKWARE's PKZIP program.
718 *
719 * REFERENCES
720 * Lynch, Thomas J.
721 * Data Compression: Techniques and Applications, pp. 53-55.
722 * Lifetime Learning Publications, 1985. ISBN 0-534-03418-7.
723 *
724 * Storer, James A.
725 * Data Compression: Methods and Theory, pp. 49-50.
726 * Computer Science Press, 1988. ISBN 0-7167-8156-5.
727 *
728 * Sedgewick, R.
729 * Algorithms, p290.
730 * Addison-Wesley, 1983. ISBN 0-201-06672-6.
731 *
732 * INTERFACE
[1765]733 * void ct_init()
734 * Allocate the match buffer, initialize the various tables [and save
[821]735 * the location of the internal file attribute (ascii/binary) and
[1765]736 * method (DEFLATE/STORE) -- deleted in bbox]
[821]737 *
[1765]738 * void ct_tally(int dist, int lc);
[821]739 * Save the match info and tally the frequency counts.
740 *
[1765]741 * ulg flush_block(char *buf, ulg stored_len, int eof)
[821]742 * Determine the best encoding for the current block: dynamic trees,
743 * static trees or store, and output the encoded block to the zip
744 * file. Returns the total compressed length for the file so far.
745 */
746
747#define MAX_BITS 15
748/* All codes must not exceed MAX_BITS bits */
749
750#define MAX_BL_BITS 7
751/* Bit length codes must not exceed MAX_BL_BITS bits */
752
753#define LENGTH_CODES 29
754/* number of length codes, not counting the special END_BLOCK code */
755
756#define LITERALS 256
757/* number of literal bytes 0..255 */
758
759#define END_BLOCK 256
760/* end of block literal code */
761
762#define L_CODES (LITERALS+1+LENGTH_CODES)
763/* number of Literal or Length codes, including the END_BLOCK code */
764
765#define D_CODES 30
766/* number of distance codes */
767
768#define BL_CODES 19
769/* number of codes used to transfer the bit lengths */
770
771/* extra bits for each length code */
[1765]772static const uint8_t extra_lbits[LENGTH_CODES] ALIGN1 = {
773 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4,
[821]774 4, 4, 5, 5, 5, 5, 0
775};
776
777/* extra bits for each distance code */
[1765]778static const uint8_t extra_dbits[D_CODES] ALIGN1 = {
779 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9,
[821]780 10, 10, 11, 11, 12, 12, 13, 13
781};
782
783/* extra bits for each bit length code */
[1765]784static const uint8_t extra_blbits[BL_CODES] ALIGN1 = {
785 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7 };
[821]786
[1765]787/* order in which the lengths of the bit length codes are sent (decreasing probability of use) */
788static const uint8_t bl_order[BL_CODES] ALIGN1 = {
789 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
790
[821]791#define STORED_BLOCK 0
792#define STATIC_TREES 1
793#define DYN_TREES 2
794/* The three kinds of block type */
795
796#ifndef LIT_BUFSIZE
797# ifdef SMALL_MEM
798# define LIT_BUFSIZE 0x2000
799# else
800# ifdef MEDIUM_MEM
801# define LIT_BUFSIZE 0x4000
802# else
803# define LIT_BUFSIZE 0x8000
804# endif
805# endif
806#endif
807#ifndef DIST_BUFSIZE
808# define DIST_BUFSIZE LIT_BUFSIZE
809#endif
810/* Sizes of match buffers for literals/lengths and distances. There are
811 * 4 reasons for limiting LIT_BUFSIZE to 64K:
812 * - frequencies can be kept in 16 bit counters
813 * - if compression is not successful for the first block, all input data is
814 * still in the window so we can still emit a stored block even when input
815 * comes from standard input. (This can also be done for all blocks if
816 * LIT_BUFSIZE is not greater than 32K.)
817 * - if compression is not successful for a file smaller than 64K, we can
818 * even emit a stored file instead of a stored block (saving 5 bytes).
819 * - creating new Huffman trees less frequently may not provide fast
820 * adaptation to changes in the input data statistics. (Take for
821 * example a binary file with poorly compressible code followed by
822 * a highly compressible string table.) Smaller buffer sizes give
823 * fast adaptation but have of course the overhead of transmitting trees
824 * more frequently.
825 * - I can't count above 4
826 * The current code is general and allows DIST_BUFSIZE < LIT_BUFSIZE (to save
827 * memory at the expense of compression). Some optimizations would be possible
828 * if we rely on DIST_BUFSIZE == LIT_BUFSIZE.
829 */
830#define REP_3_6 16
831/* repeat previous bit length 3-6 times (2 bits of repeat count) */
832#define REPZ_3_10 17
833/* repeat a zero length 3-10 times (3 bits of repeat count) */
834#define REPZ_11_138 18
835/* repeat a zero length 11-138 times (7 bits of repeat count) */
836
837/* ===========================================================================
[1765]838*/
[821]839/* Data structure describing a single value and its code string. */
840typedef struct ct_data {
841 union {
842 ush freq; /* frequency count */
843 ush code; /* bit string */
844 } fc;
845 union {
846 ush dad; /* father node in Huffman tree */
847 ush len; /* length of bit string */
848 } dl;
849} ct_data;
850
851#define Freq fc.freq
852#define Code fc.code
853#define Dad dl.dad
854#define Len dl.len
855
[1765]856#define HEAP_SIZE (2*L_CODES + 1)
[821]857/* maximum heap size */
858
[1765]859typedef struct tree_desc {
860 ct_data *dyn_tree; /* the dynamic tree */
861 ct_data *static_tree; /* corresponding static tree or NULL */
862 const uint8_t *extra_bits; /* extra bits for each code or NULL */
863 int extra_base; /* base index for extra_bits */
864 int elems; /* max number of elements in the tree */
865 int max_length; /* max bit length for the codes */
866 int max_code; /* largest code with non zero frequency */
867} tree_desc;
[821]868
[1765]869struct globals2 {
[821]870
[1765]871 ush heap[HEAP_SIZE]; /* heap used to build the Huffman trees */
872 int heap_len; /* number of elements in the heap */
873 int heap_max; /* element of largest frequency */
874
875/* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
876 * The same heap array is used to build all trees.
877 */
878
879 ct_data dyn_ltree[HEAP_SIZE]; /* literal and length tree */
880 ct_data dyn_dtree[2 * D_CODES + 1]; /* distance tree */
881
882 ct_data static_ltree[L_CODES + 2];
883
[821]884/* The static literal tree. Since the bit lengths are imposed, there is no
885 * need for the L_CODES extra codes used during heap construction. However
886 * the codes 286 and 287 are needed to build a canonical tree (see ct_init
887 * below).
888 */
889
[1765]890 ct_data static_dtree[D_CODES];
[821]891
892/* The static distance tree. (Actually a trivial tree since all codes use
893 * 5 bits.)
894 */
895
[1765]896 ct_data bl_tree[2 * BL_CODES + 1];
[821]897
898/* Huffman tree for the bit lengths */
899
[1765]900 tree_desc l_desc;
901 tree_desc d_desc;
902 tree_desc bl_desc;
[821]903
[1765]904 ush bl_count[MAX_BITS + 1];
[821]905
906/* number of codes at each bit length for an optimal tree */
909
[1765]910 uch depth[2 * L_CODES + 1];
[821]911
912/* Depth of each subtree used as tie breaker for trees of equal frequency */
913
[1765]914 uch length_code[MAX_MATCH - MIN_MATCH + 1];
[821]915
916/* length code for each normalized match length (0 == MIN_MATCH) */
917
[1765]918 uch dist_code[512];
[821]919
920/* distance codes. The first 256 values correspond to the distances
921 * 3 .. 258, the last 256 values correspond to the top 8 bits of
922 * the 15 bit distances.
923 */
924
[1765]925 int base_length[LENGTH_CODES];
[821]926
927/* First normalized length for each code (0 = MIN_MATCH) */
928
[1765]929 int base_dist[D_CODES];
[821]930
931/* First normalized distance for each code (0 = distance of 1) */
932
[1765]933 uch flag_buf[LIT_BUFSIZE / 8];
[821]934
935/* flag_buf is a bit array distinguishing literals from lengths in
936 * l_buf, thus indicating the presence or absence of a distance.
937 */
938
[1765]939 unsigned last_lit; /* running index in l_buf */
940 unsigned last_dist; /* running index in d_buf */
941 unsigned last_flags; /* running index in flag_buf */
942 uch flags; /* current flags not yet saved in flag_buf */
943 uch flag_bit; /* current bit used in flags */
[821]944
945/* bits are filled in flags starting at bit 0 (least significant).
946 * Note: these flags are overkill in the current code since we don't
947 * take advantage of DIST_BUFSIZE == LIT_BUFSIZE.
948 */
949
[1765]950 ulg opt_len; /* bit length of current block with optimal trees */
951 ulg static_len; /* bit length of current block with static trees */
[821]952
[1765]953 ulg compressed_len; /* total bit length of compressed file */
954};
[821]955
[1765]956#define G2ptr ((struct globals2*)(ptr_to_globals))
957#define G2 (*G2ptr)
[821]958
959
960/* ===========================================================================
961 */
962static void gen_codes(ct_data * tree, int max_code);
963static void build_tree(tree_desc * desc);
964static void scan_tree(ct_data * tree, int max_code);
965static void send_tree(ct_data * tree, int max_code);
966static int build_bl_tree(void);
967static void send_all_trees(int lcodes, int dcodes, int blcodes);
968static void compress_block(ct_data * ltree, ct_data * dtree);
969
970
971#ifndef DEBUG
[1765]972/* Send a code of the given tree. c and tree must not have side effects */
973# define SEND_CODE(c, tree) send_bits(tree[c].Code, tree[c].Len)
974#else
975# define SEND_CODE(c, tree) \
976{ \
977 if (verbose > 1) bb_error_msg("\ncd %3d ",(c)); \
978 send_bits(tree[c].Code, tree[c].Len); \
979}
[821]980#endif
981
[1765]982#define D_CODE(dist) \
983 ((dist) < 256 ? G2.dist_code[dist] : G2.dist_code[256 + ((dist)>>7)])
[821]984/* Mapping from a distance to a distance code. dist is the distance - 1 and
985 * must not have side effects. dist_code[256] and dist_code[257] are never
986 * used.
[1765]987 * The arguments must not have side effects.
[821]988 */
989
990
991/* ===========================================================================
992 * Initialize a new block.
993 */
994static void init_block(void)
995{
[1765]996 int n; /* iterates over tree elements */
[821]997
998 /* Initialize the trees. */
999 for (n = 0; n < L_CODES; n++)
[1765]1000 G2.dyn_ltree[n].Freq = 0;
[821]1001 for (n = 0; n < D_CODES; n++)
[1765]1002 G2.dyn_dtree[n].Freq = 0;
[821]1003 for (n = 0; n < BL_CODES; n++)
[1765]1004 G2.bl_tree[n].Freq = 0;
[821]1005
[1765]1006 G2.dyn_ltree[END_BLOCK].Freq = 1;
1007 G2.opt_len = G2.static_len = 0;
1008 G2.last_lit = G2.last_dist = G2.last_flags = 0;
1009 G2.flags = 0;
1010 G2.flag_bit = 1;
[821]1011}
1012
1013
1014/* ===========================================================================
1015 * Restore the heap property by moving down the tree starting at node k,
1016 * exchanging a node with the smallest of its two sons if necessary, stopping
1017 * when the heap property is re-established (each father smaller than its
1018 * two sons).
1019 */
[1765]1020
1021/* Compares two subtrees, using the tree depth as tie breaker when
1022 * the subtrees have equal frequency. This minimizes the worst case length. */
1023#define SMALLER(tree, n, m) \
1024 (tree[n].Freq < tree[m].Freq \
1025 || (tree[n].Freq == tree[m].Freq && G2.depth[n] <= G2.depth[m]))
1026
[821]1027static void pqdownheap(ct_data * tree, int k)
1028{
[1765]1029 int v = G2.heap[k];
[821]1030 int j = k << 1; /* left son of k */
1031
[1765]1032 while (j <= G2.heap_len) {
[821]1033 /* Set j to the smallest of the two sons: */
[1765]1034 if (j < G2.heap_len && SMALLER(tree, G2.heap[j + 1], G2.heap[j]))
[821]1035 j++;
1036
1037 /* Exit if v is smaller than both sons */
[1765]1038 if (SMALLER(tree, v, G2.heap[j]))
[821]1039 break;
1040
1041 /* Exchange v with the smallest son */
[1765]1042 G2.heap[k] = G2.heap[j];
[821]1043 k = j;
1044
1045 /* And continue down the tree, setting j to the left son of k */
1046 j <<= 1;
1047 }
[1765]1048 G2.heap[k] = v;
[821]1049}
1050
[1765]1051
[821]1052/* ===========================================================================
1053 * Compute the optimal bit lengths for a tree and update the total bit length
1054 * for the current block.
1055 * IN assertion: the fields freq and dad are set, heap[heap_max] and
1056 * above are the tree nodes sorted by increasing frequency.
1057 * OUT assertions: the field len is set to the optimal bit length, the
1058 * array bl_count contains the frequencies for each bit length.
1059 * The length opt_len is updated; static_len is also updated if stree is
1060 * not null.
1061 */
1062static void gen_bitlen(tree_desc * desc)
1063{
1064 ct_data *tree = desc->dyn_tree;
[1765]1065 const uint8_t *extra = desc->extra_bits;
[821]1066 int base = desc->extra_base;
1067 int max_code = desc->max_code;
1068 int max_length = desc->max_length;
1069 ct_data *stree = desc->static_tree;
1070 int h; /* heap index */
1071 int n, m; /* iterate over the tree elements */
1072 int bits; /* bit length */
1073 int xbits; /* extra bits */
1074 ush f; /* frequency */
1075 int overflow = 0; /* number of elements with bit length too large */
1076
1077 for (bits = 0; bits <= MAX_BITS; bits++)
[1765]1078 G2.bl_count[bits] = 0;
[821]1079
1080 /* In a first pass, compute the optimal bit lengths (which may
1081 * overflow in the case of the bit length tree).
1082 */
[1765]1083 tree[G2.heap[G2.heap_max]].Len = 0; /* root of the heap */
[821]1084
[1765]1085 for (h = G2.heap_max + 1; h < HEAP_SIZE; h++) {
1086 n = G2.heap[h];
[821]1087 bits = tree[tree[n].Dad].Len + 1;
[1765]1088 if (bits > max_length) {
1089 bits = max_length;
1090 overflow++;
1091 }
[821]1092 tree[n].Len = (ush) bits;
1093 /* We overwrite tree[n].Dad which is no longer needed */
1094
1095 if (n > max_code)
1096 continue; /* not a leaf node */
1097
[1765]1098 G2.bl_count[bits]++;
[821]1099 xbits = 0;
1100 if (n >= base)
1101 xbits = extra[n - base];
1102 f = tree[n].Freq;
[1765]1103 G2.opt_len += (ulg) f *(bits + xbits);
[821]1104
1105 if (stree)
[1765]1106 G2.static_len += (ulg) f * (stree[n].Len + xbits);
[821]1107 }
1108 if (overflow == 0)
1109 return;
1110
1111 Trace((stderr, "\nbit length overflow\n"));
1112 /* This happens for example on obj2 and pic of the Calgary corpus */
1113
1114 /* Find the first bit length which could increase: */
1115 do {
1116 bits = max_length - 1;
[1765]1117 while (G2.bl_count[bits] == 0)
[821]1118 bits--;
[1765]1119 G2.bl_count[bits]--; /* move one leaf down the tree */
1120 G2.bl_count[bits + 1] += 2; /* move one overflow item as its brother */
1121 G2.bl_count[max_length]--;
[821]1122 /* The brother of the overflow item also moves one step up,
1123 * but this does not affect bl_count[max_length]
1124 */
1125 overflow -= 2;
1126 } while (overflow > 0);
1127
1128 /* Now recompute all bit lengths, scanning in increasing frequency.
1129 * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
1130 * lengths instead of fixing only the wrong ones. This idea is taken
1131 * from 'ar' written by Haruhiko Okumura.)
1132 */
1133 for (bits = max_length; bits != 0; bits--) {
[1765]1134 n = G2.bl_count[bits];
[821]1135 while (n != 0) {
[1765]1136 m = G2.heap[--h];
[821]1137 if (m > max_code)
1138 continue;
1139 if (tree[m].Len != (unsigned) bits) {
[1765]1140 Trace((stderr, "code %d bits %d->%d\n", m, tree[m].Len, bits));
1141 G2.opt_len += ((int32_t) bits - tree[m].Len) * tree[m].Freq;
1142 tree[m].Len = bits;
[821]1143 }
1144 n--;
1145 }
1146 }
1147}
1148
[1765]1149
[821]1150/* ===========================================================================
1151 * Generate the codes for a given tree and bit counts (which need not be
1152 * optimal).
1153 * IN assertion: the array bl_count contains the bit length statistics for
1154 * the given tree and the field len is set for all tree elements.
1155 * OUT assertion: the field code is set for all tree elements of non
1156 * zero code length.
1157 */
1158static void gen_codes(ct_data * tree, int max_code)
1159{
1160 ush next_code[MAX_BITS + 1]; /* next code value for each bit length */
1161 ush code = 0; /* running code value */
1162 int bits; /* bit index */
1163 int n; /* code index */
1164
1165 /* The distribution counts are first used to generate the code values
1166 * without bit reversal.
1167 */
1168 for (bits = 1; bits <= MAX_BITS; bits++) {
[1765]1169 next_code[bits] = code = (code + G2.bl_count[bits - 1]) << 1;
[821]1170 }
1171 /* Check that the bit counts in bl_count are consistent. The last code
1172 * must be all ones.
1173 */
[1765]1174 Assert(code + G2.bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
[821]1175 "inconsistent bit counts");
1176 Tracev((stderr, "\ngen_codes: max_code %d ", max_code));
1177
1178 for (n = 0; n <= max_code; n++) {
1179 int len = tree[n].Len;
1180
1181 if (len == 0)
1182 continue;
1183 /* Now reverse the bits */
1184 tree[n].Code = bi_reverse(next_code[len]++, len);
1185
[1765]1186 Tracec(tree != G2.static_ltree,
[821]1187 (stderr, "\nn %3d %c l %2d c %4x (%x) ", n,
1188 (isgraph(n) ? n : ' '), len, tree[n].Code,
1189 next_code[len] - 1));
1190 }
1191}
1192
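/* [Editor's note: worked example, not part of the original file.]
 * How gen_codes() assigns canonical codes (this reproduces the example in
 * RFC 1951, section 3.2.2). For symbols A..H with bit lengths
 * 3,3,3,3,3,2,4,4 the counts are bl_count[2]=1, bl_count[3]=5,
 * bl_count[4]=2, so next_code[] starts at 0 for 2 bits, 2 for 3 bits and
 * 14 for 4 bits, and the symbols receive, in order:
 *
 *     A 010   B 011   C 100   D 101   E 110   F 00   G 1110   H 1111
 *
 * (bi_reverse() then mirrors each code so it can be emitted LSB first.)
 */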
[1765]1193
[821]1194/* ===========================================================================
1195 * Construct one Huffman tree and assign the code bit strings and lengths.
1196 * Update the total bit length for the current block.
1197 * IN assertion: the field freq is set for all tree elements.
1198 * OUT assertions: the fields len and code are set to the optimal bit length
1199 * and corresponding code. The length opt_len is updated; static_len is
1200 * also updated if stree is not null. The field max_code is set.
1201 */
[1765]1202
1203/* Remove the smallest element from the heap and recreate the heap with
1204 * one less element. Updates heap and heap_len. */
1205
1206#define SMALLEST 1
1207/* Index within the heap array of least frequent node in the Huffman tree */
1208
1209#define PQREMOVE(tree, top) \
1210do { \
1211 top = G2.heap[SMALLEST]; \
1212 G2.heap[SMALLEST] = G2.heap[G2.heap_len--]; \
1213 pqdownheap(tree, SMALLEST); \
1214} while (0)
1215
[821]1216static void build_tree(tree_desc * desc)
1217{
1218 ct_data *tree = desc->dyn_tree;
1219 ct_data *stree = desc->static_tree;
1220 int elems = desc->elems;
1221 int n, m; /* iterate over heap elements */
1222 int max_code = -1; /* largest code with non zero frequency */
1223 int node = elems; /* next internal node of the tree */
1224
1225 /* Construct the initial heap, with least frequent element in
1226 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
1227 * heap[0] is not used.
1228 */
[1765]1229 G2.heap_len = 0;
1230 G2.heap_max = HEAP_SIZE;
[821]1231
1232 for (n = 0; n < elems; n++) {
1233 if (tree[n].Freq != 0) {
[1765]1234 G2.heap[++G2.heap_len] = max_code = n;
1235 G2.depth[n] = 0;
[821]1236 } else {
1237 tree[n].Len = 0;
1238 }
1239 }
1240
1241 /* The pkzip format requires that at least one distance code exists,
1242 * and that at least one bit should be sent even if there is only one
1243 * possible code. So to avoid special checks later on we force at least
1244 * two codes of non zero frequency.
1245 */
[1765]1246 while (G2.heap_len < 2) {
1247 int new = G2.heap[++G2.heap_len] = (max_code < 2 ? ++max_code : 0);
[821]1248
1249 tree[new].Freq = 1;
[1765]1250 G2.depth[new] = 0;
1251 G2.opt_len--;
[821]1252 if (stree)
[1765]1253 G2.static_len -= stree[new].Len;
[821]1254 /* new is 0 or 1 so it does not have extra bits */
1255 }
1256 desc->max_code = max_code;
1257
1258 /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
1259 * establish sub-heaps of increasing lengths:
1260 */
[1765]1261 for (n = G2.heap_len / 2; n >= 1; n--)
[821]1262 pqdownheap(tree, n);
1263
1264 /* Construct the Huffman tree by repeatedly combining the least two
1265 * frequent nodes.
1266 */
1267 do {
[1765]1268 PQREMOVE(tree, n); /* n = node of least frequency */
1269 m = G2.heap[SMALLEST]; /* m = node of next least frequency */
[821]1270
[1765]1271 G2.heap[--G2.heap_max] = n; /* keep the nodes sorted by frequency */
1272 G2.heap[--G2.heap_max] = m;
[821]1273
1274 /* Create a new node father of n and m */
1275 tree[node].Freq = tree[n].Freq + tree[m].Freq;
[1765]1276 G2.depth[node] = MAX(G2.depth[n], G2.depth[m]) + 1;
[821]1277 tree[n].Dad = tree[m].Dad = (ush) node;
1278#ifdef DUMP_BL_TREE
[1765]1279 if (tree == G2.bl_tree) {
[821]1280 bb_error_msg("\nnode %d(%d), sons %d(%d) %d(%d)",
1281 node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
1282 }
1283#endif
1284 /* and insert the new node in the heap */
[1765]1285 G2.heap[SMALLEST] = node++;
[821]1286 pqdownheap(tree, SMALLEST);
1287
[1765]1288 } while (G2.heap_len >= 2);
[821]1289
[1765]1290 G2.heap[--G2.heap_max] = G2.heap[SMALLEST];
[821]1291
1292 /* At this point, the fields freq and dad are set. We can now
1293 * generate the bit lengths.
1294 */
1295 gen_bitlen((tree_desc *) desc);
1296
1297 /* The field len is now set, we can generate the bit codes */
1298 gen_codes((ct_data *) tree, max_code);
1299}
1300
[1765]1301
[821]1302/* ===========================================================================
1303 * Scan a literal or distance tree to determine the frequencies of the codes
1304 * in the bit length tree. Updates opt_len to take into account the repeat
1305 * counts. (The contribution of the bit length codes will be added later
1306 * during the construction of bl_tree.)
1307 */
1308static void scan_tree(ct_data * tree, int max_code)
1309{
1310 int n; /* iterates over all tree elements */
1311 int prevlen = -1; /* last emitted length */
1312 int curlen; /* length of current code */
1313 int nextlen = tree[0].Len; /* length of next code */
1314 int count = 0; /* repeat count of the current code */
1315 int max_count = 7; /* max repeat count */
1316 int min_count = 4; /* min repeat count */
1317
[1765]1318 if (nextlen == 0) {
1319 max_count = 138;
1320 min_count = 3;
1321 }
1322 tree[max_code + 1].Len = 0xffff; /* guard */
[821]1323
1324 for (n = 0; n <= max_code; n++) {
1325 curlen = nextlen;
1326 nextlen = tree[n + 1].Len;
[1765]1327 if (++count < max_count && curlen == nextlen)
[821]1328 continue;
[1765]1329
1330 if (count < min_count) {
1331 G2.bl_tree[curlen].Freq += count;
[821]1332 } else if (curlen != 0) {
1333 if (curlen != prevlen)
[1765]1334 G2.bl_tree[curlen].Freq++;
1335 G2.bl_tree[REP_3_6].Freq++;
[821]1336 } else if (count <= 10) {
[1765]1337 G2.bl_tree[REPZ_3_10].Freq++;
[821]1338 } else {
[1765]1339 G2.bl_tree[REPZ_11_138].Freq++;
[821]1340 }
1341 count = 0;
1342 prevlen = curlen;
[1765]1343
1344 max_count = 7;
1345 min_count = 4;
[821]1346 if (nextlen == 0) {
[1765]1347 max_count = 138;
1348 min_count = 3;
[821]1349 } else if (curlen == nextlen) {
[1765]1350 max_count = 6;
1351 min_count = 3;
[821]1352 }
1353 }
1354}
1355
[1765]1356
[821]1357/* ===========================================================================
1358 * Send a literal or distance tree in compressed form, using the codes in
1359 * bl_tree.
1360 */
1361static void send_tree(ct_data * tree, int max_code)
1362{
1363 int n; /* iterates over all tree elements */
1364 int prevlen = -1; /* last emitted length */
1365 int curlen; /* length of current code */
1366 int nextlen = tree[0].Len; /* length of next code */
1367 int count = 0; /* repeat count of the current code */
1368 int max_count = 7; /* max repeat count */
1369 int min_count = 4; /* min repeat count */
1370
1371/* tree[max_code+1].Len = -1; *//* guard already set */
1372 if (nextlen == 0)
1373 max_count = 138, min_count = 3;
1374
1375 for (n = 0; n <= max_code; n++) {
1376 curlen = nextlen;
1377 nextlen = tree[n + 1].Len;
1378 if (++count < max_count && curlen == nextlen) {
1379 continue;
1380 } else if (count < min_count) {
1381 do {
[1765]1382 SEND_CODE(curlen, G2.bl_tree);
1383 } while (--count);
[821]1384 } else if (curlen != 0) {
1385 if (curlen != prevlen) {
[1765]1386 SEND_CODE(curlen, G2.bl_tree);
[821]1387 count--;
1388 }
1389 Assert(count >= 3 && count <= 6, " 3_6?");
[1765]1390 SEND_CODE(REP_3_6, G2.bl_tree);
[821]1391 send_bits(count - 3, 2);
1392 } else if (count <= 10) {
[1765]1393 SEND_CODE(REPZ_3_10, G2.bl_tree);
[821]1394 send_bits(count - 3, 3);
1395 } else {
[1765]1396 SEND_CODE(REPZ_11_138, G2.bl_tree);
[821]1397 send_bits(count - 11, 7);
1398 }
1399 count = 0;
1400 prevlen = curlen;
1401 if (nextlen == 0) {
[1765]1402 max_count = 138;
1403 min_count = 3;
[821]1404 } else if (curlen == nextlen) {
[1765]1405 max_count = 6;
1406 min_count = 3;
[821]1407 } else {
[1765]1408 max_count = 7;
1409 min_count = 4;
[821]1410 }
1411 }
1412}
1413
[1765]1414
[821]1415/* ===========================================================================
1416 * Construct the Huffman tree for the bit lengths and return the index in
1417 * bl_order of the last bit length code to send.
1418 */
1419static int build_bl_tree(void)
1420{
1421 int max_blindex; /* index of last bit length code of non zero freq */
1422
1423 /* Determine the bit length frequencies for literal and distance trees */
[1765]1424 scan_tree(G2.dyn_ltree, G2.l_desc.max_code);
1425 scan_tree(G2.dyn_dtree, G2.d_desc.max_code);
[821]1426
1427 /* Build the bit length tree: */
[1765]1428 build_tree(&G2.bl_desc);
[821]1429 /* opt_len now includes the length of the tree representations, except
1430 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
1431 */
1432
1433 /* Determine the number of bit length codes to send. The pkzip format
1434 * requires that at least 4 bit length codes be sent. (appnote.txt says
1435 * 3 but the actual value used is 4.)
1436 */
1437 for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) {
[1765]1438 if (G2.bl_tree[bl_order[max_blindex]].Len != 0)
[821]1439 break;
1440 }
1441 /* Update opt_len to include the bit length tree and counts */
[1765]1442 G2.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
1443 Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", G2.opt_len, G2.static_len));
[821]1444
1445 return max_blindex;
1446}
1447
[1765]1448
[821]1449/* ===========================================================================
1450 * Send the header for a block using dynamic Huffman trees: the counts, the
1451 * lengths of the bit length codes, the literal tree and the distance tree.
1452 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
1453 */
1454static void send_all_trees(int lcodes, int dcodes, int blcodes)
1455{
1456 int rank; /* index in bl_order */
1457
1458 Assert(lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
1459 Assert(lcodes <= L_CODES && dcodes <= D_CODES
1460 && blcodes <= BL_CODES, "too many codes");
1461 Tracev((stderr, "\nbl counts: "));
1462 send_bits(lcodes - 257, 5); /* not +255 as stated in appnote.txt */
1463 send_bits(dcodes - 1, 5);
1464 send_bits(blcodes - 4, 4); /* not -3 as stated in appnote.txt */
1465 for (rank = 0; rank < blcodes; rank++) {
1466 Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
[1765]1467 send_bits(G2.bl_tree[bl_order[rank]].Len, 3);
[821]1468 }
[1765]1469 Tracev((stderr, "\nbl tree: sent %ld", G1.bits_sent));
[821]1470
[1765]1471 send_tree((ct_data *) G2.dyn_ltree, lcodes - 1); /* send the literal tree */
1472 Tracev((stderr, "\nlit tree: sent %ld", G1.bits_sent));
[821]1473
[1765]1474 send_tree((ct_data *) G2.dyn_dtree, dcodes - 1); /* send the distance tree */
1475 Tracev((stderr, "\ndist tree: sent %ld", G1.bits_sent));
[821]1476}
1477
[1765]1478
[821]1479/* ===========================================================================
[1765]1480 * Save the match info and tally the frequency counts. Return true if
1481 * the current block must be flushed.
1482 */
1483static int ct_tally(int dist, int lc)
1484{
1485 G1.l_buf[G2.last_lit++] = lc;
1486 if (dist == 0) {
1487 /* lc is the unmatched char */
1488 G2.dyn_ltree[lc].Freq++;
1489 } else {
1490 /* Here, lc is the match length - MIN_MATCH */
1491 dist--; /* dist = match distance - 1 */
1492 Assert((ush) dist < (ush) MAX_DIST
1493 && (ush) lc <= (ush) (MAX_MATCH - MIN_MATCH)
1494 && (ush) D_CODE(dist) < (ush) D_CODES, "ct_tally: bad match"
1495 );
1496
1497 G2.dyn_ltree[G2.length_code[lc] + LITERALS + 1].Freq++;
1498 G2.dyn_dtree[D_CODE(dist)].Freq++;
1499
1500 G1.d_buf[G2.last_dist++] = dist;
1501 G2.flags |= G2.flag_bit;
1502 }
1503 G2.flag_bit <<= 1;
1504
1505 /* Output the flags if they fill a byte: */
1506 if ((G2.last_lit & 7) == 0) {
1507 G2.flag_buf[G2.last_flags++] = G2.flags;
1508 G2.flags = 0;
1509 G2.flag_bit = 1;
1510 }
1511 /* Try to guess if it is profitable to stop the current block here */
1512 if ((G2.last_lit & 0xfff) == 0) {
1513 /* Compute an upper bound for the compressed length */
1514 ulg out_length = G2.last_lit * 8L;
1515 ulg in_length = (ulg) G1.strstart - G1.block_start;
1516 int dcode;
1517
1518 for (dcode = 0; dcode < D_CODES; dcode++) {
1519 out_length += G2.dyn_dtree[dcode].Freq * (5L + extra_dbits[dcode]);
1520 }
1521 out_length >>= 3;
1522 Trace((stderr,
1523 "\nlast_lit %u, last_dist %u, in %ld, out ~%ld(%ld%%) ",
1524 G2.last_lit, G2.last_dist, in_length, out_length,
1525 100L - out_length * 100L / in_length));
1526 if (G2.last_dist < G2.last_lit / 2 && out_length < in_length / 2)
1527 return 1;
1528 }
1529 return (G2.last_lit == LIT_BUFSIZE - 1 || G2.last_dist == DIST_BUFSIZE);
1530 /* We avoid equality with LIT_BUFSIZE because of wraparound at 64K
1531 * on 16 bit machines and because stored blocks are restricted to
1532 * 64K-1 bytes.
1533 */
1534}
1535
1536/* ===========================================================================
1537 * Send the block data compressed using the given Huffman trees
1538 */
1539static void compress_block(ct_data * ltree, ct_data * dtree)
1540{
1541 unsigned dist; /* distance of matched string */
1542 int lc; /* match length or unmatched char (if dist == 0) */
1543 unsigned lx = 0; /* running index in l_buf */
1544 unsigned dx = 0; /* running index in d_buf */
1545 unsigned fx = 0; /* running index in flag_buf */
1546 uch flag = 0; /* current flags */
1547 unsigned code; /* the code to send */
1548 int extra; /* number of extra bits to send */
1549
1550 if (G2.last_lit != 0) do {
1551 if ((lx & 7) == 0)
1552 flag = G2.flag_buf[fx++];
1553 lc = G1.l_buf[lx++];
1554 if ((flag & 1) == 0) {
1555 SEND_CODE(lc, ltree); /* send a literal byte */
1556 Tracecv(isgraph(lc), (stderr, " '%c' ", lc));
1557 } else {
1558 /* Here, lc is the match length - MIN_MATCH */
1559 code = G2.length_code[lc];
1560 SEND_CODE(code + LITERALS + 1, ltree); /* send the length code */
1561 extra = extra_lbits[code];
1562 if (extra != 0) {
1563 lc -= G2.base_length[code];
1564 send_bits(lc, extra); /* send the extra length bits */
1565 }
1566 dist = G1.d_buf[dx++];
1567 /* Here, dist is the match distance - 1 */
1568 code = D_CODE(dist);
1569 Assert(code < D_CODES, "bad d_code");
1570
1571 SEND_CODE(code, dtree); /* send the distance code */
1572 extra = extra_dbits[code];
1573 if (extra != 0) {
1574 dist -= G2.base_dist[code];
1575 send_bits(dist, extra); /* send the extra distance bits */
1576 }
1577 } /* literal or match pair ? */
1578 flag >>= 1;
1579 } while (lx < G2.last_lit);
1580
1581 SEND_CODE(END_BLOCK, ltree);
1582}
1583
1584
1585/* ===========================================================================
[821]1586 * Determine the best encoding for the current block: dynamic trees, static
1587 * trees or store, and output the encoded block to the zip file. This function
1588 * returns the total compressed length for the file so far.
1589 */
1590static ulg flush_block(char *buf, ulg stored_len, int eof)
1591{
[1765]1592 ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
1593 int max_blindex; /* index of last bit length code of non zero freq */
[821]1594
[1765]1595 G2.flag_buf[G2.last_flags] = G2.flags; /* Save the flags for the last 8 items */
[821]1596
1597 /* Construct the literal and distance trees */
[1765]1598 build_tree(&G2.l_desc);
1599 Tracev((stderr, "\nlit data: dyn %ld, stat %ld", G2.opt_len, G2.static_len));
[821]1600
[1765]1601 build_tree(&G2.d_desc);
1602 Tracev((stderr, "\ndist data: dyn %ld, stat %ld", G2.opt_len, G2.static_len));
[821]1603 /* At this point, opt_len and static_len are the total bit lengths of
1604 * the compressed block data, excluding the tree representations.
1605 */
1606
1607 /* Build the bit length tree for the above two trees, and get the index
1608 * in bl_order of the last bit length code to send.
1609 */
1610 max_blindex = build_bl_tree();
1611
1612 /* Determine the best encoding. Compute first the block length in bytes */
[1765]1613 opt_lenb = (G2.opt_len + 3 + 7) >> 3;
1614 static_lenb = (G2.static_len + 3 + 7) >> 3;
[821]1615
1616 Trace((stderr,
1617 "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u dist %u ",
[1765]1618 opt_lenb, G2.opt_len, static_lenb, G2.static_len, stored_len,
1619 G2.last_lit, G2.last_dist));
[821]1620
1621 if (static_lenb <= opt_lenb)
1622 opt_lenb = static_lenb;
1623
1624 /* If compression failed and this is the first and last block,
1625 * and if the zip file is seekable (to rewrite the local header),
1626 * the whole file is transformed into a stored file:
1627 */
[1765]1628 if (stored_len <= opt_lenb && eof && G2.compressed_len == 0L && seekable()) {
[821]1629 /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
[1765]1630 if (buf == NULL)
[821]1631 bb_error_msg("block vanished");
1632
1633 copy_block(buf, (unsigned) stored_len, 0); /* without header */
[1765]1634 G2.compressed_len = stored_len << 3;
[821]1635
[1765]1636 } else if (stored_len + 4 <= opt_lenb && buf != NULL) {
[821]1637 /* 4: two words for the lengths */
1638 /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
1639 * Otherwise we can't have processed more than WSIZE input bytes since
1640 * the last block flush, because compression would have been
1641 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
1642 * transform a block into a stored block.
1643 */
1644 send_bits((STORED_BLOCK << 1) + eof, 3); /* send block type */
[1765]1645 G2.compressed_len = (G2.compressed_len + 3 + 7) & ~7L;
1646 G2.compressed_len += (stored_len + 4) << 3;
[821]1647
1648 copy_block(buf, (unsigned) stored_len, 1); /* with header */
1649
1650 } else if (static_lenb == opt_lenb) {
1651 send_bits((STATIC_TREES << 1) + eof, 3);
[1765]1652 compress_block((ct_data *) G2.static_ltree, (ct_data *) G2.static_dtree);
1653 G2.compressed_len += 3 + G2.static_len;
[821]1654 } else {
1655 send_bits((DYN_TREES << 1) + eof, 3);
[1765]1656 send_all_trees(G2.l_desc.max_code + 1, G2.d_desc.max_code + 1,
[821]1657 max_blindex + 1);
[1765]1658 compress_block((ct_data *) G2.dyn_ltree, (ct_data *) G2.dyn_dtree);
1659 G2.compressed_len += 3 + G2.opt_len;
[821]1660 }
[1765]1661 Assert(G2.compressed_len == G1.bits_sent, "bad compressed size");
[821]1662 init_block();
1663
1664 if (eof) {
1665 bi_windup();
[1765]1666 G2.compressed_len += 7; /* align on byte boundary */
[821]1667 }
[1765]1668 Tracev((stderr, "\ncomprlen %lu(%lu) ", G2.compressed_len >> 3,
1669 G2.compressed_len - 7 * eof));
[821]1670
[1765]1671 return G2.compressed_len >> 3;
[821]1672}
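/* Example of the size comparison made above: with opt_len = 1000 bits and
 * static_len = 1100 bits, opt_lenb = (1000+3+7)>>3 = 126 bytes and
 * static_lenb = (1100+3+7)>>3 = 138 bytes ("+3" is the 3-bit block header,
 * "+7" rounds up to a whole byte).  A block whose stored_len is 120 bytes
 * would then satisfy stored_len + 4 <= opt_lenb and be emitted as a
 * stored block instead.
 */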
1673
[1765]1674
[821]1675/* ===========================================================================
[1765]1676 * Update a hash value with the given input byte
1677 * IN assertion: all calls to UPDATE_HASH are made with consecutive
1678 * input characters, so that a running hash key can be computed from the
1679 * previous key instead of complete recalculation each time.
[821]1680 */
[1765]1681#define UPDATE_HASH(h, c) (h = (((h)<<H_SHIFT) ^ (c)) & HASH_MASK)
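/* Illustrative expansion: feeding three consecutive bytes b0,b1,b2 gives
 *   h = ((b0 << 2*H_SHIFT) ^ (b1 << H_SHIFT) ^ b2) & HASH_MASK
 * Anything older has been shifted past HASH_MASK (H_SHIFT is chosen so
 * that MIN_MATCH * H_SHIFT covers the hash width), so after MIN_MATCH (3)
 * updates the hash depends only on the current 3-byte string and never
 * needs to be recomputed from scratch.
 */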
1682
1683
1684/* ===========================================================================
1685 * Deflate the input with lazy evaluation of matches: a match is
1686 * finally adopted only if there is no better match at the next window
1687 * position (this yields better compression than a greedy choice).
1688 *
1689 * Processes a new input file and returns its compressed length. Sets
1690 * the compressed length, crc, deflate flags and internal file
1691 * attributes.
1692 */
1693
1694/* Flush the current block, with given end-of-file flag.
1695 * IN assertion: strstart is set to the end of the current match. */
1696#define FLUSH_BLOCK(eof) \
1697 flush_block( \
1698 G1.block_start >= 0L \
1699 ? (char*)&G1.window[(unsigned)G1.block_start] \
1700 : (char*)NULL, \
1701 (ulg)G1.strstart - G1.block_start, \
1702 (eof) \
1703 )
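/* Note: block_start may have gone negative if fill_window() slid the
 * window since the block began; the block's first bytes are then no
 * longer in window[], so NULL is passed and flush_block() cannot take
 * a "stored block" path for it.
 */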
1704
1705/* Insert string s in the dictionary and set match_head to the previous head
1706 * of the hash chain (the most recent string with same hash key). Return
1707 * the previous length of the hash chain.
1708 * IN assertion: all calls to INSERT_STRING are made with consecutive
1709 * input characters and the first MIN_MATCH bytes of s are valid
1710 * (except for the last MIN_MATCH-1 bytes of the input file). */
1711#define INSERT_STRING(s, match_head) \
1712do { \
1713 UPDATE_HASH(G1.ins_h, G1.window[(s) + MIN_MATCH-1]); \
1714 G1.prev[(s) & WMASK] = match_head = head[G1.ins_h]; \
1715 head[G1.ins_h] = (s); \
1716} while (0)
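/* What one INSERT_STRING(s, hash_head) call does: it folds window[s+2]
 * into ins_h (window[s] and window[s+1] were folded by the previous two
 * calls), saves the old bucket head in prev[s & WMASK] so longest_match()
 * can walk back through earlier occurrences of the same 3-byte string,
 * and makes position s the new head of that hash chain.
 */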
1717
1718static ulg deflate(void)
[821]1719{
[1765]1720 IPos hash_head; /* head of hash chain */
1721 IPos prev_match; /* previous match */
1722 int flush; /* set if current block must be flushed */
1723 int match_available = 0; /* set if previous match exists */
1724 unsigned match_length = MIN_MATCH - 1; /* length of best match */
[821]1725
[1765]1726 /* Process the input block. */
1727 while (G1.lookahead != 0) {
1728 /* Insert the string window[strstart .. strstart+2] in the
1729 * dictionary, and set hash_head to the head of the hash chain:
1730 */
1731 INSERT_STRING(G1.strstart, hash_head);
[821]1732
[1765]1733 /* Find the longest match, discarding those <= prev_length.
1734 */
1735 G1.prev_length = match_length;
1736 prev_match = G1.match_start;
1737 match_length = MIN_MATCH - 1;
[821]1738
[1765]1739 if (hash_head != 0 && G1.prev_length < max_lazy_match
1740 && G1.strstart - hash_head <= MAX_DIST
1741 ) {
1742 /* To simplify the code, we prevent matches with the string
1743 * of window index 0 (in particular we have to avoid a match
1744 * of the string with itself at the start of the input file).
1745 */
1746 match_length = longest_match(hash_head);
1747 /* longest_match() sets match_start */
1748 if (match_length > G1.lookahead)
1749 match_length = G1.lookahead;
[821]1750
[1765]1751 /* Ignore a length 3 match if it is too distant: */
1752 if (match_length == MIN_MATCH && G1.strstart - G1.match_start > TOO_FAR) {
1753 /* If prev_match is also MIN_MATCH, G1.match_start is garbage
1754 * but we will ignore the current match anyway.
1755 */
1756 match_length--;
1757 }
[821]1758 }
[1765]1759 /* If there was a match at the previous step and the current
1760 * match is not better, output the previous match:
1761 */
1762 if (G1.prev_length >= MIN_MATCH && match_length <= G1.prev_length) {
1763 check_match(G1.strstart - 1, prev_match, G1.prev_length);
1764 flush = ct_tally(G1.strstart - 1 - prev_match, G1.prev_length - MIN_MATCH);
1765
1766 /* Insert in hash table all strings up to the end of the match.
1767 * strstart-1 and strstart are already inserted.
1768 */
1769 G1.lookahead -= G1.prev_length - 1;
1770 G1.prev_length -= 2;
1771 do {
1772 G1.strstart++;
1773 INSERT_STRING(G1.strstart, hash_head);
1774 /* strstart never exceeds WSIZE-MAX_MATCH, so there are
1775 * always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH
1776 * these bytes are garbage, but it does not matter since the
1777 * next lookahead bytes will always be emitted as literals.
1778 */
1779 } while (--G1.prev_length != 0);
1780 match_available = 0;
1781 match_length = MIN_MATCH - 1;
1782 G1.strstart++;
1783 if (flush) {
1784 FLUSH_BLOCK(0);
1785 G1.block_start = G1.strstart;
1786 }
1787 } else if (match_available) {
1788 /* If there was no match at the previous position, output a
1789 * single literal. If there was a match but the current match
1790 * is longer, truncate the previous match to a single literal.
1791 */
1792 Tracevv((stderr, "%c", G1.window[G1.strstart - 1]));
1793 if (ct_tally(0, G1.window[G1.strstart - 1])) {
1794 FLUSH_BLOCK(0);
1795 G1.block_start = G1.strstart;
1796 }
1797 G1.strstart++;
1798 G1.lookahead--;
1799 } else {
1800 /* There is no previous match to compare with, wait for
1801 * the next step to decide.
1802 */
1803 match_available = 1;
1804 G1.strstart++;
1805 G1.lookahead--;
1806 }
1807		Assert(G1.strstart <= G1.isize && G1.lookahead <= G1.isize, "a bit too far");
1808
1809 /* Make sure that we always have enough lookahead, except
1810 * at the end of the input file. We need MAX_MATCH bytes
1811 * for the next match, plus MIN_MATCH bytes to insert the
1812 * string following the next match.
1813 */
1814 while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile)
1815 fill_window();
[821]1816 }
[1765]1817 if (match_available)
1818 ct_tally(0, G1.window[G1.strstart - 1]);
1819
1820 return FLUSH_BLOCK(1); /* eof */
[821]1821}
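/* Lazy-evaluation summary of the loop above: at each position the best
 * match found there is only remembered (prev_length / prev_match), not
 * emitted.  Only after examining the next position does the code decide:
 * if that position cannot do better, the remembered match is emitted;
 * otherwise the remembered match is demoted to a single literal byte and
 * the longer match becomes the new candidate.
 */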
1822
[1765]1823
[821]1824/* ===========================================================================
[1765]1825 * Initialize the bit string routines.
[821]1826 */
[1765]1827static void bi_init(void)
[821]1828{
[1765]1829 G1.bi_buf = 0;
1830 G1.bi_valid = 0;
1831#ifdef DEBUG
1832 G1.bits_sent = 0L;
1833#endif
1834}
[821]1835
1836
[1765]1837/* ===========================================================================
1838 * Initialize the "longest match" routines for a new file
1839 */
1840static void lm_init(ush * flagsp)
1841{
1842 unsigned j;
[821]1843
[1765]1844 /* Initialize the hash table. */
1845 memset(head, 0, HASH_SIZE * sizeof(*head));
1846 /* prev will be initialized on the fly */
1847
1848 /* speed options for the general purpose bit flag */
1849 *flagsp |= 2; /* FAST 4, SLOW 2 */
1850 /* ??? reduce max_chain_length for binary files */
1851
1852 G1.strstart = 0;
1853 G1.block_start = 0L;
1854
1855 G1.lookahead = file_read(G1.window,
1856 sizeof(int) <= 2 ? (unsigned) WSIZE : 2 * WSIZE);
1857
1858 if (G1.lookahead == 0 || G1.lookahead == (unsigned) -1) {
1859 G1.eofile = 1;
1860 G1.lookahead = 0;
1861 return;
1862 }
1863 G1.eofile = 0;
1864 /* Make sure that we always have enough lookahead. This is important
1865 * if input comes from a device such as a tty.
1866 */
1867 while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile)
1868 fill_window();
1869
1870 G1.ins_h = 0;
1871 for (j = 0; j < MIN_MATCH - 1; j++)
1872 UPDATE_HASH(G1.ins_h, G1.window[j]);
1873 /* If lookahead < MIN_MATCH, ins_h is garbage, but this is
1874 * not important since only literal bytes will be emitted.
1875 */
[821]1876}
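/* State after lm_init(): the window holds up to 2*WSIZE bytes of input,
 * head[] is empty, and ins_h already covers window[0..MIN_MATCH-2], so the
 * first INSERT_STRING() in deflate() completes the hash of the first
 * 3-byte string.
 */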
1877
[1765]1878
[821]1879/* ===========================================================================
[1765]1880 * Allocate the match buffer, initialize the various tables and save the
1881 * location of the internal file attribute (ascii/binary) and method
1882 * (DEFLATE/STORE).
1883 * One callsite in zip()
[821]1884 */
[1765]1885static void ct_init(void)
[821]1886{
[1765]1887 int n; /* iterates over tree elements */
1888 int length; /* length value */
1889 int code; /* code value */
1890 int dist; /* distance index */
[821]1891
[1765]1892 G2.compressed_len = 0L;
1893
1894#ifdef NOT_NEEDED
1895 if (G2.static_dtree[0].Len != 0)
1896 return; /* ct_init already called */
1897#endif
1898
1899 /* Initialize the mapping length (0..255) -> length code (0..28) */
1900 length = 0;
1901 for (code = 0; code < LENGTH_CODES - 1; code++) {
1902 G2.base_length[code] = length;
1903 for (n = 0; n < (1 << extra_lbits[code]); n++) {
1904 G2.length_code[length++] = code;
1905 }
[821]1906 }
[1765]1907 Assert(length == 256, "ct_init: length != 256");
1908 /* Note that the length 255 (match length 258) can be represented
1909 * in two different ways: code 284 + 5 bits or code 285, so we
1910 * overwrite length_code[255] to use the best encoding:
1911 */
1912 G2.length_code[length - 1] = code;
[821]1913
[1765]1914 /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
1915 dist = 0;
1916 for (code = 0; code < 16; code++) {
1917 G2.base_dist[code] = dist;
1918 for (n = 0; n < (1 << extra_dbits[code]); n++) {
1919 G2.dist_code[dist++] = code;
1920 }
1921 }
1922 Assert(dist == 256, "ct_init: dist != 256");
1923 dist >>= 7; /* from now on, all distances are divided by 128 */
1924 for (; code < D_CODES; code++) {
1925 G2.base_dist[code] = dist << 7;
1926 for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) {
1927 G2.dist_code[256 + dist++] = code;
1928 }
1929 }
1930 Assert(dist == 256, "ct_init: 256+dist != 512");
[821]1931
[1765]1932 /* Construct the codes of the static literal tree */
1933 /* already zeroed - it's in bss
1934 for (n = 0; n <= MAX_BITS; n++)
1935 G2.bl_count[n] = 0; */
[821]1936
[1765]1937 n = 0;
1938 while (n <= 143) {
1939 G2.static_ltree[n++].Len = 8;
1940 G2.bl_count[8]++;
1941 }
1942 while (n <= 255) {
1943 G2.static_ltree[n++].Len = 9;
1944 G2.bl_count[9]++;
1945 }
1946 while (n <= 279) {
1947 G2.static_ltree[n++].Len = 7;
1948 G2.bl_count[7]++;
1949 }
1950 while (n <= 287) {
1951 G2.static_ltree[n++].Len = 8;
1952 G2.bl_count[8]++;
1953 }
1954 /* Codes 286 and 287 do not exist, but we must include them in the
1955 * tree construction to get a canonical Huffman tree (longest code
1956 * all ones)
1957 */
1958 gen_codes((ct_data *) G2.static_ltree, L_CODES + 1);
[821]1959
[1765]1960 /* The static distance tree is trivial: */
1961 for (n = 0; n < D_CODES; n++) {
1962 G2.static_dtree[n].Len = 5;
1963 G2.static_dtree[n].Code = bi_reverse(n, 5);
1964 }
1965
1966 /* Initialize the first block of the first file: */
1967 init_block();
[821]1968}
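/* Mapping examples produced by the loops above (standard deflate tables):
 *   match length 3  -> lc 0          -> length_code[0] = 0  -> symbol 257
 *   match length 10 -> lc 7          -> length_code[7] = 7  -> symbol 264
 *   distance 1      -> stored dist 0 -> D_CODE(0) = dist_code[0] = 0
 *   distance 32768  -> stored dist 32767 -> dist_code[256 + (32767>>7)] = 29
 * (symbol = code + LITERALS + 1, as sent by compress_block()).
 */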
1969
1970
1971/* ===========================================================================
1972 * Deflate in to out.
1973 * IN assertions: the input and output buffers are cleared.
1974 */
[1765]1975
1976static void zip(ulg time_stamp)
[821]1977{
[1765]1978 ush deflate_flags = 0; /* pkzip -es, -en or -ex equivalent */
[821]1979
[1765]1980 G1.outcnt = 0;
[821]1981
1982 /* Write the header to the gzip file. See algorithm.doc for the format */
[1765]1983 /* magic header for gzip files: 1F 8B */
1984 /* compression method: 8 (DEFLATED) */
1985 /* general flags: 0 */
1986 put_32bit(0x00088b1f);
1987 put_32bit(time_stamp);
[821]1988
1989 /* Write deflated file to zip file */
[1765]1990 G1.crc = ~0;
[821]1991
[1765]1992 bi_init();
1993 ct_init();
[821]1994 lm_init(&deflate_flags);
1995
[1765]1996 put_8bit(deflate_flags); /* extra flags */
1997 put_8bit(3); /* OS identifier = 3 (Unix) */
[821]1998
[1765]1999 deflate();
[821]2000
2001 /* Write the crc and uncompressed size */
[1765]2002 put_32bit(~G1.crc);
2003 put_32bit(G1.isize);
[821]2004
2005 flush_outbuf();
2006}
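/* Byte layout written above (gzip member format, values as this code
 * produces them):
 *   offset 0-3:  1f 8b 08 00   magic bytes, CM = 8 (deflate), FLG = 0
 *   offset 4-7:  mtime         time_stamp argument (st_ctime of stdin, or 0)
 *   offset 8:    XFL           deflate_flags from lm_init() (2 = maximum/slow)
 *   offset 9:    OS            3 = Unix
 * followed by the deflate stream, then CRC32 and ISIZE, 4 bytes each.
 */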
2007
2008
[1765]2009/* ======================================================================== */
2010static
2011char* make_new_name_gzip(char *filename)
[821]2012{
[1765]2013 return xasprintf("%s.gz", filename);
2014}
[821]2015
[1765]2016static
2017USE_DESKTOP(long long) int pack_gzip(void)
2018{
2019 struct stat s;
[821]2020
[1765]2021 clear_bufs();
2022 s.st_ctime = 0;
2023 fstat(STDIN_FILENO, &s);
2024 zip(s.st_ctime);
2025 return 0;
[821]2026}
2027
[1765]2028int gzip_main(int argc, char **argv);
2029int gzip_main(int argc, char **argv)
[821]2030{
[1765]2031 unsigned opt;
[821]2032
[1765]2033 /* Must match bbunzip's constants OPT_STDOUT, OPT_FORCE! */
2034 opt = getopt32(argv, "cfv" USE_GUNZIP("d") "q123456789" );
2035 option_mask32 &= 0x7; /* Clear -d, ignore -q, -0..9 */
2036 //if (opt & 0x1) // -c
2037 //if (opt & 0x2) // -f
2038 //if (opt & 0x4) // -v
2039#if ENABLE_GUNZIP /* gunzip_main may not be visible... */
2040 if (opt & 0x8) { // -d
2041 return gunzip_main(argc, argv);
2042 }
2043#endif
2044 argv += optind;
2045
2046 PTR_TO_GLOBALS = xzalloc(sizeof(struct globals) + sizeof(struct globals2))
2047 + sizeof(struct globals);
2048 G2.l_desc.dyn_tree = G2.dyn_ltree;
2049 G2.l_desc.static_tree = G2.static_ltree;
2050 G2.l_desc.extra_bits = extra_lbits;
2051 G2.l_desc.extra_base = LITERALS + 1;
2052 G2.l_desc.elems = L_CODES;
2053 G2.l_desc.max_length = MAX_BITS;
2054 //G2.l_desc.max_code = 0;
2055
2056 G2.d_desc.dyn_tree = G2.dyn_dtree;
2057 G2.d_desc.static_tree = G2.static_dtree;
2058 G2.d_desc.extra_bits = extra_dbits;
2059 //G2.d_desc.extra_base = 0;
2060 G2.d_desc.elems = D_CODES;
2061 G2.d_desc.max_length = MAX_BITS;
2062 //G2.d_desc.max_code = 0;
2063
2064 G2.bl_desc.dyn_tree = G2.bl_tree;
2065 //G2.bl_desc.static_tree = NULL;
2066	G2.bl_desc.extra_bits = extra_blbits;
2067 //G2.bl_desc.extra_base = 0;
2068 G2.bl_desc.elems = BL_CODES;
2069 G2.bl_desc.max_length = MAX_BL_BITS;
2070 //G2.bl_desc.max_code = 0;
2071
2072 /* Allocate all global buffers (for DYN_ALLOC option) */
2073 ALLOC(uch, G1.l_buf, INBUFSIZ);
2074 ALLOC(uch, G1.outbuf, OUTBUFSIZ);
2075 ALLOC(ush, G1.d_buf, DIST_BUFSIZE);
2076 ALLOC(uch, G1.window, 2L * WSIZE);
2077 ALLOC(ush, G1.prev, 1L << BITS);
2078
2079 /* Initialise the CRC32 table */
2080 G1.crc_32_tab = crc32_filltable(NULL, 0);
2081
2082 return bbunpack(argv, make_new_name_gzip, pack_gzip);
[821]2083}
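/* Typical invocations handled by this applet (sketch; assumes the usual
 * busybox applet links are installed):
 *   gzip FILE            # replaces FILE with FILE.gz
 *   gzip -c FILE > out.gz
 *   cat FILE | gzip > out.gz
 *   gzip -d FILE.gz      # only when gunzip support is compiled in
 * The -q and -1..-9 options are accepted but ignored; compression always
 * behaves like "gzip -9".
 */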