1 /* vi: set sw=4 ts=4: */
3 * Gzip implementation for busybox
5 * Based on GNU gzip Copyright (C) 1992-1993 Jean-loup Gailly.
7 * Originally adjusted for busybox by Charles P. Wright <cpw@unix.asb.com>
8 * "this is a stripped down version of gzip I put into busybox, it does
9 * only standard in to standard out with -9 compression. It also requires
10 * the zcat module for some important functions."
12 * Adjusted further by Erik Andersen <andersen@codepoet.org> to support
13 * files as well as stdin/stdout, and to generally behave itself wrt
14 * command line handling.
16 * Licensed under GPLv2 or later, see file LICENSE in this source tree.
18 /* big objects in bss:
20 * 00000074 b base_length
21 * 00000078 b base_dist
22 * 00000078 b static_dtree
24 * 000000f4 b dyn_dtree
25 * 00000100 b length_code
26 * 00000200 b dist_code
30 * 00000480 b static_ltree
31 * 000008f4 b dyn_ltree
33 /* TODO: full support for -v for DESKTOP
34 * "/usr/bin/gzip -v a bogus aa" should say:
35 a: 85.1% -- replaced with a.gz
36 gzip: bogus: No such file or directory
37 aa: 85.1% -- replaced with aa.gz
44 //config: gzip is used to compress files.
45 //config: It's probably the most widely used UNIX compression program.
47 //config:config FEATURE_GZIP_LONG_OPTIONS
48 //config: bool "Enable long options"
50 //config: depends on GZIP && LONG_OPTS
52 //config: Enable use of long options, increases size by about 106 Bytes
54 //config:config GZIP_FAST
55 //config: int "Trade memory for gzip speed (0:small,slow - 2:fast,big)"
58 //config: depends on GZIP
60 //config: Enable big memory options for gzip.
61 //config: 0: small buffers, small hash-tables
62 //config: 1: larger buffers, larger hash-tables
63 //config: 2: larger buffers, largest hash-tables
64 //config: Larger models may give slightly better compression
66 //applet:IF_GZIP(APPLET(gzip, BB_DIR_BIN, BB_SUID_DROP))
67 //kbuild:lib-$(CONFIG_GZIP) += gzip.o
69 //usage:#define gzip_trivial_usage
70 //usage: "[-cfd] [FILE]..."
71 //usage:#define gzip_full_usage "\n\n"
72 //usage: "Compress FILEs (or stdin)\n"
73 //usage: "\n -d Decompress"
74 //usage: "\n -c Write to stdout"
75 //usage: "\n -f Force"
77 //usage:#define gzip_example_usage
78 //usage: "$ ls -la /tmp/busybox*\n"
79 //usage: "-rw-rw-r-- 1 andersen andersen 1761280 Apr 14 17:47 /tmp/busybox.tar\n"
80 //usage: "$ gzip /tmp/busybox.tar\n"
81 //usage: "$ ls -la /tmp/busybox*\n"
82 //usage: "-rw-rw-r-- 1 andersen andersen 554058 Apr 14 17:49 /tmp/busybox.tar.gz\n"
85 #include "bb_archive.h"
88 /* ===========================================================================
91 /* Diagnostic functions */
93 # define Assert(cond,msg) { if (!(cond)) bb_error_msg(msg); }
94 # define Trace(x) fprintf x
95 # define Tracev(x) {if (verbose) fprintf x; }
96 # define Tracevv(x) {if (verbose > 1) fprintf x; }
97 # define Tracec(c,x) {if (verbose && (c)) fprintf x; }
98 # define Tracecv(c,x) {if (verbose > 1 && (c)) fprintf x; }
100 # define Assert(cond,msg)
105 # define Tracecv(c,x)
109 /* ===========================================================================
111 #if CONFIG_GZIP_FAST == 0
113 #elif CONFIG_GZIP_FAST == 1
115 #elif CONFIG_GZIP_FAST == 2
118 # error "Invalid CONFIG_GZIP_FAST value"
123 # define INBUFSIZ 0x2000 /* input buffer size */
125 # define INBUFSIZ 0x8000 /* input buffer size */
131 # define OUTBUFSIZ 8192 /* output buffer size */
133 # define OUTBUFSIZ 16384 /* output buffer size */
139 # define DIST_BUFSIZE 0x2000 /* buffer for distances, see trees.c */
141 # define DIST_BUFSIZE 0x8000 /* buffer for distances, see trees.c */
146 #define ASCII_FLAG 0x01 /* bit 0 set: file probably ascii text */
147 #define CONTINUATION 0x02 /* bit 1 set: continuation of multi-part gzip file */
148 #define EXTRA_FIELD 0x04 /* bit 2 set: extra field present */
149 #define ORIG_NAME 0x08 /* bit 3 set: original file name present */
150 #define COMMENT 0x10 /* bit 4 set: file comment present */
151 #define RESERVED 0xC0 /* bit 6,7: reserved */
153 /* internal file attribute */
154 #define UNKNOWN 0xffff
159 # define WSIZE 0x8000 /* window size--must be a power of two, and */
160 #endif /* at least 32K for zip's deflate method */
163 #define MAX_MATCH 258
164 /* The minimum and maximum match lengths */
166 #define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
167 /* Minimum amount of lookahead, except at the end of the input file.
168 * See deflate.c for comments about the MIN_MATCH+1.
171 #define MAX_DIST (WSIZE-MIN_LOOKAHEAD)
172 /* In order to simplify the code, particularly on 16 bit machines, match
173 * distances are limited to MAX_DIST instead of WSIZE.
177 # define MAX_PATH_LEN 1024 /* max pathname length */
180 #define seekable() 0 /* force sequential output */
181 #define translate_eol 0 /* no option -a yet */
186 #define INIT_BITS 9 /* Initial number of bits per code */
188 #define BIT_MASK 0x1f /* Mask for 'number of compression bits' */
189 /* Mask 0x20 is reserved to mean a fourth header byte, and 0x40 is free.
190 * It's a pity that old uncompress does not check bit 0x20. That makes
191 * extension of the format actually undesirable because old compress
192 * would just crash on the new format instead of giving a meaningful
193 * error message. It does check the number of bits, but it's more
194 * helpful to say "unsupported format, get a new version" than
195 * "can only handle 16 bits".
199 # define MAX_SUFFIX MAX_EXT_CHARS
201 # define MAX_SUFFIX 30
205 /* ===========================================================================
206 * Compile with MEDIUM_MEM to reduce the memory requirements or
207 * with SMALL_MEM to use as little memory as possible. Use BIG_MEM if the
208 * entire input file can be held in memory (not possible on 16 bit systems).
209 * Warning: defining these symbols affects HASH_BITS (see below) and thus
210 * affects the compression ratio. The compressed output
211 * is still correct, and might even be smaller in some cases.
215 # define HASH_BITS 13 /* Number of bits used to hash strings */
218 # define HASH_BITS 14
221 # define HASH_BITS 15
222 /* For portability to 16 bit machines, do not use values above 15. */
225 #define HASH_SIZE (unsigned)(1<<HASH_BITS)
226 #define HASH_MASK (HASH_SIZE-1)
227 #define WMASK (WSIZE-1)
228 /* HASH_SIZE and WSIZE must be powers of two */
230 # define TOO_FAR 4096
232 /* Matches of length 3 are discarded if their distance exceeds TOO_FAR */
235 /* ===========================================================================
236 * These types are not really 'char', 'short' and 'long'
239 typedef uint16_t ush;
240 typedef uint32_t ulg;
244 typedef unsigned IPos;
245 /* A Pos is an index in the character window. We use short instead of int to
246 * save space in the various tables. IPos is used only for parameter passing.
250 WINDOW_SIZE = 2 * WSIZE,
251 /* window size, 2*WSIZE except for MMAP or BIG_MEM, where it is the
252 * input file length plus MIN_LOOKAHEAD.
255 max_chain_length = 4096,
256 /* To speed up deflation, hash chains are never searched beyond this length.
257 * A higher limit improves compression ratio but degrades the speed.
260 max_lazy_match = 258,
261 /* Attempt to find a better match only when the current match is strictly
262 * smaller than this value. This mechanism is used only for compression
266 max_insert_length = max_lazy_match,
267 /* Insert new strings in the hash table only if the match length
268 * is not greater than this length. This saves time but degrades compression.
269 * max_insert_length is used only for compression levels <= 3.
273 /* Use a faster search when the previous match is longer than this */
275 /* Values for max_lazy_match, good_match and max_chain_length, depending on
276 * the desired pack level (0..9). The values given below have been tuned to
277 * exclude worst case performance for pathological files. Better values may be
278 * found for specific files.
281 nice_match = 258, /* Stop searching when current match exceeds this */
282 /* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
283 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
293 /* window position at the beginning of the current output block. Gets
294 * negative when the window is moved backwards.
296 unsigned ins_h; /* hash index of string to be inserted */
298 #define H_SHIFT ((HASH_BITS+MIN_MATCH-1) / MIN_MATCH)
299 /* Number of bits by which ins_h and del_h must be shifted at each
300 * input step. It must be such that after MIN_MATCH steps, the oldest
301 * byte no longer takes part in the hash key, that is:
302 * H_SHIFT * MIN_MATCH >= HASH_BITS
305 unsigned prev_length;
307 /* Length of the best match at previous step. Matches not greater than this
308 * are discarded. This is used in the lazy match evaluation.
311 unsigned strstart; /* start of string to insert */
312 unsigned match_start; /* start of matching string */
313 unsigned lookahead; /* number of valid bytes ahead in window */
315 /* ===========================================================================
317 #define DECLARE(type, array, size) \
319 #define ALLOC(type, array, size) \
320 array = xzalloc((size_t)(((size)+1L)/2) * 2*sizeof(type))
321 #define FREE(array) \
322 do { free(array); array = NULL; } while (0)
326 /* buffer for literals or lengths */
327 /* DECLARE(uch, l_buf, LIT_BUFSIZE); */
328 DECLARE(uch, l_buf, INBUFSIZ);
330 DECLARE(ush, d_buf, DIST_BUFSIZE);
331 DECLARE(uch, outbuf, OUTBUFSIZ);
333 /* Sliding window. Input bytes are read into the second half of the window,
334 * and move to the first half later to keep a dictionary of at least WSIZE
335 * bytes. With this organization, matches are limited to a distance of
336 * WSIZE-MAX_MATCH bytes, but this ensures that IO is always
337 * performed with a length multiple of the block size. Also, it limits
338 * the window size to 64K, which is quite useful on MSDOS.
339 * To do: limit the window size to WSIZE+BSZ if SMALL_MEM (the code would
340 * be less efficient).
342 DECLARE(uch, window, 2L * WSIZE);
344 /* Link to older string with same hash index. To limit the size of this
345 * array to 64K, this link is maintained only for the last 32K strings.
346 * An index in this array is thus a window index modulo 32K.
348 /* DECLARE(Pos, prev, WSIZE); */
349 DECLARE(ush, prev, 1L << BITS);
351 /* Heads of the hash chains or 0. */
352 /* DECLARE(Pos, head, 1<<HASH_BITS); */
353 #define head (G1.prev + WSIZE) /* hash head (see deflate.c) */
355 /* number of input bytes */
356 ulg isize; /* only 32 bits stored in .gz file */
358 /* bbox always use stdin/stdout */
359 #define ifd STDIN_FILENO /* input file descriptor */
360 #define ofd STDOUT_FILENO /* output file descriptor */
363 unsigned insize; /* valid bytes in l_buf */
365 unsigned outcnt; /* bytes in output buffer */
367 smallint eofile; /* flag set at end of input file */
369 /* ===========================================================================
370 * Local data used by the "bit string" routines.
373 unsigned short bi_buf;
375 /* Output buffer. bits are inserted starting at the bottom (least significant
380 #define BUF_SIZE (8 * sizeof(G1.bi_buf))
381 /* Number of bits used within bi_buf. (bi_buf might be implemented on
382 * more than 16 bits on some systems.)
387 /* Current input function. Set to mem_read for in-memory compression */
390 ulg bits_sent; /* bit length of the compressed data */
393 /*uint32_t *crc_32_tab;*/
394 uint32_t crc; /* shift register contents */
397 #define G1 (*(ptr_to_globals - 1))
400 /* ===========================================================================
401 * Write the output buffer outbuf[0..outcnt-1] and update bytes_out.
402 * (used for the compressed data only)
404 static void flush_outbuf(void)
409 xwrite(ofd, (char *) G1.outbuf, G1.outcnt);
414 /* ===========================================================================
416 /* put_8bit is used for the compressed output */
417 #define put_8bit(c) \
419 G1.outbuf[G1.outcnt++] = (c); \
420 if (G1.outcnt == OUTBUFSIZ) flush_outbuf(); \
423 /* Output a 16 bit value, lsb first */
424 static void put_16bit(ush w)
426 if (G1.outcnt < OUTBUFSIZ - 2) {
427 G1.outbuf[G1.outcnt++] = w;
428 G1.outbuf[G1.outcnt++] = w >> 8;
435 static void put_32bit(ulg n)
441 /* ===========================================================================
442 * Run a set of bytes through the crc shift register. If s is a NULL
443 * pointer, then initialize the crc shift register contents instead.
444 * Return the current crc in either case.
446 static void updcrc(uch * s, unsigned n)
448 G1.crc = crc32_block_endian0(G1.crc, s, n, global_crc32_table /*G1.crc_32_tab*/);
452 /* ===========================================================================
453 * Read a new buffer from the current input file, perform end-of-line
454 * translation, and update the crc and input file size.
455 * IN assertion: size >= 2 (for end-of-line translation)
457 static unsigned file_read(void *buf, unsigned size)
461 Assert(G1.insize == 0, "l_buf not empty");
463 len = safe_read(ifd, buf, size);
464 if (len == (unsigned)(-1) || len == 0)
473 /* ===========================================================================
474 * Send a value on a given number of bits.
475 * IN assertion: length <= 16 and value fits in length bits.
477 static void send_bits(int value, int length)
480 Tracev((stderr, " l %2d v %4x ", length, value));
481 Assert(length > 0 && length <= 15, "invalid length");
482 G1.bits_sent += length;
484 /* If not enough room in bi_buf, use (valid) bits from bi_buf and
485 * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
486 * unused bits in value.
488 if (G1.bi_valid > (int) BUF_SIZE - length) {
489 G1.bi_buf |= (value << G1.bi_valid);
490 put_16bit(G1.bi_buf);
491 G1.bi_buf = (ush) value >> (BUF_SIZE - G1.bi_valid);
492 G1.bi_valid += length - BUF_SIZE;
494 G1.bi_buf |= value << G1.bi_valid;
495 G1.bi_valid += length;
500 /* ===========================================================================
501 * Reverse the first len bits of a code, using straightforward code (a faster
502 * method would use a table)
503 * IN assertion: 1 <= len <= 15
static unsigned bi_reverse(unsigned code, int len)
{
	unsigned res = 0;

	while (1) {
		res |= code & 1;	/* copy lowest remaining bit of code into result */
		if (--len <= 0) return res;
		code >>= 1;
		res <<= 1;
	}
}
518 /* ===========================================================================
519 * Write out any remaining bits in an incomplete byte.
521 static void bi_windup(void)
523 if (G1.bi_valid > 8) {
524 put_16bit(G1.bi_buf);
525 } else if (G1.bi_valid > 0) {
531 G1.bits_sent = (G1.bits_sent + 7) & ~7;
536 /* ===========================================================================
537 * Copy a stored block to the zip file, storing first the length and its
538 * one's complement if requested.
static void copy_block(char *buf, unsigned len, int header)
{
	bi_windup();		/* align on byte boundary */

	if (header) {
		/* Stored-block header: LEN and its one's complement, LSB first */
		put_16bit(len);
		put_16bit(~len);
#ifdef DEBUG
		G1.bits_sent += 2 * 16;
#endif
	}
#ifdef DEBUG
	G1.bits_sent += (ulg) len << 3;
#endif
	while (len--) {
		put_8bit(*buf++);
	}
}
560 /* ===========================================================================
561 * Fill the window when the lookahead becomes insufficient.
562 * Updates strstart and lookahead, and sets eofile if end of input file.
563 * IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0
564 * OUT assertions: at least one byte has been read, or eofile is set;
565 * file reads are performed for at least two bytes (required for the
566 * translate_eol option).
568 static void fill_window(void)
571 unsigned more = WINDOW_SIZE - G1.lookahead - G1.strstart;
572 /* Amount of free space at the end of the window. */
574 /* If the window is almost full and there is insufficient lookahead,
575 * move the upper half to the lower one to make room in the upper half.
577 if (more == (unsigned) -1) {
578 /* Very unlikely, but possible on 16 bit machine if strstart == 0
579 * and lookahead == 1 (input done one byte at time)
582 } else if (G1.strstart >= WSIZE + MAX_DIST) {
583 /* By the IN assertion, the window is not empty so we can't confuse
584 * more == 0 with more == 64K on a 16 bit machine.
586 Assert(WINDOW_SIZE == 2 * WSIZE, "no sliding with BIG_MEM");
588 memcpy(G1.window, G1.window + WSIZE, WSIZE);
589 G1.match_start -= WSIZE;
590 G1.strstart -= WSIZE; /* we now have strstart >= MAX_DIST: */
592 G1.block_start -= WSIZE;
594 for (n = 0; n < HASH_SIZE; n++) {
596 head[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
598 for (n = 0; n < WSIZE; n++) {
600 G1.prev[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
601 /* If n is not on any hash chain, prev[n] is garbage but
602 * its value will never be used.
607 /* At this point, more >= 2 */
609 n = file_read(G1.window + G1.strstart + G1.lookahead, more);
610 if (n == 0 || n == (unsigned) -1) {
619 /* ===========================================================================
620 * Set match_start to the longest match starting at the given string and
621 * return its length. Matches shorter or equal to prev_length are discarded,
622 * in which case the result is equal to prev_length and match_start is
624 * IN assertions: cur_match is the head of the hash chain for the current
625 * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
628 /* For MSDOS, OS/2 and 386 Unix, an optimized version is in match.asm or
629 * match.s. The code is functionally equivalent, so you can use the C version
632 static int longest_match(IPos cur_match)
634 unsigned chain_length = max_chain_length; /* max hash chain length */
635 uch *scan = G1.window + G1.strstart; /* current string */
636 uch *match; /* matched string */
637 int len; /* length of current match */
638 int best_len = G1.prev_length; /* best match length so far */
639 IPos limit = G1.strstart > (IPos) MAX_DIST ? G1.strstart - (IPos) MAX_DIST : 0;
640 /* Stop when cur_match becomes <= limit. To simplify the code,
641 * we prevent matches with the string of window index 0.
644 /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
645 * It is easy to get rid of this optimization if necessary.
647 #if HASH_BITS < 8 || MAX_MATCH != 258
648 # error Code too clever
650 uch *strend = G1.window + G1.strstart + MAX_MATCH;
651 uch scan_end1 = scan[best_len - 1];
652 uch scan_end = scan[best_len];
654 /* Do not waste too much time if we already have a good match: */
655 if (G1.prev_length >= good_match) {
658 Assert(G1.strstart <= WINDOW_SIZE - MIN_LOOKAHEAD, "insufficient lookahead");
661 Assert(cur_match < G1.strstart, "no future");
662 match = G1.window + cur_match;
664 /* Skip to next match if the match length cannot increase
665 * or if the match length is less than 2:
667 if (match[best_len] != scan_end
668 || match[best_len - 1] != scan_end1
669 || *match != *scan || *++match != scan[1]
674 /* The check at best_len-1 can be removed because it will be made
675 * again later. (This heuristic is not always a win.)
676 * It is not necessary to compare scan[2] and match[2] since they
677 * are always equal when the other bytes match, given that
678 * the hash keys are equal and that HASH_BITS >= 8.
682 /* We check for insufficient lookahead only every 8th comparison;
683 * the 256th check will be made at strstart+258.
686 } while (*++scan == *++match && *++scan == *++match &&
687 *++scan == *++match && *++scan == *++match &&
688 *++scan == *++match && *++scan == *++match &&
689 *++scan == *++match && *++scan == *++match && scan < strend);
691 len = MAX_MATCH - (int) (strend - scan);
692 scan = strend - MAX_MATCH;
694 if (len > best_len) {
695 G1.match_start = cur_match;
697 if (len >= nice_match)
699 scan_end1 = scan[best_len - 1];
700 scan_end = scan[best_len];
702 } while ((cur_match = G1.prev[cur_match & WMASK]) > limit
703 && --chain_length != 0);
710 /* ===========================================================================
711 * Check that the match at match_start is indeed a match.
#ifdef DEBUG
static void check_match(IPos start, IPos match, int length)
{
	/* check that the match is indeed a match */
	if (memcmp(G1.window + match, G1.window + start, length) != 0) {
		bb_error_msg(" start %d, match %d, length %d", start, match, length);
		bb_error_msg("invalid match");
	}
	if (verbose > 1) {
		bb_error_msg("\\[%d,%d]", start - match, length);
		do {
			bb_putchar_stderr(G1.window[start++]);
		} while (--length != 0);
	}
}
#else
#  define check_match(start, match, length) ((void)0)
#endif
732 /* trees.c -- output deflated data using Huffman coding
733 * Copyright (C) 1992-1993 Jean-loup Gailly
734 * This is free software; you can redistribute it and/or modify it under the
735 * terms of the GNU General Public License, see the file COPYING.
739 * Encode various sets of source values using variable-length
743 * The PKZIP "deflation" process uses several Huffman trees. The more
744 * common source values are represented by shorter bit sequences.
746 * Each code tree is stored in the ZIP file in a compressed form
747 * which is itself a Huffman encoding of the lengths of
748 * all the code strings (in ascending order by source values).
749 * The actual code strings are reconstructed from the lengths in
750 * the UNZIP process, as described in the "application note"
751 * (APPNOTE.TXT) distributed as part of PKWARE's PKZIP program.
755 * Data Compression: Techniques and Applications, pp. 53-55.
756 * Lifetime Learning Publications, 1985. ISBN 0-534-03418-7.
759 * Data Compression: Methods and Theory, pp. 49-50.
760 * Computer Science Press, 1988. ISBN 0-7167-8156-5.
764 * Addison-Wesley, 1983. ISBN 0-201-06672-6.
768 * Allocate the match buffer, initialize the various tables [and save
769 * the location of the internal file attribute (ascii/binary) and
770 * method (DEFLATE/STORE) -- deleted in bbox]
772 * void ct_tally(int dist, int lc);
773 * Save the match info and tally the frequency counts.
775 * ulg flush_block(char *buf, ulg stored_len, int eof)
776 * Determine the best encoding for the current block: dynamic trees,
777 * static trees or store, and output the encoded block to the zip
778 * file. Returns the total compressed length for the file so far.
782 /* All codes must not exceed MAX_BITS bits */
784 #define MAX_BL_BITS 7
785 /* Bit length codes must not exceed MAX_BL_BITS bits */
787 #define LENGTH_CODES 29
788 /* number of length codes, not counting the special END_BLOCK code */
791 /* number of literal bytes 0..255 */
793 #define END_BLOCK 256
794 /* end of block literal code */
796 #define L_CODES (LITERALS+1+LENGTH_CODES)
797 /* number of Literal or Length codes, including the END_BLOCK code */
800 /* number of distance codes */
803 /* number of codes used to transfer the bit lengths */
805 /* extra bits for each length code */
806 static const uint8_t extra_lbits[LENGTH_CODES] ALIGN1 = {
807 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4,
811 /* extra bits for each distance code */
812 static const uint8_t extra_dbits[D_CODES] ALIGN1 = {
813 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9,
814 10, 10, 11, 11, 12, 12, 13, 13
817 /* extra bits for each bit length code */
818 static const uint8_t extra_blbits[BL_CODES] ALIGN1 = {
819 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7 };
821 /* number of codes at each bit length for an optimal tree */
822 static const uint8_t bl_order[BL_CODES] ALIGN1 = {
823 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
825 #define STORED_BLOCK 0
826 #define STATIC_TREES 1
828 /* The three kinds of block type */
832 # define LIT_BUFSIZE 0x2000
835 # define LIT_BUFSIZE 0x4000
837 # define LIT_BUFSIZE 0x8000
842 # define DIST_BUFSIZE LIT_BUFSIZE
844 /* Sizes of match buffers for literals/lengths and distances. There are
845 * 4 reasons for limiting LIT_BUFSIZE to 64K:
846 * - frequencies can be kept in 16 bit counters
847 * - if compression is not successful for the first block, all input data is
848 * still in the window so we can still emit a stored block even when input
849 * comes from standard input. (This can also be done for all blocks if
850 * LIT_BUFSIZE is not greater than 32K.)
851 * - if compression is not successful for a file smaller than 64K, we can
852 * even emit a stored file instead of a stored block (saving 5 bytes).
853 * - creating new Huffman trees less frequently may not provide fast
854 * adaptation to changes in the input data statistics. (Take for
855 * example a binary file with poorly compressible code followed by
856 * a highly compressible string table.) Smaller buffer sizes give
857 * fast adaptation but have of course the overhead of transmitting trees
859 * - I can't count above 4
860 * The current code is general and allows DIST_BUFSIZE < LIT_BUFSIZE (to save
861 * memory at the expense of compression). Some optimizations would be possible
862 * if we rely on DIST_BUFSIZE == LIT_BUFSIZE.
865 /* repeat previous bit length 3-6 times (2 bits of repeat count) */
867 /* repeat a zero length 3-10 times (3 bits of repeat count) */
868 #define REPZ_11_138 18
869 /* repeat a zero length 11-138 times (7 bits of repeat count) */
871 /* ===========================================================================
873 /* Data structure describing a single value and its code string. */
874 typedef struct ct_data {
876 ush freq; /* frequency count */
877 ush code; /* bit string */
880 ush dad; /* father node in Huffman tree */
881 ush len; /* length of bit string */
890 #define HEAP_SIZE (2*L_CODES + 1)
891 /* maximum heap size */
893 typedef struct tree_desc {
894 ct_data *dyn_tree; /* the dynamic tree */
895 ct_data *static_tree; /* corresponding static tree or NULL */
896 const uint8_t *extra_bits; /* extra bits for each code or NULL */
897 int extra_base; /* base index for extra_bits */
898 int elems; /* max number of elements in the tree */
899 int max_length; /* max bit length for the codes */
900 int max_code; /* largest code with non zero frequency */
905 ush heap[HEAP_SIZE]; /* heap used to build the Huffman trees */
906 int heap_len; /* number of elements in the heap */
907 int heap_max; /* element of largest frequency */
909 /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
910 * The same heap array is used to build all trees.
913 ct_data dyn_ltree[HEAP_SIZE]; /* literal and length tree */
914 ct_data dyn_dtree[2 * D_CODES + 1]; /* distance tree */
916 ct_data static_ltree[L_CODES + 2];
918 /* The static literal tree. Since the bit lengths are imposed, there is no
919 * need for the L_CODES extra codes used during heap construction. However
920 * The codes 286 and 287 are needed to build a canonical tree (see ct_init
924 ct_data static_dtree[D_CODES];
926 /* The static distance tree. (Actually a trivial tree since all codes use
930 ct_data bl_tree[2 * BL_CODES + 1];
932 /* Huffman tree for the bit lengths */
938 ush bl_count[MAX_BITS + 1];
940 /* The lengths of the bit length codes are sent in order of decreasing
941 * probability, to avoid transmitting the lengths for unused bit length codes.
944 uch depth[2 * L_CODES + 1];
946 /* Depth of each subtree used as tie breaker for trees of equal frequency */
948 uch length_code[MAX_MATCH - MIN_MATCH + 1];
950 /* length code for each normalized match length (0 == MIN_MATCH) */
954 /* distance codes. The first 256 values correspond to the distances
955 * 3 .. 258, the last 256 values correspond to the top 8 bits of
956 * the 15 bit distances.
959 int base_length[LENGTH_CODES];
961 /* First normalized length for each code (0 = MIN_MATCH) */
963 int base_dist[D_CODES];
965 /* First normalized distance for each code (0 = distance of 1) */
967 uch flag_buf[LIT_BUFSIZE / 8];
969 /* flag_buf is a bit array distinguishing literals from lengths in
970 * l_buf, thus indicating the presence or absence of a distance.
973 unsigned last_lit; /* running index in l_buf */
974 unsigned last_dist; /* running index in d_buf */
975 unsigned last_flags; /* running index in flag_buf */
976 uch flags; /* current flags not yet saved in flag_buf */
977 uch flag_bit; /* current bit used in flags */
979 /* bits are filled in flags starting at bit 0 (least significant).
980 * Note: these flags are overkill in the current code since we don't
981 * take advantage of DIST_BUFSIZE == LIT_BUFSIZE.
984 ulg opt_len; /* bit length of current block with optimal trees */
985 ulg static_len; /* bit length of current block with static trees */
987 ulg compressed_len; /* total bit length of compressed file */
990 #define G2ptr ((struct globals2*)(ptr_to_globals))
994 /* ===========================================================================
996 static void gen_codes(ct_data * tree, int max_code);
997 static void build_tree(tree_desc * desc);
998 static void scan_tree(ct_data * tree, int max_code);
999 static void send_tree(ct_data * tree, int max_code);
1000 static int build_bl_tree(void);
1001 static void send_all_trees(int lcodes, int dcodes, int blcodes);
1002 static void compress_block(ct_data * ltree, ct_data * dtree);
1006 /* Send a code of the given tree. c and tree must not have side effects */
1007 # define SEND_CODE(c, tree) send_bits(tree[c].Code, tree[c].Len)
1009 # define SEND_CODE(c, tree) \
1011 if (verbose > 1) bb_error_msg("\ncd %3d ", (c)); \
1012 send_bits(tree[c].Code, tree[c].Len); \
1016 #define D_CODE(dist) \
1017 ((dist) < 256 ? G2.dist_code[dist] : G2.dist_code[256 + ((dist)>>7)])
1018 /* Mapping from a distance to a distance code. dist is the distance - 1 and
1019 * must not have side effects. dist_code[256] and dist_code[257] are never
1021 * The arguments must not have side effects.
1025 /* ===========================================================================
1026 * Initialize a new block.
1028 static void init_block(void)
1030 int n; /* iterates over tree elements */
1032 /* Initialize the trees. */
1033 for (n = 0; n < L_CODES; n++)
1034 G2.dyn_ltree[n].Freq = 0;
1035 for (n = 0; n < D_CODES; n++)
1036 G2.dyn_dtree[n].Freq = 0;
1037 for (n = 0; n < BL_CODES; n++)
1038 G2.bl_tree[n].Freq = 0;
1040 G2.dyn_ltree[END_BLOCK].Freq = 1;
1041 G2.opt_len = G2.static_len = 0;
1042 G2.last_lit = G2.last_dist = G2.last_flags = 0;
1048 /* ===========================================================================
1049 * Restore the heap property by moving down the tree starting at node k,
1050 * exchanging a node with the smallest of its two sons if necessary, stopping
1051 * when the heap property is re-established (each father smaller than its
1055 /* Compares to subtrees, using the tree depth as tie breaker when
1056 * the subtrees have equal frequency. This minimizes the worst case length. */
1057 #define SMALLER(tree, n, m) \
1058 (tree[n].Freq < tree[m].Freq \
1059 || (tree[n].Freq == tree[m].Freq && G2.depth[n] <= G2.depth[m]))
1061 static void pqdownheap(ct_data * tree, int k)
1064 int j = k << 1; /* left son of k */
1066 while (j <= G2.heap_len) {
1067 /* Set j to the smallest of the two sons: */
1068 if (j < G2.heap_len && SMALLER(tree, G2.heap[j + 1], G2.heap[j]))
1071 /* Exit if v is smaller than both sons */
1072 if (SMALLER(tree, v, G2.heap[j]))
1075 /* Exchange v with the smallest son */
1076 G2.heap[k] = G2.heap[j];
1079 /* And continue down the tree, setting j to the left son of k */
/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 * above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 * array bl_count contains the frequencies for each bit length.
 * The length opt_len is updated; static_len is also updated if stree is
 * not null.
 */
static void gen_bitlen(tree_desc * desc)
	ct_data *tree = desc->dyn_tree;
	const uint8_t *extra = desc->extra_bits;
	int base = desc->extra_base;
	int max_code = desc->max_code;
	int max_length = desc->max_length;
	ct_data *stree = desc->static_tree;
	int h;			/* heap index */
	int n, m;		/* iterate over the tree elements */
	int bits;		/* bit length */
	int xbits;		/* extra bits */
	ush f;			/* frequency */
	int overflow = 0;	/* number of elements with bit length too large */
	for (bits = 0; bits <= MAX_BITS; bits++)
		G2.bl_count[bits] = 0;
	/* In a first pass, compute the optimal bit lengths (which may
	 * overflow in the case of the bit length tree).
	 */
	tree[G2.heap[G2.heap_max]].Len = 0;	/* root of the heap */
	for (h = G2.heap_max + 1; h < HEAP_SIZE; h++) {
		/* length of a node is its father's length plus one */
		bits = tree[tree[n].Dad].Len + 1;
		if (bits > max_length) {
		tree[n].Len = (ush) bits;
		/* We overwrite tree[n].Dad which is no longer needed */
			continue;	/* not a leaf node */
		G2.bl_count[bits]++;
		xbits = extra[n - base];
		G2.opt_len += (ulg) f *(bits + xbits);
			G2.static_len += (ulg) f * (stree[n].Len + xbits);
	/* Second pass: redistribute code lengths when some exceeded max_length */
		Trace((stderr, "\nbit length overflow\n"));
		/* This happens for example on obj2 and pic of the Calgary corpus */
		/* Find the first bit length which could increase: */
		bits = max_length - 1;
		while (G2.bl_count[bits] == 0)
		G2.bl_count[bits]--;	/* move one leaf down the tree */
		G2.bl_count[bits + 1] += 2;	/* move one overflow item as its brother */
		G2.bl_count[max_length]--;
		/* The brother of the overflow item also moves one step up,
		 * but this does not affect bl_count[max_length]
		 */
	} while (overflow > 0);
	/* Now recompute all bit lengths, scanning in increasing frequency.
	 * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
	 * lengths instead of fixing only the wrong ones. This idea is taken
	 * from 'ar' written by Haruhiko Okumura.)
	 */
	for (bits = max_length; bits != 0; bits--) {
		n = G2.bl_count[bits];
			if (tree[m].Len != (unsigned) bits) {
				Trace((stderr, "code %d bits %d->%d\n", m, tree[m].Len, bits));
				G2.opt_len += ((int32_t) bits - tree[m].Len) * tree[m].Freq;
/* ===========================================================================
 * Generate the codes for a given tree and bit counts (which need not be
 * optimal).
 * IN assertion: the array bl_count contains the bit length statistics for
 * the given tree and the field len is set for all tree elements.
 * OUT assertion: the field code is set for all tree elements of non
 * zero code length.
 */
static void gen_codes(ct_data * tree, int max_code)
	ush next_code[MAX_BITS + 1];	/* next code value for each bit length */
	ush code = 0;		/* running code value */
	int bits;			/* bit index */
	int n;				/* code index */
	/* The distribution counts are first used to generate the code values
	 * without bit reversal.
	 */
	for (bits = 1; bits <= MAX_BITS; bits++) {
		next_code[bits] = code = (code + G2.bl_count[bits - 1]) << 1;
	/* Check that the bit counts in bl_count are consistent. The last code
	 * must be all ones.
	 */
	Assert(code + G2.bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
			"inconsistent bit counts");
	Tracev((stderr, "\ngen_codes: max_code %d ", max_code));
	for (n = 0; n <= max_code; n++) {
		int len = tree[n].Len;
		/* Now reverse the bits */
		tree[n].Code = bi_reverse(next_code[len]++, len);
		Tracec(tree != G2.static_ltree,
			(stderr, "\nn %3d %c l %2d c %4x (%x) ", n,
				(n > ' ' ? n : ' '), len, tree[n].Code,
				next_code[len] - 1));
1228 /* ===========================================================================
 * Construct one Huffman tree and assign the code bit strings and lengths.
1230 * Update the total bit length for the current block.
1231 * IN assertion: the field freq is set for all tree elements.
1232 * OUT assertions: the fields len and code are set to the optimal bit length
1233 * and corresponding code. The length opt_len is updated; static_len is
1234 * also updated if stree is not null. The field max_code is set.
/* Remove the smallest element from the heap and recreate the heap with
 * one less element. Updates heap and heap_len. */
/* Index within the heap array of least frequent node in the Huffman tree */
/* On exit, 'top' holds the removed minimum and heap[SMALLEST] is the new one */
#define PQREMOVE(tree, top) \
	top = G2.heap[SMALLEST]; \
	G2.heap[SMALLEST] = G2.heap[G2.heap_len--]; \
	pqdownheap(tree, SMALLEST); \
/* desc: the tree descriptor (dynamic tree, optional static tree, limits) */
static void build_tree(tree_desc * desc)
	ct_data *tree = desc->dyn_tree;
	ct_data *stree = desc->static_tree;
	int elems = desc->elems;
	int n, m;		/* iterate over heap elements */
	int max_code = -1;	/* largest code with non zero frequency */
	int node = elems;	/* next internal node of the tree */
	/* Construct the initial heap, with least frequent element in
	 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
	 * heap[0] is not used.
	 */
	G2.heap_max = HEAP_SIZE;
	for (n = 0; n < elems; n++) {
		if (tree[n].Freq != 0) {
			G2.heap[++G2.heap_len] = max_code = n;
	/* The pkzip format requires that at least one distance code exists,
	 * and that at least one bit should be sent even if there is only one
	 * possible code. So to avoid special checks later on we force at least
	 * two codes of non zero frequency.
	 */
	while (G2.heap_len < 2) {
		int new = G2.heap[++G2.heap_len] = (max_code < 2 ? ++max_code : 0);
		G2.static_len -= stree[new].Len;
		/* new is 0 or 1 so it does not have extra bits */
	desc->max_code = max_code;
	/* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
	 * establish sub-heaps of increasing lengths:
	 */
	for (n = G2.heap_len / 2; n >= 1; n--)
		pqdownheap(tree, n);
	/* Construct the Huffman tree by repeatedly combining the least two
	 * frequent nodes.
	 */
		PQREMOVE(tree, n);	/* n = node of least frequency */
		m = G2.heap[SMALLEST];	/* m = node of next least frequency */
		G2.heap[--G2.heap_max] = n;	/* keep the nodes sorted by frequency */
		G2.heap[--G2.heap_max] = m;
		/* Create a new node father of n and m */
		tree[node].Freq = tree[n].Freq + tree[m].Freq;
		G2.depth[node] = MAX(G2.depth[n], G2.depth[m]) + 1;
		tree[n].Dad = tree[m].Dad = (ush) node;
		if (tree == G2.bl_tree) {
			bb_error_msg("\nnode %d(%d), sons %d(%d) %d(%d)",
				node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
		/* and insert the new node in the heap */
		G2.heap[SMALLEST] = node++;
		pqdownheap(tree, SMALLEST);
	} while (G2.heap_len >= 2);
	G2.heap[--G2.heap_max] = G2.heap[SMALLEST];
	/* At this point, the fields freq and dad are set. We can now
	 * generate the bit lengths.
	 */
	gen_bitlen((tree_desc *) desc);
	/* The field len is now set, we can generate the bit codes */
	gen_codes((ct_data *) tree, max_code);
/* ===========================================================================
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree. Updates opt_len to take into account the repeat
 * counts. (The contribution of the bit length codes will be added later
 * during the construction of bl_tree.)
 * tree: the tree to be scanned; max_code: its largest code of non zero frequency
 */
static void scan_tree(ct_data * tree, int max_code)
	int n;			/* iterates over all tree elements */
	int prevlen = -1;	/* last emitted length */
	int curlen;		/* length of current code */
	int nextlen = tree[0].Len;	/* length of next code */
	int count = 0;		/* repeat count of the current code */
	int max_count = 7;	/* max repeat count */
	int min_count = 4;	/* min repeat count */
	tree[max_code + 1].Len = 0xffff;	/* guard */
	for (n = 0; n <= max_code; n++) {
		nextlen = tree[n + 1].Len;
		if (++count < max_count && curlen == nextlen)
		if (count < min_count) {
			/* run too short: count each occurrence individually */
			G2.bl_tree[curlen].Freq += count;
		} else if (curlen != 0) {
			if (curlen != prevlen)
				G2.bl_tree[curlen].Freq++;
			G2.bl_tree[REP_3_6].Freq++;
		} else if (count <= 10) {
			G2.bl_tree[REPZ_3_10].Freq++;
			G2.bl_tree[REPZ_11_138].Freq++;
	} else if (curlen == nextlen) {
/* ===========================================================================
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree. Mirrors the run-length logic of scan_tree(), but emits codes.
 */
static void send_tree(ct_data * tree, int max_code)
	int n;			/* iterates over all tree elements */
	int prevlen = -1;	/* last emitted length */
	int curlen;		/* length of current code */
	int nextlen = tree[0].Len;	/* length of next code */
	int count = 0;		/* repeat count of the current code */
	int max_count = 7;	/* max repeat count */
	int min_count = 4;	/* min repeat count */
	/* tree[max_code+1].Len = -1; *//* guard already set */
	max_count = 138, min_count = 3;	/* zero-length runs may repeat up to 138 times */
	for (n = 0; n <= max_code; n++) {
		nextlen = tree[n + 1].Len;
		if (++count < max_count && curlen == nextlen) {
		} else if (count < min_count) {
			SEND_CODE(curlen, G2.bl_tree);
		} else if (curlen != 0) {
			if (curlen != prevlen) {
				SEND_CODE(curlen, G2.bl_tree);
			Assert(count >= 3 && count <= 6, " 3_6?");
			SEND_CODE(REP_3_6, G2.bl_tree);
			send_bits(count - 3, 2);
		} else if (count <= 10) {
			SEND_CODE(REPZ_3_10, G2.bl_tree);
			send_bits(count - 3, 3);
			SEND_CODE(REPZ_11_138, G2.bl_tree);
			send_bits(count - 11, 7);
	} else if (curlen == nextlen) {
/* ===========================================================================
 * Construct the Huffman tree for the bit lengths and return the index in
 * bl_order of the last bit length code to send.
 */
static int build_bl_tree(void)
	int max_blindex;	/* index of last bit length code of non zero freq */
	/* Determine the bit length frequencies for literal and distance trees */
	scan_tree(G2.dyn_ltree, G2.l_desc.max_code);
	scan_tree(G2.dyn_dtree, G2.d_desc.max_code);
	/* Build the bit length tree: */
	build_tree(&G2.bl_desc);
	/* opt_len now includes the length of the tree representations, except
	 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
	 */
	/* Determine the number of bit length codes to send. The pkzip format
	 * requires that at least 4 bit length codes be sent. (appnote.txt says
	 * 3 but the actual value used is 4.)
	 */
	for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) {
		if (G2.bl_tree[bl_order[max_blindex]].Len != 0)
	/* Update opt_len to include the bit length tree and counts */
	G2.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
	Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", G2.opt_len, G2.static_len));
/* ===========================================================================
 * Send the header for a block using dynamic Huffman trees: the counts, the
 * lengths of the bit length codes, the literal tree and the distance tree.
 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
 */
static void send_all_trees(int lcodes, int dcodes, int blcodes)
	int rank;		/* index in bl_order */
	Assert(lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
	Assert(lcodes <= L_CODES && dcodes <= D_CODES
		&& blcodes <= BL_CODES, "too many codes");
	Tracev((stderr, "\nbl counts: "));
	send_bits(lcodes - 257, 5);	/* not +255 as stated in appnote.txt */
	send_bits(dcodes - 1, 5);
	send_bits(blcodes - 4, 4);	/* not -3 as stated in appnote.txt */
	/* Send the bit-length-code lengths (3 bits each) in bl_order */
	for (rank = 0; rank < blcodes; rank++) {
		Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
		send_bits(G2.bl_tree[bl_order[rank]].Len, 3);
	Tracev((stderr, "\nbl tree: sent %ld", G1.bits_sent));
	send_tree((ct_data *) G2.dyn_ltree, lcodes - 1);	/* send the literal tree */
	Tracev((stderr, "\nlit tree: sent %ld", G1.bits_sent));
	send_tree((ct_data *) G2.dyn_dtree, dcodes - 1);	/* send the distance tree */
	Tracev((stderr, "\ndist tree: sent %ld", G1.bits_sent));
/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 * dist: distance of matched string (0 means a literal);
 * lc: match length - MIN_MATCH, or the unmatched char if dist == 0.
 */
static int ct_tally(int dist, int lc)
	G1.l_buf[G2.last_lit++] = lc;
	/* lc is the unmatched char */
	G2.dyn_ltree[lc].Freq++;
	/* Here, lc is the match length - MIN_MATCH */
	dist--;		/* dist = match distance - 1 */
	Assert((ush) dist < (ush) MAX_DIST
		&& (ush) lc <= (ush) (MAX_MATCH - MIN_MATCH)
		&& (ush) D_CODE(dist) < (ush) D_CODES, "ct_tally: bad match"
	G2.dyn_ltree[G2.length_code[lc] + LITERALS + 1].Freq++;
	G2.dyn_dtree[D_CODE(dist)].Freq++;
	G1.d_buf[G2.last_dist++] = dist;
	G2.flags |= G2.flag_bit;
	/* Output the flags if they fill a byte: */
	if ((G2.last_lit & 7) == 0) {
		G2.flag_buf[G2.last_flags++] = G2.flags;
	/* Try to guess if it is profitable to stop the current block here */
	if ((G2.last_lit & 0xfff) == 0) {
		/* Compute an upper bound for the compressed length */
		ulg out_length = G2.last_lit * 8L;
		ulg in_length = (ulg) G1.strstart - G1.block_start;
		for (dcode = 0; dcode < D_CODES; dcode++) {
			out_length += G2.dyn_dtree[dcode].Freq * (5L + extra_dbits[dcode]);
			"\nlast_lit %u, last_dist %u, in %ld, out ~%ld(%ld%%) ",
			G2.last_lit, G2.last_dist, in_length, out_length,
			100L - out_length * 100L / in_length));
		if (G2.last_dist < G2.last_lit / 2 && out_length < in_length / 2)
	return (G2.last_lit == LIT_BUFSIZE - 1 || G2.last_dist == DIST_BUFSIZE);
	/* We avoid equality with LIT_BUFSIZE because of wraparound at 64K
	 * on 16 bit machines and because stored blocks are restricted to
	 * 64K-1 bytes.
	 */
/* ===========================================================================
 * Send the block data compressed using the given Huffman trees
 * ltree: literal tree; dtree: distance tree.
 */
static void compress_block(ct_data * ltree, ct_data * dtree)
	unsigned dist;		/* distance of matched string */
	int lc;			/* match length or unmatched char (if dist == 0) */
	unsigned lx = 0;	/* running index in l_buf */
	unsigned dx = 0;	/* running index in d_buf */
	unsigned fx = 0;	/* running index in flag_buf */
	uch flag = 0;		/* current flags */
	unsigned code;		/* the code to send */
	int extra;		/* number of extra bits to send */
	if (G2.last_lit != 0) do {
		flag = G2.flag_buf[fx++];
		lc = G1.l_buf[lx++];
		/* flag bit clear => literal; set => length/distance pair */
		if ((flag & 1) == 0) {
			SEND_CODE(lc, ltree);	/* send a literal byte */
			Tracecv(lc > ' ', (stderr, " '%c' ", lc));
			/* Here, lc is the match length - MIN_MATCH */
			code = G2.length_code[lc];
			SEND_CODE(code + LITERALS + 1, ltree);	/* send the length code */
			extra = extra_lbits[code];
			lc -= G2.base_length[code];
			send_bits(lc, extra);	/* send the extra length bits */
			dist = G1.d_buf[dx++];
			/* Here, dist is the match distance - 1 */
			code = D_CODE(dist);
			Assert(code < D_CODES, "bad d_code");
			SEND_CODE(code, dtree);	/* send the distance code */
			extra = extra_dbits[code];
			dist -= G2.base_dist[code];
			send_bits(dist, extra);	/* send the extra distance bits */
		}	/* literal or match pair ? */
	} while (lx < G2.last_lit);
	SEND_CODE(END_BLOCK, ltree);
/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and output the encoded block to the zip file. This function
 * returns the total compressed length for the file so far.
 * buf: input block (window region); stored_len: its length in bytes;
 * eof: true if this is the last block for the file.
 */
static ulg flush_block(char *buf, ulg stored_len, int eof)
	ulg opt_lenb, static_lenb;	/* opt_len and static_len in bytes */
	int max_blindex;	/* index of last bit length code of non zero freq */
	G2.flag_buf[G2.last_flags] = G2.flags;	/* Save the flags for the last 8 items */
	/* Construct the literal and distance trees */
	build_tree(&G2.l_desc);
	Tracev((stderr, "\nlit data: dyn %ld, stat %ld", G2.opt_len, G2.static_len));
	build_tree(&G2.d_desc);
	Tracev((stderr, "\ndist data: dyn %ld, stat %ld", G2.opt_len, G2.static_len));
	/* At this point, opt_len and static_len are the total bit lengths of
	 * the compressed block data, excluding the tree representations.
	 */
	/* Build the bit length tree for the above two trees, and get the index
	 * in bl_order of the last bit length code to send.
	 */
	max_blindex = build_bl_tree();
	/* Determine the best encoding. Compute first the block length in bytes */
	opt_lenb = (G2.opt_len + 3 + 7) >> 3;
	static_lenb = (G2.static_len + 3 + 7) >> 3;
		"\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u dist %u ",
		opt_lenb, G2.opt_len, static_lenb, G2.static_len, stored_len,
		G2.last_lit, G2.last_dist));
	if (static_lenb <= opt_lenb)
		opt_lenb = static_lenb;
	/* If compression failed and this is the first and last block,
	 * and if the zip file can be seeked (to rewrite the local header),
	 * the whole file is transformed into a stored file:
	 */
	if (stored_len <= opt_lenb && eof && G2.compressed_len == 0L && seekable()) {
		/* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
			bb_error_msg("block vanished");
		copy_block(buf, (unsigned) stored_len, 0);	/* without header */
		G2.compressed_len = stored_len << 3;
	} else if (stored_len + 4 <= opt_lenb && buf != NULL) {
		/* 4: two words for the lengths */
		/* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
		 * Otherwise we can't have processed more than WSIZE input bytes since
		 * the last block flush, because compression would have been
		 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
		 * transform a block into a stored block.
		 */
		send_bits((STORED_BLOCK << 1) + eof, 3);	/* send block type */
		G2.compressed_len = (G2.compressed_len + 3 + 7) & ~7L;
		G2.compressed_len += (stored_len + 4) << 3;
		copy_block(buf, (unsigned) stored_len, 1);	/* with header */
	} else if (static_lenb == opt_lenb) {
		send_bits((STATIC_TREES << 1) + eof, 3);
		compress_block((ct_data *) G2.static_ltree, (ct_data *) G2.static_dtree);
		G2.compressed_len += 3 + G2.static_len;
		send_bits((DYN_TREES << 1) + eof, 3);
		send_all_trees(G2.l_desc.max_code + 1, G2.d_desc.max_code + 1,
		compress_block((ct_data *) G2.dyn_ltree, (ct_data *) G2.dyn_dtree);
		G2.compressed_len += 3 + G2.opt_len;
	Assert(G2.compressed_len == G1.bits_sent, "bad compressed size");
		G2.compressed_len += 7;	/* align on byte boundary */
	Tracev((stderr, "\ncomprlen %lu(%lu) ", G2.compressed_len >> 3,
		G2.compressed_len - 7 * eof));
	return G2.compressed_len >> 3;
/* ===========================================================================
 * Update a hash value with the given input byte
 * IN assertion: all calls to UPDATE_HASH are made with consecutive
 * input characters, so that a running hash key can be computed from the
 * previous key instead of complete recalculation each time.
 */
#define UPDATE_HASH(h, c) (h = (((h)<<H_SHIFT) ^ (c)) & HASH_MASK)
1718 /* ===========================================================================
1719 * Same as above, but achieves better compression. We use a lazy
1720 * evaluation for matches: a match is finally adopted only if there is
1721 * no better match at the next window position.
 * Processes a new input file and returns its compressed length. Sets
1724 * the compressed length, crc, deflate flags and internal file
/* Flush the current block, with given end-of-file flag.
 * IN assertion: strstart is set to the end of the current match.
 * Passes NULL as buf when the block data is no longer in the window
 * (block_start < 0); otherwise points into G1.window. */
#define FLUSH_BLOCK(eof) \
	G1.block_start >= 0L \
	? (char*)&G1.window[(unsigned)G1.block_start] \
	(ulg)G1.strstart - G1.block_start, \
/* Insert string s in the dictionary and set match_head to the previous head
 * of the hash chain (the most recent string with same hash key). Return
 * the previous length of the hash chain.
 * (NOTE(review): as written the macro only updates ins_h, prev[] and head[];
 * the "return" wording is historical gzip documentation.)
 * IN assertion: all calls to INSERT_STRING are made with consecutive
 * input characters and the first MIN_MATCH bytes of s are valid
 * (except for the last MIN_MATCH-1 bytes of the input file). */
#define INSERT_STRING(s, match_head) \
	UPDATE_HASH(G1.ins_h, G1.window[(s) + MIN_MATCH-1]); \
	G1.prev[(s) & WMASK] = match_head = head[G1.ins_h]; \
	head[G1.ins_h] = (s); \
/* Main compression loop: lazy-match deflate over the input window.
 * Returns the compressed length for the file (via FLUSH_BLOCK at eof). */
static ulg deflate(void)
	IPos hash_head;		/* head of hash chain */
	IPos prev_match;	/* previous match */
	int flush;		/* set if current block must be flushed */
	int match_available = 0;	/* set if previous match exists */
	unsigned match_length = MIN_MATCH - 1;	/* length of best match */
	/* Process the input block. */
	while (G1.lookahead != 0) {
		/* Insert the string window[strstart .. strstart+2] in the
		 * dictionary, and set hash_head to the head of the hash chain:
		 */
		INSERT_STRING(G1.strstart, hash_head);
		/* Find the longest match, discarding those <= prev_length.
		 */
		G1.prev_length = match_length;
		prev_match = G1.match_start;
		match_length = MIN_MATCH - 1;
		if (hash_head != 0 && G1.prev_length < max_lazy_match
		 && G1.strstart - hash_head <= MAX_DIST
			/* To simplify the code, we prevent matches with the string
			 * of window index 0 (in particular we have to avoid a match
			 * of the string with itself at the start of the input file).
			 */
			match_length = longest_match(hash_head);
			/* longest_match() sets match_start */
			if (match_length > G1.lookahead)
				match_length = G1.lookahead;
			/* Ignore a length 3 match if it is too distant: */
			if (match_length == MIN_MATCH && G1.strstart - G1.match_start > TOO_FAR) {
				/* If prev_match is also MIN_MATCH, G1.match_start is garbage
				 * but we will ignore the current match anyway.
				 */
		/* If there was a match at the previous step and the current
		 * match is not better, output the previous match:
		 */
		if (G1.prev_length >= MIN_MATCH && match_length <= G1.prev_length) {
			check_match(G1.strstart - 1, prev_match, G1.prev_length);
			flush = ct_tally(G1.strstart - 1 - prev_match, G1.prev_length - MIN_MATCH);
			/* Insert in hash table all strings up to the end of the match.
			 * strstart-1 and strstart are already inserted.
			 */
			G1.lookahead -= G1.prev_length - 1;
			G1.prev_length -= 2;
				INSERT_STRING(G1.strstart, hash_head);
				/* strstart never exceeds WSIZE-MAX_MATCH, so there are
				 * always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH
				 * these bytes are garbage, but it does not matter since the
				 * next lookahead bytes will always be emitted as literals.
				 */
			} while (--G1.prev_length != 0);
			match_available = 0;
			match_length = MIN_MATCH - 1;
				G1.block_start = G1.strstart;
		} else if (match_available) {
			/* If there was no match at the previous position, output a
			 * single literal. If there was a match but the current match
			 * is longer, truncate the previous match to a single literal.
			 */
			Tracevv((stderr, "%c", G1.window[G1.strstart - 1]));
			if (ct_tally(0, G1.window[G1.strstart - 1])) {
				G1.block_start = G1.strstart;
			/* There is no previous match to compare with, wait for
			 * the next step to decide.
			 */
			match_available = 1;
		Assert(G1.strstart <= G1.isize && lookahead <= G1.isize, "a bit too far");
		/* Make sure that we always have enough lookahead, except
		 * at the end of the input file. We need MAX_MATCH bytes
		 * for the next match, plus MIN_MATCH bytes to insert the
		 * string following the next match.
		 */
		while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile)
	/* Emit the pending literal, if any, then flush the final block */
	if (match_available)
		ct_tally(0, G1.window[G1.strstart - 1]);
	return FLUSH_BLOCK(1);	/* eof */
/* ===========================================================================
 * Initialize the bit string routines (output bit buffer state).
 */
static void bi_init(void)
/* ===========================================================================
 * Initialize the "longest match" routines for a new file
 * flagsp: out: general-purpose deflate flags stored in the gzip header
 */
static void lm_init(ush * flagsp)
	/* Initialize the hash table. */
	memset(head, 0, HASH_SIZE * sizeof(*head));
	/* prev will be initialized on the fly */
	/* speed options for the general purpose bit flag */
	*flagsp |= 2;	/* FAST 4, SLOW 2 */
	/* ??? reduce max_chain_length for binary files */
	G1.block_start = 0L;
	/* On 16-bit ints, read at most WSIZE to avoid overflowing 'unsigned' */
	G1.lookahead = file_read(G1.window,
			sizeof(int) <= 2 ? (unsigned) WSIZE : 2 * WSIZE);
	if (G1.lookahead == 0 || G1.lookahead == (unsigned) -1) {
	/* Make sure that we always have enough lookahead. This is important
	 * if input comes from a device such as a tty.
	 */
	while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile)
	for (j = 0; j < MIN_MATCH - 1; j++)
		UPDATE_HASH(G1.ins_h, G1.window[j]);
	/* If lookahead < MIN_MATCH, ins_h is garbage, but this is
	 * not important since only literal bytes will be emitted.
	 */
/* ===========================================================================
 * Allocate the match buffer, initialize the various tables and save the
 * location of the internal file attribute (ascii/binary) and method
 * (DEFLATE/STORE).
 * One callsite in zip()
 */
static void ct_init(void)
	int n;			/* iterates over tree elements */
	int length;		/* length value */
	int code;		/* code value */
	int dist;		/* distance index */
	G2.compressed_len = 0L;
	if (G2.static_dtree[0].Len != 0)
		return;	/* ct_init already called */
	/* Initialize the mapping length (0..255) -> length code (0..28) */
	for (code = 0; code < LENGTH_CODES - 1; code++) {
		G2.base_length[code] = length;
		for (n = 0; n < (1 << extra_lbits[code]); n++) {
			G2.length_code[length++] = code;
	Assert(length == 256, "ct_init: length != 256");
	/* Note that the length 255 (match length 258) can be represented
	 * in two different ways: code 284 + 5 bits or code 285, so we
	 * overwrite length_code[255] to use the best encoding:
	 */
	G2.length_code[length - 1] = code;
	/* Initialize the mapping dist (0..32K) -> dist code (0..29) */
	for (code = 0; code < 16; code++) {
		G2.base_dist[code] = dist;
		for (n = 0; n < (1 << extra_dbits[code]); n++) {
			G2.dist_code[dist++] = code;
	Assert(dist == 256, "ct_init: dist != 256");
	dist >>= 7;	/* from now on, all distances are divided by 128 */
	for (; code < D_CODES; code++) {
		G2.base_dist[code] = dist << 7;
		for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) {
			G2.dist_code[256 + dist++] = code;
	Assert(dist == 256, "ct_init: 256+dist != 512");
	/* Construct the codes of the static literal tree */
	/* already zeroed - it's in bss
	for (n = 0; n <= MAX_BITS; n++)
		G2.bl_count[n] = 0; */
		G2.static_ltree[n++].Len = 8;
		G2.static_ltree[n++].Len = 9;
		G2.static_ltree[n++].Len = 7;
		G2.static_ltree[n++].Len = 8;
	/* Codes 286 and 287 do not exist, but we must include them in the
	 * tree construction to get a canonical Huffman tree (longest code
	 * all ones)
	 */
	gen_codes((ct_data *) G2.static_ltree, L_CODES + 1);
	/* The static distance tree is trivial: */
	for (n = 0; n < D_CODES; n++) {
		G2.static_dtree[n].Len = 5;
		G2.static_dtree[n].Code = bi_reverse(n, 5);
	/* Initialize the first block of the first file: */
/* ===========================================================================
 * Deflate in to out: write gzip header, deflate body, CRC and size trailer.
 * IN assertions: the input and output buffers are cleared.
 * time_stamp: mtime stored in the gzip header (MTIME field).
 */
static void zip(ulg time_stamp)
	ush deflate_flags = 0;	/* pkzip -es, -en or -ex equivalent */
	/* Write the header to the gzip file. See algorithm.doc for the format */
	/* magic header for gzip files: 1F 8B */
	/* compression method: 8 (DEFLATED) */
	/* general flags: 0 */
	put_32bit(0x00088b1f);
	put_32bit(time_stamp);
	/* Write deflated file to zip file */
	lm_init(&deflate_flags);
	put_8bit(deflate_flags);	/* extra flags */
	put_8bit(3);	/* OS identifier = 3 (Unix) */
	/* Write the crc and uncompressed size */
	put_32bit(G1.isize);
/* ======================================================================== */
/* bbunpack() callback: set up the tree descriptors and compress stdin */
IF_DESKTOP(long long) int FAST_FUNC pack_gzip(transformer_aux_data_t *aux UNUSED_PARAM)
	/* Clear input and output buffers */
	memset(&G2, 0, sizeof(G2));
	G2.l_desc.dyn_tree = G2.dyn_ltree;
	G2.l_desc.static_tree = G2.static_ltree;
	G2.l_desc.extra_bits = extra_lbits;
	G2.l_desc.extra_base = LITERALS + 1;
	G2.l_desc.elems = L_CODES;
	G2.l_desc.max_length = MAX_BITS;
	//G2.l_desc.max_code = 0;
	G2.d_desc.dyn_tree = G2.dyn_dtree;
	G2.d_desc.static_tree = G2.static_dtree;
	G2.d_desc.extra_bits = extra_dbits;
	//G2.d_desc.extra_base = 0;
	G2.d_desc.elems = D_CODES;
	G2.d_desc.max_length = MAX_BITS;
	//G2.d_desc.max_code = 0;
	G2.bl_desc.dyn_tree = G2.bl_tree;
	//G2.bl_desc.static_tree = NULL;
	/* NOTE(review): trailing ',' below is the comma operator, not ';' —
	 * behavior is the same, but ';' was probably intended */
	G2.bl_desc.extra_bits = extra_blbits,
	//G2.bl_desc.extra_base = 0;
	G2.bl_desc.elems = BL_CODES;
	G2.bl_desc.max_length = MAX_BL_BITS;
	//G2.bl_desc.max_code = 0;
	fstat(STDIN_FILENO, &s);
#if ENABLE_FEATURE_GZIP_LONG_OPTIONS
/* GNU gzip compatible long option names, mapped to the short flags below */
static const char gzip_longopts[] ALIGN1 =
	"stdout\0"      No_argument "c"
	"to-stdout\0"   No_argument "c"
	"force\0"       No_argument "f"
	"verbose\0"     No_argument "v"
	"decompress\0"  No_argument "d"
	"uncompress\0"  No_argument "d"
	"test\0"        No_argument "t"
	"quiet\0"       No_argument "q"
	"fast\0"        No_argument "1"
	"best\0"        No_argument "9"
2104 * Linux kernel build uses gzip -d -n. We accept and ignore -n.
2107 * gzip: do not save the original file name and time stamp.
2108 * (The original name is always saved if the name had to be truncated.)
2109 * gunzip: do not restore the original file name/time even if present
2110 * (remove only the gzip suffix from the compressed file name).
2111 * This option is the default when decompressing.
2113 * gzip: always save the original file name and time stamp (this is the default)
2114 * gunzip: restore the original file name and time stamp if present.
int gzip_main(int argc, char **argv) MAIN_EXTERNALLY_VISIBLE;
/* NOTE(review): the two definitions below are alternatives selected by
 * preprocessor conditionals (argc is unused unless gunzip_main is called) */
int gzip_main(int argc, char **argv)
int gzip_main(int argc UNUSED_PARAM, char **argv)
#if ENABLE_FEATURE_GZIP_LONG_OPTIONS
	applet_long_options = gzip_longopts;
	/* Must match bbunzip's constants OPT_STDOUT, OPT_FORCE! */
	opt = getopt32(argv, "cfv" IF_GUNZIP("dt") "q123456789n");
#if ENABLE_GUNZIP /* gunzip_main may not be visible... */
	if (opt & 0x18) // -d and/or -t
		return gunzip_main(argc, argv);
	option_mask32 &= 0x7;	/* ignore -q, -0..9 */
	//if (opt & 0x1) // -c
	//if (opt & 0x2) // -f
	//if (opt & 0x4) // -v
	/* G1 and G2 live in one xzalloc'ed chunk; ptr_to_globals points at G2 */
	SET_PTR_TO_GLOBALS((char *)xzalloc(sizeof(struct globals)+sizeof(struct globals2))
		+ sizeof(struct globals));
	/* Allocate all global buffers (for DYN_ALLOC option) */
	ALLOC(uch, G1.l_buf, INBUFSIZ);
	ALLOC(uch, G1.outbuf, OUTBUFSIZ);
	ALLOC(ush, G1.d_buf, DIST_BUFSIZE);
	ALLOC(uch, G1.window, 2L * WSIZE);
	ALLOC(ush, G1.prev, 1L << BITS);
	/* Initialize the CRC32 table */
	global_crc32_table = crc32_filltable(NULL, 0);
	return bbunpack(argv, pack_gzip, append_ext, "gz");