1/* vi: set sw=4 ts=4: */ 2/* 3 * Gzip implementation for busybox 4 * 5 * Based on GNU gzip Copyright (C) 1992-1993 Jean-loup Gailly. 6 * 7 * Originally adjusted for busybox by Charles P. Wright <cpw@unix.asb.com> 8 * "this is a stripped down version of gzip I put into busybox, it does 9 * only standard in to standard out with -9 compression. It also requires 10 * the zcat module for some important functions." 11 * 12 * Adjusted further by Erik Andersen <andersen@codepoet.org> to support 13 * files as well as stdin/stdout, and to generally behave itself wrt 14 * command line handling. 15 * 16 * Licensed under GPLv2 or later, see file LICENSE in this source tree. 17 */ 18/* TODO: full support for -v for DESKTOP 19 * "/usr/bin/gzip -v a bogus aa" should say: 20a: 85.1% -- replaced with a.gz 21gzip: bogus: No such file or directory 22aa: 85.1% -- replaced with aa.gz 23*/ 24//config:config GZIP 25//config: bool "gzip (17 kb)" 26//config: default y 27//config: help 28//config: gzip is used to compress files. 29//config: It's probably the most widely used UNIX compression program. 30//config: 31//config:config FEATURE_GZIP_LONG_OPTIONS 32//config: bool "Enable long options" 33//config: default y 34//config: depends on GZIP && LONG_OPTS 35//config: 36//config:config GZIP_FAST 37//config: int "Trade memory for speed (0:small,slow - 2:fast,big)" 38//config: default 0 39//config: range 0 2 40//config: depends on GZIP 41//config: help 42//config: Enable big memory options for gzip. 43//config: 0: small buffers, small hash-tables 44//config: 1: larger buffers, larger hash-tables 45//config: 2: larger buffers, largest hash-tables 46//config: Larger models may give slightly better compression 47//config: 48//config:config FEATURE_GZIP_LEVELS 49//config: bool "Enable compression levels" 50//config: default n 51//config: depends on GZIP 52//config: help 53//config: Enable support for compression levels 4-9. The default level 54//config: is 6. 
If levels 1-3 are specified, 4 is used. 55//config: If this option is not selected, -N options are ignored and -9 56//config: is used. 57//config: 58//config:config FEATURE_GZIP_DECOMPRESS 59//config: bool "Enable decompression" 60//config: default y 61//config: depends on GZIP || GUNZIP || ZCAT 62//config: help 63//config: Enable -d (--decompress) and -t (--test) options for gzip. 64//config: This will be automatically selected if gunzip or zcat is 65//config: enabled. 66 67//applet:IF_GZIP(APPLET(gzip, BB_DIR_BIN, BB_SUID_DROP)) 68 69//kbuild:lib-$(CONFIG_GZIP) += gzip.o 70 71//usage:#define gzip_trivial_usage 72//usage: "[-cfk" IF_FEATURE_GZIP_DECOMPRESS("dt") IF_FEATURE_GZIP_LEVELS("123456789") "] [FILE]..." 73//usage:#define gzip_full_usage "\n\n" 74//usage: "Compress FILEs (or stdin)\n" 75//usage: IF_FEATURE_GZIP_LEVELS( 76//usage: "\n -1..9 Compression level" 77//usage: ) 78//usage: IF_FEATURE_GZIP_DECOMPRESS( 79//usage: "\n -d Decompress" 80//usage: "\n -t Test file integrity" 81//usage: ) 82//usage: "\n -c Write to stdout" 83//usage: "\n -f Force" 84//usage: "\n -k Keep input files" 85//usage: 86//usage:#define gzip_example_usage 87//usage: "$ ls -la /tmp/busybox*\n" 88//usage: "-rw-rw-r-- 1 andersen andersen 1761280 Apr 14 17:47 /tmp/busybox.tar\n" 89//usage: "$ gzip /tmp/busybox.tar\n" 90//usage: "$ ls -la /tmp/busybox*\n" 91//usage: "-rw-rw-r-- 1 andersen andersen 554058 Apr 14 17:49 /tmp/busybox.tar.gz\n" 92 93#include "libbb.h" 94#include "bb_archive.h" 95 96/* =========================================================================== 97 */ 98//#define DEBUG 1 99/* Diagnostic functions */ 100#ifdef DEBUG 101static int verbose; 102# define Assert(cond,msg) { if (!(cond)) bb_error_msg(msg); } 103# define Trace(x) fprintf x 104# define Tracev(x) {if (verbose) fprintf x; } 105# define Tracevv(x) {if (verbose > 1) fprintf x; } 106# define Tracec(c,x) {if (verbose && (c)) fprintf x; } 107# define Tracecv(c,x) {if (verbose > 1 && (c)) fprintf x; } 108#else 
# define Assert(cond,msg)
# define Trace(x)
# define Tracev(x)
# define Tracevv(x)
# define Tracec(c,x)
# define Tracecv(c,x)
#endif

/* ===========================================================================
 * Map the CONFIG_GZIP_FAST knob (0..2) onto the classic gzip memory models.
 */
#if CONFIG_GZIP_FAST == 0
# define SMALL_MEM
#elif CONFIG_GZIP_FAST == 1
# define MEDIUM_MEM
#elif CONFIG_GZIP_FAST == 2
# define BIG_MEM
#else
# error "Invalid CONFIG_GZIP_FAST value"
#endif

#ifndef INBUFSIZ
# ifdef SMALL_MEM
#  define INBUFSIZ  0x2000      /* input buffer size */
# else
#  define INBUFSIZ  0x8000      /* input buffer size */
# endif
#endif

#ifndef OUTBUFSIZ
# ifdef SMALL_MEM
#  define OUTBUFSIZ   8192      /* output buffer size */
# else
#  define OUTBUFSIZ  16384      /* output buffer size */
# endif
#endif

#ifndef DIST_BUFSIZE
# ifdef SMALL_MEM
#  define DIST_BUFSIZE 0x2000   /* buffer for distances, see trees.c */
# else
#  define DIST_BUFSIZE 0x8000   /* buffer for distances, see trees.c */
# endif
#endif

/* gzip flag byte (byte 4 of the member header) */
#define ASCII_FLAG   0x01       /* bit 0 set: file probably ascii text */
#define CONTINUATION 0x02       /* bit 1 set: continuation of multi-part gzip file */
#define EXTRA_FIELD  0x04       /* bit 2 set: extra field present */
#define ORIG_NAME    0x08       /* bit 3 set: original file name present */
#define COMMENT      0x10       /* bit 4 set: file comment present */
#define RESERVED     0xC0       /* bit 6,7: reserved */

/* internal file attribute */
#define UNKNOWN 0xffff
#define BINARY  0
#define ASCII   1

#ifndef WSIZE
# define WSIZE 0x8000           /* window size--must be a power of two, and */
#endif                          /* at least 32K for zip's deflate method */

#define MIN_MATCH  3
#define MAX_MATCH  258
/* The minimum and maximum match lengths */

#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/* Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

#define MAX_DIST  (WSIZE-MIN_LOOKAHEAD)
/* In order to simplify the code, particularly on 16 bit machines, match
 * distances are limited to MAX_DIST instead of WSIZE.
 */

#ifndef MAX_PATH_LEN
# define MAX_PATH_LEN   1024    /* max pathname length */
#endif

/* bbox port: both are compile-time constants, dead branches fold away */
#define seekable()    0 /* force sequential output */
#define translate_eol 0 /* no option -a yet */

#ifndef BITS
# define BITS 16
#endif
#define INIT_BITS 9     /* Initial number of bits per code */

#define BIT_MASK    0x1f        /* Mask for 'number of compression bits' */
/* Mask 0x20 is reserved to mean a fourth header byte, and 0x40 is free.
 * It's a pity that old uncompress does not check bit 0x20. That makes
 * extension of the format actually undesirable because old compress
 * would just crash on the new format instead of giving a meaningful
 * error message. It does check the number of bits, but it's more
 * helpful to say "unsupported format, get a new version" than
 * "can only handle 16 bits".
 */

#ifdef MAX_EXT_CHARS
# define MAX_SUFFIX  MAX_EXT_CHARS
#else
# define MAX_SUFFIX  30
#endif

/* ===========================================================================
 * Compile with MEDIUM_MEM to reduce the memory requirements or
 * with SMALL_MEM to use as little memory as possible. Use BIG_MEM if the
 * entire input file can be held in memory (not possible on 16 bit systems).
 * Warning: defining these symbols affects HASH_BITS (see below) and thus
 * affects the compression ratio. The compressed output
 * is still correct, and might even be smaller in some cases.
 */
#ifdef SMALL_MEM
# define HASH_BITS  13  /* Number of bits used to hash strings */
#endif
#ifdef MEDIUM_MEM
# define HASH_BITS  14
#endif
#ifndef HASH_BITS
# define HASH_BITS  15
   /* For portability to 16 bit machines, do not use values above 15. */
#endif

#define HASH_SIZE (unsigned)(1<<HASH_BITS)
#define HASH_MASK (HASH_SIZE-1)
#define WMASK     (WSIZE-1)
/* HASH_SIZE and WSIZE must be powers of two */
#ifndef TOO_FAR
# define TOO_FAR 4096
#endif
/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */

/* ===========================================================================
 * These types are not really 'char', 'short' and 'long'
 */
typedef uint8_t uch;
typedef uint16_t ush;
typedef uint32_t ulg;
typedef int32_t lng;

typedef ush Pos;
typedef unsigned IPos;
/* A Pos is an index in the character window. We use short instead of int to
 * save space in the various tables. IPos is used only for parameter passing.
 */

enum {
	WINDOW_SIZE = 2 * WSIZE,
/* window size, 2*WSIZE except for MMAP or BIG_MEM, where it is the
 * input file length plus MIN_LOOKAHEAD.
 */

#if !ENABLE_FEATURE_GZIP_LEVELS
/* Without level support the four tuning knobs are compile-time constants
 * (the -9 values); with level support they live in struct globals below. */

	max_chain_length = 4096,
/* To speed up deflation, hash chains are never searched beyond this length.
 * A higher limit improves compression ratio but degrades the speed.
 */

	max_lazy_match = 258,
/* Attempt to find a better match only when the current match is strictly
 * smaller than this value. This mechanism is used only for compression
 * levels >= 4.
 */

	max_insert_length = max_lazy_match,
/* Insert new strings in the hash table only if the match length
 * is not greater than this length. This saves time but degrades compression.
 * max_insert_length is used only for compression levels <= 3.
 */

	good_match = 32,
/* Use a faster search when the previous match is longer than this */

/* Values for max_lazy_match, good_match and max_chain_length, depending on
 * the desired pack level (0..9). The values given below have been tuned to
 * exclude worst case performance for pathological files. Better values may be
 * found for specific files.
 */

	nice_match = 258,       /* Stop searching when current match exceeds this */
/* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
 * meaning.
 */
#endif /* ENABLE_FEATURE_GZIP_LEVELS */
};

struct globals {
/* =========================================================================== */
/* global buffers, allocated once */

/* ALLOC rounds the element count up to an even number; FREE also NULLs
 * the pointer so a double FREE is harmless. */
#define DECLARE(type, array, size) \
	type * array
#define ALLOC(type, array, size) \
	array = xzalloc((size_t)(((size)+1L)/2) * 2*sizeof(type))
#define FREE(array) \
	do { free(array); array = NULL; } while (0)

	/* buffer for literals or lengths */
	/* DECLARE(uch, l_buf, LIT_BUFSIZE); */
	DECLARE(uch, l_buf, INBUFSIZ);

	DECLARE(ush, d_buf, DIST_BUFSIZE);
	DECLARE(uch, outbuf, OUTBUFSIZ);

/* Sliding window. Input bytes are read into the second half of the window,
 * and move to the first half later to keep a dictionary of at least WSIZE
 * bytes. With this organization, matches are limited to a distance of
 * WSIZE-MAX_MATCH bytes, but this ensures that IO is always
 * performed with a length multiple of the block size. Also, it limits
 * the window size to 64K, which is quite useful on MSDOS.
 * To do: limit the window size to WSIZE+BSZ if SMALL_MEM (the code would
 * be less efficient).
 */
	DECLARE(uch, window, 2L * WSIZE);

/* Link to older string with same hash index. To limit the size of this
 * array to 64K, this link is maintained only for the last 32K strings.
 * An index in this array is thus a window index modulo 32K.
 */
	/* DECLARE(Pos, prev, WSIZE); */
	DECLARE(ush, prev, 1L << BITS);

/* Heads of the hash chains or 0.
 * Note: not a separate allocation - head[] aliases the upper half of
 * the prev[] array (prev gets 1<<BITS == 2*WSIZE entries above). */
	/* DECLARE(Pos, head, 1<<HASH_BITS); */
#define head (G1.prev + WSIZE) /* hash head (see deflate.c) */

#if ENABLE_FEATURE_GZIP_LEVELS
	unsigned max_chain_length;
	unsigned max_lazy_match;
	unsigned good_match;
	unsigned nice_match;
#define max_chain_length (G1.max_chain_length)
#define max_lazy_match   (G1.max_lazy_match)
#define good_match       (G1.good_match)
#define nice_match       (G1.nice_match)
#endif

/* =========================================================================== */
/* all members below are zeroed out in pack_gzip() for each next file */

	uint32_t crc;   /* shift register contents */
	/*uint32_t *crc_32_tab;*/

/* window position at the beginning of the current output block. Gets
 * negative when the window is moved backwards.
 */
	lng block_start;

	unsigned ins_h; /* hash index of string to be inserted */

/* Number of bits by which ins_h and del_h must be shifted at each
 * input step. It must be such that after MIN_MATCH steps, the oldest
 * byte no longer takes part in the hash key, that is:
 * H_SHIFT * MIN_MATCH >= HASH_BITS
 */
#define H_SHIFT ((HASH_BITS+MIN_MATCH-1) / MIN_MATCH)

/* Length of the best match at previous step. Matches not greater than this
 * are discarded. This is used in the lazy match evaluation.
 */
	unsigned prev_length;

	unsigned strstart;      /* start of string to insert */
	unsigned match_start;   /* start of matching string */
	unsigned lookahead;     /* number of valid bytes ahead in window */

/* number of input bytes */
	ulg isize;              /* only 32 bits stored in .gz file */

/* bbox always use stdin/stdout */
#define ifd STDIN_FILENO        /* input file descriptor */
#define ofd STDOUT_FILENO       /* output file descriptor */

#ifdef DEBUG
	unsigned insize;        /* valid bytes in l_buf */
#endif
	unsigned outcnt;        /* bytes in output buffer */
	smallint eofile;        /* flag set at end of input file */

/* ===========================================================================
 * Local data used by the "bit string" routines.
 */

/* Output buffer. bits are inserted starting at the bottom (least significant
 * bits).
 */
	unsigned bi_buf;        /* was unsigned short */

#undef BUF_SIZE
#define BUF_SIZE (int)(8 * sizeof(G1.bi_buf))

/* Number of bits used within bi_buf. (bi_buf might be implemented on
 * more than 16 bits on some systems.)
 */
	unsigned bi_valid;

#ifdef DEBUG
	ulg bits_sent;  /* bit length of the compressed data */
# define DEBUG_bits_sent(v) (void)(G1.bits_sent v)
#else
# define DEBUG_bits_sent(v) ((void)0)
#endif
};

/* The bbox globals trick: all state lives one struct below ptr_to_globals */
#define G1 (*(ptr_to_globals - 1))

/* ===========================================================================
 * Write the output buffer outbuf[0..outcnt-1] and update bytes_out.
 * (used for the compressed data only)
 */
static void flush_outbuf(void)
{
	if (G1.outcnt == 0)
		return;

	xwrite(ofd, (char *) G1.outbuf, G1.outcnt);
	G1.outcnt = 0;
}

/* ===========================================================================
 */
/* put_8bit is used for the compressed output.
 * Flushes eagerly when the buffer fills, so outcnt < OUTBUFSIZ on exit. */
#define put_8bit(c) \
do { \
	G1.outbuf[G1.outcnt++] = (c); \
	if (G1.outcnt == OUTBUFSIZ) \
		flush_outbuf(); \
} while (0)

/* Output a 16 bit value, lsb first */
static void put_16bit(ush w)
{
	/* GCC 4.2.1 won't optimize out redundant loads of G1.outcnt
	 * (probably because of fear of aliasing with G1.outbuf[]
	 * stores), do it explicitly:
	 */
	unsigned outcnt = G1.outcnt;
	uch *dst = &G1.outbuf[outcnt];

#if BB_UNALIGNED_MEMACCESS_OK && BB_LITTLE_ENDIAN
	if (outcnt < OUTBUFSIZ-2) {
		/* Common case */
		ush *dst16 = (void*) dst;
		*dst16 = w;     /* unaligned LSB 16-bit store */
		G1.outcnt = outcnt + 2;
		return;
	}
	*dst = (uch)w;
	w >>= 8;
	G1.outcnt = ++outcnt;
#else
	*dst = (uch)w;
	w >>= 8;
	if (outcnt < OUTBUFSIZ-2) {
		/* Common case */
		dst[1] = w;
		G1.outcnt = outcnt + 2;
		return;
	}
	G1.outcnt = ++outcnt;
#endif

	/* Slowpath: we will need to do flush_outbuf() */
	if (outcnt == OUTBUFSIZ)
		flush_outbuf(); /* here */
	put_8bit(w);    /* or here */
}

#define OPTIMIZED_PUT_32BIT (CONFIG_GZIP_FAST > 0 && BB_UNALIGNED_MEMACCESS_OK && BB_LITTLE_ENDIAN)
/* Output a 32 bit value, lsb first; single unaligned store when possible */
static void put_32bit(ulg n)
{
	if (OPTIMIZED_PUT_32BIT) {
		unsigned outcnt = G1.outcnt;
		if (outcnt < OUTBUFSIZ-4) {
			/* Common case */
			ulg *dst32 = (void*) &G1.outbuf[outcnt];
			*dst32 = n;     /* unaligned LSB 32-bit store */
			//bb_error_msg("%p", dst32); // store alignment debugging
			G1.outcnt = outcnt + 4;
			return;
		}
	}
	put_16bit(n);
	put_16bit(n >> 16);
}
static ALWAYS_INLINE void flush_outbuf_if_32bit_optimized(void)
{
	/* If put_32bit() performs 32bit stores && it is used in send_bits() */
	if (OPTIMIZED_PUT_32BIT && BUF_SIZE > 16)
		flush_outbuf();
}

/* ===========================================================================
 * Run a set of bytes through the crc shift register, accumulating into
 * G1.crc.
 * NOTE(review): unlike stock gzip's updcrc(), s must not be NULL here -
 * the "NULL reinitializes the crc" convention and the return value were
 * dropped in the bbox port; callers reset G1.crc directly.
 */
static void updcrc(uch * s, unsigned n)
{
	G1.crc = crc32_block_endian0(G1.crc, s, n, global_crc32_table /*G1.crc_32_tab*/);
}

/* ===========================================================================
 * Read a new buffer from the current input file, update the crc and
 * input file size (G1.isize).
 * Returns the byte count, 0 on EOF, or (unsigned)-1 on read error.
 * IN assertion: size >= 2 (for end-of-line translation)
 * NOTE(review): translate_eol is hard-wired to 0 in this port, so no
 * end-of-line translation is actually performed here.
 */
static unsigned file_read(void *buf, unsigned size)
{
	unsigned len;

	Assert(G1.insize == 0, "l_buf not empty");

	len = safe_read(ifd, buf, size);
	if (len == (unsigned)(-1) || len == 0)
		return len;

	updcrc(buf, len);
	G1.isize += len;
	return len;
}

/* ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
535 */ 536static void send_bits(unsigned value, unsigned length) 537{ 538 unsigned new_buf; 539 540#ifdef DEBUG 541 Tracev((stderr, " l %2d v %4x ", length, value)); 542 Assert(length > 0 && length <= 15, "invalid length"); 543 DEBUG_bits_sent(+= length); 544#endif 545 BUILD_BUG_ON(BUF_SIZE != 32 && BUF_SIZE != 16); 546 547 new_buf = G1.bi_buf | (value << G1.bi_valid); 548 /* NB: the above may sometimes do "<< 32" shift (undefined) 549 * if check below is changed to "length > BUF_SIZE" instead of >= */ 550 length += G1.bi_valid; 551 552 /* If bi_buf is full */ 553 if (length >= BUF_SIZE) { 554 /* ...use (valid) bits from bi_buf and 555 * (BUF_SIZE - bi_valid) bits from value, 556 * leaving (width - (BUF_SIZE-bi_valid)) unused bits in value. 557 */ 558 value >>= (BUF_SIZE - G1.bi_valid); 559 if (BUF_SIZE == 32) { 560 put_32bit(new_buf); 561 } else { /* 16 */ 562 put_16bit(new_buf); 563 } 564 new_buf = value; 565 length -= BUF_SIZE; 566 } 567 G1.bi_buf = new_buf; 568 G1.bi_valid = length; 569} 570 571/* =========================================================================== 572 * Reverse the first len bits of a code, using straightforward code (a faster 573 * method would use a table) 574 * IN assertion: 1 <= len <= 15 575 */ 576static unsigned bi_reverse(unsigned code, int len) 577{ 578 unsigned res = 0; 579 580 while (1) { 581 res |= code & 1; 582 if (--len <= 0) return res; 583 code >>= 1; 584 res <<= 1; 585 } 586} 587 588/* =========================================================================== 589 * Write out any remaining bits in an incomplete byte. 
 */
static void bi_windup(void)
{
	unsigned bits = G1.bi_buf;
	int cnt = G1.bi_valid;

	/* Emit pending bits byte by byte; final partial byte is
	 * zero-padded in its high bits. */
	while (cnt > 0) {
		put_8bit(bits);
		bits >>= 8;
		cnt -= 8;
	}
	G1.bi_buf = 0;
	G1.bi_valid = 0;
	DEBUG_bits_sent(= (G1.bits_sent + 7) & ~7);
}

/* ===========================================================================
 * Copy a stored block to the zip file, storing first the length and its
 * one's complement if requested.
 */
static void copy_block(char *buf, unsigned len, int header)
{
	bi_windup();    /* align on byte boundary */

	if (header) {
		/* LEN (low 16 bits) and NLEN (one's complement) in one store */
		unsigned v = ((uint16_t)len) | ((~len) << 16);
		put_32bit(v);
		DEBUG_bits_sent(+= 2 * 16);
	}
	DEBUG_bits_sent(+= (ulg) len << 3);
	while (len--) {
		put_8bit(*buf++);
	}
	/* The above can 32-bit misalign outbuf */
	if (G1.outcnt & 3) /* syscalls are expensive, is it really misaligned? */
		flush_outbuf_if_32bit_optimized();
}

/* ===========================================================================
 * Fill the window when the lookahead becomes insufficient.
 * Updates strstart and lookahead, and sets eofile if end of input file.
 * IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0
 * OUT assertions: at least one byte has been read, or eofile is set;
 * file reads are performed for at least two bytes (required for the
 * translate_eol option).
 */
static void fill_window(void)
{
	unsigned n, m;
	unsigned more = WINDOW_SIZE - G1.lookahead - G1.strstart;
	/* Amount of free space at the end of the window. */

	/* If the window is almost full and there is insufficient lookahead,
	 * move the upper half to the lower one to make room in the upper half.
	 */
	if (more == (unsigned) -1) {
		/* Very unlikely, but possible on 16 bit machine if strstart == 0
		 * and lookahead == 1 (input done one byte at time)
		 */
		more--;
	} else if (G1.strstart >= WSIZE + MAX_DIST) {
		/* By the IN assertion, the window is not empty so we can't confuse
		 * more == 0 with more == 64K on a 16 bit machine.
		 */
		Assert(WINDOW_SIZE == 2 * WSIZE, "no sliding with BIG_MEM");

		/* halves do not overlap, plain memcpy is safe */
		memcpy(G1.window, G1.window + WSIZE, WSIZE);
		G1.match_start -= WSIZE;
		G1.strstart -= WSIZE;   /* we now have strstart >= MAX_DIST: */

		G1.block_start -= WSIZE;

		/* Rebase every hash-chain index by WSIZE; indices that would
		 * go negative are clamped to 0 (= "no chain"). */
		for (n = 0; n < HASH_SIZE; n++) {
			m = head[n];
			head[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
		}
		for (n = 0; n < WSIZE; n++) {
			m = G1.prev[n];
			G1.prev[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
			/* If n is not on any hash chain, prev[n] is garbage but
			 * its value will never be used.
			 */
		}
		more += WSIZE;
	}
	/* At this point, more >= 2 */
	if (!G1.eofile) {
		n = file_read(G1.window + G1.strstart + G1.lookahead, more);
		if (n == 0 || n == (unsigned) -1) {
			G1.eofile = 1;
		} else {
			G1.lookahead += n;
		}
	}
}
/* Both users fill window with the same loop: */
static void fill_window_if_needed(void)
{
	while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile)
		fill_window();
}

/* ===========================================================================
 * Set match_start to the longest match starting at the given string and
 * return its length. Matches shorter or equal to prev_length are discarded,
 * in which case the result is equal to prev_length and match_start is
 * garbage.
 * IN assertions: cur_match is the head of the hash chain for the current
 * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 */

/* For MSDOS, OS/2 and 386 Unix, an optimized version is in match.asm or
 * match.s.
The code is functionally equivalent, so you can use the C version
 * if desired.
 */
static int longest_match(IPos cur_match)
{
	unsigned chain_length = max_chain_length;       /* max hash chain length */
	uch *scan = G1.window + G1.strstart;    /* current string */
	uch *match;     /* matched string */
	int len;        /* length of current match */
	int best_len = G1.prev_length;  /* best match length so far */
	IPos limit = G1.strstart > (IPos) MAX_DIST ? G1.strstart - (IPos) MAX_DIST : 0;
	/* Stop when cur_match becomes <= limit. To simplify the code,
	 * we prevent matches with the string of window index 0.
	 */

/* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
 * It is easy to get rid of this optimization if necessary.
 */
#if HASH_BITS < 8 || MAX_MATCH != 258
# error Code too clever
#endif
	uch *strend = G1.window + G1.strstart + MAX_MATCH;
	/* Cached tail bytes of the current best match: a candidate that
	 * cannot beat best_len is rejected on these two bytes alone. */
	uch scan_end1 = scan[best_len - 1];
	uch scan_end = scan[best_len];

	/* Do not waste too much time if we already have a good match: */
	if (G1.prev_length >= good_match) {
		chain_length >>= 2;
	}
	Assert(G1.strstart <= WINDOW_SIZE - MIN_LOOKAHEAD, "insufficient lookahead");

	do {
		Assert(cur_match < G1.strstart, "no future");
		match = G1.window + cur_match;

		/* Skip to next match if the match length cannot increase
		 * or if the match length is less than 2:
		 */
		if (match[best_len] != scan_end
		 || match[best_len - 1] != scan_end1
		 || *match != *scan || *++match != scan[1]
		) {
			continue;
		}

		/* The check at best_len-1 can be removed because it will be made
		 * again later. (This heuristic is not always a win.)
		 * It is not necessary to compare scan[2] and match[2] since they
		 * are always equal when the other bytes match, given that
		 * the hash keys are equal and that HASH_BITS >= 8.
		 */
		scan += 2, match++;

		/* We check for insufficient lookahead only every 8th comparison;
		 * the 256th check will be made at strstart+258.
		 */
		do {
		} while (*++scan == *++match && *++scan == *++match &&
				 *++scan == *++match && *++scan == *++match &&
				 *++scan == *++match && *++scan == *++match &&
				 *++scan == *++match && *++scan == *++match && scan < strend);

		len = MAX_MATCH - (int) (strend - scan);
		scan = strend - MAX_MATCH;

		if (len > best_len) {
			G1.match_start = cur_match;
			best_len = len;
			if (len >= nice_match)
				break;
			scan_end1 = scan[best_len - 1];
			scan_end = scan[best_len];
		}
	} while ((cur_match = G1.prev[cur_match & WMASK]) > limit
			 && --chain_length != 0);

	return best_len;
}

#ifdef DEBUG
/* ===========================================================================
 * Check that the match at match_start is indeed a match.
 */
static void check_match(IPos start, IPos match, int length)
{
	/* check that the match is indeed a match */
	if (memcmp(G1.window + match, G1.window + start, length) != 0) {
		bb_error_msg(" start %d, match %d, length %d", start, match, length);
		bb_error_msg("invalid match");
	}
	if (verbose > 1) {
		bb_error_msg("\\[%d,%d]", start - match, length);
		do {
			bb_putchar_stderr(G1.window[start++]);
		} while (--length != 0);
	}
}
#else
# define check_match(start, match, length) ((void)0)
#endif


/* trees.c -- output deflated data using Huffman coding
 * Copyright (C) 1992-1993 Jean-loup Gailly
 * This is free software; you can redistribute it and/or modify it under the
 * terms of the GNU General Public License, see the file COPYING.
 */

/* PURPOSE
 *      Encode various sets of source values using variable-length
 *      binary code trees.
 *
 * DISCUSSION
 *      The PKZIP "deflation" process uses several Huffman trees.
The more 816 * common source values are represented by shorter bit sequences. 817 * 818 * Each code tree is stored in the ZIP file in a compressed form 819 * which is itself a Huffman encoding of the lengths of 820 * all the code strings (in ascending order by source values). 821 * The actual code strings are reconstructed from the lengths in 822 * the UNZIP process, as described in the "application note" 823 * (APPNOTE.TXT) distributed as part of PKWARE's PKZIP program. 824 * 825 * REFERENCES 826 * Lynch, Thomas J. 827 * Data Compression: Techniques and Applications, pp. 53-55. 828 * Lifetime Learning Publications, 1985. ISBN 0-534-03418-7. 829 * 830 * Storer, James A. 831 * Data Compression: Methods and Theory, pp. 49-50. 832 * Computer Science Press, 1988. ISBN 0-7167-8156-5. 833 * 834 * Sedgewick, R. 835 * Algorithms, p290. 836 * Addison-Wesley, 1983. ISBN 0-201-06672-6. 837 * 838 * INTERFACE 839 * void ct_init() 840 * Allocate the match buffer, initialize the various tables [and save 841 * the location of the internal file attribute (ascii/binary) and 842 * method (DEFLATE/STORE) -- deleted in bbox] 843 * 844 * void ct_tally(int dist, int lc); 845 * Save the match info and tally the frequency counts. 846 * 847 * ulg flush_block(char *buf, ulg stored_len, int eof) 848 * Determine the best encoding for the current block: dynamic trees, 849 * static trees or store, and output the encoded block to the zip 850 * file. Returns the total compressed length for the file so far. 
 */

#define MAX_BITS 15
/* All codes must not exceed MAX_BITS bits */

#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define LENGTH_CODES 29
/* number of length codes, not counting the special END_BLOCK code */

#define LITERALS  256
/* number of literal bytes 0..255 */

#define END_BLOCK 256
/* end of block literal code */

#define L_CODES (LITERALS+1+LENGTH_CODES)
/* number of Literal or Length codes, including the END_BLOCK code */

#define D_CODES 30
/* number of distance codes */

#define BL_CODES 19
/* number of codes used to transfer the bit lengths */

/* extra bits for each length code */
static const uint8_t extra_lbits[LENGTH_CODES] ALIGN1 = {
	0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4,
	4, 4, 5, 5, 5, 5, 0
};

/* extra bits for each distance code */
static const uint8_t extra_dbits[D_CODES] ALIGN1 = {
	0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9,
	10, 10, 11, 11, 12, 12, 13, 13
};

/* extra bits for each bit length code */
static const uint8_t extra_blbits[BL_CODES] ALIGN1 = {
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7 };

/* Order in which the bit length code lengths are transmitted, by
 * decreasing probability, so trailing unused codes can be omitted.
 * (The original comment here, "number of codes at each bit length for
 * an optimal tree", described bl_count[] - see struct globals2.) */
static const uint8_t bl_order[BL_CODES] ALIGN1 = {
	16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };

#define STORED_BLOCK 0
#define STATIC_TREES 1
#define DYN_TREES    2
/* The three kinds of block type */

#ifndef LIT_BUFSIZE
# ifdef SMALL_MEM
#  define LIT_BUFSIZE  0x2000
# else
#  ifdef MEDIUM_MEM
#   define LIT_BUFSIZE  0x4000
#  else
#   define LIT_BUFSIZE  0x8000
#  endif
# endif
#endif
#ifndef DIST_BUFSIZE
# define DIST_BUFSIZE  LIT_BUFSIZE
#endif
/* Sizes of match buffers for literals/lengths and distances. There are
 * 4 reasons for limiting LIT_BUFSIZE to 64K:
 *   - frequencies can be kept in 16 bit counters
 *   - if compression is not successful for the first block, all input data is
 *     still in the window so we can still emit a stored block even when input
 *     comes from standard input. (This can also be done for all blocks if
 *     LIT_BUFSIZE is not greater than 32K.)
 *   - if compression is not successful for a file smaller than 64K, we can
 *     even emit a stored file instead of a stored block (saving 5 bytes).
 *   - creating new Huffman trees less frequently may not provide fast
 *     adaptation to changes in the input data statistics. (Take for
 *     example a binary file with poorly compressible code followed by
 *     a highly compressible string table.) Smaller buffer sizes give
 *     fast adaptation but have of course the overhead of transmitting trees
 *     more frequently.
 *   - I can't count above 4
 * The current code is general and allows DIST_BUFSIZE < LIT_BUFSIZE (to save
 * memory at the expense of compression). Some optimizations would be possible
 * if we rely on DIST_BUFSIZE == LIT_BUFSIZE.
 */
#define REP_3_6      16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */
#define REPZ_3_10    17
/* repeat a zero length 3-10 times (3 bits of repeat count) */
#define REPZ_11_138  18
/* repeat a zero length 11-138 times (7 bits of repeat count) */

/* ===========================================================================
 */
/* Data structure describing a single value and its code string.
 */
typedef struct ct_data {
	union {
		ush freq;               /* frequency count */
		ush code;               /* bit string */
	} fc;
	union {
		ush dad;                /* father node in Huffman tree */
		ush len;                /* length of bit string */
	} dl;
} ct_data;

#define Freq fc.freq
#define Code fc.code
#define Dad dl.dad
#define Len dl.len

#define HEAP_SIZE (2*L_CODES + 1)
/* maximum heap size */

typedef struct tree_desc {
	ct_data *dyn_tree;         /* the dynamic tree */
	ct_data *static_tree;      /* corresponding static tree or NULL */
	const uint8_t *extra_bits; /* extra bits for each code or NULL */
	int extra_base;            /* base index for extra_bits */
	int elems;                 /* max number of elements in the tree */
	int max_length;            /* max bit length for the codes */
	int max_code;              /* largest code with non zero frequency */
} tree_desc;

struct globals2 {

	ush heap[HEAP_SIZE];    /* heap used to build the Huffman trees */
	int heap_len;           /* number of elements in the heap */
	int heap_max;           /* element of largest frequency */

/* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
 * The same heap array is used to build all trees.
 */

	ct_data dyn_ltree[HEAP_SIZE];       /* literal and length tree */
	ct_data dyn_dtree[2 * D_CODES + 1]; /* distance tree */

	ct_data static_ltree[L_CODES + 2];

/* The static literal tree. Since the bit lengths are imposed, there is no
 * need for the L_CODES extra codes used during heap construction. However
 * The codes 286 and 287 are needed to build a canonical tree (see ct_init
 * below).
 */

	ct_data static_dtree[D_CODES];

/* The static distance tree. (Actually a trivial tree since all codes use
 * 5 bits.)
 */

	ct_data bl_tree[2 * BL_CODES + 1];

/* Huffman tree for the bit lengths */

	tree_desc l_desc;
	tree_desc d_desc;
	tree_desc bl_desc;

	ush bl_count[MAX_BITS + 1];

/* bl_count[b]: number of codes of length b bits in the tree currently
 * being built (filled by gen_bitlen(), consumed by gen_codes()).
 */

	uch depth[2 * L_CODES + 1];

/* Depth of each subtree used as tie breaker for trees of equal frequency */

	uch length_code[MAX_MATCH - MIN_MATCH + 1];

/* length code for each normalized match length (0 == MIN_MATCH) */

	uch dist_code[512];

/* distance codes. The first 256 values correspond to the distances
 * 3 .. 258, the last 256 values correspond to the top 8 bits of
 * the 15 bit distances.
 */

	int base_length[LENGTH_CODES];

/* First normalized length for each code (0 = MIN_MATCH) */

	int base_dist[D_CODES];

/* First normalized distance for each code (0 = distance of 1) */

	uch flag_buf[LIT_BUFSIZE / 8];

/* flag_buf is a bit array distinguishing literals from lengths in
 * l_buf, thus indicating the presence or absence of a distance.
 */

	unsigned last_lit;      /* running index in l_buf */
	unsigned last_dist;     /* running index in d_buf */
	unsigned last_flags;    /* running index in flag_buf */
	uch flags;              /* current flags not yet saved in flag_buf */
	uch flag_bit;           /* current bit used in flags */

/* bits are filled in flags starting at bit 0 (least significant).
 * Note: these flags are overkill in the current code since we don't
 * take advantage of DIST_BUFSIZE == LIT_BUFSIZE.
 */

	ulg opt_len;            /* bit length of current block with optimal trees */
	ulg static_len;         /* bit length of current block with static trees */

//	ulg compressed_len;     /* total bit length of compressed file */
};

#define G2ptr ((struct globals2*)(ptr_to_globals))
#define G2 (*G2ptr)

/* ===========================================================================
 */
#ifndef DEBUG
/* Send a code of the given tree. c and tree must not have side effects */
# define SEND_CODE(c, tree) send_bits(tree[c].Code, tree[c].Len)
#else
# define SEND_CODE(c, tree) \
{ \
	if (verbose > 1) bb_error_msg("\ncd %3d ", (c)); \
	send_bits(tree[c].Code, tree[c].Len); \
}
#endif

#define D_CODE(dist) \
	((dist) < 256 ? G2.dist_code[dist] : G2.dist_code[256 + ((dist)>>7)])
/* Mapping from a distance to a distance code. dist is the distance - 1 and
 * must not have side effects. dist_code[256] and dist_code[257] are never
 * used.
 * The arguments must not have side effects.
 */

/* ===========================================================================
 * Initialize a new block: reset all frequency counters and the
 * literal/distance/flag bookkeeping.
 */
static void init_block(void)
{
	int n;	/* iterates over tree elements */

	/* Initialize the trees. */
	for (n = 0; n < L_CODES; n++)
		G2.dyn_ltree[n].Freq = 0;
	for (n = 0; n < D_CODES; n++)
		G2.dyn_dtree[n].Freq = 0;
	for (n = 0; n < BL_CODES; n++)
		G2.bl_tree[n].Freq = 0;

	/* Every block ends with END_BLOCK, so count it up front */
	G2.dyn_ltree[END_BLOCK].Freq = 1;
	G2.opt_len = G2.static_len = 0;
	G2.last_lit = G2.last_dist = G2.last_flags = 0;
	G2.flags = 0;
	G2.flag_bit = 1;
}

/* ===========================================================================
 * Restore the heap property by moving down the tree starting at node k,
 * exchanging a node with the smallest of its two sons if necessary, stopping
 * when the heap property is re-established (each father smaller than its
 * two sons).
 */

/* Compares to subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length. */
#define SMALLER(tree, n, m) \
	(tree[n].Freq < tree[m].Freq \
	|| (tree[n].Freq == tree[m].Freq && G2.depth[n] <= G2.depth[m]))

static void pqdownheap(ct_data * tree, int k)
{
	int v = G2.heap[k];
	int j = k << 1;	/* left son of k */

	while (j <= G2.heap_len) {
		/* Set j to the smallest of the two sons: */
		if (j < G2.heap_len && SMALLER(tree, G2.heap[j + 1], G2.heap[j]))
			j++;

		/* Exit if v is smaller than both sons */
		if (SMALLER(tree, v, G2.heap[j]))
			break;

		/* Exchange v with the smallest son */
		G2.heap[k] = G2.heap[j];
		k = j;

		/* And continue down the tree, setting j to the left son of k */
		j <<= 1;
	}
	G2.heap[k] = v;
}

/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 *    above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 *     array bl_count contains the frequencies for each bit length.
 *     The length opt_len is updated; static_len is also updated if stree is
 *     not null.
 */
static void gen_bitlen(tree_desc * desc)
{
	ct_data *tree = desc->dyn_tree;
	const uint8_t *extra = desc->extra_bits;
	int base = desc->extra_base;
	int max_code = desc->max_code;
	int max_length = desc->max_length;
	ct_data *stree = desc->static_tree;
	int h;              /* heap index */
	int n, m;           /* iterate over the tree elements */
	int bits;           /* bit length */
	int xbits;          /* extra bits */
	ush f;              /* frequency */
	int overflow = 0;   /* number of elements with bit length too large */

	for (bits = 0; bits <= MAX_BITS; bits++)
		G2.bl_count[bits] = 0;

	/* In a first pass, compute the optimal bit lengths (which may
	 * overflow in the case of the bit length tree).
	 */
	tree[G2.heap[G2.heap_max]].Len = 0;	/* root of the heap */

	for (h = G2.heap_max + 1; h < HEAP_SIZE; h++) {
		n = G2.heap[h];
		bits = tree[tree[n].Dad].Len + 1;
		if (bits > max_length) {
			bits = max_length;
			overflow++;
		}
		tree[n].Len = (ush) bits;
		/* We overwrite tree[n].Dad which is no longer needed */

		if (n > max_code)
			continue;	/* not a leaf node */

		G2.bl_count[bits]++;
		xbits = 0;
		if (n >= base)
			xbits = extra[n - base];
		f = tree[n].Freq;
		G2.opt_len += (ulg) f *(bits + xbits);

		if (stree)
			G2.static_len += (ulg) f * (stree[n].Len + xbits);
	}
	if (overflow == 0)
		return;

	Trace((stderr, "\nbit length overflow\n"));
	/* This happens for example on obj2 and pic of the Calgary corpus */

	/* Find the first bit length which could increase: */
	do {
		bits = max_length - 1;
		while (G2.bl_count[bits] == 0)
			bits--;
		G2.bl_count[bits]--;	/* move one leaf down the tree */
		G2.bl_count[bits + 1] += 2;	/* move one overflow item as its brother */
		G2.bl_count[max_length]--;
		/* The brother of the overflow item also moves one step up,
		 * but this does not affect bl_count[max_length]
		 */
		overflow -= 2;
	} while (overflow > 0);

	/* Now recompute all bit lengths, scanning in increasing frequency.
	 * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
	 * lengths instead of fixing only the wrong ones. This idea is taken
	 * from 'ar' written by Haruhiko Okumura.)
	 */
	for (bits = max_length; bits != 0; bits--) {
		n = G2.bl_count[bits];
		while (n != 0) {
			m = G2.heap[--h];
			if (m > max_code)
				continue;	/* internal node: skip WITHOUT consuming n */
			if (tree[m].Len != (unsigned) bits) {
				Trace((stderr, "code %d bits %d->%d\n", m, tree[m].Len, bits));
				G2.opt_len += ((int32_t) bits - tree[m].Len) * tree[m].Freq;
				tree[m].Len = bits;
			}
			n--;
		}
	}
}

/* ===========================================================================
 * Generate the codes for a given tree and bit counts (which need not be
 * optimal).
 * IN assertion: the array bl_count contains the bit length statistics for
 * the given tree and the field len is set for all tree elements.
 * OUT assertion: the field code is set for all tree elements of non
 *     zero code length.
 */
static void gen_codes(ct_data * tree, int max_code)
{
	ush next_code[MAX_BITS + 1];	/* next code value for each bit length */
	ush code = 0;       /* running code value */
	int bits;           /* bit index */
	int n;              /* code index */

	/* The distribution counts are first used to generate the code values
	 * without bit reversal.
	 */
	for (bits = 1; bits <= MAX_BITS; bits++) {
		next_code[bits] = code = (code + G2.bl_count[bits - 1]) << 1;
	}
	/* Check that the bit counts in bl_count are consistent. The last code
	 * must be all ones.
	 */
	Assert(code + G2.bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
			"inconsistent bit counts");
	Tracev((stderr, "\ngen_codes: max_code %d ", max_code));

	for (n = 0; n <= max_code; n++) {
		int len = tree[n].Len;

		if (len == 0)
			continue;
		/* Now reverse the bits (DEFLATE sends Huffman codes LSB first) */
		tree[n].Code = bi_reverse(next_code[len]++, len);

		Tracec(tree != G2.static_ltree,
			(stderr, "\nn %3d %c l %2d c %4x (%x) ", n,
				(n > ' ' ? n : ' '), len, tree[n].Code,
				next_code[len] - 1));
	}
}

/* ===========================================================================
 * Construct one Huffman tree and assigns the code bit strings and lengths.
 * Update the total bit length for the current block.
 * IN assertion: the field freq is set for all tree elements.
 * OUT assertions: the fields len and code are set to the optimal bit length
 *     and corresponding code. The length opt_len is updated; static_len is
 *     also updated if stree is not null. The field max_code is set.
 */

/* Remove the smallest element from the heap and recreate the heap with
 * one less element. Updates heap and heap_len. */

#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */

#define PQREMOVE(tree, top) \
do { \
	top = G2.heap[SMALLEST]; \
	G2.heap[SMALLEST] = G2.heap[G2.heap_len--]; \
	pqdownheap(tree, SMALLEST); \
} while (0)

static void build_tree(tree_desc * desc)
{
	ct_data *tree = desc->dyn_tree;
	ct_data *stree = desc->static_tree;
	int elems = desc->elems;
	int n, m;           /* iterate over heap elements */
	int max_code = -1;  /* largest code with non zero frequency */
	int node = elems;   /* next internal node of the tree */

	/* Construct the initial heap, with least frequent element in
	 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
	 * heap[0] is not used.
	 */
	G2.heap_len = 0;
	G2.heap_max = HEAP_SIZE;

	for (n = 0; n < elems; n++) {
		if (tree[n].Freq != 0) {
			G2.heap[++G2.heap_len] = max_code = n;
			G2.depth[n] = 0;
		} else {
			tree[n].Len = 0;
		}
	}

	/* The pkzip format requires that at least one distance code exists,
	 * and that at least one bit should be sent even if there is only one
	 * possible code. So to avoid special checks later on we force at least
	 * two codes of non zero frequency.
	 */
	while (G2.heap_len < 2) {
		int new = G2.heap[++G2.heap_len] = (max_code < 2 ? ++max_code : 0);

		tree[new].Freq = 1;
		G2.depth[new] = 0;
		G2.opt_len--;
		if (stree)
			G2.static_len -= stree[new].Len;
		/* new is 0 or 1 so it does not have extra bits */
	}
	desc->max_code = max_code;

	/* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
	 * establish sub-heaps of increasing lengths:
	 */
	for (n = G2.heap_len / 2; n >= 1; n--)
		pqdownheap(tree, n);

	/* Construct the Huffman tree by repeatedly combining the least two
	 * frequent nodes.
	 */
	do {
		PQREMOVE(tree, n);	/* n = node of least frequency */
		m = G2.heap[SMALLEST];	/* m = node of next least frequency */

		G2.heap[--G2.heap_max] = n;	/* keep the nodes sorted by frequency */
		G2.heap[--G2.heap_max] = m;

		/* Create a new node father of n and m */
		tree[node].Freq = tree[n].Freq + tree[m].Freq;
		G2.depth[node] = MAX(G2.depth[n], G2.depth[m]) + 1;
		tree[n].Dad = tree[m].Dad = (ush) node;
#ifdef DUMP_BL_TREE
		if (tree == G2.bl_tree) {
			bb_error_msg("\nnode %d(%d), sons %d(%d) %d(%d)",
					node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
		}
#endif
		/* and insert the new node in the heap */
		G2.heap[SMALLEST] = node++;
		pqdownheap(tree, SMALLEST);
	} while (G2.heap_len >= 2);

	G2.heap[--G2.heap_max] = G2.heap[SMALLEST];

	/* At this point, the fields freq and dad are set. We can now
	 * generate the bit lengths.
	 */
	gen_bitlen((tree_desc *) desc);

	/* The field len is now set, we can generate the bit codes */
	gen_codes((ct_data *) tree, max_code);
}

/* ===========================================================================
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree. Updates opt_len to take into account the repeat
 * counts. (The contribution of the bit length codes will be added later
 * during the construction of bl_tree.)
1396 */ 1397static void scan_tree(ct_data * tree, int max_code) 1398{ 1399 int n; /* iterates over all tree elements */ 1400 int prevlen = -1; /* last emitted length */ 1401 int curlen; /* length of current code */ 1402 int nextlen = tree[0].Len; /* length of next code */ 1403 int count = 0; /* repeat count of the current code */ 1404 int max_count = 7; /* max repeat count */ 1405 int min_count = 4; /* min repeat count */ 1406 1407 if (nextlen == 0) { 1408 max_count = 138; 1409 min_count = 3; 1410 } 1411 tree[max_code + 1].Len = 0xffff; /* guard */ 1412 1413 for (n = 0; n <= max_code; n++) { 1414 curlen = nextlen; 1415 nextlen = tree[n + 1].Len; 1416 if (++count < max_count && curlen == nextlen) 1417 continue; 1418 1419 if (count < min_count) { 1420 G2.bl_tree[curlen].Freq += count; 1421 } else if (curlen != 0) { 1422 if (curlen != prevlen) 1423 G2.bl_tree[curlen].Freq++; 1424 G2.bl_tree[REP_3_6].Freq++; 1425 } else if (count <= 10) { 1426 G2.bl_tree[REPZ_3_10].Freq++; 1427 } else { 1428 G2.bl_tree[REPZ_11_138].Freq++; 1429 } 1430 count = 0; 1431 prevlen = curlen; 1432 1433 max_count = 7; 1434 min_count = 4; 1435 if (nextlen == 0) { 1436 max_count = 138; 1437 min_count = 3; 1438 } else if (curlen == nextlen) { 1439 max_count = 6; 1440 min_count = 3; 1441 } 1442 } 1443} 1444 1445/* =========================================================================== 1446 * Send a literal or distance tree in compressed form, using the codes in 1447 * bl_tree. 
1448 */ 1449static void send_tree(ct_data * tree, int max_code) 1450{ 1451 int n; /* iterates over all tree elements */ 1452 int prevlen = -1; /* last emitted length */ 1453 int curlen; /* length of current code */ 1454 int nextlen = tree[0].Len; /* length of next code */ 1455 int count = 0; /* repeat count of the current code */ 1456 int max_count = 7; /* max repeat count */ 1457 int min_count = 4; /* min repeat count */ 1458 1459/* tree[max_code+1].Len = -1; *//* guard already set */ 1460 if (nextlen == 0) 1461 max_count = 138, min_count = 3; 1462 1463 for (n = 0; n <= max_code; n++) { 1464 curlen = nextlen; 1465 nextlen = tree[n + 1].Len; 1466 if (++count < max_count && curlen == nextlen) { 1467 continue; 1468 } else if (count < min_count) { 1469 do { 1470 SEND_CODE(curlen, G2.bl_tree); 1471 } while (--count); 1472 } else if (curlen != 0) { 1473 if (curlen != prevlen) { 1474 SEND_CODE(curlen, G2.bl_tree); 1475 count--; 1476 } 1477 Assert(count >= 3 && count <= 6, " 3_6?"); 1478 SEND_CODE(REP_3_6, G2.bl_tree); 1479 send_bits(count - 3, 2); 1480 } else if (count <= 10) { 1481 SEND_CODE(REPZ_3_10, G2.bl_tree); 1482 send_bits(count - 3, 3); 1483 } else { 1484 SEND_CODE(REPZ_11_138, G2.bl_tree); 1485 send_bits(count - 11, 7); 1486 } 1487 count = 0; 1488 prevlen = curlen; 1489 if (nextlen == 0) { 1490 max_count = 138; 1491 min_count = 3; 1492 } else if (curlen == nextlen) { 1493 max_count = 6; 1494 min_count = 3; 1495 } else { 1496 max_count = 7; 1497 min_count = 4; 1498 } 1499 } 1500} 1501 1502/* =========================================================================== 1503 * Construct the Huffman tree for the bit lengths and return the index in 1504 * bl_order of the last bit length code to send. 
1505 */ 1506static int build_bl_tree(void) 1507{ 1508 int max_blindex; /* index of last bit length code of non zero freq */ 1509 1510 /* Determine the bit length frequencies for literal and distance trees */ 1511 scan_tree(G2.dyn_ltree, G2.l_desc.max_code); 1512 scan_tree(G2.dyn_dtree, G2.d_desc.max_code); 1513 1514 /* Build the bit length tree: */ 1515 build_tree(&G2.bl_desc); 1516 /* opt_len now includes the length of the tree representations, except 1517 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. 1518 */ 1519 1520 /* Determine the number of bit length codes to send. The pkzip format 1521 * requires that at least 4 bit length codes be sent. (appnote.txt says 1522 * 3 but the actual value used is 4.) 1523 */ 1524 for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) { 1525 if (G2.bl_tree[bl_order[max_blindex]].Len != 0) 1526 break; 1527 } 1528 /* Update opt_len to include the bit length tree and counts */ 1529 G2.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4; 1530 Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", (long)G2.opt_len, (long)G2.static_len)); 1531 1532 return max_blindex; 1533} 1534 1535/* =========================================================================== 1536 * Send the header for a block using dynamic Huffman trees: the counts, the 1537 * lengths of the bit length codes, the literal tree and the distance tree. 1538 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. 
1539 */ 1540static void send_all_trees(int lcodes, int dcodes, int blcodes) 1541{ 1542 int rank; /* index in bl_order */ 1543 1544 Assert(lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); 1545 Assert(lcodes <= L_CODES && dcodes <= D_CODES 1546 && blcodes <= BL_CODES, "too many codes"); 1547 Tracev((stderr, "\nbl counts: ")); 1548 send_bits(lcodes - 257, 5); /* not +255 as stated in appnote.txt */ 1549 send_bits(dcodes - 1, 5); 1550 send_bits(blcodes - 4, 4); /* not -3 as stated in appnote.txt */ 1551 for (rank = 0; rank < blcodes; rank++) { 1552 Tracev((stderr, "\nbl code %2d ", bl_order[rank])); 1553 send_bits(G2.bl_tree[bl_order[rank]].Len, 3); 1554 } 1555 Tracev((stderr, "\nbl tree: sent %ld", (long)G1.bits_sent)); 1556 1557 send_tree((ct_data *) G2.dyn_ltree, lcodes - 1); /* send the literal tree */ 1558 Tracev((stderr, "\nlit tree: sent %ld", (long)G1.bits_sent)); 1559 1560 send_tree((ct_data *) G2.dyn_dtree, dcodes - 1); /* send the distance tree */ 1561 Tracev((stderr, "\ndist tree: sent %ld", (long)G1.bits_sent)); 1562} 1563 1564/* =========================================================================== 1565 * Save the match info and tally the frequency counts. Return true if 1566 * the current block must be flushed. 
1567 */ 1568static int ct_tally(int dist, int lc) 1569{ 1570 G1.l_buf[G2.last_lit++] = lc; 1571 if (dist == 0) { 1572 /* lc is the unmatched char */ 1573 G2.dyn_ltree[lc].Freq++; 1574 } else { 1575 /* Here, lc is the match length - MIN_MATCH */ 1576 dist--; /* dist = match distance - 1 */ 1577 Assert((ush) dist < (ush) MAX_DIST 1578 && (ush) lc <= (ush) (MAX_MATCH - MIN_MATCH) 1579 && (ush) D_CODE(dist) < (ush) D_CODES, "ct_tally: bad match" 1580 ); 1581 1582 G2.dyn_ltree[G2.length_code[lc] + LITERALS + 1].Freq++; 1583 G2.dyn_dtree[D_CODE(dist)].Freq++; 1584 1585 G1.d_buf[G2.last_dist++] = dist; 1586 G2.flags |= G2.flag_bit; 1587 } 1588 G2.flag_bit <<= 1; 1589 1590 /* Output the flags if they fill a byte: */ 1591 if ((G2.last_lit & 7) == 0) { 1592 G2.flag_buf[G2.last_flags++] = G2.flags; 1593 G2.flags = 0; 1594 G2.flag_bit = 1; 1595 } 1596 /* Try to guess if it is profitable to stop the current block here */ 1597 if ((G2.last_lit & 0xfff) == 0) { 1598 /* Compute an upper bound for the compressed length */ 1599 ulg out_length = G2.last_lit * 8L; 1600 ulg in_length = (ulg) G1.strstart - G1.block_start; 1601 int dcode; 1602 1603 for (dcode = 0; dcode < D_CODES; dcode++) { 1604 out_length += G2.dyn_dtree[dcode].Freq * (5L + extra_dbits[dcode]); 1605 } 1606 out_length >>= 3; 1607 Trace((stderr, 1608 "\nlast_lit %u, last_dist %u, in %ld, out ~%ld(%ld%%) ", 1609 G2.last_lit, G2.last_dist, 1610 (long)in_length, (long)out_length, 1611 100L - out_length * 100L / in_length)); 1612 if (G2.last_dist < G2.last_lit / 2 && out_length < in_length / 2) 1613 return 1; 1614 } 1615 return (G2.last_lit == LIT_BUFSIZE - 1 || G2.last_dist == DIST_BUFSIZE); 1616 /* We avoid equality with LIT_BUFSIZE because of wraparound at 64K 1617 * on 16 bit machines and because stored blocks are restricted to 1618 * 64K-1 bytes. 
1619 */ 1620} 1621 1622/* =========================================================================== 1623 * Send the block data compressed using the given Huffman trees 1624 */ 1625static void compress_block(ct_data * ltree, ct_data * dtree) 1626{ 1627 unsigned dist; /* distance of matched string */ 1628 int lc; /* match length or unmatched char (if dist == 0) */ 1629 unsigned lx = 0; /* running index in l_buf */ 1630 unsigned dx = 0; /* running index in d_buf */ 1631 unsigned fx = 0; /* running index in flag_buf */ 1632 uch flag = 0; /* current flags */ 1633 unsigned code; /* the code to send */ 1634 int extra; /* number of extra bits to send */ 1635 1636 if (G2.last_lit != 0) do { 1637 if ((lx & 7) == 0) 1638 flag = G2.flag_buf[fx++]; 1639 lc = G1.l_buf[lx++]; 1640 if ((flag & 1) == 0) { 1641 SEND_CODE(lc, ltree); /* send a literal byte */ 1642 Tracecv(lc > ' ', (stderr, " '%c' ", lc)); 1643 } else { 1644 /* Here, lc is the match length - MIN_MATCH */ 1645 code = G2.length_code[lc]; 1646 SEND_CODE(code + LITERALS + 1, ltree); /* send the length code */ 1647 extra = extra_lbits[code]; 1648 if (extra != 0) { 1649 lc -= G2.base_length[code]; 1650 send_bits(lc, extra); /* send the extra length bits */ 1651 } 1652 dist = G1.d_buf[dx++]; 1653 /* Here, dist is the match distance - 1 */ 1654 code = D_CODE(dist); 1655 Assert(code < D_CODES, "bad d_code"); 1656 1657 SEND_CODE(code, dtree); /* send the distance code */ 1658 extra = extra_dbits[code]; 1659 if (extra != 0) { 1660 dist -= G2.base_dist[code]; 1661 send_bits(dist, extra); /* send the extra distance bits */ 1662 } 1663 } /* literal or match pair ? */ 1664 flag >>= 1; 1665 } while (lx < G2.last_lit); 1666 1667 SEND_CODE(END_BLOCK, ltree); 1668} 1669 1670/* =========================================================================== 1671 * Determine the best encoding for the current block: dynamic trees, static 1672 * trees or store, and output the encoded block to the zip file. 
This function
 * returns the total compressed length for the file so far.
 */
static void flush_block(char *buf, ulg stored_len, int eof)
{
	ulg opt_lenb, static_lenb;	/* opt_len and static_len in bytes */
	int max_blindex;	/* index of last bit length code of non zero freq */

	G2.flag_buf[G2.last_flags] = G2.flags;	/* Save the flags for the last 8 items */

	/* Construct the literal and distance trees */
	build_tree(&G2.l_desc);
	Tracev((stderr, "\nlit data: dyn %ld, stat %ld", (long)G2.opt_len, (long)G2.static_len));

	build_tree(&G2.d_desc);
	Tracev((stderr, "\ndist data: dyn %ld, stat %ld", (long)G2.opt_len, (long)G2.static_len));
	/* At this point, opt_len and static_len are the total bit lengths of
	 * the compressed block data, excluding the tree representations.
	 */

	/* Build the bit length tree for the above two trees, and get the index
	 * in bl_order of the last bit length code to send.
	 */
	max_blindex = build_bl_tree();

	/* Determine the best encoding. Compute first the block length in bytes
	 * (+3 for the block type bits, +7 to round up to a byte boundary) */
	opt_lenb = (G2.opt_len + 3 + 7) >> 3;
	static_lenb = (G2.static_len + 3 + 7) >> 3;

	Trace((stderr,
			"\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u dist %u ",
			(unsigned long)opt_lenb, (unsigned long)G2.opt_len,
			(unsigned long)static_lenb, (unsigned long)G2.static_len,
			(unsigned long)stored_len,
			G2.last_lit, G2.last_dist));

	if (static_lenb <= opt_lenb)
		opt_lenb = static_lenb;

	/* If compression failed and this is the first and last block,
	 * and if the zip file can be seeked (to rewrite the local header),
	 * the whole file is transformed into a stored file:
	 */
// seekable() is constant FALSE in busybox, and G2.compressed_len is disabled
// (this was the only user)
//	if (stored_len <= opt_lenb && eof && G2.compressed_len == 0L && seekable()) {
//		/* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
//		if (buf == NULL)
//			bb_error_msg("block vanished");
//
//		G2.compressed_len = stored_len << 3;
//		copy_block(buf, (unsigned) stored_len, 0);	/* without header */
//	} else
	if (stored_len + 4 <= opt_lenb && buf != NULL) {
		/* 4: two words for the lengths */
		/* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
		 * Otherwise we can't have processed more than WSIZE input bytes since
		 * the last block flush, because compression would have been
		 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
		 * transform a block into a stored block.
		 */
		send_bits((STORED_BLOCK << 1) + eof, 3);	/* send block type */
//		G2.compressed_len = ((G2.compressed_len + 3 + 7) & ~7L)
//				+ ((stored_len + 4) << 3);
		copy_block(buf, (unsigned) stored_len, 1);	/* with header */
	} else
	if (static_lenb == opt_lenb) {
		send_bits((STATIC_TREES << 1) + eof, 3);
		compress_block((ct_data *) G2.static_ltree, (ct_data *) G2.static_dtree);
//		G2.compressed_len += 3 + G2.static_len;
	} else {
		send_bits((DYN_TREES << 1) + eof, 3);
		send_all_trees(G2.l_desc.max_code + 1, G2.d_desc.max_code + 1,
				max_blindex + 1);
		compress_block((ct_data *) G2.dyn_ltree, (ct_data *) G2.dyn_dtree);
//		G2.compressed_len += 3 + G2.opt_len;
	}
//	Assert(G2.compressed_len == G1.bits_sent, "bad compressed size");
	init_block();

	if (eof) {
		bi_windup();
//		G2.compressed_len += 7;	/* align on byte boundary */
	}
//	Tracev((stderr, "\ncomprlen %lu(%lu) ",
//			(unsigned long)G2.compressed_len >> 3,
//			(unsigned long)G2.compressed_len - 7 * eof));

	return;	/* was "return G2.compressed_len >> 3;" */
}

/* ===========================================================================
 * Update a hash value with the given input byte
 * IN assertion: all calls to UPDATE_HASH are made with consecutive
 * input characters, so that a running hash key can be computed from the
 * previous key instead of complete recalculation each time.
 */
#define UPDATE_HASH(h, c) (h = (((h)<<H_SHIFT) ^ (c)) & HASH_MASK)

/* ===========================================================================
 * Same as above, but achieves better compression. We use a lazy
 * evaluation for matches: a match is finally adopted only if there is
 * no better match at the next window position.
 *
 * Processes a new input file and return its compressed length. Sets
Sets 1777 * the compressed length, crc, deflate flags and internal file 1778 * attributes. 1779 */ 1780 1781/* Flush the current block, with given end-of-file flag. 1782 * IN assertion: strstart is set to the end of the current match. */ 1783#define FLUSH_BLOCK(eof) \ 1784 flush_block( \ 1785 G1.block_start >= 0L \ 1786 ? (char*)&G1.window[(unsigned)G1.block_start] \ 1787 : (char*)NULL, \ 1788 (ulg)G1.strstart - G1.block_start, \ 1789 (eof) \ 1790 ) 1791 1792/* Insert string s in the dictionary and set match_head to the previous head 1793 * of the hash chain (the most recent string with same hash key). Return 1794 * the previous length of the hash chain. 1795 * IN assertion: all calls to INSERT_STRING are made with consecutive 1796 * input characters and the first MIN_MATCH bytes of s are valid 1797 * (except for the last MIN_MATCH-1 bytes of the input file). */ 1798#define INSERT_STRING(s, match_head) \ 1799do { \ 1800 UPDATE_HASH(G1.ins_h, G1.window[(s) + MIN_MATCH-1]); \ 1801 G1.prev[(s) & WMASK] = match_head = head[G1.ins_h]; \ 1802 head[G1.ins_h] = (s); \ 1803} while (0) 1804 1805static NOINLINE void deflate(void) 1806{ 1807 IPos hash_head; /* head of hash chain */ 1808 IPos prev_match; /* previous match */ 1809 int flush; /* set if current block must be flushed */ 1810 int match_available = 0; /* set if previous match exists */ 1811 unsigned match_length = MIN_MATCH - 1; /* length of best match */ 1812 1813 /* Process the input block. */ 1814 while (G1.lookahead != 0) { 1815 /* Insert the string window[strstart .. strstart+2] in the 1816 * dictionary, and set hash_head to the head of the hash chain: 1817 */ 1818 INSERT_STRING(G1.strstart, hash_head); 1819 1820 /* Find the longest match, discarding those <= prev_length. 
1821 */ 1822 G1.prev_length = match_length; 1823 prev_match = G1.match_start; 1824 match_length = MIN_MATCH - 1; 1825 1826 if (hash_head != 0 && G1.prev_length < max_lazy_match 1827 && G1.strstart - hash_head <= MAX_DIST 1828 ) { 1829 /* To simplify the code, we prevent matches with the string 1830 * of window index 0 (in particular we have to avoid a match 1831 * of the string with itself at the start of the input file). 1832 */ 1833 match_length = longest_match(hash_head); 1834 /* longest_match() sets match_start */ 1835 if (match_length > G1.lookahead) 1836 match_length = G1.lookahead; 1837 1838 /* Ignore a length 3 match if it is too distant: */ 1839 if (match_length == MIN_MATCH && G1.strstart - G1.match_start > TOO_FAR) { 1840 /* If prev_match is also MIN_MATCH, G1.match_start is garbage 1841 * but we will ignore the current match anyway. 1842 */ 1843 match_length--; 1844 } 1845 } 1846 /* If there was a match at the previous step and the current 1847 * match is not better, output the previous match: 1848 */ 1849 if (G1.prev_length >= MIN_MATCH && match_length <= G1.prev_length) { 1850 check_match(G1.strstart - 1, prev_match, G1.prev_length); 1851 flush = ct_tally(G1.strstart - 1 - prev_match, G1.prev_length - MIN_MATCH); 1852 1853 /* Insert in hash table all strings up to the end of the match. 1854 * strstart-1 and strstart are already inserted. 1855 */ 1856 G1.lookahead -= G1.prev_length - 1; 1857 G1.prev_length -= 2; 1858 do { 1859 G1.strstart++; 1860 INSERT_STRING(G1.strstart, hash_head); 1861 /* strstart never exceeds WSIZE-MAX_MATCH, so there are 1862 * always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH 1863 * these bytes are garbage, but it does not matter since the 1864 * next lookahead bytes will always be emitted as literals. 
1865 */ 1866 } while (--G1.prev_length != 0); 1867 match_available = 0; 1868 match_length = MIN_MATCH - 1; 1869 G1.strstart++; 1870 if (flush) { 1871 FLUSH_BLOCK(0); 1872 G1.block_start = G1.strstart; 1873 } 1874 } else if (match_available) { 1875 /* If there was no match at the previous position, output a 1876 * single literal. If there was a match but the current match 1877 * is longer, truncate the previous match to a single literal. 1878 */ 1879 Tracevv((stderr, "%c", G1.window[G1.strstart - 1])); 1880 if (ct_tally(0, G1.window[G1.strstart - 1])) { 1881 FLUSH_BLOCK(0); 1882 G1.block_start = G1.strstart; 1883 } 1884 G1.strstart++; 1885 G1.lookahead--; 1886 } else { 1887 /* There is no previous match to compare with, wait for 1888 * the next step to decide. 1889 */ 1890 match_available = 1; 1891 G1.strstart++; 1892 G1.lookahead--; 1893 } 1894 Assert(G1.strstart <= G1.isize && G1.lookahead <= G1.isize, "a bit too far"); 1895 1896 /* Make sure that we always have enough lookahead, except 1897 * at the end of the input file. We need MAX_MATCH bytes 1898 * for the next match, plus MIN_MATCH bytes to insert the 1899 * string following the next match. 1900 */ 1901 fill_window_if_needed(); 1902 } 1903 if (match_available) 1904 ct_tally(0, G1.window[G1.strstart - 1]); 1905 1906 FLUSH_BLOCK(1); /* eof */ 1907} 1908 1909/* =========================================================================== 1910 * Initialize the bit string routines. 1911 */ 1912static void bi_init(void) 1913{ 1914 //G1.bi_buf = 0; // globals are zeroed in pack_gzip() 1915 //G1.bi_valid = 0; // globals are zeroed in pack_gzip() 1916 //DEBUG_bits_sent(= 0L); // globals are zeroed in pack_gzip() 1917} 1918 1919/* =========================================================================== 1920 * Initialize the "longest match" routines for a new file 1921 */ 1922static void lm_init(unsigned *flags16p) 1923{ 1924 unsigned j; 1925 1926 /* Initialize the hash table. 
*/ 1927 memset(head, 0, HASH_SIZE * sizeof(*head)); 1928 /* prev will be initialized on the fly */ 1929 1930 /* speed options for the general purpose bit flag */ 1931 *flags16p |= 2; /* FAST 4, SLOW 2 */ 1932 /* ??? reduce max_chain_length for binary files */ 1933 1934 //G1.strstart = 0; // globals are zeroed in pack_gzip() 1935 //G1.block_start = 0L; // globals are zeroed in pack_gzip() 1936 1937 G1.lookahead = file_read(G1.window, 1938 sizeof(int) <= 2 ? (unsigned) WSIZE : 2 * WSIZE); 1939 1940 if (G1.lookahead == 0 || G1.lookahead == (unsigned) -1) { 1941 G1.eofile = 1; 1942 G1.lookahead = 0; 1943 return; 1944 } 1945 //G1.eofile = 0; // globals are zeroed in pack_gzip() 1946 1947 /* Make sure that we always have enough lookahead. This is important 1948 * if input comes from a device such as a tty. 1949 */ 1950 fill_window_if_needed(); 1951 1952 //G1.ins_h = 0; // globals are zeroed in pack_gzip() 1953 for (j = 0; j < MIN_MATCH - 1; j++) 1954 UPDATE_HASH(G1.ins_h, G1.window[j]); 1955 /* If lookahead < MIN_MATCH, ins_h is garbage, but this is 1956 * not important since only literal bytes will be emitted. 1957 */ 1958} 1959 1960/* =========================================================================== 1961 * Allocate the match buffer, initialize the various tables and save the 1962 * location of the internal file attribute (ascii/binary) and method 1963 * (DEFLATE/STORE). 
 * One callsite in zip()
 */
static void ct_init(void)
{
	int n;      /* iterates over tree elements */
	int length; /* length value */
	int code;   /* code value */
	int dist;   /* distance index */

//	//G2.compressed_len = 0L; // globals are zeroed in pack_gzip()

#ifdef NOT_NEEDED
	if (G2.static_dtree[0].Len != 0)
		return; /* ct_init already called */
#endif

	/* Initialize the mapping length (0..255) -> length code (0..28) */
	length = 0;
	for (code = 0; code < LENGTH_CODES - 1; code++) {
		G2.base_length[code] = length;
		for (n = 0; n < (1 << extra_lbits[code]); n++) {
			G2.length_code[length++] = code;
		}
	}
	Assert(length == 256, "ct_init: length != 256");
	/* Note that the length 255 (match length 258) can be represented
	 * in two different ways: code 284 + 5 bits or code 285, so we
	 * overwrite length_code[255] to use the best encoding:
	 */
	G2.length_code[length - 1] = code;

	/* Initialize the mapping dist (0..32K) -> dist code (0..29) */
	dist = 0;
	for (code = 0; code < 16; code++) {
		G2.base_dist[code] = dist;
		for (n = 0; n < (1 << extra_dbits[code]); n++) {
			G2.dist_code[dist++] = code;
		}
	}
	Assert(dist == 256, "ct_init: dist != 256");
	dist >>= 7; /* from now on, all distances are divided by 128 */
	/* The second half of dist_code[] (entries 256..511) covers the
	 * larger distances in 128-byte steps (hence the >> 7 above).
	 */
	for (; code < D_CODES; code++) {
		G2.base_dist[code] = dist << 7;
		for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) {
			G2.dist_code[256 + dist++] = code;
		}
	}
	Assert(dist == 256, "ct_init: 256+dist != 512");

	/* Construct the codes of the static literal tree */
	//for (n = 0; n <= MAX_BITS; n++) // globals are zeroed in pack_gzip()
	//	G2.bl_count[n] = 0;

	n = 0;
	while (n <= 143) {
		G2.static_ltree[n++].Len = 8;
		//G2.bl_count[8]++;
	}
	//G2.bl_count[8] = 143 + 1;
	while (n <= 255) {
		G2.static_ltree[n++].Len = 9;
		//G2.bl_count[9]++;
	}
	//G2.bl_count[9] = 255 - 143;
	while (n <= 279) {
		G2.static_ltree[n++].Len = 7;
		//G2.bl_count[7]++;
	}
	//G2.bl_count[7] = 279 - 255;
	while (n <= 287) {
		G2.static_ltree[n++].Len = 8;
		//G2.bl_count[8]++;
	}
	//G2.bl_count[8] += 287 - 279;
	/* Set the per-length code counts in one go instead of incrementing
	 * them inside the loops above (see the commented-out ++ lines):
	 */
	G2.bl_count[7] = 279 - 255;
	G2.bl_count[8] = (143 + 1) + (287 - 279);
	G2.bl_count[9] = 255 - 143;
	/* Codes 286 and 287 do not exist, but we must include them in the
	 * tree construction to get a canonical Huffman tree (longest code
	 * all ones)
	 */
	gen_codes((ct_data *) G2.static_ltree, L_CODES + 1);

	/* The static distance tree is trivial: */
	for (n = 0; n < D_CODES; n++) {
		G2.static_dtree[n].Len = 5;
		G2.static_dtree[n].Code = bi_reverse(n, 5);
	}

	/* Initialize the first block of the first file: */
	init_block();
}

/* ===========================================================================
 * Deflate in to out.
 * IN assertions: the input and output buffers are cleared.
2060 */ 2061static void zip(void) 2062{ 2063 unsigned deflate_flags; 2064 2065 //G1.outcnt = 0; // globals are zeroed in pack_gzip() 2066 2067 /* Write the header to the gzip file. See algorithm.doc for the format */ 2068 /* magic header for gzip files: 1F 8B */ 2069 /* compression method: 8 (DEFLATED) */ 2070 /* general flags: 0 */ 2071 put_32bit(0x00088b1f); 2072 put_32bit(0); /* Unix timestamp */ 2073 2074 /* Write deflated file to zip file */ 2075 G1.crc = ~0; 2076 2077 bi_init(); 2078 ct_init(); 2079 deflate_flags = 0; /* pkzip -es, -en or -ex equivalent */ 2080 lm_init(&deflate_flags); 2081 2082 put_16bit(deflate_flags | 0x300); /* extra flags. OS id = 3 (Unix) */ 2083 2084 /* The above 32-bit misaligns outbuf (10 bytes are stored), flush it */ 2085 flush_outbuf_if_32bit_optimized(); 2086 2087 deflate(); 2088 2089 /* Write the crc and uncompressed size */ 2090 put_32bit(~G1.crc); 2091 put_32bit(G1.isize); 2092 2093 flush_outbuf(); 2094} 2095 2096/* ======================================================================== */ 2097static 2098IF_DESKTOP(long long) int FAST_FUNC pack_gzip(transformer_state_t *xstate UNUSED_PARAM) 2099{ 2100 /* Reinit G1.xxx except pointers to allocated buffers, and entire G2 */ 2101 memset(&G1.crc, 0, (sizeof(G1) - offsetof(struct globals, crc)) + sizeof(G2)); 2102 2103 /* Clear input and output buffers */ 2104 //G1.outcnt = 0; 2105#ifdef DEBUG 2106 //G1.insize = 0; 2107#endif 2108 //G1.isize = 0; 2109 2110 /* Reinit G2.xxx */ 2111 G2.l_desc.dyn_tree = G2.dyn_ltree; 2112 G2.l_desc.static_tree = G2.static_ltree; 2113 G2.l_desc.extra_bits = extra_lbits; 2114 G2.l_desc.extra_base = LITERALS + 1; 2115 G2.l_desc.elems = L_CODES; 2116 G2.l_desc.max_length = MAX_BITS; 2117 //G2.l_desc.max_code = 0; 2118 G2.d_desc.dyn_tree = G2.dyn_dtree; 2119 G2.d_desc.static_tree = G2.static_dtree; 2120 G2.d_desc.extra_bits = extra_dbits; 2121 //G2.d_desc.extra_base = 0; 2122 G2.d_desc.elems = D_CODES; 2123 G2.d_desc.max_length = MAX_BITS; 2124 
//G2.d_desc.max_code = 0; 2125 G2.bl_desc.dyn_tree = G2.bl_tree; 2126 //G2.bl_desc.static_tree = NULL; 2127 G2.bl_desc.extra_bits = extra_blbits, 2128 //G2.bl_desc.extra_base = 0; 2129 G2.bl_desc.elems = BL_CODES; 2130 G2.bl_desc.max_length = MAX_BL_BITS; 2131 //G2.bl_desc.max_code = 0; 2132 2133#if 0 2134 /* Saving of timestamp is disabled. Why? 2135 * - it is not Y2038-safe. 2136 * - some people want deterministic results 2137 * (normally they'd use -n, but our -n is a nop). 2138 * - it's bloat. 2139 * Per RFC 1952, gzfile.time=0 is "no timestamp". 2140 * If users will demand this to be reinstated, 2141 * implement -n "don't save timestamp". 2142 */ 2143 struct stat s; 2144 s.st_ctime = 0; 2145 fstat(STDIN_FILENO, &s); 2146 zip(s.st_ctime); 2147#else 2148 zip(); 2149#endif 2150 return 0; 2151} 2152 2153#if ENABLE_FEATURE_GZIP_LONG_OPTIONS 2154static const char gzip_longopts[] ALIGN1 = 2155 "stdout\0" No_argument "c" 2156 "to-stdout\0" No_argument "c" 2157 "force\0" No_argument "f" 2158 "verbose\0" No_argument "v" 2159#if ENABLE_FEATURE_GZIP_DECOMPRESS 2160 "decompress\0" No_argument "d" 2161 "uncompress\0" No_argument "d" 2162 "test\0" No_argument "t" 2163#endif 2164 "quiet\0" No_argument "q" 2165 "fast\0" No_argument "1" 2166 "best\0" No_argument "9" 2167 "no-name\0" No_argument "n" 2168 ; 2169#endif 2170 2171/* 2172 * Linux kernel build uses gzip -d -n. We accept and ignore -n. 2173 * Man page says: 2174 * -n --no-name 2175 * gzip: do not save the original file name and time stamp. 2176 * (The original name is always saved if the name had to be truncated.) 2177 * gunzip: do not restore the original file name/time even if present 2178 * (remove only the gzip suffix from the compressed file name). 2179 * This option is the default when decompressing. 2180 * -N --name 2181 * gzip: always save the original file name and time stamp (this is the default) 2182 * gunzip: restore the original file name and time stamp if present. 
2183 */ 2184 2185int gzip_main(int argc, char **argv) MAIN_EXTERNALLY_VISIBLE; 2186#if ENABLE_FEATURE_GZIP_DECOMPRESS 2187int gzip_main(int argc, char **argv) 2188#else 2189int gzip_main(int argc UNUSED_PARAM, char **argv) 2190#endif 2191{ 2192 unsigned opt; 2193#if ENABLE_FEATURE_GZIP_LEVELS 2194 static const struct { 2195 uint8_t good; 2196 uint8_t chain_shift; 2197 uint8_t lazy2; 2198 uint8_t nice2; 2199 } gzip_level_config[6] = { 2200 {4, 4, 4/2, 16/2}, /* Level 4 */ 2201 {8, 5, 16/2, 32/2}, /* Level 5 */ 2202 {8, 7, 16/2, 128/2}, /* Level 6 */ 2203 {8, 8, 32/2, 128/2}, /* Level 7 */ 2204 {32, 10, 128/2, 258/2}, /* Level 8 */ 2205 {32, 12, 258/2, 258/2}, /* Level 9 */ 2206 }; 2207#endif 2208 2209 SET_PTR_TO_GLOBALS((char *)xzalloc(sizeof(struct globals)+sizeof(struct globals2)) 2210 + sizeof(struct globals)); 2211 2212 /* Must match bbunzip's constants OPT_STDOUT, OPT_FORCE! */ 2213#if ENABLE_FEATURE_GZIP_LONG_OPTIONS 2214 opt = getopt32long(argv, BBUNPK_OPTSTR IF_FEATURE_GZIP_DECOMPRESS("dt") "n123456789", gzip_longopts); 2215#else 2216 opt = getopt32(argv, BBUNPK_OPTSTR IF_FEATURE_GZIP_DECOMPRESS("dt") "n123456789"); 2217#endif 2218#if ENABLE_FEATURE_GZIP_DECOMPRESS /* gunzip_main may not be visible... */ 2219 if (opt & (BBUNPK_OPT_DECOMPRESS|BBUNPK_OPT_TEST)) /* -d and/or -t */ 2220 return gunzip_main(argc, argv); 2221#endif 2222#if ENABLE_FEATURE_GZIP_LEVELS 2223 opt >>= (BBUNPK_OPTSTRLEN IF_FEATURE_GZIP_DECOMPRESS(+ 2) + 1); /* drop cfkvq[dt]n bits */ 2224 if (opt == 0) 2225 opt = 1 << 6; /* default: 6 */ 2226 opt = ffs(opt >> 4); /* Maps -1..-4 to [0], -5 to [1] ... 
-9 to [5] */ 2227 max_chain_length = 1 << gzip_level_config[opt].chain_shift; 2228 good_match = gzip_level_config[opt].good; 2229 max_lazy_match = gzip_level_config[opt].lazy2 * 2; 2230 nice_match = gzip_level_config[opt].nice2 * 2; 2231#endif 2232 option_mask32 &= BBUNPK_OPTSTRMASK; /* retain only -cfkvq */ 2233 2234 /* Allocate all global buffers (for DYN_ALLOC option) */ 2235 ALLOC(uch, G1.l_buf, INBUFSIZ); 2236 ALLOC(uch, G1.outbuf, OUTBUFSIZ); 2237 ALLOC(ush, G1.d_buf, DIST_BUFSIZE); 2238 ALLOC(uch, G1.window, 2L * WSIZE); 2239 ALLOC(ush, G1.prev, 1L << BITS); 2240 2241 /* Initialize the CRC32 table */ 2242 global_crc32_new_table_le(); 2243 2244 argv += optind; 2245 return bbunpack(argv, pack_gzip, append_ext, "gz"); 2246} 2247