/* vi: set sw=4 ts=4: */
/*
 * Gzip implementation for busybox
 *
 * Based on GNU gzip Copyright (C) 1992-1993 Jean-loup Gailly.
 *
 * Originally adjusted for busybox by Charles P. Wright <cpw@unix.asb.com>
 * "this is a stripped down version of gzip I put into busybox, it does
 * only standard in to standard out with -9 compression. It also requires
 * the zcat module for some important functions."
 *
 * Adjusted further by Erik Andersen <andersen@codepoet.org> to support
 * files as well as stdin/stdout, and to generally behave itself wrt
 * command line handling.
 *
 * Licensed under GPLv2 or later, see file LICENSE in this source tree.
 */
/* TODO: full support for -v for DESKTOP
 * "/usr/bin/gzip -v a bogus aa" should say:
a: 85.1% -- replaced with a.gz
gzip: bogus: No such file or directory
aa: 85.1% -- replaced with aa.gz
*/
//config:config GZIP
//config:	bool "gzip (17 kb)"
//config:	default y
//config:	help
//config:	gzip is used to compress files.
//config:	It's probably the most widely used UNIX compression program.
//config:
//config:config FEATURE_GZIP_LONG_OPTIONS
//config:	bool "Enable long options"
//config:	default y
//config:	depends on GZIP && LONG_OPTS
//config:
//config:config GZIP_FAST
//config:	int "Trade memory for speed (0:small,slow - 2:fast,big)"
//config:	default 0
//config:	range 0 2
//config:	depends on GZIP
//config:	help
//config:	Enable big memory options for gzip.
//config:	0: small buffers, small hash-tables
//config:	1: larger buffers, larger hash-tables
//config:	2: larger buffers, largest hash-tables
//config:	Larger models may give slightly better compression
//config:
//config:config FEATURE_GZIP_LEVELS
//config:	bool "Enable compression levels"
//config:	default n
//config:	depends on GZIP
//config:	help
//config:	Enable support for compression levels 4-9. The default level
//config:	is 6. If levels 1-3 are specified, 4 is used.
//config:	If this option is not selected, -N options are ignored and -6
//config:	is used.
//config:
//config:config FEATURE_GZIP_DECOMPRESS
//config:	bool "Enable decompression"
//config:	default y
//config:	depends on GZIP || GUNZIP || ZCAT
//config:	help
//config:	Enable -d (--decompress) and -t (--test) options for gzip.
//config:	This will be automatically selected if gunzip or zcat is
//config:	enabled.

//applet:IF_GZIP(APPLET(gzip, BB_DIR_BIN, BB_SUID_DROP))

//kbuild:lib-$(CONFIG_GZIP) += gzip.o

//usage:#define gzip_trivial_usage
//usage:       "[-cfk" IF_FEATURE_GZIP_DECOMPRESS("dt") IF_FEATURE_GZIP_LEVELS("123456789") "] [FILE]..."
//usage:#define gzip_full_usage "\n\n"
//usage:       "Compress FILEs (or stdin)\n"
//usage:	IF_FEATURE_GZIP_LEVELS(
//usage:     "\n	-1..9	Compression level"
//usage:	)
//usage:	IF_FEATURE_GZIP_DECOMPRESS(
//usage:     "\n	-d	Decompress"
//usage:	)
//usage:     "\n	-c	Write to stdout"
//usage:     "\n	-f	Force"
//usage:     "\n	-k	Keep input files"
//usage:	IF_FEATURE_GZIP_DECOMPRESS(
//usage:     "\n	-t	Test integrity"
//usage:	)
//usage:
//usage:#define gzip_example_usage
//usage:       "$ ls -la /tmp/busybox*\n"
//usage:       "-rw-rw-r--    1 andersen andersen  1761280 Apr 14 17:47 /tmp/busybox.tar\n"
//usage:       "$ gzip /tmp/busybox.tar\n"
//usage:       "$ ls -la /tmp/busybox*\n"
//usage:       "-rw-rw-r--    1 andersen andersen   554058 Apr 14 17:49 /tmp/busybox.tar.gz\n"

#include "libbb.h"
#include "bb_archive.h"

/* ===========================================================================
 */
//#define DEBUG 1
/* Diagnostic functions */
#ifdef DEBUG
static int verbose;
# define Assert(cond,msg) { if (!(cond)) bb_simple_error_msg(msg); }
# define Trace(x) fprintf x
# define Tracev(x) {if (verbose) fprintf x; }
# define Tracevv(x) {if (verbose > 1) fprintf x; }
# define Tracec(c,x) {if (verbose && (c)) fprintf x; }
# define Tracecv(c,x) {if (verbose > 1 && (c)) fprintf x; }
#else
# define Assert(cond,msg)
# define Trace(x)
# define Tracev(x)
# define Tracevv(x)
# define Tracec(c,x)
# define Tracecv(c,x)
#endif

/* ===========================================================================
 */
#if CONFIG_GZIP_FAST == 0
# define SMALL_MEM
#elif CONFIG_GZIP_FAST == 1
# define MEDIUM_MEM
#elif CONFIG_GZIP_FAST == 2
# define BIG_MEM
#else
# error "Invalid CONFIG_GZIP_FAST value"
#endif

#ifndef INBUFSIZ
# ifdef SMALL_MEM
#  define INBUFSIZ  0x2000	/* input buffer size */
# else
#  define INBUFSIZ  0x8000	/* input buffer size */
# endif
#endif

#ifndef OUTBUFSIZ
# ifdef SMALL_MEM
#  define OUTBUFSIZ   8192	/* output buffer size */
# else
#  define OUTBUFSIZ  16384	/* output buffer size */
# endif
#endif

#ifndef DIST_BUFSIZE
# ifdef SMALL_MEM
#  define DIST_BUFSIZE 0x2000	/* buffer for distances, see trees.c */
# else
#  define DIST_BUFSIZE 0x8000	/* buffer for distances, see trees.c */
# endif
#endif

/* gzip flag byte */
#define ASCII_FLAG   0x01	/* bit 0 set: file probably ascii text */
#define CONTINUATION 0x02	/* bit 1 set: continuation of multi-part gzip file */
#define EXTRA_FIELD  0x04	/* bit 2 set: extra field present */
#define ORIG_NAME    0x08	/* bit 3 set: original file name present */
#define COMMENT      0x10	/* bit 4 set: file comment present */
#define RESERVED     0xC0	/* bit 6,7: reserved */

/* internal file attribute */
#define UNKNOWN 0xffff
#define BINARY  0
#define ASCII   1

#ifndef WSIZE
# define WSIZE 0x8000	/* window size--must be a power of two, and */
#endif			/* at least 32K for zip's deflate method */

#define MIN_MATCH  3
#define MAX_MATCH  258
/* The minimum and maximum match lengths */

#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)
/* Minimum amount of lookahead, except at the end of the input file.
 * See deflate.c for comments about the MIN_MATCH+1.
 */

#define MAX_DIST  (WSIZE-MIN_LOOKAHEAD)
/* In order to simplify the code, particularly on 16 bit machines, match
 * distances are limited to MAX_DIST instead of WSIZE.
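 * (With the sizes used here, WSIZE = 0x8000 = 32768 and
 * MIN_LOOKAHEAD = 258+3+1 = 262, so MAX_DIST evaluates to 32506 bytes.)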
 */

#ifndef MAX_PATH_LEN
# define MAX_PATH_LEN 1024	/* max pathname length */
#endif

#define seekable()    0	/* force sequential output */
#define translate_eol 0	/* no option -a yet */

#ifndef BITS
# define BITS 16
#endif
#define INIT_BITS 9	/* Initial number of bits per code */

#define BIT_MASK 0x1f	/* Mask for 'number of compression bits' */
/* Mask 0x20 is reserved to mean a fourth header byte, and 0x40 is free.
 * It's a pity that old uncompress does not check bit 0x20. That makes
 * extension of the format actually undesirable because old compress
 * would just crash on the new format instead of giving a meaningful
 * error message. It does check the number of bits, but it's more
 * helpful to say "unsupported format, get a new version" than
 * "can only handle 16 bits".
 */

#ifdef MAX_EXT_CHARS
# define MAX_SUFFIX MAX_EXT_CHARS
#else
# define MAX_SUFFIX 30
#endif

/* ===========================================================================
 * Compile with MEDIUM_MEM to reduce the memory requirements or
 * with SMALL_MEM to use as little memory as possible. Use BIG_MEM if the
 * entire input file can be held in memory (not possible on 16 bit systems).
 * Warning: defining these symbols affects HASH_BITS (see below) and thus
 * affects the compression ratio. The compressed output
 * is still correct, and might even be smaller in some cases.
 */
#ifdef SMALL_MEM
# define HASH_BITS 13	/* Number of bits used to hash strings */
#endif
#ifdef MEDIUM_MEM
# define HASH_BITS 14
#endif
#ifndef HASH_BITS
# define HASH_BITS 15
   /* For portability to 16 bit machines, do not use values above 15. */
#endif

#define HASH_SIZE (unsigned)(1<<HASH_BITS)
#define HASH_MASK (HASH_SIZE-1)
#define WMASK     (WSIZE-1)
/* HASH_SIZE and WSIZE must be powers of two */
#ifndef TOO_FAR
# define TOO_FAR 4096
#endif
/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */

/* ===========================================================================
 * These types are not really 'char', 'short' and 'long'
 */
typedef uint8_t uch;
typedef uint16_t ush;
typedef uint32_t ulg;
typedef int32_t lng;

typedef ush Pos;
typedef unsigned IPos;
/* A Pos is an index in the character window. We use short instead of int to
 * save space in the various tables. IPos is used only for parameter passing.
 */

enum {
	WINDOW_SIZE = 2 * WSIZE,
/* window size, 2*WSIZE except for MMAP or BIG_MEM, where it is the
 * input file length plus MIN_LOOKAHEAD.
 */

#if !ENABLE_FEATURE_GZIP_LEVELS

	comp_level_minus4 = 6 - 4,
	max_chain_length = 128,
/* To speed up deflation, hash chains are never searched beyond this length.
 * A higher limit improves compression ratio but degrades the speed.
 */

	max_lazy_match = 16,
/* Attempt to find a better match only when the current match is strictly
 * smaller than this value. This mechanism is used only for compression
 * levels >= 4.
 */

	max_insert_length = max_lazy_match,
/* Insert new strings in the hash table only if the match length
 * is not greater than this length. This saves time but degrades compression.
 * max_insert_length is used only for compression levels <= 3.
 */

	good_match = 8,
/* Use a faster search when the previous match is longer than this */

/* Values for max_lazy_match, good_match and max_chain_length, depending on
 * the desired pack level (0..9). The values given below have been tuned to
 * exclude worst case performance for pathological files. Better values may be
 * found for specific files.
 */

	nice_match = 128,	/* Stop searching when current match exceeds this */
/* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4
 * For deflate_fast() (levels <= 3) good is ignored and lazy has a different
 * meaning.
 */
#endif /* ENABLE_FEATURE_GZIP_LEVELS */
};

struct globals {
/* =========================================================================== */
/* global buffers, allocated once */

#define DECLARE(type, array, size) \
	type * array
#define ALLOC(type, array, size) \
	array = xzalloc((size_t)(((size)+1L)/2) * 2*sizeof(type))
#define FREE(array) \
	do { free(array); array = NULL; } while (0)

	/* buffer for literals or lengths */
	/* DECLARE(uch, l_buf, LIT_BUFSIZE); */
	DECLARE(uch, l_buf, INBUFSIZ);

	DECLARE(ush, d_buf, DIST_BUFSIZE);
	DECLARE(uch, outbuf, OUTBUFSIZ);

/* Sliding window. Input bytes are read into the second half of the window,
 * and move to the first half later to keep a dictionary of at least WSIZE
 * bytes. With this organization, matches are limited to a distance of
 * WSIZE-MAX_MATCH bytes, but this ensures that IO is always
 * performed with a length multiple of the block size. Also, it limits
 * the window size to 64K, which is quite useful on MSDOS.
 * To do: limit the window size to WSIZE+BSZ if SMALL_MEM (the code would
 * be less efficient).
 */
	DECLARE(uch, window, 2L * WSIZE);

/* Link to older string with same hash index. To limit the size of this
 * array to 64K, this link is maintained only for the last 32K strings.
 * An index in this array is thus a window index modulo 32K.
 */
	/* DECLARE(Pos, prev, WSIZE); */
	DECLARE(ush, prev, 1L << BITS);

/* Heads of the hash chains or 0. */
	/* DECLARE(Pos, head, 1<<HASH_BITS); */
#define head (G1.prev + WSIZE) /* hash head (see deflate.c) */

#if ENABLE_FEATURE_GZIP_LEVELS
	unsigned comp_level_minus4; /* can be a byte */
	unsigned max_chain_length;
	unsigned max_lazy_match;
	unsigned good_match;
	unsigned nice_match;
#define comp_level_minus4 (G1.comp_level_minus4)
#define max_chain_length  (G1.max_chain_length)
#define max_lazy_match    (G1.max_lazy_match)
#define good_match        (G1.good_match)
#define nice_match        (G1.nice_match)
#endif

/* =========================================================================== */
/* all members below are zeroed out in pack_gzip() for each next file */

	uint32_t crc;	/* shift register contents */
	/*uint32_t *crc_32_tab;*/

/* window position at the beginning of the current output block. Gets
 * negative when the window is moved backwards.
 */
	lng block_start;

	unsigned ins_h;	/* hash index of string to be inserted */

/* Number of bits by which ins_h and del_h must be shifted at each
 * input step. It must be such that after MIN_MATCH steps, the oldest
 * byte no longer takes part in the hash key, that is:
 * H_SHIFT * MIN_MATCH >= HASH_BITS
 */
#define H_SHIFT ((HASH_BITS+MIN_MATCH-1) / MIN_MATCH)

/* Length of the best match at previous step. Matches not greater than this
 * are discarded. This is used in the lazy match evaluation.
 */
	unsigned prev_length;

	unsigned strstart;	/* start of string to insert */
	unsigned match_start;	/* start of matching string */
	unsigned lookahead;	/* number of valid bytes ahead in window */

/* number of input bytes */
	ulg isize;	/* only 32 bits stored in .gz file */

/* bbox always uses stdin/stdout */
#define ifd STDIN_FILENO	/* input file descriptor */
#define ofd STDOUT_FILENO	/* output file descriptor */

#ifdef DEBUG
	unsigned insize;	/* valid bytes in l_buf */
#endif
	unsigned outcnt;	/* bytes in output buffer */
	smallint eofile;	/* flag set at end of input file */

/* ===========================================================================
 * Local data used by the "bit string" routines.
 */

/* Output buffer. bits are inserted starting at the bottom (least significant
 * bits).
 */
	unsigned bi_buf;	/* was unsigned short */

#undef BUF_SIZE
#define BUF_SIZE (int)(8 * sizeof(G1.bi_buf))

/* Number of bits used within bi_buf. (bi_buf might be implemented on
 * more than 16 bits on some systems.)
 */
	unsigned bi_valid;

#ifdef DEBUG
	ulg bits_sent;	/* bit length of the compressed data */
# define DEBUG_bits_sent(v) (void)(G1.bits_sent v)
#else
# define DEBUG_bits_sent(v) ((void)0)
#endif
};

#define G1 (*(ptr_to_globals - 1))

/* ===========================================================================
 * Write the output buffer outbuf[0..outcnt-1] and update bytes_out.
 * (used for the compressed data only)
 */
static void flush_outbuf(void)
{
	if (G1.outcnt == 0)
		return;

	xwrite(ofd, (char *) G1.outbuf, G1.outcnt);
	G1.outcnt = 0;
}

/* ===========================================================================
 */
/* put_8bit is used for the compressed output */
#define put_8bit(c) \
do { \
	G1.outbuf[G1.outcnt++] = (c); \
	if (G1.outcnt == OUTBUFSIZ) \
		flush_outbuf(); \
} while (0)

/* Output a 16 bit value, lsb first */
static void put_16bit(ush w)
{
	/* GCC 4.2.1 won't optimize out redundant loads of G1.outcnt
	 * (probably because of fear of aliasing with G1.outbuf[]
	 * stores), do it explicitly:
	 */
	unsigned outcnt = G1.outcnt;
	uch *dst = &G1.outbuf[outcnt];

#if BB_UNALIGNED_MEMACCESS_OK && BB_LITTLE_ENDIAN
	if (outcnt < OUTBUFSIZ-2) {
		/* Common case */
		ush *dst16 = (void*) dst;
		*dst16 = w;	/* unaligned LSB 16-bit store */
		G1.outcnt = outcnt + 2;
		return;
	}
	*dst = (uch)w;
	w >>= 8;
	G1.outcnt = ++outcnt;
#else
	*dst = (uch)w;
	w >>= 8;
	if (outcnt < OUTBUFSIZ-2) {
		/* Common case */
		dst[1] = w;
		G1.outcnt = outcnt + 2;
		return;
	}
	G1.outcnt = ++outcnt;
#endif

	/* Slowpath: we will need to do flush_outbuf() */
	if (outcnt == OUTBUFSIZ)
		flush_outbuf(); /* here */
	put_8bit(w); /* or here */
}

#define OPTIMIZED_PUT_32BIT (CONFIG_GZIP_FAST > 0 && BB_UNALIGNED_MEMACCESS_OK && BB_LITTLE_ENDIAN)
static void put_32bit(ulg n)
{
	if (OPTIMIZED_PUT_32BIT) {
		unsigned outcnt = G1.outcnt;
		if (outcnt < OUTBUFSIZ-4) {
			/* Common case */
			ulg *dst32 = (void*) &G1.outbuf[outcnt];
			*dst32 = n;	/* unaligned LSB 32-bit store */
			//bb_error_msg("%p", dst32); // store alignment debugging
			G1.outcnt = outcnt + 4;
			return;
		}
	}
	put_16bit(n);
	put_16bit(n >> 16);
}
static ALWAYS_INLINE void flush_outbuf_if_32bit_optimized(void)
{
	/* If put_32bit() performs 32bit stores && it is used in send_bits() */
	if (OPTIMIZED_PUT_32BIT && BUF_SIZE > 16)
		flush_outbuf();
}

/* ===========================================================================
 * Run a set of bytes through the crc shift register, updating G1.crc.
 */
static void updcrc(uch *s, unsigned n)
{
	G1.crc = crc32_block_endian0(G1.crc, s, n, global_crc32_table /*G1.crc_32_tab*/);
}

/* ===========================================================================
 * Read a new buffer from the current input file, perform end-of-line
 * translation, and update the crc and input file size.
 * IN assertion: size >= 2 (for end-of-line translation)
 */
static unsigned file_read(void *buf, unsigned size)
{
	unsigned len;

	Assert(G1.insize == 0, "l_buf not empty");

	len = safe_read(ifd, buf, size);
	if (len == (unsigned)(-1) || len == 0)
		return len;

	updcrc(buf, len);
	G1.isize += len;
	return len;
}

/* ===========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
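 * (Example: with BUF_SIZE = 32 and bi_valid = 30, sending a 5-bit value
 * completes the 32-bit accumulator, which is written out with put_32bit();
 * the remaining 3 high bits of the value stay in bi_buf with bi_valid = 3.)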
 */
static void send_bits(unsigned value, unsigned length)
{
	unsigned new_buf;

#ifdef DEBUG
	Tracev((stderr, " l %2d v %4x ", length, value));
	Assert(length > 0 && length <= 15, "invalid length");
	DEBUG_bits_sent(+= length);
#endif
	BUILD_BUG_ON(BUF_SIZE != 32 && BUF_SIZE != 16);

	new_buf = G1.bi_buf | (value << G1.bi_valid);
	/* NB: the above may sometimes do "<< 32" shift (undefined)
	 * if check below is changed to "length > BUF_SIZE" instead of >= */
	length += G1.bi_valid;

	/* If bi_buf is full */
	if (length >= BUF_SIZE) {
		/* ...use (valid) bits from bi_buf and
		 * (BUF_SIZE - bi_valid) bits from value,
		 * leaving (width - (BUF_SIZE-bi_valid)) unused bits in value.
		 */
		value >>= (BUF_SIZE - G1.bi_valid);
		if (BUF_SIZE == 32) {
			put_32bit(new_buf);
		} else { /* 16 */
			put_16bit(new_buf);
		}
		new_buf = value;
		length -= BUF_SIZE;
	}
	G1.bi_buf = new_buf;
	G1.bi_valid = length;
}

/* ===========================================================================
 * Reverse the first len bits of a code, using straightforward code (a faster
 * method would use a table)
 * IN assertion: 1 <= len <= 15
 */
static unsigned bi_reverse(unsigned code, int len)
{
	unsigned res = 0;

	while (1) {
		res |= code & 1;
		if (--len <= 0) return res;
		code >>= 1;
		res <<= 1;
	}
}

/* ===========================================================================
 * Write out any remaining bits in an incomplete byte.
 */
static void bi_windup(void)
{
	unsigned bits = G1.bi_buf;
	int cnt = G1.bi_valid;

	while (cnt > 0) {
		put_8bit(bits);
		bits >>= 8;
		cnt -= 8;
	}
	G1.bi_buf = 0;
	G1.bi_valid = 0;
	DEBUG_bits_sent(= (G1.bits_sent + 7) & ~7);
}

/* ===========================================================================
 * Copy a stored block to the zip file, storing first the length and its
 * one's complement if requested.
 */
static void copy_block(const char *buf, unsigned len, int header)
{
	bi_windup();	/* align on byte boundary */

	if (header) {
		unsigned v = ((uint16_t)len) | ((~len) << 16);
		put_32bit(v);
		DEBUG_bits_sent(+= 2 * 16);
	}
	DEBUG_bits_sent(+= (ulg) len << 3);
	while (len--) {
		put_8bit(*buf++);
	}
	/* The above can 32-bit misalign outbuf */
	if (G1.outcnt & 3) /* syscalls are expensive, is it really misaligned? */
		flush_outbuf_if_32bit_optimized();
}

/* ===========================================================================
 * Fill the window when the lookahead becomes insufficient.
 * Updates strstart and lookahead, and sets eofile if end of input file.
 * IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0
 * OUT assertions: at least one byte has been read, or eofile is set;
 * file reads are performed for at least two bytes (required for the
 * translate_eol option).
 */
static void fill_window(void)
{
	unsigned n, m;
	unsigned more = WINDOW_SIZE - G1.lookahead - G1.strstart;
	/* Amount of free space at the end of the window. */

	/* If the window is almost full and there is insufficient lookahead,
	 * move the upper half to the lower one to make room in the upper half.
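	 * (Concretely: once strstart reaches WSIZE+MAX_DIST, the top WSIZE bytes
	 * are copied down, strstart/match_start/block_start drop by WSIZE, and
	 * every hash-chain index is either reduced by WSIZE or cleared.)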
	 */
	if (more == (unsigned) -1) {
		/* Very unlikely, but possible on 16 bit machine if strstart == 0
		 * and lookahead == 1 (input done one byte at time)
		 */
		more--;
	} else if (G1.strstart >= WSIZE + MAX_DIST) {
		/* By the IN assertion, the window is not empty so we can't confuse
		 * more == 0 with more == 64K on a 16 bit machine.
		 */
		Assert(WINDOW_SIZE == 2 * WSIZE, "no sliding with BIG_MEM");

		memcpy(G1.window, G1.window + WSIZE, WSIZE);
		G1.match_start -= WSIZE;
		G1.strstart -= WSIZE;	/* we now have strstart >= MAX_DIST: */

		G1.block_start -= WSIZE;

		for (n = 0; n < HASH_SIZE; n++) {
			m = head[n];
			head[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
		}
		for (n = 0; n < WSIZE; n++) {
			m = G1.prev[n];
			G1.prev[n] = (Pos) (m >= WSIZE ? m - WSIZE : 0);
			/* If n is not on any hash chain, prev[n] is garbage but
			 * its value will never be used.
			 */
		}
		more += WSIZE;
	}
	/* At this point, more >= 2 */
	if (!G1.eofile) {
		n = file_read(G1.window + G1.strstart + G1.lookahead, more);
		if (n == 0 || n == (unsigned) -1) {
			G1.eofile = 1;
		} else {
			G1.lookahead += n;
		}
	}
}
/* Both users fill window with the same loop: */
static void fill_window_if_needed(void)
{
	while (G1.lookahead < MIN_LOOKAHEAD && !G1.eofile)
		fill_window();
}

/* ===========================================================================
 * Set match_start to the longest match starting at the given string and
 * return its length. Matches shorter or equal to prev_length are discarded,
 * in which case the result is equal to prev_length and match_start is
 * garbage.
 * IN assertions: cur_match is the head of the hash chain for the current
 * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 */

/* For MSDOS, OS/2 and 386 Unix, an optimized version is in match.asm or
 * match.s. The code is functionally equivalent, so you can use the C version
 * if desired.
 */
static int longest_match(IPos cur_match)
{
	unsigned chain_length = max_chain_length;	/* max hash chain length */
	uch *scan = G1.window + G1.strstart;	/* current string */
	uch *match;	/* matched string */
	int len;	/* length of current match */
	int best_len = G1.prev_length;	/* best match length so far */
	IPos limit = G1.strstart > (IPos) MAX_DIST ? G1.strstart - (IPos) MAX_DIST : 0;
	/* Stop when cur_match becomes <= limit. To simplify the code,
	 * we prevent matches with the string of window index 0.
	 */

/* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
 * It is easy to get rid of this optimization if necessary.
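 * (The unrolled loop below compares eight byte pairs per iteration, so
 * MAX_MATCH-2 = 256 must be a multiple of 8; the stricter multiple-of-16
 * requirement presumably comes from the assembler variants mentioned above.)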
 */
#if HASH_BITS < 8 || MAX_MATCH != 258
# error Code too clever
#endif
	uch *strend = G1.window + G1.strstart + MAX_MATCH;
	uch scan_end1 = scan[best_len - 1];
	uch scan_end = scan[best_len];

	/* Do not waste too much time if we already have a good match: */
	if (G1.prev_length >= good_match) {
		chain_length >>= 2;
	}
	Assert(G1.strstart <= WINDOW_SIZE - MIN_LOOKAHEAD, "insufficient lookahead");

	do {
		Assert(cur_match < G1.strstart, "no future");
		match = G1.window + cur_match;

		/* Skip to next match if the match length cannot increase
		 * or if the match length is less than 2:
		 */
		if (match[best_len] != scan_end
		 || match[best_len - 1] != scan_end1
		 || *match != *scan || *++match != scan[1]
		) {
			continue;
		}

		/* The check at best_len-1 can be removed because it will be made
		 * again later. (This heuristic is not always a win.)
		 * It is not necessary to compare scan[2] and match[2] since they
		 * are always equal when the other bytes match, given that
		 * the hash keys are equal and that HASH_BITS >= 8.
		 */
		scan += 2, match++;

		/* We check for insufficient lookahead only every 8th comparison;
		 * the 256th check will be made at strstart+258.
		 */
		do {
		} while (*++scan == *++match && *++scan == *++match &&
			 *++scan == *++match && *++scan == *++match &&
			 *++scan == *++match && *++scan == *++match &&
			 *++scan == *++match && *++scan == *++match && scan < strend);

		len = MAX_MATCH - (int) (strend - scan);
		scan = strend - MAX_MATCH;

		if (len > best_len) {
			G1.match_start = cur_match;
			best_len = len;
			if (len >= nice_match)
				break;
			scan_end1 = scan[best_len - 1];
			scan_end = scan[best_len];
		}
	} while ((cur_match = G1.prev[cur_match & WMASK]) > limit
		 && --chain_length != 0);

	return best_len;
}

#ifdef DEBUG
/* ===========================================================================
 * Check that the match at match_start is indeed a match.
 */
static void check_match(IPos start, IPos match, int length)
{
	/* check that the match is indeed a match */
	if (memcmp(G1.window + match, G1.window + start, length) != 0) {
		bb_error_msg(" start %d, match %d, length %d", start, match, length);
		bb_simple_error_msg("invalid match");
	}
	if (verbose > 1) {
		bb_error_msg("\\[%d,%d]", start - match, length);
		do {
			bb_putchar_stderr(G1.window[start++]);
		} while (--length != 0);
	}
}
#else
# define check_match(start, match, length) ((void)0)
#endif


/* trees.c -- output deflated data using Huffman coding
 * Copyright (C) 1992-1993 Jean-loup Gailly
 * This is free software; you can redistribute it and/or modify it under the
 * terms of the GNU General Public License, see the file COPYING.
 */

/* PURPOSE
 *      Encode various sets of source values using variable-length
 *      binary code trees.
 *
 * DISCUSSION
 *      The PKZIP "deflation" process uses several Huffman trees. The more
 *      common source values are represented by shorter bit sequences.
 *
 *      Each code tree is stored in the ZIP file in a compressed form
 *      which is itself a Huffman encoding of the lengths of
 *      all the code strings (in ascending order by source values).
 *      The actual code strings are reconstructed from the lengths in
 *      the UNZIP process, as described in the "application note"
 *      (APPNOTE.TXT) distributed as part of PKWARE's PKZIP program.
 *
 * REFERENCES
 *      Lynch, Thomas J.
 *          Data Compression: Techniques and Applications, pp. 53-55.
 *          Lifetime Learning Publications, 1985. ISBN 0-534-03418-7.
 *
 *      Storer, James A.
 *          Data Compression: Methods and Theory, pp. 49-50.
 *          Computer Science Press, 1988. ISBN 0-7167-8156-5.
 *
 *      Sedgewick, R.
 *          Algorithms, p290.
 *          Addison-Wesley, 1983. ISBN 0-201-06672-6.
 *
 * INTERFACE
 *      void ct_init()
 *          Allocate the match buffer, initialize the various tables [and save
 *          the location of the internal file attribute (ascii/binary) and
 *          method (DEFLATE/STORE) -- deleted in bbox]
 *
 *      void ct_tally(int dist, int lc);
 *          Save the match info and tally the frequency counts.
 *
 *      ulg flush_block(char *buf, ulg stored_len, int eof)
 *          Determine the best encoding for the current block: dynamic trees,
 *          static trees or store, and output the encoded block to the zip
 *          file. Returns the total compressed length for the file so far.
 */

#define MAX_BITS 15
/* All codes must not exceed MAX_BITS bits */

#define MAX_BL_BITS 7
/* Bit length codes must not exceed MAX_BL_BITS bits */

#define LENGTH_CODES 29
/* number of length codes, not counting the special END_BLOCK code */

#define LITERALS  256
/* number of literal bytes 0..255 */

#define END_BLOCK 256
/* end of block literal code */

#define L_CODES (LITERALS+1+LENGTH_CODES)
/* number of Literal or Length codes, including the END_BLOCK code */

#define D_CODES 30
/* number of distance codes */

#define BL_CODES 19
/* number of codes used to transfer the bit lengths */

/* extra bits for each length code */
static const uint8_t extra_lbits[LENGTH_CODES] ALIGN1 = {
	0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4,
	4, 4, 5, 5, 5, 5, 0
};

/* extra bits for each distance code */
static const uint8_t extra_dbits[D_CODES] ALIGN1 = {
	0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9,
	10, 10, 11, 11, 12, 12, 13, 13
};

/* extra bits for each bit length code */
static const uint8_t extra_blbits[BL_CODES] ALIGN1 = {
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7 };

/* the order in which the bit length code lengths are sent
 * (see send_all_trees(); unused trailing codes need not be transmitted) */
static const uint8_t bl_order[BL_CODES] ALIGN1 = {
	16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };

#define STORED_BLOCK 0
#define STATIC_TREES 1
#define DYN_TREES    2
/* The three kinds of block type */

#ifndef LIT_BUFSIZE
# ifdef SMALL_MEM
#  define LIT_BUFSIZE 0x2000
# else
#  ifdef MEDIUM_MEM
#   define LIT_BUFSIZE 0x4000
#  else
#   define LIT_BUFSIZE 0x8000
#  endif
# endif
#endif
#ifndef DIST_BUFSIZE
# define DIST_BUFSIZE LIT_BUFSIZE
#endif
/* Sizes of match buffers for literals/lengths and distances. There are
 * 4 reasons for limiting LIT_BUFSIZE to 64K:
 *   - frequencies can be kept in 16 bit counters
 *   - if compression is not successful for the first block, all input data is
 *     still in the window so we can still emit a stored block even when input
 *     comes from standard input. (This can also be done for all blocks if
 *     LIT_BUFSIZE is not greater than 32K.)
 *   - if compression is not successful for a file smaller than 64K, we can
 *     even emit a stored file instead of a stored block (saving 5 bytes).
 *   - creating new Huffman trees less frequently may not provide fast
 *     adaptation to changes in the input data statistics. (Take for
 *     example a binary file with poorly compressible code followed by
 *     a highly compressible string table.) Smaller buffer sizes give
 *     fast adaptation but have of course the overhead of transmitting trees
 *     more frequently.
 *   - I can't count above 4
 * The current code is general and allows DIST_BUFSIZE < LIT_BUFSIZE (to save
 * memory at the expense of compression). Some optimizations would be possible
 * if we rely on DIST_BUFSIZE == LIT_BUFSIZE.
 */
#define REP_3_6     16
/* repeat previous bit length 3-6 times (2 bits of repeat count) */
#define REPZ_3_10   17
/* repeat a zero length 3-10 times (3 bits of repeat count) */
#define REPZ_11_138 18
/* repeat a zero length 11-138 times (7 bits of repeat count) */

/* ===========================================================================
*/
/* Data structure describing a single value and its code string. */
typedef struct ct_data {
	union {
		ush freq;	/* frequency count */
		ush code;	/* bit string */
	} fc;
	union {
		ush dad;	/* father node in Huffman tree */
		ush len;	/* length of bit string */
	} dl;
} ct_data;

#define Freq fc.freq
#define Code fc.code
#define Dad  dl.dad
#define Len  dl.len

#define HEAP_SIZE (2*L_CODES + 1)
/* maximum heap size */

typedef struct tree_desc {
	ct_data *dyn_tree;	/* the dynamic tree */
	ct_data *static_tree;	/* corresponding static tree or NULL */
	const uint8_t *extra_bits;	/* extra bits for each code or NULL */
	int extra_base;	/* base index for extra_bits */
	int elems;	/* max number of elements in the tree */
	int max_length;	/* max bit length for the codes */
	int max_code;	/* largest code with non zero frequency */
} tree_desc;

struct globals2 {

	ush heap[HEAP_SIZE];	/* heap used to build the Huffman trees */
	int heap_len;	/* number of elements in the heap */
	int heap_max;	/* element of largest frequency */

/* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.
 * The same heap array is used to build all trees.
 */

	ct_data dyn_ltree[HEAP_SIZE];	/* literal and length tree */
	ct_data dyn_dtree[2 * D_CODES + 1];	/* distance tree */

	ct_data static_ltree[L_CODES + 2];

/* The static literal tree. Since the bit lengths are imposed, there is no
 * need for the L_CODES extra codes used during heap construction. However
 * the codes 286 and 287 are needed to build a canonical tree (see ct_init
 * below).
 */

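/* (Per the deflate spec, those imposed lengths are: 8 bits for literal codes
 * 0..143, 9 bits for 144..255, 7 bits for 256..279 and 8 bits for 280..287.)
 */
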
	ct_data static_dtree[D_CODES];

/* The static distance tree. (Actually a trivial tree since all codes use
 * 5 bits.)
 */

	ct_data bl_tree[2 * BL_CODES + 1];

/* Huffman tree for the bit lengths */

	tree_desc l_desc;
	tree_desc d_desc;
	tree_desc bl_desc;

	/* was "ush", but "unsigned" results in smaller code */
	unsigned bl_count[MAX_BITS + 1];

/* Number of codes at each bit length for an optimal tree */

	uch depth[2 * L_CODES + 1];

/* Depth of each subtree used as tie breaker for trees of equal frequency */

	uch length_code[MAX_MATCH - MIN_MATCH + 1];

/* length code for each normalized match length (0 == MIN_MATCH) */

	uch dist_code[512];

/* distance codes. The first 256 values correspond to the distances
 * 3 .. 258, the last 256 values correspond to the top 8 bits of
 * the 15 bit distances.
 */

	int base_length[LENGTH_CODES];

/* First normalized length for each code (0 = MIN_MATCH) */

	int base_dist[D_CODES];

/* First normalized distance for each code (0 = distance of 1) */

	uch flag_buf[LIT_BUFSIZE / 8];

/* flag_buf is a bit array distinguishing literals from lengths in
 * l_buf, thus indicating the presence or absence of a distance.
 */

	unsigned last_lit;	/* running index in l_buf */
	unsigned last_dist;	/* running index in d_buf */
	unsigned last_flags;	/* running index in flag_buf */
	uch flags;	/* current flags not yet saved in flag_buf */
	uch flag_bit;	/* current bit used in flags */

/* bits are filled in flags starting at bit 0 (least significant).
 * Note: these flags are overkill in the current code since we don't
 * take advantage of DIST_BUFSIZE == LIT_BUFSIZE.
 */

	ulg opt_len;	/* bit length of current block with optimal trees */
	ulg static_len;	/* bit length of current block with static trees */

//	ulg compressed_len;	/* total bit length of compressed file */
};

#define G2ptr ((struct globals2*)(ptr_to_globals))
#define G2 (*G2ptr)

/* ===========================================================================
 */
#ifndef DEBUG
/* Send a code of the given tree. c and tree must not have side effects */
# define SEND_CODE(c, tree) send_bits(tree[c].Code, tree[c].Len)
#else
# define SEND_CODE(c, tree) \
{ \
	if (verbose > 1) bb_error_msg("\ncd %3d ", (c)); \
	send_bits(tree[c].Code, tree[c].Len); \
}
#endif

#define D_CODE(dist) \
	((dist) < 256 ? G2.dist_code[dist] : G2.dist_code[256 + ((dist)>>7)])
/* Mapping from a distance to a distance code. dist is the distance - 1 and
 * must not have side effects. dist_code[256] and dist_code[257] are never
 * used.
 * The arguments must not have side effects.
 */

/* ===========================================================================
 * Initialize a new block.
 */
static void init_block(void)
{
	int n;	/* iterates over tree elements */

	/* Initialize the trees. */
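	/* (dyn_ltree[END_BLOCK].Freq is set to 1 below so that the end-of-block
	 * symbol always receives a code in the literal/length tree.)
	 */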
	for (n = 0; n < L_CODES; n++)
		G2.dyn_ltree[n].Freq = 0;
	for (n = 0; n < D_CODES; n++)
		G2.dyn_dtree[n].Freq = 0;
	for (n = 0; n < BL_CODES; n++)
		G2.bl_tree[n].Freq = 0;

	G2.dyn_ltree[END_BLOCK].Freq = 1;
	G2.opt_len = G2.static_len = 0;
	G2.last_lit = G2.last_dist = G2.last_flags = 0;
	G2.flags = 0;
	G2.flag_bit = 1;
}

/* ===========================================================================
 * Restore the heap property by moving down the tree starting at node k,
 * exchanging a node with the smallest of its two sons if necessary, stopping
 * when the heap property is re-established (each father smaller than its
 * two sons).
 */

/* Compares two subtrees, using the tree depth as tie breaker when
 * the subtrees have equal frequency. This minimizes the worst case length. */
#define SMALLER(tree, n, m) \
	(tree[n].Freq < tree[m].Freq \
	|| (tree[n].Freq == tree[m].Freq && G2.depth[n] <= G2.depth[m]))

static void pqdownheap(const ct_data *tree, int k)
{
	int v = G2.heap[k];
	int j = k << 1;	/* left son of k */

	while (j <= G2.heap_len) {
		/* Set j to the smallest of the two sons: */
		if (j < G2.heap_len && SMALLER(tree, G2.heap[j + 1], G2.heap[j]))
			j++;

		/* Exit if v is smaller than both sons */
		if (SMALLER(tree, v, G2.heap[j]))
			break;

		/* Exchange v with the smallest son */
		G2.heap[k] = G2.heap[j];
		k = j;

		/* And continue down the tree, setting j to the left son of k */
		j <<= 1;
	}
	G2.heap[k] = v;
}

/* ===========================================================================
 * Compute the optimal bit lengths for a tree and update the total bit length
 * for the current block.
 * IN assertion: the fields freq and dad are set, heap[heap_max] and
 * above are the tree nodes sorted by increasing frequency.
 * OUT assertions: the field len is set to the optimal bit length, the
 * array bl_count contains the frequencies for each bit length.
 * The length opt_len is updated; static_len is also updated if stree is
 * not null.
 */
static void gen_bitlen(const tree_desc *desc)
{
#define tree desc->dyn_tree
	int h;	/* heap index */
	int n, m;	/* iterate over the tree elements */
	int bits;	/* bit length */
	int overflow;	/* number of elements with bit length too large */

	for (bits = 0; bits < ARRAY_SIZE(G2.bl_count); bits++)
		G2.bl_count[bits] = 0;

	/* In a first pass, compute the optimal bit lengths (which may
	 * overflow in the case of the bit length tree).
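	 * "Overflow" means some leaves would need more than desc->max_length
	 * bits; the fixup loop further down pushes one shorter leaf a level
	 * deeper and re-homes an overflowed leaf as its brother, repeating
	 * until every code length fits.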
	 */
	tree[G2.heap[G2.heap_max]].Len = 0;	/* root of the heap */

	overflow = 0;
	for (h = G2.heap_max + 1; h < HEAP_SIZE; h++) {
		ulg f;	/* frequency */
		int xbits;	/* extra bits */

		n = G2.heap[h];
		bits = tree[tree[n].Dad].Len + 1;
		if (bits > desc->max_length) {
			bits = desc->max_length;
			overflow++;
		}
		tree[n].Len = (ush) bits;
		/* We overwrite tree[n].Dad which is no longer needed */

		if (n > desc->max_code)
			continue;	/* not a leaf node */

		G2.bl_count[bits]++;
		xbits = 0;
		if (n >= desc->extra_base)
			xbits = desc->extra_bits[n - desc->extra_base];
		f = tree[n].Freq;
		G2.opt_len += f * (bits + xbits);

		if (desc->static_tree)
			G2.static_len += f * (desc->static_tree[n].Len + xbits);
	}
	if (overflow == 0)
		return;

	Trace((stderr, "\nbit length overflow\n"));
	/* This happens for example on obj2 and pic of the Calgary corpus */

	/* Find the first bit length which could increase: */
	do {
		bits = desc->max_length - 1;
		while (G2.bl_count[bits] == 0)
			bits--;
		G2.bl_count[bits]--;	/* move one leaf down the tree */
		G2.bl_count[bits + 1] += 2;	/* move one overflow item as its brother */
		G2.bl_count[desc->max_length]--;
		/* The brother of the overflow item also moves one step up,
		 * but this does not affect bl_count[desc->max_length]
		 */
		overflow -= 2;
	} while (overflow > 0);

	/* Now recompute all bit lengths, scanning in increasing frequency.
	 * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
	 * lengths instead of fixing only the wrong ones. This idea is taken
	 * from 'ar' written by Haruhiko Okumura.)
	 */
	for (bits = desc->max_length; bits != 0; bits--) {
		n = G2.bl_count[bits];
		while (n != 0) {
			m = G2.heap[--h];
			if (m > desc->max_code)
				continue;
			if (tree[m].Len != (unsigned) bits) {
				Trace((stderr, "code %d bits %d->%d\n", m, tree[m].Len, bits));
				G2.opt_len += ((int32_t) bits - tree[m].Len) * tree[m].Freq;
				tree[m].Len = bits;
			}
			n--;
		}
	}
#undef tree
}

/* ===========================================================================
 * Generate the codes for a given tree and bit counts (which need not be
 * optimal).
 * IN assertion: the array bl_count contains the bit length statistics for
 * the given tree and the field len is set for all tree elements.
 * OUT assertion: the field code is set for all tree elements of non
 * zero code length.
 */
static void gen_codes(ct_data *tree, int max_code)
{
	/* next_code[] and code used to be "ush", but "unsigned" results in smaller code */
	unsigned next_code[MAX_BITS + 1];	/* next code value for each bit length */
	unsigned code = 0;	/* running code value */
	int bits;	/* bit index */
	int n;	/* code index */

	/* The distribution counts are first used to generate the code values
	 * without bit reversal.
	 */
	for (bits = 1; bits <= MAX_BITS; bits++) {
		next_code[bits] = code = (code + G2.bl_count[bits - 1]) << 1;
	}
	/* Check that the bit counts in bl_count are consistent. The last code
	 * must be all ones.
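	 * (Example: for bl_count[1..3] = {1, 1, 2} the loop above produces
	 * next_code[1..3] = {0, 2, 6}, i.e. the canonical codes 0, 10, 110
	 * and 111, and the last 3-bit code is indeed all ones.)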
	 */
	Assert(code + G2.bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
			"inconsistent bit counts");
	Tracev((stderr, "\ngen_codes: max_code %d ", max_code));

	for (n = 0; n <= max_code; n++) {
		int len = tree[n].Len;

		if (len == 0)
			continue;
		/* Now reverse the bits */
		tree[n].Code = bi_reverse(next_code[len]++, len);

		Tracec(tree != G2.static_ltree,
			(stderr, "\nn %3d %c l %2d c %4x (%x) ", n,
			 (n > ' ' ? n : ' '), len, tree[n].Code,
			 next_code[len] - 1));
	}
}

/* ===========================================================================
 * Construct one Huffman tree and assign the code bit strings and lengths.
 * Update the total bit length for the current block.
 * IN assertion: the field freq is set for all tree elements.
 * OUT assertions: the fields len and code are set to the optimal bit length
 * and corresponding code. The length opt_len is updated; static_len is
 * also updated if stree is not null. The field max_code is set.
 */

/* Remove the smallest element from the heap and recreate the heap with
 * one less element. Updates heap and heap_len. */

#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */

#define PQREMOVE(tree, top) \
do { \
	top = G2.heap[SMALLEST]; \
	G2.heap[SMALLEST] = G2.heap[G2.heap_len--]; \
	pqdownheap(tree, SMALLEST); \
} while (0)

static void build_tree(tree_desc *desc)
{
	ct_data *tree = desc->dyn_tree;
	ct_data *stree = desc->static_tree;
	int elems = desc->elems;
	int n, m;	/* iterate over heap elements */
	int max_code = -1;	/* largest code with non zero frequency */
	int node = elems;	/* next internal node of the tree */

	/* Construct the initial heap, with least frequent element in
	 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
	 * heap[0] is not used.
	 */
	G2.heap_len = 0;
	G2.heap_max = HEAP_SIZE;

	for (n = 0; n < elems; n++) {
		if (tree[n].Freq != 0) {
			G2.heap[++G2.heap_len] = max_code = n;
			G2.depth[n] = 0;
		} else {
			tree[n].Len = 0;
		}
	}

	/* The pkzip format requires that at least one distance code exists,
	 * and that at least one bit should be sent even if there is only one
	 * possible code. So to avoid special checks later on we force at least
	 * two codes of non zero frequency.
	 */
	while (G2.heap_len < 2) {
		int new = G2.heap[++G2.heap_len] = (max_code < 2 ? ++max_code : 0);

		tree[new].Freq = 1;
		G2.depth[new] = 0;
		G2.opt_len--;
		if (stree)
			G2.static_len -= stree[new].Len;
		/* new is 0 or 1 so it does not have extra bits */
	}
	desc->max_code = max_code;

	/* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
	 * establish sub-heaps of increasing lengths:
	 */
	for (n = G2.heap_len / 2; n >= 1; n--)
		pqdownheap(tree, n);

	/* Construct the Huffman tree by repeatedly combining the least two
	 * frequent nodes.
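	 * Each pass removes the two lightest nodes, records them in the upper
	 * (sorted) part of heap[], and pushes their new parent back onto the
	 * heap, so heap_len shrinks by one per pass until one root remains.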
	 */
	do {
		PQREMOVE(tree, n);	/* n = node of least frequency */
		m = G2.heap[SMALLEST];	/* m = node of next least frequency */

		G2.heap[--G2.heap_max] = n;	/* keep the nodes sorted by frequency */
		G2.heap[--G2.heap_max] = m;

		/* Create a new node father of n and m */
		tree[node].Freq = tree[n].Freq + tree[m].Freq;
		G2.depth[node] = MAX(G2.depth[n], G2.depth[m]) + 1;
		tree[n].Dad = tree[m].Dad = (ush) node;
#ifdef DUMP_BL_TREE
		if (tree == G2.bl_tree) {
			bb_error_msg("\nnode %d(%d), sons %d(%d) %d(%d)",
					node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);
		}
#endif
		/* and insert the new node in the heap */
		G2.heap[SMALLEST] = node++;
		pqdownheap(tree, SMALLEST);
	} while (G2.heap_len >= 2);

	G2.heap[--G2.heap_max] = G2.heap[SMALLEST];

	/* At this point, the fields freq and dad are set. We can now
	 * generate the bit lengths.
	 */
	gen_bitlen(desc);

	/* The field len is now set, we can generate the bit codes */
	gen_codes(tree, max_code);
}

/* ===========================================================================
 * Scan a literal or distance tree to determine the frequencies of the codes
 * in the bit length tree. Updates opt_len to take into account the repeat
 * counts. (The contribution of the bit length codes will be added later
 * during the construction of bl_tree.)
 */
static void scan_tree(ct_data *tree, int max_code)
{
	int n;	/* iterates over all tree elements */
	int prevlen = -1;	/* last emitted length */
	int curlen;	/* length of current code */
	int nextlen = tree[0].Len;	/* length of next code */
	int count = 0;	/* repeat count of the current code */
	int max_count = 7;	/* max repeat count */
	int min_count = 4;	/* min repeat count */

	if (nextlen == 0) {
		max_count = 138;
		min_count = 3;
	}
	tree[max_code + 1].Len = 0xffff;	/* guard */

	for (n = 0; n <= max_code; n++) {
		curlen = nextlen;
		nextlen = tree[n + 1].Len;
		if (++count < max_count && curlen == nextlen)
			continue;

		if (count < min_count) {
			G2.bl_tree[curlen].Freq += count;
		} else if (curlen != 0) {
			if (curlen != prevlen)
				G2.bl_tree[curlen].Freq++;
			G2.bl_tree[REP_3_6].Freq++;
		} else if (count <= 10) {
			G2.bl_tree[REPZ_3_10].Freq++;
		} else {
			G2.bl_tree[REPZ_11_138].Freq++;
		}
		count = 0;
		prevlen = curlen;

		max_count = 7;
		min_count = 4;
		if (nextlen == 0) {
			max_count = 138;
			min_count = 3;
		} else if (curlen == nextlen) {
			max_count = 6;
			min_count = 3;
		}
	}
}

/* ===========================================================================
 * Send a literal or distance tree in compressed form, using the codes in
 * bl_tree.
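 * Runs of equal lengths are run-length encoded: e.g. a run of 25 zero
 * lengths is sent as one REPZ_11_138 code followed by 7 extra bits
 * holding 25-11 = 14.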
 */
static void send_tree(const ct_data *tree, int max_code)
{
	int n;	/* iterates over all tree elements */
	int prevlen = -1;	/* last emitted length */
	int curlen;	/* length of current code */
	int nextlen = tree[0].Len;	/* length of next code */
	int count = 0;	/* repeat count of the current code */
	int max_count = 7;	/* max repeat count */
	int min_count = 4;	/* min repeat count */

/* tree[max_code+1].Len = -1; *//* guard already set */
	if (nextlen == 0)
		max_count = 138, min_count = 3;

	for (n = 0; n <= max_code; n++) {
		curlen = nextlen;
		nextlen = tree[n + 1].Len;
		if (++count < max_count && curlen == nextlen) {
			continue;
		} else if (count < min_count) {
			do {
				SEND_CODE(curlen, G2.bl_tree);
			} while (--count);
		} else if (curlen != 0) {
			if (curlen != prevlen) {
				SEND_CODE(curlen, G2.bl_tree);
				count--;
			}
			Assert(count >= 3 && count <= 6, " 3_6?");
			SEND_CODE(REP_3_6, G2.bl_tree);
			send_bits(count - 3, 2);
		} else if (count <= 10) {
			SEND_CODE(REPZ_3_10, G2.bl_tree);
			send_bits(count - 3, 3);
		} else {
			SEND_CODE(REPZ_11_138, G2.bl_tree);
			send_bits(count - 11, 7);
		}
		count = 0;
		prevlen = curlen;
		if (nextlen == 0) {
			max_count = 138;
			min_count = 3;
		} else if (curlen == nextlen) {
			max_count = 6;
			min_count = 3;
		} else {
			max_count = 7;
			min_count = 4;
		}
	}
}

/* ===========================================================================
 * Construct the Huffman tree for the bit lengths and return the index in
 * bl_order of the last bit length code to send.
 */
static int build_bl_tree(void)
{
	int max_blindex;	/* index of last bit length code of non zero freq */

	/* Determine the bit length frequencies for literal and distance trees */
	scan_tree(G2.dyn_ltree, G2.l_desc.max_code);
	scan_tree(G2.dyn_dtree, G2.d_desc.max_code);

	/* Build the bit length tree: */
	build_tree(&G2.bl_desc);
	/* opt_len now includes the length of the tree representations, except
	 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
	 */

	/* Determine the number of bit length codes to send. The pkzip format
	 * requires that at least 4 bit length codes be sent. (appnote.txt says
	 * 3 but the actual value used is 4.)
	 */
	for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) {
		if (G2.bl_tree[bl_order[max_blindex]].Len != 0)
			break;
	}
	/* Update opt_len to include the bit length tree and counts */
	G2.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
	Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", (long)G2.opt_len, (long)G2.static_len));

	return max_blindex;
}

/* ===========================================================================
 * Send the header for a block using dynamic Huffman trees: the counts, the
 * lengths of the bit length codes, the literal tree and the distance tree.
 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
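 * (The biased counts fit their header fields: lcodes-257 <= 29 in 5 bits,
 * dcodes-1 <= 29 in 5 bits, blcodes-4 <= 15 in 4 bits.)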
 */
static void send_all_trees(int lcodes, int dcodes, int blcodes)
{
	int rank;	/* index in bl_order */

	Assert(lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
	Assert(lcodes <= L_CODES && dcodes <= D_CODES
			&& blcodes <= BL_CODES, "too many codes");
	Tracev((stderr, "\nbl counts: "));
	send_bits(lcodes - 257, 5);	/* not +255 as stated in appnote.txt */
	send_bits(dcodes - 1, 5);
	send_bits(blcodes - 4, 4);	/* not -3 as stated in appnote.txt */
	for (rank = 0; rank < blcodes; rank++) {
		Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
		send_bits(G2.bl_tree[bl_order[rank]].Len, 3);
	}
	Tracev((stderr, "\nbl tree: sent %ld", (long)G1.bits_sent));

	send_tree((ct_data *) G2.dyn_ltree, lcodes - 1);	/* send the literal tree */
	Tracev((stderr, "\nlit tree: sent %ld", (long)G1.bits_sent));

	send_tree((ct_data *) G2.dyn_dtree, dcodes - 1);	/* send the distance tree */
	Tracev((stderr, "\ndist tree: sent %ld", (long)G1.bits_sent));
}

/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
static int ct_tally(int dist, int lc)
{
	G1.l_buf[G2.last_lit++] = lc;
	if (dist == 0) {
		/* lc is the unmatched char */
		G2.dyn_ltree[lc].Freq++;
	} else {
		/* Here, lc is the match length - MIN_MATCH */
		dist--;	/* dist = match distance - 1 */
		Assert((ush) dist < (ush) MAX_DIST
		 && (ush) lc <= (ush) (MAX_MATCH - MIN_MATCH)
		 && (ush) D_CODE(dist) < (ush) D_CODES, "ct_tally: bad match"
		);

		G2.dyn_ltree[G2.length_code[lc] + LITERALS + 1].Freq++;
		G2.dyn_dtree[D_CODE(dist)].Freq++;

		G1.d_buf[G2.last_dist++] = dist;
		G2.flags |= G2.flag_bit;
	}
	G2.flag_bit <<= 1;

	/* Output the flags if they fill a byte: */
	if ((G2.last_lit & 7) == 0) {
		G2.flag_buf[G2.last_flags++] = G2.flags;
		G2.flags = 0;
		G2.flag_bit = 1;
	}
	/* Try to guess if it is profitable to stop the current block here */
	if ((G2.last_lit & 0xfff) == 0) {
		/* Compute an upper bound for the compressed length */
		ulg out_length = G2.last_lit * 8L;
		ulg in_length = (ulg) G1.strstart - G1.block_start;
		int dcode;

		for (dcode = 0; dcode < D_CODES; dcode++) {
			out_length += G2.dyn_dtree[dcode].Freq * (5L + extra_dbits[dcode]);
		}
		out_length >>= 3;
		Trace((stderr,
				"\nlast_lit %u, last_dist %u, in %ld, out ~%ld(%ld%%) ",
				G2.last_lit, G2.last_dist,
				(long)in_length, (long)out_length,
				100L - out_length * 100L / in_length));
		if (G2.last_dist < G2.last_lit / 2 && out_length < in_length / 2)
			return 1;
	}
	return (G2.last_lit == LIT_BUFSIZE - 1 || G2.last_dist == DIST_BUFSIZE);
	/* We avoid equality with LIT_BUFSIZE because of wraparound at 64K
	 * on 16 bit machines and because stored blocks are restricted to
	 * 64K-1 bytes.
	 */
}

/* ===========================================================================
 * Send the block data compressed using the given Huffman trees
 */
static void compress_block(const ct_data *ltree, const ct_data *dtree)
{
	unsigned dist;	/* distance of matched string */
	int lc;	/* match length or unmatched char (if dist == 0) */
	unsigned lx = 0;	/* running index in l_buf */
	unsigned dx = 0;	/* running index in d_buf */
	unsigned fx = 0;	/* running index in flag_buf */
	uch flag = 0;	/* current flags */
	unsigned code;	/* the code to send */
	int extra;	/* number of extra bits to send */

	if (G2.last_lit != 0) do {
		if ((lx & 7) == 0)
			flag = G2.flag_buf[fx++];
		lc = G1.l_buf[lx++];
		if ((flag & 1) == 0) {
			SEND_CODE(lc, ltree);	/* send a literal byte */
			Tracecv(lc > ' ', (stderr, " '%c' ", lc));
		} else {
			/* Here, lc is the match length - MIN_MATCH */
			code = G2.length_code[lc];
			SEND_CODE(code + LITERALS + 1, ltree);	/* send the length code */
			extra = extra_lbits[code];
			if (extra != 0) {
				lc -= G2.base_length[code];
				send_bits(lc, extra);	/* send the extra length bits */
			}
			dist = G1.d_buf[dx++];
			/* Here, dist is the match distance - 1 */
			code = D_CODE(dist);
			Assert(code < D_CODES, "bad d_code");

			SEND_CODE(code, dtree);	/* send the distance code */
			extra = extra_dbits[code];
			if (extra != 0) {
				dist -= G2.base_dist[code];
				send_bits(dist, extra);	/* send the extra distance bits */
			}
		}	/* literal or match pair ? */
		flag >>= 1;
	} while (lx < G2.last_lit);

	SEND_CODE(END_BLOCK, ltree);
}

/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and output the encoded block to the zip file. This function
 * returns the total compressed length for the file so far.
 */
static void flush_block(const char *buf, ulg stored_len, int eof)
{
	ulg opt_lenb, static_lenb;	/* opt_len and static_len in bytes */
	int max_blindex;	/* index of last bit length code of non zero freq */

	G2.flag_buf[G2.last_flags] = G2.flags;	/* Save the flags for the last 8 items */

	/* Construct the literal and distance trees */
	build_tree(&G2.l_desc);
	Tracev((stderr, "\nlit data: dyn %ld, stat %ld", (long)G2.opt_len, (long)G2.static_len));

	build_tree(&G2.d_desc);
	Tracev((stderr, "\ndist data: dyn %ld, stat %ld", (long)G2.opt_len, (long)G2.static_len));
	/* At this point, opt_len and static_len are the total bit lengths of
	 * the compressed block data, excluding the tree representations.
	 */

	/* Build the bit length tree for the above two trees, and get the index
	 * in bl_order of the last bit length code to send.
	 */
	max_blindex = build_bl_tree();

	/* Determine the best encoding. Compute first the block length in bytes */
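	/* (The "+3" below accounts for the 3-bit block type header, and the
	 * "+7 >> 3" rounds the bit count up to whole bytes.)
	 */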
	/* Determine the best encoding. Compute first the block length in bytes */
	opt_lenb = (G2.opt_len + 3 + 7) >> 3;
	static_lenb = (G2.static_len + 3 + 7) >> 3;

	Trace((stderr,
		"\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u dist %u ",
		(unsigned long)opt_lenb, (unsigned long)G2.opt_len,
		(unsigned long)static_lenb, (unsigned long)G2.static_len,
		(unsigned long)stored_len,
		G2.last_lit, G2.last_dist));

	if (static_lenb <= opt_lenb)
		opt_lenb = static_lenb;

	/* If compression failed and this is the first and last block,
	 * and if the zip file can be seeked (to rewrite the local header),
	 * the whole file is transformed into a stored file:
	 */
// seekable() is constant FALSE in busybox, and G2.compressed_len is disabled
// (this was the only user)
//	if (stored_len <= opt_lenb && eof && G2.compressed_len == 0L && seekable()) {
//		/* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */
//		if (buf == NULL)
//			bb_error_msg("block vanished");
//
//		G2.compressed_len = stored_len << 3;
//		copy_block(buf, (unsigned) stored_len, 0); /* without header */
//	} else
	if (stored_len + 4 <= opt_lenb && buf != NULL) {
		/* 4: two words for the lengths */
		/* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
		 * Otherwise we can't have processed more than WSIZE input bytes since
		 * the last block flush, because compression would have been
		 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
		 * transform a block into a stored block.
		 */
		send_bits((STORED_BLOCK << 1) + eof, 3);    /* send block type */
//		G2.compressed_len = ((G2.compressed_len + 3 + 7) & ~7L)
//				+ ((stored_len + 4) << 3);
		copy_block(buf, (unsigned) stored_len, 1);  /* with header */
	} else
	if (static_lenb == opt_lenb) {
		send_bits((STATIC_TREES << 1) + eof, 3);
		compress_block((ct_data *) G2.static_ltree, (ct_data *) G2.static_dtree);
//		G2.compressed_len += 3 + G2.static_len;
	} else {
		send_bits((DYN_TREES << 1) + eof, 3);
		send_all_trees(G2.l_desc.max_code + 1, G2.d_desc.max_code + 1,
				max_blindex + 1);
		compress_block((ct_data *) G2.dyn_ltree, (ct_data *) G2.dyn_dtree);
//		G2.compressed_len += 3 + G2.opt_len;
	}
//	Assert(G2.compressed_len == G1.bits_sent, "bad compressed size");
	init_block();

	if (eof) {
		bi_windup();
//		G2.compressed_len += 7;  /* align on byte boundary */
	}
//	Tracev((stderr, "\ncomprlen %lu(%lu) ",
//		(unsigned long)G2.compressed_len >> 3,
//		(unsigned long)G2.compressed_len - 7 * eof));

	return; /* was "return G2.compressed_len >> 3;" */
}

/* ===========================================================================
 * Update a hash value with the given input byte
 * IN assertion: all calls to UPDATE_HASH are made with consecutive
 * input characters, so that a running hash key can be computed from the
 * previous key instead of complete recalculation each time.
 */
#define UPDATE_HASH(h, c) (h = (((h)<<H_SHIFT) ^ (c)) & HASH_MASK)
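
/* Illustrative sketch (not compiled): why the running hash above always
 * covers exactly the last MIN_MATCH (= 3) input bytes. H_SHIFT is chosen
 * so that after MIN_MATCH updates the oldest byte has been shifted out of
 * HASH_MASK, so hashing window[s..s+2] costs one UPDATE_HASH per position
 * instead of rehashing three bytes. The DEMO_* constants are assumptions
 * standing in for HASH_BITS/HASH_MASK/H_SHIFT, picked only to have that
 * same "MIN_MATCH shifts clear the mask" property.
 */
#if 0
#include <stdio.h>

#define DEMO_HASH_BITS  15
#define DEMO_HASH_MASK  ((1 << DEMO_HASH_BITS) - 1)
#define DEMO_H_SHIFT    ((DEMO_HASH_BITS + 3 - 1) / 3)   /* MIN_MATCH == 3 */
#define DEMO_UPDATE(h, c)  ((h) = (((h) << DEMO_H_SHIFT) ^ (c)) & DEMO_HASH_MASK)

int main(void)
{
	const unsigned char buf[] = "abcabc";
	unsigned rolling = 0, direct, i;

	/* prime with the first MIN_MATCH-1 bytes, as lm_init() below does */
	DEMO_UPDATE(rolling, buf[0]);
	DEMO_UPDATE(rolling, buf[1]);

	for (i = 0; i + 2 < sizeof(buf) - 1; i++) {
		DEMO_UPDATE(rolling, buf[i + 2]);  /* roll in the newest byte */

		/* recompute the 3-byte hash from scratch for comparison */
		direct = 0;
		DEMO_UPDATE(direct, buf[i]);
		DEMO_UPDATE(direct, buf[i + 1]);
		DEMO_UPDATE(direct, buf[i + 2]);

		printf("pos %u: rolling=%#x direct=%#x%s\n", i, rolling, direct,
			rolling == direct ? "" : " MISMATCH");
	}
	return 0;
}
#endif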

/* ===========================================================================
 * Same as above, but achieves better compression. We use a lazy
 * evaluation for matches: a match is finally adopted only if there is
 * no better match at the next window position.
 *
 * Processes a new input file and returns its compressed length. Sets
 * the compressed length, crc, deflate flags and internal file
 * attributes.
 */

/* Flush the current block, with given end-of-file flag.
 * IN assertion: strstart is set to the end of the current match. */
#define FLUSH_BLOCK(eof) \
	flush_block( \
		G1.block_start >= 0L \
			? (char*)&G1.window[(unsigned)G1.block_start] \
			: (char*)NULL, \
		(ulg)G1.strstart - G1.block_start, \
		(eof) \
	)

/* Insert string s in the dictionary and set match_head to the previous head
 * of the hash chain (the most recent string with same hash key). Return
 * the previous length of the hash chain.
 * IN assertion: all calls to INSERT_STRING are made with consecutive
 * input characters and the first MIN_MATCH bytes of s are valid
 * (except for the last MIN_MATCH-1 bytes of the input file). */
#define INSERT_STRING(s, match_head) \
do { \
	UPDATE_HASH(G1.ins_h, G1.window[(s) + MIN_MATCH-1]); \
	G1.prev[(s) & WMASK] = match_head = head[G1.ins_h]; \
	head[G1.ins_h] = (s); \
} while (0)

static NOINLINE void deflate(void)
{
	IPos hash_head;             /* head of hash chain */
	IPos prev_match;            /* previous match */
	int flush;                  /* set if current block must be flushed */
	int match_available = 0;    /* set if previous match exists */
	unsigned match_length = MIN_MATCH - 1;  /* length of best match */

	/* Process the input block. */
	while (G1.lookahead != 0) {
		/* Insert the string window[strstart .. strstart+2] in the
		 * dictionary, and set hash_head to the head of the hash chain:
		 */
		INSERT_STRING(G1.strstart, hash_head);

		/* Find the longest match, discarding those <= prev_length.
		 */
		G1.prev_length = match_length;
		prev_match = G1.match_start;
		match_length = MIN_MATCH - 1;

		if (hash_head != 0 && G1.prev_length < max_lazy_match
		 && G1.strstart - hash_head <= MAX_DIST
		) {
			/* To simplify the code, we prevent matches with the string
			 * of window index 0 (in particular we have to avoid a match
			 * of the string with itself at the start of the input file).
			 */
			match_length = longest_match(hash_head);
			/* longest_match() sets match_start */
			if (match_length > G1.lookahead)
				match_length = G1.lookahead;

			/* Ignore a length 3 match if it is too distant: */
			if (match_length == MIN_MATCH && G1.strstart - G1.match_start > TOO_FAR) {
				/* If prev_match is also MIN_MATCH, G1.match_start is garbage
				 * but we will ignore the current match anyway.
				 */
				match_length--;
			}
		}
		/* If there was a match at the previous step and the current
		 * match is not better, output the previous match:
		 */
		if (G1.prev_length >= MIN_MATCH && match_length <= G1.prev_length) {
			check_match(G1.strstart - 1, prev_match, G1.prev_length);
			flush = ct_tally(G1.strstart - 1 - prev_match, G1.prev_length - MIN_MATCH);

			/* Insert in hash table all strings up to the end of the match.
			 * strstart-1 and strstart are already inserted.
			 */
			G1.lookahead -= G1.prev_length - 1;
			G1.prev_length -= 2;
			do {
				G1.strstart++;
				INSERT_STRING(G1.strstart, hash_head);
				/* strstart never exceeds WSIZE-MAX_MATCH, so there are
				 * always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH
				 * these bytes are garbage, but it does not matter since the
				 * next lookahead bytes will always be emitted as literals.
				 */
			} while (--G1.prev_length != 0);
			match_available = 0;
			match_length = MIN_MATCH - 1;
			G1.strstart++;
			if (flush) {
				FLUSH_BLOCK(0);
				G1.block_start = G1.strstart;
			}
		} else if (match_available) {
			/* If there was no match at the previous position, output a
			 * single literal. If there was a match but the current match
			 * is longer, truncate the previous match to a single literal.
			 */
			Tracevv((stderr, "%c", G1.window[G1.strstart - 1]));
			if (ct_tally(0, G1.window[G1.strstart - 1])) {
				FLUSH_BLOCK(0);
				G1.block_start = G1.strstart;
			}
			G1.strstart++;
			G1.lookahead--;
		} else {
			/* There is no previous match to compare with, wait for
			 * the next step to decide.
			 */
			match_available = 1;
			G1.strstart++;
			G1.lookahead--;
		}
		Assert(G1.strstart <= G1.isize && G1.lookahead <= G1.isize, "a bit too far");

		/* Make sure that we always have enough lookahead, except
		 * at the end of the input file. We need MAX_MATCH bytes
		 * for the next match, plus MIN_MATCH bytes to insert the
		 * string following the next match.
		 */
		fill_window_if_needed();
	}
	if (match_available)
		ct_tally(0, G1.window[G1.strstart - 1]);

	FLUSH_BLOCK(1);             /* eof */
}
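
/* Illustrative sketch (not compiled): the lazy-match control flow used by
 * deflate() above, reduced to its three cases. find_match(), emit_match()
 * and emit_literal() are hypothetical stand-ins for longest_match() and
 * ct_tally(); hash maintenance, block flushing and the TOO_FAR/nice_match
 * refinements are left out. The point is only that a match found at
 * position p is committed only if the match starting at p+1 is not longer.
 */
#if 0
static void lazy_match_demo(const unsigned char *in, unsigned len)
{
	unsigned pos = 0;
	unsigned prev_len = 0, prev_dist = 0;
	int have_prev = 0;          /* a byte (and maybe a match) is pending */

	while (pos < len) {
		unsigned cur_dist;
		unsigned cur_len = find_match(in, pos, &cur_dist);

		if (have_prev && prev_len >= 3 && cur_len <= prev_len) {
			/* Previous match wins: emit it and skip over its tail.
			 * It started at in[pos-1], so prev_len-1 bytes remain. */
			emit_match(prev_dist, prev_len);
			pos += prev_len - 1;
			have_prev = 0;
		} else if (have_prev) {
			/* Current match is better: demote the pending byte to a
			 * literal and keep the current match pending instead. */
			emit_literal(in[pos - 1]);
			prev_len = cur_len;
			prev_dist = cur_dist;
			pos++;
		} else {
			/* Nothing pending yet: remember this match, decide next time */
			prev_len = cur_len;
			prev_dist = cur_dist;
			have_prev = 1;
			pos++;
		}
	}
	if (have_prev)
		emit_literal(in[pos - 1]);  /* flush the last pending byte */
}
#endif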

/* ===========================================================================
 * Initialize the bit string routines.
 */
static void bi_init(void)
{
	//G1.bi_buf = 0; // globals are zeroed in pack_gzip()
	//G1.bi_valid = 0; // globals are zeroed in pack_gzip()
	//DEBUG_bits_sent(= 0L); // globals are zeroed in pack_gzip()
}

/* ===========================================================================
 * Initialize the "longest match" routines for a new file
 */
static void lm_init(void)
{
	unsigned j;

	/* Initialize the hash table. */
	memset(head, 0, HASH_SIZE * sizeof(*head));
	/* prev will be initialized on the fly */

	/* ??? reduce max_chain_length for binary files */

	//G1.strstart = 0; // globals are zeroed in pack_gzip()
	//G1.block_start = 0L; // globals are zeroed in pack_gzip()

	G1.lookahead = file_read(G1.window,
			sizeof(int) <= 2 ? (unsigned) WSIZE : 2 * WSIZE);

	if (G1.lookahead == 0 || G1.lookahead == (unsigned) -1) {
		G1.eofile = 1;
		G1.lookahead = 0;
		return;
	}
	//G1.eofile = 0; // globals are zeroed in pack_gzip()

	/* Make sure that we always have enough lookahead. This is important
	 * if input comes from a device such as a tty.
	 */
	fill_window_if_needed();

	//G1.ins_h = 0; // globals are zeroed in pack_gzip()
	for (j = 0; j < MIN_MATCH - 1; j++)
		UPDATE_HASH(G1.ins_h, G1.window[j]);
	/* If lookahead < MIN_MATCH, ins_h is garbage, but this is
	 * not important since only literal bytes will be emitted.
	 */
}

/* ===========================================================================
 * Allocate the match buffer, initialize the various tables and save the
 * location of the internal file attribute (ascii/binary) and method
 * (DEFLATE/STORE).
 * One callsite in zip()
 */
static void ct_init(void)
{
	int n;                      /* iterates over tree elements */
	int length;                 /* length value */
	int code;                   /* code value */
	int dist;                   /* distance index */

//	//G2.compressed_len = 0L; // globals are zeroed in pack_gzip()

#ifdef NOT_NEEDED
	if (G2.static_dtree[0].Len != 0)
		return;                 /* ct_init already called */
#endif

	/* Initialize the mapping length (0..255) -> length code (0..28) */
	length = 0;
	for (code = 0; code < LENGTH_CODES - 1; code++) {
		G2.base_length[code] = length;
		for (n = 0; n < (1 << extra_lbits[code]); n++) {
			G2.length_code[length++] = code;
		}
	}
	Assert(length == 256, "ct_init: length != 256");
	/* Note that the length 255 (match length 258) can be represented
	 * in two different ways: code 284 + 5 bits or code 285, so we
	 * overwrite length_code[255] to use the best encoding:
	 */
	G2.length_code[length - 1] = code;

	/* Initialize the mapping dist (0..32K) -> dist code (0..29) */
	dist = 0;
	for (code = 0; code < 16; code++) {
		G2.base_dist[code] = dist;
		for (n = 0; n < (1 << extra_dbits[code]); n++) {
			G2.dist_code[dist++] = code;
		}
	}
	Assert(dist == 256, "ct_init: dist != 256");
	dist >>= 7;                 /* from now on, all distances are divided by 128 */
	for (; code < D_CODES; code++) {
		G2.base_dist[code] = dist << 7;
		for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) {
			G2.dist_code[256 + dist++] = code;
		}
	}
	Assert(dist == 256, "ct_init: 256+dist != 512");

	/* Construct the codes of the static literal tree */
	//for (n = 0; n <= MAX_BITS; n++) // globals are zeroed in pack_gzip()
	//	G2.bl_count[n] = 0;

	n = 0;
	while (n <= 143) {
		G2.static_ltree[n++].Len = 8;
		//G2.bl_count[8]++;
	}
	//G2.bl_count[8] = 143 + 1;
	while (n <= 255) {
		G2.static_ltree[n++].Len = 9;
		//G2.bl_count[9]++;
	}
	//G2.bl_count[9] = 255 - 143;
	while (n <= 279) {
		G2.static_ltree[n++].Len = 7;
		//G2.bl_count[7]++;
	}
	//G2.bl_count[7] = 279 - 255;
	while (n <= 287) {
		G2.static_ltree[n++].Len = 8;
		//G2.bl_count[8]++;
	}
	//G2.bl_count[8] += 287 - 279;
	G2.bl_count[7] = 279 - 255;
	G2.bl_count[8] = (143 + 1) + (287 - 279);
	G2.bl_count[9] = 255 - 143;
	/* Codes 286 and 287 do not exist, but we must include them in the
	 * tree construction to get a canonical Huffman tree (longest code
	 * all ones)
	 */
	gen_codes((ct_data *) G2.static_ltree, L_CODES + 1);

	/* The static distance tree is trivial: */
	for (n = 0; n < D_CODES; n++) {
		G2.static_dtree[n].Len = 5;
		G2.static_dtree[n].Code = bi_reverse(n, 5);
	}

	/* Initialize the first block of the first file: */
	init_block();
}
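
/* Illustrative sketch (not compiled): the two-level distance lookup that
 * the dist_code[] table built in ct_init() supports. Codes for distances
 * 1..256 (dist 0..255) are stored directly; from index 256 on the table is
 * indexed by dist >> 7, which is why ct_init() divides dist by 128 halfway
 * through. The demo rebuilds the same tables from the standard DEFLATE
 * extra-bit counts (assumed to match this file's extra_dbits[]) and
 * spot-checks a few distances against RFC 1951; D_CODE() in this file
 * presumably performs the same lookup.
 */
#if 0
#include <stdio.h>

static const int demo_extra_dbits[30] = {
	0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13
};
static unsigned char demo_dist_code[512];

static int demo_d_code(unsigned dist)   /* dist = match distance - 1 */
{
	return dist < 256 ? demo_dist_code[dist] : demo_dist_code[256 + (dist >> 7)];
}

int main(void)
{
	int code, n, dist = 0;
	unsigned d;

	for (code = 0; code < 16; code++)       /* distances 1..256 */
		for (n = 0; n < (1 << demo_extra_dbits[code]); n++)
			demo_dist_code[dist++] = code;
	dist >>= 7;                             /* now count in units of 128 */
	for (; code < 30; code++)               /* distances 257..32768 */
		for (n = 0; n < (1 << (demo_extra_dbits[code] - 7)); n++)
			demo_dist_code[256 + dist++] = code;

	/* Expect: distance 1 -> code 0, 256 -> 15, 32768 -> 29 (RFC 1951) */
	for (d = 1; d <= 32768; d *= 2)
		printf("distance %5u -> dist code %d\n", d, demo_d_code(d - 1));
	return 0;
}
#endif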

/* ===========================================================================
 * Deflate in to out.
 * IN assertions: the input and output buffers are cleared.
 */
static void zip(void)
{
	unsigned deflate_flags;

	//G1.outcnt = 0; // globals are zeroed in pack_gzip()

	/* Write the header to the gzip file. See algorithm.doc for the format */
	/* magic header for gzip files: 1F 8B */
	/* compression method: 8 (DEFLATED) */
	/* general flags: 0 */
	put_32bit(0x00088b1f);
	put_32bit(0);               /* Unix timestamp */

	/* Write deflated file to zip file */
	G1.crc = ~0;

	bi_init();
	ct_init();
	lm_init();

	deflate_flags = 0x300;      /* extra flags. OS id = 3 (Unix) */
#if ENABLE_FEATURE_GZIP_LEVELS
	/* Note that compression levels below 4 do not exist in this version of gzip */
	if (comp_level_minus4 == 9 - 4) {
		deflate_flags |= 0x02;  /* SLOW flag */
	}
#endif
	put_16bit(deflate_flags);

	/* The 10 header bytes stored above leave outbuf misaligned for 32-bit stores, flush it */
	flush_outbuf_if_32bit_optimized();

	deflate();

	/* Write the crc and uncompressed size */
	put_32bit(~G1.crc);
	put_32bit(G1.isize);

	flush_outbuf();
}
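
/* Illustrative sketch (not compiled): the fixed gzip member layout that
 * zip() emits with put_32bit()/put_16bit(), spelled out byte by byte per
 * RFC 1952 (assuming those helpers store little-endian, as the gzip format
 * requires). The single 32-bit store of 0x00088b1f covers ID1, ID2, CM and
 * FLG at once; the deflate stream follows, then the CRC32/ISIZE trailer.
 * demo_gzip_header() is a hypothetical name used only here.
 */
#if 0
#include <stdio.h>
#include <string.h>

static size_t demo_gzip_header(unsigned char *out, int level9)
{
	unsigned char hdr[10] = {
		0x1f, 0x8b,         /* ID1, ID2: gzip magic                 */
		0x08,               /* CM: deflate                          */
		0x00,               /* FLG: no name/comment/extra field     */
		0, 0, 0, 0,         /* MTIME: 0 = no timestamp saved        */
		0x00,               /* XFL: 0; 2 would mean slowest/max     */
		0x03,               /* OS: 3 = Unix                         */
	};
	if (level9)
		hdr[8] = 0x02;      /* what "deflate_flags |= 0x02" selects */
	memcpy(out, hdr, sizeof(hdr));
	return sizeof(hdr);
}

int main(void)
{
	unsigned char buf[10];
	size_t i, n = demo_gzip_header(buf, 0);

	for (i = 0; i < n; i++)
		printf("%02x ", buf[i]);
	printf("\n");               /* 1f 8b 08 00 00 00 00 00 00 03 */
	return 0;
}
#endif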

/* ======================================================================== */
static
IF_DESKTOP(long long) int FAST_FUNC pack_gzip(transformer_state_t *xstate UNUSED_PARAM)
{
	/* Reinit G1.xxx except pointers to allocated buffers, and entire G2 */
	memset(&G1.crc, 0, (sizeof(G1) - offsetof(struct globals, crc)) + sizeof(G2));

	/* Clear input and output buffers */
	//G1.outcnt = 0;
#ifdef DEBUG
	//G1.insize = 0;
#endif
	//G1.isize = 0;

	/* Reinit G2.xxx */
	G2.l_desc.dyn_tree = G2.dyn_ltree;
	G2.l_desc.static_tree = G2.static_ltree;
	G2.l_desc.extra_bits = extra_lbits;
	G2.l_desc.extra_base = LITERALS + 1;
	G2.l_desc.elems = L_CODES;
	G2.l_desc.max_length = MAX_BITS;
	//G2.l_desc.max_code = 0;
	G2.d_desc.dyn_tree = G2.dyn_dtree;
	G2.d_desc.static_tree = G2.static_dtree;
	G2.d_desc.extra_bits = extra_dbits;
	//G2.d_desc.extra_base = 0;
	G2.d_desc.elems = D_CODES;
	G2.d_desc.max_length = MAX_BITS;
	//G2.d_desc.max_code = 0;
	G2.bl_desc.dyn_tree = G2.bl_tree;
	//G2.bl_desc.static_tree = NULL;
	G2.bl_desc.extra_bits = extra_blbits;
	//G2.bl_desc.extra_base = 0;
	G2.bl_desc.elems = BL_CODES;
	G2.bl_desc.max_length = MAX_BL_BITS;
	//G2.bl_desc.max_code = 0;

#if 0
	/* Saving of timestamp is disabled. Why?
	 * - it is not Y2038-safe.
	 * - some people want deterministic results
	 *   (normally they'd use -n, but our -n is a nop).
	 * - it's bloat.
	 * Per RFC 1952, gzfile.time=0 is "no timestamp".
	 * If users will demand this to be reinstated,
	 * implement -n "don't save timestamp".
	 */
	struct stat s;
	s.st_ctime = 0;
	fstat(STDIN_FILENO, &s);
	zip(s.st_ctime);
#else
	zip();
#endif
	return 0;
}

#if ENABLE_FEATURE_GZIP_LONG_OPTIONS
static const char gzip_longopts[] ALIGN1 =
	"stdout\0"              No_argument       "c"
	"to-stdout\0"           No_argument       "c"
	"force\0"               No_argument       "f"
	"verbose\0"             No_argument       "v"
#if ENABLE_FEATURE_GZIP_DECOMPRESS
	"decompress\0"          No_argument       "d"
	"uncompress\0"          No_argument       "d"
	"test\0"                No_argument       "t"
#endif
	"quiet\0"               No_argument       "q"
	"fast\0"                No_argument       "1"
	"best\0"                No_argument       "9"
	"no-name\0"             No_argument       "n"
	;
#endif

/*
 * Linux kernel build uses gzip -d -n. We accept and ignore -n.
 * Man page says:
 * -n --no-name
 * gzip: do not save the original file name and time stamp.
 * (The original name is always saved if the name had to be truncated.)
 * gunzip: do not restore the original file name/time even if present
 * (remove only the gzip suffix from the compressed file name).
 * This option is the default when decompressing.
 * -N --name
 * gzip: always save the original file name and time stamp (this is the default)
 * gunzip: restore the original file name and time stamp if present.
 */

int gzip_main(int argc, char **argv) MAIN_EXTERNALLY_VISIBLE;
#if ENABLE_FEATURE_GZIP_DECOMPRESS
int gzip_main(int argc, char **argv)
#else
int gzip_main(int argc UNUSED_PARAM, char **argv)
#endif
{
	unsigned opt;
#if ENABLE_FEATURE_GZIP_LEVELS
	static const struct {
		uint8_t good;
		uint8_t chain_shift;
		uint8_t lazy2;
		uint8_t nice2;
	} gzip_level_config[6] = {
		{4,   4,   4/2,  16/2}, /* Level 4 */
		{8,   5,  16/2,  32/2}, /* Level 5 */
		{8,   7,  16/2, 128/2}, /* Level 6 */
		{8,   8,  32/2, 128/2}, /* Level 7 */
		{32, 10, 128/2, 258/2}, /* Level 8 */
		{32, 12, 258/2, 258/2}, /* Level 9 */
	};
#endif

	SET_PTR_TO_GLOBALS((char *)xzalloc(sizeof(struct globals)+sizeof(struct globals2))
			+ sizeof(struct globals));

	/* Must match bbunzip's constants OPT_STDOUT, OPT_FORCE! */
#if ENABLE_FEATURE_GZIP_LONG_OPTIONS
	opt = getopt32long(argv, BBUNPK_OPTSTR IF_FEATURE_GZIP_DECOMPRESS("dt") "n123456789", gzip_longopts);
#else
	opt = getopt32(argv, BBUNPK_OPTSTR IF_FEATURE_GZIP_DECOMPRESS("dt") "n123456789");
#endif
#if ENABLE_FEATURE_GZIP_DECOMPRESS /* gunzip_main may not be visible... */
	if (opt & (BBUNPK_OPT_DECOMPRESS|BBUNPK_OPT_TEST)) /* -d and/or -t */
		return gunzip_main(argc, argv);
#endif
#if ENABLE_FEATURE_GZIP_LEVELS
	opt >>= (BBUNPK_OPTSTRLEN IF_FEATURE_GZIP_DECOMPRESS(+ 2) + 1); /* drop cfkvq[dt]n bits */
	if (opt == 0)
		opt = 1 << 5; /* default: 6 */
	opt = ffs(opt >> 4); /* Maps -1..-4 to [0], -5 to [1] ... -9 to [5] */

	comp_level_minus4 = opt;

	max_chain_length = 1 << gzip_level_config[opt].chain_shift;
	good_match = gzip_level_config[opt].good;
	max_lazy_match = gzip_level_config[opt].lazy2 * 2;
	nice_match = gzip_level_config[opt].nice2 * 2;
#endif
	option_mask32 &= BBUNPK_OPTSTRMASK; /* retain only -cfkvq */

	/* Allocate all global buffers (for DYN_ALLOC option) */
	ALLOC(uch, G1.l_buf, INBUFSIZ);
	ALLOC(uch, G1.outbuf, OUTBUFSIZ);
	ALLOC(ush, G1.d_buf, DIST_BUFSIZE);
	ALLOC(uch, G1.window, 2L * WSIZE);
	ALLOC(ush, G1.prev, 1L << BITS);

	/* Initialize the CRC32 table */
	global_crc32_new_table_le();

	argv += optind;
	return bbunpack(argv, pack_gzip, append_ext, "gz");
}
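
/* Illustrative sketch (not compiled): how the option bits end up selecting
 * a gzip_level_config[] entry in gzip_main() above. After the shift, bit 0
 * corresponds to -1 and bit 8 to -9; dropping four bits and taking ffs()
 * (POSIX "find first set", 1-based, 0 when no bit is set) collapses -1..-4
 * onto entry [0] and sends -5..-9 to entries [1]..[5].
 */
#if 0
#include <stdio.h>
#include <strings.h>

int main(void)
{
	int level;

	for (level = 1; level <= 9; level++) {
		unsigned opt = 1u << (level - 1);   /* bit position for -<level> after the shift */
		unsigned idx = ffs(opt >> 4);       /* 0 for -1..-4, 1..5 for -5..-9 */

		printf("-%d -> gzip_level_config[%u] (effective level %u)\n",
			level, idx, idx + 4);
	}
	return 0;
}
#endif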