/* Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
 * Version: 1.0.1
 * LastModified: Dec 25 1999
 */

/* Interface:
 * data = deflate(src);
 */
const deflate = (function () {
  /* constant parameters */
  var zip_WSIZE = 32768; // Sliding Window size
  var zip_STORED_BLOCK = 0;
  var zip_STATIC_TREES = 1;
  var zip_DYN_TREES = 2;

  /* for deflate */
  var zip_DEFAULT_LEVEL = 6;
  var zip_FULL_SEARCH = true;
  var zip_INBUFSIZ = 32768; // Input buffer size
  var zip_INBUF_EXTRA = 64; // Extra buffer
  var zip_OUTBUFSIZ = 1024 * 8;
  var zip_window_size = 2 * zip_WSIZE;
  var zip_MIN_MATCH = 3;
  var zip_MAX_MATCH = 258;
  var zip_BITS = 16;
  // for SMALL_MEM
  var zip_LIT_BUFSIZE = 0x2000;
  var zip_HASH_BITS = 13;
  // for MEDIUM_MEM
  // var zip_LIT_BUFSIZE = 0x4000;
  // var zip_HASH_BITS = 14;
  // for BIG_MEM
  // var zip_LIT_BUFSIZE = 0x8000;
  // var zip_HASH_BITS = 15;
  //if(zip_LIT_BUFSIZE > zip_INBUFSIZ)
  //  alert("error: zip_INBUFSIZ is too small");
  //if((zip_WSIZE<<1) > (1<<zip_BITS))
  //  alert("error: zip_WSIZE is too large");
  //if(zip_HASH_BITS > zip_BITS-1)
  //  alert("error: zip_HASH_BITS is too large");
  //if(zip_HASH_BITS < 8 || zip_MAX_MATCH != 258)
  //  alert("error: Code too clever");
  var zip_DIST_BUFSIZE = zip_LIT_BUFSIZE;
  var zip_HASH_SIZE = 1 << zip_HASH_BITS;
  var zip_HASH_MASK = zip_HASH_SIZE - 1;
  var zip_WMASK = zip_WSIZE - 1;
  var zip_NIL = 0; // Tail of hash chains
  var zip_TOO_FAR = 4096;
  var zip_MIN_LOOKAHEAD = zip_MAX_MATCH + zip_MIN_MATCH + 1;
  var zip_MAX_DIST = zip_WSIZE - zip_MIN_LOOKAHEAD;
  var zip_SMALLEST = 1;
  var zip_MAX_BITS = 15;
  var zip_MAX_BL_BITS = 7;
  var zip_LENGTH_CODES = 29;
  var zip_LITERALS = 256;
  var zip_END_BLOCK = 256;
  var zip_L_CODES = zip_LITERALS + 1 + zip_LENGTH_CODES;
  var zip_D_CODES = 30;
  var zip_BL_CODES = 19;
  var zip_REP_3_6 = 16;
  var zip_REPZ_3_10 = 17;
  var zip_REPZ_11_138 = 18;
  var zip_HEAP_SIZE = 2 * zip_L_CODES + 1;
  var zip_H_SHIFT = parseInt((zip_HASH_BITS + zip_MIN_MATCH - 1) /
                             zip_MIN_MATCH);

  /* variables */
  var zip_free_queue;
  var zip_qhead, zip_qtail;
  var zip_initflag;
  var zip_outbuf = null;
  var zip_outcnt, zip_outoff;
  var zip_complete;
  var zip_window;
  var zip_d_buf;
  var zip_l_buf;
  var zip_prev;
  var zip_bi_buf;
  var zip_bi_valid;
  var zip_block_start;
  var zip_ins_h;
  var zip_hash_head;
  var zip_prev_match;
  var zip_match_available;
  var zip_match_length;
  var zip_prev_length;
  var zip_strstart;
  var zip_match_start;
  var zip_eofile;
  var zip_lookahead;
  var zip_max_chain_length;
  var zip_max_lazy_match;
  var zip_compr_level;
  var zip_good_match;
  var zip_nice_match;
  var zip_dyn_ltree;
  var zip_dyn_dtree;
  var zip_static_ltree;
  var zip_static_dtree;
  var zip_bl_tree;
  var zip_l_desc;
  var zip_d_desc;
  var zip_bl_desc;
  var zip_bl_count;
  var zip_heap;
  var zip_heap_len;
  var zip_heap_max;
  var zip_depth;
  var zip_length_code;
  var zip_dist_code;
  var zip_base_length;
  var zip_base_dist;
  var zip_flag_buf;
  var zip_last_lit;
  var zip_last_dist;
  var zip_last_flags;
  var zip_flags;
  var zip_flag_bit;
  var zip_opt_len;
  var zip_static_len;
  var zip_deflate_data;
  var zip_deflate_pos;

  /* objects (deflate) */
  function zip_DeflateCT() {
    this.fc = 0; // frequency count or bit string
    this.dl = 0; // father node in Huffman tree or length of bit string
  }

  function zip_DeflateTreeDesc() {
    this.dyn_tree = null;    // the dynamic tree
    this.static_tree = null; // corresponding static tree or NULL
    this.extra_bits = null;  // extra bits for each code or NULL
    this.extra_base = 0;     // base index for extra_bits
    this.elems = 0;          // max number of elements in the tree
    this.max_length = 0;     // max bit length for the codes
    this.max_code = 0;       // largest code with non zero frequency
  }

  /* Values for max_lazy_match, good_match and max_chain_length, depending on
   * the desired pack level (0..9). The values given below have been tuned to
   * exclude worst case performance for pathological files. Better values may be
   * found for specific files.
   */
  function zip_DeflateConfiguration(a, b, c, d) {
    this.good_length = a; // reduce lazy search above this match length
    this.max_lazy = b;    // do not perform lazy search above this match length
    this.nice_length = c; // quit search above this match length
    this.max_chain = d;
  }

  function zip_DeflateBuffer() {
    this.next = null;
    this.len = 0;
    this.ptr = new Array(zip_OUTBUFSIZ);
    this.off = 0;
  }

  /* constant tables */
  var zip_extra_lbits = [
    0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0];
  var zip_extra_dbits = [
    0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13];
  var zip_extra_blbits = [
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7];
  var zip_bl_order = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15];
  var zip_configuration_table = [
    new zip_DeflateConfiguration(0, 0, 0, 0),
    new zip_DeflateConfiguration(4, 4, 8, 4),
    new zip_DeflateConfiguration(4, 5, 16, 8),
    new zip_DeflateConfiguration(4, 6, 32, 32),
    new zip_DeflateConfiguration(4, 4, 16, 16),
    new zip_DeflateConfiguration(8, 16, 32, 32),
    new zip_DeflateConfiguration(8, 16, 128, 128),
    new zip_DeflateConfiguration(8, 32, 128, 256),
    new zip_DeflateConfiguration(32, 128, 258, 1024),
    new zip_DeflateConfiguration(32, 258, 258, 4096)];


  /* routines (deflate) */

  function zip_deflate_start(level) {
    var i;

    if (!level)
      level = zip_DEFAULT_LEVEL;
    else if (level < 1)
      level = 1;
    else if (level > 9)
      level = 9;

    zip_compr_level = level;
    zip_initflag = false;
    zip_eofile = false;
    if (zip_outbuf != null)
      return;

    zip_free_queue = zip_qhead = zip_qtail = null;
    zip_outbuf = new Array(zip_OUTBUFSIZ);
    zip_window = new Array(zip_window_size);
    zip_d_buf = new Array(zip_DIST_BUFSIZE);
    zip_l_buf = new Array(zip_INBUFSIZ + zip_INBUF_EXTRA);
    zip_prev = new Array(1 << zip_BITS);
    zip_dyn_ltree = new Array(zip_HEAP_SIZE);
    for (i = 0; i < zip_HEAP_SIZE; i++)
      zip_dyn_ltree[i] = new zip_DeflateCT();
    zip_dyn_dtree = new Array(2 * zip_D_CODES + 1);
    for (i = 0; i < 2 * zip_D_CODES + 1; i++)
      zip_dyn_dtree[i] = new zip_DeflateCT();
    zip_static_ltree = new Array(zip_L_CODES + 2);
    for (i = 0; i < zip_L_CODES + 2; i++)
      zip_static_ltree[i] = new zip_DeflateCT();
    zip_static_dtree = new Array(zip_D_CODES);
    for (i = 0; i < zip_D_CODES; i++)
      zip_static_dtree[i] = new zip_DeflateCT();
    zip_bl_tree = new Array(2 * zip_BL_CODES + 1);
    for (i = 0; i < 2 * zip_BL_CODES + 1; i++)
      zip_bl_tree[i] = new zip_DeflateCT();
    zip_l_desc = new zip_DeflateTreeDesc();
    zip_d_desc = new zip_DeflateTreeDesc();
    zip_bl_desc = new zip_DeflateTreeDesc();
    zip_bl_count = new Array(zip_MAX_BITS + 1);
    zip_heap = new Array(2 * zip_L_CODES + 1);
    zip_depth = new Array(2 * zip_L_CODES + 1);
    zip_length_code = new Array(zip_MAX_MATCH - zip_MIN_MATCH + 1);
    zip_dist_code = new Array(512);
    zip_base_length = new Array(zip_LENGTH_CODES);
    zip_base_dist = new Array(zip_D_CODES);
    zip_flag_buf = new Array(parseInt(zip_LIT_BUFSIZE / 8));
  }

  function zip_deflate_end() {
    zip_free_queue = zip_qhead = zip_qtail = null;
    zip_outbuf = null;
    zip_window = null;
    zip_d_buf = null;
    zip_l_buf = null;
    zip_prev = null;
    zip_dyn_ltree = null;
    zip_dyn_dtree = null;
    zip_static_ltree = null;
    zip_static_dtree = null;
    zip_bl_tree = null;
    zip_l_desc = null;
    zip_d_desc = null;
    zip_bl_desc = null;
    zip_bl_count = null;
    zip_heap = null;
    zip_depth = null;
    zip_length_code = null;
    zip_dist_code = null;
    zip_base_length = null;
    zip_base_dist = null;
    zip_flag_buf = null;
  }

  function zip_reuse_queue(p) {
    p.next = zip_free_queue;
    zip_free_queue = p;
  }

  function zip_new_queue() {
    var p;

    if (zip_free_queue != null) {
      p = zip_free_queue;
      zip_free_queue = zip_free_queue.next;
    }
    else
      p = new zip_DeflateBuffer();
    p.next = null;
    p.len = p.off = 0;

    return p;
  }

  function zip_head1(i) {
    return zip_prev[zip_WSIZE + i];
  }

  function zip_head2(i, val) {
    return zip_prev[zip_WSIZE + i] = val;
  }

  /* put_byte is used for the compressed output, put_ubyte for the
   * uncompressed output. However unlzw() uses window for its
   * suffix table instead of its output buffer, so it does not use put_ubyte
   * (to be cleaned up).
   */
  function zip_put_byte(c) {
    zip_outbuf[zip_outoff + zip_outcnt++] = c;
    if (zip_outoff + zip_outcnt == zip_OUTBUFSIZ)
      zip_qoutbuf();
  }

  /* Output a 16 bit value, lsb first */
  function zip_put_short(w) {
    w &= 0xffff;
    if (zip_outoff + zip_outcnt < zip_OUTBUFSIZ - 2) {
      zip_outbuf[zip_outoff + zip_outcnt++] = (w & 0xff);
      zip_outbuf[zip_outoff + zip_outcnt++] = (w >>> 8);
    } else {
      zip_put_byte(w & 0xff);
      zip_put_byte(w >>> 8);
    }
  }

  /* ==========================================================================
   * Insert string s in the dictionary and set match_head to the previous head
   * of the hash chain (the most recent string with same hash key). Return
   * the previous length of the hash chain.
   * IN assertion: all calls to INSERT_STRING are made with consecutive
   * input characters and the first MIN_MATCH bytes of s are valid
   * (except for the last MIN_MATCH-1 bytes of the input file).
   */
  function zip_INSERT_STRING() {
    zip_ins_h = ((zip_ins_h << zip_H_SHIFT)
                 ^ (zip_window[zip_strstart + zip_MIN_MATCH - 1] & 0xff))
                & zip_HASH_MASK;
    zip_hash_head = zip_head1(zip_ins_h);
    zip_prev[zip_strstart & zip_WMASK] = zip_hash_head;
    zip_head2(zip_ins_h, zip_strstart);
  }

  /* Send a code of the given tree. c and tree must not have side effects */
  function zip_SEND_CODE(c, tree) {
    zip_send_bits(tree[c].fc, tree[c].dl);
  }

  /* Mapping from a distance to a distance code. dist is the distance - 1 and
   * must not have side effects. dist_code[256] and dist_code[257] are never
   * used.
   */
  function zip_D_CODE(dist) {
    return (dist < 256 ? zip_dist_code[dist]
            : zip_dist_code[256 + (dist >> 7)]) & 0xff;
  }

  /* ==========================================================================
   * Compares two subtrees, using the tree depth as tie breaker when
   * the subtrees have equal frequency. This minimizes the worst case length.
   */
  function zip_SMALLER(tree, n, m) {
    return tree[n].fc < tree[m].fc ||
      (tree[n].fc == tree[m].fc && zip_depth[n] <= zip_depth[m]);
  }

  /* ==========================================================================
   * read string data
   */
  function zip_read_buff(buff, offset, n) {
    var i;
    for (i = 0; i < n && zip_deflate_pos < zip_deflate_data.length; i++)
      buff[offset + i] =
        zip_deflate_data.charCodeAt(zip_deflate_pos++) & 0xff;
    return i;
  }

  /* ==========================================================================
   * Initialize the "longest match" routines for a new file
   */
  function zip_lm_init() {
    var j;

    /* Initialize the hash table. */
    for (j = 0; j < zip_HASH_SIZE; j++)
      // zip_head2(j, zip_NIL);
      zip_prev[zip_WSIZE + j] = 0;
    /* prev will be initialized on the fly */

    /* Set the default configuration parameters:
     */
    zip_max_lazy_match = zip_configuration_table[zip_compr_level].max_lazy;
    zip_good_match = zip_configuration_table[zip_compr_level].good_length;
    if (!zip_FULL_SEARCH)
      zip_nice_match = zip_configuration_table[zip_compr_level].nice_length;
    zip_max_chain_length = zip_configuration_table[zip_compr_level].max_chain;

    zip_strstart = 0;
    zip_block_start = 0;

    zip_lookahead = zip_read_buff(zip_window, 0, 2 * zip_WSIZE);
    if (zip_lookahead <= 0) {
      zip_eofile = true;
      zip_lookahead = 0;
      return;
    }
    zip_eofile = false;
    /* Make sure that we always have enough lookahead. This is important
     * if input comes from a device such as a tty.
     */
    while (zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile)
      zip_fill_window();

    /* If lookahead < MIN_MATCH, ins_h is garbage, but this is
     * not important since only literal bytes will be emitted.
     */
    zip_ins_h = 0;
    for (j = 0; j < zip_MIN_MATCH - 1; j++) {
      // UPDATE_HASH(ins_h, window[j]);
      zip_ins_h = ((zip_ins_h << zip_H_SHIFT) ^ (zip_window[j] & 0xff)) & zip_HASH_MASK;
    }
  }

  /* ==========================================================================
   * Set match_start to the longest match starting at the given string and
   * return its length. Matches shorter or equal to prev_length are discarded,
   * in which case the result is equal to prev_length and match_start is
   * garbage.
   * IN assertions: cur_match is the head of the hash chain for the current
   * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
   */
  function zip_longest_match(cur_match) {
    var chain_length = zip_max_chain_length; // max hash chain length
    var scanp = zip_strstart;                // current string
    var matchp;                              // matched string
    var len;                                 // length of current match
    var best_len = zip_prev_length;          // best match length so far

    /* Stop when cur_match becomes <= limit. To simplify the code,
     * we prevent matches with the string of window index 0.
     */
    var limit = (zip_strstart > zip_MAX_DIST ?
                 zip_strstart - zip_MAX_DIST : zip_NIL);

    var strendp = zip_strstart + zip_MAX_MATCH;
    var scan_end1 = zip_window[scanp + best_len - 1];
    var scan_end = zip_window[scanp + best_len];

    /* Do not waste too much time if we already have a good match: */
    if (zip_prev_length >= zip_good_match)
      chain_length >>= 2;

    // Assert(encoder->strstart <= window_size-MIN_LOOKAHEAD, "insufficient lookahead");

    do {
      // Assert(cur_match < encoder->strstart, "no future");
      matchp = cur_match;

      /* Skip to next match if the match length cannot increase
       * or if the match length is less than 2:
       */
      if (zip_window[matchp + best_len] != scan_end ||
          zip_window[matchp + best_len - 1] != scan_end1 ||
          zip_window[matchp] != zip_window[scanp] ||
          zip_window[++matchp] != zip_window[scanp + 1]) {
        continue;
      }

      /* The check at best_len-1 can be removed because it will be made
       * again later. (This heuristic is not always a win.)
       * It is not necessary to compare scan[2] and match[2] since they
       * are always equal when the other bytes match, given that
       * the hash keys are equal and that HASH_BITS >= 8.
       */
      scanp += 2;
      matchp++;

      /* We check for insufficient lookahead only every 8th comparison;
       * the 256th check will be made at strstart+258.
       */
      do {
      } while (zip_window[++scanp] == zip_window[++matchp] &&
               zip_window[++scanp] == zip_window[++matchp] &&
               zip_window[++scanp] == zip_window[++matchp] &&
               zip_window[++scanp] == zip_window[++matchp] &&
               zip_window[++scanp] == zip_window[++matchp] &&
               zip_window[++scanp] == zip_window[++matchp] &&
               zip_window[++scanp] == zip_window[++matchp] &&
               zip_window[++scanp] == zip_window[++matchp] &&
               scanp < strendp);

      len = zip_MAX_MATCH - (strendp - scanp);
      scanp = strendp - zip_MAX_MATCH;

      if (len > best_len) {
        zip_match_start = cur_match;
        best_len = len;
        if (zip_FULL_SEARCH) {
          if (len >= zip_MAX_MATCH) break;
        } else {
          if (len >= zip_nice_match) break;
        }

        scan_end1 = zip_window[scanp + best_len - 1];
        scan_end = zip_window[scanp + best_len];
      }
    } while ((cur_match = zip_prev[cur_match & zip_WMASK]) > limit
             && --chain_length != 0);

    return best_len;
  }

  /* ==========================================================================
   * Fill the window when the lookahead becomes insufficient.
   * Updates strstart and lookahead, and sets eofile if end of input file.
   * IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0
   * OUT assertions: at least one byte has been read, or eofile is set;
   * file reads are performed for at least two bytes (required for the
   * translate_eol option).
   */
  function zip_fill_window() {
    var n, m;

    // Amount of free space at the end of the window.
    var more = zip_window_size - zip_lookahead - zip_strstart;

    /* If the window is almost full and there is insufficient lookahead,
     * move the upper half to the lower one to make room in the upper half.
     */
    if (more == -1) {
      /* Very unlikely, but possible on 16 bit machine if strstart == 0
       * and lookahead == 1 (input done one byte at a time)
       */
      more--;
    } else if (zip_strstart >= zip_WSIZE + zip_MAX_DIST) {
      /* By the IN assertion, the window is not empty so we can't confuse
       * more == 0 with more == 64K on a 16 bit machine.
       */
      // Assert(window_size == (ulg)2*WSIZE, "no sliding with BIG_MEM");

      // System.arraycopy(window, WSIZE, window, 0, WSIZE);
      for (n = 0; n < zip_WSIZE; n++)
        zip_window[n] = zip_window[n + zip_WSIZE];

      zip_match_start -= zip_WSIZE;
      zip_strstart -= zip_WSIZE; /* we now have strstart >= MAX_DIST: */
      zip_block_start -= zip_WSIZE;

      for (n = 0; n < zip_HASH_SIZE; n++) {
        m = zip_head1(n);
        zip_head2(n, m >= zip_WSIZE ? m - zip_WSIZE : zip_NIL);
      }
      for (n = 0; n < zip_WSIZE; n++) {
        /* If n is not on any hash chain, prev[n] is garbage but
         * its value will never be used.
         */
        m = zip_prev[n];
        zip_prev[n] = (m >= zip_WSIZE ? m - zip_WSIZE : zip_NIL);
      }
      more += zip_WSIZE;
    }
    // At this point, more >= 2
    if (!zip_eofile) {
      n = zip_read_buff(zip_window, zip_strstart + zip_lookahead, more);
      if (n <= 0)
        zip_eofile = true;
      else
        zip_lookahead += n;
    }
  }

  /* ==========================================================================
   * Processes a new input file and returns its compressed length. This
   * function does not perform lazy evaluation of matches and inserts
   * new strings in the dictionary only for unmatched strings or for short
   * matches. It is used only for the fast compression options.
   */
  function zip_deflate_fast() {
    while (zip_lookahead != 0 && zip_qhead == null) {
      var flush; // set if current block must be flushed

      /* Insert the string window[strstart .. strstart+2] in the
       * dictionary, and set hash_head to the head of the hash chain:
       */
      zip_INSERT_STRING();

      /* Find the longest match, discarding those <= prev_length.
       * At this point we have always match_length < MIN_MATCH
       */
      if (zip_hash_head != zip_NIL &&
          zip_strstart - zip_hash_head <= zip_MAX_DIST) {
        /* To simplify the code, we prevent matches with the string
         * of window index 0 (in particular we have to avoid a match
         * of the string with itself at the start of the input file).
         */
        zip_match_length = zip_longest_match(zip_hash_head);
        /* longest_match() sets match_start */
        if (zip_match_length > zip_lookahead)
          zip_match_length = zip_lookahead;
      }
      if (zip_match_length >= zip_MIN_MATCH) {
        // check_match(strstart, match_start, match_length);

        flush = zip_ct_tally(zip_strstart - zip_match_start,
                             zip_match_length - zip_MIN_MATCH);
        zip_lookahead -= zip_match_length;

        /* Insert new strings in the hash table only if the match length
         * is not too large. This saves time but degrades compression.
         */
        if (zip_match_length <= zip_max_lazy_match) {
          zip_match_length--; // string at strstart already in hash table
          do {
            zip_strstart++;
            zip_INSERT_STRING();
            /* strstart never exceeds WSIZE-MAX_MATCH, so there are
             * always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH
             * these bytes are garbage, but it does not matter since
             * the next lookahead bytes will be emitted as literals.
             */
          } while (--zip_match_length != 0);
          zip_strstart++;
        } else {
          zip_strstart += zip_match_length;
          zip_match_length = 0;
          zip_ins_h = zip_window[zip_strstart] & 0xff;
          // UPDATE_HASH(ins_h, window[strstart + 1]);
          zip_ins_h = ((zip_ins_h << zip_H_SHIFT) ^ (zip_window[zip_strstart + 1] & 0xff)) & zip_HASH_MASK;

          //#if MIN_MATCH != 3
          //  Call UPDATE_HASH() MIN_MATCH-3 more times
          //#endif

        }
      } else {
        /* No match, output a literal byte */
        flush = zip_ct_tally(0, zip_window[zip_strstart] & 0xff);
        zip_lookahead--;
        zip_strstart++;
      }
      if (flush) {
        zip_flush_block(0);
        zip_block_start = zip_strstart;
      }

      /* Make sure that we always have enough lookahead, except
       * at the end of the input file. We need MAX_MATCH bytes
       * for the next match, plus MIN_MATCH bytes to insert the
       * string following the next match.
       */
      while (zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile)
        zip_fill_window();
    }
  }

  function zip_deflate_better() {
    /* Process the input block. */
    while (zip_lookahead != 0 && zip_qhead == null) {
      /* Insert the string window[strstart .. strstart+2] in the
       * dictionary, and set hash_head to the head of the hash chain:
       */
      zip_INSERT_STRING();

      /* Find the longest match, discarding those <= prev_length.
       */
      zip_prev_length = zip_match_length;
      zip_prev_match = zip_match_start;
      zip_match_length = zip_MIN_MATCH - 1;

      if (zip_hash_head != zip_NIL &&
          zip_prev_length < zip_max_lazy_match &&
          zip_strstart - zip_hash_head <= zip_MAX_DIST) {
        /* To simplify the code, we prevent matches with the string
         * of window index 0 (in particular we have to avoid a match
         * of the string with itself at the start of the input file).
         */
        zip_match_length = zip_longest_match(zip_hash_head);
        /* longest_match() sets match_start */
        if (zip_match_length > zip_lookahead)
          zip_match_length = zip_lookahead;

        /* Ignore a length 3 match if it is too distant: */
        if (zip_match_length == zip_MIN_MATCH &&
            zip_strstart - zip_match_start > zip_TOO_FAR) {
          /* If prev_match is also MIN_MATCH, match_start is garbage
           * but we will ignore the current match anyway.
           */
          zip_match_length--;
        }
      }
      /* If there was a match at the previous step and the current
       * match is not better, output the previous match:
       */
      if (zip_prev_length >= zip_MIN_MATCH &&
          zip_match_length <= zip_prev_length) {
        var flush; // set if current block must be flushed

        // check_match(strstart - 1, prev_match, prev_length);
        flush = zip_ct_tally(zip_strstart - 1 - zip_prev_match,
                             zip_prev_length - zip_MIN_MATCH);

        /* Insert in hash table all strings up to the end of the match.
         * strstart-1 and strstart are already inserted.
         */
        zip_lookahead -= zip_prev_length - 1;
        zip_prev_length -= 2;
        do {
          zip_strstart++;
          zip_INSERT_STRING();
          /* strstart never exceeds WSIZE-MAX_MATCH, so there are
           * always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH
           * these bytes are garbage, but it does not matter since the
           * next lookahead bytes will always be emitted as literals.
           */
        } while (--zip_prev_length != 0);
        zip_match_available = 0;
        zip_match_length = zip_MIN_MATCH - 1;
        zip_strstart++;
        if (flush) {
          zip_flush_block(0);
          zip_block_start = zip_strstart;
        }
      } else if (zip_match_available != 0) {
        /* If there was no match at the previous position, output a
         * single literal. If there was a match but the current match
         * is longer, truncate the previous match to a single literal.
         */
        if (zip_ct_tally(0, zip_window[zip_strstart - 1] & 0xff)) {
          zip_flush_block(0);
          zip_block_start = zip_strstart;
        }
        zip_strstart++;
        zip_lookahead--;
      } else {
        /* There is no previous match to compare with, wait for
         * the next step to decide.
         */
        zip_match_available = 1;
        zip_strstart++;
        zip_lookahead--;
      }

      /* Make sure that we always have enough lookahead, except
       * at the end of the input file. We need MAX_MATCH bytes
       * for the next match, plus MIN_MATCH bytes to insert the
       * string following the next match.
       */
      while (zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile)
        zip_fill_window();
    }
  }

  function zip_init_deflate() {
    if (zip_eofile)
      return;
    zip_bi_buf = 0;
    zip_bi_valid = 0;
    zip_ct_init();
    zip_lm_init();

    zip_qhead = null;
    zip_outcnt = 0;
    zip_outoff = 0;

    if (zip_compr_level <= 3) {
      zip_prev_length = zip_MIN_MATCH - 1;
      zip_match_length = 0;
    }
    else {
      zip_match_length = zip_MIN_MATCH - 1;
      zip_match_available = 0;
    }

    zip_complete = false;
  }

  /* ==========================================================================
   * Same as above, but achieves better compression. We use a lazy
   * evaluation for matches: a match is finally adopted only if there is
   * no better match at the next window position.
   */
  function zip_deflate_internal(buff, off, buff_size) {
    var n;

    if (!zip_initflag) {
      zip_init_deflate();
      zip_initflag = true;
      if (zip_lookahead == 0) { // empty
        zip_complete = true;
        return 0;
      }
    }

    if ((n = zip_qcopy(buff, off, buff_size)) == buff_size)
      return buff_size;

    if (zip_complete)
      return n;

    if (zip_compr_level <= 3) // optimized for speed
      zip_deflate_fast();
    else
      zip_deflate_better();
    if (zip_lookahead == 0) {
      if (zip_match_available != 0)
        zip_ct_tally(0, zip_window[zip_strstart - 1] & 0xff);
      zip_flush_block(1);
      zip_complete = true;
    }
    return n + zip_qcopy(buff, n + off, buff_size - n);
  }

  function zip_qcopy(buff, off, buff_size) {
    var n, i, j;

    n = 0;
    while (zip_qhead != null && n < buff_size) {
      i = buff_size - n;
      if (i > zip_qhead.len)
        i = zip_qhead.len;
      // System.arraycopy(qhead.ptr, qhead.off, buff, off + n, i);
      for (j = 0; j < i; j++)
        buff[off + n + j] = zip_qhead.ptr[zip_qhead.off + j];

      zip_qhead.off += i;
      zip_qhead.len -= i;
      n += i;
      if (zip_qhead.len == 0) {
        var p;
        p = zip_qhead;
        zip_qhead = zip_qhead.next;
        zip_reuse_queue(p);
      }
    }

    if (n == buff_size)
      return n;

    if (zip_outoff < zip_outcnt) {
      i = buff_size - n;
      if (i > zip_outcnt - zip_outoff)
        i = zip_outcnt - zip_outoff;
      // System.arraycopy(outbuf, outoff, buff, off + n, i);
      for (j = 0; j < i; j++)
        buff[off + n + j] = zip_outbuf[zip_outoff + j];
      zip_outoff += i;
      n += i;
      if (zip_outcnt == zip_outoff)
        zip_outcnt = zip_outoff = 0;
    }
    return n;
  }

  /* ==========================================================================
   * Allocate the match buffer, initialize the various tables and save the
   * location of the internal file attribute (ascii/binary) and method
   * (DEFLATE/STORE).
   */
  function zip_ct_init() {
    var n;      // iterates over tree elements
    var bits;   // bit counter
    var length; // length value
    var code;   // code value
    var dist;   // distance index

    if (zip_static_dtree[0].dl != 0) return; // ct_init already called

    zip_l_desc.dyn_tree = zip_dyn_ltree;
    zip_l_desc.static_tree = zip_static_ltree;
    zip_l_desc.extra_bits = zip_extra_lbits;
    zip_l_desc.extra_base = zip_LITERALS + 1;
    zip_l_desc.elems = zip_L_CODES;
    zip_l_desc.max_length = zip_MAX_BITS;
    zip_l_desc.max_code = 0;

    zip_d_desc.dyn_tree = zip_dyn_dtree;
    zip_d_desc.static_tree = zip_static_dtree;
    zip_d_desc.extra_bits = zip_extra_dbits;
    zip_d_desc.extra_base = 0;
    zip_d_desc.elems = zip_D_CODES;
    zip_d_desc.max_length = zip_MAX_BITS;
    zip_d_desc.max_code = 0;

    zip_bl_desc.dyn_tree = zip_bl_tree;
    zip_bl_desc.static_tree = null;
    zip_bl_desc.extra_bits = zip_extra_blbits;
    zip_bl_desc.extra_base = 0;
    zip_bl_desc.elems = zip_BL_CODES;
    zip_bl_desc.max_length = zip_MAX_BL_BITS;
    zip_bl_desc.max_code = 0;

    // Initialize the mapping length (0..255) -> length code (0..28)
    length = 0;
    for (code = 0; code < zip_LENGTH_CODES - 1; code++) {
      zip_base_length[code] = length;
      for (n = 0; n < (1 << zip_extra_lbits[code]); n++)
        zip_length_code[length++] = code;
    }
    // Assert (length == 256, "ct_init: length != 256");

    /* Note that the length 255 (match length 258) can be represented
     * in two different ways: code 284 + 5 bits or code 285, so we
     * overwrite length_code[255] to use the best encoding:
     */
    zip_length_code[length - 1] = code;

    /* Initialize the mapping dist (0..32K) -> dist code (0..29) */
    dist = 0;
    for (code = 0; code < 16; code++) {
      zip_base_dist[code] = dist;
      for (n = 0; n < (1 << zip_extra_dbits[code]); n++) {
        zip_dist_code[dist++] = code;
      }
    }
    // Assert (dist == 256, "ct_init: dist != 256");
    dist >>= 7; // from now on, all distances are divided by 128
    for (; code < zip_D_CODES; code++) {
      zip_base_dist[code] = dist << 7;
      for (n = 0; n < (1 << (zip_extra_dbits[code] - 7)); n++)
        zip_dist_code[256 + dist++] = code;
    }
    // Assert (dist == 256, "ct_init: 256+dist != 512");

    // Construct the codes of the static literal tree
    for (bits = 0; bits <= zip_MAX_BITS; bits++)
      zip_bl_count[bits] = 0;
    n = 0;
    while (n <= 143) { zip_static_ltree[n++].dl = 8; zip_bl_count[8]++; }
    while (n <= 255) { zip_static_ltree[n++].dl = 9; zip_bl_count[9]++; }
    while (n <= 279) { zip_static_ltree[n++].dl = 7; zip_bl_count[7]++; }
    while (n <= 287) { zip_static_ltree[n++].dl = 8; zip_bl_count[8]++; }
    /* Codes 286 and 287 do not exist, but we must include them in the
     * tree construction to get a canonical Huffman tree (longest code
     * all ones)
     */
    zip_gen_codes(zip_static_ltree, zip_L_CODES + 1);

    /* The static distance tree is trivial: */
    for (n = 0; n < zip_D_CODES; n++) {
      zip_static_dtree[n].dl = 5;
      zip_static_dtree[n].fc = zip_bi_reverse(n, 5);
    }

    // Initialize the first block of the first file:
    zip_init_block();
  }

  /* ==========================================================================
   * Initialize a new block.
   */
  function zip_init_block() {
    var n; // iterates over tree elements

    // Initialize the trees.
    for (n = 0; n < zip_L_CODES; n++) zip_dyn_ltree[n].fc = 0;
    for (n = 0; n < zip_D_CODES; n++) zip_dyn_dtree[n].fc = 0;
    for (n = 0; n < zip_BL_CODES; n++) zip_bl_tree[n].fc = 0;

    zip_dyn_ltree[zip_END_BLOCK].fc = 1;
    zip_opt_len = zip_static_len = 0;
    zip_last_lit = zip_last_dist = zip_last_flags = 0;
    zip_flags = 0;
    zip_flag_bit = 1;
  }

  /* ==========================================================================
   * Restore the heap property by moving down the tree starting at node k,
   * exchanging a node with the smallest of its two sons if necessary, stopping
   * when the heap property is re-established (each father smaller than its
   * two sons).
   */
  function zip_pqdownheap(
    tree, // the tree to restore
    k) {  // node to move down
    var v = zip_heap[k];
    var j = k << 1; // left son of k

    while (j <= zip_heap_len) {
      // Set j to the smallest of the two sons:
      if (j < zip_heap_len &&
          zip_SMALLER(tree, zip_heap[j + 1], zip_heap[j]))
        j++;

      // Exit if v is smaller than both sons
      if (zip_SMALLER(tree, v, zip_heap[j]))
        break;

      // Exchange v with the smallest son
      zip_heap[k] = zip_heap[j];
      k = j;

      // And continue down the tree, setting j to the left son of k
      j <<= 1;
    }
    zip_heap[k] = v;
  }

  /* ==========================================================================
   * Compute the optimal bit lengths for a tree and update the total bit length
   * for the current block.
   * IN assertion: the fields freq and dad are set, heap[heap_max] and
   * above are the tree nodes sorted by increasing frequency.
   * OUT assertions: the field len is set to the optimal bit length, the
   * array bl_count contains the frequencies for each bit length.
   * The length opt_len is updated; static_len is also updated if stree is
   * not null.
   */
  function zip_gen_bitlen(desc) { // the tree descriptor
    var tree = desc.dyn_tree;
    var extra = desc.extra_bits;
    var base = desc.extra_base;
    var max_code = desc.max_code;
    var max_length = desc.max_length;
    var stree = desc.static_tree;
    var h;            // heap index
    var n, m;         // iterate over the tree elements
    var bits;         // bit length
    var xbits;        // extra bits
    var f;            // frequency
    var overflow = 0; // number of elements with bit length too large

    for (bits = 0; bits <= zip_MAX_BITS; bits++)
      zip_bl_count[bits] = 0;

    /* In a first pass, compute the optimal bit lengths (which may
     * overflow in the case of the bit length tree).
     */
    tree[zip_heap[zip_heap_max]].dl = 0; // root of the heap

    for (h = zip_heap_max + 1; h < zip_HEAP_SIZE; h++) {
      n = zip_heap[h];
      bits = tree[tree[n].dl].dl + 1;
      if (bits > max_length) {
        bits = max_length;
        overflow++;
      }
      tree[n].dl = bits;
      // We overwrite tree[n].dl which is no longer needed

      if (n > max_code)
        continue; // not a leaf node

      zip_bl_count[bits]++;
      xbits = 0;
      if (n >= base)
        xbits = extra[n - base];
      f = tree[n].fc;
      zip_opt_len += f * (bits + xbits);
      if (stree != null)
        zip_static_len += f * (stree[n].dl + xbits);
    }
    if (overflow == 0)
      return;

    // This happens for example on obj2 and pic of the Calgary corpus

    // Find the first bit length which could increase:
    do {
      bits = max_length - 1;
      while (zip_bl_count[bits] == 0)
        bits--;
      zip_bl_count[bits]--;        // move one leaf down the tree
      zip_bl_count[bits + 1] += 2; // move one overflow item as its brother
      zip_bl_count[max_length]--;
      /* The brother of the overflow item also moves one step up,
       * but this does not affect bl_count[max_length]
       */
      overflow -= 2;
    } while (overflow > 0);

    /* Now recompute all bit lengths, scanning in increasing frequency.
     * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
     * lengths instead of fixing only the wrong ones. This idea is taken
     * from 'ar' written by Haruhiko Okumura.)
     */
    for (bits = max_length; bits != 0; bits--) {
      n = zip_bl_count[bits];
      while (n != 0) {
        m = zip_heap[--h];
        if (m > max_code)
          continue;
        if (tree[m].dl != bits) {
          zip_opt_len += (bits - tree[m].dl) * tree[m].fc;
          zip_tree_set_len(tree, m, bits);
        }
        n--;
      }
    }
  }

  /* Store the corrected bit length in the dl (len) field, as the reference
   * trees.c does; the frequency in fc must be left untouched here.
   */
  function zip_tree_set_len(tree, m, bits) {
    tree[m].dl = bits;
  }

  /* ==========================================================================
   * Generate the codes for a given tree and bit counts (which need not be
   * optimal).
   * IN assertion: the array bl_count contains the bit length statistics for
   * the given tree and the field len is set for all tree elements.
   * OUT assertion: the field code is set for all tree elements of non
   * zero code length.
   */
  function zip_gen_codes(tree,       // the tree to decorate
                         max_code) { // largest code with non zero frequency
    var next_code = new Array(zip_MAX_BITS + 1); // next code value for each bit length
    var code = 0; // running code value
    var bits;     // bit index
    var n;        // code index

    /* The distribution counts are first used to generate the code values
     * without bit reversal.
     */
    for (bits = 1; bits <= zip_MAX_BITS; bits++) {
      code = ((code + zip_bl_count[bits - 1]) << 1);
      next_code[bits] = code;
    }

    /* Check that the bit counts in bl_count are consistent. The last code
     * must be all ones.
     */
    // Assert (code + encoder->bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
    //        "inconsistent bit counts");
    // Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

    for (n = 0; n <= max_code; n++) {
      var len = tree[n].dl;
      if (len == 0)
        continue;
      // Now reverse the bits
      tree[n].fc = zip_bi_reverse(next_code[len]++, len);

      // Tracec(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
      //        n, (isgraph(n) ? n : ' '), len, tree[n].fc, next_code[len]-1));
    }
  }

  /* ==========================================================================
   * Construct one Huffman tree and assign the code bit strings and lengths.
   * Update the total bit length for the current block.
   * IN assertion: the field freq is set for all tree elements.
   * OUT assertions: the fields len and code are set to the optimal bit length
   * and corresponding code. The length opt_len is updated; static_len is
   * also updated if stree is not null. The field max_code is set.
   */
  function zip_build_tree(desc) { // the tree descriptor
    var tree = desc.dyn_tree;
    var stree = desc.static_tree;
    var elems = desc.elems;
    var n, m;          // iterate over heap elements
    var max_code = -1; // largest code with non zero frequency
    var node = elems;  // next internal node of the tree

    /* Construct the initial heap, with least frequent element in
     * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
     * heap[0] is not used.
     */
    zip_heap_len = 0;
    zip_heap_max = zip_HEAP_SIZE;

    for (n = 0; n < elems; n++) {
      if (tree[n].fc != 0) {
        zip_heap[++zip_heap_len] = max_code = n;
        zip_depth[n] = 0;
      } else
        tree[n].dl = 0;
    }

    /* The pkzip format requires that at least one distance code exists,
     * and that at least one bit should be sent even if there is only one
     * possible code. So to avoid special checks later on we force at least
     * two codes of non zero frequency.
     */
    while (zip_heap_len < 2) {
      var xnew = zip_heap[++zip_heap_len] = (max_code < 2 ? ++max_code : 0);
      tree[xnew].fc = 1;
      zip_depth[xnew] = 0;
      zip_opt_len--;
      if (stree != null)
        zip_static_len -= stree[xnew].dl;
      // new is 0 or 1 so it does not have extra bits
    }
    desc.max_code = max_code;

    /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
     * establish sub-heaps of increasing lengths:
     */
    for (n = zip_heap_len >> 1; n >= 1; n--)
      zip_pqdownheap(tree, n);

    /* Construct the Huffman tree by repeatedly combining the least two
     * frequent nodes.
     */
    do {
      n = zip_heap[zip_SMALLEST];
      zip_heap[zip_SMALLEST] = zip_heap[zip_heap_len--];
      zip_pqdownheap(tree, zip_SMALLEST);

      m = zip_heap[zip_SMALLEST]; // m = node of next least frequency

      // keep the nodes sorted by frequency
      zip_heap[--zip_heap_max] = n;
      zip_heap[--zip_heap_max] = m;

      // Create a new node father of n and m
      tree[node].fc = tree[n].fc + tree[m].fc;
      // depth[node] = (char)(MAX(depth[n], depth[m]) + 1);
      if (zip_depth[n] > zip_depth[m] + 1)
        zip_depth[node] = zip_depth[n];
      else
        zip_depth[node] = zip_depth[m] + 1;
      tree[n].dl = tree[m].dl = node;

      // and insert the new node in the heap
      zip_heap[zip_SMALLEST] = node++;
      zip_pqdownheap(tree, zip_SMALLEST);

    } while (zip_heap_len >= 2);

    zip_heap[--zip_heap_max] = zip_heap[zip_SMALLEST];

    /* At this point, the fields freq and dad are set. We can now
     * generate the bit lengths.
     */
    zip_gen_bitlen(desc);

    // The field len is now set, we can generate the bit codes
    zip_gen_codes(tree, max_code);
  }

  /* ==========================================================================
   * Scan a literal or distance tree to determine the frequencies of the codes
   * in the bit length tree. Updates opt_len to take into account the repeat
   * counts. (The contribution of the bit length codes will be added later
   * during the construction of bl_tree.)
   */
  function zip_scan_tree(tree,       // the tree to be scanned
                         max_code) { // and its largest code of non zero frequency
    var n;                    // iterates over all tree elements
    var prevlen = -1;         // last emitted length
    var curlen;               // length of current code
    var nextlen = tree[0].dl; // length of next code
    var count = 0;            // repeat count of the current code
    var max_count = 7;        // max repeat count
    var min_count = 4;        // min repeat count

    if (nextlen == 0) {
      max_count = 138;
      min_count = 3;
    }
    tree[max_code + 1].dl = 0xffff; // guard

    for (n = 0; n <= max_code; n++) {
      curlen = nextlen;
      nextlen = tree[n + 1].dl;
      if (++count < max_count && curlen == nextlen)
        continue;
      else if (count < min_count)
        zip_bl_tree[curlen].fc += count;
      else if (curlen != 0) {
        if (curlen != prevlen)
          zip_bl_tree[curlen].fc++;
        zip_bl_tree[zip_REP_3_6].fc++;
      } else if (count <= 10)
        zip_bl_tree[zip_REPZ_3_10].fc++;
      else
        zip_bl_tree[zip_REPZ_11_138].fc++;
      count = 0; prevlen = curlen;
      if (nextlen == 0) {
        max_count = 138;
        min_count = 3;
      } else if (curlen == nextlen) {
        max_count = 6;
        min_count = 3;
      } else {
        max_count = 7;
        min_count = 4;
      }
    }
  }

  /* ==========================================================================
   * Send a literal or distance tree in compressed form, using the codes in
   * bl_tree.
   */
  function zip_send_tree(tree,       // the tree to be scanned
                         max_code) { // and its largest code of non zero frequency
    var n;                    // iterates over all tree elements
    var prevlen = -1;         // last emitted length
    var curlen;               // length of current code
    var nextlen = tree[0].dl; // length of next code
    var count = 0;            // repeat count of the current code
    var max_count = 7;        // max repeat count
    var min_count = 4;        // min repeat count

    /* tree[max_code+1].dl = -1; */ /* guard already set */
    if (nextlen == 0) {
      max_count = 138;
      min_count = 3;
    }

    for (n = 0; n <= max_code; n++) {
      curlen = nextlen;
      nextlen = tree[n + 1].dl;
      if (++count < max_count && curlen == nextlen) {
        continue;
      } else if (count < min_count) {
        do { zip_SEND_CODE(curlen, zip_bl_tree); } while (--count != 0);
      } else if (curlen != 0) {
        if (curlen != prevlen) {
          zip_SEND_CODE(curlen, zip_bl_tree);
          count--;
        }
        // Assert(count >= 3 && count <= 6, " 3_6?");
        zip_SEND_CODE(zip_REP_3_6, zip_bl_tree);
        zip_send_bits(count - 3, 2);
      } else if (count <= 10) {
        zip_SEND_CODE(zip_REPZ_3_10, zip_bl_tree);
        zip_send_bits(count - 3, 3);
      } else {
        zip_SEND_CODE(zip_REPZ_11_138, zip_bl_tree);
        zip_send_bits(count - 11, 7);
      }
      count = 0;
      prevlen = curlen;
      if (nextlen == 0) {
        max_count = 138;
        min_count = 3;
      } else if (curlen == nextlen) {
        max_count = 6;
        min_count = 3;
      } else {
        max_count = 7;
        min_count = 4;
      }
    }
  }

  /* ==========================================================================
   * Construct the Huffman tree for the bit lengths and return the index in
   * bl_order of the last bit length code to send.
   */
  function zip_build_bl_tree() {
    var max_blindex; // index of last bit length code of non zero freq

    // Determine the bit length frequencies for literal and distance trees
    zip_scan_tree(zip_dyn_ltree, zip_l_desc.max_code);
    zip_scan_tree(zip_dyn_dtree, zip_d_desc.max_code);

    // Build the bit length tree:
    zip_build_tree(zip_bl_desc);
    /* opt_len now includes the length of the tree representations, except
     * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
     */

    /* Determine the number of bit length codes to send. The pkzip format
     * requires that at least 4 bit length codes be sent. (appnote.txt says
     * 3 but the actual value used is 4.)
     */
    for (max_blindex = zip_BL_CODES - 1; max_blindex >= 3; max_blindex--) {
      if (zip_bl_tree[zip_bl_order[max_blindex]].dl != 0) break;
    }
    /* Update opt_len to include the bit length tree and counts */
    zip_opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
    // Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",
    //        encoder->opt_len, encoder->static_len));

    return max_blindex;
  }

  /* ==========================================================================
   * Send the header for a block using dynamic Huffman trees: the counts, the
   * lengths of the bit length codes, the literal tree and the distance tree.
   * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
   */
  function zip_send_all_trees(lcodes, dcodes, blcodes) { // number of codes for each tree
    var rank; // index in bl_order

    // Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
    // Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,
    //        "too many codes");
    // Tracev((stderr, "\nbl counts: "));
    zip_send_bits(lcodes - 257, 5); // not +255 as stated in appnote.txt
    zip_send_bits(dcodes - 1, 5);
    zip_send_bits(blcodes - 4, 4); // not -3 as stated in appnote.txt
    for (rank = 0; rank < blcodes; rank++) {
      // Tracev((stderr, "\nbl code %2d ", bl_order[rank]));
      zip_send_bits(zip_bl_tree[zip_bl_order[rank]].dl, 3);
    }

    // send the literal tree
    zip_send_tree(zip_dyn_ltree, lcodes - 1);

    // send the distance tree
    zip_send_tree(zip_dyn_dtree, dcodes - 1);
  }

  /* ==========================================================================
   * Determine the best encoding for the current block: dynamic trees, static
   * trees or store, and output the encoded block to the zip file.
   */
  function zip_flush_block(eof) { // true if this is the last block for a file
    var opt_lenb, static_lenb; // opt_len and static_len in bytes
    var max_blindex;           // index of last bit length code of non zero freq
    var stored_len;            // length of input block

    stored_len = zip_strstart - zip_block_start;
    zip_flag_buf[zip_last_flags] = zip_flags; // Save the flags for the last 8 items

    // Construct the literal and distance trees
    zip_build_tree(zip_l_desc);
    // Tracev((stderr, "\nlit data: dyn %ld, stat %ld",
    //        encoder->opt_len, encoder->static_len));

    zip_build_tree(zip_d_desc);
    // Tracev((stderr, "\ndist data: dyn %ld, stat %ld",
    //        encoder->opt_len, encoder->static_len));
    /* At this point, opt_len and static_len are the total bit lengths of
     * the compressed block data, excluding the tree representations.
     */

    /* Build the bit length tree for the above two trees, and get the index
     * in bl_order of the last bit length code to send.
     */
    max_blindex = zip_build_bl_tree();

    // Determine the best encoding. Compute first the block length in bytes
    opt_lenb = (zip_opt_len + 3 + 7) >> 3;
    static_lenb = (zip_static_len + 3 + 7) >> 3;

    // Trace((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u dist %u ",
    //       opt_lenb, encoder->opt_len,
    //       static_lenb, encoder->static_len, stored_len,
    //       encoder->last_lit, encoder->last_dist));

    if (static_lenb <= opt_lenb)
      opt_lenb = static_lenb;
    if (stored_len + 4 <= opt_lenb // 4: two words for the lengths
        && zip_block_start >= 0) {
      var i;

      /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
       * Otherwise we can't have processed more than WSIZE input bytes since
       * the last block flush, because compression would have been
       * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
       * transform a block into a stored block.
       */
      zip_send_bits((zip_STORED_BLOCK << 1) + eof, 3); /* send block type */
      zip_bi_windup();                                 /* align on byte boundary */
      zip_put_short(stored_len);
      zip_put_short(~stored_len);

      // copy block
      /*
        p = &window[block_start];
        for(i = 0; i < stored_len; i++)
          put_byte(p[i]);
      */
      for (i = 0; i < stored_len; i++)
        zip_put_byte(zip_window[zip_block_start + i]);

    } else if (static_lenb == opt_lenb) {
      zip_send_bits((zip_STATIC_TREES << 1) + eof, 3);
      zip_compress_block(zip_static_ltree, zip_static_dtree);
    } else {
      zip_send_bits((zip_DYN_TREES << 1) + eof, 3);
      zip_send_all_trees(zip_l_desc.max_code + 1,
                         zip_d_desc.max_code + 1,
                         max_blindex + 1);
      zip_compress_block(zip_dyn_ltree, zip_dyn_dtree);
    }

    zip_init_block();

    if (eof != 0)
      zip_bi_windup();
  }

  /* ==========================================================================
   * Save the match info and tally the frequency counts. Return true if
   * the current block must be flushed.
   */
  function zip_ct_tally(
    dist, // distance of matched string
    lc) { // match length-MIN_MATCH or unmatched char (if dist==0)
    zip_l_buf[zip_last_lit++] = lc;
    if (dist == 0) {
      // lc is the unmatched char
      zip_dyn_ltree[lc].fc++;
    } else {
      // Here, lc is the match length - MIN_MATCH
      dist--; // dist = match distance - 1
      // Assert((ush)dist < (ush)MAX_DIST &&
      //        (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
      //        (ush)D_CODE(dist) < (ush)D_CODES, "ct_tally: bad match");

      zip_dyn_ltree[zip_length_code[lc] + zip_LITERALS + 1].fc++;
      zip_dyn_dtree[zip_D_CODE(dist)].fc++;

      zip_d_buf[zip_last_dist++] = dist;
      zip_flags |= zip_flag_bit;
    }
    zip_flag_bit <<= 1;

    // Output the flags if they fill a byte
    if ((zip_last_lit & 7) == 0) {
      zip_flag_buf[zip_last_flags++] = zip_flags;
      zip_flags = 0;
      zip_flag_bit = 1;
    }
    // Try to guess if it is profitable to stop the current block here
    if (zip_compr_level > 2 && (zip_last_lit & 0xfff) == 0) {
      // Compute an upper bound for the compressed length
      var out_length = zip_last_lit * 8;
      var in_length = zip_strstart - zip_block_start;
      var dcode;

      for (dcode = 0; dcode < zip_D_CODES; dcode++) {
        out_length += zip_dyn_dtree[dcode].fc * (5 + zip_extra_dbits[dcode]);
      }
      out_length >>= 3;
      // Trace((stderr,"\nlast_lit %u, last_dist %u, in %ld, out ~%ld(%ld%%) ",
      //       encoder->last_lit, encoder->last_dist, in_length, out_length,
      //       100L - out_length*100L/in_length));
      if (zip_last_dist < parseInt(zip_last_lit / 2) &&
          out_length < parseInt(in_length / 2))
        return true;
    }
    return (zip_last_lit == zip_LIT_BUFSIZE - 1 ||
            zip_last_dist == zip_DIST_BUFSIZE);
    /* We avoid equality with LIT_BUFSIZE because of wraparound at 64K
     * on 16 bit machines and because stored blocks are restricted to
     * 64K-1 bytes.
     */
  }

  /* ==========================================================================
   * Send the block data compressed using the given Huffman trees
   */
  function zip_compress_block(
    ltree,   // literal tree
    dtree) { // distance tree
    var dist;     // distance of matched string
    var lc;       // match length or unmatched char (if dist == 0)
    var lx = 0;   // running index in l_buf
    var dx = 0;   // running index in d_buf
    var fx = 0;   // running index in flag_buf
    var flag = 0; // current flags
    var code;     // the code to send
    var extra;    // number of extra bits to send

    if (zip_last_lit != 0) do {
      if ((lx & 7) == 0)
        flag = zip_flag_buf[fx++];
      lc = zip_l_buf[lx++] & 0xff;
      if ((flag & 1) == 0) {
        zip_SEND_CODE(lc, ltree); /* send a literal byte */
        // Tracecv(isgraph(lc), (stderr," '%c' ", lc));
      } else {
        // Here, lc is the match length - MIN_MATCH
        code = zip_length_code[lc];
        zip_SEND_CODE(code + zip_LITERALS + 1, ltree); // send the length code
        extra = zip_extra_lbits[code];
        if (extra != 0) {
          lc -= zip_base_length[code];
          zip_send_bits(lc, extra); // send the extra length bits
        }
        dist = zip_d_buf[dx++];
        // Here, dist is the match distance - 1
        code = zip_D_CODE(dist);
        // Assert (code < D_CODES, "bad d_code");

        zip_SEND_CODE(code, dtree); // send the distance code
        extra = zip_extra_dbits[code];
        if (extra != 0) {
          dist -= zip_base_dist[code];
          zip_send_bits(dist, extra); // send the extra distance bits
        }
      } // literal or match pair ?
      flag >>= 1;
    } while (lx < zip_last_lit);

    zip_SEND_CODE(zip_END_BLOCK, ltree);
  }

  /* ==========================================================================
   * Send a value on a given number of bits.
   * IN assertion: length <= 16 and value fits in length bits.
   */
  var zip_Buf_size = 16; // bit size of bi_buf
  function zip_send_bits(
    value,    // value to send
    length) { // number of bits
    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
     * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
     * unused bits in value.
     */
    if (zip_bi_valid > zip_Buf_size - length) {
      zip_bi_buf |= (value << zip_bi_valid);
      zip_put_short(zip_bi_buf);
      zip_bi_buf = (value >> (zip_Buf_size - zip_bi_valid));
      zip_bi_valid += length - zip_Buf_size;
    } else {
      zip_bi_buf |= value << zip_bi_valid;
      zip_bi_valid += length;
    }
  }

  /* ==========================================================================
   * Reverse the first len bits of a code, using straightforward code (a faster
   * method would use a table)
   * IN assertion: 1 <= len <= 15
   */
  function zip_bi_reverse(
    code,  // the value to invert
    len) { // its bit length
    var res = 0;
    do {
      res |= code & 1;
      code >>= 1;
      res <<= 1;
    } while (--len > 0);
    return res >> 1;
  }

  /* ==========================================================================
   * Write out any remaining bits in an incomplete byte.
   */
  function zip_bi_windup() {
    if (zip_bi_valid > 8) {
      zip_put_short(zip_bi_buf);
    } else if (zip_bi_valid > 0) {
      zip_put_byte(zip_bi_buf);
    }
    zip_bi_buf = 0;
    zip_bi_valid = 0;
  }

  function zip_qoutbuf() {
    if (zip_outcnt != 0) {
      var q, i;
      q = zip_new_queue();
      if (zip_qhead == null)
        zip_qhead = zip_qtail = q;
      else
        zip_qtail = zip_qtail.next = q;
      q.len = zip_outcnt - zip_outoff;
      // System.arraycopy(zip_outbuf, zip_outoff, q.ptr, 0, q.len);
      for (i = 0; i < q.len; i++)
        q.ptr[i] = zip_outbuf[zip_outoff + i];
      zip_outcnt = zip_outoff = 0;
    }
  }

  return function deflate(str, level) {
    var i, j;

    zip_deflate_data = str;
    zip_deflate_pos = 0;
    if (typeof level == "undefined")
      level = zip_DEFAULT_LEVEL;
    zip_deflate_start(level);

    var buff = new Array(1024);
    var aout = [];
    while ((i = zip_deflate_internal(buff, 0, buff.length)) > 0) {
      var cbuf = new Array(i);
      for (j = 0; j < i; j++) {
        cbuf[j] = String.fromCharCode(buff[j]);
      }
      aout[aout.length] = cbuf.join("");
    }
    zip_deflate_data = null; // G.C.
    return aout.join("");
  };
})();
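
/* Usage example (a minimal sketch, not part of the original library):
 * `deflate(str, level)` takes a "binary" string (one character per byte,
 * char codes 0..255) and returns the raw DEFLATE stream as another binary
 * string, with no zlib or gzip header or checksum. The optional `level`
 * (1..9) defaults to 6. The lines below are kept commented out so this
 * static asset has no side effects when loaded.
 */
// var raw = deflate("hello hello hello hello hello");      // default level 6
// var best = deflate("hello hello hello hello hello", 9);  // maximum compression
// var bytes = [];
// for (var k = 0; k < raw.length; k++) bytes.push(raw.charCodeAt(k));
// // `bytes` now holds the compressed octets, e.g. for copying into a Uint8Array.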