comparison geotemco/lib/jszip/jszip-deflate.js @ 0:b12c99b7c3f0
commit for previous development
author: Zoe Hong <zhong@mpiwg-berlin.mpg.de>
date: Mon, 19 Jan 2015 17:13:49 +0100
parents:
children:
1 /* | |
2 * Port of a script by Masanao Izumo. | |
3 * | |
 4 * Only changes: wrap all the variables in a function and add the | 
5 * main function to JSZip (DEFLATE compression method). | |
6 * Everything else was written by M. Izumo. | |
7 * | |
8 * Original code can be found here: http://www.onicos.com/staff/iz/amuse/javascript/expert/deflate.txt | |
9 */ | |
10 | |
11 if(!JSZip) { | |
12 throw "JSZip not defined"; | |
13 } | |
14 | |
15 /* | |
16 * Original: | |
17 * http://www.onicos.com/staff/iz/amuse/javascript/expert/deflate.txt | |
18 */ | |
19 | |
20 (function(){ | |
21 | |
22 /* Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp> | |
23 * Version: 1.0.1 | |
24 * LastModified: Dec 25 1999 | |
25 */ | |
26 | |
27 /* Interface: | |
28 * data = zip_deflate(src); | |
29 */ | |
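/* Illustrative usage, per the interface above (the input is treated as a
 * binary string; zip_read_buff below masks each character code to one byte):
 *
 *   var data = zip_deflate(src);   // src: binary string to compress, data: DEFLATE-compressed output
 *
 * The compression level handed down to zip_deflate_start is clamped to 1..9,
 * with zip_DEFAULT_LEVEL (6) used when none is given.
 */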
30 | |
31 /* constant parameters */ | |
32 var zip_WSIZE = 32768; // Sliding Window size | |
33 var zip_STORED_BLOCK = 0; | |
34 var zip_STATIC_TREES = 1; | |
35 var zip_DYN_TREES = 2; | |
36 | |
37 /* for deflate */ | |
38 var zip_DEFAULT_LEVEL = 6; | |
39 var zip_FULL_SEARCH = true; | |
40 var zip_INBUFSIZ = 32768; // Input buffer size | |
41 var zip_INBUF_EXTRA = 64; // Extra buffer | |
42 var zip_OUTBUFSIZ = 1024 * 8; | |
43 var zip_window_size = 2 * zip_WSIZE; | |
44 var zip_MIN_MATCH = 3; | |
45 var zip_MAX_MATCH = 258; | |
46 var zip_BITS = 16; | |
47 // for SMALL_MEM | |
48 var zip_LIT_BUFSIZE = 0x2000; | |
49 var zip_HASH_BITS = 13; | |
50 // for MEDIUM_MEM | |
51 // var zip_LIT_BUFSIZE = 0x4000; | |
52 // var zip_HASH_BITS = 14; | |
53 // for BIG_MEM | |
54 // var zip_LIT_BUFSIZE = 0x8000; | |
55 // var zip_HASH_BITS = 15; | |
56 if(zip_LIT_BUFSIZE > zip_INBUFSIZ) | |
57 alert("error: zip_INBUFSIZ is too small"); | |
58 if((zip_WSIZE<<1) > (1<<zip_BITS)) | |
59 alert("error: zip_WSIZE is too large"); | |
60 if(zip_HASH_BITS > zip_BITS-1) | |
61 alert("error: zip_HASH_BITS is too large"); | |
62 if(zip_HASH_BITS < 8 || zip_MAX_MATCH != 258) | |
63 alert("error: Code too clever"); | |
64 var zip_DIST_BUFSIZE = zip_LIT_BUFSIZE; | |
65 var zip_HASH_SIZE = 1 << zip_HASH_BITS; | |
66 var zip_HASH_MASK = zip_HASH_SIZE - 1; | |
67 var zip_WMASK = zip_WSIZE - 1; | |
68 var zip_NIL = 0; // Tail of hash chains | |
69 var zip_TOO_FAR = 4096; | |
70 var zip_MIN_LOOKAHEAD = zip_MAX_MATCH + zip_MIN_MATCH + 1; | |
71 var zip_MAX_DIST = zip_WSIZE - zip_MIN_LOOKAHEAD; | |
72 var zip_SMALLEST = 1; | |
73 var zip_MAX_BITS = 15; | |
74 var zip_MAX_BL_BITS = 7; | |
75 var zip_LENGTH_CODES = 29; | |
76 var zip_LITERALS =256; | |
77 var zip_END_BLOCK = 256; | |
78 var zip_L_CODES = zip_LITERALS + 1 + zip_LENGTH_CODES; | |
79 var zip_D_CODES = 30; | |
80 var zip_BL_CODES = 19; | |
81 var zip_REP_3_6 = 16; | |
82 var zip_REPZ_3_10 = 17; | |
83 var zip_REPZ_11_138 = 18; | |
84 var zip_HEAP_SIZE = 2 * zip_L_CODES + 1; | |
85 var zip_H_SHIFT = parseInt((zip_HASH_BITS + zip_MIN_MATCH - 1) / | |
86 zip_MIN_MATCH); | |
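/* With zip_HASH_BITS = 13 and zip_MIN_MATCH = 3 this evaluates to
 * parseInt((13 + 3 - 1) / 3) = 5: the shift is chosen so that after
 * MIN_MATCH updates of the rolling hash (see zip_INSERT_STRING) the oldest
 * byte no longer contributes to the HASH_BITS-bit key.
 */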
87 | |
88 /* variables */ | |
89 var zip_free_queue; | |
90 var zip_qhead, zip_qtail; | |
91 var zip_initflag; | |
92 var zip_outbuf = null; | |
93 var zip_outcnt, zip_outoff; | |
94 var zip_complete; | |
95 var zip_window; | |
96 var zip_d_buf; | |
97 var zip_l_buf; | |
98 var zip_prev; | |
99 var zip_bi_buf; | |
100 var zip_bi_valid; | |
101 var zip_block_start; | |
102 var zip_ins_h; | |
103 var zip_hash_head; | |
104 var zip_prev_match; | |
105 var zip_match_available; | |
106 var zip_match_length; | |
107 var zip_prev_length; | |
108 var zip_strstart; | |
109 var zip_match_start; | |
110 var zip_eofile; | |
111 var zip_lookahead; | |
112 var zip_max_chain_length; | |
113 var zip_max_lazy_match; | |
114 var zip_compr_level; | |
115 var zip_good_match; | |
116 var zip_nice_match; | |
117 var zip_dyn_ltree; | |
118 var zip_dyn_dtree; | |
119 var zip_static_ltree; | |
120 var zip_static_dtree; | |
121 var zip_bl_tree; | |
122 var zip_l_desc; | |
123 var zip_d_desc; | |
124 var zip_bl_desc; | |
125 var zip_bl_count; | |
126 var zip_heap; | |
127 var zip_heap_len; | |
128 var zip_heap_max; | |
129 var zip_depth; | |
130 var zip_length_code; | |
131 var zip_dist_code; | |
132 var zip_base_length; | |
133 var zip_base_dist; | |
134 var zip_flag_buf; | |
135 var zip_last_lit; | |
136 var zip_last_dist; | |
137 var zip_last_flags; | |
138 var zip_flags; | |
139 var zip_flag_bit; | |
140 var zip_opt_len; | |
141 var zip_static_len; | |
142 var zip_deflate_data; | |
143 var zip_deflate_pos; | |
144 | |
145 /* objects (deflate) */ | |
146 | |
147 var zip_DeflateCT = function() { | |
148 this.fc = 0; // frequency count or bit string | |
149 this.dl = 0; // father node in Huffman tree or length of bit string | |
150 } | |
151 | |
152 var zip_DeflateTreeDesc = function() { | |
153 this.dyn_tree = null; // the dynamic tree | |
154 this.static_tree = null; // corresponding static tree or NULL | |
155 this.extra_bits = null; // extra bits for each code or NULL | |
156 this.extra_base = 0; // base index for extra_bits | |
157 this.elems = 0; // max number of elements in the tree | |
158 this.max_length = 0; // max bit length for the codes | |
159 this.max_code = 0; // largest code with non zero frequency | |
160 } | |
161 | |
162 /* Values for max_lazy_match, good_match and max_chain_length, depending on | |
163 * the desired pack level (0..9). The values given below have been tuned to | |
164 * exclude worst case performance for pathological files. Better values may be | |
165 * found for specific files. | |
166 */ | |
167 var zip_DeflateConfiguration = function(a, b, c, d) { | |
168 this.good_length = a; // reduce lazy search above this match length | |
169 this.max_lazy = b; // do not perform lazy search above this match length | |
170 this.nice_length = c; // quit search above this match length | |
 171 this.max_chain = d; // limit the number of hash chain entries searched for a match | 
172 } | |
173 | |
174 var zip_DeflateBuffer = function() { | |
175 this.next = null; | |
176 this.len = 0; | |
177 this.ptr = new Array(zip_OUTBUFSIZ); | |
178 this.off = 0; | |
179 } | |
180 | |
181 /* constant tables */ | |
182 var zip_extra_lbits = new Array( | |
183 0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0); | |
184 var zip_extra_dbits = new Array( | |
185 0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13); | |
186 var zip_extra_blbits = new Array( | |
187 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7); | |
188 var zip_bl_order = new Array( | |
189 16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15); | |
190 var zip_configuration_table = new Array( | |
191 new zip_DeflateConfiguration(0, 0, 0, 0), | |
192 new zip_DeflateConfiguration(4, 4, 8, 4), | |
193 new zip_DeflateConfiguration(4, 5, 16, 8), | |
194 new zip_DeflateConfiguration(4, 6, 32, 32), | |
195 new zip_DeflateConfiguration(4, 4, 16, 16), | |
196 new zip_DeflateConfiguration(8, 16, 32, 32), | |
197 new zip_DeflateConfiguration(8, 16, 128, 128), | |
198 new zip_DeflateConfiguration(8, 32, 128, 256), | |
199 new zip_DeflateConfiguration(32, 128, 258, 1024), | |
200 new zip_DeflateConfiguration(32, 258, 258, 4096)); | |
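/* For example, zip_DEFAULT_LEVEL (6) selects the row (8, 16, 128, 128):
 * reduce the lazy search once a match of length 8 exists, do not perform the
 * lazy search above match length 16, quit searching once a match of 128 is
 * found, and follow at most 128 hash-chain entries. Level 1 is the fastest
 * row, level 9 the most thorough.
 */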
201 | |
202 | |
203 /* routines (deflate) */ | |
204 | |
205 var zip_deflate_start = function(level) { | |
206 var i; | |
207 | |
208 if(!level) | |
209 level = zip_DEFAULT_LEVEL; | |
210 else if(level < 1) | |
211 level = 1; | |
212 else if(level > 9) | |
213 level = 9; | |
214 | |
215 zip_compr_level = level; | |
216 zip_initflag = false; | |
217 zip_eofile = false; | |
218 if(zip_outbuf != null) | |
219 return; | |
220 | |
221 zip_free_queue = zip_qhead = zip_qtail = null; | |
222 zip_outbuf = new Array(zip_OUTBUFSIZ); | |
223 zip_window = new Array(zip_window_size); | |
224 zip_d_buf = new Array(zip_DIST_BUFSIZE); | |
225 zip_l_buf = new Array(zip_INBUFSIZ + zip_INBUF_EXTRA); | |
226 zip_prev = new Array(1 << zip_BITS); | |
227 zip_dyn_ltree = new Array(zip_HEAP_SIZE); | |
228 for(i = 0; i < zip_HEAP_SIZE; i++) | |
229 zip_dyn_ltree[i] = new zip_DeflateCT(); | |
230 zip_dyn_dtree = new Array(2*zip_D_CODES+1); | |
231 for(i = 0; i < 2*zip_D_CODES+1; i++) | |
232 zip_dyn_dtree[i] = new zip_DeflateCT(); | |
233 zip_static_ltree = new Array(zip_L_CODES+2); | |
234 for(i = 0; i < zip_L_CODES+2; i++) | |
235 zip_static_ltree[i] = new zip_DeflateCT(); | |
236 zip_static_dtree = new Array(zip_D_CODES); | |
237 for(i = 0; i < zip_D_CODES; i++) | |
238 zip_static_dtree[i] = new zip_DeflateCT(); | |
239 zip_bl_tree = new Array(2*zip_BL_CODES+1); | |
240 for(i = 0; i < 2*zip_BL_CODES+1; i++) | |
241 zip_bl_tree[i] = new zip_DeflateCT(); | |
242 zip_l_desc = new zip_DeflateTreeDesc(); | |
243 zip_d_desc = new zip_DeflateTreeDesc(); | |
244 zip_bl_desc = new zip_DeflateTreeDesc(); | |
245 zip_bl_count = new Array(zip_MAX_BITS+1); | |
246 zip_heap = new Array(2*zip_L_CODES+1); | |
247 zip_depth = new Array(2*zip_L_CODES+1); | |
248 zip_length_code = new Array(zip_MAX_MATCH-zip_MIN_MATCH+1); | |
249 zip_dist_code = new Array(512); | |
250 zip_base_length = new Array(zip_LENGTH_CODES); | |
251 zip_base_dist = new Array(zip_D_CODES); | |
252 zip_flag_buf = new Array(parseInt(zip_LIT_BUFSIZE / 8)); | |
253 } | |
254 | |
255 var zip_deflate_end = function() { | |
256 zip_free_queue = zip_qhead = zip_qtail = null; | |
257 zip_outbuf = null; | |
258 zip_window = null; | |
259 zip_d_buf = null; | |
260 zip_l_buf = null; | |
261 zip_prev = null; | |
262 zip_dyn_ltree = null; | |
263 zip_dyn_dtree = null; | |
264 zip_static_ltree = null; | |
265 zip_static_dtree = null; | |
266 zip_bl_tree = null; | |
267 zip_l_desc = null; | |
268 zip_d_desc = null; | |
269 zip_bl_desc = null; | |
270 zip_bl_count = null; | |
271 zip_heap = null; | |
272 zip_depth = null; | |
273 zip_length_code = null; | |
274 zip_dist_code = null; | |
275 zip_base_length = null; | |
276 zip_base_dist = null; | |
277 zip_flag_buf = null; | |
278 } | |
279 | |
280 var zip_reuse_queue = function(p) { | |
281 p.next = zip_free_queue; | |
282 zip_free_queue = p; | |
283 } | |
284 | |
285 var zip_new_queue = function() { | |
286 var p; | |
287 | |
288 if(zip_free_queue != null) | |
289 { | |
290 p = zip_free_queue; | |
291 zip_free_queue = zip_free_queue.next; | |
292 } | |
293 else | |
294 p = new zip_DeflateBuffer(); | |
295 p.next = null; | |
296 p.len = p.off = 0; | |
297 | |
298 return p; | |
299 } | |
300 | |
301 var zip_head1 = function(i) { | |
302 return zip_prev[zip_WSIZE + i]; | |
303 } | |
304 | |
305 var zip_head2 = function(i, val) { | |
306 return zip_prev[zip_WSIZE + i] = val; | |
307 } | |
308 | |
309 /* put_byte is used for the compressed output, put_ubyte for the | |
310 * uncompressed output. However unlzw() uses window for its | |
311 * suffix table instead of its output buffer, so it does not use put_ubyte | |
312 * (to be cleaned up). | |
313 */ | |
314 var zip_put_byte = function(c) { | |
315 zip_outbuf[zip_outoff + zip_outcnt++] = c; | |
316 if(zip_outoff + zip_outcnt == zip_OUTBUFSIZ) | |
317 zip_qoutbuf(); | |
318 } | |
319 | |
320 /* Output a 16 bit value, lsb first */ | |
321 var zip_put_short = function(w) { | |
322 w &= 0xffff; | |
323 if(zip_outoff + zip_outcnt < zip_OUTBUFSIZ - 2) { | |
324 zip_outbuf[zip_outoff + zip_outcnt++] = (w & 0xff); | |
325 zip_outbuf[zip_outoff + zip_outcnt++] = (w >>> 8); | |
326 } else { | |
327 zip_put_byte(w & 0xff); | |
328 zip_put_byte(w >>> 8); | |
329 } | |
330 } | |
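/* Example: zip_put_short(0x0102) appends the bytes 0x02 then 0x01; it is
 * used for the LEN/NLEN words of stored blocks in zip_flush_block below.
 */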
331 | |
332 /* ========================================================================== | |
333 * Insert string s in the dictionary and set match_head to the previous head | |
334 * of the hash chain (the most recent string with same hash key). Return | |
335 * the previous length of the hash chain. | |
 336 * IN assertion: all calls to INSERT_STRING are made with consecutive | 
337 * input characters and the first MIN_MATCH bytes of s are valid | |
338 * (except for the last MIN_MATCH-1 bytes of the input file). | |
339 */ | |
340 var zip_INSERT_STRING = function() { | |
341 zip_ins_h = ((zip_ins_h << zip_H_SHIFT) | |
342 ^ (zip_window[zip_strstart + zip_MIN_MATCH - 1] & 0xff)) | |
343 & zip_HASH_MASK; | |
344 zip_hash_head = zip_head1(zip_ins_h); | |
345 zip_prev[zip_strstart & zip_WMASK] = zip_hash_head; | |
346 zip_head2(zip_ins_h, zip_strstart); | |
347 } | |
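/* The hash is a rolling one: each call shifts in the byte at
 * strstart + MIN_MATCH - 1, so ins_h always covers the MIN_MATCH bytes
 * starting at strstart (zip_lm_init pre-loads the first MIN_MATCH - 1 bytes
 * before the first insertion).
 */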
348 | |
349 /* Send a code of the given tree. c and tree must not have side effects */ | |
350 var zip_SEND_CODE = function(c, tree) { | |
351 zip_send_bits(tree[c].fc, tree[c].dl); | |
352 } | |
353 | |
354 /* Mapping from a distance to a distance code. dist is the distance - 1 and | |
355 * must not have side effects. dist_code[256] and dist_code[257] are never | |
356 * used. | |
357 */ | |
358 var zip_D_CODE = function(dist) { | |
359 return (dist < 256 ? zip_dist_code[dist] | |
360 : zip_dist_code[256 + (dist>>7)]) & 0xff; | |
361 } | |
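/* Example of the indexing: a match at distance 5 has dist = 4 and uses
 * zip_dist_code[4] directly, while a match at distance 1000 has
 * dist = 999 >= 256 and uses zip_dist_code[256 + (999 >> 7)] =
 * zip_dist_code[263]. Both halves of the table are filled in zip_ct_init.
 */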
362 | |
363 /* ========================================================================== | |
 364 * Compares two subtrees, using the tree depth as tie breaker when | 
365 * the subtrees have equal frequency. This minimizes the worst case length. | |
366 */ | |
367 var zip_SMALLER = function(tree, n, m) { | |
368 return tree[n].fc < tree[m].fc || | |
369 (tree[n].fc == tree[m].fc && zip_depth[n] <= zip_depth[m]); | |
370 } | |
371 | |
372 /* ========================================================================== | |
373 * read string data | |
374 */ | |
375 var zip_read_buff = function(buff, offset, n) { | |
376 var i; | |
377 for(i = 0; i < n && zip_deflate_pos < zip_deflate_data.length; i++) | |
378 buff[offset + i] = | |
379 zip_deflate_data.charCodeAt(zip_deflate_pos++) & 0xff; | |
380 return i; | |
381 } | |
382 | |
383 /* ========================================================================== | |
384 * Initialize the "longest match" routines for a new file | |
385 */ | |
386 var zip_lm_init = function() { | |
387 var j; | |
388 | |
389 /* Initialize the hash table. */ | |
390 for(j = 0; j < zip_HASH_SIZE; j++) | |
391 // zip_head2(j, zip_NIL); | |
392 zip_prev[zip_WSIZE + j] = 0; | |
393 /* prev will be initialized on the fly */ | |
394 | |
395 /* Set the default configuration parameters: | |
396 */ | |
397 zip_max_lazy_match = zip_configuration_table[zip_compr_level].max_lazy; | |
398 zip_good_match = zip_configuration_table[zip_compr_level].good_length; | |
399 if(!zip_FULL_SEARCH) | |
400 zip_nice_match = zip_configuration_table[zip_compr_level].nice_length; | |
401 zip_max_chain_length = zip_configuration_table[zip_compr_level].max_chain; | |
402 | |
403 zip_strstart = 0; | |
404 zip_block_start = 0; | |
405 | |
406 zip_lookahead = zip_read_buff(zip_window, 0, 2 * zip_WSIZE); | |
407 if(zip_lookahead <= 0) { | |
408 zip_eofile = true; | |
409 zip_lookahead = 0; | |
410 return; | |
411 } | |
412 zip_eofile = false; | |
413 /* Make sure that we always have enough lookahead. This is important | |
414 * if input comes from a device such as a tty. | |
415 */ | |
416 while(zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile) | |
417 zip_fill_window(); | |
418 | |
419 /* If lookahead < MIN_MATCH, ins_h is garbage, but this is | |
420 * not important since only literal bytes will be emitted. | |
421 */ | |
422 zip_ins_h = 0; | |
423 for(j = 0; j < zip_MIN_MATCH - 1; j++) { | |
424 // UPDATE_HASH(ins_h, window[j]); | |
425 zip_ins_h = ((zip_ins_h << zip_H_SHIFT) ^ (zip_window[j] & 0xff)) & zip_HASH_MASK; | |
426 } | |
427 } | |
428 | |
429 /* ========================================================================== | |
430 * Set match_start to the longest match starting at the given string and | |
431 * return its length. Matches shorter or equal to prev_length are discarded, | |
432 * in which case the result is equal to prev_length and match_start is | |
433 * garbage. | |
434 * IN assertions: cur_match is the head of the hash chain for the current | |
435 * string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1 | |
436 */ | |
437 var zip_longest_match = function(cur_match) { | |
438 var chain_length = zip_max_chain_length; // max hash chain length | |
439 var scanp = zip_strstart; // current string | |
440 var matchp; // matched string | |
441 var len; // length of current match | |
442 var best_len = zip_prev_length; // best match length so far | |
443 | |
444 /* Stop when cur_match becomes <= limit. To simplify the code, | |
445 * we prevent matches with the string of window index 0. | |
446 */ | |
447 var limit = (zip_strstart > zip_MAX_DIST ? zip_strstart - zip_MAX_DIST : zip_NIL); | |
448 | |
449 var strendp = zip_strstart + zip_MAX_MATCH; | |
450 var scan_end1 = zip_window[scanp + best_len - 1]; | |
451 var scan_end = zip_window[scanp + best_len]; | |
452 | |
453 /* Do not waste too much time if we already have a good match: */ | |
454 if(zip_prev_length >= zip_good_match) | |
455 chain_length >>= 2; | |
456 | |
457 // Assert(encoder->strstart <= window_size-MIN_LOOKAHEAD, "insufficient lookahead"); | |
458 | |
459 do { | |
460 // Assert(cur_match < encoder->strstart, "no future"); | |
461 matchp = cur_match; | |
462 | |
463 /* Skip to next match if the match length cannot increase | |
464 * or if the match length is less than 2: | |
465 */ | |
466 if(zip_window[matchp + best_len] != scan_end || | |
467 zip_window[matchp + best_len - 1] != scan_end1 || | |
468 zip_window[matchp] != zip_window[scanp] || | |
469 zip_window[++matchp] != zip_window[scanp + 1]) { | |
470 continue; | |
471 } | |
472 | |
473 /* The check at best_len-1 can be removed because it will be made | |
474 * again later. (This heuristic is not always a win.) | |
475 * It is not necessary to compare scan[2] and match[2] since they | |
476 * are always equal when the other bytes match, given that | |
477 * the hash keys are equal and that HASH_BITS >= 8. | |
478 */ | |
479 scanp += 2; | |
480 matchp++; | |
481 | |
482 /* We check for insufficient lookahead only every 8th comparison; | |
483 * the 256th check will be made at strstart+258. | |
484 */ | |
485 do { | |
486 } while(zip_window[++scanp] == zip_window[++matchp] && | |
487 zip_window[++scanp] == zip_window[++matchp] && | |
488 zip_window[++scanp] == zip_window[++matchp] && | |
489 zip_window[++scanp] == zip_window[++matchp] && | |
490 zip_window[++scanp] == zip_window[++matchp] && | |
491 zip_window[++scanp] == zip_window[++matchp] && | |
492 zip_window[++scanp] == zip_window[++matchp] && | |
493 zip_window[++scanp] == zip_window[++matchp] && | |
494 scanp < strendp); | |
495 | |
496 len = zip_MAX_MATCH - (strendp - scanp); | |
497 scanp = strendp - zip_MAX_MATCH; | |
498 | |
499 if(len > best_len) { | |
500 zip_match_start = cur_match; | |
501 best_len = len; | |
502 if(zip_FULL_SEARCH) { | |
503 if(len >= zip_MAX_MATCH) break; | |
504 } else { | |
505 if(len >= zip_nice_match) break; | |
506 } | |
507 | |
508 scan_end1 = zip_window[scanp + best_len-1]; | |
509 scan_end = zip_window[scanp + best_len]; | |
510 } | |
511 } while((cur_match = zip_prev[cur_match & zip_WMASK]) > limit | |
512 && --chain_length != 0); | |
513 | |
514 return best_len; | |
515 } | |
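/* The empty do/while above compares eight byte pairs per evaluation of its
 * condition and is bounded by strendp = strstart + MAX_MATCH, so a single
 * match never exceeds MAX_MATCH (258) bytes; chain_length and limit bound
 * how far back along the hash chain the search may walk.
 */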
516 | |
517 /* ========================================================================== | |
518 * Fill the window when the lookahead becomes insufficient. | |
519 * Updates strstart and lookahead, and sets eofile if end of input file. | |
520 * IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0 | |
521 * OUT assertions: at least one byte has been read, or eofile is set; | |
522 * file reads are performed for at least two bytes (required for the | |
523 * translate_eol option). | |
524 */ | |
525 var zip_fill_window = function() { | |
526 var n, m; | |
527 | |
528 // Amount of free space at the end of the window. | |
529 var more = zip_window_size - zip_lookahead - zip_strstart; | |
530 | |
531 /* If the window is almost full and there is insufficient lookahead, | |
532 * move the upper half to the lower one to make room in the upper half. | |
533 */ | |
534 if(more == -1) { | |
535 /* Very unlikely, but possible on 16 bit machine if strstart == 0 | |
536 * and lookahead == 1 (input done one byte at time) | |
537 */ | |
538 more--; | |
539 } else if(zip_strstart >= zip_WSIZE + zip_MAX_DIST) { | |
540 /* By the IN assertion, the window is not empty so we can't confuse | |
541 * more == 0 with more == 64K on a 16 bit machine. | |
542 */ | |
543 // Assert(window_size == (ulg)2*WSIZE, "no sliding with BIG_MEM"); | |
544 | |
545 // System.arraycopy(window, WSIZE, window, 0, WSIZE); | |
546 for(n = 0; n < zip_WSIZE; n++) | |
547 zip_window[n] = zip_window[n + zip_WSIZE]; | |
548 | |
549 zip_match_start -= zip_WSIZE; | |
550 zip_strstart -= zip_WSIZE; /* we now have strstart >= MAX_DIST: */ | |
551 zip_block_start -= zip_WSIZE; | |
552 | |
553 for(n = 0; n < zip_HASH_SIZE; n++) { | |
554 m = zip_head1(n); | |
555 zip_head2(n, m >= zip_WSIZE ? m - zip_WSIZE : zip_NIL); | |
556 } | |
557 for(n = 0; n < zip_WSIZE; n++) { | |
558 /* If n is not on any hash chain, prev[n] is garbage but | |
559 * its value will never be used. | |
560 */ | |
561 m = zip_prev[n]; | |
562 zip_prev[n] = (m >= zip_WSIZE ? m - zip_WSIZE : zip_NIL); | |
563 } | |
564 more += zip_WSIZE; | |
565 } | |
566 // At this point, more >= 2 | |
567 if(!zip_eofile) { | |
568 n = zip_read_buff(zip_window, zip_strstart + zip_lookahead, more); | |
569 if(n <= 0) | |
570 zip_eofile = true; | |
571 else | |
572 zip_lookahead += n; | |
573 } | |
574 } | |
575 | |
576 /* ========================================================================== | |
 577 * Processes a new input file and returns its compressed length. This | 
 578 * function does not perform lazy evaluation of matches and inserts | 
579 * new strings in the dictionary only for unmatched strings or for short | |
580 * matches. It is used only for the fast compression options. | |
581 */ | |
582 var zip_deflate_fast = function() { | |
583 while(zip_lookahead != 0 && zip_qhead == null) { | |
584 var flush; // set if current block must be flushed | |
585 | |
586 /* Insert the string window[strstart .. strstart+2] in the | |
587 * dictionary, and set hash_head to the head of the hash chain: | |
588 */ | |
589 zip_INSERT_STRING(); | |
590 | |
591 /* Find the longest match, discarding those <= prev_length. | |
592 * At this point we have always match_length < MIN_MATCH | |
593 */ | |
594 if(zip_hash_head != zip_NIL && | |
595 zip_strstart - zip_hash_head <= zip_MAX_DIST) { | |
596 /* To simplify the code, we prevent matches with the string | |
597 * of window index 0 (in particular we have to avoid a match | |
598 * of the string with itself at the start of the input file). | |
599 */ | |
600 zip_match_length = zip_longest_match(zip_hash_head); | |
601 /* longest_match() sets match_start */ | |
602 if(zip_match_length > zip_lookahead) | |
603 zip_match_length = zip_lookahead; | |
604 } | |
605 if(zip_match_length >= zip_MIN_MATCH) { | |
606 // check_match(strstart, match_start, match_length); | |
607 | |
608 flush = zip_ct_tally(zip_strstart - zip_match_start, | |
609 zip_match_length - zip_MIN_MATCH); | |
610 zip_lookahead -= zip_match_length; | |
611 | |
612 /* Insert new strings in the hash table only if the match length | |
613 * is not too large. This saves time but degrades compression. | |
614 */ | |
615 if(zip_match_length <= zip_max_lazy_match) { | |
616 zip_match_length--; // string at strstart already in hash table | |
617 do { | |
618 zip_strstart++; | |
619 zip_INSERT_STRING(); | |
620 /* strstart never exceeds WSIZE-MAX_MATCH, so there are | |
621 * always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH | |
622 * these bytes are garbage, but it does not matter since | |
623 * the next lookahead bytes will be emitted as literals. | |
624 */ | |
625 } while(--zip_match_length != 0); | |
626 zip_strstart++; | |
627 } else { | |
628 zip_strstart += zip_match_length; | |
629 zip_match_length = 0; | |
630 zip_ins_h = zip_window[zip_strstart] & 0xff; | |
631 // UPDATE_HASH(ins_h, window[strstart + 1]); | |
632 zip_ins_h = ((zip_ins_h<<zip_H_SHIFT) ^ (zip_window[zip_strstart + 1] & 0xff)) & zip_HASH_MASK; | |
633 | |
634 //#if MIN_MATCH != 3 | |
635 // Call UPDATE_HASH() MIN_MATCH-3 more times | |
636 //#endif | |
637 | |
638 } | |
639 } else { | |
640 /* No match, output a literal byte */ | |
641 flush = zip_ct_tally(0, zip_window[zip_strstart] & 0xff); | |
642 zip_lookahead--; | |
643 zip_strstart++; | |
644 } | |
645 if(flush) { | |
646 zip_flush_block(0); | |
647 zip_block_start = zip_strstart; | |
648 } | |
649 | |
650 /* Make sure that we always have enough lookahead, except | |
651 * at the end of the input file. We need MAX_MATCH bytes | |
652 * for the next match, plus MIN_MATCH bytes to insert the | |
653 * string following the next match. | |
654 */ | |
655 while(zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile) | |
656 zip_fill_window(); | |
657 } | |
658 } | |
659 | |
660 var zip_deflate_better = function() { | |
661 /* Process the input block. */ | |
662 while(zip_lookahead != 0 && zip_qhead == null) { | |
663 /* Insert the string window[strstart .. strstart+2] in the | |
664 * dictionary, and set hash_head to the head of the hash chain: | |
665 */ | |
666 zip_INSERT_STRING(); | |
667 | |
668 /* Find the longest match, discarding those <= prev_length. | |
669 */ | |
670 zip_prev_length = zip_match_length; | |
671 zip_prev_match = zip_match_start; | |
672 zip_match_length = zip_MIN_MATCH - 1; | |
673 | |
674 if(zip_hash_head != zip_NIL && | |
675 zip_prev_length < zip_max_lazy_match && | |
676 zip_strstart - zip_hash_head <= zip_MAX_DIST) { | |
677 /* To simplify the code, we prevent matches with the string | |
678 * of window index 0 (in particular we have to avoid a match | |
679 * of the string with itself at the start of the input file). | |
680 */ | |
681 zip_match_length = zip_longest_match(zip_hash_head); | |
682 /* longest_match() sets match_start */ | |
683 if(zip_match_length > zip_lookahead) | |
684 zip_match_length = zip_lookahead; | |
685 | |
686 /* Ignore a length 3 match if it is too distant: */ | |
687 if(zip_match_length == zip_MIN_MATCH && | |
688 zip_strstart - zip_match_start > zip_TOO_FAR) { | |
689 /* If prev_match is also MIN_MATCH, match_start is garbage | |
690 * but we will ignore the current match anyway. | |
691 */ | |
692 zip_match_length--; | |
693 } | |
694 } | |
695 /* If there was a match at the previous step and the current | |
696 * match is not better, output the previous match: | |
697 */ | |
698 if(zip_prev_length >= zip_MIN_MATCH && | |
699 zip_match_length <= zip_prev_length) { | |
700 var flush; // set if current block must be flushed | |
701 | |
702 // check_match(strstart - 1, prev_match, prev_length); | |
703 flush = zip_ct_tally(zip_strstart - 1 - zip_prev_match, | |
704 zip_prev_length - zip_MIN_MATCH); | |
705 | |
706 /* Insert in hash table all strings up to the end of the match. | |
707 * strstart-1 and strstart are already inserted. | |
708 */ | |
709 zip_lookahead -= zip_prev_length - 1; | |
710 zip_prev_length -= 2; | |
711 do { | |
712 zip_strstart++; | |
713 zip_INSERT_STRING(); | |
714 /* strstart never exceeds WSIZE-MAX_MATCH, so there are | |
715 * always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH | |
716 * these bytes are garbage, but it does not matter since the | |
717 * next lookahead bytes will always be emitted as literals. | |
718 */ | |
719 } while(--zip_prev_length != 0); | |
720 zip_match_available = 0; | |
721 zip_match_length = zip_MIN_MATCH - 1; | |
722 zip_strstart++; | |
723 if(flush) { | |
724 zip_flush_block(0); | |
725 zip_block_start = zip_strstart; | |
726 } | |
727 } else if(zip_match_available != 0) { | |
728 /* If there was no match at the previous position, output a | |
729 * single literal. If there was a match but the current match | |
730 * is longer, truncate the previous match to a single literal. | |
731 */ | |
732 if(zip_ct_tally(0, zip_window[zip_strstart - 1] & 0xff)) { | |
733 zip_flush_block(0); | |
734 zip_block_start = zip_strstart; | |
735 } | |
736 zip_strstart++; | |
737 zip_lookahead--; | |
738 } else { | |
739 /* There is no previous match to compare with, wait for | |
740 * the next step to decide. | |
741 */ | |
742 zip_match_available = 1; | |
743 zip_strstart++; | |
744 zip_lookahead--; | |
745 } | |
746 | |
747 /* Make sure that we always have enough lookahead, except | |
748 * at the end of the input file. We need MAX_MATCH bytes | |
749 * for the next match, plus MIN_MATCH bytes to insert the | |
750 * string following the next match. | |
751 */ | |
752 while(zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile) | |
753 zip_fill_window(); | |
754 } | |
755 } | |
756 | |
757 var zip_init_deflate = function() { | |
758 if(zip_eofile) | |
759 return; | |
760 zip_bi_buf = 0; | |
761 zip_bi_valid = 0; | |
762 zip_ct_init(); | |
763 zip_lm_init(); | |
764 | |
765 zip_qhead = null; | |
766 zip_outcnt = 0; | |
767 zip_outoff = 0; | |
768 | |
769 if(zip_compr_level <= 3) | |
770 { | |
771 zip_prev_length = zip_MIN_MATCH - 1; | |
772 zip_match_length = 0; | |
773 } | |
774 else | |
775 { | |
776 zip_match_length = zip_MIN_MATCH - 1; | |
777 zip_match_available = 0; | |
778 } | |
779 | |
780 zip_complete = false; | |
781 } | |
782 | |
783 /* ========================================================================== | |
784 * Same as above, but achieves better compression. We use a lazy | |
785 * evaluation for matches: a match is finally adopted only if there is | |
786 * no better match at the next window position. | |
787 */ | |
788 var zip_deflate_internal = function(buff, off, buff_size) { | |
789 var n; | |
790 | |
791 if(!zip_initflag) | |
792 { | |
793 zip_init_deflate(); | |
794 zip_initflag = true; | |
795 if(zip_lookahead == 0) { // empty | |
796 zip_complete = true; | |
797 return 0; | |
798 } | |
799 } | |
800 | |
801 if((n = zip_qcopy(buff, off, buff_size)) == buff_size) | |
802 return buff_size; | |
803 | |
804 if(zip_complete) | |
805 return n; | |
806 | |
807 if(zip_compr_level <= 3) // optimized for speed | |
808 zip_deflate_fast(); | |
809 else | |
810 zip_deflate_better(); | |
811 if(zip_lookahead == 0) { | |
812 if(zip_match_available != 0) | |
813 zip_ct_tally(0, zip_window[zip_strstart - 1] & 0xff); | |
814 zip_flush_block(1); | |
815 zip_complete = true; | |
816 } | |
817 return n + zip_qcopy(buff, n + off, buff_size - n); | |
818 } | |
819 | |
820 var zip_qcopy = function(buff, off, buff_size) { | |
821 var n, i, j; | |
822 | |
823 n = 0; | |
824 while(zip_qhead != null && n < buff_size) | |
825 { | |
826 i = buff_size - n; | |
827 if(i > zip_qhead.len) | |
828 i = zip_qhead.len; | |
829 // System.arraycopy(qhead.ptr, qhead.off, buff, off + n, i); | |
830 for(j = 0; j < i; j++) | |
831 buff[off + n + j] = zip_qhead.ptr[zip_qhead.off + j]; | |
832 | |
833 zip_qhead.off += i; | |
834 zip_qhead.len -= i; | |
835 n += i; | |
836 if(zip_qhead.len == 0) { | |
837 var p; | |
838 p = zip_qhead; | |
839 zip_qhead = zip_qhead.next; | |
840 zip_reuse_queue(p); | |
841 } | |
842 } | |
843 | |
844 if(n == buff_size) | |
845 return n; | |
846 | |
847 if(zip_outoff < zip_outcnt) { | |
848 i = buff_size - n; | |
849 if(i > zip_outcnt - zip_outoff) | |
850 i = zip_outcnt - zip_outoff; | |
851 // System.arraycopy(outbuf, outoff, buff, off + n, i); | |
852 for(j = 0; j < i; j++) | |
853 buff[off + n + j] = zip_outbuf[zip_outoff + j]; | |
854 zip_outoff += i; | |
855 n += i; | |
856 if(zip_outcnt == zip_outoff) | |
857 zip_outcnt = zip_outoff = 0; | |
858 } | |
859 return n; | |
860 } | |
861 | |
862 /* ========================================================================== | |
863 * Allocate the match buffer, initialize the various tables and save the | |
864 * location of the internal file attribute (ascii/binary) and method | |
865 * (DEFLATE/STORE). | |
866 */ | |
867 var zip_ct_init = function() { | |
868 var n; // iterates over tree elements | |
869 var bits; // bit counter | |
870 var length; // length value | |
871 var code; // code value | |
872 var dist; // distance index | |
873 | |
874 if(zip_static_dtree[0].dl != 0) return; // ct_init already called | |
875 | |
876 zip_l_desc.dyn_tree = zip_dyn_ltree; | |
877 zip_l_desc.static_tree = zip_static_ltree; | |
878 zip_l_desc.extra_bits = zip_extra_lbits; | |
879 zip_l_desc.extra_base = zip_LITERALS + 1; | |
880 zip_l_desc.elems = zip_L_CODES; | |
881 zip_l_desc.max_length = zip_MAX_BITS; | |
882 zip_l_desc.max_code = 0; | |
883 | |
884 zip_d_desc.dyn_tree = zip_dyn_dtree; | |
885 zip_d_desc.static_tree = zip_static_dtree; | |
886 zip_d_desc.extra_bits = zip_extra_dbits; | |
887 zip_d_desc.extra_base = 0; | |
888 zip_d_desc.elems = zip_D_CODES; | |
889 zip_d_desc.max_length = zip_MAX_BITS; | |
890 zip_d_desc.max_code = 0; | |
891 | |
892 zip_bl_desc.dyn_tree = zip_bl_tree; | |
893 zip_bl_desc.static_tree = null; | |
894 zip_bl_desc.extra_bits = zip_extra_blbits; | |
895 zip_bl_desc.extra_base = 0; | |
896 zip_bl_desc.elems = zip_BL_CODES; | |
897 zip_bl_desc.max_length = zip_MAX_BL_BITS; | |
898 zip_bl_desc.max_code = 0; | |
899 | |
900 // Initialize the mapping length (0..255) -> length code (0..28) | |
901 length = 0; | |
902 for(code = 0; code < zip_LENGTH_CODES-1; code++) { | |
903 zip_base_length[code] = length; | |
904 for(n = 0; n < (1<<zip_extra_lbits[code]); n++) | |
905 zip_length_code[length++] = code; | |
906 } | |
907 // Assert (length == 256, "ct_init: length != 256"); | |
908 | |
909 /* Note that the length 255 (match length 258) can be represented | |
910 * in two different ways: code 284 + 5 bits or code 285, so we | |
911 * overwrite length_code[255] to use the best encoding: | |
912 */ | |
913 zip_length_code[length-1] = code; | |
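/* Worked example: lc = 8 or 9 maps to length code 8 (base_length 8, one
 * extra bit), i.e. match lengths 11-12 once MIN_MATCH is added back;
 * zip_ct_tally emits it as literal/length symbol 8 + LITERALS + 1 = 265.
 */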
914 | |
915 /* Initialize the mapping dist (0..32K) -> dist code (0..29) */ | |
916 dist = 0; | |
917 for(code = 0 ; code < 16; code++) { | |
918 zip_base_dist[code] = dist; | |
919 for(n = 0; n < (1<<zip_extra_dbits[code]); n++) { | |
920 zip_dist_code[dist++] = code; | |
921 } | |
922 } | |
923 // Assert (dist == 256, "ct_init: dist != 256"); | |
924 dist >>= 7; // from now on, all distances are divided by 128 | |
925 for( ; code < zip_D_CODES; code++) { | |
926 zip_base_dist[code] = dist << 7; | |
927 for(n = 0; n < (1<<(zip_extra_dbits[code]-7)); n++) | |
928 zip_dist_code[256 + dist++] = code; | |
929 } | |
930 // Assert (dist == 256, "ct_init: 256+dist != 512"); | |
931 | |
932 // Construct the codes of the static literal tree | |
933 for(bits = 0; bits <= zip_MAX_BITS; bits++) | |
934 zip_bl_count[bits] = 0; | |
935 n = 0; | |
936 while(n <= 143) { zip_static_ltree[n++].dl = 8; zip_bl_count[8]++; } | |
937 while(n <= 255) { zip_static_ltree[n++].dl = 9; zip_bl_count[9]++; } | |
938 while(n <= 279) { zip_static_ltree[n++].dl = 7; zip_bl_count[7]++; } | |
939 while(n <= 287) { zip_static_ltree[n++].dl = 8; zip_bl_count[8]++; } | |
940 /* Codes 286 and 287 do not exist, but we must include them in the | |
941 * tree construction to get a canonical Huffman tree (longest code | |
942 * all ones) | |
943 */ | |
944 zip_gen_codes(zip_static_ltree, zip_L_CODES + 1); | |
945 | |
946 /* The static distance tree is trivial: */ | |
947 for(n = 0; n < zip_D_CODES; n++) { | |
948 zip_static_dtree[n].dl = 5; | |
949 zip_static_dtree[n].fc = zip_bi_reverse(n, 5); | |
950 } | |
951 | |
952 // Initialize the first block of the first file: | |
953 zip_init_block(); | |
954 } | |
955 | |
956 /* ========================================================================== | |
957 * Initialize a new block. | |
958 */ | |
959 var zip_init_block = function() { | |
960 var n; // iterates over tree elements | |
961 | |
962 // Initialize the trees. | |
963 for(n = 0; n < zip_L_CODES; n++) zip_dyn_ltree[n].fc = 0; | |
964 for(n = 0; n < zip_D_CODES; n++) zip_dyn_dtree[n].fc = 0; | |
965 for(n = 0; n < zip_BL_CODES; n++) zip_bl_tree[n].fc = 0; | |
966 | |
967 zip_dyn_ltree[zip_END_BLOCK].fc = 1; | |
968 zip_opt_len = zip_static_len = 0; | |
969 zip_last_lit = zip_last_dist = zip_last_flags = 0; | |
970 zip_flags = 0; | |
971 zip_flag_bit = 1; | |
972 } | |
973 | |
974 /* ========================================================================== | |
975 * Restore the heap property by moving down the tree starting at node k, | |
976 * exchanging a node with the smallest of its two sons if necessary, stopping | |
977 * when the heap property is re-established (each father smaller than its | |
978 * two sons). | |
979 */ | |
980 var zip_pqdownheap = function( | |
981 tree, // the tree to restore | |
982 k) { // node to move down | |
983 var v = zip_heap[k]; | |
984 var j = k << 1; // left son of k | |
985 | |
986 while(j <= zip_heap_len) { | |
987 // Set j to the smallest of the two sons: | |
988 if(j < zip_heap_len && | |
989 zip_SMALLER(tree, zip_heap[j + 1], zip_heap[j])) | |
990 j++; | |
991 | |
992 // Exit if v is smaller than both sons | |
993 if(zip_SMALLER(tree, v, zip_heap[j])) | |
994 break; | |
995 | |
996 // Exchange v with the smallest son | |
997 zip_heap[k] = zip_heap[j]; | |
998 k = j; | |
999 | |
1000 // And continue down the tree, setting j to the left son of k | |
1001 j <<= 1; | |
1002 } | |
1003 zip_heap[k] = v; | |
1004 } | |
1005 | |
1006 /* ========================================================================== | |
1007 * Compute the optimal bit lengths for a tree and update the total bit length | |
1008 * for the current block. | |
1009 * IN assertion: the fields freq and dad are set, heap[heap_max] and | |
1010 * above are the tree nodes sorted by increasing frequency. | |
1011 * OUT assertions: the field len is set to the optimal bit length, the | |
1012 * array bl_count contains the frequencies for each bit length. | |
1013 * The length opt_len is updated; static_len is also updated if stree is | |
1014 * not null. | |
1015 */ | |
1016 var zip_gen_bitlen = function(desc) { // the tree descriptor | |
1017 var tree = desc.dyn_tree; | |
1018 var extra = desc.extra_bits; | |
1019 var base = desc.extra_base; | |
1020 var max_code = desc.max_code; | |
1021 var max_length = desc.max_length; | |
1022 var stree = desc.static_tree; | |
1023 var h; // heap index | |
1024 var n, m; // iterate over the tree elements | |
1025 var bits; // bit length | |
1026 var xbits; // extra bits | |
1027 var f; // frequency | |
1028 var overflow = 0; // number of elements with bit length too large | |
1029 | |
1030 for(bits = 0; bits <= zip_MAX_BITS; bits++) | |
1031 zip_bl_count[bits] = 0; | |
1032 | |
1033 /* In a first pass, compute the optimal bit lengths (which may | |
1034 * overflow in the case of the bit length tree). | |
1035 */ | |
1036 tree[zip_heap[zip_heap_max]].dl = 0; // root of the heap | |
1037 | |
1038 for(h = zip_heap_max + 1; h < zip_HEAP_SIZE; h++) { | |
1039 n = zip_heap[h]; | |
1040 bits = tree[tree[n].dl].dl + 1; | |
1041 if(bits > max_length) { | |
1042 bits = max_length; | |
1043 overflow++; | |
1044 } | |
1045 tree[n].dl = bits; | |
1046 // We overwrite tree[n].dl which is no longer needed | |
1047 | |
1048 if(n > max_code) | |
1049 continue; // not a leaf node | |
1050 | |
1051 zip_bl_count[bits]++; | |
1052 xbits = 0; | |
1053 if(n >= base) | |
1054 xbits = extra[n - base]; | |
1055 f = tree[n].fc; | |
1056 zip_opt_len += f * (bits + xbits); | |
1057 if(stree != null) | |
1058 zip_static_len += f * (stree[n].dl + xbits); | |
1059 } | |
1060 if(overflow == 0) | |
1061 return; | |
1062 | |
1063 // This happens for example on obj2 and pic of the Calgary corpus | |
1064 | |
1065 // Find the first bit length which could increase: | |
1066 do { | |
1067 bits = max_length - 1; | |
1068 while(zip_bl_count[bits] == 0) | |
1069 bits--; | |
1070 zip_bl_count[bits]--; // move one leaf down the tree | |
1071 zip_bl_count[bits + 1] += 2; // move one overflow item as its brother | |
1072 zip_bl_count[max_length]--; | |
1073 /* The brother of the overflow item also moves one step up, | |
1074 * but this does not affect bl_count[max_length] | |
1075 */ | |
1076 overflow -= 2; | |
1077 } while(overflow > 0); | |
1078 | |
1079 /* Now recompute all bit lengths, scanning in increasing frequency. | |
1080 * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all | |
1081 * lengths instead of fixing only the wrong ones. This idea is taken | |
1082 * from 'ar' written by Haruhiko Okumura.) | |
1083 */ | |
1084 for(bits = max_length; bits != 0; bits--) { | |
1085 n = zip_bl_count[bits]; | |
1086 while(n != 0) { | |
1087 m = zip_heap[--h]; | |
1088 if(m > max_code) | |
1089 continue; | |
1090 if(tree[m].dl != bits) { | |
1091 zip_opt_len += (bits - tree[m].dl) * tree[m].fc; | |
 1092 tree[m].dl = bits; // store the recomputed bit length (dl holds code lengths at this point) | 
1093 } | |
1094 n--; | |
1095 } | |
1096 } | |
1097 } | |
1098 | |
1099 /* ========================================================================== | |
1100 * Generate the codes for a given tree and bit counts (which need not be | |
1101 * optimal). | |
1102 * IN assertion: the array bl_count contains the bit length statistics for | |
1103 * the given tree and the field len is set for all tree elements. | |
1104 * OUT assertion: the field code is set for all tree elements of non | |
1105 * zero code length. | |
1106 */ | |
1107 var zip_gen_codes = function(tree, // the tree to decorate | |
1108 max_code) { // largest code with non zero frequency | |
1109 var next_code = new Array(zip_MAX_BITS+1); // next code value for each bit length | |
1110 var code = 0; // running code value | |
1111 var bits; // bit index | |
1112 var n; // code index | |
1113 | |
1114 /* The distribution counts are first used to generate the code values | |
1115 * without bit reversal. | |
1116 */ | |
1117 for(bits = 1; bits <= zip_MAX_BITS; bits++) { | |
1118 code = ((code + zip_bl_count[bits-1]) << 1); | |
1119 next_code[bits] = code; | |
1120 } | |
1121 | |
1122 /* Check that the bit counts in bl_count are consistent. The last code | |
1123 * must be all ones. | |
1124 */ | |
1125 // Assert (code + encoder->bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1, | |
1126 // "inconsistent bit counts"); | |
1127 // Tracev((stderr,"\ngen_codes: max_code %d ", max_code)); | |
1128 | |
1129 for(n = 0; n <= max_code; n++) { | |
1130 var len = tree[n].dl; | |
1131 if(len == 0) | |
1132 continue; | |
1133 // Now reverse the bits | |
1134 tree[n].fc = zip_bi_reverse(next_code[len]++, len); | |
1135 | |
1136 // Tracec(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ", | |
1137 // n, (isgraph(n) ? n : ' '), len, tree[n].fc, next_code[len]-1)); | |
1138 } | |
1139 } | |
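/* Worked example with the static literal tree built in zip_ct_init: the bit
 * counts are 24 seven-bit, 152 eight-bit and 112 nine-bit codes, so
 * next_code[7] = 0, next_code[8] = (0 + 24) << 1 = 48 and
 * next_code[9] = (48 + 152) << 1 = 400, the starting values of the fixed
 * DEFLATE codes; each code is then bit-reversed by zip_bi_reverse before
 * being stored in fc.
 */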
1140 | |
1141 /* ========================================================================== | |
 1142 * Construct one Huffman tree and assign the code bit strings and lengths. | 
1143 * Update the total bit length for the current block. | |
1144 * IN assertion: the field freq is set for all tree elements. | |
1145 * OUT assertions: the fields len and code are set to the optimal bit length | |
1146 * and corresponding code. The length opt_len is updated; static_len is | |
1147 * also updated if stree is not null. The field max_code is set. | |
1148 */ | |
1149 var zip_build_tree = function(desc) { // the tree descriptor | |
1150 var tree = desc.dyn_tree; | |
1151 var stree = desc.static_tree; | |
1152 var elems = desc.elems; | |
1153 var n, m; // iterate over heap elements | |
1154 var max_code = -1; // largest code with non zero frequency | |
1155 var node = elems; // next internal node of the tree | |
1156 | |
1157 /* Construct the initial heap, with least frequent element in | |
1158 * heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1]. | |
1159 * heap[0] is not used. | |
1160 */ | |
1161 zip_heap_len = 0; | |
1162 zip_heap_max = zip_HEAP_SIZE; | |
1163 | |
1164 for(n = 0; n < elems; n++) { | |
1165 if(tree[n].fc != 0) { | |
1166 zip_heap[++zip_heap_len] = max_code = n; | |
1167 zip_depth[n] = 0; | |
1168 } else | |
1169 tree[n].dl = 0; | |
1170 } | |
1171 | |
1172 /* The pkzip format requires that at least one distance code exists, | |
1173 * and that at least one bit should be sent even if there is only one | |
1174 * possible code. So to avoid special checks later on we force at least | |
1175 * two codes of non zero frequency. | |
1176 */ | |
1177 while(zip_heap_len < 2) { | |
1178 var xnew = zip_heap[++zip_heap_len] = (max_code < 2 ? ++max_code : 0); | |
1179 tree[xnew].fc = 1; | |
1180 zip_depth[xnew] = 0; | |
1181 zip_opt_len--; | |
1182 if(stree != null) | |
1183 zip_static_len -= stree[xnew].dl; | |
1184 // new is 0 or 1 so it does not have extra bits | |
1185 } | |
1186 desc.max_code = max_code; | |
1187 | |
1188 /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree, | |
1189 * establish sub-heaps of increasing lengths: | |
1190 */ | |
1191 for(n = zip_heap_len >> 1; n >= 1; n--) | |
1192 zip_pqdownheap(tree, n); | |
1193 | |
1194 /* Construct the Huffman tree by repeatedly combining the least two | |
1195 * frequent nodes. | |
1196 */ | |
1197 do { | |
1198 n = zip_heap[zip_SMALLEST]; | |
1199 zip_heap[zip_SMALLEST] = zip_heap[zip_heap_len--]; | |
1200 zip_pqdownheap(tree, zip_SMALLEST); | |
1201 | |
1202 m = zip_heap[zip_SMALLEST]; // m = node of next least frequency | |
1203 | |
1204 // keep the nodes sorted by frequency | |
1205 zip_heap[--zip_heap_max] = n; | |
1206 zip_heap[--zip_heap_max] = m; | |
1207 | |
1208 // Create a new node father of n and m | |
1209 tree[node].fc = tree[n].fc + tree[m].fc; | |
1210 // depth[node] = (char)(MAX(depth[n], depth[m]) + 1); | |
1211 if(zip_depth[n] > zip_depth[m] + 1) | |
1212 zip_depth[node] = zip_depth[n]; | |
1213 else | |
1214 zip_depth[node] = zip_depth[m] + 1; | |
1215 tree[n].dl = tree[m].dl = node; | |
1216 | |
1217 // and insert the new node in the heap | |
1218 zip_heap[zip_SMALLEST] = node++; | |
1219 zip_pqdownheap(tree, zip_SMALLEST); | |
1220 | |
1221 } while(zip_heap_len >= 2); | |
1222 | |
1223 zip_heap[--zip_heap_max] = zip_heap[zip_SMALLEST]; | |
1224 | |
1225 /* At this point, the fields freq and dad are set. We can now | |
1226 * generate the bit lengths. | |
1227 */ | |
1228 zip_gen_bitlen(desc); | |
1229 | |
1230 // The field len is now set, we can generate the bit codes | |
1231 zip_gen_codes(tree, max_code); | |
1232 } | |
1233 | |
1234 /* ========================================================================== | |
1235 * Scan a literal or distance tree to determine the frequencies of the codes | |
1236 * in the bit length tree. Updates opt_len to take into account the repeat | |
1237 * counts. (The contribution of the bit length codes will be added later | |
1238 * during the construction of bl_tree.) | |
1239 */ | |
1240 var zip_scan_tree = function(tree,// the tree to be scanned | |
1241 max_code) { // and its largest code of non zero frequency | |
1242 var n; // iterates over all tree elements | |
1243 var prevlen = -1; // last emitted length | |
1244 var curlen; // length of current code | |
1245 var nextlen = tree[0].dl; // length of next code | |
1246 var count = 0; // repeat count of the current code | |
1247 var max_count = 7; // max repeat count | |
1248 var min_count = 4; // min repeat count | |
1249 | |
1250 if(nextlen == 0) { | |
1251 max_count = 138; | |
1252 min_count = 3; | |
1253 } | |
1254 tree[max_code + 1].dl = 0xffff; // guard | |
1255 | |
1256 for(n = 0; n <= max_code; n++) { | |
1257 curlen = nextlen; | |
1258 nextlen = tree[n + 1].dl; | |
1259 if(++count < max_count && curlen == nextlen) | |
1260 continue; | |
1261 else if(count < min_count) | |
1262 zip_bl_tree[curlen].fc += count; | |
1263 else if(curlen != 0) { | |
1264 if(curlen != prevlen) | |
1265 zip_bl_tree[curlen].fc++; | |
1266 zip_bl_tree[zip_REP_3_6].fc++; | |
1267 } else if(count <= 10) | |
1268 zip_bl_tree[zip_REPZ_3_10].fc++; | |
1269 else | |
1270 zip_bl_tree[zip_REPZ_11_138].fc++; | |
1271 count = 0; prevlen = curlen; | |
1272 if(nextlen == 0) { | |
1273 max_count = 138; | |
1274 min_count = 3; | |
1275 } else if(curlen == nextlen) { | |
1276 max_count = 6; | |
1277 min_count = 3; | |
1278 } else { | |
1279 max_count = 7; | |
1280 min_count = 4; | |
1281 } | |
1282 } | |
1283 } | |
1284 | |
1285 /* ========================================================================== | |
1286 * Send a literal or distance tree in compressed form, using the codes in | |
1287 * bl_tree. | |
1288 */ | |
1289 var zip_send_tree = function(tree, // the tree to be scanned | |
1290 max_code) { // and its largest code of non zero frequency | |
1291 var n; // iterates over all tree elements | |
1292 var prevlen = -1; // last emitted length | |
1293 var curlen; // length of current code | |
1294 var nextlen = tree[0].dl; // length of next code | |
1295 var count = 0; // repeat count of the current code | |
1296 var max_count = 7; // max repeat count | |
1297 var min_count = 4; // min repeat count | |
1298 | |
1299 /* tree[max_code+1].dl = -1; */ /* guard already set */ | |
1300 if(nextlen == 0) { | |
1301 max_count = 138; | |
1302 min_count = 3; | |
1303 } | |
1304 | |
1305 for(n = 0; n <= max_code; n++) { | |
1306 curlen = nextlen; | |
1307 nextlen = tree[n+1].dl; | |
1308 if(++count < max_count && curlen == nextlen) { | |
1309 continue; | |
1310 } else if(count < min_count) { | |
1311 do { zip_SEND_CODE(curlen, zip_bl_tree); } while(--count != 0); | |
1312 } else if(curlen != 0) { | |
1313 if(curlen != prevlen) { | |
1314 zip_SEND_CODE(curlen, zip_bl_tree); | |
1315 count--; | |
1316 } | |
1317 // Assert(count >= 3 && count <= 6, " 3_6?"); | |
1318 zip_SEND_CODE(zip_REP_3_6, zip_bl_tree); | |
1319 zip_send_bits(count - 3, 2); | |
1320 } else if(count <= 10) { | |
1321 zip_SEND_CODE(zip_REPZ_3_10, zip_bl_tree); | |
1322 zip_send_bits(count-3, 3); | |
1323 } else { | |
1324 zip_SEND_CODE(zip_REPZ_11_138, zip_bl_tree); | |
1325 zip_send_bits(count-11, 7); | |
1326 } | |
1327 count = 0; | |
1328 prevlen = curlen; | |
1329 if(nextlen == 0) { | |
1330 max_count = 138; | |
1331 min_count = 3; | |
1332 } else if(curlen == nextlen) { | |
1333 max_count = 6; | |
1334 min_count = 3; | |
1335 } else { | |
1336 max_count = 7; | |
1337 min_count = 4; | |
1338 } | |
1339 } | |
1340 } | |
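/* scan_tree and send_tree together implement the run-length coding of code
 * lengths defined by the DEFLATE format: REP_3_6 (16) repeats the previous
 * length 3-6 times (2 extra bits), REPZ_3_10 (17) emits 3-10 zero lengths
 * (3 extra bits) and REPZ_11_138 (18) emits 11-138 zeros (7 extra bits),
 * which is why the extra bits sent above are count-3, count-3 and count-11.
 */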
1341 | |
1342 /* ========================================================================== | |
1343 * Construct the Huffman tree for the bit lengths and return the index in | |
1344 * bl_order of the last bit length code to send. | |
1345 */ | |
1346 var zip_build_bl_tree = function() { | |
1347 var max_blindex; // index of last bit length code of non zero freq | |
1348 | |
1349 // Determine the bit length frequencies for literal and distance trees | |
1350 zip_scan_tree(zip_dyn_ltree, zip_l_desc.max_code); | |
1351 zip_scan_tree(zip_dyn_dtree, zip_d_desc.max_code); | |
1352 | |
1353 // Build the bit length tree: | |
1354 zip_build_tree(zip_bl_desc); | |
1355 /* opt_len now includes the length of the tree representations, except | |
1356 * the lengths of the bit lengths codes and the 5+5+4 bits for the counts. | |
1357 */ | |
1358 | |
1359 /* Determine the number of bit length codes to send. The pkzip format | |
1360 * requires that at least 4 bit length codes be sent. (appnote.txt says | |
1361 * 3 but the actual value used is 4.) | |
1362 */ | |
1363 for(max_blindex = zip_BL_CODES-1; max_blindex >= 3; max_blindex--) { | |
1364 if(zip_bl_tree[zip_bl_order[max_blindex]].dl != 0) break; | |
1365 } | |
1366 /* Update opt_len to include the bit length tree and counts */ | |
1367 zip_opt_len += 3*(max_blindex+1) + 5+5+4; | |
1368 // Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld", | |
1369 // encoder->opt_len, encoder->static_len)); | |
1370 | |
1371 return max_blindex; | |
1372 } | |
1373 | |
1374 /* ========================================================================== | |
1375 * Send the header for a block using dynamic Huffman trees: the counts, the | |
1376 * lengths of the bit length codes, the literal tree and the distance tree. | |
1377 * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4. | |
1378 */ | |
1379 var zip_send_all_trees = function(lcodes, dcodes, blcodes) { // number of codes for each tree | |
1380 var rank; // index in bl_order | |
1381 | |
1382 // Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes"); | |
1383 // Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES, | |
1384 // "too many codes"); | |
1385 // Tracev((stderr, "\nbl counts: ")); | |
1386 zip_send_bits(lcodes-257, 5); // not +255 as stated in appnote.txt | |
1387 zip_send_bits(dcodes-1, 5); | |
1388 zip_send_bits(blcodes-4, 4); // not -3 as stated in appnote.txt | |
1389 for(rank = 0; rank < blcodes; rank++) { | |
1390 // Tracev((stderr, "\nbl code %2d ", bl_order[rank])); | |
1391 zip_send_bits(zip_bl_tree[zip_bl_order[rank]].dl, 3); | |
1392 } | |
1393 | |
1394 // send the literal tree | |
1395 zip_send_tree(zip_dyn_ltree,lcodes-1); | |
1396 | |
1397 // send the distance tree | |
1398 zip_send_tree(zip_dyn_dtree,dcodes-1); | |
1399 } | |
1400 | |
1401 /* ========================================================================== | |
1402 * Determine the best encoding for the current block: dynamic trees, static | |
1403 * trees or store, and output the encoded block to the zip file. | |
1404 */ | |
1405 var zip_flush_block = function(eof) { // true if this is the last block for a file | |
1406 var opt_lenb, static_lenb; // opt_len and static_len in bytes | |
1407 var max_blindex; // index of last bit length code of non zero freq | |
1408 var stored_len; // length of input block | |
1409 | |
1410 stored_len = zip_strstart - zip_block_start; | |
1411 zip_flag_buf[zip_last_flags] = zip_flags; // Save the flags for the last 8 items | |
1412 | |
1413 // Construct the literal and distance trees | |
1414 zip_build_tree(zip_l_desc); | |
1415 // Tracev((stderr, "\nlit data: dyn %ld, stat %ld", | |
1416 // encoder->opt_len, encoder->static_len)); | |
1417 | |
1418 zip_build_tree(zip_d_desc); | |
1419 // Tracev((stderr, "\ndist data: dyn %ld, stat %ld", | |
1420 // encoder->opt_len, encoder->static_len)); | |
1421 /* At this point, opt_len and static_len are the total bit lengths of | |
1422 * the compressed block data, excluding the tree representations. | |
1423 */ | |
1424 | |
1425 /* Build the bit length tree for the above two trees, and get the index | |
1426 * in bl_order of the last bit length code to send. | |
1427 */ | |
1428 max_blindex = zip_build_bl_tree(); | |
1429 | |
1430 // Determine the best encoding. Compute first the block length in bytes | |
1431 opt_lenb = (zip_opt_len +3+7)>>3; | |
1432 static_lenb = (zip_static_len+3+7)>>3; | |
1433 | |
1434 // Trace((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u dist %u ", | |
1435 // opt_lenb, encoder->opt_len, | |
1436 // static_lenb, encoder->static_len, stored_len, | |
1437 // encoder->last_lit, encoder->last_dist)); | |
1438 | |
1439 if(static_lenb <= opt_lenb) | |
1440 opt_lenb = static_lenb; | |
1441 if(stored_len + 4 <= opt_lenb // 4: two words for the lengths | |
1442 && zip_block_start >= 0) { | |
1443 var i; | |
1444 | |
1445 /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE. | |
1446 * Otherwise we can't have processed more than WSIZE input bytes since | |
1447 * the last block flush, because compression would have been | |
1448 * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to | |
1449 * transform a block into a stored block. | |
1450 */ | |
1451 zip_send_bits((zip_STORED_BLOCK<<1)+eof, 3); /* send block type */ | |
1452 zip_bi_windup(); /* align on byte boundary */ | |
1453 zip_put_short(stored_len); | |
1454 zip_put_short(~stored_len); | |
1455 | |
1456 // copy block | |
1457 /* | |
1458 p = &window[block_start]; | |
1459 for(i = 0; i < stored_len; i++) | |
1460 put_byte(p[i]); | |
1461 */ | |
1462 for(i = 0; i < stored_len; i++) | |
1463 zip_put_byte(zip_window[zip_block_start + i]); | |
1464 | |
1465 } else if(static_lenb == opt_lenb) { | |
1466 zip_send_bits((zip_STATIC_TREES<<1)+eof, 3); | |
1467 zip_compress_block(zip_static_ltree, zip_static_dtree); | |
1468 } else { | |
1469 zip_send_bits((zip_DYN_TREES<<1)+eof, 3); | |
1470 zip_send_all_trees(zip_l_desc.max_code+1, | |
1471 zip_d_desc.max_code+1, | |
1472 max_blindex+1); | |
1473 zip_compress_block(zip_dyn_ltree, zip_dyn_dtree); | |
1474 } | |
1475 | |
1476 zip_init_block(); | |
1477 | |
1478 if(eof != 0) | |
1479 zip_bi_windup(); | |
1480 } | |
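/* The 3-bit block header written by zip_send_bits above is (type << 1) + eof;
 * since bits are packed LSB first, the stream carries a 1-bit "last block"
 * flag followed by the 2-bit type (STORED_BLOCK, STATIC_TREES or DYN_TREES).
 * A stored block is chosen whenever the raw bytes plus the 4-byte LEN/NLEN
 * header would be no larger than the best compressed alternative.
 */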

/* ==========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
var zip_ct_tally = function(
    dist, // distance of matched string
    lc) { // match length-MIN_MATCH or unmatched char (if dist==0)
    zip_l_buf[zip_last_lit++] = lc;
    if(dist == 0) {
        // lc is the unmatched char
        zip_dyn_ltree[lc].fc++;
    } else {
        // Here, lc is the match length - MIN_MATCH
        dist--;             // dist = match distance - 1
        // Assert((ush)dist < (ush)MAX_DIST &&
        //        (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
        //        (ush)D_CODE(dist) < (ush)D_CODES, "ct_tally: bad match");

        zip_dyn_ltree[zip_length_code[lc]+zip_LITERALS+1].fc++;
        zip_dyn_dtree[zip_D_CODE(dist)].fc++;

        zip_d_buf[zip_last_dist++] = dist;
        zip_flags |= zip_flag_bit;
    }
    zip_flag_bit <<= 1;

    // Output the flags if they fill a byte
    if((zip_last_lit & 7) == 0) {
        zip_flag_buf[zip_last_flags++] = zip_flags;
        zip_flags = 0;
        zip_flag_bit = 1;
    }
    // Try to guess if it is profitable to stop the current block here
    if(zip_compr_level > 2 && (zip_last_lit & 0xfff) == 0) {
        // Compute an upper bound for the compressed length
        var out_length = zip_last_lit * 8;
        var in_length = zip_strstart - zip_block_start;
        var dcode;

        for(dcode = 0; dcode < zip_D_CODES; dcode++) {
            out_length += zip_dyn_dtree[dcode].fc * (5 + zip_extra_dbits[dcode]);
        }
        out_length >>= 3;
        // Trace((stderr,"\nlast_lit %u, last_dist %u, in %ld, out ~%ld(%ld%%) ",
        //        encoder->last_lit, encoder->last_dist, in_length, out_length,
        //        100L - out_length*100L/in_length));
        if(zip_last_dist < parseInt(zip_last_lit/2) &&
           out_length < parseInt(in_length/2))
            return true;
    }
    return (zip_last_lit == zip_LIT_BUFSIZE-1 ||
            zip_last_dist == zip_DIST_BUFSIZE);
    /* We avoid equality with LIT_BUFSIZE because of wraparound at 64K
     * on 16 bit machines and because stored blocks are restricted to
     * 64K-1 bytes.
     */
}
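
/* Illustrative note (not part of the original port): zip_flag_buf records one
 * bit per tallied item, least significant bit first. If, say, the first eight
 * calls to zip_ct_tally were literal, literal, match, literal, match, match,
 * literal, literal, the byte stored in zip_flag_buf would be 0x34
 * (binary 00110100). zip_compress_block later shifts these bits out again to
 * decide whether the next entry is a plain literal from l_buf or a
 * length/distance pair taken from l_buf and d_buf.
 */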

/* ==========================================================================
 * Send the block data compressed using the given Huffman trees
 */
var zip_compress_block = function(
    ltree,   // literal tree
    dtree) { // distance tree
    var dist;     // distance of matched string
    var lc;       // match length or unmatched char (if dist == 0)
    var lx = 0;   // running index in l_buf
    var dx = 0;   // running index in d_buf
    var fx = 0;   // running index in flag_buf
    var flag = 0; // current flags
    var code;     // the code to send
    var extra;    // number of extra bits to send

    if(zip_last_lit != 0) do {
        if((lx & 7) == 0)
            flag = zip_flag_buf[fx++];
        lc = zip_l_buf[lx++] & 0xff;
        if((flag & 1) == 0) {
            zip_SEND_CODE(lc, ltree); /* send a literal byte */
            // Tracecv(isgraph(lc), (stderr," '%c' ", lc));
        } else {
            // Here, lc is the match length - MIN_MATCH
            code = zip_length_code[lc];
            zip_SEND_CODE(code+zip_LITERALS+1, ltree); // send the length code
            extra = zip_extra_lbits[code];
            if(extra != 0) {
                lc -= zip_base_length[code];
                zip_send_bits(lc, extra); // send the extra length bits
            }
            dist = zip_d_buf[dx++];
            // Here, dist is the match distance - 1
            code = zip_D_CODE(dist);
            // Assert (code < D_CODES, "bad d_code");

            zip_SEND_CODE(code, dtree); // send the distance code
            extra = zip_extra_dbits[code];
            if(extra != 0) {
                dist -= zip_base_dist[code];
                zip_send_bits(dist, extra); // send the extra distance bits
            }
        } // literal or match pair ?
        flag >>= 1;
    } while(lx < zip_last_lit);

    zip_SEND_CODE(zip_END_BLOCK, ltree);
}

/* ==========================================================================
 * Send a value on a given number of bits.
 * IN assertion: length <= 16 and value fits in length bits.
 */
var zip_Buf_size = 16; // bit size of bi_buf
var zip_send_bits = function(
    value,    // value to send
    length) { // number of bits
    /* If not enough room in bi_buf, use (valid) bits from bi_buf and
     * (16 - bi_valid) bits from value, leaving (length - (16-bi_valid))
     * unused bits in value.
     */
    if(zip_bi_valid > zip_Buf_size - length) {
        zip_bi_buf |= (value << zip_bi_valid);
        zip_put_short(zip_bi_buf);
        zip_bi_buf = (value >> (zip_Buf_size - zip_bi_valid));
        zip_bi_valid += length - zip_Buf_size;
    } else {
        zip_bi_buf |= value << zip_bi_valid;
        zip_bi_valid += length;
    }
}
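
/* Illustrative note (not part of the original port): bits accumulate in
 * zip_bi_buf starting at the least significant end. With an empty buffer
 * (zip_bi_valid == 0), zip_send_bits(5, 3) followed by zip_send_bits(3, 2)
 * leaves zip_bi_buf == 0x1D (binary 11101: the three bits 101 in positions
 * 0-2, then the two bits 11 in positions 3-4) and zip_bi_valid == 5; whenever
 * the 16-bit buffer overflows it is flushed to the output via zip_put_short.
 */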

/* ==========================================================================
 * Reverse the first len bits of a code, using straightforward code (a faster
 * method would use a table)
 * IN assertion: 1 <= len <= 15
 */
var zip_bi_reverse = function(
    code,  // the value to invert
    len) { // its bit length
    var res = 0;
    do {
        res |= code & 1;
        code >>= 1;
        res <<= 1;
    } while(--len > 0);
    return res >> 1;
}
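
/* Illustrative note (not part of the original port): for example,
 * zip_bi_reverse(0x06, 5) returns 0x0C, i.e. the 5-bit pattern 00110 read
 * back to front as 01100. This is needed because Huffman codes are assembled
 * most-significant-bit first, while the DEFLATE bit stream is written
 * least-significant-bit first by zip_send_bits.
 */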

/* ==========================================================================
 * Write out any remaining bits in an incomplete byte.
 */
var zip_bi_windup = function() {
    if(zip_bi_valid > 8) {
        zip_put_short(zip_bi_buf);
    } else if(zip_bi_valid > 0) {
        zip_put_byte(zip_bi_buf);
    }
    zip_bi_buf = 0;
    zip_bi_valid = 0;
}

var zip_qoutbuf = function() {
    if(zip_outcnt != 0) {
        var q, i;
        q = zip_new_queue();
        if(zip_qhead == null)
            zip_qhead = zip_qtail = q;
        else
            zip_qtail = zip_qtail.next = q;
        q.len = zip_outcnt - zip_outoff;
        // System.arraycopy(zip_outbuf, zip_outoff, q.ptr, 0, q.len);
        for(i = 0; i < q.len; i++)
            q.ptr[i] = zip_outbuf[zip_outoff + i];
        zip_outcnt = zip_outoff = 0;
    }
}

var zip_deflate = function(str, level) {
    var i, j;

    zip_deflate_data = str;
    zip_deflate_pos = 0;
    if(typeof level == "undefined")
        level = zip_DEFAULT_LEVEL;
    zip_deflate_start(level);

    var buff = new Array(1024);
    var aout = [];
    while((i = zip_deflate_internal(buff, 0, buff.length)) > 0) {
        var cbuf = new Array(i);
        for(j = 0; j < i; j++){
            cbuf[j] = String.fromCharCode(buff[j]);
        }
        aout[aout.length] = cbuf.join("");
    }
    zip_deflate_data = null; // G.C.
    return aout.join("");
}
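
/* Illustrative note (not part of the original port): zip_deflate is the entry
 * point handed to JSZip below. It takes a "binary" string (each character is
 * treated as one byte) and returns the raw DEFLATE stream as another such
 * string, e.g. inside this closure:
 *
 *     var compressed = zip_deflate("aaaaaaaaaaaaaaaa");     // default level
 *     var smallest   = zip_deflate("aaaaaaaaaaaaaaaa", 9);  // highest effort
 *
 * The optional second argument is the compression level, falling back to
 * zip_DEFAULT_LEVEL when omitted.
 */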

//
// end of the script of Masanao Izumo.
//

// we add the compression method for JSZip
if(!JSZip.compressions["DEFLATE"]) {
    JSZip.compressions["DEFLATE"] = {
        magic : "\x08\x00",
        compress : zip_deflate
    };
} else {
    JSZip.compressions["DEFLATE"].compress = zip_deflate;
}

})();
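
// Illustrative usage (not part of the original file), assuming the JSZip 1.x
// API that this extension targets: once this script is loaded after jszip.js,
// DEFLATE can be requested when generating an archive, e.g.
//
//     var zip = new JSZip();
//     zip.file("hello.txt", "Hello World\n");
//     var base64 = zip.generate({ compression : "DEFLATE" });
//
// JSZip looks up JSZip.compressions["DEFLATE"], calls its compress() function
// (zip_deflate above) on each file's binary content, and records the
// "\x08\x00" method identifier in the corresponding ZIP entry header.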

// enforcing Stuk's coding style
// vim: set shiftwidth=3 softtabstop=3: