/*!

JSZip v3.7.1 - A JavaScript class for generating and reading zip files
<http://stuartk.com/jszip>

(c) 2009-2016 Stuart Knightley <stuart [at] stuartk.com>
Dual licenced under the MIT license or GPLv3. See https://raw.github.com/Stuk/jszip/master/LICENSE.markdown.

JSZip uses the library pako released under the MIT license :
https://github.com/nodeca/pako/blob/master/LICENSE
*/
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.JSZip = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
'use strict';
var utils = require('./utils');
var support = require('./support');
// private property
var _keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";


// public method for encoding
exports.encode = function(input) {
    var output = [];
    var chr1, chr2, chr3, enc1, enc2, enc3, enc4;
    var i = 0, len = input.length, remainingBytes = len;

    var isArray = utils.getTypeOf(input) !== "string";
    while (i < input.length) {
        remainingBytes = len - i;

        if (!isArray) {
            chr1 = input.charCodeAt(i++);
            chr2 = i < len ? input.charCodeAt(i++) : 0;
            chr3 = i < len ? input.charCodeAt(i++) : 0;
        } else {
            chr1 = input[i++];
            chr2 = i < len ? input[i++] : 0;
            chr3 = i < len ? input[i++] : 0;
        }

        enc1 = chr1 >> 2;
        enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
        enc3 = remainingBytes > 1 ? (((chr2 & 15) << 2) | (chr3 >> 6)) : 64;
        enc4 = remainingBytes > 2 ? (chr3 & 63) : 64;

        output.push(_keyStr.charAt(enc1) + _keyStr.charAt(enc2) + _keyStr.charAt(enc3) + _keyStr.charAt(enc4));

    }

    return output.join("");
};
// public method for decoding
exports.decode = function(input) {
    var chr1, chr2, chr3;
    var enc1, enc2, enc3, enc4;
    var i = 0, resultIndex = 0;

    var dataUrlPrefix = "data:";

    if (input.substr(0, dataUrlPrefix.length) === dataUrlPrefix) {
        // This is a common error: people give a data url
        // (data:image/png;base64,iVBOR...) with a {base64: true} and
        // wonder why things don't work.
        // We can detect that the string input looks like a data url but we
        // *can't* be sure it is one: removing everything up to the comma would
        // be too dangerous.
        throw new Error("Invalid base64 input, it looks like a data url.");
    }

    input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "");

    var totalLength = input.length * 3 / 4;
    if(input.charAt(input.length - 1) === _keyStr.charAt(64)) {
        totalLength--;
    }
    if(input.charAt(input.length - 2) === _keyStr.charAt(64)) {
        totalLength--;
    }
    if (totalLength % 1 !== 0) {
        // totalLength is not an integer, the length does not match a valid
        // base64 content. That can happen if:
        // - the input is not a base64 content
        // - the input is *almost* a base64 content, with extra chars at the
        //   beginning or at the end
        // - the input uses a base64 variant (base64url for example)
        throw new Error("Invalid base64 input, bad content length.");
    }
    var output;
    if (support.uint8array) {
        output = new Uint8Array(totalLength|0);
    } else {
        output = new Array(totalLength|0);
    }

    while (i < input.length) {

        enc1 = _keyStr.indexOf(input.charAt(i++));
        enc2 = _keyStr.indexOf(input.charAt(i++));
        enc3 = _keyStr.indexOf(input.charAt(i++));
        enc4 = _keyStr.indexOf(input.charAt(i++));

        chr1 = (enc1 << 2) | (enc2 >> 4);
        chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
        chr3 = ((enc3 & 3) << 6) | enc4;

        output[resultIndex++] = chr1;

        if (enc3 !== 64) {
            output[resultIndex++] = chr2;
        }
        if (enc4 !== 64) {
            output[resultIndex++] = chr3;
        }

    }

    return output;
};
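
// Minimal round-trip sketch, assuming the module is loaded as
// require('./base64') from inside this bundle:
//
//   var base64 = require('./base64');
//   var encoded = base64.encode("hello");   // "aGVsbG8="
//   var decoded = base64.decode(encoded);   // Uint8Array [104, 101, 108, 108, 111]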
121},{"./support":30,"./utils":32}],2:[function(require,module,exports){122'use strict';123
var external = require("./external");
var DataWorker = require('./stream/DataWorker');
var Crc32Probe = require('./stream/Crc32Probe');
var DataLengthProbe = require('./stream/DataLengthProbe');

/**
 * Represent a compressed object, with everything needed to decompress it.
 * @constructor
 * @param {number} compressedSize the size of the data compressed.
 * @param {number} uncompressedSize the size of the data after decompression.
 * @param {number} crc32 the crc32 of the decompressed file.
 * @param {object} compression the type of compression, see lib/compressions.js.
 * @param {String|ArrayBuffer|Uint8Array|Buffer} data the compressed data.
 */
function CompressedObject(compressedSize, uncompressedSize, crc32, compression, data) {
    this.compressedSize = compressedSize;
    this.uncompressedSize = uncompressedSize;
    this.crc32 = crc32;
    this.compression = compression;
    this.compressedContent = data;
}

CompressedObject.prototype = {
    /**
     * Create a worker to get the uncompressed content.
     * @return {GenericWorker} the worker.
     */
    getContentWorker: function () {
        var worker = new DataWorker(external.Promise.resolve(this.compressedContent))
            .pipe(this.compression.uncompressWorker())
            .pipe(new DataLengthProbe("data_length"));

        var that = this;
        worker.on("end", function () {
            if (this.streamInfo['data_length'] !== that.uncompressedSize) {
                throw new Error("Bug : uncompressed data size mismatch");
            }
        });
        return worker;
    },
    /**
     * Create a worker to get the compressed content.
     * @return {GenericWorker} the worker.
     */
    getCompressedWorker: function () {
        return new DataWorker(external.Promise.resolve(this.compressedContent))
            .withStreamInfo("compressedSize", this.compressedSize)
            .withStreamInfo("uncompressedSize", this.uncompressedSize)
            .withStreamInfo("crc32", this.crc32)
            .withStreamInfo("compression", this.compression)
            ;
    }
};
/**
 * Chain the given worker with other workers to compress the content with the
 * given compression.
 * @param {GenericWorker} uncompressedWorker the worker to pipe.
 * @param {Object} compression the compression object.
 * @param {Object} compressionOptions the options to use when compressing.
 * @return {GenericWorker} the new worker compressing the content.
 */
CompressedObject.createWorkerFrom = function (uncompressedWorker, compression, compressionOptions) {
    return uncompressedWorker
        .pipe(new Crc32Probe())
        .pipe(new DataLengthProbe("uncompressedSize"))
        .pipe(compression.compressWorker(compressionOptions))
        .pipe(new DataLengthProbe("compressedSize"))
        .withStreamInfo("compression", compression);
};

module.exports = CompressedObject;
197},{"./external":6,"./stream/Crc32Probe":25,"./stream/DataLengthProbe":26,"./stream/DataWorker":27}],3:[function(require,module,exports){198'use strict';199
200var GenericWorker = require("./stream/GenericWorker");201
202exports.STORE = {203magic: "\x00\x00",204compressWorker : function (compressionOptions) {205return new GenericWorker("STORE compression");206},207uncompressWorker : function () {208return new GenericWorker("STORE decompression");209}210};211exports.DEFLATE = require('./flate');212
213},{"./flate":7,"./stream/GenericWorker":28}],4:[function(require,module,exports){214'use strict';215
216var utils = require('./utils');217
218/**
219* The following functions come from pako, from pako/lib/zlib/crc32.js
220* released under the MIT license, see pako https://github.com/nodeca/pako/
221*/
222
223// Use ordinary array, since untyped makes no boost here
224function makeTable() {225var c, table = [];226
227for(var n =0; n < 256; n++){228c = n;229for(var k =0; k < 8; k++){230c = ((c&1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));231}232table[n] = c;233}234
235return table;236}
237
238// Create table on load. Just 255 signed longs. Not a problem.
239var crcTable = makeTable();240
241
242function crc32(crc, buf, len, pos) {243var t = crcTable, end = pos + len;244
245crc = crc ^ (-1);246
247for (var i = pos; i < end; i++ ) {248crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF];249}250
251return (crc ^ (-1)); // >>> 0;252}
253
254// That's all for the pako functions.
255
256/**
257* Compute the crc32 of a string.
258* This is almost the same as the function crc32, but for strings. Using the
259* same function for the two use cases leads to horrible performances.
260* @param {Number} crc the starting value of the crc.
261* @param {String} str the string to use.
262* @param {Number} len the length of the string.
263* @param {Number} pos the starting position for the crc32 computation.
264* @return {Number} the computed crc32.
265*/
266function crc32str(crc, str, len, pos) {267var t = crcTable, end = pos + len;268
269crc = crc ^ (-1);270
271for (var i = pos; i < end; i++ ) {272crc = (crc >>> 8) ^ t[(crc ^ str.charCodeAt(i)) & 0xFF];273}274
275return (crc ^ (-1)); // >>> 0;276}
277
278module.exports = function crc32wrapper(input, crc) {279if (typeof input === "undefined" || !input.length) {280return 0;281}282
283var isArray = utils.getTypeOf(input) !== "string";284
285if(isArray) {286return crc32(crc|0, input, input.length, 0);287} else {288return crc32str(crc|0, input, input.length, 0);289}290};291
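
// Minimal usage sketch, assuming the module is loaded as require('./crc32')
// from inside this bundle; the wrapper accepts a binary string or an
// array-like of bytes, plus an optional starting crc:
//
//   var crc32 = require('./crc32');
//   var checksum = crc32("hello");                        // signed 32-bit CRC
//   crc32([0x68, 0x65, 0x6c, 0x6c, 0x6f]) === checksum;   // same bytes, same CRC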
292},{"./utils":32}],5:[function(require,module,exports){293'use strict';294exports.base64 = false;295exports.binary = false;296exports.dir = false;297exports.createFolders = true;298exports.date = null;299exports.compression = null;300exports.compressionOptions = null;301exports.comment = null;302exports.unixPermissions = null;303exports.dosPermissions = null;304
},{}],6:[function(require,module,exports){
/* global Promise */
'use strict';

// load the global object first:
// - it should be better integrated in the system (unhandledRejection in node)
// - the environment may have a custom Promise implementation (see zone.js)
var ES6Promise = null;
if (typeof Promise !== "undefined") {
    ES6Promise = Promise;
} else {
    ES6Promise = require("lie");
}

/**
 * Let the user use/change some implementations.
 */
module.exports = {
    Promise: ES6Promise
};
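
// Sketch of how a consumer can swap the Promise implementation, assuming a
// compatible implementation is available as `MyPromise` (this object is the
// hook the rest of the library goes through as external.Promise):
//
//   var JSZip = require("jszip");
//   JSZip.external.Promise = MyPromise;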
326},{"lie":37}],7:[function(require,module,exports){327'use strict';328var USE_TYPEDARRAY = (typeof Uint8Array !== 'undefined') && (typeof Uint16Array !== 'undefined') && (typeof Uint32Array !== 'undefined');329
330var pako = require("pako");331var utils = require("./utils");332var GenericWorker = require("./stream/GenericWorker");333
334var ARRAY_TYPE = USE_TYPEDARRAY ? "uint8array" : "array";335
336exports.magic = "\x08\x00";337
338/**
339* Create a worker that uses pako to inflate/deflate.
340* @constructor
341* @param {String} action the name of the pako function to call : either "Deflate" or "Inflate".
342* @param {Object} options the options to use when (de)compressing.
343*/
344function FlateWorker(action, options) {345GenericWorker.call(this, "FlateWorker/" + action);346
347this._pako = null;348this._pakoAction = action;349this._pakoOptions = options;350// the `meta` object from the last chunk received351// this allow this worker to pass around metadata352this.meta = {};353}
354
355utils.inherits(FlateWorker, GenericWorker);356
357/**
358* @see GenericWorker.processChunk
359*/
360FlateWorker.prototype.processChunk = function (chunk) {361this.meta = chunk.meta;362if (this._pako === null) {363this._createPako();364}365this._pako.push(utils.transformTo(ARRAY_TYPE, chunk.data), false);366};367
368/**
369* @see GenericWorker.flush
370*/
371FlateWorker.prototype.flush = function () {372GenericWorker.prototype.flush.call(this);373if (this._pako === null) {374this._createPako();375}376this._pako.push([], true);377};378/**
379* @see GenericWorker.cleanUp
380*/
381FlateWorker.prototype.cleanUp = function () {382GenericWorker.prototype.cleanUp.call(this);383this._pako = null;384};385
386/**
387* Create the _pako object.
388* TODO: lazy-loading this object isn't the best solution but it's the
389* quickest. The best solution is to lazy-load the worker list. See also the
390* issue #446.
391*/
392FlateWorker.prototype._createPako = function () {393this._pako = new pako[this._pakoAction]({394raw: true,395level: this._pakoOptions.level || -1 // default compression396});397var self = this;398this._pako.onData = function(data) {399self.push({400data : data,401meta : self.meta402});403};404};405
406exports.compressWorker = function (compressionOptions) {407return new FlateWorker("Deflate", compressionOptions);408};409exports.uncompressWorker = function () {410return new FlateWorker("Inflate", {});411};412
413},{"./stream/GenericWorker":28,"./utils":32,"pako":38}],8:[function(require,module,exports){414'use strict';415
416var utils = require('../utils');417var GenericWorker = require('../stream/GenericWorker');418var utf8 = require('../utf8');419var crc32 = require('../crc32');420var signature = require('../signature');421
422/**
423* Transform an integer into a string in hexadecimal.
424* @private
425* @param {number} dec the number to convert.
426* @param {number} bytes the number of bytes to generate.
427* @returns {string} the result.
428*/
429var decToHex = function(dec, bytes) {430var hex = "", i;431for (i = 0; i < bytes; i++) {432hex += String.fromCharCode(dec & 0xff);433dec = dec >>> 8;434}435return hex;436};437
438/**
439* Generate the UNIX part of the external file attributes.
440* @param {Object} unixPermissions the unix permissions or null.
441* @param {Boolean} isDir true if the entry is a directory, false otherwise.
442* @return {Number} a 32 bit integer.
443*
444* adapted from http://unix.stackexchange.com/questions/14705/the-zip-formats-external-file-attribute :
445*
446* TTTTsstrwxrwxrwx0000000000ADVSHR
447* ^^^^____________________________ file type, see zipinfo.c (UNX_*)
448* ^^^_________________________ setuid, setgid, sticky
449* ^^^^^^^^^________________ permissions
450* ^^^^^^^^^^______ not used ?
451* ^^^^^^ DOS attribute bits : Archive, Directory, Volume label, System file, Hidden, Read only
452*/
453var generateUnixExternalFileAttr = function (unixPermissions, isDir) {454
455var result = unixPermissions;456if (!unixPermissions) {457// I can't use octal values in strict mode, hence the hexa.458// 040775 => 0x41fd459// 0100664 => 0x81b4460result = isDir ? 0x41fd : 0x81b4;461}462return (result & 0xFFFF) << 16;463};464
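
// For example, with the default file mode above, (0x81b4 & 0xFFFF) << 16
// yields 0x81b40000: the unix mode always lands in the upper 16 bits of the
// 32-bit external file attributes field.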
/**
 * Generate the DOS part of the external file attributes.
 * @param {Object} dosPermissions the dos permissions or null.
 * @param {Boolean} isDir true if the entry is a directory, false otherwise.
 * @return {Number} a 32 bit integer.
 *
 * Bit 0     Read-Only
 * Bit 1     Hidden
 * Bit 2     System
 * Bit 3     Volume Label
 * Bit 4     Directory
 * Bit 5     Archive
 */
var generateDosExternalFileAttr = function (dosPermissions, isDir) {

    // the dir flag is already set for compatibility
    return (dosPermissions || 0) & 0x3F;
};

/**
 * Generate the various parts used in the construction of the final zip file.
 * @param {Object} streamInfo the hash with information about the compressed file.
 * @param {Boolean} streamedContent is the content streamed ?
 * @param {Boolean} streamingEnded is the stream finished ?
 * @param {number} offset the current offset from the start of the zip file.
 * @param {String} platform let's pretend we are this platform (change platform dependent fields)
 * @param {Function} encodeFileName the function to encode the file name / comment.
 * @return {Object} the zip parts.
 */
var generateZipParts = function(streamInfo, streamedContent, streamingEnded, offset, platform, encodeFileName) {
    var file = streamInfo['file'],
        compression = streamInfo['compression'],
        useCustomEncoding = encodeFileName !== utf8.utf8encode,
        encodedFileName = utils.transformTo("string", encodeFileName(file.name)),
        utfEncodedFileName = utils.transformTo("string", utf8.utf8encode(file.name)),
        comment = file.comment,
        encodedComment = utils.transformTo("string", encodeFileName(comment)),
        utfEncodedComment = utils.transformTo("string", utf8.utf8encode(comment)),
        useUTF8ForFileName = utfEncodedFileName.length !== file.name.length,
        useUTF8ForComment = utfEncodedComment.length !== comment.length,
        dosTime,
        dosDate,
        extraFields = "",
        unicodePathExtraField = "",
        unicodeCommentExtraField = "",
        dir = file.dir,
        date = file.date;


    var dataInfo = {
        crc32 : 0,
        compressedSize : 0,
        uncompressedSize : 0
    };

    // if the content is streamed, the sizes/crc32 are only available AFTER
    // the end of the stream.
    if (!streamedContent || streamingEnded) {
        dataInfo.crc32 = streamInfo['crc32'];
        dataInfo.compressedSize = streamInfo['compressedSize'];
        dataInfo.uncompressedSize = streamInfo['uncompressedSize'];
    }

    var bitflag = 0;
    if (streamedContent) {
        // Bit 3: the sizes/crc32 are set to zero in the local header.
        // The correct values are put in the data descriptor immediately
        // following the compressed data.
        bitflag |= 0x0008;
    }
    if (!useCustomEncoding && (useUTF8ForFileName || useUTF8ForComment)) {
        // Bit 11: Language encoding flag (EFS).
        bitflag |= 0x0800;
    }


    var extFileAttr = 0;
    var versionMadeBy = 0;
    if (dir) {
        // dos or unix, we set the dos dir flag
        extFileAttr |= 0x00010;
    }
    if(platform === "UNIX") {
        versionMadeBy = 0x031E; // UNIX, version 3.0
        extFileAttr |= generateUnixExternalFileAttr(file.unixPermissions, dir);
    } else { // DOS or other, fallback to DOS
        versionMadeBy = 0x0014; // DOS, version 2.0
        extFileAttr |= generateDosExternalFileAttr(file.dosPermissions, dir);
    }

    // date
    // @see http://www.delorie.com/djgpp/doc/rbinter/it/52/13.html
    // @see http://www.delorie.com/djgpp/doc/rbinter/it/65/16.html
    // @see http://www.delorie.com/djgpp/doc/rbinter/it/66/16.html

    dosTime = date.getUTCHours();
    dosTime = dosTime << 6;
    dosTime = dosTime | date.getUTCMinutes();
    dosTime = dosTime << 5;
    dosTime = dosTime | date.getUTCSeconds() / 2;

    dosDate = date.getUTCFullYear() - 1980;
    dosDate = dosDate << 4;
    dosDate = dosDate | (date.getUTCMonth() + 1);
    dosDate = dosDate << 5;
    dosDate = dosDate | date.getUTCDate();
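
    // The two 16-bit values just built follow the MS-DOS timestamp layout:
    //   dosTime = hours << 11 | minutes << 5 | (seconds / 2)
    //   dosDate = (year - 1980) << 9 | (month, 1-12) << 5 | dayOfMonth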
    if (useUTF8ForFileName) {
        // set the unicode path extra field. unzip needs at least one extra
        // field to correctly handle unicode path, so using the path is as good
        // as any other information. This could improve the situation with
        // other archive managers too.
        // This field is usually used without the utf8 flag, with a non
        // unicode path in the header (winrar, winzip). This helps (a bit)
        // with the messy Windows' default compressed folders feature but
        // breaks on p7zip which doesn't seek the unicode path extra field.
        // So for now, UTF-8 everywhere !
        unicodePathExtraField =
            // Version
            decToHex(1, 1) +
            // NameCRC32
            decToHex(crc32(encodedFileName), 4) +
            // UnicodeName
            utfEncodedFileName;

        extraFields +=
            // Info-ZIP Unicode Path Extra Field
            "\x75\x70" +
            // size
            decToHex(unicodePathExtraField.length, 2) +
            // content
            unicodePathExtraField;
    }

    if(useUTF8ForComment) {

        unicodeCommentExtraField =
            // Version
            decToHex(1, 1) +
            // CommentCRC32
            decToHex(crc32(encodedComment), 4) +
            // UnicodeName
            utfEncodedComment;

        extraFields +=
            // Info-ZIP Unicode Comment Extra Field
            "\x75\x63" +
            // size
            decToHex(unicodeCommentExtraField.length, 2) +
            // content
            unicodeCommentExtraField;
    }

    var header = "";

    // version needed to extract
    header += "\x0A\x00";
    // general purpose bit flag
    header += decToHex(bitflag, 2);
    // compression method
    header += compression.magic;
    // last mod file time
    header += decToHex(dosTime, 2);
    // last mod file date
    header += decToHex(dosDate, 2);
    // crc-32
    header += decToHex(dataInfo.crc32, 4);
    // compressed size
    header += decToHex(dataInfo.compressedSize, 4);
    // uncompressed size
    header += decToHex(dataInfo.uncompressedSize, 4);
    // file name length
    header += decToHex(encodedFileName.length, 2);
    // extra field length
    header += decToHex(extraFields.length, 2);


    var fileRecord = signature.LOCAL_FILE_HEADER + header + encodedFileName + extraFields;

    var dirRecord = signature.CENTRAL_FILE_HEADER +
        // version made by (00: DOS)
        decToHex(versionMadeBy, 2) +
        // file header (common to file and central directory)
        header +
        // file comment length
        decToHex(encodedComment.length, 2) +
        // disk number start
        "\x00\x00" +
        // internal file attributes TODO
        "\x00\x00" +
        // external file attributes
        decToHex(extFileAttr, 4) +
        // relative offset of local header
        decToHex(offset, 4) +
        // file name
        encodedFileName +
        // extra field
        extraFields +
        // file comment
        encodedComment;

    return {
        fileRecord: fileRecord,
        dirRecord: dirRecord
    };
};

/**
 * Generate the EOCD record.
 * @param {Number} entriesCount the number of entries in the zip file.
 * @param {Number} centralDirLength the length (in bytes) of the central dir.
 * @param {Number} localDirLength the length (in bytes) of the local dir.
 * @param {String} comment the zip file comment as a binary string.
 * @param {Function} encodeFileName the function to encode the comment.
 * @return {String} the EOCD record.
 */
var generateCentralDirectoryEnd = function (entriesCount, centralDirLength, localDirLength, comment, encodeFileName) {
    var dirEnd = "";
    var encodedComment = utils.transformTo("string", encodeFileName(comment));

    // end of central dir signature
    dirEnd = signature.CENTRAL_DIRECTORY_END +
        // number of this disk
        "\x00\x00" +
        // number of the disk with the start of the central directory
        "\x00\x00" +
        // total number of entries in the central directory on this disk
        decToHex(entriesCount, 2) +
        // total number of entries in the central directory
        decToHex(entriesCount, 2) +
        // size of the central directory   4 bytes
        decToHex(centralDirLength, 4) +
        // offset of start of central directory with respect to the starting disk number
        decToHex(localDirLength, 4) +
        // .ZIP file comment length
        decToHex(encodedComment.length, 2) +
        // .ZIP file comment
        encodedComment;

    return dirEnd;
};

/**
 * Generate data descriptors for a file entry.
 * @param {Object} streamInfo the hash generated by a worker, containing information
 * on the file entry.
 * @return {String} the data descriptors.
 */
var generateDataDescriptors = function (streamInfo) {
    var descriptor = "";
    descriptor = signature.DATA_DESCRIPTOR +
        // crc-32                          4 bytes
        decToHex(streamInfo['crc32'], 4) +
        // compressed size                 4 bytes
        decToHex(streamInfo['compressedSize'], 4) +
        // uncompressed size               4 bytes
        decToHex(streamInfo['uncompressedSize'], 4);

    return descriptor;
};


/**
 * A worker to concatenate other workers to create a zip file.
 * @param {Boolean} streamFiles `true` to stream the content of the files,
 * `false` to accumulate it.
 * @param {String} comment the comment to use.
 * @param {String} platform the platform to use, "UNIX" or "DOS".
 * @param {Function} encodeFileName the function to encode file names and comments.
 */
function ZipFileWorker(streamFiles, comment, platform, encodeFileName) {
    GenericWorker.call(this, "ZipFileWorker");
    // The number of bytes written so far. This doesn't count accumulated chunks.
    this.bytesWritten = 0;
    // The comment of the zip file
    this.zipComment = comment;
    // The platform "generating" the zip file.
    this.zipPlatform = platform;
    // the function to encode file names and comments.
    this.encodeFileName = encodeFileName;
    // Should we stream the content of the files ?
    this.streamFiles = streamFiles;
    // If `streamFiles` is false, we will need to accumulate the content of the
    // files to calculate sizes / crc32 (and write them *before* the content).
    // This boolean indicates if we are accumulating chunks (it will change a lot
    // during the lifetime of this worker).
    this.accumulate = false;
    // The buffer receiving chunks when accumulating content.
    this.contentBuffer = [];
    // The list of generated directory records.
    this.dirRecords = [];
    // The offset (in bytes) from the beginning of the zip file for the current source.
    this.currentSourceOffset = 0;
    // The total number of entries in this zip file.
    this.entriesCount = 0;
    // the name of the file currently being added, null when handling the end of the zip file.
    // Used for the emitted metadata.
    this.currentFile = null;



    this._sources = [];
}
utils.inherits(ZipFileWorker, GenericWorker);

/**
 * @see GenericWorker.push
 */
ZipFileWorker.prototype.push = function (chunk) {

    var currentFilePercent = chunk.meta.percent || 0;
    var entriesCount = this.entriesCount;
    var remainingFiles = this._sources.length;

    if(this.accumulate) {
        this.contentBuffer.push(chunk);
    } else {
        this.bytesWritten += chunk.data.length;

        GenericWorker.prototype.push.call(this, {
            data : chunk.data,
            meta : {
                currentFile : this.currentFile,
                percent : entriesCount ? (currentFilePercent + 100 * (entriesCount - remainingFiles - 1)) / entriesCount : 100
            }
        });
    }
};

/**
 * The worker started a new source (another worker).
 * @param {Object} streamInfo the streamInfo object from the new source.
 */
ZipFileWorker.prototype.openedSource = function (streamInfo) {
    this.currentSourceOffset = this.bytesWritten;
    this.currentFile = streamInfo['file'].name;

    var streamedContent = this.streamFiles && !streamInfo['file'].dir;

    // don't stream folders (because they don't have any content)
    if(streamedContent) {
        var record = generateZipParts(streamInfo, streamedContent, false, this.currentSourceOffset, this.zipPlatform, this.encodeFileName);
        this.push({
            data : record.fileRecord,
            meta : {percent:0}
        });
    } else {
        // we need to wait for the whole file before pushing anything
        this.accumulate = true;
    }
};

/**
 * The worker finished a source (another worker).
 * @param {Object} streamInfo the streamInfo object from the finished source.
 */
ZipFileWorker.prototype.closedSource = function (streamInfo) {
    this.accumulate = false;
    var streamedContent = this.streamFiles && !streamInfo['file'].dir;
    var record = generateZipParts(streamInfo, streamedContent, true, this.currentSourceOffset, this.zipPlatform, this.encodeFileName);

    this.dirRecords.push(record.dirRecord);
    if(streamedContent) {
        // after the streamed file, we put data descriptors
        this.push({
            data : generateDataDescriptors(streamInfo),
            meta : {percent:100}
        });
    } else {
        // the content wasn't streamed, we need to push everything now
        // first the file record, then the content
        this.push({
            data : record.fileRecord,
            meta : {percent:0}
        });
        while(this.contentBuffer.length) {
            this.push(this.contentBuffer.shift());
        }
    }
    this.currentFile = null;
};

/**
 * @see GenericWorker.flush
 */
ZipFileWorker.prototype.flush = function () {

    var localDirLength = this.bytesWritten;
    for(var i = 0; i < this.dirRecords.length; i++) {
        this.push({
            data : this.dirRecords[i],
            meta : {percent:100}
        });
    }
    var centralDirLength = this.bytesWritten - localDirLength;

    var dirEnd = generateCentralDirectoryEnd(this.dirRecords.length, centralDirLength, localDirLength, this.zipComment, this.encodeFileName);

    this.push({
        data : dirEnd,
        meta : {percent:100}
    });
};

/**
 * Prepare the next source to be read.
 */
ZipFileWorker.prototype.prepareNextSource = function () {
    this.previous = this._sources.shift();
    this.openedSource(this.previous.streamInfo);
    if (this.isPaused) {
        this.previous.pause();
    } else {
        this.previous.resume();
    }
};

/**
 * @see GenericWorker.registerPrevious
 */
ZipFileWorker.prototype.registerPrevious = function (previous) {
    this._sources.push(previous);
    var self = this;

    previous.on('data', function (chunk) {
        self.processChunk(chunk);
    });
    previous.on('end', function () {
        self.closedSource(self.previous.streamInfo);
        if(self._sources.length) {
            self.prepareNextSource();
        } else {
            self.end();
        }
    });
    previous.on('error', function (e) {
        self.error(e);
    });
    return this;
};

/**
 * @see GenericWorker.resume
 */
ZipFileWorker.prototype.resume = function () {
    if(!GenericWorker.prototype.resume.call(this)) {
        return false;
    }

    if (!this.previous && this._sources.length) {
        this.prepareNextSource();
        return true;
    }
    if (!this.previous && !this._sources.length && !this.generatedError) {
        this.end();
        return true;
    }
};

/**
 * @see GenericWorker.error
 */
ZipFileWorker.prototype.error = function (e) {
    var sources = this._sources;
    if(!GenericWorker.prototype.error.call(this, e)) {
        return false;
    }
    for(var i = 0; i < sources.length; i++) {
        try {
            sources[i].error(e);
        } catch(e) {
            // the `error` exploded, nothing to do
        }
    }
    return true;
};

/**
 * @see GenericWorker.lock
 */
ZipFileWorker.prototype.lock = function () {
    GenericWorker.prototype.lock.call(this);
    var sources = this._sources;
    for(var i = 0; i < sources.length; i++) {
        sources[i].lock();
    }
};

module.exports = ZipFileWorker;
955},{"../crc32":4,"../signature":23,"../stream/GenericWorker":28,"../utf8":31,"../utils":32}],9:[function(require,module,exports){956'use strict';957
958var compressions = require('../compressions');959var ZipFileWorker = require('./ZipFileWorker');960
961/**
962* Find the compression to use.
963* @param {String} fileCompression the compression defined at the file level, if any.
964* @param {String} zipCompression the compression defined at the load() level.
965* @return {Object} the compression object to use.
966*/
967var getCompression = function (fileCompression, zipCompression) {968
969var compressionName = fileCompression || zipCompression;970var compression = compressions[compressionName];971if (!compression) {972throw new Error(compressionName + " is not a valid compression method !");973}974return compression;975};976
977/**
978* Create a worker to generate a zip file.
979* @param {JSZip} zip the JSZip instance at the right root level.
980* @param {Object} options to generate the zip file.
981* @param {String} comment the comment to use.
982*/
983exports.generateWorker = function (zip, options, comment) {984
985var zipFileWorker = new ZipFileWorker(options.streamFiles, comment, options.platform, options.encodeFileName);986var entriesCount = 0;987try {988
989zip.forEach(function (relativePath, file) {990entriesCount++;991var compression = getCompression(file.options.compression, options.compression);992var compressionOptions = file.options.compressionOptions || options.compressionOptions || {};993var dir = file.dir, date = file.date;994
995file._compressWorker(compression, compressionOptions)996.withStreamInfo("file", {997name : relativePath,998dir : dir,999date : date,1000comment : file.comment || "",1001unixPermissions : file.unixPermissions,1002dosPermissions : file.dosPermissions1003})1004.pipe(zipFileWorker);1005});1006zipFileWorker.entriesCount = entriesCount;1007} catch (e) {1008zipFileWorker.error(e);1009}1010
1011return zipFileWorker;1012};1013
1014},{"../compressions":3,"./ZipFileWorker":8}],10:[function(require,module,exports){1015'use strict';1016
1017/**
1018* Representation a of zip file in js
1019* @constructor
1020*/
1021function JSZip() {1022// if this constructor is used without `new`, it adds `new` before itself:1023if(!(this instanceof JSZip)) {1024return new JSZip();1025}1026
1027if(arguments.length) {1028throw new Error("The constructor with parameters has been removed in JSZip 3.0, please check the upgrade guide.");1029}1030
1031// object containing the files :1032// {1033// "folder/" : {...},1034// "folder/data.txt" : {...}1035// }1036// NOTE: we use a null prototype because we do not1037// want filenames like "toString" coming from a zip file1038// to overwrite methods and attributes in a normal Object.1039this.files = Object.create(null);1040
1041this.comment = null;1042
1043// Where we are in the hierarchy1044this.root = "";1045this.clone = function() {1046var newObj = new JSZip();1047for (var i in this) {1048if (typeof this[i] !== "function") {1049newObj[i] = this[i];1050}1051}1052return newObj;1053};1054}
1055JSZip.prototype = require('./object');1056JSZip.prototype.loadAsync = require('./load');1057JSZip.support = require('./support');1058JSZip.defaults = require('./defaults');1059
1060// TODO find a better way to handle this version,
1061// a require('package.json').version doesn't work with webpack, see #327
1062JSZip.version = "3.7.1";1063
1064JSZip.loadAsync = function (content, options) {1065return new JSZip().loadAsync(content, options);1066};1067
1068JSZip.external = require("./external");1069module.exports = JSZip;1070
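
// Minimal usage sketch of the public API exported by this module (the entry
// point of the bundle), assuming an environment with Promise support:
//
//   var zip = new JSZip();
//   zip.file("hello.txt", "Hello World\n");
//   zip.folder("images");
//   zip.generateAsync({type: "uint8array"}).then(function (content) {
//       // content is the generated zip file as a Uint8Array
//   });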
1071},{"./defaults":5,"./external":6,"./load":11,"./object":15,"./support":30}],11:[function(require,module,exports){1072'use strict';1073var utils = require('./utils');1074var external = require("./external");1075var utf8 = require('./utf8');1076var ZipEntries = require('./zipEntries');1077var Crc32Probe = require('./stream/Crc32Probe');1078var nodejsUtils = require("./nodejsUtils");1079
1080/**
1081* Check the CRC32 of an entry.
1082* @param {ZipEntry} zipEntry the zip entry to check.
1083* @return {Promise} the result.
1084*/
1085function checkEntryCRC32(zipEntry) {1086return new external.Promise(function (resolve, reject) {1087var worker = zipEntry.decompressed.getContentWorker().pipe(new Crc32Probe());1088worker.on("error", function (e) {1089reject(e);1090})1091.on("end", function () {1092if (worker.streamInfo.crc32 !== zipEntry.decompressed.crc32) {1093reject(new Error("Corrupted zip : CRC32 mismatch"));1094} else {1095resolve();1096}1097})1098.resume();1099});1100}
1101
1102module.exports = function (data, options) {1103var zip = this;1104options = utils.extend(options || {}, {1105base64: false,1106checkCRC32: false,1107optimizedBinaryString: false,1108createFolders: false,1109decodeFileName: utf8.utf8decode1110});1111
1112if (nodejsUtils.isNode && nodejsUtils.isStream(data)) {1113return external.Promise.reject(new Error("JSZip can't accept a stream when loading a zip file."));1114}1115
1116return utils.prepareContent("the loaded zip file", data, true, options.optimizedBinaryString, options.base64)1117.then(function (data) {1118var zipEntries = new ZipEntries(options);1119zipEntries.load(data);1120return zipEntries;1121}).then(function checkCRC32(zipEntries) {1122var promises = [external.Promise.resolve(zipEntries)];1123var files = zipEntries.files;1124if (options.checkCRC32) {1125for (var i = 0; i < files.length; i++) {1126promises.push(checkEntryCRC32(files[i]));1127}1128}1129return external.Promise.all(promises);1130}).then(function addFiles(results) {1131var zipEntries = results.shift();1132var files = zipEntries.files;1133for (var i = 0; i < files.length; i++) {1134var input = files[i];1135zip.file(input.fileNameStr, input.decompressed, {1136binary: true,1137optimizedBinaryString: true,1138date: input.date,1139dir: input.dir,1140comment: input.fileCommentStr.length ? input.fileCommentStr : null,1141unixPermissions: input.unixPermissions,1142dosPermissions: input.dosPermissions,1143createFolders: options.createFolders1144});1145}1146if (zipEntries.zipComment.length) {1147zip.comment = zipEntries.zipComment;1148}1149
1150return zip;1151});1152};1153
1154},{"./external":6,"./nodejsUtils":14,"./stream/Crc32Probe":25,"./utf8":31,"./utils":32,"./zipEntries":33}],12:[function(require,module,exports){1155"use strict";1156
1157var utils = require('../utils');1158var GenericWorker = require('../stream/GenericWorker');1159
1160/**
1161* A worker that use a nodejs stream as source.
1162* @constructor
1163* @param {String} filename the name of the file entry for this stream.
1164* @param {Readable} stream the nodejs stream.
1165*/
1166function NodejsStreamInputAdapter(filename, stream) {1167GenericWorker.call(this, "Nodejs stream input adapter for " + filename);1168this._upstreamEnded = false;1169this._bindStream(stream);1170}
1171
1172utils.inherits(NodejsStreamInputAdapter, GenericWorker);1173
1174/**
1175* Prepare the stream and bind the callbacks on it.
1176* Do this ASAP on node 0.10 ! A lazy binding doesn't always work.
1177* @param {Stream} stream the nodejs stream to use.
1178*/
1179NodejsStreamInputAdapter.prototype._bindStream = function (stream) {1180var self = this;1181this._stream = stream;1182stream.pause();1183stream
1184.on("data", function (chunk) {1185self.push({1186data: chunk,1187meta : {1188percent : 01189}1190});1191})1192.on("error", function (e) {1193if(self.isPaused) {1194this.generatedError = e;1195} else {1196self.error(e);1197}1198})1199.on("end", function () {1200if(self.isPaused) {1201self._upstreamEnded = true;1202} else {1203self.end();1204}1205});1206};1207NodejsStreamInputAdapter.prototype.pause = function () {1208if(!GenericWorker.prototype.pause.call(this)) {1209return false;1210}1211this._stream.pause();1212return true;1213};1214NodejsStreamInputAdapter.prototype.resume = function () {1215if(!GenericWorker.prototype.resume.call(this)) {1216return false;1217}1218
1219if(this._upstreamEnded) {1220this.end();1221} else {1222this._stream.resume();1223}1224
1225return true;1226};1227
1228module.exports = NodejsStreamInputAdapter;1229
1230},{"../stream/GenericWorker":28,"../utils":32}],13:[function(require,module,exports){1231'use strict';1232
1233var Readable = require('readable-stream').Readable;1234
1235var utils = require('../utils');1236utils.inherits(NodejsStreamOutputAdapter, Readable);1237
1238/**
1239* A nodejs stream using a worker as source.
1240* @see the SourceWrapper in http://nodejs.org/api/stream.html
1241* @constructor
1242* @param {StreamHelper} helper the helper wrapping the worker
1243* @param {Object} options the nodejs stream options
1244* @param {Function} updateCb the update callback.
1245*/
1246function NodejsStreamOutputAdapter(helper, options, updateCb) {1247Readable.call(this, options);1248this._helper = helper;1249
1250var self = this;1251helper.on("data", function (data, meta) {1252if (!self.push(data)) {1253self._helper.pause();1254}1255if(updateCb) {1256updateCb(meta);1257}1258})1259.on("error", function(e) {1260self.emit('error', e);1261})1262.on("end", function () {1263self.push(null);1264});1265}
1266
1267
1268NodejsStreamOutputAdapter.prototype._read = function() {1269this._helper.resume();1270};1271
1272module.exports = NodejsStreamOutputAdapter;1273
1274},{"../utils":32,"readable-stream":16}],14:[function(require,module,exports){1275'use strict';1276
1277module.exports = {1278/**1279* True if this is running in Nodejs, will be undefined in a browser.
1280* In a browser, browserify won't include this file and the whole module
1281* will be resolved an empty object.
1282*/
1283isNode : typeof Buffer !== "undefined",1284/**1285* Create a new nodejs Buffer from an existing content.
1286* @param {Object} data the data to pass to the constructor.
1287* @param {String} encoding the encoding to use.
1288* @return {Buffer} a new Buffer.
1289*/
1290newBufferFrom: function(data, encoding) {1291if (Buffer.from && Buffer.from !== Uint8Array.from) {1292return Buffer.from(data, encoding);1293} else {1294if (typeof data === "number") {1295// Safeguard for old Node.js versions. On newer versions,1296// Buffer.from(number) / Buffer(number, encoding) already throw.1297throw new Error("The \"data\" argument must not be a number");1298}1299return new Buffer(data, encoding);1300}1301},1302/**1303* Create a new nodejs Buffer with the specified size.
1304* @param {Integer} size the size of the buffer.
1305* @return {Buffer} a new Buffer.
1306*/
1307allocBuffer: function (size) {1308if (Buffer.alloc) {1309return Buffer.alloc(size);1310} else {1311var buf = new Buffer(size);1312buf.fill(0);1313return buf;1314}1315},1316/**1317* Find out if an object is a Buffer.
1318* @param {Object} b the object to test.
1319* @return {Boolean} true if the object is a Buffer, false otherwise.
1320*/
1321isBuffer : function(b){1322return Buffer.isBuffer(b);1323},1324
1325isStream : function (obj) {1326return obj &&1327typeof obj.on === "function" &&1328typeof obj.pause === "function" &&1329typeof obj.resume === "function";1330}1331};1332
},{}],15:[function(require,module,exports){
'use strict';
var utf8 = require('./utf8');
var utils = require('./utils');
var GenericWorker = require('./stream/GenericWorker');
var StreamHelper = require('./stream/StreamHelper');
var defaults = require('./defaults');
var CompressedObject = require('./compressedObject');
var ZipObject = require('./zipObject');
var generate = require("./generate");
var nodejsUtils = require("./nodejsUtils");
var NodejsStreamInputAdapter = require("./nodejs/NodejsStreamInputAdapter");


/**
 * Add a file in the current folder.
 * @private
 * @param {string} name the name of the file
 * @param {String|ArrayBuffer|Uint8Array|Buffer} data the data of the file
 * @param {Object} originalOptions the options of the file
 * @return {Object} the new file.
 */
var fileAdd = function(name, data, originalOptions) {
    // be sure sub folders exist
    var dataType = utils.getTypeOf(data),
        parent;

    /*
     * Correct options.
     */

    var o = utils.extend(originalOptions || {}, defaults);
    o.date = o.date || new Date();
    if (o.compression !== null) {
        o.compression = o.compression.toUpperCase();
    }

    if (typeof o.unixPermissions === "string") {
        o.unixPermissions = parseInt(o.unixPermissions, 8);
    }

    // UNX_IFDIR  0040000 see zipinfo.c
    if (o.unixPermissions && (o.unixPermissions & 0x4000)) {
        o.dir = true;
    }
    // Bit 4    Directory
    if (o.dosPermissions && (o.dosPermissions & 0x0010)) {
        o.dir = true;
    }

    if (o.dir) {
        name = forceTrailingSlash(name);
    }
    if (o.createFolders && (parent = parentFolder(name))) {
        folderAdd.call(this, parent, true);
    }

    var isUnicodeString = dataType === "string" && o.binary === false && o.base64 === false;
    if (!originalOptions || typeof originalOptions.binary === "undefined") {
        o.binary = !isUnicodeString;
    }

    var isCompressedEmpty = (data instanceof CompressedObject) && data.uncompressedSize === 0;

    if (isCompressedEmpty || o.dir || !data || data.length === 0) {
        o.base64 = false;
        o.binary = true;
        data = "";
        o.compression = "STORE";
        dataType = "string";
    }

    /*
     * Convert content to fit.
     */

    var zipObjectContent = null;
    if (data instanceof CompressedObject || data instanceof GenericWorker) {
        zipObjectContent = data;
    } else if (nodejsUtils.isNode && nodejsUtils.isStream(data)) {
        zipObjectContent = new NodejsStreamInputAdapter(name, data);
    } else {
        zipObjectContent = utils.prepareContent(name, data, o.binary, o.optimizedBinaryString, o.base64);
    }

    var object = new ZipObject(name, zipObjectContent, o);
    this.files[name] = object;
    /*
    TODO: we can't throw an exception because we have async promises
    (we can have a promise of a Date() for example) but returning a
    promise is useless because file(name, data) returns the JSZip
    object for chaining. Should we break that to allow the user
    to catch the error ?

    return external.Promise.resolve(zipObjectContent)
    .then(function () {
        return object;
    });
    */
};
/**
 * Find the parent folder of the path.
 * @private
 * @param {string} path the path to use
 * @return {string} the parent folder, or ""
 */
var parentFolder = function (path) {
    if (path.slice(-1) === '/') {
        path = path.substring(0, path.length - 1);
    }
    var lastSlash = path.lastIndexOf('/');
    return (lastSlash > 0) ? path.substring(0, lastSlash) : "";
};
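
// For instance, parentFolder("a/b/c.txt") returns "a/b", parentFolder("a/b/")
// returns "a", and parentFolder("file.txt") returns "".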
/**
 * Returns the path with a slash at the end.
 * @private
 * @param {String} path the path to check.
 * @return {String} the path with a trailing slash.
 */
var forceTrailingSlash = function(path) {
    // Check the name ends with a /
    if (path.slice(-1) !== "/") {
        path += "/"; // IE doesn't like substr(-1)
    }
    return path;
};

/**
 * Add a (sub) folder in the current folder.
 * @private
 * @param {string} name the folder's name
 * @param {boolean=} [createFolders] If true, automatically create sub
 *  folders. Defaults to false.
 * @return {Object} the new folder.
 */
var folderAdd = function(name, createFolders) {
    createFolders = (typeof createFolders !== 'undefined') ? createFolders : defaults.createFolders;

    name = forceTrailingSlash(name);

    // Does this folder already exist?
    if (!this.files[name]) {
        fileAdd.call(this, name, null, {
            dir: true,
            createFolders: createFolders
        });
    }
    return this.files[name];
};

/**
 * Cross-window, cross-Node-context regular expression detection
 * @param {Object} object Anything
 * @return {Boolean} true if the object is a regular expression,
 * false otherwise
 */
function isRegExp(object) {
    return Object.prototype.toString.call(object) === "[object RegExp]";
}

// return the actual prototype of JSZip
var out = {
    /**
     * @see loadAsync
     */
    load: function() {
        throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.");
    },


    /**
     * Call a callback function for each entry at this folder level.
     * @param {Function} cb the callback function:
     * function (relativePath, file) {...}
     * It takes 2 arguments : the relative path and the file.
     */
    forEach: function(cb) {
        var filename, relativePath, file;
        /* jshint ignore:start */
        // ignore warning about unwanted properties because this.files is a null prototype object
        for (filename in this.files) {
            file = this.files[filename];
            relativePath = filename.slice(this.root.length, filename.length);
            if (relativePath && filename.slice(0, this.root.length) === this.root) { // the file is in the current root
                cb(relativePath, file); // TODO reverse the parameters ? need to be clean AND consistent with the filter search fn...
            }
        }
        /* jshint ignore:end */
    },

    /**
     * Filter nested files/folders with the specified function.
     * @param {Function} search the predicate to use :
     * function (relativePath, file) {...}
     * It takes 2 arguments : the relative path and the file.
     * @return {Array} An array of matching elements.
     */
    filter: function(search) {
        var result = [];
        this.forEach(function (relativePath, entry) {
            if (search(relativePath, entry)) { // the file matches the function
                result.push(entry);
            }

        });
        return result;
    },

    /**
     * Add a file to the zip file, or search a file.
     * @param {string|RegExp} name The name of the file to add (if data is defined),
     * the name of the file to find (if no data) or a regex to match files.
     * @param {String|ArrayBuffer|Uint8Array|Buffer} data The file data, either raw or base64 encoded
     * @param {Object} o File options
     * @return {JSZip|Object|Array} this JSZip object (when adding a file),
     * a file (when searching by string) or an array of files (when searching by regex).
     */
    file: function(name, data, o) {
        if (arguments.length === 1) {
            if (isRegExp(name)) {
                var regexp = name;
                return this.filter(function(relativePath, file) {
                    return !file.dir && regexp.test(relativePath);
                });
            }
            else { // text
                var obj = this.files[this.root + name];
                if (obj && !obj.dir) {
                    return obj;
                } else {
                    return null;
                }
            }
        }
        else { // more than one argument : we have data !
            name = this.root + name;
            fileAdd.call(this, name, data, o);
        }
        return this;
    },

    /**
     * Add a directory to the zip file, or search.
     * @param {String|RegExp} arg The name of the directory to add, or a regex to search folders.
     * @return {JSZip} an object with the new directory as the root, or an array containing matching folders.
     */
    folder: function(arg) {
        if (!arg) {
            return this;
        }

        if (isRegExp(arg)) {
            return this.filter(function(relativePath, file) {
                return file.dir && arg.test(relativePath);
            });
        }

        // else, name is a new folder
        var name = this.root + arg;
        var newFolder = folderAdd.call(this, name);

        // Allow chaining by returning a new object with this folder as the root
        var ret = this.clone();
        ret.root = newFolder.name;
        return ret;
    },

    /**
     * Delete a file, or a directory and all sub-files, from the zip
     * @param {string} name the name of the file to delete
     * @return {JSZip} this JSZip object
     */
    remove: function(name) {
        name = this.root + name;
        var file = this.files[name];
        if (!file) {
            // Look for any folders
            if (name.slice(-1) !== "/") {
                name += "/";
            }
            file = this.files[name];
        }

        if (file && !file.dir) {
            // file
            delete this.files[name];
        } else {
            // maybe a folder, delete recursively
            var kids = this.filter(function(relativePath, file) {
                return file.name.slice(0, name.length) === name;
            });
            for (var i = 0; i < kids.length; i++) {
                delete this.files[kids[i].name];
            }
        }

        return this;
    },

    /**
     * Generate the complete zip file
     * @param {Object} options the options to generate the zip file :
     * - compression, "STORE" by default.
     * - type, "base64" by default. Values are : string, base64, uint8array, arraybuffer, blob.
     * @return {String|Uint8Array|ArrayBuffer|Buffer|Blob} the zip file
     */
    generate: function(options) {
        throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.");
    },

    /**
     * Generate the complete zip file as an internal stream.
     * @param {Object} options the options to generate the zip file :
     * - compression, "STORE" by default.
     * - type, "base64" by default. Values are : string, base64, uint8array, arraybuffer, blob.
     * @return {StreamHelper} the streamed zip file.
     */
    generateInternalStream: function(options) {
        var worker, opts = {};
        try {
            opts = utils.extend(options || {}, {
                streamFiles: false,
                compression: "STORE",
                compressionOptions : null,
                type: "",
                platform: "DOS",
                comment: null,
                mimeType: 'application/zip',
                encodeFileName: utf8.utf8encode
            });

            opts.type = opts.type.toLowerCase();
            opts.compression = opts.compression.toUpperCase();

            // "binarystring" is preferred but the internals use "string".
            if(opts.type === "binarystring") {
                opts.type = "string";
            }

            if (!opts.type) {
                throw new Error("No output type specified.");
            }

            utils.checkSupport(opts.type);

            // accept nodejs `process.platform`
            if(
                opts.platform === 'darwin' ||
                opts.platform === 'freebsd' ||
                opts.platform === 'linux' ||
                opts.platform === 'sunos'
            ) {
                opts.platform = "UNIX";
            }
            if (opts.platform === 'win32') {
                opts.platform = "DOS";
            }

            var comment = opts.comment || this.comment || "";
            worker = generate.generateWorker(this, opts, comment);
        } catch (e) {
            worker = new GenericWorker("error");
            worker.error(e);
        }
        return new StreamHelper(worker, opts.type || "string", opts.mimeType);
    },
    /**
     * Generate the complete zip file asynchronously.
     * @see generateInternalStream
     */
    generateAsync: function(options, onUpdate) {
        return this.generateInternalStream(options).accumulate(onUpdate);
    },
    /**
     * Generate the complete zip file asynchronously.
     * @see generateInternalStream
     */
    generateNodeStream: function(options, onUpdate) {
        options = options || {};
        if (!options.type) {
            options.type = "nodebuffer";
        }
        return this.generateInternalStream(options).toNodejsStream(onUpdate);
    }
};
module.exports = out;
1724},{"./compressedObject":2,"./defaults":5,"./generate":9,"./nodejs/NodejsStreamInputAdapter":12,"./nodejsUtils":14,"./stream/GenericWorker":28,"./stream/StreamHelper":29,"./utf8":31,"./utils":32,"./zipObject":35}],16:[function(require,module,exports){1725/*
1726* This file is used by module bundlers (browserify/webpack/etc) when
1727* including a stream implementation. We use "readable-stream" to get a
1728* consistent behavior between nodejs versions but bundlers often have a shim
1729* for "stream". Using this shim greatly improve the compatibility and greatly
1730* reduce the final size of the bundle (only one stream implementation, not
1731* two).
1732*/
1733module.exports = require("stream");1734
1735},{"stream":undefined}],17:[function(require,module,exports){1736'use strict';1737var DataReader = require('./DataReader');1738var utils = require('../utils');1739
1740function ArrayReader(data) {1741DataReader.call(this, data);1742for(var i = 0; i < this.data.length; i++) {1743data[i] = data[i] & 0xFF;1744}1745}
1746utils.inherits(ArrayReader, DataReader);1747/**
1748* @see DataReader.byteAt
1749*/
1750ArrayReader.prototype.byteAt = function(i) {1751return this.data[this.zero + i];1752};1753/**
1754* @see DataReader.lastIndexOfSignature
1755*/
1756ArrayReader.prototype.lastIndexOfSignature = function(sig) {1757var sig0 = sig.charCodeAt(0),1758sig1 = sig.charCodeAt(1),1759sig2 = sig.charCodeAt(2),1760sig3 = sig.charCodeAt(3);1761for (var i = this.length - 4; i >= 0; --i) {1762if (this.data[i] === sig0 && this.data[i + 1] === sig1 && this.data[i + 2] === sig2 && this.data[i + 3] === sig3) {1763return i - this.zero;1764}1765}1766
1767return -1;1768};1769/**
1770* @see DataReader.readAndCheckSignature
1771*/
1772ArrayReader.prototype.readAndCheckSignature = function (sig) {1773var sig0 = sig.charCodeAt(0),1774sig1 = sig.charCodeAt(1),1775sig2 = sig.charCodeAt(2),1776sig3 = sig.charCodeAt(3),1777data = this.readData(4);1778return sig0 === data[0] && sig1 === data[1] && sig2 === data[2] && sig3 === data[3];1779};1780/**
1781* @see DataReader.readData
1782*/
1783ArrayReader.prototype.readData = function(size) {1784this.checkOffset(size);1785if(size === 0) {1786return [];1787}1788var result = this.data.slice(this.zero + this.index, this.zero + this.index + size);1789this.index += size;1790return result;1791};1792module.exports = ArrayReader;1793
1794},{"../utils":32,"./DataReader":18}],18:[function(require,module,exports){1795'use strict';1796var utils = require('../utils');1797
1798function DataReader(data) {1799this.data = data; // type : see implementation1800this.length = data.length;1801this.index = 0;1802this.zero = 0;1803}
1804DataReader.prototype = {1805/**1806* Check that the offset will not go too far.
1807* @param {string} offset the additional offset to check.
1808* @throws {Error} an Error if the offset is out of bounds.
1809*/
1810checkOffset: function(offset) {1811this.checkIndex(this.index + offset);1812},1813/**1814* Check that the specified index will not be too far.
1815* @param {string} newIndex the index to check.
1816* @throws {Error} an Error if the index is out of bounds.
1817*/
1818checkIndex: function(newIndex) {1819if (this.length < this.zero + newIndex || newIndex < 0) {1820throw new Error("End of data reached (data length = " + this.length + ", asked index = " + (newIndex) + "). Corrupted zip ?");1821}1822},1823/**1824* Change the index.
1825* @param {number} newIndex The new index.
1826* @throws {Error} if the new index is out of the data.
1827*/
1828setIndex: function(newIndex) {1829this.checkIndex(newIndex);1830this.index = newIndex;1831},1832/**1833* Skip the next n bytes.
1834* @param {number} n the number of bytes to skip.
1835* @throws {Error} if the new index is out of the data.
1836*/
1837skip: function(n) {1838this.setIndex(this.index + n);1839},1840/**1841* Get the byte at the specified index.
1842* @param {number} i the index to use.
1843* @return {number} a byte.
1844*/
1845byteAt: function(i) {1846// see implementations1847},1848/**1849* Get the next number with a given byte size.
1850* @param {number} size the number of bytes to read.
1851* @return {number} the corresponding number.
1852*/
1853readInt: function(size) {1854var result = 0,1855i;1856this.checkOffset(size);1857for (i = this.index + size - 1; i >= this.index; i--) {1858result = (result << 8) + this.byteAt(i);1859}1860this.index += size;1861return result;1862},1863/**1864* Get the next string with a given byte size.
1865* @param {number} size the number of bytes to read.
1866* @return {string} the corresponding string.
1867*/
1868readString: function(size) {1869return utils.transformTo("string", this.readData(size));1870},1871/**1872* Get raw data without conversion, <size> bytes.
1873* @param {number} size the number of bytes to read.
1874* @return {Object} the raw data, implementation specific.
1875*/
1876readData: function(size) {1877// see implementations1878},1879/**1880* Find the last occurrence of a zip signature (4 bytes).
1881* @param {string} sig the signature to find.
1882* @return {number} the index of the last occurrence, -1 if not found.
1883*/
1884lastIndexOfSignature: function(sig) {1885// see implementations1886},1887/**1888* Read the signature (4 bytes) at the current position and compare it with sig.
1889* @param {string} sig the expected signature
1890* @return {boolean} true if the signature matches, false otherwise.
1891*/
1892readAndCheckSignature: function(sig) {1893// see implementations1894},1895/**1896* Get the next date.
1897* @return {Date} the date.
1898*/
1899readDate: function() {1900var dostime = this.readInt(4);1901return new Date(Date.UTC(1902((dostime >> 25) & 0x7f) + 1980, // year1903((dostime >> 21) & 0x0f) - 1, // month1904(dostime >> 16) & 0x1f, // day1905(dostime >> 11) & 0x1f, // hour1906(dostime >> 5) & 0x3f, // minute1907(dostime & 0x1f) << 1)); // second1908}1909};1910module.exports = DataReader;1911
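/*
 * Illustrative sketch (not part of JSZip): readInt above reads bytes little-endian,
 * which is how every multi-byte field of the zip format is stored. Assumes a concrete
 * subclass such as the ArrayReader defined earlier in this bundle.
 *
 *   var ArrayReader = require('./ArrayReader');
 *   var reader = new ArrayReader([0x50, 0x4b, 0x03, 0x04]);
 *   reader.readInt(4); // 0x04034b50 - the local file header signature, read little-endian
 */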
1912},{"../utils":32}],19:[function(require,module,exports){1913'use strict';1914var Uint8ArrayReader = require('./Uint8ArrayReader');1915var utils = require('../utils');1916
1917function NodeBufferReader(data) {1918Uint8ArrayReader.call(this, data);1919}
1920utils.inherits(NodeBufferReader, Uint8ArrayReader);1921
1922/**
1923* @see DataReader.readData
1924*/
1925NodeBufferReader.prototype.readData = function(size) {1926this.checkOffset(size);1927var result = this.data.slice(this.zero + this.index, this.zero + this.index + size);1928this.index += size;1929return result;1930};1931module.exports = NodeBufferReader;1932
1933},{"../utils":32,"./Uint8ArrayReader":21}],20:[function(require,module,exports){1934'use strict';1935var DataReader = require('./DataReader');1936var utils = require('../utils');1937
1938function StringReader(data) {1939DataReader.call(this, data);1940}
1941utils.inherits(StringReader, DataReader);1942/**
1943* @see DataReader.byteAt
1944*/
1945StringReader.prototype.byteAt = function(i) {1946return this.data.charCodeAt(this.zero + i);1947};1948/**
1949* @see DataReader.lastIndexOfSignature
1950*/
1951StringReader.prototype.lastIndexOfSignature = function(sig) {1952return this.data.lastIndexOf(sig) - this.zero;1953};1954/**
1955* @see DataReader.readAndCheckSignature
1956*/
1957StringReader.prototype.readAndCheckSignature = function (sig) {1958var data = this.readData(4);1959return sig === data;1960};1961/**
1962* @see DataReader.readData
1963*/
1964StringReader.prototype.readData = function(size) {1965this.checkOffset(size);1966// this will work because the constructor applied the "& 0xff" mask.1967var result = this.data.slice(this.zero + this.index, this.zero + this.index + size);1968this.index += size;1969return result;1970};1971module.exports = StringReader;1972
1973},{"../utils":32,"./DataReader":18}],21:[function(require,module,exports){1974'use strict';1975var ArrayReader = require('./ArrayReader');1976var utils = require('../utils');1977
1978function Uint8ArrayReader(data) {1979ArrayReader.call(this, data);1980}
1981utils.inherits(Uint8ArrayReader, ArrayReader);1982/**
1983* @see DataReader.readData
1984*/
1985Uint8ArrayReader.prototype.readData = function(size) {1986this.checkOffset(size);1987if(size === 0) {1988// in IE10, when using subarray(idx, idx), we get the array [0x00] instead of [].1989return new Uint8Array(0);1990}1991var result = this.data.subarray(this.zero + this.index, this.zero + this.index + size);1992this.index += size;1993return result;1994};1995module.exports = Uint8ArrayReader;1996
1997},{"../utils":32,"./ArrayReader":17}],22:[function(require,module,exports){1998'use strict';1999
2000var utils = require('../utils');2001var support = require('../support');2002var ArrayReader = require('./ArrayReader');2003var StringReader = require('./StringReader');2004var NodeBufferReader = require('./NodeBufferReader');2005var Uint8ArrayReader = require('./Uint8ArrayReader');2006
2007/**
2008* Create a reader adapted to the data.
2009* @param {String|ArrayBuffer|Uint8Array|Buffer} data the data to read.
2010* @return {DataReader} the data reader.
2011*/
2012module.exports = function (data) {2013var type = utils.getTypeOf(data);2014utils.checkSupport(type);2015if (type === "string" && !support.uint8array) {2016return new StringReader(data);2017}2018if (type === "nodebuffer") {2019return new NodeBufferReader(data);2020}2021if (support.uint8array) {2022return new Uint8ArrayReader(utils.transformTo("uint8array", data));2023}2024return new ArrayReader(utils.transformTo("array", data));2025};2026
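/*
 * Illustrative sketch (not part of JSZip): readerFor picks the reader matching the
 * input type, so callers only deal with the common DataReader API. Assumes an
 * environment where Uint8Array is supported.
 *
 *   var readerFor = require('./readerFor');
 *   var reader = readerFor(new Uint8Array([0x50, 0x4b, 0x05, 0x06]));
 *   reader.readAndCheckSignature("PK\x05\x06"); // true, and the index advances by 4
 */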
2027},{"../support":30,"../utils":32,"./ArrayReader":17,"./NodeBufferReader":19,"./StringReader":20,"./Uint8ArrayReader":21}],23:[function(require,module,exports){2028'use strict';2029exports.LOCAL_FILE_HEADER = "PK\x03\x04";2030exports.CENTRAL_FILE_HEADER = "PK\x01\x02";2031exports.CENTRAL_DIRECTORY_END = "PK\x05\x06";2032exports.ZIP64_CENTRAL_DIRECTORY_LOCATOR = "PK\x06\x07";2033exports.ZIP64_CENTRAL_DIRECTORY_END = "PK\x06\x06";2034exports.DATA_DESCRIPTOR = "PK\x07\x08";2035
},{}],24:[function(require,module,exports){
'use strict';

var GenericWorker = require('./GenericWorker');
var utils = require('../utils');

/**
 * A worker which converts chunks to a specified type.
 * @constructor
 * @param {String} destType the destination type.
 */
function ConvertWorker(destType) {
    GenericWorker.call(this, "ConvertWorker to " + destType);
    this.destType = destType;
}
utils.inherits(ConvertWorker, GenericWorker);

/**
 * @see GenericWorker.processChunk
 */
ConvertWorker.prototype.processChunk = function (chunk) {
    this.push({
        data : utils.transformTo(this.destType, chunk.data),
        meta : chunk.meta
    });
};
module.exports = ConvertWorker;
2064},{"../utils":32,"./GenericWorker":28}],25:[function(require,module,exports){2065'use strict';2066
2067var GenericWorker = require('./GenericWorker');2068var crc32 = require('../crc32');2069var utils = require('../utils');2070
2071/**
2072* A worker which calculate the crc32 of the data flowing through.
2073* @constructor
2074*/
2075function Crc32Probe() {2076GenericWorker.call(this, "Crc32Probe");2077this.withStreamInfo("crc32", 0);2078}
2079utils.inherits(Crc32Probe, GenericWorker);2080
2081/**
2082* @see GenericWorker.processChunk
2083*/
2084Crc32Probe.prototype.processChunk = function (chunk) {2085this.streamInfo.crc32 = crc32(chunk.data, this.streamInfo.crc32 || 0);2086this.push(chunk);2087};2088module.exports = Crc32Probe;2089
2090},{"../crc32":4,"../utils":32,"./GenericWorker":28}],26:[function(require,module,exports){2091'use strict';2092
2093var utils = require('../utils');2094var GenericWorker = require('./GenericWorker');2095
2096/**
2097* A worker which calculate the total length of the data flowing through.
2098* @constructor
2099* @param {String} propName the name used to expose the length
2100*/
2101function DataLengthProbe(propName) {2102GenericWorker.call(this, "DataLengthProbe for " + propName);2103this.propName = propName;2104this.withStreamInfo(propName, 0);2105}
2106utils.inherits(DataLengthProbe, GenericWorker);2107
2108/**
2109* @see GenericWorker.processChunk
2110*/
2111DataLengthProbe.prototype.processChunk = function (chunk) {2112if(chunk) {2113var length = this.streamInfo[this.propName] || 0;2114this.streamInfo[this.propName] = length + chunk.data.length;2115}2116GenericWorker.prototype.processChunk.call(this, chunk);2117};2118module.exports = DataLengthProbe;2119
2120
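/*
 * Illustrative sketch (not part of JSZip): probe workers are meant to be piped into a
 * chain; each one records its measurement in the shared streamInfo object and passes
 * the chunks through unchanged. A hypothetical chain could look like this:
 *
 *   var GenericWorker = require('./GenericWorker');
 *   var Crc32Probe = require('./Crc32Probe');
 *   var DataLengthProbe = require('./DataLengthProbe');
 *
 *   var source = new GenericWorker("source");
 *   var chain = source.pipe(new Crc32Probe()).pipe(new DataLengthProbe("uncompressedSize"));
 *   chain.on('data', function () {});
 *   chain.resume();
 *   source.push({ data: new Uint8Array([1, 2, 3]), meta: {} });
 *   // chain.streamInfo.uncompressedSize is now 3 and chain.streamInfo.crc32 holds the running CRC-32
 */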
2121},{"../utils":32,"./GenericWorker":28}],27:[function(require,module,exports){2122'use strict';2123
2124var utils = require('../utils');2125var GenericWorker = require('./GenericWorker');2126
2127// the size of the generated chunks
2128// TODO expose this as a public variable
2129var DEFAULT_BLOCK_SIZE = 16 * 1024;2130
2131/**
2132* A worker that reads a content and emits chunks.
2133* @constructor
2134* @param {Promise} dataP the promise of the data to split
2135*/
2136function DataWorker(dataP) {2137GenericWorker.call(this, "DataWorker");2138var self = this;2139this.dataIsReady = false;2140this.index = 0;2141this.max = 0;2142this.data = null;2143this.type = "";2144
2145this._tickScheduled = false;2146
2147dataP.then(function (data) {2148self.dataIsReady = true;2149self.data = data;2150self.max = data && data.length || 0;2151self.type = utils.getTypeOf(data);2152if(!self.isPaused) {2153self._tickAndRepeat();2154}2155}, function (e) {2156self.error(e);2157});2158}
2159
2160utils.inherits(DataWorker, GenericWorker);2161
2162/**
2163* @see GenericWorker.cleanUp
2164*/
2165DataWorker.prototype.cleanUp = function () {2166GenericWorker.prototype.cleanUp.call(this);2167this.data = null;2168};2169
2170/**
2171* @see GenericWorker.resume
2172*/
2173DataWorker.prototype.resume = function () {2174if(!GenericWorker.prototype.resume.call(this)) {2175return false;2176}2177
2178if (!this._tickScheduled && this.dataIsReady) {2179this._tickScheduled = true;2180utils.delay(this._tickAndRepeat, [], this);2181}2182return true;2183};2184
2185/**
* Trigger a tick and schedule another call to this function.
2187*/
2188DataWorker.prototype._tickAndRepeat = function() {2189this._tickScheduled = false;2190if(this.isPaused || this.isFinished) {2191return;2192}2193this._tick();2194if(!this.isFinished) {2195utils.delay(this._tickAndRepeat, [], this);2196this._tickScheduled = true;2197}2198};2199
2200/**
2201* Read and push a chunk.
2202*/
2203DataWorker.prototype._tick = function() {2204
2205if(this.isPaused || this.isFinished) {2206return false;2207}2208
2209var size = DEFAULT_BLOCK_SIZE;2210var data = null, nextIndex = Math.min(this.max, this.index + size);2211if (this.index >= this.max) {2212// EOF2213return this.end();2214} else {2215switch(this.type) {2216case "string":2217data = this.data.substring(this.index, nextIndex);2218break;2219case "uint8array":2220data = this.data.subarray(this.index, nextIndex);2221break;2222case "array":2223case "nodebuffer":2224data = this.data.slice(this.index, nextIndex);2225break;2226}2227this.index = nextIndex;2228return this.push({2229data : data,2230meta : {2231percent : this.max ? this.index / this.max * 100 : 02232}2233});2234}2235};2236
2237module.exports = DataWorker;2238
2239},{"../utils":32,"./GenericWorker":28}],28:[function(require,module,exports){2240'use strict';2241
2242/**
* A worker that does nothing but pass chunks to the next one. This is like
2244* a nodejs stream but with some differences. On the good side :
2245* - it works on IE 6-9 without any issue / polyfill
* - it weighs less than the full dependencies bundled with browserify
2247* - it forwards errors (no need to declare an error handler EVERYWHERE)
2248*
2249* A chunk is an object with 2 attributes : `meta` and `data`. The former is an
2250* object containing anything (`percent` for example), see each worker for more
2251* details. The latter is the real data (String, Uint8Array, etc).
2252*
2253* @constructor
2254* @param {String} name the name of the stream (mainly used for debugging purposes)
2255*/
2256function GenericWorker(name) {2257// the name of the worker2258this.name = name || "default";2259// an object containing metadata about the workers chain2260this.streamInfo = {};2261// an error which happened when the worker was paused2262this.generatedError = null;2263// an object containing metadata to be merged by this worker into the general metadata2264this.extraStreamInfo = {};2265// true if the stream is paused (and should not do anything), false otherwise2266this.isPaused = true;2267// true if the stream is finished (and should not do anything), false otherwise2268this.isFinished = false;2269// true if the stream is locked to prevent further structure updates (pipe), false otherwise2270this.isLocked = false;2271// the event listeners2272this._listeners = {2273'data':[],2274'end':[],2275'error':[]2276};2277// the previous worker, if any2278this.previous = null;2279}
2280
2281GenericWorker.prototype = {2282/**2283* Push a chunk to the next workers.
2284* @param {Object} chunk the chunk to push
2285*/
2286push : function (chunk) {2287this.emit("data", chunk);2288},2289/**2290* End the stream.
2291* @return {Boolean} true if this call ended the worker, false otherwise.
2292*/
2293end : function () {2294if (this.isFinished) {2295return false;2296}2297
2298this.flush();2299try {2300this.emit("end");2301this.cleanUp();2302this.isFinished = true;2303} catch (e) {2304this.emit("error", e);2305}2306return true;2307},2308/**2309* End the stream with an error.
2310* @param {Error} e the error which caused the premature end.
2311* @return {Boolean} true if this call ended the worker with an error, false otherwise.
2312*/
2313error : function (e) {2314if (this.isFinished) {2315return false;2316}2317
2318if(this.isPaused) {2319this.generatedError = e;2320} else {2321this.isFinished = true;2322
2323this.emit("error", e);2324
            // if the workers chain exploded in the middle of the chain,
            // the error event will go downward but we also need to notify
            // workers upward that there has been an error.
            if(this.previous) {
                this.previous.error(e);
            }
2332this.cleanUp();2333}2334return true;2335},2336/**2337* Add a callback on an event.
2338* @param {String} name the name of the event (data, end, error)
2339* @param {Function} listener the function to call when the event is triggered
2340* @return {GenericWorker} the current object for chainability
2341*/
2342on : function (name, listener) {2343this._listeners[name].push(listener);2344return this;2345},2346/**2347* Clean any references when a worker is ending.
2348*/
2349cleanUp : function () {2350this.streamInfo = this.generatedError = this.extraStreamInfo = null;2351this._listeners = [];2352},2353/**2354* Trigger an event. This will call registered callback with the provided arg.
2355* @param {String} name the name of the event (data, end, error)
2356* @param {Object} arg the argument to call the callback with.
2357*/
2358emit : function (name, arg) {2359if (this._listeners[name]) {2360for(var i = 0; i < this._listeners[name].length; i++) {2361this._listeners[name][i].call(this, arg);2362}2363}2364},2365/**2366* Chain a worker with an other.
2367* @param {Worker} next the worker receiving events from the current one.
2368* @return {worker} the next worker for chainability
2369*/
2370pipe : function (next) {2371return next.registerPrevious(this);2372},2373/**2374* Same as `pipe` in the other direction.
2375* Using an API with `pipe(next)` is very easy.
2376* Implementing the API with the point of view of the next one registering
2377* a source is easier, see the ZipFileWorker.
2378* @param {Worker} previous the previous worker, sending events to this one
2379* @return {Worker} the current worker for chainability
2380*/
2381registerPrevious : function (previous) {2382if (this.isLocked) {2383throw new Error("The stream '" + this + "' has already been used.");2384}2385
2386// sharing the streamInfo...2387this.streamInfo = previous.streamInfo;2388// ... and adding our own bits2389this.mergeStreamInfo();2390this.previous = previous;2391var self = this;2392previous.on('data', function (chunk) {2393self.processChunk(chunk);2394});2395previous.on('end', function () {2396self.end();2397});2398previous.on('error', function (e) {2399self.error(e);2400});2401return this;2402},2403/**2404* Pause the stream so it doesn't send events anymore.
2405* @return {Boolean} true if this call paused the worker, false otherwise.
2406*/
2407pause : function () {2408if(this.isPaused || this.isFinished) {2409return false;2410}2411this.isPaused = true;2412
2413if(this.previous) {2414this.previous.pause();2415}2416return true;2417},2418/**2419* Resume a paused stream.
2420* @return {Boolean} true if this call resumed the worker, false otherwise.
2421*/
2422resume : function () {2423if(!this.isPaused || this.isFinished) {2424return false;2425}2426this.isPaused = false;2427
2428// if true, the worker tried to resume but failed2429var withError = false;2430if(this.generatedError) {2431this.error(this.generatedError);2432withError = true;2433}2434if(this.previous) {2435this.previous.resume();2436}2437
2438return !withError;2439},2440/**2441* Flush any remaining bytes as the stream is ending.
2442*/
2443flush : function () {},2444/**2445* Process a chunk. This is usually the method overridden.
2446* @param {Object} chunk the chunk to process.
2447*/
2448processChunk : function(chunk) {2449this.push(chunk);2450},2451/**2452* Add a key/value to be added in the workers chain streamInfo once activated.
2453* @param {String} key the key to use
2454* @param {Object} value the associated value
2455* @return {Worker} the current worker for chainability
2456*/
2457withStreamInfo : function (key, value) {2458this.extraStreamInfo[key] = value;2459this.mergeStreamInfo();2460return this;2461},2462/**2463* Merge this worker's streamInfo into the chain's streamInfo.
2464*/
2465mergeStreamInfo : function () {2466for(var key in this.extraStreamInfo) {2467if (!this.extraStreamInfo.hasOwnProperty(key)) {2468continue;2469}2470this.streamInfo[key] = this.extraStreamInfo[key];2471}2472},2473
2474/**2475* Lock the stream to prevent further updates on the workers chain.
2476* After calling this method, all calls to pipe will fail.
2477*/
2478lock: function () {2479if (this.isLocked) {2480throw new Error("The stream '" + this + "' has already been used.");2481}2482this.isLocked = true;2483if (this.previous) {2484this.previous.lock();2485}2486},2487
2488/**2489*
2490* Pretty print the workers chain.
2491*/
2492toString : function () {2493var me = "Worker " + this.name;2494if (this.previous) {2495return this.previous + " -> " + me;2496} else {2497return me;2498}2499}2500};2501
2502module.exports = GenericWorker;2503
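/*
 * Illustrative sketch (not part of JSZip): a minimal custom worker only needs to
 * override processChunk; data, end and error events are forwarded automatically by
 * the pipe/registerPrevious machinery above. The UpperCaseWorker name is hypothetical.
 *
 *   function UpperCaseWorker() {
 *       GenericWorker.call(this, "UpperCaseWorker");
 *   }
 *   UpperCaseWorker.prototype = Object.create(GenericWorker.prototype);
 *   UpperCaseWorker.prototype.processChunk = function (chunk) {
 *       this.push({ data: chunk.data.toUpperCase(), meta: chunk.meta });
 *   };
 */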
2504},{}],29:[function(require,module,exports){2505'use strict';2506
2507var utils = require('../utils');2508var ConvertWorker = require('./ConvertWorker');2509var GenericWorker = require('./GenericWorker');2510var base64 = require('../base64');2511var support = require("../support");2512var external = require("../external");2513
2514var NodejsStreamOutputAdapter = null;2515if (support.nodestream) {2516try {2517NodejsStreamOutputAdapter = require('../nodejs/NodejsStreamOutputAdapter');2518} catch(e) {}2519}
2520
2521/**
2522* Apply the final transformation of the data. If the user wants a Blob for
* example, it's easier to work with a Uint8Array and finally do the
2524* ArrayBuffer/Blob conversion.
2525* @param {String} type the name of the final type
2526* @param {String|Uint8Array|Buffer} content the content to transform
2527* @param {String} mimeType the mime type of the content, if applicable.
2528* @return {String|Uint8Array|ArrayBuffer|Buffer|Blob} the content in the right format.
2529*/
2530function transformZipOutput(type, content, mimeType) {2531switch(type) {2532case "blob" :2533return utils.newBlob(utils.transformTo("arraybuffer", content), mimeType);2534case "base64" :2535return base64.encode(content);2536default :2537return utils.transformTo(type, content);2538}2539}
2540
2541/**
2542* Concatenate an array of data of the given type.
2543* @param {String} type the type of the data in the given array.
2544* @param {Array} dataArray the array containing the data chunks to concatenate
2545* @return {String|Uint8Array|Buffer} the concatenated data
2546* @throws Error if the asked type is unsupported
2547*/
2548function concat (type, dataArray) {2549var i, index = 0, res = null, totalLength = 0;2550for(i = 0; i < dataArray.length; i++) {2551totalLength += dataArray[i].length;2552}2553switch(type) {2554case "string":2555return dataArray.join("");2556case "array":2557return Array.prototype.concat.apply([], dataArray);2558case "uint8array":2559res = new Uint8Array(totalLength);2560for(i = 0; i < dataArray.length; i++) {2561res.set(dataArray[i], index);2562index += dataArray[i].length;2563}2564return res;2565case "nodebuffer":2566return Buffer.concat(dataArray);2567default:2568throw new Error("concat : unsupported type '" + type + "'");2569}2570}
2571
2572/**
* Listen to a StreamHelper, accumulate its content and concatenate it into a
2574* complete block.
2575* @param {StreamHelper} helper the helper to use.
2576* @param {Function} updateCallback a callback called on each update. Called
2577* with one arg :
2578* - the metadata linked to the update received.
2579* @return Promise the promise for the accumulation.
2580*/
2581function accumulate(helper, updateCallback) {2582return new external.Promise(function (resolve, reject){2583var dataArray = [];2584var chunkType = helper._internalType,2585resultType = helper._outputType,2586mimeType = helper._mimeType;2587helper
2588.on('data', function (data, meta) {2589dataArray.push(data);2590if(updateCallback) {2591updateCallback(meta);2592}2593})2594.on('error', function(err) {2595dataArray = [];2596reject(err);2597})2598.on('end', function (){2599try {2600var result = transformZipOutput(resultType, concat(chunkType, dataArray), mimeType);2601resolve(result);2602} catch (e) {2603reject(e);2604}2605dataArray = [];2606})2607.resume();2608});2609}
2610
2611/**
* A helper to easily use workers outside of JSZip.
2613* @constructor
2614* @param {Worker} worker the worker to wrap
* @param {String} outputType the type of data expected by the user
2616* @param {String} mimeType the mime type of the content, if applicable.
2617*/
2618function StreamHelper(worker, outputType, mimeType) {2619var internalType = outputType;2620switch(outputType) {2621case "blob":2622case "arraybuffer":2623internalType = "uint8array";2624break;2625case "base64":2626internalType = "string";2627break;2628}2629
2630try {2631// the type used internally2632this._internalType = internalType;2633// the type used to output results2634this._outputType = outputType;2635// the mime type2636this._mimeType = mimeType;2637utils.checkSupport(internalType);2638this._worker = worker.pipe(new ConvertWorker(internalType));2639// the last workers can be rewired without issues but we need to2640// prevent any updates on previous workers.2641worker.lock();2642} catch(e) {2643this._worker = new GenericWorker("error");2644this._worker.error(e);2645}2646}
2647
StreamHelper.prototype = {
    /**
     * Listen to this StreamHelper, accumulate its content and concatenate it into a
     * complete block.
     * @param {Function} updateCb the update callback.
     * @return Promise the promise for the accumulation.
     */
2655accumulate : function (updateCb) {2656return accumulate(this, updateCb);2657},2658/**2659* Add a listener on an event triggered on a stream.
2660* @param {String} evt the name of the event
2661* @param {Function} fn the listener
2662* @return {StreamHelper} the current helper.
2663*/
2664on : function (evt, fn) {2665var self = this;2666
2667if(evt === "data") {2668this._worker.on(evt, function (chunk) {2669fn.call(self, chunk.data, chunk.meta);2670});2671} else {2672this._worker.on(evt, function () {2673utils.delay(fn, arguments, self);2674});2675}2676return this;2677},2678/**2679* Resume the flow of chunks.
2680* @return {StreamHelper} the current helper.
2681*/
2682resume : function () {2683utils.delay(this._worker.resume, [], this._worker);2684return this;2685},2686/**2687* Pause the flow of chunks.
2688* @return {StreamHelper} the current helper.
2689*/
2690pause : function () {2691this._worker.pause();2692return this;2693},2694/**2695* Return a nodejs stream for this helper.
2696* @param {Function} updateCb the update callback.
2697* @return {NodejsStreamOutputAdapter} the nodejs stream.
2698*/
    toNodejsStream : function (updateCb) {
        utils.checkSupport("nodestream");
        if (this._outputType !== "nodebuffer") {
            // an object stream containing blob/arraybuffer/uint8array/string
            // is strange and I don't know if it would be useful.
            // If you find this comment and have a good use case, please open a
            // bug report!
            throw new Error(this._outputType + " is not supported by this method");
        }
2709return new NodejsStreamOutputAdapter(this, {2710objectMode : this._outputType !== "nodebuffer"2711}, updateCb);2712}2713};2714
2715
2716module.exports = StreamHelper;2717
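/*
 * Illustrative sketch (not part of JSZip's public API): StreamHelper is the helper
 * behind generateAsync / file.async; wrapping a worker gives either an event-based or
 * a promise-based view of its output. `someWorker` below is a hypothetical worker
 * producing chunks.
 *
 *   var helper = new StreamHelper(someWorker, "base64", "");
 *   helper.accumulate(function (meta) {
 *       console.log(meta.percent);   // progress updates
 *   }).then(function (result) {
 *       console.log(result);         // the whole output as a base64 string
 *   });
 */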
2718},{"../base64":1,"../external":6,"../nodejs/NodejsStreamOutputAdapter":13,"../support":30,"../utils":32,"./ConvertWorker":24,"./GenericWorker":28}],30:[function(require,module,exports){2719'use strict';2720
2721exports.base64 = true;2722exports.array = true;2723exports.string = true;2724exports.arraybuffer = typeof ArrayBuffer !== "undefined" && typeof Uint8Array !== "undefined";2725exports.nodebuffer = typeof Buffer !== "undefined";2726// contains true if JSZip can read/generate Uint8Array, false otherwise.
2727exports.uint8array = typeof Uint8Array !== "undefined";2728
2729if (typeof ArrayBuffer === "undefined") {2730exports.blob = false;2731}
2732else {2733var buffer = new ArrayBuffer(0);2734try {2735exports.blob = new Blob([buffer], {2736type: "application/zip"2737}).size === 0;2738}2739catch (e) {2740try {2741var Builder = self.BlobBuilder || self.WebKitBlobBuilder || self.MozBlobBuilder || self.MSBlobBuilder;2742var builder = new Builder();2743builder.append(buffer);2744exports.blob = builder.getBlob('application/zip').size === 0;2745}2746catch (e) {2747exports.blob = false;2748}2749}2750}
2751
2752try {2753exports.nodestream = !!require('readable-stream').Readable;2754} catch(e) {2755exports.nodestream = false;2756}
2757
2758},{"readable-stream":16}],31:[function(require,module,exports){2759'use strict';2760
2761var utils = require('./utils');2762var support = require('./support');2763var nodejsUtils = require('./nodejsUtils');2764var GenericWorker = require('./stream/GenericWorker');2765
2766/**
2767* The following functions come from pako, from pako/lib/utils/strings
2768* released under the MIT license, see pako https://github.com/nodeca/pako/
2769*/
2770
2771// Table with utf8 lengths (calculated by first byte of sequence)
// Note that 5- and 6-byte values and some 4-byte values cannot be represented in JS,
2773// because max possible codepoint is 0x10ffff
2774var _utf8len = new Array(256);2775for (var i=0; i<256; i++) {2776_utf8len[i] = (i >= 252 ? 6 : i >= 248 ? 5 : i >= 240 ? 4 : i >= 224 ? 3 : i >= 192 ? 2 : 1);2777}
2778_utf8len[254]=_utf8len[254]=1; // Invalid sequence start2779
2780// convert string to array (typed, when possible)
2781var string2buf = function (str) {2782var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0;2783
2784// count binary size2785for (m_pos = 0; m_pos < str_len; m_pos++) {2786c = str.charCodeAt(m_pos);2787if ((c & 0xfc00) === 0xd800 && (m_pos+1 < str_len)) {2788c2 = str.charCodeAt(m_pos+1);2789if ((c2 & 0xfc00) === 0xdc00) {2790c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);2791m_pos++;2792}2793}2794buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4;2795}2796
2797// allocate buffer2798if (support.uint8array) {2799buf = new Uint8Array(buf_len);2800} else {2801buf = new Array(buf_len);2802}2803
2804// convert2805for (i=0, m_pos = 0; i < buf_len; m_pos++) {2806c = str.charCodeAt(m_pos);2807if ((c & 0xfc00) === 0xd800 && (m_pos+1 < str_len)) {2808c2 = str.charCodeAt(m_pos+1);2809if ((c2 & 0xfc00) === 0xdc00) {2810c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);2811m_pos++;2812}2813}2814if (c < 0x80) {2815/* one byte */2816buf[i++] = c;2817} else if (c < 0x800) {2818/* two bytes */2819buf[i++] = 0xC0 | (c >>> 6);2820buf[i++] = 0x80 | (c & 0x3f);2821} else if (c < 0x10000) {2822/* three bytes */2823buf[i++] = 0xE0 | (c >>> 12);2824buf[i++] = 0x80 | (c >>> 6 & 0x3f);2825buf[i++] = 0x80 | (c & 0x3f);2826} else {2827/* four bytes */2828buf[i++] = 0xf0 | (c >>> 18);2829buf[i++] = 0x80 | (c >>> 12 & 0x3f);2830buf[i++] = 0x80 | (c >>> 6 & 0x3f);2831buf[i++] = 0x80 | (c & 0x3f);2832}2833}2834
2835return buf;2836};2837
2838// Calculate max possible position in utf8 buffer,
2839// that will not break sequence. If that's not possible
2840// - (very small limits) return max size as is.
2841//
2842// buf[] - utf8 bytes array
2843// max - length limit (mandatory);
2844var utf8border = function(buf, max) {2845var pos;2846
2847max = max || buf.length;2848if (max > buf.length) { max = buf.length; }2849
2850// go back from last position, until start of sequence found2851pos = max-1;2852while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; }2853
    // Degenerate case - very small or broken sequence,
    // return max, because we should return something anyway.
    if (pos < 0) { return max; }

    // If we came to the start of the buffer - that means the buffer is too small,
    // return max too.
    if (pos === 0) { return max; }
2862return (pos + _utf8len[buf[pos]] > max) ? pos : max;2863};2864
2865// convert array to string
2866var buf2string = function (buf) {2867var str, i, out, c, c_len;2868var len = buf.length;2869
    // Reserve max possible length (2 words per char)
    // NB: for unknown reasons, Array is significantly faster for
    // String.fromCharCode.apply than Uint16Array.
    var utf16buf = new Array(len*2);
2875for (out=0, i=0; i<len;) {2876c = buf[i++];2877// quick process ascii2878if (c < 0x80) { utf16buf[out++] = c; continue; }2879
2880c_len = _utf8len[c];2881// skip 5 & 6 byte codes2882if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len-1; continue; }2883
2884// apply mask on first byte2885c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07;2886// join the rest2887while (c_len > 1 && i < len) {2888c = (c << 6) | (buf[i++] & 0x3f);2889c_len--;2890}2891
2892// terminated by end of string?2893if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; }2894
2895if (c < 0x10000) {2896utf16buf[out++] = c;2897} else {2898c -= 0x10000;2899utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff);2900utf16buf[out++] = 0xdc00 | (c & 0x3ff);2901}2902}2903
2904// shrinkBuf(utf16buf, out)2905if (utf16buf.length !== out) {2906if(utf16buf.subarray) {2907utf16buf = utf16buf.subarray(0, out);2908} else {2909utf16buf.length = out;2910}2911}2912
2913// return String.fromCharCode.apply(null, utf16buf);2914return utils.applyFromCharCode(utf16buf);2915};2916
2917
2918// That's all for the pako functions.
2919
2920
2921/**
2922* Transform a javascript string into an array (typed if possible) of bytes,
2923* UTF-8 encoded.
2924* @param {String} str the string to encode
2925* @return {Array|Uint8Array|Buffer} the UTF-8 encoded string.
2926*/
2927exports.utf8encode = function utf8encode(str) {2928if (support.nodebuffer) {2929return nodejsUtils.newBufferFrom(str, "utf-8");2930}2931
2932return string2buf(str);2933};2934
2935
2936/**
* Transform a byte array (or a representation of one) holding an UTF-8 encoded
* string into a javascript string.
* @param {Array|Uint8Array|Buffer} buf the data to decode
2940* @return {String} the decoded string.
2941*/
2942exports.utf8decode = function utf8decode(buf) {2943if (support.nodebuffer) {2944return utils.transformTo("nodebuffer", buf).toString("utf-8");2945}2946
2947buf = utils.transformTo(support.uint8array ? "uint8array" : "array", buf);2948
2949return buf2string(buf);2950};2951
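/*
 * Illustrative sketch (not part of JSZip): utf8encode/utf8decode round-trip a
 * javascript string through its UTF-8 byte representation. In a browser without
 * nodebuffer support, the encoded form is a Uint8Array built by string2buf above.
 *
 *   var utf8 = require('./utf8');
 *   var bytes = utf8.utf8encode("café");   // bytes 0x63, 0x61, 0x66, 0xc3, 0xa9
 *   utf8.utf8decode(bytes) === "café";     // true
 */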
2952/**
2953* A worker to decode utf8 encoded binary chunks into string chunks.
2954* @constructor
2955*/
2956function Utf8DecodeWorker() {2957GenericWorker.call(this, "utf-8 decode");2958// the last bytes if a chunk didn't end with a complete codepoint.2959this.leftOver = null;2960}
2961utils.inherits(Utf8DecodeWorker, GenericWorker);2962
2963/**
2964* @see GenericWorker.processChunk
2965*/
2966Utf8DecodeWorker.prototype.processChunk = function (chunk) {2967
2968var data = utils.transformTo(support.uint8array ? "uint8array" : "array", chunk.data);2969
2970// 1st step, re-use what's left of the previous chunk2971if (this.leftOver && this.leftOver.length) {2972if(support.uint8array) {2973var previousData = data;2974data = new Uint8Array(previousData.length + this.leftOver.length);2975data.set(this.leftOver, 0);2976data.set(previousData, this.leftOver.length);2977} else {2978data = this.leftOver.concat(data);2979}2980this.leftOver = null;2981}2982
2983var nextBoundary = utf8border(data);2984var usableData = data;2985if (nextBoundary !== data.length) {2986if (support.uint8array) {2987usableData = data.subarray(0, nextBoundary);2988this.leftOver = data.subarray(nextBoundary, data.length);2989} else {2990usableData = data.slice(0, nextBoundary);2991this.leftOver = data.slice(nextBoundary, data.length);2992}2993}2994
2995this.push({2996data : exports.utf8decode(usableData),2997meta : chunk.meta2998});2999};3000
3001/**
3002* @see GenericWorker.flush
3003*/
3004Utf8DecodeWorker.prototype.flush = function () {3005if(this.leftOver && this.leftOver.length) {3006this.push({3007data : exports.utf8decode(this.leftOver),3008meta : {}3009});3010this.leftOver = null;3011}3012};3013exports.Utf8DecodeWorker = Utf8DecodeWorker;3014
3015/**
* A worker to encode string chunks into utf8 encoded binary chunks.
3017* @constructor
3018*/
3019function Utf8EncodeWorker() {3020GenericWorker.call(this, "utf-8 encode");3021}
3022utils.inherits(Utf8EncodeWorker, GenericWorker);3023
3024/**
3025* @see GenericWorker.processChunk
3026*/
3027Utf8EncodeWorker.prototype.processChunk = function (chunk) {3028this.push({3029data : exports.utf8encode(chunk.data),3030meta : chunk.meta3031});3032};3033exports.Utf8EncodeWorker = Utf8EncodeWorker;3034
3035},{"./nodejsUtils":14,"./stream/GenericWorker":28,"./support":30,"./utils":32}],32:[function(require,module,exports){3036'use strict';3037
3038var support = require('./support');3039var base64 = require('./base64');3040var nodejsUtils = require('./nodejsUtils');3041var setImmediate = require('set-immediate-shim');3042var external = require("./external");3043
3044
3045/**
* Convert a string that passes as a "binary string": it should represent a byte
* array but may have char codes > 255. Keep only the first byte of each char code
* and return the byte array.
3049* @param {String} str the string to transform.
3050* @return {Array|Uint8Array} the string in a binary format.
3051*/
3052function string2binary(str) {3053var result = null;3054if (support.uint8array) {3055result = new Uint8Array(str.length);3056} else {3057result = new Array(str.length);3058}3059return stringToArrayLike(str, result);3060}
3061
3062/**
3063* Create a new blob with the given content and the given type.
3064* @param {String|ArrayBuffer} part the content to put in the blob. DO NOT use
3065* an Uint8Array because the stock browser of android 4 won't accept it (it
3066* will be silently converted to a string, "[object Uint8Array]").
3067*
3068* Use only ONE part to build the blob to avoid a memory leak in IE11 / Edge:
3069* when a large amount of Array is used to create the Blob, the amount of
3070* memory consumed is nearly 100 times the original data amount.
3071*
3072* @param {String} type the mime type of the blob.
3073* @return {Blob} the created blob.
3074*/
3075exports.newBlob = function(part, type) {3076exports.checkSupport("blob");3077
3078try {3079// Blob constructor3080return new Blob([part], {3081type: type3082});3083}3084catch (e) {3085
3086try {3087// deprecated, browser only, old way3088var Builder = self.BlobBuilder || self.WebKitBlobBuilder || self.MozBlobBuilder || self.MSBlobBuilder;3089var builder = new Builder();3090builder.append(part);3091return builder.getBlob(type);3092}3093catch (e) {3094
3095// well, fuck ?!3096throw new Error("Bug : can't construct the Blob.");3097}3098}3099
3100
3101};3102/**
3103* The identity function.
3104* @param {Object} input the input.
3105* @return {Object} the same input.
3106*/
3107function identity(input) {3108return input;3109}
3110
3111/**
3112* Fill in an array with a string.
3113* @param {String} str the string to use.
3114* @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to fill in (will be mutated).
3115* @return {Array|ArrayBuffer|Uint8Array|Buffer} the updated array.
3116*/
3117function stringToArrayLike(str, array) {3118for (var i = 0; i < str.length; ++i) {3119array[i] = str.charCodeAt(i) & 0xFF;3120}3121return array;3122}
3123
3124/**
* A helper for the function arrayLikeToString.
3126* This contains static information and functions that
3127* can be optimized by the browser JIT compiler.
3128*/
3129var arrayToStringHelper = {3130/**3131* Transform an array of int into a string, chunk by chunk.
3132* See the performances notes on arrayLikeToString.
3133* @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to transform.
3134* @param {String} type the type of the array.
3135* @param {Integer} chunk the chunk size.
3136* @return {String} the resulting string.
3137* @throws Error if the chunk is too big for the stack.
3138*/
3139stringifyByChunk: function(array, type, chunk) {3140var result = [], k = 0, len = array.length;3141// shortcut3142if (len <= chunk) {3143return String.fromCharCode.apply(null, array);3144}3145while (k < len) {3146if (type === "array" || type === "nodebuffer") {3147result.push(String.fromCharCode.apply(null, array.slice(k, Math.min(k + chunk, len))));3148}3149else {3150result.push(String.fromCharCode.apply(null, array.subarray(k, Math.min(k + chunk, len))));3151}3152k += chunk;3153}3154return result.join("");3155},3156/**3157* Call String.fromCharCode on every item in the array.
* This is the naive implementation, which generates a lot of intermediate strings.
* This should be used only when everything else fails.
3160* @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to transform.
3161* @return {String} the result.
3162*/
3163stringifyByChar: function(array){3164var resultStr = "";3165for(var i = 0; i < array.length; i++) {3166resultStr += String.fromCharCode(array[i]);3167}3168return resultStr;3169},3170applyCanBeUsed : {3171/**3172* true if the browser accepts to use String.fromCharCode on Uint8Array
3173*/
3174uint8array : (function () {3175try {3176return support.uint8array && String.fromCharCode.apply(null, new Uint8Array(1)).length === 1;3177} catch (e) {3178return false;3179}3180})(),3181/**3182* true if the browser accepts to use String.fromCharCode on nodejs Buffer.
3183*/
3184nodebuffer : (function () {3185try {3186return support.nodebuffer && String.fromCharCode.apply(null, nodejsUtils.allocBuffer(1)).length === 1;3187} catch (e) {3188return false;3189}3190})()3191}3192};3193
3194/**
3195* Transform an array-like object to a string.
3196* @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to transform.
3197* @return {String} the result.
3198*/
3199function arrayLikeToString(array) {3200// Performances notes :3201// --------------------3202// String.fromCharCode.apply(null, array) is the fastest, see3203// see http://jsperf.com/converting-a-uint8array-to-a-string/23204// but the stack is limited (and we can get huge arrays !).3205//3206// result += String.fromCharCode(array[i]); generate too many strings !3207//3208// This code is inspired by http://jsperf.com/arraybuffer-to-string-apply-performance/23209// TODO : we now have workers that split the work. Do we still need that ?3210var chunk = 65536,3211type = exports.getTypeOf(array),3212canUseApply = true;3213if (type === "uint8array") {3214canUseApply = arrayToStringHelper.applyCanBeUsed.uint8array;3215} else if (type === "nodebuffer") {3216canUseApply = arrayToStringHelper.applyCanBeUsed.nodebuffer;3217}3218
3219if (canUseApply) {3220while (chunk > 1) {3221try {3222return arrayToStringHelper.stringifyByChunk(array, type, chunk);3223} catch (e) {3224chunk = Math.floor(chunk / 2);3225}3226}3227}3228
3229// no apply or chunk error : slow and painful algorithm3230// default browser on android 4.*3231return arrayToStringHelper.stringifyByChar(array);3232}
3233
3234exports.applyFromCharCode = arrayLikeToString;3235
3236
3237/**
3238* Copy the data from an array-like to an other array-like.
3239* @param {Array|ArrayBuffer|Uint8Array|Buffer} arrayFrom the origin array.
3240* @param {Array|ArrayBuffer|Uint8Array|Buffer} arrayTo the destination array which will be mutated.
3241* @return {Array|ArrayBuffer|Uint8Array|Buffer} the updated destination array.
3242*/
3243function arrayLikeToArrayLike(arrayFrom, arrayTo) {3244for (var i = 0; i < arrayFrom.length; i++) {3245arrayTo[i] = arrayFrom[i];3246}3247return arrayTo;3248}
3249
3250// a matrix containing functions to transform everything into everything.
3251var transform = {};3252
3253// string to ?
3254transform["string"] = {3255"string": identity,3256"array": function(input) {3257return stringToArrayLike(input, new Array(input.length));3258},3259"arraybuffer": function(input) {3260return transform["string"]["uint8array"](input).buffer;3261},3262"uint8array": function(input) {3263return stringToArrayLike(input, new Uint8Array(input.length));3264},3265"nodebuffer": function(input) {3266return stringToArrayLike(input, nodejsUtils.allocBuffer(input.length));3267}3268};3269
3270// array to ?
3271transform["array"] = {3272"string": arrayLikeToString,3273"array": identity,3274"arraybuffer": function(input) {3275return (new Uint8Array(input)).buffer;3276},3277"uint8array": function(input) {3278return new Uint8Array(input);3279},3280"nodebuffer": function(input) {3281return nodejsUtils.newBufferFrom(input);3282}3283};3284
3285// arraybuffer to ?
3286transform["arraybuffer"] = {3287"string": function(input) {3288return arrayLikeToString(new Uint8Array(input));3289},3290"array": function(input) {3291return arrayLikeToArrayLike(new Uint8Array(input), new Array(input.byteLength));3292},3293"arraybuffer": identity,3294"uint8array": function(input) {3295return new Uint8Array(input);3296},3297"nodebuffer": function(input) {3298return nodejsUtils.newBufferFrom(new Uint8Array(input));3299}3300};3301
3302// uint8array to ?
3303transform["uint8array"] = {3304"string": arrayLikeToString,3305"array": function(input) {3306return arrayLikeToArrayLike(input, new Array(input.length));3307},3308"arraybuffer": function(input) {3309return input.buffer;3310},3311"uint8array": identity,3312"nodebuffer": function(input) {3313return nodejsUtils.newBufferFrom(input);3314}3315};3316
3317// nodebuffer to ?
3318transform["nodebuffer"] = {3319"string": arrayLikeToString,3320"array": function(input) {3321return arrayLikeToArrayLike(input, new Array(input.length));3322},3323"arraybuffer": function(input) {3324return transform["nodebuffer"]["uint8array"](input).buffer;3325},3326"uint8array": function(input) {3327return arrayLikeToArrayLike(input, new Uint8Array(input.length));3328},3329"nodebuffer": identity3330};3331
3332/**
3333* Transform an input into any type.
3334* The supported output type are : string, array, uint8array, arraybuffer, nodebuffer.
3335* If no output type is specified, the unmodified input will be returned.
3336* @param {String} outputType the output type.
3337* @param {String|Array|ArrayBuffer|Uint8Array|Buffer} input the input to convert.
3338* @throws {Error} an Error if the browser doesn't support the requested output type.
3339*/
3340exports.transformTo = function(outputType, input) {3341if (!input) {3342// undefined, null, etc3343// an empty string won't harm.3344input = "";3345}3346if (!outputType) {3347return input;3348}3349exports.checkSupport(outputType);3350var inputType = exports.getTypeOf(input);3351var result = transform[inputType][outputType](input);3352return result;3353};3354
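/*
 * Illustrative sketch (not part of JSZip): transformTo routes through the transform
 * matrix above, so any supported input type can be converted to any supported output
 * type in one call.
 *
 *   var utils = require('./utils');
 *   utils.transformTo("uint8array", "PK");                      // Uint8Array [0x50, 0x4b]
 *   utils.transformTo("string", new Uint8Array([0x50, 0x4b]));  // "PK"
 *   utils.transformTo("array", "");                             // []
 */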
3355/**
3356* Return the type of the input.
3357* The type will be in a format valid for JSZip.utils.transformTo : string, array, uint8array, arraybuffer.
3358* @param {Object} input the input to identify.
3359* @return {String} the (lowercase) type of the input.
3360*/
3361exports.getTypeOf = function(input) {3362if (typeof input === "string") {3363return "string";3364}3365if (Object.prototype.toString.call(input) === "[object Array]") {3366return "array";3367}3368if (support.nodebuffer && nodejsUtils.isBuffer(input)) {3369return "nodebuffer";3370}3371if (support.uint8array && input instanceof Uint8Array) {3372return "uint8array";3373}3374if (support.arraybuffer && input instanceof ArrayBuffer) {3375return "arraybuffer";3376}3377};3378
3379/**
3380* Throw an exception if the type is not supported.
3381* @param {String} type the type to check.
3382* @throws {Error} an Error if the browser doesn't support the requested type.
3383*/
3384exports.checkSupport = function(type) {3385var supported = support[type.toLowerCase()];3386if (!supported) {3387throw new Error(type + " is not supported by this platform");3388}3389};3390
3391exports.MAX_VALUE_16BITS = 65535;3392exports.MAX_VALUE_32BITS = -1; // well, "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF" is parsed as -13393
3394/**
3395* Prettify a string read as binary.
3396* @param {string} str the string to prettify.
3397* @return {string} a pretty string.
3398*/
3399exports.pretty = function(str) {3400var res = '',3401code, i;3402for (i = 0; i < (str || "").length; i++) {3403code = str.charCodeAt(i);3404res += '\\x' + (code < 16 ? "0" : "") + code.toString(16).toUpperCase();3405}3406return res;3407};3408
3409/**
3410* Defer the call of a function.
3411* @param {Function} callback the function to call asynchronously.
3412* @param {Array} args the arguments to give to the callback.
3413*/
3414exports.delay = function(callback, args, self) {3415setImmediate(function () {3416callback.apply(self || null, args || []);3417});3418};3419
3420/**
* Extend a prototype with another, without calling a constructor with
3422* side effects. Inspired by nodejs' `utils.inherits`
3423* @param {Function} ctor the constructor to augment
3424* @param {Function} superCtor the parent constructor to use
3425*/
3426exports.inherits = function (ctor, superCtor) {3427var Obj = function() {};3428Obj.prototype = superCtor.prototype;3429ctor.prototype = new Obj();3430};3431
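/*
 * Illustrative sketch (not part of JSZip): inherits only wires up the prototype
 * chain; the child constructor still has to call the parent explicitly, which is
 * the pattern used by the readers and workers in this bundle. Child and Parent are
 * hypothetical names.
 *
 *   function Child(data) {
 *       Parent.call(this, data);
 *   }
 *   exports.inherits(Child, Parent);
 *   new Child([]) instanceof Parent; // true
 */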
3432/**
3433* Merge the objects passed as parameters into a new one.
3434* @private
3435* @param {...Object} var_args All objects to merge.
3436* @return {Object} a new object with the data of the others.
3437*/
3438exports.extend = function() {3439var result = {}, i, attr;3440for (i = 0; i < arguments.length; i++) { // arguments is not enumerable in some browsers3441for (attr in arguments[i]) {3442if (arguments[i].hasOwnProperty(attr) && typeof result[attr] === "undefined") {3443result[attr] = arguments[i][attr];3444}3445}3446}3447return result;3448};3449
3450/**
3451* Transform arbitrary content into a Promise.
3452* @param {String} name a name for the content being processed.
3453* @param {Object} inputData the content to process.
3454* @param {Boolean} isBinary true if the content is not an unicode string
3455* @param {Boolean} isOptimizedBinaryString true if the string content only has one byte per character.
3456* @param {Boolean} isBase64 true if the string content is encoded with base64.
3457* @return {Promise} a promise in a format usable by JSZip.
3458*/
3459exports.prepareContent = function(name, inputData, isBinary, isOptimizedBinaryString, isBase64) {3460
    // if inputData is already a promise, this flattens it.
    var promise = external.Promise.resolve(inputData).then(function(data) {
3464
3465var isBlob = support.blob && (data instanceof Blob || ['[object File]', '[object Blob]'].indexOf(Object.prototype.toString.call(data)) !== -1);3466
3467if (isBlob && typeof FileReader !== "undefined") {3468return new external.Promise(function (resolve, reject) {3469var reader = new FileReader();3470
3471reader.onload = function(e) {3472resolve(e.target.result);3473};3474reader.onerror = function(e) {3475reject(e.target.error);3476};3477reader.readAsArrayBuffer(data);3478});3479} else {3480return data;3481}3482});3483
3484return promise.then(function(data) {3485var dataType = exports.getTypeOf(data);3486
3487if (!dataType) {3488return external.Promise.reject(3489new Error("Can't read the data of '" + name + "'. Is it " +3490"in a supported JavaScript type (String, Blob, ArrayBuffer, etc) ?")3491);3492}3493// special case : it's way easier to work with Uint8Array than with ArrayBuffer3494if (dataType === "arraybuffer") {3495data = exports.transformTo("uint8array", data);3496} else if (dataType === "string") {3497if (isBase64) {3498data = base64.decode(data);3499}3500else if (isBinary) {3501// optimizedBinaryString === true means that the file has already been filtered with a 0xFF mask3502if (isOptimizedBinaryString !== true) {3503// this is a string, not in a base64 format.3504// Be sure that this is a correct "binary string"3505data = string2binary(data);3506}3507}3508}3509return data;3510});3511};3512
3513},{"./base64":1,"./external":6,"./nodejsUtils":14,"./support":30,"set-immediate-shim":54}],33:[function(require,module,exports){3514'use strict';3515var readerFor = require('./reader/readerFor');3516var utils = require('./utils');3517var sig = require('./signature');3518var ZipEntry = require('./zipEntry');3519var utf8 = require('./utf8');3520var support = require('./support');3521// class ZipEntries {{{
3522/**
3523* All the entries in the zip file.
3524* @constructor
3525* @param {Object} loadOptions Options for loading the stream.
3526*/
3527function ZipEntries(loadOptions) {3528this.files = [];3529this.loadOptions = loadOptions;3530}
3531ZipEntries.prototype = {3532/**3533* Check that the reader is on the specified signature.
3534* @param {string} expectedSignature the expected signature.
* @throws {Error} if it is another signature.
3536*/
3537checkSignature: function(expectedSignature) {3538if (!this.reader.readAndCheckSignature(expectedSignature)) {3539this.reader.index -= 4;3540var signature = this.reader.readString(4);3541throw new Error("Corrupted zip or bug: unexpected signature " + "(" + utils.pretty(signature) + ", expected " + utils.pretty(expectedSignature) + ")");3542}3543},3544/**3545* Check if the given signature is at the given index.
3546* @param {number} askedIndex the index to check.
3547* @param {string} expectedSignature the signature to expect.
3548* @return {boolean} true if the signature is here, false otherwise.
3549*/
3550isSignature: function(askedIndex, expectedSignature) {3551var currentIndex = this.reader.index;3552this.reader.setIndex(askedIndex);3553var signature = this.reader.readString(4);3554var result = signature === expectedSignature;3555this.reader.setIndex(currentIndex);3556return result;3557},3558/**3559* Read the end of the central directory.
3560*/
3561readBlockEndOfCentral: function() {3562this.diskNumber = this.reader.readInt(2);3563this.diskWithCentralDirStart = this.reader.readInt(2);3564this.centralDirRecordsOnThisDisk = this.reader.readInt(2);3565this.centralDirRecords = this.reader.readInt(2);3566this.centralDirSize = this.reader.readInt(4);3567this.centralDirOffset = this.reader.readInt(4);3568
3569this.zipCommentLength = this.reader.readInt(2);3570// warning : the encoding depends of the system locale3571// On a linux machine with LANG=en_US.utf8, this field is utf8 encoded.3572// On a windows machine, this field is encoded with the localized windows code page.3573var zipComment = this.reader.readData(this.zipCommentLength);3574var decodeParamType = support.uint8array ? "uint8array" : "array";3575// To get consistent behavior with the generation part, we will assume that3576// this is utf8 encoded unless specified otherwise.3577var decodeContent = utils.transformTo(decodeParamType, zipComment);3578this.zipComment = this.loadOptions.decodeFileName(decodeContent);3579},3580/**3581* Read the end of the Zip 64 central directory.
* Not merged with the method readEndOfCentral:
* the end of central directory record can coexist with its Zip64 counterpart,
* and we don't want to read the wrong number of bytes!
3585*/
3586readBlockZip64EndOfCentral: function() {3587this.zip64EndOfCentralSize = this.reader.readInt(8);3588this.reader.skip(4);3589// this.versionMadeBy = this.reader.readString(2);3590// this.versionNeeded = this.reader.readInt(2);3591this.diskNumber = this.reader.readInt(4);3592this.diskWithCentralDirStart = this.reader.readInt(4);3593this.centralDirRecordsOnThisDisk = this.reader.readInt(8);3594this.centralDirRecords = this.reader.readInt(8);3595this.centralDirSize = this.reader.readInt(8);3596this.centralDirOffset = this.reader.readInt(8);3597
3598this.zip64ExtensibleData = {};3599var extraDataSize = this.zip64EndOfCentralSize - 44,3600index = 0,3601extraFieldId,3602extraFieldLength,3603extraFieldValue;3604while (index < extraDataSize) {3605extraFieldId = this.reader.readInt(2);3606extraFieldLength = this.reader.readInt(4);3607extraFieldValue = this.reader.readData(extraFieldLength);3608this.zip64ExtensibleData[extraFieldId] = {3609id: extraFieldId,3610length: extraFieldLength,3611value: extraFieldValue3612};3613}3614},3615/**3616* Read the end of the Zip 64 central directory locator.
3617*/
3618readBlockZip64EndOfCentralLocator: function() {3619this.diskWithZip64CentralDirStart = this.reader.readInt(4);3620this.relativeOffsetEndOfZip64CentralDir = this.reader.readInt(8);3621this.disksCount = this.reader.readInt(4);3622if (this.disksCount > 1) {3623throw new Error("Multi-volumes zip are not supported");3624}3625},3626/**3627* Read the local files, based on the offset read in the central part.
3628*/
3629readLocalFiles: function() {3630var i, file;3631for (i = 0; i < this.files.length; i++) {3632file = this.files[i];3633this.reader.setIndex(file.localHeaderOffset);3634this.checkSignature(sig.LOCAL_FILE_HEADER);3635file.readLocalPart(this.reader);3636file.handleUTF8();3637file.processAttributes();3638}3639},3640/**3641* Read the central directory.
3642*/
3643readCentralDir: function() {3644var file;3645
3646this.reader.setIndex(this.centralDirOffset);3647while (this.reader.readAndCheckSignature(sig.CENTRAL_FILE_HEADER)) {3648file = new ZipEntry({3649zip64: this.zip643650}, this.loadOptions);3651file.readCentralPart(this.reader);3652this.files.push(file);3653}3654
3655if (this.centralDirRecords !== this.files.length) {3656if (this.centralDirRecords !== 0 && this.files.length === 0) {3657// We expected some records but couldn't find ANY.3658// This is really suspicious, as if something went wrong.3659throw new Error("Corrupted zip or bug: expected " + this.centralDirRecords + " records in central dir, got " + this.files.length);3660} else {3661// We found some records but not all.3662// Something is wrong but we got something for the user: no error here.3663// console.warn("expected", this.centralDirRecords, "records in central dir, got", this.files.length);3664}3665}3666},3667/**3668* Read the end of central directory.
3669*/
3670readEndOfCentral: function() {3671var offset = this.reader.lastIndexOfSignature(sig.CENTRAL_DIRECTORY_END);3672if (offset < 0) {3673// Check if the content is a truncated zip or complete garbage.3674// A "LOCAL_FILE_HEADER" is not required at the beginning (auto3675// extractible zip for example) but it can give a good hint.3676// If an ajax request was used without responseType, we will also3677// get unreadable data.3678var isGarbage = !this.isSignature(0, sig.LOCAL_FILE_HEADER);3679
3680if (isGarbage) {3681throw new Error("Can't find end of central directory : is this a zip file ? " +3682"If it is, see https://stuk.github.io/jszip/documentation/howto/read_zip.html");3683} else {3684throw new Error("Corrupted zip: can't find end of central directory");3685}3686
3687}3688this.reader.setIndex(offset);3689var endOfCentralDirOffset = offset;3690this.checkSignature(sig.CENTRAL_DIRECTORY_END);3691this.readBlockEndOfCentral();3692
3693
3694/* extract from the zip spec :36954) If one of the fields in the end of central directory
3696record is too small to hold required data, the field
3697should be set to -1 (0xFFFF or 0xFFFFFFFF) and the
3698ZIP64 format record should be created.
36995) The end of central directory record and the
3700Zip64 end of central directory locator record must
3701reside on the same disk when splitting or spanning
3702an archive.
3703*/
3704if (this.diskNumber === utils.MAX_VALUE_16BITS || this.diskWithCentralDirStart === utils.MAX_VALUE_16BITS || this.centralDirRecordsOnThisDisk === utils.MAX_VALUE_16BITS || this.centralDirRecords === utils.MAX_VALUE_16BITS || this.centralDirSize === utils.MAX_VALUE_32BITS || this.centralDirOffset === utils.MAX_VALUE_32BITS) {3705this.zip64 = true;3706
            /*
            Warning : the zip64 extension is supported, but ONLY if the 64-bit integer read from
            the zip file can fit into a 32-bit integer. This cannot be solved : JavaScript represents
            all numbers as 64-bit double precision IEEE 754 floating point numbers.
            So, we have 53 bits for integers and bitwise operations treat everything as 32 bits.
            see https://developer.mozilla.org/en-US/docs/JavaScript/Reference/Operators/Bitwise_Operators
            and http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-262.pdf section 8.5
            */

            // should look for a zip64 EOCD locator
            offset = this.reader.lastIndexOfSignature(sig.ZIP64_CENTRAL_DIRECTORY_LOCATOR);
            if (offset < 0) {
                throw new Error("Corrupted zip: can't find the ZIP64 end of central directory locator");
            }
            this.reader.setIndex(offset);
            this.checkSignature(sig.ZIP64_CENTRAL_DIRECTORY_LOCATOR);
            this.readBlockZip64EndOfCentralLocator();

            // now the zip64 EOCD record
            if (!this.isSignature(this.relativeOffsetEndOfZip64CentralDir, sig.ZIP64_CENTRAL_DIRECTORY_END)) {
                // console.warn("ZIP64 end of central directory not where expected.");
                this.relativeOffsetEndOfZip64CentralDir = this.reader.lastIndexOfSignature(sig.ZIP64_CENTRAL_DIRECTORY_END);
                if (this.relativeOffsetEndOfZip64CentralDir < 0) {
                    throw new Error("Corrupted zip: can't find the ZIP64 end of central directory");
                }
            }
            this.reader.setIndex(this.relativeOffsetEndOfZip64CentralDir);
            this.checkSignature(sig.ZIP64_CENTRAL_DIRECTORY_END);
            this.readBlockZip64EndOfCentral();
        }

        var expectedEndOfCentralDirOffset = this.centralDirOffset + this.centralDirSize;
        if (this.zip64) {
            expectedEndOfCentralDirOffset += 20; // end of central dir 64 locator
            expectedEndOfCentralDirOffset += 12 /* should not include the leading 12 bytes */ + this.zip64EndOfCentralSize;
        }

        var extraBytes = endOfCentralDirOffset - expectedEndOfCentralDirOffset;

        if (extraBytes > 0) {
            // console.warn(extraBytes, "extra bytes at beginning or within zipfile");
            if (this.isSignature(endOfCentralDirOffset, sig.CENTRAL_FILE_HEADER)) {
                // The offsets seem wrong, but we have something at the specified offset.
                // So… we keep it.
            } else {
                // the offset is wrong, update the "zero" of the reader
                // this happens if data has been prepended (crx files for example)
                this.reader.zero = extraBytes;
            }
        } else if (extraBytes < 0) {
            throw new Error("Corrupted zip: missing " + Math.abs(extraBytes) + " bytes.");
        }
    },
    prepareReader: function(data) {
        this.reader = readerFor(data);
    },
    /**
     * Read a zip file and create ZipEntries.
     * @param {String|ArrayBuffer|Uint8Array|Buffer} data the binary string representing a zip file.
     */
    load: function(data) {
        this.prepareReader(data);
        this.readEndOfCentral();
        this.readCentralDir();
        this.readLocalFiles();
    }
};
// }}} end of ZipEntries
module.exports = ZipEntries;
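
/*
 * Reading sketch: ZipEntries#load above drives the whole parse in three passes,
 * readEndOfCentral(), then readCentralDir(), then readLocalFiles(). A minimal
 * consumer-side usage (assuming the public JSZip#loadAsync API defined elsewhere
 * in this bundle, not this internal class):
 *
 * ```javascript
 * JSZip.loadAsync(binaryZipData).then(function (zip) {
 *     return zip.file("hello.txt").async("string");
 * }).then(function (content) {
 *     console.log(content);
 * });
 * ```
 */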
3777},{"./reader/readerFor":22,"./signature":23,"./support":30,"./utf8":31,"./utils":32,"./zipEntry":34}],34:[function(require,module,exports){3778'use strict';3779var readerFor = require('./reader/readerFor');3780var utils = require('./utils');3781var CompressedObject = require('./compressedObject');3782var crc32fn = require('./crc32');3783var utf8 = require('./utf8');3784var compressions = require('./compressions');3785var support = require('./support');3786
3787var MADE_BY_DOS = 0x00;3788var MADE_BY_UNIX = 0x03;3789
3790/**
3791* Find a compression registered in JSZip.
3792* @param {string} compressionMethod the method magic to find.
3793* @return {Object|null} the JSZip compression object, null if none found.
3794*/
3795var findCompression = function(compressionMethod) {3796for (var method in compressions) {3797if (!compressions.hasOwnProperty(method)) {3798continue;3799}3800if (compressions[method].magic === compressionMethod) {3801return compressions[method];3802}3803}3804return null;3805};3806
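
/*
 * Lookup sketch: findCompression compares the two-byte "magic" read from the
 * central directory with each registered compression. In this build the
 * registered methods are assumed to be STORE ("\x00\x00") and DEFLATE
 * ("\x08\x00"); anything else yields null and a "compression unknown" error
 * in readLocalPart below.
 *
 * ```javascript
 * var deflate = findCompression("\x08\x00"); // assumed DEFLATE magic
 * var unknown = findCompression("\x01\x00"); // null, method not registered
 * ```
 */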
3807// class ZipEntry {{{
3808/**
3809* An entry in the zip file.
3810* @constructor
3811* @param {Object} options Options of the current file.
3812* @param {Object} loadOptions Options for loading the stream.
3813*/
3814function ZipEntry(options, loadOptions) {3815this.options = options;3816this.loadOptions = loadOptions;3817}
3818ZipEntry.prototype = {3819/**3820* say if the file is encrypted.
3821* @return {boolean} true if the file is encrypted, false otherwise.
3822*/
3823isEncrypted: function() {3824// bit 1 is set3825return (this.bitFlag & 0x0001) === 0x0001;3826},3827/**3828* say if the file has utf-8 filename/comment.
3829* @return {boolean} true if the filename/comment is in utf-8, false otherwise.
3830*/
3831useUTF8: function() {3832// bit 11 is set3833return (this.bitFlag & 0x0800) === 0x0800;3834},3835/**3836* Read the local part of a zip file and add the info in this object.
3837* @param {DataReader} reader the reader to use.
3838*/
3839readLocalPart: function(reader) {3840var compression, localExtraFieldsLength;3841
        // we already know everything from the central dir !
        // If the central dir data is wrong, we are doomed.
        // On the bright side, the local part is scary : zip64, data descriptors, both, etc.
        // The less data we get here, the more reliable this should be.
        // Let's skip the whole header and dash to the data !
        reader.skip(22);
        // in some zip created on windows, the filename stored in the central dir contains \ instead of /.
        // Strangely, the filename here is OK.
        // I would love to treat these zip files as corrupted (see http://www.info-zip.org/FAQ.html#backslashes
        // or APPNOTE#4.4.17.1, "All slashes MUST be forward slashes '/'") but there are a lot of bad zip generators...
        // Search "unzip mismatching "local" filename continuing with "central" filename version" on
        // the internet.
        //
        // I think I see the logic here : the central directory is used to display
        // content and the local directory is used to extract the files. Mixing / and \
        // may be used to display \ to windows users and use / when extracting the files.
        // Unfortunately, this also leads to some issues : http://seclists.org/fulldisclosure/2009/Sep/394
        this.fileNameLength = reader.readInt(2);
        localExtraFieldsLength = reader.readInt(2); // can't be sure this will be the same as the central dir
        // the fileName is stored as binary data, the handleUTF8 method will take care of the encoding.
        this.fileName = reader.readData(this.fileNameLength);
        reader.skip(localExtraFieldsLength);

3865if (this.compressedSize === -1 || this.uncompressedSize === -1) {3866throw new Error("Bug or corrupted zip : didn't get enough information from the central directory " + "(compressedSize === -1 || uncompressedSize === -1)");3867}3868
3869compression = findCompression(this.compressionMethod);3870if (compression === null) { // no compression found3871throw new Error("Corrupted zip : compression " + utils.pretty(this.compressionMethod) + " unknown (inner file : " + utils.transformTo("string", this.fileName) + ")");3872}3873this.decompressed = new CompressedObject(this.compressedSize, this.uncompressedSize, this.crc32, compression, reader.readData(this.compressedSize));3874},3875
3876/**3877* Read the central part of a zip file and add the info in this object.
3878* @param {DataReader} reader the reader to use.
3879*/
3880readCentralPart: function(reader) {3881this.versionMadeBy = reader.readInt(2);3882reader.skip(2);3883// this.versionNeeded = reader.readInt(2);3884this.bitFlag = reader.readInt(2);3885this.compressionMethod = reader.readString(2);3886this.date = reader.readDate();3887this.crc32 = reader.readInt(4);3888this.compressedSize = reader.readInt(4);3889this.uncompressedSize = reader.readInt(4);3890var fileNameLength = reader.readInt(2);3891this.extraFieldsLength = reader.readInt(2);3892this.fileCommentLength = reader.readInt(2);3893this.diskNumberStart = reader.readInt(2);3894this.internalFileAttributes = reader.readInt(2);3895this.externalFileAttributes = reader.readInt(4);3896this.localHeaderOffset = reader.readInt(4);3897
3898if (this.isEncrypted()) {3899throw new Error("Encrypted zip are not supported");3900}3901
3902// will be read in the local part, see the comments there3903reader.skip(fileNameLength);3904this.readExtraFields(reader);3905this.parseZIP64ExtraField(reader);3906this.fileComment = reader.readData(this.fileCommentLength);3907},3908
3909/**3910* Parse the external file attributes and get the unix/dos permissions.
3911*/
3912processAttributes: function () {3913this.unixPermissions = null;3914this.dosPermissions = null;3915var madeBy = this.versionMadeBy >> 8;3916
3917// Check if we have the DOS directory flag set.3918// We look for it in the DOS and UNIX permissions3919// but some unknown platform could set it as a compatibility flag.3920this.dir = this.externalFileAttributes & 0x0010 ? true : false;3921
3922if(madeBy === MADE_BY_DOS) {3923// first 6 bits (0 to 5)3924this.dosPermissions = this.externalFileAttributes & 0x3F;3925}3926
3927if(madeBy === MADE_BY_UNIX) {3928this.unixPermissions = (this.externalFileAttributes >> 16) & 0xFFFF;3929// the octal permissions are in (this.unixPermissions & 0x01FF).toString(8);3930}3931
3932// fail safe : if the name ends with a / it probably means a folder3933if (!this.dir && this.fileNameStr.slice(-1) === '/') {3934this.dir = true;3935}3936},3937
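
    /*
     * Illustrative arithmetic (not part of the original code): for an entry
     * created on unix with mode 0644, the mode lives in the high 16 bits of
     * externalFileAttributes, so
     * ((externalFileAttributes >> 16) & 0x01FF).toString(8) === "644".
     * The exact value depends on the archiver that produced the zip.
     */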
3938/**3939* Parse the ZIP64 extra field and merge the info in the current ZipEntry.
3940* @param {DataReader} reader the reader to use.
3941*/
    parseZIP64ExtraField: function(reader) {

        if (!this.extraFields[0x0001]) {
            return;
        }

        // should be something, preparing the extra reader
        var extraReader = readerFor(this.extraFields[0x0001].value);

        // I really hope that these 64-bit integers can fit in 32-bit integers, because js
        // won't let us have more.
        if (this.uncompressedSize === utils.MAX_VALUE_32BITS) {
            this.uncompressedSize = extraReader.readInt(8);
        }
        if (this.compressedSize === utils.MAX_VALUE_32BITS) {
            this.compressedSize = extraReader.readInt(8);
        }
        if (this.localHeaderOffset === utils.MAX_VALUE_32BITS) {
            this.localHeaderOffset = extraReader.readInt(8);
        }
        if (this.diskNumberStart === utils.MAX_VALUE_32BITS) {
            this.diskNumberStart = extraReader.readInt(4);
        }
    },
    /**
     * Read the extra fields of a zip file and add the info in this object.
3968* @param {DataReader} reader the reader to use.
3969*/
3970readExtraFields: function(reader) {3971var end = reader.index + this.extraFieldsLength,3972extraFieldId,3973extraFieldLength,3974extraFieldValue;3975
3976if (!this.extraFields) {3977this.extraFields = {};3978}3979
3980while (reader.index + 4 < end) {3981extraFieldId = reader.readInt(2);3982extraFieldLength = reader.readInt(2);3983extraFieldValue = reader.readData(extraFieldLength);3984
3985this.extraFields[extraFieldId] = {3986id: extraFieldId,3987length: extraFieldLength,3988value: extraFieldValue3989};3990}3991
3992reader.setIndex(end);3993},3994/**3995* Apply an UTF8 transformation if needed.
3996*/
3997handleUTF8: function() {3998var decodeParamType = support.uint8array ? "uint8array" : "array";3999if (this.useUTF8()) {4000this.fileNameStr = utf8.utf8decode(this.fileName);4001this.fileCommentStr = utf8.utf8decode(this.fileComment);4002} else {4003var upath = this.findExtraFieldUnicodePath();4004if (upath !== null) {4005this.fileNameStr = upath;4006} else {4007// ASCII text or unsupported code page4008var fileNameByteArray = utils.transformTo(decodeParamType, this.fileName);4009this.fileNameStr = this.loadOptions.decodeFileName(fileNameByteArray);4010}4011
4012var ucomment = this.findExtraFieldUnicodeComment();4013if (ucomment !== null) {4014this.fileCommentStr = ucomment;4015} else {4016// ASCII text or unsupported code page4017var commentByteArray = utils.transformTo(decodeParamType, this.fileComment);4018this.fileCommentStr = this.loadOptions.decodeFileName(commentByteArray);4019}4020}4021},4022
4023/**4024* Find the unicode path declared in the extra field, if any.
4025* @return {String} the unicode path, null otherwise.
4026*/
4027findExtraFieldUnicodePath: function() {4028var upathField = this.extraFields[0x7075];4029if (upathField) {4030var extraReader = readerFor(upathField.value);4031
4032// wrong version4033if (extraReader.readInt(1) !== 1) {4034return null;4035}4036
4037// the crc of the filename changed, this field is out of date.4038if (crc32fn(this.fileName) !== extraReader.readInt(4)) {4039return null;4040}4041
4042return utf8.utf8decode(extraReader.readData(upathField.length - 5));4043}4044return null;4045},4046
4047/**4048* Find the unicode comment declared in the extra field, if any.
4049* @return {String} the unicode comment, null otherwise.
4050*/
4051findExtraFieldUnicodeComment: function() {4052var ucommentField = this.extraFields[0x6375];4053if (ucommentField) {4054var extraReader = readerFor(ucommentField.value);4055
4056// wrong version4057if (extraReader.readInt(1) !== 1) {4058return null;4059}4060
4061// the crc of the comment changed, this field is out of date.4062if (crc32fn(this.fileComment) !== extraReader.readInt(4)) {4063return null;4064}4065
4066return utf8.utf8decode(extraReader.readData(ucommentField.length - 5));4067}4068return null;4069}4070};4071module.exports = ZipEntry;4072
4073},{"./compressedObject":2,"./compressions":3,"./crc32":4,"./reader/readerFor":22,"./support":30,"./utf8":31,"./utils":32}],35:[function(require,module,exports){4074'use strict';4075
4076var StreamHelper = require('./stream/StreamHelper');4077var DataWorker = require('./stream/DataWorker');4078var utf8 = require('./utf8');4079var CompressedObject = require('./compressedObject');4080var GenericWorker = require('./stream/GenericWorker');4081
4082/**
4083* A simple object representing a file in the zip file.
4084* @constructor
4085* @param {string} name the name of the file
4086* @param {String|ArrayBuffer|Uint8Array|Buffer} data the data
4087* @param {Object} options the options of the file
4088*/
4089var ZipObject = function(name, data, options) {4090this.name = name;4091this.dir = options.dir;4092this.date = options.date;4093this.comment = options.comment;4094this.unixPermissions = options.unixPermissions;4095this.dosPermissions = options.dosPermissions;4096
4097this._data = data;4098this._dataBinary = options.binary;4099// keep only the compression4100this.options = {4101compression : options.compression,4102compressionOptions : options.compressionOptions4103};4104};4105
4106ZipObject.prototype = {4107/**4108* Create an internal stream for the content of this object.
4109* @param {String} type the type of each chunk.
4110* @return StreamHelper the stream.
4111*/
4112internalStream: function (type) {4113var result = null, outputType = "string";4114try {4115if (!type) {4116throw new Error("No output type specified.");4117}4118outputType = type.toLowerCase();4119var askUnicodeString = outputType === "string" || outputType === "text";4120if (outputType === "binarystring" || outputType === "text") {4121outputType = "string";4122}4123result = this._decompressWorker();4124
4125var isUnicodeString = !this._dataBinary;4126
4127if (isUnicodeString && !askUnicodeString) {4128result = result.pipe(new utf8.Utf8EncodeWorker());4129}4130if (!isUnicodeString && askUnicodeString) {4131result = result.pipe(new utf8.Utf8DecodeWorker());4132}4133} catch (e) {4134result = new GenericWorker("error");4135result.error(e);4136}4137
4138return new StreamHelper(result, outputType, "");4139},4140
4141/**4142* Prepare the content in the asked type.
4143* @param {String} type the type of the result.
4144* @param {Function} onUpdate a function to call on each internal update.
4145* @return Promise the promise of the result.
4146*/
4147async: function (type, onUpdate) {4148return this.internalStream(type).accumulate(onUpdate);4149},4150
4151/**4152* Prepare the content as a nodejs stream.
4153* @param {String} type the type of each chunk.
4154* @param {Function} onUpdate a function to call on each internal update.
4155* @return Stream the stream.
4156*/
4157nodeStream: function (type, onUpdate) {4158return this.internalStream(type || "nodebuffer").toNodejsStream(onUpdate);4159},4160
4161/**4162* Return a worker for the compressed content.
4163* @private
4164* @param {Object} compression the compression object to use.
4165* @param {Object} compressionOptions the options to use when compressing.
4166* @return Worker the worker.
4167*/
4168_compressWorker: function (compression, compressionOptions) {4169if (4170this._data instanceof CompressedObject &&4171this._data.compression.magic === compression.magic4172) {4173return this._data.getCompressedWorker();4174} else {4175var result = this._decompressWorker();4176if(!this._dataBinary) {4177result = result.pipe(new utf8.Utf8EncodeWorker());4178}4179return CompressedObject.createWorkerFrom(result, compression, compressionOptions);4180}4181},4182/**4183* Return a worker for the decompressed content.
4184* @private
4185* @return Worker the worker.
4186*/
4187_decompressWorker : function () {4188if (this._data instanceof CompressedObject) {4189return this._data.getContentWorker();4190} else if (this._data instanceof GenericWorker) {4191return this._data;4192} else {4193return new DataWorker(this._data);4194}4195}4196};4197
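
/*
 * Worker pipeline sketch: internalStream() above chains
 * _decompressWorker() -> optional Utf8Encode/DecodeWorker -> StreamHelper, so
 * asking for "string" on binary data decodes utf-8 on the fly. For example,
 * zipObject.internalStream("uint8array").accumulate() is what async("uint8array")
 * does under the hood ("zipObject" is a hypothetical instance name).
 */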
var removedMethods = ["asText", "asBinary", "asNodeBuffer", "asUint8Array", "asArrayBuffer"];
var removedFn = function () {
    throw new Error("This method has been removed in JSZip 3.0, please check the upgrade guide.");
};

for (var i = 0; i < removedMethods.length; i++) {
    ZipObject.prototype[removedMethods[i]] = removedFn;
}
module.exports = ZipObject;
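
/*
 * Migration sketch for the removed synchronous accessors above: in JSZip 3.x
 * the equivalent of the old file.asText() is the promise-based async() API.
 *
 * ```javascript
 * zip.file("readme.txt").async("string").then(function (text) {
 *     console.log(text);
 * });
 * ```
 */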
4208},{"./compressedObject":2,"./stream/DataWorker":27,"./stream/GenericWorker":28,"./stream/StreamHelper":29,"./utf8":31}],36:[function(require,module,exports){4209(function (global){4210'use strict';4211var Mutation = global.MutationObserver || global.WebKitMutationObserver;4212
4213var scheduleDrain;4214
4215{
4216if (Mutation) {4217var called = 0;4218var observer = new Mutation(nextTick);4219var element = global.document.createTextNode('');4220observer.observe(element, {4221characterData: true4222});4223scheduleDrain = function () {4224element.data = (called = ++called % 2);4225};4226} else if (!global.setImmediate && typeof global.MessageChannel !== 'undefined') {4227var channel = new global.MessageChannel();4228channel.port1.onmessage = nextTick;4229scheduleDrain = function () {4230channel.port2.postMessage(0);4231};4232} else if ('document' in global && 'onreadystatechange' in global.document.createElement('script')) {4233scheduleDrain = function () {4234
4235// Create a <script> element; its readystatechange event will be fired asynchronously once it is inserted4236// into the document. Do so, thus queuing up the task. Remember to clean up once it's been called.4237var scriptEl = global.document.createElement('script');4238scriptEl.onreadystatechange = function () {4239nextTick();4240
4241scriptEl.onreadystatechange = null;4242scriptEl.parentNode.removeChild(scriptEl);4243scriptEl = null;4244};4245global.document.documentElement.appendChild(scriptEl);4246};4247} else {4248scheduleDrain = function () {4249setTimeout(nextTick, 0);4250};4251}4252}
4253
var draining;
var queue = [];
// named nextTick for less confusing stack traces
function nextTick() {
  draining = true;
  var i, oldQueue;
  var len = queue.length;
  while (len) {
    oldQueue = queue;
    queue = [];
    i = -1;
    while (++i < len) {
      oldQueue[i]();
    }
    len = queue.length;
  }
  draining = false;
}
4272
4273module.exports = immediate;4274function immediate(task) {4275if (queue.push(task) === 1 && !draining) {4276scheduleDrain();4277}4278}
4279
4280}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})4281},{}],37:[function(require,module,exports){4282'use strict';4283var immediate = require('immediate');4284
4285/* istanbul ignore next */
4286function INTERNAL() {}4287
4288var handlers = {};4289
4290var REJECTED = ['REJECTED'];4291var FULFILLED = ['FULFILLED'];4292var PENDING = ['PENDING'];4293
4294module.exports = Promise;4295
4296function Promise(resolver) {4297if (typeof resolver !== 'function') {4298throw new TypeError('resolver must be a function');4299}4300this.state = PENDING;4301this.queue = [];4302this.outcome = void 0;4303if (resolver !== INTERNAL) {4304safelyResolveThenable(this, resolver);4305}4306}
4307
4308Promise.prototype["finally"] = function (callback) {4309if (typeof callback !== 'function') {4310return this;4311}4312var p = this.constructor;4313return this.then(resolve, reject);4314
4315function resolve(value) {4316function yes () {4317return value;4318}4319return p.resolve(callback()).then(yes);4320}4321function reject(reason) {4322function no () {4323throw reason;4324}4325return p.resolve(callback()).then(no);4326}4327};4328Promise.prototype["catch"] = function (onRejected) {4329return this.then(null, onRejected);4330};4331Promise.prototype.then = function (onFulfilled, onRejected) {4332if (typeof onFulfilled !== 'function' && this.state === FULFILLED ||4333typeof onRejected !== 'function' && this.state === REJECTED) {4334return this;4335}4336var promise = new this.constructor(INTERNAL);4337if (this.state !== PENDING) {4338var resolver = this.state === FULFILLED ? onFulfilled : onRejected;4339unwrap(promise, resolver, this.outcome);4340} else {4341this.queue.push(new QueueItem(promise, onFulfilled, onRejected));4342}4343
4344return promise;4345};4346function QueueItem(promise, onFulfilled, onRejected) {4347this.promise = promise;4348if (typeof onFulfilled === 'function') {4349this.onFulfilled = onFulfilled;4350this.callFulfilled = this.otherCallFulfilled;4351}4352if (typeof onRejected === 'function') {4353this.onRejected = onRejected;4354this.callRejected = this.otherCallRejected;4355}4356}
4357QueueItem.prototype.callFulfilled = function (value) {4358handlers.resolve(this.promise, value);4359};4360QueueItem.prototype.otherCallFulfilled = function (value) {4361unwrap(this.promise, this.onFulfilled, value);4362};4363QueueItem.prototype.callRejected = function (value) {4364handlers.reject(this.promise, value);4365};4366QueueItem.prototype.otherCallRejected = function (value) {4367unwrap(this.promise, this.onRejected, value);4368};4369
4370function unwrap(promise, func, value) {4371immediate(function () {4372var returnValue;4373try {4374returnValue = func(value);4375} catch (e) {4376return handlers.reject(promise, e);4377}4378if (returnValue === promise) {4379handlers.reject(promise, new TypeError('Cannot resolve promise with itself'));4380} else {4381handlers.resolve(promise, returnValue);4382}4383});4384}
4385
4386handlers.resolve = function (self, value) {4387var result = tryCatch(getThen, value);4388if (result.status === 'error') {4389return handlers.reject(self, result.value);4390}4391var thenable = result.value;4392
4393if (thenable) {4394safelyResolveThenable(self, thenable);4395} else {4396self.state = FULFILLED;4397self.outcome = value;4398var i = -1;4399var len = self.queue.length;4400while (++i < len) {4401self.queue[i].callFulfilled(value);4402}4403}4404return self;4405};4406handlers.reject = function (self, error) {4407self.state = REJECTED;4408self.outcome = error;4409var i = -1;4410var len = self.queue.length;4411while (++i < len) {4412self.queue[i].callRejected(error);4413}4414return self;4415};4416
4417function getThen(obj) {4418// Make sure we only access the accessor once as required by the spec4419var then = obj && obj.then;4420if (obj && (typeof obj === 'object' || typeof obj === 'function') && typeof then === 'function') {4421return function appyThen() {4422then.apply(obj, arguments);4423};4424}4425}
4426
4427function safelyResolveThenable(self, thenable) {4428// Either fulfill, reject or reject with error4429var called = false;4430function onError(value) {4431if (called) {4432return;4433}4434called = true;4435handlers.reject(self, value);4436}4437
4438function onSuccess(value) {4439if (called) {4440return;4441}4442called = true;4443handlers.resolve(self, value);4444}4445
4446function tryToUnwrap() {4447thenable(onSuccess, onError);4448}4449
4450var result = tryCatch(tryToUnwrap);4451if (result.status === 'error') {4452onError(result.value);4453}4454}
4455
4456function tryCatch(func, value) {4457var out = {};4458try {4459out.value = func(value);4460out.status = 'success';4461} catch (e) {4462out.status = 'error';4463out.value = e;4464}4465return out;4466}
4467
4468Promise.resolve = resolve;4469function resolve(value) {4470if (value instanceof this) {4471return value;4472}4473return handlers.resolve(new this(INTERNAL), value);4474}
4475
4476Promise.reject = reject;4477function reject(reason) {4478var promise = new this(INTERNAL);4479return handlers.reject(promise, reason);4480}
4481
4482Promise.all = all;4483function all(iterable) {4484var self = this;4485if (Object.prototype.toString.call(iterable) !== '[object Array]') {4486return this.reject(new TypeError('must be an array'));4487}4488
4489var len = iterable.length;4490var called = false;4491if (!len) {4492return this.resolve([]);4493}4494
4495var values = new Array(len);4496var resolved = 0;4497var i = -1;4498var promise = new this(INTERNAL);4499
4500while (++i < len) {4501allResolver(iterable[i], i);4502}4503return promise;4504function allResolver(value, i) {4505self.resolve(value).then(resolveFromAll, function (error) {4506if (!called) {4507called = true;4508handlers.reject(promise, error);4509}4510});4511function resolveFromAll(outValue) {4512values[i] = outValue;4513if (++resolved === len && !called) {4514called = true;4515handlers.resolve(promise, values);4516}4517}4518}4519}
4520
4521Promise.race = race;4522function race(iterable) {4523var self = this;4524if (Object.prototype.toString.call(iterable) !== '[object Array]') {4525return this.reject(new TypeError('must be an array'));4526}4527
4528var len = iterable.length;4529var called = false;4530if (!len) {4531return this.resolve([]);4532}4533
4534var i = -1;4535var promise = new this(INTERNAL);4536
4537while (++i < len) {4538resolver(iterable[i]);4539}4540return promise;4541function resolver(value) {4542self.resolve(value).then(function (response) {4543if (!called) {4544called = true;4545handlers.resolve(promise, response);4546}4547}, function (error) {4548if (!called) {4549called = true;4550handlers.reject(promise, error);4551}4552});4553}4554}
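
/*
 * Example (standard semantics, implemented above): Promise.all resolves with
 * an array of values once every input settles, Promise.race settles with the
 * first input that does.
 *
 * ```javascript
 * Promise.all([Promise.resolve(1), 2]).then(function (values) {
 *     console.log(values); // [1, 2]
 * });
 * ```
 */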
4555
4556},{"immediate":36}],38:[function(require,module,exports){4557// Top level file is just a mixin of submodules & constants
4558'use strict';4559
4560var assign = require('./lib/utils/common').assign;4561
4562var deflate = require('./lib/deflate');4563var inflate = require('./lib/inflate');4564var constants = require('./lib/zlib/constants');4565
4566var pako = {};4567
4568assign(pako, deflate, inflate, constants);4569
4570module.exports = pako;4571
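
/*
 * Round-trip sketch with the public pako API assembled above (illustrative only):
 *
 * ```javascript
 * var compressed = pako.deflate('hello');                // string input is utf-8 encoded
 * var text = pako.inflate(compressed, { to: 'string' }); // "hello"
 * var bytes = pako.inflate(compressed);                  // Uint8Array of utf-8 bytes
 * ```
 */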
4572},{"./lib/deflate":39,"./lib/inflate":40,"./lib/utils/common":41,"./lib/zlib/constants":44}],39:[function(require,module,exports){4573'use strict';4574
4575
4576var zlib_deflate = require('./zlib/deflate');4577var utils = require('./utils/common');4578var strings = require('./utils/strings');4579var msg = require('./zlib/messages');4580var ZStream = require('./zlib/zstream');4581
4582var toString = Object.prototype.toString;4583
4584/* Public constants ==========================================================*/
4585/* ===========================================================================*/
4586
4587var Z_NO_FLUSH = 0;4588var Z_FINISH = 4;4589
4590var Z_OK = 0;4591var Z_STREAM_END = 1;4592var Z_SYNC_FLUSH = 2;4593
4594var Z_DEFAULT_COMPRESSION = -1;4595
4596var Z_DEFAULT_STRATEGY = 0;4597
4598var Z_DEFLATED = 8;4599
4600/* ===========================================================================*/
4601
4602
4603/**
4604* class Deflate
4605*
4606* Generic JS-style wrapper for zlib calls. If you don't need
4607* streaming behaviour - use more simple functions: [[deflate]],
4608* [[deflateRaw]] and [[gzip]].
4609**/
4610
4611/* internal
4612* Deflate.chunks -> Array
4613*
 * Chunks of output data, if [[Deflate#onData]] not overridden.
4615**/
4616
4617/**
4618* Deflate.result -> Uint8Array|Array
4619*
4620* Compressed result, generated by default [[Deflate#onData]]
4621* and [[Deflate#onEnd]] handlers. Filled after you push last chunk
4622* (call [[Deflate#push]] with `Z_FINISH` / `true` param) or if you
4623* push a chunk with explicit flush (call [[Deflate#push]] with
4624* `Z_SYNC_FLUSH` param).
4625**/
4626
4627/**
4628* Deflate.err -> Number
4629*
4630* Error code after deflate finished. 0 (Z_OK) on success.
4631* You will not need it in real life, because deflate errors
4632* are possible only on wrong options or bad `onData` / `onEnd`
4633* custom handlers.
4634**/
4635
4636/**
4637* Deflate.msg -> String
4638*
4639* Error message, if [[Deflate.err]] != 0
4640**/
4641
4642
4643/**
4644* new Deflate(options)
4645* - options (Object): zlib deflate options.
4646*
4647* Creates new deflator instance with specified params. Throws exception
4648* on bad params. Supported options:
4649*
4650* - `level`
4651* - `windowBits`
4652* - `memLevel`
4653* - `strategy`
4654* - `dictionary`
4655*
4656* [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced)
4657* for more information on these.
4658*
4659* Additional options, for internal needs:
4660*
4661* - `chunkSize` - size of generated data chunks (16K by default)
4662* - `raw` (Boolean) - do raw deflate
4663* - `gzip` (Boolean) - create gzip wrapper
4664* - `to` (String) - if equal to 'string', then result will be "binary string"
4665* (each char code [0..255])
4666* - `header` (Object) - custom header for gzip
4667* - `text` (Boolean) - true if compressed data believed to be text
4668* - `time` (Number) - modification time, unix timestamp
4669* - `os` (Number) - operation system code
4670* - `extra` (Array) - array of bytes with extra data (max 65536)
4671* - `name` (String) - file name (binary string)
4672* - `comment` (String) - comment (binary string)
4673* - `hcrc` (Boolean) - true if header crc should be added
4674*
4675* ##### Example:
4676*
4677* ```javascript
4678* var pako = require('pako')
4679* , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9])
4680* , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]);
4681*
4682* var deflate = new pako.Deflate({ level: 3});
4683*
4684* deflate.push(chunk1, false);
4685* deflate.push(chunk2, true); // true -> last chunk
4686*
4687* if (deflate.err) { throw new Error(deflate.err); }
4688*
4689* console.log(deflate.result);
4690* ```
4691**/
4692function Deflate(options) {4693if (!(this instanceof Deflate)) return new Deflate(options);4694
4695this.options = utils.assign({4696level: Z_DEFAULT_COMPRESSION,4697method: Z_DEFLATED,4698chunkSize: 16384,4699windowBits: 15,4700memLevel: 8,4701strategy: Z_DEFAULT_STRATEGY,4702to: ''4703}, options || {});4704
4705var opt = this.options;4706
4707if (opt.raw && (opt.windowBits > 0)) {4708opt.windowBits = -opt.windowBits;4709}4710
4711else if (opt.gzip && (opt.windowBits > 0) && (opt.windowBits < 16)) {4712opt.windowBits += 16;4713}4714
4715this.err = 0; // error code, if happens (0 = Z_OK)4716this.msg = ''; // error message4717this.ended = false; // used to avoid multiple onEnd() calls4718this.chunks = []; // chunks of compressed data4719
4720this.strm = new ZStream();4721this.strm.avail_out = 0;4722
4723var status = zlib_deflate.deflateInit2(4724this.strm,4725opt.level,4726opt.method,4727opt.windowBits,4728opt.memLevel,4729opt.strategy4730);4731
4732if (status !== Z_OK) {4733throw new Error(msg[status]);4734}4735
4736if (opt.header) {4737zlib_deflate.deflateSetHeader(this.strm, opt.header);4738}4739
4740if (opt.dictionary) {4741var dict;4742// Convert data if needed4743if (typeof opt.dictionary === 'string') {4744// If we need to compress text, change encoding to utf8.4745dict = strings.string2buf(opt.dictionary);4746} else if (toString.call(opt.dictionary) === '[object ArrayBuffer]') {4747dict = new Uint8Array(opt.dictionary);4748} else {4749dict = opt.dictionary;4750}4751
4752status = zlib_deflate.deflateSetDictionary(this.strm, dict);4753
4754if (status !== Z_OK) {4755throw new Error(msg[status]);4756}4757
4758this._dict_set = true;4759}4760}
4761
4762/**
4763* Deflate#push(data[, mode]) -> Boolean
4764* - data (Uint8Array|Array|ArrayBuffer|String): input data. Strings will be
4765* converted to utf8 byte sequence.
4766* - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes.
 * See constants. Skipped or `false` means Z_NO_FLUSH, `true` means Z_FINISH.
4768*
4769* Sends input data to deflate pipe, generating [[Deflate#onData]] calls with
4770* new compressed chunks. Returns `true` on success. The last data block must have
4771* mode Z_FINISH (or `true`). That will flush internal pending buffers and call
4772* [[Deflate#onEnd]]. For interim explicit flushes (without ending the stream) you
4773* can use mode Z_SYNC_FLUSH, keeping the compression context.
4774*
 * On failure, calls [[Deflate#onEnd]] with an error code and returns false.
 *
 * We strongly recommend using `Uint8Array` on input for best speed (output
 * array format is detected automatically). Also, don't skip the last param and always
 * use the same type in your code (boolean or number). That will improve JS speed.
4780*
4781* For regular `Array`-s make sure all elements are [0..255].
4782*
4783* ##### Example
4784*
4785* ```javascript
4786* push(chunk, false); // push one of data chunks
4787* ...
4788* push(chunk, true); // push last chunk
4789* ```
4790**/
4791Deflate.prototype.push = function (data, mode) {4792var strm = this.strm;4793var chunkSize = this.options.chunkSize;4794var status, _mode;4795
4796if (this.ended) { return false; }4797
4798_mode = (mode === ~~mode) ? mode : ((mode === true) ? Z_FINISH : Z_NO_FLUSH);4799
4800// Convert data if needed4801if (typeof data === 'string') {4802// If we need to compress text, change encoding to utf8.4803strm.input = strings.string2buf(data);4804} else if (toString.call(data) === '[object ArrayBuffer]') {4805strm.input = new Uint8Array(data);4806} else {4807strm.input = data;4808}4809
4810strm.next_in = 0;4811strm.avail_in = strm.input.length;4812
4813do {4814if (strm.avail_out === 0) {4815strm.output = new utils.Buf8(chunkSize);4816strm.next_out = 0;4817strm.avail_out = chunkSize;4818}4819status = zlib_deflate.deflate(strm, _mode); /* no bad return value */4820
4821if (status !== Z_STREAM_END && status !== Z_OK) {4822this.onEnd(status);4823this.ended = true;4824return false;4825}4826if (strm.avail_out === 0 || (strm.avail_in === 0 && (_mode === Z_FINISH || _mode === Z_SYNC_FLUSH))) {4827if (this.options.to === 'string') {4828this.onData(strings.buf2binstring(utils.shrinkBuf(strm.output, strm.next_out)));4829} else {4830this.onData(utils.shrinkBuf(strm.output, strm.next_out));4831}4832}4833} while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== Z_STREAM_END);4834
4835// Finalize on the last chunk.4836if (_mode === Z_FINISH) {4837status = zlib_deflate.deflateEnd(this.strm);4838this.onEnd(status);4839this.ended = true;4840return status === Z_OK;4841}4842
4843// callback interim results if Z_SYNC_FLUSH.4844if (_mode === Z_SYNC_FLUSH) {4845this.onEnd(Z_OK);4846strm.avail_out = 0;4847return true;4848}4849
4850return true;4851};4852
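
/*
 * Streaming sketch (assumptions: 2 is Z_SYNC_FLUSH as in the constants module,
 * `send` is a hypothetical consumer callback):
 *
 * ```javascript
 * var deflator = new Deflate({ level: 6 });
 * deflator.onData = function (chunk) { send(chunk); };
 * deflator.push(part1, 2);    // flush interim output, keep the stream open
 * deflator.push(part2, true); // true === Z_FINISH, ends the stream
 * ```
 */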
4853
4854/**
4855* Deflate#onData(chunk) -> Void
 * - chunk (Uint8Array|Array|String): output data. Type of array depends
4857* on js engine support. When string output requested, each chunk
4858* will be string.
4859*
 * By default, stores data blocks in the `chunks[]` property and glues
 * them together in `onEnd`. Override this handler if you need another behaviour.
4862**/
4863Deflate.prototype.onData = function (chunk) {4864this.chunks.push(chunk);4865};4866
4867
4868/**
4869* Deflate#onEnd(status) -> Void
4870* - status (Number): deflate status. 0 (Z_OK) on success,
4871* other if not.
4872*
4873* Called once after you tell deflate that the input stream is
4874* complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH)
4875* or if an error happened. By default - join collected chunks,
4876* free memory and fill `results` / `err` properties.
4877**/
4878Deflate.prototype.onEnd = function (status) {4879// On success - join4880if (status === Z_OK) {4881if (this.options.to === 'string') {4882this.result = this.chunks.join('');4883} else {4884this.result = utils.flattenChunks(this.chunks);4885}4886}4887this.chunks = [];4888this.err = status;4889this.msg = this.strm.msg;4890};4891
4892
4893/**
4894* deflate(data[, options]) -> Uint8Array|Array|String
4895* - data (Uint8Array|Array|String): input data to compress.
4896* - options (Object): zlib deflate options.
4897*
4898* Compress `data` with deflate algorithm and `options`.
4899*
4900* Supported options are:
4901*
4902* - level
4903* - windowBits
4904* - memLevel
4905* - strategy
4906* - dictionary
4907*
4908* [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced)
4909* for more information on these.
4910*
4911* Sugar (options):
4912*
4913* - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify
4914* negative windowBits implicitly.
4915* - `to` (String) - if equal to 'string', then result will be "binary string"
4916* (each char code [0..255])
4917*
4918* ##### Example:
4919*
4920* ```javascript
4921* var pako = require('pako')
4922* , data = Uint8Array([1,2,3,4,5,6,7,8,9]);
4923*
4924* console.log(pako.deflate(data));
4925* ```
4926**/
4927function deflate(input, options) {4928var deflator = new Deflate(options);4929
4930deflator.push(input, true);4931
  // That will never happen if you don't cheat with options :)
  if (deflator.err) { throw deflator.msg || msg[deflator.err]; }

4935return deflator.result;4936}
4937
4938
4939/**
4940* deflateRaw(data[, options]) -> Uint8Array|Array|String
4941* - data (Uint8Array|Array|String): input data to compress.
4942* - options (Object): zlib deflate options.
4943*
4944* The same as [[deflate]], but creates raw data, without wrapper
4945* (header and adler32 crc).
4946**/
4947function deflateRaw(input, options) {4948options = options || {};4949options.raw = true;4950return deflate(input, options);4951}
4952
4953
4954/**
4955* gzip(data[, options]) -> Uint8Array|Array|String
4956* - data (Uint8Array|Array|String): input data to compress.
4957* - options (Object): zlib deflate options.
4958*
4959* The same as [[deflate]], but create gzip wrapper instead of
4960* deflate one.
4961**/
4962function gzip(input, options) {4963options = options || {};4964options.gzip = true;4965return deflate(input, options);4966}
4967
4968
4969exports.Deflate = Deflate;4970exports.deflate = deflate;4971exports.deflateRaw = deflateRaw;4972exports.gzip = gzip;4973
4974},{"./utils/common":41,"./utils/strings":42,"./zlib/deflate":46,"./zlib/messages":51,"./zlib/zstream":53}],40:[function(require,module,exports){4975'use strict';4976
4977
4978var zlib_inflate = require('./zlib/inflate');4979var utils = require('./utils/common');4980var strings = require('./utils/strings');4981var c = require('./zlib/constants');4982var msg = require('./zlib/messages');4983var ZStream = require('./zlib/zstream');4984var GZheader = require('./zlib/gzheader');4985
4986var toString = Object.prototype.toString;4987
4988/**
4989* class Inflate
4990*
4991* Generic JS-style wrapper for zlib calls. If you don't need
4992* streaming behaviour - use more simple functions: [[inflate]]
4993* and [[inflateRaw]].
4994**/
4995
4996/* internal
4997* inflate.chunks -> Array
4998*
 * Chunks of output data, if [[Inflate#onData]] not overridden.
5000**/
5001
5002/**
5003* Inflate.result -> Uint8Array|Array|String
5004*
5005* Uncompressed result, generated by default [[Inflate#onData]]
5006* and [[Inflate#onEnd]] handlers. Filled after you push last chunk
5007* (call [[Inflate#push]] with `Z_FINISH` / `true` param) or if you
5008* push a chunk with explicit flush (call [[Inflate#push]] with
5009* `Z_SYNC_FLUSH` param).
5010**/
5011
5012/**
5013* Inflate.err -> Number
5014*
5015* Error code after inflate finished. 0 (Z_OK) on success.
5016* Should be checked if broken data possible.
5017**/
5018
5019/**
5020* Inflate.msg -> String
5021*
5022* Error message, if [[Inflate.err]] != 0
5023**/
5024
5025
5026/**
5027* new Inflate(options)
5028* - options (Object): zlib inflate options.
5029*
5030* Creates new inflator instance with specified params. Throws exception
5031* on bad params. Supported options:
5032*
5033* - `windowBits`
5034* - `dictionary`
5035*
5036* [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced)
5037* for more information on these.
5038*
5039* Additional options, for internal needs:
5040*
5041* - `chunkSize` - size of generated data chunks (16K by default)
5042* - `raw` (Boolean) - do raw inflate
5043* - `to` (String) - if equal to 'string', then result will be converted
5044* from utf8 to utf16 (javascript) string. When string output requested,
5045* chunk length can differ from `chunkSize`, depending on content.
5046*
5047* By default, when no options set, autodetect deflate/gzip data format via
5048* wrapper header.
5049*
5050* ##### Example:
5051*
5052* ```javascript
5053* var pako = require('pako')
5054* , chunk1 = Uint8Array([1,2,3,4,5,6,7,8,9])
5055* , chunk2 = Uint8Array([10,11,12,13,14,15,16,17,18,19]);
5056*
5057* var inflate = new pako.Inflate({ level: 3});
5058*
5059* inflate.push(chunk1, false);
5060* inflate.push(chunk2, true); // true -> last chunk
5061*
5062* if (inflate.err) { throw new Error(inflate.err); }
5063*
5064* console.log(inflate.result);
5065* ```
5066**/
5067function Inflate(options) {5068if (!(this instanceof Inflate)) return new Inflate(options);5069
5070this.options = utils.assign({5071chunkSize: 16384,5072windowBits: 0,5073to: ''5074}, options || {});5075
5076var opt = this.options;5077
5078// Force window size for `raw` data, if not set directly,5079// because we have no header for autodetect.5080if (opt.raw && (opt.windowBits >= 0) && (opt.windowBits < 16)) {5081opt.windowBits = -opt.windowBits;5082if (opt.windowBits === 0) { opt.windowBits = -15; }5083}5084
5085// If `windowBits` not defined (and mode not raw) - set autodetect flag for gzip/deflate5086if ((opt.windowBits >= 0) && (opt.windowBits < 16) &&5087!(options && options.windowBits)) {5088opt.windowBits += 32;5089}5090
5091// Gzip header has no info about windows size, we can do autodetect only5092// for deflate. So, if window size not set, force it to max when gzip possible5093if ((opt.windowBits > 15) && (opt.windowBits < 48)) {5094// bit 3 (16) -> gzipped data5095// bit 4 (32) -> autodetect gzip/deflate5096if ((opt.windowBits & 15) === 0) {5097opt.windowBits |= 15;5098}5099}5100
5101this.err = 0; // error code, if happens (0 = Z_OK)5102this.msg = ''; // error message5103this.ended = false; // used to avoid multiple onEnd() calls5104this.chunks = []; // chunks of compressed data5105
5106this.strm = new ZStream();5107this.strm.avail_out = 0;5108
5109var status = zlib_inflate.inflateInit2(5110this.strm,5111opt.windowBits5112);5113
5114if (status !== c.Z_OK) {5115throw new Error(msg[status]);5116}5117
5118this.header = new GZheader();5119
5120zlib_inflate.inflateGetHeader(this.strm, this.header);5121}
5122
5123/**
5124* Inflate#push(data[, mode]) -> Boolean
5125* - data (Uint8Array|Array|ArrayBuffer|String): input data
5126* - mode (Number|Boolean): 0..6 for corresponding Z_NO_FLUSH..Z_TREE modes.
 * See constants. Skipped or `false` means Z_NO_FLUSH, `true` means Z_FINISH.
5128*
5129* Sends input data to inflate pipe, generating [[Inflate#onData]] calls with
5130* new output chunks. Returns `true` on success. The last data block must have
5131* mode Z_FINISH (or `true`). That will flush internal pending buffers and call
5132* [[Inflate#onEnd]]. For interim explicit flushes (without ending the stream) you
5133* can use mode Z_SYNC_FLUSH, keeping the decompression context.
5134*
 * On failure, calls [[Inflate#onEnd]] with an error code and returns false.
 *
 * We strongly recommend using `Uint8Array` on input for best speed (output
 * format is detected automatically). Also, don't skip the last param and always
 * use the same type in your code (boolean or number). That will improve JS speed.
5140*
5141* For regular `Array`-s make sure all elements are [0..255].
5142*
5143* ##### Example
5144*
5145* ```javascript
5146* push(chunk, false); // push one of data chunks
5147* ...
5148* push(chunk, true); // push last chunk
5149* ```
5150**/
5151Inflate.prototype.push = function (data, mode) {5152var strm = this.strm;5153var chunkSize = this.options.chunkSize;5154var dictionary = this.options.dictionary;5155var status, _mode;5156var next_out_utf8, tail, utf8str;5157var dict;5158
5159// Flag to properly process Z_BUF_ERROR on testing inflate call5160// when we check that all output data was flushed.5161var allowBufError = false;5162
5163if (this.ended) { return false; }5164_mode = (mode === ~~mode) ? mode : ((mode === true) ? c.Z_FINISH : c.Z_NO_FLUSH);5165
5166// Convert data if needed5167if (typeof data === 'string') {5168// Only binary strings can be decompressed on practice5169strm.input = strings.binstring2buf(data);5170} else if (toString.call(data) === '[object ArrayBuffer]') {5171strm.input = new Uint8Array(data);5172} else {5173strm.input = data;5174}5175
5176strm.next_in = 0;5177strm.avail_in = strm.input.length;5178
5179do {5180if (strm.avail_out === 0) {5181strm.output = new utils.Buf8(chunkSize);5182strm.next_out = 0;5183strm.avail_out = chunkSize;5184}5185
5186status = zlib_inflate.inflate(strm, c.Z_NO_FLUSH); /* no bad return value */5187
5188if (status === c.Z_NEED_DICT && dictionary) {5189// Convert data if needed5190if (typeof dictionary === 'string') {5191dict = strings.string2buf(dictionary);5192} else if (toString.call(dictionary) === '[object ArrayBuffer]') {5193dict = new Uint8Array(dictionary);5194} else {5195dict = dictionary;5196}5197
5198status = zlib_inflate.inflateSetDictionary(this.strm, dict);5199
5200}5201
5202if (status === c.Z_BUF_ERROR && allowBufError === true) {5203status = c.Z_OK;5204allowBufError = false;5205}5206
5207if (status !== c.Z_STREAM_END && status !== c.Z_OK) {5208this.onEnd(status);5209this.ended = true;5210return false;5211}5212
5213if (strm.next_out) {5214if (strm.avail_out === 0 || status === c.Z_STREAM_END || (strm.avail_in === 0 && (_mode === c.Z_FINISH || _mode === c.Z_SYNC_FLUSH))) {5215
5216if (this.options.to === 'string') {5217
5218next_out_utf8 = strings.utf8border(strm.output, strm.next_out);5219
5220tail = strm.next_out - next_out_utf8;5221utf8str = strings.buf2string(strm.output, next_out_utf8);5222
5223// move tail5224strm.next_out = tail;5225strm.avail_out = chunkSize - tail;5226if (tail) { utils.arraySet(strm.output, strm.output, next_out_utf8, tail, 0); }5227
5228this.onData(utf8str);5229
5230} else {5231this.onData(utils.shrinkBuf(strm.output, strm.next_out));5232}5233}5234}5235
5236// When no more input data, we should check that internal inflate buffers5237// are flushed. The only way to do it when avail_out = 0 - run one more5238// inflate pass. But if output data not exists, inflate return Z_BUF_ERROR.5239// Here we set flag to process this error properly.5240//5241// NOTE. Deflate does not return error in this case and does not needs such5242// logic.5243if (strm.avail_in === 0 && strm.avail_out === 0) {5244allowBufError = true;5245}5246
5247} while ((strm.avail_in > 0 || strm.avail_out === 0) && status !== c.Z_STREAM_END);5248
5249if (status === c.Z_STREAM_END) {5250_mode = c.Z_FINISH;5251}5252
5253// Finalize on the last chunk.5254if (_mode === c.Z_FINISH) {5255status = zlib_inflate.inflateEnd(this.strm);5256this.onEnd(status);5257this.ended = true;5258return status === c.Z_OK;5259}5260
5261// callback interim results if Z_SYNC_FLUSH.5262if (_mode === c.Z_SYNC_FLUSH) {5263this.onEnd(c.Z_OK);5264strm.avail_out = 0;5265return true;5266}5267
5268return true;5269};5270
5271
5272/**
5273* Inflate#onData(chunk) -> Void
 * - chunk (Uint8Array|Array|String): output data. Type of array depends
5275* on js engine support. When string output requested, each chunk
5276* will be string.
5277*
 * By default, stores data blocks in the `chunks[]` property and glues
 * them together in `onEnd`. Override this handler if you need another behaviour.
5280**/
5281Inflate.prototype.onData = function (chunk) {5282this.chunks.push(chunk);5283};5284
5285
5286/**
5287* Inflate#onEnd(status) -> Void
5288* - status (Number): inflate status. 0 (Z_OK) on success,
5289* other if not.
5290*
5291* Called either after you tell inflate that the input stream is
5292* complete (Z_FINISH) or should be flushed (Z_SYNC_FLUSH)
5293* or if an error happened. By default - join collected chunks,
5294* free memory and fill `results` / `err` properties.
5295**/
Inflate.prototype.onEnd = function (status) {
  // On success - join
  if (status === c.Z_OK) {
    if (this.options.to === 'string') {
      // Glue & convert here, until we teach pako to send
      // utf8 aligned strings to onData
      this.result = this.chunks.join('');
    } else {
      this.result = utils.flattenChunks(this.chunks);
    }
  }
  this.chunks = [];
  this.err = status;
  this.msg = this.strm.msg;
};

5312
5313/**
5314* inflate(data[, options]) -> Uint8Array|Array|String
5315* - data (Uint8Array|Array|String): input data to decompress.
5316* - options (Object): zlib inflate options.
5317*
5318* Decompress `data` with inflate/ungzip and `options`. Autodetect
5319* format via wrapper header by default. That's why we don't provide
5320* separate `ungzip` method.
5321*
5322* Supported options are:
5323*
5324* - windowBits
5325*
5326* [http://zlib.net/manual.html#Advanced](http://zlib.net/manual.html#Advanced)
5327* for more information.
5328*
5329* Sugar (options):
5330*
5331* - `raw` (Boolean) - say that we work with raw stream, if you don't wish to specify
5332* negative windowBits implicitly.
5333* - `to` (String) - if equal to 'string', then result will be converted
5334* from utf8 to utf16 (javascript) string. When string output requested,
5335* chunk length can differ from `chunkSize`, depending on content.
5336*
5337*
5338* ##### Example:
5339*
5340* ```javascript
5341* var pako = require('pako')
5342* , input = pako.deflate([1,2,3,4,5,6,7,8,9])
5343* , output;
5344*
5345* try {
5346* output = pako.inflate(input);
5347* } catch (err)
5348* console.log(err);
5349* }
5350* ```
5351**/
5352function inflate(input, options) {5353var inflator = new Inflate(options);5354
5355inflator.push(input, true);5356
  // That will never happen if you don't cheat with options :)
  if (inflator.err) { throw inflator.msg || msg[inflator.err]; }

5360return inflator.result;5361}
5362
5363
5364/**
5365* inflateRaw(data[, options]) -> Uint8Array|Array|String
5366* - data (Uint8Array|Array|String): input data to decompress.
5367* - options (Object): zlib inflate options.
5368*
5369* The same as [[inflate]], but creates raw data, without wrapper
5370* (header and adler32 crc).
5371**/
5372function inflateRaw(input, options) {5373options = options || {};5374options.raw = true;5375return inflate(input, options);5376}
5377
5378
5379/**
5380* ungzip(data[, options]) -> Uint8Array|Array|String
5381* - data (Uint8Array|Array|String): input data to decompress.
5382* - options (Object): zlib inflate options.
5383*
5384* Just shortcut to [[inflate]], because it autodetects format
 * by header content. Done for convenience.
5386**/
5387
5388
5389exports.Inflate = Inflate;5390exports.inflate = inflate;5391exports.inflateRaw = inflateRaw;5392exports.ungzip = inflate;5393
5394},{"./utils/common":41,"./utils/strings":42,"./zlib/constants":44,"./zlib/gzheader":47,"./zlib/inflate":49,"./zlib/messages":51,"./zlib/zstream":53}],41:[function(require,module,exports){5395'use strict';5396
5397
5398var TYPED_OK = (typeof Uint8Array !== 'undefined') &&5399(typeof Uint16Array !== 'undefined') &&5400(typeof Int32Array !== 'undefined');5401
5402
5403exports.assign = function (obj /*from1, from2, from3, ...*/) {5404var sources = Array.prototype.slice.call(arguments, 1);5405while (sources.length) {5406var source = sources.shift();5407if (!source) { continue; }5408
5409if (typeof source !== 'object') {5410throw new TypeError(source + 'must be non-object');5411}5412
5413for (var p in source) {5414if (source.hasOwnProperty(p)) {5415obj[p] = source[p];5416}5417}5418}5419
5420return obj;5421};5422
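
// Example: assign({}, { a: 1 }, null, { b: 2 }) returns { a: 1, b: 2 };
// falsy sources are skipped and non-object sources throw a TypeError.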
5423
5424// reduce buffer size, avoiding mem copy
5425exports.shrinkBuf = function (buf, size) {5426if (buf.length === size) { return buf; }5427if (buf.subarray) { return buf.subarray(0, size); }5428buf.length = size;5429return buf;5430};5431
5432
5433var fnTyped = {5434arraySet: function (dest, src, src_offs, len, dest_offs) {5435if (src.subarray && dest.subarray) {5436dest.set(src.subarray(src_offs, src_offs + len), dest_offs);5437return;5438}5439// Fallback to ordinary array5440for (var i = 0; i < len; i++) {5441dest[dest_offs + i] = src[src_offs + i];5442}5443},5444// Join array of chunks to single array.5445flattenChunks: function (chunks) {5446var i, l, len, pos, chunk, result;5447
5448// calculate data length5449len = 0;5450for (i = 0, l = chunks.length; i < l; i++) {5451len += chunks[i].length;5452}5453
5454// join chunks5455result = new Uint8Array(len);5456pos = 0;5457for (i = 0, l = chunks.length; i < l; i++) {5458chunk = chunks[i];5459result.set(chunk, pos);5460pos += chunk.length;5461}5462
5463return result;5464}5465};5466
5467var fnUntyped = {5468arraySet: function (dest, src, src_offs, len, dest_offs) {5469for (var i = 0; i < len; i++) {5470dest[dest_offs + i] = src[src_offs + i];5471}5472},5473// Join array of chunks to single array.5474flattenChunks: function (chunks) {5475return [].concat.apply([], chunks);5476}5477};5478
5479
5480// Enable/Disable typed arrays use, for testing
5481//
5482exports.setTyped = function (on) {5483if (on) {5484exports.Buf8 = Uint8Array;5485exports.Buf16 = Uint16Array;5486exports.Buf32 = Int32Array;5487exports.assign(exports, fnTyped);5488} else {5489exports.Buf8 = Array;5490exports.Buf16 = Array;5491exports.Buf32 = Array;5492exports.assign(exports, fnUntyped);5493}5494};5495
5496exports.setTyped(TYPED_OK);5497
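
/*
 * After setTyped(true), Buf8/Buf16/Buf32 are the typed-array constructors, so
 * `new utils.Buf8(16)` is a Uint8Array; after setTyped(false) the same call
 * returns a plain Array of length 16 (fallback for very old engines).
 */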
5498},{}],42:[function(require,module,exports){5499// String encode/decode helpers
5500'use strict';5501
5502
5503var utils = require('./common');5504
5505
5506// Quick check if we can use fast array to bin string conversion
5507//
5508// - apply(Array) can fail on Android 2.2
// - apply(Uint8Array) can fail on iOS 5.1 Safari
5510//
5511var STR_APPLY_OK = true;5512var STR_APPLY_UIA_OK = true;5513
5514try { String.fromCharCode.apply(null, [ 0 ]); } catch (__) { STR_APPLY_OK = false; }5515try { String.fromCharCode.apply(null, new Uint8Array(1)); } catch (__) { STR_APPLY_UIA_OK = false; }5516
5517
5518// Table with utf8 lengths (calculated by first byte of sequence)
5519// Note, that 5 & 6-byte values and some 4-byte values can not be represented in JS,
5520// because max possible codepoint is 0x10ffff
5521var _utf8len = new utils.Buf8(256);5522for (var q = 0; q < 256; q++) {5523_utf8len[q] = (q >= 252 ? 6 : q >= 248 ? 5 : q >= 240 ? 4 : q >= 224 ? 3 : q >= 192 ? 2 : 1);5524}
5525_utf8len[254] = _utf8len[254] = 1; // Invalid sequence start5526
5527
5528// convert string to array (typed, when possible)
5529exports.string2buf = function (str) {5530var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0;5531
5532// count binary size5533for (m_pos = 0; m_pos < str_len; m_pos++) {5534c = str.charCodeAt(m_pos);5535if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) {5536c2 = str.charCodeAt(m_pos + 1);5537if ((c2 & 0xfc00) === 0xdc00) {5538c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);5539m_pos++;5540}5541}5542buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4;5543}5544
5545// allocate buffer5546buf = new utils.Buf8(buf_len);5547
5548// convert5549for (i = 0, m_pos = 0; i < buf_len; m_pos++) {5550c = str.charCodeAt(m_pos);5551if ((c & 0xfc00) === 0xd800 && (m_pos + 1 < str_len)) {5552c2 = str.charCodeAt(m_pos + 1);5553if ((c2 & 0xfc00) === 0xdc00) {5554c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);5555m_pos++;5556}5557}5558if (c < 0x80) {5559/* one byte */5560buf[i++] = c;5561} else if (c < 0x800) {5562/* two bytes */5563buf[i++] = 0xC0 | (c >>> 6);5564buf[i++] = 0x80 | (c & 0x3f);5565} else if (c < 0x10000) {5566/* three bytes */5567buf[i++] = 0xE0 | (c >>> 12);5568buf[i++] = 0x80 | (c >>> 6 & 0x3f);5569buf[i++] = 0x80 | (c & 0x3f);5570} else {5571/* four bytes */5572buf[i++] = 0xf0 | (c >>> 18);5573buf[i++] = 0x80 | (c >>> 12 & 0x3f);5574buf[i++] = 0x80 | (c >>> 6 & 0x3f);5575buf[i++] = 0x80 | (c & 0x3f);5576}5577}5578
5579return buf;5580};5581
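
// Example: string2buf("é") produces the two utf-8 bytes [0xC3, 0xA9], and a
// surrogate pair such as "\uD83D\uDE00" (U+1F600) becomes the four bytes
// [0xF0, 0x9F, 0x98, 0x80].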
// Helper (used in 2 places)
function buf2binstring(buf, len) {
  // use fallback for big arrays to avoid stack overflow
  if (len < 65537) {
    if ((buf.subarray && STR_APPLY_UIA_OK) || (!buf.subarray && STR_APPLY_OK)) {
      return String.fromCharCode.apply(null, utils.shrinkBuf(buf, len));
    }
  }

  var result = '';
  for (var i = 0; i < len; i++) {
    result += String.fromCharCode(buf[i]);
  }
  return result;
}


// Convert byte array to binary string
exports.buf2binstring = function (buf) {
  return buf2binstring(buf, buf.length);
};

// Convert binary string (typed, when possible)
exports.binstring2buf = function (str) {
  var buf = new utils.Buf8(str.length);
  for (var i = 0, len = buf.length; i < len; i++) {
    buf[i] = str.charCodeAt(i);
  }
  return buf;
};

// convert array to string
exports.buf2string = function (buf, max) {
  var i, out, c, c_len;
  var len = max || buf.length;

  // Reserve max possible length (2 words per char)
  // NB: for unknown reasons, Array is significantly faster for
  //     String.fromCharCode.apply than Uint16Array.
  var utf16buf = new Array(len * 2);

  for (out = 0, i = 0; i < len;) {
    c = buf[i++];
    // quick process ascii
    if (c < 0x80) { utf16buf[out++] = c; continue; }

    c_len = _utf8len[c];
    // skip 5 & 6 byte codes
    if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len - 1; continue; }

    // apply mask on first byte
    c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07;
    // join the rest
    while (c_len > 1 && i < len) {
      c = (c << 6) | (buf[i++] & 0x3f);
      c_len--;
    }

    // terminated by end of string?
    if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; }

    if (c < 0x10000) {
      utf16buf[out++] = c;
    } else {
      c -= 0x10000;
      utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff);
      utf16buf[out++] = 0xdc00 | (c & 0x3ff);
    }
  }

  return buf2binstring(utf16buf, out);
};
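// Illustrative sketch (not part of the original module): round-tripping a
// string through string2buf / buf2string. The helper below is hypothetical
// and never invoked.
function __example_buf2string() {
  var bytes = exports.string2buf('héllo'); // UTF-8 encode
  var text = exports.buf2string(bytes);    // UTF-8 decode
  return text === 'héllo';                 // true
}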

// Calculate the max possible position in a utf8 buffer
// that will not break a sequence. If that's not possible
// (very small limits), return the max size as is.
//
// buf[] - utf8 bytes array
// max   - length limit (mandatory);
exports.utf8border = function (buf, max) {
  var pos;

  max = max || buf.length;
  if (max > buf.length) { max = buf.length; }

  // go back from the last position, until the start of a sequence is found
  pos = max - 1;
  while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; }

  // Edge case - very small and broken sequence,
  // return max, because we should return something anyway.
  if (pos < 0) { return max; }

  // If we came to the start of the buffer - that means the buffer is too small,
  // return max too.
  if (pos === 0) { return max; }

  return (pos + _utf8len[buf[pos]] > max) ? pos : max;
};
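// Illustrative sketch (not part of the original module): utf8border picks a
// cut point that does not split a multi-byte sequence, which lets streaming
// consumers avoid emitting half a character. The helper is never invoked.
function __example_utf8border() {
  var bytes = exports.string2buf('aé');    // [0x61, 0xC3, 0xA9]
  // Cutting at 2 would split the 2-byte "é" sequence, so the safe border is 1.
  return exports.utf8border(bytes, 2);     // 1
}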
},{"./common":41}],43:[function(require,module,exports){
'use strict';

// Note: adler32 takes 12% for level 0 and 2% for level 6.
// It isn't worth adding the extra optimizations of the original.
// Small size is preferable.

5692// (C) 1995-2013 Jean-loup Gailly and Mark Adler
5693// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
5694//
5695// This software is provided 'as-is', without any express or implied
5696// warranty. In no event will the authors be held liable for any damages
5697// arising from the use of this software.
5698//
5699// Permission is granted to anyone to use this software for any purpose,
5700// including commercial applications, and to alter it and redistribute it
5701// freely, subject to the following restrictions:
5702//
5703// 1. The origin of this software must not be misrepresented; you must not
5704// claim that you wrote the original software. If you use this software
5705// in a product, an acknowledgment in the product documentation would be
5706// appreciated but is not required.
5707// 2. Altered source versions must be plainly marked as such, and must not be
5708// misrepresented as being the original software.
5709// 3. This notice may not be removed or altered from any source distribution.

function adler32(adler, buf, len, pos) {
  var s1 = (adler & 0xffff) |0,
      s2 = ((adler >>> 16) & 0xffff) |0,
      n = 0;

  while (len !== 0) {
    // Set the limit to roughly half of 5552, to keep
    // s2 in 31 bits, because we force signed ints.
    // Otherwise %= will fail.
    n = len > 2000 ? 2000 : len;
    len -= n;

    do {
      s1 = (s1 + buf[pos++]) |0;
      s2 = (s2 + s1) |0;
    } while (--n);

    s1 %= 65521;
    s2 %= 65521;
  }

  return (s1 | (s2 << 16)) |0;
}


module.exports = adler32;
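// Illustrative sketch (not part of the original module): computing the
// Adler-32 checksum of a small buffer. Per RFC 1950 the running checksum
// starts at 1. The helper below is hypothetical and never invoked.
function __example_adler32() {
  var bytes = [104, 101, 108, 108, 111];        // "hello"
  var checksum = adler32(1, bytes, bytes.length, 0);
  return (checksum >>> 0).toString(16);         // "62c0215" (s2 = 0x062C, s1 = 0x0215)
}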
},{}],44:[function(require,module,exports){
'use strict';

5741// (C) 1995-2013 Jean-loup Gailly and Mark Adler
5742// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
5743//
5744// This software is provided 'as-is', without any express or implied
5745// warranty. In no event will the authors be held liable for any damages
5746// arising from the use of this software.
5747//
5748// Permission is granted to anyone to use this software for any purpose,
5749// including commercial applications, and to alter it and redistribute it
5750// freely, subject to the following restrictions:
5751//
5752// 1. The origin of this software must not be misrepresented; you must not
5753// claim that you wrote the original software. If you use this software
5754// in a product, an acknowledgment in the product documentation would be
5755// appreciated but is not required.
5756// 2. Altered source versions must be plainly marked as such, and must not be
5757// misrepresented as being the original software.
5758// 3. This notice may not be removed or altered from any source distribution.

module.exports = {

  /* Allowed flush values; see deflate() and inflate() below for details */
  Z_NO_FLUSH:         0,
  Z_PARTIAL_FLUSH:    1,
  Z_SYNC_FLUSH:       2,
  Z_FULL_FLUSH:       3,
  Z_FINISH:           4,
  Z_BLOCK:            5,
  Z_TREES:            6,

  /* Return codes for the compression/decompression functions. Negative values
   * are errors, positive values are used for special but normal events.
   */
  Z_OK:               0,
  Z_STREAM_END:       1,
  Z_NEED_DICT:        2,
  Z_ERRNO:           -1,
  Z_STREAM_ERROR:    -2,
  Z_DATA_ERROR:      -3,
  //Z_MEM_ERROR:     -4,
  Z_BUF_ERROR:       -5,
  //Z_VERSION_ERROR: -6,

  /* compression levels */
  Z_NO_COMPRESSION:         0,
  Z_BEST_SPEED:             1,
  Z_BEST_COMPRESSION:       9,
  Z_DEFAULT_COMPRESSION:   -1,


  Z_FILTERED:               1,
  Z_HUFFMAN_ONLY:           2,
  Z_RLE:                    3,
  Z_FIXED:                  4,
  Z_DEFAULT_STRATEGY:       0,

  /* Possible values of the data_type field (though see inflate()) */
  Z_BINARY:                 0,
  Z_TEXT:                   1,
  //Z_ASCII:                1, // = Z_TEXT (deprecated)
  Z_UNKNOWN:                2,

  /* The deflate compression method */
  Z_DEFLATED:               8
  //Z_NULL:                 null // Use -1 or null inline, depending on var type
};
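// Illustrative sketch (not part of the original module): how callers of the
// deflate/inflate wrappers typically reference these constants. The options
// object shown is hypothetical.
function __example_constants() {
  var c = module.exports;
  return {
    level: c.Z_BEST_COMPRESSION,    // 9
    strategy: c.Z_DEFAULT_STRATEGY, // 0
    flushMode: c.Z_FINISH           // 4, used on the last deflate() call
  };
}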
},{}],45:[function(require,module,exports){
'use strict';

// Note: we can't get a significant speed boost here.
// So write code to minimize size - no pregenerated tables
// and array tools dependencies.

5815// (C) 1995-2013 Jean-loup Gailly and Mark Adler
5816// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
5817//
5818// This software is provided 'as-is', without any express or implied
5819// warranty. In no event will the authors be held liable for any damages
5820// arising from the use of this software.
5821//
5822// Permission is granted to anyone to use this software for any purpose,
5823// including commercial applications, and to alter it and redistribute it
5824// freely, subject to the following restrictions:
5825//
5826// 1. The origin of this software must not be misrepresented; you must not
5827// claim that you wrote the original software. If you use this software
5828// in a product, an acknowledgment in the product documentation would be
5829// appreciated but is not required.
5830// 2. Altered source versions must be plainly marked as such, and must not be
5831// misrepresented as being the original software.
5832// 3. This notice may not be removed or altered from any source distribution.

// Use an ordinary array, since untyped gives no boost here
function makeTable() {
  var c, table = [];

  for (var n = 0; n < 256; n++) {
    c = n;
    for (var k = 0; k < 8; k++) {
      c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));
    }
    table[n] = c;
  }

  return table;
}

// Create table on load. Just 256 signed longs. Not a problem.
var crcTable = makeTable();


function crc32(crc, buf, len, pos) {
  var t = crcTable,
      end = pos + len;

  crc ^= -1;

  for (var i = pos; i < end; i++) {
    crc = (crc >>> 8) ^ t[(crc ^ buf[i]) & 0xFF];
  }

  return (crc ^ (-1)); // >>> 0;
}


module.exports = crc32;
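// Illustrative sketch (not part of the original module): running crc32 over
// a buffer. The initial crc is 0; the return value is a signed 32-bit int,
// so callers use >>> 0 when they need the unsigned form. Never invoked.
function __example_crc32() {
  var bytes = [104, 101, 108, 108, 111];     // "hello"
  var crc = crc32(0, bytes, bytes.length, 0);
  return crc >>> 0;                          // unsigned CRC-32 of "hello"
}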
},{}],46:[function(require,module,exports){
'use strict';

5872// (C) 1995-2013 Jean-loup Gailly and Mark Adler
5873// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
5874//
5875// This software is provided 'as-is', without any express or implied
5876// warranty. In no event will the authors be held liable for any damages
5877// arising from the use of this software.
5878//
5879// Permission is granted to anyone to use this software for any purpose,
5880// including commercial applications, and to alter it and redistribute it
5881// freely, subject to the following restrictions:
5882//
5883// 1. The origin of this software must not be misrepresented; you must not
5884// claim that you wrote the original software. If you use this software
5885// in a product, an acknowledgment in the product documentation would be
5886// appreciated but is not required.
5887// 2. Altered source versions must be plainly marked as such, and must not be
5888// misrepresented as being the original software.
5889// 3. This notice may not be removed or altered from any source distribution.
5890
5891var utils = require('../utils/common');5892var trees = require('./trees');5893var adler32 = require('./adler32');5894var crc32 = require('./crc32');5895var msg = require('./messages');5896
5897/* Public constants ==========================================================*/
5898/* ===========================================================================*/
5899
5900
5901/* Allowed flush values; see deflate() and inflate() below for details */
5902var Z_NO_FLUSH = 0;5903var Z_PARTIAL_FLUSH = 1;5904//var Z_SYNC_FLUSH = 2;
5905var Z_FULL_FLUSH = 3;5906var Z_FINISH = 4;5907var Z_BLOCK = 5;5908//var Z_TREES = 6;
5909
5910
5911/* Return codes for the compression/decompression functions. Negative values
5912* are errors, positive values are used for special but normal events.
5913*/
5914var Z_OK = 0;5915var Z_STREAM_END = 1;5916//var Z_NEED_DICT = 2;
5917//var Z_ERRNO = -1;
5918var Z_STREAM_ERROR = -2;5919var Z_DATA_ERROR = -3;5920//var Z_MEM_ERROR = -4;
5921var Z_BUF_ERROR = -5;5922//var Z_VERSION_ERROR = -6;
5923
5924
5925/* compression levels */
5926//var Z_NO_COMPRESSION = 0;
5927//var Z_BEST_SPEED = 1;
5928//var Z_BEST_COMPRESSION = 9;
5929var Z_DEFAULT_COMPRESSION = -1;5930
5931
5932var Z_FILTERED = 1;5933var Z_HUFFMAN_ONLY = 2;5934var Z_RLE = 3;5935var Z_FIXED = 4;5936var Z_DEFAULT_STRATEGY = 0;5937
5938/* Possible values of the data_type field (though see inflate()) */
5939//var Z_BINARY = 0;
5940//var Z_TEXT = 1;
5941//var Z_ASCII = 1; // = Z_TEXT
5942var Z_UNKNOWN = 2;5943
5944
5945/* The deflate compression method */
5946var Z_DEFLATED = 8;5947
5948/*============================================================================*/
5949
5950
var MAX_MEM_LEVEL = 9;
/* Maximum value for memLevel in deflateInit2 */
var MAX_WBITS = 15;
/* 32K LZ77 window */
var DEF_MEM_LEVEL = 8;


var LENGTH_CODES  = 29;
/* number of length codes, not counting the special END_BLOCK code */
var LITERALS      = 256;
/* number of literal bytes 0..255 */
var L_CODES       = LITERALS + 1 + LENGTH_CODES;
/* number of Literal or Length codes, including the END_BLOCK code */
var D_CODES       = 30;
/* number of distance codes */
var BL_CODES      = 19;
/* number of codes used to transfer the bit lengths */
var HEAP_SIZE     = 2 * L_CODES + 1;
/* maximum heap size */
var MAX_BITS      = 15;
/* All codes must not exceed MAX_BITS bits */

var MIN_MATCH = 3;
var MAX_MATCH = 258;
var MIN_LOOKAHEAD = (MAX_MATCH + MIN_MATCH + 1);

var PRESET_DICT = 0x20;

var INIT_STATE = 42;
var EXTRA_STATE = 69;
var NAME_STATE = 73;
var COMMENT_STATE = 91;
var HCRC_STATE = 103;
var BUSY_STATE = 113;
var FINISH_STATE = 666;

var BS_NEED_MORE      = 1; /* block not completed, need more input or more output */
var BS_BLOCK_DONE     = 2; /* block flush performed */
var BS_FINISH_STARTED = 3; /* finish started, need only more output at next deflate */
var BS_FINISH_DONE    = 4; /* finish done, accept no more input or output */

var OS_CODE = 0x03; // Unix :) . Don't detect, use this default.

function err(strm, errorCode) {
  strm.msg = msg[errorCode];
  return errorCode;
}

function rank(f) {
  return ((f) << 1) - ((f) > 4 ? 9 : 0);
}

function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } }
6005
6006/* =========================================================================
6007* Flush as much pending output as possible. All deflate() output goes
6008* through this function so some applications may wish to modify it
6009* to avoid allocating a large strm->output buffer and copying into it.
6010* (See also read_buf()).
6011*/
6012function flush_pending(strm) {6013var s = strm.state;6014
6015//_tr_flush_bits(s);6016var len = s.pending;6017if (len > strm.avail_out) {6018len = strm.avail_out;6019}6020if (len === 0) { return; }6021
6022utils.arraySet(strm.output, s.pending_buf, s.pending_out, len, strm.next_out);6023strm.next_out += len;6024s.pending_out += len;6025strm.total_out += len;6026strm.avail_out -= len;6027s.pending -= len;6028if (s.pending === 0) {6029s.pending_out = 0;6030}6031}
6032
6033
6034function flush_block_only(s, last) {6035trees._tr_flush_block(s, (s.block_start >= 0 ? s.block_start : -1), s.strstart - s.block_start, last);6036s.block_start = s.strstart;6037flush_pending(s.strm);6038}
6039
6040
6041function put_byte(s, b) {6042s.pending_buf[s.pending++] = b;6043}
6044
6045
6046/* =========================================================================
6047* Put a short in the pending buffer. The 16-bit value is put in MSB order.
6048* IN assertion: the stream state is correct and there is enough room in
6049* pending_buf.
6050*/
function putShortMSB(s, b) {
//  put_byte(s, (Byte)(b >> 8));
//  put_byte(s, (Byte)(b & 0xff));
  s.pending_buf[s.pending++] = (b >>> 8) & 0xff;
  s.pending_buf[s.pending++] = b & 0xff;
}
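// Illustrative sketch (not part of the original source): putShortMSB writes
// the high byte first, which is the order the zlib header expects. The fake
// state object below is hypothetical and the helper is never invoked.
function __example_putShortMSB() {
  var fakeState = { pending: 0, pending_buf: new utils.Buf8(2) };
  putShortMSB(fakeState, 0x789C);              // a common zlib header value
  return [fakeState.pending_buf[0], fakeState.pending_buf[1]]; // [0x78, 0x9C]
}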
6057
6058
6059/* ===========================================================================
6060* Read a new buffer from the current input stream, update the adler32
6061* and total number of bytes read. All deflate() input goes through
6062* this function so some applications may wish to modify it to avoid
6063* allocating a large strm->input buffer and copying from it.
6064* (See also flush_pending()).
6065*/
6066function read_buf(strm, buf, start, size) {6067var len = strm.avail_in;6068
6069if (len > size) { len = size; }6070if (len === 0) { return 0; }6071
6072strm.avail_in -= len;6073
6074// zmemcpy(buf, strm->next_in, len);6075utils.arraySet(buf, strm.input, strm.next_in, len, start);6076if (strm.state.wrap === 1) {6077strm.adler = adler32(strm.adler, buf, len, start);6078}6079
6080else if (strm.state.wrap === 2) {6081strm.adler = crc32(strm.adler, buf, len, start);6082}6083
6084strm.next_in += len;6085strm.total_in += len;6086
6087return len;6088}
6089
6090
6091/* ===========================================================================
6092* Set match_start to the longest match starting at the given string and
6093* return its length. Matches shorter or equal to prev_length are discarded,
6094* in which case the result is equal to prev_length and match_start is
6095* garbage.
6096* IN assertions: cur_match is the head of the hash chain for the current
6097* string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
6098* OUT assertion: the match length is not greater than s->lookahead.
6099*/
6100function longest_match(s, cur_match) {6101var chain_length = s.max_chain_length; /* max hash chain length */6102var scan = s.strstart; /* current string */6103var match; /* matched string */6104var len; /* length of current match */6105var best_len = s.prev_length; /* best match length so far */6106var nice_match = s.nice_match; /* stop if match long enough */6107var limit = (s.strstart > (s.w_size - MIN_LOOKAHEAD)) ?6108s.strstart - (s.w_size - MIN_LOOKAHEAD) : 0/*NIL*/;6109
6110var _win = s.window; // shortcut6111
6112var wmask = s.w_mask;6113var prev = s.prev;6114
6115/* Stop when cur_match becomes <= limit. To simplify the code,6116* we prevent matches with the string of window index 0.
6117*/
6118
6119var strend = s.strstart + MAX_MATCH;6120var scan_end1 = _win[scan + best_len - 1];6121var scan_end = _win[scan + best_len];6122
6123/* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.6124* It is easy to get rid of this optimization if necessary.
6125*/
6126// Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");6127
6128/* Do not waste too much time if we already have a good match: */6129if (s.prev_length >= s.good_match) {6130chain_length >>= 2;6131}6132/* Do not look for matches beyond the end of the input. This is necessary6133* to make deflate deterministic.
6134*/
6135if (nice_match > s.lookahead) { nice_match = s.lookahead; }6136
6137// Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");6138
6139do {6140// Assert(cur_match < s->strstart, "no future");6141match = cur_match;6142
6143/* Skip to next match if the match length cannot increase6144* or if the match length is less than 2. Note that the checks below
6145* for insufficient lookahead only occur occasionally for performance
6146* reasons. Therefore uninitialized memory will be accessed, and
6147* conditional jumps will be made that depend on those values.
6148* However the length of the match is limited to the lookahead, so
6149* the output of deflate is not affected by the uninitialized values.
6150*/
6151
6152if (_win[match + best_len] !== scan_end ||6153_win[match + best_len - 1] !== scan_end1 ||6154_win[match] !== _win[scan] ||6155_win[++match] !== _win[scan + 1]) {6156continue;6157}6158
6159/* The check at best_len-1 can be removed because it will be made6160* again later. (This heuristic is not always a win.)
6161* It is not necessary to compare scan[2] and match[2] since they
6162* are always equal when the other bytes match, given that
6163* the hash keys are equal and that HASH_BITS >= 8.
6164*/
6165scan += 2;6166match++;6167// Assert(*scan == *match, "match[2]?");6168
6169/* We check for insufficient lookahead only every 8th comparison;6170* the 256th check will be made at strstart+258.
6171*/
6172do {6173/*jshint noempty:false*/6174} while (_win[++scan] === _win[++match] && _win[++scan] === _win[++match] &&6175_win[++scan] === _win[++match] && _win[++scan] === _win[++match] &&6176_win[++scan] === _win[++match] && _win[++scan] === _win[++match] &&6177_win[++scan] === _win[++match] && _win[++scan] === _win[++match] &&6178scan < strend);6179
6180// Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");6181
6182len = MAX_MATCH - (strend - scan);6183scan = strend - MAX_MATCH;6184
6185if (len > best_len) {6186s.match_start = cur_match;6187best_len = len;6188if (len >= nice_match) {6189break;6190}6191scan_end1 = _win[scan + best_len - 1];6192scan_end = _win[scan + best_len];6193}6194} while ((cur_match = prev[cur_match & wmask]) > limit && --chain_length !== 0);6195
6196if (best_len <= s.lookahead) {6197return best_len;6198}6199return s.lookahead;6200}
6201
6202
6203/* ===========================================================================
6204* Fill the window when the lookahead becomes insufficient.
6205* Updates strstart and lookahead.
6206*
6207* IN assertion: lookahead < MIN_LOOKAHEAD
6208* OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
6209* At least one byte has been read, or avail_in == 0; reads are
6210* performed for at least two bytes (required for the zip translate_eol
6211* option -- not supported here).
6212*/
6213function fill_window(s) {6214var _w_size = s.w_size;6215var p, n, m, more, str;6216
6217//Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead");6218
6219do {6220more = s.window_size - s.lookahead - s.strstart;6221
6222// JS ints have 32 bit, block below not needed6223/* Deal with !@#$% 64K limit: */6224//if (sizeof(int) <= 2) {6225// if (more == 0 && s->strstart == 0 && s->lookahead == 0) {6226// more = wsize;6227//6228// } else if (more == (unsigned)(-1)) {6229// /* Very unlikely, but possible on 16 bit machine if6230// * strstart == 0 && lookahead == 1 (input done a byte at time)6231// */6232// more--;6233// }6234//}6235
6236
6237/* If the window is almost full and there is insufficient lookahead,6238* move the upper half to the lower one to make room in the upper half.
6239*/
6240if (s.strstart >= _w_size + (_w_size - MIN_LOOKAHEAD)) {6241
6242utils.arraySet(s.window, s.window, _w_size, _w_size, 0);6243s.match_start -= _w_size;6244s.strstart -= _w_size;6245/* we now have strstart >= MAX_DIST */6246s.block_start -= _w_size;6247
6248/* Slide the hash table (could be avoided with 32 bit values6249at the expense of memory usage). We slide even when level == 0
6250to keep the hash table consistent if we switch back to level > 0
6251later. (Using level 0 permanently is not an optimal usage of
6252zlib, so we don't care about this pathological case.)
6253*/
6254
6255n = s.hash_size;6256p = n;6257do {6258m = s.head[--p];6259s.head[p] = (m >= _w_size ? m - _w_size : 0);6260} while (--n);6261
6262n = _w_size;6263p = n;6264do {6265m = s.prev[--p];6266s.prev[p] = (m >= _w_size ? m - _w_size : 0);6267/* If n is not on any hash chain, prev[n] is garbage but6268* its value will never be used.
6269*/
6270} while (--n);6271
6272more += _w_size;6273}6274if (s.strm.avail_in === 0) {6275break;6276}6277
6278/* If there was no sliding:6279* strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
6280* more == window_size - lookahead - strstart
6281* => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
6282* => more >= window_size - 2*WSIZE + 2
6283* In the BIG_MEM or MMAP case (not yet supported),
6284* window_size == input_size + MIN_LOOKAHEAD &&
6285* strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
6286* Otherwise, window_size == 2*WSIZE so more >= 2.
6287* If there was sliding, more >= WSIZE. So in all cases, more >= 2.
6288*/
6289//Assert(more >= 2, "more < 2");6290n = read_buf(s.strm, s.window, s.strstart + s.lookahead, more);6291s.lookahead += n;6292
6293/* Initialize the hash value now that we have some input: */6294if (s.lookahead + s.insert >= MIN_MATCH) {6295str = s.strstart - s.insert;6296s.ins_h = s.window[str];6297
6298/* UPDATE_HASH(s, s->ins_h, s->window[str + 1]); */6299s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + 1]) & s.hash_mask;6300//#if MIN_MATCH != 3
6301// Call update_hash() MIN_MATCH-3 more times
6302//#endif
6303while (s.insert) {6304/* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */6305s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask;6306
6307s.prev[str & s.w_mask] = s.head[s.ins_h];6308s.head[s.ins_h] = str;6309str++;6310s.insert--;6311if (s.lookahead + s.insert < MIN_MATCH) {6312break;6313}6314}6315}6316/* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,6317* but this is not important since only literal bytes will be emitted.
6318*/
6319
6320} while (s.lookahead < MIN_LOOKAHEAD && s.strm.avail_in !== 0);6321
6322/* If the WIN_INIT bytes after the end of the current data have never been6323* written, then zero those bytes in order to avoid memory check reports of
6324* the use of uninitialized (or uninitialised as Julian writes) bytes by
6325* the longest match routines. Update the high water mark for the next
6326* time through here. WIN_INIT is set to MAX_MATCH since the longest match
6327* routines allow scanning to strstart + MAX_MATCH, ignoring lookahead.
6328*/
6329// if (s.high_water < s.window_size) {
6330// var curr = s.strstart + s.lookahead;
6331// var init = 0;
6332//
6333// if (s.high_water < curr) {
6334// /* Previous high water mark below current data -- zero WIN_INIT
6335// * bytes or up to end of window, whichever is less.
6336// */
6337// init = s.window_size - curr;
6338// if (init > WIN_INIT)
6339// init = WIN_INIT;
6340// zmemzero(s->window + curr, (unsigned)init);
6341// s->high_water = curr + init;
6342// }
6343// else if (s->high_water < (ulg)curr + WIN_INIT) {
6344// /* High water mark at or above current data, but below current data
6345// * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up
6346// * to end of window, whichever is less.
6347// */
6348// init = (ulg)curr + WIN_INIT - s->high_water;
6349// if (init > s->window_size - s->high_water)
6350// init = s->window_size - s->high_water;
6351// zmemzero(s->window + s->high_water, (unsigned)init);
6352// s->high_water += init;
6353// }
6354// }
6355//
6356// Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
6357// "not enough room for search");
6358}
6359
6360/* ===========================================================================
6361* Copy without compression as much as possible from the input stream, return
6362* the current block state.
6363* This function does not insert new strings in the dictionary since
6364* uncompressible data is probably not useful. This function is used
6365* only for the level=0 compression option.
6366* NOTE: this function should be optimized to avoid extra copying from
6367* window to pending_buf.
6368*/
6369function deflate_stored(s, flush) {6370/* Stored blocks are limited to 0xffff bytes, pending_buf is limited6371* to pending_buf_size, and each stored block has a 5 byte header:
6372*/
6373var max_block_size = 0xffff;6374
6375if (max_block_size > s.pending_buf_size - 5) {6376max_block_size = s.pending_buf_size - 5;6377}6378
6379/* Copy as much as possible from input to output: */6380for (;;) {6381/* Fill the window as much as possible: */6382if (s.lookahead <= 1) {6383
6384//Assert(s->strstart < s->w_size+MAX_DIST(s) ||6385// s->block_start >= (long)s->w_size, "slide too late");6386// if (!(s.strstart < s.w_size + (s.w_size - MIN_LOOKAHEAD) ||
6387// s.block_start >= s.w_size)) {
6388// throw new Error("slide too late");
6389// }
6390
6391fill_window(s);6392if (s.lookahead === 0 && flush === Z_NO_FLUSH) {6393return BS_NEED_MORE;6394}6395
6396if (s.lookahead === 0) {6397break;6398}6399/* flush the current block */6400}6401//Assert(s->block_start >= 0L, "block gone");6402// if (s.block_start < 0) throw new Error("block gone");
6403
6404s.strstart += s.lookahead;6405s.lookahead = 0;6406
6407/* Emit a stored block if pending_buf will be full: */6408var max_start = s.block_start + max_block_size;6409
6410if (s.strstart === 0 || s.strstart >= max_start) {6411/* strstart == 0 is possible when wraparound on 16-bit machine */6412s.lookahead = s.strstart - max_start;6413s.strstart = max_start;6414/*** FLUSH_BLOCK(s, 0); ***/6415flush_block_only(s, false);6416if (s.strm.avail_out === 0) {6417return BS_NEED_MORE;6418}6419/***/6420
6421
6422}6423/* Flush if we may have to slide, otherwise block_start may become6424* negative and the data will be gone:
6425*/
6426if (s.strstart - s.block_start >= (s.w_size - MIN_LOOKAHEAD)) {6427/*** FLUSH_BLOCK(s, 0); ***/6428flush_block_only(s, false);6429if (s.strm.avail_out === 0) {6430return BS_NEED_MORE;6431}6432/***/6433}6434}6435
6436s.insert = 0;6437
6438if (flush === Z_FINISH) {6439/*** FLUSH_BLOCK(s, 1); ***/6440flush_block_only(s, true);6441if (s.strm.avail_out === 0) {6442return BS_FINISH_STARTED;6443}6444/***/6445return BS_FINISH_DONE;6446}6447
6448if (s.strstart > s.block_start) {6449/*** FLUSH_BLOCK(s, 0); ***/6450flush_block_only(s, false);6451if (s.strm.avail_out === 0) {6452return BS_NEED_MORE;6453}6454/***/6455}6456
6457return BS_NEED_MORE;6458}
6459
6460/* ===========================================================================
6461* Compress as much as possible from the input stream, return the current
6462* block state.
6463* This function does not perform lazy evaluation of matches and inserts
6464* new strings in the dictionary only for unmatched strings or for short
6465* matches. It is used only for the fast compression options.
6466*/
6467function deflate_fast(s, flush) {6468var hash_head; /* head of the hash chain */6469var bflush; /* set if current block must be flushed */6470
6471for (;;) {6472/* Make sure that we always have enough lookahead, except6473* at the end of the input file. We need MAX_MATCH bytes
6474* for the next match, plus MIN_MATCH bytes to insert the
6475* string following the next match.
6476*/
6477if (s.lookahead < MIN_LOOKAHEAD) {6478fill_window(s);6479if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) {6480return BS_NEED_MORE;6481}6482if (s.lookahead === 0) {6483break; /* flush the current block */6484}6485}6486
6487/* Insert the string window[strstart .. strstart+2] in the6488* dictionary, and set hash_head to the head of the hash chain:
6489*/
6490hash_head = 0/*NIL*/;6491if (s.lookahead >= MIN_MATCH) {6492/*** INSERT_STRING(s, s.strstart, hash_head); ***/6493s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask;6494hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h];6495s.head[s.ins_h] = s.strstart;6496/***/6497}6498
6499/* Find the longest match, discarding those <= prev_length.6500* At this point we have always match_length < MIN_MATCH
6501*/
6502if (hash_head !== 0/*NIL*/ && ((s.strstart - hash_head) <= (s.w_size - MIN_LOOKAHEAD))) {6503/* To simplify the code, we prevent matches with the string6504* of window index 0 (in particular we have to avoid a match
6505* of the string with itself at the start of the input file).
6506*/
6507s.match_length = longest_match(s, hash_head);6508/* longest_match() sets match_start */6509}6510if (s.match_length >= MIN_MATCH) {6511// check_match(s, s.strstart, s.match_start, s.match_length); // for debug only6512
6513/*** _tr_tally_dist(s, s.strstart - s.match_start,6514s.match_length - MIN_MATCH, bflush); ***/
6515bflush = trees._tr_tally(s, s.strstart - s.match_start, s.match_length - MIN_MATCH);6516
6517s.lookahead -= s.match_length;6518
6519/* Insert new strings in the hash table only if the match length6520* is not too large. This saves time but degrades compression.
6521*/
6522if (s.match_length <= s.max_lazy_match/*max_insert_length*/ && s.lookahead >= MIN_MATCH) {6523s.match_length--; /* string at strstart already in table */6524do {6525s.strstart++;6526/*** INSERT_STRING(s, s.strstart, hash_head); ***/6527s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask;6528hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h];6529s.head[s.ins_h] = s.strstart;6530/***/6531/* strstart never exceeds WSIZE-MAX_MATCH, so there are6532* always MIN_MATCH bytes ahead.
6533*/
6534} while (--s.match_length !== 0);6535s.strstart++;6536} else6537{6538s.strstart += s.match_length;6539s.match_length = 0;6540s.ins_h = s.window[s.strstart];6541/* UPDATE_HASH(s, s.ins_h, s.window[s.strstart+1]); */6542s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + 1]) & s.hash_mask;6543
6544//#if MIN_MATCH != 3
6545// Call UPDATE_HASH() MIN_MATCH-3 more times
6546//#endif
6547/* If lookahead < MIN_MATCH, ins_h is garbage, but it does not6548* matter since it will be recomputed at next deflate call.
6549*/
6550}6551} else {6552/* No match, output a literal byte */6553//Tracevv((stderr,"%c", s.window[s.strstart]));6554/*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/6555bflush = trees._tr_tally(s, 0, s.window[s.strstart]);6556
6557s.lookahead--;6558s.strstart++;6559}6560if (bflush) {6561/*** FLUSH_BLOCK(s, 0); ***/6562flush_block_only(s, false);6563if (s.strm.avail_out === 0) {6564return BS_NEED_MORE;6565}6566/***/6567}6568}6569s.insert = ((s.strstart < (MIN_MATCH - 1)) ? s.strstart : MIN_MATCH - 1);6570if (flush === Z_FINISH) {6571/*** FLUSH_BLOCK(s, 1); ***/6572flush_block_only(s, true);6573if (s.strm.avail_out === 0) {6574return BS_FINISH_STARTED;6575}6576/***/6577return BS_FINISH_DONE;6578}6579if (s.last_lit) {6580/*** FLUSH_BLOCK(s, 0); ***/6581flush_block_only(s, false);6582if (s.strm.avail_out === 0) {6583return BS_NEED_MORE;6584}6585/***/6586}6587return BS_BLOCK_DONE;6588}
6589
6590/* ===========================================================================
6591* Same as above, but achieves better compression. We use a lazy
6592* evaluation for matches: a match is finally adopted only if there is
6593* no better match at the next window position.
6594*/
6595function deflate_slow(s, flush) {6596var hash_head; /* head of hash chain */6597var bflush; /* set if current block must be flushed */6598
6599var max_insert;6600
6601/* Process the input block. */6602for (;;) {6603/* Make sure that we always have enough lookahead, except6604* at the end of the input file. We need MAX_MATCH bytes
6605* for the next match, plus MIN_MATCH bytes to insert the
6606* string following the next match.
6607*/
6608if (s.lookahead < MIN_LOOKAHEAD) {6609fill_window(s);6610if (s.lookahead < MIN_LOOKAHEAD && flush === Z_NO_FLUSH) {6611return BS_NEED_MORE;6612}6613if (s.lookahead === 0) { break; } /* flush the current block */6614}6615
6616/* Insert the string window[strstart .. strstart+2] in the6617* dictionary, and set hash_head to the head of the hash chain:
6618*/
6619hash_head = 0/*NIL*/;6620if (s.lookahead >= MIN_MATCH) {6621/*** INSERT_STRING(s, s.strstart, hash_head); ***/6622s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask;6623hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h];6624s.head[s.ins_h] = s.strstart;6625/***/6626}6627
6628/* Find the longest match, discarding those <= prev_length.6629*/
6630s.prev_length = s.match_length;6631s.prev_match = s.match_start;6632s.match_length = MIN_MATCH - 1;6633
6634if (hash_head !== 0/*NIL*/ && s.prev_length < s.max_lazy_match &&6635s.strstart - hash_head <= (s.w_size - MIN_LOOKAHEAD)/*MAX_DIST(s)*/) {6636/* To simplify the code, we prevent matches with the string6637* of window index 0 (in particular we have to avoid a match
6638* of the string with itself at the start of the input file).
6639*/
6640s.match_length = longest_match(s, hash_head);6641/* longest_match() sets match_start */6642
6643if (s.match_length <= 5 &&6644(s.strategy === Z_FILTERED || (s.match_length === MIN_MATCH && s.strstart - s.match_start > 4096/*TOO_FAR*/))) {6645
6646/* If prev_match is also MIN_MATCH, match_start is garbage6647* but we will ignore the current match anyway.
6648*/
6649s.match_length = MIN_MATCH - 1;6650}6651}6652/* If there was a match at the previous step and the current6653* match is not better, output the previous match:
6654*/
6655if (s.prev_length >= MIN_MATCH && s.match_length <= s.prev_length) {6656max_insert = s.strstart + s.lookahead - MIN_MATCH;6657/* Do not insert strings in hash table beyond this. */6658
6659//check_match(s, s.strstart-1, s.prev_match, s.prev_length);6660
6661/***_tr_tally_dist(s, s.strstart - 1 - s.prev_match,6662s.prev_length - MIN_MATCH, bflush);***/
6663bflush = trees._tr_tally(s, s.strstart - 1 - s.prev_match, s.prev_length - MIN_MATCH);6664/* Insert in hash table all strings up to the end of the match.6665* strstart-1 and strstart are already inserted. If there is not
6666* enough lookahead, the last two strings are not inserted in
6667* the hash table.
6668*/
6669s.lookahead -= s.prev_length - 1;6670s.prev_length -= 2;6671do {6672if (++s.strstart <= max_insert) {6673/*** INSERT_STRING(s, s.strstart, hash_head); ***/6674s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[s.strstart + MIN_MATCH - 1]) & s.hash_mask;6675hash_head = s.prev[s.strstart & s.w_mask] = s.head[s.ins_h];6676s.head[s.ins_h] = s.strstart;6677/***/6678}6679} while (--s.prev_length !== 0);6680s.match_available = 0;6681s.match_length = MIN_MATCH - 1;6682s.strstart++;6683
6684if (bflush) {6685/*** FLUSH_BLOCK(s, 0); ***/6686flush_block_only(s, false);6687if (s.strm.avail_out === 0) {6688return BS_NEED_MORE;6689}6690/***/6691}6692
6693} else if (s.match_available) {6694/* If there was no match at the previous position, output a6695* single literal. If there was a match but the current match
6696* is longer, truncate the previous match to a single literal.
6697*/
6698//Tracevv((stderr,"%c", s->window[s->strstart-1]));6699/*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/6700bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]);6701
6702if (bflush) {6703/*** FLUSH_BLOCK_ONLY(s, 0) ***/6704flush_block_only(s, false);6705/***/6706}6707s.strstart++;6708s.lookahead--;6709if (s.strm.avail_out === 0) {6710return BS_NEED_MORE;6711}6712} else {6713/* There is no previous match to compare with, wait for6714* the next step to decide.
6715*/
6716s.match_available = 1;6717s.strstart++;6718s.lookahead--;6719}6720}6721//Assert (flush != Z_NO_FLUSH, "no flush?");6722if (s.match_available) {6723//Tracevv((stderr,"%c", s->window[s->strstart-1]));6724/*** _tr_tally_lit(s, s.window[s.strstart-1], bflush); ***/6725bflush = trees._tr_tally(s, 0, s.window[s.strstart - 1]);6726
6727s.match_available = 0;6728}6729s.insert = s.strstart < MIN_MATCH - 1 ? s.strstart : MIN_MATCH - 1;6730if (flush === Z_FINISH) {6731/*** FLUSH_BLOCK(s, 1); ***/6732flush_block_only(s, true);6733if (s.strm.avail_out === 0) {6734return BS_FINISH_STARTED;6735}6736/***/6737return BS_FINISH_DONE;6738}6739if (s.last_lit) {6740/*** FLUSH_BLOCK(s, 0); ***/6741flush_block_only(s, false);6742if (s.strm.avail_out === 0) {6743return BS_NEED_MORE;6744}6745/***/6746}6747
6748return BS_BLOCK_DONE;6749}
6750
6751
6752/* ===========================================================================
6753* For Z_RLE, simply look for runs of bytes, generate matches only of distance
6754* one. Do not maintain a hash table. (It will be regenerated if this run of
6755* deflate switches away from Z_RLE.)
6756*/
6757function deflate_rle(s, flush) {6758var bflush; /* set if current block must be flushed */6759var prev; /* byte at distance one to match */6760var scan, strend; /* scan goes up to strend for length of run */6761
6762var _win = s.window;6763
6764for (;;) {6765/* Make sure that we always have enough lookahead, except6766* at the end of the input file. We need MAX_MATCH bytes
6767* for the longest run, plus one for the unrolled loop.
6768*/
6769if (s.lookahead <= MAX_MATCH) {6770fill_window(s);6771if (s.lookahead <= MAX_MATCH && flush === Z_NO_FLUSH) {6772return BS_NEED_MORE;6773}6774if (s.lookahead === 0) { break; } /* flush the current block */6775}6776
6777/* See how many times the previous byte repeats */6778s.match_length = 0;6779if (s.lookahead >= MIN_MATCH && s.strstart > 0) {6780scan = s.strstart - 1;6781prev = _win[scan];6782if (prev === _win[++scan] && prev === _win[++scan] && prev === _win[++scan]) {6783strend = s.strstart + MAX_MATCH;6784do {6785/*jshint noempty:false*/6786} while (prev === _win[++scan] && prev === _win[++scan] &&6787prev === _win[++scan] && prev === _win[++scan] &&6788prev === _win[++scan] && prev === _win[++scan] &&6789prev === _win[++scan] && prev === _win[++scan] &&6790scan < strend);6791s.match_length = MAX_MATCH - (strend - scan);6792if (s.match_length > s.lookahead) {6793s.match_length = s.lookahead;6794}6795}6796//Assert(scan <= s->window+(uInt)(s->window_size-1), "wild scan");6797}6798
6799/* Emit match if have run of MIN_MATCH or longer, else emit literal */6800if (s.match_length >= MIN_MATCH) {6801//check_match(s, s.strstart, s.strstart - 1, s.match_length);6802
6803/*** _tr_tally_dist(s, 1, s.match_length - MIN_MATCH, bflush); ***/6804bflush = trees._tr_tally(s, 1, s.match_length - MIN_MATCH);6805
6806s.lookahead -= s.match_length;6807s.strstart += s.match_length;6808s.match_length = 0;6809} else {6810/* No match, output a literal byte */6811//Tracevv((stderr,"%c", s->window[s->strstart]));6812/*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/6813bflush = trees._tr_tally(s, 0, s.window[s.strstart]);6814
6815s.lookahead--;6816s.strstart++;6817}6818if (bflush) {6819/*** FLUSH_BLOCK(s, 0); ***/6820flush_block_only(s, false);6821if (s.strm.avail_out === 0) {6822return BS_NEED_MORE;6823}6824/***/6825}6826}6827s.insert = 0;6828if (flush === Z_FINISH) {6829/*** FLUSH_BLOCK(s, 1); ***/6830flush_block_only(s, true);6831if (s.strm.avail_out === 0) {6832return BS_FINISH_STARTED;6833}6834/***/6835return BS_FINISH_DONE;6836}6837if (s.last_lit) {6838/*** FLUSH_BLOCK(s, 0); ***/6839flush_block_only(s, false);6840if (s.strm.avail_out === 0) {6841return BS_NEED_MORE;6842}6843/***/6844}6845return BS_BLOCK_DONE;6846}
6847
6848/* ===========================================================================
6849* For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table.
6850* (It will be regenerated if this run of deflate switches away from Huffman.)
6851*/
6852function deflate_huff(s, flush) {6853var bflush; /* set if current block must be flushed */6854
6855for (;;) {6856/* Make sure that we have a literal to write. */6857if (s.lookahead === 0) {6858fill_window(s);6859if (s.lookahead === 0) {6860if (flush === Z_NO_FLUSH) {6861return BS_NEED_MORE;6862}6863break; /* flush the current block */6864}6865}6866
6867/* Output a literal byte */6868s.match_length = 0;6869//Tracevv((stderr,"%c", s->window[s->strstart]));6870/*** _tr_tally_lit(s, s.window[s.strstart], bflush); ***/6871bflush = trees._tr_tally(s, 0, s.window[s.strstart]);6872s.lookahead--;6873s.strstart++;6874if (bflush) {6875/*** FLUSH_BLOCK(s, 0); ***/6876flush_block_only(s, false);6877if (s.strm.avail_out === 0) {6878return BS_NEED_MORE;6879}6880/***/6881}6882}6883s.insert = 0;6884if (flush === Z_FINISH) {6885/*** FLUSH_BLOCK(s, 1); ***/6886flush_block_only(s, true);6887if (s.strm.avail_out === 0) {6888return BS_FINISH_STARTED;6889}6890/***/6891return BS_FINISH_DONE;6892}6893if (s.last_lit) {6894/*** FLUSH_BLOCK(s, 0); ***/6895flush_block_only(s, false);6896if (s.strm.avail_out === 0) {6897return BS_NEED_MORE;6898}6899/***/6900}6901return BS_BLOCK_DONE;6902}
6903
6904/* Values for max_lazy_match, good_match and max_chain_length, depending on
6905* the desired pack level (0..9). The values given below have been tuned to
6906* exclude worst case performance for pathological files. Better values may be
6907* found for specific files.
6908*/
function Config(good_length, max_lazy, nice_length, max_chain, func) {
  this.good_length = good_length;
  this.max_lazy = max_lazy;
  this.nice_length = nice_length;
  this.max_chain = max_chain;
  this.func = func;
}

var configuration_table;

configuration_table = [
  /*         good lazy nice chain */
  new Config(0, 0, 0, 0, deflate_stored),          /* 0 store only */
  new Config(4, 4, 8, 4, deflate_fast),            /* 1 max speed, no lazy matches */
  new Config(4, 5, 16, 8, deflate_fast),           /* 2 */
  new Config(4, 6, 32, 32, deflate_fast),          /* 3 */

  new Config(4, 4, 16, 16, deflate_slow),          /* 4 lazy matches */
  new Config(8, 16, 32, 32, deflate_slow),         /* 5 */
  new Config(8, 16, 128, 128, deflate_slow),       /* 6 */
  new Config(8, 32, 128, 256, deflate_slow),       /* 7 */
  new Config(32, 128, 258, 1024, deflate_slow),    /* 8 */
  new Config(32, 258, 258, 4096, deflate_slow)     /* 9 max compression */
];
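// Illustrative sketch (not part of the original source): how a compression
// level picks its search parameters. Level 1 uses deflate_fast with short
// hash chains; level 9 uses deflate_slow with chains up to 4096 entries.
// The helper is hypothetical and never invoked.
function __example_configuration_table(level) {
  var cfg = configuration_table[level];       // e.g. level 6 (the default)
  return {
    good_length: cfg.good_length,             // 8 at level 6
    max_lazy: cfg.max_lazy,                   // 16 at level 6
    nice_length: cfg.nice_length,             // 128 at level 6
    max_chain: cfg.max_chain,                 // 128 at level 6
    func: cfg.func                            // deflate_slow at level 6
  };
}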
6934
6935/* ===========================================================================
6936* Initialize the "longest match" routines for a new zlib stream
6937*/
6938function lm_init(s) {6939s.window_size = 2 * s.w_size;6940
6941/*** CLEAR_HASH(s); ***/6942zero(s.head); // Fill with NIL (= 0);6943
6944/* Set the default configuration parameters:6945*/
6946s.max_lazy_match = configuration_table[s.level].max_lazy;6947s.good_match = configuration_table[s.level].good_length;6948s.nice_match = configuration_table[s.level].nice_length;6949s.max_chain_length = configuration_table[s.level].max_chain;6950
6951s.strstart = 0;6952s.block_start = 0;6953s.lookahead = 0;6954s.insert = 0;6955s.match_length = s.prev_length = MIN_MATCH - 1;6956s.match_available = 0;6957s.ins_h = 0;6958}
6959
6960
6961function DeflateState() {6962this.strm = null; /* pointer back to this zlib stream */6963this.status = 0; /* as the name implies */6964this.pending_buf = null; /* output still pending */6965this.pending_buf_size = 0; /* size of pending_buf */6966this.pending_out = 0; /* next pending byte to output to the stream */6967this.pending = 0; /* nb of bytes in the pending buffer */6968this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */6969this.gzhead = null; /* gzip header information to write */6970this.gzindex = 0; /* where in extra, name, or comment */6971this.method = Z_DEFLATED; /* can only be DEFLATED */6972this.last_flush = -1; /* value of flush param for previous deflate call */6973
6974this.w_size = 0; /* LZ77 window size (32K by default) */6975this.w_bits = 0; /* log2(w_size) (8..16) */6976this.w_mask = 0; /* w_size - 1 */6977
6978this.window = null;6979/* Sliding window. Input bytes are read into the second half of the window,6980* and move to the first half later to keep a dictionary of at least wSize
6981* bytes. With this organization, matches are limited to a distance of
6982* wSize-MAX_MATCH bytes, but this ensures that IO is always
6983* performed with a length multiple of the block size.
6984*/
6985
6986this.window_size = 0;6987/* Actual size of window: 2*wSize, except when the user input buffer6988* is directly used as sliding window.
6989*/
6990
6991this.prev = null;6992/* Link to older string with same hash index. To limit the size of this6993* array to 64K, this link is maintained only for the last 32K strings.
6994* An index in this array is thus a window index modulo 32K.
6995*/
6996
6997this.head = null; /* Heads of the hash chains or NIL. */6998
6999this.ins_h = 0; /* hash index of string to be inserted */7000this.hash_size = 0; /* number of elements in hash table */7001this.hash_bits = 0; /* log2(hash_size) */7002this.hash_mask = 0; /* hash_size-1 */7003
7004this.hash_shift = 0;7005/* Number of bits by which ins_h must be shifted at each input7006* step. It must be such that after MIN_MATCH steps, the oldest
7007* byte no longer takes part in the hash key, that is:
7008* hash_shift * MIN_MATCH >= hash_bits
7009*/
7010
7011this.block_start = 0;7012/* Window position at the beginning of the current output block. Gets7013* negative when the window is moved backwards.
7014*/
7015
7016this.match_length = 0; /* length of best match */7017this.prev_match = 0; /* previous match */7018this.match_available = 0; /* set if previous match exists */7019this.strstart = 0; /* start of string to insert */7020this.match_start = 0; /* start of matching string */7021this.lookahead = 0; /* number of valid bytes ahead in window */7022
7023this.prev_length = 0;7024/* Length of the best match at previous step. Matches not greater than this7025* are discarded. This is used in the lazy match evaluation.
7026*/
7027
7028this.max_chain_length = 0;7029/* To speed up deflation, hash chains are never searched beyond this7030* length. A higher limit improves compression ratio but degrades the
7031* speed.
7032*/
7033
7034this.max_lazy_match = 0;7035/* Attempt to find a better match only when the current match is strictly7036* smaller than this value. This mechanism is used only for compression
7037* levels >= 4.
7038*/
7039// That's alias to max_lazy_match, don't use directly7040//this.max_insert_length = 0;7041/* Insert new strings in the hash table only if the match length is not7042* greater than this length. This saves time but degrades compression.
7043* max_insert_length is used only for compression levels <= 3.
7044*/
7045
7046this.level = 0; /* compression level (1..9) */7047this.strategy = 0; /* favor or force Huffman coding*/7048
7049this.good_match = 0;7050/* Use a faster search when the previous match is longer than this */7051
7052this.nice_match = 0; /* Stop searching when current match exceeds this */7053
7054/* used by trees.c: */7055
7056/* Didn't use ct_data typedef below to suppress compiler warning */7057
7058// struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */7059// struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */7060// struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */7061
  // Use a flat array of DOUBLE size, with interleaved data,
  // because JS does not handle structures of 16-bit elements efficiently.
  this.dyn_ltree = new utils.Buf16(HEAP_SIZE * 2);
  this.dyn_dtree = new utils.Buf16((2 * D_CODES + 1) * 2);
  this.bl_tree = new utils.Buf16((2 * BL_CODES + 1) * 2);
  zero(this.dyn_ltree);
  zero(this.dyn_dtree);
  zero(this.bl_tree);

7071this.l_desc = null; /* desc. for literal tree */7072this.d_desc = null; /* desc. for distance tree */7073this.bl_desc = null; /* desc. for bit length tree */7074
7075//ush bl_count[MAX_BITS+1];7076this.bl_count = new utils.Buf16(MAX_BITS + 1);7077/* number of codes at each bit length for an optimal tree */7078
7079//int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */7080this.heap = new utils.Buf16(2 * L_CODES + 1); /* heap used to build the Huffman trees */7081zero(this.heap);7082
7083this.heap_len = 0; /* number of elements in the heap */7084this.heap_max = 0; /* element of largest frequency */7085/* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.7086* The same heap array is used to build all trees.
7087*/
7088
7089this.depth = new utils.Buf16(2 * L_CODES + 1); //uch depth[2*L_CODES+1];7090zero(this.depth);7091/* Depth of each subtree used as tie breaker for trees of equal frequency7092*/
7093
7094this.l_buf = 0; /* buffer index for literals or lengths */7095
7096this.lit_bufsize = 0;7097/* Size of match buffer for literals/lengths. There are 4 reasons for7098* limiting lit_bufsize to 64K:
7099* - frequencies can be kept in 16 bit counters
7100* - if compression is not successful for the first block, all input
7101* data is still in the window so we can still emit a stored block even
7102* when input comes from standard input. (This can also be done for
7103* all blocks if lit_bufsize is not greater than 32K.)
7104* - if compression is not successful for a file smaller than 64K, we can
7105* even emit a stored file instead of a stored block (saving 5 bytes).
7106* This is applicable only for zip (not gzip or zlib).
7107* - creating new Huffman trees less frequently may not provide fast
7108* adaptation to changes in the input data statistics. (Take for
7109* example a binary file with poorly compressible code followed by
7110* a highly compressible string table.) Smaller buffer sizes give
7111* fast adaptation but have of course the overhead of transmitting
7112* trees more frequently.
7113* - I can't count above 4
7114*/
7115
7116this.last_lit = 0; /* running index in l_buf */7117
7118this.d_buf = 0;7119/* Buffer index for distances. To simplify the code, d_buf and l_buf have7120* the same number of elements. To use different lengths, an extra flag
7121* array would be necessary.
7122*/
7123
7124this.opt_len = 0; /* bit length of current block with optimal trees */7125this.static_len = 0; /* bit length of current block with static trees */7126this.matches = 0; /* number of string matches in current block */7127this.insert = 0; /* bytes at end of window left to insert */7128
7129
7130this.bi_buf = 0;7131/* Output buffer. bits are inserted starting at the bottom (least7132* significant bits).
7133*/
7134this.bi_valid = 0;7135/* Number of valid bits in bi_buf. All bits above the last valid bit7136* are always zero.
7137*/
7138
7139// Used for window memory init. We safely ignore it for JS. That makes7140// sense only for pointers and memory check tools.7141//this.high_water = 0;7142/* High water mark offset in window for initialized bytes -- bytes above7143* this are set to zero in order to avoid memory check warnings when
7144* longest match routines access bytes past the input. This is then
7145* updated to the new high water mark.
7146*/
7147}
7148
7149
7150function deflateResetKeep(strm) {7151var s;7152
7153if (!strm || !strm.state) {7154return err(strm, Z_STREAM_ERROR);7155}7156
7157strm.total_in = strm.total_out = 0;7158strm.data_type = Z_UNKNOWN;7159
7160s = strm.state;7161s.pending = 0;7162s.pending_out = 0;7163
7164if (s.wrap < 0) {7165s.wrap = -s.wrap;7166/* was made negative by deflate(..., Z_FINISH); */7167}7168s.status = (s.wrap ? INIT_STATE : BUSY_STATE);7169strm.adler = (s.wrap === 2) ?71700 // crc32(0, Z_NULL, 0)7171:71721; // adler32(0, Z_NULL, 0)7173s.last_flush = Z_NO_FLUSH;7174trees._tr_init(s);7175return Z_OK;7176}
7177
7178
7179function deflateReset(strm) {7180var ret = deflateResetKeep(strm);7181if (ret === Z_OK) {7182lm_init(strm.state);7183}7184return ret;7185}
7186
7187
7188function deflateSetHeader(strm, head) {7189if (!strm || !strm.state) { return Z_STREAM_ERROR; }7190if (strm.state.wrap !== 2) { return Z_STREAM_ERROR; }7191strm.state.gzhead = head;7192return Z_OK;7193}
7194
7195
7196function deflateInit2(strm, level, method, windowBits, memLevel, strategy) {7197if (!strm) { // === Z_NULL7198return Z_STREAM_ERROR;7199}7200var wrap = 1;7201
7202if (level === Z_DEFAULT_COMPRESSION) {7203level = 6;7204}7205
7206if (windowBits < 0) { /* suppress zlib wrapper */7207wrap = 0;7208windowBits = -windowBits;7209}7210
7211else if (windowBits > 15) {7212wrap = 2; /* write gzip wrapper instead */7213windowBits -= 16;7214}7215
7216
7217if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method !== Z_DEFLATED ||7218windowBits < 8 || windowBits > 15 || level < 0 || level > 9 ||7219strategy < 0 || strategy > Z_FIXED) {7220return err(strm, Z_STREAM_ERROR);7221}7222
7223
7224if (windowBits === 8) {7225windowBits = 9;7226}7227/* until 256-byte window bug fixed */7228
7229var s = new DeflateState();7230
7231strm.state = s;7232s.strm = strm;7233
7234s.wrap = wrap;7235s.gzhead = null;7236s.w_bits = windowBits;7237s.w_size = 1 << s.w_bits;7238s.w_mask = s.w_size - 1;7239
7240s.hash_bits = memLevel + 7;7241s.hash_size = 1 << s.hash_bits;7242s.hash_mask = s.hash_size - 1;7243s.hash_shift = ~~((s.hash_bits + MIN_MATCH - 1) / MIN_MATCH);7244
7245s.window = new utils.Buf8(s.w_size * 2);7246s.head = new utils.Buf16(s.hash_size);7247s.prev = new utils.Buf16(s.w_size);7248
7249// Don't need mem init magic for JS.7250//s.high_water = 0; /* nothing written to s->window yet */7251
7252s.lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */7253
7254s.pending_buf_size = s.lit_bufsize * 4;7255
7256//overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);7257//s->pending_buf = (uchf *) overlay;7258s.pending_buf = new utils.Buf8(s.pending_buf_size);7259
7260// It is offset from `s.pending_buf` (size is `s.lit_bufsize * 2`)7261//s->d_buf = overlay + s->lit_bufsize/sizeof(ush);7262s.d_buf = 1 * s.lit_bufsize;7263
7264//s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;7265s.l_buf = (1 + 2) * s.lit_bufsize;7266
7267s.level = level;7268s.strategy = strategy;7269s.method = method;7270
7271return deflateReset(strm);7272}
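
// Illustrative sketch (not part of the original source): the windowBits
// conventions deflateInit2 follows, mirroring zlib. A plain object is enough
// for strm here because deflateInit2 creates the DeflateState itself; the
// parameter values shown are hypothetical. Never invoked.
function __example_deflateInit2() {
  // 8..15                  -> zlib wrapper (adler32 checksum)
  // negative (-15..-8)     -> raw deflate, no wrapper
  // windowBits + 16 (24..31) -> gzip wrapper (crc32 checksum)
  var strm = {};
  return deflateInit2(strm, 6 /* level */, Z_DEFLATED, 15 + 16 /* gzip */, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY); // Z_OK
}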
7273
7274function deflateInit(strm, level) {7275return deflateInit2(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY);7276}
7277
7278
7279function deflate(strm, flush) {7280var old_flush, s;7281var beg, val; // for gzip header write only7282
7283if (!strm || !strm.state ||7284flush > Z_BLOCK || flush < 0) {7285return strm ? err(strm, Z_STREAM_ERROR) : Z_STREAM_ERROR;7286}7287
7288s = strm.state;7289
7290if (!strm.output ||7291(!strm.input && strm.avail_in !== 0) ||7292(s.status === FINISH_STATE && flush !== Z_FINISH)) {7293return err(strm, (strm.avail_out === 0) ? Z_BUF_ERROR : Z_STREAM_ERROR);7294}7295
7296s.strm = strm; /* just in case */7297old_flush = s.last_flush;7298s.last_flush = flush;7299
7300/* Write the header */7301if (s.status === INIT_STATE) {7302
7303if (s.wrap === 2) { // GZIP header7304strm.adler = 0; //crc32(0L, Z_NULL, 0);7305put_byte(s, 31);7306put_byte(s, 139);7307put_byte(s, 8);7308if (!s.gzhead) { // s->gzhead == Z_NULL7309put_byte(s, 0);7310put_byte(s, 0);7311put_byte(s, 0);7312put_byte(s, 0);7313put_byte(s, 0);7314put_byte(s, s.level === 9 ? 2 :7315(s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ?73164 : 0));7317put_byte(s, OS_CODE);7318s.status = BUSY_STATE;7319}7320else {7321put_byte(s, (s.gzhead.text ? 1 : 0) +7322(s.gzhead.hcrc ? 2 : 0) +7323(!s.gzhead.extra ? 0 : 4) +7324(!s.gzhead.name ? 0 : 8) +7325(!s.gzhead.comment ? 0 : 16)7326);7327put_byte(s, s.gzhead.time & 0xff);7328put_byte(s, (s.gzhead.time >> 8) & 0xff);7329put_byte(s, (s.gzhead.time >> 16) & 0xff);7330put_byte(s, (s.gzhead.time >> 24) & 0xff);7331put_byte(s, s.level === 9 ? 2 :7332(s.strategy >= Z_HUFFMAN_ONLY || s.level < 2 ?73334 : 0));7334put_byte(s, s.gzhead.os & 0xff);7335if (s.gzhead.extra && s.gzhead.extra.length) {7336put_byte(s, s.gzhead.extra.length & 0xff);7337put_byte(s, (s.gzhead.extra.length >> 8) & 0xff);7338}7339if (s.gzhead.hcrc) {7340strm.adler = crc32(strm.adler, s.pending_buf, s.pending, 0);7341}7342s.gzindex = 0;7343s.status = EXTRA_STATE;7344}7345}7346else // DEFLATE header7347{7348var header = (Z_DEFLATED + ((s.w_bits - 8) << 4)) << 8;7349var level_flags = -1;7350
7351if (s.strategy >= Z_HUFFMAN_ONLY || s.level < 2) {7352level_flags = 0;7353} else if (s.level < 6) {7354level_flags = 1;7355} else if (s.level === 6) {7356level_flags = 2;7357} else {7358level_flags = 3;7359}7360header |= (level_flags << 6);7361if (s.strstart !== 0) { header |= PRESET_DICT; }7362header += 31 - (header % 31);7363
7364s.status = BUSY_STATE;7365putShortMSB(s, header);7366
7367/* Save the adler32 of the preset dictionary: */7368if (s.strstart !== 0) {7369putShortMSB(s, strm.adler >>> 16);7370putShortMSB(s, strm.adler & 0xffff);7371}7372strm.adler = 1; // adler32(0L, Z_NULL, 0);7373}7374}7375
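      /* Editor's note -- worked example (illustration only): with the defaults
       * w_bits = 15 and level = 6 the two zlib header bytes come out as the
       * familiar 0x78 0x9C:
       *   header      = (8 + ((15 - 8) << 4)) << 8   -> 0x7800
       *   level_flags = 2, so header |= (2 << 6)     -> 0x7880
       *   header += 31 - (0x7880 % 31)               -> 0x789C
       * The final adjustment makes the 16-bit CMF/FLG value divisible by 31,
       * which is the FCHECK requirement of RFC 1950.
       */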
7376//#ifdef GZIP
7377if (s.status === EXTRA_STATE) {7378if (s.gzhead.extra/* != Z_NULL*/) {7379beg = s.pending; /* start of bytes to update crc */7380
7381while (s.gzindex < (s.gzhead.extra.length & 0xffff)) {7382if (s.pending === s.pending_buf_size) {7383if (s.gzhead.hcrc && s.pending > beg) {7384strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);7385}7386flush_pending(strm);7387beg = s.pending;7388if (s.pending === s.pending_buf_size) {7389break;7390}7391}7392put_byte(s, s.gzhead.extra[s.gzindex] & 0xff);7393s.gzindex++;7394}7395if (s.gzhead.hcrc && s.pending > beg) {7396strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);7397}7398if (s.gzindex === s.gzhead.extra.length) {7399s.gzindex = 0;7400s.status = NAME_STATE;7401}7402}7403else {7404s.status = NAME_STATE;7405}7406}7407if (s.status === NAME_STATE) {7408if (s.gzhead.name/* != Z_NULL*/) {7409beg = s.pending; /* start of bytes to update crc */7410//int val;7411
7412do {7413if (s.pending === s.pending_buf_size) {7414if (s.gzhead.hcrc && s.pending > beg) {7415strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);7416}7417flush_pending(strm);7418beg = s.pending;7419if (s.pending === s.pending_buf_size) {7420val = 1;7421break;7422}7423}7424// JS specific: little magic to add zero terminator to end of string7425if (s.gzindex < s.gzhead.name.length) {7426val = s.gzhead.name.charCodeAt(s.gzindex++) & 0xff;7427} else {7428val = 0;7429}7430put_byte(s, val);7431} while (val !== 0);7432
7433if (s.gzhead.hcrc && s.pending > beg) {7434strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);7435}7436if (val === 0) {7437s.gzindex = 0;7438s.status = COMMENT_STATE;7439}7440}7441else {7442s.status = COMMENT_STATE;7443}7444}7445if (s.status === COMMENT_STATE) {7446if (s.gzhead.comment/* != Z_NULL*/) {7447beg = s.pending; /* start of bytes to update crc */7448//int val;7449
7450do {7451if (s.pending === s.pending_buf_size) {7452if (s.gzhead.hcrc && s.pending > beg) {7453strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);7454}7455flush_pending(strm);7456beg = s.pending;7457if (s.pending === s.pending_buf_size) {7458val = 1;7459break;7460}7461}7462// JS specific: little magic to add zero terminator to end of string7463if (s.gzindex < s.gzhead.comment.length) {7464val = s.gzhead.comment.charCodeAt(s.gzindex++) & 0xff;7465} else {7466val = 0;7467}7468put_byte(s, val);7469} while (val !== 0);7470
7471if (s.gzhead.hcrc && s.pending > beg) {7472strm.adler = crc32(strm.adler, s.pending_buf, s.pending - beg, beg);7473}7474if (val === 0) {7475s.status = HCRC_STATE;7476}7477}7478else {7479s.status = HCRC_STATE;7480}7481}7482if (s.status === HCRC_STATE) {7483if (s.gzhead.hcrc) {7484if (s.pending + 2 > s.pending_buf_size) {7485flush_pending(strm);7486}7487if (s.pending + 2 <= s.pending_buf_size) {7488put_byte(s, strm.adler & 0xff);7489put_byte(s, (strm.adler >> 8) & 0xff);7490strm.adler = 0; //crc32(0L, Z_NULL, 0);7491s.status = BUSY_STATE;7492}7493}7494else {7495s.status = BUSY_STATE;7496}7497}7498//#endif
7499
7500/* Flush as much pending output as possible */7501if (s.pending !== 0) {7502flush_pending(strm);7503if (strm.avail_out === 0) {7504/* Since avail_out is 0, deflate will be called again with7505* more output space, but possibly with both pending and
7506* avail_in equal to zero. There won't be anything to do,
7507* but this is not an error situation so make sure we
7508* return OK instead of BUF_ERROR at next call of deflate:
7509*/
7510s.last_flush = -1;7511return Z_OK;7512}7513
7514/* Make sure there is something to do and avoid duplicate consecutive7515* flushes. For repeated and useless calls with Z_FINISH, we keep
7516* returning Z_STREAM_END instead of Z_BUF_ERROR.
7517*/
7518} else if (strm.avail_in === 0 && rank(flush) <= rank(old_flush) &&7519flush !== Z_FINISH) {7520return err(strm, Z_BUF_ERROR);7521}7522
7523/* User must not provide more input after the first FINISH: */7524if (s.status === FINISH_STATE && strm.avail_in !== 0) {7525return err(strm, Z_BUF_ERROR);7526}7527
7528/* Start a new block or continue the current one.7529*/
7530if (strm.avail_in !== 0 || s.lookahead !== 0 ||7531(flush !== Z_NO_FLUSH && s.status !== FINISH_STATE)) {7532var bstate = (s.strategy === Z_HUFFMAN_ONLY) ? deflate_huff(s, flush) :7533(s.strategy === Z_RLE ? deflate_rle(s, flush) :7534configuration_table[s.level].func(s, flush));7535
7536if (bstate === BS_FINISH_STARTED || bstate === BS_FINISH_DONE) {7537s.status = FINISH_STATE;7538}7539if (bstate === BS_NEED_MORE || bstate === BS_FINISH_STARTED) {7540if (strm.avail_out === 0) {7541s.last_flush = -1;7542/* avoid BUF_ERROR next call, see above */7543}7544return Z_OK;7545/* If flush != Z_NO_FLUSH && avail_out == 0, the next call7546* of deflate should use the same flush parameter to make sure
7547* that the flush is complete. So we don't have to output an
7548* empty block here, this will be done at next call. This also
7549* ensures that for a very small output buffer, we emit at most
7550* one empty block.
7551*/
7552}7553if (bstate === BS_BLOCK_DONE) {7554if (flush === Z_PARTIAL_FLUSH) {7555trees._tr_align(s);7556}7557else if (flush !== Z_BLOCK) { /* FULL_FLUSH or SYNC_FLUSH */7558
7559trees._tr_stored_block(s, 0, 0, false);7560/* For a full flush, this empty block will be recognized7561* as a special marker by inflate_sync().
7562*/
7563if (flush === Z_FULL_FLUSH) {7564/*** CLEAR_HASH(s); ***/ /* forget history */7565zero(s.head); // Fill with NIL (= 0);7566
7567if (s.lookahead === 0) {7568s.strstart = 0;7569s.block_start = 0;7570s.insert = 0;7571}7572}7573}7574flush_pending(strm);7575if (strm.avail_out === 0) {7576s.last_flush = -1; /* avoid BUF_ERROR at next call, see above */7577return Z_OK;7578}7579}7580}7581//Assert(strm->avail_out > 0, "bug2");7582//if (strm.avail_out <= 0) { throw new Error("bug2");}7583
7584if (flush !== Z_FINISH) { return Z_OK; }7585if (s.wrap <= 0) { return Z_STREAM_END; }7586
7587/* Write the trailer */7588if (s.wrap === 2) {7589put_byte(s, strm.adler & 0xff);7590put_byte(s, (strm.adler >> 8) & 0xff);7591put_byte(s, (strm.adler >> 16) & 0xff);7592put_byte(s, (strm.adler >> 24) & 0xff);7593put_byte(s, strm.total_in & 0xff);7594put_byte(s, (strm.total_in >> 8) & 0xff);7595put_byte(s, (strm.total_in >> 16) & 0xff);7596put_byte(s, (strm.total_in >> 24) & 0xff);7597}7598else7599{7600putShortMSB(s, strm.adler >>> 16);7601putShortMSB(s, strm.adler & 0xffff);7602}7603
7604flush_pending(strm);7605/* If avail_out is zero, the application will call deflate again7606* to flush the rest.
7607*/
7608if (s.wrap > 0) { s.wrap = -s.wrap; }7609/* write the trailer only once! */7610return s.pending !== 0 ? Z_OK : Z_STREAM_END;7611}
7612
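// Editor's note: the trailer written at the end of deflate() above is, for gzip
// (wrap === 2), the CRC-32 of the uncompressed data followed by its length modulo
// 2^32, both little-endian; for zlib it is the Adler-32 checksum, big-endian.
// A minimal sketch (illustration only, not part of pako) that reads the gzip
// trailer back out of a finished stream:
function exampleReadGzipTrailer(bytes) {
  // `bytes` is assumed to be a Uint8Array holding one complete gzip stream.
  var n = bytes.length;
  var crc = (bytes[n - 8] | (bytes[n - 7] << 8) | (bytes[n - 6] << 16) | (bytes[n - 5] << 24)) >>> 0;
  var isize = (bytes[n - 4] | (bytes[n - 3] << 8) | (bytes[n - 2] << 16) | (bytes[n - 1] << 24)) >>> 0;
  return { crc32: crc, size: isize };
}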
7613function deflateEnd(strm) {7614var status;7615
7616if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) {7617return Z_STREAM_ERROR;7618}7619
7620status = strm.state.status;7621if (status !== INIT_STATE &&7622status !== EXTRA_STATE &&7623status !== NAME_STATE &&7624status !== COMMENT_STATE &&7625status !== HCRC_STATE &&7626status !== BUSY_STATE &&7627status !== FINISH_STATE7628) {7629return err(strm, Z_STREAM_ERROR);7630}7631
7632strm.state = null;7633
7634return status === BUSY_STATE ? err(strm, Z_DATA_ERROR) : Z_OK;7635}
7636
7637
/* =========================================================================
 * Initializes the compression dictionary from the given byte
 * sequence without producing any compressed output.
 */
7642function deflateSetDictionary(strm, dictionary) {7643var dictLength = dictionary.length;7644
7645var s;7646var str, n;7647var wrap;7648var avail;7649var next;7650var input;7651var tmpDict;7652
7653if (!strm/*== Z_NULL*/ || !strm.state/*== Z_NULL*/) {7654return Z_STREAM_ERROR;7655}7656
7657s = strm.state;7658wrap = s.wrap;7659
7660if (wrap === 2 || (wrap === 1 && s.status !== INIT_STATE) || s.lookahead) {7661return Z_STREAM_ERROR;7662}7663
7664/* when using zlib wrappers, compute Adler-32 for provided dictionary */7665if (wrap === 1) {7666/* adler32(strm->adler, dictionary, dictLength); */7667strm.adler = adler32(strm.adler, dictionary, dictLength, 0);7668}7669
7670s.wrap = 0; /* avoid computing Adler-32 in read_buf */7671
7672/* if dictionary would fill window, just replace the history */7673if (dictLength >= s.w_size) {7674if (wrap === 0) { /* already empty otherwise */7675/*** CLEAR_HASH(s); ***/7676zero(s.head); // Fill with NIL (= 0);7677s.strstart = 0;7678s.block_start = 0;7679s.insert = 0;7680}7681/* use the tail */7682// dictionary = dictionary.slice(dictLength - s.w_size);7683tmpDict = new utils.Buf8(s.w_size);7684utils.arraySet(tmpDict, dictionary, dictLength - s.w_size, s.w_size, 0);7685dictionary = tmpDict;7686dictLength = s.w_size;7687}7688/* insert dictionary into window and hash */7689avail = strm.avail_in;7690next = strm.next_in;7691input = strm.input;7692strm.avail_in = dictLength;7693strm.next_in = 0;7694strm.input = dictionary;7695fill_window(s);7696while (s.lookahead >= MIN_MATCH) {7697str = s.strstart;7698n = s.lookahead - (MIN_MATCH - 1);7699do {7700/* UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]); */7701s.ins_h = ((s.ins_h << s.hash_shift) ^ s.window[str + MIN_MATCH - 1]) & s.hash_mask;7702
7703s.prev[str & s.w_mask] = s.head[s.ins_h];7704
7705s.head[s.ins_h] = str;7706str++;7707} while (--n);7708s.strstart = str;7709s.lookahead = MIN_MATCH - 1;7710fill_window(s);7711}7712s.strstart += s.lookahead;7713s.block_start = s.strstart;7714s.insert = s.lookahead;7715s.lookahead = 0;7716s.match_length = s.prev_length = MIN_MATCH - 1;7717s.match_available = 0;7718strm.next_in = next;7719strm.input = input;7720strm.avail_in = avail;7721s.wrap = wrap;7722return Z_OK;7723}
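// Editor's note: in pako's high-level wrapper the function above is reached through
// the `dictionary` option; both the compressor and the decompressor must be given
// the same bytes. A sketch, assuming the bundled pako build exposes that option:
//
//   var dict = new Uint8Array([/* bytes that occur often in the payload */]);
//   var packed   = pako.deflate(data, { dictionary: dict });
//   var unpacked = pako.inflate(packed, { dictionary: dict });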
7724
7725
7726exports.deflateInit = deflateInit;7727exports.deflateInit2 = deflateInit2;7728exports.deflateReset = deflateReset;7729exports.deflateResetKeep = deflateResetKeep;7730exports.deflateSetHeader = deflateSetHeader;7731exports.deflate = deflate;7732exports.deflateEnd = deflateEnd;7733exports.deflateSetDictionary = deflateSetDictionary;7734exports.deflateInfo = 'pako deflate (from Nodeca project)';7735
7736/* Not implemented
7737exports.deflateBound = deflateBound;
7738exports.deflateCopy = deflateCopy;
7739exports.deflateParams = deflateParams;
7740exports.deflatePending = deflatePending;
7741exports.deflatePrime = deflatePrime;
7742exports.deflateTune = deflateTune;
7743*/
7744
7745},{"../utils/common":41,"./adler32":43,"./crc32":45,"./messages":51,"./trees":52}],47:[function(require,module,exports){7746'use strict';7747
7748// (C) 1995-2013 Jean-loup Gailly and Mark Adler
7749// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
7750//
7751// This software is provided 'as-is', without any express or implied
7752// warranty. In no event will the authors be held liable for any damages
7753// arising from the use of this software.
7754//
7755// Permission is granted to anyone to use this software for any purpose,
7756// including commercial applications, and to alter it and redistribute it
7757// freely, subject to the following restrictions:
7758//
7759// 1. The origin of this software must not be misrepresented; you must not
7760// claim that you wrote the original software. If you use this software
7761// in a product, an acknowledgment in the product documentation would be
7762// appreciated but is not required.
7763// 2. Altered source versions must be plainly marked as such, and must not be
7764// misrepresented as being the original software.
7765// 3. This notice may not be removed or altered from any source distribution.
7766
7767function GZheader() {7768/* true if compressed data believed to be text */7769this.text = 0;7770/* modification time */7771this.time = 0;7772/* extra flags (not used when writing a gzip file) */7773this.xflags = 0;7774/* operating system */7775this.os = 0;7776/* pointer to extra field or Z_NULL if none */7777this.extra = null;7778/* extra field length (valid if extra != Z_NULL) */7779this.extra_len = 0; // Actually, we don't need it in JS,7780// but keep it to avoid larger code changes7781
7782//7783// Setting size limits is unnecessary because in JS we do not preallocate memory;7784// inflate uses a constant 65536-byte limit instead.7785//7786
7787/* space at extra (only when reading header) */7788// this.extra_max = 0;7789/* pointer to zero-terminated file name or Z_NULL */7790this.name = '';7791/* space at name (only when reading header) */7792// this.name_max = 0;7793/* pointer to zero-terminated comment or Z_NULL */7794this.comment = '';7795/* space at comment (only when reading header) */7796// this.comm_max = 0;7797/* true if there was or will be a header crc */7798this.hcrc = 0;7799/* true when done reading gzip header (not used when writing a gzip file) */7800this.done = false;7801}
7802
7803module.exports = GZheader;7804
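// Editor's note -- a sketch of how these fields reach a gzip stream (assumptions:
// the bundled pako build and its `header` option, which routes the object through
// deflateSetHeader() and the header-writing code in deflate() above):
//
//   var gz = pako.gzip(data, {
//     header: {
//       name: 'report.txt',                  // FNAME field
//       time: Math.floor(Date.now() / 1000), // MTIME, seconds since the epoch
//       os: 3,                               // 3 = Unix in the RFC 1952 OS codes
//       comment: 'written by example code'   // FCOMMENT field
//     }
//   });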
7805},{}],48:[function(require,module,exports){7806'use strict';7807
7808// (C) 1995-2013 Jean-loup Gailly and Mark Adler
7809// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
7810//
7811// This software is provided 'as-is', without any express or implied
7812// warranty. In no event will the authors be held liable for any damages
7813// arising from the use of this software.
7814//
7815// Permission is granted to anyone to use this software for any purpose,
7816// including commercial applications, and to alter it and redistribute it
7817// freely, subject to the following restrictions:
7818//
7819// 1. The origin of this software must not be misrepresented; you must not
7820// claim that you wrote the original software. If you use this software
7821// in a product, an acknowledgment in the product documentation would be
7822// appreciated but is not required.
7823// 2. Altered source versions must be plainly marked as such, and must not be
7824// misrepresented as being the original software.
7825// 3. This notice may not be removed or altered from any source distribution.
7826
7827// See state defs from inflate.js
7828var BAD = 30; /* got a data error -- remain here until reset */7829var TYPE = 12; /* i: waiting for type bits, including last-flag bit */7830
/*
 Decode literal, length, and distance codes and write out the resulting
 literal and match bytes until either not enough input or output is
 available, an end-of-block is encountered, or a data error is encountered.
 When large enough input and output buffers are supplied to inflate(), for
 example, a 16K input buffer and a 64K output buffer, more than 95% of the
 inflate execution time is spent in this routine.

 Entry assumptions:

      state.mode === LEN
      strm.avail_in >= 6
      strm.avail_out >= 258
      start >= strm.avail_out
      state.bits < 8

 On return, state.mode is one of:

      LEN -- ran out of enough output space or enough available input
      TYPE -- reached end of block code, inflate() to interpret next block
      BAD -- error in block data

 Notes:

  - The maximum input bits used by a length/distance pair is 15 bits for the
    length code, 5 bits for the length extra, 15 bits for the distance code,
    and 13 bits for the distance extra. This totals 48 bits, or six bytes.
    Therefore if strm.avail_in >= 6, then there is enough input to avoid
    checking for available input while decoding.

  - The maximum bytes that a single length/distance pair can output is 258
    bytes, which is the maximum length that can be coded. inflate_fast()
    requires strm.avail_out >= 258 for each loop to avoid checking for
    output space.
 */
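/*
 Editor's note -- the two margins above, in numbers (illustration only):

  - worst-case input per length/distance pair:
      15 (length code) + 5 (length extra) + 15 (distance code) + 13 (distance extra)
      = 48 bits = 6 bytes,
    hence the `avail_in >= 6` entry assumption and the
    `last = _in + (strm.avail_in - 5)` guard in the code below;

  - worst-case output per pair: 258 bytes (the longest match), hence the
    `avail_out >= 258` assumption and the `end = _out + (strm.avail_out - 257)` guard.
 */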
7866module.exports = function inflate_fast(strm, start) {7867var state;7868var _in; /* local strm.input */7869var last; /* have enough input while in < last */7870var _out; /* local strm.output */7871var beg; /* inflate()'s initial strm.output */7872var end; /* while out < end, enough space available */7873//#ifdef INFLATE_STRICT
7874var dmax; /* maximum distance from zlib header */7875//#endif
7876var wsize; /* window size or zero if not using window */7877var whave; /* valid bytes in the window */7878var wnext; /* window write index */7879// Use `s_window` instead `window`, avoid conflict with instrumentation tools7880var s_window; /* allocated sliding window, if wsize != 0 */7881var hold; /* local strm.hold */7882var bits; /* local strm.bits */7883var lcode; /* local strm.lencode */7884var dcode; /* local strm.distcode */7885var lmask; /* mask for first level of length codes */7886var dmask; /* mask for first level of distance codes */7887var here; /* retrieved table entry */7888var op; /* code bits, operation, extra bits, or */7889/* window position, window bytes to copy */7890var len; /* match length, unused bytes */7891var dist; /* match distance */7892var from; /* where to copy match from */7893var from_source;7894
7895
7896var input, output; // JS specific, because we have no pointers7897
7898/* copy state to local variables */7899state = strm.state;7900//here = state.here;7901_in = strm.next_in;7902input = strm.input;7903last = _in + (strm.avail_in - 5);7904_out = strm.next_out;7905output = strm.output;7906beg = _out - (start - strm.avail_out);7907end = _out + (strm.avail_out - 257);7908//#ifdef INFLATE_STRICT
7909dmax = state.dmax;7910//#endif
7911wsize = state.wsize;7912whave = state.whave;7913wnext = state.wnext;7914s_window = state.window;7915hold = state.hold;7916bits = state.bits;7917lcode = state.lencode;7918dcode = state.distcode;7919lmask = (1 << state.lenbits) - 1;7920dmask = (1 << state.distbits) - 1;7921
7922
7923/* decode literals and length/distances until end-of-block or not enough7924input data or output space */
7925
7926top:7927do {7928if (bits < 15) {7929hold += input[_in++] << bits;7930bits += 8;7931hold += input[_in++] << bits;7932bits += 8;7933}7934
7935here = lcode[hold & lmask];7936
7937dolen:7938for (;;) { // Goto emulation7939op = here >>> 24/*here.bits*/;7940hold >>>= op;7941bits -= op;7942op = (here >>> 16) & 0xff/*here.op*/;7943if (op === 0) { /* literal */7944//Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ?7945// "inflate: literal '%c'\n" :7946// "inflate: literal 0x%02x\n", here.val));7947output[_out++] = here & 0xffff/*here.val*/;7948}7949else if (op & 16) { /* length base */7950len = here & 0xffff/*here.val*/;7951op &= 15; /* number of extra bits */7952if (op) {7953if (bits < op) {7954hold += input[_in++] << bits;7955bits += 8;7956}7957len += hold & ((1 << op) - 1);7958hold >>>= op;7959bits -= op;7960}7961//Tracevv((stderr, "inflate: length %u\n", len));7962if (bits < 15) {7963hold += input[_in++] << bits;7964bits += 8;7965hold += input[_in++] << bits;7966bits += 8;7967}7968here = dcode[hold & dmask];7969
7970dodist:7971for (;;) { // goto emulation7972op = here >>> 24/*here.bits*/;7973hold >>>= op;7974bits -= op;7975op = (here >>> 16) & 0xff/*here.op*/;7976
7977if (op & 16) { /* distance base */7978dist = here & 0xffff/*here.val*/;7979op &= 15; /* number of extra bits */7980if (bits < op) {7981hold += input[_in++] << bits;7982bits += 8;7983if (bits < op) {7984hold += input[_in++] << bits;7985bits += 8;7986}7987}7988dist += hold & ((1 << op) - 1);7989//#ifdef INFLATE_STRICT
7990if (dist > dmax) {7991strm.msg = 'invalid distance too far back';7992state.mode = BAD;7993break top;7994}7995//#endif
7996hold >>>= op;7997bits -= op;7998//Tracevv((stderr, "inflate: distance %u\n", dist));7999op = _out - beg; /* max distance in output */8000if (dist > op) { /* see if copy from window */8001op = dist - op; /* distance back in window */8002if (op > whave) {8003if (state.sane) {8004strm.msg = 'invalid distance too far back';8005state.mode = BAD;8006break top;8007}8008
8009// (!) This block is disabled in zlib defaults,
8010// don't enable it for binary compatibility
8011//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR
8012// if (len <= op - whave) {
8013// do {
8014// output[_out++] = 0;
8015// } while (--len);
8016// continue top;
8017// }
8018// len -= op - whave;
8019// do {
8020// output[_out++] = 0;
8021// } while (--op > whave);
8022// if (op === 0) {
8023// from = _out - dist;
8024// do {
8025// output[_out++] = output[from++];
8026// } while (--len);
8027// continue top;
8028// }
8029//#endif
8030}8031from = 0; // window index8032from_source = s_window;8033if (wnext === 0) { /* very common case */8034from += wsize - op;8035if (op < len) { /* some from window */8036len -= op;8037do {8038output[_out++] = s_window[from++];8039} while (--op);8040from = _out - dist; /* rest from output */8041from_source = output;8042}8043}8044else if (wnext < op) { /* wrap around window */8045from += wsize + wnext - op;8046op -= wnext;8047if (op < len) { /* some from end of window */8048len -= op;8049do {8050output[_out++] = s_window[from++];8051} while (--op);8052from = 0;8053if (wnext < len) { /* some from start of window */8054op = wnext;8055len -= op;8056do {8057output[_out++] = s_window[from++];8058} while (--op);8059from = _out - dist; /* rest from output */8060from_source = output;8061}8062}8063}8064else { /* contiguous in window */8065from += wnext - op;8066if (op < len) { /* some from window */8067len -= op;8068do {8069output[_out++] = s_window[from++];8070} while (--op);8071from = _out - dist; /* rest from output */8072from_source = output;8073}8074}8075while (len > 2) {8076output[_out++] = from_source[from++];8077output[_out++] = from_source[from++];8078output[_out++] = from_source[from++];8079len -= 3;8080}8081if (len) {8082output[_out++] = from_source[from++];8083if (len > 1) {8084output[_out++] = from_source[from++];8085}8086}8087}8088else {8089from = _out - dist; /* copy direct from output */8090do { /* minimum length is three */8091output[_out++] = output[from++];8092output[_out++] = output[from++];8093output[_out++] = output[from++];8094len -= 3;8095} while (len > 2);8096if (len) {8097output[_out++] = output[from++];8098if (len > 1) {8099output[_out++] = output[from++];8100}8101}8102}8103}8104else if ((op & 64) === 0) { /* 2nd level distance code */8105here = dcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))];8106continue dodist;8107}8108else {8109strm.msg = 'invalid distance code';8110state.mode = BAD;8111break top;8112}8113
8114break; // need to emulate goto via "continue"8115}8116}8117else if ((op & 64) === 0) { /* 2nd level length code */8118here = lcode[(here & 0xffff)/*here.val*/ + (hold & ((1 << op) - 1))];8119continue dolen;8120}8121else if (op & 32) { /* end-of-block */8122//Tracevv((stderr, "inflate: end of block\n"));8123state.mode = TYPE;8124break top;8125}8126else {8127strm.msg = 'invalid literal/length code';8128state.mode = BAD;8129break top;8130}8131
8132break; // need to emulate goto via "continue"8133}8134} while (_in < last && _out < end);8135
8136/* return unused bytes (on entry, bits < 8, so in won't go too far back) */8137len = bits >> 3;8138_in -= len;8139bits -= len << 3;8140hold &= (1 << bits) - 1;8141
8142/* update state and return */8143strm.next_in = _in;8144strm.next_out = _out;8145strm.avail_in = (_in < last ? 5 + (last - _in) : 5 - (_in - last));8146strm.avail_out = (_out < end ? 257 + (end - _out) : 257 - (_out - end));8147state.hold = hold;8148state.bits = bits;8149return;8150};8151
8152},{}],49:[function(require,module,exports){8153'use strict';8154
8155// (C) 1995-2013 Jean-loup Gailly and Mark Adler
8156// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
8157//
8158// This software is provided 'as-is', without any express or implied
8159// warranty. In no event will the authors be held liable for any damages
8160// arising from the use of this software.
8161//
8162// Permission is granted to anyone to use this software for any purpose,
8163// including commercial applications, and to alter it and redistribute it
8164// freely, subject to the following restrictions:
8165//
8166// 1. The origin of this software must not be misrepresented; you must not
8167// claim that you wrote the original software. If you use this software
8168// in a product, an acknowledgment in the product documentation would be
8169// appreciated but is not required.
8170// 2. Altered source versions must be plainly marked as such, and must not be
8171// misrepresented as being the original software.
8172// 3. This notice may not be removed or altered from any source distribution.
8173
8174var utils = require('../utils/common');8175var adler32 = require('./adler32');8176var crc32 = require('./crc32');8177var inflate_fast = require('./inffast');8178var inflate_table = require('./inftrees');8179
8180var CODES = 0;8181var LENS = 1;8182var DISTS = 2;8183
8184/* Public constants ==========================================================*/
8185/* ===========================================================================*/
8186
8187
8188/* Allowed flush values; see deflate() and inflate() below for details */
8189//var Z_NO_FLUSH = 0;
8190//var Z_PARTIAL_FLUSH = 1;
8191//var Z_SYNC_FLUSH = 2;
8192//var Z_FULL_FLUSH = 3;
8193var Z_FINISH = 4;8194var Z_BLOCK = 5;8195var Z_TREES = 6;8196
8197
8198/* Return codes for the compression/decompression functions. Negative values
8199* are errors, positive values are used for special but normal events.
8200*/
8201var Z_OK = 0;8202var Z_STREAM_END = 1;8203var Z_NEED_DICT = 2;8204//var Z_ERRNO = -1;
8205var Z_STREAM_ERROR = -2;8206var Z_DATA_ERROR = -3;8207var Z_MEM_ERROR = -4;8208var Z_BUF_ERROR = -5;8209//var Z_VERSION_ERROR = -6;
8210
8211/* The deflate compression method */
8212var Z_DEFLATED = 8;8213
8214
8215/* STATES ====================================================================*/
8216/* ===========================================================================*/
8217
8218
8219var HEAD = 1; /* i: waiting for magic header */8220var FLAGS = 2; /* i: waiting for method and flags (gzip) */8221var TIME = 3; /* i: waiting for modification time (gzip) */8222var OS = 4; /* i: waiting for extra flags and operating system (gzip) */8223var EXLEN = 5; /* i: waiting for extra length (gzip) */8224var EXTRA = 6; /* i: waiting for extra bytes (gzip) */8225var NAME = 7; /* i: waiting for end of file name (gzip) */8226var COMMENT = 8; /* i: waiting for end of comment (gzip) */8227var HCRC = 9; /* i: waiting for header crc (gzip) */8228var DICTID = 10; /* i: waiting for dictionary check value */8229var DICT = 11; /* waiting for inflateSetDictionary() call */8230var TYPE = 12; /* i: waiting for type bits, including last-flag bit */8231var TYPEDO = 13; /* i: same, but skip check to exit inflate on new block */8232var STORED = 14; /* i: waiting for stored size (length and complement) */8233var COPY_ = 15; /* i/o: same as COPY below, but only first time in */8234var COPY = 16; /* i/o: waiting for input or output to copy stored block */8235var TABLE = 17; /* i: waiting for dynamic block table lengths */8236var LENLENS = 18; /* i: waiting for code length code lengths */8237var CODELENS = 19; /* i: waiting for length/lit and distance code lengths */8238var LEN_ = 20; /* i: same as LEN below, but only first time in */8239var LEN = 21; /* i: waiting for length/lit/eob code */8240var LENEXT = 22; /* i: waiting for length extra bits */8241var DIST = 23; /* i: waiting for distance code */8242var DISTEXT = 24; /* i: waiting for distance extra bits */8243var MATCH = 25; /* o: waiting for output space to copy string */8244var LIT = 26; /* o: waiting for output space to write literal */8245var CHECK = 27; /* i: waiting for 32-bit check value */8246var LENGTH = 28; /* i: waiting for 32-bit length (gzip) */8247var DONE = 29; /* finished check, done -- remain here until reset */8248var BAD = 30; /* got a data error -- remain here until reset */8249var MEM = 31; /* got an inflate() memory error -- remain here until reset */8250var SYNC = 32; /* looking for synchronization bytes to restart inflate() */8251
8252/* ===========================================================================*/
8253
8254
8255
8256var ENOUGH_LENS = 852;8257var ENOUGH_DISTS = 592;8258//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS);
8259
8260var MAX_WBITS = 15;8261/* 32K LZ77 window */
8262var DEF_WBITS = MAX_WBITS;8263
8264
8265function zswap32(q) {8266return (((q >>> 24) & 0xff) +8267((q >>> 8) & 0xff00) +8268((q & 0xff00) << 8) +8269((q & 0xff) << 24));8270}
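// Editor's note (illustration only): zswap32 reverses the byte order of a 32-bit
// value, e.g. zswap32(0x0A0B0C0D) === 0x0D0C0B0A:
//   (q >>> 24) & 0xff    -> 0x0000000A
//   (q >>>  8) & 0xff00  -> 0x00000B00
//   (q & 0xff00) << 8    -> 0x000C0000
//   (q & 0xff)  << 24    -> 0x0D000000
// It is used below for the big-endian DICTID check value read from a zlib stream.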
8271
8272
8273function InflateState() {8274this.mode = 0; /* current inflate mode */8275this.last = false; /* true if processing last block */8276this.wrap = 0; /* bit 0 true for zlib, bit 1 true for gzip */8277this.havedict = false; /* true if dictionary provided */8278this.flags = 0; /* gzip header method and flags (0 if zlib) */8279this.dmax = 0; /* zlib header max distance (INFLATE_STRICT) */8280this.check = 0; /* protected copy of check value */8281this.total = 0; /* protected copy of output count */8282// TODO: may be {}8283this.head = null; /* where to save gzip header information */8284
8285/* sliding window */8286this.wbits = 0; /* log base 2 of requested window size */8287this.wsize = 0; /* window size or zero if not using window */8288this.whave = 0; /* valid bytes in the window */8289this.wnext = 0; /* window write index */8290this.window = null; /* allocated sliding window, if needed */8291
8292/* bit accumulator */8293this.hold = 0; /* input bit accumulator */8294this.bits = 0; /* number of bits in "in" */8295
8296/* for string and stored block copying */8297this.length = 0; /* literal or length of data to copy */8298this.offset = 0; /* distance back to copy string from */8299
8300/* for table and code decoding */8301this.extra = 0; /* extra bits needed */8302
8303/* fixed and dynamic code tables */8304this.lencode = null; /* starting table for length/literal codes */8305this.distcode = null; /* starting table for distance codes */8306this.lenbits = 0; /* index bits for lencode */8307this.distbits = 0; /* index bits for distcode */8308
8309/* dynamic table building */8310this.ncode = 0; /* number of code length code lengths */8311this.nlen = 0; /* number of length code lengths */8312this.ndist = 0; /* number of distance code lengths */8313this.have = 0; /* number of code lengths in lens[] */8314this.next = null; /* next available space in codes[] */8315
8316this.lens = new utils.Buf16(320); /* temporary storage for code lengths */8317this.work = new utils.Buf16(288); /* work area for code table building */8318
8319/*8320because we don't have pointers in js, we use lencode and distcode directly
8321as buffers so we don't need codes
8322*/
8323//this.codes = new utils.Buf32(ENOUGH); /* space for code tables */8324this.lendyn = null; /* dynamic table for length/literal codes (JS specific) */8325this.distdyn = null; /* dynamic table for distance codes (JS specific) */8326this.sane = 0; /* if false, allow invalid distance too far */8327this.back = 0; /* bits back of last unprocessed length/lit */8328this.was = 0; /* initial length of match */8329}
8330
8331function inflateResetKeep(strm) {8332var state;8333
8334if (!strm || !strm.state) { return Z_STREAM_ERROR; }8335state = strm.state;8336strm.total_in = strm.total_out = state.total = 0;8337strm.msg = ''; /*Z_NULL*/8338if (state.wrap) { /* to support ill-conceived Java test suite */8339strm.adler = state.wrap & 1;8340}8341state.mode = HEAD;8342state.last = 0;8343state.havedict = 0;8344state.dmax = 32768;8345state.head = null/*Z_NULL*/;8346state.hold = 0;8347state.bits = 0;8348//state.lencode = state.distcode = state.next = state.codes;8349state.lencode = state.lendyn = new utils.Buf32(ENOUGH_LENS);8350state.distcode = state.distdyn = new utils.Buf32(ENOUGH_DISTS);8351
8352state.sane = 1;8353state.back = -1;8354//Tracev((stderr, "inflate: reset\n"));8355return Z_OK;8356}
8357
8358function inflateReset(strm) {8359var state;8360
8361if (!strm || !strm.state) { return Z_STREAM_ERROR; }8362state = strm.state;8363state.wsize = 0;8364state.whave = 0;8365state.wnext = 0;8366return inflateResetKeep(strm);8367
8368}
8369
8370function inflateReset2(strm, windowBits) {8371var wrap;8372var state;8373
8374/* get the state */8375if (!strm || !strm.state) { return Z_STREAM_ERROR; }8376state = strm.state;8377
8378/* extract wrap request from windowBits parameter */8379if (windowBits < 0) {8380wrap = 0;8381windowBits = -windowBits;8382}8383else {8384wrap = (windowBits >> 4) + 1;8385if (windowBits < 48) {8386windowBits &= 15;8387}8388}8389
8390/* set number of window bits, free window if different */8391if (windowBits && (windowBits < 8 || windowBits > 15)) {8392return Z_STREAM_ERROR;8393}8394if (state.window !== null && state.wbits !== windowBits) {8395state.window = null;8396}8397
8398/* update state and reset the rest of it */8399state.wrap = wrap;8400state.wbits = windowBits;8401return inflateReset(strm);8402}
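// Editor's note -- how the combined windowBits argument decodes through the logic
// above (illustration only):
//
//   inflateInit2(strm, -15) -> wrap = 0 (raw deflate, no header/trailer), wbits = 15
//   inflateInit2(strm,  15) -> wrap = 1 (zlib wrapper),                   wbits = 15
//   inflateInit2(strm,  31) -> wrap = 2 (gzip wrapper),                   wbits = 15
//   inflateInit2(strm,  47) -> wrap = 3 (auto-detect zlib or gzip),       wbits = 15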
8403
8404function inflateInit2(strm, windowBits) {8405var ret;8406var state;8407
8408if (!strm) { return Z_STREAM_ERROR; }8409//strm.msg = Z_NULL; /* in case we return an error */8410
8411state = new InflateState();8412
8413//if (state === Z_NULL) return Z_MEM_ERROR;8414//Tracev((stderr, "inflate: allocated\n"));8415strm.state = state;8416state.window = null/*Z_NULL*/;8417ret = inflateReset2(strm, windowBits);8418if (ret !== Z_OK) {8419strm.state = null/*Z_NULL*/;8420}8421return ret;8422}
8423
8424function inflateInit(strm) {8425return inflateInit2(strm, DEF_WBITS);8426}
8427
8428
/*
 Return state with length and distance decoding tables and index sizes set to
 fixed code decoding. Normally this returns fixed tables from inffixed.h.
 If BUILDFIXED is defined, then instead this routine builds the tables the
 first time it's called, and returns those tables the first time and
 thereafter. This reduces the size of the code by about 2K bytes, in
 exchange for a little execution time. However, BUILDFIXED should not be
 used for threaded applications, since the rewriting of the tables and virgin
 may not be thread-safe.
 */
8439var virgin = true;8440
8441var lenfix, distfix; // We have no pointers in JS, so keep tables separate8442
8443function fixedtables(state) {8444/* build fixed huffman tables if first call (may not be thread safe) */8445if (virgin) {8446var sym;8447
8448lenfix = new utils.Buf32(512);8449distfix = new utils.Buf32(32);8450
8451/* literal/length table */8452sym = 0;8453while (sym < 144) { state.lens[sym++] = 8; }8454while (sym < 256) { state.lens[sym++] = 9; }8455while (sym < 280) { state.lens[sym++] = 7; }8456while (sym < 288) { state.lens[sym++] = 8; }8457
8458inflate_table(LENS, state.lens, 0, 288, lenfix, 0, state.work, { bits: 9 });8459
8460/* distance table */8461sym = 0;8462while (sym < 32) { state.lens[sym++] = 5; }8463
8464inflate_table(DISTS, state.lens, 0, 32, distfix, 0, state.work, { bits: 5 });8465
8466/* do this just once */8467virgin = false;8468}8469
8470state.lencode = lenfix;8471state.lenbits = 9;8472state.distcode = distfix;8473state.distbits = 5;8474}
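// Editor's note (illustration only): the fixed literal/length code built above is
// the one from RFC 1951, section 3.2.6 -- 144 codes of 8 bits, 112 of 9 bits,
// 24 of 7 bits and 8 of 8 bits -- and it is complete in the Kraft sense:
//   144/2^8 + 112/2^9 + 24/2^7 + 8/2^8 = 0.5625 + 0.21875 + 0.1875 + 0.03125 = 1.0
// Likewise the 32 five-bit distance codes give 32/2^5 = 1.0 (codes 30-31 never
// occur in valid data but keep the table complete).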
8475
8476
/*
 Update the window with the last wsize (normally 32K) bytes written before
 returning. If window does not exist yet, create it. This is only called
 when a window is already in use, or when output has been written during this
 inflate call, but the end of the deflate stream has not been reached yet.
 It is also called to create a window for dictionary data when a dictionary
 is loaded.

 Providing output buffers larger than 32K to inflate() should provide a speed
 advantage, since only the last 32K of output is copied to the sliding window
 upon return from inflate(), and since all distances after the first 32K of
 output will fall in the output data, making match copies simpler and faster.
 The advantage may be dependent on the size of the processor's data caches.
 */
8491function updatewindow(strm, src, end, copy) {8492var dist;8493var state = strm.state;8494
8495/* if it hasn't been done already, allocate space for the window */8496if (state.window === null) {8497state.wsize = 1 << state.wbits;8498state.wnext = 0;8499state.whave = 0;8500
8501state.window = new utils.Buf8(state.wsize);8502}8503
8504/* copy state->wsize or less output bytes into the circular window */8505if (copy >= state.wsize) {8506utils.arraySet(state.window, src, end - state.wsize, state.wsize, 0);8507state.wnext = 0;8508state.whave = state.wsize;8509}8510else {8511dist = state.wsize - state.wnext;8512if (dist > copy) {8513dist = copy;8514}8515//zmemcpy(state->window + state->wnext, end - copy, dist);8516utils.arraySet(state.window, src, end - copy, dist, state.wnext);8517copy -= dist;8518if (copy) {8519//zmemcpy(state->window, end - copy, copy);8520utils.arraySet(state.window, src, end - copy, copy, 0);8521state.wnext = copy;8522state.whave = state.wsize;8523}8524else {8525state.wnext += dist;8526if (state.wnext === state.wsize) { state.wnext = 0; }8527if (state.whave < state.wsize) { state.whave += dist; }8528}8529}8530return 0;8531}
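// Editor's note -- a worked pass through the wrap-around branch above (illustration
// only): with wsize = 32768, wnext = 30000 and copy = 5000 fresh output bytes,
//   dist = 32768 - 30000 = 2768  -> the first chunk fills offsets 30000..32767,
//   copy -= 2768                 -> 2232 bytes remain and wrap to offsets 0..2231,
//   wnext = 2232, whave = 32768  -> the window is full and stays full afterwards.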
8532
8533function inflate(strm, flush) {8534var state;8535var input, output; // input/output buffers8536var next; /* next input INDEX */8537var put; /* next output INDEX */8538var have, left; /* available input and output */8539var hold; /* bit buffer */8540var bits; /* bits in bit buffer */8541var _in, _out; /* save starting available input and output */8542var copy; /* number of stored or match bytes to copy */8543var from; /* where to copy match bytes from */8544var from_source;8545var here = 0; /* current decoding table entry */8546var here_bits, here_op, here_val; // packed "here" denormalized (JS specific)8547//var last; /* parent table entry */8548var last_bits, last_op, last_val; // packed "last" denormalized (JS specific)8549var len; /* length to copy for repeats, bits to drop */8550var ret; /* return code */8551var hbuf = new utils.Buf8(4); /* buffer for gzip header crc calculation */8552var opts;8553
8554var n; // temporary var for NEED_BITS8555
8556var order = /* permutation of code lengths */8557[ 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 ];8558
8559
8560if (!strm || !strm.state || !strm.output ||8561(!strm.input && strm.avail_in !== 0)) {8562return Z_STREAM_ERROR;8563}8564
8565state = strm.state;8566if (state.mode === TYPE) { state.mode = TYPEDO; } /* skip check */8567
8568
8569//--- LOAD() ---8570put = strm.next_out;8571output = strm.output;8572left = strm.avail_out;8573next = strm.next_in;8574input = strm.input;8575have = strm.avail_in;8576hold = state.hold;8577bits = state.bits;8578//---8579
8580_in = have;8581_out = left;8582ret = Z_OK;8583
8584inf_leave: // goto emulation8585for (;;) {8586switch (state.mode) {8587case HEAD:8588if (state.wrap === 0) {8589state.mode = TYPEDO;8590break;8591}8592//=== NEEDBITS(16);8593while (bits < 16) {8594if (have === 0) { break inf_leave; }8595have--;8596hold += input[next++] << bits;8597bits += 8;8598}8599//===//8600if ((state.wrap & 2) && hold === 0x8b1f) { /* gzip header */8601state.check = 0/*crc32(0L, Z_NULL, 0)*/;8602//=== CRC2(state.check, hold);8603hbuf[0] = hold & 0xff;8604hbuf[1] = (hold >>> 8) & 0xff;8605state.check = crc32(state.check, hbuf, 2, 0);8606//===//8607
8608//=== INITBITS();8609hold = 0;8610bits = 0;8611//===//8612state.mode = FLAGS;8613break;8614}8615state.flags = 0; /* expect zlib header */8616if (state.head) {8617state.head.done = false;8618}8619if (!(state.wrap & 1) || /* check if zlib header allowed */8620(((hold & 0xff)/*BITS(8)*/ << 8) + (hold >> 8)) % 31) {8621strm.msg = 'incorrect header check';8622state.mode = BAD;8623break;8624}8625if ((hold & 0x0f)/*BITS(4)*/ !== Z_DEFLATED) {8626strm.msg = 'unknown compression method';8627state.mode = BAD;8628break;8629}8630//--- DROPBITS(4) ---//8631hold >>>= 4;8632bits -= 4;8633//---//8634len = (hold & 0x0f)/*BITS(4)*/ + 8;8635if (state.wbits === 0) {8636state.wbits = len;8637}8638else if (len > state.wbits) {8639strm.msg = 'invalid window size';8640state.mode = BAD;8641break;8642}8643state.dmax = 1 << len;8644//Tracev((stderr, "inflate: zlib header ok\n"));8645strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/;8646state.mode = hold & 0x200 ? DICTID : TYPE;8647//=== INITBITS();8648hold = 0;8649bits = 0;8650//===//8651break;8652case FLAGS:8653//=== NEEDBITS(16); */8654while (bits < 16) {8655if (have === 0) { break inf_leave; }8656have--;8657hold += input[next++] << bits;8658bits += 8;8659}8660//===//8661state.flags = hold;8662if ((state.flags & 0xff) !== Z_DEFLATED) {8663strm.msg = 'unknown compression method';8664state.mode = BAD;8665break;8666}8667if (state.flags & 0xe000) {8668strm.msg = 'unknown header flags set';8669state.mode = BAD;8670break;8671}8672if (state.head) {8673state.head.text = ((hold >> 8) & 1);8674}8675if (state.flags & 0x0200) {8676//=== CRC2(state.check, hold);8677hbuf[0] = hold & 0xff;8678hbuf[1] = (hold >>> 8) & 0xff;8679state.check = crc32(state.check, hbuf, 2, 0);8680//===//8681}8682//=== INITBITS();8683hold = 0;8684bits = 0;8685//===//8686state.mode = TIME;8687/* falls through */8688case TIME:8689//=== NEEDBITS(32); */8690while (bits < 32) {8691if (have === 0) { break inf_leave; }8692have--;8693hold += input[next++] << bits;8694bits += 8;8695}8696//===//8697if (state.head) {8698state.head.time = hold;8699}8700if (state.flags & 0x0200) {8701//=== CRC4(state.check, hold)8702hbuf[0] = hold & 0xff;8703hbuf[1] = (hold >>> 8) & 0xff;8704hbuf[2] = (hold >>> 16) & 0xff;8705hbuf[3] = (hold >>> 24) & 0xff;8706state.check = crc32(state.check, hbuf, 4, 0);8707//===8708}8709//=== INITBITS();8710hold = 0;8711bits = 0;8712//===//8713state.mode = OS;8714/* falls through */8715case OS:8716//=== NEEDBITS(16); */8717while (bits < 16) {8718if (have === 0) { break inf_leave; }8719have--;8720hold += input[next++] << bits;8721bits += 8;8722}8723//===//8724if (state.head) {8725state.head.xflags = (hold & 0xff);8726state.head.os = (hold >> 8);8727}8728if (state.flags & 0x0200) {8729//=== CRC2(state.check, hold);8730hbuf[0] = hold & 0xff;8731hbuf[1] = (hold >>> 8) & 0xff;8732state.check = crc32(state.check, hbuf, 2, 0);8733//===//8734}8735//=== INITBITS();8736hold = 0;8737bits = 0;8738//===//8739state.mode = EXLEN;8740/* falls through */8741case EXLEN:8742if (state.flags & 0x0400) {8743//=== NEEDBITS(16); */8744while (bits < 16) {8745if (have === 0) { break inf_leave; }8746have--;8747hold += input[next++] << bits;8748bits += 8;8749}8750//===//8751state.length = hold;8752if (state.head) {8753state.head.extra_len = hold;8754}8755if (state.flags & 0x0200) {8756//=== CRC2(state.check, hold);8757hbuf[0] = hold & 0xff;8758hbuf[1] = (hold >>> 8) & 0xff;8759state.check = crc32(state.check, hbuf, 2, 0);8760//===//8761}8762//=== INITBITS();8763hold = 0;8764bits = 0;8765//===//8766}8767else if 
(state.head) {8768state.head.extra = null/*Z_NULL*/;8769}8770state.mode = EXTRA;8771/* falls through */8772case EXTRA:8773if (state.flags & 0x0400) {8774copy = state.length;8775if (copy > have) { copy = have; }8776if (copy) {8777if (state.head) {8778len = state.head.extra_len - state.length;8779if (!state.head.extra) {8780// Use untyped array for more convenient processing later8781state.head.extra = new Array(state.head.extra_len);8782}8783utils.arraySet(8784state.head.extra,8785input,8786next,8787// extra field is limited to 65536 bytes8788// - no need for additional size check8789copy,8790/*len + copy > state.head.extra_max - len ? state.head.extra_max : copy,*/8791len
8792);8793//zmemcpy(state.head.extra + len, next,8794// len + copy > state.head.extra_max ?8795// state.head.extra_max - len : copy);8796}8797if (state.flags & 0x0200) {8798state.check = crc32(state.check, input, copy, next);8799}8800have -= copy;8801next += copy;8802state.length -= copy;8803}8804if (state.length) { break inf_leave; }8805}8806state.length = 0;8807state.mode = NAME;8808/* falls through */8809case NAME:8810if (state.flags & 0x0800) {8811if (have === 0) { break inf_leave; }8812copy = 0;8813do {8814// TODO: 2 or 1 bytes?8815len = input[next + copy++];8816/* use constant limit because in js we should not preallocate memory */8817if (state.head && len &&8818(state.length < 65536 /*state.head.name_max*/)) {8819state.head.name += String.fromCharCode(len);8820}8821} while (len && copy < have);8822
8823if (state.flags & 0x0200) {8824state.check = crc32(state.check, input, copy, next);8825}8826have -= copy;8827next += copy;8828if (len) { break inf_leave; }8829}8830else if (state.head) {8831state.head.name = null;8832}8833state.length = 0;8834state.mode = COMMENT;8835/* falls through */8836case COMMENT:8837if (state.flags & 0x1000) {8838if (have === 0) { break inf_leave; }8839copy = 0;8840do {8841len = input[next + copy++];8842/* use constant limit because in js we should not preallocate memory */8843if (state.head && len &&8844(state.length < 65536 /*state.head.comm_max*/)) {8845state.head.comment += String.fromCharCode(len);8846}8847} while (len && copy < have);8848if (state.flags & 0x0200) {8849state.check = crc32(state.check, input, copy, next);8850}8851have -= copy;8852next += copy;8853if (len) { break inf_leave; }8854}8855else if (state.head) {8856state.head.comment = null;8857}8858state.mode = HCRC;8859/* falls through */8860case HCRC:8861if (state.flags & 0x0200) {8862//=== NEEDBITS(16); */8863while (bits < 16) {8864if (have === 0) { break inf_leave; }8865have--;8866hold += input[next++] << bits;8867bits += 8;8868}8869//===//8870if (hold !== (state.check & 0xffff)) {8871strm.msg = 'header crc mismatch';8872state.mode = BAD;8873break;8874}8875//=== INITBITS();8876hold = 0;8877bits = 0;8878//===//8879}8880if (state.head) {8881state.head.hcrc = ((state.flags >> 9) & 1);8882state.head.done = true;8883}8884strm.adler = state.check = 0;8885state.mode = TYPE;8886break;8887case DICTID:8888//=== NEEDBITS(32); */8889while (bits < 32) {8890if (have === 0) { break inf_leave; }8891have--;8892hold += input[next++] << bits;8893bits += 8;8894}8895//===//8896strm.adler = state.check = zswap32(hold);8897//=== INITBITS();8898hold = 0;8899bits = 0;8900//===//8901state.mode = DICT;8902/* falls through */8903case DICT:8904if (state.havedict === 0) {8905//--- RESTORE() ---8906strm.next_out = put;8907strm.avail_out = left;8908strm.next_in = next;8909strm.avail_in = have;8910state.hold = hold;8911state.bits = bits;8912//---8913return Z_NEED_DICT;8914}8915strm.adler = state.check = 1/*adler32(0L, Z_NULL, 0)*/;8916state.mode = TYPE;8917/* falls through */8918case TYPE:8919if (flush === Z_BLOCK || flush === Z_TREES) { break inf_leave; }8920/* falls through */8921case TYPEDO:8922if (state.last) {8923//--- BYTEBITS() ---//8924hold >>>= bits & 7;8925bits -= bits & 7;8926//---//8927state.mode = CHECK;8928break;8929}8930//=== NEEDBITS(3); */8931while (bits < 3) {8932if (have === 0) { break inf_leave; }8933have--;8934hold += input[next++] << bits;8935bits += 8;8936}8937//===//8938state.last = (hold & 0x01)/*BITS(1)*/;8939//--- DROPBITS(1) ---//8940hold >>>= 1;8941bits -= 1;8942//---//8943
8944switch ((hold & 0x03)/*BITS(2)*/) {8945case 0: /* stored block */8946//Tracev((stderr, "inflate: stored block%s\n",8947// state.last ? " (last)" : ""));8948state.mode = STORED;8949break;8950case 1: /* fixed block */8951fixedtables(state);8952//Tracev((stderr, "inflate: fixed codes block%s\n",8953// state.last ? " (last)" : ""));8954state.mode = LEN_; /* decode codes */8955if (flush === Z_TREES) {8956//--- DROPBITS(2) ---//8957hold >>>= 2;8958bits -= 2;8959//---//8960break inf_leave;8961}8962break;8963case 2: /* dynamic block */8964//Tracev((stderr, "inflate: dynamic codes block%s\n",8965// state.last ? " (last)" : ""));8966state.mode = TABLE;8967break;8968case 3:8969strm.msg = 'invalid block type';8970state.mode = BAD;8971}8972//--- DROPBITS(2) ---//8973hold >>>= 2;8974bits -= 2;8975//---//8976break;8977case STORED:8978//--- BYTEBITS() ---// /* go to byte boundary */8979hold >>>= bits & 7;8980bits -= bits & 7;8981//---//8982//=== NEEDBITS(32); */8983while (bits < 32) {8984if (have === 0) { break inf_leave; }8985have--;8986hold += input[next++] << bits;8987bits += 8;8988}8989//===//8990if ((hold & 0xffff) !== ((hold >>> 16) ^ 0xffff)) {8991strm.msg = 'invalid stored block lengths';8992state.mode = BAD;8993break;8994}8995state.length = hold & 0xffff;8996//Tracev((stderr, "inflate: stored length %u\n",8997// state.length));8998//=== INITBITS();8999hold = 0;9000bits = 0;9001//===//9002state.mode = COPY_;9003if (flush === Z_TREES) { break inf_leave; }9004/* falls through */9005case COPY_:9006state.mode = COPY;9007/* falls through */9008case COPY:9009copy = state.length;9010if (copy) {9011if (copy > have) { copy = have; }9012if (copy > left) { copy = left; }9013if (copy === 0) { break inf_leave; }9014//--- zmemcpy(put, next, copy); ---9015utils.arraySet(output, input, next, copy, put);9016//---//9017have -= copy;9018next += copy;9019left -= copy;9020put += copy;9021state.length -= copy;9022break;9023}9024//Tracev((stderr, "inflate: stored end\n"));9025state.mode = TYPE;9026break;9027case TABLE:9028//=== NEEDBITS(14); */9029while (bits < 14) {9030if (have === 0) { break inf_leave; }9031have--;9032hold += input[next++] << bits;9033bits += 8;9034}9035//===//9036state.nlen = (hold & 0x1f)/*BITS(5)*/ + 257;9037//--- DROPBITS(5) ---//9038hold >>>= 5;9039bits -= 5;9040//---//9041state.ndist = (hold & 0x1f)/*BITS(5)*/ + 1;9042//--- DROPBITS(5) ---//9043hold >>>= 5;9044bits -= 5;9045//---//9046state.ncode = (hold & 0x0f)/*BITS(4)*/ + 4;9047//--- DROPBITS(4) ---//9048hold >>>= 4;9049bits -= 4;9050//---//9051//#ifndef PKZIP_BUG_WORKAROUND
9052if (state.nlen > 286 || state.ndist > 30) {9053strm.msg = 'too many length or distance symbols';9054state.mode = BAD;9055break;9056}9057//#endif
9058//Tracev((stderr, "inflate: table sizes ok\n"));9059state.have = 0;9060state.mode = LENLENS;9061/* falls through */9062case LENLENS:9063while (state.have < state.ncode) {9064//=== NEEDBITS(3);9065while (bits < 3) {9066if (have === 0) { break inf_leave; }9067have--;9068hold += input[next++] << bits;9069bits += 8;9070}9071//===//9072state.lens[order[state.have++]] = (hold & 0x07);//BITS(3);9073//--- DROPBITS(3) ---//9074hold >>>= 3;9075bits -= 3;9076//---//9077}9078while (state.have < 19) {9079state.lens[order[state.have++]] = 0;9080}9081// We have separate tables & no pointers. 2 commented lines below not needed.9082//state.next = state.codes;9083//state.lencode = state.next;9084// Switch to use dynamic table9085state.lencode = state.lendyn;9086state.lenbits = 7;9087
9088opts = { bits: state.lenbits };9089ret = inflate_table(CODES, state.lens, 0, 19, state.lencode, 0, state.work, opts);9090state.lenbits = opts.bits;9091
9092if (ret) {9093strm.msg = 'invalid code lengths set';9094state.mode = BAD;9095break;9096}9097//Tracev((stderr, "inflate: code lengths ok\n"));9098state.have = 0;9099state.mode = CODELENS;9100/* falls through */9101case CODELENS:9102while (state.have < state.nlen + state.ndist) {9103for (;;) {9104here = state.lencode[hold & ((1 << state.lenbits) - 1)];/*BITS(state.lenbits)*/9105here_bits = here >>> 24;9106here_op = (here >>> 16) & 0xff;9107here_val = here & 0xffff;9108
9109if ((here_bits) <= bits) { break; }9110//--- PULLBYTE() ---//9111if (have === 0) { break inf_leave; }9112have--;9113hold += input[next++] << bits;9114bits += 8;9115//---//9116}9117if (here_val < 16) {9118//--- DROPBITS(here.bits) ---//9119hold >>>= here_bits;9120bits -= here_bits;9121//---//9122state.lens[state.have++] = here_val;9123}9124else {9125if (here_val === 16) {9126//=== NEEDBITS(here.bits + 2);9127n = here_bits + 2;9128while (bits < n) {9129if (have === 0) { break inf_leave; }9130have--;9131hold += input[next++] << bits;9132bits += 8;9133}9134//===//9135//--- DROPBITS(here.bits) ---//9136hold >>>= here_bits;9137bits -= here_bits;9138//---//9139if (state.have === 0) {9140strm.msg = 'invalid bit length repeat';9141state.mode = BAD;9142break;9143}9144len = state.lens[state.have - 1];9145copy = 3 + (hold & 0x03);//BITS(2);9146//--- DROPBITS(2) ---//9147hold >>>= 2;9148bits -= 2;9149//---//9150}9151else if (here_val === 17) {9152//=== NEEDBITS(here.bits + 3);9153n = here_bits + 3;9154while (bits < n) {9155if (have === 0) { break inf_leave; }9156have--;9157hold += input[next++] << bits;9158bits += 8;9159}9160//===//9161//--- DROPBITS(here.bits) ---//9162hold >>>= here_bits;9163bits -= here_bits;9164//---//9165len = 0;9166copy = 3 + (hold & 0x07);//BITS(3);9167//--- DROPBITS(3) ---//9168hold >>>= 3;9169bits -= 3;9170//---//9171}9172else {9173//=== NEEDBITS(here.bits + 7);9174n = here_bits + 7;9175while (bits < n) {9176if (have === 0) { break inf_leave; }9177have--;9178hold += input[next++] << bits;9179bits += 8;9180}9181//===//9182//--- DROPBITS(here.bits) ---//9183hold >>>= here_bits;9184bits -= here_bits;9185//---//9186len = 0;9187copy = 11 + (hold & 0x7f);//BITS(7);9188//--- DROPBITS(7) ---//9189hold >>>= 7;9190bits -= 7;9191//---//9192}9193if (state.have + copy > state.nlen + state.ndist) {9194strm.msg = 'invalid bit length repeat';9195state.mode = BAD;9196break;9197}9198while (copy--) {9199state.lens[state.have++] = len;9200}9201}9202}9203
9204/* handle error breaks in while */9205if (state.mode === BAD) { break; }9206
9207/* check for end-of-block code (better have one) */9208if (state.lens[256] === 0) {9209strm.msg = 'invalid code -- missing end-of-block';9210state.mode = BAD;9211break;9212}9213
9214/* build code tables -- note: do not change the lenbits or distbits9215values here (9 and 6) without reading the comments in inftrees.h
9216concerning the ENOUGH constants, which depend on those values */
9217state.lenbits = 9;9218
9219opts = { bits: state.lenbits };9220ret = inflate_table(LENS, state.lens, 0, state.nlen, state.lencode, 0, state.work, opts);9221// We have separate tables & no pointers. 2 commented lines below not needed.9222// state.next_index = opts.table_index;9223state.lenbits = opts.bits;9224// state.lencode = state.next;9225
9226if (ret) {9227strm.msg = 'invalid literal/lengths set';9228state.mode = BAD;9229break;9230}9231
9232state.distbits = 6;9233//state.distcode.copy(state.codes);9234// Switch to use dynamic table9235state.distcode = state.distdyn;9236opts = { bits: state.distbits };9237ret = inflate_table(DISTS, state.lens, state.nlen, state.ndist, state.distcode, 0, state.work, opts);9238// We have separate tables & no pointers. 2 commented lines below not needed.9239// state.next_index = opts.table_index;9240state.distbits = opts.bits;9241// state.distcode = state.next;9242
9243if (ret) {9244strm.msg = 'invalid distances set';9245state.mode = BAD;9246break;9247}9248//Tracev((stderr, 'inflate: codes ok\n'));9249state.mode = LEN_;9250if (flush === Z_TREES) { break inf_leave; }9251/* falls through */9252case LEN_:9253state.mode = LEN;9254/* falls through */9255case LEN:9256if (have >= 6 && left >= 258) {9257//--- RESTORE() ---9258strm.next_out = put;9259strm.avail_out = left;9260strm.next_in = next;9261strm.avail_in = have;9262state.hold = hold;9263state.bits = bits;9264//---9265inflate_fast(strm, _out);9266//--- LOAD() ---9267put = strm.next_out;9268output = strm.output;9269left = strm.avail_out;9270next = strm.next_in;9271input = strm.input;9272have = strm.avail_in;9273hold = state.hold;9274bits = state.bits;9275//---9276
9277if (state.mode === TYPE) {9278state.back = -1;9279}9280break;9281}9282state.back = 0;9283for (;;) {9284here = state.lencode[hold & ((1 << state.lenbits) - 1)]; /*BITS(state.lenbits)*/9285here_bits = here >>> 24;9286here_op = (here >>> 16) & 0xff;9287here_val = here & 0xffff;9288
9289if (here_bits <= bits) { break; }9290//--- PULLBYTE() ---//9291if (have === 0) { break inf_leave; }9292have--;9293hold += input[next++] << bits;9294bits += 8;9295//---//9296}9297if (here_op && (here_op & 0xf0) === 0) {9298last_bits = here_bits;9299last_op = here_op;9300last_val = here_val;9301for (;;) {9302here = state.lencode[last_val +9303((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)];9304here_bits = here >>> 24;9305here_op = (here >>> 16) & 0xff;9306here_val = here & 0xffff;9307
9308if ((last_bits + here_bits) <= bits) { break; }9309//--- PULLBYTE() ---//9310if (have === 0) { break inf_leave; }9311have--;9312hold += input[next++] << bits;9313bits += 8;9314//---//9315}9316//--- DROPBITS(last.bits) ---//9317hold >>>= last_bits;9318bits -= last_bits;9319//---//9320state.back += last_bits;9321}9322//--- DROPBITS(here.bits) ---//9323hold >>>= here_bits;9324bits -= here_bits;9325//---//9326state.back += here_bits;9327state.length = here_val;9328if (here_op === 0) {9329//Tracevv((stderr, here.val >= 0x20 && here.val < 0x7f ?9330// "inflate: literal '%c'\n" :9331// "inflate: literal 0x%02x\n", here.val));9332state.mode = LIT;9333break;9334}9335if (here_op & 32) {9336//Tracevv((stderr, "inflate: end of block\n"));9337state.back = -1;9338state.mode = TYPE;9339break;9340}9341if (here_op & 64) {9342strm.msg = 'invalid literal/length code';9343state.mode = BAD;9344break;9345}9346state.extra = here_op & 15;9347state.mode = LENEXT;9348/* falls through */9349case LENEXT:9350if (state.extra) {9351//=== NEEDBITS(state.extra);9352n = state.extra;9353while (bits < n) {9354if (have === 0) { break inf_leave; }9355have--;9356hold += input[next++] << bits;9357bits += 8;9358}9359//===//9360state.length += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/;9361//--- DROPBITS(state.extra) ---//9362hold >>>= state.extra;9363bits -= state.extra;9364//---//9365state.back += state.extra;9366}9367//Tracevv((stderr, "inflate: length %u\n", state.length));9368state.was = state.length;9369state.mode = DIST;9370/* falls through */9371case DIST:9372for (;;) {9373here = state.distcode[hold & ((1 << state.distbits) - 1)];/*BITS(state.distbits)*/9374here_bits = here >>> 24;9375here_op = (here >>> 16) & 0xff;9376here_val = here & 0xffff;9377
9378if ((here_bits) <= bits) { break; }9379//--- PULLBYTE() ---//9380if (have === 0) { break inf_leave; }9381have--;9382hold += input[next++] << bits;9383bits += 8;9384//---//9385}9386if ((here_op & 0xf0) === 0) {9387last_bits = here_bits;9388last_op = here_op;9389last_val = here_val;9390for (;;) {9391here = state.distcode[last_val +9392((hold & ((1 << (last_bits + last_op)) - 1))/*BITS(last.bits + last.op)*/ >> last_bits)];9393here_bits = here >>> 24;9394here_op = (here >>> 16) & 0xff;9395here_val = here & 0xffff;9396
9397if ((last_bits + here_bits) <= bits) { break; }9398//--- PULLBYTE() ---//9399if (have === 0) { break inf_leave; }9400have--;9401hold += input[next++] << bits;9402bits += 8;9403//---//9404}9405//--- DROPBITS(last.bits) ---//9406hold >>>= last_bits;9407bits -= last_bits;9408//---//9409state.back += last_bits;9410}9411//--- DROPBITS(here.bits) ---//9412hold >>>= here_bits;9413bits -= here_bits;9414//---//9415state.back += here_bits;9416if (here_op & 64) {9417strm.msg = 'invalid distance code';9418state.mode = BAD;9419break;9420}9421state.offset = here_val;9422state.extra = (here_op) & 15;9423state.mode = DISTEXT;9424/* falls through */9425case DISTEXT:9426if (state.extra) {9427//=== NEEDBITS(state.extra);9428n = state.extra;9429while (bits < n) {9430if (have === 0) { break inf_leave; }9431have--;9432hold += input[next++] << bits;9433bits += 8;9434}9435//===//9436state.offset += hold & ((1 << state.extra) - 1)/*BITS(state.extra)*/;9437//--- DROPBITS(state.extra) ---//9438hold >>>= state.extra;9439bits -= state.extra;9440//---//9441state.back += state.extra;9442}9443//#ifdef INFLATE_STRICT
9444if (state.offset > state.dmax) {9445strm.msg = 'invalid distance too far back';9446state.mode = BAD;9447break;9448}9449//#endif
        //Tracevv((stderr, "inflate: distance %u\n", state.offset));
        state.mode = MATCH;
        /* falls through */
      case MATCH:
        if (left === 0) { break inf_leave; }
        copy = _out - left;
        if (state.offset > copy) { /* copy from window */
          copy = state.offset - copy;
          if (copy > state.whave) {
            if (state.sane) {
              strm.msg = 'invalid distance too far back';
              state.mode = BAD;
              break;
            }
// (!) This block is disabled in zlib defaults,
9465// don't enable it for binary compatibility
9466//#ifdef INFLATE_ALLOW_INVALID_DISTANCE_TOOFAR_ARRR
9467// Trace((stderr, "inflate.c too far\n"));
9468// copy -= state.whave;
9469// if (copy > state.length) { copy = state.length; }
9470// if (copy > left) { copy = left; }
9471// left -= copy;
9472// state.length -= copy;
9473// do {
9474// output[put++] = 0;
9475// } while (--copy);
9476// if (state.length === 0) { state.mode = LEN; }
9477// break;
9478//#endif
          }
          if (copy > state.wnext) {
            copy -= state.wnext;
            from = state.wsize - copy;
          }
          else {
            from = state.wnext - copy;
          }
          if (copy > state.length) { copy = state.length; }
          from_source = state.window;
        }
        else { /* copy from output */
          from_source = output;
          from = put - state.offset;
          copy = state.length;
        }
        if (copy > left) { copy = left; }
        left -= copy;
        state.length -= copy;
        do {
          output[put++] = from_source[from++];
        } while (--copy);
        if (state.length === 0) { state.mode = LEN; }
        break;
      case LIT:
        if (left === 0) { break inf_leave; }
        output[put++] = state.length;
        left--;
        state.mode = LEN;
        break;
      case CHECK:
        if (state.wrap) {
          //=== NEEDBITS(32);
          while (bits < 32) {
            if (have === 0) { break inf_leave; }
            have--;
            // Use '|' instead of '+' to make sure that result is signed
            hold |= input[next++] << bits;
            bits += 8;
          }
          //===//
          _out -= left;
          strm.total_out += _out;
          state.total += _out;
          if (_out) {
            strm.adler = state.check =
                /*UPDATE(state.check, put - _out, _out);*/
                (state.flags ? crc32(state.check, output, _out, put - _out) : adler32(state.check, output, _out, put - _out));
          }
          _out = left;
          // NB: crc32 stored as signed 32-bit int, zswap32 returns signed too
          if ((state.flags ? hold : zswap32(hold)) !== state.check) {
            strm.msg = 'incorrect data check';
            state.mode = BAD;
            break;
          }
          //=== INITBITS();
          hold = 0;
          bits = 0;
          //===//
          //Tracev((stderr, "inflate: check matches trailer\n"));
        }
        state.mode = LENGTH;
        /* falls through */
      case LENGTH:
        if (state.wrap && state.flags) {
          //=== NEEDBITS(32);
          while (bits < 32) {
            if (have === 0) { break inf_leave; }
            have--;
            hold += input[next++] << bits;
            bits += 8;
          }
          //===//
          if (hold !== (state.total & 0xffffffff)) {
            strm.msg = 'incorrect length check';
            state.mode = BAD;
            break;
          }
          //=== INITBITS();
          hold = 0;
          bits = 0;
          //===//
          //Tracev((stderr, "inflate: length matches trailer\n"));
        }
        state.mode = DONE;
        /* falls through */
      case DONE:
        ret = Z_STREAM_END;
        break inf_leave;
      case BAD:
        ret = Z_DATA_ERROR;
        break inf_leave;
      case MEM:
        return Z_MEM_ERROR;
      case SYNC:
        /* falls through */
      default:
        return Z_STREAM_ERROR;
    }
  }
  // inf_leave <- here is real place for "goto inf_leave", emulated via "break inf_leave"

  /*
     Return from inflate(), updating the total counts and the check value.
     If there was no progress during the inflate() call, return a buffer
     error. Call updatewindow() to create and/or update the window state.
     Note: a memory error from inflate() is non-recoverable.
   */

  //--- RESTORE() ---
  strm.next_out = put;
  strm.avail_out = left;
  strm.next_in = next;
  strm.avail_in = have;
  state.hold = hold;
  state.bits = bits;
  //---

  if (state.wsize || (_out !== strm.avail_out && state.mode < BAD &&
                      (state.mode < CHECK || flush !== Z_FINISH))) {
    if (updatewindow(strm, strm.output, strm.next_out, _out - strm.avail_out)) {
      state.mode = MEM;
      return Z_MEM_ERROR;
    }
  }
  _in -= strm.avail_in;
  _out -= strm.avail_out;
  strm.total_in += _in;
  strm.total_out += _out;
  state.total += _out;
  if (state.wrap && _out) {
    strm.adler = state.check = /*UPDATE(state.check, strm.next_out - _out, _out);*/
      (state.flags ? crc32(state.check, output, _out, strm.next_out - _out) : adler32(state.check, output, _out, strm.next_out - _out));
  }
  strm.data_type = state.bits + (state.last ? 64 : 0) +
                   (state.mode === TYPE ? 128 : 0) +
                   (state.mode === LEN_ || state.mode === COPY_ ? 256 : 0);
  if (((_in === 0 && _out === 0) || flush === Z_FINISH) && ret === Z_OK) {
    ret = Z_BUF_ERROR;
  }
  return ret;
}
9624
function inflateEnd(strm) {

  if (!strm || !strm.state /*|| strm->zfree == (free_func)0*/) {
    return Z_STREAM_ERROR;
  }

  var state = strm.state;
  if (state.window) {
    state.window = null;
  }
  strm.state = null;
  return Z_OK;
}
9638
function inflateGetHeader(strm, head) {
  var state;

  /* check state */
  if (!strm || !strm.state) { return Z_STREAM_ERROR; }
  state = strm.state;
  if ((state.wrap & 2) === 0) { return Z_STREAM_ERROR; }

  /* save header structure */
  state.head = head;
  head.done = false;
  return Z_OK;
}
9652
function inflateSetDictionary(strm, dictionary) {
  var dictLength = dictionary.length;

  var state;
  var dictid;
  var ret;

  /* check state */
  if (!strm /* == Z_NULL */ || !strm.state /* == Z_NULL */) { return Z_STREAM_ERROR; }
  state = strm.state;

  if (state.wrap !== 0 && state.mode !== DICT) {
    return Z_STREAM_ERROR;
  }

  /* check for correct dictionary identifier */
  if (state.mode === DICT) {
    dictid = 1; /* adler32(0, null, 0)*/
    /* dictid = adler32(dictid, dictionary, dictLength); */
    dictid = adler32(dictid, dictionary, dictLength, 0);
    if (dictid !== state.check) {
      return Z_DATA_ERROR;
    }
  }
  /* copy dictionary to window using updatewindow(), which will amend the
     existing dictionary if appropriate */
  ret = updatewindow(strm, dictionary, dictLength, dictLength);
  if (ret) {
    state.mode = MEM;
    return Z_MEM_ERROR;
  }
  state.havedict = 1;
  // Tracev((stderr, "inflate: dictionary set\n"));
  return Z_OK;
}
9688
exports.inflateReset = inflateReset;
exports.inflateReset2 = inflateReset2;
exports.inflateResetKeep = inflateResetKeep;
exports.inflateInit = inflateInit;
exports.inflateInit2 = inflateInit2;
exports.inflate = inflate;
exports.inflateEnd = inflateEnd;
exports.inflateGetHeader = inflateGetHeader;
exports.inflateSetDictionary = inflateSetDictionary;
exports.inflateInfo = 'pako inflate (from Nodeca project)';
9700/* Not implemented
9701exports.inflateCopy = inflateCopy;
9702exports.inflateGetDictionary = inflateGetDictionary;
9703exports.inflateMark = inflateMark;
9704exports.inflatePrime = inflatePrime;
9705exports.inflateSync = inflateSync;
9706exports.inflateSyncPoint = inflateSyncPoint;
9707exports.inflateUndermine = inflateUndermine;
9708*/
9709
},{"../utils/common":41,"./adler32":43,"./crc32":45,"./inffast":48,"./inftrees":50}],50:[function(require,module,exports){
'use strict';
9713// (C) 1995-2013 Jean-loup Gailly and Mark Adler
9714// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
9715//
9716// This software is provided 'as-is', without any express or implied
9717// warranty. In no event will the authors be held liable for any damages
9718// arising from the use of this software.
9719//
9720// Permission is granted to anyone to use this software for any purpose,
9721// including commercial applications, and to alter it and redistribute it
9722// freely, subject to the following restrictions:
9723//
9724// 1. The origin of this software must not be misrepresented; you must not
9725// claim that you wrote the original software. If you use this software
9726// in a product, an acknowledgment in the product documentation would be
9727// appreciated but is not required.
9728// 2. Altered source versions must be plainly marked as such, and must not be
9729// misrepresented as being the original software.
9730// 3. This notice may not be removed or altered from any source distribution.
9731
var utils = require('../utils/common');

var MAXBITS = 15;
var ENOUGH_LENS = 852;
var ENOUGH_DISTS = 592;
//var ENOUGH = (ENOUGH_LENS+ENOUGH_DISTS);

var CODES = 0;
var LENS = 1;
var DISTS = 2;

var lbase = [ /* Length codes 257..285 base */
  3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,
  35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0
];

var lext = [ /* Length codes 257..285 extra */
  16, 16, 16, 16, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18,
  19, 19, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21, 16, 72, 78
];

var dbase = [ /* Distance codes 0..29 base */
  1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,
  257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,
  8193, 12289, 16385, 24577, 0, 0
];

var dext = [ /* Distance codes 0..29 extra */
  16, 16, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22,
  23, 23, 24, 24, 25, 25, 26, 26, 27, 27,
  28, 28, 29, 29, 64, 64
];
module.exports = function inflate_table(type, lens, lens_index, codes, table, table_index, work, opts)
{
  var bits = opts.bits;
      //here = opts.here; /* table entry for duplication */

  var len = 0;               /* a code's length in bits */
  var sym = 0;               /* index of code symbols */
  var min = 0, max = 0;      /* minimum and maximum code lengths */
  var root = 0;              /* number of index bits for root table */
  var curr = 0;              /* number of index bits for current table */
  var drop = 0;              /* code bits to drop for sub-table */
  var left = 0;              /* number of prefix codes available */
  var used = 0;              /* code entries in table used */
  var huff = 0;              /* Huffman code */
  var incr;                  /* for incrementing code, index */
  var fill;                  /* index for replicating entries */
  var low;                   /* low bits for current root entry */
  var mask;                  /* mask for low root bits */
  var next;                  /* next available space in table */
  var base = null;           /* base value table to use */
  var base_index = 0;
//  var shoextra;            /* extra bits table to use */
  var end;                   /* use base and extra for symbol > end */
  var count = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1];    /* number of codes of each length */
  var offs = new utils.Buf16(MAXBITS + 1); //[MAXBITS+1];     /* offsets in table for each length */
  var extra = null;
  var extra_index = 0;

  var here_bits, here_op, here_val;

9795/*9796Process a set of code lengths to create a canonical Huffman code. The
9797code lengths are lens[0..codes-1]. Each length corresponds to the
9798symbols 0..codes-1. The Huffman code is generated by first sorting the
9799symbols by length from short to long, and retaining the symbol order
9800for codes with equal lengths. Then the code starts with all zero bits
9801for the first code of the shortest length, and the codes are integer
9802increments for the same length, and zeros are appended as the length
9803increases. For the deflate format, these bits are stored backwards
9804from their more natural integer increment ordering, and so when the
9805decoding tables are built in the large loop below, the integer codes
9806are incremented backwards.
9807
9808This routine assumes, but does not check, that all of the entries in
9809lens[] are in the range 0..MAXBITS. The caller must assure this.
98101..MAXBITS is interpreted as that code length. zero means that that
9811symbol does not occur in this code.
9812
9813The codes are sorted by computing a count of codes for each length,
9814creating from that a table of starting indices for each length in the
9815sorted table, and then entering the symbols in order in the sorted
9816table. The sorted table is work[], with that space being provided by
9817the caller.
9818
9819The length counts are used for other purposes as well, i.e. finding
9820the minimum and maximum length codes, determining if there are any
9821codes at all, checking for a valid set of lengths, and looking ahead
9822at length counts to determine sub-table sizes when building the
9823decoding tables.
9824*/
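  /*
   For example, four symbols A, B, C, D with code lengths 2, 1, 3, 3 receive
   the canonical codes B=0, A=10, C=110, D=111: the shortest code is all
   zeros, codes of equal length are consecutive integers, and a zero bit is
   appended whenever the length increases (deflate then stores these bits in
   reverse order, as noted above).
   */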
9825
  /* accumulate lengths for codes (assumes lens[] all in 0..MAXBITS) */
  for (len = 0; len <= MAXBITS; len++) {
    count[len] = 0;
  }
  for (sym = 0; sym < codes; sym++) {
    count[lens[lens_index + sym]]++;
  }

  /* bound code lengths, force root to be within code lengths */
  root = bits;
  for (max = MAXBITS; max >= 1; max--) {
    if (count[max] !== 0) { break; }
  }
  if (root > max) {
    root = max;
  }
  if (max === 0) {                     /* no symbols to code at all */
    //table.op[opts.table_index] = 64;  //here.op = (var char)64;    /* invalid code marker */
    //table.bits[opts.table_index] = 1;   //here.bits = (var char)1;
    //table.val[opts.table_index++] = 0;   //here.val = (var short)0;
    table[table_index++] = (1 << 24) | (64 << 16) | 0;

    //table.op[opts.table_index] = 64;
    //table.bits[opts.table_index] = 1;
    //table.val[opts.table_index++] = 0;
    table[table_index++] = (1 << 24) | (64 << 16) | 0;

    opts.bits = 1;
    return 0;     /* no symbols, but wait for decoding to report error */
  }
  for (min = 1; min < max; min++) {
    if (count[min] !== 0) { break; }
  }
  if (root < min) {
    root = min;
  }

  /* check for an over-subscribed or incomplete set of lengths */
  left = 1;
  for (len = 1; len <= MAXBITS; len++) {
    left <<= 1;
    left -= count[len];
    if (left < 0) {
      return -1;
    }        /* over-subscribed */
  }
  if (left > 0 && (type === CODES || max !== 1)) {
    return -1;                      /* incomplete set */
  }
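  /* Example: three codes that all claim length 1 are over-subscribed (left
     becomes 2 - 3 = -1 on the first iteration, so -1 is returned), whereas a
     single length-1 code leaves left > 0 and is tolerated as an incomplete set
     for LENS/DISTS tables only. */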
  /* generate offsets into symbol table for each length for sorting */
  offs[1] = 0;
  for (len = 1; len < MAXBITS; len++) {
    offs[len + 1] = offs[len] + count[len];
  }

  /* sort symbols by length, by symbol order within each length */
  for (sym = 0; sym < codes; sym++) {
    if (lens[lens_index + sym] !== 0) {
      work[offs[lens[lens_index + sym]]++] = sym;
    }
  }
9890/*9891Create and fill in decoding tables. In this loop, the table being
9892filled is at next and has curr index bits. The code being used is huff
9893with length len. That code is converted to an index by dropping drop
9894bits off of the bottom. For codes where len is less than drop + curr,
9895those top drop + curr - len bits are incremented through all values to
9896fill the table with replicated entries.
9897
9898root is the number of index bits for the root table. When len exceeds
9899root, sub-tables are created pointed to by the root entry with an index
9900of the low root bits of huff. This is saved in low to check for when a
9901new sub-table should be started. drop is zero when the root table is
9902being filled, and drop is root when sub-tables are being filled.
9903
9904When a new sub-table is needed, it is necessary to look ahead in the
9905code lengths to determine what size sub-table is needed. The length
9906counts are used for this, and so count[] is decremented as codes are
9907entered in the tables.
9908
9909used keeps track of how many table entries have been allocated from the
9910provided *table space. It is checked for LENS and DIST tables against
9911the constants ENOUGH_LENS and ENOUGH_DISTS to guard against changes in
9912the initial root table size constants. See the comments in inftrees.h
9913for more information.
9914
9915sym increments through all symbols, and the loop terminates when
9916all codes of length max, i.e. all codes, have been processed. This
9917routine permits incomplete codes, so another loop after this one fills
9918in the rest of the decoding tables with invalid code markers.
9919*/
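  /*
   Each table entry built below is packed into one integer as
   (here_bits << 24) | (here_op << 16) | here_val, mirroring the C struct
   {op, bits, val}; the inflate loop unpacks it with the same shifts.
   */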
9920
9921/* set up for code type */9922// poor man optimization - use if-else instead of switch,9923// to avoid deopts in old v89924if (type === CODES) {9925base = extra = work; /* dummy value--not used */9926end = 19;9927
9928} else if (type === LENS) {9929base = lbase;9930base_index -= 257;9931extra = lext;9932extra_index -= 257;9933end = 256;9934
9935} else { /* DISTS */9936base = dbase;9937extra = dext;9938end = -1;9939}9940
9941/* initialize opts for loop */9942huff = 0; /* starting code */9943sym = 0; /* starting code symbol */9944len = min; /* starting code length */9945next = table_index; /* current table to fill in */9946curr = root; /* current table index bits */9947drop = 0; /* current bits to drop from code for index */9948low = -1; /* trigger new sub-table when len > root */9949used = 1 << root; /* use root table entries */9950mask = used - 1; /* mask for comparing low */9951
9952/* check available table space */9953if ((type === LENS && used > ENOUGH_LENS) ||9954(type === DISTS && used > ENOUGH_DISTS)) {9955return 1;9956}9957
9958/* process all codes and make table entries */9959for (;;) {9960/* create table entry */9961here_bits = len - drop;9962if (work[sym] < end) {9963here_op = 0;9964here_val = work[sym];9965}9966else if (work[sym] > end) {9967here_op = extra[extra_index + work[sym]];9968here_val = base[base_index + work[sym]];9969}9970else {9971here_op = 32 + 64; /* end of block */9972here_val = 0;9973}9974
9975/* replicate for those indices with low len bits equal to huff */9976incr = 1 << (len - drop);9977fill = 1 << curr;9978min = fill; /* save offset to next table */9979do {9980fill -= incr;9981table[next + (huff >> drop) + fill] = (here_bits << 24) | (here_op << 16) | here_val |0;9982} while (fill !== 0);9983
9984/* backwards increment the len-bit code huff */9985incr = 1 << (len - 1);9986while (huff & incr) {9987incr >>= 1;9988}9989if (incr !== 0) {9990huff &= incr - 1;9991huff += incr;9992} else {9993huff = 0;9994}9995
9996/* go to next symbol, update count, len */9997sym++;9998if (--count[len] === 0) {9999if (len === max) { break; }10000len = lens[lens_index + work[sym]];10001}10002
10003/* create new sub-table if needed */10004if (len > root && (huff & mask) !== low) {10005/* if first time, transition to sub-tables */10006if (drop === 0) {10007drop = root;10008}10009
10010/* increment past last table */10011next += min; /* here min is 1 << curr */10012
10013/* determine length of next table */10014curr = len - drop;10015left = 1 << curr;10016while (curr + drop < max) {10017left -= count[curr + drop];10018if (left <= 0) { break; }10019curr++;10020left <<= 1;10021}10022
10023/* check for enough space */10024used += 1 << curr;10025if ((type === LENS && used > ENOUGH_LENS) ||10026(type === DISTS && used > ENOUGH_DISTS)) {10027return 1;10028}10029
10030/* point entry in root table to sub-table */10031low = huff & mask;10032/*table.op[low] = curr;10033table.bits[low] = root;
10034table.val[low] = next - opts.table_index;*/
10035table[low] = (root << 24) | (curr << 16) | (next - table_index) |0;10036}10037}10038
10039/* fill in remaining table entry if code is incomplete (guaranteed to have10040at most one remaining entry, since if the code is incomplete, the
10041maximum code length that was allowed to get this far is one bit) */
10042if (huff !== 0) {10043//table.op[next + huff] = 64; /* invalid code marker */10044//table.bits[next + huff] = len - drop;10045//table.val[next + huff] = 0;10046table[next + huff] = ((len - drop) << 24) | (64 << 16) |0;10047}10048
10049/* set return parameters */10050//opts.table_index += used;10051opts.bits = root;10052return 0;10053};10054
},{"../utils/common":41}],51:[function(require,module,exports){
'use strict';
10058// (C) 1995-2013 Jean-loup Gailly and Mark Adler
10059// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
10060//
10061// This software is provided 'as-is', without any express or implied
10062// warranty. In no event will the authors be held liable for any damages
10063// arising from the use of this software.
10064//
10065// Permission is granted to anyone to use this software for any purpose,
10066// including commercial applications, and to alter it and redistribute it
10067// freely, subject to the following restrictions:
10068//
10069// 1. The origin of this software must not be misrepresented; you must not
10070// claim that you wrote the original software. If you use this software
10071// in a product, an acknowledgment in the product documentation would be
10072// appreciated but is not required.
10073// 2. Altered source versions must be plainly marked as such, and must not be
10074// misrepresented as being the original software.
10075// 3. This notice may not be removed or altered from any source distribution.
10076
module.exports = {
  2:      'need dictionary',     /* Z_NEED_DICT       2  */
  1:      'stream end',          /* Z_STREAM_END      1  */
  0:      '',                    /* Z_OK              0  */
  '-1':   'file error',          /* Z_ERRNO         (-1) */
  '-2':   'stream error',        /* Z_STREAM_ERROR  (-2) */
  '-3':   'data error',          /* Z_DATA_ERROR    (-3) */
  '-4':   'insufficient memory', /* Z_MEM_ERROR     (-4) */
  '-5':   'buffer error',        /* Z_BUF_ERROR     (-5) */
  '-6':   'incompatible version' /* Z_VERSION_ERROR (-6) */
};
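/* The keys are zlib return codes coerced to strings, so indexing the table
   with a numeric status works as well: looking it up with -3 (Z_DATA_ERROR)
   yields 'data error'. */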
},{}],52:[function(require,module,exports){
'use strict';
10092// (C) 1995-2013 Jean-loup Gailly and Mark Adler
10093// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
10094//
10095// This software is provided 'as-is', without any express or implied
10096// warranty. In no event will the authors be held liable for any damages
10097// arising from the use of this software.
10098//
10099// Permission is granted to anyone to use this software for any purpose,
10100// including commercial applications, and to alter it and redistribute it
10101// freely, subject to the following restrictions:
10102//
10103// 1. The origin of this software must not be misrepresented; you must not
10104// claim that you wrote the original software. If you use this software
10105// in a product, an acknowledgment in the product documentation would be
10106// appreciated but is not required.
10107// 2. Altered source versions must be plainly marked as such, and must not be
10108// misrepresented as being the original software.
10109// 3. This notice may not be removed or altered from any source distribution.
10110
10111var utils = require('../utils/common');10112
10113/* Public constants ==========================================================*/
10114/* ===========================================================================*/
10115
10116
10117//var Z_FILTERED = 1;
10118//var Z_HUFFMAN_ONLY = 2;
10119//var Z_RLE = 3;
10120var Z_FIXED = 4;10121//var Z_DEFAULT_STRATEGY = 0;
10122
10123/* Possible values of the data_type field (though see inflate()) */
10124var Z_BINARY = 0;10125var Z_TEXT = 1;10126//var Z_ASCII = 1; // = Z_TEXT
10127var Z_UNKNOWN = 2;10128
10129/*============================================================================*/
10130
10131
function zero(buf) { var len = buf.length; while (--len >= 0) { buf[len] = 0; } }
10134// From zutil.h
10135
10136var STORED_BLOCK = 0;10137var STATIC_TREES = 1;10138var DYN_TREES = 2;10139/* The three kinds of block type */
10140
10141var MIN_MATCH = 3;10142var MAX_MATCH = 258;10143/* The minimum and maximum match lengths */
10144
10145// From deflate.h
10146/* ===========================================================================
10147* Internal compression state.
10148*/
10149
10150var LENGTH_CODES = 29;10151/* number of length codes, not counting the special END_BLOCK code */
10152
10153var LITERALS = 256;10154/* number of literal bytes 0..255 */
10155
10156var L_CODES = LITERALS + 1 + LENGTH_CODES;10157/* number of Literal or Length codes, including the END_BLOCK code */
10158
10159var D_CODES = 30;10160/* number of distance codes */
10161
10162var BL_CODES = 19;10163/* number of codes used to transfer the bit lengths */
10164
10165var HEAP_SIZE = 2 * L_CODES + 1;10166/* maximum heap size */
10167
10168var MAX_BITS = 15;10169/* All codes must not exceed MAX_BITS bits */
10170
10171var Buf_size = 16;10172/* size of bit buffer in bi_buf */
10173
10174
10175/* ===========================================================================
10176* Constants
10177*/
10178
10179var MAX_BL_BITS = 7;10180/* Bit length codes must not exceed MAX_BL_BITS bits */
10181
10182var END_BLOCK = 256;10183/* end of block literal code */
10184
10185var REP_3_6 = 16;10186/* repeat previous bit length 3-6 times (2 bits of repeat count) */
10187
10188var REPZ_3_10 = 17;10189/* repeat a zero length 3-10 times (3 bits of repeat count) */
10190
10191var REPZ_11_138 = 18;10192/* repeat a zero length 11-138 times (7 bits of repeat count) */
10193
10194/* eslint-disable comma-spacing,array-bracket-spacing */
10195var extra_lbits = /* extra bits for each length code */10196[0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0];10197
10198var extra_dbits = /* extra bits for each distance code */10199[0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13];10200
10201var extra_blbits = /* extra bits for each bit length code */10202[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7];10203
10204var bl_order =10205[16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15];10206/* eslint-enable comma-spacing,array-bracket-spacing */
10207
10208/* The lengths of the bit length codes are sent in order of decreasing
10209* probability, to avoid transmitting the lengths for unused bit length codes.
10210*/
10211
10212/* ===========================================================================
10213* Local data. These are initialized only once.
10214*/
10215
10216// We pre-fill arrays with 0 to avoid uninitialized gaps
10217
10218var DIST_CODE_LEN = 512; /* see definition of array dist_code below */10219
// !!!! Use flat array instead of structure, Freq = i*2, Len = i*2+1
10221var static_ltree = new Array((L_CODES + 2) * 2);10222zero(static_ltree);10223/* The static literal tree. Since the bit lengths are imposed, there is no
10224* need for the L_CODES extra codes used during heap construction. However
10225* The codes 286 and 287 are needed to build a canonical tree (see _tr_init
10226* below).
10227*/
10228
10229var static_dtree = new Array(D_CODES * 2);10230zero(static_dtree);10231/* The static distance tree. (Actually a trivial tree since all codes use
10232* 5 bits.)
10233*/
10234
10235var _dist_code = new Array(DIST_CODE_LEN);10236zero(_dist_code);10237/* Distance codes. The first 256 values correspond to the distances
10238* 3 .. 258, the last 256 values correspond to the top 8 bits of
10239* the 15 bit distances.
10240*/
10241
10242var _length_code = new Array(MAX_MATCH - MIN_MATCH + 1);10243zero(_length_code);10244/* length code for each normalized match length (0 == MIN_MATCH) */
10245
10246var base_length = new Array(LENGTH_CODES);10247zero(base_length);10248/* First normalized length for each code (0 = MIN_MATCH) */
10249
10250var base_dist = new Array(D_CODES);10251zero(base_dist);10252/* First normalized distance for each code (0 = distance of 1) */
10253
10254
10255function StaticTreeDesc(static_tree, extra_bits, extra_base, elems, max_length) {10256
10257this.static_tree = static_tree; /* static tree or NULL */10258this.extra_bits = extra_bits; /* extra bits for each code or NULL */10259this.extra_base = extra_base; /* base index for extra_bits */10260this.elems = elems; /* max number of elements in the tree */10261this.max_length = max_length; /* max bit length for the codes */10262
10263// show if `static_tree` has data or dummy - needed for monomorphic objects10264this.has_stree = static_tree && static_tree.length;10265}
10266
10267
10268var static_l_desc;10269var static_d_desc;10270var static_bl_desc;10271
10272
10273function TreeDesc(dyn_tree, stat_desc) {10274this.dyn_tree = dyn_tree; /* the dynamic tree */10275this.max_code = 0; /* largest code with non zero frequency */10276this.stat_desc = stat_desc; /* the corresponding static tree */10277}
10278
10279
10280
function d_code(dist) {
  return dist < 256 ? _dist_code[dist] : _dist_code[256 + (dist >>> 7)];
}
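/* Distances below 256 index _dist_code directly; larger arguments use only
   their top bits, e.g. d_code(300) reads _dist_code[256 + (300 >>> 7)], which
   is _dist_code[258]. */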
10284
10285
10286/* ===========================================================================
10287* Output a short LSB first on the stream.
10288* IN assertion: there is enough room in pendingBuf.
10289*/
function put_short(s, w) {
//  put_byte(s, (uch)((w) & 0xff));
//  put_byte(s, (uch)((ush)(w) >> 8));
  s.pending_buf[s.pending++] = (w) & 0xff;
  s.pending_buf[s.pending++] = (w >>> 8) & 0xff;
}
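/* The low byte is written first: put_short(s, 0x1234) appends 0x34 and then
   0x12 to pending_buf, i.e. LSB first as the header comment above states. */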
10296
10297
10298/* ===========================================================================
10299* Send a value on a given number of bits.
10300* IN assertion: length <= 16 and value fits in length bits.
10301*/
function send_bits(s, value, length) {
  if (s.bi_valid > (Buf_size - length)) {
    s.bi_buf |= (value << s.bi_valid) & 0xffff;
    put_short(s, s.bi_buf);
    s.bi_buf = value >> (Buf_size - s.bi_valid);
    s.bi_valid += length - Buf_size;
  } else {
    s.bi_buf |= (value << s.bi_valid) & 0xffff;
    s.bi_valid += length;
  }
}
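/* Bits accumulate LSB-first in the 16-bit bi_buf: send_bits(s, 5, 3) places
   the bits 101 at the current write position, and the buffer is flushed
   through put_short() whenever fewer than `length` bits of room remain. */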
10313
10314
function send_code(s, c, tree) {
  send_bits(s, tree[c * 2]/*.Code*/, tree[c * 2 + 1]/*.Len*/);
}
10318
10319
10320/* ===========================================================================
10321* Reverse the first len bits of a code, using straightforward code (a faster
10322* method would use a table)
10323* IN assertion: 1 <= len <= 15
10324*/
function bi_reverse(code, len) {
  var res = 0;
  do {
    res |= code & 1;
    code >>>= 1;
    res <<= 1;
  } while (--len > 0);
  return res >>> 1;
}
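/* For instance, bi_reverse(0x3, 3) mirrors the 3-bit pattern 011 into 110 and
   returns 6; this is needed because the deflate format stores Huffman code
   bits back-to-front relative to their natural integer order. */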
10334
10335
10336/* ===========================================================================
10337* Flush the bit buffer, keeping at most 7 bits in it.
10338*/
function bi_flush(s) {
  if (s.bi_valid === 16) {
    put_short(s, s.bi_buf);
    s.bi_buf = 0;
    s.bi_valid = 0;

  } else if (s.bi_valid >= 8) {
    s.pending_buf[s.pending++] = s.bi_buf & 0xff;
    s.bi_buf >>= 8;
    s.bi_valid -= 8;
  }
}
10351
10352
10353/* ===========================================================================
10354* Compute the optimal bit lengths for a tree and update the total bit length
10355* for the current block.
10356* IN assertion: the fields freq and dad are set, heap[heap_max] and
10357* above are the tree nodes sorted by increasing frequency.
10358* OUT assertions: the field len is set to the optimal bit length, the
10359* array bl_count contains the frequencies for each bit length.
10360* The length opt_len is updated; static_len is also updated if stree is
10361* not null.
10362*/
10363function gen_bitlen(s, desc)10364// deflate_state *s;
10365// tree_desc *desc; /* the tree descriptor */
10366{
10367var tree = desc.dyn_tree;10368var max_code = desc.max_code;10369var stree = desc.stat_desc.static_tree;10370var has_stree = desc.stat_desc.has_stree;10371var extra = desc.stat_desc.extra_bits;10372var base = desc.stat_desc.extra_base;10373var max_length = desc.stat_desc.max_length;10374var h; /* heap index */10375var n, m; /* iterate over the tree elements */10376var bits; /* bit length */10377var xbits; /* extra bits */10378var f; /* frequency */10379var overflow = 0; /* number of elements with bit length too large */10380
10381for (bits = 0; bits <= MAX_BITS; bits++) {10382s.bl_count[bits] = 0;10383}10384
10385/* In a first pass, compute the optimal bit lengths (which may10386* overflow in the case of the bit length tree).
10387*/
10388tree[s.heap[s.heap_max] * 2 + 1]/*.Len*/ = 0; /* root of the heap */10389
10390for (h = s.heap_max + 1; h < HEAP_SIZE; h++) {10391n = s.heap[h];10392bits = tree[tree[n * 2 + 1]/*.Dad*/ * 2 + 1]/*.Len*/ + 1;10393if (bits > max_length) {10394bits = max_length;10395overflow++;10396}10397tree[n * 2 + 1]/*.Len*/ = bits;10398/* We overwrite tree[n].Dad which is no longer needed */10399
10400if (n > max_code) { continue; } /* not a leaf node */10401
10402s.bl_count[bits]++;10403xbits = 0;10404if (n >= base) {10405xbits = extra[n - base];10406}10407f = tree[n * 2]/*.Freq*/;10408s.opt_len += f * (bits + xbits);10409if (has_stree) {10410s.static_len += f * (stree[n * 2 + 1]/*.Len*/ + xbits);10411}10412}10413if (overflow === 0) { return; }10414
10415// Trace((stderr,"\nbit length overflow\n"));10416/* This happens for example on obj2 and pic of the Calgary corpus */10417
10418/* Find the first bit length which could increase: */10419do {10420bits = max_length - 1;10421while (s.bl_count[bits] === 0) { bits--; }10422s.bl_count[bits]--; /* move one leaf down the tree */10423s.bl_count[bits + 1] += 2; /* move one overflow item as its brother */10424s.bl_count[max_length]--;10425/* The brother of the overflow item also moves one step up,10426* but this does not affect bl_count[max_length]
10427*/
10428overflow -= 2;10429} while (overflow > 0);10430
10431/* Now recompute all bit lengths, scanning in increasing frequency.10432* h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
10433* lengths instead of fixing only the wrong ones. This idea is taken
10434* from 'ar' written by Haruhiko Okumura.)
10435*/
10436for (bits = max_length; bits !== 0; bits--) {10437n = s.bl_count[bits];10438while (n !== 0) {10439m = s.heap[--h];10440if (m > max_code) { continue; }10441if (tree[m * 2 + 1]/*.Len*/ !== bits) {10442// Trace((stderr,"code %d bits %d->%d\n", m, tree[m].Len, bits));10443s.opt_len += (bits - tree[m * 2 + 1]/*.Len*/) * tree[m * 2]/*.Freq*/;10444tree[m * 2 + 1]/*.Len*/ = bits;10445}10446n--;10447}10448}10449}
10450
10451
10452/* ===========================================================================
10453* Generate the codes for a given tree and bit counts (which need not be
10454* optimal).
10455* IN assertion: the array bl_count contains the bit length statistics for
10456* the given tree and the field len is set for all tree elements.
10457* OUT assertion: the field code is set for all tree elements of non
10458* zero code length.
10459*/
function gen_codes(tree, max_code, bl_count)
//    ct_data *tree;             /* the tree to decorate */
//    int max_code;              /* largest code with non zero frequency */
//    ushf *bl_count;            /* number of codes at each bit length */
{
  var next_code = new Array(MAX_BITS + 1); /* next code value for each bit length */
  var code = 0;              /* running code value */
  var bits;                  /* bit index */
  var n;                     /* code index */

  /* The distribution counts are first used to generate the code values
   * without bit reversal.
   */
  for (bits = 1; bits <= MAX_BITS; bits++) {
    next_code[bits] = code = (code + bl_count[bits - 1]) << 1;
  }
  /* Check that the bit counts in bl_count are consistent. The last code
   * must be all ones.
   */
  //Assert (code + bl_count[MAX_BITS]-1 == (1<<MAX_BITS)-1,
  //        "inconsistent bit counts");
  //Tracev((stderr,"\ngen_codes: max_code %d ", max_code));

  for (n = 0; n <= max_code; n++) {
    var len = tree[n * 2 + 1]/*.Len*/;
    if (len === 0) { continue; }
    /* Now reverse the bits */
    tree[n * 2]/*.Code*/ = bi_reverse(next_code[len]++, len);

    //Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
    //     n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len]-1));
  }
}
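/* Worked example of the next_code loop above: with one code of length 1, one
   of length 2 and two of length 3, bl_count = [0, 1, 1, 2] gives
   next_code[1] = 0, next_code[2] = 2, next_code[3] = 6, and the consistency
   check holds since 6 + 2 - 1 === (1 << 3) - 1 (taking 3 as the largest bit
   length just for the illustration). */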
10493
10494
10495/* ===========================================================================
10496* Initialize the various 'constant' tables.
10497*/
10498function tr_static_init() {10499var n; /* iterates over tree elements */10500var bits; /* bit counter */10501var length; /* length value */10502var code; /* code value */10503var dist; /* distance index */10504var bl_count = new Array(MAX_BITS + 1);10505/* number of codes at each bit length for an optimal tree */10506
10507// do check in _tr_init()10508//if (static_init_done) return;10509
10510/* For some embedded targets, global variables are not initialized: */10511/*#ifdef NO_INIT_GLOBAL_POINTERS
10512static_l_desc.static_tree = static_ltree;
10513static_l_desc.extra_bits = extra_lbits;
10514static_d_desc.static_tree = static_dtree;
10515static_d_desc.extra_bits = extra_dbits;
10516static_bl_desc.extra_bits = extra_blbits;
10517#endif*/
10518
10519/* Initialize the mapping length (0..255) -> length code (0..28) */10520length = 0;10521for (code = 0; code < LENGTH_CODES - 1; code++) {10522base_length[code] = length;10523for (n = 0; n < (1 << extra_lbits[code]); n++) {10524_length_code[length++] = code;10525}10526}10527//Assert (length == 256, "tr_static_init: length != 256");10528/* Note that the length 255 (match length 258) can be represented10529* in two different ways: code 284 + 5 bits or code 285, so we
10530* overwrite length_code[255] to use the best encoding:
10531*/
10532_length_code[length - 1] = code;10533
10534/* Initialize the mapping dist (0..32K) -> dist code (0..29) */10535dist = 0;10536for (code = 0; code < 16; code++) {10537base_dist[code] = dist;10538for (n = 0; n < (1 << extra_dbits[code]); n++) {10539_dist_code[dist++] = code;10540}10541}10542//Assert (dist == 256, "tr_static_init: dist != 256");10543dist >>= 7; /* from now on, all distances are divided by 128 */10544for (; code < D_CODES; code++) {10545base_dist[code] = dist << 7;10546for (n = 0; n < (1 << (extra_dbits[code] - 7)); n++) {10547_dist_code[256 + dist++] = code;10548}10549}10550//Assert (dist == 256, "tr_static_init: 256+dist != 512");10551
10552/* Construct the codes of the static literal tree */10553for (bits = 0; bits <= MAX_BITS; bits++) {10554bl_count[bits] = 0;10555}10556
10557n = 0;10558while (n <= 143) {10559static_ltree[n * 2 + 1]/*.Len*/ = 8;10560n++;10561bl_count[8]++;10562}10563while (n <= 255) {10564static_ltree[n * 2 + 1]/*.Len*/ = 9;10565n++;10566bl_count[9]++;10567}10568while (n <= 279) {10569static_ltree[n * 2 + 1]/*.Len*/ = 7;10570n++;10571bl_count[7]++;10572}10573while (n <= 287) {10574static_ltree[n * 2 + 1]/*.Len*/ = 8;10575n++;10576bl_count[8]++;10577}10578/* Codes 286 and 287 do not exist, but we must include them in the10579* tree construction to get a canonical Huffman tree (longest code
10580* all ones)
10581*/
10582gen_codes(static_ltree, L_CODES + 1, bl_count);10583
10584/* The static distance tree is trivial: */10585for (n = 0; n < D_CODES; n++) {10586static_dtree[n * 2 + 1]/*.Len*/ = 5;10587static_dtree[n * 2]/*.Code*/ = bi_reverse(n, 5);10588}10589
10590// Now data ready and we can init static trees10591static_l_desc = new StaticTreeDesc(static_ltree, extra_lbits, LITERALS + 1, L_CODES, MAX_BITS);10592static_d_desc = new StaticTreeDesc(static_dtree, extra_dbits, 0, D_CODES, MAX_BITS);10593static_bl_desc = new StaticTreeDesc(new Array(0), extra_blbits, 0, BL_CODES, MAX_BL_BITS);10594
10595//static_init_done = true;10596}
10597
10598
10599/* ===========================================================================
10600* Initialize a new block.
10601*/
function init_block(s) {
  var n; /* iterates over tree elements */

  /* Initialize the trees. */
  for (n = 0; n < L_CODES; n++)  { s.dyn_ltree[n * 2]/*.Freq*/ = 0; }
  for (n = 0; n < D_CODES; n++)  { s.dyn_dtree[n * 2]/*.Freq*/ = 0; }
  for (n = 0; n < BL_CODES; n++) { s.bl_tree[n * 2]/*.Freq*/ = 0; }

  s.dyn_ltree[END_BLOCK * 2]/*.Freq*/ = 1;
  s.opt_len = s.static_len = 0;
  s.last_lit = s.matches = 0;
}
10614
10615
10616/* ===========================================================================
10617* Flush the bit buffer and align the output on a byte boundary
10618*/
function bi_windup(s)
{
  if (s.bi_valid > 8) {
    put_short(s, s.bi_buf);
  } else if (s.bi_valid > 0) {
    //put_byte(s, (Byte)s->bi_buf);
    s.pending_buf[s.pending++] = s.bi_buf;
  }
  s.bi_buf = 0;
  s.bi_valid = 0;
}
10630
10631/* ===========================================================================
10632* Copy a stored block, storing first the length and its
10633* one's complement if requested.
10634*/
function copy_block(s, buf, len, header)
//DeflateState *s;
//charf    *buf;    /* the input data */
//unsigned len;     /* its length */
//int      header;  /* true if block header must be written */
{
  bi_windup(s);        /* align on byte boundary */

  if (header) {
    put_short(s, len);
    put_short(s, ~len);
  }
//  while (len--) {
//    put_byte(s, *buf++);
//  }
  utils.arraySet(s.pending_buf, s.window, buf, len, s.pending);
  s.pending += len;
}
10653
10654/* ===========================================================================
 * Compares two subtrees, using the tree depth as tie breaker when
10656* the subtrees have equal frequency. This minimizes the worst case length.
10657*/
function smaller(tree, n, m, depth) {
  var _n2 = n * 2;
  var _m2 = m * 2;
  return (tree[_n2]/*.Freq*/ < tree[_m2]/*.Freq*/ ||
         (tree[_n2]/*.Freq*/ === tree[_m2]/*.Freq*/ && depth[n] <= depth[m]));
}
10664
10665/* ===========================================================================
10666* Restore the heap property by moving down the tree starting at node k,
10667* exchanging a node with the smallest of its two sons if necessary, stopping
10668* when the heap property is re-established (each father smaller than its
10669* two sons).
10670*/
10671function pqdownheap(s, tree, k)10672// deflate_state *s;
10673// ct_data *tree; /* the tree to restore */
10674// int k; /* node to move down */
10675{
10676var v = s.heap[k];10677var j = k << 1; /* left son of k */10678while (j <= s.heap_len) {10679/* Set j to the smallest of the two sons: */10680if (j < s.heap_len &&10681smaller(tree, s.heap[j + 1], s.heap[j], s.depth)) {10682j++;10683}10684/* Exit if v is smaller than both sons */10685if (smaller(tree, v, s.heap[j], s.depth)) { break; }10686
10687/* Exchange v with the smallest son */10688s.heap[k] = s.heap[j];10689k = j;10690
10691/* And continue down the tree, setting j to the left son of k */10692j <<= 1;10693}10694s.heap[k] = v;10695}
10696
10697
10698// inlined manually
10699// var SMALLEST = 1;
10700
10701/* ===========================================================================
10702* Send the block data compressed using the given Huffman trees
10703*/
10704function compress_block(s, ltree, dtree)10705// deflate_state *s;
10706// const ct_data *ltree; /* literal tree */
10707// const ct_data *dtree; /* distance tree */
10708{
10709var dist; /* distance of matched string */10710var lc; /* match length or unmatched char (if dist == 0) */10711var lx = 0; /* running index in l_buf */10712var code; /* the code to send */10713var extra; /* number of extra bits to send */10714
10715if (s.last_lit !== 0) {10716do {10717dist = (s.pending_buf[s.d_buf + lx * 2] << 8) | (s.pending_buf[s.d_buf + lx * 2 + 1]);10718lc = s.pending_buf[s.l_buf + lx];10719lx++;10720
10721if (dist === 0) {10722send_code(s, lc, ltree); /* send a literal byte */10723//Tracecv(isgraph(lc), (stderr," '%c' ", lc));10724} else {10725/* Here, lc is the match length - MIN_MATCH */10726code = _length_code[lc];10727send_code(s, code + LITERALS + 1, ltree); /* send the length code */10728extra = extra_lbits[code];10729if (extra !== 0) {10730lc -= base_length[code];10731send_bits(s, lc, extra); /* send the extra length bits */10732}10733dist--; /* dist is now the match distance - 1 */10734code = d_code(dist);10735//Assert (code < D_CODES, "bad d_code");10736
10737send_code(s, code, dtree); /* send the distance code */10738extra = extra_dbits[code];10739if (extra !== 0) {10740dist -= base_dist[code];10741send_bits(s, dist, extra); /* send the extra distance bits */10742}10743} /* literal or match pair ? */10744
10745/* Check that the overlay between pending_buf and d_buf+l_buf is ok: */10746//Assert((uInt)(s->pending) < s->lit_bufsize + 2*lx,10747// "pendingBuf overflow");10748
10749} while (lx < s.last_lit);10750}10751
10752send_code(s, END_BLOCK, ltree);10753}
10754
10755
10756/* ===========================================================================
10757* Construct one Huffman tree and assigns the code bit strings and lengths.
10758* Update the total bit length for the current block.
10759* IN assertion: the field freq is set for all tree elements.
10760* OUT assertions: the fields len and code are set to the optimal bit length
10761* and corresponding code. The length opt_len is updated; static_len is
10762* also updated if stree is not null. The field max_code is set.
10763*/
10764function build_tree(s, desc)10765// deflate_state *s;
10766// tree_desc *desc; /* the tree descriptor */
10767{
10768var tree = desc.dyn_tree;10769var stree = desc.stat_desc.static_tree;10770var has_stree = desc.stat_desc.has_stree;10771var elems = desc.stat_desc.elems;10772var n, m; /* iterate over heap elements */10773var max_code = -1; /* largest code with non zero frequency */10774var node; /* new node being created */10775
10776/* Construct the initial heap, with least frequent element in10777* heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
10778* heap[0] is not used.
10779*/
10780s.heap_len = 0;10781s.heap_max = HEAP_SIZE;10782
10783for (n = 0; n < elems; n++) {10784if (tree[n * 2]/*.Freq*/ !== 0) {10785s.heap[++s.heap_len] = max_code = n;10786s.depth[n] = 0;10787
10788} else {10789tree[n * 2 + 1]/*.Len*/ = 0;10790}10791}10792
10793/* The pkzip format requires that at least one distance code exists,10794* and that at least one bit should be sent even if there is only one
10795* possible code. So to avoid special checks later on we force at least
10796* two codes of non zero frequency.
10797*/
10798while (s.heap_len < 2) {10799node = s.heap[++s.heap_len] = (max_code < 2 ? ++max_code : 0);10800tree[node * 2]/*.Freq*/ = 1;10801s.depth[node] = 0;10802s.opt_len--;10803
10804if (has_stree) {10805s.static_len -= stree[node * 2 + 1]/*.Len*/;10806}10807/* node is 0 or 1 so it does not have extra bits */10808}10809desc.max_code = max_code;10810
10811/* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,10812* establish sub-heaps of increasing lengths:
10813*/
10814for (n = (s.heap_len >> 1/*int /2*/); n >= 1; n--) { pqdownheap(s, tree, n); }10815
10816/* Construct the Huffman tree by repeatedly combining the least two10817* frequent nodes.
10818*/
10819node = elems; /* next internal node of the tree */10820do {10821//pqremove(s, tree, n); /* n = node of least frequency */10822/*** pqremove ***/10823n = s.heap[1/*SMALLEST*/];10824s.heap[1/*SMALLEST*/] = s.heap[s.heap_len--];10825pqdownheap(s, tree, 1/*SMALLEST*/);10826/***/10827
10828m = s.heap[1/*SMALLEST*/]; /* m = node of next least frequency */10829
10830s.heap[--s.heap_max] = n; /* keep the nodes sorted by frequency */10831s.heap[--s.heap_max] = m;10832
10833/* Create a new node father of n and m */10834tree[node * 2]/*.Freq*/ = tree[n * 2]/*.Freq*/ + tree[m * 2]/*.Freq*/;10835s.depth[node] = (s.depth[n] >= s.depth[m] ? s.depth[n] : s.depth[m]) + 1;10836tree[n * 2 + 1]/*.Dad*/ = tree[m * 2 + 1]/*.Dad*/ = node;10837
10838/* and insert the new node in the heap */10839s.heap[1/*SMALLEST*/] = node++;10840pqdownheap(s, tree, 1/*SMALLEST*/);10841
10842} while (s.heap_len >= 2);10843
10844s.heap[--s.heap_max] = s.heap[1/*SMALLEST*/];10845
10846/* At this point, the fields freq and dad are set. We can now10847* generate the bit lengths.
10848*/
10849gen_bitlen(s, desc);10850
10851/* The field len is now set, we can generate the bit codes */10852gen_codes(tree, max_code, s.bl_count);10853}
10854
10855
10856/* ===========================================================================
10857* Scan a literal or distance tree to determine the frequencies of the codes
10858* in the bit length tree.
10859*/
10860function scan_tree(s, tree, max_code)10861// deflate_state *s;
10862// ct_data *tree; /* the tree to be scanned */
10863// int max_code; /* and its largest code of non zero frequency */
10864{
10865var n; /* iterates over all tree elements */10866var prevlen = -1; /* last emitted length */10867var curlen; /* length of current code */10868
10869var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */10870
10871var count = 0; /* repeat count of the current code */10872var max_count = 7; /* max repeat count */10873var min_count = 4; /* min repeat count */10874
10875if (nextlen === 0) {10876max_count = 138;10877min_count = 3;10878}10879tree[(max_code + 1) * 2 + 1]/*.Len*/ = 0xffff; /* guard */10880
10881for (n = 0; n <= max_code; n++) {10882curlen = nextlen;10883nextlen = tree[(n + 1) * 2 + 1]/*.Len*/;10884
10885if (++count < max_count && curlen === nextlen) {10886continue;10887
10888} else if (count < min_count) {10889s.bl_tree[curlen * 2]/*.Freq*/ += count;10890
10891} else if (curlen !== 0) {10892
10893if (curlen !== prevlen) { s.bl_tree[curlen * 2]/*.Freq*/++; }10894s.bl_tree[REP_3_6 * 2]/*.Freq*/++;10895
10896} else if (count <= 10) {10897s.bl_tree[REPZ_3_10 * 2]/*.Freq*/++;10898
10899} else {10900s.bl_tree[REPZ_11_138 * 2]/*.Freq*/++;10901}10902
10903count = 0;10904prevlen = curlen;10905
10906if (nextlen === 0) {10907max_count = 138;10908min_count = 3;10909
10910} else if (curlen === nextlen) {10911max_count = 6;10912min_count = 3;10913
10914} else {10915max_count = 7;10916min_count = 4;10917}10918}10919}
10920
10921
10922/* ===========================================================================
10923* Send a literal or distance tree in compressed form, using the codes in
10924* bl_tree.
10925*/
10926function send_tree(s, tree, max_code)10927// deflate_state *s;
10928// ct_data *tree; /* the tree to be scanned */
10929// int max_code; /* and its largest code of non zero frequency */
10930{
10931var n; /* iterates over all tree elements */10932var prevlen = -1; /* last emitted length */10933var curlen; /* length of current code */10934
10935var nextlen = tree[0 * 2 + 1]/*.Len*/; /* length of next code */10936
10937var count = 0; /* repeat count of the current code */10938var max_count = 7; /* max repeat count */10939var min_count = 4; /* min repeat count */10940
10941/* tree[max_code+1].Len = -1; */ /* guard already set */10942if (nextlen === 0) {10943max_count = 138;10944min_count = 3;10945}10946
10947for (n = 0; n <= max_code; n++) {10948curlen = nextlen;10949nextlen = tree[(n + 1) * 2 + 1]/*.Len*/;10950
10951if (++count < max_count && curlen === nextlen) {10952continue;10953
10954} else if (count < min_count) {10955do { send_code(s, curlen, s.bl_tree); } while (--count !== 0);10956
10957} else if (curlen !== 0) {10958if (curlen !== prevlen) {10959send_code(s, curlen, s.bl_tree);10960count--;10961}10962//Assert(count >= 3 && count <= 6, " 3_6?");10963send_code(s, REP_3_6, s.bl_tree);10964send_bits(s, count - 3, 2);10965
10966} else if (count <= 10) {10967send_code(s, REPZ_3_10, s.bl_tree);10968send_bits(s, count - 3, 3);10969
10970} else {10971send_code(s, REPZ_11_138, s.bl_tree);10972send_bits(s, count - 11, 7);10973}10974
10975count = 0;10976prevlen = curlen;10977if (nextlen === 0) {10978max_count = 138;10979min_count = 3;10980
10981} else if (curlen === nextlen) {10982max_count = 6;10983min_count = 3;10984
10985} else {10986max_count = 7;10987min_count = 4;10988}10989}10990}
10991
10992
10993/* ===========================================================================
10994* Construct the Huffman tree for the bit lengths and return the index in
10995* bl_order of the last bit length code to send.
10996*/
10997function build_bl_tree(s) {10998var max_blindex; /* index of last bit length code of non zero freq */10999
11000/* Determine the bit length frequencies for literal and distance trees */11001scan_tree(s, s.dyn_ltree, s.l_desc.max_code);11002scan_tree(s, s.dyn_dtree, s.d_desc.max_code);11003
11004/* Build the bit length tree: */11005build_tree(s, s.bl_desc);11006/* opt_len now includes the length of the tree representations, except11007* the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
11008*/
11009
11010/* Determine the number of bit length codes to send. The pkzip format11011* requires that at least 4 bit length codes be sent. (appnote.txt says
11012* 3 but the actual value used is 4.)
11013*/
11014for (max_blindex = BL_CODES - 1; max_blindex >= 3; max_blindex--) {11015if (s.bl_tree[bl_order[max_blindex] * 2 + 1]/*.Len*/ !== 0) {11016break;11017}11018}11019/* Update opt_len to include the bit length tree and counts */11020s.opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;11021//Tracev((stderr, "\ndyn trees: dyn %ld, stat %ld",11022// s->opt_len, s->static_len));11023
11024return max_blindex;11025}
11026
11027
11028/* ===========================================================================
11029* Send the header for a block using dynamic Huffman trees: the counts, the
11030* lengths of the bit length codes, the literal tree and the distance tree.
11031* IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
11032*/
11033function send_all_trees(s, lcodes, dcodes, blcodes)11034// deflate_state *s;
11035// int lcodes, dcodes, blcodes; /* number of codes for each tree */
11036{
11037var rank; /* index in bl_order */11038
11039//Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");11040//Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,11041// "too many codes");11042//Tracev((stderr, "\nbl counts: "));11043send_bits(s, lcodes - 257, 5); /* not +255 as stated in appnote.txt */11044send_bits(s, dcodes - 1, 5);11045send_bits(s, blcodes - 4, 4); /* not -3 as stated in appnote.txt */11046for (rank = 0; rank < blcodes; rank++) {11047//Tracev((stderr, "\nbl code %2d ", bl_order[rank]));11048send_bits(s, s.bl_tree[bl_order[rank] * 2 + 1]/*.Len*/, 3);11049}11050//Tracev((stderr, "\nbl tree: sent %ld", s->bits_sent));11051
11052send_tree(s, s.dyn_ltree, lcodes - 1); /* literal tree */11053//Tracev((stderr, "\nlit tree: sent %ld", s->bits_sent));11054
11055send_tree(s, s.dyn_dtree, dcodes - 1); /* distance tree */11056//Tracev((stderr, "\ndist tree: sent %ld", s->bits_sent));11057}


/* ===========================================================================
 * Check if the data type is TEXT or BINARY, using the following algorithm:
 * - TEXT if the two conditions below are satisfied:
 *    a) There are no non-portable control characters belonging to the
 *       "black list" (0..6, 14..25, 28..31).
 *    b) There is at least one printable character belonging to the
 *       "white list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255).
 * - BINARY otherwise.
 * - The following partially-portable control characters form a
 *   "gray list" that is ignored in this detection algorithm:
 *   (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}).
 * IN assertion: the fields Freq of dyn_ltree are set.
 */
function detect_data_type(s) {
  /* black_mask is the bit mask of black-listed bytes
   * set bits 0..6, 14..25, and 28..31
   * 0xf3ffc07f = binary 11110011111111111100000001111111
   */
  var black_mask = 0xf3ffc07f;
  var n;

  /* Check for non-textual ("black-listed") bytes. */
  for (n = 0; n <= 31; n++, black_mask >>>= 1) {
    if ((black_mask & 1) && (s.dyn_ltree[n * 2]/*.Freq*/ !== 0)) {
      return Z_BINARY;
    }
  }

  /* Check for textual ("white-listed") bytes. */
  if (s.dyn_ltree[9 * 2]/*.Freq*/ !== 0 || s.dyn_ltree[10 * 2]/*.Freq*/ !== 0 ||
      s.dyn_ltree[13 * 2]/*.Freq*/ !== 0) {
    return Z_TEXT;
  }
  for (n = 32; n < LITERALS; n++) {
    if (s.dyn_ltree[n * 2]/*.Freq*/ !== 0) {
      return Z_TEXT;
    }
  }

  /* There are no "black-listed" or "white-listed" bytes:
   * this stream either is empty or has tolerated ("gray-listed") bytes only.
   */
  return Z_BINARY;
}
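/* A minimal illustrative sketch (not part of pako/zlib, never called): the same
 * TEXT/BINARY test applied to a plain byte array instead of the dyn_ltree
 * frequency table, assuming the black/white lists documented above. The helper
 * name is hypothetical.
 */
function example_looks_like_text(bytes) {
  var mask = 0xf3ffc07f; // black-listed control bytes: 0..6, 14..25, 28..31
  var sawWhite = false;
  for (var i = 0; i < bytes.length; i++) {
    var b = bytes[i];
    if (b <= 31 && ((mask >>> b) & 1)) { return false; }       // black-listed -> BINARY
    if (b === 9 || b === 10 || b === 13 || b >= 32) { sawWhite = true; } // white-listed
  }
  return sawWhite; // no white-listed byte at all (e.g. empty input) -> BINARY
}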


var static_init_done = false;

/* ===========================================================================
 * Initialize the tree data structures for a new zlib stream.
 */
function _tr_init(s)
{

  if (!static_init_done) {
    tr_static_init();
    static_init_done = true;
  }

  s.l_desc  = new TreeDesc(s.dyn_ltree, static_l_desc);
  s.d_desc  = new TreeDesc(s.dyn_dtree, static_d_desc);
  s.bl_desc = new TreeDesc(s.bl_tree, static_bl_desc);

  s.bi_buf = 0;
  s.bi_valid = 0;

  /* Initialize the first block of the first file: */
  init_block(s);
}


/* ===========================================================================
 * Send a stored block
 */
function _tr_stored_block(s, buf, stored_len, last)
//DeflateState *s;
//charf *buf;       /* input block */
//ulg stored_len;   /* length of input block */
//int last;         /* one if this is the last block for a file */
{
  send_bits(s, (STORED_BLOCK << 1) + (last ? 1 : 0), 3);    /* send block type */
  copy_block(s, buf, stored_len, true); /* with header */
}
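/* A minimal illustrative sketch (not part of pako/zlib, never called): a rough
 * byte cost for a stored block, which is where the "stored_len + 4" and
 * "stored_len + 5" comparisons in _tr_flush_block below come from: 3 header
 * bits padded to a byte boundary, then LEN and NLEN (two 16-bit words), then
 * the raw bytes. The helper name is hypothetical.
 */
function example_stored_block_bytes(stored_len) {
  // ~1 byte for the 3-bit header plus padding, 2 bytes LEN, 2 bytes NLEN,
  // then the uncompressed data itself.
  return stored_len + 5;
}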


/* ===========================================================================
 * Send one empty static block to give enough lookahead for inflate.
 * This takes 10 bits, of which 7 may remain in the bit buffer.
 */
function _tr_align(s) {
  send_bits(s, STATIC_TREES << 1, 3);
  send_code(s, END_BLOCK, static_ltree);
  bi_flush(s);
}


/* ===========================================================================
 * Determine the best encoding for the current block: dynamic trees, static
 * trees or store, and output the encoded block to the zip file.
 */
function _tr_flush_block(s, buf, stored_len, last)
//DeflateState *s;
//charf *buf;       /* input block, or NULL if too old */
//ulg stored_len;   /* length of input block */
//int last;         /* one if this is the last block for a file */
{
  var opt_lenb, static_lenb;  /* opt_len and static_len in bytes */
  var max_blindex = 0;        /* index of last bit length code of non zero freq */

  /* Build the Huffman trees unless a stored block is forced */
  if (s.level > 0) {

    /* Check if the file is binary or text */
    if (s.strm.data_type === Z_UNKNOWN) {
      s.strm.data_type = detect_data_type(s);
    }

    /* Construct the literal and distance trees */
    build_tree(s, s.l_desc);
    // Tracev((stderr, "\nlit data: dyn %ld, stat %ld", s->opt_len,
    //        s->static_len));

    build_tree(s, s.d_desc);
    // Tracev((stderr, "\ndist data: dyn %ld, stat %ld", s->opt_len,
    //        s->static_len));
    /* At this point, opt_len and static_len are the total bit lengths of
     * the compressed block data, excluding the tree representations.
     */

    /* Build the bit length tree for the above two trees, and get the index
     * in bl_order of the last bit length code to send.
     */
    max_blindex = build_bl_tree(s);

    /* Determine the best encoding. Compute the block lengths in bytes. */
    opt_lenb = (s.opt_len + 3 + 7) >>> 3;
    static_lenb = (s.static_len + 3 + 7) >>> 3;

    // Tracev((stderr, "\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u ",
    //        opt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,
    //        s->last_lit));

    if (static_lenb <= opt_lenb) { opt_lenb = static_lenb; }

  } else {
    // Assert(buf != (char*)0, "lost buf");
    opt_lenb = static_lenb = stored_len + 5; /* force a stored block */
  }

  if ((stored_len + 4 <= opt_lenb) && (buf !== -1)) {
    /* 4: two words for the lengths */

    /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
     * Otherwise we can't have processed more than WSIZE input bytes since
     * the last block flush, because compression would have been
     * successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
     * transform a block into a stored block.
     */
    _tr_stored_block(s, buf, stored_len, last);

  } else if (s.strategy === Z_FIXED || static_lenb === opt_lenb) {

    send_bits(s, (STATIC_TREES << 1) + (last ? 1 : 0), 3);
    compress_block(s, static_ltree, static_dtree);

  } else {
    send_bits(s, (DYN_TREES << 1) + (last ? 1 : 0), 3);
    send_all_trees(s, s.l_desc.max_code + 1, s.d_desc.max_code + 1, max_blindex + 1);
    compress_block(s, s.dyn_ltree, s.dyn_dtree);
  }
  // Assert (s->compressed_len == s->bits_sent, "bad compressed size");
  /* The above check is made mod 2^32, for files larger than 512 MB
   * and uLong implemented on 32 bits.
   */
  init_block(s);

  if (last) {
    bi_windup(s);
  }
  // Tracev((stderr,"\ncomprlen %lu(%lu) ", s->compressed_len>>3,
  //       s->compressed_len-7*last));
}
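/* A minimal illustrative sketch (not part of pako/zlib, never called): the
 * three-way choice _tr_flush_block makes above, reduced to byte costs only.
 * The real code also honors Z_FIXED and the buf availability test; the helper
 * name and return strings are hypothetical.
 */
function example_choose_block_type(stored_len, static_lenb, opt_lenb) {
  if (stored_len + 4 <= Math.min(opt_lenb, static_lenb)) { return 'stored'; }
  if (static_lenb <= opt_lenb) { return 'static'; }
  return 'dynamic';
}
// e.g. example_choose_block_type(100, 90, 80) === 'dynamic'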

/* ===========================================================================
 * Save the match info and tally the frequency counts. Return true if
 * the current block must be flushed.
 */
function _tr_tally(s, dist, lc)
//    deflate_state *s;
//    unsigned dist;  /* distance of matched string */
//    unsigned lc;    /* match length-MIN_MATCH or unmatched char (if dist==0) */
{
  //var out_length, in_length, dcode;

  s.pending_buf[s.d_buf + s.last_lit * 2]     = (dist >>> 8) & 0xff;
  s.pending_buf[s.d_buf + s.last_lit * 2 + 1] = dist & 0xff;

  s.pending_buf[s.l_buf + s.last_lit] = lc & 0xff;
  s.last_lit++;

  if (dist === 0) {
    /* lc is the unmatched char */
    s.dyn_ltree[lc * 2]/*.Freq*/++;
  } else {
    s.matches++;
    /* Here, lc is the match length - MIN_MATCH */
    dist--;             /* dist = match distance - 1 */
    //Assert((ush)dist < (ush)MAX_DIST(s) &&
    //       (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&
    //       (ush)d_code(dist) < (ush)D_CODES,  "_tr_tally: bad match");

    s.dyn_ltree[(_length_code[lc] + LITERALS + 1) * 2]/*.Freq*/++;
    s.dyn_dtree[d_code(dist) * 2]/*.Freq*/++;
  }

  // (!) This block is disabled in zlib defaults,
  // don't enable it for binary compatibility

  //#ifdef TRUNCATE_BLOCK
  //  /* Try to guess if it is profitable to stop the current block here */
  //  if ((s.last_lit & 0x1fff) === 0 && s.level > 2) {
  //    /* Compute an upper bound for the compressed length */
  //    out_length = s.last_lit*8;
  //    in_length = s.strstart - s.block_start;
  //
  //    for (dcode = 0; dcode < D_CODES; dcode++) {
  //      out_length += s.dyn_dtree[dcode*2]/*.Freq*/ * (5 + extra_dbits[dcode]);
  //    }
  //    out_length >>>= 3;
  //    //Tracev((stderr,"\nlast_lit %u, in %ld, out ~%ld(%ld%%) ",
  //    //       s->last_lit, in_length, out_length,
  //    //       100L - out_length*100L/in_length));
  //    if (s.matches < (s.last_lit>>1)/*int /2*/ && out_length < (in_length>>1)/*int /2*/) {
  //      return true;
  //    }
  //  }
  //#endif

  return (s.last_lit === s.lit_bufsize - 1);
  /* We avoid equality with lit_bufsize because of wraparound at 64K
   * on 16 bit machines and because stored blocks are restricted to
   * 64K-1 bytes.
   */
}
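/* A minimal illustrative sketch (not part of pako/zlib, never called): which
 * literal/length-tree slot a tally from _tr_tally touches. A literal byte bumps
 * its own index, while a match of length (lc + MIN_MATCH) bumps a length code
 * at index _length_code[lc] + LITERALS + 1 (257 and up in the standard DEFLATE
 * alphabet). The helper name is hypothetical.
 */
function example_ltree_slot(dist, lc) {
  // dist === 0: lc is the literal byte itself, e.g. 65 for 'A'.
  // dist  >  0: lc is (match length - MIN_MATCH); length 3 maps to code 257.
  return dist === 0 ? lc : _length_code[lc] + LITERALS + 1;
}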

exports._tr_init = _tr_init;
exports._tr_stored_block = _tr_stored_block;
exports._tr_flush_block = _tr_flush_block;
exports._tr_tally = _tr_tally;
exports._tr_align = _tr_align;

},{"../utils/common":41}],53:[function(require,module,exports){
'use strict';

// (C) 1995-2013 Jean-loup Gailly and Mark Adler
// (C) 2014-2017 Vitaly Puzrin and Andrey Tupitsin
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
//
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
//
// 1. The origin of this software must not be misrepresented; you must not
//   claim that you wrote the original software. If you use this software
//   in a product, an acknowledgment in the product documentation would be
//   appreciated but is not required.
// 2. Altered source versions must be plainly marked as such, and must not be
//   misrepresented as being the original software.
// 3. This notice may not be removed or altered from any source distribution.

function ZStream() {
  /* next input byte */
  this.input = null; // JS specific, because we have no pointers
  this.next_in = 0;
  /* number of bytes available at input */
  this.avail_in = 0;
  /* total number of input bytes read so far */
  this.total_in = 0;
  /* next output byte should be put there */
  this.output = null; // JS specific, because we have no pointers
  this.next_out = 0;
  /* remaining free space at output */
  this.avail_out = 0;
  /* total number of bytes output so far */
  this.total_out = 0;
  /* last error message, NULL if no error */
  this.msg = ''/*Z_NULL*/;
  /* not visible by applications */
  this.state = null;
  /* best guess about the data type: binary or text */
  this.data_type = 2/*Z_UNKNOWN*/;
  /* adler32 value of the uncompressed data */
  this.adler = 0;
}

module.exports = ZStream;

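/* A minimal illustrative sketch (not part of pako, never called): how a caller
 * typically fills in a ZStream's public fields before processing a chunk. The
 * helper name and the 16 KiB output buffer size are arbitrary examples.
 */
function example_new_stream(inputBytes) {
  var strm = new ZStream();
  strm.input = inputBytes;             // e.g. a Uint8Array of source data
  strm.next_in = 0;
  strm.avail_in = inputBytes.length;
  strm.output = new Uint8Array(16384); // caller-provided output chunk
  strm.next_out = 0;
  strm.avail_out = strm.output.length;
  return strm;
}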
},{}],54:[function(require,module,exports){
'use strict';
module.exports = typeof setImmediate === 'function' ? setImmediate :
  function setImmediate() {
    var args = [].slice.apply(arguments);
    args.splice(1, 0, 0);
    setTimeout.apply(null, args);
  };

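/* A minimal illustrative sketch (not part of the shim above, never called):
 * when native setImmediate is missing, the fallback splices a 0ms delay in as
 * the second argument so extra parameters still reach the callback, i.e. a
 * call like shim(fn, a, b) is forwarded as setTimeout(fn, 0, a, b). The helper
 * name is hypothetical.
 */
function example_to_setTimeout_args(callArgs) {
  var args = callArgs.slice();
  args.splice(1, 0, 0); // [fn, a, b] -> [fn, 0, a, b]
  return args;
}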
},{}]},{},[10])(10)
});