Fix slow loading

pull/911/head
subdiox 6 years ago
parent 479b4b7d82
commit c0d136ccd8

@ -1,357 +0,0 @@
/**
* archive.js
*
* Provides base functionality for unarchiving.
*
* Licensed under the MIT License
*
* Copyright(c) 2011 Google Inc.
*/
/* global bitjs */
var bitjs = bitjs || {};
bitjs.archive = bitjs.archive || {};
(function() {
// ===========================================================================
// Stolen from Closure because it's the best way to do Java-like inheritance.
bitjs.base = function(me, optMethodName, varArgs) {
var caller = arguments.callee.caller;
if (caller.superClass_) {
// This is a constructor. Call the superclass constructor.
return caller.superClass_.constructor.apply(
me, Array.prototype.slice.call(arguments, 1));
}
var args = Array.prototype.slice.call(arguments, 2);
var foundCaller = false;
for (var ctor = me.constructor;
ctor; ctor = ctor.superClass_ && ctor.superClass_.constructor) {
if (ctor.prototype[optMethodName] === caller) {
foundCaller = true;
} else if (foundCaller) {
return ctor.prototype[optMethodName].apply(me, args);
}
}
// If we did not find the caller in the prototype chain,
// then one of two things happened:
// 1) The caller is an instance method.
// 2) This method was not called by the right caller.
if (me[optMethodName] === caller) {
return me.constructor.prototype[optMethodName].apply(me, args);
} else {
throw Error(
"goog.base called from a method of one name " +
"to a method of a different name");
}
};
bitjs.inherits = function(childCtor, parentCtor) {
/** @constructor */
function TempCtor() {}
TempCtor.prototype = parentCtor.prototype;
childCtor.superClass_ = parentCtor.prototype;
childCtor.prototype = new TempCtor();
childCtor.prototype.constructor = childCtor;
};
// ===========================================================================
/**
* An unarchive event.
*
* @param {string} type The event type.
* @constructor
*/
bitjs.archive.UnarchiveEvent = function(type) {
/**
* The event type.
*
* @type {string}
*/
this.type = type;
};
/**
* The UnarchiveEvent types.
*/
bitjs.archive.UnarchiveEvent.Type = {
START: "start",
PROGRESS: "progress",
EXTRACT: "extract",
FINISH: "finish",
INFO: "info",
ERROR: "error"
};
/**
* Useful for passing info up to the client (for debugging).
*
* @param {string} msg The info message.
*/
bitjs.archive.UnarchiveInfoEvent = function(msg) {
bitjs.base(this, bitjs.archive.UnarchiveEvent.Type.INFO);
/**
* The information message.
*
* @type {string}
*/
this.msg = msg;
};
bitjs.inherits(bitjs.archive.UnarchiveInfoEvent, bitjs.archive.UnarchiveEvent);
/**
* An unrecoverable error has occurred.
*
* @param {string} msg The error message.
*/
bitjs.archive.UnarchiveErrorEvent = function(msg) {
bitjs.base(this, bitjs.archive.UnarchiveEvent.Type.ERROR);
/**
* The error message.
*
* @type {string}
*/
this.msg = msg;
};
bitjs.inherits(bitjs.archive.UnarchiveErrorEvent, bitjs.archive.UnarchiveEvent);
/**
* Start event.
*
* @param {string} msg The info message.
*/
bitjs.archive.UnarchiveStartEvent = function() {
bitjs.base(this, bitjs.archive.UnarchiveEvent.Type.START);
};
bitjs.inherits(bitjs.archive.UnarchiveStartEvent, bitjs.archive.UnarchiveEvent);
/**
* Finish event.
*
* @param {string} msg The info message.
*/
bitjs.archive.UnarchiveFinishEvent = function() {
bitjs.base(this, bitjs.archive.UnarchiveEvent.Type.FINISH);
};
bitjs.inherits(bitjs.archive.UnarchiveFinishEvent, bitjs.archive.UnarchiveEvent);
/**
* Progress event.
*/
bitjs.archive.UnarchiveProgressEvent = function(
currentFilename,
currentFileNumber,
currentBytesUnarchivedInFile,
currentBytesUnarchived,
totalUncompressedBytesInArchive,
totalFilesInArchive) {
bitjs.base(this, bitjs.archive.UnarchiveEvent.Type.PROGRESS);
this.currentFilename = currentFilename;
this.currentFileNumber = currentFileNumber;
this.currentBytesUnarchivedInFile = currentBytesUnarchivedInFile;
this.totalFilesInArchive = totalFilesInArchive;
this.currentBytesUnarchived = currentBytesUnarchived;
this.totalUncompressedBytesInArchive = totalUncompressedBytesInArchive;
};
bitjs.inherits(bitjs.archive.UnarchiveProgressEvent, bitjs.archive.UnarchiveEvent);
/**
* All extracted files returned by an Unarchiver will implement
* the following interface:
*
* interface UnarchivedFile {
* string filename
* TypedArray fileData
* }
*
*/
/**
* Extract event.
*/
bitjs.archive.UnarchiveExtractEvent = function(unarchivedFile) {
bitjs.base(this, bitjs.archive.UnarchiveEvent.Type.EXTRACT);
/**
* @type {UnarchivedFile}
*/
this.unarchivedFile = unarchivedFile;
};
bitjs.inherits(bitjs.archive.UnarchiveExtractEvent, bitjs.archive.UnarchiveEvent);
/**
* Base class for all Unarchivers.
*
* @param {ArrayBuffer} arrayBuffer The Array Buffer.
* @param {string} optPathToBitJS Optional string for where the BitJS files are located.
* @constructor
*/
bitjs.archive.Unarchiver = function(arrayBuffer, optPathToBitJS) {
/**
* The ArrayBuffer object.
* @type {ArrayBuffer}
* @protected
*/
this.ab = arrayBuffer;
/**
* The path to the BitJS files.
* @type {string}
* @private
*/
this.pathToBitJS_ = optPathToBitJS || "";
/**
* A map from event type to an array of listeners.
* @type {Map.<string, Array>}
*/
this.listeners_ = {};
for (var type in bitjs.archive.UnarchiveEvent.Type) {
this.listeners_[bitjs.archive.UnarchiveEvent.Type[type]] = [];
}
};
/**
* Private web worker initialized during start().
* @type {Worker}
* @private
*/
bitjs.archive.Unarchiver.prototype.worker_ = null;
/**
* This method must be overridden by the subclass to return the script filename.
* @return {string} The script filename.
* @protected.
*/
bitjs.archive.Unarchiver.prototype.getScriptFileName = function() {
throw "Subclasses of AbstractUnarchiver must overload getScriptFileName()";
};
/**
* Adds an event listener for UnarchiveEvents.
*
* @param {string} Event type.
* @param {function} An event handler function.
*/
bitjs.archive.Unarchiver.prototype.addEventListener = function(type, listener) {
if (type in this.listeners_) {
if (this.listeners_[type].indexOf(listener) === -1) {
this.listeners_[type].push(listener);
}
}
};
/**
* Removes an event listener.
*
* @param {string} Event type.
* @param {EventListener|function} An event listener or handler function.
*/
bitjs.archive.Unarchiver.prototype.removeEventListener = function(type, listener) {
if (type in this.listeners_) {
var index = this.listeners_[type].indexOf(listener);
if (index !== -1) {
this.listeners_[type].splice(index, 1);
}
}
};
/**
* Receive an event and pass it to the listener functions.
*
* @param {bitjs.archive.UnarchiveEvent} e
* @private
*/
bitjs.archive.Unarchiver.prototype.handleWorkerEvent_ = function(e) {
if ((e instanceof bitjs.archive.UnarchiveEvent || e.type) &&
this.listeners_[e.type] instanceof Array) {
this.listeners_[e.type].forEach(function (listener) {
listener(e);
});
if (e.type === bitjs.archive.UnarchiveEvent.Type.FINISH) {
this.worker_.terminate();
}
}
};
/**
* Starts the unarchive in a separate Web Worker thread and returns immediately.
*/
bitjs.archive.Unarchiver.prototype.start = function() {
var me = this;
var scriptFileName = this.pathToBitJS_ + this.getScriptFileName();
if (scriptFileName) {
this.worker_ = new Worker(scriptFileName);
this.worker_.onerror = function(e) {
throw e;
};
this.worker_.onmessage = function(e) {
if (typeof e.data !== "string") {
// Assume that it is an UnarchiveEvent. Some browsers preserve the 'type'
// so that instanceof UnarchiveEvent returns true, but others do not.
me.handleWorkerEvent_(e.data);
}
};
this.worker_.postMessage({file: this.ab});
}
};
/**
* Terminates the Web Worker for this Unarchiver and returns immediately.
*/
bitjs.archive.Unarchiver.prototype.stop = function() {
if (this.worker_) {
this.worker_.terminate();
}
};
/**
* Unzipper
* @extends {bitjs.archive.Unarchiver}
* @constructor
*/
bitjs.archive.Unzipper = function(arrayBuffer, optPathToBitJS) {
bitjs.base(this, arrayBuffer, optPathToBitJS);
};
bitjs.inherits(bitjs.archive.Unzipper, bitjs.archive.Unarchiver);
bitjs.archive.Unzipper.prototype.getScriptFileName = function() {
return "unzip.js";
};
/**
* Unrarrer
* @extends {bitjs.archive.Unarchiver}
* @constructor
*/
bitjs.archive.Unrarrer = function(arrayBuffer, optPathToBitJS) {
bitjs.base(this, arrayBuffer, optPathToBitJS);
};
bitjs.inherits(bitjs.archive.Unrarrer, bitjs.archive.Unarchiver);
bitjs.archive.Unrarrer.prototype.getScriptFileName = function() {
return "unrar.js";
};
/**
* Untarrer
* @extends {bitjs.archive.Unarchiver}
* @constructor
*/
bitjs.archive.Untarrer = function(arrayBuffer, optPathToBitJS) {
bitjs.base(this, arrayBuffer, optPathToBitJS);
};
bitjs.inherits(bitjs.archive.Untarrer, bitjs.archive.Unarchiver);
bitjs.archive.Untarrer.prototype.getScriptFileName = function() {
return "untar.js";
};
})();

@ -0,0 +1,362 @@
/**
* archive.js
*
* Provides base functionality for unarchiving.
*
* Licensed under the MIT License
*
* Copyright(c) 2011 Google Inc.
*/
var bitjs = bitjs || {};
bitjs.archive = bitjs.archive || {};
/**
* An unarchive event.
*/
bitjs.archive.UnarchiveEvent = class {
/**
* @param {string} type The event type.
*/
constructor(type) {
/**
* The event type.
* @type {string}
*/
this.type = type;
}
}
/**
* The UnarchiveEvent types.
*/
bitjs.archive.UnarchiveEvent.Type = {
START: 'start',
PROGRESS: 'progress',
EXTRACT: 'extract',
FINISH: 'finish',
INFO: 'info',
ERROR: 'error'
};
/**
* Useful for passing info up to the client (for debugging).
*/
bitjs.archive.UnarchiveInfoEvent = class extends bitjs.archive.UnarchiveEvent {
/**
* @param {string} msg The info message.
*/
constructor(msg) {
super(bitjs.archive.UnarchiveEvent.Type.INFO);
/**
* The information message.
* @type {string}
*/
this.msg = msg;
}
}
/**
* An unrecoverable error has occurred.
*/
bitjs.archive.UnarchiveErrorEvent = class extends bitjs.archive.UnarchiveEvent {
/**
* @param {string} msg The error message.
*/
constructor(msg) {
super(bitjs.archive.UnarchiveEvent.Type.ERROR);
/**
* The error message.
* @type {string}
*/
this.msg = msg;
}
}
/**
* Start event.
*/
bitjs.archive.UnarchiveStartEvent = class extends bitjs.archive.UnarchiveEvent {
constructor() {
super(bitjs.archive.UnarchiveEvent.Type.START);
}
}
/**
* Finish event.
*/
bitjs.archive.UnarchiveFinishEvent = class extends bitjs.archive.UnarchiveEvent {
constructor() {
super(bitjs.archive.UnarchiveEvent.Type.FINISH);
}
}
/**
* Progress event.
*/
bitjs.archive.UnarchiveProgressEvent = class extends bitjs.archive.UnarchiveEvent {
/**
* @param {string} currentFilename
* @param {number} currentFileNumber
* @param {number} currentBytesUnarchivedInFile
* @param {number} currentBytesUnarchived
* @param {number} totalUncompressedBytesInArchive
* @param {number} totalFilesInArchive
* @param {number} totalCompressedBytesRead
*/
constructor(currentFilename, currentFileNumber, currentBytesUnarchivedInFile,
currentBytesUnarchived, totalUncompressedBytesInArchive, totalFilesInArchive,
totalCompressedBytesRead) {
super(bitjs.archive.UnarchiveEvent.Type.PROGRESS);
this.currentFilename = currentFilename;
this.currentFileNumber = currentFileNumber;
this.currentBytesUnarchivedInFile = currentBytesUnarchivedInFile;
this.totalFilesInArchive = totalFilesInArchive;
this.currentBytesUnarchived = currentBytesUnarchived;
this.totalUncompressedBytesInArchive = totalUncompressedBytesInArchive;
this.totalCompressedBytesRead = totalCompressedBytesRead;
}
}
/**
* Extract event.
*/
bitjs.archive.UnarchiveExtractEvent = class extends bitjs.archive.UnarchiveEvent {
/**
* @param {UnarchivedFile} unarchivedFile
*/
constructor(unarchivedFile) {
super(bitjs.archive.UnarchiveEvent.Type.EXTRACT);
/**
* @type {UnarchivedFile}
*/
this.unarchivedFile = unarchivedFile;
}
}
/**
* All extracted files returned by an Unarchiver will implement
* the following interface:
*
* interface UnarchivedFile {
* string filename
* TypedArray fileData
* }
*
*/
/**
* Base class for all Unarchivers.
*/
bitjs.archive.Unarchiver = class {
/**
* @param {ArrayBuffer} arrayBuffer The Array Buffer.
* @param {string} opt_pathToBitJS Optional string for where the BitJS files are located.
*/
constructor(arrayBuffer, opt_pathToBitJS) {
/**
* The ArrayBuffer object.
* @type {ArrayBuffer}
* @protected
*/
this.ab = arrayBuffer;
/**
* The path to the BitJS files.
* @type {string}
* @private
*/
this.pathToBitJS_ = opt_pathToBitJS || '/';
/**
* A map from event type to an array of listeners.
* @type {Object.<string, Array>}
*/
this.listeners_ = {};
for (let type in bitjs.archive.UnarchiveEvent.Type) {
this.listeners_[bitjs.archive.UnarchiveEvent.Type[type]] = [];
}
/**
* Private web worker initialized during start().
* @type {Worker}
* @private
*/
this.worker_ = null;
}
/**
* This method must be overridden by the subclass to return the script filename.
* @return {string} The script filename.
* @protected
*/
getScriptFileName() {
throw 'Subclasses of AbstractUnarchiver must overload getScriptFileName()';
}
/**
* Adds an event listener for UnarchiveEvents.
*
* @param {string} type The event type.
* @param {function} listener The event handler function.
*/
addEventListener(type, listener) {
if (type in this.listeners_) {
if (this.listeners_[type].indexOf(listener) == -1) {
this.listeners_[type].push(listener);
}
}
}
/**
* Removes an event listener.
*
* @param {string} type The event type.
* @param {EventListener|function} listener The event listener or handler function to remove.
*/
removeEventListener(type, listener) {
if (type in this.listeners_) {
const index = this.listeners_[type].indexOf(listener);
if (index != -1) {
this.listeners_[type].splice(index, 1);
}
}
}
/**
* Receive an event and pass it to the listener functions.
*
* @param {bitjs.archive.UnarchiveEvent} e
* @private
*/
handleWorkerEvent_(e) {
if ((e instanceof bitjs.archive.UnarchiveEvent || e.type) &&
this.listeners_[e.type] instanceof Array) {
this.listeners_[e.type].forEach(function (listener) { listener(e) });
if (e.type == bitjs.archive.UnarchiveEvent.Type.FINISH) {
this.worker_.terminate();
}
} else {
console.log(e);
}
}
/**
* Starts the unarchive in a separate Web Worker thread and returns immediately.
*/
start() {
const me = this;
const scriptFileName = this.pathToBitJS_ + this.getScriptFileName();
if (scriptFileName) {
this.worker_ = new Worker(scriptFileName);
this.worker_.onerror = function(e) {
console.log('Worker error: message = ' + e.message);
throw e;
};
this.worker_.onmessage = function(e) {
if (typeof e.data == 'string') {
// Just log any strings the workers pump our way.
console.log(e.data);
} else {
// Assume that it is an UnarchiveEvent. Some browsers preserve the 'type'
// so that instanceof UnarchiveEvent returns true, but others do not.
me.handleWorkerEvent_(e.data);
}
};
const ab = this.ab;
this.worker_.postMessage({
file: ab,
logToConsole: false,
});
this.ab = null;
}
}
/**
* Adds more bytes to the unarchiver's Worker thread.
*/
update(ab) {
if (this.worker_) {
this.worker_.postMessage({bytes: ab});
}
}
/**
* Terminates the Web Worker for this Unarchiver and returns immediately.
*/
stop() {
if (this.worker_) {
this.worker_.terminate();
}
}
}
/**
* Unzipper
*/
bitjs.archive.Unzipper = class extends bitjs.archive.Unarchiver {
constructor(arrayBuffer, opt_pathToBitJS) {
super(arrayBuffer, opt_pathToBitJS);
}
getScriptFileName() { return 'archive/unzip.js'; }
}
/**
* Unrarrer
*/
bitjs.archive.Unrarrer = class extends bitjs.archive.Unarchiver {
constructor(arrayBuffer, opt_pathToBitJS) {
super(arrayBuffer, opt_pathToBitJS);
}
getScriptFileName() { return 'archive/unrar.js'; }
}
/**
* Untarrer
*/
bitjs.archive.Untarrer = class extends bitjs.archive.Unarchiver {
constructor(arrayBuffer, opt_pathToBitJS) {
super(arrayBuffer, opt_pathToBitJS);
}
getScriptFileName() { return 'archive/untar.js'; }
}
/**
* Factory method that creates an unarchiver based on the byte signature found
* in the arrayBuffer.
* @param {ArrayBuffer} ab
* @param {string=} opt_pathToBitJS Path to the unarchiver script files.
* @return {bitjs.archive.Unarchiver}
*/
bitjs.archive.GetUnarchiver = function(ab, opt_pathToBitJS) {
if (ab.byteLength < 10) {
return null;
}
let unarchiver = null;
const pathToBitJS = opt_pathToBitJS || '';
const h = new Uint8Array(ab, 0, 10);
if (h[0] == 0x52 && h[1] == 0x61 && h[2] == 0x72 && h[3] == 0x21) { // Rar!
unarchiver = new bitjs.archive.Unrarrer(ab, pathToBitJS);
} else if (h[0] == 0x50 && h[1] == 0x4B) { // PK (Zip)
unarchiver = new bitjs.archive.Unzipper(ab, pathToBitJS);
} else { // Try with tar
unarchiver = new bitjs.archive.Untarrer(ab, pathToBitJS);
}
return unarchiver;
};
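A usage sketch (not part of the commit) of how a page could drive the API above: GetUnarchiver picks the right worker from the byte signature, the listeners receive the event objects defined earlier, and update() feeds later chunks of a still-downloading archive. The variables arrayBuffer and nextArrayBuffer and the helpers saveFile and updateProgressBar are placeholders.
const unarchiver = bitjs.archive.GetUnarchiver(arrayBuffer, 'path/to/bitjs/');
unarchiver.addEventListener(bitjs.archive.UnarchiveEvent.Type.EXTRACT, function(e) {
  // e.unarchivedFile implements the UnarchivedFile interface: {filename, fileData}.
  saveFile(e.unarchivedFile.filename, e.unarchivedFile.fileData);
});
unarchiver.addEventListener(bitjs.archive.UnarchiveEvent.Type.PROGRESS, function(e) {
  updateProgressBar(e.currentBytesUnarchived / e.totalUncompressedBytesInArchive);
});
unarchiver.start();
// Feed subsequent ArrayBuffers of the same archive to the running worker:
// unarchiver.update(nextArrayBuffer);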

File diff suppressed because it is too large

File diff suppressed because it is too large

@ -1,17 +1,17 @@
/**
* untar.js
*
* Licensed under the MIT License
*
* Copyright(c) 2011 Google Inc.
*
* Reference Documentation:
*
* TAR format: http://www.gnu.org/software/automake/manual/tar/Standard.html
*/
* untar.js
*
* Licensed under the MIT License
*
* Copyright(c) 2011 Google Inc.
*
* Reference Documentation:
*
* TAR format: http://www.gnu.org/software/automake/manual/tar/Standard.html
*/
// This file expects to be invoked as a Worker (see onmessage below).
importScripts('bytestream.js');
importScripts('../io/bytestream.js');
importScripts('archive.js');
const UnarchiveState = {
@ -42,15 +42,6 @@ const info = function(str) {
const err = function(str) {
postMessage(new bitjs.archive.UnarchiveErrorEvent(str));
};
// Removes all characters from the first zero-byte in the string onwards.
var readCleanString = function(bstr, numBytes) {
var str = bstr.readString(numBytes);
var zIndex = str.indexOf(String.fromCharCode(0));
return zIndex != -1 ? str.substr(0, zIndex) : str;
};
const postProgress = function() {
postMessage(new bitjs.archive.UnarchiveProgressEvent(
currentFilename,
@ -63,6 +54,12 @@ const postProgress = function() {
));
};
// Removes all characters from the first zero-byte in the string onwards.
const readCleanString = function(bstr, numBytes) {
const str = bstr.readString(numBytes);
const zIndex = str.indexOf(String.fromCharCode(0));
return zIndex != -1 ? str.substr(0, zIndex) : str;
};
class TarLocalFile {
// takes a ByteStream and parses out the local file information
@ -94,7 +91,7 @@ class TarLocalFile {
if (this.prefix.length) {
this.name = this.prefix + this.name;
}
bstream.readBytes(12); // 512 - 500in
bstream.readBytes(12); // 512 - 500
} else {
bstream.readBytes(255); // 512 - 257
}

@ -0,0 +1,665 @@
/**
* unzip.js
*
* Licensed under the MIT License
*
* Copyright(c) 2011 Google Inc.
* Copyright(c) 2011 antimatter15
*
* Reference Documentation:
*
* ZIP format: http://www.pkware.com/documents/casestudies/APPNOTE.TXT
* DEFLATE format: http://tools.ietf.org/html/rfc1951
*/
// This file expects to be invoked as a Worker (see onmessage below).
importScripts('../io/bitstream.js');
importScripts('../io/bytebuffer.js');
importScripts('../io/bytestream.js');
importScripts('archive.js');
const UnarchiveState = {
NOT_STARTED: 0,
UNARCHIVING: 1,
WAITING: 2,
FINISHED: 3,
};
// State - consider putting these into a class.
let unarchiveState = UnarchiveState.NOT_STARTED;
let bytestream = null;
let allLocalFiles = null;
let logToConsole = false;
// Progress variables.
let currentFilename = "";
let currentFileNumber = 0;
let currentBytesUnarchivedInFile = 0;
let currentBytesUnarchived = 0;
let totalUncompressedBytesInArchive = 0;
let totalFilesInArchive = 0;
// Helper functions.
const info = function(str) {
postMessage(new bitjs.archive.UnarchiveInfoEvent(str));
};
const err = function(str) {
postMessage(new bitjs.archive.UnarchiveErrorEvent(str));
};
const postProgress = function() {
postMessage(new bitjs.archive.UnarchiveProgressEvent(
currentFilename,
currentFileNumber,
currentBytesUnarchivedInFile,
currentBytesUnarchived,
totalUncompressedBytesInArchive,
totalFilesInArchive,
bytestream.getNumBytesRead(),
));
};
const zLocalFileHeaderSignature = 0x04034b50;
const zArchiveExtraDataSignature = 0x08064b50;
const zCentralFileHeaderSignature = 0x02014b50;
const zDigitalSignatureSignature = 0x05054b50;
const zEndOfCentralDirSignature = 0x06064b50;
const zEndOfCentralDirLocatorSignature = 0x07064b50;
// mask for getting the Nth bit (zero-based)
const BIT = [ 0x01, 0x02, 0x04, 0x08,
0x10, 0x20, 0x40, 0x80,
0x100, 0x200, 0x400, 0x800,
0x1000, 0x2000, 0x4000, 0x8000];
class ZipLocalFile {
// takes a ByteStream and parses out the local file information
constructor(bstream) {
if (typeof bstream != typeof {} || !bstream.readNumber || typeof bstream.readNumber != typeof function(){}) {
return null;
}
bstream.readNumber(4); // swallow signature
this.version = bstream.readNumber(2);
this.generalPurpose = bstream.readNumber(2);
this.compressionMethod = bstream.readNumber(2);
this.lastModFileTime = bstream.readNumber(2);
this.lastModFileDate = bstream.readNumber(2);
this.crc32 = bstream.readNumber(4);
this.compressedSize = bstream.readNumber(4);
this.uncompressedSize = bstream.readNumber(4);
this.fileNameLength = bstream.readNumber(2);
this.extraFieldLength = bstream.readNumber(2);
this.filename = null;
if (this.fileNameLength > 0) {
this.filename = bstream.readString(this.fileNameLength);
}
this.extraField = null;
if (this.extraFieldLength > 0) {
this.extraField = bstream.readString(this.extraFieldLength);
//info(" extra field=" + this.extraField);
}
// read in the compressed data
this.fileData = null;
if (this.compressedSize > 0) {
this.fileData = new Uint8Array(bstream.readBytes(this.compressedSize));
}
// TODO: deal with data descriptor if present (we currently assume no data descriptor!)
// "This descriptor exists only if bit 3 of the general purpose bit flag is set"
// But how do you figure out how big the file data is if you don't know the compressedSize
// from the header?!?
if ((this.generalPurpose & BIT[3]) != 0) {
this.crc32 = bstream.readNumber(4);
this.compressedSize = bstream.readNumber(4);
this.uncompressedSize = bstream.readNumber(4);
}
// Now that we have all the bytes for this file, we can print out some information.
if (logToConsole) {
info("Zip Local File Header:");
info(" version=" + this.version);
info(" general purpose=" + this.generalPurpose);
info(" compression method=" + this.compressionMethod);
info(" last mod file time=" + this.lastModFileTime);
info(" last mod file date=" + this.lastModFileDate);
info(" crc32=" + this.crc32);
info(" compressed size=" + this.compressedSize);
info(" uncompressed size=" + this.uncompressedSize);
info(" file name length=" + this.fileNameLength);
info(" extra field length=" + this.extraFieldLength);
info(" filename = '" + this.filename + "'");
}
}
// determine what kind of compressed data we have and decompress
unzip() {
// Zip Version 1.0, no compression (store only)
if (this.compressionMethod == 0 ) {
if (logToConsole) {
info("ZIP v"+this.version+", store only: " + this.filename + " (" + this.compressedSize + " bytes)");
}
currentBytesUnarchivedInFile = this.compressedSize;
currentBytesUnarchived += this.compressedSize;
}
// version == 20, compression method == 8 (DEFLATE)
else if (this.compressionMethod == 8) {
if (logToConsole) {
info("ZIP v2.0, DEFLATE: " + this.filename + " (" + this.compressedSize + " bytes)");
}
this.fileData = inflate(this.fileData, this.uncompressedSize);
}
else {
err("UNSUPPORTED VERSION/FORMAT: ZIP v" + this.version + ", compression method=" + this.compressionMethod + ": " + this.filename + " (" + this.compressedSize + " bytes)");
this.fileData = null;
}
}
}
// returns a table of Huffman codes
// each entry's index is its code and its value is a JavaScript object
// containing {length: 6, symbol: X}
function getHuffmanCodes(bitLengths) {
// ensure bitLengths is an array containing at least one element
if (typeof bitLengths != typeof [] || bitLengths.length < 1) {
err("Error! getHuffmanCodes() called with an invalid array");
return null;
}
// Reference: http://tools.ietf.org/html/rfc1951#page-8
const numLengths = bitLengths.length;
const bl_count = [];
let MAX_BITS = 1;
// Step 1: count up how many codes of each length we have
for (let i = 0; i < numLengths; ++i) {
const length = bitLengths[i];
// test to ensure each bit length is a positive, non-zero number
if (typeof length != typeof 1 || length < 0) {
err("bitLengths contained an invalid number in getHuffmanCodes(): " + length + " of type " + (typeof length));
return null;
}
// increment the appropriate bitlength count
if (bl_count[length] == undefined) bl_count[length] = 0;
// a length of zero means this symbol is not participating in the huffman coding
if (length > 0) bl_count[length]++;
if (length > MAX_BITS) MAX_BITS = length;
}
// Step 2: Find the numerical value of the smallest code for each code length
const next_code = [];
let code = 0;
for (let bits = 1; bits <= MAX_BITS; ++bits) {
const length = bits-1;
// ensure undefined lengths are zero
if (bl_count[length] == undefined) bl_count[length] = 0;
code = (code + bl_count[bits-1]) << 1;
next_code[bits] = code;
}
// Step 3: Assign numerical values to all codes
const table = {};
let tableLength = 0;
for (let n = 0; n < numLengths; ++n) {
const len = bitLengths[n];
if (len != 0) {
table[next_code[len]] = { length: len, symbol: n }; //, bitstring: binaryValueToString(next_code[len],len) };
tableLength++;
next_code[len]++;
}
}
table.maxLength = tableLength;
return table;
}
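// Worked example (illustrative only, not part of the worker): for bit lengths
// [2, 1, 3, 3] the steps above give bl_count = {1: 1, 2: 1, 3: 2} and
// next_code = {1: 0, 2: 2, 3: 6}, so getHuffmanCodes([2, 1, 3, 3]) returns
//   table[0b10]  = {length: 2, symbol: 0}
//   table[0b0]   = {length: 1, symbol: 1}
//   table[0b110] = {length: 3, symbol: 2}
//   table[0b111] = {length: 3, symbol: 3}
// i.e. shorter bit lengths get lexically smaller codes and equal-length codes
// are assigned in increasing symbol order, per RFC 1951 section 3.2.2.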
/*
The Huffman codes for the two alphabets are fixed, and are not
represented explicitly in the data. The Huffman code lengths
for the literal/length alphabet are:
Lit Value Bits Codes
--------- ---- -----
0 - 143 8 00110000 through
10111111
144 - 255 9 110010000 through
111111111
256 - 279 7 0000000 through
0010111
280 - 287 8 11000000 through
11000111
*/
// fixed Huffman codes go from 7-9 bits, so we need an array whose index can hold up to 9 bits
let fixedHCtoLiteral = null;
let fixedHCtoDistance = null;
function getFixedLiteralTable() {
// create once
if (!fixedHCtoLiteral) {
const bitlengths = new Array(288);
for (let i = 0; i <= 143; ++i) bitlengths[i] = 8;
for (let i = 144; i <= 255; ++i) bitlengths[i] = 9;
for (let i = 256; i <= 279; ++i) bitlengths[i] = 7;
for (let i = 280; i <= 287; ++i) bitlengths[i] = 8;
// get huffman code table
fixedHCtoLiteral = getHuffmanCodes(bitlengths);
}
return fixedHCtoLiteral;
}
function getFixedDistanceTable() {
// create once
if (!fixedHCtoDistance) {
const bitlengths = new Array(32);
for (let i = 0; i < 32; ++i) { bitlengths[i] = 5; }
// get huffman code table
fixedHCtoDistance = getHuffmanCodes(bitlengths);
}
return fixedHCtoDistance;
}
// extract one bit at a time until we find a matching Huffman Code
// then return that symbol
function decodeSymbol(bstream, hcTable) {
let code = 0;
let len = 0;
let match = false;
// loop until we match
for (;;) {
// read in next bit
const bit = bstream.readBits(1);
code = (code<<1) | bit;
++len;
// check against Huffman Code table and break if found
if (hcTable.hasOwnProperty(code) && hcTable[code].length == len) {
break;
}
if (len > hcTable.maxLength) {
err("Bit stream out of sync, didn't find a Huffman Code, length was " + len +
" and table only max code length of " + hcTable.maxLength);
break;
}
}
return hcTable[code].symbol;
}
const CodeLengthCodeOrder = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15];
/*
Extra Extra Extra
Code Bits Length(s) Code Bits Lengths Code Bits Length(s)
---- ---- ------ ---- ---- ------- ---- ---- -------
257 0 3 267 1 15,16 277 4 67-82
258 0 4 268 1 17,18 278 4 83-98
259 0 5 269 2 19-22 279 4 99-114
260 0 6 270 2 23-26 280 4 115-130
261 0 7 271 2 27-30 281 5 131-162
262 0 8 272 2 31-34 282 5 163-194
263 0 9 273 3 35-42 283 5 195-226
264 0 10 274 3 43-50 284 5 227-257
265 1 11,12 275 3 51-58 285 0 258
266 1 13,14 276 3 59-66
*/
const LengthLookupTable = [
[0,3], [0,4], [0,5], [0,6],
[0,7], [0,8], [0,9], [0,10],
[1,11], [1,13], [1,15], [1,17],
[2,19], [2,23], [2,27], [2,31],
[3,35], [3,43], [3,51], [3,59],
[4,67], [4,83], [4,99], [4,115],
[5,131], [5,163], [5,195], [5,227],
[0,258]
];
/*
Extra Extra Extra
Code Bits Dist Code Bits Dist Code Bits Distance
---- ---- ---- ---- ---- ------ ---- ---- --------
0 0 1 10 4 33-48 20 9 1025-1536
1 0 2 11 4 49-64 21 9 1537-2048
2 0 3 12 5 65-96 22 10 2049-3072
3 0 4 13 5 97-128 23 10 3073-4096
4 1 5,6 14 6 129-192 24 11 4097-6144
5 1 7,8 15 6 193-256 25 11 6145-8192
6 2 9-12 16 7 257-384 26 12 8193-12288
7 2 13-16 17 7 385-512 27 12 12289-16384
8 3 17-24 18 8 513-768 28 13 16385-24576
9 3 25-32 19 8 769-1024 29 13 24577-32768
*/
const DistLookupTable = [
[0,1], [0,2], [0,3], [0,4],
[1,5], [1,7],
[2,9], [2,13],
[3,17], [3,25],
[4,33], [4,49],
[5,65], [5,97],
[6,129], [6,193],
[7,257], [7,385],
[8,513], [8,769],
[9,1025], [9,1537],
[10,2049], [10,3073],
[11,4097], [11,6145],
[12,8193], [12,12289],
[13,16385], [13,24577]
];
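// Worked example (illustrative only): if decodeSymbol() returns literal/length
// symbol 266, then LengthLookupTable[266 - 257] is [1, 13], so
// length = 13 + bstream.readBits(1), i.e. 13 or 14. If the distance symbol is
// then 6, DistLookupTable[6] is [2, 9], so distance = 9 + bstream.readBits(2),
// i.e. 9 through 12, matching the RFC 1951 tables quoted above.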
function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
/*
loop (until end of block code recognized)
decode literal/length value from input stream
if value < 256
copy value (literal byte) to output stream
otherwise
if value = end of block (256)
break from loop
otherwise (value = 257..285)
decode distance from input stream
move backwards distance bytes in the output
stream, and copy length bytes from this
position to the output stream.
*/
let numSymbols = 0;
let blockSize = 0;
for (;;) {
const symbol = decodeSymbol(bstream, hcLiteralTable);
++numSymbols;
if (symbol < 256) {
// copy literal byte to output
buffer.insertByte(symbol);
blockSize++;
} else {
// end of block reached
if (symbol == 256) {
break;
} else {
const lengthLookup = LengthLookupTable[symbol - 257];
let length = lengthLookup[1] + bstream.readBits(lengthLookup[0]);
const distLookup = DistLookupTable[decodeSymbol(bstream, hcDistanceTable)];
let distance = distLookup[1] + bstream.readBits(distLookup[0]);
// now apply length and distance appropriately and copy to output
// TODO: check that backward distance < data.length?
// http://tools.ietf.org/html/rfc1951#page-11
// "Note also that the referenced string may overlap the current
// position; for example, if the last 2 bytes decoded have values
// X and Y, a string reference with <length = 5, distance = 2>
// adds X,Y,X,Y,X to the output stream."
//
// loop for each character
let ch = buffer.ptr - distance;
blockSize += length;
if(length > distance) {
const data = buffer.data;
while (length--) {
buffer.insertByte(data[ch++]);
}
} else {
buffer.insertBytes(buffer.data.subarray(ch, ch + length))
}
} // length-distance pair
} // length-distance pair or end-of-block
} // loop until we reach end of block
return blockSize;
}
// {Uint8Array} compressedData A Uint8Array of the compressed file data.
// compression method 8
// deflate: http://tools.ietf.org/html/rfc1951
function inflate(compressedData, numDecompressedBytes) {
// Bit stream representing the compressed data.
const bstream = new bitjs.io.BitStream(compressedData.buffer,
false /* rtl */,
compressedData.byteOffset,
compressedData.byteLength);
const buffer = new bitjs.io.ByteBuffer(numDecompressedBytes);
let blockSize = 0;
// block format: http://tools.ietf.org/html/rfc1951#page-9
let bFinal = 0;
do {
bFinal = bstream.readBits(1);
let bType = bstream.readBits(2);
blockSize = 0;
// no compression
if (bType == 0) {
// skip remaining bits in this byte
while (bstream.bitPtr != 0) bstream.readBits(1);
const len = bstream.readBits(16);
const nlen = bstream.readBits(16);
// TODO: check if nlen is the ones-complement of len?
if (len > 0) buffer.insertBytes(bstream.readBytes(len));
blockSize = len;
}
// fixed Huffman codes
else if (bType == 1) {
blockSize = inflateBlockData(bstream, getFixedLiteralTable(), getFixedDistanceTable(), buffer);
}
// dynamic Huffman codes
else if (bType == 2) {
const numLiteralLengthCodes = bstream.readBits(5) + 257;
const numDistanceCodes = bstream.readBits(5) + 1;
const numCodeLengthCodes = bstream.readBits(4) + 4;
// populate the array of code length codes (first de-compaction)
const codeLengthsCodeLengths = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0];
for (let i = 0; i < numCodeLengthCodes; ++i) {
codeLengthsCodeLengths[ CodeLengthCodeOrder[i] ] = bstream.readBits(3);
}
// get the Huffman Codes for the code lengths
const codeLengthsCodes = getHuffmanCodes(codeLengthsCodeLengths);
// now follow this mapping
/*
0 - 15: Represent code lengths of 0 - 15
16: Copy the previous code length 3 - 6 times.
The next 2 bits indicate repeat length
(0 = 3, ... , 3 = 6)
Example: Codes 8, 16 (+2 bits 11),
16 (+2 bits 10) will expand to
12 code lengths of 8 (1 + 6 + 5)
17: Repeat a code length of 0 for 3 - 10 times.
(3 bits of length)
18: Repeat a code length of 0 for 11 - 138 times
(7 bits of length)
*/
// to generate the true code lengths of the Huffman Codes for the literal
// and distance tables together
const literalCodeLengths = [];
let prevCodeLength = 0;
while (literalCodeLengths.length < numLiteralLengthCodes + numDistanceCodes) {
const symbol = decodeSymbol(bstream, codeLengthsCodes);
if (symbol <= 15) {
literalCodeLengths.push(symbol);
prevCodeLength = symbol;
} else if (symbol == 16) {
let repeat = bstream.readBits(2) + 3;
while (repeat--) {
literalCodeLengths.push(prevCodeLength);
}
} else if (symbol == 17) {
let repeat = bstream.readBits(3) + 3;
while (repeat--) {
literalCodeLengths.push(0);
}
} else if (symbol == 18) {
let repeat = bstream.readBits(7) + 11;
while (repeat--) {
literalCodeLengths.push(0);
}
}
}
// now split the distance code lengths out of the literal code array
const distanceCodeLengths = literalCodeLengths.splice(numLiteralLengthCodes, numDistanceCodes);
// now generate the true Huffman Code tables using these code lengths
const hcLiteralTable = getHuffmanCodes(literalCodeLengths);
const hcDistanceTable = getHuffmanCodes(distanceCodeLengths);
blockSize = inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer);
} else { // error
err("Error! Encountered deflate block of type 3");
return null;
}
// update progress
currentBytesUnarchivedInFile += blockSize;
currentBytesUnarchived += blockSize;
postProgress();
} while (bFinal != 1);
// we are done reading blocks if the bFinal bit was set for this block
// return the buffer data bytes
return buffer.data;
}
function unzip() {
let bstream = bytestream.tee();
// loop until we don't see any more local files
while (bstream.peekNumber(4) == zLocalFileHeaderSignature) {
const oneLocalFile = new ZipLocalFile(bstream);
// this should strip out directories/folders
if (oneLocalFile && oneLocalFile.uncompressedSize > 0 && oneLocalFile.fileData) {
// If we make it to this point and haven't thrown an error, we have successfully
// read in the data for a local file, so we can update the actual bytestream.
bytestream = bstream.tee();
allLocalFiles.push(oneLocalFile);
totalUncompressedBytesInArchive += oneLocalFile.uncompressedSize;
// update progress
currentFilename = oneLocalFile.filename;
currentFileNumber = allLocalFiles.length - 1;
currentBytesUnarchivedInFile = 0;
// Actually do the unzipping.
oneLocalFile.unzip();
if (oneLocalFile.fileData != null) {
postMessage(new bitjs.archive.UnarchiveExtractEvent(oneLocalFile));
postProgress();
}
}
}
totalFilesInArchive = allLocalFiles.length;
// archive extra data record
if (bstream.peekNumber(4) == zArchiveExtraDataSignature) {
if (logToConsole) {
info(" Found an Archive Extra Data Signature");
}
// skipping this record for now
bstream.readNumber(4);
const archiveExtraFieldLength = bstream.readNumber(4);
bstream.readString(archiveExtraFieldLength);
}
// central directory structure
// TODO: handle the rest of the structures (Zip64 stuff)
if (bytestream.peekNumber(4) == zCentralFileHeaderSignature) {
if (logToConsole) {
info(" Found a Central File Header");
}
// read all file headers
while (bstream.peekNumber(4) == zCentralFileHeaderSignature) {
bstream.readNumber(4); // signature
bstream.readNumber(2); // version made by
bstream.readNumber(2); // version needed to extract
bstream.readNumber(2); // general purpose bit flag
bstream.readNumber(2); // compression method
bstream.readNumber(2); // last mod file time
bstream.readNumber(2); // last mod file date
bstream.readNumber(4); // crc32
bstream.readNumber(4); // compressed size
bstream.readNumber(4); // uncompressed size
const fileNameLength = bstream.readNumber(2); // file name length
const extraFieldLength = bstream.readNumber(2); // extra field length
const fileCommentLength = bstream.readNumber(2); // file comment length
bstream.readNumber(2); // disk number start
bstream.readNumber(2); // internal file attributes
bstream.readNumber(4); // external file attributes
bstream.readNumber(4); // relative offset of local header
bstream.readString(fileNameLength); // file name
bstream.readString(extraFieldLength); // extra field
bstream.readString(fileCommentLength); // file comment
}
}
// digital signature
if (bstream.peekNumber(4) == zDigitalSignatureSignature) {
if (logToConsole) {
info(" Found a Digital Signature");
}
bstream.readNumber(4);
const sizeOfSignature = bstream.readNumber(2);
bstream.readString(sizeOfSignature); // digital signature data
}
postProgress();
bytestream = bstream.tee();
}
// event.data.file has the first ArrayBuffer.
// event.data.bytes has all subsequent ArrayBuffers.
onmessage = function(event) {
const bytes = event.data.file || event.data.bytes;
logToConsole = !!event.data.logToConsole;
// This is the very first time we have been called. Initialize the bytestream.
if (!bytestream) {
bytestream = new bitjs.io.ByteStream(bytes);
} else {
bytestream.push(bytes);
}
if (unarchiveState === UnarchiveState.NOT_STARTED) {
currentFilename = "";
currentFileNumber = 0;
currentBytesUnarchivedInFile = 0;
currentBytesUnarchived = 0;
totalUncompressedBytesInArchive = 0;
totalFilesInArchive = 0;
allLocalFiles = [];
postMessage(new bitjs.archive.UnarchiveStartEvent());
unarchiveState = UnarchiveState.UNARCHIVING;
postProgress();
}
if (unarchiveState === UnarchiveState.UNARCHIVING ||
unarchiveState === UnarchiveState.WAITING) {
try {
unzip();
unarchiveState = UnarchiveState.FINISHED;
postMessage(new bitjs.archive.UnarchiveFinishEvent());
} catch (e) {
if (typeof e === 'string' && e.startsWith('Error! Overflowed')) {
// Overrun the buffer.
unarchiveState = UnarchiveState.WAITING;
} else {
console.error('Found an error while unzipping');
console.dir(e);
throw e;
}
}
}
};
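For reference, a minimal sketch (not in the repo) of driving this worker directly with the message shape documented above; in practice bitjs.archive.Unzipper does this via start() and update(). The script path and the chunk variables are assumptions.
const worker = new Worker('path/to/bitjs/archive/unzip.js');
worker.onmessage = function(e) {
  // Either a plain log string or a structured clone of an UnarchiveEvent.
  console.log(typeof e.data === 'string' ? e.data : e.data.type);
};
worker.postMessage({file: firstArrayBuffer, logToConsole: false});
// Later chunks of the same archive:
worker.postMessage({bytes: nextArrayBuffer});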

@ -0,0 +1,288 @@
/*
* bitstream.js
*
* Provides readers for bitstreams.
*
* Licensed under the MIT License
*
* Copyright(c) 2011 Google Inc.
* Copyright(c) 2011 antimatter15
*/
var bitjs = bitjs || {};
bitjs.io = bitjs.io || {};
/**
* This object allows you to peek and consume bits and bytes out of a stream.
* Note that this stream is optimized, and thus, will *NOT* throw an error if
* the end of the stream is reached. Only use this in scenarios where you
* already have all the bits you need.
*/
bitjs.io.BitStream = class {
/**
* @param {ArrayBuffer} ab An ArrayBuffer object (for a Uint8Array, pass its underlying buffer).
* @param {boolean} rtl Whether the stream reads bits from the byte starting
* from bit 7 to 0 (true) or bit 0 to 7 (false).
* @param {Number} opt_offset The offset into the ArrayBuffer
* @param {Number} opt_length The length of this BitStream
*/
constructor(ab, rtl, opt_offset, opt_length) {
if (!(ab instanceof ArrayBuffer)) {
throw 'Error! BitStream constructed with an invalid ArrayBuffer object';
}
const offset = opt_offset || 0;
const length = opt_length || ab.byteLength;
/**
* The bytes in the stream.
* @type {Uint8Array}
* @private
*/
this.bytes = new Uint8Array(ab, offset, length);
/**
* The byte in the stream that we are currently on.
* @type {Number}
* @private
*/
this.bytePtr = 0;
/**
* The bit in the current byte that we will read next (can have values 0 through 7).
* @type {Number}
* @private
*/
this.bitPtr = 0; // tracks which bit we are on (can have values 0 through 7)
/**
* An ever-increasing number.
* @type {Number}
* @private
*/
this.bitsRead_ = 0;
this.peekBits = rtl ? this.peekBits_rtl : this.peekBits_ltr;
}
/**
* Returns how many bits have been read in the stream since the beginning of time.
*/
getNumBitsRead() {
return this.bitsRead_;
}
/**
* Returns how many bits are currently in the stream left to be read.
*/
getNumBitsLeft() {
const bitsLeftInByte = 8 - this.bitPtr;
return (this.bytes.byteLength - this.bytePtr - 1) * 8 + bitsLeftInByte;
}
/**
* byte0 byte1 byte2 byte3
* 7......0 | 7......0 | 7......0 | 7......0
*
* The bit pointer starts at bit0 of byte0 and moves left until it reaches
* bit7 of byte0, then jumps to bit0 of byte1, etc.
* @param {number} n The number of bits to peek, must be a positive integer.
* @param {boolean=} movePointers Whether to move the pointer, defaults false.
* @return {number} The peeked bits, as an unsigned number.
*/
peekBits_ltr(n, opt_movePointers) {
const NUM = parseInt(n, 10);
let num = NUM;
if (n !== num || num <= 0) {
return 0;
}
const BITMASK = bitjs.io.BitStream.BITMASK;
const movePointers = opt_movePointers || false;
let bytes = this.bytes;
let bytePtr = this.bytePtr;
let bitPtr = this.bitPtr;
let result = 0;
let bitsIn = 0;
// keep going until we have no more bits left to peek at
while (num > 0) {
// We overflowed the stream, so just return what we got.
if (bytePtr >= bytes.length) {
break;
}
const numBitsLeftInThisByte = (8 - bitPtr);
if (num >= numBitsLeftInThisByte) {
const mask = (BITMASK[numBitsLeftInThisByte] << bitPtr);
result |= (((bytes[bytePtr] & mask) >> bitPtr) << bitsIn);
bytePtr++;
bitPtr = 0;
bitsIn += numBitsLeftInThisByte;
num -= numBitsLeftInThisByte;
} else {
const mask = (BITMASK[num] << bitPtr);
result |= (((bytes[bytePtr] & mask) >> bitPtr) << bitsIn);
bitPtr += num;
break;
}
}
if (movePointers) {
this.bitPtr = bitPtr;
this.bytePtr = bytePtr;
this.bitsRead_ += NUM;
}
return result;
}
/**
* byte0 byte1 byte2 byte3
* 7......0 | 7......0 | 7......0 | 7......0
*
* The bit pointer starts at bit7 of byte0 and moves right until it reaches
* bit0 of byte0, then goes to bit7 of byte1, etc.
* @param {number} n The number of bits to peek. Must be a positive integer.
* @param {boolean=} movePointers Whether to move the pointer, defaults false.
* @return {number} The peeked bits, as an unsigned number.
*/
peekBits_rtl(n, opt_movePointers) {
const NUM = parseInt(n, 10);
let num = NUM;
if (n !== num || num <= 0) {
return 0;
}
const BITMASK = bitjs.io.BitStream.BITMASK;
const movePointers = opt_movePointers || false;
let bytes = this.bytes;
let bytePtr = this.bytePtr;
let bitPtr = this.bitPtr;
let result = 0;
// keep going until we have no more bits left to peek at
while (num > 0) {
// We overflowed the stream, so just return the bits we got.
if (bytePtr >= bytes.length) {
break;
}
const numBitsLeftInThisByte = (8 - bitPtr);
if (num >= numBitsLeftInThisByte) {
result <<= numBitsLeftInThisByte;
result |= (BITMASK[numBitsLeftInThisByte] & bytes[bytePtr]);
bytePtr++;
bitPtr = 0;
num -= numBitsLeftInThisByte;
} else {
result <<= num;
const numBits = 8 - num - bitPtr;
result |= ((bytes[bytePtr] & (BITMASK[num] << numBits)) >> numBits);
bitPtr += num;
break;
}
}
if (movePointers) {
this.bitPtr = bitPtr;
this.bytePtr = bytePtr;
this.bitsRead_ += NUM;
}
return result;
}
/**
* Peek at 16 bits from current position in the buffer.
* Bit at (bytePtr,bitPtr) has the highest position in returning data.
* Taken from getbits.hpp in unrar.
* TODO: Move this out of BitStream and into unrar.
*/
getBits() {
return (((((this.bytes[this.bytePtr] & 0xff) << 16) +
((this.bytes[this.bytePtr+1] & 0xff) << 8) +
((this.bytes[this.bytePtr+2] & 0xff))) >>> (8-this.bitPtr)) & 0xffff);
}
/**
* Reads n bits out of the stream, consuming them (moving the bit pointer).
* @param {number} n The number of bits to read. Must be a positive integer.
* @return {number} The read bits, as an unsigned number.
*/
readBits(n) {
return this.peekBits(n, true);
}
/**
* This returns n bytes as a sub-array, advancing the pointer if movePointers
* is true. Only use this for uncompressed blocks as this throws away remaining
* bits in the current byte.
* @param {number} n The number of bytes to peek. Must be a positive integer.
* @param {boolean=} movePointers Whether to move the pointer, defaults false.
* @return {Uint8Array} The subarray.
*/
peekBytes(n, opt_movePointers) {
const num = parseInt(n, 10);
if (n !== num || num < 0) {
throw 'Error! Called peekBytes() with a non-positive integer: ' + n;
} else if (num === 0) {
return new Uint8Array();
}
// Flush bits until we are byte-aligned.
// from http://tools.ietf.org/html/rfc1951#page-11
// "Any bits of input up to the next byte boundary are ignored."
while (this.bitPtr != 0) {
this.readBits(1);
}
const numBytesLeft = this.getNumBitsLeft() / 8;
if (num > numBytesLeft) {
throw 'Error! Overflowed the bit stream! n=' + num + ', bytePtr=' + this.bytePtr +
', bytes.length=' + this.bytes.length + ', bitPtr=' + this.bitPtr;
}
const movePointers = opt_movePointers || false;
const result = new Uint8Array(num);
let bytes = this.bytes;
let ptr = this.bytePtr;
let bytesLeftToCopy = num;
while (bytesLeftToCopy > 0) {
const bytesLeftInStream = bytes.length - ptr;
const sourceLength = Math.min(bytesLeftToCopy, bytesLeftInStream);
result.set(bytes.subarray(ptr, ptr + sourceLength), num - bytesLeftToCopy);
ptr += sourceLength;
// Overflowed the stream, just return what we got.
if (ptr >= bytes.length) {
break;
}
bytesLeftToCopy -= sourceLength;
}
if (movePointers) {
this.bytePtr += num;
this.bitsRead_ += (num * 8);
}
return result;
}
/**
* @param {number} n The number of bytes to read.
* @return {Uint8Array} The subarray.
*/
readBytes(n) {
return this.peekBytes(n, true);
}
}
// mask for getting N number of bits (0-8)
bitjs.io.BitStream.BITMASK = [0, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF ];
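A short sketch (not from the repo) of the two bit orders: the same byte yields different values depending on the rtl flag passed to the constructor. Note the constructor expects an ArrayBuffer, so the example passes the underlying buffer of a Uint8Array.
const buf = new Uint8Array([0b10110100]).buffer;
const ltr = new bitjs.io.BitStream(buf, false); // read bit 0 first (the order unzip.js uses)
ltr.readBits(3); // => 4  (0b100, the low three bits)
ltr.readBits(5); // => 22 (0b10110, the remaining bits)
const rtl = new bitjs.io.BitStream(buf, true);  // read bit 7 first
rtl.readBits(3); // => 5  (0b101, the high three bits)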

@ -0,0 +1,117 @@
/*
* bytestream.js
*
* Provides a writer for bytes.
*
* Licensed under the MIT License
*
* Copyright(c) 2011 Google Inc.
* Copyright(c) 2011 antimatter15
*/
var bitjs = bitjs || {};
bitjs.io = bitjs.io || {};
/**
* A write-only Byte buffer which uses a Uint8 Typed Array as a backing store.
*/
bitjs.io.ByteBuffer = class {
/**
* @param {number} numBytes The number of bytes to allocate.
*/
constructor(numBytes) {
if (typeof numBytes != typeof 1 || numBytes <= 0) {
throw "Error! ByteBuffer initialized with '" + numBytes + "'";
}
this.data = new Uint8Array(numBytes);
this.ptr = 0;
}
/**
* @param {number} b The byte to insert.
*/
insertByte(b) {
// TODO: throw if byte is invalid?
this.data[this.ptr++] = b;
}
/**
* @param {Array.<number>|Uint8Array|Int8Array} bytes The bytes to insert.
*/
insertBytes(bytes) {
// TODO: throw if bytes is invalid?
this.data.set(bytes, this.ptr);
this.ptr += bytes.length;
}
/**
* Writes an unsigned number into the next n bytes. If the number is too large
* to fit into n bytes or is negative, an error is thrown.
* @param {number} num The unsigned number to write.
* @param {number} numBytes The number of bytes to write the number into.
*/
writeNumber(num, numBytes) {
if (numBytes < 1 || !numBytes) {
throw 'Trying to write into too few bytes: ' + numBytes;
}
if (num < 0) {
throw 'Trying to write a negative number (' + num +
') as an unsigned number to an ArrayBuffer';
}
if (num > (Math.pow(2, numBytes * 8) - 1)) {
throw 'Trying to write ' + num + ' into only ' + numBytes + ' bytes';
}
// Roll 8-bits at a time into an array of bytes.
const bytes = [];
while (numBytes-- > 0) {
const eightBits = num & 255;
bytes.push(eightBits);
num >>= 8;
}
this.insertBytes(bytes);
}
/**
* Writes a signed number into the next n bytes. If the number is too large
* to fit into n bytes, an error is thrown.
* @param {number} num The signed number to write.
* @param {number} numBytes The number of bytes to write the number into.
*/
writeSignedNumber(num, numBytes) {
if (numBytes < 1) {
throw 'Trying to write into too few bytes: ' + numBytes;
}
const HALF = Math.pow(2, (numBytes * 8) - 1);
if (num >= HALF || num < -HALF) {
throw 'Trying to write ' + num + ' into only ' + numBytes + ' bytes';
}
// Roll 8-bits at a time into an array of bytes.
const bytes = [];
while (numBytes-- > 0) {
const eightBits = num & 255;
bytes.push(eightBits);
num >>= 8;
}
this.insertBytes(bytes);
}
/**
* @param {string} str The ASCII string to write.
*/
writeASCIIString(str) {
for (let i = 0; i < str.length; ++i) {
const curByte = str.charCodeAt(i);
if (curByte < 0 || curByte > 255) {
throw 'Trying to write a non-ASCII string!';
}
this.insertByte(curByte);
}
};
}
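A small usage sketch (illustrative values): writeNumber() emits the least-significant byte first, so multi-byte numbers land in the buffer little-endian.
const bb = new bitjs.io.ByteBuffer(6);
bb.writeNumber(0x04034b50, 4); // ZIP local-file-header signature
bb.writeASCIIString('PK');
// bb.data is now Uint8Array [0x50, 0x4b, 0x03, 0x04, 0x50, 0x4b] and bb.ptr is 6.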

@ -1,902 +0,0 @@
/**
* unrar.js
*
* Copyright(c) 2011 Google Inc.
* Copyright(c) 2011 antimatter15
*
* Reference Documentation:
*
* http://kthoom.googlecode.com/hg/docs/unrar.html
*/
/* global bitjs, importScripts */
// This file expects to be invoked as a Worker (see onmessage below).
importScripts("io.js");
importScripts("archive.js");
// Progress variables.
var currentFilename = "";
var currentFileNumber = 0;
var currentBytesUnarchivedInFile = 0;
var currentBytesUnarchived = 0;
var totalUncompressedBytesInArchive = 0;
var totalFilesInArchive = 0;
// Helper functions.
var info = function(str) {
postMessage(new bitjs.archive.UnarchiveInfoEvent(str));
};
var err = function(str) {
postMessage(new bitjs.archive.UnarchiveErrorEvent(str));
};
var postProgress = function() {
postMessage(new bitjs.archive.UnarchiveProgressEvent(
currentFilename,
currentFileNumber,
currentBytesUnarchivedInFile,
currentBytesUnarchived,
totalUncompressedBytesInArchive,
totalFilesInArchive));
};
// shows a byte value as its hex representation
var nibble = "0123456789ABCDEF";
var byteValueToHexString = function(num) {
return nibble[num >> 4] + nibble[num & 0xF];
};
var twoByteValueToHexString = function(num) {
return nibble[(num >> 12) & 0xF] + nibble[(num >> 8) & 0xF] + nibble[(num >> 4) & 0xF] + nibble[num & 0xF];
};
// Volume Types
// MARK_HEAD = 0x72;
var MAIN_HEAD = 0x73,
FILE_HEAD = 0x74,
// COMM_HEAD = 0x75,
// AV_HEAD = 0x76,
// SUB_HEAD = 0x77,
// PROTECT_HEAD = 0x78,
// SIGN_HEAD = 0x79,
// NEWSUB_HEAD = 0x7a,
ENDARC_HEAD = 0x7b;
// bstream is a bit stream
var RarVolumeHeader = function(bstream) {
var headPos = bstream.bytePtr;
// byte 1,2
info("Rar Volume Header @" + bstream.bytePtr);
this.crc = bstream.readBits(16);
info(" crc=" + this.crc);
// byte 3
this.headType = bstream.readBits(8);
info(" headType=" + this.headType);
// Get flags
// bytes 4,5
this.flags = {};
this.flags.value = bstream.peekBits(16);
info(" flags=" + twoByteValueToHexString(this.flags.value));
switch (this.headType) {
case MAIN_HEAD:
this.flags.MHD_VOLUME = !!bstream.readBits(1);
this.flags.MHD_COMMENT = !!bstream.readBits(1);
this.flags.MHD_LOCK = !!bstream.readBits(1);
this.flags.MHD_SOLID = !!bstream.readBits(1);
this.flags.MHD_PACK_COMMENT = !!bstream.readBits(1);
this.flags.MHD_NEWNUMBERING = this.flags.MHD_PACK_COMMENT;
this.flags.MHD_AV = !!bstream.readBits(1);
this.flags.MHD_PROTECT = !!bstream.readBits(1);
this.flags.MHD_PASSWORD = !!bstream.readBits(1);
this.flags.MHD_FIRSTVOLUME = !!bstream.readBits(1);
this.flags.MHD_ENCRYPTVER = !!bstream.readBits(1);
bstream.readBits(6); // unused
break;
case FILE_HEAD:
this.flags.LHD_SPLIT_BEFORE = !!bstream.readBits(1); // 0x0001
this.flags.LHD_SPLIT_AFTER = !!bstream.readBits(1); // 0x0002
this.flags.LHD_PASSWORD = !!bstream.readBits(1); // 0x0004
this.flags.LHD_COMMENT = !!bstream.readBits(1); // 0x0008
this.flags.LHD_SOLID = !!bstream.readBits(1); // 0x0010
bstream.readBits(3); // unused
this.flags.LHD_LARGE = !!bstream.readBits(1); // 0x0100
this.flags.LHD_UNICODE = !!bstream.readBits(1); // 0x0200
this.flags.LHD_SALT = !!bstream.readBits(1); // 0x0400
this.flags.LHD_VERSION = !!bstream.readBits(1); // 0x0800
this.flags.LHD_EXTTIME = !!bstream.readBits(1); // 0x1000
this.flags.LHD_EXTFLAGS = !!bstream.readBits(1); // 0x2000
bstream.readBits(2); // unused
info(" LHD_SPLIT_BEFORE = " + this.flags.LHD_SPLIT_BEFORE);
break;
default:
bstream.readBits(16);
}
// byte 6,7
this.headSize = bstream.readBits(16);
info(" headSize=" + this.headSize);
switch (this.headType) {
case MAIN_HEAD:
this.highPosAv = bstream.readBits(16);
this.posAv = bstream.readBits(32);
if (this.flags.MHD_ENCRYPTVER) {
this.encryptVer = bstream.readBits(8);
}
info("Found MAIN_HEAD with highPosAv=" + this.highPosAv + ", posAv=" + this.posAv);
break;
case FILE_HEAD:
this.packSize = bstream.readBits(32);
this.unpackedSize = bstream.readBits(32);
this.hostOS = bstream.readBits(8);
this.fileCRC = bstream.readBits(32);
this.fileTime = bstream.readBits(32);
this.unpVer = bstream.readBits(8);
this.method = bstream.readBits(8);
this.nameSize = bstream.readBits(16);
this.fileAttr = bstream.readBits(32);
if (this.flags.LHD_LARGE) {
info("Warning: Reading in LHD_LARGE 64-bit size values");
this.HighPackSize = bstream.readBits(32);
this.HighUnpSize = bstream.readBits(32);
} else {
this.HighPackSize = 0;
this.HighUnpSize = 0;
if (this.unpackedSize === 0xffffffff) {
this.HighUnpSize = 0x7fffffff;
this.unpackedSize = 0xffffffff;
}
}
this.fullPackSize = 0;
this.fullUnpackSize = 0;
this.fullPackSize |= this.HighPackSize;
this.fullPackSize <<= 32;
this.fullPackSize |= this.packSize;
// read in filename
this.filename = bstream.readBytes(this.nameSize);
var _s = "";
for (var _i = 0; _i < this.filename.length ; _i++) {
_s += String.fromCharCode(this.filename[_i]);
}
this.filename = _s;
if (this.flags.LHD_SALT) {
info("Warning: Reading in 64-bit salt value");
this.salt = bstream.readBits(64); // 8 bytes
}
if (this.flags.LHD_EXTTIME) {
// 16-bit flags
var extTimeFlags = bstream.readBits(16);
// this is adapted straight out of arcread.cpp, Archive::ReadHeader()
for (var I = 0; I < 4; ++I) {
var rmode = extTimeFlags >> ((3 - I) * 4);
if ((rmode & 8) === 0) {
continue;
}
if (I !== 0) {
bstream.readBits(16);
}
var count = (rmode & 3);
for (var J = 0; J < count; ++J) {
bstream.readBits(8);
}
}
}
if (this.flags.LHD_COMMENT) {
info("Found a LHD_COMMENT");
}
while (headPos + this.headSize > bstream.bytePtr) bstream.readBits(1);
info("Found FILE_HEAD with packSize=" + this.packSize + ", unpackedSize= " + this.unpackedSize + ", hostOS=" + this.hostOS + ", unpVer=" + this.unpVer + ", method=" + this.method + ", filename=" + this.filename);
break;
default:
info("Found a header of type 0x" + byteValueToHexString(this.headType));
// skip the rest of the header bytes (for now)
bstream.readBytes( this.headSize - 7 );
break;
}
};
//var BLOCK_LZ = 0;
var rLDecode = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224],
rLBits = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5],
rDBitLengthCounts = [4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 14, 0, 12],
rSDDecode = [0, 4, 8, 16, 32, 64, 128, 192],
rSDBits = [2, 2, 3, 4, 5, 6, 6, 6];
var rDDecode = [0, 1, 2, 3, 4, 6, 8, 12, 16, 24, 32,
48, 64, 96, 128, 192, 256, 384, 512, 768, 1024, 1536, 2048, 3072,
4096, 6144, 8192, 12288, 16384, 24576, 32768, 49152, 65536, 98304,
131072, 196608, 262144, 327680, 393216, 458752, 524288, 589824,
655360, 720896, 786432, 851968, 917504, 983040];
var rDBits = [0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5,
5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14,
15, 15, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16];
var rLowDistRepCount = 16;
var rNC = 299,
rDC = 60,
rLDC = 17,
rRC = 28,
rBC = 20,
rHuffTableSize = (rNC + rDC + rRC + rLDC);
//var UnpBlockType = BLOCK_LZ;
var UnpOldTable = new Array(rHuffTableSize);
var BD = { //bitdecode
DecodeLen: new Array(16),
DecodePos: new Array(16),
DecodeNum: new Array(rBC)
};
var LD = { //litdecode
DecodeLen: new Array(16),
DecodePos: new Array(16),
DecodeNum: new Array(rNC)
};
var DD = { //distdecode
DecodeLen: new Array(16),
DecodePos: new Array(16),
DecodeNum: new Array(rDC)
};
var LDD = { //low dist decode
DecodeLen: new Array(16),
DecodePos: new Array(16),
DecodeNum: new Array(rLDC)
};
var RD = { //rep decode
DecodeLen: new Array(16),
DecodePos: new Array(16),
DecodeNum: new Array(rRC)
};
var rBuffer;
// read in Huffman tables for RAR
function rarReadTables(bstream) {
var BitLength = new Array(rBC),
Table = new Array(rHuffTableSize);
var i;
// before we start anything we need to get byte-aligned
bstream.readBits( (8 - bstream.bitPtr) & 0x7 );
if (bstream.readBits(1)) {
info("Error! PPM not implemented yet");
return;
}
if (!bstream.readBits(1)) { //discard old table
for (i = UnpOldTable.length; i--;) UnpOldTable[i] = 0;
}
// read in bit lengths
for (var I = 0; I < rBC; ++I) {
var Length = bstream.readBits(4);
if (Length === 15) {
var ZeroCount = bstream.readBits(4);
if (ZeroCount === 0) {
BitLength[I] = 15;
} else {
ZeroCount += 2;
while (ZeroCount-- > 0 && I < rBC) {
BitLength[I++] = 0;
}
--I;
}
} else {
BitLength[I] = Length;
}
}
// now all 20 bit lengths are obtained, we construct the Huffman Table:
rarMakeDecodeTables(BitLength, 0, BD, rBC);
var TableSize = rHuffTableSize;
//console.log(DecodeLen, DecodePos, DecodeNum);
for (i = 0; i < TableSize;) {
var N;
var num = rarDecodeNumber(bstream, BD);
if (num < 16) {
Table[i] = (num + UnpOldTable[i]) & 0xf;
i++;
} else if (num < 18) {
N = (num === 16) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
while (N-- > 0 && i < TableSize) {
Table[i] = Table[i - 1];
i++;
}
} else {
N = (num === 18) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
while (N-- > 0 && i < TableSize) {
Table[i++] = 0;
}
}
}
rarMakeDecodeTables(Table, 0, LD, rNC);
rarMakeDecodeTables(Table, rNC, DD, rDC);
rarMakeDecodeTables(Table, rNC + rDC, LDD, rLDC);
rarMakeDecodeTables(Table, rNC + rDC + rLDC, RD, rRC);
for (i = UnpOldTable.length; i--;) {
UnpOldTable[i] = Table[i];
}
return true;
}
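// Decodes a single symbol: the upcoming bits are compared against the
// per-length limits in DecodeLen (an unrolled binary search) to find the
// code length, then DecodePos/DecodeNum map the code back to its symbol.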
function rarDecodeNumber(bstream, dec) {
var DecodeLen = dec.DecodeLen, DecodePos = dec.DecodePos, DecodeNum = dec.DecodeNum;
var bitField = bstream.getBits() & 0xfffe;
//some sort of rolled out binary search
var bits = ((bitField < DecodeLen[8]) ?
((bitField < DecodeLen[4]) ?
((bitField < DecodeLen[2]) ?
((bitField < DecodeLen[1]) ? 1 : 2)
: ((bitField < DecodeLen[3]) ? 3 : 4))
: (bitField < DecodeLen[6]) ?
((bitField < DecodeLen[5]) ? 5 : 6)
: ((bitField < DecodeLen[7]) ? 7 : 8))
: ((bitField < DecodeLen[12]) ?
((bitField < DecodeLen[10]) ?
((bitField < DecodeLen[9]) ? 9 : 10)
: ((bitField < DecodeLen[11]) ? 11 : 12))
: (bitField < DecodeLen[14]) ?
((bitField < DecodeLen[13]) ? 13 : 14)
: 15));
bstream.readBits(bits);
var N = DecodePos[bits] + ((bitField - DecodeLen[bits - 1]) >>> (16 - bits));
return DecodeNum[N];
}
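// Builds a canonical Huffman decode table from an array of code lengths:
// LenCount tallies how many codes use each length, DecodeLen[I] becomes the
// left-justified upper limit for codes of length I, DecodePos[I] the index
// of the first symbol of that length, and DecodeNum lists the symbols in
// canonical order.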
function rarMakeDecodeTables(BitLength, offset, dec, size) {
var DecodeLen = dec.DecodeLen, DecodePos = dec.DecodePos, DecodeNum = dec.DecodeNum;
var LenCount = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
TmpPos = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
N = 0, M = 0;
var i;
for (i = DecodeNum.length; i--;) DecodeNum[i] = 0;
for (i = 0; i < size; i++) {
LenCount[BitLength[i + offset] & 0xF]++;
}
LenCount[0] = 0;
TmpPos[0] = 0;
DecodePos[0] = 0;
DecodeLen[0] = 0;
var I;
for (I = 1; I < 16; ++I) {
N = 2 * (N + LenCount[I]);
M = (N << (15 - I));
if (M > 0xFFFF) {
M = 0xFFFF;
}
DecodeLen[I] = M;
DecodePos[I] = DecodePos[I - 1] + LenCount[I - 1];
TmpPos[I] = DecodePos[I];
}
for (I = 0; I < size; ++I) {
if (BitLength[I + offset] !== 0) {
DecodeNum[TmpPos[BitLength[offset + I] & 0xF]++] = I;
}
}
}
// TODO: implement
function Unpack15() { //bstream, Solid) {
info("ERROR! RAR 1.5 compression not supported");
}
var lowDistRepCount = 0, prevLowDist = 0;
var rOldDist = [0, 0, 0, 0];
var lastDist = 0;
var lastLength = 0;
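// RAR 2.x decode loop: codes below 256 are literal bytes, 256 repeats the
// last match, 257-260 reuse one of the four most recent distances, 261-268
// are fixed short-distance matches, 269 re-reads the Huffman tables, and
// codes of 270 and above are general length/distance matches.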
function Unpack20(bstream) { //, Solid) {
var destUnpSize = rBuffer.data.length;
var oldDistPtr = 0;
var Length;
var Distance;
rarReadTables20(bstream);
while (destUnpSize > rBuffer.ptr) {
var num = rarDecodeNumber(bstream, LD);
var Bits;
if (num < 256) {
rBuffer.insertByte(num);
continue;
}
if (num > 269) {
Length = rLDecode[num -= 270] + 3;
if ((Bits = rLBits[num]) > 0) {
Length += bstream.readBits(Bits);
}
var DistNumber = rarDecodeNumber(bstream, DD);
Distance = rDDecode[DistNumber] + 1;
if ((Bits = rDBits[DistNumber]) > 0) {
Distance += bstream.readBits(Bits);
}
if (Distance >= 0x2000) {
Length++;
if (Distance >= 0x40000) Length++;
}
lastLength = Length;
lastDist = rOldDist[oldDistPtr++ & 3] = Distance;
rarCopyString(Length, Distance);
continue;
}
if (num === 269) {
rarReadTables20(bstream);
rarUpdateProgress();
continue;
}
if (num === 256) {
lastDist = rOldDist[oldDistPtr++ & 3] = lastDist;
rarCopyString(lastLength, lastDist);
continue;
}
if (num < 261) {
Distance = rOldDist[(oldDistPtr - (num - 256)) & 3];
var LengthNumber = rarDecodeNumber(bstream, RD);
Length = rLDecode[LengthNumber] + 2;
if ((Bits = rLBits[LengthNumber]) > 0) {
Length += bstream.readBits(Bits);
}
if (Distance >= 0x101) {
Length++;
if (Distance >= 0x2000) {
Length++;
if (Distance >= 0x40000) Length++;
}
}
lastLength = Length;
lastDist = rOldDist[oldDistPtr++ & 3] = Distance;
rarCopyString(Length, Distance);
continue;
}
if (num < 270) {
Distance = rSDDecode[num -= 261] + 1;
if ((Bits = rSDBits[num]) > 0) {
Distance += bstream.readBits(Bits);
}
lastLength = 2;
lastDist = rOldDist[oldDistPtr++ & 3] = Distance;
rarCopyString(2, Distance);
continue;
}
}
rarUpdateProgress();
}
function rarUpdateProgress() {
var change = rBuffer.ptr - currentBytesUnarchivedInFile;
currentBytesUnarchivedInFile = rBuffer.ptr;
currentBytesUnarchived += change;
postProgress();
}
var rNC20 = 298,
rDC20 = 48,
rRC20 = 28,
rBC20 = 19,
rMC20 = 257;
var UnpOldTable20 = new Array(rMC20 * 4);
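// Reads the RAR 2.x tables: rBC20 4-bit lengths for the bit-length table
// (BD), then rNC20 + rDC20 + rRC20 delta-coded lengths that are split into
// the literal (LD), distance (DD) and repeat-length (RD) tables.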
function rarReadTables20(bstream) {
var BitLength = new Array(rBC20);
var Table = new Array(rMC20 * 4);
var TableSize, N, I;
var i;
bstream.readBits(1);
if (!bstream.readBits(1)) {
for (i = UnpOldTable20.length; i--;) UnpOldTable20[i] = 0;
}
TableSize = rNC20 + rDC20 + rRC20;
for (I = 0; I < rBC20; I++) {
BitLength[I] = bstream.readBits(4);
}
rarMakeDecodeTables(BitLength, 0, BD, rBC20);
I = 0;
while (I < TableSize) {
var num = rarDecodeNumber(bstream, BD);
if (num < 16) {
Table[I] = (num + UnpOldTable20[I]) & 0xf;
I++;
} else if (num === 16) {
N = bstream.readBits(2) + 3;
while (N-- > 0 && I < TableSize) {
Table[I] = Table[I - 1];
I++;
}
} else {
if (num === 17) {
N = bstream.readBits(3) + 3;
} else {
N = bstream.readBits(7) + 11;
}
while (N-- > 0 && I < TableSize) {
Table[I++] = 0;
}
}
}
rarMakeDecodeTables(Table, 0, LD, rNC20);
rarMakeDecodeTables(Table, rNC20, DD, rDC20);
rarMakeDecodeTables(Table, rNC20 + rDC20, RD, rRC20);
for (i = UnpOldTable20.length; i--;) UnpOldTable20[i] = Table[i];
}
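// RAR 2.9/3.x decode loop. Distance slots are expanded from
// rDBitLengthCounts into DDecode/DBits on entry; codes below 256 are
// literals, 256 marks the end of a block, 257 reads embedded VM (filter)
// code, 258 repeats the last match, 259-262 reuse recent distances, 263-270
// are fixed short-distance matches, and codes of 271 and above are general
// length/distance matches.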
function Unpack29(bstream) {
// lazy initialize rDDecode and rDBits
var DDecode = new Array(rDC);
var DBits = new Array(rDC);
var Distance = 0;
var Length = 0;
var Dist = 0, BitLength = 0, Slot = 0;
var I;
for (I = 0; I < rDBitLengthCounts.length; I++, BitLength++) {
for (var J = 0; J < rDBitLengthCounts[I]; J++, Slot++, Dist += (1 << BitLength)) {
DDecode[Slot] = Dist;
DBits[Slot] = BitLength;
}
}
var Bits;
//tablesRead = false;
rOldDist = [0, 0, 0, 0];
lastDist = 0;
lastLength = 0;
var i;
for (i = UnpOldTable.length; i--;) UnpOldTable[i] = 0;
// read in Huffman tables
rarReadTables(bstream);
while (true) {
var num = rarDecodeNumber(bstream, LD);
if (num < 256) {
rBuffer.insertByte(num);
continue;
}
if (num >= 271) {
Length = rLDecode[num -= 271] + 3;
if ((Bits = rLBits[num]) > 0) {
Length += bstream.readBits(Bits);
}
var DistNumber = rarDecodeNumber(bstream, DD);
Distance = DDecode[DistNumber] + 1;
if ((Bits = DBits[DistNumber]) > 0) {
if (DistNumber > 9) {
if (Bits > 4) {
Distance += ((bstream.getBits() >>> (20 - Bits)) << 4);
bstream.readBits(Bits - 4);
//todo: check this
}
if (lowDistRepCount > 0) {
lowDistRepCount--;
Distance += prevLowDist;
} else {
var LowDist = rarDecodeNumber(bstream, LDD);
if (LowDist === 16) {
lowDistRepCount = rLowDistRepCount - 1;
Distance += prevLowDist;
} else {
Distance += LowDist;
prevLowDist = LowDist;
}
}
} else {
Distance += bstream.readBits(Bits);
}
}
if (Distance >= 0x2000) {
Length++;
if (Distance >= 0x40000) {
Length++;
}
}
rarInsertOldDist(Distance);
rarInsertLastMatch(Length, Distance);
rarCopyString(Length, Distance);
continue;
}
if (num === 256) {
if (!rarReadEndOfBlock(bstream)) break;
continue;
}
if (num === 257) {
//console.log("READVMCODE");
if (!rarReadVMCode(bstream)) break;
continue;
}
if (num === 258) {
if (lastLength !== 0) {
rarCopyString(lastLength, lastDist);
}
continue;
}
if (num < 263) {
var DistNum = num - 259;
Distance = rOldDist[DistNum];
for (var I = DistNum; I > 0; I--) {
rOldDist[I] = rOldDist[I - 1];
}
rOldDist[0] = Distance;
var LengthNumber = rarDecodeNumber(bstream, RD);
Length = rLDecode[LengthNumber] + 2;
if ((Bits = rLBits[LengthNumber]) > 0) {
Length += bstream.readBits(Bits);
}
rarInsertLastMatch(Length, Distance);
rarCopyString(Length, Distance);
continue;
}
if (num < 272) {
Distance = rSDDecode[num -= 263] + 1;
if ((Bits = rSDBits[num]) > 0) {
Distance += bstream.readBits(Bits);
}
rarInsertOldDist(Distance);
rarInsertLastMatch(2, Distance);
rarCopyString(2, Distance);
continue;
}
}
rarUpdateProgress();
}
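// End-of-block marker: if the first bit is set, new Huffman tables follow in
// this block and decoding continues; otherwise the data continues in a new
// file (a second bit flags whether new tables will be needed) and this
// unpack loop ends.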
function rarReadEndOfBlock(bstream) {
rarUpdateProgress();
var NewTable = false, NewFile = false;
if (bstream.readBits(1)) {
NewTable = true;
} else {
NewFile = true;
NewTable = !!bstream.readBits(1);
}
//tablesRead = !NewTable;
return !(NewFile || NewTable && !rarReadTables(bstream));
}
function rarReadVMCode(bstream) {
var FirstByte = bstream.readBits(8);
var Length = (FirstByte & 7) + 1;
if (Length === 7) {
Length = bstream.readBits(8) + 7;
} else if (Length === 8) {
Length = bstream.readBits(16);
}
var vmCode = [];
for (var I = 0; I < Length; I++) {
// TODO: check the read buffer (readbuf) here
vmCode.push(bstream.readBits(8));
}
return RarAddVMCode(FirstByte, vmCode, Length);
}
function RarAddVMCode(firstByte, vmCode, length) {
//console.log(vmCode);
if (vmCode.length > 0) {
info("Error! RarVM not supported yet!");
}
return true;
}
function rarInsertLastMatch(length, distance) {
lastDist = distance;
lastLength = length;
}
function rarInsertOldDist(distance) {
rOldDist.splice(3, 1);
rOldDist.splice(0, 0, distance);
}
//this is the real function, the other one is for debugging
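// Copies `length` bytes starting `distance` bytes back in the output window.
// If the match starts before the current buffer, it reads from the buffers
// of previously unpacked files (rOldBuffers); overlapping copies
// (length > distance) are done byte-by-byte so freshly written output can be
// reused, as in LZ77.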
function rarCopyString(length, distance) {
var destPtr = rBuffer.ptr - distance;
if (destPtr < 0) {
var l = rOldBuffers.length;
while (destPtr < 0) {
destPtr = rOldBuffers[--l].data.length + destPtr;
}
// TODO: let's hope that it never needs to read beyond file boundaries
while (length--) rBuffer.insertByte(rOldBuffers[l].data[destPtr++]);
}
if (length > distance) {
while (length--) rBuffer.insertByte(rBuffer.data[destPtr++]);
} else {
rBuffer.insertBytes(rBuffer.data.subarray(destPtr, destPtr + length));
}
}
var rOldBuffers = [];
// v must be a valid RarVolume
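// Dispatches on the volume header's unpVer: 15 maps to RAR 1.5 (unsupported
// here), 20/26 to RAR 2.x, and 29/36 to RAR 3.x. The packed data is wrapped
// in a right-to-left BitStream and decompressed into a ByteBuffer sized to
// the header's unpackedSize.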
function unpack(v) {
// TODO: implement what happens when unpVer is < 15
var Ver = v.header.unpVer <= 15 ? 15 : v.header.unpVer,
Solid = v.header.LHD_SOLID,
bstream = new bitjs.io.BitStream(v.fileData.buffer, true /* rtl */, v.fileData.byteOffset, v.fileData.byteLength);
rBuffer = new bitjs.io.ByteBuffer(v.header.unpackedSize);
info("Unpacking " + v.filename + " RAR v" + Ver);
switch(Ver) {
case 15: // rar 1.5 compression
Unpack15(); //(bstream, Solid);
break;
case 20: // rar 2.x compression
case 26: // files larger than 2GB
Unpack20(bstream); //, Solid);
break;
case 29: // rar 3.x compression
case 36: // alternative hash
Unpack29(bstream);
break;
} // switch (Ver)
rOldBuffers.push(rBuffer);
//TODO: clear these old buffers when there's over 4MB of history
return rBuffer.data;
}
// bstream is a bit stream
var RarLocalFile = function(bstream) {
this.header = new RarVolumeHeader(bstream);
this.filename = this.header.filename;
if (this.header.headType !== FILE_HEAD && this.header.headType !== ENDARC_HEAD) {
this.isValid = false;
info("Error! RAR Volume did not include a FILE_HEAD or ENDARC_HEAD header");
} else {
// read in the compressed data
this.fileData = null;
if (this.header.packSize > 0) {
this.fileData = bstream.readBytes(this.header.packSize);
this.isValid = true;
}
}
};
RarLocalFile.prototype.unrar = function() {
if (!this.header.flags.LHD_SPLIT_BEFORE) {
// unstore file
if (this.header.method === 0x30) {
info("Unstore " + this.filename);
this.isValid = true;
currentBytesUnarchivedInFile += this.fileData.length;
currentBytesUnarchived += this.fileData.length;
// Create a new buffer and copy it over.
var len = this.header.packSize;
var newBuffer = new bitjs.io.ByteBuffer(len);
newBuffer.insertBytes(this.fileData);
this.fileData = newBuffer.data;
} else {
this.isValid = true;
this.fileData = unpack(this);
}
}
};
var unrar = function(arrayBuffer) {
currentFilename = "";
currentFileNumber = 0;
currentBytesUnarchivedInFile = 0;
currentBytesUnarchived = 0;
totalUncompressedBytesInArchive = 0;
totalFilesInArchive = 0;
postMessage(new bitjs.archive.UnarchiveStartEvent());
var bstream = new bitjs.io.BitStream(arrayBuffer, false /* rtl */);
var header = new RarVolumeHeader(bstream);
if (header.crc === 0x6152 &&
header.headType === 0x72 &&
header.flags.value === 0x1A21 &&
header.headSize === 7) {
info("Found RAR signature");
var mhead = new RarVolumeHeader(bstream);
if (mhead.headType != MAIN_HEAD) {
info("Error! RAR did not include a MAIN_HEAD header");
} else {
var localFiles = [];
var localFile = null;
do {
try {
localFile = new RarLocalFile(bstream);
info("RAR localFile isValid=" + localFile.isValid + ", volume packSize=" + localFile.header.packSize);
if (localFile && localFile.isValid && localFile.header.packSize > 0) {
totalUncompressedBytesInArchive += localFile.header.unpackedSize;
localFiles.push(localFile);
} else if (localFile.header.packSize === 0 && localFile.header.unpackedSize === 0) {
localFile.isValid = true;
}
} catch (err) {
break;
}
//info("bstream" + bstream.bytePtr+"/"+bstream.bytes.length);
} while (localFile.isValid);
totalFilesInArchive = localFiles.length;
// all file headers have been read, but the file data is not unpacked yet
// (that happens in the loop below)
localFiles = localFiles.sort(function(a, b) {
var aname = a.filename.toLowerCase();
var bname = b.filename.toLowerCase();
return aname > bname ? 1 : -1;
});
info(localFiles.map(function(a) {return a.filename;}).join(", "));
for (var i = 0; i < localFiles.length; ++i) {
var localfile = localFiles[i];
// update progress
currentFilename = localfile.header.filename;
currentBytesUnarchivedInFile = 0;
// actually do the unpacking
localfile.unrar();
if (localfile.isValid) {
postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile));
postProgress();
}
}
postProgress();
}
} else {
err("Invalid RAR file");
}
postMessage(new bitjs.archive.UnarchiveFinishEvent());
};
// event.data.file has the ArrayBuffer.
onmessage = function(event) {
var ab = event.data.file;
unrar(ab, true);
};

@ -1,626 +0,0 @@
/**
* unzip.js
*
* Copyright(c) 2011 Google Inc.
* Copyright(c) 2011 antimatter15
*
* Reference Documentation:
*
* ZIP format: http://www.pkware.com/documents/casestudies/APPNOTE.TXT
* DEFLATE format: http://tools.ietf.org/html/rfc1951
*/
/* global bitjs, importScripts, Uint8Array*/
// This file expects to be invoked as a Worker (see onmessage below).
importScripts("io.js");
importScripts("archive.js");
// Progress variables.
var currentFilename = "";
var currentFileNumber = 0;
var currentBytesUnarchivedInFile = 0;
var currentBytesUnarchived = 0;
var totalUncompressedBytesInArchive = 0;
var totalFilesInArchive = 0;
// Helper functions.
var info = function(str) {
postMessage(new bitjs.archive.UnarchiveInfoEvent(str));
};
var err = function(str) {
postMessage(new bitjs.archive.UnarchiveErrorEvent(str));
};
var postProgress = function() {
postMessage(new bitjs.archive.UnarchiveProgressEvent(
currentFilename,
currentFileNumber,
currentBytesUnarchivedInFile,
currentBytesUnarchived,
totalUncompressedBytesInArchive,
totalFilesInArchive));
};
var zLocalFileHeaderSignature = 0x04034b50;
var zArchiveExtraDataSignature = 0x08064b50;
var zCentralFileHeaderSignature = 0x02014b50;
var zDigitalSignatureSignature = 0x05054b50;
// takes a ByteStream and parses out the local file information
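// Layout follows the local file header in the PKWARE APPNOTE: a 4-byte
// signature, fixed-size fields (version, flags, method, times, CRC-32,
// sizes, name/extra lengths), then the variable-length file name and extra
// field, followed immediately by the compressed data.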
var ZipLocalFile = function(bstream) {
if (typeof bstream != typeof {} || !bstream.readNumber || typeof bstream.readNumber != typeof function() {}) {
return null;
}
bstream.readNumber(4); // swallow signature
this.version = bstream.readNumber(2);
this.generalPurpose = bstream.readNumber(2);
this.compressionMethod = bstream.readNumber(2);
this.lastModFileTime = bstream.readNumber(2);
this.lastModFileDate = bstream.readNumber(2);
this.crc32 = bstream.readNumber(4);
this.compressedSize = bstream.readNumber(4);
this.uncompressedSize = bstream.readNumber(4);
this.fileNameLength = bstream.readNumber(2);
this.extraFieldLength = bstream.readNumber(2);
this.filename = null;
if (this.fileNameLength > 0) {
this.filename = bstream.readString(this.fileNameLength);
}
info("Zip Local File Header:");
info(" version=" + this.version);
info(" general purpose=" + this.generalPurpose);
info(" compression method=" + this.compressionMethod);
info(" last mod file time=" + this.lastModFileTime);
info(" last mod file date=" + this.lastModFileDate);
info(" crc32=" + this.crc32);
info(" compressed size=" + this.compressedSize);
info(" uncompressed size=" + this.uncompressedSize);
info(" file name length=" + this.fileNameLength);
info(" extra field length=" + this.extraFieldLength);
info(" filename = '" + this.filename + "'");
this.extraField = null;
if (this.extraFieldLength > 0) {
this.extraField = bstream.readString(this.extraFieldLength);
info(" extra field=" + this.extraField);
}
// read in the compressed data
this.fileData = null;
if (this.compressedSize > 0) {
this.fileData = new Uint8Array(bstream.bytes.buffer, bstream.ptr, this.compressedSize);
bstream.ptr += this.compressedSize;
}
// TODO: deal with data descriptor if present (we currently assume no data descriptor!)
// "This descriptor exists only if bit 3 of the general purpose bit flag is set"
// But how do you figure out how big the file data is if you don't know the compressedSize
// from the header?!?
if ((this.generalPurpose & bitjs.BIT[3]) != 0) {
this.crc32 = bstream.readNumber(4);
this.compressedSize = bstream.readNumber(4);
this.uncompressedSize = bstream.readNumber(4);
}
};
// determine what kind of compressed data we have and decompress
ZipLocalFile.prototype.unzip = function() {
// Zip Version 1.0, no compression (store only)
if (this.compressionMethod == 0) {
info("ZIP v" + this.version + ", store only: " + this.filename + " (" + this.compressedSize + " bytes)");
currentBytesUnarchivedInFile = this.compressedSize;
currentBytesUnarchived += this.compressedSize;
this.fileData = zeroCompression(this.fileData, this.uncompressedSize);
}
// version == 20, compression method == 8 (DEFLATE)
else if (this.compressionMethod == 8) {
info("ZIP v2.0, DEFLATE: " + this.filename + " (" + this.compressedSize + " bytes)");
this.fileData = inflate(this.fileData, this.uncompressedSize);
}
else {
err("UNSUPPORTED VERSION/FORMAT: ZIP v" + this.version + ", compression method=" + this.compressionMethod + ": " + this.filename + " (" + this.compressedSize + " bytes)");
this.fileData = null;
}
};
// Takes an ArrayBuffer of a zip file in
// returns null on error
// returns an array of DecompressedFile objects on success
var unzip = function(arrayBuffer) {
postMessage(new bitjs.archive.UnarchiveStartEvent());
currentFilename = "";
currentFileNumber = 0;
currentBytesUnarchivedInFile = 0;
currentBytesUnarchived = 0;
totalUncompressedBytesInArchive = 0;
totalFilesInArchive = 0;
var bstream = new bitjs.io.ByteStream(arrayBuffer);
// detect local file header signature or return null
if (bstream.peekNumber(4) == zLocalFileHeaderSignature) {
var localFiles = [];
// loop until we don't see any more local files
while (bstream.peekNumber(4) == zLocalFileHeaderSignature) {
var oneLocalFile = new ZipLocalFile(bstream);
// this should strip out directories/folders
if (oneLocalFile && oneLocalFile.uncompressedSize > 0 && oneLocalFile.fileData) {
localFiles.push(oneLocalFile);
totalUncompressedBytesInArchive += oneLocalFile.uncompressedSize;
}
}
totalFilesInArchive = localFiles.length;
// got all local files, now sort them
localFiles.sort(function(a, b) {
var aname = a.filename.toLowerCase();
var bname = b.filename.toLowerCase();
return aname > bname ? 1 : -1;
});
// archive extra data record
if (bstream.peekNumber(4) == zArchiveExtraDataSignature) {
info(" Found an Archive Extra Data Signature");
// skipping this record for now
bstream.readNumber(4);
var archiveExtraFieldLength = bstream.readNumber(4);
bstream.readString(archiveExtraFieldLength);
}
// central directory structure
// TODO: handle the rest of the structures (Zip64 stuff)
if (bstream.peekNumber(4) == zCentralFileHeaderSignature) {
info(" Found a Central File Header");
// read all file headers
while (bstream.peekNumber(4) == zCentralFileHeaderSignature) {
bstream.readNumber(4); // signature
bstream.readNumber(2); // version made by
bstream.readNumber(2); // version needed to extract
bstream.readNumber(2); // general purpose bit flag
bstream.readNumber(2); // compression method
bstream.readNumber(2); // last mod file time
bstream.readNumber(2); // last mod file date
bstream.readNumber(4); // crc32
bstream.readNumber(4); // compressed size
bstream.readNumber(4); // uncompressed size
var fileNameLength = bstream.readNumber(2); // file name length
var extraFieldLength = bstream.readNumber(2); // extra field length
var fileCommentLength = bstream.readNumber(2); // file comment length
bstream.readNumber(2); // disk number start
bstream.readNumber(2); // internal file attributes
bstream.readNumber(4); // external file attributes
bstream.readNumber(4); // relative offset of local header
bstream.readString(fileNameLength); // file name
bstream.readString(extraFieldLength); // extra field
bstream.readString(fileCommentLength); // file comment
}
}
// digital signature
if (bstream.peekNumber(4) == zDigitalSignatureSignature) {
info(" Found a Digital Signature");
bstream.readNumber(4);
var sizeOfSignature = bstream.readNumber(2);
bstream.readString(sizeOfSignature); // digital signature data
}
// report # files and total length
if (localFiles.length > 0) {
postProgress();
}
// now do the unzipping of each file
for (var i = 0; i < localFiles.length; ++i) {
var localfile = localFiles[i];
// update progress
currentFilename = localfile.filename;
currentFileNumber = i;
currentBytesUnarchivedInFile = 0;
// actually do the unzipping
localfile.unzip();
if (localfile.fileData != null) {
postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile));
postProgress();
}
}
postProgress();
postMessage(new bitjs.archive.UnarchiveFinishEvent());
}
};
// returns a table of Huffman codes
// each entry's index is its code and its value is a JavaScript object
// containing {length: 6, symbol: X}
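// This is the canonical Huffman construction described on page 8 of
// RFC 1951: count the codes of each bit length, compute the smallest code
// value for each length, then assign consecutive code values to symbols in
// order.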
function getHuffmanCodes(bitLengths) {
// ensure bitLengths is an array containing at least one element
if (typeof bitLengths != typeof [] || bitLengths.length < 1) {
err("Error! getHuffmanCodes() called with an invalid array");
return null;
}
// Reference: http://tools.ietf.org/html/rfc1951#page-8
var numLengths = bitLengths.length,
blCount = [],
MAX_BITS = 1;
// Step 1: count up how many codes of each length we have
for (var i = 0; i < numLengths; ++i) {
var length = bitLengths[i];
// test to ensure each bit length is a non-negative number
if (typeof length != typeof 1 || length < 0) {
err("bitLengths contained an invalid number in getHuffmanCodes(): " + length + " of type " + (typeof length));
return null;
}
// increment the appropriate bitlength count
if (blCount[length] == undefined) blCount[length] = 0;
// a length of zero means this symbol is not participating in the huffman coding
if (length > 0) blCount[length]++;
if (length > MAX_BITS) MAX_BITS = length;
}
// Step 2: Find the numerical value of the smallest code for each code length
var nextCode = [],
code = 0;
for (var bits = 1; bits <= MAX_BITS; ++bits) {
var length = bits - 1;
// ensure undefined lengths are zero
if (blCount[length] == undefined) blCount[length] = 0;
code = (code + blCount[bits - 1]) << 1;
nextCode[bits] = code;
}
// Step 3: Assign numerical values to all codes
var table = {}, tableLength = 0;
for (var n = 0; n < numLengths; ++n) {
var len = bitLengths[n];
if (len != 0) {
table[nextCode[len]] = { length: len, symbol: n }; //, bitstring: binaryValueToString(nextCode[len], len) };
tableLength++;
nextCode[len]++;
}
}
table.maxLength = tableLength;
return table;
}
/*
The Huffman codes for the two alphabets are fixed, and are not
represented explicitly in the data. The Huffman code lengths
for the literal/length alphabet are:
Lit Value Bits Codes
--------- ---- -----
0 - 143 8 00110000 through
10111111
144 - 255 9 110010000 through
111111111
256 - 279 7 0000000 through
0010111
280 - 287 8 11000000 through
11000111
*/
// fixed Huffman codes go from 7-9 bits, so we need an array whose index can hold up to 9 bits
var fixedHCtoLiteral = null;
var fixedHCtoDistance = null;
function getFixedLiteralTable() {
// create once
if (!fixedHCtoLiteral) {
var bitlengths = new Array(288);
var i;
for (i = 0; i <= 143; ++i) bitlengths[i] = 8;
for (i = 144; i <= 255; ++i) bitlengths[i] = 9;
for (i = 256; i <= 279; ++i) bitlengths[i] = 7;
for (i = 280; i <= 287; ++i) bitlengths[i] = 8;
// get huffman code table
fixedHCtoLiteral = getHuffmanCodes(bitlengths);
}
return fixedHCtoLiteral;
}
function getFixedDistanceTable() {
// create once
if (!fixedHCtoDistance) {
var bitlengths = new Array(32);
for (var i = 0; i < 32; ++i) {
bitlengths[i] = 5;
}
// get huffman code table
fixedHCtoDistance = getHuffmanCodes(bitlengths);
}
return fixedHCtoDistance;
}
// extract one bit at a time until we find a matching Huffman Code
// then return that symbol
function decodeSymbol(bstream, hcTable) {
var code = 0, len = 0;
// loop until we match
for (;;) {
// read in next bit
var bit = bstream.readBits(1);
code = (code << 1) | bit;
++len;
// check against Huffman Code table and break if found
if (hcTable.hasOwnProperty(code) && hcTable[code].length == len) {
break;
}
if (len > hcTable.maxLength) {
err("Bit stream out of sync, didn't find a Huffman Code, length was " + len +
" and table only max code length of " + hcTable.maxLength);
break;
}
}
return hcTable[code].symbol;
}
var CodeLengthCodeOrder = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15];
/*
Extra Extra Extra
Code Bits Length(s) Code Bits Lengths Code Bits Length(s)
---- ---- ------ ---- ---- ------- ---- ---- -------
257 0 3 267 1 15,16 277 4 67-82
258 0 4 268 1 17,18 278 4 83-98
259 0 5 269 2 19-22 279 4 99-114
260 0 6 270 2 23-26 280 4 115-130
261 0 7 271 2 27-30 281 5 131-162
262 0 8 272 2 31-34 282 5 163-194
263 0 9 273 3 35-42 283 5 195-226
264 0 10 274 3 43-50 284 5 227-257
265 1 11,12 275 3 51-58 285 0 258
266 1 13,14 276 3 59-66
*/
var LengthLookupTable = [
[0, 3], [0, 4], [0, 5], [0, 6],
[0, 7], [0, 8], [0, 9], [0, 10],
[1, 11], [1, 13], [1, 15], [1, 17],
[2, 19], [2, 23], [2, 27], [2, 31],
[3, 35], [3, 43], [3, 51], [3, 59],
[4, 67], [4, 83], [4, 99], [4, 115],
[5, 131], [5, 163], [5, 195], [5, 227],
[0, 258]
];
/*
Extra Extra Extra
Code Bits Dist Code Bits Dist Code Bits Distance
---- ---- ---- ---- ---- ------ ---- ---- --------
0 0 1 10 4 33-48 20 9 1025-1536
1 0 2 11 4 49-64 21 9 1537-2048
2 0 3 12 5 65-96 22 10 2049-3072
3 0 4 13 5 97-128 23 10 3073-4096
4 1 5,6 14 6 129-192 24 11 4097-6144
5 1 7,8 15 6 193-256 25 11 6145-8192
6 2 9-12 16 7 257-384 26 12 8193-12288
7 2 13-16 17 7 385-512 27 12 12289-16384
8 3 17-24 18 8 513-768 28 13 16385-24576
9 3 25-32 19 8 769-1024 29 13 24577-32768
*/
var DistLookupTable = [
[0, 1], [0, 2], [0, 3], [0, 4],
[1, 5], [1, 7],
[2, 9], [2, 13],
[3, 17], [3, 25],
[4, 33], [4, 49],
[5, 65], [5, 97],
[6, 129], [6, 193],
[7, 257], [7, 385],
[8, 513], [8, 769],
[9, 1025], [9, 1537],
[10, 2049], [10, 3073],
[11, 4097], [11, 6145],
[12, 8193], [12, 12289],
[13, 16385], [13, 24577]
];
function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
/*
loop (until end of block code recognized)
decode literal/length value from input stream
if value < 256
copy value (literal byte) to output stream
otherwise
if value = end of block (256)
break from loop
otherwise (value = 257..285)
decode distance from input stream
move backwards distance bytes in the output
stream, and copy length bytes from this
position to the output stream.
*/
var blockSize = 0;
for (;;) {
var symbol = decodeSymbol(bstream, hcLiteralTable);
if (symbol < 256) {
// copy literal byte to output
buffer.insertByte(symbol);
blockSize++;
}
else {
// end of block reached
if (symbol == 256) {
break;
}
else {
var lengthLookup = LengthLookupTable[symbol - 257],
length = lengthLookup[1] + bstream.readBits(lengthLookup[0]),
distLookup = DistLookupTable[decodeSymbol(bstream, hcDistanceTable)],
distance = distLookup[1] + bstream.readBits(distLookup[0]);
// now apply length and distance appropriately and copy to output
// TODO: check that backward distance < data.length?
// http://tools.ietf.org/html/rfc1951#page-11
// "Note also that the referenced string may overlap the current
// position; for example, if the last 2 bytes decoded have values
// X and Y, a string reference with <length = 5, distance = 2>
// adds X,Y,X,Y,X to the output stream."
//
// loop for each character
var ch = buffer.ptr - distance;
blockSize += length;
if (length > distance) {
var data = buffer.data;
while (length--) {
buffer.insertByte(data[ch++]);
}
} else {
buffer.insertBytes(buffer.data.subarray(ch, ch + length));
}
} // length-distance pair
} // length-distance pair or end-of-block
} // loop until we reach end of block
return blockSize;
}
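// "Stored" entries (compression method 0) are not compressed at all, so the
// bytes are simply copied into a ByteBuffer of the expected size.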
function zeroCompression(compressedData, numDecompressedBytes) {
var bstream = new bitjs.io.BitStream(compressedData.buffer,
false /* rtl */,
compressedData.byteOffset,
compressedData.byteLength);
var buffer = new bitjs.io.ByteBuffer(numDecompressedBytes);
buffer.insertBytes(bstream.readBytes(numDecompressedBytes));
return buffer.data;
}
// {Uint8Array} compressedData A Uint8Array of the compressed file data.
// compression method 8
// deflate: http://tools.ietf.org/html/rfc1951
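// Walks the DEFLATE block stream: each block starts with a BFINAL bit and a
// 2-bit BTYPE (0 = stored, 1 = fixed Huffman codes, 2 = dynamic Huffman
// codes). Dynamic blocks carry their own code-length, literal/length and
// distance tables, which are decoded before the block data itself.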
function inflate(compressedData, numDecompressedBytes) {
// Bit stream representing the compressed data.
var bstream = new bitjs.io.BitStream(compressedData.buffer,
false /* rtl */,
compressedData.byteOffset,
compressedData.byteLength);
var buffer = new bitjs.io.ByteBuffer(numDecompressedBytes);
var numBlocks = 0, blockSize = 0;
// block format: http://tools.ietf.org/html/rfc1951#page-9
do {
var bFinal = bstream.readBits(1),
bType = bstream.readBits(2);
blockSize = 0;
++numBlocks;
// no compression
if (bType == 0) {
// skip remaining bits in this byte
while (bstream.bitPtr != 0) bstream.readBits(1);
var len = bstream.readBits(16);
bstream.readBits(16); // nlen (one's complement of len)
// TODO: check if nlen is the ones-complement of len?
if (len > 0) buffer.insertBytes(bstream.readBytes(len));
blockSize = len;
}
// fixed Huffman codes
else if (bType == 1) {
blockSize = inflateBlockData(bstream, getFixedLiteralTable(), getFixedDistanceTable(), buffer);
}
// dynamic Huffman codes
else if (bType == 2) {
var numLiteralLengthCodes = bstream.readBits(5) + 257;
var numDistanceCodes = bstream.readBits(5) + 1,
numCodeLengthCodes = bstream.readBits(4) + 4;
// populate the array of code length codes (first de-compaction)
var codeLengthsCodeLengths = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0];
for (var i = 0; i < numCodeLengthCodes; ++i) {
codeLengthsCodeLengths[ CodeLengthCodeOrder[i] ] = bstream.readBits(3);
}
// get the Huffman Codes for the code lengths
var codeLengthsCodes = getHuffmanCodes(codeLengthsCodeLengths);
// now follow this mapping
/*
0 - 15: Represent code lengths of 0 - 15
16: Copy the previous code length 3 - 6 times.
The next 2 bits indicate repeat length
(0 = 3, ... , 3 = 6)
Example: Codes 8, 16 (+2 bits 11),
16 (+2 bits 10) will expand to
12 code lengths of 8 (1 + 6 + 5)
17: Repeat a code length of 0 for 3 - 10 times.
(3 bits of length)
18: Repeat a code length of 0 for 11 - 138 times
(7 bits of length)
*/
// to generate the true code lengths of the Huffman Codes for the literal
// and distance tables together
var literalCodeLengths = [];
var prevCodeLength = 0;
while (literalCodeLengths.length < numLiteralLengthCodes + numDistanceCodes) {
var symbol = decodeSymbol(bstream, codeLengthsCodes);
if (symbol <= 15) {
literalCodeLengths.push(symbol);
prevCodeLength = symbol;
}
else if (symbol == 16) {
var repeat = bstream.readBits(2) + 3;
while (repeat--) {
literalCodeLengths.push(prevCodeLength);
}
}
else if (symbol == 17) {
var repeat1 = bstream.readBits(3) + 3;
while (repeat1--) {
literalCodeLengths.push(0);
}
} else if (symbol == 18) {
var repeat2 = bstream.readBits(7) + 11;
while (repeat2--) {
literalCodeLengths.push(0);
}
}
}
// now split the distance code lengths out of the literal code array
var distanceCodeLengths = literalCodeLengths.splice(numLiteralLengthCodes, numDistanceCodes);
// now generate the true Huffman Code tables using these code lengths
var hcLiteralTable = getHuffmanCodes(literalCodeLengths),
hcDistanceTable = getHuffmanCodes(distanceCodeLengths);
blockSize = inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer);
} else {
// error
err("Error! Encountered deflate block of type 3");
return null;
}
// update progress
currentBytesUnarchivedInFile += blockSize;
currentBytesUnarchived += blockSize;
postProgress();
} while (bFinal != 1);
// we are done reading blocks if the bFinal bit was set for this block
// return the buffer data bytes
return buffer.data;
}
// event.data.file has the ArrayBuffer.
onmessage = function(event) {
unzip(event.data.file, true);
};

@ -15,7 +15,7 @@
<script src="{{ url_for('static', filename='js/libs/jquery.min.js') }}"></script>
<script src="{{ url_for('static', filename='js/libs/screenfull.min.js') }}"></script>
<script src="{{ url_for('static', filename='js/kthoom.js') }}"></script>
<script src="{{ url_for('static', filename='js/archive.js') }}"></script>
<script src="{{ url_for('static', filename='js/archive/archive.js') }}"></script>
<script>
var updateArrows = function() {
if ($('input[name="direction"]:checked').val() === "0") {
