From e1439b529b62a9e6aeb5335730176b815d5f9c5e Mon Sep 17 00:00:00 2001 From: Ozzieisaacs Date: Mon, 4 May 2020 18:19:30 +0200 Subject: [PATCH] Config Options for limiting email size, change username to e-mail adress, use kepubify Added work on Unrar5 decompression (breaks comic reader totally) --- cps/db.py | 6 +- cps/editbooks.py | 20 +- cps/helper.py | 2 +- cps/static/js/archive/unrar.js | 20 +- cps/static/js/archive/unrar5.js | 1371 +++++++++++++++++++++++++++++++ cps/static/js/kthoom.js | 11 +- cps/templates/readcbr.html | 7 +- 7 files changed, 1411 insertions(+), 26 deletions(-) create mode 100644 cps/static/js/archive/unrar5.js diff --git a/cps/db.py b/cps/db.py index 6a2229d3..2c8b699a 100755 --- a/cps/db.py +++ b/cps/db.py @@ -26,7 +26,7 @@ from datetime import datetime from sqlalchemy import create_engine from sqlalchemy import Table, Column, ForeignKey, CheckConstraint -from sqlalchemy import String, Integer, Boolean, TIMESTAMP, Float, DateTime, REAL +from sqlalchemy import String, Integer, Boolean, TIMESTAMP, Float, DateTime from sqlalchemy.orm import relationship, sessionmaker, scoped_session from sqlalchemy.ext.declarative import declarative_base @@ -253,8 +253,8 @@ class Books(Base): sort = Column(String(collation='NOCASE')) author_sort = Column(String(collation='NOCASE')) timestamp = Column(TIMESTAMP, default=datetime.utcnow) - pubdate = Column(TIMESTAMP, default=datetime.utcnow) - series_index = Column(REAL, nullable=False, default=1.0) + pubdate = Column(String) # , default=datetime.utcnow) + series_index = Column(String, nullable=False, default="1.0") last_modified = Column(TIMESTAMP, default=datetime.utcnow) path = Column(String, default="", nullable=False) has_cover = Column(Integer, default=0) diff --git a/cps/editbooks.py b/cps/editbooks.py index 08a7d34a..3ca4e793 100644 --- a/cps/editbooks.py +++ b/cps/editbooks.py @@ -30,7 +30,6 @@ from uuid import uuid4 from flask import Blueprint, request, flash, redirect, url_for, abort, Markup, Response from flask_babel import gettext as _ from flask_login import current_user, login_required -from sqlalchemy import func from . import constants, logger, isoLanguages, gdriveutils, uploader, helper from . 
import config, get_locale, db, ub, worker @@ -182,6 +181,8 @@ def delete_book(book_id, book_format): if not result: flash(error, category="error") return redirect(url_for('editbook.edit_book', book_id=book_id)) + if error: + flash(error, category="warning") if not book_format: # delete book from Shelfs, Downloads, Read list ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete() @@ -689,6 +690,15 @@ def upload(): flash(_(u"Uploaded book probably exists in the library, consider to change before upload new: ") + Markup(render_title_template('book_exists_flash.html', entry=entry)), category="warning") + # handle authors + is_author = db.session.query(db.Authors).filter(db.Authors.name == authr).first() + if is_author: + db_author = is_author + authr= is_author.name + else: + db_author = db.Authors(authr, helper.get_sorted_author(authr), "") + db.session.add(db_author) + title_dir = helper.get_valid_filename(title) author_dir = helper.get_valid_filename(authr) filepath = os.path.join(config.config_calibre_dir, author_dir, title_dir) @@ -722,14 +732,6 @@ def upload(): else: has_cover = 1 - # handle authors - is_author = db.session.query(db.Authors).filter(db.Authors.name == func.binary(authr)).first() - if is_author: - db_author = is_author - else: - db_author = db.Authors(authr, helper.get_sorted_author(authr), "") - db.session.add(db_author) - # handle series db_series = None is_series = db.session.query(db.Series).filter(db.Series.name == series).first() diff --git a/cps/helper.py b/cps/helper.py index 13cdea80..95abc69c 100644 --- a/cps/helper.py +++ b/cps/helper.py @@ -315,7 +315,7 @@ def delete_book_file(book, calibrepath, book_format=None): return True, None else: log.error("Deleting book %s failed, book path not valid: %s", book.id, book.path) - return False, _("Deleting book %(id)s failed, book path not valid: %(path)s", + return True, _("Deleting book %(id)s, book path not valid: %(path)s", id=book.id, path=book.path) diff --git a/cps/static/js/archive/unrar.js b/cps/static/js/archive/unrar.js index fadb791e..3e2a45af 100644 --- a/cps/static/js/archive/unrar.js +++ b/cps/static/js/archive/unrar.js @@ -14,10 +14,10 @@ /* global VM_FIXEDGLOBALSIZE, VM_GLOBALMEMSIZE, MAXWINMASK, VM_GLOBALMEMADDR, MAXWINSIZE */ // This file expects to be invoked as a Worker (see onmessage below). -importScripts("../io/bitstream.js"); +/*importScripts("../io/bitstream.js"); importScripts("../io/bytebuffer.js"); importScripts("archive.js"); -importScripts("rarvm.js"); +importScripts("rarvm.js");*/ // Progress variables. var currentFilename = ""; @@ -29,19 +29,21 @@ var totalFilesInArchive = 0; // Helper functions. 
var info = function(str) { - postMessage(new bitjs.archive.UnarchiveInfoEvent(str)); + console.log(str); + // postMessage(new bitjs.archive.UnarchiveInfoEvent(str)); }; var err = function(str) { - postMessage(new bitjs.archive.UnarchiveErrorEvent(str)); + console.log(str); + // postMessage(new bitjs.archive.UnarchiveErrorEvent(str)); }; var postProgress = function() { - postMessage(new bitjs.archive.UnarchiveProgressEvent( + /*postMessage(new bitjs.archive.UnarchiveProgressEvent( currentFilename, currentFileNumber, currentBytesUnarchivedInFile, currentBytesUnarchived, totalUncompressedBytesInArchive, - totalFilesInArchive)); + totalFilesInArchive));*/ }; // shows a byte value as its hex representation @@ -1298,7 +1300,7 @@ var unrar = function(arrayBuffer) { totalUncompressedBytesInArchive = 0; totalFilesInArchive = 0; - postMessage(new bitjs.archive.UnarchiveStartEvent()); + //postMessage(new bitjs.archive.UnarchiveStartEvent()); var bstream = new bitjs.io.BitStream(arrayBuffer, false /* rtl */); var header = new RarVolumeHeader(bstream); @@ -1348,7 +1350,7 @@ var unrar = function(arrayBuffer) { localfile.unrar(); if (localfile.isValid) { - postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile)); + // postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile)); postProgress(); } } @@ -1358,7 +1360,7 @@ var unrar = function(arrayBuffer) { } else { err("Invalid RAR file"); } - postMessage(new bitjs.archive.UnarchiveFinishEvent()); + // postMessage(new bitjs.archive.UnarchiveFinishEvent()); }; // event.data.file has the ArrayBuffer. diff --git a/cps/static/js/archive/unrar5.js b/cps/static/js/archive/unrar5.js new file mode 100644 index 00000000..452989c0 --- /dev/null +++ b/cps/static/js/archive/unrar5.js @@ -0,0 +1,1371 @@ +/** + * unrar.js + * + * Licensed under the MIT License + * + * Copyright(c) 2011 Google Inc. + * Copyright(c) 2011 antimatter15 + * + * Reference Documentation: + * + * http://kthoom.googlecode.com/hg/docs/unrar.html + */ +/* global bitjs, importScripts, RarVM, Uint8Array, UnpackFilter */ +/* global VM_FIXEDGLOBALSIZE, VM_GLOBALMEMSIZE, MAXWINMASK, VM_GLOBALMEMADDR, MAXWINSIZE */ + +// This file expects to be invoked as a Worker (see onmessage below). +/*importScripts("../io/bitstream.js"); +importScripts("../io/bytebuffer.js"); +importScripts("archive.js"); +importScripts("rarvm.js");*/ + +// Progress variables. +var currentFilename = ""; +var currentFileNumber = 0; +var currentBytesUnarchivedInFile = 0; +var currentBytesUnarchived = 0; +var totalUncompressedBytesInArchive = 0; +var totalFilesInArchive = 0; + +// Helper functions. 
+var info = function(str) { + console.log(str) + //postMessage(new bitjs.archive.UnarchiveInfoEvent(str)); +}; +var err = function(str) { + console.log(str) + //postMessage(new bitjs.archive.UnarchiveErrorEvent(str)); +}; +var postProgress = function() { + /*postMessage(new bitjs.archive.UnarchiveProgressEvent( + currentFilename, + currentFileNumber, + currentBytesUnarchivedInFile, + currentBytesUnarchived, + totalUncompressedBytesInArchive, + totalFilesInArchive));*/ +}; + +// shows a byte value as its hex representation +var nibble = "0123456789ABCDEF"; +var byteValueToHexString = function(num) { + return nibble[num >> 4] + nibble[num & 0xF]; +}; +var twoByteValueToHexString = function(num) { + return nibble[(num >> 12) & 0xF] + nibble[(num >> 8) & 0xF] + nibble[(num >> 4) & 0xF] + nibble[num & 0xF]; +}; + + +// Volume Types +var MAIN_HEAD = 0x01, + ENCRYPT_HEAD = 0x04, + FILE_HEAD = 0x02, + SERVICE_HEAD = 0x03, + // COMM_HEAD = 0x75, + // AV_HEAD = 0x76, + // SUB_HEAD = 0x77, + // PROTECT_HEAD = 0x78, + // SIGN_HEAD = 0x79, + // NEWSUB_HEAD = 0x7a, + ENDARC_HEAD = 0x05; + +// ============================================================================================== // + +var RarMainVolumeHeader = function(bstream) { + var headPos = bstream.bytePtr; + // byte 1,2 + info("Rar Volume Header @" + bstream.bytePtr); + + this.crc = bstream.readBits(16); + info(" crc=" + this.crc); + + // byte 3 + this.headType = bstream.readBits(8); + info(" headType=" + this.headType); + + // Get flags + // bytes 4,5 + this.flags = {}; + this.flags.value = bstream.readBits(16); + + // byte 6 + this.headSize = bstream.readBits(8); + // byte 7 + if (bstream.readBits(8) === 1) { + info(" RarVersion=5"); + } + // byte 8 + bstream.readBits(8); +} + +var vint = function(bstream) { + var size = 0; + var result = 0; + var loop = 0 ; + do { + size = bstream.readBits(8); + result |= (size & 0x7F) << (loop * 7); + loop++; + } while (size & 0x80 ) + return result; +} + +/** + * @param {bitjs.io.BitStream} bstream + * @constructor + */ +var RarVolumeHeader = function(bstream) { + var headPos = bstream.bytePtr; + // byte 1,2 + info("Rar Volume Header @" + bstream.bytePtr); + + this.crc = bstream.readBits(32); + info(" crc=" + this.crc); + + // byte 3 + x Header size + this.headSize = vint(bstream); + info(" Header Size=" + this.headSize); + + // byte 4 + this.headType = bstream.readBits(8); + info(" headType=" + this.headType); + + // Get Header flags + this.headFlags = {}; + this.headFlags.value = bstream.peekBits(8); + + info(" Header flags=" + byteValueToHexString(this.headFlags.value)); + this.headFlags.EXTRA_AREA = !!bstream.readBits(1); + this.headFlags.DATA_AREA = !!bstream.readBits(1); + this.headFlags.UNKNOWN = !!bstream.readBits(1); + this.headFlags.CONTINUE_FROM = !!bstream.readBits(1); + this.headFlags.CONTNUE_TO = !!bstream.readBits(1); + this.headFlags.DEPENDS = !!bstream.readBits(1); + this.headFlags.CHILDBLOCK = !!bstream.readBits(1); + bstream.readBits(1); // unused*/ + + // Get extra AreaSize + if (this.headFlags.EXTRA_AREA) { + this.extraSize = vint(bstream); + } else { + this.extraSize = 0; + } + if (this.headFlags.DATA_AREA && (this.headType == FILE_HEAD || this.headType == SERVICE_HEAD)) { + this.packSize = vint(bstream); + // this.packSize = bstream.readBits(32); + } + this.flags = {}; + this.flags.value = bstream.peekBits(8); + + switch (this.headType) { + case MAIN_HEAD: + // this.flags = {}; + // this.flags.value = bstream.peekBits(16); + this.flags.MHD_VOLUME = !!bstream.readBits(1); + 
this.flags.MHD_VOLUMNE_NO = !!bstream.readBits(1); + this.flags.MHD_SOLID = !!bstream.readBits(1); + this.flags.MHD_RECOVERY = !!bstream.readBits(1); + this.flags.MHD_LOCKED = !!bstream.readBits(1); + bstream.readBits(3); // unused*/ + if (this.flags.MHD_VOLUMNE_NO) { + this.volumeNumber = vint(bstream); + } + bstream.readBytes(this.extraSize); + // break; + return; // Main Header finally parsed + // ------------ + case FILE_HEAD: + case SERVICE_HEAD: + this.flags.DIRECTORY = !!bstream.readBits(1); + this.flags.TIME = !!bstream.readBits(1); + this.flags.CRC = !!bstream.readBits(1); + this.flags.UNPACK_UNKNOWN = !!bstream.readBits(1); + bstream.readBits(4); + + if (this.flags.UNPACK_UNKNOWN) { + vint(bstream); + } else { + this.unpackedSize = vint(bstream); + } + this.fileAttr = vint(bstream); + if (this.flags.TIME) { + this.fileTime = bstream.readBits(32); + } + if (this.flags.CRC) { + this.fileCRC = bstream.readBits(32); + } + // var compInfo = vint(bstream); + this.unpVer = bstream.readBits(6); + this.solid = bstream.readBits(1); + bstream.readBits(1); + this.method = bstream.readBits(3); + this.dictSize = bstream.readBits(4); + bstream.readBits(1); + this.hostOS = vint(bstream); + this.nameSize = vint(bstream); + + this.filename = bstream.readBytes(this.nameSize); + var _s = ""; + for (var _i = 0; _i < this.filename.length ; _i++) { + _s += String.fromCharCode(this.filename[_i]); + } + + this.filename = _s; + bstream.readBytes(this.extraSize); + break; + + default: + info("Found a header of type 0x" + byteValueToHexString(this.headType)); + // skip the rest of the header bytes (for now) + bstream.readBytes(this.headSize - 7); + break; + } +}; + +//var BLOCK_LZ = 0; + +var rLDecode = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224], + rLBits = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5], + rDBitLengthCounts = [4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 14, 0, 12], + rSDDecode = [0, 4, 8, 16, 32, 64, 128, 192], + rSDBits = [2, 2, 3, 4, 5, 6, 6, 6]; + +var rDDecode = [0, 1, 2, 3, 4, 6, 8, 12, 16, 24, 32, + 48, 64, 96, 128, 192, 256, 384, 512, 768, 1024, 1536, 2048, 3072, + 4096, 6144, 8192, 12288, 16384, 24576, 32768, 49152, 65536, 98304, + 131072, 196608, 262144, 327680, 393216, 458752, 524288, 589824, + 655360, 720896, 786432, 851968, 917504, 983040 +]; + +var rDBits = [0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, + 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, + 15, 15, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16 +]; + +var rLowDistRepCount = 16; + +var rNC = 299, + rDC = 60, + rLDC = 17, + rRC = 28, + rBC = 20, + rHuffTableSize = (rNC + rDC + rRC + rLDC); + +//var UnpBlockType = BLOCK_LZ; +var UnpOldTable = new Array(rHuffTableSize); + +var BD = { //bitdecode + DecodeLen: new Array(16), + DecodePos: new Array(16), + DecodeNum: new Array(rBC) +}; +var LD = { //litdecode + DecodeLen: new Array(16), + DecodePos: new Array(16), + DecodeNum: new Array(rNC) +}; +var DD = { //distdecode + DecodeLen: new Array(16), + DecodePos: new Array(16), + DecodeNum: new Array(rDC) +}; +var LDD = { //low dist decode + DecodeLen: new Array(16), + DecodePos: new Array(16), + DecodeNum: new Array(rLDC) +}; +var RD = { //rep decode + DecodeLen: new Array(16), + DecodePos: new Array(16), + DecodeNum: new Array(rRC) +}; + +/** + * @type {Array} + */ +var rOldBuffers = []; + +/** + * The current buffer we are unpacking to. 
+ * @type {bitjs.io.ByteBuffer} + */ +var rBuffer; + +/** + * The buffer of the final bytes after filtering (only used in Unpack29). + * @type {bitjs.io.ByteBuffer} + */ +var wBuffer; + +var lowDistRepCount = 0; +var prevLowDist = 0; + +var rOldDist = [0, 0, 0, 0]; +var lastDist; +var lastLength; + +/** + * In unpack.cpp, UnpPtr keeps track of what bytes have been unpacked + * into the Window buffer and WrPtr keeps track of what bytes have been + * actually written to disk after the unpacking and optional filtering + * has been done. + * + * In our case, rBuffer is the buffer for the unpacked bytes and wBuffer is + * the final output bytes. + */ + + +/** + * Read in Huffman tables for RAR + * @param {bitjs.io.BitStream} bstream + */ +function rarReadTables(bstream) { + var BitLength = new Array(rBC); + var Table = new Array(rHuffTableSize); + var i; + // before we start anything we need to get byte-aligned + bstream.readBits((8 - bstream.bitPtr) & 0x7); + + if (bstream.readBits(1)) { + info("Error! PPM not implemented yet"); + return; + } + + if (!bstream.readBits(1)) { //discard old table + for (i = UnpOldTable.length; i--;) { + UnpOldTable[i] = 0; + } + } + + // read in bit lengths + for (var I = 0; I < rBC; ++I) { + var Length = bstream.readBits(4); + if (Length === 15) { + var ZeroCount = bstream.readBits(4); + if (ZeroCount === 0) { + BitLength[I] = 15; + } else { + ZeroCount += 2; + while (ZeroCount-- > 0 && I < rBC) { + BitLength[I++] = 0; + } + --I; + } + } else { + BitLength[I] = Length; + } + } + + // now all 20 bit lengths are obtained, we construct the Huffman Table: + + rarMakeDecodeTables(BitLength, 0, BD, rBC); + + var TableSize = rHuffTableSize; + //console.log(DecodeLen, DecodePos, DecodeNum); + for (i = 0; i < TableSize;) { + var N; + var num = rarDecodeNumber(bstream, BD); + if (num < 16) { + Table[i] = (num + UnpOldTable[i]) & 0xf; + i++; + } else if (num < 18) { + N = (num === 16) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11); + + while (N-- > 0 && i < TableSize) { + Table[i] = Table[i - 1]; + i++; + } + } else { + N = (num === 18) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11); + + while (N-- > 0 && i < TableSize) { + Table[i++] = 0; + } + } + } + + rarMakeDecodeTables(Table, 0, LD, rNC); + rarMakeDecodeTables(Table, rNC, DD, rDC); + rarMakeDecodeTables(Table, rNC + rDC, LDD, rLDC); + rarMakeDecodeTables(Table, rNC + rDC + rLDC, RD, rRC); + + for (i = UnpOldTable.length; i--;) { + UnpOldTable[i] = Table[i]; + } + return true; +} + + +function rarDecodeNumber(bstream, dec) { + var DecodeLen = dec.DecodeLen, + DecodePos = dec.DecodePos, + DecodeNum = dec.DecodeNum; + var bitField = bstream.getBits() & 0xfffe; + //some sort of rolled out binary search + var bits = ((bitField < DecodeLen[8]) ? + ((bitField < DecodeLen[4]) ? + ((bitField < DecodeLen[2]) ? + ((bitField < DecodeLen[1]) ? 1 : 2) : + ((bitField < DecodeLen[3]) ? 3 : 4)) : + (bitField < DecodeLen[6]) ? + ((bitField < DecodeLen[5]) ? 5 : 6) : + ((bitField < DecodeLen[7]) ? 7 : 8)) : + ((bitField < DecodeLen[12]) ? + ((bitField < DecodeLen[10]) ? + ((bitField < DecodeLen[9]) ? 9 : 10) : + ((bitField < DecodeLen[11]) ? 11 : 12)) : + (bitField < DecodeLen[14]) ? + ((bitField < DecodeLen[13]) ? 
13 : 14) : + 15)); + bstream.readBits(bits); + var N = DecodePos[bits] + ((bitField - DecodeLen[bits - 1]) >>> (16 - bits)); + + return DecodeNum[N]; +} + + +function rarMakeDecodeTables(BitLength, offset, dec, size) { + var DecodeLen = dec.DecodeLen; + var DecodePos = dec.DecodePos; + var DecodeNum = dec.DecodeNum; + var LenCount = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; + var TmpPos = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; + var N = 0; + var M = 0; + var i; + for (i = DecodeNum.length; i--;) { + DecodeNum[i] = 0; + } + for (i = 0; i < size; i++) { + LenCount[BitLength[i + offset] & 0xF]++; + } + LenCount[0] = 0; + TmpPos[0] = 0; + DecodePos[0] = 0; + DecodeLen[0] = 0; + + var I; + for (I = 1; I < 16; ++I) { + N = 2 * (N + LenCount[I]); + M = (N << (15 - I)); + if (M > 0xFFFF) { + M = 0xFFFF; + } + DecodeLen[I] = M; + DecodePos[I] = DecodePos[I - 1] + LenCount[I - 1]; + TmpPos[I] = DecodePos[I]; + } + for (I = 0; I < size; ++I) { + if (BitLength[I + offset] !== 0) { + DecodeNum[TmpPos[BitLength[offset + I] & 0xF]++] = I; + } + } + +} + +// TODO: implement +/** + * @param {bitjs.io.BitStream} bstream + * @param {boolean} Solid + */ +function unpack15() { //bstream, Solid) { + info("ERROR! RAR 1.5 compression not supported"); +} + +/** + * Unpacks the bit stream into rBuffer using the Unpack20 algorithm. + * @param {bitjs.io.BitStream} bstream + * @param {boolean} Solid + */ +function unpack20(bstream) { //, Solid) { + var destUnpSize = rBuffer.data.length; + var oldDistPtr = 0; + var Length; + var Distance; + rarReadTables20(bstream); + while (destUnpSize > rBuffer.ptr) { + var num = rarDecodeNumber(bstream, LD); + var Bits; + if (num < 256) { + rBuffer.insertByte(num); + continue; + } + if (num > 269) { + Length = rLDecode[num -= 270] + 3; + if ((Bits = rLBits[num]) > 0) { + Length += bstream.readBits(Bits); + } + var DistNumber = rarDecodeNumber(bstream, DD); + Distance = rDDecode[DistNumber] + 1; + if ((Bits = rDBits[DistNumber]) > 0) { + Distance += bstream.readBits(Bits); + } + if (Distance >= 0x2000) { + Length++; + if (Distance >= 0x40000) { + Length++; + } + } + lastLength = Length; + lastDist = rOldDist[oldDistPtr++ & 3] = Distance; + rarCopyString(Length, Distance); + continue; + } + if (num === 269) { + rarReadTables20(bstream); + rarUpdateProgress(); + continue; + } + if (num === 256) { + lastDist = rOldDist[oldDistPtr++ & 3] = lastDist; + rarCopyString(lastLength, lastDist); + continue; + } + if (num < 261) { + Distance = rOldDist[(oldDistPtr - (num - 256)) & 3]; + var LengthNumber = rarDecodeNumber(bstream, RD); + Length = rLDecode[LengthNumber] + 2; + if ((Bits = rLBits[LengthNumber]) > 0) { + Length += bstream.readBits(Bits); + } + if (Distance >= 0x101) { + Length++; + if (Distance >= 0x2000) { + Length++; + if (Distance >= 0x40000) { + Length++; + } + } + } + lastLength = Length; + lastDist = rOldDist[oldDistPtr++ & 3] = Distance; + rarCopyString(Length, Distance); + continue; + } + if (num < 270) { + Distance = rSDDecode[num -= 261] + 1; + if ((Bits = rSDBits[num]) > 0) { + Distance += bstream.readBits(Bits); + } + lastLength = 2; + lastDist = rOldDist[oldDistPtr++ & 3] = Distance; + rarCopyString(2, Distance); + continue; + } + } + rarUpdateProgress(); +} + +function rarUpdateProgress() { + var change = rBuffer.ptr - currentBytesUnarchivedInFile; + currentBytesUnarchivedInFile = rBuffer.ptr; + currentBytesUnarchived += change; + postProgress(); +} + +var rNC20 = 298, + rDC20 = 48, + rRC20 = 28, + rBC20 = 19, + rMC20 = 257; + +var UnpOldTable20 = new 
Array(rMC20 * 4); + +function rarReadTables20(bstream) { + var BitLength = new Array(rBC20); + var Table = new Array(rMC20 * 4); + var TableSize, N, I; + var i; + bstream.readBits(1); + if (!bstream.readBits(1)) { + for (i = UnpOldTable20.length; i--;) { + UnpOldTable20[i] = 0; + } + } + TableSize = rNC20 + rDC20 + rRC20; + for (I = 0; I < rBC20; I++) { + BitLength[I] = bstream.readBits(4); + } + rarMakeDecodeTables(BitLength, 0, BD, rBC20); + I = 0; + while (I < TableSize) { + var num = rarDecodeNumber(bstream, BD); + if (num < 16) { + Table[I] = num + UnpOldTable20[I] & 0xf; + I++; + } else if (num === 16) { + N = bstream.readBits(2) + 3; + while (N-- > 0 && I < TableSize) { + Table[I] = Table[I - 1]; + I++; + } + } else { + if (num === 17) { + N = bstream.readBits(3) + 3; + } else { + N = bstream.readBits(7) + 11; + } + while (N-- > 0 && I < TableSize) { + Table[I++] = 0; + } + } + } + rarMakeDecodeTables(Table, 0, LD, rNC20); + rarMakeDecodeTables(Table, rNC20, DD, rDC20); + rarMakeDecodeTables(Table, rNC20 + rDC20, RD, rRC20); + for (i = UnpOldTable20.length; i--;) { + UnpOldTable20[i] = Table[i]; + } +} + +// ============================================================================================== // + +// Unpack code specific to RarVM +var VM = new RarVM(); + +/** + * Filters code, one entry per filter. + * @type {Array} + */ +var Filters = []; + +/** + * Filters stack, several entrances of same filter are possible. + * @type {Array} + */ +var PrgStack = []; + +/** + * Lengths of preceding blocks, one length per filter. Used to reduce + * size required to write block length if lengths are repeating. + * @type {Array} + */ +var OldFilterLengths = []; + +var LastFilter = 0; + +function initFilters() { + OldFilterLengths = []; + LastFilter = 0; + Filters = []; + PrgStack = []; +} + + +/** + * @param {number} firstByte The first byte (flags). + * @param {Uint8Array} vmCode An array of bytes. 
+ */ +function rarAddVMCode(firstByte, vmCode) { + VM.init(); + var i; + var bstream = new bitjs.io.BitStream(vmCode.buffer, true /* rtl */ ); + + var filtPos; + if (firstByte & 0x80) { + filtPos = RarVM.readData(bstream); + if (filtPos === 0) { + initFilters(); + } else { + filtPos--; + } + } else { + filtPos = LastFilter; + } + + if (filtPos > Filters.length || filtPos > OldFilterLengths.length) { + return false; + } + + LastFilter = filtPos; + var newFilter = (filtPos === Filters.length); + + // new filter for PrgStack + var stackFilter = new UnpackFilter(); + var filter = null; + // new filter code, never used before since VM reset + if (newFilter) { + // too many different filters, corrupt archive + if (filtPos > 1024) { + return false; + } + + filter = new UnpackFilter(); + Filters.push(filter); + stackFilter.ParentFilter = (Filters.length - 1); + OldFilterLengths.push(0); // OldFilterLengths.Add(1) + filter.ExecCount = 0; + } else { // filter was used in the past + filter = Filters[filtPos]; + stackFilter.ParentFilter = filtPos; + filter.ExecCount++; + } + + var emptyCount = 0; + for (i = 0; i < PrgStack.length; ++i) { + PrgStack[i - emptyCount] = PrgStack[i]; + + if (PrgStack[i] === null) { + emptyCount++; + } + if (emptyCount > 0) { + PrgStack[i] = null; + } + } + + if (emptyCount === 0) { + PrgStack.push(null); //PrgStack.Add(1); + emptyCount = 1; + } + + var stackPos = PrgStack.length - emptyCount; + PrgStack[stackPos] = stackFilter; + stackFilter.ExecCount = filter.ExecCount; + + var blockStart = RarVM.readData(bstream); + if (firstByte & 0x40) { + blockStart += 258; + } + stackFilter.BlockStart = (blockStart + rBuffer.ptr) & MAXWINMASK; + + if (firstByte & 0x20) { + stackFilter.BlockLength = RarVM.readData(bstream); + } else { + stackFilter.BlockLength = filtPos < OldFilterLengths.length ? 
+ OldFilterLengths[filtPos] : + 0; + } + stackFilter.NextWindow = (wBuffer.ptr !== rBuffer.ptr) && + (((wBuffer.ptr - rBuffer.ptr) & MAXWINMASK) <= blockStart); + + OldFilterLengths[filtPos] = stackFilter.BlockLength; + + for (i = 0; i < 7; ++i) { + stackFilter.Prg.InitR[i] = 0; + } + stackFilter.Prg.InitR[3] = VM_GLOBALMEMADDR; + stackFilter.Prg.InitR[4] = stackFilter.BlockLength; + stackFilter.Prg.InitR[5] = stackFilter.ExecCount; + + // set registers to optional parameters if any + if (firstByte & 0x10) { + var initMask = bstream.readBits(7); + for (i = 0; i < 7; ++i) { + if (initMask & (1 << i)) { + stackFilter.Prg.InitR[i] = RarVM.readData(bstream); + } + } + } + + if (newFilter) { + var vmCodeSize = RarVM.readData(bstream); + if (vmCodeSize >= 0x10000 || vmCodeSize === 0) { + return false; + } + vmCode = new Uint8Array(vmCodeSize); + for (i = 0; i < vmCodeSize; ++i) { + //if (Inp.Overflow(3)) + // return(false); + vmCode[i] = bstream.readBits(8); + } + VM.prepare(vmCode, filter.Prg); + } + stackFilter.Prg.Cmd = filter.Prg.Cmd; + stackFilter.Prg.AltCmd = filter.Prg.Cmd; + + var staticDataSize = filter.Prg.StaticData.length; + if (staticDataSize > 0 && staticDataSize < VM_GLOBALMEMSIZE) { + // read statically defined data contained in DB commands + for (i = 0; i < staticDataSize; ++i) { + stackFilter.Prg.StaticData[i] = filter.Prg.StaticData[i]; + } + } + + if (stackFilter.Prg.GlobalData.length < VM_FIXEDGLOBALSIZE) { + stackFilter.Prg.GlobalData = new Uint8Array(VM_FIXEDGLOBALSIZE); + } + + var globalData = stackFilter.Prg.GlobalData; + for (i = 0; i < 7; ++i) { + VM.setLowEndianValue(globalData, stackFilter.Prg.InitR[i], i * 4); + } + + VM.setLowEndianValue(globalData, stackFilter.BlockLength, 0x1c); + VM.setLowEndianValue(globalData, 0, 0x20); + VM.setLowEndianValue(globalData, stackFilter.ExecCount, 0x2c); + for (i = 0; i < 16; ++i) { + globalData[0x30 + i] = 0; + } + + // put data block passed as parameter if any + if (firstByte & 8) { + //if (Inp.Overflow(3)) + // return(false); + var dataSize = RarVM.readData(bstream); + if (dataSize > (VM_GLOBALMEMSIZE - VM_FIXEDGLOBALSIZE)) { + return (false); + } + + var curSize = stackFilter.Prg.GlobalData.length; + if (curSize < dataSize + VM_FIXEDGLOBALSIZE) { + // Resize global data and update the stackFilter and local variable. + var numBytesToAdd = dataSize + VM_FIXEDGLOBALSIZE - curSize; + var newGlobalData = new Uint8Array(globalData.length + numBytesToAdd); + newGlobalData.set(globalData); + + stackFilter.Prg.GlobalData = newGlobalData; + globalData = newGlobalData; + } + //byte *GlobalData=&StackFilter->Prg.GlobalData[VM_FIXEDGLOBALSIZE]; + for (i = 0; i < dataSize; ++i) { + //if (Inp.Overflow(3)) + // return(false); + globalData[VM_FIXEDGLOBALSIZE + i] = bstream.readBits(8); + } + } + + return true; +} + + +/** + * @param {!bitjs.io.BitStream} bstream + */ +function rarReadVMCode(bstream) { + var firstByte = bstream.readBits(8); + var length = (firstByte & 7) + 1; + if (length === 7) { + length = bstream.readBits(8) + 7; + } else if (length === 8) { + length = bstream.readBits(16); + } + + // Read all bytes of VM code into an array. + var vmCode = new Uint8Array(length); + for (var i = 0; i < length; i++) { + // Do something here with checking readbuf. + vmCode[i] = bstream.readBits(8); + } + return rarAddVMCode(firstByte, vmCode); +} + +/** + * Unpacks the bit stream into rBuffer using the Unpack29 algorithm. 
+ * @param {bitjs.io.BitStream} bstream + * @param {boolean} Solid + */ +function unpack29(bstream) { + // lazy initialize rDDecode and rDBits + + var DDecode = new Array(rDC); + var DBits = new Array(rDC); + var Distance = 0; + var Length = 0; + var Dist = 0, BitLength = 0, Slot = 0; + var I; + for (I = 0; I < rDBitLengthCounts.length; I++, BitLength++) { + for (var J = 0; J < rDBitLengthCounts[I]; J++, Slot++, Dist += (1 << BitLength)) { + DDecode[Slot] = Dist; + DBits[Slot] = BitLength; + } + } + + var Bits; + //tablesRead = false; + + rOldDist = [0, 0, 0, 0]; + + lastDist = 0; + lastLength = 0; + var i; + for (i = UnpOldTable.length; i--;) { + UnpOldTable[i] = 0; + } + + // read in Huffman tables + rarReadTables(bstream); + + while (true) { + var num = rarDecodeNumber(bstream, LD); + + if (num < 256) { + rBuffer.insertByte(num); + continue; + } + if (num >= 271) { + Length = rLDecode[num -= 271] + 3; + if ((Bits = rLBits[num]) > 0) { + Length += bstream.readBits(Bits); + } + var DistNumber = rarDecodeNumber(bstream, DD); + Distance = DDecode[DistNumber] + 1; + if ((Bits = DBits[DistNumber]) > 0) { + if (DistNumber > 9) { + if (Bits > 4) { + Distance += ((bstream.getBits() >>> (20 - Bits)) << 4); + bstream.readBits(Bits - 4); + //todo: check this + } + if (lowDistRepCount > 0) { + lowDistRepCount--; + Distance += prevLowDist; + } else { + var LowDist = rarDecodeNumber(bstream, LDD); + if (LowDist === 16) { + lowDistRepCount = rLowDistRepCount - 1; + Distance += prevLowDist; + } else { + Distance += LowDist; + prevLowDist = LowDist; + } + } + } else { + Distance += bstream.readBits(Bits); + } + } + if (Distance >= 0x2000) { + Length++; + if (Distance >= 0x40000) { + Length++; + } + } + rarInsertOldDist(Distance); + rarInsertLastMatch(Length, Distance); + rarCopyString(Length, Distance); + continue; + } + if (num === 256) { + if (!rarReadEndOfBlock(bstream)) { + break; + } + continue; + } + if (num === 257) { + if (!rarReadVMCode(bstream)) { + break; + } + continue; + } + if (num === 258) { + if (lastLength !== 0) { + rarCopyString(lastLength, lastDist); + } + continue; + } + if (num < 263) { + var DistNum = num - 259; + Distance = rOldDist[DistNum]; + + for (var I2 = DistNum; I2 > 0; I2--) { + rOldDist[I2] = rOldDist[I2 - 1]; + } + rOldDist[0] = Distance; + + var LengthNumber = rarDecodeNumber(bstream, RD); + Length = rLDecode[LengthNumber] + 2; + if ((Bits = rLBits[LengthNumber]) > 0) { + Length += bstream.readBits(Bits); + } + rarInsertLastMatch(Length, Distance); + rarCopyString(Length, Distance); + continue; + } + if (num < 272) { + Distance = rSDDecode[num -= 263] + 1; + if ((Bits = rSDBits[num]) > 0) { + Distance += bstream.readBits(Bits); + } + rarInsertOldDist(Distance); + rarInsertLastMatch(2, Distance); + rarCopyString(2, Distance); + continue; + } + } // while (true) + rarUpdateProgress(); + rarWriteBuf(); +} + +/** + * Does stuff to the current byte buffer (rBuffer) based on + * the filters loaded into the RarVM and writes out to wBuffer. 
+ */ +function rarWriteBuf() { + var writeSize = (rBuffer.ptr & MAXWINMASK); + var j; + var flt; + for (var i = 0; i < PrgStack.length; ++i) { + flt = PrgStack[i]; + if (flt === null) { + continue; + } + + if (flt.NextWindow) { + flt.NextWindow = false; + continue; + } + + var blockStart = flt.BlockStart; + var blockLength = flt.BlockLength; + var parentPrg; + + // WrittenBorder = wBuffer.ptr + if (((blockStart - wBuffer.ptr) & MAXWINMASK) < writeSize) { + if (wBuffer.ptr !== blockStart) { + // Copy blockStart bytes from rBuffer into wBuffer. + rarWriteArea(wBuffer.ptr, blockStart); + writeSize = (rBuffer.ptr - wBuffer.ptr) & MAXWINMASK; + } + if (blockLength <= writeSize) { + var blockEnd = (blockStart + blockLength) & MAXWINMASK; + if (blockStart < blockEnd || blockEnd === 0) { + VM.setMemory(0, rBuffer.data.subarray(blockStart, blockStart + blockLength), blockLength); + } else { + var firstPartLength = MAXWINSIZE - blockStart; + VM.setMemory(0, rBuffer.data.subarray(blockStart, blockStart + firstPartLength), firstPartLength); + VM.setMemory(firstPartLength, rBuffer.data, blockEnd); + } + + parentPrg = Filters[flt.ParentFilter].Prg; + var prg = flt.Prg; + + if (parentPrg.GlobalData.length > VM_FIXEDGLOBALSIZE) { + // Copy global data from previous script execution if any. + prg.GlobalData = new Uint8Array(parentPrg.GlobalData); + } + + rarExecuteCode(prg); + var globalDataLen; + + if (prg.GlobalData.length > VM_FIXEDGLOBALSIZE) { + // Save global data for next script execution. + globalDataLen = prg.GlobalData.length; + if (parentPrg.GlobalData.length < globalDataLen) { + parentPrg.GlobalData = new Uint8Array(globalDataLen); + } + parentPrg.GlobalData.set( + this.mem_.subarray(VM_FIXEDGLOBALSIZE, VM_FIXEDGLOBALSIZE + globalDataLen), + VM_FIXEDGLOBALSIZE); + } else { + parentPrg.GlobalData = new Uint8Array(0); + } + + var filteredData = prg.FilteredData; + + PrgStack[i] = null; + while (i + 1 < PrgStack.length) { + var nextFilter = PrgStack[i + 1]; + if (nextFilter === null || nextFilter.BlockStart !== blockStart || + nextFilter.BlockLength !== filteredData.length || nextFilter.NextWindow) { + break; + } + + // Apply several filters to same data block. + + VM.setMemory(0, filteredData, filteredData.length); + + parentPrg = Filters[nextFilter.ParentFilter].Prg; + var nextPrg = nextFilter.Prg; + + globalDataLen = parentPrg.GlobalData.length; + if (globalDataLen > VM_FIXEDGLOBALSIZE) { + // Copy global data from previous script execution if any. + nextPrg.GlobalData = new Uint8Array(globalDataLen); + nextPrg.GlobalData.set(parentPrg.GlobalData.subarray(VM_FIXEDGLOBALSIZE, VM_FIXEDGLOBALSIZE + globalDataLen), VM_FIXEDGLOBALSIZE); + } + + rarExecuteCode(nextPrg); + + if (nextPrg.GlobalData.length > VM_GLOBALMEMSIZE) { + // Save global data for next script execution. 
+ globalDataLen = nextPrg.GlobalData.length; + if (parentPrg.GlobalData.length < globalDataLen) { + parentPrg.GlobalData = new Uint8Array(globalDataLen); + } + parentPrg.GlobalData.set( + this.mem_.subarray(VM_FIXEDGLOBALSIZE, VM_FIXEDGLOBALSIZE + globalDataLen), + VM_FIXEDGLOBALSIZE); + } else { + parentPrg.GlobalData = new Uint8Array(0); + } + + filteredData = nextPrg.FilteredData; + i++; + PrgStack[i] = null; + } // while (i + 1 < PrgStack.length) + + for (j = 0; j < filteredData.length; ++j) { + wBuffer.insertByte(filteredData[j]); + } + writeSize = (rBuffer.ptr - wBuffer.ptr) & MAXWINMASK; + } else { // if (blockLength <= writeSize) + for (j = i; j < PrgStack.length; ++j) { + flt = PrgStack[j]; + if (flt !== null && flt.NextWindow) { + flt.NextWindow = false; + } + } + //WrPtr=WrittenBorder; + return; + } + } // if (((blockStart - wBuffer.ptr) & MAXWINMASK) < writeSize) + } // for (var i = 0; i < PrgStack.length; ++i) + + // Write any remaining bytes from rBuffer to wBuffer; + rarWriteArea(wBuffer.ptr, rBuffer.ptr); + + // Now that the filtered buffer has been written, swap it back to rBuffer. + rBuffer = wBuffer; +} + +/** + * Copy bytes from rBuffer to wBuffer. + * @param {number} startPtr The starting point to copy from rBuffer. + * @param {number} endPtr The ending point to copy from rBuffer. + */ +function rarWriteArea(startPtr, endPtr) { + if (endPtr < startPtr) { + console.error("endPtr < startPtr, endPtr=" + endPtr + ", startPtr=" + startPtr); + // rarWriteData(startPtr, -(int)StartPtr & MAXWINMASK); + // RarWriteData(0, endPtr); + return; + } else if (startPtr < endPtr) { + rarWriteData(startPtr, endPtr - startPtr); + } +} + +/** + * Writes bytes into wBuffer from rBuffer. + * @param {number} offset The starting point to copy bytes from rBuffer. + * @param {number} numBytes The number of bytes to copy. + */ +function rarWriteData(offset, numBytes) { + if (wBuffer.ptr >= rBuffer.data.length) { + return; + } + var leftToWrite = rBuffer.data.length - wBuffer.ptr; + if (numBytes > leftToWrite) { + numBytes = leftToWrite; + } + for (var i = 0; i < numBytes; ++i) { + wBuffer.insertByte(rBuffer.data[offset + i]); + } +} + +/** + * @param {VM_PreparedProgram} prg + */ +function rarExecuteCode(prg) { + if (prg.GlobalData.length > 0) { + var writtenFileSize = wBuffer.ptr; + prg.InitR[6] = writtenFileSize; + VM.setLowEndianValue(prg.GlobalData, writtenFileSize, 0x24); + VM.setLowEndianValue(prg.GlobalData, (writtenFileSize >>> 32) >> 0, 0x28); + VM.execute(prg); + } +} + +function rarReadEndOfBlock(bstream) { + rarUpdateProgress(); + + var NewTable = false, + NewFile = false; + if (bstream.readBits(1)) { + NewTable = true; + } else { + NewFile = true; + NewTable = !!bstream.readBits(1); + } + //tablesRead = !NewTable; + return !(NewFile || (NewTable && !rarReadTables(bstream))); +} + +function rarInsertLastMatch(length, distance) { + lastDist = distance; + lastLength = length; +} + +function rarInsertOldDist(distance) { + rOldDist.splice(3, 1); + rOldDist.splice(0, 0, distance); +} + +/** + * Copies len bytes from distance bytes ago in the buffer to the end of the + * current byte buffer. + * @param {number} length How many bytes to copy. + * @param {number} distance How far back in the buffer from the current write + * pointer to start copying from. 
+ */ +function rarCopyString(length, distance) { + var srcPtr = rBuffer.ptr - distance; + if (srcPtr < 0) { + var l = rOldBuffers.length; + while (srcPtr < 0) { + srcPtr = rOldBuffers[--l].data.length + srcPtr; + } + // TODO: lets hope that it never needs to read beyond file boundaries + while (length--) { + rBuffer.insertByte(rOldBuffers[l].data[srcPtr++]); + } + } + if (length > distance) { + while (length--) { + rBuffer.insertByte(rBuffer.data[srcPtr++]); + } + } else { + rBuffer.insertBytes(rBuffer.data.subarray(srcPtr, srcPtr + length)); + } +} + +/** + * @param {RarLocalFile} v + */ +function unpack(v) { + // TODO: implement what happens when unpVer is < 15 + var Ver = v.header.unpVer <= 15 ? 15 : v.header.unpVer; + // var Solid = v.header.LHD_SOLID; + var bstream = new bitjs.io.BitStream(v.fileData.buffer, true /* rtl */, v.fileData.byteOffset, v.fileData.byteLength); + + rBuffer = new bitjs.io.ByteBuffer(v.header.unpackedSize); + + info("Unpacking " + v.filename + " RAR v" + Ver); + + switch (Ver) { + case 15: // rar 1.5 compression + unpack15(); //(bstream, Solid); + break; + case 20: // rar 2.x compression + case 26: // files larger than 2GB + unpack20(bstream); //, Solid); + break; + case 29: // rar 3.x compression + case 36: // alternative hash + wBuffer = new bitjs.io.ByteBuffer(rBuffer.data.length); + unpack29(bstream); + break; + } // switch(method) + + rOldBuffers.push(rBuffer); + // TODO: clear these old buffers when there's over 4MB of history + return rBuffer.data; +} + +// bstream is a bit stream +var RarLocalFile = function(bstream) { + this.header = new RarVolumeHeader(bstream); + this.filename = this.header.filename; + + if (this.header.headType !== FILE_HEAD && this.header.headType !== ENDARC_HEAD && this.header.headType !== SERVICE_HEAD) { + this.isValid = false; + info("Error! RAR Volume did not include a FILE_HEAD header "); + } else { + // read in the compressed data + this.fileData = null; + if (this.header.packSize > 0) { + this.fileData = bstream.readBytes(this.header.packSize); + if (this.header.headType === FILE_HEAD) { + this.isValid = true; + } + } + } +}; + +RarLocalFile.prototype.unrar5 = function() { + //if (!this.header.flags.LHD_SPLIT_BEFORE) { + // unstore file + // No compression + if (this.header.method === 0x00) { + info("Unstore " + this.filename); + this.isValid = true; + + currentBytesUnarchivedInFile += this.fileData.length; + currentBytesUnarchived += this.fileData.length; + + // Create a new buffer and copy it over. + var len = this.header.packSize; + var newBuffer = new bitjs.io.ByteBuffer(len); + newBuffer.insertBytes(this.fileData); + this.fileData = newBuffer.data; + } else { + this.isValid = true; + this.fileData = unpack(this); + } + //} +}; + +var unrar5 = function(arrayBuffer) { + currentFilename = ""; + currentFileNumber = 0; + currentBytesUnarchivedInFile = 0; + currentBytesUnarchived = 0; + totalUncompressedBytesInArchive = 0; + totalFilesInArchive = 0; + + // postMessage(new bitjs.archive.UnarchiveStartEvent()); + var bstream = new bitjs.io.BitStream(arrayBuffer, false /* rtl */); + + var header = new RarMainVolumeHeader(bstream); + if (header.crc === 0x6152 && + header.headType === 0x72 && + header.flags.value === 0x1A21 && + header.headSize === 7) { + info("Found RAR signature"); + + var mhead = new RarVolumeHeader(bstream); + if (mhead.headType !== MAIN_HEAD) { + info("Error! 
RAR did not include a MAIN_HEAD header"); + } else { + var localFiles = []; + var localFile = null; + do { + try { + localFile = new RarLocalFile(bstream); + info("RAR localFile isValid=" + localFile.isValid + ", volume packSize=" + localFile.header.packSize); + if (localFile && localFile.isValid && localFile.header.packSize > 0) { + totalUncompressedBytesInArchive += localFile.header.unpackedSize; + localFiles.push(localFile); + } else if (localFile.header.packSize === 0 && localFile.header.unpackedSize === 0) { + localFile.isValid = true; + } + } catch (err) { + break; + } + //info("bstream" + bstream.bytePtr+"/"+bstream.bytes.length); + } while (localFile.isValid); + totalFilesInArchive = localFiles.length; + + // now we have all information but things are unpacked + localFiles.sort(alphanumCase); + + info(localFiles.map(function(a) { + return a.filename; + }).join(", ")); + for (var i = 0; i < localFiles.length; ++i) { + var localfile = localFiles[i]; + + // update progress + currentFilename = localfile.header.filename; + currentBytesUnarchivedInFile = 0; + + // actually do the unzipping + localfile.unrar5(); + + if (localfile.isValid) { + postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile)); + postProgress(); + } + } + + postProgress(); + } + } else { + err("Invalid RAR file"); + } + // postMessage(new bitjs.archive.UnarchiveFinishEvent()); +}; + +// event.data.file has the ArrayBuffer. +onmessage = function(event) { + var ab = event.data.file; + unrar5(ab, true); +}; diff --git a/cps/static/js/kthoom.js b/cps/static/js/kthoom.js index 33a2ac0e..bbb3fead 100644 --- a/cps/static/js/kthoom.js +++ b/cps/static/js/kthoom.js @@ -162,10 +162,15 @@ function initProgressClick() { function loadFromArrayBuffer(ab) { var start = (new Date).getTime(); var h = new Uint8Array(ab, 0, 10); + unrar5(ab); var pathToBitJS = "../../static/js/archive/"; var lastCompletion = 0; - if (h[0] === 0x52 && h[1] === 0x61 && h[2] === 0x72 && h[3] === 0x21) { //Rar! - unarchiver = new bitjs.archive.Unrarrer(ab, pathToBitJS); + /*if (h[0] === 0x52 && h[1] === 0x61 && h[2] === 0x72 && h[3] === 0x21) { //Rar! + if (h[7] === 0x01) { + unarchiver = new bitjs.archive.Unrarrer(ab, pathToBitJS); + } else { + unarchiver = new bitjs.archive.Unrarrer5(ab, pathToBitJS); + } } else if (h[0] === 80 && h[1] === 75) { //PK (Zip) unarchiver = new bitjs.archive.Unzipper(ab, pathToBitJS); } else if (h[0] === 255 && h[1] === 216) { // JPEG @@ -229,7 +234,7 @@ function loadFromArrayBuffer(ab) { unarchiver.start(); } else { alert("Some error"); - } + }*/ } function scrollTocToActive() { diff --git a/cps/templates/readcbr.html b/cps/templates/readcbr.html index 14b9752c..29508fbf 100644 --- a/cps/templates/readcbr.html +++ b/cps/templates/readcbr.html @@ -14,8 +14,13 @@ + + + + + + -
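
Note (not part of the patch): the kthoom.js change above peeks at the first header bytes to decide between bitjs.archive.Unrarrer and Unrarrer5, although that block is currently commented out while unrar5() is called directly. As a hedged reference, the sketch below shows how RAR 4.x and RAR 5.0 archives are conventionally distinguished by their leading signature bytes; the helper name detectRarVersion and its use are illustrative assumptions, not code from this repository.

// Sketch only -- assumed helper, not part of the patch.
// RAR 1.5-4.x signature: 52 61 72 21 1A 07 00      ("Rar!\x1A\x07\x00")
// RAR 5.0   signature:   52 61 72 21 1A 07 01 00   ("Rar!\x1A\x07\x01\x00")
function detectRarVersion(ab) {
    var h = new Uint8Array(ab, 0, 8);
    var isRar = h[0] === 0x52 && h[1] === 0x61 && h[2] === 0x72 &&
                h[3] === 0x21 && h[4] === 0x1A && h[5] === 0x07;
    if (!isRar) {
        return 0;      // not a RAR archive
    }
    if (h[6] === 0x00) {
        return 4;      // legacy (RAR 1.5 - 4.x) archive format
    }
    if (h[6] === 0x01 && h[7] === 0x00) {
        return 5;      // RAR 5.0 archive format
    }
    return 0;          // unknown or future format
}

Under this scheme the caller would pick new bitjs.archive.Unrarrer(ab, pathToBitJS) for version 4 and new bitjs.archive.Unrarrer5(ab, pathToBitJS) for version 5, reporting an error for anything else.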