diff --git a/cps/admin.py b/cps/admin.py
index cc8a163e..4cf57f2d 100644
--- a/cps/admin.py
+++ b/cps/admin.py
@@ -676,7 +676,7 @@ def edit_user(user_id):
             elif "edit_shelf_role" not in to_save and content.role_edit_shelfs():
                 content.role = content.role - ub.ROLE_EDIT_SHELFS
 
-            val = [int(k[5:]) for k, v in to_save.items() if k.startswith('show')]
+            val = [int(k[5:]) for k, __ in to_save.items() if k.startswith('show')]
             sidebar = ub.get_sidebar_config()
             for element in sidebar:
                 if element['visibility'] in val and not content.check_visibility(element['visibility']):
diff --git a/cps/helper.py b/cps/helper.py
index 63e6dc9d..9eb15ecd 100644
--- a/cps/helper.py
+++ b/cps/helper.py
@@ -310,7 +310,7 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
                 os.renames(path, new_title_path)
             else:
                 app.logger.info("Copying title: " + path + " into existing: " + new_title_path)
-                for dir_name, subdir_list, file_list in os.walk(path):
+                for dir_name, __, file_list in os.walk(path):
                     for file in file_list:
                         os.renames(os.path.join(dir_name, file),
                                    os.path.join(new_title_path + dir_name[len(path):], file))
diff --git a/cps/static/js/unzip.js b/cps/static/js/unzip.js
index 0a067516..bec20ac1 100644
--- a/cps/static/js/unzip.js
+++ b/cps/static/js/unzip.js
@@ -47,7 +47,7 @@ var zDigitalSignatureSignature = 0x05054b50;
 
 // takes a ByteStream and parses out the local file information
 var ZipLocalFile = function(bstream) {
-    if (typeof bstream != typeof {} || !bstream.readNumber || typeof bstream.readNumber != typeof function() {}) {
+    if (typeof bstream !== typeof {} || !bstream.readNumber || typeof bstream.readNumber !== typeof function() {}) {
         return null;
     }
 
@@ -98,7 +98,7 @@ var ZipLocalFile = function(bstream) {
     // "This descriptor exists only if bit 3 of the general purpose bit flag is set"
     // But how do you figure out how big the file data is if you don't know the compressedSize
    // from the header?!?
-    if ((this.generalPurpose & bitjs.BIT[3]) != 0) {
+    if ((this.generalPurpose & bitjs.BIT[3]) !== 0) {
         this.crc32 = bstream.readNumber(4);
         this.compressedSize = bstream.readNumber(4);
         this.uncompressedSize = bstream.readNumber(4);
@@ -109,14 +109,14 @@ ZipLocalFile.prototype.unzip = function() {
 
     // Zip Version 1.0, no compression (store only)
-    if (this.compressionMethod == 0 ) {
+    if (this.compressionMethod === 0 ) {
         info("ZIP v" + this.version + ", store only: " + this.filename + " (" + this.compressedSize + " bytes)");
         currentBytesUnarchivedInFile = this.compressedSize;
         currentBytesUnarchived += this.compressedSize;
         this.fileData = zeroCompression(this.fileData, this.uncompressedSize);
     }
     // version == 20, compression method == 8 (DEFLATE)
-    else if (this.compressionMethod == 8) {
+    else if (this.compressionMethod === 8) {
         info("ZIP v2.0, DEFLATE: " + this.filename + " (" + this.compressedSize + " bytes)");
         this.fileData = inflate(this.fileData, this.uncompressedSize);
     }
 
@@ -143,10 +143,10 @@ var unzip = function(arrayBuffer) {
     var bstream = new bitjs.io.ByteStream(arrayBuffer);
 
     // detect local file header signature or return null
-    if (bstream.peekNumber(4) == zLocalFileHeaderSignature) {
+    if (bstream.peekNumber(4) === zLocalFileHeaderSignature) {
         var localFiles = [];
         // loop until we don't see any more local files
-        while (bstream.peekNumber(4) == zLocalFileHeaderSignature) {
+        while (bstream.peekNumber(4) === zLocalFileHeaderSignature) {
             var oneLocalFile = new ZipLocalFile(bstream);
             // this should strip out directories/folders
             if (oneLocalFile && oneLocalFile.uncompressedSize > 0 && oneLocalFile.fileData) {
@@ -164,7 +164,7 @@ var unzip = function(arrayBuffer) {
         });
 
         // archive extra data record
-        if (bstream.peekNumber(4) == zArchiveExtraDataSignature) {
+        if (bstream.peekNumber(4) === zArchiveExtraDataSignature) {
             info(" Found an Archive Extra Data Signature");
 
             // skipping this record for now
@@ -175,7 +175,7 @@ var unzip = function(arrayBuffer) {
 
         // central directory structure
         // TODO: handle the rest of the structures (Zip64 stuff)
-        if (bstream.peekNumber(4) == zCentralFileHeaderSignature) {
+        if (bstream.peekNumber(4) === zCentralFileHeaderSignature) {
             info(" Found a Central File Header");
 
             // read all file headers
@@ -205,7 +205,7 @@ var unzip = function(arrayBuffer) {
             }
 
             // digital signature
-            if (bstream.peekNumber(4) == zDigitalSignatureSignature) {
+            if (bstream.peekNumber(4) === zDigitalSignatureSignature) {
                 info(" Found a Digital Signature");
 
                 bstream.readNumber(4);
@@ -230,7 +230,7 @@ var unzip = function(arrayBuffer) {
 
             // actually do the unzipping
             localfile.unzip();
-            if (localfile.fileData != null) {
+            if (localfile.fileData !== null) {
                 postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile));
                 postProgress();
             }
@@ -245,7 +245,7 @@ var unzip = function(arrayBuffer) {
 // containing {length: 6, symbol: X}
 function getHuffmanCodes(bitLengths) {
     // ensure bitLengths is an array containing at least one element
-    if (typeof bitLengths != typeof [] || bitLengths.length < 1) {
+    if (typeof bitLengths !== typeof [] || bitLengths.length < 1) {
         err("Error! getHuffmanCodes() called with an invalid array");
         return null;
     }
@@ -259,7 +259,7 @@ function getHuffmanCodes(bitLengths) {
     for (var i = 0; i < numLengths; ++i) {
         var length = bitLengths[i];
         // test to ensure each bit length is a positive, non-zero number
-        if (typeof length != typeof 1 || length < 0) {
+        if (typeof length !== typeof 1 || length < 0) {
             err("bitLengths contained an invalid number in getHuffmanCodes(): " + length + " of type " + (typeof length));
             return null;
         }
@@ -275,9 +275,9 @@ function getHuffmanCodes(bitLengths) {
     var nextCode = [],
         code = 0;
     for (var bits = 1; bits <= MAX_BITS; ++bits) {
-        var length = bits - 1;
+        var length2 = bits - 1;
         // ensure undefined lengths are zero
-        if (blCount[length] == undefined) blCount[length] = 0;
+        if (blCount[length2] == undefined) blCount[length2] = 0;
         code = (code + blCount[bits - 1]) << 1;
         nextCode [bits] = code;
     }
@@ -286,7 +286,7 @@ function getHuffmanCodes(bitLengths) {
     var table = {}, tableLength = 0;
     for (var n = 0; n < numLengths; ++n) {
         var len = bitLengths[n];
-        if (len != 0) {
+        if (len !== 0) {
             table[nextCode [len]] = { length: len, symbol: n }; //, bitstring: binaryValueToString(nextCode [len],len) };
             tableLength++;
             nextCode [len]++;
@@ -358,7 +358,7 @@ function decodeSymbol(bstream, hcTable) {
         ++len;
 
         // check against Huffman Code table and break if found
-        if (hcTable.hasOwnProperty(code) && hcTable[code].length == len) {
+        if (hcTable.hasOwnProperty(code) && hcTable[code].length === len) {
             break;
         }
 
@@ -457,7 +457,7 @@ function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
         }
         else {
             // end of block reached
-            if (symbol == 256) {
+            if (symbol === 256) {
                 break;
             }
             else {
@@ -485,7 +485,7 @@ function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
                     buffer.insertByte(data[ch++]);
                 }
             } else {
-                buffer.insertBytes(buffer.data.subarray(ch, ch + length));
+                buffer.insertBytes(buffer.data.subarray(ch, ch + length));
             }
         }
         // length-distance pair
@@ -514,12 +514,13 @@ function inflate(compressedData, numDecompressedBytes) {
         compressedData.byteOffset,
         compressedData.byteLength);
     var buffer = new bitjs.io.ByteBuffer(numDecompressedBytes);
-    var numBlocks = 0, blockSize = 0;
+    var blockSize = 0;
 
     // block format: http://tools.ietf.org/html/rfc1951#page-9
+    var bFinal = 0;
     do {
-        var bFinal = bstream.readBits(1),
-            bType = bstream.readBits(2);
+        bFinal = bstream.readBits(1);
+        var bType = bstream.readBits(2);
         blockSize = 0;
         ++numBlocks;
         // no compression
@@ -528,23 +528,23 @@ function inflate(compressedData, numDecompressedBytes) {
             while (bstream.bitPtr != 0) bstream.readBits(1);
             var len = bstream.readBits(16);
             bstream.readBits(16);
-            // TODO: check if nlen is the ones-complement of len?
+            // TODO: check if nlen is the ones-complement of len?
             if (len > 0) buffer.insertBytes(bstream.readBytes(len));
             blockSize = len;
         }
 
         // fixed Huffman codes
-        else if(bType == 1) {
+        else if (bType == 1) {
             blockSize = inflateBlockData(bstream, getFixedLiteralTable(), getFixedDistanceTable(), buffer);
         }
         // dynamic Huffman codes
-        else if(bType == 2) {
+        else if (bType == 2) {
             var numLiteralLengthCodes = bstream.readBits(5) + 257;
             var numDistanceCodes = bstream.readBits(5) + 1,
                 numCodeLengthCodes = bstream.readBits(4) + 4;
 
             // populate the array of code length codes (first de-compaction)
-            var codeLengthsCodeLengths = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0];
+            var codeLengthsCodeLengths = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
             for (var i = 0; i < numCodeLengthCodes; ++i) {
                 codeLengthsCodeLengths[ CodeLengthCodeOrder[i] ] = bstream.readBits(3);
             }
@@ -576,18 +577,17 @@ function inflate(compressedData, numDecompressedBytes) {
                     literalCodeLengths.push(symbol);
                     prevCodeLength = symbol;
                 }
-                else if (symbol == 16) {
+                else if (symbol === 16) {
                     var repeat = bstream.readBits(2) + 3;
                     while (repeat--) {
                         literalCodeLengths.push(prevCodeLength);
                     }
-                }
-                else if (symbol == 17) {
+                } else if (symbol === 17) {
                     var repeat1 = bstream.readBits(3) + 3;
                     while (repeat1--) {
                         literalCodeLengths.push(0);
                     }
-                } else if (symbol == 18) {
+                } else if (symbol === 18) {
                     var repeat2 = bstream.readBits(7) + 11;
                     while (repeat2--) {
                         literalCodeLengths.push(0);
@@ -613,7 +613,7 @@ function inflate(compressedData, numDecompressedBytes) {
         currentBytesUnarchived += blockSize;
         postProgress();
 
-    } while (bFinal != 1);
+    } while (bFinal !== 1);
     // we are done reading blocks if the bFinal bit was set for this block
 
     // return the buffer data bytes
diff --git a/cps/web.py b/cps/web.py
index b0b7d087..589f93e3 100644
--- a/cps/web.py
+++ b/cps/web.py
@@ -62,11 +62,6 @@ try:
 except ImportError:
     feature_support['ldap'] = False
 
-try:
-    from googleapiclient.errors import HttpErrort
-except ImportError:
-    pass
-
 try:
     from goodreads.client import GoodreadsClient
     feature_support['goodreads'] = True
@@ -540,39 +535,6 @@ def books_list(data,sort, page):
                                  title=_(u"Books"), page="newest")
 
 
-'''
-@web.route("/hot", defaults={'page': 1})
-@web.route('/hot/page/<int:page>')
-@login_required_if_no_ano
-def hot_books(page):
-
-
-@web.route("/rated", defaults={'page': 1})
-@web.route('/rated/page/<int:page>')
-@login_required_if_no_ano
-def best_rated_books(page):
-    if current_user.check_visibility(ub.SIDEBAR_BEST_RATED):
-        entries, random, pagination = fill_indexpage(page, db.Books, db.Books.ratings.any(db.Ratings.rating > 9),
-                                                     [db.Books.timestamp.desc()])
-        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
-                                     title=_(u"Best rated books"), page="rated")
-    else:
-        abort(404)
-
-
-@web.route("/discover", defaults={'page': 1})
-@web.route('/discover/page/<int:page>')
-@login_required_if_no_ano
-def discover(page):
-    if current_user.check_visibility(ub.SIDEBAR_RANDOM):
-        entries, __, pagination = fill_indexpage(page, db.Books, True, [func.randomblob(2)])
-        pagination = Pagination(1, config.config_books_per_page, config.config_books_per_page)
-        return render_title_template('discover.html', entries=entries, pagination=pagination,
-                                     title=_(u"Random Books"), page="discover")
-    else:
-        abort(404)'''
-
-
 @web.route("/author")
 @login_required_if_no_ano
 def author_list():
@@ -843,16 +805,6 @@ def search():
 def advanced_search():
     # Build custom columns names
     cc = helper.get_cc_columns()
-    '''tmpcc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
-    if config.config_columns_to_ignore:
-        cc = []
-        for col in tmpcc:
-            r = re.compile(config.config_columns_to_ignore)
-            if r.match(col.label):
-                cc.append(col)
-    else:
-        cc = tmpcc'''
-
     db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
 
     q = db.session.query(db.Books)
@@ -988,20 +940,6 @@ def advanced_search():
                                  series=series, title=_(u"search"), cc=cc, page="advsearch")
 
 
-'''@web.route("/unreadbooks/", defaults={'page': 1})
-@web.route("/unreadbooks/<int:page>'")
-@login_required_if_no_ano
-def unread_books(page):
-    return render_read_books(page, False)
-
-
-@web.route("/readbooks/", defaults={'page': 1})
-@web.route("/readbooks/<int:page>'")
-@login_required_if_no_ano
-def read_books(page):
-    return render_read_books(page, True)'''
-
-
 def render_read_books(page, are_read, as_xml=False, order=[]):
     if not config.config_read_column:
         readBooks = ub.session.query(ub.ReadBook).filter(ub.ReadBook.user_id == int(current_user.id))\
@@ -1426,16 +1364,6 @@ def show_book(book_id):
             entries.languages[index].language_name = _(
                 isoLanguages.get(part3=entries.languages[index].lang_code).name)
         cc = helper.get_cc_columns()
-        '''tmpcc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
-
-        if config.config_columns_to_ignore:
-            cc = []
-            for col in tmpcc:
-                r = re.compile(config.config_columns_to_ignore)
-                if r.match(col.label):
-                    cc.append(col)
-        else:
-            cc = tmpcc'''
         book_in_shelfs = []
         shelfs = ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).all()
         for entry in shelfs:
diff --git a/cps/worker.py b/cps/worker.py
index 77df162c..94f6a735 100644
--- a/cps/worker.py
+++ b/cps/worker.py
@@ -533,5 +533,4 @@ class StderrLogger(object):
             else:
                 self.buffer += message
         except:
-            pass
-
+            self.logger.debug("Logging Error")