From 147947662ca606f26a3ae0a05e61e5e5de4fcf11 Mon Sep 17 00:00:00 2001 From: Krakinou Date: Mon, 17 Jun 2019 23:46:38 +0200 Subject: [PATCH 01/38] Base64 --- cps/admin.py | 1 + 1 file changed, 1 insertion(+) diff --git a/cps/admin.py b/cps/admin.py index f6fd838f..0f553035 100644 --- a/cps/admin.py +++ b/cps/admin.py @@ -25,6 +25,7 @@ from __future__ import division, print_function, unicode_literals import os import json import time +import base64 from datetime import datetime, timedelta try: from imp import reload From 3d0beba26172489e68c9cdd9a53ca1d3c6000b0e Mon Sep 17 00:00:00 2001 From: Krakinou Date: Mon, 17 Jun 2019 23:47:35 +0200 Subject: [PATCH 02/38] Base64 --- cps/admin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cps/admin.py b/cps/admin.py index 0f553035..c641f3a0 100644 --- a/cps/admin.py +++ b/cps/admin.py @@ -25,7 +25,7 @@ from __future__ import division, print_function, unicode_literals import os import json import time -import base64 +import base64 from datetime import datetime, timedelta try: from imp import reload From e5b9da5201d7ed1e597e2a8884fbd5f59b0ffb4b Mon Sep 17 00:00:00 2001 From: Krakinou Date: Mon, 1 Jul 2019 21:44:58 +0200 Subject: [PATCH 03/38] Error management --- cps/web.py | 22 +++++++++------------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/cps/web.py b/cps/web.py index 3f8964d4..1ae1aa86 100644 --- a/cps/web.py +++ b/cps/web.py @@ -1098,19 +1098,15 @@ def login(): flash(_(u"you are now logged in as: '%(nickname)s'", nickname=user.nickname), category="success") return redirect_back(url_for("web.index")) - except ldap.ldap.INVALID_CREDENTIALS as e: - log.error('Login Error: ' + str(e)) - ipAdress = request.headers.get('X-Forwarded-For', request.remote_addr) - log.info('LDAP Login failed for user "%s" IP-adress: %s', form['username'], ipAdress) - flash(_(u"Wrong Username or Password"), category="error") - except ldap.ldap.SERVER_DOWN: - log.info('LDAP Login failed, LDAP Server down') - flash(_(u"Could not login. LDAP server down, please contact your administrator"), category="error") - '''except LDAPException as exception: - app.logger.error('Login Error: ' + str(exception)) - ipAdress = request.headers.get('X-Forwarded-For', request.remote_addr) - app.logger.info('LDAP Login failed for user "' + form['username'] + ', IP-address :' + ipAdress) - flash(_(u"Wrong Username or Password"), category="error")''' + except Exception as exception: + app.logger.info('Login Error: ' + str(exception)) + if str(exception) == 'Invalid credentials': + ipAdress = request.headers.get('X-Forwarded-For', request.remote_addr) + app.logger.info('LDAP Login failed for user "' + form['username'] + ', IP-address :' + ipAdress) + flash(_(u"Wrong Username or Password"), category="error") + if str(exception) == 'Server down': + log.info('LDAP Login failed, LDAP Server down') + flash(_(u"Could not login. 
LDAP server down, please contact your administrator"), category="error") else: if user and check_password_hash(user.password, form['password']) and user.nickname is not "Guest": login_user(user, remember=True) From 00a29f3d8840e285cc17e6d929d7616715612110 Mon Sep 17 00:00:00 2001 From: Krakinou Date: Mon, 1 Jul 2019 21:45:35 +0200 Subject: [PATCH 04/38] Check for change before encoding --- cps/admin.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/cps/admin.py b/cps/admin.py index c641f3a0..9c08d2d4 100644 --- a/cps/admin.py +++ b/cps/admin.py @@ -413,12 +413,13 @@ def configuration_helper(origin): goodreads=goodreads_support, title=_(u"Basic Configuration"), page="config") else: - content.config_use_ldap = 1 + content.config_login_type = 1 content.config_ldap_provider_url = to_save["config_ldap_provider_url"] content.config_ldap_port = to_save["config_ldap_port"] content.config_ldap_schema = to_save["config_ldap_schema"] content.config_ldap_serv_username = to_save["config_ldap_serv_username"] - content.config_ldap_serv_password = base64.b64encode(to_save["config_ldap_serv_password"]) + if content.config_ldap_serv_password != to_save["config_ldap_serv_password"]: + content.config_ldap_serv_password = base64.b64encode(to_save["config_ldap_serv_password"]) content.config_ldap_dn = to_save["config_ldap_dn"] content.config_ldap_user_object = to_save["config_ldap_user_object"] reboot_required = True From 9b119fa7243efa46fa7e474eadb1f4d83dc5d089 Mon Sep 17 00:00:00 2001 From: Simon Latapie Date: Sun, 12 Jan 2020 23:23:43 +0100 Subject: [PATCH 05/38] edit book: manage identifiers --- cps/editbooks.py | 37 ++++++++++++++++++++++++++++++++++++ cps/templates/book_edit.html | 30 +++++++++++++++++++++++++++++ 2 files changed, 67 insertions(+) diff --git a/cps/editbooks.py b/cps/editbooks.py index 0bb005fb..261b70ba 100644 --- a/cps/editbooks.py +++ b/cps/editbooks.py @@ -147,6 +147,26 @@ def modify_database_object(input_elements, db_book_object, db_object, db_session db_book_object.append(db_element) +def modify_identifiers(input_identifiers, db_identifiers, db_session): + """Modify Identifiers to match input information. + input_identifiers is a list of read-to-persist Identifiers objects. 
+ db_identifiers is a list of already persisted list of Identifiers objects.""" + input_dict = dict([ (identifier.type.lower(), identifier) for identifier in input_identifiers ]) + db_dict = dict([ (identifier.type.lower(), identifier) for identifier in db_identifiers ]) + # delete db identifiers not present in input or modify them with input val + for identifier_type, identifier in db_dict.items(): + if identifier_type not in input_dict.keys(): + db_session.delete(identifier) + else: + input_identifier = input_dict[identifier_type] + identifier.type = input_identifier.type + identifier.val = input_identifier.val + # add input identifiers not present in db + for identifier_type, identifier in input_dict.items(): + if identifier_type not in db_dict.keys(): + db_session.add(identifier) + + @editbook.route("/delete//", defaults={'book_format': ""}) @editbook.route("/delete///") @login_required @@ -459,6 +479,10 @@ def edit_book(book_id): else: book.comments.append(db.Comments(text=to_save["description"], book=book.id)) + # Handle identifiers + input_identifiers = identifier_list(to_save, book) + modify_identifiers(input_identifiers, book.identifiers, db.session) + # Handle book tags input_tags = to_save["tags"].split(',') input_tags = list(map(lambda it: it.strip(), input_tags)) @@ -548,6 +572,19 @@ def merge_metadata(to_save, meta): to_save["description"] = to_save["description"] or Markup( getattr(meta, 'description', '')).unescape() +def identifier_list(to_save, book): + """Generate a list of Identifiers from form information""" + id_type_prefix = 'identifier-type-' + id_val_prefix = 'identifier-val-' + result = [] + for type_key, type_value in to_save.items(): + if not type_key.startswith(id_type_prefix): + continue + val_key = id_val_prefix + type_key[len(id_type_prefix):] + if val_key not in to_save.keys(): + continue + result.append( db.Identifiers(to_save[val_key], type_value, book.id) ) + return result @editbook.route("/upload", methods=["GET", "POST"]) @login_required_if_no_ano diff --git a/cps/templates/book_edit.html b/cps/templates/book_edit.html index 78b427eb..19ac299b 100644 --- a/cps/templates/book_edit.html +++ b/cps/templates/book_edit.html @@ -61,6 +61,21 @@ + +
+ + + {% for identifier in book.identifiers %} + + + + + + {% endfor %} +
{{_('Remove')}}
+ {{_('Add Identifier')}} +
+
@@ -274,6 +289,21 @@ 'source': {{_('Source')|safe|tojson}}, }; var language = '{{ g.user.locale }}'; + + $("#add-identifier-line").click(function() { + // create a random identifier type to have a valid name in form. This will not be used when dealing with the form + var rand_id = Math.floor(Math.random() * 1000000).toString(); + var line = ''; + line += ''; + line += ''; + line += '{{_('Remove')}}'; + line += ''; + $("#identifier-table").append(line); + }); + function removeIdentifierLine(el) { + $(el).parent().parent().remove(); + } + From e404da4192286eefba0ece168b478b2e1b7bbdb0 Mon Sep 17 00:00:00 2001 From: Michael Shavit Date: Fri, 24 Jan 2020 00:04:16 -0500 Subject: [PATCH 06/38] Add support for book 'deletion' (i.e archiving) from a Kobo device. --- cps/kobo.py | 43 +++++++++++++++++++++++++++++++++++++++---- cps/ub.py | 11 +++++++++++ 2 files changed, 50 insertions(+), 4 deletions(-) diff --git a/cps/kobo.py b/cps/kobo.py index 7e1cbc8e..57ef1ab5 100644 --- a/cps/kobo.py +++ b/cps/kobo.py @@ -21,6 +21,7 @@ import sys import uuid from datetime import datetime from time import gmtime, strftime + try: from urllib import unquote except ImportError: @@ -35,12 +36,12 @@ from flask import ( url_for, redirect, ) -from flask_login import login_required +from flask_login import login_required, current_user from werkzeug.datastructures import Headers from sqlalchemy import func import requests -from . import config, logger, kobo_auth, db, helper +from . import config, logger, kobo_auth, db, helper, ub from .services import SyncToken as SyncToken from .web import download_required @@ -53,6 +54,7 @@ kobo_auth.register_url_value_preprocessor(kobo) log = logger.create() + def get_store_url_for_current_request(): # Programmatically modify the current url to point to the official Kobo store base, sep, request_path_with_auth_token = request.full_path.rpartition("/kobo/") @@ -114,6 +116,14 @@ def HandleSyncRequest(): # in case of external changes (e.g: adding a book through Calibre). db.reconnect_db(config) + archived_books = ( + ub.session.query(ub.ArchivedBook) + .filter(ub.ArchivedBook.user_id == int(current_user.id)) + .filter(ub.ArchivedBook.is_archived == True) + .all() + ) + archived_book_ids = [archived_book.book_id for archived_book in archived_books] + # sqlite gives unexpected results when performing the last_modified comparison without the datetime cast. # It looks like it's treating the db.Books.last_modified field as a string and may fail # the comparison because of the +00:00 suffix. 
@@ -122,6 +132,7 @@ def HandleSyncRequest(): .join(db.Data) .filter(func.datetime(db.Books.last_modified) != sync_token.books_last_modified) .filter(db.Data.format.in_(KOBO_FORMATS)) + .filter(db.Books.id.notin_(archived_book_ids)) .all() ) for book in changed_entries: @@ -342,13 +353,37 @@ def TopLevelEndpoint(): return make_response(jsonify({})) +@kobo.route("/v1/library/", methods=["DELETE"]) +@login_required +def HandleBookDeletionRequest(book_uuid): + log.info("Kobo book deletion request received for book %s" % book_uuid) + book = db.session.query(db.Books).filter(db.Books.uuid == book_uuid).first() + if not book: + log.info(u"Book %s not found in database", book_uuid) + return redirect_or_proxy_request() + + book_id = book.id + archived_book = ( + ub.session.query(ub.ArchivedBook) + .filter(ub.ArchivedBook.book_id == book_id) + .first() + ) + if not archived_book: + archived_book = ub.ArchivedBook(user_id=current_user.id, book_id=book_id) + archived_book.book_id = book_id + archived_book.is_archived = True + ub.session.merge(archived_book) + ub.session.commit() + + return ("", 204) + + # TODO: Implement the following routes -@kobo.route("/v1/library/", methods=["DELETE", "GET"]) @kobo.route("/v1/library//state", methods=["PUT"]) @kobo.route("/v1/library/tags", methods=["POST"]) @kobo.route("/v1/library/tags/", methods=["POST"]) @kobo.route("/v1/library/tags/", methods=["DELETE"]) -def HandleUnimplementedRequest(dummy=None, book_uuid=None, shelf_name=None, tag_id=None): +def HandleUnimplementedRequest(book_uuid=None, shelf_name=None, tag_id=None): return redirect_or_proxy_request() diff --git a/cps/ub.py b/cps/ub.py index 8564ef21..7ebd287c 100644 --- a/cps/ub.py +++ b/cps/ub.py @@ -300,6 +300,15 @@ class Bookmark(Base): format = Column(String(collation='NOCASE')) bookmark_key = Column(String) +# Baseclass representing books that are archived on the user's Kobo device. +class ArchivedBook(Base): + __tablename__ = 'archived_book' + + id = Column(Integer, primary_key=True) + user_id = Column(Integer, ForeignKey('user.id')) + book_id = Column(Integer) + is_archived = Column(Boolean, unique=False) + # Baseclass representing Downloads from calibre-web in app.db class Downloads(Base): @@ -353,6 +362,8 @@ def migrate_Database(session): ReadBook.__table__.create(bind=engine) if not engine.dialect.has_table(engine.connect(), "bookmark"): Bookmark.__table__.create(bind=engine) + if not engine.dialect.has_table(engine.connect(), "archived_book"): + ArchivedBook.__table__.create(bind=engine) if not engine.dialect.has_table(engine.connect(), "registration"): ReadBook.__table__.create(bind=engine) conn = engine.connect() From c0239a659c0426d66599e270bc0fa5cffcfe8b18 Mon Sep 17 00:00:00 2001 From: Michael Shavit Date: Sat, 25 Jan 2020 18:29:17 -0500 Subject: [PATCH 07/38] Add UI support for archived books. Archived books will no longer appear in any book lists or searches, and may only be restored from the Archive view. 
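The magic numbers in this patch follow from the new sidebar flag: cps/constants.py adds SIDEBAR_ARCHIVED = 1 << 15, and the config_default_show default in cps/config_sql.py moves from 6143 to 38911, which is simply the old default with that bit set. A minimal Python sketch of the arithmetic (illustrative only, not part of the diff, using just the constants visible in this patch):

    SIDEBAR_ARCHIVED = 1 << 15        # 32768, new flag added in cps/constants.py

    old_default_show = 6143           # previous config_default_show default
    new_default_show = old_default_show | SIDEBAR_ARCHIVED
    assert new_default_show == 38911  # new default written into cps/config_sql.py

    # ADMIN_USER_SIDEBAR = (SIDEBAR_ARCHIVED << 1) - 1 now covers every sidebar bit 0..15
    assert (SIDEBAR_ARCHIVED << 1) - 1 == 65535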
--- cps/config_sql.py | 2 +- cps/constants.py | 3 +- cps/helper.py | 26 +++++++++++++---- cps/static/js/caliBlur.js | 24 ++++++++++++++++ cps/static/js/details.js | 8 ++++++ cps/templates/detail.html | 8 ++++++ cps/ub.py | 5 +++- cps/web.py | 59 +++++++++++++++++++++++++++++++++++---- 8 files changed, 121 insertions(+), 14 deletions(-) diff --git a/cps/config_sql.py b/cps/config_sql.py index a00b4217..97c05067 100644 --- a/cps/config_sql.py +++ b/cps/config_sql.py @@ -70,7 +70,7 @@ class _Settings(_Base): config_remote_login = Column(Boolean, default=False) config_default_role = Column(SmallInteger, default=0) - config_default_show = Column(SmallInteger, default=6143) + config_default_show = Column(SmallInteger, default=38911) config_columns_to_ignore = Column(String) config_restricted_tags = Column(String, default="") diff --git a/cps/constants.py b/cps/constants.py index a78c31b3..e0d56922 100644 --- a/cps/constants.py +++ b/cps/constants.py @@ -80,9 +80,10 @@ MATURE_CONTENT = 1 << 11 SIDEBAR_PUBLISHER = 1 << 12 SIDEBAR_RATING = 1 << 13 SIDEBAR_FORMAT = 1 << 14 +SIDEBAR_ARCHIVED = 1 << 15 ADMIN_USER_ROLES = sum(r for r in ALL_ROLES.values()) & ~ROLE_EDIT_SHELFS & ~ROLE_ANONYMOUS -ADMIN_USER_SIDEBAR = (SIDEBAR_FORMAT << 1) - 1 +ADMIN_USER_SIDEBAR = (SIDEBAR_ARCHIVED << 1) - 1 UPDATE_STABLE = 0 << 0 AUTO_UPDATE_STABLE = 1 << 0 diff --git a/cps/helper.py b/cps/helper.py index 93ce1b07..a1265f73 100644 --- a/cps/helper.py +++ b/cps/helper.py @@ -683,7 +683,19 @@ def render_task_status(tasklist): # Language and content filters for displaying in the UI -def common_filters(): +def common_filters(allow_show_archived=False): + if not allow_show_archived: + archived_books = ( + ub.session.query(ub.ArchivedBook) + .filter(ub.ArchivedBook.user_id == int(current_user.id)) + .filter(ub.ArchivedBook.is_archived == True) + .all() + ) + archived_book_ids = [archived_book.book_id for archived_book in archived_books] + archived_filter = db.Books.id.notin_(archived_book_ids) + else: + archived_filter = true() + if current_user.filter_language() != "all": lang_filter = db.Books.languages.any(db.Languages.lang_code == current_user.filter_language()) else: @@ -708,7 +720,7 @@ def common_filters(): pos_content_cc_filter = true() neg_content_cc_filter = false() return and_(lang_filter, pos_content_tags_filter, ~neg_content_tags_filter, - pos_content_cc_filter, ~neg_content_cc_filter) + pos_content_cc_filter, ~neg_content_cc_filter, archived_filter) def tags_filters(): @@ -765,15 +777,19 @@ def order_authors(entry): # Fill indexpage with all requested data from database def fill_indexpage(page, database, db_filter, order, *join): + return fill_indexpage_with_archived_books(page, database, db_filter, order, False, *join) + + +def fill_indexpage_with_archived_books(page, database, db_filter, order, allow_show_archived, *join): if current_user.show_detail_random(): - randm = db.session.query(db.Books).filter(common_filters())\ + randm = db.session.query(db.Books).filter(common_filters(allow_show_archived))\ .order_by(func.random()).limit(config.config_random_books) else: randm = false() off = int(int(config.config_books_per_page) * (page - 1)) pagination = Pagination(page, config.config_books_per_page, - len(db.session.query(database).filter(db_filter).filter(common_filters()).all())) - entries = db.session.query(database).join(*join, isouter=True).filter(db_filter).filter(common_filters()).\ + len(db.session.query(database).filter(db_filter).filter(common_filters(allow_show_archived)).all())) + entries = 
db.session.query(database).join(*join, isouter=True).filter(db_filter).filter(common_filters(allow_show_archived)).\ order_by(*order).offset(off).limit(config.config_books_per_page).all() for book in entries: book = order_authors(book) diff --git a/cps/static/js/caliBlur.js b/cps/static/js/caliBlur.js index 1bd5f69c..9313ee94 100644 --- a/cps/static/js/caliBlur.js +++ b/cps/static/js/caliBlur.js @@ -216,6 +216,8 @@ if ( $( 'body.book' ).length > 0 ) { .prependTo( '[aria-label^="Download, send"]' ); $( '#have_read_cb' ) .after( '' ); + $( '#archived_cb' ) + .after( '' ); $( '#shelf-actions' ).prependTo( '[aria-label^="Download, send"]' ); @@ -586,6 +588,20 @@ $( '#have_read_cb:checked' ).attr({ 'data-viewport': '.btn-toolbar' }) .addClass('readunread-btn-tooltip'); + $( '#archived_cb' ).attr({ + 'data-toggle': 'tooltip', + 'title': $( '#archived_cb').attr('data-unchecked'), + 'data-placement': 'bottom', + 'data-viewport': '.btn-toolbar' }) + .addClass('readunread-btn-tooltip'); + + $( '#archived_cb:checked' ).attr({ + 'data-toggle': 'tooltip', + 'title': $( '#archived_cb').attr('data-checked'), + 'data-placement': 'bottom', + 'data-viewport': '.btn-toolbar' }) + .addClass('readunread-btn-tooltip'); + $( 'button#delete' ).attr({ 'data-toggle-two': 'tooltip', 'title': $( 'button#delete' ).text(), //'Delete' @@ -601,6 +617,14 @@ $( '#have_read_cb' ).click(function() { } }); +$( '#archived_cb' ).click(function() { + if ( $( '#archived_cb:checked' ).length > 0 ) { + $( this ).attr('data-original-title', $('#archived_cb').attr('data-checked')); + } else { + $( this).attr('data-original-title', $('#archived_cb').attr('data-unchecked')); + } +}); + $( '.btn-group[aria-label="Edit/Delete book"] a' ).attr({ 'data-toggle': 'tooltip', 'title': $( '#edit_book' ).text(), // 'Edit' diff --git a/cps/static/js/details.js b/cps/static/js/details.js index 491d23bb..395518cb 100644 --- a/cps/static/js/details.js +++ b/cps/static/js/details.js @@ -25,6 +25,14 @@ $("#have_read_cb").on("change", function() { $(this).closest("form").submit(); }); +$(function() { + $("#archived_form").ajaxForm(); +}); + +$("#archived_cb").on("change", function() { + $(this).closest("form").submit(); +}); + (function() { var templates = { add: _.template( diff --git a/cps/templates/detail.html b/cps/templates/detail.html index b76a8afa..214a0738 100644 --- a/cps/templates/detail.html +++ b/cps/templates/detail.html @@ -202,6 +202,14 @@

+

+

+ +
+

{% endif %} diff --git a/cps/ub.py b/cps/ub.py index 7ebd287c..62ba82af 100644 --- a/cps/ub.py +++ b/cps/ub.py @@ -97,10 +97,13 @@ def get_sidebar_config(kwargs=None): sidebar.append({"glyph": "glyphicon-file", "text": _('File formats'), "link": 'web.formats_list', "id": "format", "visibility": constants.SIDEBAR_FORMAT, 'public': True, "page": "format", "show_text": _('Show file formats selection'), "config_show":True}) + sidebar.append( + {"glyph": "glyphicon-trash", "text": _('Archived Books'), "link": 'web.books_list', "id": "archived", + "visibility": constants.SIDEBAR_ARCHIVED, 'public': (not g.user.is_anonymous), "page": "archived", + "show_text": _('Show archived books'), "config_show": True}) return sidebar - class UserBase: @property diff --git a/cps/web.py b/cps/web.py index 5c535e8d..d01b4e1a 100644 --- a/cps/web.py +++ b/cps/web.py @@ -46,10 +46,10 @@ from werkzeug.security import generate_password_hash, check_password_hash from . import constants, config, logger, isoLanguages, services, worker from . import searched_ids, lm, babel, db, ub, config, get_locale, app from .gdriveutils import getFileFromEbooksFolder, do_gdrive_download -from .helper import common_filters, get_search_results, fill_indexpage, speaking_language, check_valid_domain, \ - order_authors, get_typeahead, render_task_status, json_serial, get_cc_columns, \ - get_book_cover, get_download_link, send_mail, generate_random_password, send_registration_mail, \ - check_send_to_kindle, check_read_formats, lcase, tags_filters, reset_password +from .helper import common_filters, get_search_results, fill_indexpage, fill_indexpage_with_archived_books, \ + speaking_language, check_valid_domain, order_authors, get_typeahead, render_task_status, json_serial, \ + get_cc_columns, get_book_cover, get_download_link, send_mail, generate_random_password, \ + send_registration_mail, check_send_to_kindle, check_read_formats, lcase, tags_filters, reset_password from .pagination import Pagination from .redirect import redirect_back @@ -342,6 +342,23 @@ def toggle_read(book_id): return "" +@web.route("/ajax/togglearchived/", methods=['POST']) +@login_required +def toggle_archived(book_id): + archived_book = ub.session.query(ub.ArchivedBook).filter(and_(ub.ArchivedBook.user_id == int(current_user.id), + ub.ArchivedBook.book_id == book_id)).first() + if archived_book: + archived_book.is_archived = not archived_book.is_archived + else: + archived_book = ub.ArchivedBook() + archived_book.user_id = int(current_user.id) + archived_book.book_id = book_id + archived_book.is_archived = True + ub.session.merge(archived_book) + ub.session.commit() + return "" + + ''' @web.route("/ajax/getcomic///") @login_required @@ -537,6 +554,8 @@ def books_list(data, sort, book_id, page): return render_category_books(page, book_id, order) elif data == "language": return render_language_books(page, book_id, order) + elif data == "archived": + return render_archived_books(page, order) else: entries, random, pagination = fill_indexpage(page, db.Books, True, order) return render_title_template('index.html', random=random, entries=entries, pagination=pagination, @@ -1011,6 +1030,26 @@ def render_read_books(page, are_read, as_xml=False, order=None, *args, **kwargs) title=name, page=pagename) +def render_archived_books(page, order): + order = order or [] + archived_books = ( + ub.session.query(ub.ArchivedBook) + .filter(ub.ArchivedBook.user_id == int(current_user.id)) + .filter(ub.ArchivedBook.is_archived == True) + .all() + ) + archived_book_ids = 
[archived_book.book_id for archived_book in archived_books] + + archived_filter = db.Books.id.in_(archived_book_ids) + + entries, random, pagination = fill_indexpage_with_archived_books(page, db.Books, archived_filter, order, + allow_show_archived=True) + + name = _(u'Archived Books') + ' (' + str(len(archived_book_ids)) + ')' + pagename = "archived" + return render_title_template('index.html', random=random, entries=entries, pagination=pagination, + title=name, page=pagename) + # ################################### Download/Send ################################################################## @@ -1423,7 +1462,8 @@ def read_book(book_id, book_format): @web.route("/book/") @login_required_if_no_ano def show_book(book_id): - entries = db.session.query(db.Books).filter(db.Books.id == book_id).filter(common_filters()).first() + entries = db.session.query(db.Books).filter(and_(db.Books.id == book_id, + common_filters(allow_show_archived=True))).first() if entries: for index in range(0, len(entries.languages)): try: @@ -1451,8 +1491,14 @@ def show_book(book_id): log.error("Custom Column No.%d is not existing in calibre database", config.config_read_column) have_read = None + archived_book = ub.session.query(ub.ArchivedBook).\ + filter(and_(ub.ArchivedBook.user_id == int(current_user.id), + ub.ArchivedBook.book_id == book_id)).first() + is_archived = archived_book and archived_book.is_archived + else: have_read = None + is_archived = None entries.tags = sort(entries.tags, key=lambda tag: tag.name) @@ -1468,7 +1514,8 @@ def show_book(book_id): return render_title_template('detail.html', entry=entries, audioentries=audioentries, cc=cc, is_xhr=request.is_xhr, title=entries.title, books_shelfs=book_in_shelfs, - have_read=have_read, kindle_list=kindle_list, reader_list=reader_list, page="book") + have_read=have_read, is_archived=is_archived, kindle_list=kindle_list, + reader_list=reader_list, page="book") else: log.debug(u"Error opening eBook. File does not exist or file is not accessible:") flash(_(u"Error opening eBook. File does not exist or file is not accessible:"), category="error") From 5027aeb3a0ea833d7ca87668f05ebb925bf6e436 Mon Sep 17 00:00:00 2001 From: Michael Shavit Date: Sat, 25 Jan 2020 23:46:50 -0500 Subject: [PATCH 08/38] Fix bug where last_created is incorectly set in the SyncToken. --- cps/kobo.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cps/kobo.py b/cps/kobo.py index 57ef1ab5..f1533580 100644 --- a/cps/kobo.py +++ b/cps/kobo.py @@ -130,7 +130,7 @@ def HandleSyncRequest(): changed_entries = ( db.session.query(db.Books) .join(db.Data) - .filter(func.datetime(db.Books.last_modified) != sync_token.books_last_modified) + .filter(func.datetime(db.Books.last_modified) > sync_token.books_last_modified) .filter(db.Data.format.in_(KOBO_FORMATS)) .filter(db.Books.id.notin_(archived_book_ids)) .all() @@ -149,7 +149,7 @@ def HandleSyncRequest(): new_books_last_modified = max( book.last_modified, sync_token.books_last_modified ) - new_books_last_created = max(book.timestamp, sync_token.books_last_modified) + new_books_last_created = max(book.timestamp, sync_token.books_last_created) sync_token.books_last_created = new_books_last_created sync_token.books_last_modified = new_books_last_modified From 4547c328bc9c3e61def8fd36e901d281fb9d7caa Mon Sep 17 00:00:00 2001 From: Michael Shavit Date: Sat, 25 Jan 2020 23:54:12 -0500 Subject: [PATCH 09/38] Delete/Restore book from Kobo device upon (un)archiving of a book in the web UI. 
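For context on the sync-token change below: the Kobo client echoes back an x-kobo-synctoken header whose payload is base64-encoded JSON (see b64encode_json in SyncToken.py), and this patch adds an archive_last_modified timestamp alongside books_last_modified and books_last_created so archive/restore events since the last sync can be detected. Missing fields fall back to datetime.min, so tokens issued under the older 1-0-0 format still parse. A rough decoding sketch, illustrative only and not part of the patch, assuming the field names shown in cps/services/SyncToken.py (the real class also schema-validates the payload and preserves the raw Kobo store token for proxying):

    import base64
    import json
    from datetime import datetime

    def peek_sync_token(header_value):
        # Illustrative helper: mirrors the field names in the SyncToken data payload.
        data = json.loads(base64.b64decode(header_value))["data"]

        def ts(field):
            try:
                return datetime.utcfromtimestamp(data[field])
            except KeyError:
                return datetime.min  # older tokens lack archive_last_modified

        return {name: ts(name) for name in
                ("books_last_modified", "books_last_created", "archive_last_modified")}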
--- cps/kobo.py | 35 ++++++++++++++++++++++++----------- cps/services/SyncToken.py | 24 +++++++++++++++++------- cps/ub.py | 1 + cps/web.py | 5 ++--- 4 files changed, 44 insertions(+), 21 deletions(-) diff --git a/cps/kobo.py b/cps/kobo.py index f1533580..3c19b66e 100644 --- a/cps/kobo.py +++ b/cps/kobo.py @@ -39,6 +39,7 @@ from flask import ( from flask_login import login_required, current_user from werkzeug.datastructures import Headers from sqlalchemy import func +from sqlalchemy.sql.expression import or_ import requests from . import config, logger, kobo_auth, db, helper, ub @@ -119,10 +120,23 @@ def HandleSyncRequest(): archived_books = ( ub.session.query(ub.ArchivedBook) .filter(ub.ArchivedBook.user_id == int(current_user.id)) - .filter(ub.ArchivedBook.is_archived == True) .all() ) - archived_book_ids = [archived_book.book_id for archived_book in archived_books] + + # We join-in books that have had their Archived bit recently modified in order to either: + # * Restore them to the user's device. + # * Delete them from the user's device. + # (Ideally we would use a join for this logic, however cross-database joins don't look trivial in SqlAlchemy.) + recently_restored_or_archived_books = [] + archived_book_ids = {} + new_archived_last_modified = datetime.min + for archived_book in archived_books: + if archived_book.last_modified > sync_token.archive_last_modified: + recently_restored_or_archived_books.append(archived_book.book_id) + if archived_book.is_archived: + archived_book_ids[archived_book.book_id] = True + new_archived_last_modified = max( + new_archived_last_modified, archived_book.last_modified) # sqlite gives unexpected results when performing the last_modified comparison without the datetime cast. # It looks like it's treating the db.Books.last_modified field as a string and may fail @@ -130,14 +144,14 @@ def HandleSyncRequest(): changed_entries = ( db.session.query(db.Books) .join(db.Data) - .filter(func.datetime(db.Books.last_modified) > sync_token.books_last_modified) + .filter(or_(func.datetime(db.Books.last_modified) > sync_token.books_last_modified, + db.Books.id.in_(recently_restored_or_archived_books))) .filter(db.Data.format.in_(KOBO_FORMATS)) - .filter(db.Books.id.notin_(archived_book_ids)) .all() ) for book in changed_entries: entitlement = { - "BookEntitlement": create_book_entitlement(book), + "BookEntitlement": create_book_entitlement(book, archived=(book.id in archived_book_ids)), "BookMetadata": get_metadata(book), "ReadingState": reading_state(book), } @@ -153,8 +167,7 @@ def HandleSyncRequest(): sync_token.books_last_created = new_books_last_created sync_token.books_last_modified = new_books_last_modified - - # Missing feature: Detect server-side book deletions. + sync_token.archive_last_modified = new_archived_last_modified return generate_sync_response(request, sync_token, entitlements) @@ -216,7 +229,7 @@ def get_download_url_for_book(book, book_format): ) -def create_book_entitlement(book): +def create_book_entitlement(book, archived): book_uuid = book.uuid return { "Accessibility": "Full", @@ -224,10 +237,9 @@ def create_book_entitlement(book): "Created": book.timestamp, "CrossRevisionId": book_uuid, "Id": book_uuid, + "IsRemoved": archived, "IsHiddenFromArchive": False, "IsLocked": False, - # Setting this to true removes from the device. 
- "IsRemoved": False, "LastModified": book.last_modified, "OriginCategory": "Imported", "RevisionId": book_uuid, @@ -370,8 +382,9 @@ def HandleBookDeletionRequest(book_uuid): ) if not archived_book: archived_book = ub.ArchivedBook(user_id=current_user.id, book_id=book_id) - archived_book.book_id = book_id archived_book.is_archived = True + archived_book.last_modified = datetime.utcnow() + ub.session.merge(archived_book) ub.session.commit() diff --git a/cps/services/SyncToken.py b/cps/services/SyncToken.py index 21f16acc..1a9b1843 100644 --- a/cps/services/SyncToken.py +++ b/cps/services/SyncToken.py @@ -42,6 +42,13 @@ def to_epoch_timestamp(datetime_object): return (datetime_object - datetime(1970, 1, 1)).total_seconds() +def get_datetime_from_json(json_object, field_name): + try: + return datetime.utcfromtimestamp(json_object[field_name]) + except KeyError: + return datetime.min + + class SyncToken(): """ The SyncToken is used to persist state accross requests. When serialized over the response headers, the Kobo device will propagate the token onto following requests to the service. @@ -53,7 +60,8 @@ class SyncToken(): """ SYNC_TOKEN_HEADER = "x-kobo-synctoken" - VERSION = "1-0-0" + VERSION = "1-1-0" + LAST_MODIFIED_ADDED_VERSION = "1-1-0" MIN_VERSION = "1-0-0" token_schema = { @@ -68,6 +76,7 @@ class SyncToken(): "raw_kobo_store_token": {"type": "string"}, "books_last_modified": {"type": "string"}, "books_last_created": {"type": "string"}, + "archive_last_modified": {"type": "string"}, }, } @@ -76,10 +85,12 @@ class SyncToken(): raw_kobo_store_token="", books_last_created=datetime.min, books_last_modified=datetime.min, + archive_last_modified=datetime.min, ): self.raw_kobo_store_token = raw_kobo_store_token self.books_last_created = books_last_created self.books_last_modified = books_last_modified + self.archive_last_modified = archive_last_modified @staticmethod def from_headers(headers): @@ -109,12 +120,9 @@ class SyncToken(): raw_kobo_store_token = data_json["raw_kobo_store_token"] try: - books_last_modified = datetime.utcfromtimestamp( - data_json["books_last_modified"] - ) - books_last_created = datetime.utcfromtimestamp( - data_json["books_last_created"] - ) + books_last_modified = get_datetime_from_json(data_json, "books_last_modified") + books_last_created = get_datetime_from_json(data_json, "books_last_created") + archive_last_modified = get_datetime_from_json(data_json, "archive_last_modified") except TypeError: log.error("SyncToken timestamps don't parse to a datetime.") return SyncToken(raw_kobo_store_token=raw_kobo_store_token) @@ -123,6 +131,7 @@ class SyncToken(): raw_kobo_store_token=raw_kobo_store_token, books_last_created=books_last_created, books_last_modified=books_last_modified, + archive_last_modified=archive_last_modified ) def set_kobo_store_header(self, store_headers): @@ -143,6 +152,7 @@ class SyncToken(): "raw_kobo_store_token": self.raw_kobo_store_token, "books_last_modified": to_epoch_timestamp(self.books_last_modified), "books_last_created": to_epoch_timestamp(self.books_last_created), + "archive_last_modified": to_epoch_timestamp(self.archive_last_modified) }, } return b64encode_json(token) diff --git a/cps/ub.py b/cps/ub.py index 62ba82af..c1b92fb6 100644 --- a/cps/ub.py +++ b/cps/ub.py @@ -311,6 +311,7 @@ class ArchivedBook(Base): user_id = Column(Integer, ForeignKey('user.id')) book_id = Column(Integer) is_archived = Column(Boolean, unique=False) + last_modified = Column(DateTime, default=datetime.datetime.utcnow) # Baseclass representing Downloads from 
calibre-web in app.db diff --git a/cps/web.py b/cps/web.py index d01b4e1a..87dfd775 100644 --- a/cps/web.py +++ b/cps/web.py @@ -349,10 +349,9 @@ def toggle_archived(book_id): ub.ArchivedBook.book_id == book_id)).first() if archived_book: archived_book.is_archived = not archived_book.is_archived + archived_book.last_modified = datetime.datetime.utcnow() else: - archived_book = ub.ArchivedBook() - archived_book.user_id = int(current_user.id) - archived_book.book_id = book_id + archived_book = ub.ArchivedBook(user_id=current_user.id, book_id=book_id) archived_book.is_archived = True ub.session.merge(archived_book) ub.session.commit() From dc7aaae235f0bbc3d3552ffed8e9158c685661e4 Mon Sep 17 00:00:00 2001 From: Michael Shavit Date: Sun, 26 Jan 2020 16:01:27 -0500 Subject: [PATCH 10/38] Now that CalibreWeb delete requests are respected, we can forward them to the KoboStore for books that aren't in Calibre. Note: There's still an edge case where a book is removed from Calibre without first being archived, in which case the delete call will fail. --- cps/kobo.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/cps/kobo.py b/cps/kobo.py index d06b924d..ba1e171e 100644 --- a/cps/kobo.py +++ b/cps/kobo.py @@ -77,9 +77,6 @@ def redirect_or_proxy_request(): if config.config_kobo_proxy: if request.method == "GET": return redirect(get_store_url_for_current_request(), 307) - if request.method == "DELETE": - log.info('Delete Book') - return make_response(jsonify({})) else: # The Kobo device turns other request types into GET requests on redirects, so we instead proxy to the Kobo store ourselves. outgoing_headers = Headers(request.headers) From f9dbc6bc78c8d6901cc8ccb1748066ffc7d94ab8 Mon Sep 17 00:00:00 2001 From: Michael Shavit Date: Sun, 26 Jan 2020 16:20:10 -0500 Subject: [PATCH 11/38] Clean-up book from ArchivedBook on hard-delete. This change also adds a warning to the hard-delete prompt that deleted books should first be archived if the Kobo Sync feature is enabled. An alternative would be to keep a permanent record of hard-deleted book. --- cps/editbooks.py | 2 ++ cps/templates/book_edit.html | 12 ++++++++++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/cps/editbooks.py b/cps/editbooks.py index 0bb005fb..cfbae1e6 100644 --- a/cps/editbooks.py +++ b/cps/editbooks.py @@ -159,6 +159,7 @@ def delete_book(book_id, book_format): # delete book from Shelfs, Downloads, Read list ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete() ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete() + ub.session.query(ub.ArchivedBook).filter(ub.ReadBook.book_id == book_id).delete() ub.delete_download(book_id) ub.session.commit() @@ -241,6 +242,7 @@ def render_edit_book(book_id): return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc, title=_(u"edit metadata"), page="editbook", conversion_formats=allowed_conversion_formats, + config=config, source_formats=valid_source_formats) diff --git a/cps/templates/book_edit.html b/cps/templates/book_edit.html index 78b427eb..efe86aed 100644 --- a/cps/templates/book_edit.html +++ b/cps/templates/book_edit.html @@ -185,8 +185,16 @@ {{_('Are you really sure?')}}
Traceback (most recent call last):
-  File "/home/matthias/Entwicklung/calibre-web-test/test/test_logging.py", line 136, in test_logfile_recover
+  File "/home/matthias/Entwicklung/calibre-web-test/test/test_logging.py", line 137, in test_logfile_recover
     self.assertTrue(logpath=="", "logfile config value is not empty after reseting to default")
 AssertionError: False is not true : logfile config value is not empty after reseting to default
@@ -1354,8 +1385,8 @@ AssertionError: False is not true : logfile config value is not empty after rese test_opds_feed.test_opds_feed 20 - 20 - 0 + 19 + 1 0 0 @@ -1509,11 +1540,31 @@ AssertionError: False is not true : logfile config value is not empty after rese - +
test_opds_shelf_access
- PASS + +
+ FAIL +
+ + + + @@ -2205,8 +2256,8 @@ AssertionError: False is not true : logfile config value is not empty after rese Total - 186 - 177 + 192 + 183 2 0 7 @@ -2253,6 +2304,12 @@ AssertionError: False is not true : logfile config value is not empty after rese Basic + + backports-abc + 0.5 + Basic + + Flask 1.1.2 @@ -2283,6 +2340,12 @@ AssertionError: False is not true : logfile config value is not empty after rese Basic + + Jinja2 + 2.11.2 + Basic + + PyPDF2 1.26.0 @@ -2325,12 +2388,24 @@ AssertionError: False is not true : logfile config value is not empty after rese Basic + + Unidecode + 1.1.1 + Basic + + Wand 0.5.9 Basic + + Werkzeug + 1.0.1 + Basic + + lxml 4.5.0 @@ -2339,7 +2414,7 @@ AssertionError: False is not true : logfile config value is not empty after rese Pillow - 7.1.1 + 7.1.2 test_edit_books @@ -2381,7 +2456,7 @@ AssertionError: False is not true : logfile config value is not empty after rese From 456550a943004d9bd8d39748c6e824c5c08b3f8e Mon Sep 17 00:00:00 2001 From: Ozzieisaacs Date: Sun, 26 Apr 2020 11:34:10 +0200 Subject: [PATCH 28/38] Bugfix view shelfs on detiled page bugfix datetime in archive mode --- cps/templates/detail.html | 6 ++++-- cps/web.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/cps/templates/detail.html b/cps/templates/detail.html index d2a15d7b..8315a8f2 100644 --- a/cps/templates/detail.html +++ b/cps/templates/detail.html @@ -234,7 +234,7 @@