Merge branch 'Develop'

# Conflicts:
#	cps/__init__.py

Branch: pull/1350/head
Author: Ozzieisaacs (5 years ago)
Commit: 8646f8f23a

@@ -33,7 +33,7 @@ from flask_login import LoginManager
from flask_babel import Babel
from flask_principal import Principal
from . import logger, cache_buster, cli, config_sql, ub, db, services
from . import config_sql, logger, cache_buster, cli, ub, db
from .reverseproxy import ReverseProxied
from .server import WebServer
try:
@@ -65,7 +65,6 @@ lm = LoginManager()
lm.login_view = 'web.login'
lm.anonymous_user = ub.Anonymous
ub.init_db(cli.settingspath)
# pylint: disable=no-member
config = config_sql.load_configuration(ub.session)
@@ -78,11 +77,12 @@ _BABEL_TRANSLATIONS = set()
log = logger.create()
from . import services
def create_app():
try:
app.wsgi_app = ReverseProxied(ProxyFix(app.wsgi_app, x_for=1, x_host=1))
except ValueError:
except (ValueError, TypeError):
app.wsgi_app = ReverseProxied(ProxyFix(app.wsgi_app))
# For python2 convert path to unicode
if sys.version_info < (3, 0):

@@ -817,6 +817,9 @@ def update_mailsettings():
@admin_required
def edit_user(user_id):
content = ub.session.query(ub.User).filter(ub.User.id == int(user_id)).first() # type: ub.User
if not content:
flash(_(u"User not found"), category="error")
return redirect(url_for('admin.admin'))
downloads = list()
languages = speaking_language()
translations = babel.list_translations() + [LC('en')]
@@ -933,8 +936,6 @@ def edit_user(user_id):
@login_required
@admin_required
def reset_user_password(user_id):
if not config.config_public_reg:
abort(404)
if current_user is not None and current_user.is_authenticated:
ret, message = reset_password(user_id)
if ret == 1:

@@ -34,7 +34,7 @@ def version_info():
parser = argparse.ArgumentParser(description='Calibre Web is a web app'
' providing an interface for browsing, reading and downloading eBooks\n', prog='cps.py')
' providing an interface for browsing, reading and downloading eBooks\n', prog='cps.py')
parser.add_argument('-p', metavar='path', help='path and name to settings db, e.g. /opt/cw.db')
parser.add_argument('-g', metavar='path', help='path and name to gdrive db, e.g. /opt/gd.db')
parser.add_argument('-c', metavar='path',

@@ -37,8 +37,6 @@ _Base = declarative_base()
class _Settings(_Base):
__tablename__ = 'settings'
# config_is_initial = Column(Boolean, default=True)
id = Column(Integer, primary_key=True)
mail_server = Column(String, default=constants.DEFAULT_MAIL_SERVER)
mail_port = Column(Integer, default=25)

@@ -80,9 +80,10 @@ MATURE_CONTENT = 1 << 11
SIDEBAR_PUBLISHER = 1 << 12
SIDEBAR_RATING = 1 << 13
SIDEBAR_FORMAT = 1 << 14
SIDEBAR_ARCHIVED = 1 << 15
ADMIN_USER_ROLES = sum(r for r in ALL_ROLES.values()) & ~ROLE_EDIT_SHELFS & ~ROLE_ANONYMOUS
ADMIN_USER_SIDEBAR = (SIDEBAR_FORMAT << 1) - 1
ADMIN_USER_ROLES = sum(r for r in ALL_ROLES.values()) & ~ROLE_ANONYMOUS
ADMIN_USER_SIDEBAR = (SIDEBAR_ARCHIVED << 1) - 1
UPDATE_STABLE = 0 << 0
AUTO_UPDATE_STABLE = 1 << 0
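
(Editorial aside, for illustration only; not part of this commit.) The sidebar defaults are plain bitmasks, so widening the mask from SIDEBAR_FORMAT to the new SIDEBAR_ARCHIVED flag simply enables the archived view by default, and dropping "& ~ROLE_EDIT_SHELFS" likewise grants the admin default the shelf-edit role. A minimal sketch of the mask arithmetic:

# Sketch of the bitmask math behind the constants above (not from the diff).
SIDEBAR_FORMAT = 1 << 14
SIDEBAR_ARCHIVED = 1 << 15
old_default = (SIDEBAR_FORMAT << 1) - 1      # 0x7fff: every sidebar bit up to FORMAT
new_default = (SIDEBAR_ARCHIVED << 1) - 1    # 0xffff: every sidebar bit up to ARCHIVED
assert new_default == old_default | SIDEBAR_ARCHIVED
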
@@ -112,7 +113,7 @@ del env_CALIBRE_PORT
EXTENSIONS_AUDIO = {'mp3', 'm4a', 'm4b'}
EXTENSIONS_CONVERT = {'pdf', 'epub', 'mobi', 'azw3', 'docx', 'rtf', 'fb2', 'lit', 'lrf', 'txt', 'htmlz', 'rtf', 'odt'}
EXTENSIONS_UPLOAD = {'txt', 'pdf', 'epub', 'mobi', 'azw', 'azw3', 'cbr', 'cbz', 'cbt', 'djvu', 'prc', 'doc', 'docx',
'fb2', 'html', 'rtf', 'odt', 'mp3', 'm4a', 'm4b'}
'fb2', 'html', 'rtf', 'lit', 'odt', 'mp3', 'm4a', 'm4b'}
# EXTENSIONS_READER = set(['txt', 'pdf', 'epub', 'zip', 'cbz', 'tar', 'cbt'] +
# (['rar','cbr'] if feature_support['rar'] else []))

@@ -25,7 +25,7 @@ import ast
from sqlalchemy import create_engine
from sqlalchemy import Table, Column, ForeignKey
from sqlalchemy import String, Integer, Boolean, TIMESTAMP, Float
from sqlalchemy import String, Integer, Boolean, TIMESTAMP, Float, DateTime
from sqlalchemy.orm import relationship, sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base

@@ -22,7 +22,7 @@
from __future__ import division, print_function, unicode_literals
import os
import datetime
from datetime import datetime
import json
from shutil import move, copyfile
from uuid import uuid4
@@ -47,7 +47,7 @@ def modify_database_object(input_elements, db_book_object, db_object, db_session
# passing input_elements not as a list may lead to undesired results
if not isinstance(input_elements, list):
raise TypeError(str(input_elements) + " should be passed as a list")
changed = False
input_elements = [x for x in input_elements if x != '']
# we have all input element (authors, series, tags) names now
# 1. search for elements to remove
@@ -88,6 +88,7 @@ def modify_database_object(input_elements, db_book_object, db_object, db_session
if len(del_elements) > 0:
for del_element in del_elements:
db_book_object.remove(del_element)
changed = True
if len(del_element.books) == 0:
db_session.delete(del_element)
# if there are elements to add, we add them now!
@@ -114,37 +115,58 @@ def modify_database_object(input_elements, db_book_object, db_object, db_session
else: # db_type should be tag or language
new_element = db_object(add_element)
if db_element is None:
changed = True
db_session.add(new_element)
db_book_object.append(new_element)
else:
if db_type == 'custom':
if db_element.value != add_element:
new_element.value = add_element
# new_element = db_element
elif db_type == 'languages':
if db_element.lang_code != add_element:
db_element.lang_code = add_element
# new_element = db_element
elif db_type == 'series':
if db_element.name != add_element:
db_element.name = add_element # = add_element # new_element = db_object(add_element, add_element)
db_element.name = add_element
db_element.sort = add_element
# new_element = db_element
elif db_type == 'author':
if db_element.name != add_element:
db_element.name = add_element
db_element.sort = add_element.replace('|', ',')
# new_element = db_element
elif db_type == 'publisher':
if db_element.name != add_element:
db_element.name = add_element
db_element.sort = None
# new_element = db_element
elif db_element.name != add_element:
db_element.name = add_element
# new_element = db_element
# add element to book
changed = True
db_book_object.append(db_element)
return changed
def modify_identifiers(input_identifiers, db_identifiers, db_session):
"""Modify Identifiers to match input information.
input_identifiers is a list of ready-to-persist Identifiers objects.
db_identifiers is a list of already persisted Identifiers objects."""
changed = False
input_dict = dict([ (identifier.type.lower(), identifier) for identifier in input_identifiers ])
db_dict = dict([ (identifier.type.lower(), identifier) for identifier in db_identifiers ])
# delete db identifiers not present in input or modify them with input val
for identifier_type, identifier in db_dict.items():
if identifier_type not in input_dict.keys():
db_session.delete(identifier)
changed = True
else:
input_identifier = input_dict[identifier_type]
identifier.type = input_identifier.type
identifier.val = input_identifier.val
# add input identifiers not present in db
for identifier_type, identifier in input_dict.items():
if identifier_type not in db_dict.keys():
db_session.add(identifier)
changed = True
return changed
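
(Editorial aside, for illustration only; not part of this commit.) modify_identifiers matches identifiers case-insensitively by type, updates existing rows in place, deletes rows whose type no longer appears in the form, and reports whether anything changed. A hedged usage sketch with hypothetical stand-ins for db.Identifiers and the SQLAlchemy session:

# Hypothetical stubs for illustration only; the real code passes db.Identifiers rows and db.session.
class StubIdentifier:
    def __init__(self, id_type, val):
        self.type, self.val = id_type, val

class StubSession:
    def delete(self, obj): pass   # would remove the row
    def add(self, obj): pass      # would insert the row

db_ids = [StubIdentifier('ISBN', '111'), StubIdentifier('goodreads', '42')]
form_ids = [StubIdentifier('isbn', '999')]   # same type as 'ISBN', new value
changed = modify_identifiers(form_ids, db_ids, StubSession())
# -> the ISBN row is updated in place to '999', 'goodreads' is deleted, changed is True
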
@editbook.route("/delete/<int:book_id>/", defaults={'book_format': ""})
@@ -155,7 +177,10 @@ def delete_book(book_id, book_format):
book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
if book:
try:
helper.delete_book(book, config.config_calibre_dir, book_format=book_format.upper())
result, error = helper.delete_book(book, config.config_calibre_dir, book_format=book_format.upper())
if not result:
flash(error, category="error")
return redirect(url_for('editbook.edit_book', book_id=book_id))
if not book_format:
# delete book from Shelfs, Downloads, Read list
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
@@ -177,7 +202,7 @@ def delete_book(book_id, book_format):
cc_string = "custom_column_" + str(c.id)
if not c.is_multiple:
if len(getattr(book, cc_string)) > 0:
if c.datatype == 'bool' or c.datatype == 'int' or c.datatype == 'float':
if c.datatype == 'bool' or c.datatype == 'integer' or c.datatype == 'float':
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
log.debug('remove ' + str(c.id))
@@ -211,8 +236,10 @@ def delete_book(book_id, book_format):
# book not found
log.error('Book with id "%s" could not be deleted: not found', book_id)
if book_format:
flash(_('Book Format Successfully Deleted'), category="success")
return redirect(url_for('editbook.edit_book', book_id=book_id))
else:
flash(_('Book Successfully Deleted'), category="success")
return redirect(url_for('web.index'))
@@ -253,10 +280,57 @@ def render_edit_book(book_id):
return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc,
title=_(u"edit metadata"), page="editbook",
conversion_formats=allowed_conversion_formats,
config=config,
source_formats=valid_source_formats)
def edit_book_ratings(to_save, book):
changed = False
if to_save["rating"].strip():
old_rating = False
if len(book.ratings) > 0:
old_rating = book.ratings[0].rating
ratingx2 = int(float(to_save["rating"]) * 2)
if ratingx2 != old_rating:
changed = True
is_rating = db.session.query(db.Ratings).filter(db.Ratings.rating == ratingx2).first()
if is_rating:
book.ratings.append(is_rating)
else:
new_rating = db.Ratings(rating=ratingx2)
book.ratings.append(new_rating)
if old_rating:
book.ratings.remove(book.ratings[0])
else:
if len(book.ratings) > 0:
book.ratings.remove(book.ratings[0])
changed = True
return changed
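
(Editorial aside, for illustration only; not part of this commit.) Calibre stores ratings as integers on a 0-10 scale, two points per star, which is why the half-star value from the form is doubled before it is compared with the stored rating:

# Illustration of the conversion used in edit_book_ratings above.
form_rating = "3.5"                     # half stars arrive from the UI as e.g. "3.5"
ratingx2 = int(float(form_rating) * 2)  # -> 7, the value kept in the ratings table
assert ratingx2 == 7
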
def edit_book_languages(to_save, book):
input_languages = to_save["languages"].split(',')
unknown_languages = []
input_l = isoLanguages.get_language_codes(get_locale(), input_languages, unknown_languages)
for l in unknown_languages:
log.error('%s is not a valid language', l)
flash(_(u"%(langname)s is not a valid language", langname=l), category="error")
return modify_database_object(list(input_l), book.languages, db.Languages, db.session, 'languages')
def edit_book_publisher(to_save, book):
changed = False
if to_save["publisher"]:
publisher = to_save["publisher"].rstrip().strip()
if len(book.publishers) == 0 or (len(book.publishers) > 0 and publisher != book.publishers[0].name):
changed |= modify_database_object([publisher], book.publishers, db.Publishers, db.session, 'publisher')
elif len(book.publishers):
changed |= modify_database_object([], book.publishers, db.Publishers, db.session, 'publisher')
return changed
def edit_cc_data(book_id, book, to_save):
changed = False
cc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
for c in cc:
cc_string = "custom_column_" + str(c.id)
@@ -276,14 +350,17 @@ def edit_cc_data(book_id, book, to_save):
if cc_db_value is not None:
if to_save[cc_string] is not None:
setattr(getattr(book, cc_string)[0], 'value', to_save[cc_string])
changed = True
else:
del_cc = getattr(book, cc_string)[0]
getattr(book, cc_string).remove(del_cc)
db.session.delete(del_cc)
changed = True
else:
cc_class = db.cc_classes[c.id]
new_cc = cc_class(value=to_save[cc_string], book=book_id)
db.session.add(new_cc)
changed = True
else:
if c.datatype == 'rating':
@@ -295,6 +372,7 @@ def edit_cc_data(book_id, book, to_save):
getattr(book, cc_string).remove(del_cc)
if len(del_cc.books) == 0:
db.session.delete(del_cc)
changed = True
cc_class = db.cc_classes[c.id]
new_cc = db.session.query(cc_class).filter(
cc_class.value == to_save[cc_string].strip()).first()
@@ -302,6 +380,7 @@ def edit_cc_data(book_id, book, to_save):
if new_cc is None:
new_cc = cc_class(value=to_save[cc_string].strip())
db.session.add(new_cc)
changed = True
db.session.flush()
new_cc = db.session.query(cc_class).filter(
cc_class.value == to_save[cc_string].strip()).first()
@@ -314,12 +393,13 @@ def edit_cc_data(book_id, book, to_save):
getattr(book, cc_string).remove(del_cc)
if not del_cc.books or len(del_cc.books) == 0:
db.session.delete(del_cc)
changed = True
else:
input_tags = to_save[cc_string].split(',')
input_tags = list(map(lambda it: it.strip(), input_tags))
modify_database_object(input_tags, getattr(book, cc_string), db.cc_classes[c.id], db.session,
changed |= modify_database_object(input_tags, getattr(book, cc_string), db.cc_classes[c.id], db.session,
'custom')
return cc
return changed
def upload_single_file(request, book, book_id):
# Check and handle Uploaded file
@@ -394,6 +474,7 @@ def upload_cover(request, book):
@login_required_if_no_ano
@edit_required
def edit_book(book_id):
modif_date = False
# Show form
if request.method != 'POST':
return render_edit_book(book_id)
@@ -411,6 +492,7 @@ def edit_book(book_id):
meta = upload_single_file(request, book, book_id)
if upload_cover(request, book) is True:
book.has_cover = 1
modif_date = True
try:
to_save = request.form.to_dict()
merge_metadata(to_save, meta)
@@ -422,6 +504,7 @@ def edit_book(book_id):
to_save["book_title"] = _(u'Unknown')
book.title = to_save["book_title"].rstrip().strip()
edited_books_id = book.id
modif_date = True
# handle author(s)
input_authors = to_save["author_name"].split('&')
@@ -430,7 +513,7 @@ def edit_book(book_id):
if input_authors == ['']:
input_authors = [_(u'Unknown')] # prevent empty Author
modify_database_object(input_authors, book.authors, db.Authors, db.session, 'author')
modif_date |= modify_database_object(input_authors, book.authors, db.Authors, db.session, 'author')
# Search for each author if author is in database, if not, authorname and sorted authorname is generated new
# everything then is assembled for sorted author field in database
@@ -446,7 +529,7 @@ def edit_book(book_id):
if book.author_sort != sort_authors:
edited_books_id = book.id
book.author_sort = sort_authors
modif_date = True
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
@@ -460,75 +543,60 @@ def edit_book(book_id):
result, error = helper.save_cover_from_url(to_save["cover_url"], book.path)
if result is True:
book.has_cover = 1
modif_date = True
else:
flash(error, category="error")
if book.series_index != to_save["series_index"]:
book.series_index = to_save["series_index"]
modif_date = True
# Handle book comments/description
if len(book.comments):
book.comments[0].text = to_save["description"]
if book.comments[0].text != to_save["description"]:
book.comments[0].text = to_save["description"]
modif_date = True
else:
book.comments.append(db.Comments(text=to_save["description"], book=book.id))
if to_save["description"]:
book.comments.append(db.Comments(text=to_save["description"], book=book.id))
modif_date = True
# Handle identifiers
input_identifiers = identifier_list(to_save, book)
modif_date |= modify_identifiers(input_identifiers, book.identifiers, db.session)
# Handle book tags
input_tags = to_save["tags"].split(',')
input_tags = list(map(lambda it: it.strip(), input_tags))
modify_database_object(input_tags, book.tags, db.Tags, db.session, 'tags')
modif_date |= modify_database_object(input_tags, book.tags, db.Tags, db.session, 'tags')
# Handle book series
input_series = [to_save["series"].strip()]
input_series = [x for x in input_series if x != '']
modify_database_object(input_series, book.series, db.Series, db.session, 'series')
modif_date |= modify_database_object(input_series, book.series, db.Series, db.session, 'series')
if to_save["pubdate"]:
try:
book.pubdate = datetime.datetime.strptime(to_save["pubdate"], "%Y-%m-%d")
book.pubdate = datetime.strptime(to_save["pubdate"], "%Y-%m-%d")
except ValueError:
book.pubdate = db.Books.DEFAULT_PUBDATE
else:
book.pubdate = db.Books.DEFAULT_PUBDATE
if to_save["publisher"]:
publisher = to_save["publisher"].rstrip().strip()
if len(book.publishers) == 0 or (len(book.publishers) > 0 and publisher != book.publishers[0].name):
modify_database_object([publisher], book.publishers, db.Publishers, db.session, 'publisher')
elif len(book.publishers):
modify_database_object([], book.publishers, db.Publishers, db.session, 'publisher')
# handle book publisher
modif_date |= edit_book_publisher(to_save, book)
# handle book languages
input_languages = to_save["languages"].split(',')
unknown_languages = []
input_l = isoLanguages.get_language_codes(get_locale(), input_languages, unknown_languages)
for l in unknown_languages:
log.error('%s is not a valid language', l)
flash(_(u"%(langname)s is not a valid language", langname=l), category="error")
modify_database_object(list(input_l), book.languages, db.Languages, db.session, 'languages')
modif_date |= edit_book_languages(to_save, book)
# handle book ratings
if to_save["rating"].strip():
old_rating = False
if len(book.ratings) > 0:
old_rating = book.ratings[0].rating
ratingx2 = int(float(to_save["rating"]) * 2)
if ratingx2 != old_rating:
is_rating = db.session.query(db.Ratings).filter(db.Ratings.rating == ratingx2).first()
if is_rating:
book.ratings.append(is_rating)
else:
new_rating = db.Ratings(rating=ratingx2)
book.ratings.append(new_rating)
if old_rating:
book.ratings.remove(book.ratings[0])
else:
if len(book.ratings) > 0:
book.ratings.remove(book.ratings[0])
modif_date |= edit_book_ratings(to_save, book)
# handle cc data
edit_cc_data(book_id, book, to_save)
modif_date |= edit_cc_data(book_id, book, to_save)
if modif_date:
book.last_modified = datetime.utcnow()
db.session.commit()
if config.config_use_google_drive:
gdriveutils.updateGdriveCalibreFromLocal()
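
(Editorial aside, for illustration only; not part of this commit.) With the import switched from "import datetime" to "from datetime import datetime", the module now calls the class directly, and last_modified is written in UTC to match what Calibre itself stores. A minimal sketch of the new call style:

# Sketch of the new import style (illustration only, values are placeholders).
from datetime import datetime

pubdate = datetime.strptime("2020-02-01", "%Y-%m-%d")  # was: datetime.datetime.strptime(...)
last_modified = datetime.utcnow()                      # was: datetime.datetime.now(); Calibre uses UTC
print(pubdate.date(), last_modified.isoformat())
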
@@ -561,6 +629,19 @@ def merge_metadata(to_save, meta):
to_save["description"] = to_save["description"] or Markup(
getattr(meta, 'description', '')).unescape()
def identifier_list(to_save, book):
"""Generate a list of Identifiers from form information"""
id_type_prefix = 'identifier-type-'
id_val_prefix = 'identifier-val-'
result = []
for type_key, type_value in to_save.items():
if not type_key.startswith(id_type_prefix):
continue
val_key = id_val_prefix + type_key[len(id_type_prefix):]
if val_key not in to_save.keys():
continue
result.append(db.Identifiers(to_save[val_key], type_value, book.id))
return result
@editbook.route("/upload", methods=["GET", "POST"])
@login_required_if_no_ano
@@ -677,8 +758,9 @@ def upload():
# combine path and normalize path from windows systems
path = os.path.join(author_dir, title_dir).replace('\\', '/')
db_book = db.Books(title, "", db_author.sort, datetime.datetime.now(), datetime.datetime(101, 1, 1),
series_index, datetime.datetime.now(), path, has_cover, db_author, [], db_language)
# Calibre adds books with utc as timezone
db_book = db.Books(title, "", db_author.sort, datetime.utcnow(), datetime(101, 1, 1),
series_index, datetime.utcnow(), path, has_cover, db_author, [], db_language)
db_book.authors.append(db_author)
if db_series:
db_book.series.append(db_series)

@@ -96,7 +96,7 @@ def convert_book_format(book_id, calibrepath, old_book_format, new_book_format,
# read settings and append converter task to queue
if kindle_mail:
settings = config.get_mail_settings()
settings['subject'] = _('Send to Kindle') # pretranslate Subject for e-mail
settings['subject'] = _('Send to Kindle') # pretranslate Subject for e-mail
settings['body'] = _(u'This e-mail has been sent via Calibre-Web.')
# text = _(u"%(format)s: %(book)s", format=new_book_format, book=book.title)
else:
@@ -108,7 +108,7 @@ def convert_book_format(book_id, calibrepath, old_book_format, new_book_format,
return None
else:
error_message = _(u"%(format)s not found: %(fn)s",
format=old_book_format, fn=data.name + "." + old_book_format.lower())
format=old_book_format, fn=data.name + "." + old_book_format.lower())
return error_message
@@ -141,34 +141,52 @@ def check_send_to_kindle(entry):
returns all available book formats for sending to Kindle
"""
if len(entry.data):
bookformats=list()
bookformats = list()
if config.config_ebookconverter == 0:
# no converter - only for mobi and pdf formats
for ele in iter(entry.data):
if 'MOBI' in ele.format:
bookformats.append({'format':'Mobi','convert':0,'text':_('Send %(format)s to Kindle',format='Mobi')})
bookformats.append({'format': 'Mobi',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Mobi')})
if 'PDF' in ele.format:
bookformats.append({'format':'Pdf','convert':0,'text':_('Send %(format)s to Kindle',format='Pdf')})
bookformats.append({'format': 'Pdf',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Pdf')})
if 'AZW' in ele.format:
bookformats.append({'format':'Azw','convert':0,'text':_('Send %(format)s to Kindle',format='Azw')})
bookformats.append({'format': 'Azw',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Azw')})
else:
formats = list()
for ele in iter(entry.data):
formats.append(ele.format)
if 'MOBI' in formats:
bookformats.append({'format': 'Mobi','convert':0,'text':_('Send %(format)s to Kindle',format='Mobi')})
bookformats.append({'format': 'Mobi',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Mobi')})
if 'AZW' in formats:
bookformats.append({'format': 'Azw','convert':0,'text':_('Send %(format)s to Kindle',format='Azw')})
bookformats.append({'format': 'Azw',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Azw')})
if 'PDF' in formats:
bookformats.append({'format': 'Pdf','convert':0,'text':_('Send %(format)s to Kindle',format='Pdf')})
bookformats.append({'format': 'Pdf',
'convert': 0,
'text': _('Send %(format)s to Kindle', format='Pdf')})
if config.config_ebookconverter >= 1:
if 'EPUB' in formats and not 'MOBI' in formats:
bookformats.append({'format': 'Mobi','convert':1,
'text':_('Convert %(orig)s to %(format)s and send to Kindle',orig='Epub',format='Mobi')})
bookformats.append({'format': 'Mobi',
'convert':1,
'text': _('Convert %(orig)s to %(format)s and send to Kindle',
orig='Epub',
format='Mobi')})
if config.config_ebookconverter == 2:
if 'AZW3' in formats and not 'MOBI' in formats:
bookformats.append({'format': 'Mobi','convert':2,
'text':_('Convert %(orig)s to %(format)s and send to Kindle',orig='Azw3',format='Mobi')})
bookformats.append({'format': 'Mobi',
'convert': 2,
'text': _('Convert %(orig)s to %(format)s and send to Kindle',
orig='Azw3',
format='Mobi')})
return bookformats
else:
log.error(u'Cannot find book entry %d', entry.id)
@@ -202,7 +220,6 @@ def send_mail(book_id, book_format, convert, kindle_mail, calibrepath, user_id):
# returns None if success, otherwise errormessage
return convert_book_format(book_id, calibrepath, u'azw3', book_format.lower(), user_id, kindle_mail)
for entry in iter(book.data):
if entry.format.upper() == book_format.upper():
converted_file_name = entry.name + '.' + book_format.lower()
@@ -279,15 +296,29 @@ def delete_book_file(book, calibrepath, book_format=None):
if os.path.isdir(path):
if len(next(os.walk(path))[1]):
log.error("Deleting book %s failed, path has subfolders: %s", book.id, book.path)
return False
shutil.rmtree(path, ignore_errors=True)
return False , _("Deleting book %(id)s failed, path has subfolders: %(path)s",
id=book.id,
path=book.path)
try:
for root, __, files in os.walk(path):
for f in files:
os.unlink(os.path.join(root, f))
shutil.rmtree(path)
except (IOError, OSError) as e:
log.error("Deleting book %s failed: %s", book.id, e)
return False, _("Deleting book %(id)s failed: %(message)s", id=book.id, message=e)
authorpath = os.path.join(calibrepath, os.path.split(book.path)[0])
if not os.listdir(authorpath):
shutil.rmtree(authorpath, ignore_errors=True)
return True
try:
shutil.rmtree(authorpath)
except (IOError, OSError) as e:
log.error("Deleting authorpath for book %s failed: %s", book.id, e)
return True, None
else:
log.error("Deleting book %s failed, book path not valid: %s", book.id, book.path)
return False
return False, _("Deleting book %(id)s failed, book path not valid: %(path)s",
id=book.id,
path=book.path)
def update_dir_structure_file(book_id, calibrepath, first_author):
@@ -370,7 +401,7 @@ def update_dir_structure_gdrive(book_id, first_author):
path = book.path
gd.updateDatabaseOnEdit(gFile['id'], book.path) # only child folder affected
else:
error = _(u'File %(file)s not found on Google Drive', file=book.path) # file not found
error = _(u'File %(file)s not found on Google Drive', file=book.path) # file not found
if authordir != new_authordir:
gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), new_titledir)
@@ -380,7 +411,7 @@ def update_dir_structure_gdrive(book_id, first_author):
path = book.path
gd.updateDatabaseOnEdit(gFile['id'], book.path)
else:
error = _(u'File %(file)s not found on Google Drive', file=authordir) # file not found
error = _(u'File %(file)s not found on Google Drive', file=authordir) # file not found
# Rename all files from old names to new names
if authordir != new_authordir or titledir != new_titledir:
@@ -396,7 +427,7 @@ def update_dir_structure_gdrive(book_id, first_author):
def delete_book_gdrive(book, book_format):
error= False
error = None
if book_format:
name = ''
for entry in book.data:
@@ -404,38 +435,42 @@ def delete_book_gdrive(book, book_format):
name = entry.name + '.' + book_format
gFile = gd.getFileFromEbooksFolder(book.path, name)
else:
gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path),book.path.split('/')[1])
gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), book.path.split('/')[1])
if gFile:
gd.deleteDatabaseEntry(gFile['id'])
gFile.Trash()
else:
error =_(u'Book path %(path)s not found on Google Drive', path=book.path) # file not found
return error
error = _(u'Book path %(path)s not found on Google Drive', path=book.path) # file not found
return error is None, error
def reset_password(user_id):
existing_user = ub.session.query(ub.User).filter(ub.User.id == user_id).first()
if not existing_user:
return 0, None
password = generate_random_password()
existing_user.password = generate_password_hash(password)
if not config.get_mail_server_configured():
return (2, None)
return 2, None
try:
ub.session.commit()
send_registration_mail(existing_user.email, existing_user.nickname, password, True)
return (1, existing_user.nickname)
return 1, existing_user.nickname
except Exception:
ub.session.rollback()
return (0, None)
return 0, None
def generate_random_password():
s = "abcdefghijklmnopqrstuvwxyz01234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%&*()?"
passlen = 8
return "".join(random.sample(s,passlen ))
return "".join(random.sample(s, passlen))
################################## External interface
def update_dir_stucture(book_id, calibrepath, first_author = None):
def update_dir_stucture(book_id, calibrepath, first_author=None):
if config.config_use_google_drive:
return update_dir_structure_gdrive(book_id, first_author)
else:
@@ -455,23 +490,26 @@ def get_cover_on_failure(use_generic_cover):
else:
return None
def get_book_cover(book_id):
book = db.session.query(db.Books).filter(db.Books.id == book_id).filter(common_filters()).first()
return get_book_cover_internal(book, use_generic_cover_on_failure=True)
def get_book_cover_with_uuid(book_uuid,
use_generic_cover_on_failure=True):
use_generic_cover_on_failure=True):
book = db.session.query(db.Books).filter(db.Books.uuid == book_uuid).first()
return get_book_cover_internal(book, use_generic_cover_on_failure)
def get_book_cover_internal(book,
use_generic_cover_on_failure):
use_generic_cover_on_failure):
if book and book.has_cover:
if config.config_use_google_drive:
try:
if not gd.is_gdrive_ready():
return get_cover_on_failure(use_generic_cover_on_failure)
path=gd.get_cover_via_gdrive(book.path)
path = gd.get_cover_via_gdrive(book.path)
if path:
return redirect(path)
else:
@@ -528,7 +566,7 @@ def save_cover(img, book_path):
return False, _("Only jpg/jpeg/png/webp files are supported as coverfile")
# convert to jpg because calibre only supports jpg
if content_type in ('image/png', 'image/webp'):
if hasattr(img,'stream'):
if hasattr(img, 'stream'):
imgc = PILImage.open(img.stream)
else:
imgc = PILImage.open(io.BytesIO(img.content))
@@ -537,7 +575,7 @@ def save_cover(img, book_path):
im.save(tmp_bytesio, format='JPEG')
img._content = tmp_bytesio.getvalue()
else:
if content_type not in ('image/jpeg'):
if content_type not in 'image/jpeg':
log.error("Only jpg/jpeg files are supported as coverfile")
return False, _("Only jpg/jpeg files are supported as coverfile")
@@ -555,7 +593,6 @@ def save_cover(img, book_path):
return save_cover_from_filestorage(os.path.join(config.config_calibre_dir, book_path), "cover.jpg", img)
def do_download_file(book, book_format, data, headers):
if config.config_use_google_drive:
startTime = time.time()
@@ -577,7 +614,6 @@ def do_download_file(book, book_format, data, headers):
##################################
def check_unrar(unrarLocation):
if not unrarLocation:
return
@@ -599,13 +635,12 @@ def check_unrar(unrarLocation):
return _('Error executing UnRar')
def json_serial(obj):
"""JSON serializer for objects not serializable by default json code"""
if isinstance(obj, (datetime)):
if isinstance(obj, datetime):
return obj.isoformat()
if isinstance(obj, (timedelta)):
if isinstance(obj, timedelta):
return {
'__type__': 'timedelta',
'days': obj.days,
@@ -613,7 +648,7 @@ def json_serial(obj):
'microseconds': obj.microseconds,
}
# return obj.isoformat()
raise TypeError ("Type %s not serializable" % type(obj))
raise TypeError("Type %s not serializable" % type(obj))
# helper function for displaying the runtime of tasks
@@ -635,7 +670,7 @@ def format_runtime(runtime):
# helper function to localize status information in tasklist entries
def render_task_status(tasklist):
renderedtasklist=list()
renderedtasklist = list()
for task in tasklist:
if task['user'] == current_user.nickname or current_user.role_admin():
if task['formStarttime']:
@@ -651,7 +686,7 @@ def render_task_status(tasklist):
task['runtime'] = format_runtime(task['formRuntime'])
# localize the task status
if isinstance( task['stat'], int ):
if isinstance(task['stat'], int):
if task['stat'] == STAT_WAITING:
task['status'] = _(u'Waiting')
elif task['stat'] == STAT_FAIL:
@@ -664,14 +699,14 @@ def render_task_status(tasklist):
task['status'] = _(u'Unknown Status')
# localize the task type
if isinstance( task['taskType'], int ):
if isinstance(task['taskType'], int):
if task['taskType'] == TASK_EMAIL:
task['taskMessage'] = _(u'E-mail: ') + task['taskMess']
elif task['taskType'] == TASK_CONVERT:
elif task['taskType'] == TASK_CONVERT:
task['taskMessage'] = _(u'Convert: ') + task['taskMess']
elif task['taskType'] == TASK_UPLOAD:
elif task['taskType'] == TASK_UPLOAD:
task['taskMessage'] = _(u'Upload: ') + task['taskMess']
elif task['taskType'] == TASK_CONVERT_ANY:
elif task['taskType'] == TASK_CONVERT_ANY:
task['taskMessage'] = _(u'Convert: ') + task['taskMess']
else:
task['taskMessage'] = _(u'Unknown Task: ') + task['taskMess']
@@ -682,7 +717,19 @@ def render_task_status(tasklist):
# Language and content filters for displaying in the UI
def common_filters():
def common_filters(allow_show_archived=False):
if not allow_show_archived:
archived_books = (
ub.session.query(ub.ArchivedBook)
.filter(ub.ArchivedBook.user_id == int(current_user.id))
.filter(ub.ArchivedBook.is_archived == True)
.all()
)
archived_book_ids = [archived_book.book_id for archived_book in archived_books]
archived_filter = db.Books.id.notin_(archived_book_ids)
else:
archived_filter = true()
if current_user.filter_language() != "all":
lang_filter = db.Books.languages.any(db.Languages.lang_code == current_user.filter_language())
else:
@@ -695,16 +742,16 @@ def common_filters():
pos_cc_list = current_user.allowed_column_value.split(',')
pos_content_cc_filter = true() if pos_cc_list == [''] else \
getattr(db.Books, 'custom_column_' + str(config.config_restricted_column)).\
any(db.cc_classes[config.config_restricted_column].value.in_(pos_cc_list))
any(db.cc_classes[config.config_restricted_column].value.in_(pos_cc_list))
neg_cc_list = current_user.denied_column_value.split(',')
neg_content_cc_filter = false() if neg_cc_list == [''] else \
getattr(db.Books, 'custom_column_' + str(config.config_restricted_column)).\
any(db.cc_classes[config.config_restricted_column].value.in_(neg_cc_list))
any(db.cc_classes[config.config_restricted_column].value.in_(neg_cc_list))
else:
pos_content_cc_filter = true()
neg_content_cc_filter = false()
return and_(lang_filter, pos_content_tags_filter, ~neg_content_tags_filter,
pos_content_cc_filter, ~neg_content_cc_filter)
pos_content_cc_filter, ~neg_content_cc_filter, archived_filter)
def tags_filters():
@@ -719,8 +766,9 @@ def tags_filters():
# Creates a translated, human-readable name for every stored language for the UI
def speaking_language(languages=None):
if not languages:
languages = db.session.query(db.Languages).join(db.books_languages_link).join(db.Books).filter(common_filters())\
.group_by(text('books_languages_link.lang_code')).all()
languages = db.session.query(db.Languages).join(db.books_languages_link).join(db.Books)\
.filter(common_filters())\
.group_by(text('books_languages_link.lang_code')).all()
for lang in languages:
try:
cur_l = LC.parse(lang.lang_code)
@@ -729,6 +777,7 @@ def speaking_language(languages=None):
lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
return languages
# checks if domain is in database (including wildcards)
# example SELECT * FROM @TABLE WHERE 'abcdefg' LIKE Name;
# from https://code.luasoftware.com/tutorials/flask/execute-raw-sql-in-flask-sqlalchemy/
@@ -762,28 +811,36 @@ def order_authors(entry):
# Fill indexpage with all requested data from database
def fill_indexpage(page, database, db_filter, order, *join):
return fill_indexpage_with_archived_books(page, database, db_filter, order, False, *join)
def fill_indexpage_with_archived_books(page, database, db_filter, order, allow_show_archived, *join):
if current_user.show_detail_random():
randm = db.session.query(db.Books).filter(common_filters())\
randm = db.session.query(db.Books).filter(common_filters(allow_show_archived))\
.order_by(func.random()).limit(config.config_random_books)
else:
randm = false()
off = int(int(config.config_books_per_page) * (page - 1))
pagination = Pagination(page, config.config_books_per_page,
len(db.session.query(database).filter(db_filter).filter(common_filters()).all()))
entries = db.session.query(database).join(*join, isouter=True).filter(db_filter).filter(common_filters()).\
order_by(*order).offset(off).limit(config.config_books_per_page).all()
len(db.session.query(database).filter(db_filter)
.filter(common_filters(allow_show_archived)).all()))
entries = db.session.query(database).join(*join, isouter=True).filter(db_filter)\
.filter(common_filters(allow_show_archived))\
.order_by(*order).offset(off).limit(config.config_books_per_page).all()
for book in entries:
book = order_authors(book)
return entries, randm, pagination
def get_typeahead(database, query, replace=('',''), tag_filter=true()):
def get_typeahead(database, query, replace=('', ''), tag_filter=true()):
query = query or ''
db.session.connection().connection.connection.create_function("lower", 1, lcase)
entries = db.session.query(database).filter(tag_filter).filter(func.lower(database.name).ilike("%" + query + "%")).all()
entries = db.session.query(database).filter(tag_filter).\
filter(func.lower(database.name).ilike("%" + query + "%")).all()
json_dumps = json.dumps([dict(name=r.name.replace(*replace)) for r in entries])
return json_dumps
# read search results from the Calibre database and return them (used for the feed and simple search)
def get_search_results(term):
db.session.connection().connection.connection.create_function("lower", 1, lcase)
@@ -802,6 +859,7 @@ def get_search_results(term):
func.lower(db.Books.title).ilike("%" + term + "%")
)).order_by(db.Books.sort).all()
def get_cc_columns():
tmpcc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
if config.config_columns_to_ignore:
@@ -814,6 +872,7 @@ def get_cc_columns():
cc = tmpcc
return cc
def get_download_link(book_id, book_format):
book_format = book_format.split(".")[0]
book = db.session.query(db.Books).filter(db.Books.id == book_id).filter(common_filters()).first()
@@ -838,7 +897,8 @@ def get_download_link(book_id, book_format):
else:
abort(404)
def check_exists_book(authr,title):
def check_exists_book(authr, title):
db.session.connection().connection.connection.create_function("lower", 1, lcase)
q = list()
authorterms = re.split(r'\s*&\s*', authr)
@@ -847,11 +907,12 @@ def check_exists_book(authr,title):
return db.session.query(db.Books).filter(
and_(db.Books.authors.any(and_(*q)),
func.lower(db.Books.title).ilike("%" + title + "%")
)).first()
func.lower(db.Books.title).ilike("%" + title + "%")
)).first()
############### Database Helper functions
def lcase(s):
try:
return unidecode.unidecode(s.lower())

@@ -80,9 +80,13 @@ def formatdate_filter(val):
formatdate = datetime.datetime.strptime(conformed_timestamp[:15], "%Y%m%d %H%M%S")
return format_date(formatdate, format='medium', locale=get_locale())
except AttributeError as e:
log.error('Babel error: %s, Current user locale: %s, Current User: %s', e, current_user.locale, current_user.nickname)
log.error('Babel error: %s, Current user locale: %s, Current User: %s', e,
current_user.locale,
current_user.nickname
)
return formatdate
@jinjia.app_template_filter('formatdateinput')
def format_date_input(val):
conformed_timestamp = re.sub(r"[:]|([-](?!((\d{2}[:]\d{2})|(\d{4}))$))", '', val)

@@ -17,11 +17,15 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import base64
import datetime
import itertools
import json
import sys
import os
import uuid
from time import gmtime, strftime
try:
from urllib import unquote
except ImportError:
@@ -34,20 +38,24 @@ from flask import (
jsonify,
current_app,
url_for,
redirect
redirect,
abort
)
from flask_login import current_user, login_required
from werkzeug.datastructures import Headers
from sqlalchemy import func
from sqlalchemy.sql.expression import and_, or_
from sqlalchemy.exc import StatementError
import requests
from . import config, logger, kobo_auth, db, helper
from . import config, logger, kobo_auth, db, helper, shelf as shelf_lib, ub
from .services import SyncToken as SyncToken
from .web import download_required
from .kobo_auth import requires_kobo_auth
KOBO_FORMATS = {"KEPUB": ["KEPUB"], "EPUB": ["EPUB3", "EPUB"]}
KOBO_STOREAPI_URL = "https://storeapi.kobo.com"
KOBO_IMAGEHOST_URL = "https://kbimages1-a.akamaihd.net"
kobo = Blueprint("kobo", __name__, url_prefix="/kobo/<auth_token>")
kobo_auth.disable_failed_auth_redirect_for_blueprint(kobo)
@@ -55,6 +63,7 @@ kobo_auth.register_url_value_preprocessor(kobo)
log = logger.create()
def get_store_url_for_current_request():
# Programmatically modify the current url to point to the official Kobo store
__, __, request_path_with_auth_token = request.full_path.rpartition("/kobo/")
@@ -96,9 +105,6 @@ def redirect_or_proxy_request():
if config.config_kobo_proxy:
if request.method == "GET":
return redirect(get_store_url_for_current_request(), 307)
if request.method == "DELETE":
log.info('Delete Book')
return make_response(jsonify({}))
else:
# The Kobo device turns other request types into GET requests on redirects, so we instead proxy to the Kobo store ourselves.
store_response = make_request_to_kobo_store()
@@ -114,6 +120,10 @@ def redirect_or_proxy_request():
return make_response(jsonify({}))
def convert_to_kobo_timestamp_string(timestamp):
return timestamp.strftime("%Y-%m-%dT%H:%M:%SZ")
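
(Editorial aside, for illustration only; not part of this commit.) The new helper centralizes the timestamp format Kobo devices expect: an ISO-8601 string with a literal 'Z' suffix.

# Example of the string produced by convert_to_kobo_timestamp_string (illustration only).
from datetime import datetime

ts = datetime(2020, 2, 23, 18, 52, 41)
assert ts.strftime("%Y-%m-%dT%H:%M:%SZ") == "2020-02-23T18:52:41Z"
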
@kobo.route("/v1/library/sync")
@requires_kobo_auth
@download_required
@@ -128,58 +138,103 @@ def HandleSyncRequest():
new_books_last_modified = sync_token.books_last_modified
new_books_last_created = sync_token.books_last_created
entitlements = []
new_reading_state_last_modified = sync_token.reading_state_last_modified
sync_results = []
# We reload the book database so that the user gets a fresh view of the library
# in case of external changes (e.g. adding a book through Calibre).
db.reconnect_db(config)
archived_books = (
ub.session.query(ub.ArchivedBook)
.filter(ub.ArchivedBook.user_id == int(current_user.id))
.all()
)
# We join-in books that have had their Archived bit recently modified in order to either:
# * Restore them to the user's device.
# * Delete them from the user's device.
# (Ideally we would use a join for this logic, however cross-database joins don't look trivial in SqlAlchemy.)
recently_restored_or_archived_books = []
archived_book_ids = {}
new_archived_last_modified = datetime.datetime.min
for archived_book in archived_books:
if archived_book.last_modified > sync_token.archive_last_modified:
recently_restored_or_archived_books.append(archived_book.book_id)
if archived_book.is_archived:
archived_book_ids[archived_book.book_id] = True
new_archived_last_modified = max(
new_archived_last_modified, archived_book.last_modified)
# sqlite gives unexpected results when performing the last_modified comparison without the datetime cast.
# It looks like it's treating the db.Books.last_modified field as a string and may fail
# the comparison because of the +00:00 suffix.
changed_entries = (
db.session.query(db.Books)
.join(db.Data)
.filter(func.datetime(db.Books.last_modified) > sync_token.books_last_modified)
.filter(or_(func.datetime(db.Books.last_modified) > sync_token.books_last_modified,
db.Books.id.in_(recently_restored_or_archived_books)))
.filter(db.Data.format.in_(KOBO_FORMATS))
.all()
)
reading_states_in_new_entitlements = []
for book in changed_entries:
kobo_reading_state = get_or_create_reading_state(book.id)
entitlement = {
"BookEntitlement": create_book_entitlement(book),
"BookEntitlement": create_book_entitlement(book, archived=(book.id in archived_book_ids)),
"BookMetadata": get_metadata(book),
"ReadingState": reading_state(book),
}
if kobo_reading_state.last_modified > sync_token.reading_state_last_modified:
entitlement["ReadingState"] = get_kobo_reading_state_response(book, kobo_reading_state)
new_reading_state_last_modified = max(new_reading_state_last_modified, kobo_reading_state.last_modified)
reading_states_in_new_entitlements.append(book.id)
if book.timestamp > sync_token.books_last_created:
entitlements.append({"NewEntitlement": entitlement})
sync_results.append({"NewEntitlement": entitlement})
else:
entitlements.append({"ChangedEntitlement": entitlement})
sync_results.append({"ChangedEntitlement": entitlement})
new_books_last_modified = max(
book.last_modified, sync_token.books_last_modified
book.last_modified, new_books_last_modified
)
new_books_last_created = max(book.timestamp, sync_token.books_last_created)
new_books_last_created = max(book.timestamp, new_books_last_created)
changed_reading_states = (
ub.session.query(ub.KoboReadingState)
.filter(and_(func.datetime(ub.KoboReadingState.last_modified) > sync_token.reading_state_last_modified,
ub.KoboReadingState.user_id == current_user.id,
ub.KoboReadingState.book_id.notin_(reading_states_in_new_entitlements))))
for kobo_reading_state in changed_reading_states.all():
book = db.session.query(db.Books).filter(db.Books.id == kobo_reading_state.book_id).one_or_none()
if book:
sync_results.append({
"ChangedReadingState": {
"ReadingState": get_kobo_reading_state_response(book, kobo_reading_state)
}
})
new_reading_state_last_modified = max(new_reading_state_last_modified, kobo_reading_state.last_modified)
sync_shelves(sync_token, sync_results)
sync_token.books_last_created = new_books_last_created
sync_token.books_last_modified = new_books_last_modified
sync_token.archive_last_modified = new_archived_last_modified
sync_token.reading_state_last_modified = new_reading_state_last_modified
if config.config_kobo_proxy:
return generate_sync_response(request, sync_token, entitlements)
return make_response(jsonify(entitlements))
# Missing feature: Detect server-side book deletions.
return generate_sync_response(sync_token, sync_results)
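
(Editorial aside, for illustration only; not part of this commit.) sync_results is a flat list in which each element carries exactly one of the payload kinds built above; generate_sync_response simply serializes it. A hedged sketch of its shape, with placeholder values:

# Placeholder values only; real entries are built in HandleSyncRequest and sync_shelves.
sync_results = [
    {"NewEntitlement": {"BookEntitlement": {}, "BookMetadata": {}, "ReadingState": {}}},
    {"ChangedEntitlement": {"BookEntitlement": {}, "BookMetadata": {}, "ReadingState": {}}},
    {"ChangedReadingState": {"ReadingState": {}}},
    {"NewTag": {"Tag": {"Id": "shelf-uuid", "Items": [], "Type": "UserTag"}}},
    {"DeletedTag": {"Tag": {"Id": "shelf-uuid", "LastModified": "2020-02-23T18:52:41Z"}}},
]
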
def generate_sync_response(request, sync_token, entitlements):
def generate_sync_response(sync_token, sync_results):
extra_headers = {}
if config.config_kobo_proxy:
# Merge in sync results from the official Kobo store.
try:
store_response = make_request_to_kobo_store(sync_token)
store_entitlements = store_response.json()
entitlements += store_entitlements
store_sync_results = store_response.json()
sync_results += store_sync_results
sync_token.merge_from_store_response(store_response)
extra_headers["x-kobo-sync"] = store_response.headers.get("x-kobo-sync")
extra_headers["x-kobo-sync-mode"] = store_response.headers.get("x-kobo-sync-mode")
@@ -189,7 +244,7 @@ def generate_sync_response(request, sync_token, entitlements):
log.error("Failed to receive or parse response from Kobo's sync endpoint: " + str(e))
sync_token.to_headers(extra_headers)
response = make_response(jsonify(entitlements), extra_headers)
response = make_response(jsonify(sync_results), extra_headers)
return response
@@ -231,19 +286,18 @@ def get_download_url_for_book(book, book_format):
)
def create_book_entitlement(book):
def create_book_entitlement(book, archived):
book_uuid = book.uuid
return {
"Accessibility": "Full",
"ActivePeriod": {"From": current_time(),},
"Created": book.timestamp.strftime("%Y-%m-%dT%H:%M:%SZ"),
"ActivePeriod": {"From": convert_to_kobo_timestamp_string(datetime.datetime.now())},
"Created": convert_to_kobo_timestamp_string(book.timestamp),
"CrossRevisionId": book_uuid,
"Id": book_uuid,
"IsRemoved": archived,
"IsHiddenFromArchive": False,
"IsLocked": False,
# Setting this to true removes from the device.
"IsRemoved": False,
"LastModified": book.last_modified.strftime("%Y-%m-%dT%H:%M:%SZ"),
"LastModified": convert_to_kobo_timestamp_string(book.last_modified),
"OriginCategory": "Imported",
"RevisionId": book_uuid,
"Status": "Active",
@@ -316,6 +370,8 @@ def get_metadata(book):
"IsSocialEnabled": True,
"Language": "en",
"PhoneticPronunciations": {},
# TODO: Fix book.pubdate to return a datetime object so that we can easily
# convert it to the format Kobo devices expect.
"PublicationDate": book.pubdate,
"Publisher": {"Imprint": "", "Name": get_publisher(book),},
"RevisionId": book_uuid,
@@ -330,7 +386,7 @@ def get_metadata(book):
name = get_series(book)
metadata["Series"] = {
"Name": get_series(book),
"Number": book.series_index,
"Number": book.series_index, # ToDo Check int() ?
"NumberFloat": float(book.series_index),
# Get a deterministic id based on the series name.
"Id": uuid.uuid3(uuid.NAMESPACE_DNS, name),
@@ -339,31 +395,399 @@ def get_metadata(book):
return metadata
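
(Editorial aside, for illustration only; not part of this commit.) uuid.uuid3 is a name-based, deterministic UUID, so the same series name always maps to the same Kobo series id across syncs:

# Deterministic series id, as used in get_metadata above (series name is a placeholder).
import uuid

series_name = "Example Series"
assert uuid.uuid3(uuid.NAMESPACE_DNS, series_name) == uuid.uuid3(uuid.NAMESPACE_DNS, series_name)
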
def reading_state(book):
# TODO: Implement
reading_state = {
# "StatusInfo": {
# "LastModified": get_single_cc_value(book, "lastreadtimestamp"),
# "Status": get_single_cc_value(book, "reading_status"),
# }
# TODO: CurrentBookmark, Location
@kobo.route("/v1/library/tags", methods=["POST", "DELETE"])
@login_required
# Creates a Shelf with the given items, and returns the shelf's uuid.
def HandleTagCreate():
# catch delete requests, otherwise they are handled in the book delete handler
if request.method == "DELETE":
abort(405)
name, items = None, None
try:
shelf_request = request.json
name = shelf_request["Name"]
items = shelf_request["Items"]
if not name:
raise TypeError
except (KeyError, TypeError):
log.debug("Received malformed v1/library/tags request.")
abort(400, description="Malformed tags POST request. Data has empty 'Name', missing 'Name' or 'Items' field")
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.name == name, ub.Shelf.user_id ==
current_user.id).one_or_none()
if shelf and not shelf_lib.check_shelf_edit_permissions(shelf):
abort(401, description="User is unauthorized to create shelf.")
if not shelf:
shelf = ub.Shelf(user_id=current_user.id, name=name, uuid=str(uuid.uuid4()))
ub.session.add(shelf)
items_unknown_to_calibre = add_items_to_shelf(items, shelf)
if items_unknown_to_calibre:
log.debug("Received request to add unknown books to a collection. Silently ignoring items.")
ub.session.commit()
return make_response(jsonify(str(shelf.uuid)), 201)
@kobo.route("/v1/library/tags/<tag_id>", methods=["DELETE", "PUT"])
def HandleTagUpdate(tag_id):
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == tag_id,
ub.Shelf.user_id == current_user.id).one_or_none()
if not shelf:
log.debug("Received Kobo tag update request on a collection unknown to CalibreWeb")
if config.config_kobo_proxy:
return redirect_or_proxy_request()
else:
abort(404, description="Collection isn't known to CalibreWeb")
if not shelf_lib.check_shelf_edit_permissions(shelf):
abort(401, description="User is unauthorized to edit shelf.")
if request.method == "DELETE":
shelf_lib.delete_shelf_helper(shelf)
else:
name = None
try:
shelf_request = request.json
name = shelf_request["Name"]
except (KeyError, TypeError):
log.debug("Received malformed v1/library/tags rename request.")
abort(400, description="Malformed tags POST request. Data is missing 'Name' field")
shelf.name = name
ub.session.merge(shelf)
ub.session.commit()
return make_response(' ', 200)
# Adds items to the given shelf.
def add_items_to_shelf(items, shelf):
book_ids_already_in_shelf = set([book_shelf.book_id for book_shelf in shelf.books])
items_unknown_to_calibre = []
for item in items:
try:
if item["Type"] != "ProductRevisionTagItem":
items_unknown_to_calibre.append(item)
continue
book = db.session.query(db.Books).filter(db.Books.uuid == item["RevisionId"]).one_or_none()
if not book:
items_unknown_to_calibre.append(item)
continue
book_id = book.id
if book_id not in book_ids_already_in_shelf:
shelf.books.append(ub.BookShelf(book_id=book_id))
except KeyError:
items_unknown_to_calibre.append(item)
return items_unknown_to_calibre
@kobo.route("/v1/library/tags/<tag_id>/items", methods=["POST"])
@login_required
def HandleTagAddItem(tag_id):
items = None
try:
tag_request = request.json
items = tag_request["Items"]
except (KeyError, TypeError):
log.debug("Received malformed v1/library/tags/<tag_id>/items request.")
abort(400, description="Malformed tags POST request. Data is missing 'Items' field")
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == tag_id,
ub.Shelf.user_id == current_user.id).one_or_none()
if not shelf:
log.debug("Received Kobo request on a collection unknown to CalibreWeb")
abort(404, description="Collection isn't known to CalibreWeb")
if not shelf_lib.check_shelf_edit_permissions(shelf):
abort(401, description="User is unauthorized to edit shelf.")
items_unknown_to_calibre = add_items_to_shelf(items, shelf)
if items_unknown_to_calibre:
log.debug("Received request to add an unknown book to a collection. Silently ignoring item.")
ub.session.merge(shelf)
ub.session.commit()
return make_response('', 201)
@kobo.route("/v1/library/tags/<tag_id>/items/delete", methods=["POST"])
@login_required
def HandleTagRemoveItem(tag_id):
items = None
try:
tag_request = request.json
items = tag_request["Items"]
except (KeyError, TypeError):
log.debug("Received malformed v1/library/tags/<tag_id>/items/delete request.")
abort(400, description="Malformed tags POST request. Data is missing 'Items' field")
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == tag_id,
ub.Shelf.user_id == current_user.id).one_or_none()
if not shelf:
log.debug(
"Received a request to remove an item from a Collection unknown to CalibreWeb.")
abort(404, description="Collection isn't known to CalibreWeb")
if not shelf_lib.check_shelf_edit_permissions(shelf):
abort(401, description="User is unauthorized to edit shelf.")
items_unknown_to_calibre = []
for item in items:
try:
if item["Type"] != "ProductRevisionTagItem":
items_unknown_to_calibre.append(item)
continue
book = db.session.query(db.Books).filter(db.Books.uuid == item["RevisionId"]).one_or_none()
if not book:
items_unknown_to_calibre.append(item)
continue
shelf.books.filter(ub.BookShelf.book_id == book.id).delete()
except KeyError:
items_unknown_to_calibre.append(item)
ub.session.commit()
if items_unknown_to_calibre:
log.debug("Received request to remove an unknown book from a collection. Silently ignoring item.")
return make_response('', 200)
# Add new, changed, or deleted shelves to the sync_results.
# Note: Public shelves that aren't owned by the user aren't supported.
def sync_shelves(sync_token, sync_results):
new_tags_last_modified = sync_token.tags_last_modified
for shelf in ub.session.query(ub.ShelfArchive).filter(func.datetime(ub.ShelfArchive.last_modified) > sync_token.tags_last_modified,
ub.ShelfArchive.user_id == current_user.id):
new_tags_last_modified = max(shelf.last_modified, new_tags_last_modified)
sync_results.append({
"DeletedTag": {
"Tag": {
"Id": shelf.uuid,
"LastModified": convert_to_kobo_timestamp_string(shelf.last_modified)
}
}
})
for shelf in ub.session.query(ub.Shelf).filter(func.datetime(ub.Shelf.last_modified) > sync_token.tags_last_modified,
ub.Shelf.user_id == current_user.id):
if not shelf_lib.check_shelf_view_permissions(shelf):
continue
new_tags_last_modified = max(shelf.last_modified, new_tags_last_modified)
tag = create_kobo_tag(shelf)
if not tag:
continue
if shelf.created > sync_token.tags_last_modified:
sync_results.append({
"NewTag": tag
})
else:
sync_results.append({
"ChangedTag": tag
})
sync_token.tags_last_modified = new_tags_last_modified
ub.session.commit()
# Creates a Kobo "Tag" object from a ub.Shelf object
def create_kobo_tag(shelf):
tag = {
"Created": convert_to_kobo_timestamp_string(shelf.created),
"Id": shelf.uuid,
"Items": [],
"LastModified": convert_to_kobo_timestamp_string(shelf.last_modified),
"Name": shelf.name,
"Type": "UserTag"
}
return reading_state
for book_shelf in shelf.books:
book = db.session.query(db.Books).filter(db.Books.id == book_shelf.book_id).one_or_none()
if not book:
log.info(u"Book (id: %s) in BookShelf (id: %s) not found in book database", book_shelf.book_id, shelf.id)
continue
tag["Items"].append(
{
"RevisionId": book.uuid,
"Type": "ProductRevisionTagItem"
}
)
return {"Tag": tag}
@kobo.route("/v1/library/<book_uuid>/state", methods=["GET", "PUT"])
@login_required
def HandleStateRequest(book_uuid):
book = db.session.query(db.Books).filter(db.Books.uuid == book_uuid).first()
if not book or not book.data:
log.info(u"Book %s not found in database", book_uuid)
return redirect_or_proxy_request()
kobo_reading_state = get_or_create_reading_state(book.id)
@kobo.route("/<book_uuid>/image.jpg")
if request.method == "GET":
return jsonify([get_kobo_reading_state_response(book, kobo_reading_state)])
else:
update_results_response = {"EntitlementId": book_uuid}
try:
request_data = request.json
request_reading_state = request_data["ReadingStates"][0]
request_bookmark = request_reading_state["CurrentBookmark"]
if request_bookmark:
current_bookmark = kobo_reading_state.current_bookmark
current_bookmark.progress_percent = request_bookmark["ProgressPercent"]
current_bookmark.content_source_progress_percent = request_bookmark["ContentSourceProgressPercent"]
location = request_bookmark["Location"]
if location:
current_bookmark.location_value = location["Value"]
current_bookmark.location_type = location["Type"]
current_bookmark.location_source = location["Source"]
update_results_response["CurrentBookmarkResult"] = {"Result": "Success"}
request_statistics = request_reading_state["Statistics"]
if request_statistics:
statistics = kobo_reading_state.statistics
statistics.spent_reading_minutes = int(request_statistics["SpentReadingMinutes"])
statistics.remaining_time_minutes = int(request_statistics["RemainingTimeMinutes"])
update_results_response["StatisticsResult"] = {"Result": "Success"}
request_status_info = request_reading_state["StatusInfo"]
if request_status_info:
book_read = kobo_reading_state.book_read_link
new_book_read_status = get_ub_read_status(request_status_info["Status"])
if new_book_read_status == ub.ReadBook.STATUS_IN_PROGRESS \
and new_book_read_status != book_read.read_status:
book_read.times_started_reading += 1
book_read.last_time_started_reading = datetime.datetime.utcnow()
book_read.read_status = new_book_read_status
update_results_response["StatusInfoResult"] = {"Result": "Success"}
except (KeyError, TypeError, ValueError, StatementError):
log.debug("Received malformed v1/library/<book_uuid>/state request.")
ub.session.rollback()
abort(400, description="Malformed request data is missing 'ReadingStates' key")
ub.session.merge(kobo_reading_state)
ub.session.commit()
return jsonify({
"RequestResult": "Success",
"UpdateResults": [update_results_response],
})
def get_read_status_for_kobo(ub_book_read):
enum_to_string_map = {
None: "ReadyToRead",
ub.ReadBook.STATUS_UNREAD: "ReadyToRead",
ub.ReadBook.STATUS_FINISHED: "Finished",
ub.ReadBook.STATUS_IN_PROGRESS: "Reading",
}
return enum_to_string_map[ub_book_read.read_status]
def get_ub_read_status(kobo_read_status):
string_to_enum_map = {
None: None,
"ReadyToRead": ub.ReadBook.STATUS_UNREAD,
"Finished": ub.ReadBook.STATUS_FINISHED,
"Reading": ub.ReadBook.STATUS_IN_PROGRESS,
}
return string_to_enum_map[kobo_read_status]
def get_or_create_reading_state(book_id):
book_read = ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id,
ub.ReadBook.user_id == current_user.id).one_or_none()
if not book_read:
book_read = ub.ReadBook(user_id=current_user.id, book_id=book_id)
if not book_read.kobo_reading_state:
kobo_reading_state = ub.KoboReadingState(user_id=book_read.user_id, book_id=book_id)
kobo_reading_state.current_bookmark = ub.KoboBookmark()
kobo_reading_state.statistics = ub.KoboStatistics()
book_read.kobo_reading_state = kobo_reading_state
ub.session.add(book_read)
ub.session.commit()
return book_read.kobo_reading_state
def get_kobo_reading_state_response(book, kobo_reading_state):
return {
"EntitlementId": book.uuid,
"Created": convert_to_kobo_timestamp_string(book.timestamp),
"LastModified": convert_to_kobo_timestamp_string(kobo_reading_state.last_modified),
# AFAICT PriorityTimestamp is always equal to LastModified.
"PriorityTimestamp": convert_to_kobo_timestamp_string(kobo_reading_state.priority_timestamp),
"StatusInfo": get_status_info_response(kobo_reading_state.book_read_link),
"Statistics": get_statistics_response(kobo_reading_state.statistics),
"CurrentBookmark": get_current_bookmark_response(kobo_reading_state.current_bookmark),
}
def get_status_info_response(book_read):
resp = {
"LastModified": convert_to_kobo_timestamp_string(book_read.last_modified),
"Status": get_read_status_for_kobo(book_read),
"TimesStartedReading": book_read.times_started_reading,
}
if book_read.last_time_started_reading:
resp["LastTimeStartedReading"] = convert_to_kobo_timestamp_string(book_read.last_time_started_reading)
return resp
def get_statistics_response(statistics):
resp = {
"LastModified": convert_to_kobo_timestamp_string(statistics.last_modified),
}
if statistics.spent_reading_minutes:
resp["SpentReadingMinutes"] = statistics.spent_reading_minutes
if statistics.remaining_time_minutes:
resp["RemainingTimeMinutes"] = statistics.remaining_time_minutes
return resp
def get_current_bookmark_response(current_bookmark):
resp = {
"LastModified": convert_to_kobo_timestamp_string(current_bookmark.last_modified),
}
if current_bookmark.progress_percent:
resp["ProgressPercent"] = current_bookmark.progress_percent
if current_bookmark.content_source_progress_percent:
resp["ContentSourceProgressPercent"] = current_bookmark.content_source_progress_percent
if current_bookmark.location_value:
resp["Location"] = {
"Value": current_bookmark.location_value,
"Type": current_bookmark.location_type,
"Source": current_bookmark.location_source,
}
return resp
@kobo.route("/<book_uuid>/<width>/<height>/<isGreyscale>/image.jpg", defaults={'Quality': ""})
@kobo.route("/<book_uuid>/<width>/<height>/<Quality>/<isGreyscale>/image.jpg")
@requires_kobo_auth
def HandleCoverImageRequest(book_uuid):
def HandleCoverImageRequest(book_uuid, width, height,Quality, isGreyscale):
book_cover = helper.get_book_cover_with_uuid(
book_uuid, use_generic_cover_on_failure=False
)
if not book_cover:
if config.config_kobo_proxy:
log.debug("Cover for unknown book: %s proxied to kobo" % book_uuid)
return redirect(get_store_url_for_current_request(), 307)
return redirect(KOBO_IMAGEHOST_URL +
"/{book_uuid}/{width}/{height}/false/image.jpg".format(book_uuid=book_uuid,
width=width,
height=height), 307)
else:
log.debug("Cover for unknown book: %s requested" % book_uuid)
return redirect_or_proxy_request()
# an additional proxy request makes no sense -> return directly
return make_response(jsonify({}))
log.debug("Cover request received for book %s" % book_uuid)
return book_cover
@ -373,13 +797,35 @@ def TopLevelEndpoint():
return make_response(jsonify({}))
@kobo.route("/v1/library/<book_uuid>", methods=["DELETE"])
@login_required
def HandleBookDeletionRequest(book_uuid):
log.info("Kobo book deletion request received for book %s" % book_uuid)
book = db.session.query(db.Books).filter(db.Books.uuid == book_uuid).first()
if not book:
log.info(u"Book %s not found in database", book_uuid)
return redirect_or_proxy_request()
book_id = book.id
archived_book = (
ub.session.query(ub.ArchivedBook)
.filter(ub.ArchivedBook.book_id == book_id)
.first()
)
if not archived_book:
archived_book = ub.ArchivedBook(user_id=current_user.id, book_id=book_id)
archived_book.is_archived = True
archived_book.last_modified = datetime.datetime.utcnow()
ub.session.merge(archived_book)
ub.session.commit()
return ("", 204)
# TODO: Implement the following routes
@kobo.route("/v1/library/<dummy>", methods=["DELETE", "GET"])
@kobo.route("/v1/library/<book_uuid>/state", methods=["PUT"])
@kobo.route("/v1/library/tags", methods=["POST"])
@kobo.route("/v1/library/tags/<shelf_name>", methods=["POST"])
@kobo.route("/v1/library/tags/<tag_id>", methods=["DELETE"])
def HandleUnimplementedRequest(dummy=None, book_uuid=None, shelf_name=None, tag_id=None):
def HandleUnimplementedRequest(dummy=None):
log.debug("Unimplemented Library Request received: %s", request.base_url)
return redirect_or_proxy_request()
@ -399,6 +845,7 @@ def HandleUserRequest(dummy=None):
@kobo.route("/v1/products/<dummy>/recommendations", methods=["GET", "POST"])
@kobo.route("/v1/products/<dummy>/nextread", methods=["GET", "POST"])
@kobo.route("/v1/products/<dummy>/reviews", methods=["GET", "POST"])
@kobo.route("/v1/products/books/series/<dummy>", methods=["GET", "POST"])
@kobo.route("/v1/products/books/<dummy>", methods=["GET", "POST"])
@kobo.route("/v1/products/dailydeal", methods=["GET", "POST"])
@kobo.route("/v1/products", methods=["GET", "POST"])
@ -407,12 +854,15 @@ def HandleProductsRequest(dummy=None):
return redirect_or_proxy_request()
@kobo.app_errorhandler(404)
'''@kobo.errorhandler(404)
def handle_404(err):
# This handler acts as a catch-all for endpoints that we don't have an interest in
# implementing (e.g. v1/analytics/gettests, v1/user/recommendations, etc.)
log.debug("Unknown Request received: %s", request.base_url)
return redirect_or_proxy_request()
if err:
print('404')
return jsonify(error=str(err)), 404
log.debug("Unknown Request received: %s, method: %s, data: %s", request.base_url, request.method, request.data)
return redirect_or_proxy_request()'''
def make_calibre_web_auth_response():
@ -446,18 +896,23 @@ def HandleAuthRequest():
return make_calibre_web_auth_response()
def make_calibre_web_init_response(calibre_web_url):
resources = NATIVE_KOBO_RESOURCES(calibre_web_url)
response = make_response(jsonify({"Resources": resources}))
response.headers["x-kobo-apitoken"] = "e30="
return response
@kobo.route("/v1/initialization")
@requires_kobo_auth
def HandleInitRequest():
log.info('Init')
kobo_resources = None
if config.config_kobo_proxy:
try:
store_response = make_request_to_kobo_store()
store_response_json = store_response.json()
if "Resources" in store_response_json:
kobo_resources = store_response_json["Resources"]
except:
log.error("Failed to receive or parse response from Kobo's init endpoint. Falling back to un-proxied mode.")
if not kobo_resources:
kobo_resources = NATIVE_KOBO_RESOURCES()
if not current_app.wsgi_app.is_proxied:
log.debug('Kobo: Received unproxied request, changed request port to server port')
if ':' in request.host and not request.host.endswith(']'):
@ -469,33 +924,47 @@ def HandleInitRequest():
url_base=host,
url_port=config.config_port
)
kobo_resources["image_host"] = calibre_web_url
kobo_resources["image_url_quality_template"] = unquote(calibre_web_url +
url_for("kobo.HandleCoverImageRequest",
auth_token=kobo_auth.get_auth_token(),
book_uuid="{ImageId}",
width="{width}",
height="{height}",
Quality='{Quality}',
isGreyscale='isGreyscale'
))
kobo_resources["image_url_template"] = unquote(calibre_web_url +
url_for("kobo.HandleCoverImageRequest",
auth_token=kobo_auth.get_auth_token(),
book_uuid="{ImageId}",
width="{width}",
height="{height}",
isGreyscale='false'
))
else:
calibre_web_url = url_for("web.index", _external=True).strip("/")
if config.config_kobo_proxy:
try:
store_response = make_request_to_kobo_store()
kobo_resources["image_host"] = url_for("web.index", _external=True).strip("/")
kobo_resources["image_url_quality_template"] = unquote(url_for("kobo.HandleCoverImageRequest",
auth_token=kobo_auth.get_auth_token(),
book_uuid="{ImageId}",
width="{width}",
height="{height}",
_external=True))
kobo_resources["image_url_template"] = unquote(url_for("kobo.HandleCoverImageRequest",
auth_token=kobo_auth.get_auth_token(),
book_uuid="{ImageId}",
width="{width}",
height="{height}",
_external=True))
response = make_response(jsonify({"Resources": kobo_resources}))
response.headers["x-kobo-apitoken"] = "e30="
store_response_json = store_response.json()
if "Resources" in store_response_json:
kobo_resources = store_response_json["Resources"]
# calibre_web_url = url_for("web.index", _external=True).strip("/")
kobo_resources["image_host"] = calibre_web_url
kobo_resources["image_url_quality_template"] = unquote(calibre_web_url + url_for("kobo.HandleCoverImageRequest",
auth_token = kobo_auth.get_auth_token(),
book_uuid="{ImageId}"))
kobo_resources["image_url_template"] = unquote(calibre_web_url + url_for("kobo.HandleCoverImageRequest",
auth_token = kobo_auth.get_auth_token(),
book_uuid="{ImageId}"))
return make_response(store_response_json, store_response.status_code)
except:
log.error("Failed to receive or parse response from Kobo's init endpoint. Falling back to un-proxied mode.")
return make_calibre_web_init_response(calibre_web_url)
return response
def NATIVE_KOBO_RESOURCES(calibre_web_url):
def NATIVE_KOBO_RESOURCES():
return {
"account_page": "https://secure.kobobooks.com/profile",
"account_page_rakuten": "https://my.rakuten.co.jp/",
@ -546,13 +1015,6 @@ def NATIVE_KOBO_RESOURCES(calibre_web_url):
"giftcard_epd_redeem_url": "https://www.kobo.com/{storefront}/{language}/redeem-ereader",
"giftcard_redeem_url": "https://www.kobo.com/{storefront}/{language}/redeem",
"help_page": "http://www.kobo.com/help",
"image_host": calibre_web_url,
"image_url_quality_template": unquote(calibre_web_url + url_for("kobo.HandleCoverImageRequest",
auth_token = kobo_auth.get_auth_token(),
book_uuid="{ImageId}")),
"image_url_template": unquote(calibre_web_url + url_for("kobo.HandleCoverImageRequest",
auth_token = kobo_auth.get_auth_token(),
book_uuid="{ImageId}")),
"kobo_audiobooks_enabled": "False",
"kobo_audiobooks_orange_deal_enabled": "False",
"kobo_audiobooks_subscriptions_enabled": "False",

@ -67,6 +67,8 @@ def get_level_name(level):
def is_valid_logfile(file_path):
if file_path == LOG_TO_STDERR or file_path == LOG_TO_STDOUT:
return True
if not file_path:
return True
if os.path.isdir(file_path):
@ -105,7 +107,9 @@ def setup(log_file, log_level=None):
# avoid spamming the log with debug messages from libraries
r.setLevel(log_level)
log_file = _absolute_log_file(log_file, DEFAULT_LOG_FILE)
# Otherwise the name gets destroyed on Windows
if log_file != LOG_TO_STDERR and log_file != LOG_TO_STDOUT:
log_file = _absolute_log_file(log_file, DEFAULT_LOG_FILE)
previous_handler = r.handlers[0] if r.handlers else None
if previous_handler:
@ -119,7 +123,7 @@ def setup(log_file, log_level=None):
file_handler = StreamHandler(sys.stdout)
file_handler.baseFilename = log_file
else:
file_handler = StreamHandler()
file_handler = StreamHandler(sys.stderr)
file_handler.baseFilename = log_file
else:
try:

@ -30,7 +30,7 @@ except ImportError:
from flask_dance.consumer.storage.sqla import SQLAlchemyStorage as SQLAlchemyBackend
from flask_dance.consumer.storage.sqla import first, _get_real_user
from sqlalchemy.orm.exc import NoResultFound
backend_resultcode = True # prevent storing values with this resultcode
backend_resultcode = True # prevent storing values with this resultcode
except ImportError:
pass
@ -97,7 +97,7 @@ try:
def set(self, blueprint, token, user=None, user_id=None):
uid = first([user_id, self.user_id, blueprint.config.get("user_id")])
u = first(_get_real_user(ref, self.anon_user)
for ref in (user, self.user, blueprint.config.get("user")))
for ref in (user, self.user, blueprint.config.get("user")))
if self.user_required and not u and not uid:
raise ValueError("Cannot set OAuth token without an associated user")

@ -56,8 +56,8 @@ def requires_basic_auth_if_no_ano(f):
return decorated
class FeedObject():
def __init__(self,rating_id , rating_name):
class FeedObject:
def __init__(self, rating_id, rating_name):
self.rating_id = rating_id
self.rating_name = rating_name
@ -101,7 +101,7 @@ def feed_normal_search():
def feed_new():
off = request.args.get("offset") or 0
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
db.Books, True, [db.Books.timestamp.desc()])
db.Books, True, [db.Books.timestamp.desc()])
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
@ -119,7 +119,8 @@ def feed_discover():
def feed_best_rated():
off = request.args.get("offset") or 0
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
db.Books, db.Books.ratings.any(db.Ratings.rating > 9), [db.Books.timestamp.desc()])
db.Books, db.Books.ratings.any(db.Ratings.rating > 9),
[db.Books.timestamp.desc()])
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
@ -153,7 +154,8 @@ def feed_hot():
def feed_authorindex():
off = request.args.get("offset") or 0
entries = db.session.query(db.Authors).join(db.books_authors_link).join(db.Books).filter(common_filters())\
.group_by(text('books_authors_link.author')).order_by(db.Authors.sort).limit(config.config_books_per_page).offset(off)
.group_by(text('books_authors_link.author')).order_by(db.Authors.sort).limit(config.config_books_per_page)\
.offset(off)
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
len(db.session.query(db.Authors).all()))
return render_xml_template('feed.xml', listelements=entries, folder='opds.feed_author', pagination=pagination)
@ -164,7 +166,9 @@ def feed_authorindex():
def feed_author(book_id):
off = request.args.get("offset") or 0
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
db.Books, db.Books.authors.any(db.Authors.id == book_id), [db.Books.timestamp.desc()])
db.Books,
db.Books.authors.any(db.Authors.id == book_id),
[db.Books.timestamp.desc()])
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
@ -173,7 +177,8 @@ def feed_author(book_id):
def feed_publisherindex():
off = request.args.get("offset") or 0
entries = db.session.query(db.Publishers).join(db.books_publishers_link).join(db.Books).filter(common_filters())\
.group_by(text('books_publishers_link.publisher')).order_by(db.Publishers.sort).limit(config.config_books_per_page).offset(off)
.group_by(text('books_publishers_link.publisher')).order_by(db.Publishers.sort)\
.limit(config.config_books_per_page).offset(off)
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
len(db.session.query(db.Publishers).all()))
return render_xml_template('feed.xml', listelements=entries, folder='opds.feed_publisher', pagination=pagination)
@ -184,7 +189,8 @@ def feed_publisherindex():
def feed_publisher(book_id):
off = request.args.get("offset") or 0
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
db.Books, db.Books.publishers.any(db.Publishers.id == book_id),
db.Books,
db.Books.publishers.any(db.Publishers.id == book_id),
[db.Books.timestamp.desc()])
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
@ -205,7 +211,9 @@ def feed_categoryindex():
def feed_category(book_id):
off = request.args.get("offset") or 0
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
db.Books, db.Books.tags.any(db.Tags.id == book_id), [db.Books.timestamp.desc()])
db.Books,
db.Books.tags.any(db.Tags.id == book_id),
[db.Books.timestamp.desc()])
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
@ -225,9 +233,12 @@ def feed_seriesindex():
def feed_series(book_id):
off = request.args.get("offset") or 0
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
db.Books, db.Books.series.any(db.Series.id == book_id), [db.Books.series_index])
db.Books,
db.Books.series.any(db.Series.id == book_id),
[db.Books.series_index])
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
@opds.route("/opds/ratings")
@requires_basic_auth_if_no_ano
def feed_ratingindex():
@ -244,16 +255,18 @@ def feed_ratingindex():
element.append(FeedObject(entry[0].id, "{} Stars".format(entry.name)))
return render_xml_template('feed.xml', listelements=element, folder='opds.feed_ratings', pagination=pagination)
@opds.route("/opds/ratings/<book_id>")
@requires_basic_auth_if_no_ano
def feed_ratings(book_id):
off = request.args.get("offset") or 0
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
db.Books, db.Books.ratings.any(db.Ratings.id == book_id),[db.Books.timestamp.desc()])
db.Books,
db.Books.ratings.any(db.Ratings.id == book_id),
[db.Books.timestamp.desc()])
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
@opds.route("/opds/formats")
@requires_basic_auth_if_no_ano
def feed_formatindex():
@ -274,7 +287,9 @@ def feed_formatindex():
def feed_format(book_id):
off = request.args.get("offset") or 0
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
db.Books, db.Books.data.any(db.Data.format == book_id.upper()), [db.Books.timestamp.desc()])
db.Books,
db.Books.data.any(db.Data.format == book_id.upper()),
[db.Books.timestamp.desc()])
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
@ -306,7 +321,9 @@ def feed_languagesindex():
def feed_languages(book_id):
off = request.args.get("offset") or 0
entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
db.Books, db.Books.languages.any(db.Languages.id == book_id), [db.Books.timestamp.desc()])
db.Books,
db.Books.languages.any(db.Languages.id == book_id),
[db.Books.timestamp.desc()])
return render_xml_template('feed.xml', entries=entries, pagination=pagination)
@ -326,7 +343,8 @@ def feed_shelfindex():
def feed_shelf(book_id):
off = request.args.get("offset") or 0
if current_user.is_anonymous:
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == book_id).first()
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1,
ub.Shelf.id == book_id).first()
else:
shelf = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id),
ub.Shelf.id == book_id),
@ -349,11 +367,11 @@ def feed_shelf(book_id):
@requires_basic_auth_if_no_ano
@download_required
def opds_download_link(book_id, book_format):
return get_download_link(book_id,book_format.lower())
return get_download_link(book_id, book_format.lower())
@opds.route("/ajax/book/<string:uuid>/<library>")
@opds.route("/ajax/book/<string:uuid>",defaults={'library': ""})
@opds.route("/ajax/book/<string:uuid>", defaults={'library': ""})
@requires_basic_auth_if_no_ano
def get_metadata_calibre_companion(uuid, library):
entry = db.session.query(db.Books).filter(db.Books.uuid.like("%" + uuid + "%")).first()
@ -369,16 +387,17 @@ def get_metadata_calibre_companion(uuid, library):
def feed_search(term):
if term:
term = term.strip().lower()
entries = get_search_results( term)
entries = get_search_results(term)
entriescount = len(entries) if len(entries) > 0 else 1
pagination = Pagination(1, entriescount, entriescount)
return render_xml_template('feed.xml', searchterm=term, entries=entries, pagination=pagination)
else:
return render_xml_template('feed.xml', searchterm="")
def check_auth(username, password):
if sys.version_info.major == 3:
username=username.encode('windows-1252')
username = username.encode('windows-1252')
user = ub.session.query(ub.User).filter(func.lower(ub.User.nickname) ==
username.decode('utf-8').lower()).first()
return bool(user and check_password_hash(str(user.password), password))
@ -392,13 +411,14 @@ def authenticate():
def render_xml_template(*args, **kwargs):
#ToDo: return time in current timezone similar to %z
# ToDo: return time in current timezone similar to %z
currtime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S+00:00")
xml = render_template(current_time=currtime, instance=config.config_calibre_web_title, *args, **kwargs)
response = make_response(xml)
response.headers["Content-Type"] = "application/atom+xml; charset=utf-8"
return response
@opds.route("/opds/thumb_240_240/<book_id>")
@opds.route("/opds/cover_240_240/<book_id>")
@opds.route("/opds/cover_90_90/<book_id>")
@ -407,13 +427,15 @@ def render_xml_template(*args, **kwargs):
def feed_get_cover(book_id):
return get_book_cover(book_id)
@opds.route("/opds/readbooks")
@requires_basic_auth_if_no_ano
def feed_read_books():
off = request.args.get("offset") or 0
result, pagination = render_read_books(int(off) / (int(config.config_books_per_page)) + 1, True, True)
result, pagination = render_read_books(int(off) / (int(config.config_books_per_page)) + 1, True, True)
return render_xml_template('feed.xml', entries=result, pagination=pagination)
@opds.route("/opds/unreadbooks")
@requires_basic_auth_if_no_ano
def feed_unread_books():

@ -43,7 +43,6 @@ from . import logger
log = logger.create()
def _readable_listen_address(address, port):
if ':' in address:
address = "[" + address + "]"
@ -84,7 +83,8 @@ class WebServer(object):
if os.path.isfile(certfile_path) and os.path.isfile(keyfile_path):
self.ssl_args = dict(certfile=certfile_path, keyfile=keyfile_path)
else:
log.warning('The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl.')
log.warning('The specified paths for the ssl certificate file and/or key file seem to be broken. '
'Ignoring ssl.')
log.warning('Cert path: %s', certfile_path)
log.warning('Key path: %s', keyfile_path)

@ -42,10 +42,18 @@ def to_epoch_timestamp(datetime_object):
return (datetime_object - datetime(1970, 1, 1)).total_seconds()
class SyncToken():
def get_datetime_from_json(json_object, field_name):
try:
return datetime.utcfromtimestamp(json_object[field_name])
except KeyError:
return datetime.min
class SyncToken:
""" The SyncToken is used to persist state accross requests.
When serialized over the response headers, the Kobo device will propagate the token onto following requests to the service.
As an example use-case, the SyncToken is used to detect books that have been added to the library since the last time the device synced to the server.
When serialized over the response headers, the Kobo device will propagate the token onto following
requests to the service. As an example use-case, the SyncToken is used to detect books that have been added
to the library since the last time the device synced to the server.
Attributes:
books_last_created: Datetime representing the newest book that the device knows about.
@ -53,21 +61,26 @@ class SyncToken():
"""
SYNC_TOKEN_HEADER = "x-kobo-synctoken"
VERSION = "1-0-0"
VERSION = "1-1-0"
LAST_MODIFIED_ADDED_VERSION = "1-1-0"
MIN_VERSION = "1-0-0"
token_schema = {
"type": "object",
"properties": {"version": {"type": "string"}, "data": {"type": "object"},},
"properties": {"version": {"type": "string"}, "data": {"type": "object"}, },
}
# This Schema doesn't contain enough information to detect and propagate book deletions from Calibre to the device.
# A potential solution might be to keep a list of all known book uuids in the token, and look for any missing from the db.
# A potential solution might be to keep a list of all known book uuids in the token, and look for any missing
# from the db (a minimal sketch of this idea follows data_schema_v1 below).
data_schema_v1 = {
"type": "object",
"properties": {
"raw_kobo_store_token": {"type": "string"},
"books_last_modified": {"type": "string"},
"books_last_created": {"type": "string"},
"archive_last_modified": {"type": "string"},
"reading_state_last_modified": {"type": "string"},
"tags_last_modified": {"type": "string"},
},
}
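# A minimal sketch of that potential solution, assuming a hypothetical
# "known_book_uuids" list were carried in the token data (it is not part of
# data_schema_v1 above):
#
#     def find_deleted_book_uuids(known_book_uuids, library_uuids):
#         # uuids the device still knows about but which are gone from the library
#         return set(known_book_uuids) - set(library_uuids)
#
#     find_deleted_book_uuids({"uuid-a", "uuid-b"}, {"uuid-a"})  # -> {"uuid-b"}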
@ -76,10 +89,16 @@ class SyncToken():
raw_kobo_store_token="",
books_last_created=datetime.min,
books_last_modified=datetime.min,
archive_last_modified=datetime.min,
reading_state_last_modified=datetime.min,
tags_last_modified=datetime.min,
):
self.raw_kobo_store_token = raw_kobo_store_token
self.books_last_created = books_last_created
self.books_last_modified = books_last_modified
self.archive_last_modified = archive_last_modified
self.reading_state_last_modified = reading_state_last_modified
self.tags_last_modified = tags_last_modified
@staticmethod
def from_headers(headers):
@ -109,12 +128,11 @@ class SyncToken():
raw_kobo_store_token = data_json["raw_kobo_store_token"]
try:
books_last_modified = datetime.utcfromtimestamp(
data_json["books_last_modified"]
)
books_last_created = datetime.utcfromtimestamp(
data_json["books_last_created"]
)
books_last_modified = get_datetime_from_json(data_json, "books_last_modified")
books_last_created = get_datetime_from_json(data_json, "books_last_created")
archive_last_modified = get_datetime_from_json(data_json, "archive_last_modified")
reading_state_last_modified = get_datetime_from_json(data_json, "reading_state_last_modified")
tags_last_modified = get_datetime_from_json(data_json, "tags_last_modified")
except TypeError:
log.error("SyncToken timestamps don't parse to a datetime.")
return SyncToken(raw_kobo_store_token=raw_kobo_store_token)
@ -123,6 +141,9 @@ class SyncToken():
raw_kobo_store_token=raw_kobo_store_token,
books_last_created=books_last_created,
books_last_modified=books_last_modified,
archive_last_modified=archive_last_modified,
reading_state_last_modified=reading_state_last_modified,
tags_last_modified=tags_last_modified
)
def set_kobo_store_header(self, store_headers):
@ -143,6 +164,9 @@ class SyncToken():
"raw_kobo_store_token": self.raw_kobo_store_token,
"books_last_modified": to_epoch_timestamp(self.books_last_modified),
"books_last_created": to_epoch_timestamp(self.books_last_created),
"archive_last_modified": to_epoch_timestamp(self.archive_last_modified),
"reading_state_last_modified": to_epoch_timestamp(self.reading_state_last_modified),
"tags_last_modified": to_epoch_timestamp(self.tags_last_modified)
},
}
return b64encode_json(token)
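# A minimal, self-contained sketch of the header round-trip performed by
# from_headers/to_headers above, assuming b64encode_json and its counterpart are
# plain base64-of-JSON wrappers (names and values here are illustrative only):
import base64
import json
def encode_sync_token(data, version="1-1-0"):
    # serialize the token dict and base64-encode it for the x-kobo-synctoken header
    return base64.b64encode(json.dumps({"version": version, "data": data}).encode())
def decode_sync_token(header_value):
    # reverse of encode_sync_token: recover the token dict from the header value
    return json.loads(base64.b64decode(header_value))
token = encode_sync_token({"books_last_created": 1577836800.0})
assert decode_sync_token(token)["data"]["books_last_created"] == 1577836800.0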

@ -21,11 +21,12 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division, print_function, unicode_literals
from datetime import datetime
from flask import Blueprint, request, flash, redirect, url_for
from flask_babel import gettext as _
from flask_login import login_required, current_user
from sqlalchemy.sql.expression import func, or_, and_
from sqlalchemy.sql.expression import func
from . import logger, ub, searched_ids, db
from .web import render_title_template
@ -36,6 +37,25 @@ shelf = Blueprint('shelf', __name__)
log = logger.create()
def check_shelf_edit_permissions(cur_shelf):
if not cur_shelf.is_public and not cur_shelf.user_id == int(current_user.id):
log.error("User %s not allowed to edit shelf %s", current_user, cur_shelf)
return False
if cur_shelf.is_public and not current_user.role_edit_shelfs():
log.info("User %s not allowed to edit public shelves", current_user)
return False
return True
def check_shelf_view_permissions(cur_shelf):
if cur_shelf.is_public:
return True
if current_user.is_anonymous or cur_shelf.user_id != current_user.id:
log.error("User is unauthorized to view non-public shelf: %s", cur_shelf)
return False
return True
@shelf.route("/shelf/add/<int:shelf_id>/<int:book_id>")
@login_required
def add_to_shelf(shelf_id, book_id):
@ -48,23 +68,15 @@ def add_to_shelf(shelf_id, book_id):
return redirect(url_for('web.index'))
return "Invalid shelf specified", 400
if not shelf.is_public and not shelf.user_id == int(current_user.id):
log.error("User %s not allowed to add a book to %s", current_user, shelf)
if not check_shelf_edit_permissions(shelf):
if not xhr:
flash(_(u"Sorry you are not allowed to add a book to the the shelf: %(shelfname)s", shelfname=shelf.name),
category="error")
return redirect(url_for('web.index'))
return "Sorry you are not allowed to add a book to the the shelf: %s" % shelf.name, 403
if shelf.is_public and not current_user.role_edit_shelfs():
log.info("User %s not allowed to edit public shelves", current_user)
if not xhr:
flash(_(u"You are not allowed to edit public shelves"), category="error")
return redirect(url_for('web.index'))
return "User is not allowed to edit public shelves", 403
book_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id,
ub.BookShelf.book_id == book_id).first()
ub.BookShelf.book_id == book_id).first()
if book_in_shelf:
log.error("Book %s is already part of %s", book_id, shelf)
if not xhr:
@ -78,8 +90,9 @@ def add_to_shelf(shelf_id, book_id):
else:
maxOrder = maxOrder[0]
ins = ub.BookShelf(shelf=shelf.id, book_id=book_id, order=maxOrder + 1)
ub.session.add(ins)
shelf.books.append(ub.BookShelf(shelf=shelf.id, book_id=book_id, order=maxOrder + 1))
shelf.last_modified = datetime.utcnow()
ub.session.merge(shelf)
ub.session.commit()
if not xhr:
flash(_(u"Book has been added to shelf: %(sname)s", sname=shelf.name), category="success")
@ -99,16 +112,10 @@ def search_to_shelf(shelf_id):
flash(_(u"Invalid shelf specified"), category="error")
return redirect(url_for('web.index'))
if not shelf.is_public and not shelf.user_id == int(current_user.id):
log.error("User %s not allowed to add a book to %s", current_user, shelf)
if not check_shelf_edit_permissions(shelf):
flash(_(u"You are not allowed to add a book to the the shelf: %(name)s", name=shelf.name), category="error")
return redirect(url_for('web.index'))
if shelf.is_public and not current_user.role_edit_shelfs():
log.error("User %s not allowed to edit public shelves", current_user)
flash(_(u"User is not allowed to edit public shelves"), category="error")
return redirect(url_for('web.index'))
if current_user.id in searched_ids and searched_ids[current_user.id]:
books_for_shelf = list()
books_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).all()
@ -135,8 +142,9 @@ def search_to_shelf(shelf_id):
for book in books_for_shelf:
maxOrder = maxOrder + 1
ins = ub.BookShelf(shelf=shelf.id, book_id=book, order=maxOrder)
ub.session.add(ins)
shelf.books.append(ub.BookShelf(shelf=shelf.id, book_id=book, order=maxOrder))
shelf.last_modified = datetime.utcnow()
ub.session.merge(shelf)
ub.session.commit()
flash(_(u"Books have been added to shelf: %(sname)s", sname=shelf.name), category="success")
else:
@ -163,8 +171,7 @@ def remove_from_shelf(shelf_id, book_id):
# true 0 x 1
# false 0 x 0
if (not shelf.is_public and shelf.user_id == int(current_user.id)) \
or (shelf.is_public and current_user.role_edit_shelfs()):
if check_shelf_edit_permissions(shelf):
book_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id,
ub.BookShelf.book_id == book_id).first()
@ -175,6 +182,7 @@ def remove_from_shelf(shelf_id, book_id):
return "Book already removed from shelf", 410
ub.session.delete(book_shelf)
shelf.last_modified = datetime.utcnow()
ub.session.commit()
if not xhr:
@ -185,7 +193,6 @@ def remove_from_shelf(shelf_id, book_id):
return redirect(url_for('web.index'))
return "", 204
else:
log.error("User %s not allowed to remove a book from %s", current_user, shelf)
if not xhr:
flash(_(u"Sorry you are not allowed to remove a book from this shelf: %(sname)s", sname=shelf.name),
category="error")
@ -193,7 +200,6 @@ def remove_from_shelf(shelf_id, book_id):
return "Sorry you are not allowed to remove a book from this shelf: %s" % shelf.name, 403
@shelf.route("/shelf/create", methods=["GET", "POST"])
@login_required
def create_shelf():
@ -212,21 +218,24 @@ def create_shelf():
.first() is None
if not is_shelf_name_unique:
flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]),
category="error")
else:
is_shelf_name_unique = ub.session.query(ub.Shelf) \
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) & (ub.Shelf.user_id == int(current_user.id))) \
.first() is None
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) &
(ub.Shelf.user_id == int(current_user.id)))\
.first() is None
if not is_shelf_name_unique:
flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]),
category="error")
if is_shelf_name_unique:
try:
ub.session.add(shelf)
ub.session.commit()
flash(_(u"Shelf %(title)s created", title=to_save["title"]), category="success")
return redirect(url_for('shelf.show_shelf', shelf_id = shelf.id ))
return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id))
except Exception:
flash(_(u"There was an error"), category="error")
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Create a Shelf"), page="shelfcreate")
@ -249,18 +258,22 @@ def edit_shelf(shelf_id):
.first() is None
if not is_shelf_name_unique:
flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]),
category="error")
else:
is_shelf_name_unique = ub.session.query(ub.Shelf) \
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) & (ub.Shelf.user_id == int(current_user.id))) \
.filter(ub.Shelf.id != shelf_id) \
.first() is None
.filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) &
(ub.Shelf.user_id == int(current_user.id)))\
.filter(ub.Shelf.id != shelf_id)\
.first() is None
if not is_shelf_name_unique:
flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]),
category="error")
if is_shelf_name_unique:
shelf.name = to_save["title"]
shelf.last_modified = datetime.utcnow()
if "is_public" in to_save:
shelf.is_public = 1
else:
@ -275,41 +288,33 @@ def edit_shelf(shelf_id):
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Edit a shelf"), page="shelfedit")
def delete_shelf_helper(cur_shelf):
if not cur_shelf or not check_shelf_edit_permissions(cur_shelf):
return
shelf_id = cur_shelf.id
ub.session.delete(cur_shelf)
ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete()
ub.session.add(ub.ShelfArchive(uuid=cur_shelf.uuid, user_id=cur_shelf.user_id))
ub.session.commit()
log.info("successfully deleted %s", cur_shelf)
@shelf.route("/shelf/delete/<int:shelf_id>")
@login_required
def delete_shelf(shelf_id):
cur_shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
deleted = None
if current_user.role_admin():
deleted = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).delete()
else:
if (not cur_shelf.is_public and cur_shelf.user_id == int(current_user.id)) \
or (cur_shelf.is_public and current_user.role_edit_shelfs()):
deleted = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id),
ub.Shelf.id == shelf_id),
and_(ub.Shelf.is_public == 1,
ub.Shelf.id == shelf_id))).delete()
if deleted:
ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete()
ub.session.commit()
log.info("successfully deleted %s", cur_shelf)
delete_shelf_helper(cur_shelf)
return redirect(url_for('web.index'))
# @shelf.route("/shelfdown/<int:shelf_id>")
@shelf.route("/shelf/<int:shelf_id>", defaults={'shelf_type': 1})
@shelf.route("/shelf/<int:shelf_id>/<int:shelf_type>")
def show_shelf(shelf_type, shelf_id):
if current_user.is_anonymous:
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == shelf_id).first()
else:
shelf = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id),
ub.Shelf.id == shelf_id),
and_(ub.Shelf.is_public == 1,
ub.Shelf.id == shelf_id))).first()
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
result = list()
# user is allowed to access shelf
if shelf:
if shelf and check_shelf_view_permissions(shelf):
page = "shelf.html" if shelf_type == 1 else 'shelfdown.html'
books_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id)\
@ -325,13 +330,12 @@ def show_shelf(shelf_type, shelf_id):
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book.book_id).delete()
ub.session.commit()
return render_title_template(page, entries=result, title=_(u"Shelf: '%(name)s'", name=shelf.name),
shelf=shelf, page="shelf")
shelf=shelf, page="shelf")
else:
flash(_(u"Error opening shelf. Shelf does not exist or is not accessible"), category="error")
return redirect(url_for("web.index"))
@shelf.route("/shelf/order/<int:shelf_id>", methods=["GET", "POST"])
@login_required
def order_shelf(shelf_id):
@ -343,32 +347,28 @@ def order_shelf(shelf_id):
for book in books_in_shelf:
setattr(book, 'order', to_save[str(book.book_id)])
counter += 1
# if order is different from before -> shelf.last_modified = datetime.utcnow()
ub.session.commit()
if current_user.is_anonymous:
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == shelf_id).first()
else:
shelf = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id),
ub.Shelf.id == shelf_id),
and_(ub.Shelf.is_public == 1,
ub.Shelf.id == shelf_id))).first()
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
result = list()
if shelf:
if shelf and check_shelf_view_permissions(shelf):
books_in_shelf2 = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id) \
.order_by(ub.BookShelf.order.asc()).all()
for book in books_in_shelf2:
cur_book = db.session.query(db.Books).filter(db.Books.id == book.book_id).filter(common_filters()).first()
if cur_book:
result.append({'title':cur_book.title,
'id':cur_book.id,
'author':cur_book.authors,
'series':cur_book.series,
'series_index':cur_book.series_index})
result.append({'title': cur_book.title,
'id': cur_book.id,
'author': cur_book.authors,
'series': cur_book.series,
'series_index': cur_book.series_index})
else:
cur_book = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
result.append({'title':_('Hidden Book'),
'id':cur_book.id,
'author':[],
'series':[]})
result.append({'title': _('Hidden Book'),
'id': cur_book.id,
'author': [],
'series': []})
return render_title_template('shelf_order.html', entries=result,
title=_(u"Change order of Shelf: '%(name)s'", name=shelf.name),
shelf=shelf, page="shelforder")

@ -216,6 +216,8 @@ if ( $( 'body.book' ).length > 0 ) {
.prependTo( '[aria-label^="Download, send"]' );
$( '#have_read_cb' )
.after( '<label class="block-label readLbl" for="#have_read_cb"></label>' );
$( '#archived_cb' )
.after( '<label class="block-label readLbl" for="#archived_cb"></label>' );
$( '#shelf-actions' ).prependTo( '[aria-label^="Download, send"]' );
@ -586,6 +588,20 @@ $( '#have_read_cb:checked' ).attr({
'data-viewport': '.btn-toolbar' })
.addClass('readunread-btn-tooltip');
$( '#archived_cb' ).attr({
'data-toggle': 'tooltip',
'title': $( '#archived_cb').attr('data-unchecked'),
'data-placement': 'bottom',
'data-viewport': '.btn-toolbar' })
.addClass('readunread-btn-tooltip');
$( '#archived_cb:checked' ).attr({
'data-toggle': 'tooltip',
'title': $( '#archived_cb').attr('data-checked'),
'data-placement': 'bottom',
'data-viewport': '.btn-toolbar' })
.addClass('readunread-btn-tooltip');
$( 'button#delete' ).attr({
'data-toggle-two': 'tooltip',
'title': $( 'button#delete' ).text(), //'Delete'
@ -601,6 +617,14 @@ $( '#have_read_cb' ).click(function() {
}
});
$( '#archived_cb' ).click(function() {
if ( $( '#archived_cb:checked' ).length > 0 ) {
$( this ).attr('data-original-title', $('#archived_cb').attr('data-checked'));
} else {
$( this).attr('data-original-title', $('#archived_cb').attr('data-unchecked'));
}
});
$( '.btn-group[aria-label="Edit/Delete book"] a' ).attr({
'data-toggle': 'tooltip',
'title': $( '#edit_book' ).text(), // 'Edit'

@ -25,6 +25,14 @@ $("#have_read_cb").on("change", function() {
$(this).closest("form").submit();
});
$(function() {
$("#archived_form").ajaxForm();
});
$("#archived_cb").on("change", function() {
$(this).closest("form").submit();
});
(function() {
var templates = {
add: _.template(

@ -45,10 +45,10 @@ def process_open(command, quotes=(), env=None, sout=subprocess.PIPE, serr=subpro
def process_wait(command, serr=subprocess.PIPE):
'''Run command, wait for process to terminate, and return an iterator over lines of its output.'''
# Run command, wait for process to terminate, and return an iterator over lines of its output.
p = process_open(command, serr=serr)
p.wait()
for l in p.stdout.readlines():
if isinstance(l, bytes):
l = l.decode('utf-8')
yield l
for line in p.stdout.readlines():
if isinstance(line, bytes):
line = line.decode('utf-8')
yield line
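# A brief usage sketch for process_wait; the command below is only an illustrative example:
if __name__ == '__main__':
    for line in process_wait(['python', '--version']):
        print(line.strip())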

@ -61,6 +61,21 @@
<label for="description">{{_('Description')}}</label>
<textarea class="form-control" name="description" id="description" rows="7">{% if book.comments %}{{book.comments[0].text}}{%endif%}</textarea>
</div>
<div class="form-group">
<label>{{_('Identifiers')}}</label>
<table class="table" id="identifier-table">
{% for identifier in book.identifiers %}
<tr>
<td><input type="text" class="form-control" name="identifier-type-{{identifier.type}}" value="{{identifier.type}}" required="required" placeholder="{{_('Identifier Type')}}"></td>
<td><input type="text" class="form-control" name="identifier-val-{{identifier.type}}" value="{{identifier.val}}" required="required" placeholder="{{_('Identifier Value')}}"></td>
<td><a class="btn btn-default" onclick="removeIdentifierLine(this)">{{_('Remove')}}</a></td>
</tr>
{% endfor %}
</table>
<a id="add-identifier-line" class="btn btn-default">{{_('Add Identifier')}}</a>
</div>
<div class="form-group">
<label for="tags">{{_('Tags')}}</label>
<input type="text" class="form-control typeahead" name="tags" id="tags" value="{% for tag in book.tags %}{{tag.name.strip()}}{% if not loop.last %}, {% endif %}{% endfor %}">
@ -169,7 +184,7 @@
</div>
<a href="#" id="get_meta" class="btn btn-default" data-toggle="modal" data-target="#metaModal">{{_('Fetch Metadata')}}</a>
<button type="submit" id="submit" class="btn btn-default">{{_('Save')}}</button>
<a href="{{ url_for('web.show_book', book_id=book.id) }}" class="btn btn-default">{{_('Cancel')}}</a>
<a href="{{ url_for('web.show_book', book_id=book.id) }}" id="edit_cancel" class="btn btn-default">{{_('Cancel')}}</a>
</div>
</form>
@ -185,12 +200,20 @@
<span>{{_('Are you really sure?')}}</span>
</div>
<div class="modal-body text-center">
<p>
<span>{{_('This book will be permanently erased from the database')}}</span>
<span>{{_('and hard disk')}}</span>
</p>
{% if config.config_kobo_sync %}
<p>
<span>{{_('Important Kobo Note: deleted books will remain on any paired Kobo device.')}}</span>
<span>{{_('Books must first be archived and the device synced before a book can safely be deleted.')}}</span>
</p>
{% endif %}
</div>
<div class="modal-footer">
<a href="{{ url_for('editbook.delete_book', book_id=book.id) }}" class="btn btn-danger">{{_('Delete')}}</a>
<a href="{{ url_for('editbook.delete_book', book_id=book.id) }}" id="delete_confirm" class="btn btn-danger">{{_('Delete')}}</a>
<button type="button" class="btn btn-default" data-dismiss="modal">{{_('Cancel')}}</button>
</div>
</div>
@ -277,6 +300,21 @@
'source': {{_('Source')|safe|tojson}},
};
var language = '{{ g.user.locale }}';
$("#add-identifier-line").click(function() {
// create a random identifier type so the input has a valid name in the form; it is not used when the form is processed
var rand_id = Math.floor(Math.random() * 1000000).toString();
var line = '<tr>';
line += '<td><input type="text" class="form-control" name="identifier-type-'+ rand_id +'" required="required" placeholder="{{_('Identifier Type')}}"></td>';
line += '<td><input type="text" class="form-control" name="identifier-val-'+ rand_id +'" required="required" placeholder="{{_('Identifier Value')}}"></td>';
line += '<td><a class="btn btn-default" onclick="removeIdentifierLine(this)">{{_('Remove')}}</a></td>';
line += '</tr>';
$("#identifier-table").append(line);
});
function removeIdentifierLine(el) {
$(el).parent().parent().remove();
}
</script>
<script src="{{ url_for('static', filename='js/libs/typeahead.bundle.js') }}"></script>
<script src="{{ url_for('static', filename='js/libs/bootstrap-rating-input.min.js') }}"></script>

@ -202,6 +202,14 @@
</label>
</form>
</p>
<p>
<form id="archived_form" action="{{ url_for('web.toggle_archived', book_id=entry.id)}}" method="POST">
<label class="block-label">
<input id="archived_cb" data-checked="{{_('Restore from archive')}}" data-unchecked="{{_('Add to archive')}}" type="checkbox" {% if is_archived %}checked{% endif %} >
<span>{{_('Archived')}}</span>
</label>
</form>
</p>
</div>
{% endif %}

@ -4,7 +4,7 @@
<div class="filterheader hidden-xs hidden-sm">
{% if entries.__len__() %}
{% if entries[0][0].sort %}
{% if data == 'author' %}
<button id="sort_name" class="btn btn-primary"><b>B,A <-> A B</b></button>
{% endif %}
{% endif %}

@ -20,6 +20,8 @@
from __future__ import division, print_function, unicode_literals
import os
import datetime
import itertools
import uuid
from binascii import hexlify
from flask import g
@ -36,14 +38,14 @@ except ImportError:
oauth_support = True
except ImportError:
oauth_support = False
from sqlalchemy import create_engine, exc, exists
from sqlalchemy import create_engine, exc, exists, event
from sqlalchemy import Column, ForeignKey
from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime, Float
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import backref, relationship, sessionmaker, Session
from werkzeug.security import generate_password_hash
from . import constants # , config
from . import constants
session = None
@ -54,7 +56,7 @@ def get_sidebar_config(kwargs=None):
kwargs = kwargs or []
if 'content' in kwargs:
content = kwargs['content']
content = isinstance(content, (User,LocalProxy)) and not content.role_anonymous()
content = isinstance(content, (User, LocalProxy)) and not content.role_anonymous()
else:
content = 'conf' in kwargs
sidebar = list()
@ -62,31 +64,31 @@ def get_sidebar_config(kwargs=None):
"visibility": constants.SIDEBAR_RECENT, 'public': True, "page": "root",
"show_text": _('Show recent books'), "config_show":False})
sidebar.append({"glyph": "glyphicon-fire", "text": _('Hot Books'), "link": 'web.books_list', "id": "hot",
"visibility": constants.SIDEBAR_HOT, 'public': True, "page": "hot", "show_text": _('Show Hot Books'),
"config_show":True})
"visibility": constants.SIDEBAR_HOT, 'public': True, "page": "hot",
"show_text": _('Show Hot Books'), "config_show": True})
sidebar.append(
{"glyph": "glyphicon-star", "text": _('Top Rated Books'), "link": 'web.books_list', "id": "rated",
"visibility": constants.SIDEBAR_BEST_RATED, 'public': True, "page": "rated",
"show_text": _('Show Top Rated Books'), "config_show":True})
"show_text": _('Show Top Rated Books'), "config_show": True})
sidebar.append({"glyph": "glyphicon-eye-open", "text": _('Read Books'), "link": 'web.books_list', "id": "read",
"visibility": constants.SIDEBAR_READ_AND_UNREAD, 'public': (not g.user.is_anonymous), "page": "read",
"show_text": _('Show read and unread'), "config_show": content})
sidebar.append(
{"glyph": "glyphicon-eye-close", "text": _('Unread Books'), "link": 'web.books_list', "id": "unread",
"visibility": constants.SIDEBAR_READ_AND_UNREAD, 'public': (not g.user.is_anonymous), "page": "unread",
"show_text": _('Show unread'), "config_show":False})
"show_text": _('Show unread'), "config_show": False})
sidebar.append({"glyph": "glyphicon-random", "text": _('Discover'), "link": 'web.books_list', "id": "rand",
"visibility": constants.SIDEBAR_RANDOM, 'public': True, "page": "discover",
"show_text": _('Show random books'), "config_show":True})
"show_text": _('Show random books'), "config_show": True})
sidebar.append({"glyph": "glyphicon-inbox", "text": _('Categories'), "link": 'web.category_list', "id": "cat",
"visibility": constants.SIDEBAR_CATEGORY, 'public': True, "page": "category",
"show_text": _('Show category selection'), "config_show":True})
"show_text": _('Show category selection'), "config_show": True})
sidebar.append({"glyph": "glyphicon-bookmark", "text": _('Series'), "link": 'web.series_list', "id": "serie",
"visibility": constants.SIDEBAR_SERIES, 'public': True, "page": "series",
"show_text": _('Show series selection'), "config_show":True})
"show_text": _('Show series selection'), "config_show": True})
sidebar.append({"glyph": "glyphicon-user", "text": _('Authors'), "link": 'web.author_list', "id": "author",
"visibility": constants.SIDEBAR_AUTHOR, 'public': True, "page": "author",
"show_text": _('Show author selection'), "config_show":True})
"show_text": _('Show author selection'), "config_show": True})
sidebar.append(
{"glyph": "glyphicon-text-size", "text": _('Publishers'), "link": 'web.publisher_list', "id": "publisher",
"visibility": constants.SIDEBAR_PUBLISHER, 'public': True, "page": "publisher",
@ -94,17 +96,20 @@ def get_sidebar_config(kwargs=None):
sidebar.append({"glyph": "glyphicon-flag", "text": _('Languages'), "link": 'web.language_overview', "id": "lang",
"visibility": constants.SIDEBAR_LANGUAGE, 'public': (g.user.filter_language() == 'all'),
"page": "language",
"show_text": _('Show language selection'), "config_show":True})
"show_text": _('Show language selection'), "config_show": True})
sidebar.append({"glyph": "glyphicon-star-empty", "text": _('Ratings'), "link": 'web.ratings_list', "id": "rate",
"visibility": constants.SIDEBAR_RATING, 'public': True,
"page": "rating", "show_text": _('Show ratings selection'), "config_show":True})
"page": "rating", "show_text": _('Show ratings selection'), "config_show": True})
sidebar.append({"glyph": "glyphicon-file", "text": _('File formats'), "link": 'web.formats_list', "id": "format",
"visibility": constants.SIDEBAR_FORMAT, 'public': True,
"page": "format", "show_text": _('Show file formats selection'), "config_show":True})
"page": "format", "show_text": _('Show file formats selection'), "config_show": True})
sidebar.append(
{"glyph": "glyphicon-trash", "text": _('Archived Books'), "link": 'web.books_list', "id": "archived",
"visibility": constants.SIDEBAR_ARCHIVED, 'public': (not g.user.is_anonymous), "page": "archived",
"show_text": _('Show archived books'), "config_show": content})
return sidebar
class UserBase:
@property
@ -232,7 +237,8 @@ class Anonymous(AnonymousUserMixin, UserBase):
self.loadSettings()
def loadSettings(self):
data = session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() # type: User
data = session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS)\
.first() # type: User
self.nickname = data.nickname
self.role = data.role
self.id=data.id
@ -255,7 +261,7 @@ class Anonymous(AnonymousUserMixin, UserBase):
@property
def is_anonymous(self):
return True # self.anon_browse
return True
@property
def is_authenticated(self):
@ -267,9 +273,13 @@ class Shelf(Base):
__tablename__ = 'shelf'
id = Column(Integer, primary_key=True)
uuid = Column(String, default=lambda: str(uuid.uuid4()))
name = Column(String)
is_public = Column(Integer, default=0)
user_id = Column(Integer, ForeignKey('user.id'))
books = relationship("BookShelf", backref="ub_shelf", cascade="all, delete-orphan", lazy="dynamic")
created = Column(DateTime, default=datetime.datetime.utcnow)
last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
def __repr__(self):
return '<Shelf %d:%r>' % (self.id, self.name)
@ -283,18 +293,42 @@ class BookShelf(Base):
book_id = Column(Integer)
order = Column(Integer)
shelf = Column(Integer, ForeignKey('shelf.id'))
date_added = Column(DateTime, default=datetime.datetime.utcnow)
def __repr__(self):
return '<Book %r>' % self.id
# This table keeps track of deleted Shelves so that deletes can be propagated to any paired Kobo device.
class ShelfArchive(Base):
__tablename__ = 'shelf_archive'
id = Column(Integer, primary_key=True)
uuid = Column(String)
user_id = Column(Integer, ForeignKey('user.id'))
last_modified = Column(DateTime, default=datetime.datetime.utcnow)
class ReadBook(Base):
__tablename__ = 'book_read_link'
STATUS_UNREAD = 0
STATUS_FINISHED = 1
STATUS_IN_PROGRESS = 2
id = Column(Integer, primary_key=True)
book_id = Column(Integer, unique=False)
user_id = Column(Integer, ForeignKey('user.id'), unique=False)
is_read = Column(Boolean, unique=False)
read_status = Column(Integer, unique=False, default=STATUS_UNREAD, nullable=False)
kobo_reading_state = relationship("KoboReadingState", uselist=False,
primaryjoin="and_(ReadBook.user_id == foreign(KoboReadingState.user_id), "
"ReadBook.book_id == foreign(KoboReadingState.book_id))",
cascade="all",
backref=backref("book_read_link",
uselist=False))
last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
last_time_started_reading = Column(DateTime, nullable=True)
times_started_reading = Column(Integer, default=0, nullable=False)
class Bookmark(Base):
@ -307,6 +341,69 @@ class Bookmark(Base):
bookmark_key = Column(String)
# Baseclass representing books that are archived on the user's Kobo device.
class ArchivedBook(Base):
__tablename__ = 'archived_book'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('user.id'))
book_id = Column(Integer)
is_archived = Column(Boolean, unique=False)
last_modified = Column(DateTime, default=datetime.datetime.utcnow)
# The Kobo ReadingState API keeps track of 4 timestamped entities:
# ReadingState, StatusInfo, Statistics, CurrentBookmark
# Which we map to the following 4 tables:
# KoboReadingState, ReadBook, KoboStatistics and KoboBookmark
class KoboReadingState(Base):
__tablename__ = 'kobo_reading_state'
id = Column(Integer, primary_key=True, autoincrement=True)
user_id = Column(Integer, ForeignKey('user.id'))
book_id = Column(Integer)
last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
priority_timestamp = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
current_bookmark = relationship("KoboBookmark", uselist=False, backref="kobo_reading_state", cascade="all")
statistics = relationship("KoboStatistics", uselist=False, backref="kobo_reading_state", cascade="all")
class KoboBookmark(Base):
__tablename__ = 'kobo_bookmark'
id = Column(Integer, primary_key=True)
kobo_reading_state_id = Column(Integer, ForeignKey('kobo_reading_state.id'))
last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
location_source = Column(String)
location_type = Column(String)
location_value = Column(String)
progress_percent = Column(Float)
content_source_progress_percent = Column(Float)
class KoboStatistics(Base):
__tablename__ = 'kobo_statistics'
id = Column(Integer, primary_key=True)
kobo_reading_state_id = Column(Integer, ForeignKey('kobo_reading_state.id'))
last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
remaining_time_minutes = Column(Integer)
spent_reading_minutes = Column(Integer)
# Updates the last_modified timestamp in the KoboReadingState table if any of its child tables are modified.
@event.listens_for(Session, 'before_flush')
def receive_before_flush(session, flush_context, instances):
for change in itertools.chain(session.new, session.dirty):
if isinstance(change, (ReadBook, KoboStatistics, KoboBookmark)):
if change.kobo_reading_state:
change.kobo_reading_state.last_modified = datetime.datetime.utcnow()
# Maintain the last_modified timestamp for the Shelf table.
for change in itertools.chain(session.new, session.deleted):
if isinstance(change, BookShelf):
change.ub_shelf.last_modified = datetime.datetime.utcnow()
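# A minimal sketch of the listener's effect, assuming this module's session has been
# initialized and at least one KoboReadingState row with a bookmark exists; the helper
# name is illustrative only:
def _touch_first_reading_state():
    state = session.query(KoboReadingState).first()
    if state and state.current_bookmark:
        # modify only the child bookmark ...
        state.current_bookmark.progress_percent = 42.0
        session.commit()
        # ... receive_before_flush also refreshed the parent's last_modified
        return state.last_modified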
# Baseclass representing Downloads from calibre-web in app.db
class Downloads(Base):
__tablename__ = 'downloads'
@ -331,7 +428,6 @@ class Registration(Base):
return u"<Registration('{0}')>".format(self.domain)
class RemoteAuthToken(Base):
__tablename__ = 'remote_auth_token'
@ -359,6 +455,14 @@ def migrate_Database(session):
ReadBook.__table__.create(bind=engine)
if not engine.dialect.has_table(engine.connect(), "bookmark"):
Bookmark.__table__.create(bind=engine)
if not engine.dialect.has_table(engine.connect(), "kobo_reading_state"):
KoboReadingState.__table__.create(bind=engine)
if not engine.dialect.has_table(engine.connect(), "kobo_bookmark"):
KoboBookmark.__table__.create(bind=engine)
if not engine.dialect.has_table(engine.connect(), "kobo_statistics"):
KoboStatistics.__table__.create(bind=engine)
if not engine.dialect.has_table(engine.connect(), "archived_book"):
ArchivedBook.__table__.create(bind=engine)
if not engine.dialect.has_table(engine.connect(), "registration"):
Registration.__table__.create(bind=engine)
conn = engine.connect()
@ -380,7 +484,31 @@ def migrate_Database(session):
conn.execute("ALTER TABLE remote_auth_token ADD column 'token_type' INTEGER DEFAULT 0")
conn.execute("update remote_auth_token set 'token_type' = 0")
session.commit()
try:
session.query(exists().where(ReadBook.read_status)).scalar()
except exc.OperationalError:
conn = engine.connect()
conn.execute("ALTER TABLE book_read_link ADD column 'read_status' INTEGER DEFAULT 0")
conn.execute("UPDATE book_read_link SET 'read_status' = 1 WHERE is_read")
conn.execute("ALTER TABLE book_read_link ADD column 'last_modified' DATETIME")
conn.execute("ALTER TABLE book_read_link ADD column 'last_time_started_reading' DATETIME")
conn.execute("ALTER TABLE book_read_link ADD column 'times_started_reading' INTEGER DEFAULT 0")
session.commit()
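Illustrative note, not part of the patch: the UPDATE above maps the old boolean is_read column onto the new integer read_status, so previously read rows end up as 1, which appears to correspond to the ReadBook.STATUS_FINISHED constant used later in this change set. A hypothetical query against the migrated column:

    finished = session.query(ReadBook).filter(
        ReadBook.user_id == 1,   # placeholder user id
        ReadBook.read_status == ReadBook.STATUS_FINISHED).count()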
try:
session.query(exists().where(Shelf.uuid)).scalar()
except exc.OperationalError:
conn = engine.connect()
conn.execute("ALTER TABLE shelf ADD column 'uuid' STRING")
conn.execute("ALTER TABLE shelf ADD column 'created' DATETIME")
conn.execute("ALTER TABLE shelf ADD column 'last_modified' DATETIME")
conn.execute("ALTER TABLE book_shelf_link ADD column 'date_added' DATETIME")
for shelf in session.query(Shelf).all():
shelf.uuid = str(uuid.uuid4())
shelf.created = datetime.datetime.now()
shelf.last_modified = datetime.datetime.now()
for book_shelf in session.query(BookShelf).all():
book_shelf.date_added = datetime.datetime.now()
session.commit()
# Handle the case where the table exists but has no content
cnt = session.query(Registration).count()
if not cnt:
@ -409,13 +537,12 @@ def migrate_Database(session):
except exc.OperationalError:
conn = engine.connect()
conn.execute("UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
"+ series_books * :side_series + category_books * :side_category + hot_books * "
":side_hot + :side_autor + :detail_random)"
,{'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE,
'side_series': constants.SIDEBAR_SERIES,
'side_category': constants.SIDEBAR_CATEGORY, 'side_hot': constants.SIDEBAR_HOT,
'side_autor': constants.SIDEBAR_AUTHOR,
'detail_random': constants.DETAIL_RANDOM})
"+ series_books * :side_series + category_books * :side_category + hot_books * "
":side_hot + :side_autor + :detail_random)",
{'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE,
'side_series': constants.SIDEBAR_SERIES, 'side_category': constants.SIDEBAR_CATEGORY,
'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR,
'detail_random': constants.DETAIL_RANDOM})
session.commit()
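Illustrative restatement, not part of the patch: the UPDATE above folds the old per-feature boolean columns into the single sidebar_view bitmask. Since each SIDEBAR_* constant is a distinct bit flag and the old columns hold 0/1, multiplying and summing is equivalent to OR-ing the enabled flags (SIDEBAR_AUTHOR and DETAIL_RANDOM are applied unconditionally). The *_books names below stand in for the old column values of one user row.

    sidebar_view = (random_books * constants.SIDEBAR_RANDOM
                    + language_books * constants.SIDEBAR_LANGUAGE
                    + series_books * constants.SIDEBAR_SERIES
                    + category_books * constants.SIDEBAR_CATEGORY
                    + hot_books * constants.SIDEBAR_HOT
                    + constants.SIDEBAR_AUTHOR
                    + constants.DETAIL_RANDOM)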
try:
session.query(exists().where(User.denied_tags)).scalar()
@ -425,7 +552,8 @@ def migrate_Database(session):
conn.execute("ALTER TABLE user ADD column `allowed_tags` String DEFAULT ''")
conn.execute("ALTER TABLE user ADD column `denied_column_value` DEFAULT ''")
conn.execute("ALTER TABLE user ADD column `allowed_column_value` DEFAULT ''")
if session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() is None:
if session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() \
is None:
create_anonymous_user(session)
try:
# check if one table with autoincrement is existing (should be user table)
@ -435,20 +563,20 @@ def migrate_Database(session):
# Create new table user_id and copy contents of table user into it
conn = engine.connect()
conn.execute("CREATE TABLE user_id (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
"nickname VARCHAR(64),"
"email VARCHAR(120),"
"role SMALLINT,"
"password VARCHAR,"
"kindle_mail VARCHAR(120),"
"locale VARCHAR(2),"
"sidebar_view INTEGER,"
"default_language VARCHAR(3),"
"UNIQUE (nickname),"
"UNIQUE (email))")
" nickname VARCHAR(64),"
"email VARCHAR(120),"
"role SMALLINT,"
"password VARCHAR,"
"kindle_mail VARCHAR(120),"
"locale VARCHAR(2),"
"sidebar_view INTEGER,"
"default_language VARCHAR(3),"
"UNIQUE (nickname),"
"UNIQUE (email))")
conn.execute("INSERT INTO user_id(id, nickname, email, role, password, kindle_mail,locale,"
"sidebar_view, default_language) "
"sidebar_view, default_language) "
"SELECT id, nickname, email, role, password, kindle_mail, locale,"
"sidebar_view, default_language FROM user")
"sidebar_view, default_language FROM user")
# delete old user table and rename new user_id table to user:
conn.execute("DROP TABLE user")
conn.execute("ALTER TABLE user_id RENAME TO user")
@ -464,25 +592,26 @@ def clean_database(session):
# Remove expired remote login tokens
now = datetime.datetime.now()
session.query(RemoteAuthToken).filter(now > RemoteAuthToken.expiration).\
filter(RemoteAuthToken.token_type !=1 ).delete()
filter(RemoteAuthToken.token_type != 1).delete()
session.commit()
# Save downloaded books per user in calibre-web's own database
def update_download(book_id, user_id):
check = session.query(Downloads).filter(Downloads.user_id == user_id).filter(Downloads.book_id ==
book_id).first()
check = session.query(Downloads).filter(Downloads.user_id == user_id).filter(Downloads.book_id == book_id).first()
if not check:
new_download = Downloads(user_id=user_id, book_id=book_id)
session.add(new_download)
session.commit()
# Delete non-existing downloaded books in calibre-web's own database
def delete_download(book_id):
session.query(Downloads).filter(book_id == Downloads.book_id).delete()
session.commit()
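Hypothetical call sites for the two helpers above, not part of the patch; the real callers live in the download views, and the book id and current_user (from flask_login) are placeholders.

    update_download(42, int(current_user.id))   # record that the current user fetched book 42
    delete_download(42)                         # drop download records once book 42 is removed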
# Generate user Guest (translated text) as anonymous user with no rights
def create_anonymous_user(session):
user = User()
@ -540,8 +669,12 @@ def dispose():
old_session = session
session = None
if old_session:
try: old_session.close()
except Exception: pass
try:
old_session.close()
except Exception:
pass
if old_session.bind:
try: old_session.bind.dispose()
except Exception: pass
try:
old_session.bind.dispose()
except Exception:
pass

@ -69,7 +69,7 @@ class Updater(threading.Thread):
def get_available_updates(self, request_method, locale):
if config.config_updatechannel == constants.UPDATE_STABLE:
return self._stable_available_updates(request_method)
return self._nightly_available_updates(request_method,locale)
return self._nightly_available_updates(request_method, locale)
def do_work(self):
try:
@ -132,7 +132,7 @@ class Updater(threading.Thread):
def pause(self):
self.can_run.clear()
#should just resume the thread
# should just resume the thread
def resume(self):
self.can_run.set()
@ -268,7 +268,7 @@ class Updater(threading.Thread):
def is_venv(self):
if (hasattr(sys, 'real_prefix')) or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
return os.sep + os.path.relpath(sys.prefix,constants.BASE_DIR)
return os.sep + os.path.relpath(sys.prefix, constants.BASE_DIR)
else:
return False
@ -280,7 +280,7 @@ class Updater(threading.Thread):
@classmethod
def _stable_version_info(cls):
return constants.STABLE_VERSION # Current version
return constants.STABLE_VERSION # Current version
def _nightly_available_updates(self, request_method, locale):
tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
@ -436,7 +436,7 @@ class Updater(threading.Thread):
patch_version_update > current_version[2]) or \
minor_version_update > current_version[1]:
parents.append([commit[i]['tag_name'], commit[i]['body'].replace('\r\n', '<p>')])
newer=True
newer = True
i -= 1
continue
if major_version_update < current_version[0]:

@ -23,7 +23,7 @@
from __future__ import division, print_function, unicode_literals
import os
import base64
import datetime
from datetime import datetime
import json
import mimetypes
import traceback
@ -40,16 +40,20 @@ from flask_login import login_user, logout_user, login_required, current_user
from sqlalchemy.exc import IntegrityError
from sqlalchemy.sql.expression import text, func, true, false, not_, and_, or_
from werkzeug.exceptions import default_exceptions
try:
from werkzeug.exceptions import FailedDependency
except ImportError:
from werkzeug.exceptions import UnprocessableEntity as FailedDependency
from werkzeug.datastructures import Headers
from werkzeug.security import generate_password_hash, check_password_hash
from . import constants, logger, isoLanguages, services, worker
from . import searched_ids, lm, babel, db, ub, config, get_locale, app
from .gdriveutils import getFileFromEbooksFolder, do_gdrive_download
from .helper import common_filters, get_search_results, fill_indexpage, speaking_language, check_valid_domain, \
order_authors, get_typeahead, render_task_status, json_serial, get_cc_columns, \
get_book_cover, get_download_link, send_mail, generate_random_password, send_registration_mail, \
check_send_to_kindle, check_read_formats, lcase, tags_filters, reset_password
from .helper import common_filters, get_search_results, fill_indexpage, fill_indexpage_with_archived_books, \
speaking_language, check_valid_domain, order_authors, get_typeahead, render_task_status, json_serial, \
get_cc_columns, get_book_cover, get_download_link, send_mail, generate_random_password, \
send_registration_mail, check_send_to_kindle, check_read_formats, lcase, tags_filters, reset_password
from .pagination import Pagination
from .redirect import redirect_back
@ -111,6 +115,15 @@ for ex in default_exceptions:
elif ex == 500:
app.register_error_handler(ex, internal_error)
if feature_support['ldap']:
# Only way of catching the LDAPException when logging in while the LDAP server is down
@app.errorhandler(services.ldap.LDAPException)
def handle_exception(e):
log.debug('LDAP server not accessible while trying to login to opds feed')
return error_http(FailedDependency())
web = Blueprint('web', __name__)
log = logger.create()
@ -156,7 +169,7 @@ def load_user_from_auth_header(header_val):
except (TypeError, UnicodeDecodeError, binascii.Error):
pass
user = _fetch_user_by_name(basic_username)
if config.config_login_type == constants.LOGIN_LDAP and services.ldap:
if user and config.config_login_type == constants.LOGIN_LDAP and services.ldap:
if services.ldap.bind_user(str(user.password), basic_password):
return user
if user and check_password_hash(str(user.password), basic_password):
@ -392,13 +405,19 @@ def toggle_read(book_id):
book = ub.session.query(ub.ReadBook).filter(and_(ub.ReadBook.user_id == int(current_user.id),
ub.ReadBook.book_id == book_id)).first()
if book:
book.is_read = not book.is_read
if book.read_status == ub.ReadBook.STATUS_FINISHED:
book.read_status = ub.ReadBook.STATUS_UNREAD
else:
book.read_status = ub.ReadBook.STATUS_FINISHED
else:
readBook = ub.ReadBook()
readBook.user_id = int(current_user.id)
readBook.book_id = book_id
readBook.is_read = True
readBook = ub.ReadBook(user_id=current_user.id, book_id=book_id)
readBook.read_status = ub.ReadBook.STATUS_FINISHED
book = readBook
if not book.kobo_reading_state:
kobo_reading_state = ub.KoboReadingState(user_id=current_user.id, book_id=book_id)
kobo_reading_state.current_bookmark = ub.KoboBookmark()
kobo_reading_state.statistics = ub.KoboStatistics()
book.kobo_reading_state = kobo_reading_state
ub.session.merge(book)
ub.session.commit()
else:
@ -419,6 +438,22 @@ def toggle_read(book_id):
return ""
@web.route("/ajax/togglearchived/<int:book_id>", methods=['POST'])
@login_required
def toggle_archived(book_id):
archived_book = ub.session.query(ub.ArchivedBook).filter(and_(ub.ArchivedBook.user_id == int(current_user.id),
ub.ArchivedBook.book_id == book_id)).first()
if archived_book:
archived_book.is_archived = not archived_book.is_archived
archived_book.last_modified = datetime.utcnow()
else:
archived_book = ub.ArchivedBook(user_id=current_user.id, book_id=book_id)
archived_book.is_archived = True
ub.session.merge(archived_book)
ub.session.commit()
return ""
'''
@web.route("/ajax/getcomic/<int:book_id>/<book_format>/<int:page>")
@login_required
@ -608,6 +643,8 @@ def books_list(data, sort, book_id, page):
return render_category_books(page, book_id, order)
elif data == "language":
return render_language_books(page, book_id, order)
elif data == "archived":
return render_archived_books(page, order)
else:
entries, random, pagination = fill_indexpage(page, db.Books, True, order)
return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
@ -954,14 +991,14 @@ def advanced_search():
if pub_start:
try:
searchterm.extend([_(u"Published after ") +
format_date(datetime.datetime.strptime(pub_start, "%Y-%m-%d"),
format_date(datetime.strptime(pub_start, "%Y-%m-%d"),
format='medium', locale=get_locale())])
except ValueError:
pub_start = u""
if pub_end:
try:
searchterm.extend([_(u"Published before ") +
format_date(datetime.datetime.strptime(pub_end, "%Y-%m-%d"),
format_date(datetime.strptime(pub_end, "%Y-%m-%d"),
format='medium', locale=get_locale())])
except ValueError:
pub_start = u""
@ -1062,8 +1099,8 @@ def advanced_search():
def render_read_books(page, are_read, as_xml=False, order=None, *args, **kwargs):
order = order or []
if not config.config_read_column:
readBooks = ub.session.query(ub.ReadBook).filter(ub.ReadBook.user_id == int(current_user.id)) \
.filter(ub.ReadBook.is_read == True).all()
readBooks = ub.session.query(ub.ReadBook).filter(ub.ReadBook.user_id == int(current_user.id))\
.filter(ub.ReadBook.read_status == ub.ReadBook.STATUS_FINISHED).all()
readBookIds = [x.book_id for x in readBooks]
else:
try:
@ -1095,6 +1132,26 @@ def render_read_books(page, are_read, as_xml=False, order=None, *args, **kwargs)
title=name, page=pagename)
def render_archived_books(page, order):
order = order or []
archived_books = (
ub.session.query(ub.ArchivedBook)
.filter(ub.ArchivedBook.user_id == int(current_user.id))
.filter(ub.ArchivedBook.is_archived == True)
.all()
)
archived_book_ids = [archived_book.book_id for archived_book in archived_books]
archived_filter = db.Books.id.in_(archived_book_ids)
entries, random, pagination = fill_indexpage_with_archived_books(page, db.Books, archived_filter, order,
allow_show_archived=True)
name = _(u'Archived Books') + ' (' + str(len(archived_book_ids)) + ')'
pagename = "archived"
return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
title=name, page=pagename)
# ################################### Download/Send ##################################################################
@ -1320,7 +1377,7 @@ def verify_token(token):
return redirect(url_for('web.index'))
# Token expired
if datetime.datetime.now() > auth_token.expiration:
if datetime.now() > auth_token.expiration:
ub.session.delete(auth_token)
ub.session.commit()
@ -1352,7 +1409,7 @@ def token_verified():
data['message'] = _(u"Token not found")
# Token expired
elif datetime.datetime.now() > auth_token.expiration:
elif datetime.now() > auth_token.expiration:
ub.session.delete(auth_token)
ub.session.commit()
@ -1526,7 +1583,8 @@ def read_book(book_id, book_format):
@web.route("/book/<int:book_id>")
@login_required_if_no_ano
def show_book(book_id):
entries = db.session.query(db.Books).filter(db.Books.id == book_id).filter(common_filters()).first()
entries = db.session.query(db.Books).filter(and_(db.Books.id == book_id,
common_filters(allow_show_archived=True))).first()
if entries:
for index in range(0, len(entries.languages)):
try:
@ -1545,7 +1603,8 @@ def show_book(book_id):
if not config.config_read_column:
matching_have_read_book = ub.session.query(ub.ReadBook). \
filter(and_(ub.ReadBook.user_id == int(current_user.id), ub.ReadBook.book_id == book_id)).all()
have_read = len(matching_have_read_book) > 0 and matching_have_read_book[0].is_read
have_read = len(matching_have_read_book) > 0 and \
matching_have_read_book[0].read_status == ub.ReadBook.STATUS_FINISHED
else:
try:
matching_have_read_book = getattr(entries, 'custom_column_' + str(config.config_read_column))
@ -1554,8 +1613,14 @@ def show_book(book_id):
log.error("Custom Column No.%d is not existing in calibre database", config.config_read_column)
have_read = None
archived_book = ub.session.query(ub.ArchivedBook).\
filter(and_(ub.ArchivedBook.user_id == int(current_user.id),
ub.ArchivedBook.book_id == book_id)).first()
is_archived = archived_book and archived_book.is_archived
else:
have_read = None
is_archived = None
entries.tags = sort(entries.tags, key=lambda tag: tag.name)
@ -1570,9 +1635,8 @@ def show_book(book_id):
audioentries.append(media_format.format.lower())
return render_title_template('detail.html', entry=entries, audioentries=audioentries, cc=cc,
is_xhr=request.headers.get('X-Requested-With') == 'XMLHttpRequest',
title=entries.title, books_shelfs=book_in_shelfs,
have_read=have_read, kindle_list=kindle_list, reader_list=reader_list, page="book")
is_xhr=request.headers.get('X-Requested-With') == 'XMLHttpRequest', title=entries.title,
books_shelfs=book_in_shelfs, have_read=have_read, is_archived=is_archived,
kindle_list=kindle_list, reader_list=reader_list, page="book")
else:
log.debug(u"Error opening eBook. File does not exist or file is not accessible:")
flash(_(u"Error opening eBook. File does not exist or file is not accessible:"), category="error")

File diff suppressed because it is too large