@@ -1,16 +1,33 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# This file is part of the Calibre-Web (https://github.com/janeczku/calibre-web)
# Copyright (C) 2012-2019 cervinko, idalin, SiphonSquirrel, ouzklcn, akushsky,
# OzzieIsaacs, bodybybuddha, jkrehm, matthazinski, janeczku
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import db
import ub
from flask import current_app as app
import logging
from tempfile import gettempdir
import sys
import io
import os
import re
import unicodedata
from io import BytesIO
import worker
import time
from flask import send_from_directory, make_response, redirect, abort
@@ -18,16 +35,13 @@ from flask_babel import gettext as _
from flask_login import current_user
from babel.dates import format_datetime
from datetime import datetime
import threading
import shutil
import requests
import zipfile
try:
    import gdriveutils as gd
except ImportError:
    pass
import web
import server
import random
import subprocess
@@ -37,8 +51,14 @@ try:
except ImportError:
    use_unidecode = False

try:
    from PIL import Image
    use_PIL = True
except ImportError:
    use_PIL = False

# Global variables
updater_thread = None
# updater_thread = None
global_WorkerThread = worker.WorkerThread()
global_WorkerThread.start()
@@ -110,7 +130,7 @@ def send_registration_mail(e_mail, user_name, default_password, resend=False):
    text += "Sincerely\r\n\r\n"
    text += "Your Calibre-Web team"
    global_WorkerThread.add_email(_(u'Get Started with Calibre-Web'),None, None, ub.get_mail_settings(),
                                  e_mail, user_name, _(u"Registration e-mail for user: %(name)s", name=user_name), text)
                                  e_mail, None, _(u"Registration e-mail for user: %(name)s", name=user_name), text)
    return

def check_send_to_kindle(entry):
@@ -128,8 +148,8 @@ def check_send_to_kindle(entry):
                    bookformats.append({'format':'Pdf','convert':0,'text':_('Send %(format)s to Kindle',format='Pdf')})
                if 'AZW' in ele.format:
                    bookformats.append({'format':'Azw','convert':0,'text':_('Send %(format)s to Kindle',format='Azw')})
                if 'AZW3' in ele.format:
                    bookformats.append({'format':'Azw3','convert':0,'text':_('Send %(format)s to Kindle',format='Azw3')})
                '''if 'AZW3' in ele.format:
                    bookformats.append({'format':'Azw3','convert':0,'text':_('Send %(format)s to Kindle',format='Azw3')})'''
        else:
            formats = list()
            for ele in iter(entry.data):
@@ -138,18 +158,16 @@ def check_send_to_kindle(entry):
                bookformats.append({'format': 'Mobi','convert':0,'text':_('Send %(format)s to Kindle',format='Mobi')})
            if 'AZW' in formats:
                bookformats.append({'format': 'Azw','convert':0,'text':_('Send %(format)s to Kindle',format='Azw')})
            if 'AZW3' in formats:
                bookformats.append({'format': 'Azw3','convert':0,'text':_('Send %(format)s to Kindle',format='Azw3')})
            if 'PDF' in formats:
                bookformats.append({'format': 'Pdf','convert':0,'text':_('Send %(format)s to Kindle',format='Pdf')})
            if ub.config.config_ebookconverter >= 1:
                if 'EPUB' in formats and not 'MOBI' in formats:
                    bookformats.append({'format': 'Mobi','convert':1,
                                        'text':_('Convert %(orig)s to %(format)s and send to Kindle',orig='Epub',format='Mobi')})
            if ub.config.config_ebookconverter == 2:
            '''if ub.config.config_ebookconverter == 2:
                if 'EPUB' in formats and not 'AZW3' in formats:
                    bookformats.append({'format': 'Azw3','convert':1,
                                        'text':_('Convert %(orig)s to %(format)s and send to Kindle',orig='Epub',format='Azw3')})
                                        'text':_('Convert %(orig)s to %(format)s and send to Kindle',orig='Epub',format='Azw3')})'''
        return bookformats
    else:
        app.logger.error(u'Cannot find book entry %d', entry.id)
@@ -159,7 +177,7 @@ def check_send_to_kindle(entry):
# Check if a reader is existing for any of the book formats, if not, return empty list, otherwise return
# list with supported formats
def check_read_formats(entry):
    EXTENSIONS_READER = {'TXT', 'PDF', 'EPUB', 'ZIP', 'CBZ', 'TAR', 'CBT', 'RAR', 'CBR'}
    EXTENSIONS_READER = {'TXT', 'PDF', 'EPUB', 'CBZ', 'CBT', 'CBR'}
    bookformats = list()
    if len(entry.data):
        for ele in iter(entry.data):
@@ -217,7 +235,10 @@ def get_valid_filename(value, replace_whitespace=True):
    value = value[:128]
    if not value:
        raise ValueError("Filename cannot be empty")
    return value
    if sys.version_info.major == 3:
        return value
    else:
        return value.decode('utf-8')


def get_sorted_author(value):
@@ -306,12 +327,12 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
    # Rename all files from old names to new names
    if authordir != new_authordir or titledir != new_titledir:
        try:
            for format in localbook.data:
                path_name = os.path.join(calibrepath, new_authordir, os.path.basename(path))
                new_name = get_valid_filename(localbook.title) + ' - ' + get_valid_filename(new_authordir)
                os.renames(os.path.join(path_name, format.name + '.' + format.format.lower()),
                           os.path.join(path_name,new_name + '.' + format.format.lower()))
                format.name = new_name
            new_name = get_valid_filename(localbook.title) + ' - ' + get_valid_filename(new_authordir)
            path_name = os.path.join(calibrepath, new_authordir, os.path.basename(path))
            for file_format in localbook.data:
                os.renames(os.path.join(path_name, file_format.name + '.' + file_format.format.lower()),
                           os.path.join(path_name, new_name + '.' + file_format.format.lower()))
                file_format.name = new_name
        except OSError as ex:
            web.app.logger.error("Rename file in path " + path + " to " + new_name + ": " + str(ex))
            web.app.logger.debug(ex, exc_info=True)
@@ -323,6 +344,7 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
def update_dir_structure_gdrive(book_id, first_author):
    error = False
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    path = book.path

    authordir = book.path.split('/')[0]
    if first_author:
@@ -330,40 +352,39 @@ def update_dir_structure_gdrive(book_id, first_author):
    else:
        new_authordir = get_valid_filename(book.authors[0].name)
    titledir = book.path.split('/')[1]
    new_titledir = get_valid_filename(book.title) + " (" + str(book_id) + ")"
    new_titledir = get_valid_filename(book.title) + u" (" + str(book_id) + u")"

    if titledir != new_titledir:
        gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), titledir)
        if gFile:
            gFile['title'] = new_titledir
            gFile.Upload()
            book.path = book.path.split('/')[0] + '/' + new_titledir
            book.path = book.path.split('/')[0] + u'/' + new_titledir
            path = book.path
            gd.updateDatabaseOnEdit(gFile['id'], book.path)  # only child folder affected
        else:
            error = _(u'File %(file)s not found on Google Drive', file=book.path) # file not found

    if authordir != new_authordir:
        gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), titledir)
        gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), new_titledir)
        if gFile:
            gd.moveGdriveFolderRemote(gFile,new_authordir)
            book.path = new_authordir + '/' + book.path.split('/')[1]
            gd.moveGdriveFolderRemote(gFile, new_authordir)
            book.path = new_authordir + u'/' + book.path.split('/')[1]
            path = book.path
            gd.updateDatabaseOnEdit(gFile['id'], book.path)
        else:
            error = _(u'File %(file)s not found on Google Drive', file=authordir) # file not found
    # Rename all files from old names to new names
    # ToDo: Rename also all bookfiles with new author name and new title name
    '''
    if authordir != new_authordir or titledir != new_titledir:
        for format in book.data:
            # path_name = os.path.join(calibrepath, new_authordir, os.path.basename(path))
            new_name = get_valid_filename(book.title) + ' - ' + get_valid_filename(book)
            format.name = new_name
            if gFile:
                pass
            else:
                error = _(u'File %(file)s not found on Google Drive', file=format.name) # file not found
                break'''
    new_name = get_valid_filename(book.title) + u' - ' + get_valid_filename(new_authordir)
    for file_format in book.data:
        gFile = gd.getFileFromEbooksFolder(path, file_format.name + u'.' + file_format.format.lower())
        if not gFile:
            error = _(u'File %(file)s not found on Google Drive', file=file_format.name) # file not found
            break
        gd.moveGdriveFileRemote(gFile, new_name + u'.' + file_format.format.lower())
        file_format.name = new_name
    return error
@@ -409,6 +430,8 @@ def delete_book(book, calibrepath, book_format):
def get_book_cover(cover_path):
    if ub.config.config_use_google_drive:
        try:
            if not web.is_gdrive_ready():
                return send_from_directory(os.path.join(os.path.dirname(__file__), "static"), "generic_cover.jpg")
            path=gd.get_cover_via_gdrive(cover_path)
            if path:
                return redirect(path)
@@ -416,7 +439,7 @@ def get_book_cover(cover_path):
                web.app.logger.error(cover_path + '/cover.jpg not found on Google Drive')
                return send_from_directory(os.path.join(os.path.dirname(__file__), "static"), "generic_cover.jpg")
        except Exception as e:
            web.app.logger.error("Error Message: "+e.message)
            web.app.logger.error("Error Message: " + e.message)
            web.app.logger.exception(e)
            # traceback.print_exc()
            return send_from_directory(os.path.join(os.path.dirname(__file__), "static"),"generic_cover.jpg")
@@ -424,29 +447,73 @@ def get_book_cover(cover_path):
        return send_from_directory(os.path.join(ub.config.config_calibre_dir, cover_path), "cover.jpg")


# saves book cover to gdrive or locally
def save_cover(url, book_path):
# saves book cover from url
def save_cover_from_url(url, book_path):
    img = requests.get(url)
    if img.headers.get('content-type') != 'image/jpeg':
        web.app.logger.error("Cover is no jpg file, can't save")
        return False
    return save_cover(img, book_path)

    if ub.config.config_use_google_drive:
        tmpDir = gettempdir()
        f = open(os.path.join(tmpDir, "uploaded_cover.jpg"), "wb")
        f.write(img.content)

def save_cover_from_filestorage(filepath, saved_filename, img):
    if hasattr(img,'_content'):
        f = open(os.path.join(filepath, saved_filename), "wb")
        f.write(img._content)
        f.close()
        gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg'), os.path.join(tmpDir, f.name))
        web.app.logger.info("Cover is saved on Google Drive")
        return True

        f = open(os.path.join(ub.config.config_calibre_dir, book_path, "cover.jpg"), "wb")
        f.write(img.content)
        f.close()
        web.app.logger.info("Cover is saved")
    else:
        # check if file path exists, otherwise create it, copy file to calibre path and delete temp file
        if not os.path.exists(filepath):
            try:
                os.makedirs(filepath)
            except OSError:
                web.app.logger.error(u"Failed to create path for cover")
                return False
        try:
            img.save(os.path.join(filepath, saved_filename))
        except OSError:
            web.app.logger.error(u"Failed to store cover-file")
            return False
        except IOError:
            web.app.logger.error(u"Cover-file is not a valid image file")
            return False
    return True


# saves book cover to gdrive or locally
def save_cover(img, book_path):
    content_type = img.headers.get('content-type')

    if use_PIL:
        if content_type not in ('image/jpeg', 'image/png', 'image/webp'):
            web.app.logger.error("Only jpg/jpeg/png/webp files are supported as coverfile")
            return False
        # convert to jpg because calibre only supports jpg
        if content_type in ('image/png', 'image/webp'):
            if hasattr(img,'stream'):
                imgc = Image.open(img.stream)
            else:
                imgc = Image.open(io.BytesIO(img.content))
            im = imgc.convert('RGB')
            tmp_bytesio = io.BytesIO()
            im.save(tmp_bytesio, format='JPEG')
            img._content = tmp_bytesio.getvalue()
    else:
        if content_type not in ('image/jpeg'):
            web.app.logger.error("Only jpg/jpeg files are supported as coverfile")
            return False

    if ub.config.config_use_google_drive:
        tmpDir = gettempdir()
        if save_cover_from_filestorage(tmpDir, "uploaded_cover.jpg", img) is True:
            gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg'),
                                        os.path.join(tmpDir, "uploaded_cover.jpg"))
            web.app.logger.info("Cover is saved on Google Drive")
            return True
        else:
            return False
    else:
        return save_cover_from_filestorage(os.path.join(ub.config.config_calibre_dir, book_path), "cover.jpg", img)


def do_download_file(book, book_format, data, headers):
    if ub.config.config_use_google_drive:
        startTime = time.time()
@@ -468,167 +535,6 @@ def do_download_file(book, book_format, data, headers):
##################################


class Updater(threading.Thread):

    def __init__(self):
        threading.Thread.__init__(self)
        self.status = 0

    def run(self):
        try:
            self.status = 1
            r = requests.get('https://api.github.com/repos/janeczku/calibre-web/zipball/master', stream=True)
            r.raise_for_status()

            fname = re.findall("filename=(.+)", r.headers['content-disposition'])[0]
            self.status = 2
            z = zipfile.ZipFile(BytesIO(r.content))
            self.status = 3
            tmp_dir = gettempdir()
            z.extractall(tmp_dir)
            self.status = 4
            self.update_source(os.path.join(tmp_dir, os.path.splitext(fname)[0]), ub.config.get_main_dir)
            self.status = 6
            time.sleep(2)
            server.Server.setRestartTyp(True)
            server.Server.stopServer()
            self.status = 7
            time.sleep(2)
        except requests.exceptions.HTTPError as ex:
            logging.getLogger('cps.web').info( u'HTTP Error' + ' ' + str(ex))
            self.status = 8
        except requests.exceptions.ConnectionError:
            logging.getLogger('cps.web').info(u'Connection error')
            self.status = 9
        except requests.exceptions.Timeout:
            logging.getLogger('cps.web').info(u'Timeout while establishing connection')
            self.status = 10
        except requests.exceptions.RequestException:
            self.status = 11
            logging.getLogger('cps.web').info(u'General error')

    def get_update_status(self):
        return self.status

    @classmethod
    def file_to_list(self, filelist):
        return [x.strip() for x in open(filelist, 'r') if not x.startswith('#EXT')]

    @classmethod
    def one_minus_two(self, one, two):
        return [x for x in one if x not in set(two)]

    @classmethod
    def reduce_dirs(self, delete_files, new_list):
        new_delete = []
        for filename in delete_files:
            parts = filename.split(os.sep)
            sub = ''
            for part in parts:
                sub = os.path.join(sub, part)
                if sub == '':
                    sub = os.sep
                count = 0
                for song in new_list:
                    if song.startswith(sub):
                        count += 1
                        break
                if count == 0:
                    if sub != '\\':
                        new_delete.append(sub)
                    break
        return list(set(new_delete))

    @classmethod
    def reduce_files(self, remove_items, exclude_items):
        rf = []
        for item in remove_items:
            if not item.startswith(exclude_items):
                rf.append(item)
        return rf

    @classmethod
    def moveallfiles(self, root_src_dir, root_dst_dir):
        change_permissions = True
        if sys.platform == "win32" or sys.platform == "darwin":
            change_permissions = False
        else:
            logging.getLogger('cps.web').debug('Update on OS-System : ' + sys.platform)
            new_permissions = os.stat(root_dst_dir)
            # print new_permissions
        for src_dir, __, files in os.walk(root_src_dir):
            dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
                logging.getLogger('cps.web').debug('Create-Dir: '+dst_dir)
                if change_permissions:
                    # print('Permissions: User '+str(new_permissions.st_uid)+' Group '+str(new_permissions.st_uid))
                    os.chown(dst_dir, new_permissions.st_uid, new_permissions.st_gid)
            for file_ in files:
                src_file = os.path.join(src_dir, file_)
                dst_file = os.path.join(dst_dir, file_)
                if os.path.exists(dst_file):
                    if change_permissions:
                        permission = os.stat(dst_file)
                    logging.getLogger('cps.web').debug('Remove file before copy: '+dst_file)
                    os.remove(dst_file)
                else:
                    if change_permissions:
                        permission = new_permissions
                shutil.move(src_file, dst_dir)
                logging.getLogger('cps.web').debug('Move File '+src_file+' to '+dst_dir)
                if change_permissions:
                    try:
                        os.chown(dst_file, permission.st_uid, permission.st_gid)
                    except (Exception) as e:
                        # ex = sys.exc_info()
                        old_permissions = os.stat(dst_file)
                        logging.getLogger('cps.web').debug('Fail change permissions of ' + str(dst_file) + '. Before: '
                                                           + str(old_permissions.st_uid) + ':' + str(old_permissions.st_gid) + ' After: '
                                                           + str(permission.st_uid) + ':' + str(permission.st_gid) + ' error: '+str(e))
        return

    def update_source(self, source, destination):
        # destination files
        old_list = list()
        exclude = (
            'vendor' + os.sep + 'kindlegen.exe', 'vendor' + os.sep + 'kindlegen', os.sep + 'app.db',
            os.sep + 'vendor', os.sep + 'calibre-web.log')
        for root, dirs, files in os.walk(destination, topdown=True):
            for name in files:
                old_list.append(os.path.join(root, name).replace(destination, ''))
            for name in dirs:
                old_list.append(os.path.join(root, name).replace(destination, ''))
        # source files
        new_list = list()
        for root, dirs, files in os.walk(source, topdown=True):
            for name in files:
                new_list.append(os.path.join(root, name).replace(source, ''))
            for name in dirs:
                new_list.append(os.path.join(root, name).replace(source, ''))

        delete_files = self.one_minus_two(old_list, new_list)

        rf = self.reduce_files(delete_files, exclude)

        remove_items = self.reduce_dirs(rf, new_list)

        self.moveallfiles(source, destination)

        for item in remove_items:
            item_path = os.path.join(destination, item[1:])
            if os.path.isdir(item_path):
                logging.getLogger('cps.web').debug("Delete dir " + item_path)
                shutil.rmtree(item_path)
            else:
                try:
                    logging.getLogger('cps.web').debug("Delete file " + item_path)
                    # log_from_thread("Delete file " + item_path)
                    os.remove(item_path)
                except Exception:
                    logging.getLogger('cps.web').debug("Could not remove:" + item_path)
        shutil.rmtree(source, ignore_errors=True)


def check_unrar(unrarLocation):
    error = False
@@ -654,26 +560,6 @@ def check_unrar(unrarLocation):
    return (error, version)


def is_sha1(sha1):
    if len(sha1) != 40:
        return False
    try:
        int(sha1, 16)
    except ValueError:
        return False
    return True


def get_current_version_info():
    content = {}
    content[0] = '$Format:%H$'
    content[1] = '$Format:%cI$'
    # content[0] = 'bb7d2c6273ae4560e83950d36d64533343623a57'
    # content[1] = '2018-09-09T10:13:08+02:00'
    if is_sha1(content[0]) and len(content[1]) > 0:
        return {'hash': content[0], 'datetime': content[1]}
    return False


def json_serial(obj):
    """JSON serializer for objects not serializable by default json code"""
@@ -682,17 +568,13 @@ def json_serial(obj):
        return obj.isoformat()
    raise TypeError ("Type %s not serializable" % type(obj))


# helper function to apply localize status information in tasklist entries
def render_task_status(tasklist):
    #helper function to apply localize status information in tasklist entries
    renderedtasklist=list()
    # task2 = task
    for task in tasklist:
        if task['user'] == current_user.nickname or current_user.role_admin():
            # task2 = copy.deepcopy(task) # = task
            if task['formStarttime']:
                task['starttime'] = format_datetime(task['formStarttime'], format='short', locale=web.get_locale())
                # task2['formStarttime'] = ""
            else:
                if 'starttime' not in task:
                    task['starttime'] = ""