Code cosmetics

pull/164/merge
OzzieIsaacs 8 years ago
parent 00462237fe
commit 5044b78b08

@@ -1,6 +1,8 @@
/**
* Created by SpeedProg on 05.04.2015.
*/
/* global Bloodhound */
/*
Takes a prefix, query typeahead callback, Bloodhound typeahead adapter
@@ -33,38 +35,6 @@ var authors = new Bloodhound({
}
});
function authors_source(query, cb) {
var bhAdapter = authors.ttAdapter();
var tokens = query.split("&");
var current_author = tokens[tokens.length-1].trim();
tokens.splice(tokens.length-1, 1); // remove last element
var prefix = "";
for (var i = 0; i < tokens.length; i++) {
var author = tokens[i].trim();
prefix += author + " & ";
}
prefixed_source(prefix, current_author, cb, bhAdapter);
}
var promise = authors.initialize();
promise.done(function(){
$("#bookAuthor").typeahead(
{
highlight: true, minLength: 1,
hint: true
}, {
name: "authors",
displayKey: "name",
source: authors_source
}
)
});
var series = new Bloodhound({
name: "series",
datumTokenizer: function(datum) {
@@ -80,19 +50,7 @@ var series = new Bloodhound({
}
}
});
var promise = series.initialize();
promise.done(function(){
$("#series").typeahead(
{
highlight: true, minLength: 0,
hint: true
}, {
name: "series",
displayKey: "name",
source: series.ttAdapter()
}
)
});
var tags = new Bloodhound({
name: "tags",
@@ -109,36 +67,6 @@ var tags = new Bloodhound({
}
});
function tag_source(query, cb) {
var bhAdapter = tags.ttAdapter();
var tokens = query.split(",");
var current_tag = tokens[tokens.length-1].trim();
tokens.splice(tokens.length-1, 1); // remove last element
var prefix = "";
for (var i = 0; i < tokens.length; i++) {
var tag = tokens[i].trim();
prefix += tag + ", ";
}
prefixed_source(prefix, current_tag, cb, bhAdapter);
}
var promise = tags.initialize();
promise.done(function(){
$("#tags").typeahead(
{
highlight: true, minLength: 0,
hint: true
}, {
name: "tags",
displayKey: "name",
source: tag_source
}
)
});
var languages = new Bloodhound({
name: "languages",
datumTokenizer: function(datum) {
@@ -156,36 +84,85 @@ var languages = new Bloodhound({
}
});
function language_source(query, cb) {
var bhAdapter = languages.ttAdapter();
function sourceSplit(query, cb, split, source) {
var bhAdapter = source.ttAdapter();
var tokens = query.split(",");
var currentLanguage = tokens[tokens.length-1].trim();
var tokens = query.split(split);
var currentSource = tokens[tokens.length-1].trim();
tokens.splice(tokens.length-1, 1); // remove last element
var prefix = "";
var newSplit;
if (split === "&"){
newSplit = " " + split + " ";
}else{
newSplit = split + " ";
}
for (var i = 0; i < tokens.length; i++) {
var tag = tokens[i].trim();
prefix += tag + ", ";
prefix += tokens[i].trim() + newSplit;
}
prefixed_source(prefix, currentLanguage, cb, bhAdapter);
prefixed_source(prefix, currentSource, cb, bhAdapter);
}
var promise = languages.initialize();
promise.done(function(){
$("#languages").typeahead(
var promiseAuthors = authors.initialize();
promiseAuthors.done(function(){
$("#bookAuthor").typeahead(
{
highlight: true, minLength: 1,
hint: true
}, {
name: "authors",
displayKey: "name",
source: function(query, cb){
return sourceSplit(query, cb, "&", authors); //sourceSplit //("&")
}
});
});
var promiseSeries = series.initialize();
promiseSeries.done(function(){
$("#series").typeahead(
{
highlight: true, minLength: 0,
hint: true
}, {
name: "languages",
name: "series",
displayKey: "name",
source: language_source
source: series.ttAdapter()
}
)
});
var promiseTags = tags.initialize();
promiseTags.done(function(){
$("#tags").typeahead(
{
highlight: true, minLength: 0,
hint: true
}, {
name: "tags",
displayKey: "name",
source: function(query, cb){
return sourceSplit(query, cb, ",", tags);
}
});
});
var promiseLanguages = languages.initialize();
promiseLanguages.done(function(){
$("#languages").typeahead(
{
highlight: true, minLength: 0,
hint: true
}, {
name: "languages",
displayKey: "name",
source: function(query, cb){
return sourceSplit(query, cb, ",", languages); //(",")
}
});
});
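The per-field source callbacks are now funneled through the single sourceSplit helper above. For readers skimming the diff, the prefix/current-term handling it performs can be illustrated with a small, self-contained Python sketch; the function name and the sample strings below are illustrative only, not part of the JavaScript file.

# Illustration (in Python) of the token handling sourceSplit performs on the
# typeahead input; names and sample inputs are illustrative only.
def split_query(query, sep):
    tokens = [t.strip() for t in query.split(sep)]
    current = tokens.pop()                         # term still being typed
    joiner = " %s " % sep if sep == "&" else sep + " "
    prefix = "".join(t + joiner for t in tokens)   # completed terms, re-joined
    return prefix, current

print(split_query("Tolkien & Le Gui", "&"))    # ('Tolkien & ', 'Le Gui')
print(split_query("fantasy, epic, dra", ","))  # ('fantasy, epic, ', 'dra')

The prefix and the term still being typed are then handed to prefixed_source together with the Bloodhound adapter, so completions are appended to what the user has already entered.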
$("form").on("change input typeahead:selected", function(data){
var form = $("form").serialize();
$.getJSON( get_path()+"/get_matching_tags", form, function( data ) {

@@ -86,42 +86,42 @@ $(function() {
updateTimerID=setInterval(updateTimer, 2000);}
});
});
});
function restartTimer() {
$("#spinner").hide();
$("#RestartDialog").modal("hide");
}
function restartTimer() {
$("#spinner").addClass("hidden");
$("#RestartDialog").modal("hide");
}
function updateTimer() {
$.ajax({
dataType: 'json',
url: window.location.pathname+"/../../get_updater_status",
success: function(data) {
console.log(data.status);
$("#UpdateprogressDialog #Updatecontent").html(updateText[data.status]);
if (data.status >6){
function updateTimer() {
$.ajax({
dataType: 'json',
url: window.location.pathname+"/../../get_updater_status",
success: function(data) {
console.log(data.status);
$("#UpdateprogressDialog #Updatecontent").html(updateText[data.status]);
if (data.status >6){
clearInterval(updateTimerID);
$("#spinner2").hide();
$("#UpdateprogressDialog #updateFinished").removeClass("hidden");
$("#check_for_update").removeClass("hidden");
$("#perform_update").addClass("hidden");
}
},
error: function() {
// console.log('Done');
clearInterval(updateTimerID);
$("#spinner2").hide();
$("#UpdateprogressDialog #Updatecontent").html(updateText[7]);
$("#UpdateprogressDialog #updateFinished").removeClass("hidden");
$("#check_for_update").removeClass("hidden");
$("#perform_update").addClass("hidden");
}
},
error: function() {
// console.log('Done');
clearInterval(updateTimerID);
$("#spinner2").hide();
$("#UpdateprogressDialog #Updatecontent").html(updateText[7]);
$("#UpdateprogressDialog #updateFinished").removeClass("hidden");
$("#check_for_update").removeClass("hidden");
$("#perform_update").addClass("hidden");
},
timeout:2000
});
}
},
timeout:2000
});
}
$(window).resize(function(event) {
$(".discover .row").isotope("reLayout");
});
$(window).resize(function(event) {
$(".discover .row").isotope("reLayout");
});
});

@@ -1,3 +1,5 @@
/* global Sortable,sortTrue */
var sortable = Sortable.create(sortTrue, {
group: "sorting",
sort: true

@@ -62,12 +62,12 @@ from tornado import version as tornadoVersion
try:
from urllib.parse import quote
from imp import reload
except ImportError as e:
except ImportError:
from urllib import quote
try:
from flask_login import __version__ as flask_loginVersion
except ImportError as e:
except ImportError:
from flask_login.__about__ import __version__ as flask_loginVersion
import time
@@ -613,7 +613,7 @@ def feed_new():
off = request.args.get("offset")
if not off:
off = 0
entries, random, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
entries, _, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
db.Books, True, db.Books.timestamp.desc())
xml = render_title_template('feed.xml', entries=entries, pagination=pagination)
response = make_response(xml)
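A side note on the offset arithmetic in feed_new above: the page number passed to fill_indexpage is derived from the OPDS offset parameter. A worked example with hypothetical values follows; note that on Python 3 the plain / here would yield a float, so // is the integer-division equivalent.

# Worked example of the offset-to-page arithmetic; the values are hypothetical.
config_books_per_page = 20        # hypothetical setting value
off = "40"                        # offset supplied by the OPDS client
page = int(off) // int(config_books_per_page) + 1   # // keeps it an int on Python 3
print(page)                       # 3 -> entries 40..59 form the third page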
@@ -850,8 +850,9 @@ def get_metadata_calibre_companion(uuid):
def get_authors_json():
if request.method == "GET":
query = request.args.get('q')
entries = db.session.execute("select name from authors where name like '%" + query + "%'")
json_dumps = json.dumps([dict(r) for r in entries])
# entries = db.session.execute("select name from authors where name like '%" + query + "%'")
entries = db.session.query(db.Authors).filter(db.Authors.name.like("%" + query + "%")).all()
json_dumps = json.dumps([dict(name=r.name) for r in entries])
return json_dumps
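The same rewrite, replacing string-concatenated SQL with an ORM filter, is applied to the tags and series endpoints below. A minimal, self-contained sketch of why the ORM form is safer (it uses a throwaway Tag model, not calibre-web's db module): the search term travels as a bound parameter, so quotes in user input cannot alter the statement.

# Self-contained sketch (throwaway Tag model, not calibre-web's db.Tags).
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker   # SQLAlchemy 1.4+

Base = declarative_base()

class Tag(Base):
    __tablename__ = "tags"
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine("sqlite://")          # in-memory database
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([Tag(name="Fantasy"), Tag(name="Fan fiction")])
session.commit()

query = "Fan' OR '1'='1"                     # input that would break concatenated SQL
entries = session.query(Tag).filter(Tag.name.like("%" + query + "%")).all()
print([t.name for t in entries])             # [] - the quote is just data here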
@@ -860,8 +861,11 @@ def get_authors_json():
def get_tags_json():
if request.method == "GET":
query = request.args.get('q')
entries = db.session.execute("select name from tags where name like '%" + query + "%'")
json_dumps = json.dumps([dict(r) for r in entries])
# entries = db.session.execute("select name from tags where name like '%" + query + "%'")
entries = db.session.query(db.Tags).filter(db.Tags.name.like("%" + query + "%")).all()
#for x in entries:
# alfa = dict(name=x.name)
json_dumps = json.dumps([dict(name=r.name) for r in entries])
return json_dumps
@app.route("/get_update_status", methods=['GET'])
@@ -922,7 +926,7 @@ def get_languages_json():
try:
cur_l = LC.parse(lang.lang_code)
lang.name = cur_l.get_language_name(get_locale())
except Exception as e:
except Exception:
lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
entries = [s for s in languages if query in s.name.lower()]
json_dumps = json.dumps([dict(name=r.name) for r in entries])
@@ -934,8 +938,9 @@ def get_languages_json():
def get_series_json():
if request.method == "GET":
query = request.args.get('q')
entries = db.session.execute("select name from series where name like '%" + query + "%'")
json_dumps = json.dumps([dict(r) for r in entries])
entries = db.session.query(db.Series).filter(db.Series.name.like("%" + query + "%")).all()
# entries = db.session.execute("select name from series where name like '%" + query + "%'")
json_dumps = json.dumps([dict(name=r.name) for r in entries])
return json_dumps
@@ -1088,13 +1093,13 @@ def language_overview():
try:
cur_l = LC.parse(lang.lang_code)
lang.name = cur_l.get_language_name(get_locale())
except Exception as e:
except Exception:
lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
else:
try:
langfound = 1
cur_l = LC.parse(current_user.filter_language())
except Exception as e:
except Exception:
langfound = 0
languages = db.session.query(db.Languages).filter(
db.Languages.lang_code == current_user.filter_language()).all()
@@ -1118,7 +1123,7 @@ def language(name, page):
try:
cur_l = LC.parse(name)
name = cur_l.get_language_name(get_locale())
except Exception as e:
except Exception:
name = _(isoLanguages.get(part3=name).name)
return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
title=_(u"Language: %(name)s", name=name))
@@ -1179,7 +1184,7 @@ def show_book(id):
try:
entries.languages[index].language_name = LC.parse(entries.languages[index].lang_code).get_language_name(
get_locale())
except Exception as e:
except Exception:
entries.languages[index].language_name = _(
isoLanguages.get(part3=entries.languages[index].lang_code).name)
tmpcc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
@@ -1234,7 +1239,7 @@ def stats():
kindlegen = os.path.join(vendorpath, u"kindlegen")
versions['KindlegenVersion'] = _('not installed')
if os.path.exists(kindlegen):
p = subprocess.Popen(kindlegen, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
p = subprocess.Popen(kindlegen, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
stdin=subprocess.PIPE)
p.wait()
for lines in p.stdout.readlines():
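On the shell=True to shell=False change in stats(): without a shell the kindlegen path is executed directly instead of being handed to a shell for re-parsing, so spaces or shell metacharacters in the vendor path are harmless. A hedged sketch of the pattern; the binary location below is hypothetical.

# Sketch of invoking a helper binary without a shell; the path is hypothetical.
import subprocess

kindlegen = "/opt/calibre-web/vendor/kindlegen"   # substitute the vendored binary
p = subprocess.Popen([kindlegen], shell=False,    # list form: nothing is shell-parsed
                     stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                     stdin=subprocess.PIPE)
p.wait()
for line in p.stdout.readlines():                 # kindlegen prints a usage/version banner
    print(line.decode(errors="ignore").rstrip())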
@@ -1435,7 +1440,7 @@ def advanced_search():
try:
cur_l = LC.parse(lang.lang_code)
lang.name = cur_l.get_language_name(get_locale())
except Exception as e:
except Exception:
lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
searchterm.extend(language.name for language in language_names)
searchterm = " + ".join(filter(None, searchterm))
@@ -1678,7 +1683,7 @@ def register():
try:
ub.session.add(content)
ub.session.commit()
except Exception as e:
except Exception:
ub.session.rollback()
flash(_(u"An unknown error occured. Please try again later."), category="error")
return render_title_template('register.html', title=_(u"register"))
@@ -1806,7 +1811,7 @@ def create_shelf():
ub.session.add(shelf)
ub.session.commit()
flash(_(u"Shelf %(title)s created", title=to_save["title"]), category="success")
except Exception as e:
except Exception:
flash(_(u"There was an error"), category="error")
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"create a shelf"))
else:
@@ -1834,7 +1839,7 @@ def edit_shelf(shelf_id):
try:
ub.session.commit()
flash(_(u"Shelf %(title)s changed", title=to_save["title"]), category="success")
except Exception as e:
except Exception:
flash(_(u"There was an error"), category="error")
return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Edit a shelf"))
else:
@@ -1924,7 +1929,7 @@ def profile():
try:
cur_l = LC.parse(lang.lang_code)
lang.name = cur_l.get_language_name(get_locale())
except Exception as e:
except Exception:
lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
translations = babel.list_translations() + [LC('en')]
for book in content.downloads:
@@ -2138,7 +2143,7 @@ def new_user():
try:
cur_l = LC.parse(lang.lang_code)
lang.name = cur_l.get_language_name(get_locale())
except Exception as e:
except Exception:
lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
translations = [LC('en')] + babel.list_translations()
if request.method == "POST":
@@ -2243,7 +2248,7 @@ def edit_user(user_id):
try:
cur_l = LC.parse(lang.lang_code)
lang.name = cur_l.get_language_name(get_locale())
except Exception as e:
except Exception:
lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
translations = babel.list_translations() + [LC('en')]
for book in content.downloads:
@@ -2375,7 +2380,7 @@ def edit_book(book_id):
try:
book.languages[index].language_name = LC.parse(book.languages[index].lang_code).get_language_name(
get_locale())
except Exception as e:
except Exception:
book.languages[index].language_name = _(isoLanguages.get(part3=book.languages[index].lang_code).name)
for author in book.authors:
author_names.append(author.name)
@@ -2433,7 +2438,7 @@ def edit_book(book_id):
for lang in languages:
try:
lang.name = LC.parse(lang.lang_code).get_language_name(get_locale()).lower()
except Exception as e:
except Exception:
lang.name = _(isoLanguages.get(part3=lang.lang_code).name).lower()
for inp_lang in input_languages:
if inp_lang == lang.name:
@@ -2624,12 +2629,12 @@ def upload():
return redirect(url_for('index'))
try:
copyfile(meta.file_path, saved_filename)
except OSError as e:
except OSError:
flash(_(u"Failed to store file %s (Permission denied)." % saved_filename), category="error")
return redirect(url_for('index'))
try:
os.unlink(meta.file_path)
except OSError as e:
except OSError:
flash(_(u"Failed to delete file %s (Permission denied)." % meta.file_path), category="warning")
file_size = os.path.getsize(saved_filename)
