Update testing

Update custom columns unicode enums
pull/774/head
Ozzieisaacs 6 years ago
parent c527d1f49a
commit 32568c9009

@@ -9,6 +9,7 @@ import re
 import ast
 from ub import config
 import ub
+import sys
 session = None
 cc_exceptions = ['datetime', 'comments', 'float', 'composite', 'series']
@@ -301,6 +302,8 @@ class Custom_Columns(Base):
     def get_display_dict(self):
         display_dict = ast.literal_eval(self.display)
+        if sys.version_info < (3, 0):
+            display_dict['enum_values'] = [x.decode('unicode_escape') for x in display_dict['enum_values']]
         return display_dict
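
On Python 2, ast.literal_eval returns the serialized enum values as byte strings, so non-ASCII characters survive only as escape sequences; decoding with 'unicode_escape' restores the unicode text, while Python 3 needs no conversion. A minimal sketch of the behaviour this hunk targets (the sample display string is hypothetical):

import ast
import sys

display = "{'enum_values': ['caf\\xe9', 'na\\xefve']}"  # hypothetical stored value
display_dict = ast.literal_eval(display)
if sys.version_info < (3, 0):
    # Python 2: literal_eval yields byte strings; decode escapes back to unicode
    display_dict['enum_values'] = [x.decode('unicode_escape') for x in display_dict['enum_values']]
print(display_dict['enum_values'])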

@@ -306,12 +306,12 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
     # Rename all files from old names to new names
     if authordir != new_authordir or titledir != new_titledir:
         try:
-            for format in localbook.data:
+            for file_format in localbook.data:
                 path_name = os.path.join(calibrepath, new_authordir, os.path.basename(path))
                 new_name = get_valid_filename(localbook.title) + ' - ' + get_valid_filename(new_authordir)
-                os.renames(os.path.join(path_name, format.name + '.' + format.format.lower()),
-                           os.path.join(path_name,new_name + '.' + format.format.lower()))
-                format.name = new_name
+                os.renames(os.path.join(path_name, file_format.name + '.' + file_format.format.lower()),
+                           os.path.join(path_name,new_name + '.' + file_format.format.lower()))
+                file_format.name = new_name
         except OSError as ex:
             web.app.logger.error("Rename file in path " + path + " to " + new_name + ": " + str(ex))
             web.app.logger.debug(ex, exc_info=True)
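
Renaming the loop variable from format to file_format avoids shadowing Python's built-in format() inside the loop body. Note also that os.renames, unlike os.rename, creates any missing intermediate directories and prunes directories left empty after the move. A minimal sketch of the rename pattern, with hypothetical paths:

import os

old_path = os.path.join('library', 'Old Author', 'Title - Old Author.epub')
new_path = os.path.join('library', 'New Author', 'Title - New Author.epub')
# os.renames creates 'library/New Author' if needed and removes
# 'library/Old Author' once it is empty
os.renames(old_path, new_path)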

File diff suppressed because one or more lines are too long

@@ -40,7 +40,7 @@ $( 'a.navbar-brand' ).clone().appendTo( '.home-btn' ).empty().removeClass('navba
// Wrap book description in div container
if ( $( 'body.book' ).length > 0 ) {
description = $( '.comments' );
bookInfo = $( '.author' ).nextUntil( 'h3:contains("Description")');
$( 'h3:contains("Description")' ).detach();
@@ -144,7 +144,7 @@ return $(this).text().replace(/^\s+|^\t+|\t+|\s+$/g, "");
$.each(published, function(i, val) {
$( '.publishing-date' ).append( '<span>' + published[i] + '</span>' );
});
languages = $( '.languages p span' ).text().split( ': ' );
$( '.languages p span' ).remove();
$.each(languages, function(i, val) {
@@ -335,7 +335,7 @@ $( 'input#query' ).focusout(function() {
$( 'form[role="search"]' ).removeClass( 'search-focus' );
}, 100);
});
// Check if dropdown goes out of viewport and add class
$(document).on('click','.dropdown-toggle',function() {
@@ -521,8 +521,6 @@ $( '#add-to-shelf' ).attr({
     'data-viewport': '.btn-toolbar' })
     .addClass('addtoshelf-btn-tooltip');
-var teetet = $( '#add-to-shelf' ).text()
 $( '#have_read_cb' ).attr({
     'data-toggle': 'tooltip',
     'title': 'Mark As Read',
@@ -559,8 +557,6 @@ $( '.btn-group[aria-label="Edit/Delete book"] a' ).attr({
     'data-viewport': '.btn-toolbar' })
     .addClass('edit-btn-tooltip');
-var teetet = $( '#edit_book' ).text()
 $( '#sendbtn' ).attr({
     'data-toggle': 'tooltip',
     'title': 'Send to Kindle',

File diff suppressed because one or more lines are too long

@@ -0,0 +1 @@
+!function(a){a.fn.datepicker.dates.ja={days:["日曜","月曜","火曜","水曜","木曜","金曜","土曜"],daysShort:["日","月","火","水","木","金","土"],daysMin:["日","月","火","水","木","金","土"],months:["1月","2月","3月","4月","5月","6月","7月","8月","9月","10月","11月","12月"],monthsShort:["1月","2月","3月","4月","5月","6月","7月","8月","9月","10月","11月","12月"],today:"今日",format:"yyyy/mm/dd",titleFormat:"yyyy年mm月",clear:"クリア"}}(jQuery);

@@ -79,7 +79,7 @@ var RarVolumeHeader = function(bstream) {
// bytes 4,5
this.flags = {};
this.flags.value = bstream.peekBits(16);
info(" flags=" + twoByteValueToHexString(this.flags.value));
switch (this.headType) {
case MAIN_HEAD:
@@ -115,7 +115,7 @@ var RarVolumeHeader = function(bstream) {
default:
bstream.readBits(16);
}
// byte 6,7
this.headSize = bstream.readBits(16);
info(" headSize=" + this.headSize);
@@ -212,12 +212,12 @@ var RarVolumeHeader = function(bstream) {
 //var BLOCK_LZ = 0;
-var rLDecode = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224],
+var rLDecode = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224],
     rLBits = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5],
     rDBitLengthCounts = [4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 14, 0, 12],
     rSDDecode = [0, 4, 8, 16, 32, 64, 128, 192],
     rSDBits = [2, 2, 3, 4, 5, 6, 6, 6];
 var rDDecode = [0, 1, 2, 3, 4, 6, 8, 12, 16, 24, 32,
     48, 64, 96, 128, 192, 256, 384, 512, 768, 1024, 1536, 2048, 3072,
     4096, 6144, 8192, 12288, 16384, 24576, 32768, 49152, 65536, 98304,
@@ -275,12 +275,12 @@ function rarReadTables(bstream) {
var i;
// before we start anything we need to get byte-aligned
bstream.readBits( (8 - bstream.bitPtr) & 0x7 );
if (bstream.readBits(1)) {
info("Error! PPM not implemented yet");
return;
}
if (!bstream.readBits(1)) { //discard old table
for (i = UnpOldTable.length; i--;) UnpOldTable[i] = 0;
}
@@ -308,7 +308,7 @@ function rarReadTables(bstream) {
// now all 20 bit lengths are obtained, we construct the Huffman Table:
rarMakeDecodeTables(BitLength, 0, BD, rBC);
var TableSize = rHuffTableSize;
//console.log(DecodeLen, DecodePos, DecodeNum);
for (i = 0; i < TableSize;) {
@@ -332,12 +332,12 @@ function rarReadTables(bstream) {
}
}
}
rarMakeDecodeTables(Table, 0, LD, rNC);
rarMakeDecodeTables(Table, rNC, DD, rDC);
rarMakeDecodeTables(Table, rNC + rDC, LDD, rLDC);
rarMakeDecodeTables(Table, rNC + rDC + rLDC, RD, rRC);
for (i = UnpOldTable.length; i--;) {
UnpOldTable[i] = Table[i];
}
@@ -366,7 +366,7 @@ function rarDecodeNumber(bstream, dec) {
: 15));
bstream.readBits(bits);
var N = DecodePos[bits] + ((bitField - DecodeLen[bits - 1]) >>> (16 - bits));
return DecodeNum[N];
}
@@ -568,7 +568,7 @@ function Unpack29(bstream) {
DBits[Slot] = BitLength;
}
}
var Bits;
//tablesRead = false;
@@ -578,13 +578,13 @@ function Unpack29(bstream) {
lastLength = 0;
var i;
for (i = UnpOldTable.length; i--;) UnpOldTable[i] = 0;
// read in Huffman tables
rarReadTables(bstream);
while (true) {
var num = rarDecodeNumber(bstream, LD);
if (num < 256) {
rBuffer.insertByte(num);
continue;
@@ -675,11 +675,11 @@ function Unpack29(bstream) {
       continue;
     }
   }
-  rarUpdateProgress()
+  rarUpdateProgress();
 }
 function rarReadEndOfBlock(bstream) {
   rarUpdateProgress();
   var NewTable = false, NewFile = false;
@@ -703,7 +703,7 @@ function rarReadVMCode(bstream) {
     Length = bstream.readBits(16);
   }
   var vmCode = [];
-  for(var I = 0; I < Length; I++) {
+  for (var I = 0; I < Length; I++) {
     //do something here with cheking readbuf
     vmCode.push(bstream.readBits(8));
   }
@@ -724,8 +724,8 @@ function rarInsertLastMatch(length, distance) {
 }
 function rarInsertOldDist(distance) {
-  rOldDist.splice(3,1);
-  rOldDist.splice(0,0,distance);
+  rOldDist.splice(3, 1);
+  rOldDist.splice(0, 0, distance);
 }
 //this is the real function, the other one is for debugging
@@ -737,28 +737,28 @@ function rarCopyString(length, distance) {
       destPtr = rOldBuffers[--l].data.length + destPtr;
     }
     //TODO: lets hope that it never needs to read beyond file boundaries
-    while(length--) rBuffer.insertByte(rOldBuffers[l].data[destPtr++]);
+    while (length--) rBuffer.insertByte(rOldBuffers[l].data[destPtr++]);
   }
   if (length > distance) {
-    while(length--) rBuffer.insertByte(rBuffer.data[destPtr++]);
+    while (length--) rBuffer.insertByte(rBuffer.data[destPtr++]);
   } else {
     rBuffer.insertBytes(rBuffer.data.subarray(destPtr, destPtr + length));
   }
 }
-var rOldBuffers = []
+var rOldBuffers = [];
 // v must be a valid RarVolume
 function unpack(v) {
-  // TODO: implement what happens when unpVer is < 15
+  // TODO: implement what happens when unpVer is < 15
   var Ver = v.header.unpVer <= 15 ? 15 : v.header.unpVer,
       Solid = v.header.LHD_SOLID,
       bstream = new bitjs.io.BitStream(v.fileData.buffer, true /* rtl */, v.fileData.byteOffset, v.fileData.byteLength );
   rBuffer = new bitjs.io.ByteBuffer(v.header.unpackedSize);
-  info("Unpacking " + v.filename+" RAR v" + Ver);
+  info("Unpacking " + v.filename + " RAR v" + Ver);
   switch(Ver) {
     case 15: // rar 1.5 compression
       Unpack15(); //(bstream, Solid);
@@ -772,7 +772,7 @@ function unpack(v) {
Unpack29(bstream);
break;
} // switch(method)
rOldBuffers.push(rBuffer);
//TODO: clear these old buffers when there's over 4MB of history
return rBuffer.data;
@@ -780,10 +780,10 @@ function unpack(v) {
// bstream is a bit stream
var RarLocalFile = function(bstream) {
this.header = new RarVolumeHeader(bstream);
this.filename = this.header.filename;
if (this.header.headType != FILE_HEAD && this.header.headType != ENDARC_HEAD) {
this.isValid = false;
info("Error! RAR Volume did not include a FILE_HEAD header ");
@@ -804,7 +804,7 @@ RarLocalFile.prototype.unrar = function() {
if (this.header.method === 0x30) {
info("Unstore " + this.filename);
this.isValid = true;
currentBytesUnarchivedInFile += this.fileData.length;
currentBytesUnarchived += this.fileData.length;
@@ -818,7 +818,7 @@ RarLocalFile.prototype.unrar = function() {
       this.fileData = unpack(this);
     }
   }
-}
+};
 var unrar = function(arrayBuffer) {
   currentFilename = "";
@@ -835,16 +835,16 @@ var unrar = function(arrayBuffer) {
   if (header.crc === 0x6152 &&
       header.headType === 0x72 &&
       header.flags.value === 0x1A21 &&
-      header.headSize === 7)
-  {
+      header.headSize === 7) {
     info("Found RAR signature");
     var mhead = new RarVolumeHeader(bstream);
     if (mhead.headType != MAIN_HEAD) {
       info("Error! RAR did not include a MAIN_HEAD header");
     } else {
-      var localFiles = [],
-          localFile = null;
+      var localFiles = [];
+      var localFile = null;
       do {
         try {
           localFile = new RarLocalFile(bstream);
@@ -853,24 +853,24 @@ var unrar = function(arrayBuffer) {
             totalUncompressedBytesInArchive += localFile.header.unpackedSize;
             localFiles.push(localFile);
           } else if (localFile.header.packSize === 0 && localFile.header.unpackedSize === 0) {
-            localFile.isValid = true;
+            localFile.isValid = true;
           }
-        } catch(err) {
+        } catch (err) {
           break;
         }
         //info("bstream" + bstream.bytePtr+"/"+bstream.bytes.length);
-      } while( localFile.isValid );
+      } while ( localFile.isValid );
       totalFilesInArchive = localFiles.length;
       // now we have all information but things are unpacked
       // TODO: unpack
-      localFiles = localFiles.sort(function(a,b) {
+      localFiles = localFiles.sort(function(a, b) {
        var aname = a.filename.toLowerCase();
        var bname = b.filename.toLowerCase();
        return aname > bname ? 1 : -1;
      });
-      info(localFiles.map(function(a) {return a.filename}).join(', '));
+      info(localFiles.map(function(a) {return a.filename;}).join(", "));
      for (var i = 0; i < localFiles.length; ++i) {
        var localfile = localFiles[i];

@@ -236,11 +236,11 @@ var unzip = function(arrayBuffer) {
     }
     postProgress();
     postMessage(new bitjs.archive.UnarchiveFinishEvent());
   }
-}
+};
-// returns a table of Huffman codes
-// each entry's index is its code and its value is a JavaScript object
+// returns a table of Huffman codes
+// each entry's index is its code and its value is a JavaScript object
 // containing {length: 6, symbol: X}
 function getHuffmanCodes(bitLengths) {
   // ensure bitLengths is an array containing at least one element
@@ -389,14 +389,14 @@ Code Bits Length(s) Code Bits Lengths Code Bits Length(s)
 */
 var LengthLookupTable = [
-  [0,3], [0,4], [0,5], [0,6],
-  [0,7], [0,8], [0,9], [0,10],
-  [1,11], [1,13], [1,15], [1,17],
-  [2,19], [2,23], [2,27], [2,31],
-  [3,35], [3,43], [3,51], [3,59],
-  [4,67], [4,83], [4,99], [4,115],
-  [5,131], [5,163], [5,195], [5,227],
-  [0,258]
+  [0, 3], [0, 4], [0, 5], [0, 6],
+  [0, 7], [0, 8], [0, 9], [0, 10],
+  [1, 11], [1, 13], [1, 15], [1, 17],
+  [2, 19], [2, 23], [2, 27], [2, 31],
+  [3, 35], [3, 43], [3, 51], [3, 59],
+  [4, 67], [4, 83], [4, 99], [4, 115],
+  [5, 131], [5, 163], [5, 195], [5, 227],
+  [0, 258]
 ];
 /*
  Extra Extra Extra
@@ -414,20 +414,20 @@ var LengthLookupTable = [
 9 3 25-32 19 8 769-1024 29 13 24577-32768
 */
 var DistLookupTable = [
-  [0,1], [0,2], [0,3], [0,4],
-  [1,5], [1,7],
-  [2,9], [2,13],
-  [3,17], [3,25],
-  [4,33], [4,49],
-  [5,65], [5,97],
-  [6,129], [6,193],
-  [7,257], [7,385],
-  [8,513], [8,769],
-  [9,1025], [9,1537],
-  [10,2049], [10,3073],
-  [11,4097], [11,6145],
-  [12,8193], [12,12289],
-  [13,16385], [13,24577]
+  [0, 1], [0, 2], [0, 3], [0, 4],
+  [1, 5], [1, 7],
+  [2, 9], [2, 13],
+  [3, 17], [3, 25],
+  [4, 33], [4, 49],
+  [5, 65], [5, 97],
+  [6, 129], [6, 193],
+  [7, 257], [7, 385],
+  [8, 513], [8, 769],
+  [9, 1025], [9, 1537],
+  [10, 2049], [10, 3073],
+  [11, 4097], [11, 6145],
+  [12, 8193], [12, 12289],
+  [13, 16385], [13, 24577]
 ];
 function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
@@ -572,14 +572,13 @@ function inflate(compressedData, numDecompressedBytes) {
       }
     }
     else if (symbol == 17) {
-      var repeat = bstream.readBits(3) + 3;
-      while (repeat--) {
+      var repeat1 = bstream.readBits(3) + 3;
+      while (repeat1--) {
         literalCodeLengths.push(0);
       }
-    }
-    else if (symbol == 18) {
-      var repeat = bstream.readBits(7) + 11;
-      while (repeat--) {
+    } else if (symbol == 18) {
+      var repeat2 = bstream.readBits(7) + 11;
+      while (repeat2--) {
         literalCodeLengths.push(0);
       }
     }
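
Splitting the reused repeat variable into repeat1 and repeat2 avoids redeclaring the same var in one scope; the behaviour is unchanged. For reference, symbols 17 and 18 in DEFLATE's code-length alphabet encode runs of zero lengths (RFC 1951); a Python sketch of the same expansion, with read_bits standing in for the bit reader:

def expand_zero_run(symbol, read_bits, lengths):
    # symbol 17: 3-10 zeros (3 extra bits); symbol 18: 11-138 zeros (7 extra bits)
    if symbol == 17:
        lengths.extend([0] * (read_bits(3) + 3))
    elif symbol == 18:
        lengths.extend([0] * (read_bits(7) + 11))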

@@ -176,7 +176,7 @@
 </div>
 <div class="modal-body text-center">
   <div id="spinner2" class="spinner2" style="display:none;">
-    <img id="img-spinner" src="{{ url_for('static', filename='css/images/loading-icon.gif') }}"/>
+    <img id="img-spinner2" src="{{ url_for('static', filename='css/images/loading-icon.gif') }}"/>
   </div>
   <p></p>
   <div id="Updatecontent"></div>

@@ -103,7 +103,7 @@
 {% if entry.languages.__len__() > 0 %}
   <div class="languages">
     <p>
-      <span class="label label-default">{{_('language')}}: {% for language in entry.languages %} {{language.language_name}}{% if not loop.last %},{% endif %}{% endfor %} </span>
+      <span class="label label-default">{{_('language')}}: {% for language in entry.languages %}{{language.language_name}}{% if not loop.last %}, {% endif %}{% endfor %}</span>
     </p>
   </div>
 {% endif %}
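
The corrected loop emits the separator after each name except the last, so the label renders as "language: English, German" instead of carrying stray spaces around the commas. The equivalent in plain Python (hypothetical values):

languages = ['English', 'German']
label = 'language: ' + ', '.join(languages)  # -> 'language: English, German'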

@@ -41,16 +41,11 @@ See https://github.com/adobe-type-tools/cmap-resources
-<!--<link rel="resource" type="application/l10n" href="locale/locale.properties">-->
 <link rel="resource" type="application/l10n" href="{{ url_for('static', filename='locale/locale.properties') }}">
 <script src="{{ url_for('static', filename='js/libs/l10n.js') }}"></script>
-<!--<script src="l10n.js"></script>-->
 <!--script src="{{ url_for('static', filename='js/libs/debugger.js') }}"></script-->
-<!--<script src="debugger.js"></script>-->
 <script src="{{ url_for('static', filename='js/libs/pdf.js') }}"></script>
-<!--<script src="pdf.js"></script>-->
 <script type="text/javascript">
   var DEFAULT_URL = "{{ url_for('serve_book', book_id=pdffile, book_format='pdf') }}";
   var PDFWORKER_LOCATION="{{ url_for('static', filename='js/libs/pdf.worker.js') }}";
-  // var IMAGE_LOCATION="{{ url_for('static', filename='css/../images') }}";
   var IMAGE_LOCATION="{{ url_for('static', filename='/images/') }}";
   var PDFWORKER_LOCATION_JS="{{ url_for('static', filename='js/libs/pdf.worker') }}";
 </script>
@@ -420,8 +415,7 @@ See https://github.com/adobe-type-tools/cmap-resources
   }
 </style>
 <div class="mozPrintCallback-dialog-box">
-  <!-- TODO: Localise the following strings -->
-  Preparing document for printing...
+  {{_('Preparing document for printing...')}}
   <div class="progress-row">
     <progress value="0" max="100"></progress>
     <span class="relative-progress">0%</span>

@@ -141,10 +141,7 @@
   {% endif %}
   {% if c.datatype == 'rating' %}
-    <input type="number" min="1" max="5" step="1" class="form-control" name="{{ 'custom_column_' ~ c.id }}" id="{{ 'custom_column_' ~ c.id }}"
-    {% if book['custom_column_' ~ c.id]|length > 0 %}
-    value="{{ '%d' % (book['custom_column_' ~ c.id][0].value / 2) }}"
-    {% endif %}>
+    <input type="number" min="1" max="5" step="1" class="form-control" name="{{ 'custom_column_' ~ c.id }}" id="{{ 'custom_column_' ~ c.id }}">
   {% endif %}
 </div>
 {% endfor %}
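
Calibre stores ratings on a 0-10 scale while this form exposes 1-5 stars, which is what the removed prefill's division by two was doing. A sketch of that conversion, assuming the doubled storage convention:

def rating_db_to_stars(db_value):
    return db_value // 2   # e.g. stored 8 -> 4 stars

def rating_stars_to_db(stars):
    return stars * 2       # e.g. 4 stars -> stored 8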

@@ -268,8 +268,8 @@ class Registration(Base):
     domain = Column(String)
     def __repr__(self):
-        return u"<Registration('{0}')>".format(self.domain)
+        return u"<Registration('{0}')>".format(self.domain)
 # Baseclass for representing settings in app.db with email server settings and Calibre database settings
 # (application settings)
@@ -555,7 +555,7 @@ def migrate_Database():
         conn.execute("ALTER TABLE Settings ADD column `config_use_google_drive` INTEGER DEFAULT 0")
         conn.execute("ALTER TABLE Settings ADD column `config_google_drive_folder` String DEFAULT ''")
         conn.execute("ALTER TABLE Settings ADD column `config_google_drive_watch_changes_response` String DEFAULT ''")
-        session.commit()
+        session.commit()
     try:
         session.query(exists().where(Settings.config_columns_to_ignore)).scalar()
     except exc.OperationalError:
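
The migration probes for a column by querying it and treats an OperationalError as "column missing", then adds it with ALTER TABLE. A sketch of that probe-then-alter pattern, with the session, engine, and Settings names taken from the surrounding code:

from sqlalchemy import exists, exc

try:
    session.query(exists().where(Settings.config_use_google_drive)).scalar()
except exc.OperationalError:
    # column does not exist yet: add it and persist the change
    conn = engine.connect()
    conn.execute("ALTER TABLE Settings ADD column `config_use_google_drive` INTEGER DEFAULT 0")
    session.commit()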

@@ -1072,7 +1072,7 @@ def get_publishers_json():
json_dumps = json.dumps([dict(name=r.name.replace('|',',')) for r in entries])
return json_dumps
@app.route("/get_tags_json", methods=['GET', 'POST'])
@login_required_if_no_ano
def get_tags_json():
@@ -1192,8 +1192,8 @@ def get_update_status():
             r = requests.get(repository_url + '/git/commits/' + commit['object']['sha'])
             r.raise_for_status()
             update_data = r.json()
-        except requests.exceptions.HTTPError as ex:
-            status['error'] = _(u'HTTP Error') + ' ' + str(ex)
+        except requests.exceptions.HTTPError as e:
+            status['error'] = _(u'HTTP Error') + ' ' + str(e)
         except requests.exceptions.ConnectionError:
             status['error'] = _(u'Connection error')
         except requests.exceptions.Timeout:
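
Renaming ex to e keeps the handler names consistent in the update checker. The surrounding pattern: raise_for_status() turns 4xx/5xx responses into HTTPError, and each network failure mode maps to its own user-facing message. A self-contained sketch (URL and messages are placeholders):

import requests

status = {}
try:
    r = requests.get('https://api.example.com/git/commits/abc123')  # placeholder URL
    r.raise_for_status()          # raises HTTPError on 4xx/5xx responses
    update_data = r.json()
except requests.exceptions.HTTPError as e:
    status['error'] = 'HTTP Error ' + str(e)
except requests.exceptions.ConnectionError:
    status['error'] = 'Connection error'
except requests.exceptions.Timeout:
    status['error'] = 'Timeout'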

@@ -108,7 +108,7 @@ class emailbase():
         self.transferSize = len(strg)
         lock.release()
         for i in range(0, self.transferSize, chunksize):
-            if type(strg) == bytes:
+            if isinstance(strg, bytes):
                 self.sock.send((strg[i:i+chunksize]))
             else:
                 self.sock.send((strg[i:i + chunksize]).encode('utf-8'))
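
isinstance(strg, bytes) is preferred over type(strg) == bytes because it also matches subclasses and is the idiomatic type check. The loop sends the payload in fixed-size chunks, encoding only text input; a standalone sketch of the same pattern (socket and chunk size assumed):

def send_in_chunks(sock, strg, chunksize=8192):
    for i in range(0, len(strg), chunksize):
        if isinstance(strg, bytes):
            sock.send(strg[i:i + chunksize])                  # raw bytes go out unchanged
        else:
            sock.send(strg[i:i + chunksize].encode('utf-8'))  # text is encoded first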
@@ -455,6 +455,8 @@ class WorkerThread(threading.Thread):
         except (smtplib.SMTPException) as e:
             if hasattr(e, "smtp_error"):
                 text = e.smtp_error.replace("\n",'. ')
+            elif hasattr(e, "message"):
+                text = e.message
             else:
                 text = ''
             self._handleError(u'Error sending email: ' + text)
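
Not every SMTPException carries smtp_error (the server's reply text); on Python 2 the generic .message attribute is the next best source before falling back to an empty string. A sketch mirroring the hunk's lookup order:

def smtp_error_text(e):
    # order matters: prefer the server reply, then the generic message
    if hasattr(e, 'smtp_error'):
        return e.smtp_error.replace('\n', '. ')
    if hasattr(e, 'message'):   # Python 2 exception attribute
        return e.message
    return ''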
@@ -501,10 +503,13 @@ class StderrLogger(object):
         self.logger = web.app.logger
     def write(self, message):
-        if message == '\n':
-            self.logger.debug(self.buffer)
-            print(self.buffer)
-            self.buffer = ''
-        else:
-            self.buffer += message
+        try:
+            if message == '\n':
+                self.logger.debug(self.buffer)
+                print(self.buffer)
+                self.buffer = ''
+            else:
+                self.buffer += message
+        except:
+            pass
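
Wrapping write() in try/except makes the stderr proxy fail-safe: a logging error can no longer take down the thread whose output is being captured. The proxy buffers fragments until a bare newline arrives, then flushes one complete line per log record. A minimal self-contained sketch (logger wiring assumed):

import logging

class LineBufferedLogger(object):
    def __init__(self, logger=None):
        self.logger = logger or logging.getLogger(__name__)
        self.buffer = ''

    def write(self, message):
        try:
            if message == '\n':        # line complete: flush one record
                self.logger.debug(self.buffer)
                self.buffer = ''
            else:                      # accumulate partial writes
                self.buffer += message
        except Exception:
            pass                       # never let logging crash the writer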

File diff suppressed because it is too large