Mirror of https://github.com/janeczku/calibre-web.git (synced 2024-11-26 08:51:05 +02:00)

Commit d763168dec: Merge branch 'master' of https://github.com/Krakinou/calibre-web
@@ -112,7 +112,8 @@ def migrate():
     sql=sql[0].replace(currUniqueConstraint, 'UNIQUE (gdrive_id, path)')
     sql=sql.replace(GdriveId.__tablename__, GdriveId.__tablename__ + '2')
     session.execute(sql)
-    session.execute('INSERT INTO gdrive_ids2 (id, gdrive_id, path) SELECT id, gdrive_id, path FROM gdrive_ids;')
+    session.execute("INSERT INTO gdrive_ids2 (id, gdrive_id, path) SELECT id, "
+                    "gdrive_id, path FROM gdrive_ids;")
     session.commit()
     session.execute('DROP TABLE %s' % 'gdrive_ids')
     session.execute('ALTER TABLE gdrive_ids2 RENAME to gdrive_ids')
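This hunk only re-wraps the long INSERT, but the surrounding code is SQLite's usual rebuild-and-rename migration: create a replacement table with the new UNIQUE constraint, copy the rows across, drop the old table, rename the new one. A minimal standalone sketch of that pattern, assuming an existing gdrive_ids table and using illustrative column types:

from sqlalchemy import create_engine, text

# Sketch of the rebuild-and-rename migration used above; column types are illustrative.
engine = create_engine("sqlite:///example.db")
with engine.begin() as conn:
    # 1. Create the replacement table with the new UNIQUE constraint.
    conn.execute(text(
        "CREATE TABLE gdrive_ids2 ("
        " id INTEGER PRIMARY KEY,"
        " gdrive_id VARCHAR NOT NULL,"
        " path VARCHAR,"
        " UNIQUE (gdrive_id, path))"))
    # 2. Copy the existing rows across.
    conn.execute(text(
        "INSERT INTO gdrive_ids2 (id, gdrive_id, path) "
        "SELECT id, gdrive_id, path FROM gdrive_ids"))
    # 3. Drop the old table and move the new one into its place.
    conn.execute(text("DROP TABLE gdrive_ids"))
    conn.execute(text("ALTER TABLE gdrive_ids2 RENAME TO gdrive_ids"))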
@@ -165,7 +166,8 @@ def getFolderInFolder(parentId, folderName, drive):
     query=""
     if folderName:
         query = "title = '%s' and " % folderName.replace("'", "\\'")
-    folder = query + "'%s' in parents and mimeType = 'application/vnd.google-apps.folder' and trashed = false" % parentId
+    folder = query + "'%s' in parents and mimeType = 'application/vnd.google-apps.folder'" \
+                     " and trashed = false" % parentId
     fileList = drive.ListFile({'q': folder}).GetList()
     if fileList.__len__() == 0:
         return None
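All of these helpers build a Google Drive v2 query string and hand it to PyDrive's ListFile. A hedged, minimal usage sketch; the 'root' parent and the credential setup are placeholders, calibre-web wires up its own authenticated drive object:

from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive

gauth = GoogleAuth()        # assumes client_secrets.json / settings.yaml are configured
gauth.LocalWebserverAuth()  # interactive OAuth consent
drive = GoogleDrive(gauth)

# Same Drive v2 query syntax the helpers above assemble; 'root' is a placeholder parent id.
query = ("'root' in parents and "
         "mimeType = 'application/vnd.google-apps.folder' and trashed = false")
for folder in drive.ListFile({'q': query}).GetList():
    print(folder['title'], folder['id'])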
@@ -191,7 +193,6 @@ def getEbooksFolderId(drive=None):

 def getFile(pathId, fileName, drive):
     metaDataFile = "'%s' in parents and trashed = false and title = '%s'" % (pathId, fileName.replace("'", "\\'"))

     fileList = drive.ListFile({'q': metaDataFile}).GetList()
     if fileList.__len__() == 0:
         return None
@@ -299,9 +300,11 @@ def copyToDrive(drive, uploadFile, createRoot, replaceFiles,
     if not parent:
         parent = getEbooksFolder(drive)
     if os.path.isdir(os.path.join(prevDir,uploadFile)):
-        existingFolder = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" % (os.path.basename(uploadFile), parent['id'])}).GetList()
+        existingFolder = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" %
+                                              (os.path.basename(uploadFile), parent['id'])}).GetList()
         if len(existingFolder) == 0 and (not isInitial or createRoot):
-            parent = drive.CreateFile({'title': os.path.basename(uploadFile), 'parents': [{"kind": "drive#fileLink", 'id': parent['id']}],
+            parent = drive.CreateFile({'title': os.path.basename(uploadFile),
+                                       'parents': [{"kind": "drive#fileLink", 'id': parent['id']}],
                                        "mimeType": "application/vnd.google-apps.folder"})
             parent.Upload()
         else:
@@ -312,11 +315,13 @@ def copyToDrive(drive, uploadFile, createRoot, replaceFiles,
             copyToDrive(drive, f, True, replaceFiles, ignoreFiles, parent, os.path.join(prevDir, uploadFile))
     else:
         if os.path.basename(uploadFile) not in ignoreFiles:
-            existingFiles = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" % (os.path.basename(uploadFile), parent['id'])}).GetList()
+            existingFiles = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" %
+                                                 (os.path.basename(uploadFile), parent['id'])}).GetList()
             if len(existingFiles) > 0:
                 driveFile = existingFiles[0]
             else:
-                driveFile = drive.CreateFile({'title': os.path.basename(uploadFile), 'parents': [{"kind":"drive#fileLink", 'id': parent['id']}], })
+                driveFile = drive.CreateFile({'title': os.path.basename(uploadFile),
+                                              'parents': [{"kind":"drive#fileLink", 'id': parent['id']}], })
             driveFile.SetContentFile(os.path.join(prevDir, uploadFile))
             driveFile.Upload()

@@ -327,7 +332,8 @@ def uploadFileToEbooksFolder(destFile, f):
     splitDir = destFile.split('/')
     for i, x in enumerate(splitDir):
         if i == len(splitDir)-1:
-            existingFiles = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" % (x, parent['id'])}).GetList()
+            existingFiles = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" %
+                                                 (x, parent['id'])}).GetList()
             if len(existingFiles) > 0:
                 driveFile = existingFiles[0]
             else:
@@ -335,7 +341,8 @@ def uploadFileToEbooksFolder(destFile, f):
             driveFile.SetContentFile(f)
             driveFile.Upload()
         else:
-            existingFolder = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" % (x, parent['id'])}).GetList()
+            existingFolder = drive.ListFile({'q': "title = '%s' and '%s' in parents and trashed = false" %
+                                                  (x, parent['id'])}).GetList()
             if len(existingFolder) == 0:
                 parent = drive.CreateFile({'title': x, 'parents': [{"kind": "drive#fileLink", 'id': parent['id']}],
                                            "mimeType": "application/vnd.google-apps.folder"})
@@ -262,12 +262,15 @@ def delete_book_file(book, calibrepath, book_format=None):
     return False


-def update_dir_structure_file(book_id, calibrepath):
+def update_dir_structure_file(book_id, calibrepath, first_author):
     localbook = db.session.query(db.Books).filter(db.Books.id == book_id).first()
     path = os.path.join(calibrepath, localbook.path)

     authordir = localbook.path.split('/')[0]
-    new_authordir = get_valid_filename(localbook.authors[0].name)
+    if first_author:
+        new_authordir = get_valid_filename(first_author)
+    else:
+        new_authordir = get_valid_filename(localbook.authors[0].name)

     titledir = localbook.path.split('/')[1]
     new_titledir = get_valid_filename(localbook.title) + " (" + str(book_id) + ")"
@@ -281,31 +284,51 @@ def update_dir_structure_file(book_id, calibrepath):
                 web.app.logger.info("Copying title: " + path + " into existing: " + new_title_path)
                 for dir_name, subdir_list, file_list in os.walk(path):
                     for file in file_list:
-                        os.renames(os.path.join(dir_name, file), os.path.join(new_title_path + dir_name[len(path):], file))
+                        os.renames(os.path.join(dir_name, file),
+                                   os.path.join(new_title_path + dir_name[len(path):], file))
             path = new_title_path
             localbook.path = localbook.path.split('/')[0] + '/' + new_titledir
         except OSError as ex:
             web.app.logger.error("Rename title from: " + path + " to " + new_title_path + ": " + str(ex))
             web.app.logger.debug(ex, exc_info=True)
-            return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s", src=path, dest=new_title_path, error=str(ex))
+            return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
+                     src=path, dest=new_title_path, error=str(ex))
     if authordir != new_authordir:
         try:
-            new_author_path = os.path.join(os.path.join(calibrepath, new_authordir), os.path.basename(path))
+            new_author_path = os.path.join(calibrepath, new_authordir, os.path.basename(path))
             os.renames(path, new_author_path)
             localbook.path = new_authordir + '/' + localbook.path.split('/')[1]
         except OSError as ex:
             web.app.logger.error("Rename author from: " + path + " to " + new_author_path + ": " + str(ex))
             web.app.logger.debug(ex, exc_info=True)
-            return _("Rename author from: '%(src)s' to '%(dest)s' failed with error: %(error)s", src=path, dest=new_author_path, error=str(ex))
+            return _("Rename author from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
+                     src=path, dest=new_author_path, error=str(ex))
+    # Rename all files from old names to new names
+    if authordir != new_authordir or titledir != new_titledir:
+        try:
+            for format in localbook.data:
+                path_name = os.path.join(calibrepath, new_authordir, os.path.basename(path))
+                new_name = get_valid_filename(localbook.title) + ' - ' + get_valid_filename(new_authordir)
+                os.renames(os.path.join(path_name, format.name + '.' + format.format.lower()),
+                           os.path.join(path_name,new_name + '.' + format.format.lower()))
+                format.name = new_name
+        except OSError as ex:
+            web.app.logger.error("Rename file in path " + path + " to " + new_name + ": " + str(ex))
+            web.app.logger.debug(ex, exc_info=True)
+            return _("Rename file in path '%(src)s' to '%(dest)s' failed with error: %(error)s",
+                     src=path, dest=new_name, error=str(ex))
     return False


-def update_dir_structure_gdrive(book_id):
+def update_dir_structure_gdrive(book_id, first_author):
     error = False
     book = db.session.query(db.Books).filter(db.Books.id == book_id).first()

     authordir = book.path.split('/')[0]
-    new_authordir = get_valid_filename(book.authors[0].name)
+    if first_author:
+        new_authordir = get_valid_filename(first_author)
+    else:
+        new_authordir = get_valid_filename(book.authors[0].name)
     titledir = book.path.split('/')[1]
     new_titledir = get_valid_filename(book.title) + " (" + str(book_id) + ")"

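The new block at the end of update_dir_structure_file renames every stored format file to "<title> - <author>.<ext>" once the folders have been moved. A simplified, self-contained sketch of that step; paths, names and the format list below are illustrative, not taken from the project:

import os

def rename_format_files(book_dir, old_base, new_base, extensions=("epub", "mobi", "pdf")):
    # Rename "<old_base>.<ext>" to "<new_base>.<ext>" for every format file that exists.
    for ext in extensions:
        old_file = os.path.join(book_dir, old_base + "." + ext)
        if os.path.isfile(old_file):
            # os.renames creates missing target directories and prunes emptied
            # source directories, which is why the code above prefers it to os.rename.
            os.renames(old_file, os.path.join(book_dir, new_base + "." + ext))

# Illustrative call:
# rename_format_files("/library/Jane Doe/Some Book (42)",
#                     "Some Book - Old Author", "Some Book - Jane Doe")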
@@ -318,7 +341,7 @@ def update_dir_structure_gdrive(book_id):
             book.path = book.path.split('/')[0] + '/' + new_titledir
             gd.updateDatabaseOnEdit(gFile['id'], book.path)     # only child folder affected
         else:
-            error = _(u'File %(file)s not found on Google Drive', file= book.path)  # file not found
+            error = _(u'File %(file)s not found on Google Drive', file=book.path)  # file not found

     if authordir != new_authordir:
         gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), titledir)
@@ -328,6 +351,19 @@ def update_dir_structure_gdrive(book_id):
             gd.updateDatabaseOnEdit(gFile['id'], book.path)
         else:
             error = _(u'File %(file)s not found on Google Drive', file=authordir)  # file not found
+    # Rename all files from old names to new names
+    # ToDo: Rename also all bookfiles with new author name and new title name
+    '''
+    if authordir != new_authordir or titledir != new_titledir:
+        for format in book.data:
+            # path_name = os.path.join(calibrepath, new_authordir, os.path.basename(path))
+            new_name = get_valid_filename(book.title) + ' - ' + get_valid_filename(book)
+            format.name = new_name
+            if gFile:
+                pass
+            else:
+                error = _(u'File %(file)s not found on Google Drive', file=format.name)  # file not found
+                break'''
     return error


@@ -356,11 +392,11 @@ def generate_random_password():

 ################################## External interface

-def update_dir_stucture(book_id, calibrepath):
+def update_dir_stucture(book_id, calibrepath, first_author = None):
     if ub.config.config_use_google_drive:
-        return update_dir_structure_gdrive(book_id)
+        return update_dir_structure_gdrive(book_id, first_author)
     else:
-        return update_dir_structure_file(book_id, calibrepath)
+        return update_dir_structure_file(book_id, calibrepath, first_author)


 def delete_book(book, calibrepath, book_format):
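Because first_author defaults to None, existing two-argument callers keep their old behaviour (fall back to authors[0]); only the edit path passes the explicitly chosen author. Hypothetical call sites, with a placeholder book id and library path:

helper.update_dir_stucture(42, "/library")                # old behaviour: derive the folder from authors[0]
helper.update_dir_stucture(42, "/library", "Doe, Jane")   # new: honour the author chosen in the edit form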
cps/static/css/caliBlur.min.css (vendored, 100 changed lines): diff suppressed because the minified file is too large and its lines are too long.
@@ -277,8 +277,6 @@ bitjs.archive = bitjs.archive || {};
         if (e.type === bitjs.archive.UnarchiveEvent.Type.FINISH) {
             this.worker_.terminate();
         }
     } else {
         console.log(e);
     }
 };

@@ -292,15 +290,11 @@ bitjs.archive = bitjs.archive || {};
     this.worker_ = new Worker(scriptFileName);

     this.worker_.onerror = function(e) {
         console.log("Worker error: message = " + e.message);
         throw e;
     };

     this.worker_.onmessage = function(e) {
         if (typeof e.data === "string") {
             // Just log any strings the workers pump our way.
             console.log(e.data);
-        } else {
+        if (typeof e.data !== "string") {
             // Assume that it is an UnarchiveEvent. Some browsers preserve the 'type'
             // so that instanceof UnarchiveEvent returns true, but others do not.
             me.handleWorkerEvent_(e.data);
@@ -1,6 +1,12 @@
 // Move advanced search to side-menu
 $( 'a[href*="advanced"]' ).parent().insertAfter( '#nav_new' );
 $( 'body' ).addClass('blur');
+$( 'body.stat' ).addClass( 'stats' );
+$( 'body.config' ).addClass( 'admin');
+$( 'body.uiconfig' ).addClass( 'admin');
+$( 'body.advsearch' ).addClass( 'advanced_search' );
+$( 'body.newuser' ).addClass( 'admin' );
+$( 'body.mailset' ).addClass( 'admin' );

 // Back button
 curHref = window.location.href.split('/');
@ -34,99 +40,7 @@ $( 'a.navbar-brand' ).clone().appendTo( '.home-btn' ).empty().removeClass('navba
|
||||
|
||||
// Wrap book description in div container
|
||||
if ( $( 'body.book' ).length > 0 ) {
|
||||
|
||||
/* description = $( 'h3:contains("Description:")' ).nextUntil( '.morestuff' ).slice(0,-1);
|
||||
bookInfo = $( '.author' ).nextUntil( 'h3:contains("Description:")');
|
||||
$( 'h3:contains("Description:")' ).hide();
|
||||
$( description ).detach();
|
||||
$( bookInfo ).wrapAll( '<div class="bookinfo"></div>' );
|
||||
$( 'h3:contains("Description:")' ).after( '<div class="description"></div>' );
|
||||
$( '.languages' ).appendTo( '.bookinfo' );
|
||||
$('.hr').detach();
|
||||
if ( $( '.identifiers ').length > 0 ) {
|
||||
console.log(".identifiers length " + $( '.identifiers ').length );
|
||||
$( '.identifiers' ).before( '<div class="hr"></div>' );
|
||||
} else {
|
||||
if ( $( '.bookinfo > p:first-child' ).length > 0 ) {
|
||||
console.log(".bookinfo > p:first-child length " + $( '.bookinfo > p' ).length );
|
||||
$( '.bookinfo > p:first-child' ).first().after( '<div class="hr"></div>' );
|
||||
} else{
|
||||
if ( $( '.bookinfo a[href*="/series/"]' ).length > 0 ) {
|
||||
console.log( 'series text found; placing hr below series' );
|
||||
$( '.bookinfo a[href*="/series/"]' ).parent().after( '<div class="hr"></div>' );
|
||||
} else {
|
||||
console.log("prepending hr div to top of .bookinfo");
|
||||
$( '.bookinfo' ).prepend( '<div class="hr"></div>' );
|
||||
}
|
||||
}
|
||||
}
|
||||
$( '.rating' ).insertBefore( '.hr' );
|
||||
$( 'div.description' ).hide();
|
||||
$( '#remove-from-shelves' ).insertAfter( '.hr' );
|
||||
|
||||
/* if book description is not in html format, Remove extra line breaks
|
||||
Remove blank lines/unnecessary spaces, split by line break to array
|
||||
Push array into .description div. If there is still a wall of text,
|
||||
find sentences and split wall into groups of three sentence paragraphs.
|
||||
If the book format is in html format, Keep html, but strip away inline
|
||||
styles and empty elements */
|
||||
/*
|
||||
// If text is sitting in div as text node
|
||||
if ( description[0] === undefined ) {
|
||||
textValue = $( '.book-meta' )
|
||||
.contents()
|
||||
.filter(function() {
|
||||
return this.nodeType == Node.TEXT_NODE;
|
||||
}).text();
|
||||
description = $.makeArray(
|
||||
textValue.replace(/(?:(?:\r\n|\r|\n)\s*){2}/gm, "")
|
||||
);
|
||||
$( '.book-meta' ).contents().filter(function() {
|
||||
return this.nodeType === 3;
|
||||
}).remove();
|
||||
}
|
||||
if ( description[1] === undefined ) {
|
||||
newdesc = description.toString()
|
||||
.replace(/^(?=\n)$|^\s*|\s*$|\n\n+/gm,"").split(/\n/);
|
||||
$.each(newdesc, function(i, val) {
|
||||
$( 'div.description' ).append( '<p>' + newdesc[i] + '</p>' );
|
||||
});
|
||||
$( '.description' ).fadeIn(100);
|
||||
//If still a wall of text create 3 sentence paragraphs.
|
||||
if( $( '.description p' ).length === 1 ) {
|
||||
if ( description.context != undefined ) {
|
||||
newdesc = description.text()
|
||||
.replace(/^(?=\n)$|^\s*|\s*$|\n\n+/gm,"").split(/\n/);
|
||||
}
|
||||
else {
|
||||
newdesc = description.toString();
|
||||
}
|
||||
doc = nlp ( newdesc.toString() );
|
||||
sentences = doc.map((m)=> m.out( 'text' ));
|
||||
$( '.description p' ).remove();
|
||||
let size = 3; let sentenceChunks = [];
|
||||
for (var i=0; i<sentences.length; i+=size) {
|
||||
sentenceChunks.push(sentences.slice(i,i+size));
|
||||
}
|
||||
let output = '';
|
||||
$.each(sentenceChunks, function(i, val) {
|
||||
let preOutput = '';
|
||||
$.each(val, function(i, val) {
|
||||
preOutput += val;
|
||||
});
|
||||
output += '<p>' + preOutput + '</p>';
|
||||
});
|
||||
$( 'div.description' ).append( output );
|
||||
}
|
||||
} else {
|
||||
$.each(description, function(i, val) {
|
||||
$( description[i].outerHTML ).appendTo( '.description' );
|
||||
$( 'div.description :empty' ).remove();
|
||||
$( 'div.description ').attr( 'style', '' );
|
||||
});
|
||||
$( 'div.description' ).fadeIn( 100 );
|
||||
}*/
|
||||
|
||||
|
||||
description = $( '.comments' );
|
||||
bookInfo = $( '.author' ).nextUntil( 'h3:contains("Description")');
|
||||
$( 'h3:contains("Description")' ).detach();
|
||||
@ -240,7 +154,6 @@ return $(this).text().replace(/^\s+|^\t+|\t+|\s+$/g, "");
|
||||
$( '.book-meta h2:first' ).clone()
|
||||
.prependTo( '.book-meta > .btn-toolbar:first' );
|
||||
|
||||
|
||||
// If only one download type exists still put the items into a drop-drown list.
|
||||
downloads = $( 'a[id^=btnGroupDrop]' ).get();
|
||||
if ( $( downloads ).length === 1 ) {
|
||||
@ -385,18 +298,6 @@ $(document).mouseup(function (e) {
|
||||
|
||||
// Split path name to array and remove blanks
|
||||
url = window.location.pathname
|
||||
.split( "/" ).filter( function(v){return v!==''} );
|
||||
// Add classes to some body elements that don't have it
|
||||
if ( jQuery.inArray( 'epub', url ) != -1 ) {
|
||||
$( 'body' ).addClass( url[3] );
|
||||
} else {
|
||||
$( 'body' ).addClass( url[1] );
|
||||
}
|
||||
if ( $( 'body.shelf' ).length > 0 ) {
|
||||
$( 'a[href*= "'+url[1]+"/"+url[2]+'"]' )
|
||||
.parent()
|
||||
.addClass( 'active' );
|
||||
}
|
||||
|
||||
// Move create shelf
|
||||
$( '#nav_createshelf' ).prependTo( '.your-shelves' );
|
||||
@ -434,13 +335,7 @@ $( 'input#query' ).focusout(function() {
|
||||
$( 'form[role="search"]' ).removeClass( 'search-focus' );
|
||||
}, 100);
|
||||
});
|
||||
|
||||
// Add class to random book discover
|
||||
// ToDo: done
|
||||
$( 'h2:contains("Discover (Random Books")' )
|
||||
.parent()
|
||||
.addClass( 'random-books' );
|
||||
|
||||
|
||||
// Check if dropdown goes out of viewport and add class
|
||||
|
||||
$(document).on('click','.dropdown-toggle',function() {
|
||||
@ -454,7 +349,7 @@ $(document).on('click','.dropdown-toggle',function() {
|
||||
|
||||
// Fade out content on page unload
|
||||
// delegate all clicks on "a" tag (links)
|
||||
$(document).on("click", "a:not(.btn-toolbar a, a[href*='shelf/remove'], .identifiers a, .bookinfo , .btn-group > a, #add-to-shelves a, #book-list a, .stat.blur.stats a )", function () {
|
||||
/*$(document).on("click", "a:not(.btn-toolbar a, a[href*='shelf/remove'], .identifiers a, .bookinfo , .btn-group > a, #add-to-shelves a, #book-list a, .stat.blur a )", function () {
|
||||
|
||||
// get the href attribute
|
||||
var newUrl = $(this).attr("href");
|
||||
@ -466,7 +361,7 @@ $(document).on("click", "a:not(.btn-toolbar a, a[href*='shelf/remove'], .identif
|
||||
return;
|
||||
}
|
||||
|
||||
// now, fadeout the html (whole page)
|
||||
now, fadeout the html (whole page)
|
||||
$( '.blur-wrapper' ).fadeOut(250);
|
||||
$(".row-fluid .col-sm-10").fadeOut(500,function () {
|
||||
// when the animation is complete, set the new location
|
||||
@ -475,7 +370,7 @@ $(document).on("click", "a:not(.btn-toolbar a, a[href*='shelf/remove'], .identif
|
||||
|
||||
// prevent the default browser behavior.
|
||||
return false;
|
||||
});
|
||||
});*/
|
||||
|
||||
// Collapse long text into read-more
|
||||
$( 'div.comments' ).readmore( {
|
||||
@ -507,7 +402,7 @@ backurl = '../../book/' + url[2]
|
||||
$( 'body.epub #title-controls' )
|
||||
.append('<div class="epub-back"><input action="action" onclick="location.href=backurl; return false;" type="button" value="Back" /></div>')
|
||||
|
||||
$( 'body.stats .col-sm-10 p:first' ).insertAfter( '#libs' );
|
||||
$( 'body.stat .col-sm-10 p:first' ).insertAfter( '#libs' );
|
||||
|
||||
// Check if link is external and force _blank attribute
|
||||
$(function(){ // document ready
|
||||
@ -593,7 +488,7 @@ $( '.plexBack > a' ).attr({
|
||||
|
||||
$( '#top_tasks' ).attr({
|
||||
'data-toggle': 'tooltip',
|
||||
'title': 'Tasks',
|
||||
'title': $( '#top_tasks' ).text(), //'Tasks',
|
||||
'data-placement': 'bottom',
|
||||
'data-viewport': '#main-nav' })
|
||||
.addClass('tasks-btn-tooltip');
|
||||
@ -614,18 +509,20 @@ $( '.profileDrop' ).attr({
|
||||
|
||||
$( '#btn-upload' ).attr({
|
||||
'data-toggle': 'tooltip',
|
||||
'title': 'Upload',
|
||||
'title': $( '#btn-upload' ).text() , // 'Upload',
|
||||
'data-placement': 'bottom',
|
||||
'data-viewport': '#main-nav' })
|
||||
.addClass('upload-btn-tooltip');
|
||||
|
||||
$( '#add-to-shelf' ).attr({
|
||||
'data-toggle-two': 'tooltip',
|
||||
'title': 'Add to Shelf',
|
||||
'title': $( '#add-to-shelf' ).text() , // 'Add to Shelf',
|
||||
'data-placement': 'bottom',
|
||||
'data-viewport': '.btn-toolbar' })
|
||||
.addClass('addtoshelf-btn-tooltip');
|
||||
|
||||
var teetet = $( '#add-to-shelf' ).text()
|
||||
|
||||
$( '#have_read_cb' ).attr({
|
||||
'data-toggle': 'tooltip',
|
||||
'title': 'Mark As Read',
|
||||
@ -642,7 +539,7 @@ $( '#have_read_cb:checked' ).attr({
|
||||
|
||||
$( 'button#delete' ).attr({
|
||||
'data-toggle-two': 'tooltip',
|
||||
'title': 'Delete',
|
||||
'title': $( 'button#delete' ).text(), //'Delete',
|
||||
'data-placement': 'bottom',
|
||||
'data-viewport': '.btn-toolbar' })
|
||||
.addClass('delete-book-btn-tooltip');
|
||||
@ -657,11 +554,13 @@ $( '#have_read_cb' ).click(function() {
|
||||
|
||||
$( '.btn-group[aria-label="Edit/Delete book"] a' ).attr({
|
||||
'data-toggle': 'tooltip',
|
||||
'title': 'Edit',
|
||||
'title': $( '#edit_book' ).text(), // 'Edit',
|
||||
'data-placement': 'bottom',
|
||||
'data-viewport': '.btn-toolbar' })
|
||||
.addClass('edit-btn-tooltip');
|
||||
|
||||
var teetet = $( '#edit_book' ).text()
|
||||
|
||||
$( '#sendbtn' ).attr({
|
||||
'data-toggle': 'tooltip',
|
||||
'title': 'Send to Kindle',
|
||||
@ -742,10 +641,10 @@ if ( $( window ).width() <= 768 ) {
|
||||
}
|
||||
|
||||
// LayerCake plug
|
||||
if ( $(' .stat.blur.stats p').length > 0 ) {
|
||||
$(' .stat.blur.stats p').append(" and <a href='https://github.com/leram84/layer.Cake/tree/master/caliBlur' target='_blank'>layer.Cake</a>");
|
||||
str = $(' .stat.blur.stats p').html().replace("</a>.","</a>");
|
||||
$(' .stat.blur.stats p').html(str);
|
||||
if ( $(' body.stat p').length > 0 ) {
|
||||
$(' body.stat p').append(" and <a href='https://github.com/leram84/layer.Cake/tree/master/caliBlur' target='_blank'>layer.Cake</a>");
|
||||
str = $(' body.stat p').html().replace("</a>.","</a>");
|
||||
$(' body.stat p').html(str);
|
||||
}
|
||||
// Collect delete buttons in editbook to single dropdown
|
||||
$( '.editbook .text-center.more-stuff' ).prepend( '<button id="deleteButton" type="button" class="btn btn-danger dropdown-toggle" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false"><span class="glyphicon glyphicon-remove"></span>Delete Format<span class="caret"></span></button><ul class="dropdown-menu delete-dropdown"></ul>' );
|
||||
|
@ -105,7 +105,7 @@ $(function() {
|
||||
var buttonText = $this.html();
|
||||
$this.html("...");
|
||||
$("#update_error").addClass("hidden");
|
||||
if($("#message").length){
|
||||
if ($("#message").length){
|
||||
$("#message").alert("close");
|
||||
}
|
||||
$.ajax({
|
||||
@ -114,8 +114,8 @@ $(function() {
|
||||
success: function success(data) {
|
||||
$this.html(buttonText);
|
||||
|
||||
var cssClass = '';
|
||||
var message = ''
|
||||
var cssClass = "";
|
||||
var message = "";
|
||||
|
||||
if (data.success === true) {
|
||||
if (data.update === true) {
|
||||
@ -125,19 +125,20 @@ $(function() {
|
||||
.removeClass("hidden")
|
||||
.find("span").html(data.commit);
|
||||
|
||||
data.history.reverse().forEach(function(entry, index) {
|
||||
data.history.reverse().forEach(function(entry) {
|
||||
$("<tr><td>" + entry[0] + "</td><td>" + entry[1] + "</td></tr>").appendTo($("#update_table"));
|
||||
});
|
||||
cssClass = 'alert-warning';
|
||||
cssClass = "alert-warning";
|
||||
} else {
|
||||
cssClass = 'alert-success';
|
||||
cssClass = "alert-success";
|
||||
}
|
||||
} else {
|
||||
cssClass = 'alert-danger';
|
||||
cssClass = "alert-danger";
|
||||
}
|
||||
|
||||
message = '<div id="message" class="alert ' + cssClass
|
||||
+ ' fade in"><a href="#" class="close" data-dismiss="alert">×</a>' + data.message + '</div>';
|
||||
message = "<div id=\"message\" class=\"alert " + cssClass
|
||||
+ " fade in\"><a href=\"#\" class=\"close\" data-dismiss=\"alert\">×</a>"
|
||||
+ data.message + "</div>";
|
||||
|
||||
$(message).insertAfter($("#update_table"));
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
$(function() {
|
||||
|
||||
$("#domain_submit").click(function(event){
|
||||
$("#domain_submit").click(function(event) {
|
||||
event.preventDefault();
|
||||
$("#domain_add").ajaxForm();
|
||||
$(this).closest("form").submit();
|
||||
@ -14,44 +14,45 @@ $(function() {
|
||||
}
|
||||
});
|
||||
});
|
||||
$('#domain-table').bootstrapTable({
|
||||
formatNoMatches: function () {
|
||||
return '';
|
||||
},
|
||||
striped: false
|
||||
$("#domain-table").bootstrapTable({
|
||||
formatNoMatches: function () {
|
||||
return "";
|
||||
},
|
||||
striped: false
|
||||
});
|
||||
$("#btndeletedomain").click(function() {
|
||||
//get data-id attribute of the clicked element
|
||||
var domainId = $(this).data('domainId');
|
||||
var domainId = $(this).data("domainId");
|
||||
$.ajax({
|
||||
method:"post",
|
||||
url: window.location.pathname + "/../../ajax/deletedomain",
|
||||
data: {"domainid":domainId}
|
||||
});
|
||||
$('#DeleteDomain').modal('hide');
|
||||
$("#DeleteDomain").modal("hide");
|
||||
$.ajax({
|
||||
method:"get",
|
||||
url: window.location.pathname + "/../../ajax/domainlist",
|
||||
async: true,
|
||||
timeout: 900,
|
||||
success:function(data){
|
||||
$('#domain-table').bootstrapTable("load", data);
|
||||
$("#domain-table").bootstrapTable("load", data);
|
||||
}
|
||||
});
|
||||
|
||||
});
|
||||
//triggered when modal is about to be shown
|
||||
$('#DeleteDomain').on('show.bs.modal', function(e) {
|
||||
$("#DeleteDomain").on("show.bs.modal" function(e) {
|
||||
//get data-id attribute of the clicked element and store in button
|
||||
var domainId = $(e.relatedTarget).data('domain-id');
|
||||
$(e.currentTarget).find("#btndeletedomain").data('domainId',domainId);
|
||||
var domainId = $(e.relatedTarget).data("domain-id");
|
||||
$(e.currentTarget).find("#btndeletedomain").data("domainId", domainId);
|
||||
});
|
||||
});
|
||||
|
||||
function TableActions (value, row, index) {
|
||||
return [
|
||||
'<a class="danger remove" data-toggle="modal" data-target="#DeleteDomain" data-domain-id="'+row.id+'" title="Remove">',
|
||||
'<i class="glyphicon glyphicon-trash"></i>',
|
||||
'</a>'
|
||||
].join('');
|
||||
"<a class=\"danger remove\" data-toggle=\"modal\" data-target=\"#DeleteDomain\" data-domain-id=\"" + row.id
|
||||
+ "\" title=\"Remove\">",
|
||||
"<i class=\"glyphicon glyphicon-trash\"></i>",
|
||||
"</a>"
|
||||
].join("");
|
||||
}
|
||||
|
@ -269,7 +269,7 @@ var RD = { //rep decode
|
||||
var rBuffer;
|
||||
|
||||
// read in Huffman tables for RAR
|
||||
function RarReadTables(bstream) {
|
||||
function rarReadTables(bstream) {
|
||||
var BitLength = new Array(rBC),
|
||||
Table = new Array(rHuffTableSize);
|
||||
var i;
|
||||
@ -480,7 +480,7 @@ function Unpack20(bstream) { //, Solid) {
|
||||
continue;
|
||||
}
|
||||
if (num < 270) {
|
||||
var Distance = rSDDecode[num -= 261] + 1;
|
||||
Distance = rSDDecode[num -= 261] + 1;
|
||||
if ((Bits = rSDBits[num]) > 0) {
|
||||
Distance += bstream.readBits(Bits);
|
||||
}
|
||||
@ -513,9 +513,9 @@ function rarReadTables20(bstream) {
|
||||
var BitLength = new Array(rBC20);
|
||||
var Table = new Array(rMC20 * 4);
|
||||
var TableSize, N, I;
|
||||
var i;
|
||||
bstream.readBits(1);
|
||||
if (!bstream.readBits(1)) {
|
||||
var i;
|
||||
for (i = UnpOldTable20.length; i--;) UnpOldTable20[i] = 0;
|
||||
}
|
||||
TableSize = rNC20 + rDC20 + rRC20;
|
||||
@ -553,25 +553,26 @@ function rarReadTables20(bstream) {
|
||||
}
|
||||
|
||||
|
||||
function Unpack29(bstream, Solid) {
|
||||
function Unpack29(bstream) {
|
||||
// lazy initialize rDDecode and rDBits
|
||||
|
||||
var DDecode = new Array(rDC);
|
||||
var DBits = new Array(rDC);
|
||||
|
||||
var Distance = 0;
|
||||
var Length = 0;
|
||||
var Dist = 0, BitLength = 0, Slot = 0;
|
||||
var I;
|
||||
for (I = 0; I < rDBitLengthCounts.length; I++,BitLength++) {
|
||||
for (var J = 0; J < rDBitLengthCounts[I]; J++,Slot++,Dist+=(1<<BitLength)) {
|
||||
DDecode[Slot]=Dist;
|
||||
DBits[Slot]=BitLength;
|
||||
for (I = 0; I < rDBitLengthCounts.length; I++, BitLength++) {
|
||||
for (var J = 0; J < rDBitLengthCounts[I]; J++, Slot++, Dist += (1 << BitLength)) {
|
||||
DDecode[Slot] = Dist;
|
||||
DBits[Slot] = BitLength;
|
||||
}
|
||||
}
|
||||
|
||||
var Bits;
|
||||
//tablesRead = false;
|
||||
|
||||
rOldDist = [0, 0, 0, 0]
|
||||
rOldDist = [0, 0, 0, 0];
|
||||
|
||||
lastDist = 0;
|
||||
lastLength = 0;
|
||||
@ -579,7 +580,7 @@ function Unpack29(bstream, Solid) {
|
||||
for (i = UnpOldTable.length; i--;) UnpOldTable[i] = 0;
|
||||
|
||||
// read in Huffman tables
|
||||
RarReadTables(bstream);
|
||||
rarReadTables(bstream);
|
||||
|
||||
while (true) {
|
||||
var num = rarDecodeNumber(bstream, LD);
|
||||
@ -589,12 +590,12 @@ function Unpack29(bstream, Solid) {
|
||||
continue;
|
||||
}
|
||||
if (num >= 271) {
|
||||
var Length = rLDecode[num -= 271] + 3;
|
||||
Length = rLDecode[num -= 271] + 3;
|
||||
if ((Bits = rLBits[num]) > 0) {
|
||||
Length += bstream.readBits(Bits);
|
||||
}
|
||||
var DistNumber = rarDecodeNumber(bstream, DD);
|
||||
var Distance = DDecode[DistNumber]+1;
|
||||
Distance = DDecode[DistNumber] + 1;
|
||||
if ((Bits = DBits[DistNumber]) > 0) {
|
||||
if (DistNumber > 9) {
|
||||
if (Bits > 4) {
|
||||
@ -625,19 +626,19 @@ function Unpack29(bstream, Solid) {
|
||||
Length++;
|
||||
}
|
||||
}
|
||||
RarInsertOldDist(Distance);
|
||||
RarInsertLastMatch(Length, Distance);
|
||||
rarInsertOldDist(Distance);
|
||||
rarInsertLastMatch(Length, Distance);
|
||||
rarCopyString(Length, Distance);
|
||||
continue;
|
||||
}
|
||||
if (num === 256) {
|
||||
if (!RarReadEndOfBlock(bstream)) break;
|
||||
if (!rarReadEndOfBlock(bstream)) break;
|
||||
continue;
|
||||
}
|
||||
if (num === 257) {
|
||||
//console.log("READVMCODE");
|
||||
if (!RarReadVMCode(bstream)) break;
|
||||
continue;
|
||||
if (!rarReadVMCode(bstream)) break;
|
||||
continue;
|
||||
}
|
||||
if (num === 258) {
|
||||
if (lastLength != 0) {
|
||||
@ -647,29 +648,29 @@ function Unpack29(bstream, Solid) {
|
||||
}
|
||||
if (num < 263) {
|
||||
var DistNum = num - 259;
|
||||
var Distance = rOldDist[DistNum];
|
||||
Distance = rOldDist[DistNum];
|
||||
|
||||
for (var I = DistNum; I > 0; I--) {
|
||||
rOldDist[I] = rOldDist[I-1];
|
||||
rOldDist[I] = rOldDist[I - 1];
|
||||
}
|
||||
rOldDist[0] = Distance;
|
||||
|
||||
var LengthNumber = rarDecodeNumber(bstream, RD);
|
||||
var Length = rLDecode[LengthNumber] + 2;
|
||||
Length = rLDecode[LengthNumber] + 2;
|
||||
if ((Bits = rLBits[LengthNumber]) > 0) {
|
||||
Length += bstream.readBits(Bits);
|
||||
}
|
||||
RarInsertLastMatch(Length, Distance);
|
||||
rarInsertLastMatch(Length, Distance);
|
||||
rarCopyString(Length, Distance);
|
||||
continue;
|
||||
}
|
||||
if (num < 272) {
|
||||
var Distance = rSDDecode[num -= 263] + 1;
|
||||
Distance = rSDDecode[num -= 263] + 1;
|
||||
if ((Bits = rSDBits[num]) > 0) {
|
||||
Distance += bstream.readBits(Bits);
|
||||
}
|
||||
RarInsertOldDist(Distance);
|
||||
RarInsertLastMatch(2, Distance);
|
||||
rarInsertOldDist(Distance);
|
||||
rarInsertLastMatch(2, Distance);
|
||||
rarCopyString(2, Distance);
|
||||
continue;
|
||||
}
|
||||
@ -677,9 +678,9 @@ function Unpack29(bstream, Solid) {
|
||||
rarUpdateProgress()
|
||||
}
|
||||
|
||||
function RarReadEndOfBlock(bstream) {
|
||||
function rarReadEndOfBlock(bstream) {
|
||||
|
||||
rarUpdateProgress()
|
||||
rarUpdateProgress();
|
||||
|
||||
var NewTable = false, NewFile = false;
|
||||
if (bstream.readBits(1)) {
|
||||
@ -689,11 +690,11 @@ function RarReadEndOfBlock(bstream) {
|
||||
NewTable = !!bstream.readBits(1);
|
||||
}
|
||||
//tablesRead = !NewTable;
|
||||
return !(NewFile || NewTable && !RarReadTables(bstream));
|
||||
return !(NewFile || NewTable && !rarReadTables(bstream));
|
||||
}
|
||||
|
||||
|
||||
function RarReadVMCode(bstream) {
|
||||
function rarReadVMCode(bstream) {
|
||||
var FirstByte = bstream.readBits(8);
|
||||
var Length = (FirstByte & 7) + 1;
|
||||
if (Length === 7) {
|
||||
@ -717,12 +718,12 @@ function RarAddVMCode(firstByte, vmCode, length) {
|
||||
return true;
|
||||
}
|
||||
|
||||
function RarInsertLastMatch(length, distance) {
|
||||
function rarInsertLastMatch(length, distance) {
|
||||
lastDist = distance;
|
||||
lastLength = length;
|
||||
}
|
||||
|
||||
function RarInsertOldDist(distance) {
|
||||
function rarInsertOldDist(distance) {
|
||||
rOldDist.splice(3,1);
|
||||
rOldDist.splice(0,0,distance);
|
||||
}
|
||||
@ -768,7 +769,7 @@ function unpack(v) {
|
||||
break;
|
||||
case 29: // rar 3.x compression
|
||||
case 36: // alternative hash
|
||||
Unpack29(bstream, Solid);
|
||||
Unpack29(bstream);
|
||||
break;
|
||||
} // switch(method)
|
||||
|
||||
|
@ -9,7 +9,7 @@
|
||||
* ZIP format: http://www.pkware.com/documents/casestudies/APPNOTE.TXT
|
||||
* DEFLATE format: http://tools.ietf.org/html/rfc1951
|
||||
*/
|
||||
/* global bitjs */
|
||||
/* global bitjs, importScripts, Uint8Array*/
|
||||
|
||||
// This file expects to be invoked as a Worker (see onmessage below).
|
||||
importScripts("io.js");
|
||||
@ -44,12 +44,10 @@ var zLocalFileHeaderSignature = 0x04034b50;
|
||||
var zArchiveExtraDataSignature = 0x08064b50;
|
||||
var zCentralFileHeaderSignature = 0x02014b50;
|
||||
var zDigitalSignatureSignature = 0x05054b50;
|
||||
var zEndOfCentralDirSignature = 0x06064b50;
|
||||
var zEndOfCentralDirLocatorSignature = 0x07064b50;
|
||||
|
||||
// takes a ByteStream and parses out the local file information
|
||||
var ZipLocalFile = function(bstream) {
|
||||
if (typeof bstream != typeof {} || !bstream.readNumber || typeof bstream.readNumber != typeof function(){}) {
|
||||
if (typeof bstream != typeof {} || !bstream.readNumber || typeof bstream.readNumber != typeof function() {}) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@ -112,7 +110,7 @@ ZipLocalFile.prototype.unzip = function() {
|
||||
|
||||
// Zip Version 1.0, no compression (store only)
|
||||
if (this.compressionMethod == 0 ) {
|
||||
info("ZIP v"+this.version+", store only: " + this.filename + " (" + this.compressedSize + " bytes)");
|
||||
info("ZIP v" + this.version + ", store only: " + this.filename + " (" + this.compressedSize + " bytes)");
|
||||
currentBytesUnarchivedInFile = this.compressedSize;
|
||||
currentBytesUnarchived += this.compressedSize;
|
||||
}
|
||||
@ -158,7 +156,7 @@ var unzip = function(arrayBuffer) {
|
||||
totalFilesInArchive = localFiles.length;
|
||||
|
||||
// got all local files, now sort them
|
||||
localFiles.sort(function(a,b) {
|
||||
localFiles.sort(function(a, b) {
|
||||
var aname = a.filename.toLowerCase();
|
||||
var bname = b.filename.toLowerCase();
|
||||
return aname > bname ? 1 : -1;
|
||||
@ -239,7 +237,7 @@ var unzip = function(arrayBuffer) {
|
||||
postProgress();
|
||||
postMessage(new bitjs.archive.UnarchiveFinishEvent());
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// returns a table of Huffman codes
|
||||
// each entry's index is its code and its value is a JavaScript object
|
||||
@ -253,7 +251,7 @@ function getHuffmanCodes(bitLengths) {
|
||||
|
||||
// Reference: http://tools.ietf.org/html/rfc1951#page-8
|
||||
var numLengths = bitLengths.length,
|
||||
bl_count = [],
|
||||
blCount = [],
|
||||
MAX_BITS = 1;
|
||||
|
||||
// Step 1: count up how many codes of each length we have
|
||||
@ -265,22 +263,22 @@ function getHuffmanCodes(bitLengths) {
|
||||
return null;
|
||||
}
|
||||
// increment the appropriate bitlength count
|
||||
if (bl_count[length] == undefined) bl_count[length] = 0;
|
||||
if (blCount[length] == undefined) blCount[length] = 0;
|
||||
// a length of zero means this symbol is not participating in the huffman coding
|
||||
if (length > 0) bl_count[length]++;
|
||||
if (length > 0) blCount[length]++;
|
||||
|
||||
if (length > MAX_BITS) MAX_BITS = length;
|
||||
}
|
||||
|
||||
// Step 2: Find the numerical value of the smallest code for each code length
|
||||
var next_code = [],
|
||||
var nextCode = [],
|
||||
code = 0;
|
||||
for (var bits = 1; bits <= MAX_BITS; ++bits) {
|
||||
var length = bits-1;
|
||||
var length = bits - 1;
|
||||
// ensure undefined lengths are zero
|
||||
if (bl_count[length] == undefined) bl_count[length] = 0;
|
||||
code = (code + bl_count[bits-1]) << 1;
|
||||
next_code[bits] = code;
|
||||
if (blCount[length] == undefined) blCount[length] = 0;
|
||||
code = (code + blCount[bits - 1]) << 1;
|
||||
nextCode [bits] = code;
|
||||
}
|
||||
|
||||
// Step 3: Assign numerical values to all codes
|
||||
@ -288,9 +286,9 @@ function getHuffmanCodes(bitLengths) {
|
||||
for (var n = 0; n < numLengths; ++n) {
|
||||
var len = bitLengths[n];
|
||||
if (len != 0) {
|
||||
table[next_code[len]] = { length: len, symbol: n }; //, bitstring: binaryValueToString(next_code[len],len) };
|
||||
table[nextCode [len]] = { length: len, symbol: n }; //, bitstring: binaryValueToString(nextCode [len],len) };
|
||||
tableLength++;
|
||||
next_code[len]++;
|
||||
nextCode [len]++;
|
||||
}
|
||||
}
|
||||
table.maxLength = tableLength;
|
||||
@ -321,7 +319,8 @@ function getFixedLiteralTable() {
|
||||
// create once
|
||||
if (!fixedHCtoLiteral) {
|
||||
var bitlengths = new Array(288);
|
||||
for (var i = 0; i <= 143; ++i) bitlengths[i] = 8;
|
||||
var i;
|
||||
for (i = 0; i <= 143; ++i) bitlengths[i] = 8;
|
||||
for (i = 144; i <= 255; ++i) bitlengths[i] = 9;
|
||||
for (i = 256; i <= 279; ++i) bitlengths[i] = 7;
|
||||
for (i = 280; i <= 287; ++i) bitlengths[i] = 8;
|
||||
@ -335,7 +334,9 @@ function getFixedDistanceTable() {
|
||||
// create once
|
||||
if (!fixedHCtoDistance) {
|
||||
var bitlengths = new Array(32);
|
||||
for (var i = 0; i < 32; ++i) { bitlengths[i] = 5; }
|
||||
for (var i = 0; i < 32; ++i) {
|
||||
bitlengths[i] = 5;
|
||||
}
|
||||
|
||||
// get huffman code table
|
||||
fixedHCtoDistance = getHuffmanCodes(bitlengths);
|
||||
@ -347,13 +348,12 @@ function getFixedDistanceTable() {
|
||||
// then return that symbol
|
||||
function decodeSymbol(bstream, hcTable) {
|
||||
var code = 0, len = 0;
|
||||
var match = false;
|
||||
|
||||
// loop until we match
|
||||
for (;;) {
|
||||
// read in next bit
|
||||
var bit = bstream.readBits(1);
|
||||
code = (code<<1) | bit;
|
||||
code = (code << 1) | bit;
|
||||
++len;
|
||||
|
||||
// check against Huffman Code table and break if found
|
||||
@ -372,47 +372,47 @@ function decodeSymbol(bstream, hcTable) {
|
||||
|
||||
|
||||
var CodeLengthCodeOrder = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15];
|
||||
/*
|
||||
Extra Extra Extra
|
||||
Code Bits Length(s) Code Bits Lengths Code Bits Length(s)
|
||||
---- ---- ------ ---- ---- ------- ---- ---- -------
|
||||
257 0 3 267 1 15,16 277 4 67-82
|
||||
258 0 4 268 1 17,18 278 4 83-98
|
||||
259 0 5 269 2 19-22 279 4 99-114
|
||||
260 0 6 270 2 23-26 280 4 115-130
|
||||
261 0 7 271 2 27-30 281 5 131-162
|
||||
262 0 8 272 2 31-34 282 5 163-194
|
||||
263 0 9 273 3 35-42 283 5 195-226
|
||||
264 0 10 274 3 43-50 284 5 227-257
|
||||
265 1 11,12 275 3 51-58 285 0 258
|
||||
266 1 13,14 276 3 59-66
|
||||
/*
|
||||
Extra Extra Extra
|
||||
Code Bits Length(s) Code Bits Lengths Code Bits Length(s)
|
||||
---- ---- ------ ---- ---- ------- ---- ---- -------
|
||||
257 0 3 267 1 15,16 277 4 67-82
|
||||
258 0 4 268 1 17,18 278 4 83-98
|
||||
259 0 5 269 2 19-22 279 4 99-114
|
||||
260 0 6 270 2 23-26 280 4 115-130
|
||||
261 0 7 271 2 27-30 281 5 131-162
|
||||
262 0 8 272 2 31-34 282 5 163-194
|
||||
263 0 9 273 3 35-42 283 5 195-226
|
||||
264 0 10 274 3 43-50 284 5 227-257
|
||||
265 1 11,12 275 3 51-58 285 0 258
|
||||
266 1 13,14 276 3 59-66
|
||||
|
||||
*/
|
||||
*/
|
||||
var LengthLookupTable = [
|
||||
[0,3], [0,4], [0,5], [0,6],
|
||||
[0,7], [0,8], [0,9], [0,10],
|
||||
[1,11], [1,13], [1,15], [1,17],
|
||||
[2,19], [2,23], [2,27], [2,31],
|
||||
[3,35], [3,43], [3,51], [3,59],
|
||||
[4,67], [4,83], [4,99], [4,115],
|
||||
[5,131], [5,163], [5,195], [5,227],
|
||||
[0,258]
|
||||
[0,3], [0,4], [0,5], [0,6],
|
||||
[0,7], [0,8], [0,9], [0,10],
|
||||
[1,11], [1,13], [1,15], [1,17],
|
||||
[2,19], [2,23], [2,27], [2,31],
|
||||
[3,35], [3,43], [3,51], [3,59],
|
||||
[4,67], [4,83], [4,99], [4,115],
|
||||
[5,131], [5,163], [5,195], [5,227],
|
||||
[0,258]
|
||||
];
|
||||
/*
|
||||
Extra Extra Extra
|
||||
Code Bits Dist Code Bits Dist Code Bits Distance
|
||||
---- ---- ---- ---- ---- ------ ---- ---- --------
|
||||
0 0 1 10 4 33-48 20 9 1025-1536
|
||||
1 0 2 11 4 49-64 21 9 1537-2048
|
||||
2 0 3 12 5 65-96 22 10 2049-3072
|
||||
3 0 4 13 5 97-128 23 10 3073-4096
|
||||
4 1 5,6 14 6 129-192 24 11 4097-6144
|
||||
5 1 7,8 15 6 193-256 25 11 6145-8192
|
||||
6 2 9-12 16 7 257-384 26 12 8193-12288
|
||||
7 2 13-16 17 7 385-512 27 12 12289-16384
|
||||
8 3 17-24 18 8 513-768 28 13 16385-24576
|
||||
9 3 25-32 19 8 769-1024 29 13 24577-32768
|
||||
*/
|
||||
/*
|
||||
Extra Extra Extra
|
||||
Code Bits Dist Code Bits Dist Code Bits Distance
|
||||
---- ---- ---- ---- ---- ------ ---- ---- --------
|
||||
0 0 1 10 4 33-48 20 9 1025-1536
|
||||
1 0 2 11 4 49-64 21 9 1537-2048
|
||||
2 0 3 12 5 65-96 22 10 2049-3072
|
||||
3 0 4 13 5 97-128 23 10 3073-4096
|
||||
4 1 5,6 14 6 129-192 24 11 4097-6144
|
||||
5 1 7,8 15 6 193-256 25 11 6145-8192
|
||||
6 2 9-12 16 7 257-384 26 12 8193-12288
|
||||
7 2 13-16 17 7 385-512 27 12 12289-16384
|
||||
8 3 17-24 18 8 513-768 28 13 16385-24576
|
||||
9 3 25-32 19 8 769-1024 29 13 24577-32768
|
||||
*/
|
||||
var DistLookupTable = [
|
||||
[0,1], [0,2], [0,3], [0,4],
|
||||
[1,5], [1,7],
|
||||
@ -446,10 +446,9 @@ function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
|
||||
stream, and copy length bytes from this
|
||||
position to the output stream.
|
||||
*/
|
||||
var numSymbols = 0, blockSize = 0;
|
||||
var blockSize = 0;
|
||||
for (;;) {
|
||||
var symbol = decodeSymbol(bstream, hcLiteralTable);
|
||||
++numSymbols;
|
||||
if (symbol < 256) {
|
||||
// copy literal byte to output
|
||||
buffer.insertByte(symbol);
|
||||
@ -461,7 +460,7 @@ function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
|
||||
break;
|
||||
}
|
||||
else {
|
||||
var lengthLookup = LengthLookupTable[symbol-257],
|
||||
var lengthLookup = LengthLookupTable[symbol - 257],
|
||||
length = lengthLookup[1] + bstream.readBits(lengthLookup[0]),
|
||||
distLookup = DistLookupTable[decodeSymbol(bstream, hcDistanceTable)],
|
||||
distance = distLookup[1] + bstream.readBits(distLookup[0]);
|
||||
@ -479,13 +478,13 @@ function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
|
||||
// loop for each character
|
||||
var ch = buffer.ptr - distance;
|
||||
blockSize += length;
|
||||
if(length > distance) {
|
||||
var data = buffer.data;
|
||||
while (length--) {
|
||||
buffer.insertByte(data[ch++]);
|
||||
}
|
||||
if (length > distance) {
|
||||
var data = buffer.data;
|
||||
while (length--) {
|
||||
buffer.insertByte(data[ch++]);
|
||||
}
|
||||
} else {
|
||||
buffer.insertBytes(buffer.data.subarray(ch, ch + length))
|
||||
buffer.insertBytes(buffer.data.subarray(ch, ch + length));
|
||||
}
|
||||
|
||||
} // length-distance pair
|
||||
@ -516,11 +515,11 @@ function inflate(compressedData, numDecompressedBytes) {
|
||||
if (bType == 0) {
|
||||
// skip remaining bits in this byte
|
||||
while (bstream.bitPtr != 0) bstream.readBits(1);
|
||||
var len = bstream.readBits(16),
|
||||
nlen = bstream.readBits(16);
|
||||
// TODO: check if nlen is the ones-complement of len?
|
||||
var len = bstream.readBits(16);
|
||||
bstream.readBits(16);
|
||||
// TODO: check if nlen is the ones-complement of len?
|
||||
|
||||
if(len > 0) buffer.insertBytes(bstream.readBytes(len));
|
||||
if (len > 0) buffer.insertBytes(bstream.readBytes(len));
|
||||
blockSize = len;
|
||||
}
|
||||
// fixed Huffman codes
|
||||
@ -593,9 +592,8 @@ function inflate(compressedData, numDecompressedBytes) {
|
||||
var hcLiteralTable = getHuffmanCodes(literalCodeLengths),
|
||||
hcDistanceTable = getHuffmanCodes(distanceCodeLengths);
|
||||
blockSize = inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer);
|
||||
}
|
||||
// error
|
||||
else {
|
||||
} else {
|
||||
// error
|
||||
err("Error! Encountered deflate block of type 3");
|
||||
return null;
|
||||
}
|
||||
|
@@ -144,7 +144,7 @@
     <div class="modal-body text-center">
         <p>{{_('Do you really want to restart Calibre-Web?')}}</p>
         <div id="spinner" class="spinner" style="display:none;">
-            <img id="img-spinner" src="/static/css/images/loading-icon.gif"/>
+            <img id="img-spinner" src="{{ url_for('static', filename='css/images/loading-icon.gif') }}"/>
         </div>
         <p></p>
         <button type="button" class="btn btn-default" id="restart" >{{_('Ok')}}</button>
@@ -176,7 +176,7 @@
     </div>
     <div class="modal-body text-center">
         <div id="spinner2" class="spinner2" style="display:none;">
-            <img id="img-spinner" src="/static/css/images/loading-icon.gif"/>
+            <img id="img-spinner" src="{{ url_for('static', filename='css/images/loading-icon.gif') }}"/>
         </div>
         <p></p>
         <div id="Updatecontent"></div>
@@ -27,6 +27,13 @@
         <label for="config_random_books">{{_('No. of random books to show')}}</label>
         <input type="number" min="1" max="30" class="form-control" name="config_random_books" id="config_random_books" value="{% if content.config_random_books != None %}{{ content.config_random_books }}{% endif %}" autocomplete="off">
     </div>
+    <div class="form-group">
+        <label for="config_theme">{{_('Theme')}}</label>
+        <select name="config_theme" id="config_theme" class="form-control">
+            <option value="0" {% if content.config_theme == 0 %}selected{% endif %}>{{ _("Standard Theme") }}</option>
+            <option value="1" {% if content.config_theme == 1 %}selected{% endif %}>{{ _("caliBlur! Dark Theme") }}</option>
+        </select>
+    </div>
     <div class="form-group">
         <label for="config_columns_to_ignore">{{_('Regular expression for ignoring columns')}}</label>
         <input type="text" class="form-control" name="config_columns_to_ignore" id="config_columns_to_ignore" value="{% if content.config_columns_to_ignore != None %}{{ content.config_columns_to_ignore }}{% endif %}" autocomplete="off">
@@ -12,8 +12,8 @@
     <link rel="shortcut icon" href="{{ url_for('static', filename='favicon.ico') }}">
     <link href="{{ url_for('static', filename='css/libs/bootstrap.min.css') }}" rel="stylesheet" media="screen">
     <link href="{{ url_for('static', filename='css/style.css') }}" rel="stylesheet" media="screen">
-    {% if g.user.get_theme == 1 %}
-    <link href="{{ url_for('static', filename='css/caliBlur-style.css') }}" rel="stylesheet" media="screen">
+    {% if g.current_theme == 1 %}
+    <link href="{{ url_for('static', filename='css/caliBlur.min.css') }}" rel="stylesheet" media="screen">
     {% endif %}
   </head>
   <body>
@@ -47,7 +47,7 @@
     </div>
     {% endif %}
     <div class="discover load-more">
-        <h2>{{title}}</h2>
+        <h2 class="{{title}}">{{_(title)}}</h2>
         <div class="row">
             {% if entries[0] %}
             {% for entry in entries %}
@@ -12,7 +12,7 @@
     <link rel="shortcut icon" href="{{ url_for('static', filename='favicon.ico') }}">
     <link href="{{ url_for('static', filename='css/libs/bootstrap.min.css') }}" rel="stylesheet" media="screen">
     <link href="{{ url_for('static', filename='css/style.css') }}" rel="stylesheet" media="screen">
-    {% if g.user.get_theme == 1 %}
+    {% if g.current_theme == 1 %}
     <link href="{{ url_for('static', filename='css/caliBlur.min.css') }}" rel="stylesheet" media="screen">
     {% endif %}
     <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
@@ -240,7 +240,7 @@
     <script src="{{ url_for('static', filename='js/libs/plugins.js') }}"></script>
     <script src="{{ url_for('static', filename='js/libs/jquery.form.js') }}"></script>
     <script src="{{ url_for('static', filename='js/main.js') }}"></script>
-    {% if g.user.get_theme == 1 %}
+    {% if g.current_theme == 1 %}
     <script src="{{ url_for('static', filename='js/libs/jquery.visible.min.js') }}"></script>
     <script src="{{ url_for('static', filename='js/libs/compromise.min.js') }}"></script>
     <script src="{{ url_for('static', filename='js/libs/readmore.min.js') }}"></script>
@@ -1,6 +1,6 @@
 {% extends "layout.html" %}
 {% block body %}
-<h1>{{title}}</h1>
+<h1 class="{{page}}">{{_(title)}}</h1>
 <div class="container">
     <div class="col-xs-12 col-sm-6">
         {% for entry in entries %}
@@ -35,14 +35,6 @@
         {% endfor %}
         </select>
     </div>
-    <div class="form-group">
-        <label for="theme">{{_('Theme')}}</label>
-        <select name="theme" id="theme" class="form-control">
-            <option value="0" {% if content.get_theme == 0 %}selected{% endif %}>{{ _("Standard Theme") }}</option>
-            <option value="1" {% if content.get_theme == 1 %}selected{% endif %}>{{ _("caliBlur! Dark Theme (Beta)") }}</option>
-        </select>
-    </div>
-
     <div class="form-group">
         <label for="default_language">{{_('Show books with language')}}</label>
         <select name="default_language" id="default_language" class="form-control">
A number of binary files in this commit are not shown, and several further diffs are suppressed because they are too large.
cps/ub.py (22 changed lines)
@@ -103,10 +103,6 @@ class UserBase:
     def is_anonymous(self):
         return False

-    @property
-    def get_theme(self):
-        return self.theme
-
     def get_id(self):
         return str(self.id)

@@ -178,7 +174,7 @@ class User(UserBase, Base):
     sidebar_view = Column(Integer, default=1)
     default_language = Column(String(3), default="all")
     mature_content = Column(Boolean, default=True)
-    theme = Column(Integer, default=0)
+    # theme = Column(Integer, default=0)


 # Class for anonymous user is derived from User base and completly overrides methods and properties for the
@@ -327,6 +323,7 @@ class Settings(Base):
     config_converterpath = Column(String)
     config_calibre = Column(String)
     config_rarfile_location = Column(String)
+    config_theme = Column(Integer, default=0)

     def __repr__(self):
         pass
@@ -403,6 +400,7 @@ class Config:
         if data.config_logfile:
             self.config_logfile = data.config_logfile
         self.config_rarfile_location = data.config_rarfile_location
+        self.config_theme = data.config_theme

     @property
     def get_main_dir(self):
@@ -624,12 +622,7 @@ def migrate_Database():
     except exc.OperationalError:
         conn = engine.connect()
         conn.execute("ALTER TABLE user ADD column `mature_content` INTEGER DEFAULT 1")
-    try:
-        session.query(exists().where(User.theme)).scalar()
-    except exc.OperationalError:
-        conn = engine.connect()
-        conn.execute("ALTER TABLE user ADD column `theme` INTEGER DEFAULT 0")
     session.commit()

     if session.query(User).filter(User.role.op('&')(ROLE_ANONYMOUS) == ROLE_ANONYMOUS).first() is None:
         create_anonymous_user()
     try:
@@ -690,6 +683,13 @@ def migrate_Database():
         conn.execute("ALTER TABLE Settings ADD column `config_ldap_provider_url` String DEFAULT ''")
         conn.execute("ALTER TABLE Settings ADD column `config_ldap_dn` String DEFAULT ''")
         session.commit()
+    try:
+        session.query(exists().where(Settings.config_theme)).scalar()
+    except exc.OperationalError:  # Database is not compatible, some rows are missing
+        conn = engine.connect()
+        conn.execute("ALTER TABLE Settings ADD column `config_theme` INTEGER DEFAULT 0")
+        session.commit()
+
     # Remove login capability of user Guest
     conn = engine.connect()
     conn.execute("UPDATE user SET password='' where nickname = 'Guest' and password !=''")
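The migration style used throughout migrate_Database is probe-then-ALTER: query the column, and add it with raw SQL when the query raises OperationalError. A self-contained sketch of that idiom under the assumption of a plain SQLite file; the model, table and column names are illustrative:

from sqlalchemy import Column, Integer, create_engine, exists, exc, text
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Settings(Base):
    __tablename__ = "settings"
    id = Column(Integer, primary_key=True)
    config_theme = Column(Integer, default=0)   # column that may be missing in an old database file

engine = create_engine("sqlite:///app.db")
session = sessionmaker(bind=engine)()

try:
    # Probe: selecting the column raises OperationalError if it does not exist yet.
    session.query(exists().where(Settings.config_theme == 0)).scalar()
except exc.OperationalError:
    session.rollback()
    with engine.begin() as conn:
        conn.execute(text("ALTER TABLE settings ADD COLUMN config_theme INTEGER DEFAULT 0"))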
cps/web.py (75 changed lines)
@@ -494,6 +494,21 @@ def speaking_language(languages=None):
             lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
     return languages

+# Orders all Authors in the list according to authors sort
+def order_authors(entry):
+    sort_authors = entry.author_sort.split('&')
+    authors_ordered = list()
+    error = False
+    for auth in sort_authors:
+        # ToDo: How to handle not found authorname
+        result = db.session.query(db.Authors).filter(db.Authors.sort == auth.lstrip().strip()).first()
+        if not result:
+            error = True
+            break
+        authors_ordered.append(result)
+    if not error:
+        entry.authors = authors_ordered
+    return entry
+
 # Fill indexpage with all requested data from database
 def fill_indexpage(page, database, db_filter, order, *join):
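order_authors only re-orders the existing relationship so it matches Calibre's author_sort string, and it leaves the list untouched when any sort name cannot be resolved. A standalone illustration of that matching logic; the Author class here is a stand-in, not the project's ORM model:

class Author:
    def __init__(self, name, sort):
        self.name, self.sort = name, sort

def order_by_author_sort(authors, author_sort):
    by_sort = {a.sort: a for a in authors}
    ordered = []
    for part in author_sort.split('&'):
        match = by_sort.get(part.strip())
        if match is None:          # unknown sort name: keep the original order
            return authors
        ordered.append(match)
    return ordered

book_authors = [Author("Jane Doe", "Doe, Jane"), Author("John Roe", "Roe, John")]
print([a.name for a in order_by_author_sort(book_authors, "Roe, John & Doe, Jane")])
# -> ['John Roe', 'Jane Doe']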
@@ -508,6 +523,8 @@ def fill_indexpage(page, database, db_filter, order, *join):
                     .filter(db_filter).filter(common_filters()).all()))
     entries = db.session.query(database).join(*join,isouter=True).filter(db_filter)\
         .filter(common_filters()).order_by(*order).offset(off).limit(config.config_books_per_page).all()
+    for book in entries:
+        book = order_authors(book)
     return entries, randm, pagination


@@ -532,6 +549,7 @@ def modify_database_object(input_elements, db_book_object, db_object, db_session
             type_elements = c_elements.name
         for inp_element in input_elements:
             if inp_element.lower() == type_elements.lower():
+            # if inp_element == type_elements:
                 found = True
                 break
         # if the element was not found in the new list, add it to remove list
@@ -663,6 +681,7 @@ def before_request():
     g.user = current_user
     g.allow_registration = config.config_public_reg
     g.allow_upload = config.config_uploading
+    g.current_theme = config.config_theme
     g.public_shelfes = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1).order_by(ub.Shelf.name).all()
     if not config.db_configured and request.endpoint not in ('basic_configuration', 'login') and '/static/' not in request.path:
         return redirect(url_for('basic_configuration'))
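g.current_theme is what the template changes above key off ({% if g.current_theme == 1 %} replaces the removed per-user g.user.get_theme). A minimal Flask sketch of that wiring, assuming a stand-in value for config.config_theme; it is not the project's actual layout.html:

from flask import Flask, g, render_template_string

app = Flask(__name__)
CONFIG_THEME = 1   # stand-in for config.config_theme

@app.before_request
def set_theme():
    # expose the configured theme to every rendered template, as the diff does
    g.current_theme = CONFIG_THEME

PAGE = """{% if g.current_theme == 1 %}
<link href="{{ url_for('static', filename='css/caliBlur.min.css') }}" rel="stylesheet">
{% endif %}"""

@app.route("/")
def index():
    return render_template_string(PAGE)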
@@ -926,14 +945,14 @@ def check_valid_domain(domain_text):
     return len(result)


-''' POST /post
-    name: 'username', //name of field (column in db)
-    pk: 1 //primary key (record id)
-    value: 'superuser!' //new value'''
 @app.route("/ajax/editdomain", methods=['POST'])
 @login_required
 @admin_required
 def edit_domain():
+    ''' POST /post
+        name: 'username', //name of field (column in db)
+        pk: 1 //primary key (record id)
+        value: 'superuser!' //new value'''
     vals = request.form.to_dict()
     answer = ub.session.query(ub.Registration).filter(ub.Registration.id == vals['pk']).first()
     # domain_name = request.args.get('domain')
@@ -1043,7 +1062,7 @@ def get_authors_json():
     json_dumps = json.dumps([dict(name=r.name.replace('|',',')) for r in entries])
     return json_dumps


 @app.route("/get_publishers_json", methods=['GET', 'POST'])
 @login_required_if_no_ano
 def get_publishers_json():
@@ -1142,8 +1161,8 @@ def get_update_status():
         r = requests.get(repository_url + '/git/refs/heads/master')
         r.raise_for_status()
         commit = r.json()
-    except requests.exceptions.HTTPError as ex:
-        status['message'] = _(u'HTTP Error') + ' ' + str(ex)
+    except requests.exceptions.HTTPError as e:
+        status['message'] = _(u'HTTP Error') + ' ' + str(e)
     except requests.exceptions.ConnectionError:
         status['message'] = _(u'Connection error')
     except requests.exceptions.Timeout:
@@ -1400,7 +1419,7 @@ def author_list():
         for entry in entries:
             entry.Authors.name = entry.Authors.name.replace('|', ',')
         return render_title_template('list.html', entries=entries, folder='author',
-                                     title=_(u"Author list"), page="authorlist")
+                                     title=u"Author list", page="authorlist")
     else:
         abort(404)

@@ -1658,6 +1677,8 @@ def show_book(book_id):

         entries.tags = sort(entries.tags, key = lambda tag: tag.name)

+        entries = order_authors(entries)
+
         kindle_list = helper.check_send_to_kindle(entries)
         reader_list = helper.check_read_formats(entries)

@@ -1701,7 +1722,7 @@ def get_tasks_status():
     # UIanswer = copy.deepcopy(answer)
     answer = helper.render_task_status(tasks)
     # foreach row format row
-    return render_title_template('tasks.html', entries=answer, title=_(u"Tasks"))
+    return render_title_template('tasks.html', entries=answer, title=_(u"Tasks"), page="tasks")


 @app.route("/admin")
@@ -2833,7 +2854,6 @@ def profile():
             content.sidebar_view += ub.DETAIL_RANDOM

         content.mature_content = "show_mature_content" in to_save
-        content.theme = int(to_save["theme"])

         try:
             ub.session.commit()
@@ -2894,6 +2914,8 @@ def view_configuration():
         content.config_columns_to_ignore = to_save["config_columns_to_ignore"]
         if "config_read_column" in to_save:
             content.config_read_column = int(to_save["config_read_column"])
+        if "config_theme" in to_save:
+            content.config_theme = int(to_save["config_theme"])
         if "config_title_regex" in to_save:
             if content.config_title_regex != to_save["config_title_regex"]:
                 content.config_title_regex = to_save["config_title_regex"]
@@ -2952,6 +2974,7 @@ def view_configuration():
         ub.session.commit()
         flash(_(u"Calibre-Web configuration updated"), category="success")
         config.loadSettings()
+        before_request()
         if reboot_required:
             # db.engine.dispose() # ToDo verify correct
             # ub.session.close()
@@ -3186,7 +3209,6 @@ def new_user():
         to_save = request.form.to_dict()
         content.default_language = to_save["default_language"]
         content.mature_content = "show_mature_content" in to_save
-        content.theme = int(to_save["theme"])
         if "locale" in to_save:
             content.locale = to_save["locale"]
         content.sidebar_view = 0
@@ -3409,7 +3431,6 @@ def edit_user(user_id):
             content.sidebar_view -= ub.DETAIL_RANDOM

         content.mature_content = "show_mature_content" in to_save
-        content.theme = int(to_save["theme"])

         if "default_language" in to_save:
             content.default_language = to_save["default_language"]
@@ -3462,6 +3483,9 @@ def render_edit_book(book_id):

     for indx in range(0, len(book.languages)):
         book.languages[indx].language_name = language_table[get_locale()][book.languages[indx].lang_code]
+
+    book = order_authors(book)
+
     author_names = []
     for authr in book.authors:
         author_names.append(authr.name.replace('|', ','))
@@ -3682,22 +3706,31 @@ def edit_book(book_id):
         # we have all author names now
         if input_authors == ['']:
             input_authors = [_(u'unknown')]  # prevent empty Author
-        if book.authors:
-            author0_before_edit = book.authors[0].name
-        else:
-            author0_before_edit = db.Authors(_(u'unknown'), '', 0)
+
         modify_database_object(input_authors, book.authors, db.Authors, db.session, 'author')
-        if book.authors:
-            if author0_before_edit != book.authors[0].name:
-                edited_books_id = book.id
-            book.author_sort = helper.get_sorted_author(input_authors[0])
+
+        # Search for each author if author is in database, if not, authorname and sorted authorname is generated new
+        # everything then is assembled for sorted author field in database
+        sort_authors_list = list()
+        for inp in input_authors:
+            stored_author = db.session.query(db.Authors).filter(db.Authors.name == inp).first()
+            if not stored_author:
+                stored_author = helper.get_sorted_author(inp)
+            else:
+                stored_author = stored_author.sort
+            sort_authors_list.append(helper.get_sorted_author(stored_author))
+        sort_authors = ' & '.join(sort_authors_list)
+        if book.author_sort != sort_authors:
+            edited_books_id = book.id
+            book.author_sort = sort_authors
+

         if config.config_use_google_drive:
             gdriveutils.updateGdriveCalibreFromLocal()

         error = False
         if edited_books_id:
-            error = helper.update_dir_stucture(edited_books_id, config.config_calibre_dir)
+            error = helper.update_dir_stucture(edited_books_id, config.config_calibre_dir, input_authors[0])

         if not error:
             if to_save["cover_url"]:
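Net effect of the rewritten block: author_sort is rebuilt from every submitted author, joined with ' & ', and the directory and file rename is only triggered when that string actually changes. A tiny worked example with a simplified stand-in for helper.get_sorted_author (the real helper handles generational suffixes and multi-part surnames):

def get_sorted_author(name):
    # simplified stand-in: "First Last" -> "Last, First"
    parts = name.split()
    return parts[-1] + ", " + " ".join(parts[:-1]) if len(parts) > 1 else name

input_authors = ["Jane Doe", "John Roe"]
sort_authors = " & ".join(get_sorted_author(a) for a in input_authors)
print(sort_authors)   # Doe, Jane & Roe, John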
|
messages.pot (429 changed lines): diff suppressed because it is too large.