#!/usr/bin/env python
# -*- coding: utf-8 -*-

import db
import ub
from flask import current_app as app
import logging
from tempfile import gettempdir
import sys
import os
import re
import unicodedata
from io import BytesIO
import worker
import time

from flask import send_from_directory, make_response, redirect, abort
from flask_babel import gettext as _
import threading
import shutil
import requests
import zipfile
try:
    import gdriveutils as gd
except ImportError:
    pass
import web
import server
import random
import subprocess

try:
    import unidecode
    use_unidecode = True
except ImportError:
    use_unidecode = False

# Global variables
updater_thread = None
global_WorkerThread = worker.WorkerThread()
global_WorkerThread.start()


def update_download(book_id, user_id):
    # remember that a user downloaded a book (at most one Downloads row per user/book pair)
    check = ub.session.query(ub.Downloads).filter(ub.Downloads.user_id == user_id)\
        .filter(ub.Downloads.book_id == book_id).first()
    if not check:
        new_download = ub.Downloads(user_id=user_id, book_id=book_id)
        ub.session.add(new_download)
        ub.session.commit()


# Convert an existing book entry to a new format; returns None on success,
# otherwise a translated error message
def convert_book_format(book_id, calibrepath, old_book_format, new_book_format, user_id, kindle_mail=None):
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == old_book_format).first()
    if not data:
        error_message = _(u"%(format)s format not found for book id: %(book)d", format=old_book_format, book=book_id)
        app.logger.error("convert_book_format: " + error_message)
        return error_message
    if ub.config.config_use_google_drive:
        df = gd.getFileFromEbooksFolder(book.path, data.name + "." + old_book_format.lower())
        if df:
            datafile = os.path.join(calibrepath, book.path, data.name + u"." + old_book_format.lower())
            if not os.path.exists(os.path.join(calibrepath, book.path)):
                os.makedirs(os.path.join(calibrepath, book.path))
            df.GetContentFile(datafile)
        else:
            error_message = _(u"%(format)s not found on Google Drive: %(fn)s",
                              format=old_book_format, fn=data.name + "." + old_book_format.lower())
            return error_message
    file_path = os.path.join(calibrepath, book.path, data.name)
    if os.path.exists(file_path + "." + old_book_format.lower()):
        # read settings and append converter task to queue
        if kindle_mail:
            settings = ub.get_mail_settings()
            text = _(u"Convert: %(book)s", book=book.title)
        else:
            settings = dict()
            text = _(u"Convert to %(format)s: %(book)s", format=new_book_format, book=book.title)
        settings['old_book_format'] = old_book_format
        settings['new_book_format'] = new_book_format
        global_WorkerThread.add_convert(file_path, book.id, user_id, text, settings, kindle_mail)
        return None
    else:
        error_message = _(u"%(format)s not found: %(fn)s",
                          format=old_book_format, fn=data.name + "." + old_book_format.lower())
        return error_message
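
# Illustrative call (book id and paths are made up; returns None on success,
# otherwise a translated error message):
#   convert_book_format(5, '/books', u'EPUB', u'MOBI', user_id=1)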


def send_test_mail(kindle_mail, user_name):
    global_WorkerThread.add_email(_(u'Calibre-Web test e-mail'), None, None, ub.get_mail_settings(),
                                  kindle_mail, user_name, _(u"Test e-mail"))
    return


# Send registration email or password reset email, depending on parameter resend (False means welcome email)
def send_registration_mail(e_mail, user_name, default_password, resend=False):
    text = "Hello %s!\r\n" % user_name
    if not resend:
        text += "Your new account at Calibre-Web has been created. Thanks for joining us!\r\n"
    text += "Please log in to your account using the following information:\r\n"
    text += "User name: %s\r\n" % user_name
    text += "Password: %s\r\n" % default_password
    text += "Don't forget to change your password after first login.\r\n"
    text += "Sincerely\r\n\r\n"
    text += "Your Calibre-Web team"
    global_WorkerThread.add_email(_(u'Get Started with Calibre-Web'), None, None, ub.get_mail_settings(),
                                  e_mail, user_name, _(u"Registration e-mail for user: %(name)s", name=user_name), text)
    return


# Files are processed in the following order/priority:
# 1: If a Mobi file exists, it is sent directly to the kindle email
# 2: If an Epub file exists, it is converted to Mobi and then sent to the kindle email
# 3: If a Pdf file exists, it is sent directly to the kindle email
def send_mail(book_id, kindle_mail, calibrepath, user_id):
    """Send email with attachments"""
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    data = db.session.query(db.Data).filter(db.Data.book == book.id).all()

    formats = {}
    for entry in data:
        if entry.format == "MOBI":
            formats["mobi"] = entry.name + ".mobi"
        if entry.format == "EPUB":
            formats["epub"] = entry.name + ".epub"
        if entry.format == "PDF":
            formats["pdf"] = entry.name + ".pdf"

    if len(formats) == 0:
        return _(u"Could not find any formats suitable for sending by e-mail")

    if 'mobi' in formats:
        result = formats['mobi']
    elif 'epub' in formats:
        # returns None on success, otherwise an error message
        return convert_book_format(book_id, calibrepath, u'epub', u'mobi', user_id, kindle_mail)
    elif 'pdf' in formats:
        result = formats['pdf']  # worker.get_attachment()
    else:
        return _(u"Could not find any formats suitable for sending by e-mail")
    if result:
        global_WorkerThread.add_email(_(u"Send to Kindle"), book.path, result, ub.get_mail_settings(),
                                      kindle_mail, user_id, _(u"E-mail: %(book)s", book=book.title))
    else:
        return _(u"The requested file could not be read. Maybe wrong permissions?")
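
# Illustrative call (address and paths are made up; returns None once the task
# is queued, otherwise an error message):
#   send_mail(5, u'someone@kindle.com', '/books', user_id=1)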


def get_valid_filename(value, replace_whitespace=True):
    """
    Returns the given string converted to a string that can be used for a clean
    filename. Limits the result to a maximum of 128 characters.
    """
    if value[-1:] == u'.':
        value = value[:-1] + u'_'
    value = value.replace("/", "_").replace(":", "_").strip('\0')
    if use_unidecode:
        value = (unidecode.unidecode(value)).strip()
    else:
        value = value.replace(u'§', u'SS')
        value = value.replace(u'ß', u'ss')
        value = unicodedata.normalize('NFKD', value)
        re_slugify = re.compile(r'[\W\s-]', re.UNICODE)
        if isinstance(value, str):  # Python3 str, Python2 unicode
            value = re_slugify.sub('', value).strip()
        else:
            value = unicode(re_slugify.sub('', value).strip())
    if replace_whitespace:
        # *+:\"/<>? are replaced by _
        value = re.sub(r'[\*\+:\\\"/<>\?]+', u'_', value, flags=re.U)
        # pipe has to be replaced with comma
        value = re.sub(r'[\|]+', u',', value, flags=re.U)
    value = value[:128]
    if not value:
        raise ValueError("Filename cannot be empty")
    return value
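
# Example (illustrative, assuming unidecode is installed):
#   get_valid_filename(u'Tom Clancy: Op-Center/Mirror Image?')
#   -> u'Tom Clancy_ Op-Center_Mirror Image_'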


# Turn "Firstname ... Lastname [suffix]" into "Lastname, Firstname ..." for sorting
def get_sorted_author(value):
    try:
        regexes = [r"^(JR|SR)\.?$", r"^I{1,3}\.?$", r"^IV\.?$"]
        combined = "(" + ")|(".join(regexes) + ")"
        value = value.split(" ")
        if re.match(combined, value[-1].upper()):
            value2 = value[-2] + ", " + " ".join(value[:-2]) + " " + value[-1]
        else:
            value2 = value[-1] + ", " + " ".join(value[:-1])
    except Exception:
        web.app.logger.error("Sorting author " + str(value) + " failed")
        value2 = value
    return value2
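
# Examples (illustrative):
#   get_sorted_author(u'Arthur C. Clarke')        -> u'Clarke, Arthur C.'
#   get_sorted_author(u'Martin Luther King Jr.')  -> u'King, Martin Luther Jr.'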


# Deletes a book from the local file storage; returns True if deletion succeeded, otherwise False
def delete_book_file(book, calibrepath, book_format=None):
    # check that path is 2 elements deep, check that target path has no subfolders
    if book.path.count('/') == 1:
        path = os.path.join(calibrepath, book.path)
        if book_format:
            for file in os.listdir(path):
                if file.upper().endswith("." + book_format):
                    os.remove(os.path.join(path, file))
            return True
        else:
            if os.path.isdir(path):
                if len(next(os.walk(path))[1]):
                    web.app.logger.error(
                        "Deleting book " + str(book.id) + " failed, path has subfolders: " + book.path)
                    return False
                shutil.rmtree(path, ignore_errors=True)
                return True
            else:
                web.app.logger.error("Deleting book " + str(book.id) + " failed, book path not valid: " + book.path)
                return False
    return False


# Moves files on disk (and updates localbook.path) after title/author changes;
# returns False if everything succeeded, otherwise a translated error message
def update_dir_structure_file(book_id, calibrepath):
    localbook = db.session.query(db.Books).filter(db.Books.id == book_id).first()
    path = os.path.join(calibrepath, localbook.path)

    authordir = localbook.path.split('/')[0]
    new_authordir = get_valid_filename(localbook.authors[0].name)

    titledir = localbook.path.split('/')[1]
    new_titledir = get_valid_filename(localbook.title) + " (" + str(book_id) + ")"

    if titledir != new_titledir:
        try:
            new_title_path = os.path.join(os.path.dirname(path), new_titledir)
            if not os.path.exists(new_title_path):
                os.renames(path, new_title_path)
            else:
                web.app.logger.info("Copying title: " + path + " into existing: " + new_title_path)
                for dir_name, subdir_list, file_list in os.walk(path):
                    for file in file_list:
                        os.renames(os.path.join(dir_name, file),
                                   os.path.join(new_title_path + dir_name[len(path):], file))
            path = new_title_path
            localbook.path = localbook.path.split('/')[0] + '/' + new_titledir
        except OSError as ex:
            web.app.logger.error("Rename title from: " + path + " to " + new_title_path)
            web.app.logger.error(ex, exc_info=True)
            return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
                     src=path, dest=new_title_path, error=str(ex))
    if authordir != new_authordir:
        try:
            new_author_path = os.path.join(os.path.join(calibrepath, new_authordir), os.path.basename(path))
            os.renames(path, new_author_path)
            localbook.path = new_authordir + '/' + localbook.path.split('/')[1]
        except OSError as ex:
            web.app.logger.error("Rename author from: " + path + " to " + new_author_path)
            web.app.logger.error(ex, exc_info=True)
            return _("Rename author from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
                     src=path, dest=new_author_path, error=str(ex))
    return False


def update_dir_structure_gdrive(book_id):
    error = False
    book = db.session.query(db.Books).filter(db.Books.id == book_id).first()

    authordir = book.path.split('/')[0]
    new_authordir = get_valid_filename(book.authors[0].name)
    titledir = book.path.split('/')[1]
    new_titledir = get_valid_filename(book.title) + " (" + str(book_id) + ")"

    if titledir != new_titledir:
        gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), titledir)
        if gFile:
            gFile['title'] = new_titledir
            gFile.Upload()
            book.path = book.path.split('/')[0] + '/' + new_titledir
            gd.updateDatabaseOnEdit(gFile['id'], book.path)  # only child folder affected
        else:
            error = _(u'File %(file)s not found on Google Drive', file=book.path)  # file not found

    if authordir != new_authordir:
        gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), titledir)
        if gFile:
            gd.moveGdriveFolderRemote(gFile, new_authordir)
            book.path = new_authordir + '/' + book.path.split('/')[1]
            gd.updateDatabaseOnEdit(gFile['id'], book.path)
        else:
            error = _(u'File %(file)s not found on Google Drive', file=authordir)  # file not found
    return error


def delete_book_gdrive(book, book_format):
    error = False
    if book_format:
        name = ''
        for entry in book.data:
            if entry.format.upper() == book_format:
                name = entry.name + '.' + book_format
        gFile = gd.getFileFromEbooksFolder(book.path, name)
    else:
        gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), book.path.split('/')[1])
    if gFile:
        gd.deleteDatabaseEntry(gFile['id'])
        gFile.Trash()
    else:
        error = _(u'Book path %(path)s not found on Google Drive', path=book.path)  # file not found
    return error


def generate_random_password():
    # Note: random.sample draws without replacement from a non-cryptographic PRNG,
    # so this is only suitable for throwaway initial passwords
    s = "abcdefghijklmnopqrstuvwxyz01234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%&*()?"
    passlen = 8
    return "".join(random.sample(s, passlen))
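
# A hardened variant would look roughly like this (sketch, not part of the original API):
#   sysrand = random.SystemRandom()
#   password = "".join(sysrand.choice(s) for _ in range(passlen))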


################################## External interface


def update_dir_stucture(book_id, calibrepath):
    if ub.config.config_use_google_drive:
        return update_dir_structure_gdrive(book_id)
    else:
        return update_dir_structure_file(book_id, calibrepath)
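
# Illustrative call (a library layout of "Author/Title (id)" is assumed):
#   update_dir_stucture(5, '/books')  # returns False on success, else an error message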


def delete_book(book, calibrepath, book_format):
    if ub.config.config_use_google_drive:
        return delete_book_gdrive(book, book_format)
    else:
        return delete_book_file(book, calibrepath, book_format)


def get_book_cover(cover_path):
    if ub.config.config_use_google_drive:
        try:
            path = gd.get_cover_via_gdrive(cover_path)
            if path:
                return redirect(path)
            else:
                web.app.logger.error(cover_path + '/cover.jpg not found on Google Drive')
                return send_from_directory(os.path.join(os.path.dirname(__file__), "static"), "generic_cover.jpg")
        except Exception as e:
            web.app.logger.error("Error Message: " + str(e))
            web.app.logger.exception(e)
            return send_from_directory(os.path.join(os.path.dirname(__file__), "static"), "generic_cover.jpg")
    else:
        return send_from_directory(os.path.join(ub.config.config_calibre_dir, cover_path), "cover.jpg")


# Saves the book cover to Google Drive or locally
def save_cover(url, book_path):
    img = requests.get(url)
    if img.headers.get('content-type') != 'image/jpeg':
        web.app.logger.error("Cover is not a jpg file, can't save")
        return False

    if ub.config.config_use_google_drive:
        tmpDir = gettempdir()
        tmp_cover = os.path.join(tmpDir, "uploaded_cover.jpg")
        with open(tmp_cover, "wb") as f:
            f.write(img.content)
        gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg'), tmp_cover)
        web.app.logger.info("Cover is saved on Google Drive")
        return True

    with open(os.path.join(ub.config.config_calibre_dir, book_path, "cover.jpg"), "wb") as f:
        f.write(img.content)
    web.app.logger.info("Cover is saved")
    return True


def do_download_file(book, book_format, data, headers):
    if ub.config.config_use_google_drive:
        startTime = time.time()
        df = gd.getFileFromEbooksFolder(book.path, data.name + "." + book_format)
        web.app.logger.debug(time.time() - startTime)
        if df:
            return gd.do_gdrive_download(df, headers)
        else:
            abort(404)
    else:
        response = make_response(
            send_from_directory(os.path.join(ub.config.config_calibre_dir, book.path),
                                data.name + "." + book_format))
        response.headers = headers
        return response

##################################


class Updater(threading.Thread):

    def __init__(self):
        threading.Thread.__init__(self)
        self.status = 0

    def run(self):
        # progress markers queried via get_update_status():
        # 1 downloading, 2 unzipping, 3 extracting, 4 replacing files,
        # 5 closing database connections, 6 stopping server, 7 finished
        self.status = 1
        r = requests.get('https://api.github.com/repos/janeczku/calibre-web/zipball/master', stream=True)
        fname = re.findall("filename=(.+)", r.headers['content-disposition'])[0]
        self.status = 2
        z = zipfile.ZipFile(BytesIO(r.content))
        self.status = 3
        tmp_dir = gettempdir()
        z.extractall(tmp_dir)
        self.status = 4
        self.update_source(os.path.join(tmp_dir, os.path.splitext(fname)[0]), ub.config.get_main_dir)
        self.status = 5
        db.session.close()
        db.engine.dispose()
        ub.session.close()
        ub.engine.dispose()
        self.status = 6
        server.Server.setRestartTyp(True)
        server.Server.stopServer()
        self.status = 7

    def get_update_status(self):
        return self.status

    @classmethod
    def file_to_list(cls, filelist):
        return [x.strip() for x in open(filelist, 'r') if not x.startswith('#EXT')]

    @classmethod
    def one_minus_two(cls, one, two):
        return [x for x in one if x not in set(two)]
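
    # Example (illustrative): Updater.one_minus_two(['a', 'b', 'c'], ['b']) -> ['a', 'c']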

    @classmethod
    def reduce_dirs(cls, delete_files, new_list):
        # collapse the list of files to delete to the topmost directory
        # that no longer occurs in the new file list
        new_delete = []
        for filename in delete_files:
            parts = filename.split(os.sep)
            sub = ''
            for part in parts:
                sub = os.path.join(sub, part)
                if sub == '':
                    sub = os.sep
                count = 0
                for song in new_list:
                    if song.startswith(sub):
                        count += 1
                        break
                if count == 0:
                    if sub != '\\':
                        new_delete.append(sub)
                    break
        return list(set(new_delete))
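
    # Example (illustrative, POSIX paths):
    #   reduce_dirs(['/old/a.py', '/old/sub/b.py'], ['/new/c.py']) -> ['/old']
    # both stale files collapse to their topmost dead directory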

    @classmethod
    def reduce_files(cls, remove_items, exclude_items):
        # exclude_items is a tuple, so startswith() tests all excluded prefixes at once
        rf = []
        for item in remove_items:
            if not item.startswith(exclude_items):
                rf.append(item)
        return rf

    @classmethod
    def moveallfiles(cls, root_src_dir, root_dst_dir):
        change_permissions = True
        if sys.platform == "win32" or sys.platform == "darwin":
            change_permissions = False
        else:
            logging.getLogger('cps.web').debug('Update on OS-System: ' + sys.platform)
        new_permissions = os.stat(root_dst_dir)
        for src_dir, __, files in os.walk(root_src_dir):
            dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
                logging.getLogger('cps.web').debug('Create-Dir: ' + dst_dir)
                if change_permissions:
                    os.chown(dst_dir, new_permissions.st_uid, new_permissions.st_gid)
            for file_ in files:
                src_file = os.path.join(src_dir, file_)
                dst_file = os.path.join(dst_dir, file_)
                if os.path.exists(dst_file):
                    if change_permissions:
                        permission = os.stat(dst_file)
                    logging.getLogger('cps.web').debug('Remove file before copy: ' + dst_file)
                    os.remove(dst_file)
                else:
                    if change_permissions:
                        permission = new_permissions
                shutil.move(src_file, dst_dir)
                logging.getLogger('cps.web').debug('Move File ' + src_file + ' to ' + dst_dir)
                if change_permissions:
                    try:
                        os.chown(dst_file, permission.st_uid, permission.st_gid)
                    except Exception as e:
                        old_permissions = os.stat(dst_file)
                        logging.getLogger('cps.web').debug('Fail change permissions of ' + str(dst_file) + '. Before: '
                            + str(old_permissions.st_uid) + ':' + str(old_permissions.st_gid) + ' After: '
                            + str(permission.st_uid) + ':' + str(permission.st_gid) + ' error: ' + str(e))
        return

    def update_source(self, source, destination):
        # destination files
        old_list = list()
        exclude = (
            'vendor' + os.sep + 'kindlegen.exe', 'vendor' + os.sep + 'kindlegen', os.sep + 'app.db',
            os.sep + 'vendor', os.sep + 'calibre-web.log')
        for root, dirs, files in os.walk(destination, topdown=True):
            for name in files:
                old_list.append(os.path.join(root, name).replace(destination, ''))
            for name in dirs:
                old_list.append(os.path.join(root, name).replace(destination, ''))
        # source files
        new_list = list()
        for root, dirs, files in os.walk(source, topdown=True):
            for name in files:
                new_list.append(os.path.join(root, name).replace(source, ''))
            for name in dirs:
                new_list.append(os.path.join(root, name).replace(source, ''))

        delete_files = self.one_minus_two(old_list, new_list)

        rf = self.reduce_files(delete_files, exclude)

        remove_items = self.reduce_dirs(rf, new_list)

        self.moveallfiles(source, destination)

        for item in remove_items:
            item_path = os.path.join(destination, item[1:])
            if os.path.isdir(item_path):
                logging.getLogger('cps.web').debug("Delete dir " + item_path)
                shutil.rmtree(item_path)
            else:
                try:
                    logging.getLogger('cps.web').debug("Delete file " + item_path)
                    os.remove(item_path)
                except Exception:
                    logging.getLogger('cps.web').debug("Could not remove: " + item_path)
        shutil.rmtree(source, ignore_errors=True)


def check_unrar(unrarLocation):
    error = False
    version = False  # guards against unrar output that contains no version line
    if os.path.exists(unrarLocation):
        try:
            if sys.version_info < (3, 0):
                unrarLocation = unrarLocation.encode(sys.getfilesystemencoding())
            p = subprocess.Popen(unrarLocation, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            p.wait()
            for lines in p.stdout.readlines():
                if isinstance(lines, bytes):
                    lines = lines.decode('utf-8')
                value = re.search('UNRAR (.*) freeware', lines)
                if value:
                    version = value.group(1)
        except OSError as e:
            error = True
            web.app.logger.exception(e)
            version = _(u'Error executing UnRar')
    else:
        version = _(u'Unrar binary file not found')
        error = True
    return (error, version)
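
# Illustrative call: error, version = check_unrar('/usr/bin/unrar')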


def is_sha1(sha1):
    if len(sha1) != 40:
        return False
    try:
        int(sha1, 16)
    except ValueError:
        return False
    return True
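
# Example (illustrative): is_sha1('bb7d2c6273ae4560e83950d36d64533343623a57') -> True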


def get_current_version_info():
    # The $Format:...$ placeholders below are expanded by "git archive" when the
    # repository marks this file with export-subst; in a plain checkout they stay
    # literal and this function returns False
    content = {}
    content[0] = '$Format:%H$'
    content[1] = '$Format:%cI$'
    # content[0] = 'bb7d2c6273ae4560e83950d36d64533343623a57'
    # content[1] = '2018-09-09T10:13:08+02:00'
    if is_sha1(content[0]) and len(content[1]) > 0:
        return {'hash': content[0], 'datetime': content[1]}
    return False