New updater with default setting -> only stable releases #653
parent 62574939b2
commit bacb6fb561

2 .gitattributes vendored
@@ -1,4 +1,4 @@
helper.py ident export-subst
updater.py ident export-subst
/test export-ignore
cps/static/css/libs/* linguist-vendored
cps/static/js/libs/* linguist-vendored

192 cps/helper.py
@@ -4,13 +4,13 @@
import db
import ub
from flask import current_app as app
import logging
# import logging
from tempfile import gettempdir
import sys
import os
import re
import unicodedata
from io import BytesIO
# from io import BytesIO
import worker
import time
from flask import send_from_directory, make_response, redirect, abort
@@ -18,16 +18,16 @@ from flask_babel import gettext as _
from flask_login import current_user
from babel.dates import format_datetime
from datetime import datetime
import threading
# import threading
import shutil
import requests
import zipfile
# import zipfile
try:
    import gdriveutils as gd
except ImportError:
    pass
import web
import server
# import server
import random
import subprocess

@@ -38,7 +38,7 @@ except ImportError:
    use_unidecode = False

# Global variables
updater_thread = None
# updater_thread = None
global_WorkerThread = worker.WorkerThread()
global_WorkerThread.start()

@@ -468,166 +468,6 @@ def do_download_file(book, book_format, data, headers):
##################################


class Updater(threading.Thread):

    def __init__(self):
        threading.Thread.__init__(self)
        self.status = 0

    def run(self):
        try:
            self.status = 1
            r = requests.get('https://api.github.com/repos/janeczku/calibre-web/zipball/master', stream=True)
            r.raise_for_status()

            fname = re.findall("filename=(.+)", r.headers['content-disposition'])[0]
            self.status = 2
            z = zipfile.ZipFile(BytesIO(r.content))
            self.status = 3
            tmp_dir = gettempdir()
            z.extractall(tmp_dir)
            self.status = 4
            self.update_source(os.path.join(tmp_dir, os.path.splitext(fname)[0]), ub.config.get_main_dir)
            self.status = 6
            time.sleep(2)
            server.Server.setRestartTyp(True)
            server.Server.stopServer()
            self.status = 7
            time.sleep(2)
        except requests.exceptions.HTTPError as ex:
            logging.getLogger('cps.web').info( u'HTTP Error' + ' ' + str(ex))
            self.status = 8
        except requests.exceptions.ConnectionError:
            logging.getLogger('cps.web').info(u'Connection error')
            self.status = 9
        except requests.exceptions.Timeout:
            logging.getLogger('cps.web').info(u'Timeout while establishing connection')
            self.status = 10
        except requests.exceptions.RequestException:
            self.status = 11
            logging.getLogger('cps.web').info(u'General error')

    def get_update_status(self):
        return self.status

    @classmethod
    def file_to_list(self, filelist):
        return [x.strip() for x in open(filelist, 'r') if not x.startswith('#EXT')]

    @classmethod
    def one_minus_two(self, one, two):
        return [x for x in one if x not in set(two)]

    @classmethod
    def reduce_dirs(self, delete_files, new_list):
        new_delete = []
        for filename in delete_files:
            parts = filename.split(os.sep)
            sub = ''
            for part in parts:
                sub = os.path.join(sub, part)
                if sub == '':
                    sub = os.sep
                count = 0
                for song in new_list:
                    if song.startswith(sub):
                        count += 1
                        break
                if count == 0:
                    if sub != '\\':
                        new_delete.append(sub)
                    break
        return list(set(new_delete))

    @classmethod
    def reduce_files(self, remove_items, exclude_items):
        rf = []
        for item in remove_items:
            if not item.startswith(exclude_items):
                rf.append(item)
        return rf

    @classmethod
    def moveallfiles(self, root_src_dir, root_dst_dir):
        change_permissions = True
        if sys.platform == "win32" or sys.platform == "darwin":
            change_permissions = False
        else:
            logging.getLogger('cps.web').debug('Update on OS-System : ' + sys.platform)
        new_permissions = os.stat(root_dst_dir)
        # print new_permissions
        for src_dir, __, files in os.walk(root_src_dir):
            dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
                logging.getLogger('cps.web').debug('Create-Dir: '+dst_dir)
                if change_permissions:
                    # print('Permissions: User '+str(new_permissions.st_uid)+' Group '+str(new_permissions.st_uid))
                    os.chown(dst_dir, new_permissions.st_uid, new_permissions.st_gid)
            for file_ in files:
                src_file = os.path.join(src_dir, file_)
                dst_file = os.path.join(dst_dir, file_)
                if os.path.exists(dst_file):
                    if change_permissions:
                        permission = os.stat(dst_file)
                    logging.getLogger('cps.web').debug('Remove file before copy: '+dst_file)
                    os.remove(dst_file)
                else:
                    if change_permissions:
                        permission = new_permissions
                shutil.move(src_file, dst_dir)
                logging.getLogger('cps.web').debug('Move File '+src_file+' to '+dst_dir)
                if change_permissions:
                    try:
                        os.chown(dst_file, permission.st_uid, permission.st_gid)
                    except (Exception) as e:
                        # ex = sys.exc_info()
                        old_permissions = os.stat(dst_file)
                        logging.getLogger('cps.web').debug('Fail change permissions of ' + str(dst_file) + '. Before: '
                                                           + str(old_permissions.st_uid) + ':' + str(old_permissions.st_gid) + ' After: '
                                                           + str(permission.st_uid) + ':' + str(permission.st_gid) + ' error: '+str(e))
        return

    def update_source(self, source, destination):
        # destination files
        old_list = list()
        exclude = (
            'vendor' + os.sep + 'kindlegen.exe', 'vendor' + os.sep + 'kindlegen', os.sep + 'app.db',
            os.sep + 'vendor', os.sep + 'calibre-web.log')
        for root, dirs, files in os.walk(destination, topdown=True):
            for name in files:
                old_list.append(os.path.join(root, name).replace(destination, ''))
            for name in dirs:
                old_list.append(os.path.join(root, name).replace(destination, ''))
        # source files
        new_list = list()
        for root, dirs, files in os.walk(source, topdown=True):
            for name in files:
                new_list.append(os.path.join(root, name).replace(source, ''))
            for name in dirs:
                new_list.append(os.path.join(root, name).replace(source, ''))

        delete_files = self.one_minus_two(old_list, new_list)

        rf = self.reduce_files(delete_files, exclude)

        remove_items = self.reduce_dirs(rf, new_list)

        self.moveallfiles(source, destination)

        for item in remove_items:
            item_path = os.path.join(destination, item[1:])
            if os.path.isdir(item_path):
                logging.getLogger('cps.web').debug("Delete dir " + item_path)
                shutil.rmtree(item_path)
            else:
                try:
                    logging.getLogger('cps.web').debug("Delete file " + item_path)
                    # log_from_thread("Delete file " + item_path)
                    os.remove(item_path)
                except Exception:
                    logging.getLogger('cps.web').debug("Could not remove:" + item_path)
        shutil.rmtree(source, ignore_errors=True)


def check_unrar(unrarLocation):
@@ -654,26 +494,6 @@ def check_unrar(unrarLocation):
    return (error, version)


def is_sha1(sha1):
    if len(sha1) != 40:
        return False
    try:
        int(sha1, 16)
    except ValueError:
        return False
    return True


def get_current_version_info():
    content = {}
    content[0] = '$Format:%H$'
    content[1] = '$Format:%cI$'
    # content[0] = 'bb7d2c6273ae4560e83950d36d64533343623a57'
    # content[1] = '2018-09-09T10:13:08+02:00'
    if is_sha1(content[0]) and len(content[1]) > 0:
        return {'hash': content[0], 'datetime': content[1]}
    return False


def json_serial(obj):
    """JSON serializer for objects not serializable by default json code"""
@@ -120,7 +120,7 @@ $(function() {
                .removeClass("hidden")
                .find("span").html(data.commit);

            data.history.reverse().forEach(function(entry) {
            data.history.forEach(function(entry) {
                $("<tr><td>" + entry[0] + "</td><td>" + entry[1] + "</td></tr>").appendTo($("#update_table"));
            });
            cssClass = "alert-warning";

@@ -105,7 +105,7 @@
<div class="col">
    <h2>{{_('Administration')}}</h2>
    <div class="btn btn-default" id="restart_database">{{_('Reconnect to Calibre DB')}}</div>
    <div class="btn btn-default" id="admin_restart"data-toggle="modal" data-target="#RestartDialog">{{_('Restart Calibre-Web')}}</div>
    <div class="btn btn-default" id="admin_restart" data-toggle="modal" data-target="#RestartDialog">{{_('Restart Calibre-Web')}}</div>
    <div class="btn btn-default" id="admin_stop" data-toggle="modal" data-target="#ShutdownDialog">{{_('Stop Calibre-Web')}}</div>
</div>
</div>
@@ -76,7 +76,7 @@
</div>
<div id="collapsetwo" class="panel-collapse collapse">
    <div class="panel-body">
    <div class="form-group">
        <div class="form-group">
            <label for="config_port">{{_('Server Port')}}</label>
            <input type="number" min="1" max="65535" class="form-control" name="config_port" id="config_port" value="{% if content.config_port != None %}{{ content.config_port }}{% endif %}" autocomplete="off" required>
        </div>
@@ -88,6 +88,15 @@
        <label for="config_keyfile">{{_('SSL Keyfile location (leave it empty for non-SSL Servers)')}}</label>
        <input type="text" class="form-control" name="config_keyfile" id="config_keyfile" value="{% if content.config_keyfile != None %}{{ content.config_keyfile }}{% endif %}" autocomplete="off">
    </div>
    <div class="form-group">
        <label for="config_updater">{{_('Update channel')}}</label>
        <select name="config_updater" id="config_updater" class="form-control">
            <option value="0" {% if content.config_updatechannel == 0 %}selected{% endif %}>{{_('Stable')}}</option>
            <!--option value="1" {% if content.config_updatechannel == 1 %}selected{% endif %}>{{_('Stable (Automatic))')}}</option-->
            <option value="2" {% if content.config_updatechannel == 2 %}selected{% endif %}>{{_('Nightly')}}</option>
            <!--option-- value="3" {% if content.config_updatechannel == 3 %}selected{% endif %}>{{_('Nightly (Automatic))')}}</option-->
        </select>
    </div>
    </div>
</div>
</div>

19 cps/ub.py
@@ -46,6 +46,10 @@ SIDEBAR_PUBLISHER = 4096
DEFAULT_PASS = "admin123"
DEFAULT_PORT = int(os.environ.get("CALIBRE_PORT", 8083))

UPDATE_STABLE = 0
AUTO_UPDATE_STABLE = 1
UPDATE_NIGHTLY = 2
AUTO_UPDATE_NIGHTLY = 4


class UserBase:

@@ -166,7 +170,6 @@ class User(UserBase, Base):
    sidebar_view = Column(Integer, default=1)
    default_language = Column(String(3), default="all")
    mature_content = Column(Boolean, default=True)
    # theme = Column(Integer, default=0)


# Class for anonymous user is derived from User base and completly overrides methods and properties for the
@@ -259,7 +262,7 @@ class Downloads(Base):
    def __repr__(self):
        return '<Download %r' % self.book_id



# Baseclass representing allowed domains for registration
class Registration(Base):
    __tablename__ = 'registration'
@@ -313,6 +316,7 @@ class Settings(Base):
    config_calibre = Column(String)
    config_rarfile_location = Column(String)
    config_theme = Column(Integer, default=0)
    config_updatechannel = Column(Integer, default=0)

    def __repr__(self):
        pass
@@ -387,11 +391,16 @@ class Config:
        self.config_logfile = data.config_logfile
        self.config_rarfile_location = data.config_rarfile_location
        self.config_theme = data.config_theme
        self.config_updatechannel = data.config_updatechannel

    @property
    def get_main_dir(self):
        return self.config_main_dir

    @property
    def get_update_channel(self):
        return self.config_updatechannel

    def get_config_certfile(self):
        if cli.certfilepath:
            return cli.certfilepath
@@ -667,6 +676,12 @@ def migrate_Database():
        conn = engine.connect()
        conn.execute("ALTER TABLE Settings ADD column `config_theme` INTEGER DEFAULT 0")
        session.commit()
    try:
        session.query(exists().where(Settings.config_updatechannel)).scalar()
    except exc.OperationalError:  # Database is not compatible, some rows are missing
        conn = engine.connect()
        conn.execute("ALTER TABLE Settings ADD column `config_updatechannel` INTEGER DEFAULT 0")
        session.commit()


# Remove login capability of user Guest
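
The migration above adds the config_updatechannel column with a default of 0, which maps to UPDATE_STABLE. A minimal sketch for checking that an existing installation picked up the new column, assuming app.db sits in the Calibre-Web program directory (not part of the commit):

import sqlite3

# assumption: app.db is the settings database in the program directory
conn = sqlite3.connect('app.db')
row = conn.execute("SELECT config_updatechannel FROM settings").fetchone()
print('update channel:', row[0])  # 0 = UPDATE_STABLE (default), 2 = UPDATE_NIGHTLY
conn.close()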

496 cps/updater.py (new file)
@@ -0,0 +1,496 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import threading
import zipfile
import requests
import re
import logging
import server
import time
from io import BytesIO
import os
import sys
import shutil
from ub import config, UPDATE_STABLE
from tempfile import gettempdir
import datetime
import json
from flask_babel import gettext as _
from babel.dates import format_datetime
import web


def is_sha1(sha1):
    if len(sha1) != 40:
        return False
    try:
        int(sha1, 16)
    except ValueError:
        return False
    return True


class Updater(threading.Thread):

    def __init__(self):
        threading.Thread.__init__(self)
        self.status = 0
        self.updateIndex = None

    def get_current_version_info(self):
        if config.get_update_channel == UPDATE_STABLE:
            return self._stable_version_info()
        else:
            return self._nightly_version_info()

    def get_available_updates(self, request_method):
        if config.get_update_channel == UPDATE_STABLE:
            return self._stable_available_updates(request_method)
        else:
            return self._nightly_available_updates(request_method)

    def run(self):
        try:
            self.status = 1
            r = requests.get(self._get_request_path(), stream=True)
            r.raise_for_status()

            fname = re.findall("filename=(.+)", r.headers['content-disposition'])[0]
            self.status = 2
            z = zipfile.ZipFile(BytesIO(r.content))
            self.status = 3
            tmp_dir = gettempdir()
            z.extractall(tmp_dir)
            foldername = os.path.join(tmp_dir, z.namelist()[0])[:-1]
            if not os.path.isdir(foldername):
                self.status = 11
                logging.getLogger('cps.web').info(u'Extracted contents of zipfile not found in temp folder')
                return
            self.status = 4
            self.update_source(foldername, config.get_main_dir)
            self.status = 6
            time.sleep(2)
            server.Server.setRestartTyp(True)
            server.Server.stopServer()
            self.status = 7
            time.sleep(2)
        except requests.exceptions.HTTPError as ex:
            logging.getLogger('cps.web').info( u'HTTP Error' + ' ' + str(ex))
            self.status = 8
        except requests.exceptions.ConnectionError:
            logging.getLogger('cps.web').info(u'Connection error')
            self.status = 9
        except requests.exceptions.Timeout:
            logging.getLogger('cps.web').info(u'Timeout while establishing connection')
            self.status = 10
        except requests.exceptions.RequestException:
            self.status = 11
            logging.getLogger('cps.web').info(u'General error')

    def get_update_status(self):
        return self.status

    @classmethod
    def file_to_list(self, filelist):
        return [x.strip() for x in open(filelist, 'r') if not x.startswith('#EXT')]

    @classmethod
    def one_minus_two(self, one, two):
        return [x for x in one if x not in set(two)]

    @classmethod
    def reduce_dirs(self, delete_files, new_list):
        new_delete = []
        for filename in delete_files:
            parts = filename.split(os.sep)
            sub = ''
            for part in parts:
                sub = os.path.join(sub, part)
                if sub == '':
                    sub = os.sep
                count = 0
                for song in new_list:
                    if song.startswith(sub):
                        count += 1
                        break
                if count == 0:
                    if sub != '\\':
                        new_delete.append(sub)
                    break
        return list(set(new_delete))

    @classmethod
    def reduce_files(self, remove_items, exclude_items):
        rf = []
        for item in remove_items:
            if not item.startswith(exclude_items):
                rf.append(item)
        return rf

    @classmethod
    def moveallfiles(self, root_src_dir, root_dst_dir):
        change_permissions = True
        if sys.platform == "win32" or sys.platform == "darwin":
            change_permissions = False
        else:
            logging.getLogger('cps.web').debug('Update on OS-System : ' + sys.platform)
        new_permissions = os.stat(root_dst_dir)
        # print new_permissions
        for src_dir, __, files in os.walk(root_src_dir):
            dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
            if not os.path.exists(dst_dir):
                os.makedirs(dst_dir)
                logging.getLogger('cps.web').debug('Create-Dir: '+dst_dir)
                if change_permissions:
                    # print('Permissions: User '+str(new_permissions.st_uid)+' Group '+str(new_permissions.st_uid))
                    os.chown(dst_dir, new_permissions.st_uid, new_permissions.st_gid)
            for file_ in files:
                src_file = os.path.join(src_dir, file_)
                dst_file = os.path.join(dst_dir, file_)
                if os.path.exists(dst_file):
                    if change_permissions:
                        permission = os.stat(dst_file)
                    logging.getLogger('cps.web').debug('Remove file before copy: '+dst_file)
                    os.remove(dst_file)
                else:
                    if change_permissions:
                        permission = new_permissions
                shutil.move(src_file, dst_dir)
                logging.getLogger('cps.web').debug('Move File '+src_file+' to '+dst_dir)
                if change_permissions:
                    try:
                        os.chown(dst_file, permission.st_uid, permission.st_gid)
                    except (Exception) as e:
                        # ex = sys.exc_info()
                        old_permissions = os.stat(dst_file)
                        logging.getLogger('cps.web').debug('Fail change permissions of ' + str(dst_file) + '. Before: '
                                                           + str(old_permissions.st_uid) + ':' + str(old_permissions.st_gid) + ' After: '
                                                           + str(permission.st_uid) + ':' + str(permission.st_gid) + ' error: '+str(e))
        return

    def update_source(self, source, destination):
        # destination files
        old_list = list()
        exclude = (
            'vendor' + os.sep + 'kindlegen.exe', 'vendor' + os.sep + 'kindlegen', os.sep + 'app.db',
            os.sep + 'vendor', os.sep + 'calibre-web.log')
        for root, dirs, files in os.walk(destination, topdown=True):
            for name in files:
                old_list.append(os.path.join(root, name).replace(destination, ''))
            for name in dirs:
                old_list.append(os.path.join(root, name).replace(destination, ''))
        # source files
        new_list = list()
        for root, dirs, files in os.walk(source, topdown=True):
            for name in files:
                new_list.append(os.path.join(root, name).replace(source, ''))
            for name in dirs:
                new_list.append(os.path.join(root, name).replace(source, ''))

        delete_files = self.one_minus_two(old_list, new_list)

        rf = self.reduce_files(delete_files, exclude)

        remove_items = self.reduce_dirs(rf, new_list)

        self.moveallfiles(source, destination)

        for item in remove_items:
            item_path = os.path.join(destination, item[1:])
            if os.path.isdir(item_path):
                logging.getLogger('cps.web').debug("Delete dir " + item_path)
                shutil.rmtree(item_path)
            else:
                try:
                    logging.getLogger('cps.web').debug("Delete file " + item_path)
                    # log_from_thread("Delete file " + item_path)
                    os.remove(item_path)
                except Exception:
                    logging.getLogger('cps.web').debug("Could not remove:" + item_path)
        shutil.rmtree(source, ignore_errors=True)

    def _nightly_version_info(self):
        content = {}
        content[0] = '$Format:%H$'
        content[1] = '$Format:%cI$'
        # content[0] = 'bb7d2c6273ae4560e83950d36d64533343623a57'
        # content[1] = '2018-09-09T10:13:08+02:00'
        if is_sha1(content[0]) and len(content[1]) > 0:
            return {'version': content[0], 'datetime': content[1]}
        return False

    def _stable_version_info(self):
        return {'version': '0.6.0'}

    def _nightly_available_updates(self, request_method):
        tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
        if request_method == "GET":
            repository_url = 'https://api.github.com/repos/janeczku/calibre-web'
            status, commit = self._load_remote_data(repository_url +'/git/refs/heads/master')
            parents = []
            if status['message'] != '':
                return json.dumps(status)
            if 'object' not in commit:
                status['message'] = _(u'Unexpected data while reading update information')
                return json.dumps(status)

            if commit['object']['sha'] == status['current_commit_hash']:
                status.update({
                    'update': False,
                    'success': True,
                    'message': _(u'No update available. You already have the latest version installed')
                })
                return json.dumps(status)

            # a new update is available
            status['update'] = True

            try:
                r = requests.get(repository_url + '/git/commits/' + commit['object']['sha'])
                r.raise_for_status()
                update_data = r.json()
            except requests.exceptions.HTTPError as e:
                status['error'] = _(u'HTTP Error') + ' ' + str(e)
            except requests.exceptions.ConnectionError:
                status['error'] = _(u'Connection error')
            except requests.exceptions.Timeout:
                status['error'] = _(u'Timeout while establishing connection')
            except requests.exceptions.RequestException:
                status['error'] = _(u'General error')

            if status['message'] != '':
                return json.dumps(status)

            if 'committer' in update_data and 'message' in update_data:
                status['success'] = True
                status['message'] = _(u'A new update is available. Click on the button below to update to the latest version.')

                new_commit_date = datetime.datetime.strptime(
                    update_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
                parents.append(
                    [
                        format_datetime(new_commit_date, format='short', locale=web.get_locale()),
                        update_data['message'],
                        update_data['sha']
                    ]
                )

                # it only makes sense to analyze the parents if we know the current commit hash
                if status['current_commit_hash'] != '':
                    try:
                        parent_commit = update_data['parents'][0]
                        # limit the maximum search depth
                        remaining_parents_cnt = 10
                    except IndexError:
                        remaining_parents_cnt = None

                    if remaining_parents_cnt is not None:
                        while True:
                            if remaining_parents_cnt == 0:
                                break

                            # check if we are more than one update behind if so, go up the tree
                            if parent_commit['sha'] != status['current_commit_hash']:
                                try:
                                    r = requests.get(parent_commit['url'])
                                    r.raise_for_status()
                                    parent_data = r.json()

                                    parent_commit_date = datetime.datetime.strptime(
                                        parent_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
                                    parent_commit_date = format_datetime(
                                        parent_commit_date, format='short', locale=web.get_locale())

                                    parents.append([parent_commit_date,
                                                    parent_data['message'].replace('\r\n','<p>').replace('\n','<p>')])
                                    parent_commit = parent_data['parents'][0]
                                    remaining_parents_cnt -= 1
                                except Exception:
                                    # it isn't crucial if we can't get information about the parent
                                    break
                            else:
                                # parent is our current version
                                break

            else:
                status['success'] = False
                status['message'] = _(u'Could not fetch update information')

            # a new update is available
            status['update'] = True
            if 'body' in commit:
                status['success'] = True
                status['message'] = _(u'A new update is available. Click on the button below to update to the latest version.')

                new_commit_date = datetime.datetime.strptime(
                    commit['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
                parents.append(
                    [
                        format_datetime(new_commit_date, format='short', locale=web.get_locale()),
                        commit['message'],
                        commit['sha']
                    ]
                )

                # it only makes sense to analyze the parents if we know the current commit hash
                if status['current_commit_hash'] != '':
                    try:
                        parent_commit = commit['parents'][0]
                        # limit the maximum search depth
                        remaining_parents_cnt = 10
                    except IndexError:
                        remaining_parents_cnt = None

                    if remaining_parents_cnt is not None:
                        while True:
                            if remaining_parents_cnt == 0:
                                break

                            # check if we are more than one update behind if so, go up the tree
                            if commit['sha'] != status['current_commit_hash']:
                                try:
                                    r = requests.get(parent_commit['url'])
                                    r.raise_for_status()
                                    parent_data = r.json()

                                    parent_commit_date = datetime.datetime.strptime(
                                        parent_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
                                    parent_commit_date = format_datetime(
                                        parent_commit_date, format='short', locale=web.get_locale())

                                    parents.append([parent_commit_date, parent_data['message'], parent_data['sha']])
                                    parent_commit = parent_data['parents'][0]
                                    remaining_parents_cnt -= 1
                                except Exception:
                                    # it isn't crucial if we can't get information about the parent
                                    break
                            else:
                                # parent is our current version
                                break
            status['history'] = parents[::-1]
            return json.dumps(status)
        return ''

    def _stable_available_updates(self, request_method):
        if request_method == "GET":
            parents = []
            # repository_url = 'https://api.github.com/repos/flatpak/flatpak/releases' # test URL
            repository_url = 'https://api.github.com/repos/janeczku/calibre-web/releases'
            status, commit = self._load_remote_data(repository_url)
            if status['message'] != '':
                return json.dumps(status)
            if not commit:
                status['success'] = True
                status['message'] = _(u'No release information available')
                return json.dumps(status)
            version = status['current_commit_hash']
            current_version = status['current_commit_hash'].split('.')

            # we are already on newest version, no update available
            if 'tag_name' not in commit[0]:
                status['message'] = _(u'Unexpected data while reading update information')
                return json.dumps(status)
            if commit[0]['tag_name'] == version:
                status.update({
                    'update': False,
                    'success': True,
                    'message': _(u'No update available. You already have the latest version installed')
                })
                return json.dumps(status)

            i = len(commit) - 1
            while i >= 0:
                if 'tag_name' not in commit[i] or 'body' not in commit[i]:
                    status['message'] = _(u'Unexpected data while reading update information')
                    return json.dumps(status)
                major_version_update = int(commit[i]['tag_name'].split('.')[0])
                minor_version_update = int(commit[i]['tag_name'].split('.')[1])
                patch_version_update = int(commit[i]['tag_name'].split('.')[2])

                # Check if major versions are identical search for newest nonenqual commit and update to this one
                if major_version_update == int(current_version[0]):
                    if (minor_version_update == int(current_version[1]) and patch_version_update > int(current_version[2])) \
                            or minor_version_update > int(current_version[1]):
                        parents.append([commit[i]['tag_name'],commit[i]['body'].replace('\r\n', '<p>')])
                    i -= 1
                    continue
                if major_version_update < int(current_version[0]):
                    i -= 1
                    continue
                if major_version_update > int(current_version[0]):
                    # found update update to last version before major update, unless current version is on last version
                    # before major update
                    if commit[i+1]['tag_name'].split('.')[1] == current_version[1]:
                        parents.append([commit[i]['tag_name'],
                                        commit[i]['body'].replace('\r\n', '<p>').replace('\n', '<p>')])
                        status.update({
                            'update': True,
                            'success': True,
                            'message': _(
                                u'A new update is available. Click on the button below to update to version: ') +
                                commit[i]['tag_name'],
                            'history': parents
                        })
                        self.updateFile = commit[i]['zipball_url']
                    else:
                        status.update({
                            'update': True,
                            'success': True,
                            'message': _(
                                u'A new update is available. Click on the button below to update to version: ') +
                                commit[i + 1]['tag_name'],
                            'history': parents
                        })
                        self.updateFile = commit[i +1]['zipball_url']
                    break
            if i == -1:
                status.update({
                    'update': True,
                    'success': True,
                    'message': _(u'A new update is available. Click on the button below to update to the latest version.'),
                    'history': parents
                })
                self.updateFile = commit[0]['zipball_url']
            return json.dumps(status)

    def _get_request_path(self):
        if config.get_update_channel == UPDATE_STABLE:
            return self.updateFile
        else:
            return 'https://api.github.com/repos/janeczku/calibre-web/zipball/master'

    def _load_remote_data(self, repository_url):
        status = {
            'update': False,
            'success': False,
            'message': '',
            'current_commit_hash': ''
        }
        commit = None
        version = self.get_current_version_info()
        if version is False:
            status['current_commit_hash'] = _(u'Unknown')
        else:
            status['current_commit_hash'] = version['version']
        try:
            r = requests.get(repository_url)
            commit = r.json()
            r.raise_for_status()
        except requests.exceptions.HTTPError as e:
            if commit:
                if 'message' in commit:
                    status['message'] = _(u'HTTP Error') + ': ' + commit['message']
            else:
                status['message'] = _(u'HTTP Error') + ': ' + str(e)
        except requests.exceptions.ConnectionError:
            status['message'] = _(u'Connection error')
        except requests.exceptions.Timeout:
            status['message'] = _(u'Timeout while establishing connection')
        except requests.exceptions.RequestException:
            status['message'] = _(u'General error')

        return status, commit


updater_thread = Updater()
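
_stable_available_updates() above walks the GitHub release list from oldest to newest and compares the dotted tag_name of each release against the installed version, major first, then minor, then patch. A standalone sketch of that comparison rule, for illustration only (the helper name is made up, not part of the commit):

def is_newer(tag, current):
    # both arguments are dotted version strings such as '0.6.1'
    tag_parts = [int(part) for part in tag.split('.')]
    current_parts = [int(part) for part in current.split('.')]
    # list comparison checks major, then minor, then patch, like the channel logic above
    return tag_parts > current_parts

assert is_newer('0.6.1', '0.6.0')
assert not is_newer('0.5.9', '0.6.0')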

162 cps/web.py
@@ -57,6 +57,7 @@ from redirect import redirect_back
import time
import server
from reverseproxy import ReverseProxied
from updater import updater_thread

try:
    from googleapiclient.errors import HttpError
@@ -1139,127 +1140,7 @@ def get_matching_tags():
@app.route("/get_update_status", methods=['GET'])
@login_required_if_no_ano
def get_update_status():
    status = {
        'update': False,
        'success': False,
        'message': '',
        'current_commit_hash': ''
    }
    parents = []

    repository_url = 'https://api.github.com/repos/janeczku/calibre-web'
    tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)

    if request.method == "GET":
        version = helper.get_current_version_info()
        if version is False:
            status['current_commit_hash'] = _(u'Unknown')
        else:
            status['current_commit_hash'] = version['hash']

        try:
            r = requests.get(repository_url + '/git/refs/heads/master')
            r.raise_for_status()
            commit = r.json()
        except requests.exceptions.HTTPError as e:
            status['message'] = _(u'HTTP Error') + ' ' + str(e)
        except requests.exceptions.ConnectionError:
            status['message'] = _(u'Connection error')
        except requests.exceptions.Timeout:
            status['message'] = _(u'Timeout while establishing connection')
        except requests.exceptions.RequestException:
            status['message'] = _(u'General error')

        if status['message'] != '':
            return json.dumps(status)

        if 'object' not in commit:
            status['message'] = _(u'Unexpected data while reading update information')
            return json.dumps(status)

        if commit['object']['sha'] == status['current_commit_hash']:
            status.update({
                'update': False,
                'success': True,
                'message': _(u'No update available. You already have the latest version installed')
            })
            return json.dumps(status)

        # a new update is available
        status['update'] = True

        try:
            r = requests.get(repository_url + '/git/commits/' + commit['object']['sha'])
            r.raise_for_status()
            update_data = r.json()
        except requests.exceptions.HTTPError as e:
            status['error'] = _(u'HTTP Error') + ' ' + str(e)
        except requests.exceptions.ConnectionError:
            status['error'] = _(u'Connection error')
        except requests.exceptions.Timeout:
            status['error'] = _(u'Timeout while establishing connection')
        except requests.exceptions.RequestException:
            status['error'] = _(u'General error')

        if status['message'] != '':
            return json.dumps(status)

        if 'committer' in update_data and 'message' in update_data:
            status['success'] = True
            status['message'] = _(u'A new update is available. Click on the button below to update to the latest version.')

            new_commit_date = datetime.datetime.strptime(
                update_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
            parents.append(
                [
                    format_datetime(new_commit_date, format='short', locale=get_locale()),
                    update_data['message'],
                    update_data['sha']
                ]
            )

            # it only makes sense to analyze the parents if we know the current commit hash
            if status['current_commit_hash'] != '':
                try:
                    parent_commit = update_data['parents'][0]
                    # limit the maximum search depth
                    remaining_parents_cnt = 10
                except IndexError:
                    remaining_parents_cnt = None

                if remaining_parents_cnt is not None:
                    while True:
                        if remaining_parents_cnt == 0:
                            break

                        # check if we are more than one update behind if so, go up the tree
                        if parent_commit['sha'] != status['current_commit_hash']:
                            try:
                                r = requests.get(parent_commit['url'])
                                r.raise_for_status()
                                parent_data = r.json()

                                parent_commit_date = datetime.datetime.strptime(
                                    parent_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
                                parent_commit_date = format_datetime(
                                    parent_commit_date, format='short', locale=get_locale())

                                parents.append([parent_commit_date, parent_data['message'], parent_data['sha']])
                                parent_commit = parent_data['parents'][0]
                                remaining_parents_cnt -= 1
                            except Exception:
                                # it isn't crucial if we can't get information about the parent
                                break
                        else:
                            # parent is our current version
                            break

        else:
            status['success'] = False
            status['message'] = _(u'Could not fetch update information')

    status['history'] = parents
    return json.dumps(status)
    return updater_thread.get_available_updates(request.method)


@app.route("/get_updater_status", methods=['GET', 'POST'])
@@ -1284,12 +1165,12 @@ def get_updater_status():
            "11": _(u'Update failed:') + u' ' + _(u'General error')
        }
        status['text'] = text
        helper.updater_thread = helper.Updater()
        helper.updater_thread.start()
        status['status'] = helper.updater_thread.get_update_status()
        # helper.updater_thread = helper.Updater()
        updater_thread.start()
        status['status'] = updater_thread.get_update_status()
    elif request.method == "GET":
        try:
            status['status'] = helper.updater_thread.get_update_status()
            status['status'] = updater_thread.get_update_status()
        except AttributeError:
            # thread is not active, occours after restart on update
            status['status'] = 7
@@ -1957,13 +1838,13 @@ def shutdown():
    abort(404)


@app.route("/update")
'''@app.route("/update")
@login_required
@admin_required
def update():
    helper.updater_thread = helper.Updater()
    # updater.updater_thread = helper.Updater()
    flash(_(u"Update done"), category="info")
    return abort(404)
    return abort(404)'''


@app.route("/search", methods=["GET"])
@@ -2862,20 +2743,23 @@ def profile():
@login_required
@admin_required
def admin():
    version = helper.get_current_version_info()
    version = updater_thread.get_current_version_info()
    if version is False:
        commit = _(u'Unknown')
    else:
        commit = version['datetime']
        if 'datetime' in version:
            commit = version['datetime']

    tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
    form_date = datetime.datetime.strptime(commit[:19], "%Y-%m-%dT%H:%M:%S")
    if len(commit) > 19:    # check if string has timezone
        if commit[19] == '+':
            form_date -= datetime.timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))
        elif commit[19] == '-':
            form_date += datetime.timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))
    commit = format_datetime(form_date - tz, format='short', locale=get_locale())
            tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
            form_date = datetime.datetime.strptime(commit[:19], "%Y-%m-%dT%H:%M:%S")
            if len(commit) > 19:    # check if string has timezone
                if commit[19] == '+':
                    form_date -= datetime.timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))
                elif commit[19] == '-':
                    form_date += datetime.timedelta(hours=int(commit[20:22]), minutes=int(commit[23:]))
            commit = format_datetime(form_date - tz, format='short', locale=get_locale())
        else:
            commit = version['version']

    content = ub.session.query(ub.User).all()
    settings = ub.session.query(ub.Settings).first()
@@ -3100,6 +2984,8 @@ def configuration_helper(origin):
            content.config_goodreads_api_key = to_save["config_goodreads_api_key"]
        if "config_goodreads_api_secret" in to_save:
            content.config_goodreads_api_secret = to_save["config_goodreads_api_secret"]
        if "config_updater" in to_save:
            content.config_updatechannel = int(to_save["config_updater"])
        if "config_log_level" in to_save:
            content.config_log_level = int(to_save["config_log_level"])
        if content.config_logfile != to_save["config_logfile"]:
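
With this change /get_update_status simply forwards to updater_thread.get_available_updates() and returns the JSON assembled above (update, success, message, history). A hedged example of polling the route from outside; host, port and anonymous access are assumptions, not part of the commit:

import json
import requests

# assumption: a local Calibre-Web instance on the default port with anonymous browsing enabled
response = requests.get('http://localhost:8083/get_update_status')
info = json.loads(response.text)
print(info['success'], info['message'])
for entry in info.get('history', []):
    # each history entry is a short list, e.g. [date, commit message] or [tag, release notes]
    print(entry)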