#!/usr/bin/env python3

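# Keeps a local copy of the latest SystemRescue ISO: checks the project's OSDN
# RSS feed for a release newer than the one recorded in a JSON sidecar file,
# downloads and checksum-verifies the ISO when needed, and uses a PID lock file
# to avoid concurrent runs.
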
# TODO: logging

# Example .sysresccd.json:
# {
#     "date": "Sun, 22 Nov 2020 18:03:52 +0900",
#     "arch": "amd64",
#     "ver": 7.01,
#     "sha512": "9d8c7e6d5c5f22d42bc20a663(...)"
# }

import datetime
import hashlib
import json
import os
import pathlib
import re
import shutil
# import subprocess
##
import psutil
import requests
from lxml import etree


class Updater(object):
    _fname_re = re.compile(r'^systemrescue-(?P<version>[0-9.]+)-(?P<arch>(i686|amd64))\.iso$')
    _def_hash = 'sha256'
    _allowed_hashes = ('sha256', 'sha512')
    _allowed_arches = ('i686', 'amd64')
    _date_fmt = '%a, %d %b %Y %H:%M:%S %z'

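    # Validates arch/hash_type, resolves all paths, and then calls _init_vars()
    # so the current and remote version state is known right after construction
    # (unless another instance already holds the lock file).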
    def __init__(self,
                 arch = 'amd64',
                 dest_dir = '/boot/iso',
                 dest_file = 'sysresccd.iso',
                 ver_file = '.sysresccd.json',
                 lock_path = '/tmp/.sysresccd.lck',
                 feed_url = 'https://osdn.net/projects/systemrescuecd/storage/!rss',
                 # dl_base = 'https://osdn.mirror.constant.com//storage/g/s/sy/systemrescuecd',
                 grub_cfg = '/etc/grub.d/40_custom_sysresccd',
                 # check_gpg = True,  # TODO: GPG sig checking
                 hash_type = 'sha512'):
        if arch.lower() not in self._allowed_arches:
            raise ValueError('arch must be one of: {0}'.format(', '.join(self._allowed_arches)))
        else:
            self.arch = arch.lower()
        if hash_type.lower() not in self._allowed_hashes:
            raise ValueError('hash_type must be one of: {0}'.format(', '.join(self._allowed_hashes)))
        else:
            self.hash_type = hash_type.lower()
        self.dest_dir = os.path.abspath(os.path.expanduser(dest_dir))
        self.dest_file = dest_file
        self.ver_file = ver_file
        self.feed_url = feed_url
        # self.dl_base = dl_base
        self.dl_base = None  # Populated from the feed's channel/link in getNewVer().
        self.grub_cfg = grub_cfg  # Reserved; not yet used elsewhere in this script.
        self.lckfile = os.path.abspath(os.path.expanduser(lock_path))
        self.old_date = None
        self.old_ver = None
        self.old_hash = None
        self.new_date = None
        self.new_ver = None
        self.new_hash = None
        self.do_update = False
        self.force_update = False
        self.iso_url = None
        self.dest_iso = os.path.join(self.dest_dir, self.dest_file)
        self.dest_ver = os.path.join(self.dest_dir, self.ver_file)
        self._init_vars()

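    # Populate the old_*/new_* state from the local JSON sidecar and the remote feed.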
    def _init_vars(self):
        if self.getRunning():
            return(None)
        self.getCurVer()
        self.getNewVer()
        return(None)

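    # Entry point: take the lock, download if a newer (or missing/mismatched)
    # release was detected, bump the sidecar's mtime, and release the lock.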
    def main(self):
        if self.getRunning():
            return(None)
        self.lock()
        if self.do_update or \
                self.force_update or \
                not all((self.old_date,
                         self.old_ver,
                         self.old_hash)):
            self.do_update = True
            self.download()
        self.touchVer()
        self.unlock()
        return(None)

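    # Stream the ISO to dest_iso, verify it against the published checksum,
    # and rewrite the JSON sidecar via updateVer().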
    def download(self):
        if self.getRunning():
            return(None)
        if not any((self.do_update, self.force_update)):
            return(None)
        if not self.iso_url:
            raise RuntimeError('iso_url attribute must be set first')
        req = requests.get(self.iso_url, stream = True, headers = {'User-Agent': 'curl/7.74.0'})
        if not req.ok:
            raise RuntimeError('Received non-200/30x {0} for {1}'.format(req.status_code, self.iso_url))
        with req as uri:
            with open(self.dest_iso, 'wb') as fh:
                shutil.copyfileobj(uri.raw, fh)
        hasher = hashlib.new(self.hash_type)
        with open(self.dest_iso, 'rb') as fh:
            hasher.update(fh.read())
        realhash = hasher.hexdigest().lower()
        if realhash != self.new_hash:
            raise RuntimeError('Hash mismatch: {0} (LOCAL), {1} (REMOTE)'.format(realhash, self.new_hash))
        self.updateVer()
        return(None)

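    # Read the locally recorded version/date/hash; force a (re)download if the
    # sidecar or ISO is missing, the arch changed, or the on-disk ISO no longer
    # matches the recorded hash.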
    def getCurVer(self):
        if self.getRunning():
            return(None)
        if not os.path.isfile(self.dest_ver):
            self.do_update = True
            self.force_update = True
            self.old_ver = 0.00
            return(None)
        with open(self.dest_ver, 'rb') as fh:
            ver_info = json.load(fh)
        self.old_date = datetime.datetime.strptime(ver_info['date'], self._date_fmt)
        self.old_ver = ver_info['ver']
        # Fall back to the default hash's recorded value if the preferred type is absent.
        self.old_hash = ver_info.get(self.hash_type, ver_info.get(self._def_hash))
        self.new_hash = self.old_hash
        self.new_ver = self.old_ver
        self.new_date = self.old_date
        if ver_info.get('arch') != self.arch:
            self.do_update = True
            self.force_update = True
        try:
            hasher = hashlib.new(self.hash_type)
            with open(self.dest_iso, 'rb') as fh:
                hasher.update(fh.read())
            if self.old_hash != hasher.hexdigest().lower():
                self.do_update = True
                self.force_update = True
        except FileNotFoundError:
            self.do_update = True
            self.force_update = True
            return(None)
        return(None)

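    # Parse the OSDN RSS feed, find the newest ISO for this arch, and record its
    # version, date, download URL, and published checksum.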
    def getNewVer(self):
        if self.getRunning():
            return(None)
        req = requests.get(self.feed_url, headers = {'User-Agent': 'curl/7.74.0'})
        if not req.ok:
            raise RuntimeError('Received non-200/30x {0} for {1}'.format(req.status_code, self.feed_url))
        feed = etree.fromstring(req.content)
        self.dl_base = feed.xpath('channel/link')[0].text
        for item in feed.xpath('//item'):
            date_xml = item.find('pubDate')
            title_xml = item.find('title')
            # link_xml = item.find('link')
            date = title = link = None
            if date_xml is not None:
                date = datetime.datetime.strptime(date_xml.text, self._date_fmt)
            if title_xml is not None:
                title = title_xml.text
            # if link_xml is not None:
            #     link = link_xml.text
            if not title:
                # Skip malformed feed entries that lack a title.
                continue
            fname_r = self._fname_re.search(os.path.basename(title))
            if not fname_r:
                continue
            ver_info = fname_r.groupdict()
            if ver_info['arch'] != self.arch:
                continue
            new_ver = float(ver_info.get('version', self.old_ver))
            if not all((self.old_ver, self.old_date)) or \
                    (new_ver > self.old_ver) or \
                    (self.old_date < date):
                self.do_update = True
                self.new_ver = new_ver
                self.new_date = date
                self.iso_url = os.path.join(self.dl_base, title.lstrip('/'))
                hash_url = '{0}.{1}'.format(self.iso_url, self.hash_type)
                req = requests.get(hash_url, headers = {'User-Agent': 'curl/7.74.0'})
                if not req.ok:
                    raise RuntimeError('Received non-200/30x {0} for {1}'.format(req.status_code, hash_url))
                self.new_hash = req.content.decode('utf-8').lower().split()[0]
            break
        return(None)

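    # Return True if another live process holds the lock file; stale lock files
    # are removed.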
    def getRunning(self):
        if not os.path.isfile(self.lckfile):
            return(False)
        my_pid = os.getpid()
        with open(self.lckfile, 'r') as fh:
            pid = int(fh.read().strip())
        if not psutil.pid_exists(pid):
            os.remove(self.lckfile)
            return(False)
        if pid == my_pid:
            return(False)
        return(True)

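    # Write our PID to the lock file.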
    def lock(self):
        with open(self.lckfile, 'w') as fh:
            fh.write(str(os.getpid()))
        return(None)

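    # Touch the JSON sidecar so its mtime reflects the time of the last check.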
    def touchVer(self):
        if self.getRunning():
            return(None)
        ver_path = pathlib.Path(self.dest_ver)
        ver_path.touch(exist_ok = True)
        return(None)

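    # Remove the lock file if it exists.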
    def unlock(self):
        if os.path.isfile(self.lckfile):
            os.remove(self.lckfile)
        return(None)

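    # Write the new date/arch/version/hash back to the JSON sidecar.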
    def updateVer(self):
        if self.getRunning():
            return(None)
        d = {'date': self.new_date.strftime(self._date_fmt),
             'arch': self.arch,
             'ver': self.new_ver,
             self.hash_type: self.new_hash}
        j = json.dumps(d, indent = 4)
        with open(self.dest_ver, 'w') as fh:
            fh.write(j)
            fh.write('\n')
        return(None)


if __name__ == '__main__':
    u = Updater()
    u.main()
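
# A minimal alternate invocation (illustrative values, not part of the script's
# defaults): fetch the i686 build into a different directory and verify with
# sha256 instead of sha512:
#
#     u = Updater(arch = 'i686', dest_dir = '/srv/iso', hash_type = 'sha256')
#     u.main()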