okay. fixed up some things.
commit 25e594e602
.gitignore (vendored, new file)
@@ -0,0 +1,27 @@
# https://git-scm.com/docs/gitignore
# https://help.github.com/articles/ignoring-files
# Example .gitignore files: https://github.com/github/gitignore
*.bak
screenlog*
*.swp
*.lck
*~
.~lock.*
.editix
__pycache__/
*.pyc
*.tar
*.tar.bz2
*.tar.xz
*.tar.gz
*.tgz
*.txz
*.tbz
*.tbz2
*.zip
*.run
*.7z
*.rar
*.sqlite3
*.deb
.idea/
__init__.py (new file)
@@ -0,0 +1,4 @@
# from . import _base
# from . import arch_mirror_ranking
# from . import arch
# from . import sysresccd
_base.py (new file)
@@ -0,0 +1,151 @@
import hashlib
import os
import pathlib
import shutil
import subprocess
##
import psutil
import requests


class BaseUpdater(object):
    _tpl_dir = os.path.join(os.path.dirname(os.path.abspath(os.path.expanduser(__file__))), 'tpl')
    _tpl_file = None
    _date_fmt = '%a, %d %b %Y %H:%M:%S %z'

    def __init__(self,
                 dest_dir,
                 dest_file,
                 ver_file,
                 lock_path,
                 do_grub_cfg,
                 boot_dir,
                 grub_cfg,
                 hash_type
                 # check_gpg = True,  # TODO: GPG sig checking
                 ):
        self.dest_dir = os.path.abspath(os.path.expanduser(dest_dir))
        self.dest_file = dest_file
        self.ver_file = ver_file
        self.do_grub = bool(do_grub_cfg)
        self.boot_dir = os.path.abspath(os.path.expanduser(boot_dir))
        self.grub_cfg = os.path.abspath(os.path.expanduser(grub_cfg))
        self.lckfile = os.path.abspath(os.path.expanduser(lock_path))
        p_dest = pathlib.Path(self.dest_dir)
        p_boot = pathlib.Path(self.boot_dir)
        self.grub_iso_dir = '/{0}'.format(str(p_dest.relative_to(p_boot)))
        self.old_date = None
        self.old_ver = None
        self.old_hash = None
        self.new_date = None
        self.new_ver = None
        self.new_hash = None
        self.do_update = False
        self.force_update = False
        self.iso_url = None
        self.hash_type = hash_type
        self.dest_iso = os.path.join(self.dest_dir, self.dest_file)
        self.dest_ver = os.path.join(self.dest_dir, self.ver_file)

    def main(self):
        if self.getRunning():
            return(None)
        self.lock()
        if self.do_update or \
                self.force_update or not \
                all((self.old_date,
                     self.old_ver,
                     self.old_hash)):
            self.do_update = True
            self.download()
            if self.do_grub:
                self.grub()
        self.touchVer()
        self.unlock()
        return(None)

    def download(self):
        if self.getRunning():
            return(None)
        if not any((self.do_update, self.force_update)):
            return(None)
        if not self.iso_url:
            raise RuntimeError('iso_url attribute must be set first')
        req = requests.get(self.iso_url, stream = True, headers = {'User-Agent': 'curl/7.74.0'})
        if not req.ok:
            raise RuntimeError('Received non-200/30x {0} for {1}'.format(req.status_code, self.iso_url))
        os.makedirs(os.path.dirname(self.dest_iso), exist_ok = True)
        with req as uri:
            with open(self.dest_iso, 'wb') as fh:
                shutil.copyfileobj(uri.raw, fh)
        hasher = hashlib.new(self.hash_type)
        with open(self.dest_iso, 'rb') as fh:
            hasher.update(fh.read())
        realhash = hasher.hexdigest().lower()
        if realhash != self.new_hash:
            raise RuntimeError('Hash mismatch: {0} (LOCAL), {1} (REMOTE)'.format(realhash, self.new_hash))
        self.updateVer()
        return(None)

    def getCurVer(self):
        raise RuntimeError('BaseUpdater should be subclassed and its updateVer, getCurVer, and getNewVer methods '
                           'should be replaced.')

    def getNewVer(self):
        raise RuntimeError('BaseUpdater should be subclassed and its updateVer, getCurVer, and getNewVer methods '
                           'should be replaced.')

    def getRunning(self):
        if not os.path.isfile(self.lckfile):
            return(False)
        my_pid = os.getpid()
        with open(self.lckfile, 'r') as fh:
            pid = int(fh.read().strip())
        if not psutil.pid_exists(pid):
            os.remove(self.lckfile)
            return(False)
        if pid == my_pid:
            return(False)
        return(True)

    def grub(self):
        import jinja2
        loader = jinja2.FileSystemLoader(searchpath = self._tpl_dir)
        tplenv = jinja2.Environment(loader = loader)
        tpl = tplenv.get_template(self._tpl_file)
        with open(self.grub_cfg, 'w') as fh:
            fh.write(tpl.render(iso_path = os.path.abspath(
                                    os.path.expanduser(
                                        os.path.join(self.grub_iso_dir,
                                                     self.dest_file)))))
        cmd = subprocess.run(['grub-mkconfig',
                              '-o', '{0}/grub/grub.cfg'.format(self.boot_dir)],
                             stdout = subprocess.PIPE,
                             stderr = subprocess.PIPE)
        if cmd.returncode != 0:
            stderr = cmd.stderr.decode('utf-8')
            if stderr.strip() != '':
                print(stderr)
            raise RuntimeError('grub-mkconfig returned a non-zero exit status ({0})'.format(cmd.returncode))
        return(None)

    def lock(self):
        with open(self.lckfile, 'w') as fh:
            fh.write(str(os.getpid()))
        return(None)

    def touchVer(self):
        if self.getRunning():
            return(None)
        ver_path = pathlib.Path(self.dest_ver)
        ver_path.touch(exist_ok = True)
        return(None)

    def unlock(self):
        if os.path.isfile(self.lckfile):
            os.remove(self.lckfile)
        return(None)

    def updateVer(self):
        raise RuntimeError('BaseUpdater should be subclassed and its updateVer, getCurVer, and getNewVer methods '
                           'should be replaced.')
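BaseUpdater deliberately leaves getCurVer, getNewVer, and updateVer unimplemented; the subclasses below call them from their own __init__. A minimal sketch of what a subclass is expected to provide follows. The FixedUrlUpdater class, its URL, and its hash value are hypothetical placeholders, not part of this commit.

# Hypothetical minimal subclass of BaseUpdater, showing the three methods
# that must be overridden. All concrete values below are placeholders.
import datetime
import json
##
import _base


class FixedUrlUpdater(_base.BaseUpdater):
    _tpl_file = None  # no GRUB template; construct with do_grub_cfg = False

    def getCurVer(self):
        # Read whatever state the last successful run recorded.
        try:
            with open(self.dest_ver, 'r') as fh:
                ver_info = json.load(fh)
            self.old_date = datetime.datetime.strptime(ver_info['date'], self._date_fmt)
            self.old_ver = ver_info['ver']
            self.old_hash = ver_info.get(self.hash_type)
        except FileNotFoundError:
            self.do_update = True
            self.force_update = True
        return(None)

    def getNewVer(self):
        # Point download() at a fixed URL and hash (a real subclass parses these from a feed).
        self.iso_url = 'https://example.com/images/example.iso'  # placeholder
        self.new_hash = '0' * 64                                 # placeholder
        self.new_ver = '1.0'
        self.new_date = datetime.datetime.now(datetime.timezone.utc)
        return(None)

    def updateVer(self):
        # Persist the state that getCurVer() reads on the next run.
        d = {'date': self.new_date.strftime(self._date_fmt),
             'ver': self.new_ver,
             self.hash_type: self.new_hash}
        with open(self.dest_ver, 'w') as fh:
            fh.write(json.dumps(d, indent = 4))
            fh.write('\n')
        return(None)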
arch.py (new executable file)
@@ -0,0 +1,231 @@
#!/usr/bin/env python3

# TODO: logging
# Example .arch.json:
# {
#     "date": "Fri, 01 Jan 2021 00:00:00 +0000",
#     "mirror": "http://arch.mirror.constant.com/",
#     "country": "US",
#     "notes": "https://archlinux.org/releng/releases/2021.01.01/",
#     "ver": "2021.01.01",
#     "sha1": "c3082b13d3cf0a253e1322568f2fd07479f86d52"
# }

import datetime
import json
import hashlib
import os
import pathlib
import re
import shutil
import subprocess
##
import psutil
import requests
from lxml import etree
##
import _base
import arch_mirror_ranking


class Updater(_base.BaseUpdater):
    _fname_re = re.compile(r'^archlinux-(?P<version>[0-9]{4}\.[0-9]{2}\.[0-9]{2})-(?P<arch>(i686|x86_64)).iso$')
    _def_hash = 'sha1'
    _allowed_hashes = ('md5', 'sha1')
    _allowed_arches = ('x86_64', )
    _datever_fmt = '%Y.%m.%d'
    _arch = 'x86_64'  # Arch Linux proper only offers x86_64.
    _iso_dir = 'iso/latest'
    _iso_fname = 'archlinux-{ver}-{arch}.iso'
    _iso_file = os.path.join(_iso_dir, _iso_fname)
    _tpl_file = 'arch_grub.conf.j2'

    def __init__(self,
                 dest_dir = '/boot/iso',  # Should be subdir of boot_dir
                 dest_file = 'arch.iso',
                 ver_file = '.arch.json',
                 lock_path = '/tmp/.arch.lck',
                 feed_url = 'https://archlinux.org/feeds/releases/',
                 do_grub_cfg = True,
                 boot_dir = '/boot',  # ESP or boot partition mount; where GRUB files are installed *under*
                 grub_cfg = '/etc/grub.d/40_custom_arch',
                 # check_gpg = True,  # TODO: GPG sig checking
                 hash_type = 'sha1'):
        super().__init__(dest_dir, dest_file, ver_file, lock_path, do_grub_cfg, boot_dir, grub_cfg, hash_type)
        if hash_type.lower() not in self._allowed_hashes:
            raise ValueError('hash_type must be one of: {0}'.format(', '.join(self._allowed_hashes)))
        else:
            self.hash_type = hash_type.lower()
        self.feed_url = feed_url
        # From the JSON.
        self.rel_notes_url = None
        self.mirror_base = None
        self.country = None
        self.ipv4 = True
        self.ipv6 = False
        self._init_vars()

    def _init_vars(self):
        if self.getRunning():
            return(None)
        self.getCountry()
        self.getNet()
        self.getCurVer()
        self.getNewVer()
        return(None)

    def findMirror(self):
        self.getCountry()
        if self.mirror_base:
            return(None)
        mirrors = []
        for p in ('http', 'https'):
            m = arch_mirror_ranking.MirrorIdx(country = self.country,
                                              proto = p,
                                              ipv4 = self.ipv4,
                                              ipv6 = self.ipv6,
                                              isos = True)
            mirrors.extend(m.ranked_servers)
        # Find the "best" across http/https.
        mirrors = sorted(mirrors, key = lambda i: i['score'])
        for s in mirrors:
            try:
                req = requests.get(s['url'])
                if req.ok:
                    self.mirror_base = s['url']
                    break
            except (OSError, ConnectionRefusedError):
                continue
        return(None)

    def getCountry(self):
        if self.country:  # The API has limited number of accesses for free.
            return(None)
        url = 'https://ipinfo.io/country'
        req = requests.get(url, headers = {'User-Agent': 'curl/7.74.0'})
        if not req.ok:
            raise RuntimeError('Received non-200/30x {0} for {1}'.format(req.status_code, url))
        self.country = req.content.decode('utf-8').strip().upper()
        return(None)

    def getCurVer(self):
        if self.getRunning():
            return(None)
        if not os.path.isfile(self.dest_ver):
            self.getCountry()
            self.getNet()
            self.findMirror()
            self.do_update = True
            self.force_update = True
            self.old_ver = 0.00
            return(None)
        with open(self.dest_ver, 'rb') as fh:
            ver_info = json.load(fh)
        self.old_date = datetime.datetime.strptime(ver_info['date'], self._date_fmt)
        self.old_ver = datetime.datetime.strptime(ver_info['ver'], self._datever_fmt)
        self.old_hash = ver_info.get(self.hash_type, self._def_hash)
        self.country = ver_info.get('country')
        self.mirror_base = ver_info.get('mirror')
        if not self.country:
            self.getCountry()
        if not self.mirror_base:
            self.getNet()
            self.findMirror()
        self.new_hash = self.old_hash
        self.new_ver = self.old_ver
        self.new_date = self.old_date
        # if ver_info.get('arch') != self._arch:
        #     self.do_update = True
        #     self.force_update = True
        try:
            hasher = hashlib.new(self.hash_type)
            with open(self.dest_iso, 'rb') as fh:
                hasher.update(fh.read())
            if self.old_hash != hasher.hexdigest().lower():
                self.do_update = True
                self.force_update = True
        except FileNotFoundError:
            self.do_update = True
            self.force_update = True
            return(None)
        return(None)

    def getNet(self):
        for k in ('ipv4', 'ipv6'):
            url = 'https://{0}.clientinfo.square-r00t.net'.format(k)
            try:
                req = requests.get(url)
                setattr(self, k, req.json()['ip'])
            except OSError:
                setattr(self, k, False)
        return(None)

    def getNewVer(self):
        if self.getRunning():
            return(None)
        if not self.mirror_base:
            self.findMirror()
        req = requests.get(self.feed_url, headers = {'User-Agent': 'curl/7.74.0'})
        if not req.ok:
            raise RuntimeError('Received non-200/30x {0} for {1}'.format(req.status_code, self.feed_url))
        feed = etree.fromstring(req.content)
        for item in feed.xpath('//item'):
            date_xml = item.find('pubDate')
            ver_xml = item.find('title')
            notes_xml = item.find('link')
            date = ver = notes = None
            if date_xml is not None:
                date = datetime.datetime.strptime(date_xml.text, self._date_fmt)
            if ver_xml is not None:
                ver = ver_xml.text
            if notes_xml is not None:
                notes = notes_xml.text
            new_ver = datetime.datetime.strptime(ver, self._datever_fmt)
            if not all((self.old_ver, self.old_date)) or \
                    (new_ver > self.old_ver) or \
                    (self.old_date < date):
                self.do_update = True
                self.new_ver = new_ver
                self.new_date = date
                self.rel_notes_url = notes
            datever = self.new_ver.strftime(self._datever_fmt)
            self.iso_url = os.path.join(self.mirror_base,
                                        self._iso_file.lstrip('/')).format(ver = datever, arch = self._arch)
            hash_url = os.path.join(self.mirror_base,
                                    self._iso_dir,
                                    '{0}sums.txt'.format(self.hash_type))
            req = requests.get(hash_url, headers = {'User-Agent': 'curl/7.74.0'})
            if not req.ok:
                raise RuntimeError('Received non-200/30x {0} for {1}'.format(req.status_code, hash_url))
            hash_lines = req.content.decode('utf-8').strip().splitlines()
            tgt_fname = os.path.basename(self.iso_url)
            for line in hash_lines:
                if line.strip().startswith('#'):
                    continue
                hash_str, fname = line.split()
                if fname != tgt_fname:
                    continue
                self.new_hash = hash_str.lower()
                break
            break
        return(None)

    def updateVer(self):
        if self.getRunning():
            return(None)
        d = {'date': self.new_date.strftime(self._date_fmt),
             'mirror': self.mirror_base,
             'country': self.country,
             'notes': self.rel_notes_url,
             'ver': self.new_ver.strftime(self._datever_fmt),
             self.hash_type: self.new_hash}
        j = json.dumps(d, indent = 4)
        with open(self.dest_ver, 'w') as fh:
            fh.write(j)
            fh.write('\n')
        return(None)


if __name__ == '__main__':
    u = Updater()
    u.main()
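The script is runnable as-is, but the constructor arguments can also be overridden when it is driven from another script. A short sketch under the assumption of the default /boot layout; the md5 choice is only there to show the other allowed hash.

# Hypothetical driver: run the Arch updater with explicit, non-default arguments.
import arch

u = arch.Updater(dest_dir = '/boot/iso',
                 dest_file = 'arch.iso',
                 hash_type = 'md5',  # must be one of _allowed_hashes: 'md5' or 'sha1'
                 grub_cfg = '/etc/grub.d/40_custom_arch')
# Locks, downloads only if the feed shows a newer release (or the local ISO fails
# its hash check), regenerates the GRUB config, then unlocks.
u.main()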
arch_mirror_ranking.py (new file)
@@ -0,0 +1,98 @@
#!/usr/bin/env python3

import argparse
import datetime
# import dns  # TODO: replace server['ipv4'] with IPv4 address(es)? etc.
import json
import re
import sys
from urllib.request import urlopen
##
import iso3166


servers_json_url = 'https://www.archlinux.org/mirrors/status/json/'
protos = ('http', 'https')


class MirrorIdx(object):
    def __init__(self,
                 country = None,
                 proto = None,
                 json_url = servers_json_url,
                 name_re = None,
                 ipv4 = None,
                 ipv6 = None,
                 isos = None):
        _tmpargs = locals()
        del(_tmpargs['self'])
        for k, v in _tmpargs.items():
            setattr(self, k, v)
        self.validateParams()
        self.servers_json = {}
        self.servers = []
        self.servers_with_scores = []
        self.ranked_servers = []
        self.fetchJSON()
        self.buildServers()
        self.rankServers()

    def fetchJSON(self):
        with urlopen(self.json_url) as u:
            self.servers_json = json.load(u)
        return()

    def buildServers(self):
        _limiters = (self.proto, self.ipv4, self.ipv6, self.isos)
        _filters = list(_limiters)
        _filters.extend([self.name_re, self.country])
        _filters = tuple(_filters)
        for s in self.servers_json['urls']:
            if not s['active']:
                continue
            if not any(_filters):
                self.servers.append(s.copy())
                if s['score']:
                    self.servers_with_scores.append(s)
                continue
            # These are based on string values.
            if self.name_re:
                if not self.name_re.search(s['url']):
                    continue
            if self.country:
                if self.country != s['country_code']:
                    continue
            # These are regular True/False switches.
            match = False
            # We want to be *very* explicit about the ordering and inclusion/exclusion of these.
            # They MUST match the order of _limiters.
            values = []
            for k in ('protocol', 'ipv4', 'ipv6', 'isos'):
                values.append(s[k])
            valid = all([v for k, v in zip(_limiters, values) if k])
            if valid:
                self.servers.append(s)
                if s['score']:
                    self.servers_with_scores.append(s)
        return()

    def rankServers(self):
        self.ranked_servers = sorted(self.servers_with_scores, key = lambda i: i['score'])
        return()

    def validateParams(self):
        if self.proto and self.proto.lower() not in protos:
            err = '{0} must be one of: {1}'.format(self.proto, ', '.join([i.upper() for i in protos]))
            raise ValueError(err)
        elif self.proto:
            self.proto = self.proto.upper()
        if self.country and self.country.upper() not in iso3166.countries:
            err = ('{0} must be a valid ISO-3166-1 ALPHA-2 country code. '
                   'See https://en.wikipedia.org/wiki/List_of_ISO_3166_country_codes'
                   '#Current_ISO_3166_country_codes').format(self.country)
            raise ValueError(err)
        elif self.country:
            self.country = self.country.upper()
        if self.name_re:
            self.name_re = re.compile(self.name_re)
        return()
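MirrorIdx can also be used on its own to inspect the ranked mirror list; a small sketch follows (the 'US'/'https' values are just example arguments, not defaults).

# Hypothetical standalone use of MirrorIdx: print the five best-scored ISO mirrors.
import arch_mirror_ranking

m = arch_mirror_ranking.MirrorIdx(country = 'US', proto = 'https', isos = True)
for s in m.ranked_servers[:5]:
    # Lower score is better; see https://www.archlinux.org/mirrors/status/ for the metric.
    print('{0:<10.4f}{1}'.format(s['score'], s['url']))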
sysresccd.py (new executable file)
@@ -0,0 +1,152 @@
#!/usr/bin/env python3

# TODO: logging
# Example .sysresccd.json:
# {
#     "date": "Sun, 22 Nov 2020 18:03:52 +0900",
#     "arch": "amd64",
#     "ver": 7.01,
#     "sha512": "9d8c7e6d5c5f22d42bc20a663(...)"
# }

import datetime
import json
import hashlib
import os
import re
##
import requests
from lxml import etree
##
import _base


class Updater(_base.BaseUpdater):
    _fname_re = re.compile(r'^systemrescue-(?P<version>[0-9.]+)-(?P<arch>(i686|amd64)).iso$')
    _def_hash = 'sha256'
    _allowed_hashes = ('sha256', 'sha512')
    _allowed_arches = ('i686', 'amd64')
    _tpl_file = 'sysresccd_grub.conf.j2'

    def __init__(self,
                 arch = 'amd64',
                 dest_dir = '/boot/iso',  # Should be subdir of boot_dir
                 dest_file = 'sysresccd.iso',
                 ver_file = '.sysresccd.json',
                 lock_path = '/tmp/.sysresccd.lck',
                 feed_url = 'https://osdn.net/projects/systemrescuecd/storage/!rss',
                 do_grub_cfg = True,
                 boot_dir = '/boot',  # ESP or boot partition mount; where GRUB files are installed *under*
                 grub_cfg = '/etc/grub.d/40_custom_sysresccd',
                 # check_gpg = True,  # TODO: GPG sig checking
                 hash_type = 'sha512'):
        super().__init__(dest_dir, dest_file, ver_file, lock_path, do_grub_cfg, boot_dir, grub_cfg, hash_type)
        if arch.lower() not in self._allowed_arches:
            raise ValueError('arch must be one of: {0}'.format(', '.join(self._allowed_arches)))
        else:
            self.arch = arch.lower()
        if hash_type.lower() not in self._allowed_hashes:
            raise ValueError('hash_type must be one of: {0}'.format(', '.join(self._allowed_hashes)))
        else:
            self.hash_type = hash_type.lower()
        self.feed_url = feed_url
        self.dl_base = None
        self._init_vars()

    def _init_vars(self):
        if self.getRunning():
            return(None)
        self.getCurVer()
        self.getNewVer()
        return(None)

    def getCurVer(self):
        if self.getRunning():
            return(None)
        if not os.path.isfile(self.dest_ver):
            self.do_update = True
            self.force_update = True
            self.old_ver = 0.00
            return(None)
        with open(self.dest_ver, 'rb') as fh:
            ver_info = json.load(fh)
        self.old_date = datetime.datetime.strptime(ver_info['date'], self._date_fmt)
        self.old_ver = ver_info['ver']
        self.old_hash = ver_info.get(self.hash_type, self._def_hash)
        self.new_hash = self.old_hash
        self.new_ver = self.old_ver
        self.new_date = self.old_date
        if ver_info.get('arch') != self.arch:
            self.do_update = True
            self.force_update = True
        try:
            hasher = hashlib.new(self.hash_type)
            with open(self.dest_iso, 'rb') as fh:
                hasher.update(fh.read())
            if self.old_hash != hasher.hexdigest().lower():
                self.do_update = True
                self.force_update = True
        except FileNotFoundError:
            self.do_update = True
            self.force_update = True
            return(None)
        return(None)

    def getNewVer(self):
        if self.getRunning():
            return(None)
        req = requests.get(self.feed_url, headers = {'User-Agent': 'curl/7.74.0'})
        if not req.ok:
            raise RuntimeError('Received non-200/30x {0} for {1}'.format(req.status_code, self.feed_url))
        feed = etree.fromstring(req.content)
        self.dl_base = feed.xpath('channel/link')[0].text
        for item in feed.xpath('//item'):
            date_xml = item.find('pubDate')
            title_xml = item.find('title')
            # link_xml = item.find('link')
            date = title = link = None
            if date_xml is not None:
                date = datetime.datetime.strptime(date_xml.text, self._date_fmt)
            if title_xml is not None:
                title = title_xml.text
            # if link_xml is not None:
            #     link = link_xml.text
            fname_r = self._fname_re.search(os.path.basename(title))
            if not fname_r:
                continue
            ver_info = fname_r.groupdict()
            if ver_info['arch'] != self.arch:
                continue
            new_ver = float(ver_info.get('version', self.old_ver))
            if not all((self.old_ver, self.old_date)) or \
                    (new_ver > self.old_ver) or \
                    (self.old_date < date):
                self.do_update = True
                self.new_ver = new_ver
                self.new_date = date
            self.iso_url = os.path.join(self.dl_base, title.lstrip('/'))
            hash_url = '{0}.{1}'.format(self.iso_url, self.hash_type)
            req = requests.get(hash_url, headers = {'User-Agent': 'curl/7.74.0'})
            if not req.ok:
                raise RuntimeError('Received non-200/30x {0} for {1}'.format(req.status_code, hash_url))
            self.new_hash = req.content.decode('utf-8').lower().split()[0]
            break
        return(None)

    def updateVer(self):
        if self.getRunning():
            return(None)
        d = {'date': self.new_date.strftime(self._date_fmt),
             'arch': self.arch,
             'ver': self.new_ver,
             self.hash_type: self.new_hash}
        j = json.dumps(d, indent = 4)
        with open(self.dest_ver, 'w') as fh:
            fh.write(j)
            fh.write('\n')
        return(None)


if __name__ == '__main__':
    u = Updater()
    u.main()
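The commented-out imports in __init__.py suggest the intended use is to refresh both ISOs back to back, for example from a cron job or systemd timer. A sketch of that wrapper, assuming it runs from the same directory as the modules:

# Hypothetical wrapper: run both updaters in one pass. Each updater takes its own
# lock file, so a stale or concurrent run of one does not block the other.
import arch
import sysresccd

for mod in (arch, sysresccd):
    u = mod.Updater()
    u.main()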
tpl/arch_grub.conf.j2 (new file)
@@ -0,0 +1,26 @@
#!/bin/sh
# Copy this file to /etc/grub.d/40_custom_arch with mode 0755 and run grub-mkconfig -o /boot/grub/grub.cfg
exec tail -n +3 $0
# Arch ISO
# https://wiki.archlinux.org/index.php/Multiboot_USB_drive#Arch_Linux_monthly_release
submenu 'Arch Install ISO' {

    menuentry 'Default Options' {
        set isofile='{{ iso_path }}'
        probe -u $root --set=imgdevuuid
        set imgdevpath="/dev/disk/by-uuid/$imgdevuuid"
        loopback loop $isofile
        linux (loop)/arch/boot/x86_64/vmlinuz-linux archisobasedir=arch img_dev=$imgdevpath img_loop=$isofile earlymodules=loop
        initrd (loop)/arch/boot/intel-ucode.img (loop)/arch/boot/amd-ucode.img (loop)/arch/boot/x86_64/initramfs-linux.img
    }

    menuentry 'Accessibility mode' {
        set isofile='{{ iso_path }}'
        probe -u $root --set=imgdevuuid
        set imgdevpath="/dev/disk/by-uuid/$imgdevuuid"
        loopback loop $isofile
        linux (loop)/arch/boot/x86_64/vmlinuz-linux archisobasedir=arch img_dev=$imgdevpath img_loop=$isofile earlymodules=loop accessibility=on
        initrd (loop)/arch/boot/intel-ucode.img (loop)/arch/boot/amd-ucode.img (loop)/arch/boot/x86_64/initramfs-linux.img
    }

}
tpl/sysresccd_grub.conf.j2 (new file)
@@ -0,0 +1,62 @@
#!/bin/sh
# Copy this file to /etc/grub.d/40_custom_sysresccd with mode 0755 and run grub-mkconfig -o /boot/grub/grub.cfg
exec tail -n +3 $0
# System Rescue CD
# https://www.system-rescue.org/
submenu 'System Rescue CD' {

    menuentry 'Default options' {
        set isofile='{{ iso_path }}'
        probe -u $root --set=imgdevuuid
        set imgdevpath="/dev/disk/by-uuid/$imgdevuuid"
        loopback loop $isofile
        linux (loop)/sysresccd/boot/x86_64/vmlinuz archisobasedir=sysresccd img_dev=$imgdevpath img_loop=$isofile earlymodules=loop
        initrd (loop)/sysresccd/boot/intel_ucode.img (loop)/sysresccd/boot/amd_ucode.img (loop)/sysresccd/boot/x86_64/sysresccd.img
    }

    menuentry 'Run from RAM' {
        set isofile='{{ iso_path }}'
        probe -u $root --set=imgdevuuid
        set imgdevpath="/dev/disk/by-uuid/$imgdevuuid"
        loopback loop $isofile
        linux (loop)/sysresccd/boot/x86_64/vmlinuz archisobasedir=sysresccd img_dev=$imgdevpath img_loop=$isofile earlymodules=loop copytoram
        initrd (loop)/sysresccd/boot/intel_ucode.img (loop)/sysresccd/boot/amd_ucode.img (loop)/sysresccd/boot/x86_64/sysresccd.img
    }

    menuentry 'Confirm/verify checksum' {
        set isofile='{{ iso_path }}'
        probe -u $root --set=imgdevuuid
        set imgdevpath="/dev/disk/by-uuid/$imgdevuuid"
        loopback loop $isofile
        linux (loop)/sysresccd/boot/x86_64/vmlinuz archisobasedir=sysresccd img_dev=$imgdevpath img_loop=$isofile earlymodules=loop checksum
        initrd (loop)/sysresccd/boot/intel_ucode.img (loop)/sysresccd/boot/amd_ucode.img (loop)/sysresccd/boot/x86_64/sysresccd.img
    }

    menuentry 'Use basic display drivers' {
        set isofile='{{ iso_path }}'
        probe -u $root --set=imgdevuuid
        set imgdevpath="/dev/disk/by-uuid/$imgdevuuid"
        loopback loop $isofile
        linux (loop)/sysresccd/boot/x86_64/vmlinuz archisobasedir=sysresccd img_dev=$imgdevpath img_loop=$isofile earlymodules=loop nomodeset
        initrd (loop)/sysresccd/boot/intel_ucode.img (loop)/sysresccd/boot/amd_ucode.img (loop)/sysresccd/boot/x86_64/sysresccd.img
    }

    menuentry 'Find and boot a locally installed Linux' {
        set isofile='{{ iso_path }}'
        probe -u $root --set=imgdevuuid
        set imgdevpath="/dev/disk/by-uuid/$imgdevuuid"
        loopback loop $isofile
        linux (loop)/sysresccd/boot/x86_64/vmlinuz archisobasedir=sysresccd img_dev=$imgdevpath img_loop=$isofile earlymodules=loop findroot
        initrd (loop)/sysresccd/boot/intel_ucode.img (loop)/sysresccd/boot/amd_ucode.img (loop)/sysresccd/boot/x86_64/sysresccd.img
    }

    menuentry 'Stop during boot before the root filesystem is mounted' {
        set isofile='{{ iso_path }}'
        probe -u $root --set=imgdevuuid
        set imgdevpath="/dev/disk/by-uuid/$imgdevuuid"
        loopback loop $isofile
        linux (loop)/sysresccd/boot/x86_64/vmlinuz archisobasedir=sysresccd img_dev=$imgdevpath img_loop=$isofile earlymodules=loop break
        initrd (loop)/sysresccd/boot/intel_ucode.img (loop)/sysresccd/boot/amd_ucode.img (loop)/sysresccd/boot/x86_64/sysresccd.img
    }

}