ipxe should be done. Currently untested.
parent 4f7c370499
commit 87b6ed1e80

_base.py (12 lines changed)
@@ -102,8 +102,8 @@ class BaseUpdater(object):
         with open(_tmpfile, 'wb') as fh:
             for chunk in req.iter_content(chunk_size = 8192):
                 fh.write(chunk)
-        realhash = self.getISOHash()
-        if realhash != self.new_hash:
+        realhash = self.getISOHash(_tmpfile)
+        if self.new_hash and realhash != self.new_hash:
             os.remove(_tmpfile)
             raise RuntimeError('Hash mismatch: {0} (LOCAL), {1} (REMOTE)'.format(realhash, self.new_hash))
         os.makedirs(os.path.dirname(self.dest_iso), exist_ok = True)

@@ -117,10 +117,14 @@ class BaseUpdater(object):
         raise RuntimeError('BaseUpdater should be subclassed and its updateVer, getCurVer, and getNewVer methods '
                            'should be replaced.')

-    def getISOHash(self):
+    def getISOHash(self, filepath = None):
+        if not filepath:
+            filepath = self.dest_iso
+        else:
+            filepath = os.path.abspath(os.path.expanduser(filepath))
         hasher = hashlib.new(self.hash_type)
         # TODO: later on when python 3.8 is more prevalent, https://stackoverflow.com/a/1131238/733214
-        with open(self.dest_iso, 'rb') as fh:
+        with open(filepath, 'rb') as fh:
             while True:
                 chunk = fh.read(8192)
                 if not chunk:
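As a sanity check on the two hunks above, a minimal, self-contained sketch of the download-then-verify flow they implement; the function name fetch_and_verify, its parameters, and the sha512 default are illustrative and not part of the module:

import hashlib
import os
import shutil
import tempfile

import requests


def fetch_and_verify(url, dest_iso, expected_hash=None, hash_type='sha512'):
    # Illustrative helper mirroring the updated BaseUpdater logic: stream the
    # ISO to a temp file, hash *that* file, and only move it into place if the
    # checksum matches (or no checksum is known yet). dest_iso is assumed to
    # be a full path.
    fd, tmpfile = tempfile.mkstemp(suffix='.iso')
    os.close(fd)
    req = requests.get(url, stream=True)
    with open(tmpfile, 'wb') as fh:
        for chunk in req.iter_content(chunk_size=8192):
            fh.write(chunk)
    hasher = hashlib.new(hash_type)
    with open(tmpfile, 'rb') as fh:
        while True:
            chunk = fh.read(8192)
            if not chunk:
                break
            hasher.update(chunk)
    realhash = hasher.hexdigest().lower()
    if expected_hash and realhash != expected_hash:
        os.remove(tmpfile)
        raise RuntimeError('Hash mismatch: {0} (LOCAL), {1} (REMOTE)'.format(realhash, expected_hash))
    os.makedirs(os.path.dirname(dest_iso), exist_ok=True)
    shutil.move(tmpfile, dest_iso)
    return realhash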
ipxe.py (48 lines changed; normal file → executable file)
@@ -30,9 +30,11 @@ class Updater(_base.BaseUpdater):
                             r'(?P<variant>(iso|efi))$')
     _allowed_variants = ('iso', 'efi')
     _tpl_file = 'ipxe_grub.conf.j2'
+    # I think this *technically* should be '%Y-%m-%d %H:%M %z' but it seems datetime cannot parse that if %z is empty.
+    _datever_fmt = '%Y-%m-%d %H:%M'

     def __init__(self,
-                 variant = 'full',
+                 variant = 'iso',
                  dest_dir = '/boot/iso',  # Should be subdir of boot_dir
                  dest_file = 'ipxe.iso',
                  ver_file = '.ipxe.json',
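A quick illustration of the format-string caveat noted in the comment above; the timestamp values are made up:

import datetime

fmt = '%Y-%m-%d %H:%M'
# Parses an Apache-style "Last Modified" cell such as '2021-02-13 10:32'.
print(datetime.datetime.strptime('2021-02-13 10:32', fmt))

# Appending %z works when an offset is actually present...
print(datetime.datetime.strptime('2021-02-13 10:32 +0000', '%Y-%m-%d %H:%M %z'))
# ...but with nothing after the time, '%Y-%m-%d %H:%M %z' raises ValueError,
# which appears to be the caveat the comment is describing.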
@@ -79,9 +81,11 @@ class Updater(_base.BaseUpdater):
                 ver_info = json.load(fh)
             self.old_date = datetime.datetime.strptime(ver_info['date'], self._date_fmt)
             self.old_hash = ver_info.get(self.hash_type)
+            self.old_ver = datetime.datetime.strptime(ver_info['ver'], self._datever_fmt)
             self.variant = ver_info.get('variant', self.variant)
             self.new_hash = self.old_hash
             self.new_date = self.old_date
+            self.new_ver = self.old_ver
             if ver_info.get('arch') != self.arch:
                 self.do_update = True
                 self.force_update = True
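For context, a hypothetical .ipxe.json matching the keys this code reads and updateVer() writes further down; the hash type, the 'date' format, and all values are made up, and getCurVer() also checks an optional 'arch' key:

import datetime
import json

# Illustrative version-file contents only; real values come from the updater.
ver_json = '''{
    "date": "2021-02-13 10:32:01",
    "variant": "iso",
    "ver": "2021-02-13 10:32",
    "sha512": "d2c1a5..."
}'''
ver_info = json.loads(ver_json)
old_ver = datetime.datetime.strptime(ver_info['ver'], '%Y-%m-%d %H:%M')
old_hash = ver_info.get('sha512')
print(old_ver, old_hash)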
@@ -109,8 +113,6 @@ class Updater(_base.BaseUpdater):
         # Get the header, and the index for the proper columns.
         file_col = 0
         date_col = 0
-        file_len = 0
-        file_html = None
         header = filelist.find('tr')
         # Icon, Name, Modified, Size, Description
         file_len = len(header.find_all('th'))

@@ -126,7 +128,7 @@ class Updater(_base.BaseUpdater):
                 continue
             if link['href'] == '?C=M;O=A':  # Last Modified
                 date_col = idx
-        for idx, row in enumerate(filelist.find('tr')):
+        for idx, row in enumerate(filelist.find_all('tr')):
             if idx == 0:  # Header; skip.
                 continue
             cells = row.find_all('td')
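A self-contained sketch of the column-detection and row-iteration approach used here, run against a made-up fragment of an Apache-style index page; the '?C=N;O=D' href for the Name column is an assumption, since only '?C=M;O=A' for Last Modified is visible in this diff:

from bs4 import BeautifulSoup

# Hypothetical "fancy index" fragment; the real listing may differ.
html = '''<table>
<tr><th></th><th><a href="?C=N;O=D">Name</a></th><th><a href="?C=M;O=A">Last modified</a></th><th>Size</th><th>Description</th></tr>
<tr><td></td><td><a href="ipxe-x86_64.iso">ipxe-x86_64.iso</a></td><td>2021-02-13 10:32</td><td>1.0M</td><td></td></tr>
</table>'''

filelist = BeautifulSoup(html, 'html.parser')
file_col = 0
date_col = 0
header = filelist.find('tr')
for idx, col in enumerate(header.find_all('th')):
    link = col.find('a')
    if not link:
        continue
    if link['href'] == '?C=N;O=D':    # Name (assumed href)
        file_col = idx
    elif link['href'] == '?C=M;O=A':  # Last Modified
        date_col = idx
for idx, row in enumerate(filelist.find_all('tr')):
    if idx == 0:  # Header; skip.
        continue
    cells = row.find_all('td')
    if not cells:
        continue
    print(cells[file_col].text.strip(), '|', cells[date_col].text.strip())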
@@ -134,38 +136,36 @@ class Updater(_base.BaseUpdater):
                 continue
             name_html = cells[file_col]
             date_html = cells[date_col]
-
-        for link in filelist.find_all():
-            fname_r = self._fname_re.search(link['href'])
+            if not all((name_html, date_html)):
+                continue
+            date = datetime.datetime.strptime(date_html.text.strip(), self._datever_fmt)
+            name_link = name_html.find('a')
+            if name_link is None:
+                continue
+            name = name_link.text
+            fname_r = self._fname_re.search(name)
             if not fname_r:
                 continue
-            ver_info = fname_r.groupdict()
-            if ver_info['variant'] != self.variant:
+            f_variant = fname_r.groupdict()['variant']
+            if f_variant != self.variant:
                 continue
-            new_date = float(ver_info.get('version', self.old_ver))
-            iso_url = os.path.join(self.dl_base, link['href'].replace(self.dl_base, ''))
-            hash_url = '{0}.{1}'.format(iso_url, self.hash_type)
-            newver_info = (hash_url, iso_url)
-            versions[new_ver] = newver_info
-        self.new_ver = sorted(list(versions.keys()))[-1]
+            self.new_ver = date
+            self.iso_url = os.path.join(self.dl_base, name_link['href'].replace(self.dl_base, ''))
         if not all((self.old_ver, self.old_date)) or \
                 (self.new_ver > self.old_ver):
             self.do_update = True
             self.new_date = datetime.datetime.now(datetime.timezone.utc)
-            hash_url, self.iso_url = versions[self.new_ver]
-            req = requests.get(hash_url, headers = {'User-Agent': 'curl/7.74.0'})
-            if not req.ok:
-                raise RuntimeError('Received non-200/30x {0} for {1}'.format(req.status_code, hash_url))
-            self.new_hash = req.content.decode('utf-8').lower().split()[0]
         return(None)

     def updateVer(self):
         if self.getRunning():
             return(None)
-        d = {
-            'date': self.new_date.strftime(self._date_fmt),
-            'variant': self.variant,
-            self.hash_type: self.new_hash}
+        if any((self.do_update, self.force_update)):
+            self.new_hash = self.getISOHash()
+        d = {'date': self.new_date.strftime(self._date_fmt),
+             'variant': self.variant,
+             'ver': self.new_ver.strftime(self._datever_fmt),
+             self.hash_type: self.new_hash}
         j = json.dumps(d, indent = 4)
         with open(self.dest_ver, 'w') as fh:
             fh.write(j)
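A small sketch of how the variant is pulled out of a filename with a named-group regex, as the loop above does; the regex body here is hypothetical, since only its trailing (?P<variant>(iso|efi))$ portion is visible in this diff:

import re

# Stand-in for self._fname_re; only the tail is known from the hunk at the top.
fname_re = re.compile(r'^ipxe-.*\.(?P<variant>(iso|efi))$')

for name in ('ipxe-x86_64.iso', 'ipxe-x86_64.efi', 'SHA512SUMS'):
    fname_r = fname_re.search(name)
    if not fname_r:
        continue
    f_variant = fname_r.groupdict()['variant']
    print(name, '->', f_variant)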
ipxe_grub.conf.j2

@@ -20,6 +20,6 @@ menuentry 'iPXE' {
     terminal_output console

    chainloader (${root})/{{ iso_path }}
-    {%- fi %}
+    {%- endif %}

 }
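To sanity-check the corrected Jinja2 syntax, a minimal render of the template from Python; iso_path comes from the template text above, while any other variables the updater passes are unknown here and left out:

import jinja2

# Render ipxe_grub.conf.j2 from the current directory; the placeholder
# iso_path value is illustrative only.
env = jinja2.Environment(loader=jinja2.FileSystemLoader('.'))
tpl = env.get_template('ipxe_grub.conf.j2')
print(tpl.render(iso_path='boot/iso/ipxe.iso'))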