XML validation is a LOT cleaner now
This commit is contained in:
parent
06c99221d2
commit
a1bc613979
@ -36,6 +36,7 @@ class Config(object):
|
|||||||
if not xsdpath:
|
if not xsdpath:
|
||||||
xsdpath = self.xsd_path
|
xsdpath = self.xsd_path
|
||||||
raw_xsd = None
|
raw_xsd = None
|
||||||
|
base_url = None
|
||||||
if xsdpath:
|
if xsdpath:
|
||||||
xsdpath = os.path.abspath(os.path.expanduser(xsdpath))
|
xsdpath = os.path.abspath(os.path.expanduser(xsdpath))
|
||||||
if not os.path.isfile(xsdpath):
|
if not os.path.isfile(xsdpath):
|
||||||
@ -43,11 +44,12 @@ class Config(object):
|
|||||||
'does not exist on the local filesystem'))
|
'does not exist on the local filesystem'))
|
||||||
with open(xsdpath, 'rb') as fh:
|
with open(xsdpath, 'rb') as fh:
|
||||||
raw_xsd = fh.read()
|
raw_xsd = fh.read()
|
||||||
|
base_url = os.path.split(xsdpath)[0]
|
||||||
else:
|
else:
|
||||||
xsi = self.xml.nsmap.get('xsi', 'http://www.w3.org/2001/XMLSchema-instance')
|
xsi = self.xml.nsmap.get('xsi', 'http://www.w3.org/2001/XMLSchema-instance')
|
||||||
schemaLocation = '{{{0}}}schemaLocation'.format(xsi)
|
schemaLocation = '{{{0}}}schemaLocation'.format(xsi)
|
||||||
schemaURL = self.xml.attrib.get(schemaLocation,
|
schemaURL = self.xml.attrib.get(schemaLocation,
|
||||||
'https://aif-ng.io/aif.xsd?ref={0}'.format(self.xml.attrib['version']))
|
'https://schema.xml.r00t2.io/projects/aif.xsd')
|
||||||
split_url = schemaURL.split()
|
split_url = schemaURL.split()
|
||||||
if len(split_url) == 2: # a properly defined schemaLocation
|
if len(split_url) == 2: # a properly defined schemaLocation
|
||||||
schemaURL = split_url[1]
|
schemaURL = split_url[1]
|
||||||
@ -58,7 +60,8 @@ class Config(object):
|
|||||||
# TODO: logging!
|
# TODO: logging!
|
||||||
raise RuntimeError('Could not download XSD')
|
raise RuntimeError('Could not download XSD')
|
||||||
raw_xsd = req.content
|
raw_xsd = req.content
|
||||||
self.xsd = etree.XMLSchema(etree.XML(raw_xsd))
|
base_url = os.path.split(req.url)[0] # This makes me feel dirty.
|
||||||
|
self.xsd = etree.XMLSchema(etree.XML(raw_xsd, base_url = base_url))
|
||||||
return()
|
return()
|
||||||
|
|
||||||
def parseRaw(self, parser = None):
|
def parseRaw(self, parser = None):
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
import configparser
|
import configparser
|
||||||
|
import copy
|
||||||
import io
|
import io
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
@ -78,7 +79,7 @@ class Locale(object):
|
|||||||
else:
|
else:
|
||||||
ldef_name = _locale_def_re.sub(r'\g<1>\g<2>', locale)
|
ldef_name = _locale_def_re.sub(r'\g<1>\g<2>', locale)
|
||||||
lpath = os.path.join(localesrcdir, 'locales', ldef_name)
|
lpath = os.path.join(localesrcdir, 'locales', ldef_name)
|
||||||
env = dict(os.environ).copy()
|
env = copy.deepcopy(dict(os.environ))
|
||||||
env['I18NPATH'] = localesrcdir
|
env['I18NPATH'] = localesrcdir
|
||||||
subprocess.run(['localedef',
|
subprocess.run(['localedef',
|
||||||
'--force',
|
'--force',
|
||||||
|
@ -6,6 +6,10 @@ import shlex
|
|||||||
import subprocess
|
import subprocess
|
||||||
##
|
##
|
||||||
import psutil
|
import psutil
|
||||||
|
##
|
||||||
|
from . import file_handler
|
||||||
|
from . import gpg_handler
|
||||||
|
from . import hash_handler
|
||||||
|
|
||||||
|
|
||||||
def checkMounted(devpath):
|
def checkMounted(devpath):
|
||||||
@ -149,50 +153,50 @@ class _Sizer(object):
|
|||||||
# 'decimal' is base-10, 'binary' is base-2. (Duh.)
|
# 'decimal' is base-10, 'binary' is base-2. (Duh.)
|
||||||
# "b" = bytes, "n" = given value, and "u" = unit suffix's key in below notes.
|
# "b" = bytes, "n" = given value, and "u" = unit suffix's key in below notes.
|
||||||
self.storageUnits = {'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
|
self.storageUnits = {'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
|
||||||
0: (None, 'B', 'byte'),
|
0: (None, 'B', 'byte'),
|
||||||
3: ('k', 'kB', 'kilobyte'),
|
3: ('k', 'kB', 'kilobyte'),
|
||||||
6: ('M', 'MB', 'megabyte'),
|
6: ('M', 'MB', 'megabyte'),
|
||||||
9: ('G', 'GB', 'gigabyte'),
|
9: ('G', 'GB', 'gigabyte'),
|
||||||
12: ('T', 'TB', 'terabyte'),
|
12: ('T', 'TB', 'terabyte'),
|
||||||
13: ('P', 'PB', 'petabyte'), # yeah, right.
|
13: ('P', 'PB', 'petabyte'), # yeah, right.
|
||||||
15: ('E', 'EB', 'exabyte'),
|
15: ('E', 'EB', 'exabyte'),
|
||||||
18: ('Z', 'ZB', 'zettabyte'),
|
18: ('Z', 'ZB', 'zettabyte'),
|
||||||
19: ('Y', 'YB', 'yottabyte')
|
19: ('Y', 'YB', 'yottabyte')
|
||||||
},
|
},
|
||||||
'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
|
'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
|
||||||
-1: ('nybble', 'nibble', 'nyble', 'half-byte', 'tetrade', 'nibble'),
|
-1: ('nybble', 'nibble', 'nyble', 'half-byte', 'tetrade', 'nibble'),
|
||||||
10: ('Ki', 'KiB', 'kibibyte'),
|
10: ('Ki', 'KiB', 'kibibyte'),
|
||||||
20: ('Mi', 'MiB', 'mebibyte'),
|
20: ('Mi', 'MiB', 'mebibyte'),
|
||||||
30: ('Gi', 'GiB', 'gibibyte'),
|
30: ('Gi', 'GiB', 'gibibyte'),
|
||||||
40: ('Ti', 'TiB', 'tebibyte'),
|
40: ('Ti', 'TiB', 'tebibyte'),
|
||||||
50: ('Pi', 'PiB', 'pebibyte'),
|
50: ('Pi', 'PiB', 'pebibyte'),
|
||||||
60: ('Ei', 'EiB', 'exbibyte'),
|
60: ('Ei', 'EiB', 'exbibyte'),
|
||||||
70: ('Zi', 'ZiB', 'zebibyte'),
|
70: ('Zi', 'ZiB', 'zebibyte'),
|
||||||
80: ('Yi', 'YiB', 'yobibyte')
|
80: ('Yi', 'YiB', 'yobibyte')
|
||||||
}}
|
}}
|
||||||
# https://en.wikipedia.org/wiki/Bit#Multiple_bits - note that 8 bits = 1 byte
|
# https://en.wikipedia.org/wiki/Bit#Multiple_bits - note that 8 bits = 1 byte
|
||||||
self.bwUnits = {'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
|
self.bwUnits = {'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
|
||||||
0: (None, 'b', 'bit'),
|
0: (None, 'b', 'bit'),
|
||||||
3: ('k', 'kb', 'kilobit'),
|
3: ('k', 'kb', 'kilobit'),
|
||||||
6: ('M', 'Mb', 'megabit'),
|
6: ('M', 'Mb', 'megabit'),
|
||||||
9: ('G', 'Gb', 'gigabit'),
|
9: ('G', 'Gb', 'gigabit'),
|
||||||
12: ('T', 'Tb', 'terabit'),
|
12: ('T', 'Tb', 'terabit'),
|
||||||
13: ('P', 'Pb', 'petabit'),
|
13: ('P', 'Pb', 'petabit'),
|
||||||
15: ('E', 'Eb', 'exabit'),
|
15: ('E', 'Eb', 'exabit'),
|
||||||
18: ('Z', 'Zb', 'zettabit'),
|
18: ('Z', 'Zb', 'zettabit'),
|
||||||
19: ('Y', 'Yb', 'yottabit')
|
19: ('Y', 'Yb', 'yottabit')
|
||||||
},
|
},
|
||||||
'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
|
'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
|
||||||
-1: ('semi-octet', 'quartet', 'quadbit'),
|
-1: ('semi-octet', 'quartet', 'quadbit'),
|
||||||
10: ('Ki', 'Kib', 'kibibit'),
|
10: ('Ki', 'Kib', 'kibibit'),
|
||||||
20: ('Mi', 'Mib', 'mebibit'),
|
20: ('Mi', 'Mib', 'mebibit'),
|
||||||
30: ('Gi', 'Gib', 'gibibit'),
|
30: ('Gi', 'Gib', 'gibibit'),
|
||||||
40: ('Ti', 'Tib', 'tebibit'),
|
40: ('Ti', 'Tib', 'tebibit'),
|
||||||
50: ('Pi', 'Pib', 'pebibit'),
|
50: ('Pi', 'Pib', 'pebibit'),
|
||||||
60: ('Ei', 'Eib', 'exbibit'),
|
60: ('Ei', 'Eib', 'exbibit'),
|
||||||
70: ('Zi', 'Zib', 'zebibit'),
|
70: ('Zi', 'Zib', 'zebibit'),
|
||||||
80: ('Yi', 'Yib', 'yobibit')
|
80: ('Yi', 'Yib', 'yobibit')
|
||||||
}}
|
}}
|
||||||
self.valid_storage = []
|
self.valid_storage = []
|
||||||
for unit_type, convpair in self.storageUnits.items():
|
for unit_type, convpair in self.storageUnits.items():
|
||||||
for f, l in convpair.items():
|
for f, l in convpair.items():
|
43
aif/utils/file_handler.py
Normal file
43
aif/utils/file_handler.py
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
|
||||||
|
|
||||||
|
class File(object):
|
||||||
|
def __init__(self, file_path):
|
||||||
|
self.orig_path = file_path
|
||||||
|
self.fullpath = os.path.abspath(os.path.expanduser(self.orig_path))
|
||||||
|
self.path_rel = pathlib.PurePosixPath(self.orig_path)
|
||||||
|
self.path_full = pathlib.PurePosixPath(self.fullpath)
|
||||||
|
|
||||||
|
|
||||||
|
class Directory(object):
|
||||||
|
def __init__(self, dir_path):
|
||||||
|
self.orig_path = dir_path
|
||||||
|
self.fullpath = os.path.abspath(os.path.expanduser(self.orig_path))
|
||||||
|
self.path_rel = pathlib.PurePosixPath(self.orig_path)
|
||||||
|
self.path_full = pathlib.PurePosixPath(self.fullpath)
|
||||||
|
self.files = []
|
||||||
|
self.dirs = []
|
||||||
|
|
||||||
|
def populateFilesDirs(self, recursive = False):
|
||||||
|
if not recursive:
|
||||||
|
for i in os.listdir(self.fullpath):
|
||||||
|
if os.path.isdir(os.path.join(self.fullpath, i)):
|
||||||
|
self.dirs.append(i)
|
||||||
|
elif os.path.isfile(os.path.join(self.fullpath, i)):
|
||||||
|
self.files.append(i)
|
||||||
|
else:
|
||||||
|
for root, dirs, files in os.walk(self.fullpath):
|
||||||
|
for f in files:
|
||||||
|
fpath = os.path.join(root, f)
|
||||||
|
relfpath = pathlib.PurePosixPath(fpath).relative_to(self.path_full)
|
||||||
|
self.files.append(relfpath)
|
||||||
|
for d in dirs:
|
||||||
|
dpath = os.path.join(root, d)
|
||||||
|
reldpath = pathlib.PurePosixPath(dpath).relative_to(self.path_full)
|
||||||
|
self.dirs.append(reldpath)
|
||||||
|
if root not in self.dirs:
|
||||||
|
self.dirs.append(dirs)
|
||||||
|
self.dirs.sort()
|
||||||
|
self.files.sort()
|
||||||
|
return()
|
154
aif/utils/gpg_handler.py
Normal file
154
aif/utils/gpg_handler.py
Normal file
@ -0,0 +1,154 @@
|
|||||||
|
import copy
|
||||||
|
import io
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import tempfile
|
||||||
|
##
|
||||||
|
import gpg
|
||||||
|
import gpg.errors
|
||||||
|
|
||||||
|
|
||||||
|
class GPG(object):
|
||||||
|
def __init__(self, homedir = None, primary_key = None):
|
||||||
|
self.homedir = homedir
|
||||||
|
self.primary_key = primary_key
|
||||||
|
self.temporary = None
|
||||||
|
self.gpg = None
|
||||||
|
self._imported_keys = []
|
||||||
|
|
||||||
|
def _initContext(self):
|
||||||
|
if not self.homedir:
|
||||||
|
self.homedir = tempfile.mkdtemp(suffix = '.gpg', prefix = '.aif.')
|
||||||
|
self.homedir = os.path.abspath(os.path.expanduser(self.homedir))
|
||||||
|
self.temporary = False
|
||||||
|
if not os.path.isdir(self.homedir):
|
||||||
|
self.temporary = True
|
||||||
|
os.makedirs(self.homedir, exist_ok = True)
|
||||||
|
os.chmod(self.homedir, 0o0700)
|
||||||
|
self.gpg = gpg.Context(home_dir = self.homedir)
|
||||||
|
if self.temporary:
|
||||||
|
self.primary_key = self.createKey('AIF-NG File Verification Key', sign = True, force = True)
|
||||||
|
else:
|
||||||
|
self.primary_key = self.getKey(self.primary_key, secret = True)
|
||||||
|
return()
|
||||||
|
|
||||||
|
def clean(self):
|
||||||
|
# This is mostly just to cleanup the stuff we did before.
|
||||||
|
self.primary_key = self.primary_key.fpr
|
||||||
|
if self.temporary:
|
||||||
|
self.primary_key = None
|
||||||
|
shutil.rmtree(self.homedir)
|
||||||
|
self.gpg = None
|
||||||
|
return()
|
||||||
|
|
||||||
|
def createKey(self, userid, *args, **kwargs):
|
||||||
|
# algorithm=None, expires_in=0, expires=True, sign=False, encrypt=False, certify=False,
|
||||||
|
# authenticate=False, passphrase=None, force=False
|
||||||
|
keyinfo = {'userid': userid,
|
||||||
|
'algorithm': kwargs.get('algorithm', 'rsa4096'),
|
||||||
|
'expires_in': kwargs.get('expires_in'),
|
||||||
|
'sign': kwargs.get('sign', True),
|
||||||
|
'encrypt': kwargs.get('encrypt', False),
|
||||||
|
'certify': kwargs.get('certify', False),
|
||||||
|
'authenticate': kwargs.get('authenticate', False),
|
||||||
|
'passphrase': kwargs.get('passphrase'),
|
||||||
|
'force': kwargs.get('force')}
|
||||||
|
if not keyinfo['expires_in']:
|
||||||
|
del(keyinfo['expires_in'])
|
||||||
|
keyinfo['expires'] = False
|
||||||
|
k = self.gpg.create_key(**keyinfo)
|
||||||
|
return(k.fpr)
|
||||||
|
|
||||||
|
def findKey(self, searchstr, secret = False, local = True, remote = True,
|
||||||
|
secret_only = False, keyring_import = False):
|
||||||
|
fltr = 0
|
||||||
|
if secret:
|
||||||
|
fltr = fltr | gpg.constants.KEYLIST_MODE_WITH_SECRET
|
||||||
|
if local:
|
||||||
|
fltr = fltr | gpg.constants.KEYLIST_MODE_LOCAL
|
||||||
|
if remote:
|
||||||
|
fltr = fltr | gpg.constants.KEYLIST_MODE_EXTERN
|
||||||
|
keys = [k for k in self.gpg.keylist(pattern = searchstr, secret = secret_only, mode = fltr)]
|
||||||
|
if keyring_import:
|
||||||
|
self.importKeys(keys, native = True)
|
||||||
|
return(keys)
|
||||||
|
|
||||||
|
def getKey(self, key_id, secret = False, strict = False):
|
||||||
|
try:
|
||||||
|
getattr(key_id, 'fpr')
|
||||||
|
return(key_id)
|
||||||
|
except AttributeError:
|
||||||
|
if not strict:
|
||||||
|
self.findKey(key_id, keyring_import = True)
|
||||||
|
try:
|
||||||
|
key = self.gpg.get_key(key_id, secret = secret)
|
||||||
|
except gpg.errors.KeyNotFound:
|
||||||
|
key = None
|
||||||
|
return(key)
|
||||||
|
return()
|
||||||
|
|
||||||
|
def getKeyFile(self, keyfile, keyring_import = False):
|
||||||
|
keyfile = os.path.abspath(os.path.expanduser(keyfile))
|
||||||
|
with open(keyfile, 'rb') as fh:
|
||||||
|
rawkey_data = fh.read()
|
||||||
|
fh.seek(0, 0)
|
||||||
|
keys = [k for k in self.gpg.keylist(source = fh)]
|
||||||
|
if keyring_import:
|
||||||
|
self.importKeys(keys, native = True)
|
||||||
|
return((keys, rawkey_data))
|
||||||
|
|
||||||
|
def getKeyStr(self, keydata, keyring_import = False):
|
||||||
|
orig_keydata = keydata
|
||||||
|
if isinstance(keydata, str):
|
||||||
|
keydata = keydata.encode('utf-8')
|
||||||
|
buf = io.BytesIO(keydata)
|
||||||
|
keys = [k for k in self.gpg.keylist(source = buf)]
|
||||||
|
buf.close()
|
||||||
|
if keyring_import:
|
||||||
|
self.importKeys(keys, native = True)
|
||||||
|
return((keys, orig_keydata))
|
||||||
|
|
||||||
|
def importKeys(self, keydata, native = False):
|
||||||
|
if not native:
|
||||||
|
self.gpg.key_import(keydata)
|
||||||
|
else:
|
||||||
|
if not isinstance(keydata, list):
|
||||||
|
keydata = [keydata]
|
||||||
|
self.gpg.op_import_keys(keydata)
|
||||||
|
return()
|
||||||
|
|
||||||
|
def verifyData(self, data, keys = None, strict = False, detached = None, *args, **kwargs):
|
||||||
|
results = {}
|
||||||
|
if keys:
|
||||||
|
if not isinstance(keys, list):
|
||||||
|
keys = [self.getKey(keys)]
|
||||||
|
else:
|
||||||
|
keys = [self.getKey(k) for k in keys]
|
||||||
|
if isinstance(data, str):
|
||||||
|
data = data.encode('utf-8')
|
||||||
|
args = {'signed_data': data}
|
||||||
|
if detached:
|
||||||
|
if isinstance(detached, str):
|
||||||
|
detached = detached.encode('utf-8')
|
||||||
|
args['signature'] = detached
|
||||||
|
if strict:
|
||||||
|
if keys:
|
||||||
|
if not isinstance(keys, list):
|
||||||
|
keys = [keys]
|
||||||
|
args['verify'] = keys
|
||||||
|
results[None] = self.gpg.verify(**args)
|
||||||
|
else:
|
||||||
|
if keys:
|
||||||
|
for k in keys:
|
||||||
|
_args = copy.deepcopy(args)
|
||||||
|
_args['verify'] = [k]
|
||||||
|
results[k.fpr] = self.gpg.verify(**_args)
|
||||||
|
else:
|
||||||
|
results[None] = self.gpg.verify(**args)
|
||||||
|
return(results)
|
||||||
|
|
||||||
|
def verifyFile(self, filepath, *args, **kwargs):
|
||||||
|
filepath = os.path.abspath(os.path.expanduser(filepath))
|
||||||
|
with open(filepath, 'rb') as fh:
|
||||||
|
results = self.verifyData(fh.read(), **kwargs)
|
||||||
|
return(results)
|
0
aif/utils/hash_handler.py
Normal file
0
aif/utils/hash_handler.py
Normal file
@ -1,16 +1,21 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8" ?>
|
<?xml version="1.0" encoding="UTF-8" ?>
|
||||||
<aif xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
<aif xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
xmlns="https://aif-ng.io/"
|
xmlns="https://aif-ng.io/"
|
||||||
xsi:schemaLocation="https://aif-ng.io/ https://schema.xml.r00t2.io/projects/aif.xsd"
|
xsi:schemaLocation="https://aif-ng.io/ http://schema.xml.r00t2.io/projects/aif.xsd"
|
||||||
chrootPath="/mnt/aif"
|
chrootPath="/mnt/aif"
|
||||||
reboot="false">
|
reboot="false">
|
||||||
<bootstrap>
|
<bootstrap>
|
||||||
<tarball>https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz</tarball>
|
<tarball>https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz</tarball>
|
||||||
<verify>
|
<verify>
|
||||||
<gpg>
|
<gpg>
|
||||||
<signatureFile>
|
<sigs>
|
||||||
https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig
|
<signatureFile>
|
||||||
</signatureFile>
|
https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig
|
||||||
|
</signatureFile>
|
||||||
|
</sigs>
|
||||||
|
<keys>
|
||||||
|
<keyID>0x4AA4767BBC9C4B1D18AE28B77F2D434B9741E8AC</keyID>
|
||||||
|
</keys>
|
||||||
</gpg>
|
</gpg>
|
||||||
<hash>
|
<hash>
|
||||||
<checksumFile hashType="md5">http://arch.mirror.square-r00t.net/iso/latest/md5sums.txt</checksumFile>
|
<checksumFile hashType="md5">http://arch.mirror.square-r00t.net/iso/latest/md5sums.txt</checksumFile>
|
||||||
|
Loading…
Reference in New Issue
Block a user