XML validation is a LOT cleaner now
This commit is contained in:
parent 06c99221d2
commit a1bc613979
@@ -36,6 +36,7 @@ class Config(object):
         if not xsdpath:
             xsdpath = self.xsd_path
         raw_xsd = None
+        base_url = None
         if xsdpath:
             xsdpath = os.path.abspath(os.path.expanduser(xsdpath))
             if not os.path.isfile(xsdpath):
@@ -43,11 +44,12 @@ class Config(object):
                                      'does not exist on the local filesystem'))
             with open(xsdpath, 'rb') as fh:
                 raw_xsd = fh.read()
+            base_url = os.path.split(xsdpath)[0]
         else:
             xsi = self.xml.nsmap.get('xsi', 'http://www.w3.org/2001/XMLSchema-instance')
             schemaLocation = '{{{0}}}schemaLocation'.format(xsi)
             schemaURL = self.xml.attrib.get(schemaLocation,
-                                            'https://aif-ng.io/aif.xsd?ref={0}'.format(self.xml.attrib['version']))
+                                            'https://schema.xml.r00t2.io/projects/aif.xsd')
             split_url = schemaURL.split()
             if len(split_url) == 2:  # a properly defined schemaLocation
                 schemaURL = split_url[1]
@@ -58,7 +60,8 @@ class Config(object):
                 # TODO: logging!
                 raise RuntimeError('Could not download XSD')
             raw_xsd = req.content
-        self.xsd = etree.XMLSchema(etree.XML(raw_xsd))
+            base_url = os.path.split(req.url)[0]  # This makes me feel dirty.
+        self.xsd = etree.XMLSchema(etree.XML(raw_xsd, base_url = base_url))
         return()

     def parseRaw(self, parser = None):
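Why the base_url plumbing matters: lxml only resolves relative xs:include/xs:import references inside an XSD if it knows where that schema came from. Below is a minimal standalone sketch of the same pattern, assuming the schema URL from the hunk above and a hypothetical local document aif.xml; it is an illustration, not AIF-NG's actual Config API.

import os
import requests
from lxml import etree

req = requests.get('https://schema.xml.r00t2.io/projects/aif.xsd')  # URL taken from the diff above
req.raise_for_status()
base_url = os.path.split(req.url)[0]  # directory portion of the final (post-redirect) URL
xsd = etree.XMLSchema(etree.XML(req.content, base_url = base_url))

doc = etree.parse('aif.xml')  # hypothetical config document
print(xsd.validate(doc))      # True/False; xsd.error_log holds the details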
@@ -1,4 +1,5 @@
 import configparser
+import copy
 import io
 import os
 import re
@@ -78,7 +79,7 @@ class Locale(object):
         else:
             ldef_name = _locale_def_re.sub(r'\g<1>\g<2>', locale)
             lpath = os.path.join(localesrcdir, 'locales', ldef_name)
-        env = dict(os.environ).copy()
+        env = copy.deepcopy(dict(os.environ))
         env['I18NPATH'] = localesrcdir
         subprocess.run(['localedef',
                         '--force',
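The Locale change swaps a shallow copy of os.environ for a deep copy before injecting I18NPATH, so the localedef child gets its own environment and the parent process is left untouched. A small hedged sketch of the pattern (the I18NPATH value and the localedef arguments are generic placeholders, not AIF-NG's exact invocation):

import copy
import os
import subprocess

env = copy.deepcopy(dict(os.environ))    # work on a copy; os.environ itself is never modified
env['I18NPATH'] = '/tmp/aif/locale-src'  # hypothetical locale source directory
subprocess.run(['localedef', '--force',
                '-i', 'en_US', '-f', 'UTF-8', 'en_US.UTF-8'],  # illustrative localedef arguments
               env = env,
               check = True)
print('I18NPATH' in os.environ)          # still False unless the caller had already set it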
@@ -6,6 +6,10 @@ import shlex
 import subprocess
 ##
 import psutil
+##
+from . import file_handler
+from . import gpg_handler
+from . import hash_handler


 def checkMounted(devpath):
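This hunk only wires the new submodules (and psutil) into aif.utils; the body of checkMounted() is not part of the diff. Purely as a guess at what a psutil-backed mount check could look like (the behaviour and return value below are assumptions, not the project's code):

import psutil

def checkMounted(devpath):
    # Hypothetical sketch: return the mountpoint backing devpath, or None if it is not mounted.
    for part in psutil.disk_partitions(all = True):
        if part.device == devpath:
            return(part.mountpoint)
    return(None)

print(checkMounted('/dev/sda1'))  # e.g. '/boot' if mounted, otherwise None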
aif/utils/file_handler.py (new file, 43 lines)
@@ -0,0 +1,43 @@
+import os
+import pathlib
+
+
+class File(object):
+    def __init__(self, file_path):
+        self.orig_path = file_path
+        self.fullpath = os.path.abspath(os.path.expanduser(self.orig_path))
+        self.path_rel = pathlib.PurePosixPath(self.orig_path)
+        self.path_full = pathlib.PurePosixPath(self.fullpath)
+
+
+class Directory(object):
+    def __init__(self, dir_path):
+        self.orig_path = dir_path
+        self.fullpath = os.path.abspath(os.path.expanduser(self.orig_path))
+        self.path_rel = pathlib.PurePosixPath(self.orig_path)
+        self.path_full = pathlib.PurePosixPath(self.fullpath)
+        self.files = []
+        self.dirs = []
+
+    def populateFilesDirs(self, recursive = False):
+        if not recursive:
+            for i in os.listdir(self.fullpath):
+                if os.path.isdir(os.path.join(self.fullpath, i)):
+                    self.dirs.append(i)
+                elif os.path.isfile(os.path.join(self.fullpath, i)):
+                    self.files.append(i)
+        else:
+            for root, dirs, files in os.walk(self.fullpath):
+                for f in files:
+                    fpath = os.path.join(root, f)
+                    relfpath = pathlib.PurePosixPath(fpath).relative_to(self.path_full)
+                    self.files.append(relfpath)
+                for d in dirs:
+                    dpath = os.path.join(root, d)
+                    reldpath = pathlib.PurePosixPath(dpath).relative_to(self.path_full)
+                    self.dirs.append(reldpath)
+                if root not in self.dirs:
+                    self.dirs.append(dirs)
+        self.dirs.sort()
+        self.files.sort()
+        return()
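A minimal usage sketch for the new Directory helper, assuming the package layout this commit introduces (aif.utils.file_handler) and a hypothetical ~/aif-test directory; only the non-recursive scan is shown (the recursive walk records PurePosixPath objects relative to the scanned root):

from aif.utils.file_handler import Directory  # module added in this commit

d = Directory('~/aif-test')             # hypothetical path; expanded to an absolute path internally
d.populateFilesDirs(recursive = False)  # single-level scan
print(d.dirs)                           # names of immediate subdirectories
print(d.files)                          # names of immediate files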
aif/utils/gpg_handler.py (new file, 154 lines)
@@ -0,0 +1,154 @@
+import copy
+import io
+import os
+import shutil
+import tempfile
+##
+import gpg
+import gpg.errors
+
+
+class GPG(object):
+    def __init__(self, homedir = None, primary_key = None):
+        self.homedir = homedir
+        self.primary_key = primary_key
+        self.temporary = None
+        self.gpg = None
+        self._imported_keys = []
+
+    def _initContext(self):
+        if not self.homedir:
+            self.homedir = tempfile.mkdtemp(suffix = '.gpg', prefix = '.aif.')
+        self.homedir = os.path.abspath(os.path.expanduser(self.homedir))
+        self.temporary = False
+        if not os.path.isdir(self.homedir):
+            self.temporary = True
+            os.makedirs(self.homedir, exist_ok = True)
+            os.chmod(self.homedir, 0o0700)
+        self.gpg = gpg.Context(home_dir = self.homedir)
+        if self.temporary:
+            self.primary_key = self.createKey('AIF-NG File Verification Key', sign = True, force = True)
+        else:
+            self.primary_key = self.getKey(self.primary_key, secret = True)
+        return()
+
+    def clean(self):
+        # This is mostly just to cleanup the stuff we did before.
+        self.primary_key = self.primary_key.fpr
+        if self.temporary:
+            self.primary_key = None
+            shutil.rmtree(self.homedir)
+        self.gpg = None
+        return()
+
+    def createKey(self, userid, *args, **kwargs):
+        # algorithm=None, expires_in=0, expires=True, sign=False, encrypt=False, certify=False,
+        # authenticate=False, passphrase=None, force=False
+        keyinfo = {'userid': userid,
+                   'algorithm': kwargs.get('algorithm', 'rsa4096'),
+                   'expires_in': kwargs.get('expires_in'),
+                   'sign': kwargs.get('sign', True),
+                   'encrypt': kwargs.get('encrypt', False),
+                   'certify': kwargs.get('certify', False),
+                   'authenticate': kwargs.get('authenticate', False),
+                   'passphrase': kwargs.get('passphrase'),
+                   'force': kwargs.get('force')}
+        if not keyinfo['expires_in']:
+            del(keyinfo['expires_in'])
+            keyinfo['expires'] = False
+        k = self.gpg.create_key(**keyinfo)
+        return(k.fpr)
+
+    def findKey(self, searchstr, secret = False, local = True, remote = True,
+                secret_only = False, keyring_import = False):
+        fltr = 0
+        if secret:
+            fltr = fltr | gpg.constants.KEYLIST_MODE_WITH_SECRET
+        if local:
+            fltr = fltr | gpg.constants.KEYLIST_MODE_LOCAL
+        if remote:
+            fltr = fltr | gpg.constants.KEYLIST_MODE_EXTERN
+        keys = [k for k in self.gpg.keylist(pattern = searchstr, secret = secret_only, mode = fltr)]
+        if keyring_import:
+            self.importKeys(keys, native = True)
+        return(keys)
+
+    def getKey(self, key_id, secret = False, strict = False):
+        try:
+            getattr(key_id, 'fpr')
+            return(key_id)
+        except AttributeError:
+            if not strict:
+                self.findKey(key_id, keyring_import = True)
+            try:
+                key = self.gpg.get_key(key_id, secret = secret)
+            except gpg.errors.KeyNotFound:
+                key = None
+            return(key)
+        return()
+
+    def getKeyFile(self, keyfile, keyring_import = False):
+        keyfile = os.path.abspath(os.path.expanduser(keyfile))
+        with open(keyfile, 'rb') as fh:
+            rawkey_data = fh.read()
+            fh.seek(0, 0)
+            keys = [k for k in self.gpg.keylist(source = fh)]
+        if keyring_import:
+            self.importKeys(keys, native = True)
+        return((keys, rawkey_data))
+
+    def getKeyStr(self, keydata, keyring_import = False):
+        orig_keydata = keydata
+        if isinstance(keydata, str):
+            keydata = keydata.encode('utf-8')
+        buf = io.BytesIO(keydata)
+        keys = [k for k in self.gpg.keylist(source = buf)]
+        buf.close()
+        if keyring_import:
+            self.importKeys(keys, native = True)
+        return((keys, orig_keydata))
+
+    def importKeys(self, keydata, native = False):
+        if not native:
+            self.gpg.key_import(keydata)
+        else:
+            if not isinstance(keydata, list):
+                keydata = [keydata]
+            self.gpg.op_import_keys(keydata)
+        return()
+
+    def verifyData(self, data, keys = None, strict = False, detached = None, *args, **kwargs):
+        results = {}
+        if keys:
+            if not isinstance(keys, list):
+                keys = [self.getKey(keys)]
+            else:
+                keys = [self.getKey(k) for k in keys]
+        if isinstance(data, str):
+            data = data.encode('utf-8')
+        args = {'signed_data': data}
+        if detached:
+            if isinstance(detached, str):
+                detached = detached.encode('utf-8')
+            args['signature'] = detached
+        if strict:
+            if keys:
+                if not isinstance(keys, list):
+                    keys = [keys]
+                args['verify'] = keys
+            results[None] = self.gpg.verify(**args)
+        else:
+            if keys:
+                for k in keys:
+                    _args = copy.deepcopy(args)
+                    _args['verify'] = [k]
+                    results[k.fpr] = self.gpg.verify(**_args)
+            else:
+                results[None] = self.gpg.verify(**args)
+        return(results)
+
+    def verifyFile(self, filepath, *args, **kwargs):
+        filepath = os.path.abspath(os.path.expanduser(filepath))
+        with open(filepath, 'rb') as fh:
+            results = self.verifyData(fh.read(), **kwargs)
+        return(results)
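A hedged usage sketch for the new GPG wrapper, tying it to the tarball, signature, and key that the example XML below declares. The file names are local placeholders, the fingerprint is the one from the example document, and the flow simply chains the methods defined above; it is not AIF-NG's actual calling code.

from aif.utils.gpg_handler import GPG

g = GPG()          # no homedir given, so _initContext() builds a throwaway keyring
g._initContext()

# Locate/import the packager's key by fingerprint (value from the example XML below)...
keys = g.findKey('0x4AA4767BBC9C4B1D18AE28B77F2D434B9741E8AC', keyring_import = True)

# ...then verify the bootstrap tarball against its detached signature (placeholder local paths).
with open('archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig', 'rb') as fh:
    sig = fh.read()
results = g.verifyFile('archlinux-bootstrap-2019.12.01-x86_64.tar.gz',
                       detached = sig,
                       keys = keys)
for fpr, result in results.items():
    print(fpr, result)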
aif/utils/hash_handler.py (new file, empty)
@@ -1,16 +1,21 @@
 <?xml version="1.0" encoding="UTF-8" ?>
 <aif xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
      xmlns="https://aif-ng.io/"
-     xsi:schemaLocation="https://aif-ng.io/ https://schema.xml.r00t2.io/projects/aif.xsd"
+     xsi:schemaLocation="https://aif-ng.io/ http://schema.xml.r00t2.io/projects/aif.xsd"
      chrootPath="/mnt/aif"
      reboot="false">
     <bootstrap>
         <tarball>https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz</tarball>
         <verify>
             <gpg>
+                <sigs>
                 <signatureFile>
                     https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig
                 </signatureFile>
+                </sigs>
+                <keys>
+                    <keyID>0x4AA4767BBC9C4B1D18AE28B77F2D434B9741E8AC</keyID>
+                </keys>
             </gpg>
             <hash>
                 <checksumFile hashType="md5">http://arch.mirror.square-r00t.net/iso/latest/md5sums.txt</checksumFile>
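The example document now groups detached signatures under <sigs> and trusted key IDs under <keys>. A hedged sketch of reading that structure back out with lxml (the filename is a placeholder, and this is not AIF-NG's parsing code):

from lxml import etree

ns = {'aif': 'https://aif-ng.io/'}  # default namespace of the document, bound to a prefix for XPath
xml = etree.parse('aif.xml').getroot()
for sig in xml.findall('.//aif:gpg/aif:sigs/aif:signatureFile', ns):
    print('signature:', sig.text.strip())
for key in xml.findall('.//aif:gpg/aif:keys/aif:keyID', ns):
    print('key ID:', key.text.strip())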