diff --git a/aif/config/parser.py b/aif/config/parser.py
index 5c66c5f..86b3981 100644
--- a/aif/config/parser.py
+++ b/aif/config/parser.py
@@ -36,6 +36,7 @@ class Config(object):
         if not xsdpath:
             xsdpath = self.xsd_path
         raw_xsd = None
+        base_url = None
         if xsdpath:
             xsdpath = os.path.abspath(os.path.expanduser(xsdpath))
             if not os.path.isfile(xsdpath):
@@ -43,11 +44,12 @@ class Config(object):
                                                        'does not exist on the local filesystem'))
             with open(xsdpath, 'rb') as fh:
                 raw_xsd = fh.read()
+            base_url = os.path.split(xsdpath)[0]
         else:
             xsi = self.xml.nsmap.get('xsi', 'http://www.w3.org/2001/XMLSchema-instance')
             schemaLocation = '{{{0}}}schemaLocation'.format(xsi)
             schemaURL = self.xml.attrib.get(schemaLocation,
-                                            'https://aif-ng.io/aif.xsd?ref={0}'.format(self.xml.attrib['version']))
+                                            'https://schema.xml.r00t2.io/projects/aif.xsd')
             split_url = schemaURL.split()
             if len(split_url) == 2: # a properly defined schemaLocation
                 schemaURL = split_url[1]
@@ -58,7 +60,8 @@ class Config(object):
                 # TODO: logging!
                 raise RuntimeError('Could not download XSD')
             raw_xsd = req.content
-        self.xsd = etree.XMLSchema(etree.XML(raw_xsd))
+            base_url = os.path.split(req.url)[0]  # This makes me feel dirty.
+        self.xsd = etree.XMLSchema(etree.XML(raw_xsd, base_url = base_url))
         return()
 
     def parseRaw(self, parser = None):
diff --git a/aif/system/locales.py b/aif/system/locales.py
index 41fcad0..80ac749 100644
--- a/aif/system/locales.py
+++ b/aif/system/locales.py
@@ -1,4 +1,5 @@
 import configparser
+import copy
 import io
 import os
 import re
@@ -78,7 +79,7 @@ class Locale(object):
         else:
             ldef_name = _locale_def_re.sub(r'\g<1>\g<2>', locale)
             lpath = os.path.join(localesrcdir, 'locales', ldef_name)
-            env = dict(os.environ).copy()
+            env = copy.deepcopy(dict(os.environ))
             env['I18NPATH'] = localesrcdir
             subprocess.run(['localedef',
                             '--force',
diff --git a/aif/utils.py b/aif/utils/__init__.py
similarity index 78%
rename from aif/utils.py
rename to aif/utils/__init__.py
index 6f08fb3..279fbea 100644
--- a/aif/utils.py
+++ b/aif/utils/__init__.py
@@ -6,6 +6,10 @@ import shlex
 import subprocess
 ##
 import psutil
+##
+from . import file_handler
+from . import gpg_handler
+from . import hash_handler
 
 
 def checkMounted(devpath):
@@ -149,50 +153,50 @@ class _Sizer(object):
         # 'decimal' is base-10, 'binary' is base-2. (Duh.)
         # "b" = bytes, "n" = given value, and "u" = unit suffix's key in below notes.
         self.storageUnits = {'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
-                            0: (None, 'B', 'byte'),
-                            3: ('k', 'kB', 'kilobyte'),
-                            6: ('M', 'MB', 'megabyte'),
-                            9: ('G', 'GB', 'gigabyte'),
-                            12: ('T', 'TB', 'teraybte'),
-                            13: ('P', 'PB', 'petabyte'), # yeah, right.
-                            15: ('E', 'EB', 'exabyte'),
-                            18: ('Z', 'ZB', 'zettabyte'),
-                            19: ('Y', 'YB', 'yottabyte')
-                            },
-                            'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
-                            -1: ('nybble', 'nibble', 'nyble', 'half-byte', 'tetrade', 'nibble'),
-                            10: ('Ki', 'KiB', 'kibibyte'),
-                            20: ('Mi', 'MiB', 'mebibyte'),
-                            30: ('Gi', 'GiB', 'gibibyte'),
-                            40: ('Ti', 'TiB', 'tebibyte'),
-                            50: ('Pi', 'PiB', 'pebibyte'),
-                            60: ('Ei', 'EiB', 'exbibyte'),
-                            70: ('Zi', 'ZiB', 'zebibyte'),
-                            80: ('Yi', 'YiB', 'yobibyte')
-                            }}
+                                         0: (None, 'B', 'byte'),
+                                         3: ('k', 'kB', 'kilobyte'),
+                                         6: ('M', 'MB', 'megabyte'),
+                                         9: ('G', 'GB', 'gigabyte'),
+                                         12: ('T', 'TB', 'teraybte'),
+                                         13: ('P', 'PB', 'petabyte'), # yeah, right.
+                                         15: ('E', 'EB', 'exabyte'),
+                                         18: ('Z', 'ZB', 'zettabyte'),
+                                         19: ('Y', 'YB', 'yottabyte')
+                                         },
+                                         'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
+                                         -1: ('nybble', 'nibble', 'nyble', 'half-byte', 'tetrade', 'nibble'),
+                                         10: ('Ki', 'KiB', 'kibibyte'),
+                                         20: ('Mi', 'MiB', 'mebibyte'),
+                                         30: ('Gi', 'GiB', 'gibibyte'),
+                                         40: ('Ti', 'TiB', 'tebibyte'),
+                                         50: ('Pi', 'PiB', 'pebibyte'),
+                                         60: ('Ei', 'EiB', 'exbibyte'),
+                                         70: ('Zi', 'ZiB', 'zebibyte'),
+                                         80: ('Yi', 'YiB', 'yobibyte')
+                                         }}
         # https://en.wikipedia.org/wiki/Bit#Multiple_bits - note that 8 bits = 1 byte
         self.bwUnits = {'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
-                            0: (None, 'b', 'bit'),
-                            3: ('k', 'kb', 'kilobit'),
-                            6: ('M', 'Mb', 'megabit'),
-                            9: ('G', 'Gb', 'gigabit'),
-                            12: ('T', 'Tb', 'terabit'),
-                            13: ('P', 'Pb', 'petabit'),
-                            15: ('E', 'Eb', 'exabit'),
-                            18: ('Z', 'Zb', 'zettabit'),
-                            19: ('Y', 'Yb', 'yottabit')
-                            },
-                            'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
-                            -1: ('semi-octet', 'quartet', 'quadbit'),
-                            10: ('Ki', 'Kib', 'kibibit'),
-                            20: ('Mi', 'Mib', 'mebibit'),
-                            30: ('Gi', 'Gib', 'gibibit'),
-                            40: ('Ti', 'Tib', 'tebibit'),
-                            50: ('Pi', 'Pib', 'pebibit'),
-                            60: ('Ei', 'Eib', 'exbibit'),
-                            70: ('Zi', 'Zib', 'zebibit'),
-                            80: ('Yi', 'Yib', 'yobibit')
-                            }}
+                                         0: (None, 'b', 'bit'),
+                                         3: ('k', 'kb', 'kilobit'),
+                                         6: ('M', 'Mb', 'megabit'),
+                                         9: ('G', 'Gb', 'gigabit'),
+                                         12: ('T', 'Tb', 'terabit'),
+                                         13: ('P', 'Pb', 'petabit'),
+                                         15: ('E', 'Eb', 'exabit'),
+                                         18: ('Z', 'Zb', 'zettabit'),
+                                         19: ('Y', 'Yb', 'yottabit')
+                                         },
+                                         'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
+                                         -1: ('semi-octet', 'quartet', 'quadbit'),
+                                         10: ('Ki', 'Kib', 'kibibit'),
+                                         20: ('Mi', 'Mib', 'mebibit'),
+                                         30: ('Gi', 'Gib', 'gibibit'),
+                                         40: ('Ti', 'Tib', 'tebibit'),
+                                         50: ('Pi', 'Pib', 'pebibit'),
+                                         60: ('Ei', 'Eib', 'exbibit'),
+                                         70: ('Zi', 'Zib', 'zebibit'),
+                                         80: ('Yi', 'Yib', 'yobibit')
+                                         }}
         self.valid_storage = []
         for unit_type, convpair in self.storageUnits.items():
             for f, l in convpair.items():
diff --git a/aif/utils/file_handler.py b/aif/utils/file_handler.py
new file mode 100644
index 0000000..b2c0847
--- /dev/null
+++ b/aif/utils/file_handler.py
@@ -0,0 +1,43 @@
+import os
+import pathlib
+
+
+class File(object):
+    def __init__(self, file_path):
+        self.orig_path = file_path
+        self.fullpath = os.path.abspath(os.path.expanduser(self.orig_path))
+        self.path_rel = pathlib.PurePosixPath(self.orig_path)
+        self.path_full = pathlib.PurePosixPath(self.fullpath)
+
+
+class Directory(object):
+    def __init__(self, dir_path):
+        self.orig_path = dir_path
+        self.fullpath = os.path.abspath(os.path.expanduser(self.orig_path))
+        self.path_rel = pathlib.PurePosixPath(self.orig_path)
+        self.path_full = pathlib.PurePosixPath(self.fullpath)
+        self.files = []
+        self.dirs = []
+
+    def populateFilesDirs(self, recursive = False):
+        if not recursive:
+            for i in os.listdir(self.fullpath):
+                if os.path.isdir(os.path.join(self.fullpath, i)):
+                    self.dirs.append(i)
+                elif os.path.isfile(os.path.join(self.fullpath, i)):
+                    self.files.append(i)
+        else:
+            for root, dirs, files in os.walk(self.fullpath):
+                for f in files:
+                    fpath = os.path.join(root, f)
+                    relfpath = pathlib.PurePosixPath(fpath).relative_to(self.path_full)
+                    self.files.append(relfpath)
+                for d in dirs:
+                    dpath = os.path.join(root, d)
+                    reldpath = pathlib.PurePosixPath(dpath).relative_to(self.path_full)
+                    self.dirs.append(reldpath)
+                if root not in self.dirs:
+                    self.dirs.append(dirs)
+        self.dirs.sort()
+        self.files.sort()
+        return()
diff --git a/aif/utils/gpg_handler.py b/aif/utils/gpg_handler.py
new file mode 100644
index 0000000..0dc23d4
--- /dev/null
+++ b/aif/utils/gpg_handler.py
@@ -0,0 +1,154 @@
+import copy
+import io
+import os
+import shutil
+import tempfile
+##
+import gpg
+import gpg.errors
+
+
+class GPG(object):
+    def __init__(self, homedir = None, primary_key = None):
+        self.homedir = homedir
+        self.primary_key = primary_key
+        self.temporary = None
+        self.gpg = None
+        self._imported_keys = []
+
+    def _initContext(self):
+        if not self.homedir:
+            self.homedir = tempfile.mkdtemp(suffix = '.gpg', prefix = '.aif.')
+        self.homedir = os.path.abspath(os.path.expanduser(self.homedir))
+        self.temporary = False
+        if not os.path.isdir(self.homedir):
+            self.temporary = True
+            os.makedirs(self.homedir, exist_ok = True)
+            os.chmod(self.homedir, 0o0700)
+        self.gpg = gpg.Context(home_dir = self.homedir)
+        if self.temporary:
+            self.primary_key = self.createKey('AIF-NG File Verification Key', sign = True, force = True)
+        else:
+            self.primary_key = self.getKey(self.primary_key, secret = True)
+        return()
+
+    def clean(self):
+        # This is mostly just to cleanup the stuff we did before.
+        self.primary_key = self.primary_key.fpr
+        if self.temporary:
+            self.primary_key = None
+            shutil.rmtree(self.homedir)
+        self.gpg = None
+        return()
+
+    def createKey(self, userid, *args, **kwargs):
+        # algorithm=None, expires_in=0, expires=True, sign=False, encrypt=False, certify=False,
+        # authenticate=False, passphrase=None, force=False
+        keyinfo = {'userid': userid,
+                   'algorithm': kwargs.get('algorithm', 'rsa4096'),
+                   'expires_in': kwargs.get('expires_in'),
+                   'sign': kwargs.get('sign', True),
+                   'encrypt': kwargs.get('encrypt', False),
+                   'certify': kwargs.get('certify', False),
+                   'authenticate': kwargs.get('authenticate', False),
+                   'passphrase': kwargs.get('passphrase'),
+                   'force': kwargs.get('force')}
+        if not keyinfo['expires_in']:
+            del(keyinfo['expires_in'])
+            keyinfo['expires'] = False
+        k = self.gpg.create_key(**keyinfo)
+        return(k.fpr)
+
+    def findKey(self, searchstr, secret = False, local = True, remote = True,
+                secret_only = False, keyring_import = False):
+        fltr = 0
+        if secret:
+            fltr = fltr | gpg.constants.KEYLIST_MODE_WITH_SECRET
+        if local:
+            fltr = fltr | gpg.constants.KEYLIST_MODE_LOCAL
+        if remote:
+            fltr = fltr | gpg.constants.KEYLIST_MODE_EXTERN
+        keys = [k for k in self.gpg.keylist(pattern = searchstr, secret = secret_only, mode = fltr)]
+        if keyring_import:
+            self.importKeys(keys, native = True)
+        return(keys)
+
+    def getKey(self, key_id, secret = False, strict = False):
+        try:
+            getattr(key_id, 'fpr')
+            return(key_id)
+        except AttributeError:
+            if not strict:
+                self.findKey(key_id, keyring_import = True)
+            try:
+                key = self.gpg.get_key(key_id, secret = secret)
+            except gpg.errors.KeyNotFound:
+                key = None
+            return(key)
+        return()
+
+    def getKeyFile(self, keyfile, keyring_import = False):
+        keyfile = os.path.abspath(os.path.expanduser(keyfile))
+        with open(keyfile, 'rb') as fh:
+            rawkey_data = fh.read()
+            fh.seek(0, 0)
+            keys = [k for k in self.gpg.keylist(source = fh)]
+        if keyring_import:
+            self.importKeys(keys, native = True)
+        return((keys, rawkey_data))
+
+    def getKeyStr(self, keydata, keyring_import = False):
+        orig_keydata = keydata
+        if isinstance(keydata, str):
+            keydata = keydata.encode('utf-8')
+        buf = io.BytesIO(keydata)
+        keys = [k for k in self.gpg.keylist(source = buf)]
+        buf.close()
+        if keyring_import:
+            self.importKeys(keys, native = True)
+        return((keys, orig_keydata))
+
+    def importKeys(self, keydata, native = False):
+        if not native:
+            self.gpg.key_import(keydata)
+        else:
+            if not isinstance(keydata, list):
+                keydata = [keydata]
+            self.gpg.op_import_keys(keydata)
+        return()
+
+    def verifyData(self, data, keys = None, strict = False, detached = None, *args, **kwargs):
+        results = {}
+        if keys:
+            if not isinstance(keys, list):
+                keys = [self.getKey(keys)]
+            else:
+                keys = [self.getKey(k) for k in keys]
+        if isinstance(data, str):
+            data = data.encode('utf-8')
+        args = {'signed_data': data}
+        if detached:
+            if isinstance(detached, str):
+                detached = detached.encode('utf-8')
+            args['signature'] = detached
+        if strict:
+            if keys:
+                if not isinstance(keys, list):
+                    keys = [keys]
+                args['verify'] = keys
+            results[None] = self.gpg.verify(**args)
+        else:
+            if keys:
+                for k in keys:
+                    _args = copy.deepcopy(args)
+                    _args['verify'] = [k]
+                    results[k.fpr] = self.gpg.verify(**_args)
+            else:
+                results[None] = self.gpg.verify(**args)
+        return(results)
+
+    def verifyFile(self, filepath, *args, **kwargs):
+        filepath = os.path.abspath(os.path.expanduser(filepath))
+        with open(filepath, 'rb') as fh:
+            results = self.verifyData(fh.read(), **kwargs)
+        return(results)
diff --git a/aif/utils/hash_handler.py b/aif/utils/hash_handler.py
new file mode 100644
index 0000000..e69de29
diff --git a/examples/aif.xml b/examples/aif.xml
index 4b73f64..a76a4a1 100644
--- a/examples/aif.xml
+++ b/examples/aif.xml
@@ -1,16 +1,21 @@
 https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz
-
-    https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig
-
+
+
+        https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig
+
+
+
+        0x4AA4767BBC9C4B1D18AE28B77F2D434B9741E8AC
+
 http://arch.mirror.square-r00t.net/iso/latest/md5sums.txt
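
A minimal usage sketch of the new aif.utils.gpg_handler.GPG wrapper introduced above, for reviewers. All file and directory paths here are hypothetical placeholders, and the key ID is simply the one referenced in the updated examples/aif.xml; the patch does not show the caller that drives this class, so _initContext() is invoked directly.

    from aif.utils.gpg_handler import GPG

    # Point the wrapper at a directory that does not exist yet; _initContext()
    # then treats the homedir as temporary, creates it with 0700 permissions,
    # and generates an ephemeral signing key for the session.
    g = GPG(homedir = '/tmp/aif-gpg-example')  # placeholder homedir
    g._initContext()

    # Look up and import the signer's public key (local + external keylist modes),
    # then verify the bootstrap tarball against its detached signature.
    g.findKey('0x4AA4767BBC9C4B1D18AE28B77F2D434B9741E8AC', keyring_import = True)
    with open('/tmp/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig', 'rb') as fh:
        detached_sig = fh.read()
    results = g.verifyFile('/tmp/archlinux-bootstrap-2019.12.01-x86_64.tar.gz',
                           detached = detached_sig)
    for fpr, outcome in results.items():
        print(fpr, outcome)

As written in the patch, verifyData()/verifyFile() return a dict keyed by key fingerprint (or None when no explicit keys are passed), each value being the raw return of gpg.Context.verify(), so callers can inspect per-key verification status.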
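
Likewise, a quick sketch of the new aif.utils.file_handler.Directory helper; the path is a hypothetical placeholder. The non-recursive form splits the directory's immediate children into dirs and files name lists, while the recursive form walks the tree and stores paths relative to the directory.

    from aif.utils.file_handler import Directory

    d = Directory('/tmp')      # placeholder path
    d.populateFilesDirs()      # non-recursive: immediate children only
    print(d.dirs)              # subdirectory names, sorted
    print(d.files)             # file names, sorted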