starting to roll in some logging. still need to figure out what's going on with that gpg verifyData

brent s. 2019-12-17 03:40:08 -05:00
parent 1ae519bb40
commit f25e6bee2a
13 changed files with 670 additions and 122 deletions

View File

@@ -1,18 +1,27 @@
+import logging
+##
 try:
     from . import constants
+    _has_constants = True
 except ImportError:
     from . import constants_fallback as constants
+    _has_constants = False
+from . import log
 from . import constants_fallback
 from . import utils
 from . import disk
 from . import system
 from . import config
 from . import envsetup
-from . import log
 from . import network
 from . import pacman


+_logger = logging.getLogger('AIF')
+if not _has_constants:
+    _logger.warning('Could not import constants, so using constants_fallback as constants')


 class AIF(object):
     def __init__(self):
         # Process:

View File

@@ -1,10 +1,13 @@
 import copy
+import logging
 import os
 import re
 ##
 import requests
 from lxml import etree, objectify


+_logger = logging.getLogger('config:{0}'.format(__name__))


 class Config(object):
     def __init__(self, xsd_path = None, *args, **kwargs):
@@ -55,12 +58,18 @@ class Config(object):
             schemaURL = split_url[1]
         else:
             schemaURL = split_url[0]  # a LAZY schemaLocation
-        req = requests.get(schemaURL)
-        if not req.ok:
-            # TODO: logging!
-            raise RuntimeError('Could not download XSD')
-        raw_xsd = req.content
-        base_url = os.path.split(req.url)[0]  # This makes me feel dirty.
+        if schemaURL.startswith('file://'):
+            schemaURL = re.sub(r'^file://', r'', schemaURL)
+            with open(schemaURL, 'rb') as fh:
+                raw_xsd = fh.read()
+            base_url = os.path.dirname(schemaURL)
+        else:
+            req = requests.get(schemaURL)
+            if not req.ok:
+                # TODO: logging!
+                raise RuntimeError('Could not download XSD')
+            raw_xsd = req.content
+            base_url = os.path.split(req.url)[0]  # This makes me feel dirty.
         self.xsd = etree.XMLSchema(etree.XML(raw_xsd, base_url = base_url))
         return(None)


@@ -197,7 +206,7 @@ class ConfigBin(Config):


 detector = {'raw': (re.compile(r'^\s*(?P<xml><(\?xml|aif)\s+.*)\s*$', re.DOTALL | re.MULTILINE), ConfigStr),
-            'remote': (re.compile(r'^(?P<uri>(?P<proto>(https?|ftps?)://)(?P<path>.*))\s*$'), RemoteFile),
+            'remote': (re.compile(r'^(?P<uri>(?P<scheme>(https?|ftps?)://)(?P<path>.*))\s*$'), RemoteFile),
             'local': (re.compile(r'^(file://)?(?P<path>(/?[^/]+)+/?)$'), LocalFile)}





View File

@@ -22,6 +22,7 @@ EXTERNAL_DEPS = ['blkinfo',
                  'pytz',
                  'requests',
                  'validators']
+DEFAULT_LOGFILE = '/var/log/aif.log'
 # PARTED FLAG INDEXING
 PARTED_FSTYPES = sorted(list(dict(vars(parted.filesystem))['fileSystemType'].keys()))
 PARTED_FSTYPES_GUIDS = {'affs0': uuid.UUID(hex = '0FC63DAF-8483-4772-8E79-3D69D8477DE4'),

View File

@@ -1 +1,47 @@
 import logging
+import logging.handlers
+import os
+##
+try:
+    # https://www.freedesktop.org/software/systemd/python-systemd/journal.html#journalhandler-class
+    from systemd import journal
+    _has_journald = True
+except ImportError:
+    _has_journald = False
+##
+from . import constants_fallback
+
+_cfg_args = {'handlers': [],
+             'level': logging.DEBUG}  # TEMPORARY FOR TESTING
+if _has_journald:
+    # There were some weird changes somewhere along the line.
+    try:
+        # But it's *probably* this one.
+        h = journal.JournalHandler()
+    except AttributeError:
+        h = journal.JournaldLogHandler()
+    # Systemd includes times, so we don't need to.
+    h.setFormatter(logging.Formatter(style = '{',
+                                     fmt = ('{name}:{levelname}:{name}:{filename}:'
+                                            '{funcName}:{lineno}: {message}')))
+    _cfg_args['handlers'].append(h)
+# Logfile
+# Set up the permissions beforehand.
+os.makedirs(os.path.dirname(constants_fallback.DEFAULT_LOGFILE), exist_ok = True)
+os.chmod(constants_fallback.DEFAULT_LOGFILE, 0o0600)
+h = logging.handlers.RotatingFileHandler(constants_fallback.DEFAULT_LOGFILE,
+                                         encoding = 'utf8',
+                                         # Disable rotating for now.
+                                         # maxBytes = 50000000000,
+                                         # backupCount = 30
+                                         )
+h.setFormatter(logging.Formatter(style = '{',
+                                 fmt = ('{asctime}:'
+                                        '{levelname}:{name}:{filename}:'
+                                        '{funcName}:{lineno}: {message}')))
+_cfg_args['handlers'].append(h)
+
+logging.basicConfig(**_cfg_args)
+logger = logging.getLogger()
+
+logger.info('Logging initialized.')

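As a side note on how the rest of the package consumes this: importing aif.log once (aif/__init__.py does it via `from . import log`) runs the module-level setup above, and every other module simply asks for a named logger. A minimal sketch (the module name is hypothetical):

[source,python]
----
import logging
import aif.log  # noqa -- imported for its side effect: the handler/level setup above

# No handlers needed here; records propagate to the root logger's
# journald and/or logfile handlers configured in aif/log.py.
_logger = logging.getLogger('utils:mymodule')
_logger.debug('This lands in /var/log/aif.log (and journald, if available).')
----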
aif/prep.py (new file, +2)
View File

@@ -0,0 +1,2 @@
import os
import aif.utils.file_handler

View File

@@ -1,3 +1,4 @@
+import logging
 import math
 import os
 import pathlib
@@ -7,14 +8,24 @@ import subprocess
 ##
 import psutil
 ##
+from . import parser
 from . import file_handler
 from . import gpg_handler
 from . import hash_handler
+from . import sources


+_logger = logging.getLogger('utils.__init__')


 def checkMounted(devpath):
-    if devpath in [p.device for p in psutil.disk_partitions(all = True)]:
-        raise RuntimeError('{0} is mounted; we are cowardly refusing to destructive operations on it'.format(devpath))
+    for p in psutil.disk_partitions(all = True):
+        if p.device == devpath:
+            _logger.error(('{0} is mounted at {1} but was specified as a target. '
+                           'Cowardly refusing to run potentially destructive operations on it.').format(devpath,
+                                                                                                        p.mountpoint))
+            # TODO: raise only if not dryrun? Raise warning instead if so?
+            raise RuntimeError('Device mounted in live environment')
     return(None)




@@ -104,6 +115,7 @@ def kernelFilesystems():
             FS_FSTYPES.append(l[0])
         else:
             FS_FSTYPES.append(l[1])
+    _logger.debug('Built list of pre-loaded filesystem types: {0}'.format(','.join(FS_FSTYPES)))
    _mod_dir = os.path.join('/lib/modules',
                            os.uname().release,
                            'kernel/fs')
@@ -119,14 +131,23 @@ def kernelFilesystems():
                 fs_name = mod_name.group('fsname')
                 if fs_name:
                     # The kernel *probably* has autoloading enabled, but in case it doesn't...
-                    # TODO: logging!
                     if os.getuid() == 0:
-                        subprocess.run(['modprobe', fs_name])
+                        cmd = subprocess.run(['modprobe', fs_name], stderr = subprocess.PIPE, stdout = subprocess.PIPE)
+                        _logger.debug('Executed: {0}'.format(' '.join(cmd.args)))
+                        if cmd.returncode != 0:
+                            _logger.warning('Command returned non-zero status')
+                            _logger.debug('Exit status: {0}'.format(str(cmd.returncode)))
+                            for a in ('stdout', 'stderr'):
+                                x = getattr(cmd, a)
+                                if x:
+                                    _logger.debug('{0}: {1}'.format(a.upper(), x.decode('utf-8').strip()))
                     FS_FSTYPES.append(fs_name)
     except FileNotFoundError:
         # We're running on a kernel that doesn't have modules
+        _logger.info('Kernel has no modules available')
         pass
     FS_FSTYPES = sorted(list(set(FS_FSTYPES)))
+    _logger.debug('Generated full list of FS_FSTYPES: {0}'.format(','.join(FS_FSTYPES)))
     return(FS_FSTYPES)




@@ -143,16 +164,16 @@ def xmlBool(xmlobj):


 class _Sizer(object):
-    def __init__(self):
-        # We use different methods for converting between storage and BW, and different multipliers for each subtype.
-        # https://stackoverflow.com/a/12912296/733214
-        # https://stackoverflow.com/a/52684562/733214
-        # https://stackoverflow.com/questions/5194057/better-way-to-convert-file-sizes-in-python
-        # https://en.wikipedia.org/wiki/Orders_of_magnitude_(data)
-        # https://en.wikipedia.org/wiki/Binary_prefix
-        # 'decimal' is base-10, 'binary' is base-2. (Duh.)
-        # "b" = bytes, "n" = given value, and "u" = unit suffix's key in below notes.
-        self.storageUnits = {'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
+    # We use different methods for converting between storage and BW, and different multipliers for each subtype.
+    # https://stackoverflow.com/a/12912296/733214
+    # https://stackoverflow.com/a/52684562/733214
+    # https://stackoverflow.com/questions/5194057/better-way-to-convert-file-sizes-in-python
+    # https://en.wikipedia.org/wiki/Orders_of_magnitude_(data)
+    # https://en.wikipedia.org/wiki/Binary_prefix
+    # 'decimal' is base-10, 'binary' is base-2. (Duh.)
+    # "b" = bytes, "n" = given value, and "u" = unit suffix's key in below notes.
+    storageUnits = {
+        'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
             0: (None, 'B', 'byte'),
             3: ('k', 'kB', 'kilobyte'),
             6: ('M', 'MB', 'megabyte'),
@@ -163,19 +184,20 @@ class _Sizer(object):
             18: ('Z', 'ZB', 'zettabyte'),
             19: ('Y', 'YB', 'yottabyte')
             },
         'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
             -1: ('nybble', 'nibble', 'nyble', 'half-byte', 'tetrade', 'nibble'),
             10: ('Ki', 'KiB', 'kibibyte'),
             20: ('Mi', 'MiB', 'mebibyte'),
             30: ('Gi', 'GiB', 'gibibyte'),
             40: ('Ti', 'TiB', 'tebibyte'),
             50: ('Pi', 'PiB', 'pebibyte'),
             60: ('Ei', 'EiB', 'exbibyte'),
             70: ('Zi', 'ZiB', 'zebibyte'),
             80: ('Yi', 'YiB', 'yobibyte')
             }}
     # https://en.wikipedia.org/wiki/Bit#Multiple_bits - note that 8 bits = 1 byte
-        self.bwUnits = {'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
+    bwUnits = {
+        'decimal': { # n * (10 ** u) = b; b / (10 ** u) = u
             0: (None, 'b', 'bit'),
             3: ('k', 'kb', 'kilobit'),
             6: ('M', 'Mb', 'megabit'),
@@ -186,29 +208,32 @@ class _Sizer(object):
             18: ('Z', 'Zb', 'zettabit'),
             19: ('Y', 'Yb', 'yottabit')
             },
         'binary': { # n * (2 ** u) = b; b / (2 ** u) = u
             -1: ('semi-octet', 'quartet', 'quadbit'),
             10: ('Ki', 'Kib', 'kibibit'),
             20: ('Mi', 'Mib', 'mebibit'),
             30: ('Gi', 'Gib', 'gibibit'),
             40: ('Ti', 'Tib', 'tebibit'),
             50: ('Pi', 'Pib', 'pebibit'),
             60: ('Ei', 'Eib', 'exbibit'),
             70: ('Zi', 'Zib', 'zebibit'),
             80: ('Yi', 'Yib', 'yobibit')
             }}
-        self.valid_storage = []
-        for unit_type, convpair in self.storageUnits.items():
-            for f, l in convpair.items():
-                for suffix in l:
-                    if suffix not in self.valid_storage and suffix:
-                        self.valid_storage.append(suffix)
-        self.valid_bw = []
-        for unit_type, convpair in self.bwUnits.items():
-            for f, l in convpair.items():
-                for suffix in l:
-                    if suffix not in self.valid_bw and suffix:
-                        self.valid_bw.append(suffix)
+    valid_storage = []
+    for unit_type, convpair in storageUnits.items():
+        for f, l in convpair.items():
+            for suffix in l:
+                if suffix not in valid_storage and suffix:
+                    valid_storage.append(suffix)
+    valid_bw = []
+    for unit_type, convpair in bwUnits.items():
+        for f, l in convpair.items():
+            for suffix in l:
+                if suffix not in valid_bw and suffix:
+                    valid_bw.append(suffix)
+
+    def __init__(self):
+        pass


     def convert(self, n, suffix):
         conversion = {}
@@ -227,6 +252,7 @@ class _Sizer(object):
         conversion = None
         base_factors = []
         if suffix not in self.valid_bw:
+            _logger.error('Passed an invalid suffix')
             raise ValueError('suffix is not a valid unit notation for this conversion')
         if target and target not in self.valid_bw:
             raise ValueError('target is not a valid unit notation for this conversion')
@@ -310,4 +336,3 @@ def convertSizeUnit(pos):
     else:
         raise ValueError('Invalid size specified: {0}'.format(orig_pos))
     return((from_beginning, _size, amt_type))
-

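Since the unit tables moved from instance attributes to class attributes, they are built once at import time and shared by every instance. A quick sketch of what that buys (assuming _Sizer stays importable from aif.utils):

[source,python]
----
from aif.utils import _Sizer

# Class-level lookups no longer require an instance:
print('GiB' in _Sizer.valid_storage)  # True
s = _Sizer()  # __init__ is now effectively a no-op
print(s.storageUnits is _Sizer.storageUnits)  # True -- shared, not per-instance
----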
View File

@@ -9,7 +9,7 @@ class File(object):
         self.path_rel = pathlib.PurePosixPath(self.orig_path)
         self.path_full = pathlib.PurePosixPath(self.fullpath)

-    def __str(self):
+    def __str__(self):
         return(self.fullpath)





View File

@@ -8,28 +8,47 @@ import gpg
 import gpg.errors


+class KeyEditor(object):
+    def __init__(self):
+        self.trusted = False
+
+    def truster(self, kw, arg, *args, **kwargs):
+        if kw == 'GET_LINE':
+            if arg == 'keyedit.prompt':
+                if not self.trusted:
+                    return('trust')
+                else:
+                    return('save')
+            elif arg == 'edit_ownertrust.value' and not self.trusted:
+                self.trusted = True
+                return('4')  # "Full"
+            else:
+                return('save')
+        return(None)
+
+
 class GPG(object):
-    def __init__(self, homedir = None, primary_key = None):
+    def __init__(self, homedir = None, primary_key = None, *args, **kwargs):
         self.homedir = homedir
         self.primary_key = primary_key
         self.temporary = None
-        self.gpg = None
+        self.ctx = None
         self._imported_keys = []
+        self._initContext()

     def _initContext(self):
         if not self.homedir:
             self.homedir = tempfile.mkdtemp(suffix = '.gpg', prefix = '.aif.')
+        self.homedir = os.path.abspath(os.path.expanduser(self.homedir))
+        self.temporary = False
+        if not os.path.isdir(self.homedir):
             self.temporary = True
-        self.homedir = os.path.abspath(os.path.expanduser(self.homedir))
-        if not os.path.isdir(self.homedir):
             os.makedirs(self.homedir, exist_ok = True)
         os.chmod(self.homedir, 0o0700)
-        self.gpg = gpg.Context(home_dir = self.homedir)
+        self.ctx = gpg.Context(home_dir = self.homedir)
         if self.temporary:
-            self.primary_key = self.createKey('AIF-NG File Verification Key', sign = True, force = True)
-        else:
-            self.primary_key = self.getKey(self.primary_key, secret = True)
+            self.primary_key = self.createKey('AIF-NG File Verification Key', sign = True, force = True).fpr
+        self.primary_key = self.findKeyByID(self.primary_key, source = 'secret')
+        self.ctx.signers = [self.primary_key]
         return(None)

     def clean(self):
@@ -38,7 +57,7 @@ class GPG(object):
         if self.temporary:
             self.primary_key = None
             shutil.rmtree(self.homedir)
-        self.gpg = None
+        self.ctx = None
         return(None)

     def createKey(self, userid, *args, **kwargs):
@@ -56,11 +75,11 @@ class GPG(object):
         if not keyinfo['expires_in']:
             del(keyinfo['expires_in'])
             keyinfo['expires'] = False
-        k = self.gpg.create_key(**keyinfo)
-        return(k.fpr)
+        k = self.ctx.create_key(**keyinfo)
+        return(k)

     def findKey(self, searchstr, secret = False, local = True, remote = True,
-                secret_only = False, keyring_import = False):
+                secret_only = False, keyring_import = False, *args, **kwargs):
         fltr = 0
         if secret:
             fltr = fltr | gpg.constants.KEYLIST_MODE_WITH_SECRET
@@ -68,83 +87,137 @@ class GPG(object):
             fltr = fltr | gpg.constants.KEYLIST_MODE_LOCAL
         if remote:
             fltr = fltr | gpg.constants.KEYLIST_MODE_EXTERN
-        keys = [k for k in self.gpg.keylist(pattern = searchstr, secret = secret_only, mode = fltr)]
+        keys = [k for k in self.ctx.keylist(pattern = searchstr, secret = secret_only, mode = fltr)]
         if keyring_import:
             self.importKeys(keys, native = True)
         return(keys)

-    def getKey(self, key_id, secret = False, strict = False):
+    def findKeyByID(self, key_id, source = 'remote', keyring_import = False, *args, **kwargs):
+        # So .get_key() CAN get a remote key from a keyserver... but you can't have ANY other keylist modes defined.
+        # Ugh.
+        sources = {'remote': gpg.constants.KEYLIST_MODE_EXTERN,
+                   'local': gpg.constants.KEYLIST_MODE_LOCAL,
+                   'secret': gpg.constants.KEYLIST_MODE_WITH_SECRET}
+        if source not in sources.keys():
+            raise ValueError('source parameter must be one (and only one) of: {0}'.format(sources.keys()))
+        orig_mode = self.ctx.get_keylist_mode()
+        self.ctx.set_keylist_mode(sources[source])
+        try:
+            key = self.ctx.get_key(key_id, secret = (True if source == 'secret' else False))
+        except gpg.errors.KeyNotFound:
+            key = None
+        self.ctx.set_keylist_mode(orig_mode)
+        if keyring_import and key:
+            self.importKeys(key, native = True)
+        return(key)
+
+    def getKey(self, key_id, secret = False, strict = False, *args, **kwargs):
         try:
             getattr(key_id, 'fpr')
             return(key_id)
         except AttributeError:
             if not strict:
-                self.findKey(key_id, keyring_import = True)
+                self.findKeyByID(key_id, keyring_import = True, **kwargs)
             try:
-                key = self.gpg.get_key(key_id, secret = secret)
+                key = self.ctx.get_key(key_id, secret = secret)
             except gpg.errors.KeyNotFound:
                 key = None
             return(key)
         return(None)

-    def getKeyFile(self, keyfile, keyring_import = False):
+    def getKeyFile(self, keyfile, keyring_import = False, *args, **kwargs):
         keyfile = os.path.abspath(os.path.expanduser(keyfile))
         with open(keyfile, 'rb') as fh:
             rawkey_data = fh.read()
             fh.seek(0, 0)
-            keys = [k for k in self.gpg.keylist(source = fh)]
+            keys = [k for k in self.ctx.keylist(source = fh)]
         if keyring_import:
             self.importKeys(keys, native = True)
         return((keys, rawkey_data))

-    def getKeyStr(self, keydata, keyring_import = False):
+    def getKeyData(self, keydata, keyring_import = False, *args, **kwargs):
         orig_keydata = keydata
         if isinstance(keydata, str):
             keydata = keydata.encode('utf-8')
         buf = io.BytesIO(keydata)
-        keys = [k for k in self.gpg.keylist(source = buf)]
+        keys = [k for k in self.ctx.keylist(source = buf)]
         buf.close()
         if keyring_import:
             self.importKeys(keys, native = True)
         return((keys, orig_keydata))

-    def importKeys(self, keydata, native = False):
-        if not native:
-            self.gpg.key_import(keydata)
-        else:
+    def importKeys(self, keydata, native = False, local = True, remote = True, *args, **kwargs):
+        fltr = 0
+        orig_km = None
+        keys = []
+        if local:
+            fltr = fltr | gpg.constants.KEYLIST_MODE_LOCAL
+        if remote:
+            fltr = fltr | gpg.constants.KEYLIST_MODE_EXTERN
+        if self.ctx.get_keylist_mode() != fltr:
+            orig_km = self.ctx.get_keylist_mode()
+            self.ctx.set_keylist_mode(fltr)
+        if not native:  # It's raw key data (.gpg, .asc, etc.).
+            formatted_keys = b''
+            if isinstance(keydata, str):
+                formatted_keys += keydata.encode('utf-8')
+            elif isinstance(keydata, list):
+                for k in keydata:
+                    if isinstance(k, str):
+                        formatted_keys += k.encode('utf-8')
+                    else:
+                        formatted_keys += k
+            else:
+                formatted_keys += keydata
+            for rslt in self.ctx.key_import(formatted_keys).imports:
+                keys.append(self.ctx.get_key(rslt.fpr))
+        else:  # It's a native Key() object (or a list of them).
             if not isinstance(keydata, list):
                 keydata = [keydata]
-            self.gpg.op_import_keys(keydata)
+            keys = keydata
+            self.ctx.op_import_keys(keydata)
+        if orig_km:
+            self.ctx.set_keylist_mode(orig_km)
+        for k in keys:
+            self.ctx.key_sign(k, local = True)
+            trusteditor = KeyEditor()
+            self.ctx.interact(k, trusteditor.truster)
         return(None)

     def verifyData(self, data, keys = None, strict = False, detached = None, *args, **kwargs):
         results = {}
         if keys:
             if not isinstance(keys, list):
-                keys = [self.getKey(keys)]
+                keys = [self.getKey(keys, source = 'local')]
             else:
-                keys = [self.getKey(k) for k in keys]
+                keys = [self.getKey(k, source = 'local') for k in keys]
         if isinstance(data, str):
             data = data.encode('utf-8')
-        args = {'signed_data': data}
+        fnargs = {'signed_data': data}
         if detached:
             if isinstance(detached, str):
                 detached = detached.encode('utf-8')
-            args['signature'] = detached
+            if not isinstance(detached, bytes) and not hasattr(detached, 'read'):
+                raise TypeError('detached must be bytes or a file-like object (make sure the position is correct!)')
+            fnargs['signature'] = detached
         if strict:
-            if keys:
-                if not isinstance(keys, list):
-                    keys = [keys]
-                args['verify'] = keys
-            results[None] = self.gpg.verify(**args)
+            fnargs['verify'] = keys
+            results[None] = self.ctx.verify(**fnargs)
         else:
             if keys:
                 for k in keys:
-                    _args = copy.deepcopy(args)
-                    _args['verify'] = [k]
-                    results[k.fpr] = self.gpg.verify(**_args)
+                    _fnargs = copy.deepcopy(fnargs)
+                    _fnargs['verify'] = [k]
+                    try:
+                        print(self.ctx.get_keylist_mode())
+                        sigchk = self.ctx.verify(**_fnargs)
+                        results[k.fpr] = (True, sigchk[1].results, None)
+                    except gpg.errors.MissingSignatures as e:
+                        results[k.fpr] = (False, e.results, 'Missing Signature')
+                    except gpg.errors.BadSignatures as e:
+                        results[k.fpr] = (False, e.results, 'Bad Signature')
             else:
-                results[None] = self.gpg.verify(**args)
+                results[None] = self.ctx.verify(**fnargs)
         return(results)

     def verifyFile(self, filepath, *args, **kwargs):

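For reference while the verifyData behavior is being debugged, here is a minimal sketch of how the reworked handler is driven (the key ID and file names are hypothetical):

[source,python]
----
from aif.utils import gpg_handler

g = gpg_handler.GPG()  # no homedir given: a temporary keyring + signing key are created
# Fetch a key from a keyserver and import/sign/trust it locally:
key = g.findKeyByID('7F2D434B9741E8AC', source = 'remote', keyring_import = True)
with open('bootstrap.tar.gz', 'rb') as f, open('bootstrap.tar.gz.sig', 'rb') as s:
    results = g.verifyData(f.read(), keys = [key], detached = s.read())
# Non-strict mode maps each key fingerprint to (ok, results, error_or_None).
g.clean()  # removes the temporary homedir
----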
View File

@@ -1,24 +1,28 @@
+import copy
 import hashlib
+import os
 import pathlib
 import zlib
 ##
 import aif.constants_fallback
-from . import file_handler




 class Hash(object):
-    def __init__(self, file_path):
+    def __init__(self, hash_algos = None, *args, **kwargs):
         self.hashers = None
+        self.valid_hashtypes = list(aif.constants_fallback.HASH_SUPPORTED_TYPES)
+        self.hash_algos = hash_algos
+        self.configure()

-    def configure(self, hashalgo = None):
+    def configure(self, *args, **kwargs):
         self.hashers = {}
-        if hashalgo:
-            if not isinstance(hashalgo, list):
-                hashalgo = [hashalgo]
+        if self.hash_algos:
+            if not isinstance(self.hash_algos, list):
+                self.hash_algos = [self.hash_algos]
         else:
-            hashalgo = list(aif.constants_fallback.HASH_SUPPORTED_TYPES)
-        for h in hashalgo:
-            if h not in aif.constants_fallback.HASH_SUPPORTED_TYPES:
+            self.hash_algos = copy.deepcopy(self.valid_hashtypes)
+        for h in self.hash_algos:
+            if h not in self.valid_hashtypes:
                 raise ValueError('Hash algorithm not supported')
             if h not in aif.constants_fallback.HASH_EXTRA_SUPPORTED_TYPES:
                 hasher = hashlib.new(h)
@@ -27,22 +31,44 @@ class Hash(object):
             self.hashers[h] = hasher
         return()

-    def hashData(self, data):
+    def hashData(self, data, *args, **kwargs):
         results = {}
-        if not self.hashers:
+        if not self.hashers or not self.hash_algos:
             self.configure()
         for hashtype, hasher in self.hashers.items():
             if hashtype in aif.constants_fallback.HASH_EXTRA_SUPPORTED_TYPES:
                 results[hashtype] = hasher(data)
             else:
-                rslt = hasher.update(data)
-                results[hashtype] = rslt.hexdigest()
+                hasher.update(data)
+                results[hashtype] = hasher.hexdigest()
         return(results)

-    def hashFile(self, file_path):
-        if not isinstance(file_path, (str, file_handler.File, pathlib.Path, pathlib.PurePath)):
+    def hashFile(self, file_path, *args, **kwargs):
+        if not isinstance(file_path, (str, pathlib.Path, pathlib.PurePath)):
             raise ValueError('file_path must be a path expression')
         file_path = str(file_path)
         with open(file_path, 'rb') as fh:
             results = self.hashData(fh.read())
         return(results)

+    def verifyData(self, data, checksum, checksum_type, *args, **kwargs):
+        if isinstance(data, str):
+            data = data.encode('utf-8')
+        if not isinstance(checksum, str):
+            checksum = checksum.decode('utf-8')
+        if checksum_type not in self.hash_algos:
+            raise ValueError('Hash algorithm not supported; try reconfiguring')
+        self.configure()
+        cksum = self.hashData(data)
+        cksum_htype = cksum[checksum_type]
+        if cksum == checksum:
+            result = True
+        else:
+            result = False
+        return(result)
+
+    def verifyFile(self, filepath, checksum, checksum_type, *args, **kwargs):
+        filepath = os.path.abspath(os.path.expanduser(filepath))
+        with open(filepath, 'rb') as fh:
+            result = self.verifyData(fh.read(), checksum, checksum_type, **kwargs)
+        return(result)

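A quick sketch of how the reworked Hash class gets used (the tarball path reuses the one from the example config further down; any supported algorithm names work):

[source,python]
----
from aif.utils import hash_handler

# Only build hashers for the algorithms the config actually asks for.
h = hash_handler.Hash(hash_algos = ['md5', 'sha1'])
checksums = h.hashFile('/tmp/archlinux-bootstrap-2019.12.01-x86_64.tar.gz')
# e.g. {'md5': '...', 'sha1': '...'}
----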
aif/utils/parser.py (new file, +29)
View File

@@ -0,0 +1,29 @@
import logging
import re


_logger = logging.getLogger('utils:{0}'.format(__name__))


_uri_re = re.compile((r'^(?P<scheme>[\w]+)://'
                      r'(?:(?P<user>[^:@]+)(?::(?P<password>[^@]+)?)?@)?'
                      r'(?P<base>[^/:]+)?'
                      r'(?::(?P<port>[0-9]+))?'
                      r'(?P<path>/.*)$'),
                     re.IGNORECASE)


class URI(object):
    def __init__(self, uri):
        self.orig_uri = uri
        r = _uri_re.search(self.orig_uri)
        if not r:
            raise ValueError('Not a valid URI')
        for k, v in dict(zip(list(_uri_re.groupindex.keys()), r.groups())).items():
            setattr(self, k, v)
        if self.port:
            self.port = int(self.port)
        for a in ('base', 'scheme'):
            v = getattr(self, a)
            if v:
                setattr(self, a, v.lower())

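A quick sketch of what the new URI helper extracts (the URI itself is made up):

[source,python]
----
from aif.utils import parser

u = parser.URI('ftps://installuser:s3kr1t@mirror.example.com:990/pub/bootstrap.tar.gz')
print(u.scheme, u.base, u.port)  # -> ftps mirror.example.com 990
print(u.user, u.password)        # -> installuser s3kr1t
print(u.path)                    # -> /pub/bootstrap.tar.gz
----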
aif/utils/sources.py (new file, +258)
View File

@@ -0,0 +1,258 @@
import ftplib
import io
import pathlib
import re
##
import requests
import requests.auth
##
import aif.constants_fallback
from . import gpg_handler
from . import hash_handler
from . import parser


class ChecksumFile(object):
    _bsd_re = re.compile(r'^(?P<fname>\(.*\))\s+=\s+(?P<cksum>.*)$')

    def __init__(self, checksum_xml, filetype):
        self.xml = checksum_xml
        self.uri = self.xml.text.strip()
        self.filetype = filetype
        self.hashes = None
        downloader = getDLHandler(self.uri)  # Recursive objects for the win?
        dl = downloader(self.xml)
        dl.get()
        self.data = dl.data.read()
        dl.data.seek(0, 0)
        self._convert()

    def _convert(self):
        data = self.data
        if not isinstance(data, str):
            data = data.decode('utf-8')
        data.strip()
        self.hashes = {}
        if self.filetype not in ('gnu', 'bsd'):
            raise ValueError('filetype attribute must be either "gnu" or "bsd"')
        for line in data.splitlines():
            if self.filetype == 'gnu':
                hashtype = None  # GNU style splits their hash types into separate files by default.
                h, fname = line.split(None, 1)
            elif self.filetype == 'bsd':
                l = line.split(None, 1)
                hashtype = l.pop(0).lower()
                r = self._bsd_re.search(l[0])
                h = r.group('cksum')
                fname = r.group('fname')
            if hashtype not in self.hashes:
                self.hashes[hashtype] = {}
            self.hashes[hashtype][fname] = h
        return(None)


class Downloader(object):
    def __init__(self, netresource_xml, *args, **kwargs):
        self.xml = netresource_xml
        self.uri = parser.URI(self.xml.text.strip())
        self.user = self.xml.attrib.get('user')
        if not self.user and self.uri.user:
            self.user = self.uri.user
        self.password = self.xml.attrib.get('password')
        if not self.password and self.uri.password:
            self.password = self.uri.password
        self.real_uri = ('{0}://'
                         '{1}'
                         '{2}'
                         '{3}').format(self.uri.scheme,
                                       (self.uri.base if self.uri.base else ''),
                                       (':{0}'.format(self.uri.port) if self.uri.port else ''),
                                       self.uri.path)
        self.gpg = None
        self.checksum = None
        self.data = io.BytesIO()

    def get(self):
        pass  # Dummy method.
        return(None)

    def parseGpgVerify(self, results):
        pass

    def verify(self, verify_xml, *args, **kwargs):
        gpg_xml = verify_xml.find('gpg')
        hash_xml = verify_xml.find('hash')
        results = {}
        if gpg_xml is not None:
            results['gpg'] = self.verifyGPG(gpg_xml)
        if hash_xml is not None:
            results['hash'] = self.verifyHash(hash_xml)
        return(results)

    def verifyGPG(self, gpg_xml, *args, **kwargs):
        results = {}
        # We don't allow custom GPG homedirs since this is probably running from a LiveCD/USB/whatever anyways.
        # This means we can *always* instantiate the GPG handler from scratch.
        self.gpg = gpg_handler.GPG()
        keys_xml = gpg_xml.find('keys')
        sigs_xml = gpg_xml.find('sigs')
        fnargs = {'keyring_import': True}
        fnargs.update(kwargs)
        if keys_xml is not None:
            fnargs['keys'] = []
            for key_id_xml in keys_xml.findall('keyID'):
                if key_id_xml.text == 'auto':
                    k = self.gpg.findKeyByID(aif.constants_fallback.ARCH_RELENG_KEY, **fnargs)
                elif key_id_xml.text == 'detect':
                    fnargs['strict'] = False
                    continue
                else:
                    k = self.gpg.findKeyByID(key_id_xml.text.strip(), **fnargs)
                fnargs['keys'].append(k)
            for key_file_xml in keys_xml.findall('keyFile'):
                downloader = getDLHandler(key_file_xml.text.strip())  # Recursive objects for the win?
                dl = downloader(key_file_xml)
                dl.get()
                k = self.gpg.getKeyData(dl.data.read(), **fnargs)[0]
                dl.data.seek(0, 0)
                fnargs['keys'].extend(k)
        if sigs_xml is not None:
            for sig_text_xml in sigs_xml.findall('signature'):
                sig = sig_text_xml.text.strip()
                sigchk = self.gpg.verifyData(self.data.read(), detached = sig, **fnargs)
                self.data.seek(0, 0)
                results.update(sigchk)
            for sig_file_xml in sigs_xml.findall('signatureFile'):
                downloader = getDLHandler(sig_file_xml.text.strip())
                dl = downloader(sig_file_xml)
                dl.get()
                sigchk = self.gpg.verifyData(self.data.read(), detached = dl.data.read(), **fnargs)
                dl.data.seek(0, 0)
                self.data.seek(0, 0)
                results.update(sigchk)
        self.gpg.clean()
        return(results)

    def verifyHash(self, hash_xml, *args, **kwargs):
        results = []
        algos = [str(ht) for ht in hash_xml.xpath('//checksum/@hashType|//checksumFile/@hashType')]
        self.checksum = hash_handler.Hash(hash_algos = algos)
        self.checksum.configure()
        checksum_xml = hash_xml.findall('checksum')
        checksum_file_xml = hash_xml.findall('checksumFile')
        checksums = self.checksum.hashData(self.data.read())
        self.data.seek(0, 0)
        if checksum_file_xml is not None:
            for cksum_xml in checksum_file_xml:
                htype = cksum_xml.attrib['hashType'].strip().lower()
                ftype = cksum_xml.attrib['fileType'].strip().lower()
                fname = cksum_xml.attrib.get('filePath',
                                             pathlib.PurePath(self.uri.path).name)
                cksum_file = ChecksumFile(cksum_xml, ftype)
                if ftype == 'gnu':
                    cksum = cksum_file.hashes[None][fname]
                elif ftype == 'bsd':
                    cksum = cksum_file.hashes[htype][fname]
                result = (cksum == checksums[htype])
                results.append(result)
        if checksum_xml is not None:
            for cksum_xml in checksum_xml:
                # Thankfully, this is a LOT easier.
                htype = cksum_xml.attrib['hashType'].strip().lower()
                result = (cksum_xml.text.strip().lower() == checksums[htype])
                results.append(result)
        result = all(results)
        return(result)


class FSDownloader(Downloader):
    def __init__(self, netresource_xml, *args, **kwargs):
        super().__init__(netresource_xml, *args, **kwargs)
        delattr(self, 'user')
        delattr(self, 'password')

    def get(self):
        self.data.seek(0, 0)
        with open(self.uri.path, 'rb') as fh:
            self.data.write(fh.read())
        self.data.seek(0, 0)
        return(None)


class FTPDownloader(Downloader):
    def __init__(self, netresource_xml, *args, **kwargs):
        super().__init__(netresource_xml, *args, **kwargs)
        if not self.user:
            self.user = ''
        if not self.password:
            self.password = ''
        self.port = (self.uri.port if self.uri.port else 0)
        self._conn = None

    def _connect(self):
        self._conn = ftplib.FTP()
        self._conn.connect(host = self.uri.base, port = self.port)
        self._conn.login(user = self.user, passwd = self.password)
        return(None)

    def get(self):
        self._connect()
        self.data.seek(0, 0)
        self._conn.retrbinary('RETR {0}'.format(self.uri.path), self.data.write)
        self.data.seek(0, 0)
        self._close()
        return(None)

    def _close(self):
        self._conn.quit()
        return(None)


class FTPSDownloader(FTPDownloader):
    def __init__(self, netresource_xml, *args, **kwargs):
        super().__init__(netresource_xml, *args, **kwargs)

    def _connect(self):
        self._conn = ftplib.FTP_TLS()
        self._conn.connect(host = self.uri.base, port = self.port)
        self._conn.login(user = self.user, passwd = self.password)
        self._conn.prot_p()
        return(None)


class HTTPDownloader(Downloader):
    def __init__(self, netresource_xml, *args, **kwargs):
        super().__init__(netresource_xml, *args, **kwargs)
        self.auth = self.xml.attrib.get('authType', 'none').lower()
        if self.auth == 'none':
            self.auth = None
            self.realm = None
            self.user = None
            self.password = None
        else:
            if self.auth == 'basic':
                self.auth = requests.auth.HTTPBasicAuth(self.user, self.password)
            elif self.auth == 'digest':
                self.auth = requests.auth.HTTPDigestAuth(self.user, self.password)

    def get(self):
        self.data.seek(0, 0)
        req = requests.get(self.real_uri, auth = self.auth)
        self.data.write(req.content)
        self.data.seek(0, 0)
        return(None)


def getDLHandler(uri):
    uri = uri.strip()
    if re.search(r'^file://', uri, re.IGNORECASE):
        return(FSDownloader)
    elif re.search(r'^https?://', uri, re.IGNORECASE):
        return(HTTPDownloader)
    elif re.search(r'^ftp://', uri, re.IGNORECASE):
        return(FTPDownloader)
    elif re.search(r'^ftps://', uri, re.IGNORECASE):
        return(FTPSDownloader)
    else:
        raise RuntimeError('Could not detect which download handler to use')
    return(None)

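A sketch of how these pieces chain together; the element is built by hand here to stand in for a parsed config node, and the tarball path is the one used in the example config below:

[source,python]
----
from lxml import etree
from aif.utils import sources

xml = etree.fromstring('<tarball>file:///tmp/archlinux-bootstrap-2019.12.01-x86_64.tar.gz</tarball>')
handler = sources.getDLHandler(xml.text)  # picks FSDownloader for file:// URIs
dl = handler(xml)
dl.get()
payload = dl.data.read()  # the fetched bytes, ready for verify()/hash checks
----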
View File

@@ -129,7 +129,6 @@ Configure your bootloader to add the following options as necessary:
 ^m|aif_auth     |(see <<aif_url, below>>)
 ^m|aif_username |(see <<aif_url, below>>)
 ^m|aif_password |(see <<aif_url, below>>)
-^m|aif_realm    |(see <<aif_url, below>>)
 |======================


[[aif_url]] [[aif_url]]
@@ -143,7 +142,6 @@ Configure your bootloader to add the following options as necessary:
 * If `aif_url` is an HTTP/HTTPS URL, then `aif_user` is the username to use with the https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#4xx_Client_errors[401^] (https://tools.ietf.org/html/rfc7235[RFC 7235^]) auth (via `aif_auth`).
 ** If `aif_url` is an FTP/FTPS URI, then `aif_user` will be the FTP user.
 ** The same behavior applies for `aif_password`.
-* If `aif_auth` is `digest`, this is the realm we would use (we attempt to "guess" if it isn't specified); otherwise it is ignored.


 == Building a compatible LiveCD
 The default Arch install CD does not have AIF installed (hopefully, this will change someday). You have two options for using AIF-NG.
@@ -537,7 +535,6 @@ The `/aif/scripts/script` elements specify scripts to be run at different stages:
 ^m|authtype  |Same behavior as <<starting_an_install, `aif_auth`>> but for fetching this script (see also <<aif_url, further notes>> on this)
 ^m|user      |Same behavior as <<starting_an_install, `aif_user`>> but for fetching this script (see also <<aif_url, further notes>> on this)
 ^m|password  |Same behavior as <<starting_an_install, `aif_password`>> but for fetching this script (see also <<aif_url, further notes>> on this)
-^m|realm     |Same behavior as <<starting_an_install, `aif_realm`>> but for fetching this script (see also <<aif_url, further notes>> on this)
 ^m|execution |(see <<script_types, below>>)
 |======================


@@ -556,6 +553,67 @@ Here you will find further info and other resources relating to AIF-NG.


 == FAQ


=== "How do I make AIF-NG operate entirely offline?"

This is cooked right in, but takes a little extra work.

1.) First you'll need to locally clone the supporting XSD (XML schemas) that AIF-NG uses to verify the configuration file:

`/var/tmp/aif/xml`
[source,bash]
----
mkdir -p /var/tmp/aif
cd /var/tmp/aif
git clone https://git.square-r00t.net/XML xml
----

The path you clone it to isn't important as long as you're consistent below.

2.) Then edit your AIF-NG configuration file to source this directory for XML verification:

`aif.xml` (before)
[source,xml]
----
xsi:schemaLocation="https://aif-ng.io/ http://schema.xml.r00t2.io/projects/aif.xsd"
----

`aif.xml` (after)
[source,xml]
----
xsi:schemaLocation="https://aif-ng.io/ file:///var/tmp/aif/xml/schema/projects/aif.xsd"
----

The XSD files use includes with relative paths, so the rest of that is automagic.

3.) Use local file:// URIs in the rest of your AIF-NG configuration file.
e.g.:

[source,xml]
----
<tarball>file:///var/tmp/aif/bootstrap.tar.gz</tarball>
----

and

[source,xml]
----
<signatureFile>file:///var/tmp/aif/bootstrap.tar.gz.sig</signatureFile>
----

etc.

Obviously you need to *download* those files to their respective destinations first, however.

4.) Lastly, ensure you only use local pacman mirrors in your config. This gets tricky because the chroot will not have a way to access the hosts filesystem without creating e.g. a bind mount beforehand.

As long as:

* No remote locations are specified in your AIF-NG configuration file...
* *and it is completely and well defined*...
* and your scripts don't make remote calls,

then it shouldn't try to perform any remote operations.

=== "I specified start sector as 0 for a GPT-labeled disk but it starts at sector 2048 instead. What gives?" === "I specified start sector as 0 for a GPT-labeled disk but it starts at sector 2048 instead. What gives?"
GPT requires 33 sectors for the table at the beginning (and 32 sectors at the end) for the actual table. That plus an extra (usually) 512 bytes at the beginning for something called a https://en.wikipedia.org/wiki/GUID_Partition_Table#Protective_MBR_(LBA_0)[Protective MBR^] (this prevents disk utilities from overwriting the GPT label automatically in case they only recognize "msdos" labels and assume the disk is not formatted yet). GPT requires 33 sectors for the table at the beginning (and 32 sectors at the end) for the actual table. That plus an extra (usually) 512 bytes at the beginning for something called a https://en.wikipedia.org/wiki/GUID_Partition_Table#Protective_MBR_(LBA_0)[Protective MBR^] (this prevents disk utilities from overwriting the GPT label automatically in case they only recognize "msdos" labels and assume the disk is not formatted yet).


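As a concrete sketch of the bind-mount approach mentioned in step 4 of the offline FAQ above (the mirror path and mountpoint are hypothetical; `/mnt/aif` matches the `chrootPath` in the example config below):

[source,bash]
----
# Expose a local package mirror inside the chroot before the install runs.
mkdir -p /mnt/aif/mnt/mirror
mount --bind /var/tmp/aif/mirror /mnt/aif/mnt/mirror

# Inside the chroot, pacman can then use it as a file:// mirror, e.g.:
#   Server = file:///mnt/mirror/$repo/os/$arch
----

Remember to unmount it (`umount /mnt/aif/mnt/mirror`) before rebooting into the installed system.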

View File

@@ -5,12 +5,20 @@
            chrootPath="/mnt/aif"
            reboot="false">
     <bootstrap>
-        <tarball>https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz</tarball>
+        <!-- <tarball>-->
+        <!--     https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz-->
+        <!-- </tarball>-->
+        <tarball>
+            file:///tmp/archlinux-bootstrap-2019.12.01-x86_64.tar.gz
+        </tarball>
         <verify>
             <gpg>
                 <sigs>
+                    <!-- <signatureFile>-->
+                    <!--     https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig-->
+                    <!-- </signatureFile>-->
                     <signatureFile>
-                        https://arch.mirror.square-r00t.net/iso/latest/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig
+                        file:///tmp/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig
                     </signatureFile>
                 </sigs>
                 <keys>
@@ -18,8 +26,12 @@
                 </keys>
             </gpg>
             <hash>
-                <checksumFile hashType="md5">http://arch.mirror.square-r00t.net/iso/latest/md5sums.txt</checksumFile>
-                <checksumFile hashType="sha1">http://arch.mirror.square-r00t.net/iso/latest/sha1sums.txt</checksumFile>
+                <checksumFile hashType="md5" fileType="gnu">
+                    http://arch.mirror.square-r00t.net/iso/latest/md5sums.txt
+                </checksumFile>
+                <checksumFile hashType="sha1" fileType="gnu">
+                    http://arch.mirror.square-r00t.net/iso/latest/sha1sums.txt
+                </checksumFile>
             </hash>
         </verify>
     </bootstrap>