fixed the gpg thing. WHEW. what a PITA.
also fleshed out some logging.
parent: f25e6bee2a
commit: b4c9caefbd

.gitignore (vendored):
@@ -20,3 +20,4 @@
 .idea/
 __pycache__/
 test.py
+test*.py
@@ -252,9 +252,10 @@ class _Sizer(object):
         conversion = None
         base_factors = []
         if suffix not in self.valid_bw:
-            _logger.error('Passed an invalid suffix')
+            _logger.error('Suffix {0} is invalid; must be one of {1}'.format(suffix, ','.join(self.valid_bw)))
             raise ValueError('suffix is not a valid unit notation for this conversion')
         if target and target not in self.valid_bw:
+            _logger.error('Target {0} is invalid; must be one of {1}'.format(target, ','.join(self.valid_bw)))
             raise ValueError('target is not a valid unit notation for this conversion')
         for (_unit_type, _base) in (('decimal', 10), ('binary', 2)):
             if target and base_factors:
@@ -282,8 +283,10 @@ class _Sizer(object):
         conversion = None
         base_factors = []
         if suffix not in self.valid_storage:
+            _logger.error('Suffix {0} is invalid; must be one of {1}'.format(suffix, ','.join(self.valid_storage)))
             raise ValueError('suffix is not a valid unit notation for this conversion')
         if target and target not in self.valid_storage:
+            _logger.error('Target {0} is invalid; must be one of {1}'.format(target, ','.join(self.valid_storage)))
             raise ValueError('target is not a valid unit notation for this conversion')
         for (_unit_type, _base) in (('decimal', 10), ('binary', 2)):
             if target and base_factors:
@@ -334,5 +337,6 @@ def convertSizeUnit(pos):
        _size = int(pos.group('size'))
        amt_type = pos.group('pct_unit_or_sct').strip()
    else:
-        raise ValueError('Invalid size specified: {0}'.format(orig_pos))
+        _logger.error('Size {0} is invalid; did not match {1}'.format(orig_pos, _pos_re.pattern))
+        raise ValueError('Invalid size specified')
    return((from_beginning, _size, amt_type))
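As a quick illustration of the tuple convertSizeUnit() hands back — a hedged sketch, since the exact input grammar lives in _pos_re, which this diff doesn't show, so the sample string is an assumption:

    # Hypothetical input; assumes _pos_re accepts a spec like '+400MiB'.
    from_beginning, size, unit = convertSizeUnit('+400MiB')
    # from_beginning -> True, size -> 400, unit -> 'MiB'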
@@ -1,5 +1,6 @@
 import copy
 import io
+import logging
 import os
 import shutil
 import tempfile
@@ -8,21 +9,37 @@ import gpg
 import gpg.errors


+_logger = logging.getLogger(__name__)
+
+
 class KeyEditor(object):
     def __init__(self):
         self.trusted = False
+        _logger.info('Key editor instantiated.')

     def truster(self, kw, arg, *args, **kwargs):
+        _logger.debug('Key trust editor invoked:')
+        _logger.debug('Command: {0}'.format(kw))
+        _logger.debug('Argument: {0}'.format(arg))
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         if kw == 'GET_LINE':
             if arg == 'keyedit.prompt':
                 if not self.trusted:
+                    _logger.debug('Returning: "trust"')
                     return('trust')
                 else:
+                    _logger.debug('Returning: "save"')
                     return('save')
             elif arg == 'edit_ownertrust.value' and not self.trusted:
                 self.trusted = True
+                _logger.debug('Status changed to trusted')
+                _logger.debug('Returning: "4"')
                 return('4') # "Full"
             else:
+                _logger.debug('Returning: "save"')
                 return('save')
         return(None)

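For context, truster() is a callback for gpgme's interactive key-edit protocol: gpgme feeds it each status keyword and prompt, and the returned string is used as the "typed" answer. A minimal usage sketch (the homedir and key ID are hypothetical; ctx/key objects as used by the GPG class below):

    ctx = gpg.Context(home_dir = '/path/to/homedir')  # hypothetical homedir
    key = ctx.get_key('0x1234ABCD...')                # hypothetical key ID
    editor = KeyEditor()
    # Walks the key-edit prompts: 'trust' -> '4' (Full) -> 'save'.
    ctx.interact(key, editor.truster)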
@@ -34,29 +51,53 @@ class GPG(object):
         self.temporary = None
         self.ctx = None
         self._imported_keys = []
+        _logger.debug('Homedir: {0}'.format(self.homedir))
+        _logger.debug('Primary key: {0}'.format(self.primary_key))
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
+        _logger.info('Instantiated GPG class.')
         self._initContext()

     def _initContext(self):
         if not self.homedir:
-            self.homedir = tempfile.mkdtemp(suffix = '.gpg', prefix = '.aif.')
+            self.homedir = tempfile.mkdtemp(prefix = '.aif.', suffix = '.gpg')
             self.temporary = True
+            _logger.debug('Set as temporary homedir.')
         self.homedir = os.path.abspath(os.path.expanduser(self.homedir))
+        _logger.debug('Homedir finalized: {0}'.format(self.homedir))
         if not os.path.isdir(self.homedir):
             os.makedirs(self.homedir, exist_ok = True)
             os.chmod(self.homedir, 0o0700)
+            _logger.info('Created {0}'.format(self.homedir))
         self.ctx = gpg.Context(home_dir = self.homedir)
         if self.temporary:
-            self.primary_key = self.createKey('AIF-NG File Verification Key', sign = True, force = True).fpr
+            self.primary_key = self.createKey('AIF-NG File Verification Key',
+                                              sign = True,
+                                              force = True,
+                                              certify = True).fpr
         self.primary_key = self.findKeyByID(self.primary_key, source = 'secret')
+        if self.primary_key:
+            _logger.debug('Found primary key in secret keyring: {0}'.format(self.primary_key.fpr))
+        else:
+            _logger.error('Could not find primary key in secret keyring: {0}'.format(self.primary_key))
+            raise RuntimeError('Primary key not found in secret keyring')
         self.ctx.signers = [self.primary_key]
+        if self.ctx.signers:
+            _logger.debug('Signers set to: {0}'.format(','.join([k.fpr for k in self.ctx.signers])))
+        else:
+            _logger.error('Could not assign signing keys; signing set empty')
+            raise RuntimeError('Could not assign signing keys; signing set empty')
         return(None)

     def clean(self):
         # This is mostly just to clean up the stuff we did before.
+        _logger.info('Cleaning GPG homedir.')
         self.primary_key = self.primary_key.fpr
         if self.temporary:
             self.primary_key = None
             shutil.rmtree(self.homedir)
+            _logger.info('Deleted temporary GPG homedir: {0}'.format(self.homedir))
         self.ctx = None
         return(None)

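Taken together, instantiating GPG() with no homedir now yields a self-contained session: a temporary homedir, a generated signing key, and a populated signers list. A minimal sketch of the intended lifecycle (method names as defined in this module):

    g = GPG()  # no homedir given -> temporary homedir + generated primary key
    # ... findKeyByID()/importKeys()/verifyData() calls here ...
    g.clean()  # removes the temporary homedir entirely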
@@ -72,10 +113,17 @@ class GPG(object):
                    'authenticate': kwargs.get('authenticate', False),
                    'passphrase': kwargs.get('passphrase'),
                    'force': kwargs.get('force')}
+        _logger.debug('Key creation parameters: {0}'.format(keyinfo))
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         if not keyinfo['expires_in']:
             del(keyinfo['expires_in'])
             keyinfo['expires'] = False
         k = self.ctx.create_key(**keyinfo)
+        _logger.info('Created key: {0}'.format(k.fpr))
+        _logger.debug('Key info: {0}'.format(k))
         return(k)

     def findKey(self, searchstr, secret = False, local = True, remote = True,
@@ -83,12 +131,25 @@ class GPG(object):
         fltr = 0
         if secret:
             fltr = fltr | gpg.constants.KEYLIST_MODE_WITH_SECRET
+            _logger.debug('Added "secret" to filter; new filter value: {0}'.format(fltr))
         if local:
             fltr = fltr | gpg.constants.KEYLIST_MODE_LOCAL
+            _logger.debug('Added "local" to filter; new filter value: {0}'.format(fltr))
         if remote:
             fltr = fltr | gpg.constants.KEYLIST_MODE_EXTERN
+            _logger.debug('Added "remote" to filter; new filter value: {0}'.format(fltr))
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         keys = [k for k in self.ctx.keylist(pattern = searchstr, secret = secret_only, mode = fltr)]
+        _logger.info('Found {0} keys'.format(len(keys)))
+        if keys:
+            _logger.debug('Found keys: {0}'.format(keys))
+        else:
+            _logger.warning('Found no keys.')
         if keyring_import:
+            _logger.debug('Importing enabled; importing found keys.')
             self.importKeys(keys, native = True)
         return(keys)

@@ -99,129 +160,243 @@ class GPG(object):
                    'local': gpg.constants.KEYLIST_MODE_LOCAL,
                    'secret': gpg.constants.KEYLIST_MODE_WITH_SECRET}
         if source not in sources.keys():
-            raise ValueError('source parameter must be one (and only one) of: {0}'.format(sources.keys()))
+            _logger.error('Invalid source parameter ({0}); must be one of: {1}'.format(source, sources.keys()))
+            raise ValueError('Invalid source parameter')
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         orig_mode = self.ctx.get_keylist_mode()
+        _logger.debug('Original keylist mode: {0}'.format(orig_mode))
         self.ctx.set_keylist_mode(sources[source])
+        _logger.info('Set keylist mode: {0} ({1})'.format(source, sources[source]))
+        _logger.debug('Searching for key ID: {0}'.format(key_id))
         try:
             key = self.ctx.get_key(key_id, secret = (True if source == 'secret' else False))
+            _logger.info('Found key object for {0}'.format(key_id))
+            _logger.debug('Found key: {0}'.format(key))
         except gpg.errors.KeyNotFound:
             key = None
+            _logger.warning('Found no keys.')
         self.ctx.set_keylist_mode(orig_mode)
+        _logger.info('Restored keylist mode ({0})'.format(orig_mode))
         if keyring_import and key:
+            _logger.debug('Importing enabled; importing found keys.')
             self.importKeys(key, native = True)
         return(key)

     def getKey(self, key_id, secret = False, strict = False, *args, **kwargs):
+        key = None
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         try:
             getattr(key_id, 'fpr')
+            _logger.info('Key specified is already a native key object.')
+            _logger.debug('Key: {0}'.format(key_id))
             return(key_id)
         except AttributeError:
             if not strict:
+                _logger.debug('Strict mode disabled; attempting import of {0} first.'.format(key_id))
                 self.findKeyByID(key_id, keyring_import = True, **kwargs)
             try:
                 key = self.ctx.get_key(key_id, secret = secret)
+                _logger.info('Found {0}.'.format(key_id))
+                _logger.debug('Key: {0}'.format(key))
             except gpg.errors.KeyNotFound:
-                key = None
+                _logger.warning('Could not locate {0} in keyring'.format(key_id))
             return(key)
-        return(None)

-    def getKeyFile(self, keyfile, keyring_import = False, *args, **kwargs):
-        keyfile = os.path.abspath(os.path.expanduser(keyfile))
-        with open(keyfile, 'rb') as fh:
-            rawkey_data = fh.read()
-            fh.seek(0, 0)
-            keys = [k for k in self.ctx.keylist(source = fh)]
-        if keyring_import:
-            self.importKeys(keys, native = True)
-        return((keys, rawkey_data))

     def getKeyData(self, keydata, keyring_import = False, *args, **kwargs):
         orig_keydata = keydata
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         if isinstance(keydata, str):
+            _logger.debug('String passed as keydata; converting to bytes.')
             keydata = keydata.encode('utf-8')
         buf = io.BytesIO(keydata)
+        _logger.info('Parsed {0} bytes; looking for key(s).'.format(buf.getbuffer().nbytes))
         keys = [k for k in self.ctx.keylist(source = buf)]
+        _logger.info('Found {0} key(s) in data.'.format(len(keys)))
+        if keys:
+            _logger.debug('Keys found: {0}'.format(keys))
+        else:
+            _logger.warning('No keys found in data.')
         buf.close()
         if keyring_import:
+            _logger.debug('Importing enabled; importing found keys.')
             self.importKeys(keys, native = True)
         return((keys, orig_keydata))

+    def getKeyFile(self, keyfile, keyring_import = False, *args, **kwargs):
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
+        orig_keyfile = keyfile
+        keyfile = os.path.abspath(os.path.expanduser(keyfile))
+        _logger.info('Parsed absolute keyfile path: {0} => {1}'.format(orig_keyfile, keyfile))
+        with open(keyfile, 'rb') as fh:
+            rawkey_data = fh.read()
+            fh.seek(0, 0)
+            _logger.debug('Parsed {0} bytes; looking for key(s).'.format(len(rawkey_data)))
+            keys = [k for k in self.ctx.keylist(source = fh)]
+            _logger.info('Found {0} key(s) in data.'.format(len(keys)))
+        if keys:
+            _logger.debug('Keys found: {0}'.format(keys))
+        else:
+            _logger.warning('No keys found in data.')
+        if keyring_import:
+            _logger.debug('Importing enabled; importing found keys.')
+            self.importKeys(keys, native = True)
+        return((keys, rawkey_data))

     def importKeys(self, keydata, native = False, local = True, remote = True, *args, **kwargs):
         fltr = 0
         orig_km = None
         keys = []
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         if local:
             fltr = fltr | gpg.constants.KEYLIST_MODE_LOCAL
+            _logger.debug('Added "local" to filter; new filter value: {0}'.format(fltr))
         if remote:
             fltr = fltr | gpg.constants.KEYLIST_MODE_EXTERN
+            _logger.debug('Added "remote" to filter; new filter value: {0}'.format(fltr))
         if self.ctx.get_keylist_mode() != fltr:
             orig_km = self.ctx.get_keylist_mode()
             self.ctx.set_keylist_mode(fltr)
+            _logger.info(('Current keylist mode ({0}) doesn\'t match filter ({1}); '
+                          'set to new mode.').format(orig_km, fltr))
         if not native: # It's raw key data (.gpg, .asc, etc.).
+            _logger.info('Non-native keydata specified; parsing.')
             formatted_keys = b''
             if isinstance(keydata, str):
                 formatted_keys += keydata.encode('utf-8')
+                _logger.debug('Specified keydata was a string; converted to bytes.')
             elif isinstance(keydata, list):
-                for k in keydata:
+                _logger.debug('Specified keydata was a list/list-like; iterating.')
+                for idx, k in enumerate(keydata):
+                    _logger.debug('Parsing entry {0} of {1} entries.'.format((idx + 1), len(keydata)))
                     if isinstance(k, str):
                         formatted_keys += k.encode('utf-8')
+                        _logger.debug('Keydata ({0}) was a string; converted to bytes.'.format((idx + 1)))
                     else:
+                        _logger.debug('Keydata ({0}) was already in bytes.'.format((idx + 1)))
                         formatted_keys += k
             else:
-                formatted_keys += keydata
-            for rslt in self.ctx.key_import(formatted_keys).imports:
-                keys.append(self.ctx.get_key(rslt.fpr))
+                _logger.warning('Could not identify keydata reliably; unpredictable results ahead.')
+                formatted_keys = keydata
+            rslt = self.ctx.key_import(formatted_keys).imports
+            _logger.debug('Imported keys: {0}'.format(rslt))
+            for r in rslt:
+                k = self.ctx.get_key(r.fpr)
+                if k:
+                    _logger.debug('Adding key to keylist: {0}'.format(k))
+                else:
+                    _logger.warning('Could not find key ID {0}.'.format(r.fpr))
+                keys.append(k)
         else: # It's a native Key() object (or a list of them).
+            _logger.info('Native keydata specified; parsing.')
             if not isinstance(keydata, list):
+                _logger.debug('Specified keydata was not a list/list-like; fixing.')
                 keydata = [keydata]
             keys = keydata
+            _logger.debug('Importing keys: {0}'.format(keys))
             self.ctx.op_import_keys(keydata)
         if orig_km:
             self.ctx.set_keylist_mode(orig_km)
+            _logger.info('Restored keylist mode to {0}'.format(orig_km))
         for k in keys:
+            _logger.info('Signing {0} with a local signature.'.format(k.fpr))
             self.ctx.key_sign(k, local = True)
+            _logger.debug('Adding trust for {0}.'.format(k.fpr))
             trusteditor = KeyEditor()
             self.ctx.interact(k, trusteditor.truster)
         return(None)

     def verifyData(self, data, keys = None, strict = False, detached = None, *args, **kwargs):
         results = {}
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         if keys:
+            _logger.info('Keys were specified.')
             if not isinstance(keys, list):
                 keys = [self.getKey(keys, source = 'local')]
             else:
                 keys = [self.getKey(k, source = 'local') for k in keys]
+            _logger.debug('Verifying against keys: {0}'.format(keys))
         if isinstance(data, str):
             data = data.encode('utf-8')
+            _logger.debug('Specified data was a string; converted to bytes.')
+        _logger.info('Verifying {0} bytes of data.'.format(len(data)))
         fnargs = {'signed_data': data}
         if detached:
+            _logger.info('Specified a detached signature.')
             if isinstance(detached, str):
                 detached = detached.encode('utf-8')
+                _logger.debug('Specified signature was a string; converted to bytes.')
             if not isinstance(detached, bytes) and not hasattr(detached, 'read'):
-                raise TypeError('detached must be bytes or a file-like object (make sure the position is correct!)')
+                _logger.error('Detached signature was neither bytes nor a buffer-like object.')
+                raise TypeError('detached must be bytes or buffer-like object')
+            if isinstance(detached, bytes):
+                _logger.info('Signature length: {0} bytes'.format(len(detached)))
+            else:
+                _logger.info('Signature length: {0} bytes'.format(detached.getbuffer().nbytes))
             fnargs['signature'] = detached
         if strict:
+            _logger.debug('Strict mode enabled; data must be signed by ALL specified keys.')
             fnargs['verify'] = keys
+            _logger.debug('Verifying with args: {0}'.format(fnargs))
             results[None] = self.ctx.verify(**fnargs)
         else:
             if keys:
+                _logger.debug('Keys were specified but running in non-strict; iterating over all.')
                 for k in keys:
                     _fnargs = copy.deepcopy(fnargs)
                     _fnargs['verify'] = [k]
+                    _logger.info('Verifying against key {0}'.format(k.fpr))
                     try:
-                        print(self.ctx.get_keylist_mode())
+                        _logger.debug(('Verifying with args (data-stripped): '
+                                       '{0}').format({k: (v if k not in ('signed_data',
+                                                                         'signature')
+                                                          else '(stripped)') for k, v in _fnargs.items()}))
                         sigchk = self.ctx.verify(**_fnargs)
-                        results[k.fpr] = (True, sigchk[1].results, None)
+                        _logger.info('Key {0} verification results: {1}'.format(k.fpr, sigchk))
+                        results[k.fpr] = (True, sigchk[1], None)
                     except gpg.errors.MissingSignatures as e:
+                        _logger.warning('Key {0}: missing signature'.format(k.fpr))
+                        _logger.debug('Key {0} results: {1}'.format(k.fpr, e.results))
                         results[k.fpr] = (False, e.results, 'Missing Signature')
                     except gpg.errors.BadSignatures as e:
+                        _logger.warning('Key {0}: bad signature'.format(k.fpr))
+                        _logger.debug('Key {0} results: {1}'.format(k.fpr, e.results))
                         results[k.fpr] = (False, e.results, 'Bad Signature')
             else:
+                _logger.debug('No keys specified but running in non-strict; accepting any signatures.')
+                _logger.debug(('Verifying with args (data-stripped): '
+                               '{0}').format({k: (v if k not in ('signed_data',
+                                                                 'signature')
+                                                  else '(stripped)') for k, v in fnargs.items()}))
                 results[None] = self.ctx.verify(**fnargs)
+                _logger.debug('Results for any/all signatures: {0}'.format(results[None]))
         return(results)

     def verifyFile(self, filepath, *args, **kwargs):
+        orig_filepath = filepath
         filepath = os.path.abspath(os.path.expanduser(filepath))
+        _logger.debug('File verification invoked. Transformed filepath: {0} => {1}'.format(orig_filepath, filepath))
+        if args:
+            _logger.debug('args: {0}'.format(','.join(args)))
+        if kwargs:
+            _logger.debug('kwargs: {0}'.format(kwargs))
         with open(filepath, 'rb') as fh:
             results = self.verifyData(fh.read(), **kwargs)
         return(results)
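Since verifyData() returns a dict keyed by fingerprint (or by None when no per-key iteration happened), with (ok, results, error) tuples in the non-strict per-key path, a caller can inspect failures per key. A hedged sketch (g, data, sig, and the key ID are hypothetical stand-ins):

    g = GPG()
    results = g.verifyData(data, keys = ['0xDEADBEEF...'], detached = sig)  # hypothetical key ID
    for fpr, rslt in results.items():
        if fpr is not None:
            ok, _, err = rslt
            if not ok:
                print('{0} failed: {1}'.format(fpr, err))  # 'Missing Signature' or 'Bad Signature'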
@@ -1,10 +1,12 @@
 import ftplib
 import io
+import logging
 import pathlib
 import re
 ##
 import requests
 import requests.auth
+from lxml import etree
 ##
 import aif.constants_fallback
 from . import gpg_handler
@@ -12,13 +14,26 @@ from . import hash_handler
 from . import parser


+_logger = logging.getLogger(__name__)
+
+
 class ChecksumFile(object):
     _bsd_re = re.compile(r'^(?P<fname>\(.*\))\s+=\s+(?P<cksum>.*)$')

     def __init__(self, checksum_xml, filetype):
         self.xml = checksum_xml
+        if self.xml is not None:
+            _logger.debug('checksum_xml: {0}'.format(etree.tostring(self.xml).decode('utf-8')))
+        else:
+            _logger.error('checksum_xml is required but not specified')
+            raise ValueError('checksum_xml is required')
         self.uri = self.xml.text.strip()
         self.filetype = filetype
+        if filetype:
+            _logger.debug('URI and filetype: {{{0}}}{1}'.format(self.uri, self.filetype))
+        else:
+            _logger.error('filetype is required but not specified')
+            raise ValueError('filetype is required')
         self.hashes = None
         downloader = getDLHandler(self.uri) # Recursive objects for the win?
         dl = downloader(self.xml)
@@ -28,14 +43,14 @@ class ChecksumFile(object):
         self._convert()

     def _convert(self):
-        data = self.data
-        if not isinstance(data, str):
-            data = data.decode('utf-8')
-        data.strip()
+        if not isinstance(self.data, str):
+            self.data = self.data.decode('utf-8')
+        self.data = self.data.strip()
         self.hashes = {}
         if self.filetype not in ('gnu', 'bsd'):
+            _logger.error('Passed an invalid filetype: {0}'.format(self.filetype))
             raise ValueError('filetype attribute must be either "gnu" or "bsd"')
-        for line in data.splitlines():
+        for line in self.data.splitlines():
             if self.filetype == 'gnu':
                 hashtype = None # GNU style splits their hash types into separate files by default.
                 h, fname = line.split(None, 1)
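For reference, the two layouts _convert() distinguishes are the standard coreutils and BSD digest formats; the filename and hash below are hypothetical:

    # 'gnu' (e.g. sha256sum output): <hash><whitespace><filename>
    d2c87b0e...cafe1234  archlinux-bootstrap-x86_64.tar.gz
    # 'bsd' (e.g. BSD md5/sha256 output): <HASHTYPE> (<filename>) = <hash>
    SHA256 (archlinux-bootstrap-x86_64.tar.gz) = d2c87b0e...cafe1234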
@@ -48,17 +63,29 @@ class ChecksumFile(object):
             if hashtype not in self.hashes:
                 self.hashes[hashtype] = {}
             self.hashes[hashtype][fname] = h
+        _logger.debug('Generated hash set: {0}'.format(self.hashes))
         return(None)


 class Downloader(object):
     def __init__(self, netresource_xml, *args, **kwargs):
         self.xml = netresource_xml
+        _logger.info('Instantiated class {0}'.format(type(self).__name__))
+        if netresource_xml is not None:
+            _logger.debug('netresource_xml: {0}'.format(etree.tostring(self.xml).decode('utf-8')))
+        else:
+            _logger.error('netresource_xml is required but not specified')
+            raise ValueError('netresource_xml is required')
+        _logger.debug('args: {0}'.format(','.join(args)))
+        _logger.debug('kwargs: {0}'.format(kwargs))
         self.uri = parser.URI(self.xml.text.strip())
+        _logger.debug('Parsed URI: {0}'.format(self.uri))
         self.user = self.xml.attrib.get('user')
         if not self.user and self.uri.user:
             self.user = self.uri.user
         self.password = self.xml.attrib.get('password')
+        _logger.debug('Parsed user: {0}'.format(self.user))
+        _logger.debug('Parsed password: {0}'.format(self.password))
         if not self.password and self.uri.password:
             self.password = self.uri.password
         self.real_uri = ('{0}://'
@@ -68,6 +95,7 @@ class Downloader(object):
                          (self.uri.base if self.uri.base else ''),
                          (':{0}'.format(self.uri.port) if self.uri.port else ''),
                          self.uri.path)
+        _logger.debug('Rebuilt URI: {0}'.format(self.real_uri))
         self.gpg = None
         self.checksum = None
         self.data = io.BytesIO()
@@ -77,11 +105,19 @@ class Downloader(object):
         return(None)

     def parseGpgVerify(self, results):
-        pass
+        pass # TODO? Might not need to.

     def verify(self, verify_xml, *args, **kwargs):
         gpg_xml = verify_xml.find('gpg')
+        if gpg_xml is not None:
+            _logger.debug('gpg_xml: {0}'.format(etree.tostring(gpg_xml).decode('utf-8')))
+        else:
+            _logger.debug('No <gpg> in verify_xml')
         hash_xml = verify_xml.find('hash')
+        if hash_xml is not None:
+            _logger.debug('Hash XML: {0}'.format(etree.tostring(hash_xml).decode('utf-8')))
+        else:
+            _logger.debug('No <hash> in verify_xml')
         results = {}
         if gpg_xml is not None:
             results['gpg'] = self.verifyGPG(gpg_xml)
@@ -94,35 +130,72 @@ class Downloader(object):
         # We don't allow custom GPG homedirs since this is probably running from a LiveCD/USB/whatever anyways.
         # This means we can *always* instantiate the GPG handler from scratch.
         self.gpg = gpg_handler.GPG()
+        _logger.info('Established GPG session.')
+        _logger.debug('GPG home dir: {0}'.format(self.gpg.homedir))
+        _logger.debug('GPG primary key: {0}'.format(self.gpg.primary_key.fpr))
         keys_xml = gpg_xml.find('keys')
+        if keys_xml is not None:
+            _logger.debug('keys_xml: {0}'.format(etree.tostring(keys_xml).decode('utf-8')))
+        else:
+            _logger.error('No required <keys> in gpg_xml')
+            raise ValueError('<keys> is required in a GPG verification block')
         sigs_xml = gpg_xml.find('sigs')
-        fnargs = {'keyring_import': True}
+        if sigs_xml is not None:
+            _logger.debug('sigs_xml: {0}'.format(etree.tostring(sigs_xml).decode('utf-8')))
+        else:
+            _logger.error('No required <sigs> in gpg_xml')
+            raise ValueError('<sigs> is required in a GPG verification block')
+        fnargs = {'strict': keys_xml.attrib.get('detect')}
+        if fnargs['strict']: # We have to manually do this since it's in our parent's __init__
+            if fnargs['strict'].lower() in ('true', '1'):
+                fnargs['strict'] = True
+            else:
+                fnargs['strict'] = False
+        else:
+            fnargs['strict'] = False
         fnargs.update(kwargs)
         if keys_xml is not None:
             fnargs['keys'] = []
             for key_id_xml in keys_xml.findall('keyID'):
+                _logger.debug('Found <keyID>: {0}'.format(etree.tostring(key_id_xml).decode('utf-8')))
                 if key_id_xml.text == 'auto':
-                    k = self.gpg.findKeyByID(aif.constants_fallback.ARCH_RELENG_KEY, **fnargs)
-                elif key_id_xml.text == 'detect':
-                    fnargs['strict'] = False
-                    continue
+                    _logger.debug('Key ID was set to "auto"; using {0}'.format(aif.constants_fallback.ARCH_RELENG_KEY))
+                    self.gpg.findKeyByID(aif.constants_fallback.ARCH_RELENG_KEY, source = 'remote',
+                                         keyring_import = True, **fnargs)
+                    k = self.gpg.findKeyByID(aif.constants_fallback.ARCH_RELENG_KEY, source = 'local', **fnargs)
                 else:
-                    k = self.gpg.findKeyByID(key_id_xml.text.strip(), **fnargs)
+                    _logger.debug('Finding key: {0}'.format(key_id_xml.text.strip()))
+                    self.gpg.findKeyByID(key_id_xml.text.strip(), source = 'remote', keyring_import = True, **fnargs)
+                    k = self.gpg.findKeyByID(key_id_xml.text.strip(), source = 'local', **fnargs)
+                if k:
+                    _logger.debug('Key {0} found'.format(k.fpr))
+                else:
+                    _logger.error('Key {0} not found'.format(key_id_xml.text.strip()))
+                    raise RuntimeError('Could not find key ID specified')
                 fnargs['keys'].append(k)
             for key_file_xml in keys_xml.findall('keyFile'):
+                _logger.debug('Found <keyFile>: {0}'.format(etree.tostring(key_file_xml).decode('utf-8')))
                 downloader = getDLHandler(key_file_xml.text.strip()) # Recursive objects for the win?
                 dl = downloader(key_file_xml)
                 dl.get()
-                k = self.gpg.getKeyData(dl.data.read(), **fnargs)[0]
+                k = self.gpg.getKeyData(dl.data.read(), keyring_import = True, **fnargs)[0]
+                if k:
+                    fnargs['keys'].extend(k)
+                else:
+                    pass # No keys found in key file. We log this in GPG.getKeyData() though.
                 dl.data.seek(0, 0)
-                fnargs['keys'].extend(k)
+            if not fnargs['keys']:
+                _logger.debug('Found no keys in keys_xml')
+                raise ValueError('Could not find any keys')
         if sigs_xml is not None:
             for sig_text_xml in sigs_xml.findall('signature'):
+                _logger.debug('Found <signature>')
                 sig = sig_text_xml.text.strip()
                 sigchk = self.gpg.verifyData(self.data.read(), detached = sig, **fnargs)
                 self.data.seek(0, 0)
                 results.update(sigchk)
             for sig_file_xml in sigs_xml.findall('signatureFile'):
+                _logger.debug('Found <signatureFile>: {0}'.format(sig_file_xml.text.strip()))
                 downloader = getDLHandler(sig_file_xml.text.strip())
                 dl = downloader(sig_file_xml)
                 dl.get()
@@ -131,6 +204,7 @@ class Downloader(object):
             self.data.seek(0, 0)
             results.update(sigchk)
         self.gpg.clean()
+        _logger.debug('Rendered results: {0}'.format(results))
         return(results)

     def verifyHash(self, hash_xml, *args, **kwargs):
@@ -144,6 +218,7 @@ class Downloader(object):
         self.data.seek(0, 0)
         if checksum_file_xml is not None:
             for cksum_xml in checksum_file_xml:
+                _logger.debug('Found <checksumFile>: {0}'.format(etree.tostring(cksum_xml).decode('utf-8')))
                 htype = cksum_xml.attrib['hashType'].strip().lower()
                 ftype = cksum_xml.attrib['fileType'].strip().lower()
                 fname = cksum_xml.attrib.get('filePath',
@@ -154,14 +229,28 @@ class Downloader(object):
                 elif ftype == 'bsd':
                     cksum = cksum_file.hashes[htype][fname]
                 result = (cksum == checksums[htype])
+                if result:
+                    _logger.debug('Checksum type {0} matches ({1})'.format(htype, cksum))
+                else:
+                    _logger.warning(('Checksum type {0} mismatch: '
+                                     '{1} (data) vs. {2} (specified)').format(htype, checksums[htype], cksum))
                 results.append(result)
         if checksum_xml is not None:
             for cksum_xml in checksum_xml:
+                _logger.debug('Found <checksum>: {0}'.format(etree.tostring(cksum_xml).decode('utf-8')))
                 # Thankfully, this is a LOT easier.
                 htype = cksum_xml.attrib['hashType'].strip().lower()
                 result = (cksum_xml.text.strip().lower() == checksums[htype])
+                if result:
+                    _logger.debug('Checksum type {0} matches ({1})'.format(htype, checksums[htype]))
+                else:
+                    _logger.warning(('Checksum type {0} mismatch: '
+                                     '{1} (data) vs. {2} (specified)').format(htype,
+                                                                              checksums[htype],
+                                                                              cksum_xml.text.strip().lower()))
                 results.append(result)
         result = all(results)
+        _logger.debug('Overall result of checksumming: {0}'.format(result))
         return(result)


@@ -176,6 +265,7 @@ class FSDownloader(Downloader):
         with open(self.uri.path, 'rb') as fh:
             self.data.write(fh.read())
         self.data.seek(0, 0)
+        _logger.info('Read in {0} bytes'.format(self.data.getbuffer().nbytes))
         return(None)


@@ -188,11 +278,15 @@ class FTPDownloader(Downloader):
             self.password = ''
         self.port = (self.uri.port if self.uri.port else 0)
         self._conn = None
+        _logger.debug('User: {0}'.format(self.user))
+        _logger.debug('Password: {0}'.format(self.password))
+        _logger.debug('Port: {0}'.format(self.port))

     def _connect(self):
         self._conn = ftplib.FTP()
         self._conn.connect(host = self.uri.base, port = self.port)
         self._conn.login(user = self.user, passwd = self.password)
+        _logger.info('Connected.')
         return(None)

     def get(self):
@@ -201,10 +295,12 @@ class FTPDownloader(Downloader):
         self._conn.retrbinary('RETR {0}'.format(self.uri.path), self.data.write)
         self.data.seek(0, 0)
         self._close()
+        _logger.info('Read in {0} bytes'.format(self.data.getbuffer().nbytes))
         return(None)

     def _close(self):
         self._conn.quit()
+        _logger.info('Closed connection')
         return(None)


@@ -217,6 +313,7 @@ class FTPSDownloader(FTPDownloader):
         self._conn.connect(host = self.uri.base, port = self.port)
         self._conn.login(user = self.user, passwd = self.password)
         self._conn.prot_p()
+        _logger.info('Connected.')
         return(None)


@@ -225,6 +322,7 @@ class HTTPDownloader(Downloader):
         super().__init__(netresource_xml, *args, **kwargs)
         self.auth = self.xml.attrib.get('authType', 'none').lower()
         if self.auth == 'none':
+            _logger.debug('No auth.')
             self.auth = None
             self.realm = None
             self.user = None
@@ -232,14 +330,17 @@ class HTTPDownloader(Downloader):
         else:
             if self.auth == 'basic':
                 self.auth = requests.auth.HTTPBasicAuth(self.user, self.password)
+                _logger.info('HTTP basic auth configured.')
             elif self.auth == 'digest':
                 self.auth = requests.auth.HTTPDigestAuth(self.user, self.password)
+                _logger.info('HTTP digest auth configured.')

     def get(self):
         self.data.seek(0, 0)
         req = requests.get(self.real_uri, auth = self.auth)
         self.data.write(req.content)
         self.data.seek(0, 0)
+        _logger.info('Read in {0} bytes'.format(self.data.getbuffer().nbytes))
         return(None)


@@ -254,5 +355,6 @@ def getDLHandler(uri):
     elif re.search(r'^ftps://', uri, re.IGNORECASE):
         return(FTPSDownloader)
     else:
+        _logger.error('Unable to detect which download handler to instantiate.')
         raise RuntimeError('Could not detect which download handler to use')
     return(None)
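getDLHandler() is the scheme dispatcher the rest of the module leans on; a minimal sketch of the round trip (the XML element variable is a hypothetical stand-in for a parsed netresource element):

    handler = getDLHandler('https://example.com/some.tar.gz')  # -> HTTPDownloader
    dl = handler(netresource_xml)  # netresource_xml: hypothetical lxml element
    dl.get()
    payload = dl.data.read()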
@@ -546,9 +546,10 @@ There are several script types available for `execution`. Currently, these are:
 * pkg
 * post

-*pre* scripts are run (in numerical `order`) before the disks are even formatted. *pkg* scripts are run (in numerical `order`) right before the <<code_package_code, packages>> are installed (this allows you to configure an <<command, alternate packager>> such as https://aur.archlinux.org/packages/apacman/[apacman^]) -- these are run *inside* the chroot of the new install. *post* scripts are run inside the chroot like *pkg*, but are executed very last thing, just before the reboot.
+*pre* scripts are run (in specified order) before the disks are even formatted. *pkg* scripts are run (in specified order) right before the <<code_package_code, packages>> are installed (this allows you to configure an <<command, alternate packager>> such as https://aur.archlinux.org/packages/apacman/[apacman^]) -- these are run *inside* the chroot of the new install. *post* scripts are run inside the chroot like *pkg*, but are executed very last thing, just before the reboot.

 = Further Information
 Here you will find further info and other resources relating to AIF-NG.

 == FAQ
@@ -614,6 +615,8 @@ As long as:

 then it shouldn't try to perform any remote operations.

+Note that if you specified a GPG verification, you'll need to use a local exported key file for the public key (`keyFile`); if you use a `keyID`, then AIF-NG will try to fetch the key from keyservers.
+
 === "I specified start sector as 0 for a GPT-labeled disk but it starts at sector 2048 instead. What gives?"
 GPT requires 33 sectors for the table at the beginning (and 32 sectors at the end) for the actual table. That plus an extra (usually) 512 bytes at the beginning for something called a https://en.wikipedia.org/wiki/GUID_Partition_Table#Protective_MBR_(LBA_0)[Protective MBR^] (this prevents disk utilities from overwriting the GPT label automatically in case they only recognize "msdos" labels and assume the disk is not formatted yet).

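To illustrate the keyFile/keyID note added above, a hedged sketch of a <gpg> block (element names are taken from the example XML at the end of this commit; the local key path is hypothetical):

    <gpg>
        <sigs>
            <signatureFile>file:///tmp/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig</signatureFile>
        </sigs>
        <keys detect="false">
            <!-- Offline/local: an exported public key file; nothing is fetched. -->
            <keyFile>file:///tmp/archlinux-releng.asc</keyFile>
            <!-- Online: a keyID is fetched from keyservers instead. -->
            <!-- <keyID>0x4AA4767BBC9C4B1D18AE28B77F2D434B9741E8AC</keyID> -->
        </keys>
    </gpg>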
@@ -662,6 +665,19 @@ You can't, currently; support is only stubbed out for now. If absolutely necessa

 This hopefully will be changed in the future, however, as I'm interested in adding support. For now, open and WPA/WPA2 PSK only are considered supported.

+=== "How do I use my own GnuPG homedir instead of letting AIF-NG create one automatically?"
+I can pretty easily add support for this -- it's stubbed in already. But there are a couple reasons it doesn't really make sense to do so:
+
+* Being that most people are probably using this from a LiveCD/LiveUSB/PXE/whatever, it's *highly* unlikely they'll even have a static GnuPG homedir available.
+* Even if they did, AIF-NG has no real way of running a passphrase prompt. It's intended to be run automatically, non-interactively, and daemonized. You'd have to have a passphrase-less private key for it to work.
+** Why? Because it needs to be able to sign and trust the key ID you specified to get an accurate validity reading of the signature. If the private key has a passphrase, this is required for the operation to complete. If a custom homedir with a passphrased private key were specified, the signer's public key would already need to be imported into the keyring, signed, AND trusted (with a sufficiently high level).
+
+=== "Why do I have to specify a URI or key ID for a GPG key but can include a raw text block for a GPG `signature`?"
+Because keys are (generally speaking) intended to be publicly fetchable in some form or another. `signatures` are not (necessarily); they're more geared towards being file objects. I definitely recommend using `signatureFile` instead, though, even if it's just to a local .sig/.asc file.
+
+=== "Why don't you support WKD for GPG key fetching?"
+Because I didn't. If there is interest, I can add support for it but please don't request it unless you plan on actually using it.
+
 == Bug Reports/Feature Requests
 NOTE: It is possible to submit a bug or feature request without registering in my bugtracker. One of my pet peeves is needing to create an account/register on a bugtracker simply to report a bug! The following links only require an email address to file a bug (which is necessary in case I need any further clarification from you or to keep you updated on the status of the bug/feature request -- so please be sure to use a valid email address).

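As background for the homedir answer above, the temporary-homedir path GPG._initContext() takes (per the diff earlier in this commit) boils down to:

    import os
    import tempfile

    # Mirrors GPG._initContext() when no homedir is supplied:
    # mkdtemp() already creates the directory with 0o700 permissions.
    homedir = tempfile.mkdtemp(prefix = '.aif.', suffix = '.gpg')
    homedir = os.path.abspath(os.path.expanduser(homedir))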
@@ -51,3 +51,12 @@ https://msdn.microsoft.com/en-us/library/dd489258.aspx
 if i ever need a list of GPT GUIDs, maybe to do some fancy GUID-to-name-and-back mapping?
 https://en.wikipedia.org/wiki/GUID_Partition_Table#Partition_type_GUIDs
 (mapping can be done via https://stackoverflow.com/questions/483666/reverse-invert-a-dictionary-mapping)
+
+
+
+docs todo:
+- syntax notation:
+  bold element/attribute names are required (only specified once).
+  regular are optional.
+  italicized means there can be multiple (none, one or many) specified.
+  italicized and bold means there must be at LEAST one.
@@ -21,7 +21,7 @@
             file:///tmp/archlinux-bootstrap-2019.12.01-x86_64.tar.gz.sig
         </signatureFile>
     </sigs>
-    <keys>
+    <keys detect="false">
        <keyID>0x4AA4767BBC9C4B1D18AE28B77F2D434B9741E8AC</keyID>
     </keys>
 </gpg>