Checking in some progress. The config generator is almost done; it is still somewhat rough, but it works.
This commit is contained in:
parent
46a9df6ef6
commit
262eefba07
11
TODO
11
TODO
@ -3,6 +3,16 @@
|
|||||||
- ensure we use docstrings in a Sphinx-compatible manner?
|
- ensure we use docstrings in a Sphinx-compatible manner?
|
||||||
https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html
|
https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html
|
||||||
at the very least document all the functions and such so pydoc's happy.
|
at the very least document all the functions and such so pydoc's happy.
|
||||||
|
- better prompt display. i might include them as constants in a separate file
|
||||||
|
and then import it for e.g. confgen. or maybe a Flask website/app?
|
||||||
|
- locking
|
||||||
|
- for docs, 3.x (as of 3.10) was 2.4M.
|
||||||
|
- GUI? at least for generating config...
|
||||||
|
|
||||||
|
- need to package:
|
||||||
|
python-hashid (https://psypanda.github.io/hashID/,
|
||||||
|
https://github.com/psypanda/hashID,
|
||||||
|
https://pypi.org/project/hashID/)
|
||||||
|
|
||||||
- package for PyPI:
|
- package for PyPI:
|
||||||
# https://packaging.python.org/tutorials/distributing-packages/
|
# https://packaging.python.org/tutorials/distributing-packages/
|
||||||
@ -14,6 +24,7 @@
|
|||||||
BUGS.SQUARE-R00T.NET bugs/tasks:
|
BUGS.SQUARE-R00T.NET bugs/tasks:
|
||||||
#7: Ensure conditional deps/imports for features only if used.
|
#7: Ensure conditional deps/imports for features only if used.
|
||||||
Is this setup.py-compatible?
|
Is this setup.py-compatible?
|
||||||
|
nooope. just make everything a dep.
|
||||||
#14: Use os.path.join() for more consistency/pythonicness
|
#14: Use os.path.join() for more consistency/pythonicness
|
||||||
#24: Run as regular user? (pychroot? fakeroot?)
|
#24: Run as regular user? (pychroot? fakeroot?)
|
||||||
#34: Build-time support for only building single phase of build
|
#34: Build-time support for only building single phase of build
|
||||||
|
84
bdisk/GPG.py
84
bdisk/GPG.py
@ -1 +1,85 @@
|
|||||||
import gpg
|
import gpg
|
||||||
|
import os
|
||||||
|
import psutil
|
||||||
|
import gpg.errors
|
||||||
|
|
||||||
|
class GPGHandler(object):
    """Manage a GnuPG home directory and a GPGME context.

    Wraps the ``gpg`` (GPGME) Python bindings: prepares a usable GnuPG
    home directory (creating and locking it down if necessary) and
    provides helpers for inspecting signatures.

    Args:
        gnupg_homedir (str): Path to the GnuPG home directory. If None,
            it is taken from the GNUPGHOME environment variable, falling
            back to ``~/.gnupg``.
        key_id (str): Default key ID (currently unused by the methods
            below -- presumably for future signing support; TODO confirm).
        keyservers (list): Keyserver URIs (currently unused by the
            methods below).
    """

    def __init__(self, gnupg_homedir = None, key_id = None, keyservers = None):
        self.home = gnupg_homedir
        self.key_id = key_id
        self.keyservers = keyservers
        if self.home:
            self._prep_home()
        else:
            self._check_home()
        self.ctx = self.get_context(home_dir = self.home)

    def _check_home(self, home = None):
        """Resolve the GnuPG home (env var or default) and prepare it."""
        if not home:
            home = self.home
        if not home:
            # Fall back to the environment, then the conventional default.
            self.home = os.environ.get('GNUPGHOME', '~/.gnupg')
            home = self.home
        self._prep_home(home)
        return()

    def _prep_home(self, home = None):
        """Create (if needed) and lock down the GnuPG home directory.

        Ensures the directory exists, is owned by the current user, and
        has 0700 permissions (GnuPG refuses looser modes).

        Raises:
            PermissionError: If the directory does not exist and cannot
                be created.
        """
        if not home:
            home = self.home
        if not home:
            self.home = os.environ.get('GNUPGHOME', '~/.gnupg')
        self.home = os.path.abspath(os.path.expanduser(self.home))
        _exists = os.path.isdir(self.home)
        _uid = os.getuid()
        _gid = os.getgid()
        try:
            os.makedirs(self.home, exist_ok = True)
            os.chown(self.home, _uid, _gid)
            os.chmod(self.home, 0o700)
        except PermissionError:
            # It's alright; it's HOPEFULLY already created.
            if not _exists:
                raise PermissionError('We need a GnuPG home directory we can '
                                      'write to')
        return()

    def get_context(self, **kwargs):
        """Return a new gpg.Context built with the given keyword args."""
        ctx = gpg.Context(**kwargs)
        return(ctx)

    def kill_stale_agent(self):
        """Find stale gpg-agent/dirmngr processes owned by this user.

        NOTE(review): currently a stub -- it only iterates matching
        processes; the termination logic is still commented out below.
        """
        _process_list = []
        # TODO: optimize; can I search by proc name?
        for p in psutil.process_iter():
            if (p.name() in ('gpg-agent', 'dirmngr') and
                    p.uids()[0] == os.getuid()):
                pd = psutil.Process(p.pid).as_dict()
                # TODO: convert these over
                # for d in (chrootdir, dlpath):
                #     if pd['cwd'].startswith('{0}'.format(d)):
                #         plst.append(p.pid)
                # if len(plst) >= 1:
                #     for p in plst:
                #         psutil.Process(p).terminate()

    def get_sigs(self, data_in):
        """Return a list of key IDs for the signatures on *data_in*.

        Currently as of May 13, 2018 there's no way using the GPGME API
        to do the equivalent of the CLI's --list-packets:
        https://lists.gnupg.org/pipermail/gnupg-users/2018-January/059708.html
        https://lists.gnupg.org/pipermail/gnupg-users/2018-January/059715.html
        We use the workaround in:
        https://lists.gnupg.org/pipermail/gnupg-users/2018-January/059711.html
        (i.e. parse the key IDs out of the BadSignatures exception text).
        """
        key_ids = []
        try:
            self.ctx.verify(data_in)
        except gpg.errors.BadSignatures as sig_except:
            for line in [i.strip() for i in str(sig_except).splitlines()]:
                fields = [i.strip() for i in line.split(':')]
                key_ids.append(fields[0])
        return(key_ids)
|
||||||
|
@ -0,0 +1,6 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" ?>
<!-- Skeleton XML Schema for BDisk configuration files.
     Element/type definitions are still TODO. -->
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema"
           targetNamespace="http://bdisk.square-r00t.net"
           xmlns="http://bdisk.square-r00t.net"
           elementFormDefault="qualified">
</xs:schema>
|
590
bdisk/confgen.py
Executable file
590
bdisk/confgen.py
Executable file
@ -0,0 +1,590 @@
|
|||||||
|
#!/usr/bin/env python3.6
|
||||||
|
|
||||||
|
import confparse
|
||||||
|
import crypt
|
||||||
|
import getpass
|
||||||
|
import os
|
||||||
|
import utils
|
||||||
|
import uuid
|
||||||
|
import lxml.etree
|
||||||
|
|
||||||
|
detect = utils.detect()
|
||||||
|
generate = utils.generate()
|
||||||
|
prompt = utils.prompts()
|
||||||
|
transform = utils.transform()
|
||||||
|
valid = utils.valid()
|
||||||
|
|
||||||
|
# TODO: convert the restarts for prompts to continue's instead of letting them
|
||||||
|
# continue on to the next prompt.
|
||||||
|
|
||||||
|
def pass_prompt(user):
    # This isn't in utils.prompts() because we need to use an instance of
    # utils.valid() and it feels like it belongs here, since it's only usable
    # for configuration generation.
    """Interactively collect a password (or salted hash) for *user*.

    Returns:
        dict: keys 'hashed' (bool), 'password' (str or None),
        'hash_algo' (str or None) and 'salt' (str or None).

    On invalid input the prompt restarts itself recursively and the
    retried result is returned to the caller.
    """
    passwd = {'hashed': None,
              'password': None,
              'hash_algo': None,
              'salt': None}
    _special_password_values = ('BLANK', '')
    _passwd_is_special = False
    _need_input_type = True
    while _need_input_type:
        _input_type = input('\nWill you be entering a password or a salted '
                            'hash? (If using a "special" value per the manual, '
                            'use 1 (password)):\n\n'
                            '\t\t1: password\n'
                            '\t\t2: salted hash\n\n'
                            'Choice: ').strip()
        if not valid.integer(_input_type):
            print('You must enter 1 or 2.')
        else:
            if int(_input_type) == 1:
                _input_type = 'password'
                _need_input_type = False
                passwd['hashed'] = False
            # BUGFIX: original read `int(input_type)` -- a NameError.
            elif int(_input_type) == 2:
                _input_type = 'salted hash'
                _need_input_type = False
                passwd['hashed'] = True
            else:
                print('You must enter 1 or 2.')
    _prompt = ('\nWhat do you want {0}\'s {1} to be?\n').format(user,
                                                                _input_type)
    if passwd['hashed']:
        passwd['password'] = input('{0}\n{1}: '.format(_prompt,
                                                       _input_type.title()))
        # BUGFIX: original tested the *function object* valid.password_hash
        # (always truthy) instead of calling it.
        if not valid.password_hash(passwd['password']):
            print('This is not a valid password hash. Re-running.')
            # BUGFIX: return the retried result instead of discarding it.
            return(pass_prompt(user))
    else:
        passwd['password'] = getpass.getpass(_prompt + ('See the manual for '
                                'special values.\nYour input will NOT '
                                'echo back (unless it\'s a special value).\n'
                                '{0}: ').format(_input_type.title()))
        if passwd['password'] in _special_password_values:
            _passwd_is_special = True
            # 'BLANK' => '' => <(root)password></(root)password>
            if passwd['password'] == 'BLANK':
                # BUGFIX: original used == (comparison), not assignment.
                passwd['password'] = ''
            # '' => None => <(root)password />
            elif passwd['password'] == '':
                # BUGFIX: original used == (comparison), not assignment.
                passwd['password'] = None
        # BUGFIX: special values are exempt from the printable-ASCII check;
        # the original validated them anyway and looped forever.
        if not _passwd_is_special and not valid.password(passwd['password']):
            print('As a safety precaution, we are refusing to use this '
                  'password. It should entirely consist of the 95 printable '
                  'ASCII characters. Consult the manual\'s section on '
                  'passwords for more information.\nLet\'s try this again, '
                  'shall we?')
            return(pass_prompt(user))
        _salt = input('\nEnter the salt to use. If left blank, one will be '
                      'automatically generated. See the manual for special '
                      'values.\nSalt: ').strip()
        if _salt == '':
            # Auto-generated later, once we know the algorithm.
            pass
        elif _salt == 'auto':
            passwd['salt'] = 'auto'
        # BUGFIX: original called valid.salt_hash() with no argument.
        elif not valid.salt_hash(_salt):
            print('This is not a valid salt. Let\'s try this again.')
            return(pass_prompt(user))
        else:
            passwd['salt'] = _salt
        _algo = input(('\nWhat algorithm should we use to hash the password? '
                       'The default is sha512. You can choose from the '
                       'following:\n\n'
                       '\t\t{0}\n\nAlgorithm: ').format(
                            '\n\t\t'.join(list(utils.crypt_map.keys()))
                            )).strip().lower()
        if _algo == '':
            _algo = 'sha512'
        if _algo not in utils.crypt_map:
            print('Algorithm not found; let\'s try this again.')
            return(pass_prompt(user))
        else:
            passwd['hash_algo'] = _algo
            if _salt == '':
                passwd['salt'] = generate.salt(_algo)
        if not _passwd_is_special:
            _gen_now = prompt.confirm_or_no(prompt = '\nGenerate a password '
                            'hash now? This is HIGHLY recommended; otherwise, '
                            'the plaintext password will be stored in the '
                            'configuration and that is no bueno.\n')
            if _gen_now:
                passwd['password'] = generate.hash_password(
                                            passwd['password'],
                                            salt = passwd['salt'],
                                            algo = passwd['hash_algo'])
                passwd['hashed'] = True
    return(passwd)
|
||||||
|
|
||||||
|
class ConfGenerator(object):
    """Interactive wizard that builds a BDisk XML configuration profile.

    Args:
        cfgfile: Path to a configuration file. If None, the generated
            configuration is written to STDOUT instead.
        append_config: If True, append a new profile to the existing
            configuration in *cfgfile* (which is then required).

    Raises:
        RuntimeError: If append_config is True but no cfgfile was given.
    """

    def __init__(self, cfgfile = None, append_config = False):
        if append_config and not cfgfile:
            raise RuntimeError('You have specified config appending but '
                               'did not provide a configuration file')
        if cfgfile:
            self.cfgfile = os.path.abspath(os.path.expanduser(cfgfile))
        else:
            # Write to STDOUT
            self.cfgfile = None
        if append_config:
            # BUGFIX: parse the *normalized* path, not the raw argument.
            c = confparse.Conf(self.cfgfile)
            self.cfg = c.xml
            self.append = True
        else:
            self.cfg = lxml.etree.Element('bdisk')
            self.append = False
        self.profile = lxml.etree.Element('profile')
        self.cfg.append(self.profile)  # do I need to do this at the end?
|
||||||
|
|
||||||
|
def main(self):
    """Run every wizard section in order, exiting cleanly on CTRL-c."""
    print(('\n\tPlease consult the manual at {manual_site} if you have '
           'any questions.'
           '\n\tYou can hit CTRL-c at any time to quit.\n'
           ).format(manual_site = 'https://bdisk.square-r00t.net/'))
    sections = (self.get_profile_attribs,
                self.get_meta,
                self.get_accounts,
                self.get_sources)
    try:
        for section in sections:
            section()
    except KeyboardInterrupt:
        exit('\n\nCaught KeyboardInterrupt; quitting...')
    return()
|
||||||
|
|
||||||
|
def get_profile_attribs(self):
    """Prompt for the name/id/uuid attributes that identify a profile.

    Loops until at least one attribute is populated, then attaches the
    non-empty ones as attributes of the <profile> element.
    """
    print('++ PROFILE ATTRIBUTES ++')
    id_attrs = {'name': None,
                'id': None,
                'uuid': None}
    while not any(id_attrs.values()):
        print('\nThese are used to uniquely identify the profile you are '
              'creating. To ensure compatibility with other processes, '
              'each profile MUST be unique (even if you\'re only storing '
              'one profile per file). That means at least ONE of these '
              'attributes must be populated. You can hit enter to leave '
              'the attribute blank - you don\'t need to provide ALL '
              'attributes (though it\'s certainly recommended).')
        id_attrs['name'] = transform.sanitize_input(
                input(
                    '\nWhat name should this profile be? (It will '
                    'be transformed to a safe string if '
                    'necessary.)\nName: '))
        id_attrs['id'] = transform.sanitize_input(
                input(
                    '\nWhat ID number should this profile have? It MUST be a '
                    'positive integer.\nID: ').strip())
        if id_attrs['id'] and not valid.integer(id_attrs['id']):
            print('Invalid; skipping...')
            id_attrs['id'] = None
        # We don't sanitize this because it'd break. UUID4 requires hyphen
        # separators. We still validate, though.
        id_attrs['uuid'] = input(
                '\nWhat UUID should this profile have? '
                'It MUST be a UUID4 (RFC4122 § 4.4). e.g.:\n'
                '\t333d7287-3caa-45fe-b954-2da15dad1212\n'
                'If you use the special value "auto" (without quotes), then '
                'one will be automatically generated for you.\nUUID: ').strip()
        if id_attrs['uuid'].lower() == 'auto':
            id_attrs['uuid'] = str(uuid.uuid4())
            print('\n\tGenerated a UUID: {0}\n'.format(id_attrs['uuid']))
        elif not valid.uuid(id_attrs['uuid']):
            print('Invalid; skipping...')
            id_attrs['uuid'] = None
        # This causes a looping if none of the answers are valid.
        for attr in id_attrs:
            if id_attrs[attr] == '':
                id_attrs[attr] = None
    for attr, value in id_attrs.items():
        if value:
            self.profile.attrib[attr] = value
    print()
    return()
|
||||||
|
|
||||||
|
def get_meta(self):
    """Prompt for the branding/metadata section and attach <meta>.

    All items are required; the whole section restarts while any item
    is missing or invalid.
    """
    print('\n++ META ITEMS ++')
    meta_items = {'names': {'name': None,
                            'uxname': None,
                            'pname': None},
                  'desc': None,
                  'uri': None,
                  'ver': None,
                  'dev': {'author': None,
                          'email': None,
                          'website': None},
                  'max_recurse': None}
    while (not transform.flatten_recurse(meta_items) or
            (None in transform.flatten_recurse(meta_items))):
        print('\nThese are used primarily for branding (with the '
              'exception of recursion level, which is used '
              'operationally).\n*All* items are REQUIRED (and if any are '
              'blank or invalid, the entire section will restart), but '
              'you may want to tweak the VERSION_INFO.txt.j2 template if '
              'you don\'t want this information exposed to your users '
              '(see the manual for more detail).')
        print('\n++ META ITEMS || NAMES ++')
        # https://en.wikipedia.org/wiki/8.3_filename
        meta_items['names']['name'] = transform.sanitize_input(
                input(
                    '\nWhat 8.3 filename should be used as the name of this '
                    'project/live distro? Refer to the manual\'s Configuration '
                    'section for path /bdisk/profile/meta/names/name for '
                    'restrictions (there are quite a few).\n8.3 Name: ').strip(),
                no_underscores = True).upper()
        if (len(meta_items['names']['name']) > 8) or (
                meta_items['names']['name'] == ''):
            print('Invalid; skipping...')
            meta_items['names']['name'] = None
        # Note: 2009 spec
        # http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html#tag_03_282
        meta_items['names']['uxname'] = input(
            '\nWhat name should be used as the "human-readable" name of '
            'this project/live distro? Refer to the manual\'s '
            'Configuration section for path '
            '/bdisk/profile/meta/names/uxname for restrictions, but in a '
            'nutshell it must be compatible with the "POSIX Portable '
            'Filename Character Set" specification (the manual has a '
            'link).\nName: ').strip()
        if not valid.posix_filename(meta_items['names']['uxname']):
            print('Invalid; skipping...')
            meta_items['names']['uxname'] = None
        meta_items['names']['pname'] = input(
            '\nWhat name should be used as the "pretty" name of this '
            'project/live distro? Refer to the manual\'s Configuration '
            'section for path /bdisk/profile/meta/names/uxname for '
            'restrictions, but this is by far the most lax naming. It '
            'should be used for your actual branding.\nName: ').strip()
        if meta_items['names']['pname'] == '':
            meta_items['names']['pname'] = None
        print('\n++ META ITEMS || PROJECT INFORMATION ++')
        meta_items['uri'] = input('\nWhat is your project\'s URI/URL?'
                                  '\nURL: ').strip()
        if not valid.url(meta_items['uri']):
            print('Invalid; skipping...')
            meta_items['uri'] = None
        meta_items['ver'] = input(
            '\nWhat version is this project? It follows the same rules as '
            'the POSIX filename specification mentioned earlier (as we '
            'use it to name certain files).\nVersion: ')
        while not meta_items['desc']:
            print('\nWhat is your project\'s description?'
                  '\nAccepts multiple lines, etc.'
                  '\nPress CTRL-d (on *nix/macOS) or CTRL-z (on Windows) '
                  'on an empty line when done.'
                  '\nIt will be echoed back for confirmation after it is '
                  'entered (with the option to re-enter if '
                  'desired/needed - this will NOT restart the entire Meta '
                  'section).')
            meta_items['desc'] = prompt.multiline_input(
                    prompt = '\nDescription: ')
            print('-----\n{0}\n-----'.format(meta_items['desc']))
            _confirm = prompt.confirm_or_no(
                    prompt = 'Does this look okay?\n')
            if not _confirm:
                meta_items['desc'] = None
        print('\n++ META ITEMS || DEVELOPER INFORMATION ++')
        meta_items['dev']['author'] = (input(
            '\nWhat is YOUR name?\nName: ')).strip()
        meta_items['dev']['email'] = (input('\nWhat is your email address?'
                                            '\nemail: ')).strip()
        if not valid.email(meta_items['dev']['email']):
            print('Invalid; skipping...')
            meta_items['dev']['email'] = None
        meta_items['dev']['website'] = (input('\nWhat is your website?\n'
                                              'Website: ')).strip()
        if not valid.url(meta_items['dev']['website']):
            print('Invalid; skipping...')
            meta_items['dev']['website'] = None
        print('\n++ META ITEMS || OPERATIONAL CONFIGURATION ++')
        meta_items['max_recurse'] = transform.sanitize_input(input(
            '\nAs of the 4.x branch, BDisk configuration files support '
            'cross-document substitution via XPath references, even '
            'recursively. How many levels of recursion do you want this '
            'profile to support? Note that the default limit for Python '
            'is 1000 (and CAN be changed, but is not recommended) and '
            'each level of recursion you add can POTENTIALLY add '
            'additional CPU/RAM strain. HOWEVER, chances are if your '
            'machine\'s good enough to run BDisk, it\'s good enough for '
            'whatever you set. I recommend setting it to 5, because any '
            'more than that and your configuration becomes cumbersome to '
            'maintain.\nMax recursion: ').strip())
        if not valid.integer(meta_items['max_recurse']):
            print('Invalid; skipping...')
            # BUGFIX: the original reset meta_items['dev']['website'] here
            # (copy/paste error), wiping a valid answer and never actually
            # invalidating max_recurse.
            meta_items['max_recurse'] = None
    meta = lxml.etree.SubElement(self.profile, 'meta')
    for e in meta_items:
        elem = lxml.etree.SubElement(meta, e)
        # These have nested items.
        if isinstance(meta_items[e], dict):
            for s in meta_items[e]:
                subelem = lxml.etree.SubElement(elem, s)
                subelem.text = meta_items[e][s]
        else:
            elem.text = meta_items[e]
    print()
    return()
|
||||||
|
|
||||||
|
def get_accounts(self):
    """Prompt for the root password and any regular users; attach <accounts>."""
    print('\n++ ACCOUNTS ++')
    accounts = lxml.etree.SubElement(self.profile, 'accounts')
    pass_attribs = ('hashed', 'hash_algo', 'salt')
    rootpass = None
    print('\n++ ACCOUNTS || ROOT ++')
    if not rootpass:
        prompt_attribs = pass_prompt('root')
        rootpass = lxml.etree.Element('rootpass')
        for attr in pass_attribs:
            rootpass.attrib[attr] = transform.py2xml(prompt_attribs[attr])
        rootpass.text = prompt_attribs['password']
        accounts.append(rootpass)
    print('\n++ ACCOUNTS || USERS ++')
    more_accounts = prompt.confirm_or_no(prompt = ('\nWould you like to '
                                    'add a non-root/regular user?\n'),
                            usage = ('{0} for yes, {1} for no...\n'))
    users = lxml.etree.SubElement(accounts, 'users')
    while more_accounts:
        _user_text = {'username': None,
                      'password': None,
                      'comment': None}
        _user_invalid = True
        while _user_invalid:
            _username = (input('\nWhat should the username be?'
                               '\nUsername: ')).strip()
            if not valid.username(_username):
                print('\nThat username string is invalid. Consult the '
                      'manual and the man page for useradd(8). Let\'s '
                      'have another go.')
            else:
                _user_text['username'] = _username
                _user_invalid = False
        _sudo = prompt.confirm_or_no(prompt = ('\nGive {0} full sudo '
                                    'access?\n').format(_username))
        _pass_attr = pass_prompt(_username)
        _user_text['password'] = _pass_attr['password']
        _user_text['comment'] = transform.no_newlines(
            (input('\nWhat do you want the GECOS comment to be? This is '
                   'USUALLY the full "real" name of the user (or a '
                   'description of the service, etc.). You can leave it '
                   'blank if you want.\nGECOS: ')).strip())
        user = lxml.etree.Element('user')
        user.attrib['sudo'] = transform.py2xml(_sudo)
        _elems = {}
        for tag, text in _user_text.items():
            _elems[tag] = lxml.etree.SubElement(user, tag)
            _elems[tag].text = text
        for attr in pass_attribs:
            _elems['password'].attrib[attr] = transform.py2xml(_pass_attr[attr])
        users.append(user)
        more_accounts = prompt.confirm_or_no(prompt = ('\nWould you like '
                                        'to add another user?\n'),
                                    usage = ('{0} for yes, {1} '
                                             'for no...\n'))
    return()
|
||||||
|
|
||||||
|
def get_sources(self):
    """Prompt for per-architecture tarball sources; attach <sources>.

    For each supported architecture, collects the tarball URL plus
    optional checksum and GPG signature details. Restarts the current
    source on any invalid answer.
    """
    print('\n++ SOURCES ++')
    sources = lxml.etree.SubElement(self.profile, 'sources')
    more_sources = True
    _arches = []
    _supported_arches = {'x86': ('(Also referred to by distros as "i386", '
                                 '"i486", "i686", and "32-bit")'),
                         'x86_64': ('(Also referred to by distros as '
                                    '"64-bit")')}
    while more_sources:
        if len(_arches) == len(_supported_arches):
            # All supported arches have been added. We currently don't
            # support mirror-balancing. TODO?
            print('\nCannot add more sources; all supported architectures '
                  'have been used. Moving on.')
            more_sources = False
            break
        if len(_arches) > 0:
            print('\n(Currently added arches: {0})'.format(
                                                    ', '.join(_arches)))
        _print_arches = '\n\t'.join(
                ['{0}:\t{1}'.format(*i) for i in _supported_arches.items()])
        source = lxml.etree.Element('source')
        arch = (input((
            '\nWhat hardware architecture is this source for?\n(Note: '
            'BDisk currently only supports the listed architectures).\n'
            '\n\t{0}\n\nArch: ').format(_print_arches))).strip().lower()
        if arch not in _supported_arches.keys():
            print('That is not a supported architecture. Trying again.')
            continue
        source.attrib['arch'] = arch
        print('\n++ SOURCES || {0} ++'.format(arch.upper()))
        print('\n++ SOURCES || {0} || TARBALL ++'.format(arch.upper()))
        tarball = (input('\nWhat URL should be used for the tarball? '
                         '(Note that this is ONLY tested for syntax, we '
                         'don\'t confirm it\'s downloadable when running '
                         'through the configuration generator wizard - '
                         'so please make sure you enter the correct URL!)'
                         '\nTarball: ')).strip()
        if not valid.url(tarball):
            print('That isn\'t a valid URL. Please double-check and try '
                  'again.')
            continue
        tarball = transform.url_to_dict(tarball, no_None = True)
        tarball_elem = lxml.etree.SubElement(source, 'tarball')
        tarball_elem.attrib['flags'] = 'latest'
        tarball_elem.text = tarball['full_url']
        print('\n++ SOURCES || {0} || CHECKSUM ++'.format(arch.upper()))
        chksum = lxml.etree.SubElement(source, 'checksum')
        _chksum_chk = prompt.confirm_or_no(prompt = (
            '\nWould you like to add a checksum for the tarball? (BDisk '
            'can fetch a checksum file from a remote URL at build-time or '
            'you can hardcode an explicit checksum in.)\n'),
                                           usage = ('{0} for yes, {1} '
                                                    'for no...\n'))
        if not _chksum_chk:
            checksum = None
        else:
            checksum = (input(
                '\nPlease enter the URL to the checksum file OR the '
                'explicit checksum you wish to use.\nChecksum (remote URL '
                'or checksum hash): ')).strip()
            if valid.url(checksum):
                checksum = transform.url_to_dict(checksum)
                checksum_type = prompt.hash_select(prompt = (
                    '\nPlease select the digest type (by number) of the '
                    'checksums contained in this file.\n'
                    'Can be one of:\n\n\t{0}'
                    '\n\nChecksum type: '))
                if checksum_type is False:
                    print('Select by NUMBER. Starting over.')
                    continue
                elif checksum_type is None:
                    print('Invalid selection. Starting over.')
                    continue
                # BUGFIX: the original created a SECOND <checksum>
                # subelement here; reuse the one created above instead.
                chksum.attrib['hash_algo'] = checksum_type
                chksum.attrib['explicit'] = "no"
                chksum.text = checksum['full_url']
            else:
                # Maybe it's a digest string.
                checksum_type = detect.any_hash(checksum)
                if not checksum_type:
                    print('\nCould not detect which hash type this digest '
                          'is.')
                    checksum_type = prompt.hash_select(
                            prompt = ('\nPlease select from the following '
                                      'list (by numer):\n\n\t{0}'
                                      '\n\nChecksum type: '))
                    if checksum_type is False:
                        print('Select by NUMBER. Starting over.')
                        continue
                    elif checksum_type is None:
                        print('Invalid selection. Starting over.')
                        continue
                elif len(checksum_type) > 1:
                    checksum_type = prompt.hash_select(
                            prompt = (
                                '\nWe found several algorithms that can match '
                                'your provided digest.\nPlease select the '
                                'appropriate digest method from the list below '
                                '(by number):\n\n\t{0}\n\nChecksum type: '))
                    if checksum_type is False:
                        print('Select by NUMBER. Starting over.')
                        continue
                    elif checksum_type is None:
                        print('Invalid selection. Starting over.')
                        continue
                else:
                    # BUGFIX: the original used == (a no-op comparison)
                    # instead of assigning the single matched algorithm.
                    checksum_type = checksum_type[0]
                chksum.attrib['explicit'] = "yes"
                chksum.text = checksum
                chksum.attrib['hash_algo'] = checksum_type
        print('\n++ SOURCES || {0} || GPG ++'.format(arch.upper()))
        sig = lxml.etree.SubElement(source, 'sig')
        _gpg_chk = prompt.confirm_or_no(prompt = (
            '\nWould you like to add a GPG(/GnuPG/PGP) signature for the '
            'tarball?\n'))
        if _gpg_chk:
            gpgsig = (input(
                '\nPlease enter the remote URL for the GPG signature '
                'file.\nGPG Signature File URL: ')
                      ).strip()
            if not valid.url(gpgsig):
                print('Invalid URL. Starting over.')
                continue
            else:
                gpgsig = transform.url_to_dict(gpgsig)
                sig.text = gpgsig['full_url']
            sigkeys = prompt.confirm_or_no(prompt = (
                '\nDo you know the key ID of the authorized/valid '
                'signer? (If not, we will fetch the GPG signature file '
                'now and try to parse it for key IDs.)\n'),
                                           usage = ('{0} for yes, {1} '
                                                    'for no...\n'))
            if sigkeys:
                sigkeys = (input('\nWhat is the key ID? You can use the '
                                 'fingerprint, full 40-character key ID '
                                 '(preferred), 16-character "long" ID, or '
                                 'the 8-character "short" ID '
                                 '(HIGHLY unrecommended!).\nKey ID: ')
                           ).strip().upper()
                if not valid.gpgkeyID(sigkeys):
                    print('That is not a valid GPG key ID. Restarting')
                    continue
                sig.attrib['keys'] = sigkeys
            else:
                sigkeys = detect.gpgkeyID_from_url(gpgsig)
                if not isinstance(sigkeys, list):
                    print('Could not properly parse any keys in the '
                          'signature file. Restarting.')
                    continue
                elif len(sigkeys) == 0:
                    print('We didn\'t find any key IDs embedded in the '
                          'given signature file. Restarting.')
                    continue
                elif len(sigkeys) == 1:
                    _s = 'Does this key'
                else:
                    _s = 'Do these keys'
                _key_info = [detect.gpgkey_info(k) for k in sigkeys]
                print('\nWe found the following key ID information:\n\n')
                for _key in _key_info:
                    print('\t{0}\n'.format(_key['Full key']))
                    for _uid in _key['User IDs']:
                        # COULD flatten this to just one level.
                        print('\t\t{0}'.format(_uid['Name']))
                        for k in _uid:
                            if k != 'Name':
                                print('\t\t\t{0}:\t{1}'.format(k, _uid[k]))
                _key_chk = prompt.confirm_or_no(prompt = (
                    '\n{0} look correct?\n').format(_s))
                if not _key_chk:
                    print('Something must have gotten futzed, then.'
                          'Restarting!')
                    continue
                sig.attrib['keys'] = ','.join(sigkeys)
        elems = {}
        for s in ('mirror', 'webroot'):
            elems[s] = lxml.etree.SubElement(source, s)
        elems['mirror'].text = '{scheme}://{host}'.format(**tarball)
        if tarball['port'] != '':
            elems['mirror'].text += ':{0}'.format(tarball['port'])
        elems['webroot'].text = '{path}'.format(**tarball)
        _arches.append(arch)
        # BUGFIX: the original never attached the completed <source> to
        # <sources>, silently dropping everything collected above.
        sources.append(source)
        more_sources = prompt.confirm_or_no(prompt = ('\nWould you like '
                                                      'to add another '
                                                      'source?\n'),
                                            usage = ('{0} for yes, {1} '
                                                     'for no...\n'))
    return()
|
||||||
|
|
||||||
|
def main():
    """Run the interactive config generator and pretty-print the result.

    Drives ConfGenerator interactively, then dumps the assembled XML
    configuration tree to stdout with an XML declaration.
    """
    generator = ConfGenerator()
    generator.main()
    print()
    xml_bytes = lxml.etree.tostring(generator.cfg,
                                    pretty_print = True,
                                    encoding = 'UTF-8',
                                    xml_declaration = True)
    print(xml_bytes.decode('utf-8'))


if __name__ == '__main__':
    main()
|
@ -1,10 +1,10 @@
|
|||||||
import _io
|
import _io
|
||||||
import copy
|
import copy
|
||||||
|
import re
|
||||||
import os
|
import os
|
||||||
import validators
|
import validators
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
import lxml.etree
|
import lxml.etree
|
||||||
import lxml.objectify as objectify
|
|
||||||
|
|
||||||
etree = lxml.etree
|
etree = lxml.etree
|
||||||
|
|
||||||
@ -14,24 +14,25 @@ def _detect_cfg(cfg):
|
|||||||
if isinstance(cfg, str):
|
if isinstance(cfg, str):
|
||||||
# check for path or string
|
# check for path or string
|
||||||
try:
|
try:
|
||||||
etree.fromstring(cfg)
|
etree.fromstring(cfg.encode('utf-8'))
|
||||||
|
return(cfg.encode('utf-8'))
|
||||||
except lxml.etree.XMLSyntaxError:
|
except lxml.etree.XMLSyntaxError:
|
||||||
path = os.path.abspath(os.path.expanduser(cfg))
|
path = os.path.abspath(os.path.expanduser(cfg))
|
||||||
try:
|
try:
|
||||||
with open(path, 'r') as f:
|
with open(path, 'rb') as f:
|
||||||
cfg = f.read()
|
cfg = f.read()
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
raise ValueError('Could not open {0}'.format(path))
|
raise ValueError('Could not open {0}'.format(path))
|
||||||
elif isinstance(cfg, _io.TextIOWrapper):
|
elif isinstance(cfg, _io.TextIOWrapper):
|
||||||
_cfg = cfg.read()
|
_cfg = cfg.read().encode('utf-8')
|
||||||
cfg.close()
|
cfg.close()
|
||||||
cfg = _cfg
|
cfg = _cfg
|
||||||
elif isinstance(self.cfg, _io.BufferedReader):
|
elif isinstance(self.cfg, _io.BufferedReader):
|
||||||
_cfg = cfg.read().decode('utf-8')
|
_cfg = cfg.read()
|
||||||
cfg.close()
|
cfg.close()
|
||||||
cfg = _cfg
|
cfg = _cfg
|
||||||
elif isinstance(cfg, bytes):
|
elif isinstance(cfg, bytes):
|
||||||
cfg = cfg.decode('utf-8')
|
return(cfg)
|
||||||
else:
|
else:
|
||||||
raise TypeError('Could not determine the object type.')
|
raise TypeError('Could not determine the object type.')
|
||||||
return(cfg)
|
return(cfg)
|
||||||
@ -76,7 +77,15 @@ class Conf(object):
|
|||||||
self.profile = profile
|
self.profile = profile
|
||||||
self.xml = None
|
self.xml = None
|
||||||
self.profile = None
|
self.profile = None
|
||||||
self.xml = etree.from_string(self.cfg)
|
# Mad props to https://stackoverflow.com/a/12728199/733214
|
||||||
|
self.xpath_re = re.compile('(?<=(?<!\{)\{)[^{}]*(?=\}(?!\}))')
|
||||||
|
self.substitutions = {}
|
||||||
|
self.xpaths = ['xpath_ref']
|
||||||
|
try:
|
||||||
|
self.xml = etree.fromstring(self.raw)
|
||||||
|
except lxml.etree.XMLSyntaxError:
|
||||||
|
raise ValueError('The configuration provided does not seem to be '
|
||||||
|
'valid')
|
||||||
self.xsd = None
|
self.xsd = None
|
||||||
#if not self.validate(): # Need to write the XSD
|
#if not self.validate(): # Need to write the XSD
|
||||||
# raise ValueError('The configuration did not pass XSD/schema '
|
# raise ValueError('The configuration did not pass XSD/schema '
|
||||||
@ -137,7 +146,7 @@ class Conf(object):
|
|||||||
break
|
break
|
||||||
# We couldn't find a profile with a default name. Try to grab the
|
# We couldn't find a profile with a default name. Try to grab the
|
||||||
# first profile.
|
# first profile.
|
||||||
if not self.profile:
|
if self.profile is None:
|
||||||
# Grab the first profile.
|
# Grab the first profile.
|
||||||
if profiles:
|
if profiles:
|
||||||
self.profile = profile[0]
|
self.profile = profile[0]
|
||||||
@ -150,9 +159,4 @@ class Conf(object):
|
|||||||
def parse_profile(self):
|
def parse_profile(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def _xpath_ref(self, element):
|
|
||||||
data = None
|
|
||||||
# This is incremented each recursive call until we reach
|
|
||||||
# self.max_recurse
|
|
||||||
recurse_cnt = 1
|
|
||||||
return(data)
|
|
||||||
|
515
bdisk/utils.py
515
bdisk/utils.py
@ -0,0 +1,515 @@
|
|||||||
|
import crypt
|
||||||
|
import GPG
|
||||||
|
import hashid
|
||||||
|
import hashlib
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import string
|
||||||
|
import textwrap
|
||||||
|
import uuid
|
||||||
|
import validators
|
||||||
|
import zlib
|
||||||
|
import lxml.etree
|
||||||
|
from collections import OrderedDict
|
||||||
|
from dns import resolver
|
||||||
|
from email.utils import parseaddr as emailparse
|
||||||
|
from passlib.context import CryptContext as cryptctx
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
|
# Supported by all versions of GNU/Linux shadow
|
||||||
|
passlib_schemes = ['des_crypt', 'md5_crypt', 'sha256_crypt', 'sha512_crypt']
|
||||||
|
|
||||||
|
# Build various hash digest name lists
|
||||||
|
digest_schemes = list(hashlib.algorithms_available)
|
||||||
|
# Provided by zlib
|
||||||
|
digest_schemes.append('adler32')
|
||||||
|
digest_schemes.append('crc32')
|
||||||
|
#clean_digest_schemes = sorted(list(set(digest_schemes)))
|
||||||
|
|
||||||
|
crypt_map = {'sha512': crypt.METHOD_SHA512,
|
||||||
|
'sha256': crypt.METHOD_SHA256,
|
||||||
|
'md5': crypt.METHOD_MD5,
|
||||||
|
'des': crypt.METHOD_CRYPT}
|
||||||
|
|
||||||
|
class detect(object):
    """Heuristic detection helpers (hash algorithms, GPG key information)."""

    def __init__(self):
        pass

    def any_hash(self, hash_str):
        """Return a list of digest-scheme names that hash_str could be.

        Candidates come from hashid; only names also present in the
        module-level digest_schemes list (hashlib + zlib) are kept.
        """
        h = hashid.HashID()
        hashes = []
        # Invariant across the loop; build the lowercase lookup set once
        # instead of rebuilding (and re-sorting) it per candidate.
        _hashes = set(s.lower() for s in digest_schemes)
        for i in h.IdentifyHash(hash_str):
            if i.extended:
                continue
            x = i.name
            if x.lower() in ('crc-32', 'ripemd-160', 'sha-1', 'sha-224',
                             'sha-256', 'sha-384', 'sha-512'):
                # hashid hyphenates these names; normalize to hashlib's
                # un-hyphenated spelling.
                x = re.sub('-', '', x.lower())
            if x.lower() in _hashes:
                hashes.append(x)
        return(hashes)

    def password_hash(self, passwd_hash):
        """Return the crypt(3) scheme name for passwd_hash, or None.

        Uses passlib's CryptContext to identify the hash; the trailing
        '_crypt' suffix of passlib's scheme name is stripped.
        """
        _ctx = cryptctx(schemes = passlib_schemes)
        algo = _ctx.identify(passwd_hash)
        if algo:
            return(re.sub('_crypt$', '', algo))
        return(None)

    def gpgkeyID_from_url(self, url):
        """Fetch a signature file from url and return its signing key IDs."""
        # BUG FIX: urlparse() only splits the URL string -- it is not a
        # context manager and its result has no .read(). Actually fetch
        # the signature data instead.
        from urllib.request import urlopen
        with urlopen(url) as u:
            data = u.read()
        g = GPG.GPGHandler()
        key_ids = g.get_sigs(data)
        del(g)
        return(key_ids)

    def gpgkey_info(self, keyID, secret = False):
        """Return {'Full key': fpr, 'User IDs': [...]} for keyID, or None.

        Looks the key up locally first, then retries with external
        (keyserver) keylist mode; the original keylist mode is restored
        before returning.
        """
        def _get_key():
            # None -> key not found; False -> other GPG error.
            key = None
            try:
                key = g.get_key(keyID, secret = secret)
            except GPG.gpg.errors.KeyNotFound:
                return(None)
            except Exception:
                return(False)
            return(key)
        uids = {}
        g = GPG.GPGHandler()
        _orig_kl_mode = g.get_keylist_mode()
        if _orig_kl_mode != GPG.gpg.constants.KEYLIST_MODE_EXTERN:
            _key = _get_key()
            if not _key:
                # Not found locally; retry against external sources.
                g.set_keylist_mode(GPG.gpg.constants.KEYLIST_MODE_EXTERN)
                _key = _get_key()
        else:
            _key = _get_key()
        if not _key:
            g.set_keylist_mode(_orig_kl_mode)
            del(g)
            return(None)
        else:
            uids['Full key'] = _key.fpr
            uids['User IDs'] = []
            for _uid in _key.uids:
                _u = OrderedDict()
                # String attributes (only included when non-empty).
                for attr in ['Name', 'Email', 'Comment']:
                    s = getattr(_uid, attr.lower())
                    if s and s != '':
                        _u[attr] = s
                # Key status flags, normalized to plain bools.
                _u['Invalid'] = (True if _uid.invalid else False)
                _u['Revoked'] = (True if _uid.revoked else False)
                uids['User IDs'].append(_u)
        g.set_keylist_mode(_orig_kl_mode)
        del(g)
        return(uids)

    def supported_hashlib_name(self, name):
        """Return the canonical digest_schemes spelling of name, or None.

        Exact matches are returned as-is; otherwise the first
        case-insensitive match wins.
        """
        # Get any easy ones out of the way first.
        if name in digest_schemes:
            return(name)
        # Otherwise grab the first case-insensitive match.
        _digest_re = re.compile('^{0}$'.format(name.strip()), re.IGNORECASE)
        for h in digest_schemes:
            if _digest_re.search(h):
                return(h)
        return(None)
|
||||||
|
|
||||||
|
class generate(object):
    """Generators for crypt(3) password hashes, salts, and digest names."""

    def __init__(self):
        pass

    def hash_password(self, password, salt = None, algo = crypt.METHOD_SHA512):
        """Return password hashed via crypt(3).

        A falsy salt or the literal string 'auto' causes a fresh salt to
        be generated for algo; any other value is used verbatim.
        """
        _salt = crypt.mksalt(algo) if (not salt or salt == 'auto') else salt
        return(crypt.crypt(password, _salt))

    def hashlib_names(self):
        """Return digest_schemes sorted and de-duplicated case-insensitively.

        The first spelling (in sorted order) of each name is kept.
        """
        hashes = []
        for name in sorted(digest_schemes):
            pattern = re.compile('^{0}$'.format(name), re.IGNORECASE)
            if not any(pattern.search(existing) for existing in hashes):
                hashes.append(name)
        return(hashes)

    def salt(self, algo = 'sha512'):
        """Return a new crypt(3) salt for algo ('sha512'/'sha256'/'md5'/'des')."""
        return(crypt.mksalt(crypt_map[algo]))
|
||||||
|
|
||||||
|
class prompts(object):
    """Interactive console prompt helpers (all read from stdin)."""

    def __init__(self):
        pass

    def confirm_or_no(self, prompt = '', invert = False,
                      usage = '{0} to confirm, otherwise {1}...\n'):
        """Ask for a yes/no confirmation via Enter vs. EOF.

        By default, Enter confirms (returns True) and the EOF keystroke
        (CTRL-d on POSIX, CTRL-z elsewhere) returns False. With
        invert = True the meanings are swapped.

        usage is appended to prompt and explains which keys to use; it
        accepts two format slots: {0} the confirming keystroke and {1}
        the declining keystroke.
        """
        _enter_ks = 'Enter/Return'
        if os.name == 'posix':
            _ks = 'CTRL-d'
        else:  # What does os.name == "java" use?
            _ks = 'CTRL-z'
        if invert:
            _usage = usage.format(_ks, _enter_ks)
        else:
            _usage = usage.format(_enter_ks, _ks)
        try:
            if usage:
                input(prompt + _usage)
            else:
                input(prompt)
        except EOFError:
            # EOF declines normally, confirms when inverted.
            return(bool(invert))
        # Enter confirms normally. BUG FIX: this previously returned True
        # unconditionally, so with invert = True *both* keystrokes
        # confirmed; Enter must decline when inverted.
        return(not invert)

    def hash_select(self, prompt = '', hash_types = None):
        """Prompt for a 1-based numeric selection out of hash_types.

        Returns the selected name, False for non-numeric input, or None
        for an out-of-range index.

        BUG FIX: hash_types previously defaulted to
        generate().hashlib_names() in the signature, which was evaluated
        once at class-definition time and shared (mutably) between calls;
        it is now resolved per call.
        """
        if hash_types is None:
            hash_types = generate().hashlib_names()
        _hash_types = hash_types
        _hash_str = '\n\t'.join(
            ['{0}: {1}'.format(idx, val) for idx, val in enumerate(_hash_types,
                                                                   1)
             ])
        prompt = prompt.format(_hash_str)
        _hash_select = (input(prompt)).strip()
        if not valid().integer(_hash_select):
            return(False)
        try:
            _hash_select = _hash_types[int(_hash_select) - 1]
        except IndexError:
            return(None)
        return(_hash_select)

    def multiline_input(self, prompt = None, continue_str = '> ',
                        end_str = '\n(End signal received)'):
        """Read lines from stdin until EOF; return them joined with \\n.

        prompt is shown before the first line; continue_str before each
        subsequent line; end_str is printed once EOF is received.
        """
        _lines = []
        if prompt:
            # This grabs the first CR/LF.
            _lines.append(input(prompt))
        try:
            while True:
                if continue_str:
                    _lines.append(input(continue_str))
                else:
                    _lines.append(input())
        except EOFError:
            if end_str:
                print(end_str)
        return('\n'.join(_lines))
|
||||||
|
|
||||||
|
class transform(object):
    """Data-munging helpers: flattening, string cleanup, URL parsing."""

    def __init__(self):
        pass

    def flatten_recurse(self, obj, values = None):
        """Flatten nested dicts/lists/strings into a single flat list.

        BUG FIX: values previously defaulted to a mutable [] shared
        across calls, so results leaked from one call into the next; a
        fresh list is now created per top-level call.
        """
        _values = [] if values is None else values
        if isinstance(obj, list):
            _values += obj
        elif isinstance(obj, str):
            _values.append(obj)
        elif isinstance(obj, dict):
            for k in obj:
                self.flatten_recurse(obj[k], values = _values)
        return(_values)

    def no_newlines(self, text_in):
        """Collapse every run of newlines in text_in into a single space."""
        text = re.sub('\n+', ' ', text_in)
        return(text)

    def py2xml(self, value, attrib = True):
        """Render a Python value as XML-friendly text.

        False/'' become "no" (for attributes) or None (for element text);
        other bools become "yes"; strings pass through; anything else is
        returned unchanged for the caller to handle.
        """
        if value in (False, ''):
            return("no" if attrib else None)
        elif isinstance(value, bool):
            # We handle the False case above.
            return("yes")
        elif isinstance(value, str):
            return(value)
        else:
            # We can't do it simply.
            return(value)

    def sanitize_input(self, text_in, no_underscores = False):
        """Strip symbols; turn whitespace into underscores (or remove it)."""
        _ws_repl = '' if no_underscores else '_'
        # First we convert whitespace to underscores (or remove it).
        text_out = re.sub('\s+', _ws_repl, text_in.strip())
        # Then just strip out all remaining non-word symbols.
        text_out = re.sub('[^\w]', '', text_out)
        return(text_out)

    def url_to_dict(self, orig_url, no_None = False):
        """Parse orig_url into a component dict plus a rebuilt 'full_url'.

        Keys: scheme, auth, host, port, path, dest, params, queries,
        fragments, url (the original), full_url (reassembled). With
        no_None = True, missing components become '' instead of None.
        Default ports are looked up from /etc/services.
        """
        def _getuserinfo(uinfo_str):
            # uinfo_str is the (possibly empty) list of netloc pieces
            # before the last '@'.
            if len(uinfo_str) == 0:
                return('' if no_None else None)
            uinfo_str = uinfo_str[0]
            _l = [i.strip() for i in uinfo_str.split(':') if i.strip() != '']
            if len(_l) == 1:
                _l.append('')
            elif len(_l) == 0:
                return('' if no_None else None)
            uinfo = {}
            if not no_None:
                uinfo['user'] = (None if _l[0] == '' else _l[0])
                uinfo['password'] = (None if _l[1] == '' else _l[1])
            else:
                uinfo['user'] = _l[0]
                uinfo['password'] = _l[1]
            return(uinfo)

        def _getdfltport():
            # Build {service: [{'port': n, 'proto': p}, ...]} from
            # /etc/services, with TCP entries sorted first.
            with open('/etc/services', 'r') as f:
                _svcs = f.read()
            _svcs = [i.strip() for i in _svcs.splitlines() if i.strip() != '']
            svcs = {}
            for x in _svcs:
                if re.search('^\s*#', x):
                    continue
                # BUG FIX: the old pattern ('\w\s+\w') matched only
                # single-character fields; keep the first two columns.
                s = re.sub('^\s*(\S+\s+\S+).*$', '\g<1>', x)
                l = [i.strip() for i in s.split()]
                p = (int(l[1].split('/')[0]), l[1].split('/')[1])
                if l[0] not in svcs:
                    svcs[l[0]] = []
                if len(svcs[l[0]]) > 0:
                    # If it has a TCP port, put that first.
                    for idx, val in enumerate(svcs[l[0]]):
                        if val['proto'].lower() == 'tcp':
                            svcs[l[0]].insert(0, svcs[l[0]].pop(idx))
                svcs[l[0]].append({'port': p[0],
                                   'proto': p[1]})
            return(svcs)

        def _subsplitter(in_str, split_char):
            # Split 'k=v<sep>k=v...' into a dict.
            if in_str == '':
                return('' if no_None else None)
            params = {}
            for i in in_str.split(split_char):
                p = [x.strip() for x in i.split('=')]
                params[p[0]] = p[1]
            if not params:
                return('' if no_None else None)
            return(params)

        _dflt_ports = _getdfltport()
        scheme = None
        _scheme_re = re.compile('^([\w+\.-]+)(://.*)', re.IGNORECASE)
        if not _scheme_re.search(orig_url):
            # They probably didn't prefix a URI signifier (RFC3986 § 3.1).
            # We'll add one for them.
            # BUG FIX: this previously read 'url = "http://" + url' with
            # url still unbound (NameError).
            url = 'http://' + orig_url
            scheme = 'http'
        else:
            # urlparse's .scheme? Total trash.
            url = orig_url
            scheme = _scheme_re.sub('\g<1>', orig_url)
        url_split = urlparse(url)
        # Get any userinfo present.
        _auth = url_split.netloc.split('@')[:-1]
        userinfo = _getuserinfo(_auth)
        # Get any port specified (and parse the host at the same time).
        if userinfo:
            # BUG FIX: was url_split.netloc('@') -- calling a string.
            _h_split = url_split.netloc.split('@')[-1]
        else:
            _h_split = url_split.netloc
        _nl_split = _h_split.split(':')
        if len(_nl_split) > 1:
            if userinfo in (None, ''):
                port = int(_nl_split[1])
                host = _nl_split[0]
            else:
                port = int(_nl_split[-1])
                host = _nl_split[-2]
        else:
            if scheme in _dflt_ports:
                port = _dflt_ports[scheme][0]['port']
            else:
                # BUG FIX: the '' branch was a bare expression statement,
                # leaving port unbound when no_None was True.
                port = '' if no_None else None
            host = _nl_split[0]
        # Split out the params, queries, fragments.
        params = _subsplitter(url_split.params, ';')
        queries = _subsplitter(url_split.query, '?')
        fragments = _subsplitter(url_split.fragment, '#')
        if url_split.path == '':
            path = '/'
        else:
            path = os.path.dirname(url_split.path)
        _dest = os.path.basename(url_split.path)
        if not no_None:
            dest = (None if _dest == '' else _dest)
        else:
            dest = _dest
        url = {'scheme': scheme,
               'auth': userinfo,
               'host': host,
               'port': port,
               'path': path,
               'dest': dest,
               'params': params,
               'queries': queries,
               'fragments': fragments,
               'url': orig_url}
        # BUG FIX: the scheme placeholder was never substituted into the
        # literal '{scheme}://'.
        url['full_url'] = '{0}://'.format(scheme)
        if userinfo not in (None, ''):
            # BUG FIX: .format(userinfo) passed the dict positionally;
            # the named fields require keyword expansion.
            url['full_url'] += '{user}:{password}@'.format(**userinfo)
        url['full_url'] += host
        if port not in (None, ''):
            url['full_url'] += ':{0}'.format(port)
        # BUG FIX: dest may be None (TypeError on 'path + dest'); only
        # append it when present.
        url['full_url'] += path
        if dest not in (None, ''):
            url['full_url'] += dest
        # Do these need to be in a specific order?
        if params not in (None, ''):
            _p = ['{0}={1}'.format(k, v) for k, v in params.items()]
            url['full_url'] += ';{0}'.format(';'.join(_p))
        if queries not in (None, ''):
            _q = ['{0}={1}'.format(k, v) for k, v in queries.items()]
            url['full_url'] += '?{0}'.format('?'.join(_q))
        if fragments not in (None, ''):
            _f = ['{0}={1}'.format(k, v) for k, v in fragments.items()]
            url['full_url'] += '#{0}'.format('#'.join(_f))
        return(url)
|
||||||
|
|
||||||
|
class valid(object):
    """Validators for user-supplied values; each returns True or False."""

    def __init__(self):
        pass

    def dns(self, addr):
        # TODO: not implemented yet.
        pass

    def connection(self, conninfo):
        # conninfo should ideally be (host, port)
        # TODO: not implemented yet.
        pass

    def email(self, addr):
        """Return True if the address portion of addr is a valid email."""
        if isinstance(validators.email(emailparse(addr)[1]),
                      validators.utils.ValidationFailure):
            return(False)
        return(True)

    def gpgkeyID(self, key_id):
        """Return True if key_id is an 8-, 16-, or 40-hex-digit GPG key ID."""
        # Condense fingerprints into normalized 40-char "full" key IDs.
        key_id = re.sub('\s+', '', key_id)
        _re_str = ('^(0x)?('
                   '[{HEX}]{{40}}|'
                   '[{HEX}]{{16}}|'
                   '[{HEX}]{{8}}'
                   ')$').format(HEX = string.hexdigits)
        _key_re = re.compile(_re_str)
        if not _key_re.search(key_id):
            return(False)
        return(True)

    def integer(self, num):
        """Return True if num converts cleanly to int."""
        try:
            int(num)
            return(True)
        except ValueError:
            return(False)

    def password(self, passwd):
        """Return True if passwd contains only printable ASCII characters."""
        # https://en.wikipedia.org/wiki/ASCII#Printable_characters
        # https://serverfault.com/a/513243/103116
        _chars = ('!"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ'
                  '[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ ')
        for char in passwd:
            if char not in _chars:
                return(False)
        return(True)

    def password_hash(self, passwd_hash, algo = None):
        """Return True if passwd_hash is a valid crypt(3) hash.

        If algo is not given it is auto-detected via passlib.
        """
        # We need to identify the algorithm if it wasn't provided.
        if not algo:
            # The following are supported on GNU/Linux.
            # "des_crypt" is glibc's crypt() (man 3 crypt).
            # https://passlib.readthedocs.io/en/stable/lib/passlib.context.html
            # Specifically, ...#passlib.context.CryptContext.identify
            _ctx = cryptctx(schemes = passlib_schemes)
            _algo = _ctx.identify(passwd_hash)
            if not _algo:
                return(False)
            else:
                algo = re.sub('_crypt$', '', _algo)
        _ctx = cryptctx(schemes = ['{0}_crypt'.format(algo)])
        if not _ctx.identify(passwd_hash):
            return(False)
        return(True)

    def salt_hash(self, salthash):
        """Return True if salthash looks like a crypt(3) salt/hash prefix."""
        # BUG FIX: crypt_map's *keys* are plain strings with no .ident --
        # the ident characters live on the crypt.METHOD_* values.
        _idents = ''.join([m.ident for m in crypt_map.values() if m.ident])
        # BUG FIX: the quantifier {0,16} collided with str.format's field
        # syntax; build the pattern by concatenation instead. Also the
        # compiled pattern was bound to _regex but searched as 'regex'.
        _regex = re.compile('^(\$[' + _idents + ']\$)?[./0-9A-Za-z]{0,16}\$?')
        if not _regex.search(salthash):
            return(False)
        return(True)

    def posix_filename(self, fname):
        """Return True if fname uses only the POSIX portable filename set."""
        # Note: 2009 spec of POSIX, "3.282 Portable Filename Character Set"
        if len(fname) == 0:
            return(False)
        _chars = (string.ascii_letters + string.digits + '.-_')
        for char in fname:
            if char not in _chars:
                return(False)
        return(True)

    def url(self, url):
        """Return True if url (scheme-prefixed or not) is a valid URL."""
        if not re.search('^[\w+\.-]+://', url):
            # They probably didn't prefix a URI signifier (RFC3986 § 3.1).
            # We'll add one for them.
            url = 'http://' + url
        if isinstance(validators.url(url), validators.utils.ValidationFailure):
            return(False)
        return(True)

    def username(self, uname):
        """Return True if uname is a valid POSIX login name."""
        # https://unix.stackexchange.com/a/435120/284004
        _regex = re.compile('^[a-z_]([a-z0-9_-]{0,31}|[a-z0-9_-]{0,30}\$)$')
        if not _regex.search(uname):
            return(False)
        return(True)

    def uuid(self, uuid_str):
        """Return True if uuid_str is a canonically-formatted UUID string."""
        is_uuid = True
        try:
            # BUG FIX: this previously referenced the undefined name
            # 'uuid_in' instead of the uuid_str parameter.
            u = uuid.UUID(uuid_str)
        except ValueError:
            return(False)
        # Require canonical (lowercase, hyphenated) formatting.
        if not uuid_str == str(u):
            return(False)
        return(is_uuid)
|
5
bin/bdisk.py
Normal file
5
bin/bdisk.py
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
#!/usr/bin/env python3.6
|
||||||
|
|
||||||
|
# PLACEHOLDER - this will be a thin wrapper installed to /usr/bin/bdisk.
|
||||||
|
import argparse
|
||||||
|
import bdisk
|
4
bin/bdiskcfg.py
Normal file
4
bin/bdiskcfg.py
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
#!/usr/bin/env python3.6
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import bdisk.confgen as confgen
|
1
docs/examples/.gitignore
vendored
1
docs/examples/.gitignore
vendored
@ -1 +0,0 @@
|
|||||||
regen_multi.py
|
|
@ -1,4 +1,4 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8" ?>
|
<?xml version='1.0' encoding='UTF-8'?>
|
||||||
<bdisk>
|
<bdisk>
|
||||||
<profile name="default" id="1" uuid="8cdd6bcb-c147-4a63-9779-b5433c510dbc">
|
<profile name="default" id="1" uuid="8cdd6bcb-c147-4a63-9779-b5433c510dbc">
|
||||||
<meta>
|
<meta>
|
||||||
@ -19,7 +19,7 @@
|
|||||||
</dev>
|
</dev>
|
||||||
<uri>https://domain.tld/projname</uri>
|
<uri>https://domain.tld/projname</uri>
|
||||||
<ver>1.0.0</ver>
|
<ver>1.0.0</ver>
|
||||||
<!-- This is the VERY FIRST value parsed, and is required. It controls how many levels of {xpath_ref:} to recurse. -->
|
<!-- This is the VERY FIRST value parsed, and is required. It controls how many levels of {xpath_ref%...} to recurse. -->
|
||||||
<!-- If the maximum level is reached, the substitution will evaluate as blank. -->
|
<!-- If the maximum level is reached, the substitution will evaluate as blank. -->
|
||||||
<max_recurse>5</max_recurse>
|
<max_recurse>5</max_recurse>
|
||||||
</meta>
|
</meta>
|
||||||
@ -30,7 +30,7 @@
|
|||||||
<username>{xpath_ref%//meta/names/uxname/text()}</username>
|
<username>{xpath_ref%//meta/names/uxname/text()}</username>
|
||||||
<!-- You can also use substitution from different profiles: -->
|
<!-- You can also use substitution from different profiles: -->
|
||||||
<!-- <username>{xpath_ref%//profile[@name='another_profile']/meta/names/uxname"}</username> -->
|
<!-- <username>{xpath_ref%//profile[@name='another_profile']/meta/names/uxname"}</username> -->
|
||||||
<name>{xpath_ref%//meta/dev/author/text()}</name>
|
<comment>{xpath_ref%//meta/dev/author/text()}</comment>
|
||||||
<password hashed="no" hash_algo="sha512" salt="auto">testpassword</password>
|
<password hashed="no" hash_algo="sha512" salt="auto">testpassword</password>
|
||||||
</user>
|
</user>
|
||||||
<user sudo="no">
|
<user sudo="no">
|
||||||
@ -43,15 +43,15 @@
|
|||||||
<source arch="x86_64">
|
<source arch="x86_64">
|
||||||
<mirror>http://archlinux.mirror.domain.tld</mirror>
|
<mirror>http://archlinux.mirror.domain.tld</mirror>
|
||||||
<webroot>/iso/latest</webroot>
|
<webroot>/iso/latest</webroot>
|
||||||
<tarball flags="glob,latest">{xpath_ref%../mirror/text()}{xpath_ref%../webroot/text()}/archlinux-bootstrap-*-x86_64.tar.gz</tarball>
|
<tarball flags="latest">{xpath_ref%../mirror/text()}{xpath_ref%../webroot/text()}/{regex%archlinux-bootstrap-[0-9]{4}\.[0-9]{2}\.[0-9]{2}-x86_64\.tar\.gz}</tarball>
|
||||||
<checksum hash="sha1">{xpath_ref%../mirror/text()}{xpath_ref%../webroot/text()}/sha1sums.txt</checksum>
|
<checksum hash_algo="sha1" flags="none">{xpath_ref%../mirror/text()}{xpath_ref%../webroot/text()}/sha1sums.txt</checksum>
|
||||||
<sig keys="7F2D434B9741E8AC" keyserver="hkp://pool.sks-keyservers.net">{xpath_ref%../tarball/text()}.sig</sig>
|
<sig keys="7F2D434B9741E8AC" keyserver="hkp://pool.sks-keyservers.net" flags="latest">{xpath_ref%../tarball/text()}.sig</sig>
|
||||||
</source>
|
</source>
|
||||||
<source arch="i686">
|
<source arch="i686">
|
||||||
<mirror>http://archlinux32.mirror.domain.tld</mirror>
|
<mirror>http://archlinux32.mirror.domain.tld</mirror>
|
||||||
<webroot>/iso/latest</webroot>
|
<webroot>/iso/latest</webroot>
|
||||||
<tarball flags="glob,latest">{xpath_ref%../mirror/text()}/{xpath_ref%../webroot/text()}/archlinux-bootstrap-*-i686.tar.gz</tarball>
|
<tarball flag="regex,latest">{xpath_ref%../mirror/text()}/{xpath_ref%../webroot/text()}/{regex%archlinux-bootstrap-[0-9]{4}\.[0-9]{2}\.[0-9]{2}-i686\.tar\.gz}</tarball>
|
||||||
<checksum hash="sha512">{xpath_ref%../mirror/text()}/{xpath_ref%../webroot/text()}/sha512sums.txt</checksum>
|
<checksum hash_algo="sha512" explicit="yes">cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e</checksum>
|
||||||
<sig keys="248BF41F9BDD61D41D060AE774EDA3C6B06D0506" keyserver="hkp://pool.sks-keyservers.net">{xpath_ref%../tarball/text()}.sig</sig>
|
<sig keys="248BF41F9BDD61D41D060AE774EDA3C6B06D0506" keyserver="hkp://pool.sks-keyservers.net">{xpath_ref%../tarball/text()}.sig</sig>
|
||||||
</source>
|
</source>
|
||||||
</sources>
|
</sources>
|
||||||
@ -59,6 +59,7 @@
|
|||||||
<paths>
|
<paths>
|
||||||
<cache>/var/tmp/{xpath_ref%//meta/names/uxname/text()}</cache>
|
<cache>/var/tmp/{xpath_ref%//meta/names/uxname/text()}</cache>
|
||||||
<chroot>/var/tmp/chroots/{xpath_ref%//meta/names/uxname/text()}</chroot>
|
<chroot>/var/tmp/chroots/{xpath_ref%//meta/names/uxname/text()}</chroot>
|
||||||
|
<overlay>{xpath_ref%../cache/text()}/overlay</overlay>
|
||||||
<templates>~/{xpath_ref%//meta/names/uxname/text()}/templates</templates>
|
<templates>~/{xpath_ref%//meta/names/uxname/text()}/templates</templates>
|
||||||
<mount>/mnt/{xpath_ref%//meta/names/uxname/text()}</mount>
|
<mount>/mnt/{xpath_ref%//meta/names/uxname/text()}</mount>
|
||||||
<distros>~/{xpath_ref%//meta/names/uxname/text()}/distros</distros>
|
<distros>~/{xpath_ref%//meta/names/uxname/text()}/distros</distros>
|
||||||
@ -74,7 +75,7 @@
|
|||||||
<ssl custom="no">
|
<ssl custom="no">
|
||||||
<!-- http://ipxe.org/crypto -->
|
<!-- http://ipxe.org/crypto -->
|
||||||
<ca>
|
<ca>
|
||||||
<cert>{xpath_ref%build/paths/ssl/text()}/ca.crt</cert>
|
<cert>{xpath_ref%//build/paths/ssl/text()}/ca.crt</cert>
|
||||||
<!-- If csr is self-enclosed (<csr />), we'll just generate and use a CSR in-memory.
|
<!-- If csr is self-enclosed (<csr />), we'll just generate and use a CSR in-memory.
|
||||||
Assuming we need to generate a certificate, anyways.
|
Assuming we need to generate a certificate, anyways.
|
||||||
If you want to write it out to disk (for debugging, etc.) OR use one already generated,
|
If you want to write it out to disk (for debugging, etc.) OR use one already generated,
|
||||||
@ -82,7 +83,7 @@
|
|||||||
e.g.:
|
e.g.:
|
||||||
<csr>{xpath_ref%build/paths/ssl/text()}/ca.csr</csr> -->
|
<csr>{xpath_ref%build/paths/ssl/text()}/ca.csr</csr> -->
|
||||||
<csr/>
|
<csr/>
|
||||||
<key des="no" passphrase="none">{xpath_ref%build/paths/ssl/text()}/ca.key</key>
|
<key des="no" passphrase="none">{xpath_ref%//build/paths/ssl/text()}/ca.key</key>
|
||||||
<subject>
|
<subject>
|
||||||
<commonName>domain.tld</commonName>
|
<commonName>domain.tld</commonName>
|
||||||
<countryName>XX</countryName>
|
<countryName>XX</countryName>
|
||||||
@ -90,13 +91,13 @@
|
|||||||
<stateOrProvinceName>Some State</stateOrProvinceName>
|
<stateOrProvinceName>Some State</stateOrProvinceName>
|
||||||
<organization>Some Org, Inc.</organization>
|
<organization>Some Org, Inc.</organization>
|
||||||
<organizationalUnitName>Department Name</organizationalUnitName>
|
<organizationalUnitName>Department Name</organizationalUnitName>
|
||||||
<emailAddress>{xpath_ref%../../../../../../meta/names/dev/email/text()}</emailAddress>
|
<emailAddress>{xpath_ref%//meta/dev/email/text()}</emailAddress>
|
||||||
</subject>
|
</subject>
|
||||||
</ca>
|
</ca>
|
||||||
<server>
|
<server>
|
||||||
<cert>{xpath_ref%build/paths/ssl/text()}/{xpath_ref%//meta/names/uxname/text()}.crt</cert>
|
<cert>{xpath_ref%//build/paths/ssl/text()}/{xpath_ref%//meta/names/uxname/text()}.crt</cert>
|
||||||
<csr/>
|
<csr/>
|
||||||
<key des="no" passphrase="none">{xpath_ref%build/paths/ssl/text()}/{xpath_ref%//meta/names/uxname/text()}.key</key>
|
<key des="no" passphrase="none">{xpath_ref%//build/paths/ssl/text()}/{xpath_ref%//meta/names/uxname/text()}.key</key>
|
||||||
<subject>
|
<subject>
|
||||||
<commonName>domain.tld (client)</commonName>
|
<commonName>domain.tld (client)</commonName>
|
||||||
<countryName>XX</countryName>
|
<countryName>XX</countryName>
|
||||||
@ -104,11 +105,11 @@
|
|||||||
<stateOrProvinceName>Some State</stateOrProvinceName>
|
<stateOrProvinceName>Some State</stateOrProvinceName>
|
||||||
<organization>Some Org, Inc.</organization>
|
<organization>Some Org, Inc.</organization>
|
||||||
<organizationalUnitName>Department Name</organizationalUnitName>
|
<organizationalUnitName>Department Name</organizationalUnitName>
|
||||||
<emailAddress>{xpath_ref%../../../../../../meta/names/dev/email/text()}</emailAddress>
|
<emailAddress>{xpath_ref%//meta/dev/email/text()}</emailAddress>
|
||||||
</subject>
|
</subject>
|
||||||
</server>
|
</server>
|
||||||
</ssl>
|
</ssl>
|
||||||
<uri>{xpath_ref%meta/dev/website/text()}/ipxe</uri>
|
<uri>{xpath_ref%//meta/dev/website/text()}/ipxe</uri>
|
||||||
</ipxe>
|
</ipxe>
|
||||||
<gpg keyid="none" gnupghome="none" publish="no" sync="yes"/>
|
<gpg keyid="none" gnupghome="none" publish="no" sync="yes"/>
|
||||||
<sync>
|
<sync>
|
||||||
@ -145,23 +146,23 @@
|
|||||||
<rootpass hashed="no">atotallyinsecurepassword</rootpass>
|
<rootpass hashed="no">atotallyinsecurepassword</rootpass>
|
||||||
<user sudo="no">
|
<user sudo="no">
|
||||||
<username>testuser</username>
|
<username>testuser</username>
|
||||||
<name>Test User</name>
|
<comment>Test User</comment>
|
||||||
<password hashed="no" hash_algo="sha512" salt="auto">testpassword</password>
|
<password hashed="no" hash_algo="sha512" salt="auto">atestpassword</password>
|
||||||
</user>
|
</user>
|
||||||
</accounts>
|
</accounts>
|
||||||
<sources>
|
<sources>
|
||||||
<source arch="x86_64">
|
<source arch="x86_64">
|
||||||
<mirror>http://archlinux.mirror.domain.tld</mirror>
|
<mirror>http://archlinux.mirror.domain.tld</mirror>
|
||||||
<webroot>/iso/latest</webroot>
|
<webroot>/iso/latest</webroot>
|
||||||
<tarball flags="glob,latest">{xpath_ref%../mirror/text()}{xpath_ref%../webroot/text()}/archlinux-bootstrap-*-x86_64.tar.gz</tarball>
|
<tarball flags="latest">{xpath_ref%../mirror/text()}{xpath_ref%../webroot/text()}/{regex%archlinux-bootstrap-[0-9]{4}\.[0-9]{2}\.[0-9]{2}-x86_64\.tar\.gz}</tarball>
|
||||||
<checksum hash="sha1">{xpath_ref%../mirror/text()}{xpath_ref%../webroot/text()}/sha1sums.txt</checksum>
|
<checksum hash_algo="sha1" flags="none">{xpath_ref%../mirror/text()}{xpath_ref%../webroot/text()}/sha1sums.txt</checksum>
|
||||||
<sig keys="7F2D434B9741E8AC" keyserver="hkp://pool.sks-keyservers.net">{xpath_ref%../tarball/text()}.sig</sig>
|
<sig keys="7F2D434B9741E8AC" keyserver="hkp://pool.sks-keyservers.net" flags="latest">{xpath_ref%../tarball/text()}.sig</sig>
|
||||||
</source>
|
</source>
|
||||||
<source arch="i686">
|
<source arch="i686">
|
||||||
<mirror>http://archlinux32.mirror.domain.tld</mirror>
|
<mirror>http://archlinux32.mirror.domain.tld</mirror>
|
||||||
<webroot>/iso/latest</webroot>
|
<webroot>/iso/latest</webroot>
|
||||||
<tarball flags="glob,latest">{xpath_ref%../mirror/text()}/{xpath_ref%../webroot/text()}/archlinux-bootstrap-*-i686.tar.gz</tarball>
|
<tarball flag="regex,latest">{xpath_ref%../mirror/text()}/{xpath_ref%../webroot/text()}/{regex%archlinux-bootstrap-[0-9]{4}\.[0-9]{2}\.[0-9]{2}-i686\.tar\.gz}</tarball>
|
||||||
<checksum hash="sha512">{xpath_ref%../mirror/text()}/{xpath_ref%../webroot/text()}/sha512sums.txt</checksum>
|
<checksum hash_algo="sha512" explicit="yes">cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e</checksum>
|
||||||
<sig keys="248BF41F9BDD61D41D060AE774EDA3C6B06D0506" keyserver="hkp://pool.sks-keyservers.net">{xpath_ref%../tarball/text()}.sig</sig>
|
<sig keys="248BF41F9BDD61D41D060AE774EDA3C6B06D0506" keyserver="hkp://pool.sks-keyservers.net">{xpath_ref%../tarball/text()}.sig</sig>
|
||||||
</source>
|
</source>
|
||||||
</sources>
|
</sources>
|
||||||
@ -169,6 +170,7 @@
|
|||||||
<paths>
|
<paths>
|
||||||
<cache>/var/tmp/{xpath_ref%//meta/names/uxname/text()}</cache>
|
<cache>/var/tmp/{xpath_ref%//meta/names/uxname/text()}</cache>
|
||||||
<chroot>/var/tmp/chroots/{xpath_ref%//meta/names/uxname/text()}</chroot>
|
<chroot>/var/tmp/chroots/{xpath_ref%//meta/names/uxname/text()}</chroot>
|
||||||
|
<overlay>{xpath_ref%../cache/text()}/overlay</overlay>
|
||||||
<templates>~/{xpath_ref%//meta/names/uxname/text()}/templates</templates>
|
<templates>~/{xpath_ref%//meta/names/uxname/text()}/templates</templates>
|
||||||
<mount>/mnt/{xpath_ref%//meta/names/uxname/text()}</mount>
|
<mount>/mnt/{xpath_ref%//meta/names/uxname/text()}</mount>
|
||||||
<distros>~/{xpath_ref%//meta/names/uxname/text()}/distros</distros>
|
<distros>~/{xpath_ref%//meta/names/uxname/text()}/distros</distros>
|
||||||
@ -183,9 +185,9 @@
|
|||||||
<ipxe sign="yes" sync="yes" iso="yes" rsync="yes">
|
<ipxe sign="yes" sync="yes" iso="yes" rsync="yes">
|
||||||
<ssl custom="no">
|
<ssl custom="no">
|
||||||
<ca>
|
<ca>
|
||||||
<cert>{xpath_ref%build/paths/ssl/text()}/ca.crt</cert>
|
<cert>{xpath_ref%//build/paths/ssl/text()}/ca.crt</cert>
|
||||||
<csr/>
|
<csr/>
|
||||||
<key des="no" passphrase="none">{xpath_ref%build/paths/ssl/text()}/ca.key</key>
|
<key des="no" passphrase="none">{xpath_ref%//build/paths/ssl/text()}/ca.key</key>
|
||||||
<subject>
|
<subject>
|
||||||
<commonName>domain.tld</commonName>
|
<commonName>domain.tld</commonName>
|
||||||
<countryName>XX</countryName>
|
<countryName>XX</countryName>
|
||||||
@ -193,13 +195,13 @@
|
|||||||
<stateOrProvinceName>Some State</stateOrProvinceName>
|
<stateOrProvinceName>Some State</stateOrProvinceName>
|
||||||
<organization>Some Org, Inc.</organization>
|
<organization>Some Org, Inc.</organization>
|
||||||
<organizationalUnitName>Department Name</organizationalUnitName>
|
<organizationalUnitName>Department Name</organizationalUnitName>
|
||||||
<emailAddress>{xpath_ref%../../../../../../meta/names/dev/email/text()}</emailAddress>
|
<emailAddress>{xpath_ref%//meta/dev/email/text()}</emailAddress>
|
||||||
</subject>
|
</subject>
|
||||||
</ca>
|
</ca>
|
||||||
<server>
|
<server>
|
||||||
<cert>{xpath_ref%build/paths/ssl/text()}/{xpath_ref%//meta/names/uxname/text()}.crt</cert>
|
<cert>{xpath_ref%//build/paths/ssl/text()}/{xpath_ref%//meta/names/uxname/text()}.crt</cert>
|
||||||
<csr/>
|
<csr/>
|
||||||
<key des="no" passphrase="none">{xpath_ref%build/paths/ssl/text()}/{xpath_ref%//meta/names/uxname/text()}.key</key>
|
<key des="no" passphrase="none">{xpath_ref%//build/paths/ssl/text()}/{xpath_ref%//meta/names/uxname/text()}.key</key>
|
||||||
<subject>
|
<subject>
|
||||||
<commonName>domain.tld (client)</commonName>
|
<commonName>domain.tld (client)</commonName>
|
||||||
<countryName>XX</countryName>
|
<countryName>XX</countryName>
|
||||||
@ -207,11 +209,11 @@
|
|||||||
<stateOrProvinceName>Some State</stateOrProvinceName>
|
<stateOrProvinceName>Some State</stateOrProvinceName>
|
||||||
<organization>Some Org, Inc.</organization>
|
<organization>Some Org, Inc.</organization>
|
||||||
<organizationalUnitName>Department Name</organizationalUnitName>
|
<organizationalUnitName>Department Name</organizationalUnitName>
|
||||||
<emailAddress>{xpath_ref%../../../../../../meta/names/dev/email/text()}</emailAddress>
|
<emailAddress>{xpath_ref%//meta/dev/email/text()}</emailAddress>
|
||||||
</subject>
|
</subject>
|
||||||
</server>
|
</server>
|
||||||
</ssl>
|
</ssl>
|
||||||
<uri>{xpath_ref%meta/dev/website/text()}/ipxe</uri>
|
<uri>{xpath_ref%//meta/dev/website/text()}/ipxe</uri>
|
||||||
</ipxe>
|
</ipxe>
|
||||||
<gpg keyid="none" gnupghome="none" publish="no" sync="yes"/>
|
<gpg keyid="none" gnupghome="none" publish="no" sync="yes"/>
|
||||||
<sync>
|
<sync>
|
||||||
|
@ -33,8 +33,8 @@ for e in meta.iter():
|
|||||||
|
|
||||||
accounts_tags = {'rootpass': 'atotallyinsecurepassword',
|
accounts_tags = {'rootpass': 'atotallyinsecurepassword',
|
||||||
'username': 'testuser',
|
'username': 'testuser',
|
||||||
'name': 'Test User',
|
'comment': 'Test User',
|
||||||
'passowrd': 'atestpassword'}
|
'password': 'atestpassword'}
|
||||||
accounts = alt_profile.xpath('/profile/accounts')[0]
|
accounts = alt_profile.xpath('/profile/accounts')[0]
|
||||||
for e in accounts.iter():
|
for e in accounts.iter():
|
||||||
if e.tag in accounts_tags:
|
if e.tag in accounts_tags:
|
||||||
@ -47,7 +47,8 @@ for e in accounts.iter():
|
|||||||
accounts.remove(accounts[2])
|
accounts.remove(accounts[2])
|
||||||
xml.append(alt_profile)
|
xml.append(alt_profile)
|
||||||
|
|
||||||
#print(etree.tostring(xml).decode('utf-8'))
|
|
||||||
with open('multi_profile.xml', 'wb') as f:
|
with open('multi_profile.xml', 'wb') as f:
|
||||||
f.write(b'<?xml version="1.0" encoding="UTF-8" ?>\n' + etree.tostring(xml,
|
f.write(etree.tostring(xml,
|
||||||
pretty_print = True))
|
pretty_print = True,
|
||||||
|
encoding = 'UTF-8',
|
||||||
|
xml_declaration = True))
|
||||||
|
@ -19,7 +19,7 @@
|
|||||||
</dev>
|
</dev>
|
||||||
<uri>https://domain.tld/projname</uri>
|
<uri>https://domain.tld/projname</uri>
|
||||||
<ver>1.0.0</ver>
|
<ver>1.0.0</ver>
|
||||||
<!-- This is the VERY FIRST value parsed, and is required. It controls how many levels of {xpath_ref:} to recurse. -->
|
<!-- This is the VERY FIRST value parsed, and is required. It controls how many levels of {xpath_ref%...} to recurse. -->
|
||||||
<!-- If the maximum level is reached, the substitution will evaluate as blank. -->
|
<!-- If the maximum level is reached, the substitution will evaluate as blank. -->
|
||||||
<max_recurse>5</max_recurse>
|
<max_recurse>5</max_recurse>
|
||||||
</meta>
|
</meta>
|
||||||
@ -30,7 +30,7 @@
|
|||||||
<username>{xpath_ref%//meta/names/uxname/text()}</username>
|
<username>{xpath_ref%//meta/names/uxname/text()}</username>
|
||||||
<!-- You can also use substitution from different profiles: -->
|
<!-- You can also use substitution from different profiles: -->
|
||||||
<!-- <username>{xpath_ref%//profile[@name='another_profile']/meta/names/uxname"}</username> -->
|
<!-- <username>{xpath_ref%//profile[@name='another_profile']/meta/names/uxname"}</username> -->
|
||||||
<name>{xpath_ref%//meta/dev/author/text()}</name>
|
<comment>{xpath_ref%//meta/dev/author/text()}</comment>
|
||||||
<password hashed="no"
|
<password hashed="no"
|
||||||
hash_algo="sha512"
|
hash_algo="sha512"
|
||||||
salt="auto">testpassword</password>
|
salt="auto">testpassword</password>
|
||||||
@ -47,16 +47,17 @@
|
|||||||
<source arch="x86_64">
|
<source arch="x86_64">
|
||||||
<mirror>http://archlinux.mirror.domain.tld</mirror>
|
<mirror>http://archlinux.mirror.domain.tld</mirror>
|
||||||
<webroot>/iso/latest</webroot>
|
<webroot>/iso/latest</webroot>
|
||||||
<tarball flags="glob,latest">{xpath_ref%../mirror/text()}{xpath_ref%../webroot/text()}/archlinux-bootstrap-*-x86_64.tar.gz</tarball>
|
<tarball flags="latest">{xpath_ref%../mirror/text()}{xpath_ref%../webroot/text()}/{regex%archlinux-bootstrap-[0-9]{4}\.[0-9]{2}\.[0-9]{2}-x86_64\.tar\.gz}</tarball>
|
||||||
<checksum hash="sha1">{xpath_ref%../mirror/text()}{xpath_ref%../webroot/text()}/sha1sums.txt</checksum>
|
<checksum hash_algo="sha1" flags="none" >{xpath_ref%../mirror/text()}{xpath_ref%../webroot/text()}/sha1sums.txt</checksum>
|
||||||
<sig keys="7F2D434B9741E8AC"
|
<sig keys="7F2D434B9741E8AC"
|
||||||
keyserver="hkp://pool.sks-keyservers.net">{xpath_ref%../tarball/text()}.sig</sig>
|
keyserver="hkp://pool.sks-keyservers.net"
|
||||||
|
flags="latest">{xpath_ref%../tarball/text()}.sig</sig>
|
||||||
</source>
|
</source>
|
||||||
<source arch="i686">
|
<source arch="i686">
|
||||||
<mirror>http://archlinux32.mirror.domain.tld</mirror>
|
<mirror>http://archlinux32.mirror.domain.tld</mirror>
|
||||||
<webroot>/iso/latest</webroot>
|
<webroot>/iso/latest</webroot>
|
||||||
<tarball flags="glob,latest">{xpath_ref%../mirror/text()}/{xpath_ref%../webroot/text()}/archlinux-bootstrap-*-i686.tar.gz</tarball>
|
<tarball flag="regex,latest">{xpath_ref%../mirror/text()}/{xpath_ref%../webroot/text()}/{regex%archlinux-bootstrap-[0-9]{4}\.[0-9]{2}\.[0-9]{2}-i686\.tar\.gz}</tarball>
|
||||||
<checksum hash="sha512">{xpath_ref%../mirror/text()}/{xpath_ref%../webroot/text()}/sha512sums.txt</checksum>
|
<checksum hash_algo="sha512" explicit="yes">cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e</checksum>
|
||||||
<sig keys="248BF41F9BDD61D41D060AE774EDA3C6B06D0506"
|
<sig keys="248BF41F9BDD61D41D060AE774EDA3C6B06D0506"
|
||||||
keyserver="hkp://pool.sks-keyservers.net">{xpath_ref%../tarball/text()}.sig</sig>
|
keyserver="hkp://pool.sks-keyservers.net">{xpath_ref%../tarball/text()}.sig</sig>
|
||||||
</source>
|
</source>
|
||||||
@ -65,6 +66,7 @@
|
|||||||
<paths>
|
<paths>
|
||||||
<cache>/var/tmp/{xpath_ref%//meta/names/uxname/text()}</cache>
|
<cache>/var/tmp/{xpath_ref%//meta/names/uxname/text()}</cache>
|
||||||
<chroot>/var/tmp/chroots/{xpath_ref%//meta/names/uxname/text()}</chroot>
|
<chroot>/var/tmp/chroots/{xpath_ref%//meta/names/uxname/text()}</chroot>
|
||||||
|
<overlay>{xpath_ref%../cache/text()}/overlay</overlay>
|
||||||
<templates>~/{xpath_ref%//meta/names/uxname/text()}/templates</templates>
|
<templates>~/{xpath_ref%//meta/names/uxname/text()}/templates</templates>
|
||||||
<mount>/mnt/{xpath_ref%//meta/names/uxname/text()}</mount>
|
<mount>/mnt/{xpath_ref%//meta/names/uxname/text()}</mount>
|
||||||
<distros>~/{xpath_ref%//meta/names/uxname/text()}/distros</distros>
|
<distros>~/{xpath_ref%//meta/names/uxname/text()}/distros</distros>
|
||||||
@ -80,7 +82,7 @@
|
|||||||
<ssl custom="no">
|
<ssl custom="no">
|
||||||
<!-- http://ipxe.org/crypto -->
|
<!-- http://ipxe.org/crypto -->
|
||||||
<ca>
|
<ca>
|
||||||
<cert>{xpath_ref%build/paths/ssl/text()}/ca.crt</cert>
|
<cert>{xpath_ref%//build/paths/ssl/text()}/ca.crt</cert>
|
||||||
<!-- If csr is self-enclosed (<csr />), we'll just generate and use a CSR in-memory.
|
<!-- If csr is self-enclosed (<csr />), we'll just generate and use a CSR in-memory.
|
||||||
Assuming we need to generate a certificate, anyways.
|
Assuming we need to generate a certificate, anyways.
|
||||||
If you want to write it out to disk (for debugging, etc.) OR use one already generated,
|
If you want to write it out to disk (for debugging, etc.) OR use one already generated,
|
||||||
@ -88,7 +90,7 @@
|
|||||||
e.g.:
|
e.g.:
|
||||||
<csr>{xpath_ref%build/paths/ssl/text()}/ca.csr</csr> -->
|
<csr>{xpath_ref%build/paths/ssl/text()}/ca.csr</csr> -->
|
||||||
<csr />
|
<csr />
|
||||||
<key des="no" passphrase="none">{xpath_ref%build/paths/ssl/text()}/ca.key</key>
|
<key des="no" passphrase="none">{xpath_ref%//build/paths/ssl/text()}/ca.key</key>
|
||||||
<subject>
|
<subject>
|
||||||
<commonName>domain.tld</commonName>
|
<commonName>domain.tld</commonName>
|
||||||
<countryName>XX</countryName>
|
<countryName>XX</countryName>
|
||||||
@ -96,13 +98,13 @@
|
|||||||
<stateOrProvinceName>Some State</stateOrProvinceName>
|
<stateOrProvinceName>Some State</stateOrProvinceName>
|
||||||
<organization>Some Org, Inc.</organization>
|
<organization>Some Org, Inc.</organization>
|
||||||
<organizationalUnitName>Department Name</organizationalUnitName>
|
<organizationalUnitName>Department Name</organizationalUnitName>
|
||||||
<emailAddress>{xpath_ref%../../../../../../meta/names/dev/email/text()}</emailAddress>
|
<emailAddress>{xpath_ref%//meta/dev/email/text()}</emailAddress>
|
||||||
</subject>
|
</subject>
|
||||||
</ca>
|
</ca>
|
||||||
<server>
|
<server>
|
||||||
<cert>{xpath_ref%build/paths/ssl/text()}/{xpath_ref%//meta/names/uxname/text()}.crt</cert>
|
<cert>{xpath_ref%//build/paths/ssl/text()}/{xpath_ref%//meta/names/uxname/text()}.crt</cert>
|
||||||
<csr />
|
<csr />
|
||||||
<key des="no" passphrase="none">{xpath_ref%build/paths/ssl/text()}/{xpath_ref%//meta/names/uxname/text()}.key</key>
|
<key des="no" passphrase="none">{xpath_ref%//build/paths/ssl/text()}/{xpath_ref%//meta/names/uxname/text()}.key</key>
|
||||||
<subject>
|
<subject>
|
||||||
<commonName>domain.tld (client)</commonName>
|
<commonName>domain.tld (client)</commonName>
|
||||||
<countryName>XX</countryName>
|
<countryName>XX</countryName>
|
||||||
@ -110,11 +112,11 @@
|
|||||||
<stateOrProvinceName>Some State</stateOrProvinceName>
|
<stateOrProvinceName>Some State</stateOrProvinceName>
|
||||||
<organization>Some Org, Inc.</organization>
|
<organization>Some Org, Inc.</organization>
|
||||||
<organizationalUnitName>Department Name</organizationalUnitName>
|
<organizationalUnitName>Department Name</organizationalUnitName>
|
||||||
<emailAddress>{xpath_ref%../../../../../../meta/names/dev/email/text()}</emailAddress>
|
<emailAddress>{xpath_ref%//meta/dev/email/text()}</emailAddress>
|
||||||
</subject>
|
</subject>
|
||||||
</server>
|
</server>
|
||||||
</ssl>
|
</ssl>
|
||||||
<uri>{xpath_ref%meta/dev/website/text()}/ipxe</uri>
|
<uri>{xpath_ref%//meta/dev/website/text()}/ipxe</uri>
|
||||||
</ipxe>
|
</ipxe>
|
||||||
<gpg keyid="none" gnupghome="none" publish="no" sync="yes" />
|
<gpg keyid="none" gnupghome="none" publish="no" sync="yes" />
|
||||||
<sync>
|
<sync>
|
||||||
|
Loading…
Reference in New Issue
Block a user