Adding split-architecture source tarball support. Untested and probably not done yet, so not tagging a release.
This commit is contained in:
@@ -13,7 +13,11 @@ def genGPG(conf):
|
||||
bdisk = conf['bdisk']
|
||||
gpghome = conf['gpg']['mygpghome']
|
||||
distkey = build['gpgkey']
|
||||
gpgkeyserver = build['gpgkeyserver']
|
||||
gpgkeyserver = []
|
||||
for a in conf['build']['arch']:
|
||||
keysrv = conf['src'][a]['gpgkeyserver']
|
||||
if keysrv and (keysrv not in gpgkeyserver):
|
||||
gpgkeyserver.append(keysrv)
|
||||
templates_dir = '{0}/extra/templates'.format(build['basedir'])
|
||||
mykey = False
|
||||
pkeys = []
|
||||
@@ -31,16 +35,17 @@ def genGPG(conf):
|
||||
os.environ['GNUPGHOME'] = gpghome
|
||||
gpg = gpgme.Context()
|
||||
# do we need to add a keyserver?
|
||||
if gpgkeyserver != '':
|
||||
if len(gpgkeyserver) != 0:
|
||||
dirmgr = '{0}/dirmngr.conf'.format(gpghome)
|
||||
if os.path.isfile(dirmgr):
|
||||
with open(dirmgr, 'r+') as f:
|
||||
findme = any(gpgkeyserver in line for line in f)
|
||||
if not findme:
|
||||
f.seek(0, os.SEEK_END)
|
||||
f.write("\n# Added by {0}.\nkeyserver {1}\n".format(
|
||||
bdisk['pname'],
|
||||
gpgkeyserver))
|
||||
for s in gpgkeyserver:
|
||||
if os.path.isfile(dirmgr):
|
||||
with open(dirmgr, 'r+') as f:
|
||||
findme = any(s in line for line in f)
|
||||
if not findme:
|
||||
f.seek(0, os.SEEK_END)
|
||||
f.write("\n# Added by {0}.\nkeyserver {1}\n".format(
|
||||
bdisk['pname'],
|
||||
s))
|
||||
if mykey:
|
||||
try:
|
||||
privkey = gpg.get_key(mykey, True)
|
||||
@@ -62,15 +67,16 @@ def genGPG(conf):
|
||||
privkey = gpg.get_key(gpg.genkey(tpl_out).fpr, True)
|
||||
pkeys.append(privkey)
|
||||
# do we need to add a keyserver? this is for the freshly-generated GNUPGHOME
|
||||
if build['gpgkeyserver'] != '':
|
||||
if len(gpgkeyserver) != 0:
|
||||
dirmgr = '{0}/dirmngr.conf'.format(gpghome)
|
||||
with open(dirmgr, 'r+') as f:
|
||||
findme = any(gpgkeyserver in line for line in f)
|
||||
if not findme:
|
||||
f.seek(0, os.SEEK_END)
|
||||
f.write("\n# Added by {0}.\nkeyserver {1}\n".format(
|
||||
bdisk['pname'],
|
||||
build['gpgkeyserver']))
|
||||
for s in gpgkeyserver:
|
||||
with open(dirmgr, 'r+') as f:
|
||||
findme = any(s in line for line in f)
|
||||
if not findme:
|
||||
f.seek(0, os.SEEK_END)
|
||||
f.write("\n# Added by {0}.\nkeyserver {1}\n".format(
|
||||
bdisk['pname'],
|
||||
s))
|
||||
gpg.signers = pkeys
|
||||
# Now we try to find and add the key for the base image.
|
||||
gpg.keylist_mode = gpgme.KEYLIST_MODE_EXTERN # remote (keyserver)
|
||||
@@ -125,7 +131,7 @@ def killStaleAgent(conf):
|
||||
psutil.Process(p).terminate()
|
||||
|
||||
def signIMG(path, conf):
|
||||
if conf['build']['gpg']:
|
||||
if conf['build']['sign']:
|
||||
# Do we want to kill off any stale gpg-agents? (So we spawn a new one)
|
||||
# Requires further testing.
|
||||
#killStaleAgent()
|
||||
|
||||
@@ -68,9 +68,9 @@ def parseConfig(confs):
|
||||
config_dict = {s:dict(config.items(s)) for s in config.sections()}
|
||||
# Convert the booleans to pythonic booleans in the dict...
|
||||
config_dict['bdisk']['user'] = config['bdisk'].getboolean('user')
|
||||
config_dict['build']['gpg'] = config['build'].getboolean('gpg')
|
||||
config_dict['build']['i_am_a_racecar'] = config['build'].getboolean('i_am_a_racecar')
|
||||
config_dict['build']['ipxe'] = config['build'].getboolean('ipxe')
|
||||
config_dict['build']['sign'] = config['build'].getboolean('sign')
|
||||
config_dict['build']['multiarch'] = (config_dict['build']['multiarch']).lower()
|
||||
config_dict['ipxe']['iso'] = config['ipxe'].getboolean('iso')
|
||||
config_dict['ipxe']['usb'] = config['ipxe'].getboolean('usb')
|
||||
@@ -126,16 +126,21 @@ def parseConfig(confs):
|
||||
config_dict['build']['multiarch']))
|
||||
## VALIDATORS ##
|
||||
# Validate bootstrap mirror
|
||||
if (validators.domain(config_dict['build']['mirror']) or validators.ipv4(
|
||||
config_dict['build']['mirror']) or validatords.ipv6(
|
||||
config_dict['build']['mirror'])):
|
||||
try:
|
||||
getaddrinfo(config_dict['build']['mirror'], None)
|
||||
except:
|
||||
exit(('{0}: ERROR: {1} does not resolve and cannot be used as a ' +
|
||||
'mirror for the bootstrap tarballs. Check your configuration.').format(
|
||||
datetime.datetime.now(),
|
||||
config_dict['build']['host']))
|
||||
config_dict['src'] = {}
|
||||
for a in config_dict['build']['arch']:
|
||||
config_dict['src'][a] = config_dict['source_' + a]
|
||||
if config_dict['src'][a]['enabled']:
|
||||
if (validators.domain(config_dict['src'][a]['mirror']) or validators.ipv4(
|
||||
config_dict['src'][a]['mirror']) or validatords.ipv6(
|
||||
config_dict['src'][a]['mirror'])):
|
||||
try:
|
||||
getaddrinfo(config_dict['src'][a]['mirror'], None)
|
||||
except:
|
||||
exit(('{0}: ERROR: {1} does not resolve and cannot be used as a ' +
|
||||
'mirror for the bootstrap tarballs. Check your configuration.').format(
|
||||
datetime.datetime.now(),
|
||||
config_dict['src'][a]['host']))
|
||||
config_dict['src'][a]['gpg'] = config['source_' + a].getboolean('gpg')
|
||||
# Are we rsyncing? If so, validate the rsync host.
|
||||
# Works for IP address too. It does NOT check to see if we can
|
||||
# actually *rsync* to it; that'll come later.
|
||||
|
||||
@@ -25,27 +25,36 @@ def dirChk(conf):
|
||||
def downloadTarball(conf):
|
||||
build = conf['build']
|
||||
dlpath = build['dlpath']
|
||||
src = conf['src']
|
||||
arch = build['arch']
|
||||
#mirror = 'http://mirrors.kernel.org/archlinux'
|
||||
mirror = build['mirrorproto'] + '://' + build['mirror']
|
||||
rlsdir = mirror + build['mirrorpath']
|
||||
sha_in = urlopen(mirror + build['mirrorchksum'])
|
||||
# returns path/filename e.g. /some/path/to/file.tar.gz
|
||||
# we use .gnupg since we'll need it later.
|
||||
os.makedirs(dlpath + '/.gnupg', exist_ok = True)
|
||||
tarball_path = {}
|
||||
for x in arch:
|
||||
tarball_path[x] = dlpath + '/.latest.' + x + '.tar'
|
||||
sha1sums = sha_in.read()
|
||||
sha_in.close()
|
||||
sha_raw = sha1sums.decode("utf-8")
|
||||
sha_list = list(filter(None, sha_raw.split('\n')))
|
||||
sha_dict = {x.split()[1]: x.split()[0] for x in sha_list}
|
||||
# all that lousy work just to get a sha1 sum. okay. so.
|
||||
for a in arch:
|
||||
locsrc = conf['source_' + a]
|
||||
mirror = locsrc['mirrorproto'] + '://' + locsrc['mirror']
|
||||
rlsdir = mirror + locsrc['mirrorpath']
|
||||
if locsrc['mirrorchksum'] != '':
|
||||
if not locsrc['chksumtype']:
|
||||
exit(("{0}: source_{1}:chksumtype is unset!".format(
|
||||
datetime.datetime.now(),
|
||||
a))
|
||||
hash_type = locsrc['chksumtype']
|
||||
hash_in = urlopen(mirror + locsrc['mirrorchksum'])
|
||||
hashsums = hash_in.read()
|
||||
hash_in.close()
|
||||
hash_raw = hashsums.decode("utf-8")
|
||||
hash_list = list(filter(None, hash_raw.split('\n')))
|
||||
hash_dict = {x.split()[1]: x.split()[0] for x in hash_list}
|
||||
# returns path/filename e.g. /some/path/to/file.tar.gz
|
||||
# we use .gnupg since we'll need it later.
|
||||
os.makedirs(dlpath + '/.gnupg', exist_ok = True)
|
||||
tarball_path[a] = dlpath + '/.latest.' + a + '.tar'
|
||||
pattern = re.compile('^.*' + a + '\.tar(\.(gz|bz2|xz))?$')
|
||||
tarball = [filename.group(0) for l in list(sha_dict.keys()) for filename in [pattern.search(l)] if filename][0]
|
||||
sha1 = sha_dict[tarball]
|
||||
if locsrc['mirrorfile'] != '':
|
||||
tarball = locsrc['mirrorfile']
|
||||
else:
|
||||
tarball = [filename.group(0) for l in list(hash_dict.keys()) for filename in [pattern.search(l)] if filename][0]
|
||||
if locsrc['mirrorchksum'] != '':
|
||||
hashsum = hash_dict[tarball]
|
||||
if os.path.isfile(tarball_path[a]):
|
||||
pass
|
||||
else:
|
||||
@@ -53,7 +62,6 @@ def downloadTarball(conf):
|
||||
print("{0}: [PREP] Fetching tarball ({1} architecture)...".format(
|
||||
datetime.datetime.now(),
|
||||
a))
|
||||
#dl_file = urllib.URLopener()
|
||||
tarball_dl = urlopen(rlsdir + tarball)
|
||||
with open(tarball_path[a], 'wb') as f:
|
||||
f.write(tarball_dl.read())
|
||||
@@ -63,20 +71,32 @@ def downloadTarball(conf):
|
||||
tarball_path[a],
|
||||
humanize.naturalsize(
|
||||
os.path.getsize(tarball_path[a]))))
|
||||
print("{0}: [PREP] Checking hash checksum {1} against {2}...".format(
|
||||
datetime.datetime.now(),
|
||||
sha1,
|
||||
tarball_path[a]))
|
||||
tarball_hash = hashlib.sha1(open(tarball_path[a], 'rb').read()).hexdigest()
|
||||
if tarball_hash != sha1:
|
||||
exit(("{0}: {1} either did not download correctly\n\t\t\t or a wrong (probably old) version exists on the filesystem.\n\t\t\t " +
|
||||
"Please delete it and try again.").format(datetime.datetime.now(), tarball))
|
||||
elif build['mirrorgpgsig'] != '':
|
||||
# okay, so the sha1 matches. let's verify the signature.
|
||||
if build['mirrorgpgsig'] == '.sig':
|
||||
if locsrc['mirrorchksum'] != '':
|
||||
print("{0}: [PREP] Checking hash checksum {1} against {2}...".format(
|
||||
datetime.datetime.now(),
|
||||
hashsum,
|
||||
tarball_path[a]))
|
||||
# Calculate the checksum according to type specified.
|
||||
tarball_hash = False
|
||||
for i in hashlib.algorithms_available:
|
||||
if hash_type == i:
|
||||
hashfunc = getattr(hashlib, i)
|
||||
tarball_hash = hashfunc(open(tarball_path[a], 'rb').read()).hexdigest()
|
||||
break
|
||||
if not tarball_hash:
|
||||
exit(("{0}: source_{1}:chksumtype '{2}' is not supported on this machine!".format(
|
||||
datetime.datetime.now(),
|
||||
a,
|
||||
hash_type))
|
||||
if tarball_hash != hashsum:
|
||||
exit(("{0}: {1} either did not download correctly\n\t\t\t or a wrong (probably old) version exists on the filesystem.\n\t\t\t " +
|
||||
"Please delete it and try again.").format(datetime.datetime.now(), tarball))
|
||||
if locsrc['mirrorgpgsig'] != '':
|
||||
# let's verify the signature.
|
||||
if locsrc['mirrorgpgsig'] == '.sig':
|
||||
gpgsig_remote = rlsdir + tarball + '.sig'
|
||||
else:
|
||||
gpgsig_remote = build['mirrorgpgsig']
|
||||
gpgsig_remote = locsrc['mirrorgpgsig']
|
||||
sig_dl = urlopen(gpgsig_remote)
|
||||
sig = tarball_path[a] + '.sig'
|
||||
with open(sig, 'wb+') as f:
|
||||
|
||||
Reference in New Issue
Block a user