updating some pacman stuff. need to finish objtypes.Repo and might need to tweak config writer.

4 aif/software/__init__.py Normal file
@@ -0,0 +1,4 @@
from . import config
from . import keyring
from . import objtypes
from . import pacman

124 aif/software/config.py Normal file
@@ -0,0 +1,124 @@
import copy
import logging
import os
import re
import shutil
from collections import OrderedDict
##
import jinja2
##
import aif.utils


_logger = logging.getLogger(__name__)


class PacmanConfig(object):
    _sct_re = re.compile(r'^\s*\[(?P<sect>[^]]+)\]\s*$')
    _kv_re = re.compile(r'^\s*(?P<key>[^\s=[]+)((?:\s*=\s*)(?P<value>.*))?$')
    _skipline_re = re.compile(r'^\s*(#.*)?$')
    # TODO: Append mirrors/repos to pacman.conf here before we parse?
    # I copy a lot of logic from pycman/config.py here.
    _list_keys = ('CacheDir', 'HookDir', 'HoldPkg', 'SyncFirst', 'IgnoreGroup', 'IgnorePkg', 'NoExtract', 'NoUpgrade',
                  'Server')
    _single_keys = ('RootDir', 'DBPath', 'GPGDir', 'LogFile', 'Architecture', 'XferCommand', 'CleanMethod', 'SigLevel',
                    'LocalFileSigLevel', 'RemoteFileSigLevel')
    _noval_keys = ('UseSyslog', 'ShowSize', 'TotalDownload', 'CheckSpace', 'VerbosePkgLists', 'ILoveCandy', 'Color',
                   'DisableDownloadTimeout')
    # These are the default (commented-out) values in the stock /etc/pacman.conf as of January 5, 2020.
    defaults = OrderedDict({'options': {'Architecture': 'auto',
                                        'CacheDir': '/var/cache/pacman/pkg/',
                                        'CheckSpace': None,
                                        'CleanMethod': 'KeepInstalled',
                                        # 'Color': None,
                                        'DBPath': '/var/lib/pacman/',
                                        'GPGDir': '/etc/pacman.d/gnupg/',
                                        'HoldPkg': 'pacman glibc',
                                        'HookDir': '/etc/pacman.d/hooks/',
                                        'IgnoreGroup': [],
                                        'IgnorePkg': [],
                                        'LocalFileSigLevel': ['Optional'],
                                        'LogFile': '/var/log/pacman.log',
                                        'NoExtract': [],
                                        'NoUpgrade': [],
                                        'RemoteFileSigLevel': ['Required'],
                                        'RootDir': '/',
                                        'SigLevel': ['Required', 'DatabaseOptional'],
                                        # 'TotalDownload': None,
                                        # 'UseSyslog': None,
                                        # 'VerbosePkgLists': None,
                                        'XferCommand': '/usr/bin/curl -L -C - -f -o %o %u'},
                            # These should be explicitly included in the AIF config.
                            # 'core': {'Include': '/etc/pacman.d/mirrorlist'},
                            # 'extra': {'Include': '/etc/pacman.d/mirrorlist'},
                            # 'community': {'Include': '/etc/pacman.d/mirrorlist'}
                            })

    def __init__(self, chroot_base, confpath = '/etc/pacman.conf'):
        self.chroot_base = chroot_base
        self.confpath = os.path.join(self.chroot_base, re.sub(r'^/+', '', confpath))
        self.confbak = '{0}.bak'.format(self.confpath)
        self.mirrorlstpath = os.path.join(self.chroot_base, 'etc', 'pacman.d', 'mirrorlist')
        self.mirrorlstbak = '{0}.bak'.format(self.mirrorlstpath)
        if not os.path.isfile(self.confbak):
            shutil.copy2(self.confpath, self.confbak)
            _logger.info('Copied: {0} => {1}'.format(self.confpath, self.confbak))
        if not os.path.isfile(self.mirrorlstbak):
            shutil.copy2(self.mirrorlstpath, self.mirrorlstbak)
            _logger.info('Copied: {0} => {1}'.format(self.mirrorlstpath, self.mirrorlstbak))
        self.j2_env = jinja2.Environment(loader = jinja2.FileSystemLoader(searchpath = './'))
        self.j2_env.filters.update(aif.utils.j2_filters)
        self.j2_conf = self.j2_env.get_template('pacman.conf.j2')
        self.j2_mirror = self.j2_env.get_template('mirrorlist.j2')
        self.conf = None
        self.mirrors = []

    def _includeExpander(self, lines):
        curlines = []
        for line in lines:
            r = self._kv_re.search(line)
            if r and (r.group('key') == 'Include') and r.group('value'):
                path = os.path.join(self.chroot_base, re.sub(r'^/?', '', r.group('value')))
                with open(path, 'r') as fh:
                    curlines.extend(self._includeExpander(fh.read().splitlines()))
            else:
                curlines.append(line)
        return(curlines)

    def parse(self, defaults = True):
        self.conf = OrderedDict()
        rawlines = {}
        with open(self.confpath, 'r') as fh:
            rawlines['orig'] = [line for line in fh.read().splitlines() if not self._skipline_re.search(line)]
        rawlines['parsed'] = self._includeExpander(rawlines['orig'])
        for conftype, cfg in rawlines.items():
            _confdict = copy.deepcopy(self.defaults)
            _sect = None
            for line in cfg:
                if self._sct_re.search(line):
                    _sect = self._sct_re.search(line).group('sect')
                    if _sect not in _confdict.keys():
                        _confdict[_sect] = OrderedDict()
                elif self._kv_re.search(line):
                    r = self._kv_re.search(line)
                    k = r.group('key')
                    v = r.group('value')
                    if k in self._noval_keys:
                        _confdict[_sect][k] = None
                    elif k in self._single_keys:
                        _confdict[_sect][k] = v
                    elif k in self._list_keys:
                        if k not in _confdict[_sect].keys():
                            _confdict[_sect][k] = []
                        _confdict[_sect][k].append(v)
            if _confdict['options']['Architecture'] == 'auto':
                _confdict['options']['Architecture'] = os.uname().machine
            self.conf[conftype] = copy.deepcopy(_confdict)
        return(None)

    def writeConf(self):
        with open(self.confpath, 'w') as fh:
            fh.write(self.j2_conf.render(cfg = self.conf))
        with open(self.mirrorlstpath, 'w') as fh:
            fh.write(self.j2_mirror.render(mirrors = self.mirrors))
        return(None)
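
For context, a minimal usage sketch of the class above (not part of this commit). The chroot path and mirror URL are placeholders, and it assumes the two Jinja2 templates are reachable from the working directory, per the FileSystemLoader searchpath of './' used in __init__:

    from aif.software import config

    pac_conf = config.PacmanConfig('/mnt/aif')  # hypothetical chroot base
    pac_conf.parse()
    # pac_conf.conf now holds two OrderedDicts: 'orig' (pacman.conf as read) and
    # 'parsed' (with Include lines expanded), both seeded from the class defaults.
    pac_conf.mirrors = ['https://mirror.example.org/archlinux/$repo/os/$arch']
    pac_conf.writeConf()  # renders pacman.conf.j2 and mirrorlist.j2 into the chroot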

231 aif/software/keyring.py Normal file
@@ -0,0 +1,231 @@
import csv
import logging
import os
import re
import sqlite3
##
import gpg


# We don't use utils.gpg_handler because this is pretty much all procedural.
# Though, maybe add e.g. TofuDB stuff to it, and subclass it here?
# TODO.

_logger = logging.getLogger(__name__)


_createTofuDB = """BEGIN TRANSACTION;
CREATE TABLE IF NOT EXISTS "ultimately_trusted_keys" (
    "keyid" TEXT
);
CREATE TABLE IF NOT EXISTS "encryptions" (
    "binding" INTEGER NOT NULL,
    "time" INTEGER
);
CREATE TABLE IF NOT EXISTS "signatures" (
    "binding" INTEGER NOT NULL,
    "sig_digest" TEXT,
    "origin" TEXT,
    "sig_time" INTEGER,
    "time" INTEGER,
    PRIMARY KEY("binding","sig_digest","origin")
);
CREATE TABLE IF NOT EXISTS "bindings" (
    "oid" INTEGER PRIMARY KEY AUTOINCREMENT,
    "fingerprint" TEXT,
    "email" TEXT,
    "user_id" TEXT,
    "time" INTEGER,
    "policy" INTEGER CHECK(policy in (1,2,3,4,5)),
    "conflict" STRING,
    "effective_policy" INTEGER DEFAULT 0 CHECK(effective_policy in (0,1,2,3,4,5)),
    UNIQUE("fingerprint","email")
);
CREATE TABLE IF NOT EXISTS "version" (
    "version" INTEGER
);
INSERT INTO "version" ("version") VALUES (1);
CREATE INDEX IF NOT EXISTS "encryptions_binding" ON "encryptions" (
    "binding"
);
CREATE INDEX IF NOT EXISTS "bindings_email" ON "bindings" (
    "email"
);
CREATE INDEX IF NOT EXISTS "bindings_fingerprint_email" ON "bindings" (
    "fingerprint",
    "email"
);
COMMIT;"""


class KeyEditor(object):
    def __init__(self, trustlevel = 4):
        self.trusted = False
        self.revoked = False
        self.trustlevel = trustlevel
        _logger.info('Key editor instantiated.')

    def revoker(self, kw, arg, *args, **kwargs):
        # The "save" commands here can also be "quit".
        _logger.debug('Key revoker invoked:')
        _logger.debug('Command: {0}'.format(kw))
        _logger.debug('Argument: {0}'.format(arg))
        if args:
            _logger.debug('args: {0}'.format(','.join(args)))
        if kwargs:
            _logger.debug('kwargs: {0}'.format(kwargs))
        if kw == 'GET_LINE':
            if arg == 'keyedit.prompt':
                if not self.revoked:
                    _logger.debug('Returning: "disable"')
                    self.revoked = True
                    return('disable')
                else:
                    _logger.debug('Returning: "save"')
                    return('save')
            else:
                _logger.debug('Returning: "save"')
                return('save')
        return(None)

    def truster(self, kw, arg, *args, **kwargs):
        _logger.debug('Key trust editor invoked:')
        _logger.debug('Command: {0}'.format(kw))
        _logger.debug('Argument: {0}'.format(arg))
        if args:
            _logger.debug('args: {0}'.format(','.join(args)))
        if kwargs:
            _logger.debug('kwargs: {0}'.format(kwargs))
        if kw == 'GET_LINE':
            if arg == 'keyedit.prompt':
                if not self.trusted:
                    _logger.debug('Returning: "trust"')
                    return('trust')
                else:
                    _logger.debug('Returning: "save"')
                    return('save')
            elif arg == 'edit_ownertrust.value' and not self.trusted:
                self.trusted = True
                _logger.debug('Status changed to trusted')
                _logger.debug('Returning: "{0}"'.format(self.trustlevel))
                return(str(self.trustlevel))
            else:
                _logger.debug('Returning: "save"')
                return('save')
        return(None)


class PacmanKey(object):
    def __init__(self, chroot_base):
        # We more or less recreate /usr/bin/pacman-key in python.
        self.chroot_base = chroot_base
        self.home = os.path.join(self.chroot_base, 'etc', 'pacman.d', 'gnupg')
        self.conf = os.path.join(self.home, 'gpg.conf')
        self.agent_conf = os.path.join(self.home, 'gpg-agent.conf')
        self.db = os.path.join(self.home, 'tofu.db')
        # ...pacman devs, why do you create the gnupg home with 0755?
        os.makedirs(self.home, 0o0755, exist_ok = True)
        # Probably not necessary, but...
        with open(os.path.join(self.home, '.gpg-v21-migrated'), 'wb') as fh:
            fh.write(b'')
        _logger.info('Touched/wrote: {0}'.format(os.path.join(self.home, '.gpg-v21-migrated')))
        if not os.path.isfile(self.conf):
            with open(self.conf, 'w') as fh:
                fh.write(('# Generated by AIF-NG.\n'
                          'no-greeting\n'
                          'no-permission-warning\n'
                          'lock-never\n'
                          'keyserver-options timeout=10\n'))
            _logger.info('Wrote: {0}'.format(self.conf))
        if not os.path.isfile(self.agent_conf):
            with open(self.agent_conf, 'w') as fh:
                fh.write(('# Generated by AIF-NG.\n'
                          'disable-scdaemon\n'))
            _logger.info('Wrote: {0}'.format(self.agent_conf))
        self.key = None
        # ...PROBABLY order-specific.
        self._initTofuDB()
        self.gpg = gpg.Context(home_dir = self.home)
        self._initKey()
        self._initPerms()
        self._initKeyring()

    def _initKey(self):
        # These match what is currently used by pacman-key --init.
        _keyinfo = {'userid': 'Pacman Keyring Master Key <pacman@localhost>',
                    'algorithm': 'rsa2048',
                    'expires_in': 0,
                    'expires': False,
                    'sign': True,
                    'encrypt': False,
                    'certify': False,
                    'authenticate': False,
                    'passphrase': None,
                    'force': False}
        _logger.debug('Creating key with options: {0}'.format(_keyinfo))
        genkey = self.gpg.create_key(**_keyinfo)
        _logger.info('Created key: {0}'.format(genkey.fpr))
        self.key = self.gpg.get_key(genkey.fpr, secret = True)
        self.gpg.signers = [self.key]
        _logger.debug('Set signer/self key to: {0}'.format(self.key))

    def _initKeyring(self):
        krdir = os.path.join(self.chroot_base, 'usr', 'share', 'pacman', 'keyrings')
        keyrings = [i for i in os.listdir(krdir) if i.endswith('.gpg')]
        _logger.info('Importing {0} keyring(s).'.format(len(keyrings)))
        for idx, kr in enumerate(keyrings):
            krname = re.sub(r'\.gpg$', '', kr)
            krfile = os.path.join(krdir, kr)
            trustfile = os.path.join(krdir, '{0}-trusted'.format(krname))
            revokefile = os.path.join(krdir, '{0}-revoked'.format(krname))
            _logger.debug('Importing keyring: {0} ({1}/{2})'.format(krname, (idx + 1), len(keyrings)))
            with open(os.path.join(krdir, kr), 'rb') as fh:
                imported_keys = self.gpg.key_import(fh.read())
            if imported_keys:
                _logger.debug('Imported: {0}'.format(imported_keys))
            # We also have to sign/trust the keys. I still can't believe there isn't an easier way to do this.
            if os.path.isfile(trustfile):
                with open(trustfile, 'r') as fh:
                    for trust in csv.reader(fh, delimiter = ':'):
                        k_id = trust[0]
                        k_trust = int(trust[1])
                        k = self.gpg.get_key(k_id)
                        self.gpg.key_sign(k, local = True)
                        editor = KeyEditor(trustlevel = k_trust)
                        self.gpg.interact(k, editor.truster)
            # And revoke keys.
            if os.path.isfile(revokefile):
                with open(revokefile, 'r') as fh:
                    for fpr in fh.read().splitlines():
                        k = self.gpg.get_key(fpr)
                        editor = KeyEditor()
                        self.gpg.interact(k, editor.revoker)
        return(None)

    def _initPerms(self):
        # Again, not quite sure why it's so permissive. But pacman-key explicitly does it, so.
        filenames = {'pubring': 0o0644,
                     'trustdb': 0o0644,
                     'secring': 0o0600}
        for fname, filemode in filenames.items():
            fpath = os.path.join(self.home, '{0}.gpg'.format(fname))
            if not os.path.isfile(fpath):
                # TODO: Can we just manually create an empty file, or will GPG not like that?
                # I'm fairly certain that the key creation automatically creates these files, so as long as this
                # function is run after _initKey() then we should be fine.
                # with open(fpath, 'wb') as fh:
                #     fh.write(b'')
                # _logger.info('Wrote: {0}'.format(fpath))
                continue
            os.chmod(fpath, filemode)
        return(None)

    def _initTofuDB(self):
        # As glad as I am that GnuPG is moving more towards more accessible data structures...
        db = sqlite3.connect(self.db)
        cur = db.cursor()
        cur.executescript(_createTofuDB)
        db.commit()
        cur.close()
        db.close()
        return(None)
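
A minimal sketch of how the class above would be driven (the chroot path is a placeholder); PacmanKey does all of its work in __init__, mirroring pacman-key --init plus --populate:

    from aif.software import keyring

    pacman_key = keyring.PacmanKey('/mnt/aif')  # hypothetical mounted install root
    # At this point <chroot>/etc/pacman.d/gnupg holds a fresh master key, and every
    # keyring under <chroot>/usr/share/pacman/keyrings has been imported, locally
    # signed/trusted per its -trusted file, and had keys from -revoked disabled.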

5 aif/software/mirrorlist.j2 Normal file
@@ -0,0 +1,5 @@
# Generated by AIF-NG.
# See /etc/pacman.d/mirrorlist.bak for original version.
{%- for mirror in mirrors %}
Server = {{ mirror }}
{%- endfor %}
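
A quick standalone render sketch for the template above (loader path and mirror URL are placeholders; in AIF-NG itself the rendering happens through PacmanConfig.writeConf()):

    import jinja2

    env = jinja2.Environment(loader = jinja2.FileSystemLoader(searchpath = 'aif/software'))
    tpl = env.get_template('mirrorlist.j2')
    # Produces the two header comments followed by one "Server = ..." line per mirror.
    print(tpl.render(mirrors = ['https://mirror.example.org/archlinux/$repo/os/$arch']))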

72 aif/software/objtypes.py Normal file
@@ -0,0 +1,72 @@
import logging
import os
import re
##
from lxml import etree


_logger = logging.getLogger(__name__)


class Mirror(object):
    def __init__(self, mirror_xml, repo = None, arch = None):
        self.xml = mirror_xml
        _logger.debug('mirror_xml: {0}'.format(etree.tostring(self.xml, with_tail = False).decode('utf-8')))
        self.uri = self.xml.text
        self.real_uri = None
        self.aif_uri = None

    def parse(self, chroot_base, repo, arch):
        self.real_uri = self.uri.replace('$repo', repo).replace('$arch', arch)
        if self.uri.startswith('file://'):
            self.aif_uri = os.path.join(chroot_base, re.sub(r'^file:///?', '', self.real_uri))


class Package(object):
    def __init__(self, package_xml):
        self.xml = package_xml
        _logger.debug('package_xml: {0}'.format(etree.tostring(self.xml, with_tail = False).decode('utf-8')))
        self.name = self.xml.text
        self.repo = self.xml.attrib.get('repo')
        if self.repo:
            self.qualified_name = '{0}/{1}'.format(self.repo, self.name)
        else:
            self.qualified_name = self.name


class Repo(object):
    def __init__(self, chroot_base, repo_xml, arch = 'x86_64'):
        # TODO: support Usage? ("REPOSITORY SECTIONS", pacman.conf(5))
        self.xml = repo_xml
        _logger.debug('repo_xml: {0}'.format(etree.tostring(self.xml, with_tail = False).decode('utf-8')))
        # TODO: SigLevels?!
        self.name = self.xml.attrib['name']
        self.conflines = {}
        self.mirrors = []
        self.parsed_mirrors = []
        _mirrors = self.xml.xpath('mirror|include')  # "Server" and "Include" respectively in pyalpm lingo.
        if _mirrors:
            for m in _mirrors:
                k = m.tag.title()
                if k == 'Mirror':
                    k = 'Server'
                if k not in self.conflines.keys():
                    self.conflines[k] = []
                self.conflines[k].append(m.text)
                # TODO: better parsing here. Handle in config.py?
                # if m.tag == 'include':
                #     # TODO: We only support one level of includes. Pacman supports unlimited nesting? of includes.
                #     file_uri = os.path.join(chroot_base, re.sub(r'^/?', '', m.text))
                #     if not os.path.isfile(file_uri):
                #         _logger.error('Include file ({0}) does not exist: {1}'.format(m.text, file_uri))
                #         raise FileNotFoundError('Include file does not exist')
                #     with open(file_uri, 'r') as fh:
                #         for line in fh.read().splitlines():
        else:
            # Default (mirrorlist)
            self.conflines['Include'] = ['file:///etc/pacman.d/mirrorlist']
        self.enabled = (True if self.xml.attrib.get('enabled', 'true') in ('1', 'true') else False)
        self.siglevel = self.xml.attrib.get('sigLevel')
        # self.real_uri = None
        # if self.uri:
        #     self.real_uri = self.uri.replace('$repo', self.name).replace('$arch', arch)
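
For reference, the rough element shape Repo expects, inferred from the attrib/xpath calls above; the tag text and attribute values here are illustrative, not taken from the AIF-NG schema:

    from lxml import etree
    from aif.software import objtypes

    repo_xml = etree.fromstring(('<repo name="custom" enabled="true" sigLevel="Optional TrustAll">'
                                 '<mirror>https://repo.example.org/archlinux/$repo/os/$arch</mirror>'
                                 '</repo>'))
    repo = objtypes.Repo('/mnt/aif', repo_xml)  # chroot base is hypothetical
    # repo.conflines == {'Server': ['https://repo.example.org/archlinux/$repo/os/$arch']}
    # repo.enabled == True; repo.siglevel == 'Optional TrustAll'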

16 aif/software/pacman.conf.j2 Normal file
@@ -0,0 +1,16 @@
# Generated by AIF-NG.
# See /etc/pacman.conf.bak for original version.
{%- for section, kv in cfg.items() %}
[{{ section }}]
{%- for key, value in kv.items() %}
{%- if value is none %}
{{ key }}
{%- elif value|isList %}
{%- for val in value %}
{{ key }} = {{ val }}
{%- endfor %}
{%- else %}
{{ key }} = {{ value }}
{%- endif %}
{%- endfor %}
{% endfor %}
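
The template above depends on an isList filter registered from aif.utils.j2_filters (defined elsewhere in the tree). For testing the template on its own, a minimal stand-in could look like this; the real filter may differ:

    import jinja2

    def is_list(value):
        # Lists and tuples count; strings deliberately do not.
        return(isinstance(value, (list, tuple)))

    env = jinja2.Environment(loader = jinja2.FileSystemLoader(searchpath = 'aif/software'))
    env.filters['isList'] = is_list
    tpl = env.get_template('pacman.conf.j2')
    print(tpl.render(cfg = {'options': {'Color': None,
                                        'SigLevel': ['Required', 'DatabaseOptional']}}))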

123 aif/software/pacman.py Normal file
@@ -0,0 +1,123 @@
# We can manually bootstrap and alter pacman's keyring. But check the bootstrap tarball; we might not need to.
# TODO.

import logging
import os
import re
##
import pyalpm
from lxml import etree
##
from . import keyring
from . import objtypes

_logger = logging.getLogger(__name__)


# TODO: There is some duplication here that we can get rid of in the future. Namely:
# - Mirror URI parsing
# - Unified function for parsing Includes
# - At some point, ideally there should be a MirrorList class that can take (or generate?) a list of Mirrors
#   and have a write function to write out a mirror list to a specified location.


class PackageManager(object):
    def __init__(self, chroot_base, pacman_xml):
        self.xml = pacman_xml
        _logger.debug('pacman_xml: {0}'.format(etree.tostring(self.xml, with_tail = False).decode('utf-8')))
        self.chroot_base = chroot_base
        self.pacman_dir = os.path.join(self.chroot_base, 'var', 'lib', 'pacman')
        self.configfile = os.path.join(self.chroot_base, 'etc', 'pacman.conf')
        self.keyring = keyring.PacmanKey(self.chroot_base)
        self.config = None
        self.handler = None
        self.repos = []
        self.packages = []
        self.mirrorlist = []
        self._initHandler()
        self._initMirrors()
        self._initRepos()

    def _initHandler(self):
        # TODO: Append mirrors/repos to pacman.conf here before we parse?
        self.opts = {'Architecture': 'x86_64',  # Technically, "auto" but Arch proper only supports x86_64.
                     'CacheDir': '/var/cache/pacman/pkg/',
                     'CheckSpace': True,
                     'CleanMethod': 'KeepInstalled',
                     # 'Color': None,
                     'DBPath': '/var/lib/pacman/',
                     'GPGDir': '/etc/pacman.d/gnupg/',
                     'HoldPkg': 'pacman glibc',
                     'HookDir': '/etc/pacman.d/hooks/',
                     'IgnoreGroup': '',
                     'IgnorePkg': '',
                     'LocalFileSigLevel': 'Optional',
                     'LogFile': '/var/log/pacman.log',
                     'NoExtract': '',
                     'NoUpgrade': '',
                     'RemoteFileSigLevel': 'Required',
                     'RootDir': '/',
                     'SigLevel': 'Required DatabaseOptional',
                     # 'TotalDownload': None,
                     # 'UseSyslog': None,
                     # 'VerbosePkgLists': None,
                     'XferCommand': '/usr/bin/curl -L -C - -f -o %o %u'
                     }
        for k, v in self.opts.items():
            if k in ('CacheDir', 'DBPath', 'GPGDir', 'HookDir', 'LogFile', 'RootDir'):
                v = re.sub(r'^/+', r'', v)
                self.opts[k] = os.path.join(self.chroot_base, v)
            if k in ('HoldPkg', 'IgnoreGroup', 'IgnorePkg', 'NoExtract', 'NoUpgrade', 'SigLevel'):
                self.opts[k] = v.split()
        if not self.handler:
            self.handler = pyalpm.Handle(self.chroot_base, self.pacman_dir)
        # Pretty much blatantly ripped this off of pycman:
        # https://github.com/archlinux/pyalpm/blob/master/pycman/config.py
        for k in ('LogFile', 'GPGDir', 'NoExtract', 'NoUpgrade'):
            setattr(self.handler, k.lower(), self.opts[k])
        self.handler.arch = self.opts['Architecture']
        if self.opts['IgnoreGroup']:
            self.handler.ignoregrps = self.opts['IgnoreGroup']
        if self.opts['IgnorePkg']:
            self.handler.ignorepkgs = self.opts['IgnorePkg']
        return(None)

    def _initMirrors(self):
        mirrors = self.xml.find('mirrorList')
        if mirrors:
            _mirrorlist = os.path.join(self.chroot_base, 'etc', 'pacman.d', 'mirrorlist')
            with open(_mirrorlist, 'a') as fh:
                fh.write('\n# Added by AIF-NG.\n')
                for m in mirrors.findall('mirror'):
                    mirror = objtypes.Mirror(m)
                    self.mirrorlist.append(mirror)
                    fh.write('Server = {0}\n'.format(mirror.uri))
            _logger.info('Appended: {0}'.format(_mirrorlist))
        return(None)

    def _initRepos(self):
        repos = self.xml.find('repos')
        _conf = os.path.join(self.chroot_base, 'etc', 'pacman.conf')
        with open(_conf, 'a') as fh:
            fh.write('\n# Added by AIF-NG.\n')
            for r in repos.findall('repo'):
                repo = objtypes.Repo(self.chroot_base, r)
                if repo.enabled:
                    fh.write('[{0}]\n'.format(repo.name))
                    if repo.siglevel:
                        fh.write('SigLevel = {0}\n'.format(repo.siglevel))
                    if repo.uri:
                        fh.write('Server = {0}\n'.format(repo.uri))
                    else:
                        fh.write('Include = /etc/pacman.d/mirrorlist\n')
                else:
                    fh.write('#[{0}]\n'.format(repo.name))
                    if repo.siglevel:
                        fh.write('#SigLevel = {0}\n'.format(repo.siglevel))
                    if repo.uri:
                        fh.write('#Server = {0}\n'.format(repo.uri))
                    else:
                        fh.write('#Include = /etc/pacman.d/mirrorlist\n')
                self.repos.append(repo)
        _logger.info('Appended: {0}'.format(_conf))
        return(None)
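
The TODO near the top of pacman.py mentions an eventual MirrorList class. One possible shape, purely illustrative and not part of this commit (the name, signature, and behavior are assumptions):

    import os


    class MirrorList(object):
        # Hypothetical helper: collect objtypes.Mirror objects and write them out.
        def __init__(self, mirrors = None):
            self.mirrors = mirrors or []

        def write(self, dest):
            os.makedirs(os.path.dirname(dest), exist_ok = True)
            with open(dest, 'w') as fh:
                fh.write('# Generated by AIF-NG.\n')
                for m in self.mirrors:
                    fh.write('Server = {0}\n'.format(m.uri))
            return(None)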