#!/usr/bin/env python3

import argparse
import os
import sqlite3
##
from lxml import etree
##
import repo

# TODO: track which versions are built so we don't need to consistently rebuild ALL packages
# TODO: logging
# TODO: check result of build and *conditionally* cleanup if self.cleanup == True.
# TODO: print repo snippet

_dflts = {'cfgfile': '~/.config/arch_repo_builder/config.xml',
          'cache_db': '~/.cache/arch_repo_builder/packages.sqlite3'}


class Packager(object):
    # Builds Pacman packages and updates a local repository, driven by an XML config.
    def __init__(self, cfgfile = _dflts['cfgfile'], cache_db = _dflts['cache_db'], validate = True,
                 *args, **kwargs):
        self.cfgfile = os.path.abspath(os.path.expanduser(cfgfile))
        self.cache_db = os.path.abspath(os.path.expanduser(cache_db))
        self.cfg = None
        self.xml = None
        self.schema = None
        self.ns = None
        self.repos = []
        self.db = None
        self.cur = None
        self.origdir = os.path.abspath(os.path.expanduser(os.getcwd()))
        self._initCfg(validate = validate)
        self._initDB()
        self._initRepos()

    def _initCfg(self, validate = True):
        # Parse the XML config (resolving XIncludes), determine the default namespace,
        # and optionally validate against the XSD referenced by xsi:schemaLocation.
        with open(self.cfgfile, 'rb') as f:
            self.xml = etree.parse(f)
        self.xml.xinclude()
        self.cfg = self.xml.getroot()
        self.ns = self.cfg.nsmap.get(None, 'http://git.square-r00t.net/Arch_Repo_Builder/tree/')
        self.ns = '{{{0}}}'.format(self.ns)
        if validate:
            if not self.schema:
                from urllib.request import urlopen
                xsi = self.cfg.nsmap.get('xsi', 'http://www.w3.org/2001/XMLSchema-instance')
                schemaLocation = '{{{0}}}schemaLocation'.format(xsi)
                schemaURL = self.cfg.attrib.get(schemaLocation,
                                                ('http://git.square-r00t.net/Arch_Repo_Builder/plain/archrepo.xsd'))
                with urlopen(schemaURL) as url:
                    self.schema = url.read()
                self.schema = etree.XMLSchema(etree.XML(self.schema))
            self.schema.assertValid(self.xml)
        return()

    def _initDB(self):
        # Open the SQLite cache DB, creating the directory, file, and tables on first run.
        is_new = False
        if not os.path.isdir(os.path.dirname(self.cache_db)):
            os.makedirs(os.path.dirname(self.cache_db), exist_ok = True)
            is_new = True
        if not os.path.isfile(self.cache_db):
            is_new = True
        self.db = sqlite3.connect(self.cache_db)
        self.db.row_factory = sqlite3.Row
        self.cur = self.db.cursor()
        if is_new:
            self.cur.execute(('CREATE TABLE IF NOT EXISTS '
                              '"packages" ('
                              '"name" TEXT NOT NULL UNIQUE, '
                              '"version" TEXT NOT NULL, '
                              '"source" TEXT NOT NULL, '
                              '"repo" TEXT NOT NULL, '
                              '"sign" INTEGER NOT NULL, '
                              'PRIMARY KEY("name"))'))
            self.cur.execute(('CREATE TABLE IF NOT EXISTS '
                              '"repos" ('
                              '"name" TEXT NOT NULL UNIQUE, '
                              '"dest" TEXT NOT NULL, '
                              '"path" TEXT NOT NULL, '
                              '"sign" INTEGER NOT NULL, '
                              '"gpghome" TEXT, '
                              '"gpgkeyid" TEXT, '
                              'PRIMARY KEY("name"))'))
            self.db.commit()
        return()

    def _initRepos(self):
        # Instantiate a repo.Repo for every <repo> element in the config.
        for r in self.xml.findall('{0}repo'.format(self.ns)):
            self.repos.append(repo.Repo(r, ns = self.ns))
        return()

    def build(self):
        # Stub; see the TODOs at the top of this file.
        pass
        return()

    def _closeDB(self):
        if self.cur:
            self.cur.close()
        if self.db:
            self.db.close()
        return()


def parseArgs():
    args = argparse.ArgumentParser(description = 'Build Pacman packages and update a local repository')
    args.add_argument('-n', '--no-validate',
                      dest = 'validate',
                      action = 'store_false',
                      help = ('If specified, do NOT attempt to validate the config file (-c/--config)'))
    args.add_argument('-c', '--config',
                      dest = 'cfgfile',
                      default = _dflts['cfgfile'],
                      help = ('The path to the configuration file. Default: {0}').format(_dflts['cfgfile']))
    args.add_argument('-C', '--cache-db',
                      dest = 'cache_db',
                      default = _dflts['cache_db'],
                      help = ('The path to the cache DB file. Default: {0}').format(_dflts['cache_db']))
    return(args)


def main():
    # For now this just instantiates the Packager and dumps per-package info for debugging;
    # the actual build/repo steps below are still commented out.
    args = parseArgs().parse_args()
    varargs = vars(args)
    pkgr = Packager(**varargs)
    # pkgr.buildPkgs(auronly = varargs['auronly'])
    # pkgr.createRepo()
    import pprint
    # print('PACKAGER:')
    # pprint.pprint(vars(pkgr))
    for r in pkgr.repos:
        # print('\nREPO: {0}'.format(r.name))
        # pprint.pprint(vars(r))
        # for m in r.mirrors:
        #     print('\nREPO/MIRROR: {0}/{1}'.format(r.name, type(m).__name__))
        #     pprint.pprint(vars(m))
        for p in r.packages:
            # p.extract(r.staging_dir)
            p.getPkgInfo()
            print('\nREPO/PACKAGE: {0}/{1} ({2})'.format(r.name, p.name, type(p).__name__))
            pprint.pprint(vars(p))
    return()


if __name__ == '__main__':
    main()
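
# Example invocation (a sketch; the script filename is assumed, and the paths shown are
# simply the defaults defined in _dflts above):
#   python3 arch_repo_builder.py -c ~/.config/arch_repo_builder/config.xml \
#       -C ~/.cache/arch_repo_builder/packages.sqlite3
# Pass -n/--no-validate to skip downloading the XSD and validating the config file.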