regen queue instead of annoying poller

This commit is contained in:
dave 2019-05-01 18:45:52 -07:00
parent 51ba72f042
commit 4f5966b415
1 changed files with 77 additions and 64 deletions

View File

@@ -11,11 +11,11 @@ from tempfile import TemporaryDirectory
from threading import Thread
import hashlib
import os
from time import sleep
import gnupg
from datetime import datetime
import traceback
import json
import queue
class AptRepo(Base):
@@ -150,6 +150,8 @@ class AptProvider(object):
self.bucket = bucket
"""base path within the s3 bucket"""
self.basepath = "data/provider/apt"
"""queue entries are tuples containing the database id of the dist to regenerate indexes and signatures for"""
self.queue = queue.Queue()
cherrypy.tree.mount(AptWeb(self), "/repo/apt", {'/': {'tools.trailing_slash.on': False,
'tools.db.on': True}})
@@ -166,96 +168,99 @@ class AptProvider(object):
Session = sqlalchemy.orm.sessionmaker(autoflush=True, autocommit=False)
Session.configure(bind=get_engine())
while True:
sleep(2)
try:
work = self.queue.get(block=True, timeout=5)
except queue.Empty:
continue
session = Session()
try:
self._sign_packages(session)
self._sign_packages(session, work)
except:
traceback.print_exc()
finally:
session.close()
sleep(10)
def _sign_packages(self, session):
dirtydists = session.query(AptDist).filter(AptDist.dirty == True).all()
def _sign_packages(self, session, work):
dist_id = work[0]
dist = session.query(AptDist).filter(AptDist.id == dist_id).first()
print("Generating metadata for repo:{} dist:{}".format(dist.repo.name, dist.name))
for dist in dirtydists:
print("Generating metadata for repo:{} dist:{}".format(dist.repo.name, dist.name))
str_packages = ""
str_packages = ""
for package in session.query(AptPackage) \
.filter(AptPackage.repo == dist.repo,
AptPackage.dist == dist) \
.order_by(AptPackage.id).all():
fields = json.loads(package.fields)
for k, v in fields.items():
str_packages += "{}: {}\n".format(k, v)
for algo, algoname in algos.items():
str_packages += "{}: {}\n".format(algoname, getattr(package, algo))
for package in session.query(AptPackage) \
.filter(AptPackage.repo == dist.repo,
AptPackage.dist == dist) \
.order_by(AptPackage.id).all():
fields = json.loads(package.fields)
for k, v in fields.items():
str_packages += "{}: {}\n".format(k, v)
for algo, algoname in algos.items():
str_packages += "{}: {}\n".format(algoname, getattr(package, algo))
str_packages += "Filename: packages/{}/{}\n".format(package.fname[0], package.fname)
str_packages += "Size: {}\n".format(package.size)
str_packages += "Filename: packages/{}/{}\n".format(package.fname[0], package.fname)
str_packages += "Size: {}\n".format(package.size)
str_packages += "\n"
str_packages += "\n"
dist.packages_cache = str_packages.encode("utf-8")
dist.packages_cache = str_packages.encode("utf-8")
release_hashes = hashmany(dist.packages_cache)
release_hashes = hashmany(dist.packages_cache)
str_release = """Origin: . {dist}
str_release = """Origin: . {dist}
Label: . {dist}
Suite: {dist}
Codename: {dist}
Date: {time}
Architectures: amd64
Components: main
Description: Generated by yolo
Description: Generated by Repobot
""".format(dist=dist.name, time=datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S UTC"))
for algo, algoname in algos.items():
str_release += "{}:\n {} {} {}/{}/{}\n".format(algoname,
release_hashes[algo],
len(dist.packages_cache),
"main", #TODO component
"binary-amd64", #TODO whatever this was
"Packages")
for algo, algoname in algos.items():
str_release += "{}:\n {} {} {}/{}/{}\n".format(algoname,
release_hashes[algo],
len(dist.packages_cache),
"main", #TODO component
"binary-amd64", #TODO whatever this was
"Packages")
dist.release_cache = str_release.encode("utf-8")
dist.release_cache = str_release.encode("utf-8")
keyemail = 'debian_signing@localhost'
keyemail = 'debian_signing@localhost'
with TemporaryDirectory() as tdir:
gpg = gnupg.GPG(gnupghome=tdir)
with TemporaryDirectory() as tdir:
gpg = gnupg.GPG(gnupghome=tdir)
def getkey():
keys = [i for i in gpg.list_keys(secret=True) if any([keyemail in k for k in i["uids"]])]
if keys:
return keys[0]
def getkey():
keys = [i for i in gpg.list_keys(secret=True) if any([keyemail in k for k in i["uids"]])]
if keys:
return keys[0]
fingerprint = None
fingerprint = None
if not dist.repo.gpgkey:
print("Generating key for", dist.repo.name)
key = gpg.gen_key(gpg.gen_key_input(name_email=keyemail,
expire_date='2029-04-28',
key_type='RSA',
key_length=4096,
key_usage='encrypt,sign,auth',
passphrase="secret"))
fingerprint = key.fingerprint
dist.repo.gpgkey = gpg.export_keys(fingerprint, secret=True, passphrase="secret")
dist.repo.gpgkeyprint = fingerprint
dist.repo.gpgpubkey = gpg.export_keys(fingerprint)
if not dist.repo.gpgkey:
print("Generating key for", dist.repo.name)
key = gpg.gen_key(gpg.gen_key_input(name_email=keyemail,
expire_date='2029-04-28',
key_type='RSA',
key_length=4096,
key_usage='encrypt,sign,auth',
passphrase="secret"))
fingerprint = key.fingerprint
dist.repo.gpgkey = gpg.export_keys(fingerprint, secret=True, passphrase="secret")
dist.repo.gpgkeyprint = fingerprint
dist.repo.gpgpubkey = gpg.export_keys(fingerprint)
else:
import_result = gpg.import_keys(dist.repo.gpgkey)
fingerprint = import_result.results[0]['fingerprint'] # errors here suggests some gpg import issue
assert(fingerprint == getkey()['fingerprint'])
else:
import_result = gpg.import_keys(dist.repo.gpgkey)
fingerprint = import_result.results[0]['fingerprint'] # errors here suggests some gpg import issue
assert(fingerprint == getkey()['fingerprint'])
dist.sig_cache = gpg.sign(dist.release_cache, keyid=fingerprint, passphrase='secret',
detach=True, clearsign=False).data
dist.dirty = False
session.commit()
dist.sig_cache = gpg.sign(dist.release_cache, keyid=fingerprint, passphrase='secret',
detach=True, clearsign=False).data
dist.dirty = False
session.commit()
print("Metadata generation complete")
def web_addpkg(self, reponame, name, version, fobj, dist):
repo = get_repo(db(), reponame)
@@ -300,6 +305,11 @@ Description: Generated by yolo
db().add(pkg)
db().commit()
self.regen_dist(dist.id)
def regen_dist(self, dist_id):
    """Schedule regeneration of a dist's indexes and signatures.

    Enqueues a one-element work tuple containing the database id of the
    dist; the signing worker thread consumes these entries.
    """
    work = (dist_id, )
    self.queue.put(work)
#TODO
# - verify dpkg name & version match params
# - copy to persistent storage
@@ -315,14 +325,17 @@ class AptWeb(object):
self.packages = AptFiles(base)
@cherrypy.expose
def index(self, reponame=None):
def index(self, reponame=None, regen=False):
if reponame:
repo = get_repo(db(), reponame, create_ok=False)
yield "<a href='/repo/apt/{reponame}/pubkey'>pubkey</a><hr/>".format(reponame=repo.name)
yield "<a href='/repo/apt/{reponame}/pubkey'>pubkey</a> " \
"<a href='/repo/apt/{reponame}?regen=1'>regen</a><hr/>".format(reponame=repo.name)
for dist in db().query(AptDist).filter(AptDist.repo == repo).order_by(AptDist.name).all():
yield "<a href='/repo/apt/{reponame}/dists/{name}'>{name}</a>: <a href='/repo/apt/{reponame}/dists/{name}/main/indexname/Packages'>Packages</a> <a href='/repo/apt/{reponame}/dists/{name}/Release'>Release</a> <a href='/repo/apt/{reponame}/dists/{name}/Release.gpg'>Release.gpg</a><br />".format(reponame=repo.name, name=dist.name)
if regen:
self.base.regen_dist(dist.id)
# yield "about apt repo '{}'".format(reponame)
else: