non-working docker provider
This commit is contained in:
parent
3533fffa61
commit
1d7b05116d
|
@ -0,0 +1,151 @@
|
|||
import cherrypy
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
from jinja2 import Environment, FileSystemLoader, select_autoescape
|
||||
from sqlalchemy import Column, ForeignKey, UniqueConstraint
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.types import String, Integer
|
||||
from tempfile import TemporaryDirectory
|
||||
from repobot.tables import Base, db
|
||||
|
||||
"""
|
||||
Registry API notes
|
||||
|
||||
base URL structure:
|
||||
|
||||
/v2/<name>/ name matches [a-z0-9]+(?:[._-][a-z0-9]+)*
|
||||
/v2/library/ubuntu/ including slashes, must be less than 256 chars
|
||||
|
||||
"All endpoints should support aggressive http caching, compression and range headers, where appropriate."
|
||||
Note: prefixing names with more paths seems to work?
|
||||
This might be better: https://github.com/cherrypy/tools/blob/master/VirtualHosts
|
||||
|
||||
|
||||
4xx errors get a json reply like:
|
||||
{
|
||||
"errors:" [{
|
||||
"code": <error identifier>,
|
||||
"message": <message describing condition>,
|
||||
"detail": <unstructured>
|
||||
},
|
||||
...
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class DockerProvider(object):
    """Docker registry provider: keeps image data in s3 and exposes the
    registry HTTP API under /v2/."""

    def __init__(self, dbcon, s3client, bucket):
        """
        :param dbcon: database connection handle shared with the other providers
        :param s3client: s3 client used for blob storage
        :param bucket: name of the s3 bucket blobs are stored in
        """
        self.db = dbcon
        self.s3 = s3client
        self.bucket = bucket
        # base path within the s3 bucket
        self.basepath = "data/provider/docker"

        # Mount the registry API at /v2/ with trailing-slash redirection
        # disabled and the per-request db session tool enabled.
        api_config = {'/': {'tools.trailing_slash.on': False,
                            'tools.db.on': True}}
        cherrypy.tree.mount(DockerApi(self), "/v2/", api_config)

    def web_addpkg(self, reponame, name, version, fobj):
        # Docker images are pushed through the registry API, not the generic
        # web upload hook, so this provider does not implement it.
        raise NotImplementedError()
|
||||
|
||||
|
||||
class DockerApi(object):
    """HTTP handlers for the docker registry v2 API mounted at /v2/."""

    def __init__(self, base):
        # parent DockerProvider; provides db/s3/bucket access
        self.base = base

    @cherrypy.expose
    def index(self):
        """GET /v2/ version-check endpoint.

        Docker clients probe this header to confirm they are talking to a
        v2-compatible registry; the body is intentionally empty.
        """
        headers = cherrypy.response.headers
        headers["Docker-Distribution-API-Version"] = "registry/2.0"
        return ""

    @cherrypy.expose
    def default(self, *args):
        """Catch-all for registry paths that are not implemented yet.

        Path tuples observed during development:
            ('foo', 'bar', 'qux', 'dpedu', 'zwastebin2', 'blobs', 'uploads')
            ('dpedu', 'zwastebin2', 'blobs', 'uploads')
            ('ubuntu', 'blobs', 'uploads')
        """
        print("##DEFAULT##: ", args)
        raise cherrypy.HTTPError(403, "fuck off")
|
||||
|
||||
|
||||
@cherrypy.popargs("reponame", "pkgname", "filename")
|
||||
class TarWeb(object):
|
||||
def __init__(self, base):
|
||||
self.base = base
|
||||
|
||||
template_dir = "templates" if os.path.exists("templates") else os.path.join(APPROOT, "templates")
|
||||
self.tpl = Environment(loader=FileSystemLoader(template_dir),
|
||||
autoescape=select_autoescape(['html', 'xml']))
|
||||
|
||||
@cherrypy.expose
|
||||
def index(self, reponame=None, pkgname=None, filename=None):
|
||||
if filename:
|
||||
return self.handle_download(reponame, pkgname, filename)
|
||||
else:
|
||||
return self.handle_navigation(reponame, pkgname, filename)
|
||||
|
||||
def handle_navigation(self, reponame=None, pkgname=None, filename=None):
|
||||
if reponame:
|
||||
repo = get_repo(db(), reponame, create_ok=False)
|
||||
if pkgname:
|
||||
return self.tpl.get_template("tar/package.html") \
|
||||
.render(repo=repo,
|
||||
pkgs=db().query(TarPackage).filter(TarPackage.repo == repo,
|
||||
TarPackage.name == pkgname).
|
||||
order_by(TarPackage.version).all())
|
||||
|
||||
return self.tpl.get_template("tar/repo.html") \
|
||||
.render(repo=repo,
|
||||
pkgs=self._get_dists(repo))
|
||||
|
||||
return self.tpl.get_template("tar/root.html") \
|
||||
.render(repos=db().query(TarRepo).order_by(TarRepo.name).all())
|
||||
|
||||
def _get_dists(self, repo):
|
||||
lastpkg = None
|
||||
for pkg in db().query(TarPackage).filter(TarPackage.repo == repo).order_by(TarPackage.fname).all():
|
||||
if lastpkg and pkg.name == lastpkg:
|
||||
continue
|
||||
yield pkg
|
||||
lastpkg = pkg.name
|
||||
|
||||
def handle_download(self, reponame, distname, filename):
|
||||
repo = get_repo(db(), reponame, create_ok=False)
|
||||
pkg = db().query(TarPackage).filter(TarPackage.repo == repo, TarPackage.fname == filename).first()
|
||||
if not pkg:
|
||||
raise cherrypy.HTTPError(404)
|
||||
|
||||
dpath = os.path.join(self.base.basepath, pkg.blobpath)
|
||||
print("dpath=", dpath)
|
||||
print("blobpath=", pkg.blobpath)
|
||||
print("basepath=", self.base.basepath)
|
||||
|
||||
if str(cherrypy.request.method) == "DELETE":
|
||||
db().delete(pkg)
|
||||
files = self.base.s3.list_objects(Bucket=self.base.bucket, Prefix=dpath).get("Contents")
|
||||
if files:
|
||||
self.base.s3.delete_object(Bucket=self.base.bucket, Key=dpath)
|
||||
db().commit()
|
||||
return "OK" #TODO delete the repo if we've emptied it(?)
|
||||
|
||||
elif str(cherrypy.request.method) == "GET":
|
||||
response = self.base.s3.get_object(Bucket=self.base.bucket, Key=dpath)
|
||||
|
||||
cherrypy.response.headers["Content-Type"] = "application/octet-stream"
|
||||
cherrypy.response.headers["Content-Length"] = response["ContentLength"]
|
||||
|
||||
def stream():
|
||||
while True:
|
||||
data = response["Body"].read(65535)
|
||||
if not data:
|
||||
return
|
||||
yield data
|
||||
|
||||
return stream()
|
||||
else:
|
||||
raise cherrypy.HTTPError(405)
|
||||
|
||||
index._cp_config = {'response.stream': True}
|
|
@ -7,6 +7,7 @@ from botocore.client import Config as BotoConfig
|
|||
from repobot.aptprovider import AptProvider
|
||||
from repobot.pypiprovider import PypiProvider
|
||||
from repobot.tarprovider import TarProvider
|
||||
from repobot.dockermod import DockerProvider
|
||||
from repobot.tables import SAEnginePlugin, SATool
|
||||
from urllib.parse import urlparse
|
||||
|
||||
|
@ -80,7 +81,8 @@ def main():
|
|||
# set up providers
|
||||
providers = {"apt": AptProvider(dbcon, s3, bucket),
|
||||
"pypi": PypiProvider(dbcon, s3, bucket),
|
||||
"tar": TarProvider(dbcon, s3, bucket)}
|
||||
"tar": TarProvider(dbcon, s3, bucket),
|
||||
"docker": DockerProvider(dbcon, s3, bucket)}
|
||||
|
||||
# set up main web screen
|
||||
web = AppWeb(providers)
|
||||
|
|
Loading…
Reference in New Issue