# repobot/server.py — package repository server (apt/pypi providers over S3-backed storage)
import logging
import os
from urllib.parse import urlparse

import boto3
import cherrypy
import sqlalchemy
from botocore.client import Config as BotoConfig

from repobot.aptprovider import AptProvider
from repobot.pypiprovider import PypiProvider
from repobot.tables import SAEnginePlugin, SATool
class AppWeb(object):
    """Root web handler: lists known repo providers and forwards uploads to them."""

    def __init__(self, providers):
        # mapping of provider name (e.g. "apt", "pypi") -> provider instance
        self.providers = providers

    @cherrypy.expose
    def index(self):
        # landing page: single link to the provider listing
        yield '<a href="/repo">repos</a>'

    @cherrypy.expose
    def repo(self):
        # one link per registered provider
        link = '<a href="/repo/{provider}">{provider}</a><br />'
        for provider_name in self.providers:
            yield link.format(provider=provider_name)

    @cherrypy.expose
    def addpkg(self, provider, reponame, name, version, f, **params):
        # TODO regex validate args
        # delegate the upload to the named provider's handler
        handler = self.providers[provider]
        yield from handler.web_addpkg(reponame, name, version, f, **params)
def main():
    """Entry point: parse configuration, wire up the database, S3 client and
    providers, then run the cherrypy server until signalled to stop."""
    import argparse
    import signal

    parser = argparse.ArgumentParser(description="package storage database")
    parser.add_argument('-p', '--port', default=8080, type=int, help="http port to listen on")
    parser.add_argument('-d', '--database', help="mysql+pymysql:// connection string",
                        default=os.environ.get("DATABASE_URL"))
    parser.add_argument('-s', '--s3', help="http:// or https:// connection string",
                        default=os.environ.get("S3_URL"))
    parser.add_argument('--debug', action="store_true", help="enable development options")
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO if args.debug else logging.WARNING,
                        format="%(asctime)-15s %(levelname)-8s %(filename)s:%(lineno)d %(message)s")

    # both connection strings are mandatory, via flag or environment variable
    if not args.database:
        parser.error("--database or DATABASE_URL required")
    if not args.s3:
        parser.error("--s3 or S3_URL required")

    # database engine, exposed to request handlers through the cherrypy plugin/tool pair
    dbcon = sqlalchemy.create_engine(args.database, echo=args.debug, encoding="utf8")
    SAEnginePlugin(cherrypy.engine, dbcon).subscribe()
    cherrypy.tools.db = SATool()

    # the S3 URL packs endpoint, credentials and bucket into one connection string:
    #   scheme://user:password@host[:port]/bucket
    s3url = urlparse(args.s3)
    s3args = {"config": BotoConfig(signature_version='s3v4')}

    endpoint_url = f"{s3url.scheme}://{s3url.hostname}"
    if s3url.port:
        endpoint_url += f":{s3url.port}"
    s3args["endpoint_url"] = endpoint_url

    # credentials are optional; only pass them when both parts are present
    if s3url.username and s3url.password:
        s3args["aws_access_key_id"] = s3url.username
        s3args["aws_secret_access_key"] = s3url.password
    s3 = boto3.client('s3', **s3args)

    # bucket name is the URL path (leading "/" stripped); create it on first run
    bucket = s3url.path[1:]
    existing_buckets = {b['Name'] for b in s3.list_buckets()['Buckets']}
    if bucket not in existing_buckets:
        print("Creating bucket")
        s3.create_bucket(Bucket=bucket)

    # one provider per supported repo format, all sharing the db engine and bucket
    providers = {"apt": AptProvider(dbcon, s3, bucket),
                 "pypi": PypiProvider(dbcon, s3, bucket)}
    web = AppWeb(providers)

    cherrypy.tree.mount(web, '/', {'/': {'tools.trailing_slash.on': False,
                                         'tools.db.on': True}})
    cherrypy.config.update({
        'tools.sessions.on': False,
        'request.show_tracebacks': True,
        'server.socket_port': args.port,
        'server.thread_pool': 5,
        'server.socket_host': '0.0.0.0',
        'server.show_tracebacks': True,
        'log.screen': False,
        'engine.autoreload.on': args.debug,
        # generous limits/timeouts: package uploads can be large and slow
        'server.max_request_body_size': 0,
        'server.socket_timeout': 3600,
        'response.timeout': 3600
    })

    # shut the engine down cleanly on SIGINT/SIGTERM
    def signal_handler(signum, stack):
        logging.warning('Got sig {}, exiting...'.format(signum))
        cherrypy.engine.exit()

    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    try:
        cherrypy.engine.start()
        cherrypy.engine.block()
    finally:
        cherrypy.engine.exit()
# script entry point when run directly (not imported)
if __name__ == '__main__':
    main()