diff --git a/common/cgi.py b/common/cgi.py
index c545666..4866465 100755
--- a/common/cgi.py
+++ b/common/cgi.py
@@ -21,13 +21,14 @@ def parse_qs():
     GET = {}
     if "QUERY_STRING" in os.environ:
         GET = _parse_qs(os.environ["QUERY_STRING"])
-    GET = {k:v[0] for k,v in GET.items()}
+    GET = {k: v[0] for k, v in GET.items()}
     return GET
 
 
 class HTTPBasicAuth:
     username = None
     password = None
+
     def __str__(self):
         return "" % (self.username, self.password)
 
@@ -40,4 +41,4 @@ def parse_auth():
         auth.username, auth.password = b64decode(value).decode().split(":")
     return auth
 
-# cgi.print_environ()
\ No newline at end of file
+# cgi.print_environ()
diff --git a/common/datadb.py b/common/datadb.py
index b58e8a1..f64a216 100644
--- a/common/datadb.py
+++ b/common/datadb.py
@@ -1,4 +1,4 @@
 DATADB_ROOT = "/nexus/datadb/backups/"
-DATADB_TMP = "/nexus/datadb/tmp/" 
+DATADB_TMP = "/nexus/datadb/tmp/"
 
-DATADB_DIR_TIMESTAMP_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" # Same as isoformat(), but we need to parse it back
\ No newline at end of file
+DATADB_DIR_TIMESTAMP_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" # Same as isoformat(), but we need to parse it back
diff --git a/get_backup b/get_backup
index df02176..f475834 100755
--- a/get_backup
+++ b/get_backup
@@ -2,11 +2,11 @@
 
 import traceback
 import os
-from sys import exit,stdin,stdout
+from sys import exit, stdout
 from os.path import join as pathjoin
-from os.path import exists,getsize
-from common.cgi import parse_qs,parse_auth,start_response
-from common.datadb import DATADB_ROOT, DATADB_TMP, DATADB_DIR_TIMESTAMP_FORMAT
+from os.path import exists, getsize
+from common.cgi import parse_qs, parse_auth, start_response
+from common.datadb import DATADB_ROOT, DATADB_DIR_TIMESTAMP_FORMAT
 from datetime import datetime
 
 
@@ -17,7 +17,7 @@ def get_backup_dir(backup_name):
     :returns: str absolute path to backup seq /0/
     """
     backups_dir = pathjoin(DATADB_ROOT, backup_name, 'data')
-    
+
     if not exists(backups_dir):
         raise Exception("Backup does not exist")
 
@@ -28,7 +28,8 @@ def handle_head(backup_name):
     try:
-        backup_path = get_backup_dir(backup_name)
+        # backup_path = get_backup_dir(backup_name)
+        # TODO appropriate content-length for HEAD
         start_response(extra_headers=['Content-length: 0'])
     except:
         start_response(status_code=("404", "Not Found",), extra_headers=['Content-length: 0'])
@@ -40,9 +41,9 @@ def handle_get_rsync(backup_name):
     Prints the absolute path an rsync backup should pull from
     """
     backup_path = get_backup_dir(backup_name)
-    
+
     start_response()
-    print(backup_path+'/')
+    print(backup_path + '/')
 
 
 def handle_get_archive(backup_name):
@@ -50,10 +51,11 @@
     Returns .tar.gz data to the browser
     """
     backup_path = pathjoin(get_backup_dir(backup_name), 'backup.tar.gz')
-    
+
     with open(backup_path, 'rb') as f:
-        start_response(content_type="application/x-gzip", extra_headers=["Content-length: %s" % getsize(backup_path),
-                                                                         "Content-Disposition: attachment; filename=\"backup.tar.gz\""])
+        start_response(content_type="application/x-gzip",
+                       extra_headers=["Content-length: %s" % getsize(backup_path),
+                                      "Content-Disposition: attachment; filename=\"backup.tar.gz\""])
         while True:
             data = f.read(8192)
             if not data: break
@@ -67,24 +69,24 @@ def handle_req():
     """
     Parse http query parameters and act accordingly.
     """
     params = parse_qs()
-    
+
     for param_name in ["proto", "name"]:
-        if not param_name in params:
+        if param_name not in params:
             raise Exception("Missing parameter: %s" % param_name)
-    
+
     if os.environ['REQUEST_METHOD'] == "GET" and params["proto"] == "rsync":
         # Should return absolute local path to latest backup dir
         handle_get_rsync(params["name"])
-    
+
     elif os.environ['REQUEST_METHOD'] == "GET" and params["proto"] == "archive":
         # Should respond by transferring tar.gz data
         handle_get_archive(params["name"])
-    
+
     elif os.environ['REQUEST_METHOD'] == "HEAD":
         # Respond with 200 or 404 depending if backup exists
         # TODO: deeper inspection so the headers can be flushed out
         handle_head(params["name"])
-    
+
     else:
         raise Exception("Invalid request. Params: %s" % params)
@@ -94,7 +96,7 @@ if __name__ == "__main__":
         handle_req()
     except Exception as e:
         start_response(status_code=("500", "Internal server error"))
-        
+
         tb = traceback.format_exc()
         print(tb)
 
diff --git a/new_backup b/new_backup
index 5157e55..5560ac4 100755
--- a/new_backup
+++ b/new_backup
@@ -1,15 +1,16 @@
 #!/usr/bin/env python3
-import os,sys,cgi
+import os
+import sys
 import traceback
-from os import mkdir,rename,unlink,rmdir,utime,makedirs
+from os import mkdir, rename, unlink, rmdir, utime
 from os.path import exists
 from os.path import join as pathjoin
 
-from common.cgi import parse_qs,parse_auth,start_response
+from common.cgi import parse_qs, parse_auth, start_response
 from common.datadb import DATADB_ROOT, DATADB_TMP, DATADB_DIR_TIMESTAMP_FORMAT
 from datetime import datetime
 from shutil import rmtree, move
-from subprocess import Popen,PIPE
+from subprocess import Popen, PIPE
 from random import randint
 from time import time
 from hashlib import md5
@@ -34,7 +35,7 @@ def rotate_backups(backup_dir, max_backups=5):
     """
     # Path to this profile's backup data dir
-    #profile_base_path = pathjoin(DATADB_ROOT, backup_name, 'data')
+    # profile_base_path = pathjoin(DATADB_ROOT, backup_name, 'data')
 
     dirs = sorted([datetime.strptime(d, DATADB_DIR_TIMESTAMP_FORMAT) for d in os.listdir(backup_dir)])
     dirs.reverse()
 
@@ -52,7 +53,7 @@ def prepare_new_backup_dir(backup_dir):
     new_backup_path = pathjoin(backup_dir, datetime.now().strftime(DATADB_DIR_TIMESTAMP_FORMAT))
     mkdir(new_backup_path)
     mkdir(pathjoin(new_backup_path, "data"))
-    return new_backup_path+'/data/'
+    return new_backup_path + '/data/'
 
 
 def prepare_backup_dirs(backup_name, max_backups=5, rotate=True):
@@ -61,7 +62,7 @@ def prepare_backup_dirs(backup_name, max_backups=5, rotate=True):
     :param backup_name: name of backup profile
     :returns: absolute path to newly created backup dir (0)
     """
-    #print("prepare_backup(%s, %s)" % (backup_name, proto))
+    # print("prepare_backup(%s, %s)" % (backup_name, proto))
 
     # Ensure the following dir exists: //data/0/
     backup_base_path = get_backup_dir(backup_name)
@@ -193,7 +194,7 @@ def handle_put_archive(backup_name, fileStream, max_backups):
     utime(get_backup_dir(backup_name))
 
     # Done
-    start_response() # send 200 response code
+    start_response()  # send 200 response code
     exit(0)
 
 
@@ -204,7 +205,7 @@ def handle_req():
     params = parse_qs()
 
     for param_name in ["proto", "name"]:
-        if not param_name in params:
+        if param_name not in params:
             raise Exception("Missing parameter: %s" % param_name)
 
     max_backups = int(params["keep"]) if "keep" in params else 5
diff --git a/test b/test
index 294b514..ed6e51b 100755
--- a/test
+++ b/test
@@ -3,21 +3,23 @@
 import os
 from urllib.parse import parse_qs
 
+
 def start_response(content_type="text/html", status_code=("200", "OK",)):
     print('Status: %s %s' % (status_code))
     print("Content-Type: %s" % content_type)
     print()
 
+
 if __name__ == "__main__":
     try:
-        
+
         data = parse_qs(os.environ["QUERY_STRING"])
-        
+
         assert "yo" in data
-        
+
         start_response()
         print("you passed: ?yo=%s" % data["yo"][0])
-        
+
     except Exception as e:
         start_response(status_code=('500', "you fucked up"))
         print(str(e))
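
Note, not part of the patch above: a minimal sketch of how the query-string handling touched in common/cgi.py behaves once the change is applied. It assumes the common package is importable from the working directory and that the code runs in a CGI-style environment; the profile name "myhost" and the query values are hypothetical.

import os
from common.cgi import parse_qs

# Simulate the environment variables a web server would set for a CGI request.
os.environ["QUERY_STRING"] = "proto=rsync&name=myhost"  # hypothetical backup profile
os.environ["REQUEST_METHOD"] = "GET"  # read by get_backup's handle_req()

params = parse_qs()
# parse_qs() flattens each parsed value list to its first element, which is
# what handle_req() relies on when it checks params["proto"] and params["name"].
assert params == {"proto": "rsync", "name": "myhost"}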