commit 49ff19b088
parent 0596899040
Author: dave
Date:   2017-04-22 00:06:12 -07:00

2 changed files with 128 additions and 120 deletions


@@ -7,13 +7,15 @@ from os.path import normpath, join, exists
 from os import chmod, chown, stat, environ
 from enum import Enum
 import subprocess
-from requests import get,put,head
 import json
+from requests import get, put, head
 
 SSH_KEY_PATH = environ["DATADB_KEYPATH"] if "DATADB_KEYPATH" in environ else '/root/.ssh/datadb.key'
-RSYNC_DEFAULT_ARGS = ['rsync', '-avzr', '--exclude=.datadb.lock', '--whole-file', '--one-file-system', '--delete', '-e', 'ssh -i {} -p 4874 -o StrictHostKeyChecking=no'.format(SSH_KEY_PATH)]
+RSYNC_DEFAULT_ARGS = ['rsync', '-avzr', '--exclude=.datadb.lock', '--whole-file', '--one-file-system', '--delete', '-e',
+                      'ssh -i {} -p 4874 -o StrictHostKeyChecking=no'.format(SSH_KEY_PATH)]
 DATADB_HTTP_API = 'http://datadb.services.davepedu.com:4875/cgi-bin/'
 
+
 class SyncStatus(Enum):
     "Data is on local disk"
     DATA_AVAILABLE = 1
@@ -21,7 +23,7 @@ class SyncStatus(Enum):
     DATA_MISSING = 2
 
 
-def restore(profile, conf, force=False): #remote_uri, local_dir, identity='/root/.ssh/datadb.key'
+def restore(profile, conf, force=False):  # remote_uri, local_dir, identity='/root/.ssh/datadb.key'
     """
     Restore data from datadb
     """
@@ -33,7 +35,7 @@ def restore(profile, conf, force=False): #remote_uri, local_dir, identity='/root/.ssh/datadb.key'
     original_perms = stat(conf["dir"])
     dest = urlparse(conf["uri"])
 
-    status_code = head(DATADB_HTTP_API+'get_backup', params={'proto':dest.scheme, 'name':profile}).status_code
+    status_code = head(DATADB_HTTP_API + 'get_backup', params={'proto': dest.scheme, 'name': profile}).status_code
     if status_code == 404:
         print("Connected to datadb, but datasource '{}' doesn't exist. Exiting".format(profile))
         # TODO: special exit code >1 to indicate this?
@@ -43,13 +45,13 @@ def restore(profile, conf, force=False): #remote_uri, local_dir, identity='/root/.ssh/datadb.key'
         args = RSYNC_DEFAULT_ARGS[:]
 
         # Request backup server to prepare the backup, the returned dir is what we sync from
-        rsync_path = get(DATADB_HTTP_API+'get_backup', params={'proto':'rsync', 'name':profile}).text.rstrip()
+        rsync_path = get(DATADB_HTTP_API + 'get_backup', params={'proto': 'rsync', 'name': profile}).text.rstrip()
 
         # Add rsync source path
-        args.append('nexus@{}:{}'.format(dest.netloc, normpath(rsync_path)+'/'))
+        args.append('nexus@{}:{}'.format(dest.netloc, normpath(rsync_path) + '/'))
 
         # Add local dir
-        args.append(normpath(conf["dir"])+'/')
+        args.append(normpath(conf["dir"]) + '/')
 
         print("Rsync restore call: {}".format(' '.join(args)))
         subprocess.check_call(args)
@@ -59,7 +61,7 @@ def restore(profile, conf, force=False): #remote_uri, local_dir, identity='/root/.ssh/datadb.key'
         # download tarball
         args_curl = ['curl', '-s', '-v', '-XGET', '{}get_backup?proto=archive&name={}'.format(DATADB_HTTP_API, profile)]
         # unpack
-        args_tar = ['tar', 'zxv', '-C', normpath(conf["dir"])+'/']
+        args_tar = ['tar', 'zxv', '-C', normpath(conf["dir"]) + '/']
 
         print("Tar restore call: {} | {}".format(' '.join(args_curl), ' '.join(args_tar)))
@@ -102,30 +104,31 @@ def backup(profile, conf, force=False):
                 args.append(exclude_path)
 
         # Add local dir
-        args.append(normpath(conf["dir"])+'/')
+        args.append(normpath(conf["dir"]) + '/')
 
-        new_backup_params = {'proto':'rsync',
-                             'name':profile,
-                             'keep':conf["keep"]}
+        new_backup_params = {'proto': 'rsync',
+                             'name': profile,
+                             'keep': conf["keep"]}
         if conf["inplace"]:
             new_backup_params["inplace"] = 1
 
         # Hit backupdb via http to retrieve absolute path of rsync destination on remote server
-        rsync_path, token = get(DATADB_HTTP_API+'new_backup', params=new_backup_params).json()
+        rsync_path, token = get(DATADB_HTTP_API + 'new_backup', params=new_backup_params).json()
 
         # Add rsync source path
-        args.append(normpath('nexus@{}:{}'.format(dest.netloc, rsync_path))+'/')
+        args.append(normpath('nexus@{}:{}'.format(dest.netloc, rsync_path)) + '/')
 
-        #print("Rsync backup call: {}".format(' '.join(args)))
+        # print("Rsync backup call: {}".format(' '.join(args)))
 
         try:
             subprocess.check_call(args)
         except subprocess.CalledProcessError as cpe:
-            if cpe.returncode not in [0,24]:  # ignore partial transfer due to vanishing files on our end
+            if cpe.returncode not in [0, 24]:  # ignore partial transfer due to vanishing files on our end
                 raise
 
         # confirm completion if backup wasn't already in place
         if not conf["inplace"]:
-            put(DATADB_HTTP_API+'new_backup', params={'proto':'rsync', 'name':profile, 'token': token, 'keep':conf["keep"]})
+            put(DATADB_HTTP_API + 'new_backup', params={'proto': 'rsync', 'name': profile, 'token': token,
+                                                        'keep': conf["keep"]})
 
     elif dest.scheme == 'archive':
         # CD to local source dir
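Note: pieced together, this hunk is a three-step exchange with the HTTP API: GET new_backup returns a (rsync_path, token) pair, the client rsyncs into rsync_path, and a PUT with the same token confirms the backup so the server can rotate old copies per "keep". A condensed sketch of that handshake, using only names that appear in the diff (error handling omitted):

    params = {'proto': 'rsync', 'name': profile, 'keep': conf["keep"]}
    rsync_path, token = get(DATADB_HTTP_API + 'new_backup', params=params).json()
    subprocess.check_call(args)  # rsync the local dir into rsync_path on the server
    put(DATADB_HTTP_API + 'new_backup',  # confirm; the server finalizes and rotates
        params={'proto': 'rsync', 'name': profile, 'token': token, 'keep': conf["keep"]})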
@@ -141,11 +144,12 @@ def backup(profile, conf, force=False):
                 args_tar.append(exclude_path)
 
         args_tar += ['-zcv', './']
-        args_curl = ['curl', '-v', '-XPUT', '--data-binary', '@-', '{}new_backup?proto=archive&name={}&keep={}'.format(DATADB_HTTP_API, profile, conf["keep"])]
+        args_curl = ['curl', '-v', '-XPUT', '--data-binary', '@-', '{}new_backup?proto=archive&name={}&keep={}'.
+                     format(DATADB_HTTP_API, profile, conf["keep"])]
 
         print("Tar backup call: {} | {}".format(' '.join(args_tar), ' '.join(args_curl)))
 
-        compress = subprocess.Popen(args_tar, stdout=subprocess.PIPE, cwd=normpath(conf["dir"])+'/')
+        compress = subprocess.Popen(args_tar, stdout=subprocess.PIPE, cwd=normpath(conf["dir"]) + '/')
         upload = subprocess.Popen(args_curl, stdin=compress.stdout)
 
         compress.wait()
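Note: one subtlety in the tar-to-curl pipeline above, unchanged by this commit: the parent keeps its own handle on compress.stdout open and never waits on upload, so tar may not see SIGPIPE if curl dies early, and curl's exit status is dropped. A common hardening of the same pattern (a sketch, not what the code does):

    compress = subprocess.Popen(args_tar, stdout=subprocess.PIPE, cwd=normpath(conf["dir"]) + '/')
    upload = subprocess.Popen(args_curl, stdin=compress.stdout)
    compress.stdout.close()  # so tar receives SIGPIPE if curl exits first
    compress.wait()
    upload.wait()  # surface curl's exit status as well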
@@ -204,7 +208,8 @@ def main():
     Valid protocols:
 
         rsync - rsync executed over SSH. The local dir will be synced with the remote backup dir using rsync.
-        archive - tar archives transported over HTTP. The local dir will be tarred and PUT to the backup server's remote dir via http.
+        archive - tar archives transported over HTTP. The local dir will be tarred and PUT to the backup server's
+                  remote dir via http.
 
     *dir*: Local dir for this backup
@@ -220,28 +225,30 @@ def main():
     *export_postexec*: Shell command to exec after pushing data
 
-    *exclude*: if the underlying transport method supports excluding paths, a comma separated list of paths to exclude. Applies to backup operations only.
+    *exclude*: if the underlying transport method supports excluding paths, a comma separated list of paths to exclude.
+               Applies to backup operations only.
 
-    *inplace*: rsync only. if enabled, the server will keep only a single copy that you will rsync over. intended for single copies of LARGE datasets. overrides "keep".
+    *inplace*: rsync only. if enabled, the server will keep only a single copy that you will rsync over. intended for
+               single copies of LARGE datasets. overrides "keep".
     """
 
     required_conf_params = ['dir', 'uri']
 
-    conf_params = {'export_preexec':None,
-                   'exclude':None,
-                   'keep':5,
-                   'restore_preexec':None,
-                   'restore_postexec':None,
-                   'auth':'',
-                   'export_postexec':None,
-                   'inplace':False}
+    conf_params = {'export_preexec': None,
+                   'exclude': None,
+                   'keep': 5,
+                   'restore_preexec': None,
+                   'restore_postexec': None,
+                   'auth': '',
+                   'export_postexec': None,
+                   'inplace': False}
 
     conf_path = environ["DATADB_CONF"] if "DATADB_CONF" in environ else "/etc/datadb.ini"
 
     # Load profiles
     config = ConfigParser()
     config.read(conf_path)
-    config = {section:{k:config[section][k] for k in config[section]} for section in config.sections()}
+    config = {section: {k: config[section][k] for k in config[section]} for section in config.sections()}
 
     for conf_k, conf_dict in config.items():
         for expect_param, expect_default in conf_params.items():
             if expect_param not in conf_dict.keys():
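Note: the options documented in the docstring above map onto sections of /etc/datadb.ini, one section per profile, read via ConfigParser. A hypothetical profile for illustration only (the section name, host, paths, and commands are invented):

    [wiki]
    uri = rsync://datadb.services.davepedu.com/wiki
    dir = /srv/wiki
    keep = 5
    exclude = cache,tmp
    export_preexec = systemctl stop wiki
    export_postexec = systemctl start wiki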
@@ -252,27 +259,29 @@ def main():
     parser = argparse.ArgumentParser(description="Backupdb Agent depends on config: /etc/datadb.ini")
 
-    parser.add_argument('-f', '--force', default=False, action='store_true', help='force restore operation if destination data already exists')
+    parser.add_argument('-f', '--force', default=False, action='store_true',
+                        help='force restore operation if destination data already exists')
     parser.add_argument('-n', '--no-exec', default=False, action='store_true', help='don\'t run pre/post-exec commands')
     parser.add_argument('-b', '--no-pre-exec', default=False, action='store_true', help='don\'t run pre-exec commands')
-    parser.add_argument('-m', '--no-post-exec', default=False, action='store_true', help='don\'t run post-exec commands')
+    parser.add_argument('-m', '--no-post-exec', default=False, action='store_true',
+                        help='don\'t run post-exec commands')
 
     parser.add_argument('profile', type=str, choices=config.keys(), help='Profile to restore')
 
-    #parser.add_argument('-i', '--identity',
+    # parser.add_argument('-i', '--identity',
     #                     help='Ssh keyfile to use', type=str, default='/root/.ssh/datadb.key')
-    #parser.add_argument('-r', '--remote',
+    # parser.add_argument('-r', '--remote',
     #                     help='Remote server (rsync://...)', type=str, required=True)
-    #parser.add_argument('-l', '--local_dir',
+    # parser.add_argument('-l', '--local_dir',
     #                     help='Local path', type=str, required=True)
 
     subparser_modes = parser.add_subparsers(dest='mode', help='modes (only "rsync")')
 
-    subparser_backup = subparser_modes.add_parser('backup', help='backup to datastore')
+    subparser_backup = subparser_modes.add_parser('backup', help='backup to datastore')  # NOQA
 
-    subparser_restore = subparser_modes.add_parser('restore', help='restore from datastore')
+    subparser_restore = subparser_modes.add_parser('restore', help='restore from datastore')  # NOQA
 
-    subparser_status = subparser_modes.add_parser('status', help='get info for profile')
+    subparser_status = subparser_modes.add_parser('status', help='get info for profile')  # NOQA
 
     args = parser.parse_args()
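Note: the resulting CLI takes global flags, then a profile name, then one of the three mode subcommands. Hypothetical invocations (the profile name "wiki" is invented):

    datadb wiki backup       # push the profile's dir to the datastore
    datadb -f wiki restore   # restore, overwriting existing local data
    datadb wiki status       # report the profile's sync status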


@@ -10,5 +10,4 @@ setup(name='datadb',
       author='dpedu',
       author_email='dave@davepedu.com',
       packages=['datadb'],
-      scripts=['bin/datadb']
-      )
+      scripts=['bin/datadb'])