backupdb2/backupdb2/misc.py

import os
import hashlib
import subprocess
import logging
from configparser import ConfigParser


def load_cli_config(config_path=None):
"""
Load the CLI config file. The config is delivered as a dict structured like:
{
"options": {...},
"backups": {
"name": {
"uri": null,
"dir": null,
"keep": null,
"auth": null,
"restore_preexec": null,
"restore_postexec": null,
"export_preexec": null,
"export_postexec": null,
"exclude": null
}
}
}
"""
config = {
"options": {},
"backups": {}
}
# local the config file
if not config_path:
config_format, config_path = locate_cli_config()
else:
config_format = get_config_format(config_path)
if not config_path or not os.path.exists(config_path):
raise Exception(f"config file not found: {config_path}")
# TODO other formats and loaders
if config_format == "ini":
load_cli_config_ini(config, config_path)
# logging.info("no config file available")
return config
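
# Illustrative usage sketch (assumes a valid ini config exists at the default
# path, /etc/datadb.ini; the keys read below come from the structure documented
# above):
#
#     config = load_cli_config()
#     for name, backup in config["backups"].items():
#         print(name, backup["dir"], backup["keep"])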


def locate_cli_config():
    if os.path.exists("/etc/datadb.ini"):
        return "ini", "/etc/datadb.ini"
    return None, None


def get_config_format(fpath):
    return "ini"  # TODO: detect other config formats


def load_cli_config_ini(config, config_path):
    """
    The ini config file format is structured like:

        [_backupdb_]            ; global section
        server=https://foo.bar/ ; server http address
        username=foo            ; http basic auth user for the server (optional)
        password=bar            ; http basic auth password for the server (optional)
        namespace=default       ; backup namespace

        [backup_name]           ; one backup config; the section name must be unique
        method=archive          ; backup method, only 'archive' is supported for now
        dir=/foo                ; dir to back up
        keep=6                  ; how many copies to instruct the server to keep
        exclude=                ; exclude pattern (passed directly to the underlying implementation)
        restore_preexec=        ; commands to run before/after restoring
        restore_postexec=       ;
        export_preexec=         ; commands to run before/after backing up
        export_postexec=        ;
    """
    # defaults for the global [_backupdb_] section
    config_defaults = {
        "server": None,
        "username": None,
        "password": None,
        "namespace": "default"
    }
    # defaults per backup section
    backup_defaults = {
        "method": "archive",
        "dir": None,
        "keep": 7,
        "exclude": None,
        "restore_preexec": None,
        "restore_postexec": None,
        "export_preexec": None,
        "export_postexec": None
    }
    backup_required_params = {"dir"}
    global_required_params = {"server"}
    int_keys = {"keep"}

    # load profiles
    parser = ConfigParser(inline_comment_prefixes=("#", ";", ))
    parser.read(config_path)
    for section_name in parser.sections():
        section = parser[section_name]
        if section_name == "_backupdb_":  # parse the global section
            for k, v in config_defaults.items():
                config["options"][k] = section.get(k) or v
            validate_section(section_name, config["options"], global_required_params)
        else:  # parse a backup section
            config["backups"][section_name] = {}
            for k, v in backup_defaults.items():
                value = section.get(k) or v
                if k in int_keys:
                    value = int(value)
                config["backups"][section_name][k] = value
            validate_section(section_name, config["backups"][section_name], backup_required_params)
    return config
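
# For reference, a minimal config file that load_cli_config_ini() accepts.
# The server URL, section name, and paths below are illustrative only:
#
#     [_backupdb_]
#     server=https://backups.example.com/
#     namespace=default
#
#     [homedir]
#     dir=/home/foo
#     keep=4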


def validate_section(section_name, d, required_keys):
    for k in required_keys:
        if not d.get(k):
            raise Exception(f"config section '{section_name}' missing key '{k}'")


def tabulate(rows, headers):
    """
    Print rows as an aligned, fixed-width table under the given headers
    """
    rows = [list(row) for row in list(rows)]
    lengths = [0] * len(headers)
    # stringify every cell and record the widest entry in each column
    for row in rows + [headers]:
        for i, c in enumerate(row):
            row[i] = str(c)
            lengths[i] = max(lengths[i], len(row[i]))
    for i, h in enumerate(headers):
        print(h + " " * (lengths[i] - len(h)) + " ", end="")
    print()
    for i, h in enumerate(headers):
        print("-" * lengths[i] + " ", end="")
    print()
    for row in rows:
        for i, c in enumerate(row):
            print(c + " " * (lengths[i] - len(c)) + " ", end="")
        print()
    print()


def tabulate_dict(d):
    tabulate(d.items(), headers=["property", "value"])
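
# Example (illustrative values): tabulate([("a", 1), ("bb", 22)], headers=["k", "v"])
# pads each column to the width of its longest cell, under a dashed rule:
#
#     k  v
#     -- --
#     a  1
#     bb 22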


def get_tarcmd():
    # prefer GNU tar ("gtar") where it is a separate binary, e.g. on BSD/macOS
    return "gtar" if has_binary("gtar") else "tar"


def has_binary(name):
    """
    Check if the passed command is available
    :return: boolean
    """
    try:
        subprocess.check_call(['which', name], stdout=subprocess.DEVNULL)
    except subprocess.CalledProcessError:
        return False
    return True


def tar_scan_errors(stream, error_list):
    """
    Read and log lines from a stream, appending messages that look like errors to error_list.

    Tar does not have an option to ignore file-removed errors. The warnings can be hidden, but even with
    --ignore-failed-read, file-removed errors cause a non-zero exit. So, hide the warnings we don't care
    about using --warning=no-xxx and scan the output for unknown messages, assuming anything found is bad.
    """
    for line in stream:
        line = line.decode("UTF-8").strip()
        if not line.startswith("./"):  # anything that isn't a file listing entry is treated as an error
            if line not in error_list:
                error_list.append(line)
            logging.info(line)
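
# Sketch of how this is typically driven (the exact tar invocation is an
# assumption; with the archive written to stdout, GNU tar emits its verbose
# "./path" listing and any warnings on stderr, which is the stream scanned here):
#
#     errors = []
#     proc = subprocess.Popen([get_tarcmd(), "-c", "-v", "-f", "-", "."],
#                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
#     tar_scan_errors(proc.stderr, errors)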


class WrappedStdout(object):
    """
    Requests will call tell() on a file-like readable stream if the tell attribute exists. However, subprocess'
    stdout stream (_io.BufferedReader) does not support this (it raises OSError: [Errno 29] Illegal seek).
    If the tell attribute is missing, requests falls back to simply iterating over the file-like object,
    so we support only the iterable interface.
    """

    BUFFSIZE = 1024 * 1024

    def __init__(self, readable):
        self.readable = readable
        self.hash = hashlib.sha256()

    def __iter__(self):
        return self

    def __next__(self):
        # hash each chunk as it passes through, so the digest covers the full stream
        data = self.readable.read(self.BUFFSIZE)
        if not data:
            logging.info("end of stream")
            raise StopIteration()
        self.hash.update(data)
        return data

    def close(self):
        self.readable.close()

    def sha256(self):
        return self.hash.hexdigest()
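

# Sketch of the intended use with requests (the URL and tar_command below are
# placeholders; requests streams any iterable passed as `data` using chunked
# transfer encoding, consuming __next__ chunk by chunk):
#
#     proc = subprocess.Popen(tar_command, stdout=subprocess.PIPE)
#     stream = WrappedStdout(proc.stdout)
#     requests.put("https://backups.example.com/upload", data=stream)
#     print(stream.sha256())  # digest of everything that was uploaded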