clean up logging and interfaces

This commit is contained in:
dave 2016-07-24 13:00:32 -07:00
parent 416b53d83a
commit aafe1a05ac
4 changed files with 38 additions and 9 deletions

View File

@ -146,13 +146,13 @@ class B2Reciever(Reciever):
row = c.execute("SELECT * FROM 'files' WHERE `path` = ?;", (f.rel_path,)).fetchone()
if self.should_transfer(row, f):
print("Uploading:", f.rel_path)
self.log.info("Uploading: %s", f.rel_path)
try:
# upload the file. if a row existed it means there may be historic copies of the file already there
result = self.put_file(f, purge_historics=row is not None)
except:
print("Failed:", f.rel_path)
print("Unexpected error:", sys.exc_info()[0])
self.log.error("Failed:", f.rel_path)
self.log.error("Unexpected error:", sys.exc_info()[0])
raise
# The file was uploaded, commit it to the db
@ -181,7 +181,7 @@ class B2Reciever(Reciever):
with closing(self.db.cursor()) as c_del:
for purge_file in c.execute("SELECT * FROM 'files' WHERE seen=0;"):
print("Delete on remote: ", purge_file["path"])
self.log.info("Delete on remote: %s", purge_file["path"])
self.purge_file(purge_file["path"])
c_del.execute("DELETE FROM 'files' WHERE path=?;", (purge_file["path"],))

View File

@ -19,10 +19,20 @@ class Reciever(object):
"""
Base class for destinations
"""
def put_file(self, file_info, purge_historics=False):
def xfer_file(self, f):
    """
    Future-called function that handles a single file. This method should:
    - check if the file is eligible for transfer (not in ignore list etc)
    - return Result.skipped if skipped
    - transfer the file and return Result.ok or Result.failed

    :param f: object describing the local file to transfer
    :raises NotImplementedError: always, in this base class; subclasses must override
    """
    # Bug fix: `NotImplemented` is the binary-comparison sentinel, not an
    # exception class — calling it raises TypeError instead of signalling an
    # abstract method. NotImplementedError is the correct exception.
    raise NotImplementedError()
def purge_file(self, file_path):
def purge(self):
    """
    Delete files on the remote that were not found when scanning the local tree. This will always be called AFTER
    scanning through the local tree and calling xfer_file for every file.

    :raises NotImplementedError: always, in this base class; subclasses must override
    """
    # Bug fix: `raise NotImplemented()` is a TypeError ('NotImplementedType'
    # object is not callable); NotImplementedError is the intended exception.
    raise NotImplementedError()
def teardown(self):

View File

@ -46,6 +46,8 @@ class B2SyncManager(object):
self.workers = workers
self.log.info("Initialized with %s workers, %s ignores", self.workers, len(self.exclude_res))
def sync(self):
"""
Sync the source to the dest. First uploads new local files, then cleans dead files from the remote.
@ -55,14 +57,15 @@ class B2SyncManager(object):
# Phase 2 - Delete files on the remote missing locally
self.purge_remote()
# Phase 3 - Tear down the src/dest modules
self.src.teardown()
self.dest.teardown()
self.cleanup()
def sync_up(self):
"""
Sync local files to the remote. All files in the DB will be marked as unseen. When a file is found locally it is
again marked as seen. This state later used to clear deleted files from the destination
"""
self.log.info("beginning upload phase")
chunk_size = 1000
# if rel_path matches any of the REs, the filter is True and the file is skipped
@ -89,10 +92,18 @@ class B2SyncManager(object):
During upload phase it is expected that destination modules track state of what files have been seen on the
local end. When local scan + upload is complete, the module uses this state to purge dead files on the remote.
"""
self.log.info("beginning remote purge phase")
self.dest.purge()
def cleanup(self):
    """
    Final sync phase: tear down the source and destination modules so they can
    release whatever resources (connections, handles) they hold.
    """
    # Bug fix: log message typo "cleanp" -> "cleanup".
    self.log.info("beginning cleanup phase")
    self.src.teardown()
    self.dest.teardown()
def sync(source_uri, dest_uri, account_id, app_key, workers=10, exclude=[], compare_method="mtime"):
log = logging.getLogger("mirror")
source = urlparse(source_uri)
dest = urlparse(dest_uri)
@ -113,6 +124,9 @@ def sync(source_uri, dest_uri, account_id, app_key, workers=10, exclude=[], comp
assert source_provider is not None
assert dest_receiver is not None
log.info("Source: %s", source_provider)
log.info("Dest: %s", dest_receiver)
syncer = B2SyncManager(source_provider, dest_receiver,
workers=workers, exclude_res=exclude)
syncer.sync()

View File

@ -8,10 +8,13 @@ import logging
def main():
logging.basicConfig(level=logging.INFO)
logging.basicConfig(level=logging.DEBUG,
format="%(asctime)-15s %(levelname)-8s %(filename)s:%(lineno)d %(message)s")
logging.getLogger("requests").setLevel(logging.ERROR)
logging.getLogger("urllib3").setLevel(logging.ERROR)
log = logging.getLogger("main")
parser = argparse.ArgumentParser(description="Sync data to/from B2")
parser.add_argument("-i", "--size", help="Compare by size instead of mtime", action="store_true", default=False)
@ -29,6 +32,8 @@ def main():
ignore_res = [re.compile(i) for i in args.exclude] if args.exclude else []
log.debug("Running with params: %s", args)
mirror.sync(
args.source,
args.dest,