Class-ify code and make it more robust

This commit is contained in:
dpedu 2015-06-25 10:22:03 -07:00
parent 01b183023d
commit f55dec5bdb
1 changed file with 177 additions and 129 deletions

126
watch.py
View File

@ -11,17 +11,49 @@ from fsevents import Observer, Stream
from paramiko.ssh_exception import SSHException
from re import compile as regexp
class sftpwatch:
    def __init__(
        self,
        ignore=None,
        mapping=None,
        host=None,
        user=None,
        password=None,
        rootdir=None
    ):
        """Watch a local directory tree and mirror file changes to an SFTP server.

        :param ignore: list of compiled regexes; events whose path matches any
                       of them are skipped (defaults to .git and .DS_Store)
        :param mapping: list of (local prefix, remote prefix) path pairs
        :param host: SSH hostname to connect to
        :param user: SSH username
        :param password: SSH password
        :param rootdir: local root directory being watched
        """
        # Mutable default arguments ([] and [regexp(...), ...]) are created
        # once and shared across every instance in Python; use None sentinels
        # and build fresh objects per instance instead.
        if ignore is None:
            ignore = [
                regexp(r'.*\.git.*'),
                regexp(r'.*\.DS_Store$')
            ]
        self.ignore = ignore
        self.PATH_MAPPING = mapping if mapping is not None else []
        self.host = host
        self.user = user
        self.password = password
        self.root = rootdir
        self.observer = None   # fsevents Observer, created by watch()
        self.stream = None     # fsevents Stream, created by watch()
        self.sf = None         # paramiko SFTPClient, set by connect()
        self.connected = False
        self.exit = False      # set True by signal_handler to request shutdown
        try:
            self.connect()
        except Exception as e:
            logging.critical("SSH Could not connect!")
            logging.critical(str(e))
            # raise SystemExit rather than calling exit(): the exit() builtin
            # is only installed when the `site` module is loaded.
            raise SystemExit(1)
def ssh_connect(hostname, username, password):
def connect(self):
self.sf = self.getsftp(args.host, args.user, args.password)
def watch(self):
self.observer = Observer()
self.observer.start()
self.stream = Stream(self.file_event_callback, self.root, file_events=True)
self.observer.schedule(self.stream)
def ssh_connect(self, hostname, username, password):
"""Connect to SSH server and return (authenticated) transport socket"""
host_keys = paramiko.util.load_host_keys(expanduser('~/.ssh/known_hosts'))
if hostname in host_keys:
@ -32,24 +64,19 @@ def ssh_connect(hostname, username, password):
logging.critical("Host key not found")
exit(0)
try:
t = paramiko.Transport((hostname, 22))
t.set_keepalive(30)
t.connect(hostkey, username, password)
return t
except SSHException as sshe:
logging.critical(str(sshe))
logging.critical("SSH: unable to connect!")
exit(1)
def getsftp(hostname, username, password):
def getsftp(self, hostname, username, password):
"""Return a ready-to-roll paramiko sftp object"""
t = ssh_connect(hostname, username, password)
t = self.ssh_connect(hostname, username, password)
return paramiko.SFTPClient.from_transport(t)
def transfer_file(localpath, remotepath):
def transfer_file(self, localpath, remotepath):
"""Transfer file over sftp"""
with sf.open(remotepath, 'wb') as destination:
with self.sf.open(remotepath, 'wb') as destination:
with open(localpath, 'rb') as source:
total = 0
while True:
@ -59,20 +86,41 @@ def transfer_file(localpath, remotepath):
destination.write(data)
total += len(data)
def file_event_callback(event):
def file_event_callback(self, event):
"""Respond to file events"""
# Make sure we have sftp connectivity
while not self.exit:
try:
assert not self.sf == None
self.sf.stat("/")
break
except (OSError, AssertionError) as e:
logging.warning("Attempting to connect...")
try:
self.connect()
break
except Exception as ee:
logging.warning("Could not Connect.")
logging.error("Error was: %s" % str(ee))
logging.warning("Trying again in 5 seconds...")
sleep(5)
if self.exit:
return
# check ignored
for expr in ignore:
for expr in self.ignore:
if not expr.match(event.name) == None:
return
# Determine file path relative to our root
filePath = event.name.replace(args.root, "")
filePath = event.name.replace(self.root, "")
logging.debug("Path from basedir: %s" % filePath)
# Apply directory mapping
for mapping in PATH_MAPPING:
for mapping in self.PATH_MAPPING:
localMapPath,remoteMapPath = mapping
if filePath[0:len(localMapPath)]==localMapPath:
logging.debug("Using mapping: %s" % (str(mapping)))
@ -97,17 +145,17 @@ def file_event_callback(event):
pathSegment = "/".join(path_dirs[0:i+1])
logging.debug("stat %s" % pathSegment)
try:
sf.stat(pathSegment)
self.sf.stat(pathSegment)
except IOError as e:
logging.info("Creating %s" % pathSegment)
sf.mkdir(pathSegment)
self.sf.mkdir(pathSegment)
# If file, upload it
if isfile(event.name) or islink(event.name):
tries = 0
while True:
try:
bytesSent = transfer_file(event.name, filePath)
bytesSent = self.transfer_file(event.name, filePath)
break
except IOError as ioe:
logging.error("Unable to upload file: %s" % str(ioe))
@ -127,9 +175,9 @@ def file_event_callback(event):
logging.info("removing %s" % filePath)
# Just delete it
try:
sf.remove(filePath)
self.sf.remove(filePath)
except:
# Silently fail so we don't delete.
# Silently fail so we don't delete unexpected stuff
pass
"""
@ -142,12 +190,13 @@ def file_event_callback(event):
done IN_DELETE - file was deleted
IN_ATTRIB - attributes modified - ignore for now
"""
#self.sf.close()
def signal_handler(signal, frame):
def signal_handler(self, signal, frame):
logging.info('Cleaning up....')
observer.unschedule(stream)
observer.stop()
self.exit = True
self.observer.unschedule(self.stream)
self.observer.stop()
if __name__ == "__main__":
@ -171,17 +220,16 @@ if __name__ == "__main__":
logging.critical("At least one --map is required.")
exit(1)
for mapping in args.map:
PATH_MAPPING.append(mapping.split(":"))
path_maps = []
sf = getsftp(args.host, args.user, args.password)
for mapping in args.map:
path_maps.append(mapping.split(":"))
pywatch = sftpwatch(mapping=path_maps, host=args.host, user=args.user, password=args.password, rootdir=args.root)
signal.signal(signal.SIGINT, pywatch.signal_handler)
logging.info("watching %s" % args.root)
observer = Observer()
observer.start()
stream = Stream(file_event_callback, args.root, file_events=True)
observer.schedule(stream)
signal.signal(signal.SIGINT, signal_handler)
signal.pause()
pywatch.watch()