most of api upload
parent 506c6e9c9a
commit 2c1ebea31c

@@ -1,6 +1,7 @@
import os
import cherrypy
import logging
import json
from datetime import datetime, timedelta
from photoapp.library import PhotoLibrary
from photoapp.types import Photo, PhotoSet, Tag, TagItem, PhotoStatus, User
@@ -29,7 +30,35 @@ class PhotosApiV1(object):

    @cherrypy.expose
    def upload(self, files, meta):
        pass
        """
        upload accepts one photoset (multiple images)
        metadata format
        """
        meta = json.loads(meta)

        if type(files) != list:
            files = [files]

        for file in files:
            print("File name:", file.filename)
            import hashlib
            sha = hashlib.sha256()

            total = 0
            while True:
                b = file.file.read(1024)
                if not b:
                    break
                sha.update(b)
                total += len(b)
            print("Read length:", total)
            print("Read sha256:", sha.hexdigest())

            if str(file.filename) not in meta["files"].keys():
                raise cherrypy.HTTPError(400, f"no metadata provided for filename '{file.filename}'")
            print("we have metadata for this file:", meta["files"][file.filename])

        print("____")

    @cherrypy.expose
    @cherrypy.tools.json_out()
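
The handler above reads each uploaded part in 1 KiB chunks while hashing it; a minimal sketch of pulling that loop into a helper (the helper name and chunk size are assumptions, not part of this commit):

import hashlib


def hash_and_measure(fileobj, chunk_size=1024):
    """Consume fileobj, returning its sha256 hexdigest and total byte count."""
    sha = hashlib.sha256()
    total = 0
    while True:
        chunk = fileobj.read(chunk_size)
        if not chunk:
            break
        sha.update(chunk)
        total += len(chunk)
    return sha.hexdigest(), total
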
@@ -1,14 +1,19 @@
import os
import json
import argparse
import requests
from requests.exceptions import HTTPError
from photoapp.utils import get_extension
from photoapp.types import known_extensions
from photoapp.common import pwhash
from photoapp.ingest import get_photosets


class PhotoApiClient(object):
    def __init__(self, base_url):
    def __init__(self, base_url, passwd=None):
        self.session = requests.Session()
        if passwd:
            self.session.auth = passwd  # user, pass tuple
        self.base_url = base_url

    def byhash(self, sha):
@@ -23,8 +28,8 @@ class PhotoApiClient(object):
    def delete(self, url, **params):
        return self.do("delete", url, **params)

    def do(self, method, url, **params):
        resp = getattr(self.session, method)(self.base_url + "/api/v1/" + url, **params)
    def do(self, method, url, **kwargs):
        resp = getattr(self.session, method)(self.base_url + "/api/v1/" + url, **kwargs)
        resp.raise_for_status()
        return resp
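
Because do() calls raise_for_status(), any non-2xx response surfaces as requests.exceptions.HTTPError, which this module already imports; a brief sketch of catching it around one of the calls shown here (the client instance and username are illustrative):

from requests.exceptions import HTTPError

try:
    client.delete_user("nosuchuser")  # any PhotoApiClient call that goes through do()
except HTTPError as e:
    print("request failed with status", e.response.status_code)
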
@@ -38,10 +43,17 @@ class PhotoApiClient(object):
    def delete_user(self, username):
        return self.delete("user", params={"username": username})

    def upload(self, files, metadata):
        # print(">>>>>>", metadata)
        return self.post("upload", files=files, data={"meta": json.dumps(metadata)})


def get_args():
    parser = argparse.ArgumentParser(description="photo library cli")
    parser.add_argument("-s", "--host", required=True, help="photo library server address")
    # TODO nicer uri parser
    parser.add_argument("--host", required=True, help="photo library server address")
    parser.add_argument("--user", required=True)
    parser.add_argument("--password", required=True)

    sp_action = parser.add_subparsers(dest="action", help="action to take")
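
A rough usage sketch tying the basic-auth constructor to the new upload call; the import path, server URL, credentials, and file name are assumptions for illustration, not taken from this commit:

from photoapp.client import PhotoApiClient  # assumed module path

client = PhotoApiClient("http://localhost:8080", ("admin", "secret"))

metadata = {"files": {"IMG_0001.jpg": {"format": "image/jpeg"}}}
with open("IMG_0001.jpg", "rb") as fh:
    files = [("files", ("IMG_0001.jpg", fh, "image/jpeg"))]
    client.upload(files, metadata)  # multipart upload plus meta as a JSON form field
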
@@ -50,8 +62,8 @@ def get_args():
    p_dupes.add_argument("files", nargs="+", help="files to check")

    p_ingest = sp_action.add_parser("ingest", help="import images into the library")
    p_ingest.add_argument("files", nargs="+", help="files to import")
    p_ingest.add_argument("-c", "--copy-of", help="existing uuid the imported images will be placed under")
    p_ingest.add_argument("files", nargs="+", help="files to import")

    # User section
    p_adduser = sp_action.add_parser("user", help="user manipulation functions")
@@ -73,7 +85,7 @@ def main():
    args = get_args()
    print(args)

    client = PhotoApiClient(args.host)
    client = PhotoApiClient(args.host, (args.user, args.password, ))

    if args.action == "checkdupes":
        hashes = {}
@@ -107,7 +119,25 @@ def main():
            raise

    elif args.action == "ingest":
        pass
        if args.copy_of:
            raise NotImplementedError("--copy-of isn't implemented")

        sets, skipped = get_photosets(args.files)

        #TODO y/n confirmation and auto flag
        #TODO optional progress printing
        print("skipping:", skipped)
        print("sets:", [[f.path for f in s.files] for s in sets])

        for set_ in sets:
            payload = set_.to_json()
            payload["files"] = {os.path.basename(photo.path): photo.to_json() for photo in set_.files}

            files = []
            for file in set_.files:
                files.append(("files", (os.path.basename(file.path), open(file.path, 'rb'), file.format), ))

            client.upload(files, payload)

    elif args.action == "user":
        if args.action_user == "create":
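
The loop above opens each file without closing it once the upload finishes; a minimal sketch of the same idea using contextlib.ExitStack so the handles are closed per photoset (a tidying suggestion, not part of the commit):

import os
from contextlib import ExitStack

for set_ in sets:
    payload = set_.to_json()
    payload["files"] = {os.path.basename(p.path): p.to_json() for p in set_.files}

    with ExitStack() as stack:
        files = [("files", (os.path.basename(f.path), stack.enter_context(open(f.path, "rb")), f.format))
                 for f in set_.files]
        client.upload(files, payload)  # handles are closed when the with-block exits
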
@@ -19,8 +19,8 @@ def get_jpg_info(fpath):
        raise Exception("fuk")

    # gps is set to 0,0 if unavailable
    lat, lon = gps or [0, 0]
    dimensions = dimensions or (0, 0)
    lat, lon = gps or [None, None]
    dimensions = dimensions or (None, None)
    mime = magic.from_file(fpath, mime=True)
    size = os.path.getsize(fpath)

@@ -4,7 +4,7 @@ import traceback
from photoapp.library import PhotoLibrary
from photoapp.image import get_jpg_info, get_hash, get_mtime
from itertools import chain
from photoapp.types import Photo, PhotoSet, known_extensions, regular_images, files_raw, files_video
from photoapp.types import Photo, PhotoSet, known_extensions, regular_images, files_raw, files_video, map_extension
import os

@@ -26,76 +26,74 @@ def pprogress(done, total=None):
    print(" complete: {}{}\r".format(done, " / {} ".format(total) if total else ''), end='')


def batch_ingest(library, files):
    # group by extension
def group_by_extension(files):
    byext = {k: [] for k in known_extensions}
    excluded = []

    total = len(files)
    print("processing {} items".format(total))
    print("Pre-sorting files")
    for item in files:
        if not os.path.isfile(item):
            print("Skipping due to not a file: {}".format(item))
        if not os.path.isfile(item):  # Not a file
            continue
        extension = item.split(".")
        if len(extension) < 2:
            print("Skipping due to no extension: {}".format(item))
        if len(extension) < 2:  # no extension
            excluded.append(item)
            continue
        extension = extension[-1].lower()
        if extension == "jpeg":
            extension = "jpg"
        if extension not in known_extensions:
            print("Skipping due to unknown extension: {}".format(item))
        extension = map_extension(extension[-1].lower())
        if extension not in known_extensions:  # an extension we don't support
            excluded.append(item)
            continue
        byext[extension.lower()].append(item)

    print("Scanning images")
    photos = []
    return (byext, excluded)


def get_photosets(files):
    byext, skipped = group_by_extension(files)

    photosets = []

    # process regular images first.
    for item in chain(*[byext[ext] for ext in regular_images]):
        photos.append(get_jpg_info(item))
        pprogress(len(photos), total)
        photosets.append(get_jpg_info(item))

    print("\nScanning RAWs")
    # process raws
    done = len(photos)
    for item in chain(*[byext[ext] for ext in files_raw]):
        itemmeta = Photo(hash=get_hash(item), path=item, size=os.path.getsize(item),
                         format=special_magic(item))
        fprefix = os.path.basename(item)[::-1].split(".", 1)[-1][::-1]
        fmatch = "{}.jpg".format(fprefix.lower())
        fmatch = "{}.jpg".format(fprefix.lower())  # if we're inspecting "foobar.raw", match it with "foobar.jpg"
        # TODO does this account for extension mapping like jpeg->jpg?
        foundmatch = False
        for photo in photos:
        for photo in photosets:
            for fmt in photo.files[:]:
                if os.path.basename(fmt.path).lower() == fmatch:
                    foundmatch = True
                    photo.files.append(itemmeta)
                    done += 1
                    pprogress(done, total)
                    break
            if foundmatch:
                break
        if not foundmatch:
            mtime = get_mtime(item)
            photos.append(PhotoSet(date=mtime, date_real=mtime, lat=0, lon=0, files=[itemmeta]))
            done += 1
            pprogress(done, total)
            # TODO prune any xmp without an associated regular image or cr2
            print("no match found for", itemmeta.path, "but importing anyway")
            photosets.append(PhotoSet(date=mtime, date_real=mtime, files=[itemmeta]))
    # TODO handle any xmp without an associated regular image or cr2

    print("\nScanning other files")
    # process all other formats
    # process other known formats
    for item in chain(*[byext[ext] for ext in files_video]):
        itemmeta = Photo(hash=get_hash(item), path=item, size=os.path.getsize(item),
                         format=special_magic(item))
        mtime = get_mtime(item)
        photos.append(PhotoSet(date=mtime, date_real=mtime, lat=0, lon=0, files=[itemmeta]))
        done += 1
        pprogress(done, total)
        photosets.append(PhotoSet(date=mtime, date_real=mtime, files=[itemmeta]))

    return photosets, skipped


def batch_ingest(library, files):
    sets, skipped = get_photosets(files)

    print("\nUpdating database")
    done = 0
    total = len(photos)
    for photoset in photos:
    total = len(sets)
    for photoset in sets:
        try:
            library.add_photoset(photoset)
            pprogress(done, total)
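
The reversed-slice expression used to build fprefix above strips the last extension from a basename; a small illustration of the equivalent result via os.path.splitext (the helper name is just for demonstration):

import os


def strip_last_extension(path):
    # "IMG_0123.CR2" -> "IMG_0123"; same result as basename[::-1].split(".", 1)[-1][::-1]
    return os.path.splitext(os.path.basename(path))[0]


assert strip_last_extension("/photos/IMG_0123.CR2").lower() + ".jpg" == "img_0123.jpg"
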
@@ -11,6 +11,14 @@ known_extensions = ["jpg", "png", "cr2", "xmp", "mp4", "mov"]
regular_images = ["jpg", "png"]
files_raw = ["cr2", "xmp"]
files_video = ["mp4", "mov"]
mapped_extensions = {"jpg": {"jpeg", }}  # target: aliases


def map_extension(ext):
    for target, aliases in mapped_extensions.items():
        if ext in aliases:
            return target
    return ext


class PhotoStatus(enum.Enum):
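
A quick illustration of what the alias table above resolves to when passed through map_extension:

map_extension("jpeg")  # -> "jpg" (alias resolved to its target)
map_extension("jpg")   # -> "jpg" (already a target, returned unchanged)
map_extension("cr2")   # -> "cr2" (not in the alias table, passed through)
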
@@ -41,8 +49,8 @@ class PhotoSet(Base):

    def to_json(self):
        s = {attr: getattr(self, attr) for attr in {"uuid", "title", "description"}}
        s["lat"] = str(self.lat)
        s["lon"] = str(self.lon)
        s["lat"] = str(self.lat) if self.lat else None
        s["lon"] = str(self.lon) if self.lon else None
        s["date"] = self.date.isoformat()
        s["files"] = {i.uuid: i.to_json() for i in self.files}
        s["tags"] = [t.name for t in self.tags]
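
With the change above, photosets without a GPS fix serialize lat/lon as None (null in JSON) instead of a stringified zero; a sketch of the resulting dict for such a set (field values are illustrative only):

{
    "uuid": "3f6b...",                       # example values only
    "title": None,
    "description": None,
    "lat": None,                             # no GPS data on this set
    "lon": None,
    "date": "2019-07-04T12:30:00",
    "files": {"9c1d...": {"...": "..."}},    # per-file to_json() output, keyed by uuid
    "tags": ["vacation"],
}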