misc api upload refinements

parent edb80828e8
commit 2ad28d1958

@@ -4,7 +4,7 @@ import logging
 import json
 from datetime import datetime, timedelta
 from photoapp.library import PhotoLibrary
-from photoapp.types import Photo, PhotoSet, Tag, TagItem, PhotoStatus, User, known_extensions, known_mimes
+from photoapp.types import Photo, PhotoSet, Tag, TagItem, PhotoStatus, User, known_extensions, known_mimes, genuuid
 from jinja2 import Environment, FileSystemLoader, select_autoescape
 from sqlalchemy import desc
 from sqlalchemy import func, and_, or_
@@ -152,19 +152,7 @@ class PhotosApiV1(object):
         upload accepts one photoset (multiple images)
         """
-        # load and verify metadata
-        meta = json.loads(meta)
-        if type(files) != list:
-            files = [files]
-        if set([file.filename for file in files]) != set(meta["files"].keys()):
-            raise cherrypy.HTTPError(400, f"file metadata missing")
-
-        # use the photo's date to build a base path
-        # each file's sha and file extension will be appended to this
-        photo_date = datetime.fromisoformat(meta["date"])
-        basepath = photo_date.strftime("%Y/%m/%d/%Y-%m-%d_%H.%M.%S")
 
         stored_files = []
-        photo_objs = []
 
         def abort_upload(reason):
             for file in stored_files:
@@ -173,6 +161,22 @@ class PhotosApiV1(object):
             cherrypy.response.status = 400
             return {"error": reason}
 
+        meta = json.loads(meta)
+        if type(files) != list:
+            files = [files]
+        if set([file.filename for file in files]) != set(meta["files"].keys()):
+            raise cherrypy.HTTPError(400, f"file metadata missing")
+
+        dupes = db.query(Photo).filter(Photo.hash.in_([f["hash"] for f in meta["files"].values()])).first()
+        if dupes:
+            return abort_upload(f"file already in database: {dupes.path}")
+
+        # use the photo's date to build a base path
+        # each file's sha and file extension will be appended to this
+        photo_date = datetime.fromisoformat(meta["date"])
+        basepath = photo_date.strftime("%Y/%m/%d/%Y-%m-%d_%H.%M.%S")
+        photo_objs = []
+
         for file in files:
             # build path using the sha and extension. note that we trust the sha the client provided now & verify later
             # something like 2019/06/25/2019-06-25_19.28.05_cea1a138.png
@@ -182,7 +186,7 @@ class PhotosApiV1(object):
             photo_path = f"{basepath}_{photo_meta['hash'][0:8]}.{ext}"
 
             if self.library.storage.exists(photo_path):
-                return abort_upload("file already in library: {photo_path}")
+                return abort_upload(f"file already in library: {photo_path}")
 
             # write file to the path (and copy sha while in flight)
             with closing(self.library.storage.open(photo_path, 'wb')) as f:
@@ -207,28 +211,43 @@ class PhotosApiV1(object):
                 return abort_upload(str(ae))
 
             # create photo object for this entry
-            p = Photo(hash=shasum,
+            p = Photo(uuid=genuuid(),
+                      hash=shasum,
                       path=photo_path,
                       format=photo_meta.get("format"),
                       size=photo_meta.get("size"),
                       width=photo_meta.get("width"),  # not verified
                       height=photo_meta.get("height"),  # not verified
-                      orientation=photo_meta.get("orientation"))  # not verified
+                      orientation=photo_meta.get("orientation"),  # not verified
+                      fname=photo_meta.get("fname"))
 
             photo_objs.append(p)
 
-        ps = PhotoSet(date=photo_date,
-                      date_real=photo_date,  # TODO support time offsets
-                      files=photo_objs)  # TODO support title field etc
+        for pob in photo_objs:
+            db.add(pob)
 
-        db.add(ps)
+        if meta["uuid"] is not None:
+            ps = db.query(PhotoSet).filter(PhotoSet.uuid == meta["uuid"]).first()
+            if not ps:
+                return abort_upload("parent uuid not found")
+            ps.date = photo_date
+            ps.date_real = photo_date
+            ps.files.extend(photo_objs)
+        else:
+            ps = PhotoSet(uuid=genuuid(),
+                          date=photo_date,
+                          date_real=photo_date,  # TODO support time offsets
+                          files=photo_objs)  # TODO support title field et
+            db.add(ps)
 
+        ps_json = ps.to_json()  # we do this now to avoid a sqlalchemy bug where the object disappears after the commit
+
         try:
             db.commit()
-        except IntegrityError:
-            return abort_upload()
+        except IntegrityError as ie:
+            return abort_upload(str(ie))
 
-        return ps.to_json()
+        return ps_json
 
     @cherrypy.expose
     @cherrypy.tools.json_out()
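
For reference, the handler above parses meta from a JSON string and reads meta["date"], meta["uuid"], meta["files"][<filename>]["hash"], plus the photo_meta.get(...) fields used to build each Photo row. A rough sketch of that payload, inferred from those reads only; the concrete values (and whether "format" holds a mime type or a bare extension) are assumptions, not taken from the commit:

    # Illustrative upload metadata; field names come from the handler above, values are made up.
    import json

    meta = {
        "date": "2019-06-25T19:28:05",  # parsed with datetime.fromisoformat()
        "uuid": None,                   # or an existing PhotoSet uuid to append these files to it
        "files": {
            "2019-06-25_19.28.05.png": {     # keys must match the uploaded filenames
                "hash": "cea1a138...",       # client-supplied sha (truncated here), verified server-side later
                "format": "image/png",
                "size": 123456,
                "width": 1920,               # not verified
                "height": 1080,              # not verified
                "orientation": 0,            # not verified
                "fname": "2019-06-25_19.28.05.png",
            },
        },
    }

    payload = json.dumps(meta)  # sent alongside the multipart "files" fields
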
@@ -119,11 +119,7 @@ def main():
         raise
 
     elif args.action == "ingest":
-        if args.copy_of:
-            raise NotImplementedError("--copy-of isn't implemented")
-
         sets, skipped = get_photosets(args.files)
-
         #TODO y/n confirmation and auto flag
         #TODO optional progress printing
         print("skipping:", skipped)
@@ -137,13 +133,17 @@ def main():
             for file in set_.files:
                 files.append(("files", (os.path.basename(file.path), open(file.path, 'rb'), file.format), ))
 
+            if args.copy_of:
+                payload["uuid"] = args.copy_of
+
             print("Uploading: ", [os.path.basename(file.path) for file in set_.files])
             try:
                 result = client.upload(files, payload)
+                print("Uploaded: ", result.json()["uuid"])
             except HTTPError as he:
                 print(he.response.json())
-                return
-            print("Uploaded: ", result.json()["uuid"])
+                # TODO collect errors and print later
+                # return
             print(f"{num} / {len(sets)}")
             # TODO be nice and close the files
 
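
The ingest loop above opens each file with open(file.path, 'rb') but never closes it, as the "# TODO be nice and close the files" comment notes. One possible shape for that cleanup, as a sketch only; the upload_set helper name is hypothetical and not part of the commit:

    # Hypothetical helper: upload one photoset, then close the handles opened for it.
    # "files" is the list built in the loop above: ("files", (name, fileobj, mime)) tuples.
    def upload_set(client, files, payload):
        try:
            return client.upload(files, payload)
        finally:
            for _field, (_name, fileobj, _mime) in files:
                fileobj.close()
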
@@ -29,10 +29,8 @@ known_mimes = {"image/png",
                "video/quicktime"}
 
 
-def mime2ext(mime):
-    """
-    Given a mime type return the canonical file extension
-    """
+def genuuid():
+    return str(uuid.uuid4())
 
 
 def map_extension(ext):
@@ -83,7 +81,7 @@ class Photo(Base):
 
     id = Column(Integer, primary_key=True)
     set_id = Column(Integer, ForeignKey("photos.id"))
-    uuid = Column(Unicode, unique=True, default=lambda: str(uuid.uuid4()))
+    uuid = Column(Unicode, unique=True, default=genuuid)
 
     set = relationship("PhotoSet", back_populates="files", foreign_keys=[set_id])
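
The uuid column change above swaps an inline lambda for the shared genuuid helper; in both forms SQLAlchemy calls the function once per inserted row, so every Photo gets its own uuid. A standalone sketch of the same pattern, assuming SQLAlchemy 1.4+ (the Example model is illustrative, not part of photoapp):

    # Callable column default, mirroring the Photo.uuid change above.
    import uuid

    from sqlalchemy import Column, Integer, Unicode, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()


    def genuuid():
        return str(uuid.uuid4())


    class Example(Base):  # illustrative model, not from the codebase
        __tablename__ = "example"
        id = Column(Integer, primary_key=True)
        uuid = Column(Unicode, unique=True, default=genuuid)  # genuuid() is called per insert


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add_all([Example(), Example()])
        session.commit()
        print([row.uuid for row in session.query(Example)])  # two distinct uuids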