more jobs progress
Gitea/photolib/pipeline/head: This commit looks good

dave 2023-02-01 23:04:42 -08:00
parent 62190f357f
commit ab2858cb04
3 changed files with 38 additions and 13 deletions

View File

@@ -336,7 +336,7 @@ class JobsApiV1(object):
         elif typ == JobTargetType.photoset:
             query = db.query(PhotoSet.id).filter(PhotoSet.uuid.in_(target["uuids"]))
         elif typ == JobTargetType.tag:
-            query = db.query(Tag.id).filter(Tag.name.in_(target["uuids"]))
+            query = db.query(Tag.id).filter(Tag.uuid.in_(target["uuids"]))
         else:
             raise Exception()

View File

@@ -2,7 +2,8 @@ import queue
 import logging
 import cherrypy
 from photoapp.dbutils import create_db_sessionmaker, cursorwrap
-from photoapp.types import Job, JobTargetType, JobTarget
+from photoapp.types import Job, JobTargetType, JobTarget, JobTargetStatus, Photo, PhotoSet, Tag, TagItem
+from sqlalchemy import func
 logger = logging.getLogger("jobs")
@@ -65,9 +66,6 @@ class JobsClient(object):
 class PhotoappTask(object):
     def get_targets(self):
         raise NotImplementedError()

     def run_targets(self):
         raise NotImplementedError()
@@ -106,11 +104,36 @@ class JobServer(BaseJobServer):
     def handle_notify(self, c, job_uuid):
         # query the db for the target job, and lock it
         job = c.query(Job).filter(Job.uuid == job_uuid).first()
         logging.info("handle_notify for job %s: %s", job_uuid, job)
         # get the task we're running
-        task = task_definitions[job.job_name]()
-        logging.info("task: %s", task)
+        # task = task_definitions[job.job_name]()
+        # logging.info("task: %s", task)
+        # check if JobTargetStatus has been populated for this job
+        statuses = c.query(func.count(JobTargetStatus.id)).join(JobTarget).filter(JobTarget.job_id == job.id).first()[0]
+        if statuses == 0:
+            # populate statuses
+            logging.info("preparing statuses for job %s/%s", job.id, job.uuid)
+            photo_ids = set()
+            for target in c.query(JobTarget).filter(JobTarget.job_id == job.id).all():
+                if target.target_type == JobTargetType.photo:
+                    photo_ids.update([target.target])
+                elif target.target_type == JobTargetType.photoset:
+                    for photo in c.query(Photo.id).filter(Photo.set_id == target.target).all():
+                        photo_ids.update([photo[0]])
+                elif target.target_type == JobTargetType.tag:
+                    for photo in c.query(Photo.id). \
+                            join(PhotoSet).join(TagItem).join(Tag). \
+                            filter(Tag.id == target.target). \
+                            all():
+                        photo_ids.update([photo[0]])
+                for photo_id in photo_ids:
+                    c.add(JobTargetStatus(target_id=target.id, job_id=job.id, photo_id=photo_id))
+            c.commit()  # if this fails, somebody else is handling the job
         # query for Photos targeted by the task and allow the job to filter them
         # query...
@@ -144,7 +167,7 @@ class ThreadedJobServer(JobServer):
             try:
                 job_uuid = self.notifyq.get(timeout=5.0)
                 self.handle_notify(job_uuid)
-                self.self.work_notifyq.put(job_uuid)
+                self.work_notifyq.put(job_uuid)
             except queue.Empty:
                 pass
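
The `c.commit()  # if this fails, somebody else is handling the job` line in handle_notify above leans on the database to decide which worker gets to populate a job's statuses. Below is a minimal, self-contained sketch of that pattern, not photoapp's real models: it assumes the duplicate insert is rejected by a unique constraint like the UniqueConstraint(job_id, target_id, photo_id) added in types.py below, and it uses SQLAlchemy 1.4+ style with throwaway table and session names.

# Hypothetical stand-in for JobTargetStatus, just to show the race:
# whichever session commits its status rows first wins; the other gets
# IntegrityError from the unique constraint and backs off.
from sqlalchemy import Column, Integer, UniqueConstraint, create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class StatusRow(Base):
    __tablename__ = "status_rows"
    id = Column(Integer, primary_key=True)
    job_id = Column(Integer, nullable=False)
    target_id = Column(Integer, nullable=False)
    photo_id = Column(Integer, nullable=False)
    __table_args__ = (UniqueConstraint("job_id", "target_id", "photo_id"),)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)

winner = Session()
winner.add(StatusRow(job_id=1, target_id=1, photo_id=42))
winner.commit()  # first worker populates the statuses

loser = Session()
loser.add(StatusRow(job_id=1, target_id=1, photo_id=42))
try:
    loser.commit()  # same rows -> unique constraint violation
except IntegrityError:
    loser.rollback()  # somebody else is handling the job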

View File

@@ -285,8 +285,8 @@ class JobTargetStatus(Base):
     job_id = Column(Integer, ForeignKey("jobs.id"), nullable=False)
     job = relationship("Job", back_populates="target_statuses", foreign_keys=[job_id])
-    job_target_id = Column(Integer, ForeignKey("job_targets.id"), nullable=False)
-    job_target = relationship("JobTarget", back_populates="statuses", foreign_keys=[job_target_id])
+    target_id = Column(Integer, ForeignKey("job_targets.id"), nullable=False)
+    job_target = relationship("JobTarget", back_populates="statuses", foreign_keys=[target_id])
     """
     all jobs are progressed by photo_id. PhotoSets can have many Photos so this is the best logical unit. How this works
@@ -296,6 +296,8 @@ class JobTargetStatus(Base):
       a PhotoSet
     - tag: same as photoset but iterated across all photosets with the tag
     """
-    target_photo_id = Column(Integer, ForeignKey("photos.id"), nullable=False)
+    photo_id = Column(Integer, ForeignKey("files.id"), nullable=False)
-    status = Column(Enum(JobTargetState), nullable=False)
+    status = Column(Enum(JobTargetState), nullable=False, default=JobTargetState.new)
+    UniqueConstraint(job_id, target_id, photo_id)
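
Per the note above that all jobs are progressed by photo_id, a job's overall progress can be read back by grouping its JobTargetStatus rows by state. A hedged sketch against the models touched in this commit; the job_progress helper itself is hypothetical and not part of the change.

from sqlalchemy import func

from photoapp.types import Job, JobTargetStatus


def job_progress(db, job_uuid):
    # count JobTargetStatus rows per state for one job,
    # e.g. {JobTargetState.new: 40, JobTargetState.done: 2}
    job = db.query(Job).filter(Job.uuid == job_uuid).first()
    counts = db.query(JobTargetStatus.status, func.count(JobTargetStatus.id)). \
        filter(JobTargetStatus.job_id == job.id). \
        group_by(JobTargetStatus.status). \
        all()
    return dict(counts)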