tune max upload size
This commit is contained in:
parent
a540a5aad2
commit
92a20f4f58
|
@ -13,3 +13,4 @@ testenv/
|
|||
source/
|
||||
source_copy/
|
||||
raws/
|
||||
*.sql
|
||||
|
|
14
README.md
14
README.md
|
@ -114,19 +114,19 @@ This would ingest all the files listed in `shas.txt` that aren't already in the
|
|||
|
||||
Roadmap
|
||||
-------
|
||||
- Stateless (aka Docker) support
|
||||
- Migration path
|
||||
- open database
|
||||
- copy files table to memory
|
||||
- recreate files table
|
||||
- insert into the new table, with replaced paths, generating a list of file moves at the same time
|
||||
- migrate files to the new storage according to the list
|
||||
- Flesh out CLI:
|
||||
- Relink function - make a photo a member of another photo
|
||||
- Config that is saved somewhere
|
||||
- Album features
|
||||
- Support additional fields on upload, like title, description, tags, etc.
|
||||
- delete features
|
||||
- tag features
|
||||
- Tag on import
|
||||
- modify features (tags & images)
|
||||
- "Batch" tag on import
|
||||
- Generate a tag on import
|
||||
- Save it in config and re-use it (if passing --same-batch)
|
||||
- photos imported as a batch will be under 1 tag
|
||||
- Longer term ideas:
|
||||
- "fast ingest" method that touches the db/storage directly. This would scale better than the API ingest.
|
||||
- Dynamic SVG placeholder for images we can't open
|
||||
|
|
|
@ -432,6 +432,10 @@ def main():
|
|||
default=os.environ.get("DATABASE_URL")),
|
||||
parser.add_argument('--debug', action="store_true", help="enable development options")
|
||||
|
||||
tunables = parser.add_argument_group(title="tunables")
|
||||
tunables.add_argument('--max-upload', help="maximum file upload size accepted in bytes",
|
||||
default=1024**3, type=int)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if not args.database:
|
||||
|
@ -493,7 +497,8 @@ def main():
|
|||
'server.socket_host': '0.0.0.0',
|
||||
'server.show_tracebacks': True,
|
||||
'log.screen': False,
|
||||
'engine.autoreload.on': args.debug
|
||||
'engine.autoreload.on': args.debug,
|
||||
'server.max_request_body_size': args.max_upload
|
||||
})
|
||||
|
||||
# Setup signal handling and run it.
|
||||
|
|
|
@ -38,7 +38,9 @@ class ThumbGenerator(object):
|
|||
return os.path.abspath(dest)
|
||||
if photo.width is None: # todo better detection of images that PIL can't open
|
||||
return None
|
||||
if photo.uuid not in self._failed_thumbs_cache[style]:
|
||||
if photo.uuid in self._failed_thumbs_cache[style]:
|
||||
return None
|
||||
|
||||
thumb_width, thumb_height, flip_ok = styles[style]
|
||||
i_width = photo.width
|
||||
i_height = photo.height
|
||||
|
@ -64,7 +66,6 @@ class ThumbGenerator(object):
|
|||
self._failed_thumbs_cache[style][photo.uuid] = True # dont retry failed generations
|
||||
return None
|
||||
return os.path.abspath(dest)
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def gen_thumb(src_img, dest_img, width, height, rotation):
|
||||
|
|
Loading…
Reference in New Issue