Handling geo errors and threading for compression

Profitroll 2023-02-14 14:31:56 +01:00
parent e14b3e7db3
commit ddca4a4a3d


@@ -2,6 +2,7 @@ import re
 import pickle
 from secrets import token_urlsafe
 from shutil import move
+from threading import Thread
 from typing import List, Union
 from magic import Magic
 from datetime import datetime, timedelta, timezone
@@ -16,6 +17,7 @@ from modules.database import col_photos, col_albums, col_tokens
 from pymongo import DESCENDING
 from bson.objectid import ObjectId
 from bson.errors import InvalidId
+from plum.exceptions import UnpackError
 from fastapi import HTTPException, UploadFile, Security
 from fastapi.responses import UJSONResponse, Response
@@ -36,9 +38,15 @@ async def compress_image(image_path: str):
     # system(f"exiftool -overwrite_original -all:all= -tagsFromFile @ -exif:Orientation {image_path}")
     if image_type == "image/jpeg":
-        system(f"jpegoptim {image_path} -o --max=55 -p --strip-none")
+        task = Thread(target=system, kwargs={"command": f'jpegoptim "{image_path}" -o --max=55 -p --strip-none'})
     elif image_type == "image/png":
-        system(f"optipng -o3 {image_path}")
+        task = Thread(target=system, kwargs={"command": f'optipng -o3 "{image_path}"'})
+    else:
+        return
+    task.start()
+    logWrite(f"Compressing '{path.split(image_path)[-1]}'...")
+    task.join()
     size_after = path.getsize(image_path) / 1024
     logWrite(f"Compressed '{path.split(image_path)[-1]}' from {size_before} Kb to {size_after} Kb")
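The compression change above hands the blocking os.system() call to a Thread and joins it right after logging, so both image types go through the same start/log/join flow. A minimal sketch of that pattern, assuming jpegoptim is available and using print() in place of the project's logWrite(); the command is passed positionally rather than through kwargs:

from os import path, system
from threading import Thread

def compress_jpeg(image_path: str) -> None:
    # Run the external optimizer on a worker thread; the flags mirror the diff above.
    task = Thread(target=system, args=(f'jpegoptim "{image_path}" -o --max=55 -p --strip-none',))
    task.start()
    print(f"Compressing '{path.split(image_path)[-1]}'...")
    task.join()  # wait for jpegoptim to exit before the caller reads the new file size

Since join() follows start() immediately, the call is still synchronous for the caller; the thread isolates the shell invocation rather than making compress_image() non-blocking.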
@@ -76,7 +84,15 @@ async def photo_upload(file: UploadFile, album: str, ignore_duplicates: bool = F
             status_code=HTTP_409_CONFLICT
         )
-    coords = extract_location(path.join("data", "users", current_user.user, "albums", album, filename))
+    try:
+        coords = extract_location(path.join("data", "users", current_user.user, "albums", album, filename))
+    except (UnpackError, ValueError):
+        coords = {
+            "lng": 0.0,
+            "lat": 0.0,
+            "alt": 0.0
+        }
     uploaded = col_photos.insert_one(
         {
             "user": current_user.user,
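This hunk is the "geo errors" part of the commit: when a photo carries malformed or missing GPS EXIF data, the parser underneath extract_location() raises UnpackError (hence the new plum.exceptions import), and ValueError covers unusable values, so the upload stores zeroed coordinates instead of failing. A small sketch of the same fallback, with the extractor passed in as a callable because extract_location() itself is defined elsewhere in this repository:

from typing import Callable, Dict

from plum.exceptions import UnpackError

def location_or_default(extract: Callable[[str], Dict[str, float]], filepath: str) -> Dict[str, float]:
    # Try the real extractor; fall back to neutral coordinates when the EXIF
    # payload cannot be unpacked or yields unusable values.
    try:
        return extract(filepath)
    except (UnpackError, ValueError):
        return {"lng": 0.0, "lat": 0.0, "alt": 0.0}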
@@ -109,7 +125,7 @@ async def photo_upload(file: UploadFile, album: str, ignore_duplicates: bool = F
         )
 @app.get("/photos/{id}", description="Get a photo by id")
-async def photo_get(id: str, current_user: User = Security(get_current_active_user, scopes=["photos.view"])):
+async def photo_get(id: str, current_user: User = Security(get_current_active_user, scopes=["photos.read"])):
     try:
         image = col_photos.find_one( {"_id": ObjectId(id)} )
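The final hunk only renames the scope that photo_get requires from photos.view to photos.read. The check itself is FastAPI's standard SecurityScopes mechanism; a minimal self-contained sketch of how such a scope is enforced, with the caller's granted scopes hard-coded where the project would read them from its token collection:

from fastapi import FastAPI, HTTPException, Security
from fastapi.security import SecurityScopes

app = FastAPI()

async def get_current_active_user(security_scopes: SecurityScopes):
    # Scopes granted to the caller's token (hard-coded here for the sketch).
    granted = {"photos.read"}
    for scope in security_scopes.scopes:  # scopes the endpoint declared via Security(...)
        if scope not in granted:
            raise HTTPException(status_code=401, detail="Not enough permissions")
    return {"user": "demo"}

@app.get("/photos/{id}", description="Get a photo by id")
async def photo_get(id: str, current_user: dict = Security(get_current_active_user, scopes=["photos.read"])):
    return {"id": id, "user": current_user["user"]}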