3 Commits

SHA1        Message                                             Date
96f4ab2eb9  Added raise for incorrect geo exif                  2023-02-14 14:32:20 +01:00
2a100db981  Fixed scopes                                        2023-02-14 14:32:03 +01:00
ddca4a4a3d  Handling geo errors and threading for compression   2023-02-14 14:31:56 +01:00
3 changed files with 28 additions and 9 deletions


@@ -2,6 +2,7 @@ import re
 import pickle
 from secrets import token_urlsafe
 from shutil import move
+from threading import Thread
 from typing import List, Union
 from magic import Magic
 from datetime import datetime, timedelta, timezone
@@ -16,6 +17,7 @@ from modules.database import col_photos, col_albums, col_tokens
 from pymongo import DESCENDING
 from bson.objectid import ObjectId
 from bson.errors import InvalidId
+from plum.exceptions import UnpackError
 from fastapi import HTTPException, UploadFile, Security
 from fastapi.responses import UJSONResponse, Response
@@ -36,9 +38,15 @@ async def compress_image(image_path: str):
     # system(f"exiftool -overwrite_original -all:all= -tagsFromFile @ -exif:Orientation {image_path}")
     if image_type == "image/jpeg":
-        system(f"jpegoptim {image_path} -o --max=55 -p --strip-none")
+        task = Thread(target=system, kwargs={"command": f'jpegoptim "{image_path}" -o --max=55 -p --strip-none'})
     elif image_type == "image/png":
-        system(f"optipng -o3 {image_path}")
+        task = Thread(target=system, kwargs={"command": f'optipng -o3 "{image_path}"'})
+    else:
+        return
+    task.start()
+    logWrite(f"Compressing '{path.split(image_path)[-1]}'...")
+    task.join()
     size_after = path.getsize(image_path) / 1024
     logWrite(f"Compressed '{path.split(image_path)[-1]}' from {size_before} Kb to {size_after} Kb")
@@ -76,7 +84,15 @@ async def photo_upload(file: UploadFile, album: str, ignore_duplicates: bool = F
             status_code=HTTP_409_CONFLICT
         )
 
-    coords = extract_location(path.join("data", "users", current_user.user, "albums", album, filename))
+    try:
+        coords = extract_location(path.join("data", "users", current_user.user, "albums", album, filename))
+    except (UnpackError, ValueError):
+        coords = {
+            "lng": 0.0,
+            "lat": 0.0,
+            "alt": 0.0
+        }
 
     uploaded = col_photos.insert_one(
         {
             "user": current_user.user,
@@ -109,7 +125,7 @@ async def photo_upload(file: UploadFile, album: str, ignore_duplicates: bool = F
     )
 
 @app.get("/photos/{id}", description="Get a photo by id")
-async def photo_get(id: str, current_user: User = Security(get_current_active_user, scopes=["photos.view"])):
+async def photo_get(id: str, current_user: User = Security(get_current_active_user, scopes=["photos.read"])):
     try:
         image = col_photos.find_one( {"_id": ObjectId(id)} )


@@ -81,7 +81,7 @@ async def video_upload(file: UploadFile, album: str, caption: Union[str, None] =
     )
 
 @app.get("/videos/{id}", description="Get a video by id")
-async def video_get(id: str, current_user: User = Security(get_current_active_user, scopes=["videos.view"])):
+async def video_get(id: str, current_user: User = Security(get_current_active_user, scopes=["videos.read"])):
     try:
         video = col_videos.find_one( {"_id": ObjectId(id)} )
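Client impact of the scope rename (not part of the diff): tokens must now be requested with "photos.read" / "videos.read" rather than the old *.view scopes. A hedged usage sketch, assuming the standard FastAPI OAuth2 password flow at /token on a local deployment; the URL, credentials, and PHOTO_ID are placeholders.

import requests

# Request a token carrying the renamed scopes (space-separated per OAuth2).
resp = requests.post(
    "http://localhost:8080/token",
    data={"username": "demo", "password": "demo",
          "scope": "photos.read videos.read"},
)
token = resp.json()["access_token"]

# A token issued for the old photos.view scope would be rejected here.
photo = requests.get(
    "http://localhost:8080/photos/PHOTO_ID",
    headers={"Authorization": f"Bearer {token}"},
)
print(photo.status_code)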


@@ -45,10 +45,13 @@ async def get_duplicates(hash: str, album: str) -> list:
     duplicates = []
     cache = get_duplicates_cache(album)
     for image_name in cache.keys():
-        distance = spatial.distance.hamming(
-            hash_hex_to_hash_array(cache[image_name][1]),
-            hash_hex_to_hash_array(hash)
-        )
+        try:
+            distance = spatial.distance.hamming(
+                hash_hex_to_hash_array(cache[image_name][1]),
+                hash_hex_to_hash_array(hash)
+            )
+        except ValueError:
+            continue
         print("{0:<30} {1}".format(image_name, distance), flush=True)
         if distance <= 0.25:
             duplicates.append({"id": cache[image_name][0], "filename": image_name, "difference": distance})