# PhotosAPI/extensions/photos.py

import re
import pickle
from secrets import token_urlsafe
from shutil import move
from typing import List, Union
from magic import Magic
from datetime import datetime, timedelta
from os import makedirs, path, remove, system
from classes.models import Photo, SearchResults
from modules.exif_reader import extract_location
from modules.hasher import get_phash, get_duplicates
from modules.scheduler import scheduler
from modules.security import User, get_current_active_user
from modules.app import app
from modules.database import col_photos, col_albums, col_tokens
from modules.utils import logWrite
from bson.objectid import ObjectId
from bson.errors import InvalidId
from fastapi import HTTPException, UploadFile, Security
from fastapi.responses import UJSONResponse, Response
from starlette.status import (
    HTTP_204_NO_CONTENT,
    HTTP_400_BAD_REQUEST,
    HTTP_401_UNAUTHORIZED,
    HTTP_404_NOT_FOUND,
    HTTP_409_CONFLICT,
    HTTP_422_UNPROCESSABLE_ENTITY,
)


async def compress_image(image_path: str):
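    """Compress an image in place.

    JPEG files are recompressed with jpegoptim (quality capped at 55), PNG files
    are optimised with optipng -o3, and any other MIME type is skipped. The size
    before and after compression is logged.
    """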
    image_type = Magic(mime=True).from_file(image_path)

    if image_type not in ["image/jpeg", "image/png"]:
        logWrite(f"Not compressing {image_path} because its mime is '{image_type}'")
        return

    size_before = path.getsize(image_path) / 1024

    # system(f"exiftool -overwrite_original -all:all= -tagsFromFile @ -exif:Orientation {image_path}")

    if image_type == "image/jpeg":
        system(f"jpegoptim {image_path} -o --max=55 -p --strip-none")
    elif image_type == "image/png":
        system(f"optipng -o3 {image_path}")

    size_after = path.getsize(image_path) / 1024

    logWrite(f"Compressed '{path.split(image_path)[-1]}' from {size_before} KiB to {size_after} KiB")


@app.post("/albums/{album}/photos", response_class=UJSONResponse, response_model=Photo, description="Upload a photo to album")
async def photo_upload(file: UploadFile, album: str, ignore_duplicates: bool = False, compress: bool = True, current_user: User = Security(get_current_active_user, scopes=["photos.write"])):
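    """Upload a photo into an existing album of the current user.

    The file is stored on disk (renamed with a timestamp suffix if the name is
    already taken), its perceptual hash is checked against existing photos
    unless ignore_duplicates is set, EXIF coordinates are extracted, and
    compression is scheduled to run about a second later unless compress is
    disabled.
    """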
    if col_albums.find_one( {"user": current_user.user, "name": album} ) is None:
        raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=f"Provided album '{album}' does not exist.")

    # if not file.content_type.startswith("image"):
    #     return HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Provided file is not an image, not accepting.")

    makedirs(path.join("data", "users", current_user.user, "albums", album), exist_ok=True)

    filename = file.filename

    if path.exists(path.join("data", "users", current_user.user, "albums", album, file.filename)):
        base_name = file.filename.split(".")[:-1]
        extension = file.filename.split(".")[-1]
        filename = ".".join(base_name)+f"_{int(datetime.now().timestamp())}."+extension

    with open(path.join("data", "users", current_user.user, "albums", album, filename), "wb") as f:
        f.write(await file.read())

    file_hash = await get_phash(path.join("data", "users", current_user.user, "albums", album, filename))
    duplicates = await get_duplicates(file_hash, album)

    if len(duplicates) > 0 and ignore_duplicates is False:
        return UJSONResponse(
            {
                "detail": "Image duplicates found. Pass 'ignore_duplicates=true' to ignore.",
                "duplicates": duplicates
            },
            status_code=HTTP_409_CONFLICT
        )

    coords = extract_location(path.join("data", "users", current_user.user, "albums", album, filename))
    uploaded = col_photos.insert_one( {"user": current_user.user, "album": album, "hash": file_hash, "filename": filename, "location": [coords["lng"], coords["lat"], coords["alt"]]} )

    if compress is True:
        scheduler.add_job(compress_image, trigger="date", run_date=datetime.now()+timedelta(seconds=1), args=[path.join("data", "users", current_user.user, "albums", album, filename)])

    return UJSONResponse(
        {
            "id": uploaded.inserted_id.__str__(),
            "album": album,
            "hash": file_hash,
            "filename": filename
        }
    )
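
# Example request (illustrative; host, album name and token are placeholders,
# assuming modules.security issues the usual OAuth2 bearer tokens):
#
#     curl -X POST "http://localhost:8000/albums/Holidays/photos?compress=true" \
#          -H "Authorization: Bearer <access_token>" \
#          -F "file=@photo.jpg"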


@app.get("/photos/{id}", description="Get a photo by id")
async def photo_get(id: str, current_user: User = Security(get_current_active_user, scopes=["photos.view"])):
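    """Return the raw photo bytes for the given id, with the detected MIME type."""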
    try:
        image = col_photos.find_one( {"_id": ObjectId(id)} )
        if image is None:
            raise InvalidId(id)
    except InvalidId:
        raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find an image with such id.")

    image_path = path.join("data", "users", current_user.user, "albums", image["album"], image["filename"])

    mime = Magic(mime=True).from_file(image_path)

    with open(image_path, "rb") as f:
        image_file = f.read()

    return Response(image_file, media_type=mime)


@app.put("/photos/{id}", description="Move a photo into another album")
async def photo_move(id: str, album: str, current_user: User = Security(get_current_active_user, scopes=["photos.write"])):
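    """Move a photo into another album owned by the current user, renaming it with a timestamp suffix if the target name is already taken."""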
    try:
        image = col_photos.find_one( {"_id": ObjectId(id)} )
        if image is None:
            raise InvalidId(id)
    except InvalidId:
        raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find an image with such id.")

    if col_albums.find_one( {"user": current_user.user, "name": album} ) is None:
        raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=f"Provided album '{album}' does not exist.")

    if path.exists(path.join("data", "users", current_user.user, "albums", album, image["filename"])):
        base_name = image["filename"].split(".")[:-1]
        extension = image["filename"].split(".")[-1]
        filename = ".".join(base_name)+f"_{int(datetime.now().timestamp())}."+extension
    else:
        filename = image["filename"]

    col_photos.find_one_and_update( {"_id": ObjectId(id)}, {"$set": {"album": album, "filename": filename}} )

    move(
        path.join("data", "users", current_user.user, "albums", image["album"], image["filename"]),
        path.join("data", "users", current_user.user, "albums", album, filename)
    )

    return UJSONResponse(
        {
            "id": image["_id"].__str__(),
            "filename": filename
        }
    )


@app.delete("/photos/{id}", description="Delete a photo by id")
async def photo_delete(id: str, current_user: User = Security(get_current_active_user, scopes=["photos.write"])):
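    """Delete a photo by id, clearing the album cover if it pointed at this photo."""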
    try:
        image = col_photos.find_one_and_delete( {"_id": ObjectId(id)} )
        if image is None:
            raise InvalidId(id)
    except InvalidId:
        raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find an image with such id.")

    album = col_albums.find_one( {"name": image["album"]} )

    if album is not None and album["cover"] == image["_id"].__str__():
        col_albums.update_one( {"name": image["album"]}, {"$set": {"cover": None}} )

    remove(path.join("data", "users", current_user.user, "albums", image["album"], image["filename"]))

    return Response(status_code=HTTP_204_NO_CONTENT)


@app.get("/albums/{album}/photos", response_class=UJSONResponse, response_model=SearchResults, description="Find photos by filename or location")
async def photo_find(album: str, q: Union[str, None] = None, page: int = 1, page_size: int = 100, lat: Union[float, None] = None, lng: Union[float, None] = None, radius: Union[int, None] = None, current_user: User = Security(get_current_active_user, scopes=["photos.list"])):
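    """Search an album's photos.

    Either a filename regex (q) or a pair of coordinates (lat/lng, with an
    optional search radius defaulting to 5000) must be provided. Results are
    paginated; when more results remain, a one-off continuation token is
    stored and returned as next_page.
    """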
    if col_albums.find_one( {"user": current_user.user, "name": album} ) is None:
        raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=f"Provided album '{album}' does not exist.")

    if page <= 0 or page_size <= 0:
        raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Parameters 'page' and 'page_size' must be greater than or equal to 1.")

    output = {"results": []}
    skip = (page-1)*page_size

    radius = 5000 if radius is None else radius

    if (lat is not None) and (lng is not None):
        db_query = {"user": current_user.user, "album": album, "location": { "$nearSphere": {"$geometry": {"type": "Point", "coordinates": [lng, lat]}, "$maxDistance": radius} } }
        db_query_count = {"user": current_user.user, "album": album, "location": { "$geoWithin": { "$centerSphere": [ [lng, lat], radius ] } } }
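        # Note: $maxDistance on a GeoJSON point is measured in metres, while
        # $centerSphere expects its radius in radians, so the pagination count
        # query may cover a different area than the results query.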
    elif q is None:
        raise HTTPException(status_code=HTTP_422_UNPROCESSABLE_ENTITY, detail="You must provide a query or coordinates to look for photos.")
    else:
        db_query = {"user": current_user.user, "album": album, "filename": re.compile(q)}
        db_query_count = {"user": current_user.user, "album": album, "filename": re.compile(q)}

    images = list(col_photos.find(db_query, limit=page_size, skip=skip))

    for image in images:
        output["results"].append({"id": image["_id"].__str__(), "filename": image["filename"]})

    if col_photos.count_documents( db_query_count ) > page*page_size:
        token = str(token_urlsafe(32))
        col_tokens.insert_one( {"token": token, "query": q, "album": album, "page": page+1, "page_size": page_size, "user": pickle.dumps(current_user)} )
        output["next_page"] = f"/albums/{album}/photos/token?token={token}" # type: ignore
    else:
        output["next_page"] = None # type: ignore

    return UJSONResponse(output)


@app.get("/albums/{album}/photos/token", response_class=UJSONResponse, response_model=SearchResults, description="Find a photo by token")
async def photo_find_token(token: str):
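    """Continue a paginated photo search using a previously issued token.

    Only the filename query is stored with a token, so a search that was
    started with coordinates alone cannot be resumed through this endpoint.
    """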
    found_record = col_tokens.find_one( {"token": token} )

    if found_record is None:
        raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail="Invalid search token.")

    return await photo_find(q=found_record["query"], album=found_record["album"], page=found_record["page"], page_size=found_record["page_size"], current_user=pickle.loads(found_record["user"]))