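"""Photo endpoints: upload, retrieval, moving, editing, deletion, random selection and search."""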
import logging
import re
from datetime import datetime, timedelta, timezone
from os import makedirs, path, remove, system
from pathlib import Path
from random import randint
from secrets import token_urlsafe
from shutil import move
from threading import Thread
from typing import Union
from uuid import uuid4

import aiofiles
from bson.errors import InvalidId
from bson.objectid import ObjectId
from fastapi import Security, UploadFile
from fastapi.responses import Response, UJSONResponse
from jose import JWTError, jwt
from magic import Magic
from plum.exceptions import UnpackError
from pydantic import ValidationError
from pymongo import DESCENDING
from starlette.status import HTTP_204_NO_CONTENT, HTTP_409_CONFLICT

from classes.exceptions import (
    AccessTokenInvalidError,
    AlbumNameNotFoundError,
    PhotoNotFoundError,
    PhotoSearchQueryEmptyError,
    SearchLimitInvalidError,
    SearchPageInvalidError,
    SearchTokenInvalidError,
)
from classes.models import (
    Photo,
    PhotoPublic,
    RandomSearchResultsPhoto,
    SearchResultsPhoto,
)
from modules.app import app
from modules.database import col_albums, col_photos, col_tokens
from modules.exif_reader import extract_location
from modules.hasher import get_duplicates, get_phash
from modules.scheduler import scheduler
from modules.security import (
    ALGORITHM,
    SECRET_KEY,
    TokenData,
    User,
    create_access_token,
    get_current_active_user,
    get_user,
)
from modules.utils import configGet

logger = logging.getLogger(__name__)


async def compress_image(image_path: str):
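    """Compress a JPEG or PNG file on disk in place using jpegoptim or optipng."""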
    image_type = Magic(mime=True).from_file(image_path)

    # Only JPEG and PNG images are compressed; everything else is left untouched
    if image_type not in ["image/jpeg", "image/png"]:
        logger.info(
            "Not compressing %s because its mime is '%s'", image_path, image_type
        )
        return

    size_before = path.getsize(image_path) / 1024

    # Run the external optimizer (jpegoptim or optipng) in a worker thread and wait for it
    if image_type == "image/jpeg":
        task = Thread(
            target=system,
            kwargs={"command": f'jpegoptim "{image_path}" -o --max=55 -p --strip-none'},
        )
    elif image_type == "image/png":
        task = Thread(target=system, kwargs={"command": f'optipng -o3 "{image_path}"'})
    else:
        return

    task.start()
    logger.info("Compressing '%s'...", Path(image_path).name)
    task.join()

    size_after = path.getsize(image_path) / 1024
    logger.info(
        "Compressed '%s' from %s KiB to %s KiB",
        Path(image_path).name,
        size_before,
        size_after,
    )


photo_post_responses = {
    404: AlbumNameNotFoundError("name").openapi,
    409: {
        "description": "Image Duplicates Found",
        "content": {
            "application/json": {
                "example": {
                    "detail": "Image duplicates found. Pass 'ignore_duplicates=true' to ignore.",
                    "duplicates": ["string"],
                    "access_token": "string",
                }
            }
        },
    },
}


@app.post(
    "/albums/{album}/photos",
    description="Upload a photo to an album",
    response_class=UJSONResponse,
    response_model=Photo,
    responses=photo_post_responses,
)
async def photo_upload(
    file: UploadFile,
    album: str,
    ignore_duplicates: bool = False,
    compress: bool = True,
    caption: Union[str, None] = None,
    current_user: User = Security(get_current_active_user, scopes=["photos.write"]),
):
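    """Save an uploaded file into an album, check it for visual duplicates and register it in the database."""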
    if col_albums.find_one({"user": current_user.user, "name": album}) is None:
        raise AlbumNameNotFoundError(album)

    makedirs(Path(f"data/users/{current_user.user}/albums/{album}"), exist_ok=True)

    filename = file.filename

    # Avoid overwriting an existing file by appending a timestamp to the name
    if Path(f"data/users/{current_user.user}/albums/{album}/{file.filename}").exists():
        base_name = file.filename.split(".")[:-1]
        extension = file.filename.split(".")[-1]
        filename = (
            ".".join(base_name) + f"_{int(datetime.now().timestamp())}." + extension
        )

    async with aiofiles.open(
        Path(f"data/users/{current_user.user}/albums/{album}/{filename}"), "wb"
    ) as f:
        await f.write(await file.read())

    # A perceptual hash is used to detect visually identical images in the album
    file_hash = await get_phash(
        Path(f"data/users/{current_user.user}/albums/{album}/{filename}")
    )
    duplicates = await get_duplicates(file_hash, album)

    if len(duplicates) > 0 and not ignore_duplicates:
        if configGet("media_token_access") is True:
            duplicates_ids = [entry["id"] for entry in duplicates]
            access_token = create_access_token(
                data={
                    "sub": current_user.user,
                    "scopes": ["me", "photos.read"],
                    "allowed": duplicates_ids,
                },
                expires_delta=timedelta(hours=configGet("media_token_valid_hours")),
            )
            access_token_short = uuid4().hex[:12].lower()
            col_tokens.insert_one(
                {
                    "short": access_token_short,
                    "access_token": access_token,
                    "photos": duplicates_ids,
                }
            )
        else:
            access_token_short = None
        return UJSONResponse(
            {
                "detail": "Image duplicates found. Pass 'ignore_duplicates=true' to ignore.",
                "duplicates": duplicates,
                "access_token": access_token_short,
            },
            status_code=HTTP_409_CONFLICT,
        )

    try:
        coords = extract_location(
            Path(f"data/users/{current_user.user}/albums/{album}/{filename}")
        )
    except (UnpackError, ValueError):
        coords = {"lng": 0.0, "lat": 0.0, "alt": 0.0}

    uploaded = col_photos.insert_one(
        {
            "user": current_user.user,
            "album": album,
            "hash": file_hash,
            "filename": filename,
            "dates": {
                "uploaded": datetime.now(tz=timezone.utc),
                "modified": datetime.now(tz=timezone.utc),
            },
            "location": [coords["lng"], coords["lat"], coords["alt"]],
            "caption": caption,
        }
    )

    # Compression is deferred to the scheduler so the request can return immediately
    if compress:
        scheduler.add_job(
            compress_image,
            trigger="date",
            run_date=datetime.now() + timedelta(seconds=1),
            args=[Path(f"data/users/{current_user.user}/albums/{album}/{filename}")],
        )

    return UJSONResponse(
        {
            "id": uploaded.inserted_id.__str__(),
            "album": album,
            "hash": file_hash,
            "filename": filename,
        }
    )


# Access to photos by a token generated, for example, by the
# upload method when duplicates are found. It is disabled by
# default and should remain so unless really needed.
if configGet("media_token_access") is True:
    photo_get_token_responses = {
        401: AccessTokenInvalidError().openapi,
        404: PhotoNotFoundError("id").openapi,
    }

    @app.get(
        "/token/photo/{token}",
        description="Get a photo by its duplicate token",
        responses=photo_get_token_responses,
    )
    async def photo_get_token(token: str, id: int):
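        """Serve one of the duplicate photos referenced by a short duplicate-access token."""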
        db_entry = col_tokens.find_one({"short": token})

        if db_entry is None:
            raise AccessTokenInvalidError()

        # The short token maps to a full JWT and the list of photo ids it covers;
        # `id` is the index of the requested photo within that list
        token = db_entry["access_token"]
        id = db_entry["photos"][id]

        try:
            payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
            user: str = payload.get("sub")
            if user is None:
                raise AccessTokenInvalidError()
            token_scopes = payload.get("scopes", [])
            token_data = TokenData(scopes=token_scopes, user=user)
        except (JWTError, ValidationError) as exp:
            logger.error(exp)
            raise AccessTokenInvalidError()

        user = get_user(user=token_data.user)

        if id not in payload.get("allowed", []):
            raise AccessTokenInvalidError()

        try:
            image = col_photos.find_one({"_id": ObjectId(id)})
            if image is None:
                raise InvalidId(id)
        except InvalidId:
            raise PhotoNotFoundError(id)

        image_path = Path(
            f"data/users/{user.user}/albums/{image['album']}/{image['filename']}"
        )

        mime = Magic(mime=True).from_file(image_path)

        async with aiofiles.open(image_path, "rb") as f:
            image_file = await f.read()

        return Response(image_file, media_type=mime)


photo_get_responses = {
    200: {
        "content": {
            "application/octet-stream": {
                "schema": {
                    "type": "string",
                    "format": "binary",
                    "contentMediaType": "image/*",
                }
            }
        }
    },
    404: PhotoNotFoundError("id").openapi,
}


@app.get(
    "/photos/{id}",
    description="Get a photo by id",
    responses=photo_get_responses,
    response_class=Response,
)
async def photo_get(
    id: str,
    current_user: User = Security(get_current_active_user, scopes=["photos.read"]),
):
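    """Return the photo file itself, with its MIME type detected from the file on disk."""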
    try:
        image = col_photos.find_one({"_id": ObjectId(id)})
        if image is None:
            raise InvalidId(id)
    except InvalidId:
        raise PhotoNotFoundError(id)

    image_path = Path(
        f"data/users/{current_user.user}/albums/{image['album']}/{image['filename']}"
    )

    mime = Magic(mime=True).from_file(image_path)

    async with aiofiles.open(image_path, "rb") as f:
        image_file = await f.read()

    return Response(image_file, media_type=mime)


photo_move_responses = {404: PhotoNotFoundError("id").openapi}


@app.put(
    "/photos/{id}",
    description="Move a photo to another album",
    response_model=PhotoPublic,
    responses=photo_move_responses,
)
async def photo_move(
    id: str,
    album: str,
    current_user: User = Security(get_current_active_user, scopes=["photos.write"]),
):
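    """Move a photo to another album, renaming it if the destination already has a file with the same name."""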
    try:
        image = col_photos.find_one({"_id": ObjectId(id)})
        if image is None:
            raise InvalidId(id)
    except InvalidId:
        raise PhotoNotFoundError(id)

    if col_albums.find_one({"user": current_user.user, "name": album}) is None:
        raise AlbumNameNotFoundError(album)

    # Avoid clobbering an existing file in the destination album by appending a timestamp
    if Path(
        f"data/users/{current_user.user}/albums/{album}/{image['filename']}"
    ).exists():
        base_name = image["filename"].split(".")[:-1]
        extension = image["filename"].split(".")[-1]
        filename = (
            ".".join(base_name) + f"_{int(datetime.now().timestamp())}." + extension
        )
    else:
        filename = image["filename"]

    col_photos.find_one_and_update(
        {"_id": ObjectId(id)},
        {
            "$set": {
                "album": album,
                "filename": filename,
                "dates.modified": datetime.now(tz=timezone.utc),
            }
        },
    )

    move(
        Path(
            f"data/users/{current_user.user}/albums/{image['album']}/{image['filename']}"
        ),
        Path(f"data/users/{current_user.user}/albums/{album}/{filename}"),
    )

    return UJSONResponse(
        {
            "id": image["_id"].__str__(),
            "caption": image["caption"],
            "filename": filename,
        }
    )


photo_patch_responses = {404: PhotoNotFoundError("id").openapi}


@app.patch(
    "/photos/{id}",
    description="Change properties of a photo",
    response_model=PhotoPublic,
    responses=photo_patch_responses,
)
async def photo_patch(
    id: str,
    caption: str,
    current_user: User = Security(get_current_active_user, scopes=["photos.write"]),
):
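    """Update a photo's caption and bump its modification date."""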
    try:
        image = col_photos.find_one({"_id": ObjectId(id)})
        if image is None:
            raise InvalidId(id)
    except InvalidId:
        raise PhotoNotFoundError(id)

    col_photos.find_one_and_update(
        {"_id": ObjectId(id)},
        {"$set": {"caption": caption, "dates.modified": datetime.now(tz=timezone.utc)}},
    )

    return UJSONResponse(
        {
            "id": image["_id"].__str__(),
            "caption": caption,
            "filename": image["filename"],
        }
    )


photo_delete_responses = {404: PhotoNotFoundError("id").openapi}


@app.delete(
    "/photos/{id}",
    description="Delete a photo by id",
    status_code=HTTP_204_NO_CONTENT,
    responses=photo_delete_responses,
)
async def photo_delete(
    id: str,
    current_user: User = Security(get_current_active_user, scopes=["photos.write"]),
):
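    """Delete a photo record and its file, clearing the album cover if it pointed to this photo."""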
    try:
        image = col_photos.find_one_and_delete({"_id": ObjectId(id)})
        if image is None:
            raise InvalidId(id)
    except InvalidId:
        raise PhotoNotFoundError(id)

    album = col_albums.find_one({"name": image["album"]})

    # If the deleted photo was used as the album cover, unset the cover
    if album is not None and album["cover"] == image["_id"].__str__():
        col_albums.update_one({"name": image["album"]}, {"$set": {"cover": None}})

    remove(
        Path(
            f"data/users/{current_user.user}/albums/{image['album']}/{image['filename']}"
        )
    )

    return Response(status_code=HTTP_204_NO_CONTENT)


photo_random_responses = {
    400: SearchLimitInvalidError().openapi,
    404: AlbumNameNotFoundError("name").openapi,
}


@app.get(
    "/albums/{album}/photos/random",
    description="Get random photos from an album, optionally filtered by caption",
    response_class=UJSONResponse,
    response_model=RandomSearchResultsPhoto,
    responses=photo_random_responses,
)
async def photo_random(
    album: str,
    caption: Union[str, None] = None,
    limit: int = 100,
    current_user: User = Security(get_current_active_user, scopes=["photos.list"]),
):
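    """Return up to `limit` photos starting from a random offset, optionally filtered by caption regex."""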
    if col_albums.find_one({"user": current_user.user, "name": album}) is None:
        raise AlbumNameNotFoundError(album)

    if limit <= 0:
        raise SearchLimitInvalidError()

    output = {"results": []}

    # The caption is matched as a regular expression when provided
    db_query = (
        {
            "user": current_user.user,
            "album": album,
            "caption": re.compile(caption),
        }
        if caption is not None
        else {
            "user": current_user.user,
            "album": album,
        }
    )

    # Pick a random starting offset and take a slice of `limit` documents from there
    documents_count = col_photos.count_documents(db_query)
    skip = randint(0, documents_count - 1) if documents_count > 1 else 0

    images = list(
        col_photos.aggregate(
            [
                {"$match": db_query},
                {"$skip": skip},
                {"$limit": limit},
            ]
        )
    )

    for image in images:
        output["results"].append(
            {
                "id": image["_id"].__str__(),
                "filename": image["filename"],
                "caption": image["caption"],
            }
        )

    return UJSONResponse(output)


photo_find_responses = {
    400: SearchPageInvalidError().openapi,
    401: SearchTokenInvalidError().openapi,
    404: AlbumNameNotFoundError("name").openapi,
    422: PhotoSearchQueryEmptyError().openapi,
}


@app.get(
    "/albums/{album}/photos",
    description="Find photos by filename, caption, location or token",
    response_class=UJSONResponse,
    response_model=SearchResultsPhoto,
    responses=photo_find_responses,
)
async def photo_find(
    album: str,
    q: Union[str, None] = None,
    caption: Union[str, None] = None,
    token: Union[str, None] = None,
    page: int = 1,
    page_size: int = 100,
    lat: Union[float, None] = None,
    lng: Union[float, None] = None,
    radius: Union[int, None] = None,
    current_user: User = Security(get_current_active_user, scopes=["photos.list"]),
):
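    """Search an album by filename and/or caption regex or by location, or resume a previous search via its token."""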
    if token is not None:
        found_record = col_tokens.find_one({"token": token})

        if found_record is None:
            raise SearchTokenInvalidError()

        # A pagination token simply replays the stored search with its saved parameters
        return await photo_find(
            album=album,
            q=found_record["query"],
            caption=found_record["caption"],
            lat=found_record["lat"],
            lng=found_record["lng"],
            radius=found_record["radius"],
            page=found_record["page"],
            page_size=found_record["page_size"],
            current_user=current_user,
        )

    if col_albums.find_one({"user": current_user.user, "name": album}) is None:
        raise AlbumNameNotFoundError(album)

    if page <= 0 or page_size <= 0:
        raise SearchPageInvalidError()

    output = {"results": []}
    skip = (page - 1) * page_size

    # Default search radius when none is given
    radius = 5000 if radius is None else radius

    if (lat is not None) and (lng is not None):
        db_query = {
            "user": current_user.user,
            "album": album,
            "location": {
                "$nearSphere": {
                    "$geometry": {"type": "Point", "coordinates": [lng, lat]},
                    "$maxDistance": radius,
                }
            },
        }
        db_query_count = {
            "user": current_user.user,
            "album": album,
            "location": {"$geoWithin": {"$centerSphere": [[lng, lat], radius]}},
        }
    elif q is None and caption is None:
        raise PhotoSearchQueryEmptyError()
    elif q is None:
        db_query = {
            "user": current_user.user,
            "album": album,
            "caption": re.compile(caption),
        }
        db_query_count = {
            "user": current_user.user,
            "album": album,
            "caption": re.compile(caption),
        }
    elif caption is None:
        db_query = {
            "user": current_user.user,
            "album": album,
            "filename": re.compile(q),
        }
        db_query_count = {
            "user": current_user.user,
            "album": album,
            "filename": re.compile(q),
        }
    else:
        db_query = {"user": current_user.user, "album": album, "filename": re.compile(q), "caption": re.compile(caption)}  # type: ignore
        db_query_count = {"user": current_user.user, "album": album, "filename": re.compile(q), "caption": re.compile(caption)}  # type: ignore

    images = list(
        col_photos.find(db_query, limit=page_size, skip=skip).sort(
            "dates.uploaded", DESCENDING
        )
    )

    for image in images:
        output["results"].append(
            {
                "id": image["_id"].__str__(),
                "filename": image["filename"],
                "caption": image["caption"],
            }
        )

    # If more results remain, store the search parameters and hand back a token
    # the client can use to fetch the next page
    if col_photos.count_documents(db_query_count) > page * page_size:
        token = str(token_urlsafe(32))
        col_tokens.insert_one(
            {
                "token": token,
                "query": q,
                "caption": caption,
                "lat": lat,
                "lng": lng,
                "radius": radius,
                "page": page + 1,
                "page_size": page_size,
            }
        )
        output["next_page"] = f"/albums/{album}/photos/?token={token}"  # type: ignore
    else:
        output["next_page"] = None  # type: ignore

    return UJSONResponse(output)