Compare commits

58 commits: v0.1...4545e26f32

4545e26f32, ab2bfd10d5, e9f3237fbb, b3c9a972c8, 42f125716a, 5e3df74052, 2ff4623d5f,
737b4c57c0, d723bb6b80, 2a7870620c, b003712358, d29dfa4d3e, d688d766da, 5cc10367b2,
4b43e76822, 23467a88ef, 88d8a38444, a5cd6a215f, a6002a5e60, 917048a333, 6be51c5aaa,
840e3022b3, 24f4773dd7, 00d3d62762, 2a29b85ad2, 9bdc788078, 5a5103ea9c, ccf4c43bb9,
19e0531a24, b46f3fb0fd, d2f3d7e687, 83dd4b6746, 47435c6128, db77f62459, b51026b200,
b30547eca8, 782b489db2, d085a0e639, 30d72c84ed, e1e42fdb60, 36169b0e77, 5de935cd21,
1e6afc6b0c, f9e6ee9c72, f512df408f, aa083811dc, 4d24696d3d, c7cb4a6dff, 4060aae038,
4eea82a160, 4ce4264580, 6feed4359a, 2afc82cf01, bf0046c3d5, c55a2d0d44, a380da81bb,
e858e7d7f4, fcbbd4f2bf
.renovaterc (new file, 20 lines)
@@ -0,0 +1,20 @@
+{
+    "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+    "extends": [
+        "config:base"
+    ],
+    "baseBranches": [
+        "dev"
+    ],
+    "packageRules": [
+        {
+            "matchUpdateTypes": [
+                "minor",
+                "patch",
+                "pin",
+                "digest"
+            ],
+            "automerge": true
+        }
+    ]
+}
@@ -9,7 +9,7 @@ Small and simple API server for saving photos and videos.
 
 ## Dependencies
 
-* [Python 3.7+](https://www.python.org) (3.9+ recommended)
+* [Python 3.8+](https://www.python.org) (3.9+ recommended)
 * [MongoDB](https://www.mongodb.com)
 * [exiftool](https://exiftool.org)
 * [jpegoptim](https://github.com/tjko/jpegoptim)
@@ -1,7 +1,9 @@
 from typing import Literal
 
+from fastapi import HTTPException
+
 
-class AlbumNotFoundError(Exception):
+class AlbumNotFoundError(HTTPException):
     """Raises HTTP 404 if no album with this ID found."""
 
     def __init__(self, id: str):
@@ -16,7 +18,7 @@ class AlbumNotFoundError(Exception):
             }
 
 
-class AlbumNameNotFoundError(Exception):
+class AlbumNameNotFoundError(HTTPException):
     """Raises HTTP 404 if no album with this name found."""
 
     def __init__(self, name: str):
@@ -29,9 +31,15 @@ class AlbumNameNotFoundError(Exception):
                 }
             },
         }
+        super().__init__(
+            status_code=404,
+            detail=self.openapi["content"]["application/json"]["example"][
+                "detail"
+            ].format(name=self.name),
+        )
 
 
-class AlbumAlreadyExistsError(Exception):
+class AlbumAlreadyExistsError(HTTPException):
     """Raises HTTP 409 if album with this name already exists."""
 
     def __init__(self, name: str):
@@ -44,9 +52,15 @@ class AlbumAlreadyExistsError(Exception):
                 }
             },
         }
+        super().__init__(
+            status_code=409,
+            detail=self.openapi["content"]["application/json"]["example"][
+                "detail"
+            ].format(name=self.name),
+        )
 
 
-class AlbumIncorrectError(Exception):
+class AlbumIncorrectError(HTTPException):
     """Raises HTTP 406 if album's title or name is invalid."""
 
     def __init__(self, place: Literal["name", "title"], error: str) -> None:
@@ -56,13 +70,19 @@ class AlbumIncorrectError(Exception):
             "description": "Album Name/Title Invalid",
             "content": {
                 "application/json": {
-                    "example": {"detail": "Album {name/title} invalid: {error}"}
+                    "example": {"detail": "Album {place} invalid: {error}"}
                 }
             },
         }
+        super().__init__(
+            status_code=406,
+            detail=self.openapi["content"]["application/json"]["example"][
+                "detail"
+            ].format(place=self.place, error=self.error),
+        )
 
 
-class PhotoNotFoundError(Exception):
+class PhotoNotFoundError(HTTPException):
     """Raises HTTP 404 if no photo with this ID found."""
 
     def __init__(self, id: str):
@@ -75,9 +95,15 @@ class PhotoNotFoundError(Exception):
                 }
             },
         }
+        super().__init__(
+            status_code=404,
+            detail=self.openapi["content"]["application/json"]["example"][
+                "detail"
+            ].format(id=self.id),
+        )
 
 
-class PhotoSearchQueryEmptyError(Exception):
+class PhotoSearchQueryEmptyError(HTTPException):
     """Raises HTTP 422 if no photo search query provided."""
 
     def __init__(self):
@@ -91,9 +117,13 @@ class PhotoSearchQueryEmptyError(Exception):
                 }
             },
         }
+        super().__init__(
+            status_code=422,
+            detail=self.openapi["content"]["application/json"]["example"]["detail"],
+        )
 
 
-class VideoNotFoundError(Exception):
+class VideoNotFoundError(HTTPException):
     """Raises HTTP 404 if no video with this ID found."""
 
     def __init__(self, id: str):
@@ -106,9 +136,15 @@ class VideoNotFoundError(Exception):
                 }
             },
         }
+        super().__init__(
+            status_code=404,
+            detail=self.openapi["content"]["application/json"]["example"][
+                "detail"
+            ].format(id=self.id),
+        )
 
 
-class VideoSearchQueryEmptyError(Exception):
+class VideoSearchQueryEmptyError(HTTPException):
     """Raises HTTP 422 if no video search query provided."""
 
     def __init__(self):
@@ -122,9 +158,33 @@ class VideoSearchQueryEmptyError(Exception):
                 }
             },
         }
+        super().__init__(
+            status_code=422,
+            detail=self.openapi["content"]["application/json"]["example"]["detail"],
+        )
 
 
-class SearchPageInvalidError(Exception):
+class SearchLimitInvalidError(HTTPException):
+    """Raises HTTP 400 if search results limit not in valid range."""
+
+    def __init__(self):
+        self.openapi = {
+            "description": "Invalid Limit",
+            "content": {
+                "application/json": {
+                    "example": {
+                        "detail": "Parameter 'limit' must be greater or equal to 1."
+                    }
+                }
+            },
+        }
+        super().__init__(
+            status_code=400,
+            detail=self.openapi["content"]["application/json"]["example"]["detail"],
+        )
+
+
+class SearchPageInvalidError(HTTPException):
     """Raises HTTP 400 if page or page size are not in valid range."""
 
     def __init__(self):
@@ -138,9 +198,13 @@ class SearchPageInvalidError(Exception):
                 }
             },
         }
+        super().__init__(
+            status_code=400,
+            detail=self.openapi["content"]["application/json"]["example"]["detail"],
+        )
 
 
-class SearchTokenInvalidError(Exception):
+class SearchTokenInvalidError(HTTPException):
     """Raises HTTP 401 if search token is not valid."""
 
     def __init__(self):
@@ -150,9 +214,13 @@ class SearchTokenInvalidError(Exception):
                 "application/json": {"example": {"detail": "Invalid search token."}}
             },
         }
+        super().__init__(
+            status_code=401,
+            detail=self.openapi["content"]["application/json"]["example"]["detail"],
+        )
 
 
-class UserEmailCodeInvalid(Exception):
+class UserEmailCodeInvalid(HTTPException):
     """Raises HTTP 400 if email confirmation code is not valid."""
 
     def __init__(self):
@@ -164,9 +232,13 @@ class UserEmailCodeInvalid(Exception):
                 }
             },
         }
+        super().__init__(
+            status_code=400,
+            detail=self.openapi["content"]["application/json"]["example"]["detail"],
+        )
 
 
-class UserAlreadyExists(Exception):
+class UserAlreadyExists(HTTPException):
     """Raises HTTP 409 if user with this name already exists."""
 
     def __init__(self):
@@ -178,9 +250,13 @@ class UserAlreadyExists(Exception):
                 }
             },
         }
+        super().__init__(
+            status_code=409,
+            detail=self.openapi["content"]["application/json"]["example"]["detail"],
+        )
 
 
-class AccessTokenInvalidError(Exception):
+class AccessTokenInvalidError(HTTPException):
     """Raises HTTP 401 if access token is not valid."""
 
     def __init__(self):
@@ -190,9 +266,13 @@ class AccessTokenInvalidError(Exception):
                 "application/json": {"example": {"detail": "Invalid access token."}}
             },
         }
+        super().__init__(
+            status_code=401,
+            detail=self.openapi["content"]["application/json"]["example"]["detail"],
+        )
 
 
-class UserCredentialsInvalid(Exception):
+class UserCredentialsInvalid(HTTPException):
     """Raises HTTP 401 if user credentials are not valid."""
 
     def __init__(self):
@@ -202,3 +282,7 @@ class UserCredentialsInvalid(Exception):
                 "application/json": {"example": {"detail": "Invalid credentials."}}
             },
         }
+        super().__init__(
+            status_code=401,
+            detail=self.openapi["content"]["application/json"]["example"]["detail"],
+        )
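Because every error class above now inherits from FastAPI's HTTPException, a route can raise it directly and reuse the same openapi dictionary for its response documentation. A minimal sketch, assuming a hypothetical album lookup (the route and the placeholder query below are illustrative, not part of this diff):

from fastapi import FastAPI
from classes.exceptions import AlbumNotFoundError

app = FastAPI()


@app.get("/albums/{id}", responses={404: AlbumNotFoundError("id").openapi})
async def album_get(id: str):
    album = None  # hypothetical lookup; the real extensions query MongoDB here
    if album is None:
        # Raising the subclass returns HTTP 404 with the documented detail text
        raise AlbumNotFoundError(id)
    return album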
@@ -1,4 +1,5 @@
 from typing import List, Union
+
 from pydantic import BaseModel
 
 
@@ -71,3 +72,11 @@ class SearchResultsPhoto(BaseModel):
 class SearchResultsVideo(BaseModel):
     results: List[VideoSearch]
     next_page: Union[str, None]
+
+
+class RandomSearchResultsPhoto(BaseModel):
+    results: List[PhotoSearch]
+
+
+class RandomSearchResultsVideo(BaseModel):
+    results: List[VideoSearch]
@@ -1,5 +1,6 @@
 import re
-from os import makedirs, path, rename
+from os import makedirs, rename
+from pathlib import Path
 from shutil import rmtree
 from typing import Union
 
@@ -49,9 +50,7 @@ async def album_create(
     if col_albums.find_one({"name": name}) is not None:
         raise AlbumAlreadyExistsError(name)
 
-    makedirs(
-        path.join("data", "users", current_user.user, "albums", name), exist_ok=True
-    )
+    makedirs(Path(f"data/users/{current_user.user}/albums/{name}"), exist_ok=True)
 
     uploaded = col_albums.insert_one(
         {"user": current_user.user, "name": name, "title": title, "cover": None}
@@ -109,12 +108,12 @@ async def album_patch(
     except InvalidId:
         raise AlbumNotFoundError(id)
 
-    if title is not None:
-        if 2 > len(title) > 40:
-            raise AlbumIncorrectError("title", "must be >2 and <40 characters.")
-    else:
+    if title is None:
         title = album["title"]
 
+    elif 2 > len(title) > 40:
+        raise AlbumIncorrectError("title", "must be >2 and <40 characters.")
+
     if name is not None:
         if re.search(re.compile("^[a-z,0-9,_]*$"), name) is False:
             raise AlbumIncorrectError(
@@ -123,8 +122,8 @@ async def album_patch(
         if 2 > len(name) > 20:
             raise AlbumIncorrectError("name", "must be >2 and <20 characters.")
         rename(
-            path.join("data", "users", current_user.user, "albums", album["name"]),
-            path.join("data", "users", current_user.user, "albums", name),
+            Path(f"data/users/{current_user.user}/albums/{album['name']}"),
+            Path(f"data/users/{current_user.user}/albums/{name}"),
         )
         col_photos.update_many(
             {"user": current_user.user, "album": album["name"]},
@@ -186,8 +185,8 @@ async def album_put(
     cover = image["_id"].__str__() if image is not None else None  # type: ignore
 
     rename(
-        path.join("data", "users", current_user.user, "albums", album["name"]),
-        path.join("data", "users", current_user.user, "albums", name),
+        Path(f"data/users/{current_user.user}/albums/{album['name']}"),
+        Path(f"data/users/{current_user.user}/albums/{name}"),
    )
 
     col_photos.update_many(
@@ -222,6 +221,6 @@ async def album_delete(
 
     col_photos.delete_many({"album": album["name"]})
 
-    rmtree(path.join("data", "users", current_user.user, "albums", album["name"]))
+    rmtree(Path(f"data/users/{current_user.user}/albums/{album['name']}"))
 
     return Response(status_code=HTTP_204_NO_CONTENT)
@@ -1,7 +1,5 @@
 from fastapi import Request
 from fastapi.responses import UJSONResponse
-from modules.app import app
-from classes.exceptions import *
 from starlette.status import (
     HTTP_400_BAD_REQUEST,
     HTTP_401_UNAUTHORIZED,
@@ -11,6 +9,23 @@ from starlette.status import (
     HTTP_422_UNPROCESSABLE_ENTITY,
 )
 
+from classes.exceptions import (
+    AlbumNotFoundError,
+    AlbumAlreadyExistsError,
+    AlbumIncorrectError,
+    PhotoNotFoundError,
+    PhotoSearchQueryEmptyError,
+    VideoNotFoundError,
+    VideoSearchQueryEmptyError,
+    SearchPageInvalidError,
+    SearchTokenInvalidError,
+    AccessTokenInvalidError,
+    UserEmailCodeInvalid,
+    UserAlreadyExists,
+    UserCredentialsInvalid,
+)
+from modules.app import app
+
 
 @app.exception_handler(AlbumNotFoundError)
 async def album_not_found_exception_handler(request: Request, exc: AlbumNotFoundError):
@@ -1,31 +1,36 @@
-from os import path
-from modules.app import app
+from pathlib import Path
+
+import aiofiles
 from fastapi.responses import HTMLResponse, Response
 
+from modules.app import app
+
 
 @app.get("/pages/matter.css", include_in_schema=False)
 async def page_matter():
-    with open(path.join("pages", "matter.css"), "r", encoding="utf-8") as f:
-        output = f.read()
+    async with aiofiles.open(Path("pages/matter.css"), "r", encoding="utf-8") as f:
+        output = await f.read()
     return Response(content=output)
 
 
 @app.get("/pages/{page}/{file}", include_in_schema=False)
 async def page_assets(page: str, file: str):
-    with open(path.join("pages", page, file), "r", encoding="utf-8") as f:
-        output = f.read()
+    async with aiofiles.open(Path(f"pages/{page}/{file}"), "r", encoding="utf-8") as f:
+        output = await f.read()
     return Response(content=output)
 
 
 @app.get("/", include_in_schema=False)
 async def page_home():
-    with open(path.join("pages", "home", "index.html"), "r", encoding="utf-8") as f:
-        output = f.read()
+    async with aiofiles.open(Path("pages/home/index.html"), "r", encoding="utf-8") as f:
+        output = await f.read()
     return HTMLResponse(content=output)
 
 
 @app.get("/register", include_in_schema=False)
 async def page_register():
-    with open(path.join("pages", "register", "index.html"), "r", encoding="utf-8") as f:
-        output = f.read()
+    async with aiofiles.open(
+        Path("pages/register/index.html"), "r", encoding="utf-8"
+    ) as f:
+        output = await f.read()
     return HTMLResponse(content=output)
@@ -1,26 +1,46 @@
+import logging
 import re
 import pickle
+from datetime import datetime, timedelta, timezone
+from os import makedirs, path, remove, system
+from pathlib import Path
+from random import randint
 from secrets import token_urlsafe
 from shutil import move
 from threading import Thread
 from typing import Union
 from uuid import uuid4
-from magic import Magic
-from datetime import datetime, timedelta, timezone
-from os import makedirs, path, remove, system
 
+import aiofiles
+from bson.errors import InvalidId
+from bson.objectid import ObjectId
+from fastapi import Security, UploadFile
+from fastapi.responses import Response, UJSONResponse
+from jose import JWTError, jwt
+from magic import Magic
+from plum.exceptions import UnpackError
+from pydantic import ValidationError
+from pymongo import DESCENDING
+from starlette.status import HTTP_204_NO_CONTENT, HTTP_409_CONFLICT
 
 from classes.exceptions import (
+    AccessTokenInvalidError,
     AlbumNameNotFoundError,
     PhotoNotFoundError,
     PhotoSearchQueryEmptyError,
+    SearchLimitInvalidError,
     SearchPageInvalidError,
     SearchTokenInvalidError,
 )
-from classes.models import Photo, PhotoPublic, SearchResultsPhoto
+from classes.models import (
+    Photo,
+    PhotoPublic,
+    RandomSearchResultsPhoto,
+    SearchResultsPhoto,
+)
+from modules.app import app
+from modules.database import col_albums, col_photos, col_tokens
 from modules.exif_reader import extract_location
-from modules.hasher import get_phash, get_duplicates
+from modules.hasher import get_duplicates, get_phash
 from modules.scheduler import scheduler
 from modules.security import (
     ALGORITHM,
@@ -31,31 +51,18 @@ from modules.security import (
     get_current_active_user,
     get_user,
 )
-from modules.app import app
-from modules.database import col_photos, col_albums, col_tokens
-from pymongo import DESCENDING
-from bson.objectid import ObjectId
-from bson.errors import InvalidId
-from plum.exceptions import UnpackError
-from jose import JWTError, jwt
-
-from fastapi import UploadFile, Security
-from fastapi.responses import UJSONResponse, Response
-from fastapi.exceptions import HTTPException
-from starlette.status import (
-    HTTP_204_NO_CONTENT,
-    HTTP_401_UNAUTHORIZED,
-    HTTP_409_CONFLICT,
-)
-
-from modules.utils import configGet, logWrite
+from modules.utils import configGet
+
+logger = logging.getLogger(__name__)
 
 
 async def compress_image(image_path: str):
     image_type = Magic(mime=True).from_file(image_path)
 
     if image_type not in ["image/jpeg", "image/png"]:
-        logWrite(f"Not compressing {image_path} because its mime is '{image_type}'")
+        logger.info(
+            "Not compressing %s because its mime is '%s'", image_path, image_type
+        )
         return
 
     size_before = path.getsize(image_path) / 1024
@@ -71,12 +78,15 @@ async def compress_image(image_path: str):
         return
 
     task.start()
-    logWrite(f"Compressing '{path.split(image_path)[-1]}'...")
+    logger.info("Compressing '%s'...", Path(image_path).name)
    task.join()
 
     size_after = path.getsize(image_path) / 1024
-    logWrite(
-        f"Compressed '{path.split(image_path)[-1]}' from {size_before} Kb to {size_after} Kb"
+    logger.info(
+        "Compressed '%s' from %s Kb to %s Kb",
+        Path(image_path).name,
+        size_before,
+        size_after,
     )
@@ -115,36 +125,30 @@ async def photo_upload(
     if col_albums.find_one({"user": current_user.user, "name": album}) is None:
         raise AlbumNameNotFoundError(album)
 
-    makedirs(
-        path.join("data", "users", current_user.user, "albums", album), exist_ok=True
-    )
+    makedirs(Path(f"data/users/{current_user.user}/albums/{album}"), exist_ok=True)
 
     filename = file.filename
 
-    if path.exists(
-        path.join("data", "users", current_user.user, "albums", album, file.filename)
-    ):
+    if Path(f"data/users/{current_user.user}/albums/{album}/{file.filename}").exists():
         base_name = file.filename.split(".")[:-1]
         extension = file.filename.split(".")[-1]
         filename = (
             ".".join(base_name) + f"_{int(datetime.now().timestamp())}." + extension
         )
 
-    with open(
-        path.join("data", "users", current_user.user, "albums", album, filename), "wb"
+    async with aiofiles.open(
+        Path(f"data/users/{current_user.user}/albums/{album}/{filename}"), "wb"
     ) as f:
-        f.write(await file.read())
+        await f.write(await file.read())
 
     file_hash = await get_phash(
-        path.join("data", "users", current_user.user, "albums", album, filename)
+        Path(f"data/users/{current_user.user}/albums/{album}/{filename}")
     )
     duplicates = await get_duplicates(file_hash, album)
 
-    if len(duplicates) > 0 and ignore_duplicates is False:
+    if len(duplicates) > 0 and not ignore_duplicates:
         if configGet("media_token_access") is True:
-            duplicates_ids = []
-            for entry in duplicates:
-                duplicates_ids.append(entry["id"])
+            duplicates_ids = [entry["id"] for entry in duplicates]
             access_token = create_access_token(
                 data={
                     "sub": current_user.user,
@@ -174,7 +178,7 @@ async def photo_upload(
 
     try:
         coords = extract_location(
-            path.join("data", "users", current_user.user, "albums", album, filename)
+            Path(f"data/users/{current_user.user}/albums/{album}/{filename}")
        )
     except (UnpackError, ValueError):
         coords = {"lng": 0.0, "lat": 0.0, "alt": 0.0}
@@ -194,14 +198,12 @@ async def photo_upload(
         }
     )
 
-    if compress is True:
+    if compress:
         scheduler.add_job(
             compress_image,
             trigger="date",
             run_date=datetime.now() + timedelta(seconds=1),
-            args=[
-                path.join("data", "users", current_user.user, "albums", album, filename)
-            ],
+            args=[Path(f"data/users/{current_user.user}/albums/{album}/{filename}")],
         )
 
     return UJSONResponse(
@@ -260,22 +262,40 @@ if configGet("media_token_access") is True:
     except InvalidId:
         raise PhotoNotFoundError(id)
 
-    image_path = path.join(
-        "data", "users", user.user, "albums", image["album"], image["filename"]
+    image_path = Path(
+        f"data/users/{user.user}/albums/{image['album']}/{image['filename']}"
     )
 
     mime = Magic(mime=True).from_file(image_path)
 
-    with open(image_path, "rb") as f:
-        image_file = f.read()
+    async with aiofiles.open(image_path, "rb") as f:
+        image_file = await f.read()
 
     return Response(image_file, media_type=mime)
 
 
-photo_get_responses = {404: PhotoNotFoundError("id").openapi}
+photo_get_responses = {
+    200: {
+        "content": {
+            "application/octet-stream": {
+                "schema": {
+                    "type": "string",
+                    "format": "binary",
+                    "contentMediaType": "image/*",
+                }
+            }
+        }
+    },
+    404: PhotoNotFoundError("id").openapi,
+}
 
 
-@app.get("/photos/{id}", description="Get a photo by id", responses=photo_get_responses)
+@app.get(
+    "/photos/{id}",
+    description="Get a photo by id",
+    responses=photo_get_responses,
+    response_class=Response,
+)
 async def photo_get(
     id: str,
     current_user: User = Security(get_current_active_user, scopes=["photos.read"]),
@@ -287,14 +307,14 @@ async def photo_get(
     except InvalidId:
         raise PhotoNotFoundError(id)
 
-    image_path = path.join(
-        "data", "users", current_user.user, "albums", image["album"], image["filename"]
+    image_path = Path(
+        f"data/users/{current_user.user}/albums/{image['album']}/{image['filename']}"
    )
 
     mime = Magic(mime=True).from_file(image_path)
 
-    with open(image_path, "rb") as f:
-        image_file = f.read()
+    async with aiofiles.open(image_path, "rb") as f:
+        image_file = await f.read()
 
     return Response(image_file, media_type=mime)
 
@@ -323,11 +343,9 @@ async def photo_move(
     if col_albums.find_one({"user": current_user.user, "name": album}) is None:
         raise AlbumNameNotFoundError(album)
 
-    if path.exists(
-        path.join(
-            "data", "users", current_user.user, "albums", album, image["filename"]
-        )
-    ):
+    if Path(
+        f"data/users/{current_user.user}/albums/{album}/{image['filename']}"
+    ).exists():
         base_name = image["filename"].split(".")[:-1]
         extension = image["filename"].split(".")[-1]
         filename = (
@@ -348,15 +366,10 @@ async def photo_move(
         )
 
     move(
-        path.join(
-            "data",
-            "users",
-            current_user.user,
-            "albums",
-            image["album"],
-            image["filename"],
+        Path(
+            f"data/users/{current_user.user}/albums/{image['album']}/{image['filename']}"
         ),
-        path.join("data", "users", current_user.user, "albums", album, filename),
+        Path(f"data/users/{current_user.user}/albums/{album}/{filename}"),
     )
 
     return UJSONResponse(
@@ -429,21 +442,82 @@ async def photo_delete(
         col_albums.update_one({"name": image["album"]}, {"$set": {"cover": None}})
 
     remove(
-        path.join(
-            "data",
-            "users",
-            current_user.user,
-            "albums",
-            image["album"],
-            image["filename"],
+        Path(
+            f"data/users/{current_user.user}/albums/{image['album']}/{image['filename']}"
        )
     )
 
     return Response(status_code=HTTP_204_NO_CONTENT)
 
 
+photo_random_responses = {
+    400: SearchLimitInvalidError().openapi,
+    404: AlbumNameNotFoundError("name").openapi,
+}
+
+
+@app.get(
+    "/albums/{album}/photos/random",
+    description="Get one random photo, optionally by caption",
+    response_class=UJSONResponse,
+    response_model=RandomSearchResultsPhoto,
+    responses=photo_random_responses,
+)
+async def photo_random(
+    album: str,
+    caption: Union[str, None] = None,
+    limit: int = 100,
+    current_user: User = Security(get_current_active_user, scopes=["photos.list"]),
+):
+    if col_albums.find_one({"user": current_user.user, "name": album}) is None:
+        raise AlbumNameNotFoundError(album)
+
+    if limit <= 0:
+        raise SearchLimitInvalidError()
+
+    output = {"results": []}
+
+    db_query = (
+        {
+            "user": current_user.user,
+            "album": album,
+            "caption": re.compile(caption),
+        }
+        if caption is not None
+        else {
+            "user": current_user.user,
+            "album": album,
+        }
+    )
+
+    documents_count = col_photos.count_documents(db_query)
+    skip = randint(0, documents_count - 1) if documents_count > 1 else 0
+
+    images = list(
+        col_photos.aggregate(
+            [
+                {"$match": db_query},
+                {"$skip": skip},
+                {"$limit": limit},
+            ]
+        )
+    )
+
+    for image in images:
+        output["results"].append(
+            {
+                "id": image["_id"].__str__(),
+                "filename": image["filename"],
+                "caption": image["caption"],
+            }
+        )
+
+    return UJSONResponse(output)
+
+
 photo_find_responses = {
     400: SearchPageInvalidError().openapi,
     401: SearchTokenInvalidError().openapi,
     404: AlbumNameNotFoundError("name").openapi,
     422: PhotoSearchQueryEmptyError().openapi,
 }
@@ -451,7 +525,7 @@ photo_find_responses = {
 
 @app.get(
     "/albums/{album}/photos",
-    description="Find a photo by filename",
+    description="Find a photo by filename, caption, location or token",
     response_class=UJSONResponse,
     response_model=SearchResultsPhoto,
     responses=photo_find_responses,
@@ -460,6 +534,7 @@ async def photo_find(
     album: str,
     q: Union[str, None] = None,
     caption: Union[str, None] = None,
+    token: Union[str, None] = None,
     page: int = 1,
     page_size: int = 100,
     lat: Union[float, None] = None,
@@ -467,6 +542,24 @@ async def photo_find(
     radius: Union[int, None] = None,
     current_user: User = Security(get_current_active_user, scopes=["photos.list"]),
 ):
+    if token is not None:
+        found_record = col_tokens.find_one({"token": token})
+
+        if found_record is None:
+            raise SearchTokenInvalidError()
+
+        return await photo_find(
+            album=album,
+            q=found_record["query"],
+            caption=found_record["caption"],
+            lat=found_record["lat"],
+            lng=found_record["lng"],
+            radius=found_record["radius"],
+            page=found_record["page"],
+            page_size=found_record["page_size"],
+            current_user=current_user,
+        )
+
     if col_albums.find_one({"user": current_user.user, "name": album}) is None:
         raise AlbumNameNotFoundError(album)
 
@@ -496,7 +589,7 @@ async def photo_find(
         }
     elif q is None and caption is None:
         raise PhotoSearchQueryEmptyError()
-    elif q is None and caption is not None:
+    elif q is None:
         db_query = {
             "user": current_user.user,
             "album": album,
@@ -507,7 +600,7 @@ async def photo_find(
             "album": album,
             "caption": re.compile(caption),
         }
-    elif q is not None and caption is None:
+    elif caption is None:
         db_query = {
             "user": current_user.user,
             "album": album,
@@ -543,39 +636,16 @@ async def photo_find(
             {
                 "token": token,
                 "query": q,
                 "album": album,
                 "caption": caption,
                 "lat": lat,
                 "lng": lng,
                 "radius": radius,
                 "page": page + 1,
                 "page_size": page_size,
                 "user": pickle.dumps(current_user),
             }
         )
-        output["next_page"] = f"/albums/{album}/photos/token?token={token}"  # type: ignore
+        output["next_page"] = f"/albums/{album}/photos/?token={token}"  # type: ignore
     else:
         output["next_page"] = None  # type: ignore
 
     return UJSONResponse(output)
-
-
-photo_find_token_responses = {401: SearchTokenInvalidError().openapi}
-
-
-@app.get(
-    "/albums/{album}/photos/token",
-    description="Find a photo by token",
-    response_class=UJSONResponse,
-    response_model=SearchResultsPhoto,
-    responses=photo_find_token_responses,
-)
-async def photo_find_token(token: str):
-    found_record = col_tokens.find_one({"token": token})
-
-    if found_record is None:
-        raise SearchTokenInvalidError()
-
-    return await photo_find(
-        q=found_record["query"],
-        album=found_record["album"],
-        page=found_record["page"],
-        page_size=found_record["page_size"],
-        current_user=pickle.loads(found_record["user"]),
-    )
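The search token flow above now returns next_page links of the form /albums/{album}/photos/?token=..., handled by the same photo_find endpoint instead of a separate /token route. A client-side sketch of following that pagination, assuming httpx, a bearer token, and a base URL supplied by the caller (none of these appear in the diff itself):

import httpx


def fetch_all_photos(base_url: str, album: str, query: str, bearer_token: str) -> list:
    # Follow next_page links until the server reports there are no more pages
    headers = {"Authorization": f"Bearer {bearer_token}"}
    results = []
    url = f"{base_url}/albums/{album}/photos?q={query}"
    while url is not None:
        page = httpx.get(url, headers=headers).json()
        results.extend(page["results"])
        next_page = page.get("next_page")
        url = f"{base_url}{next_page}" if next_page else None
    return results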
@@ -1,12 +1,10 @@
 from datetime import timedelta
-from classes.exceptions import UserCredentialsInvalid
-from modules.app import app
 
 from fastapi import Depends
-from fastapi.security import (
-    OAuth2PasswordRequestForm,
-)
+from fastapi.security import OAuth2PasswordRequestForm
+
+from classes.exceptions import UserCredentialsInvalid
+from modules.app import app
 from modules.security import (
     ACCESS_TOKEN_EXPIRE_DAYS,
     Token,
@@ -1,27 +1,20 @@
+import logging
 from datetime import datetime, timedelta
+from uuid import uuid1
+
+from fastapi import Depends, Form
+from fastapi.responses import Response, UJSONResponse
+from starlette.status import HTTP_204_NO_CONTENT
 
 from classes.exceptions import (
     UserAlreadyExists,
     UserCredentialsInvalid,
     UserEmailCodeInvalid,
 )
-from modules.database import (
-    col_users,
-    col_albums,
-    col_photos,
-    col_emails,
-    col_videos,
-    col_emails,
-)
 from modules.app import app
-from modules.utils import configGet, logWrite
-from modules.scheduler import scheduler
+from modules.database import col_albums, col_emails, col_photos, col_users, col_videos
 from modules.mailer import mail_sender
-
-from uuid import uuid1
-from fastapi import Depends, Form
-from fastapi.responses import Response, UJSONResponse
-from starlette.status import HTTP_204_NO_CONTENT
-
+from modules.scheduler import scheduler
 from modules.security import (
     User,
     get_current_active_user,
@@ -29,6 +22,9 @@ from modules.security import (
     get_user,
     verify_password,
 )
+from modules.utils import configGet
+
+logger = logging.getLogger(__name__)
 
 
 async def send_confirmation(user: str, email: str):
@@ -48,9 +44,11 @@ async def send_confirmation(user: str, email: str):
         col_emails.insert_one(
             {"user": user, "email": email, "used": False, "code": confirmation_code}
         )
-        logWrite(f"Sent confirmation email to '{email}' with code {confirmation_code}")
+        logger.info(
+            "Sent confirmation email to '%s' with code %s", email, confirmation_code
+        )
     except Exception as exp:
-        logWrite(f"Could not send confirmation email to '{email}' due to: {exp}")
+        logger.error("Could not send confirmation email to '%s' due to: %s", email, exp)
 
 
 @app.get("/users/me/", response_model=User)
@@ -1,29 +1,38 @@
 import re
 import pickle
+from datetime import datetime, timezone
+from os import makedirs, remove
+from pathlib import Path
+from random import randint
 from secrets import token_urlsafe
 from shutil import move
 from typing import Union
 
+import aiofiles
+from bson.errors import InvalidId
+from bson.objectid import ObjectId
+from fastapi import Security, UploadFile
+from fastapi.responses import Response, UJSONResponse
 from magic import Magic
-from datetime import datetime, timezone
-from os import makedirs, path, remove
+from pymongo import DESCENDING
+from starlette.status import HTTP_204_NO_CONTENT
 
 from classes.exceptions import (
     AlbumNameNotFoundError,
+    SearchLimitInvalidError,
     SearchPageInvalidError,
     SearchTokenInvalidError,
     VideoNotFoundError,
     VideoSearchQueryEmptyError,
 )
-from classes.models import Video, SearchResultsVideo, VideoPublic
-from modules.security import User, get_current_active_user
+from classes.models import (
+    RandomSearchResultsVideo,
+    SearchResultsVideo,
+    Video,
+    VideoPublic,
+)
 from modules.app import app
-from modules.database import col_videos, col_albums, col_tokens
-from bson.objectid import ObjectId
-from bson.errors import InvalidId
-from pymongo import DESCENDING
-
-from fastapi import UploadFile, Security
-from fastapi.responses import UJSONResponse, Response
-from starlette.status import HTTP_204_NO_CONTENT
+from modules.database import col_albums, col_tokens, col_videos
+from modules.security import User, get_current_active_user
 
 video_post_responses = {404: AlbumNameNotFoundError("name").openapi}
@@ -44,25 +53,21 @@ async def video_upload(
     if col_albums.find_one({"user": current_user.user, "name": album}) is None:
         raise AlbumNameNotFoundError(album)
 
-    makedirs(
-        path.join("data", "users", current_user.user, "albums", album), exist_ok=True
-    )
+    makedirs(Path(f"data/users/{current_user.user}/albums/{album}"), exist_ok=True)
 
     filename = file.filename
 
-    if path.exists(
-        path.join("data", "users", current_user.user, "albums", album, file.filename)
-    ):
+    if Path(f"data/users/{current_user.user}/albums/{album}/{file.filename}").exists():
         base_name = file.filename.split(".")[:-1]
         extension = file.filename.split(".")[-1]
         filename = (
             ".".join(base_name) + f"_{int(datetime.now().timestamp())}." + extension
         )
 
-    with open(
-        path.join("data", "users", current_user.user, "albums", album, filename), "wb"
+    async with aiofiles.open(
+        Path(f"data/users/{current_user.user}/albums/{album}/{filename}"), "wb"
    ) as f:
-        f.write(await file.read())
+        await f.write(await file.read())
 
     # Hashing and duplicates check should be here
 
@@ -91,10 +96,28 @@ async def video_upload(
     )
 
 
-video_get_responses = {404: VideoNotFoundError("id").openapi}
+video_get_responses = {
+    200: {
+        "content": {
+            "application/octet-stream": {
+                "schema": {
+                    "type": "string",
+                    "format": "binary",
+                    "contentMediaType": "video/*",
+                }
+            }
+        }
+    },
+    404: VideoNotFoundError("id").openapi,
+}
 
 
-@app.get("/videos/{id}", description="Get a video by id", responses=video_get_responses)
+@app.get(
+    "/videos/{id}",
+    description="Get a video by id",
+    responses=video_get_responses,
+    response_class=Response,
+)
 async def video_get(
     id: str,
     current_user: User = Security(get_current_active_user, scopes=["videos.read"]),
@@ -106,16 +129,16 @@ async def video_get(
     except InvalidId:
         raise VideoNotFoundError(id)
 
-    video_path = path.join(
-        "data", "users", current_user.user, "albums", video["album"], video["filename"]
+    video_path = Path(
+        f"data/users/{current_user.user}/albums/{video['album']}/{video['filename']}"
    )
 
     mime = Magic(mime=True).from_file(video_path)
 
-    with open(video_path, "rb") as f:
-        video_file = f.read()
+    async with aiofiles.open(video_path, "rb") as f:
+        video_file = await f.read()
 
-    return Response(video_file, media_type=mime)
+    return Response(content=video_file, media_type=mime)
 
 
 video_move_responses = {404: VideoNotFoundError("id").openapi}
@@ -142,11 +165,9 @@ async def video_move(
     if col_albums.find_one({"user": current_user.user, "name": album}) is None:
         raise AlbumNameNotFoundError(album)
 
-    if path.exists(
-        path.join(
-            "data", "users", current_user.user, "albums", album, video["filename"]
-        )
-    ):
+    if Path(
+        f"data/users/{current_user.user}/albums/{album}/{video['filename']}"
+    ).exists():
         base_name = video["filename"].split(".")[:-1]
         extension = video["filename"].split(".")[-1]
         filename = (
@@ -167,15 +188,10 @@ async def video_move(
         )
 
     move(
-        path.join(
-            "data",
-            "users",
-            current_user.user,
-            "albums",
-            video["album"],
-            video["filename"],
+        Path(
+            f"data/users/{current_user.user}/albums/{video['album']}/{video['filename']}"
         ),
-        path.join("data", "users", current_user.user, "albums", album, filename),
+        Path(f"data/users/{current_user.user}/albums/{album}/{filename}"),
     )
 
     return UJSONResponse(
@@ -245,21 +261,82 @@ async def video_delete(
     album = col_albums.find_one({"name": video["album"]})
 
     remove(
-        path.join(
-            "data",
-            "users",
-            current_user.user,
-            "albums",
-            video["album"],
-            video["filename"],
+        Path(
+            f"data/users/{current_user.user}/albums/{video['album']}/{video['filename']}"
        )
     )
 
     return Response(status_code=HTTP_204_NO_CONTENT)
 
 
+video_random_responses = {
+    400: SearchLimitInvalidError().openapi,
+    404: AlbumNameNotFoundError("name").openapi,
+}
+
+
+@app.get(
+    "/albums/{album}/videos/random",
+    description="Get one random video, optionally by caption",
+    response_class=UJSONResponse,
+    response_model=RandomSearchResultsVideo,
+    responses=video_random_responses,
+)
+async def video_random(
+    album: str,
+    caption: Union[str, None] = None,
+    limit: int = 100,
+    current_user: User = Security(get_current_active_user, scopes=["videos.list"]),
+):
+    if col_albums.find_one({"user": current_user.user, "name": album}) is None:
+        raise AlbumNameNotFoundError(album)
+
+    if limit <= 0:
+        raise SearchLimitInvalidError()
+
+    output = {"results": []}
+
+    db_query = (
+        {
+            "user": current_user.user,
+            "album": album,
+            "caption": re.compile(caption),
+        }
+        if caption is not None
+        else {
+            "user": current_user.user,
+            "album": album,
+        }
+    )
+
+    documents_count = col_videos.count_documents(db_query)
+    skip = randint(0, documents_count - 1) if documents_count > 1 else 0
+
+    videos = list(
+        col_videos.aggregate(
+            [
+                {"$match": db_query},
+                {"$skip": skip},
+                {"$limit": limit},
+            ]
+        )
+    )
+
+    for video in videos:
+        output["results"].append(
+            {
+                "id": video["_id"].__str__(),
+                "filename": video["filename"],
+                "caption": video["caption"],
+            }
+        )
+
+    return UJSONResponse(output)
+
+
 video_find_responses = {
     400: SearchPageInvalidError().openapi,
     401: SearchTokenInvalidError().openapi,
     404: AlbumNameNotFoundError("name").openapi,
     422: VideoSearchQueryEmptyError().openapi,
 }
@@ -267,7 +344,7 @@ video_find_responses = {
 
 @app.get(
     "/albums/{album}/videos",
-    description="Find a video by filename",
+    description="Find a video by filename, caption or token",
     response_class=UJSONResponse,
     response_model=SearchResultsVideo,
     responses=video_find_responses,
@@ -276,10 +353,26 @@ async def video_find(
     album: str,
     q: Union[str, None] = None,
     caption: Union[str, None] = None,
+    token: Union[str, None] = None,
     page: int = 1,
     page_size: int = 100,
     current_user: User = Security(get_current_active_user, scopes=["videos.list"]),
 ):
+    if token is not None:
+        found_record = col_tokens.find_one({"token": token})
+
+        if found_record is None:
+            raise SearchTokenInvalidError()
+
+        return await video_find(
+            album=album,
+            q=found_record["query"],
+            caption=found_record["caption"],
+            page=found_record["page"],
+            page_size=found_record["page_size"],
+            current_user=current_user,
+        )
+
     if col_albums.find_one({"user": current_user.user, "name": album}) is None:
         raise AlbumNameNotFoundError(album)
 
@@ -292,7 +385,7 @@ async def video_find(
     if q is None and caption is None:
         raise VideoSearchQueryEmptyError()
 
-    if q is None and caption is not None:
+    if q is None:
         db_query = {
             "user": current_user.user,
             "album": album,
@@ -303,7 +396,7 @@ async def video_find(
             "album": album,
             "caption": re.compile(caption),
         }
-    elif q is not None and caption is None:
+    elif caption is None:
         db_query = list(
             col_videos.find(
                 {"user": current_user.user, "album": album, "filename": re.compile(q)},
@@ -341,39 +434,13 @@ async def video_find(
             {
                 "token": token,
                 "query": q,
                 "album": album,
                 "caption": caption,
                 "page": page + 1,
                 "page_size": page_size,
                 "user": pickle.dumps(current_user),
             }
         )
-        output["next_page"] = f"/albums/{album}/videos/token?token={token}"  # type: ignore
+        output["next_page"] = f"/albums/{album}/videos/?token={token}"  # type: ignore
     else:
         output["next_page"] = None  # type: ignore
 
     return UJSONResponse(output)
-
-
-video_find_token_responses = {401: SearchTokenInvalidError().openapi}
-
-
-@app.get(
-    "/albums/{album}/videos/token",
-    description="Find a video by token",
-    response_class=UJSONResponse,
-    response_model=SearchResultsVideo,
-    responses=video_find_token_responses,
-)
-async def video_find_token(token: str):
-    found_record = col_tokens.find_one({"token": token})
-
-    if found_record is None:
-        raise SearchTokenInvalidError()
-
-    return await video_find(
-        q=found_record["query"],
-        album=found_record["album"],
-        page=found_record["page"],
-        page_size=found_record["page_size"],
-        current_user=pickle.loads(found_record["user"]),
-    )
@@ -1,15 +1,14 @@
 from fastapi import FastAPI
-from fastapi.openapi.docs import get_swagger_ui_html, get_redoc_html
+from fastapi.openapi.docs import get_redoc_html, get_swagger_ui_html
 
 
-app = FastAPI(title="END PLAY Photos", docs_url=None, redoc_url=None, version="0.1")
+app = FastAPI(title="END PLAY Photos", docs_url=None, redoc_url=None, version="0.5")
 
 
 @app.get("/docs", include_in_schema=False)
 async def custom_swagger_ui_html():
     return get_swagger_ui_html(
-        openapi_url=app.openapi_url,  # type: ignore
-        title=app.title + " - Documentation",
+        openapi_url=app.openapi_url,
+        title=f"{app.title} - Documentation",
         swagger_favicon_url="/favicon.ico",
     )
@@ -17,7 +16,7 @@ async def custom_swagger_ui_html():
 @app.get("/redoc", include_in_schema=False)
 async def custom_redoc_html():
     return get_redoc_html(
-        openapi_url=app.openapi_url,  # type: ignore
-        title=app.title + " - Documentation",
+        openapi_url=app.openapi_url,
+        title=f"{app.title} - Documentation",
         redoc_favicon_url="/favicon.ico",
     )
@@ -1,5 +1,6 @@
+from pymongo import GEOSPHERE, MongoClient
+
 from modules.utils import configGet
-from pymongo import MongoClient, GEOSPHERE
 
 db_config = configGet("database")
 
@@ -23,7 +24,7 @@ db = db_client.get_database(name=db_config["name"])
 collections = db.list_collection_names()
 
 for collection in ["users", "albums", "photos", "videos", "tokens", "emails"]:
-    if not collection in collections:
+    if collection not in collections:
         db.create_collection(collection)
 
 col_users = db.get_collection("users")
@@ -1,3 +1,5 @@
+import contextlib
+
 from exif import Image
 
 
@@ -12,8 +14,10 @@ def decimal_coords(coords: float, ref: str) -> float:
         * float: Decimal degrees
     """
     decimal_degrees = coords[0] + coords[1] / 60 + coords[2] / 3600
-    if ref == "S" or ref == "W":
+
+    if ref in {"S", "W"}:
         decimal_degrees = -decimal_degrees
+
     return round(decimal_degrees, 5)
 
 
@@ -35,11 +39,9 @@ def extract_location(filepath: str) -> dict:
     if img.has_exif is False:
         return output
 
-    try:
+    with contextlib.suppress(AttributeError):
         output["lng"] = decimal_coords(img.gps_longitude, img.gps_longitude_ref)
         output["lat"] = decimal_coords(img.gps_latitude, img.gps_latitude_ref)
         output["alt"] = img.gps_altitude
-    except AttributeError:
-        pass
 
     return output
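decimal_coords converts the EXIF (degrees, minutes, seconds) tuple into signed decimal degrees, which is what extract_location stores. A short worked example with invented coordinates:

# 48° 51' 29.6" with a northern reference:
# 48 + 51 / 60 + 29.6 / 3600 = 48.858222... -> rounded to 48.85822
assert round(48 + 51 / 60 + 29.6 / 3600, 5) == 48.85822
# An "S" or "W" reference flips the sign, so the same value with ref="S" becomes -48.85822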
@@ -1,5 +1,6 @@
 from importlib.util import module_from_spec, spec_from_file_location
 from os import getcwd, path, walk
+from pathlib import Path
 
 # =================================================================================
 
@@ -10,9 +11,9 @@ def get_py_files(src):
     cwd = getcwd()  # Current Working directory
     py_files = []
     for root, dirs, files in walk(src):
-        for file in files:
-            if file.endswith(".py"):
-                py_files.append(path.join(cwd, root, file))
+        py_files.extend(
+            Path(f"{cwd}/{root}/{file}") for file in files if file.endswith(".py")
+        )
     return py_files
 
 
@@ -36,7 +37,7 @@ def dynamic_import(module_name, py_path):
 def dynamic_import_from_src(src, star_import=False):
     my_py_files = get_py_files(src)
     for py_file in my_py_files:
-        module_name = path.split(py_file)[-1][:-3]
+        module_name = Path(py_file).stem
         print(f"Importing {module_name} extension...", flush=True)
         imported_module = dynamic_import(module_name, py_file)
         if imported_module != None:
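dynamic_import_from_src walks a source directory and imports every .py file it finds, which is how the route extensions get registered on the shared app object. A usage sketch, assuming the extensions live in a directory named "extensions" (the directory name is an assumption, not taken from this diff):

from modules.extensions_loader import dynamic_import_from_src

# Importing each extension module executes its @app.get/@app.post decorators,
# so the routes are registered as a side effect of the import.
dynamic_import_from_src("extensions", star_import=True)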
@@ -1,8 +1,12 @@
-from modules.database import col_photos
+from pathlib import Path
+from typing import Union
+
+import cv2
 import numpy as np
 from numpy.typing import NDArray
 from scipy import spatial
-import cv2
+
+from modules.database import col_photos
 
 
 def hash_array_to_hash_hex(hash_array):
@@ -16,18 +20,18 @@ def hash_hex_to_hash_array(hash_hex) -> NDArray:
     # convert hash string in hex to hash values of 0 or 1
     hash_str = int(hash_hex, 16)
     array_str = bin(hash_str)[2:]
-    return np.array([i for i in array_str], dtype=np.float32)
+    return np.array(list(array_str), dtype=np.float32)
 
 
 def get_duplicates_cache(album: str) -> dict:
-    output = {}
-    for photo in col_photos.find({"album": album}):
-        output[photo["filename"]] = [photo["_id"].__str__(), photo["hash"]]
-    return output
+    return {
+        photo["filename"]: [photo["_id"].__str__(), photo["hash"]]
+        for photo in col_photos.find({"album": album})
+    }
 
 
-async def get_phash(filepath: str) -> str:
-    img = cv2.imread(filepath)
+async def get_phash(filepath: Union[str, Path]) -> str:
+    img = cv2.imread(str(filepath))
     # resize image and convert to gray scale
     img = cv2.resize(img, (64, 64))
     img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
@@ -48,14 +52,14 @@ async def get_phash(filepath: str) -> str:
     return hash_array_to_hash_hex(dct_block.flatten())
 
 
-async def get_duplicates(hash: str, album: str) -> list:
+async def get_duplicates(hash_string: str, album: str) -> list:
     duplicates = []
     cache = get_duplicates_cache(album)
-    for image_name in cache.keys():
+    for image_name, image_object in cache.items():
         try:
             distance = spatial.distance.hamming(
                 hash_hex_to_hash_array(cache[image_name][1]),
-                hash_hex_to_hash_array(hash),
+                hash_hex_to_hash_array(hash_string),
             )
         except ValueError:
             continue
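get_phash computes a DCT-based perceptual hash of an image, and get_duplicates compares it against the hashes cached for an album using Hamming distance. A sketch of how the upload path could use the two together, with an invented helper name and file path:

from pathlib import Path

from modules.hasher import get_duplicates, get_phash


async def find_album_duplicates(upload_path: Path, album: str) -> list:
    # Hash the freshly written file, then compare it against hashes already stored in MongoDB
    file_hash = await get_phash(upload_path)
    return await get_duplicates(file_hash, album)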
@@ -1,7 +1,11 @@
+import logging
 from smtplib import SMTP, SMTP_SSL
-from traceback import print_exc
 from ssl import create_default_context
-from modules.utils import configGet, logWrite
+from traceback import print_exc
+
+from modules.utils import configGet
+
+logger = logging.getLogger(__name__)
 
 try:
     if configGet("use_ssl", "mailer", "smtp") is True:
@@ -9,7 +13,7 @@ try:
             configGet("host", "mailer", "smtp"),
             configGet("port", "mailer", "smtp"),
         )
-        logWrite(f"Initialized SMTP SSL connection")
+        logger.info("Initialized SMTP SSL connection")
     elif configGet("use_tls", "mailer", "smtp") is True:
         mail_sender = SMTP(
             configGet("host", "mailer", "smtp"),
@@ -17,21 +21,21 @@ try:
         )
         mail_sender.starttls(context=create_default_context())
         mail_sender.ehlo()
-        logWrite(f"Initialized SMTP TLS connection")
+        logger.info("Initialized SMTP TLS connection")
     else:
         mail_sender = SMTP(
             configGet("host", "mailer", "smtp"), configGet("port", "mailer", "smtp")
        )
         mail_sender.ehlo()
-        logWrite(f"Initialized SMTP connection")
+        logger.info("Initialized SMTP connection")
 except Exception as exp:
-    logWrite(f"Could not initialize SMTP connection to: {exp}")
+    logger.error("Could not initialize SMTP connection to: %s", exp)
     print_exc()
 
 try:
     mail_sender.login(
         configGet("login", "mailer", "smtp"), configGet("password", "mailer", "smtp")
     )
-    logWrite(f"Successfully initialized mailer")
+    logger.info("Successfully initialized mailer")
 except Exception as exp:
-    logWrite(f"Could not login into provided SMTP account due to: {exp}")
+    logger.error("Could not login into provided SMTP account due to: %s", exp)
@@ -1,16 +1,13 @@
 from datetime import datetime, timedelta, timezone
 from typing import List, Union
-from modules.database import col_users
 
 from fastapi import Depends, HTTPException, Security, status
-from fastapi.security import (
-    OAuth2PasswordBearer,
-    SecurityScopes,
-)
+from fastapi.security import OAuth2PasswordBearer, SecurityScopes
 from jose import JWTError, jwt
 from passlib.context import CryptContext
 from pydantic import BaseModel, ValidationError
 
+from modules.database import col_users
+
 with open("secret_key", "r", encoding="utf-8") as f:
     SECRET_KEY = f.read()
@@ -76,12 +73,10 @@ def get_user(user: str):
 
 
 def authenticate_user(user_name: str, password: str):
-    user = get_user(user_name)
-    if not user:
+    if user := get_user(user_name):
+        return user if verify_password(password, user.hash) else False
+    else:
         return False
-    if not verify_password(password, user.hash):
-        return False
-    return user
 
 
 def create_access_token(data: dict, expires_delta: Union[timedelta, None] = None):
@@ -92,9 +87,8 @@ def create_access_token(data: dict, expires_delta: Union[timedelta, None] = None
         expire = datetime.now(tz=timezone.utc) + timedelta(
             days=ACCESS_TOKEN_EXPIRE_DAYS
         )
-    to_encode.update({"exp": expire})
-    encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
-    return encoded_jwt
+    to_encode["exp"] = expire
+    return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
 
 
 async def get_current_user(
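authenticate_user now returns the user object on success and False otherwise, and create_access_token encodes the claims plus an "exp" entry into a JWT signed with SECRET_KEY and ALGORITHM. A sketch of issuing a token, assuming the caller chooses the scopes and lifetime (both values below are illustrative):

from datetime import timedelta

from modules.security import authenticate_user, create_access_token


def issue_token(username: str, password: str) -> str:
    user = authenticate_user(username, password)
    if not user:
        raise ValueError("invalid credentials")
    # The "sub" and "scopes" claims mirror what the photo endpoints check later
    return create_access_token(
        data={"sub": user.user, "scopes": ["photos.read"]},
        expires_delta=timedelta(days=1),
    )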
@@ -1,20 +1,18 @@
+import logging
+from pathlib import Path
+from traceback import format_exc
 from typing import Any, Union
-from ujson import loads, dumps, JSONDecodeError
-from traceback import print_exc
 
+from ujson import JSONDecodeError, dumps, loads
 
-# Print to stdout and then to log
-def logWrite(message: str, debug: bool = False) -> None:
-    # save to log file and rotation is to be done
-    # logAppend(f'{message}', debug=debug)
-    print(f"{message}", flush=True)
+logger = logging.getLogger(__name__)
 
 
-def jsonLoad(filepath: str) -> Any:
+def jsonLoad(filepath: Union[str, Path]) -> Any:
     """Load json file
 
     ### Args:
-        * filepath (`str`): Path to input file
+        * filepath (`Union[str, Path]`): Path to input file
 
     ### Returns:
         * `Any`: Some json deserializable
@@ -23,32 +21,36 @@ def jsonLoad(filepath: str) -> Any:
     try:
         output = loads(file.read())
     except JSONDecodeError:
-        logWrite(
-            f"Could not load json file {filepath}: file seems to be incorrect!\n{print_exc()}"
+        logger.error(
+            "Could not load json file %s: file seems to be incorrect!\n%s",
+            filepath,
+            format_exc(),
        )
         raise
     except FileNotFoundError:
-        logWrite(
-            f"Could not load json file {filepath}: file does not seem to exist!\n{print_exc()}"
+        logger.error(
+            "Could not load json file %s: file does not seem to exist!\n%s",
+            filepath,
+            format_exc(),
        )
         raise
     file.close()
     return output
 
 
-def jsonSave(contents: Union[list, dict], filepath: str) -> None:
+def jsonSave(contents: Union[list, dict], filepath: Union[str, Path]) -> None:
     """Save contents into json file
 
     ### Args:
         * contents (`Union[list, dict]`): Some json serializable
-        * filepath (`str`): Path to output file
+        * filepath (`Union[str, Path]`): Path to output file
     """
     try:
         with open(filepath, "w", encoding="utf8") as file:
             file.write(dumps(contents, ensure_ascii=False, indent=4))
         file.close()
     except Exception as exp:
-        logWrite(f"Could not save json file {filepath}: {exp}\n{print_exc()}")
+        logger.error("Could not save json file %s: %s\n%s", filepath, exp, format_exc())
     return
 
@@ -62,7 +64,7 @@ def configGet(key: str, *args: str) -> Any:
     ### Returns:
         * `Any`: Value of provided key
     """
-    this_dict = jsonLoad("config.json")
+    this_dict = jsonLoad(Path("config.json"))
     this_key = this_dict
     for dict_key in args:
         this_key = this_key[dict_key]
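configGet loads config.json and walks the extra arguments as nested keys before returning the requested value. For example, the mailer module above reads its SMTP settings like this (keys taken from that module; the surrounding config.json layout is assumed):

from modules.utils import configGet

# Equivalent to config["mailer"]["smtp"]["host"] / ["port"] in config.json
smtp_host = configGet("host", "mailer", "smtp")
smtp_port = configGet("port", "mailer", "smtp")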
@@ -1,11 +1,20 @@
-from os import makedirs, path
-from modules.app import app
-from modules.utils import *
-from modules.scheduler import scheduler
-from modules.extensions_loader import dynamic_import_from_src
+import logging
+from os import makedirs
+from pathlib import Path
 
 from fastapi.responses import FileResponse
 
-makedirs(path.join("data", "users"), exist_ok=True)
+from modules.app import app
+from modules.extensions_loader import dynamic_import_from_src
+from modules.scheduler import scheduler
+
+makedirs(Path("data/users"), exist_ok=True)
+
+logging.basicConfig(
+    level=logging.INFO,
+    format="%(name)s.%(funcName)s | %(levelname)s | %(message)s",
+    datefmt="[%X]",
+)
 
 
 @app.get("/favicon.ico", response_class=FileResponse, include_in_schema=False)
@@ -1,10 +1,11 @@
-fastapi[all]~=0.94.0
-pymongo==4.3.3
-ujson~=5.7.0
-scipy~=1.10.1
-python-magic~=0.4.27
-opencv-python~=4.7.0.72
-python-jose[cryptography]~=3.3.0
-passlib~=1.7.4
+aiofiles==23.1.0
 apscheduler~=3.10.1
-exif==1.5.0
+exif==1.6.0
+fastapi[all]==0.99.1
+opencv-python~=4.8.0.74
+passlib~=1.7.4
+pymongo==4.4.0
+python-jose[cryptography]~=3.3.0
+python-magic~=0.4.27
+scipy~=1.11.0
+ujson~=5.8.0