47 Commits

Author SHA1 Message Date
eab19e6783 Merge pull request 'Update dependency fastapi to v0.103.1' (#30) from renovate/fastapi-0.x into dev
Reviewed-on: #30
2023-09-04 22:40:37 +03:00
8347a4c779 Update dependency fastapi to v0.103.1 2023-09-02 20:40:38 +03:00
ec5d0585a2 Merge pull request 'Update dependency fastapi to v0.103.0' (#29) from renovate/fastapi-0.x into dev
Reviewed-on: #29
2023-08-26 23:03:06 +03:00
ee53a77691 Update dependency fastapi to v0.103.0 2023-08-26 22:09:12 +03:00
10ee56be9e Merge pull request 'Update dependency fastapi to v0.102.0' (#28) from renovate/fastapi-0.x into dev
Reviewed-on: #28
2023-08-25 23:22:13 +03:00
91d5032fd2 Update dependency fastapi to v0.102.0 2023-08-25 22:30:40 +03:00
3569de9363 Added pymongo as a direct dependency 2023-08-14 14:26:54 +02:00
c966a6de07 Fixed direction errors 2023-08-14 13:55:49 +02:00
7011baff0f Added db_client_sync 2023-08-14 13:51:18 +02:00
a1acaed6dd WIP: Migration to async_pymongo 2023-08-14 13:44:07 +02:00
80ec8eb4f3 Merge pull request 'Update dependency fastapi to v0.101.1' (#26) from renovate/fastapi-0.x into dev
Reviewed-on: #26
2023-08-14 13:35:26 +03:00
bcc7012744 Update dependency fastapi to v0.101.1 2023-08-14 13:12:49 +03:00
e3038e4224 Merge pull request 'Update dependency aiofiles to v23.2.1' (#25) from renovate/aiofiles-23.x into dev
Reviewed-on: #25
2023-08-09 23:20:30 +03:00
3b4d108d45 Update dependency aiofiles to v23.2.1 2023-08-09 18:50:11 +03:00
16fe8235f4 Merge pull request 'Update dependency fastapi to v0.101.0' (#24) from renovate/fastapi-0.x into dev
Reviewed-on: #24
2023-08-05 13:38:45 +03:00
6cc0d3814e Update dependency fastapi to v0.101.0 2023-08-05 01:06:50 +03:00
b0c46e0c1e Merge pull request 'Update dependency fastapi to v0.100.1' (#23) from renovate/fastapi-0.x into dev
Reviewed-on: #23
2023-07-28 09:33:38 +03:00
7c725bf04a Update dependency fastapi to v0.100.1 2023-07-27 23:20:36 +03:00
cff6ed17a7 Merge pull request 'Update dependency pymongo to v4.4.1' (#22) from renovate/pymongo-4.x into dev
Reviewed-on: #22
2023-07-15 15:23:30 +03:00
e6fae57679 Update dependency pymongo to v4.4.1 2023-07-14 16:02:07 +03:00
dfdfebe155 Merge pull request 'Update dependency fastapi to v0.100.0' (#21) from renovate/fastapi-0.x into dev
Reviewed-on: #21
2023-07-07 22:51:12 +03:00
01b6222f6b Update dependency fastapi to v0.100.0 2023-07-07 21:00:24 +03:00
10fb021162 Merge pull request 'Update dependency fastapi to v0.99.1' (#20) from renovate/fastapi-0.x into dev
Reviewed-on: #20
2023-07-03 11:37:38 +03:00
4545e26f32 Update dependency fastapi to v0.99.1 2023-07-02 19:30:19 +03:00
ab2bfd10d5 Merge pull request 'Update dependency opencv-python to ~=4.8.0.74' (#19) from renovate/opencv-python-4.x into dev
Reviewed-on: #19
2023-06-30 19:06:25 +03:00
e9f3237fbb Update dependency opencv-python to ~=4.8.0.74 2023-06-30 15:53:36 +03:00
b3c9a972c8 Merge branch 'master' into dev 2023-06-27 14:54:21 +03:00
42f125716a Updated to v0.5 2023-06-27 13:52:13 +02:00
5e3df74052 Added random photo/video request 2023-06-27 13:51:18 +02:00
2ff4623d5f Merge pull request 'Update dependency scipy to ~=1.11.0' (#17) from renovate/scipy-1.x into dev
Reviewed-on: #17
2023-06-26 11:49:40 +03:00
737b4c57c0 Update dependency scipy to ~=1.11.0 2023-06-25 21:47:06 +03:00
d723bb6b80 Merge branch 'master' of https://git.profitroll.eu/profitroll/PhotosAPI 2023-06-23 12:17:05 +02:00
2a7870620c Refactor changed are done 2023-06-23 12:17:01 +02:00
b003712358 Fixed path error 2023-06-23 12:09:36 +02:00
d29dfa4d3e Merge pull request 'Update FastAPI to 0.98.0' (#16) from dev into master
Reviewed-on: #16
2023-06-23 12:32:46 +03:00
d688d766da Merge branch 'master' into dev 2023-06-23 12:31:10 +03:00
5cc10367b2 Typo fixed 2023-06-23 11:30:18 +02:00
4b43e76822 logWrite replaced with logging module 2023-06-23 11:25:27 +02:00
23467a88ef pathlib support 2023-06-23 11:17:02 +02:00
88d8a38444 WIP: pathlib support 2023-06-23 08:51:42 +00:00
a5cd6a215f Fixed missing await 2023-06-23 07:40:37 +00:00
a6002a5e60 Merge pull request 'Update dependency fastapi to v0.98.0' (#14) from renovate/fastapi-0.x into dev
Reviewed-on: #14
2023-06-22 22:12:09 +03:00
917048a333 Update dependency fastapi to v0.98.0 2023-06-22 21:40:59 +03:00
6be51c5aaa Merge pull request 'Fixed OpenAPI specs' (#13) from dev into master
Reviewed-on: #13
2023-06-22 15:44:39 +03:00
840e3022b3 Merge branch 'master' into dev 2023-06-22 15:44:35 +03:00
24f4773dd7 Updated to v0.4 2023-06-22 14:43:15 +02:00
00d3d62762 Fixed openapi spec 2023-06-22 14:43:00 +02:00
18 changed files with 515 additions and 322 deletions

View File

@@ -164,6 +164,26 @@ class VideoSearchQueryEmptyError(HTTPException):
         )


+class SearchLimitInvalidError(HTTPException):
+    """Raises HTTP 400 if search results limit not in valid range."""
+
+    def __init__(self):
+        self.openapi = {
+            "description": "Invalid Limit",
+            "content": {
+                "application/json": {
+                    "example": {
+                        "detail": "Parameter 'limit' must be greater or equal to 1."
+                    }
+                }
+            },
+        }
+        super().__init__(
+            status_code=400,
+            detail=self.openapi["content"]["application/json"]["example"]["detail"],
+        )
+
+
 class SearchPageInvalidError(HTTPException):
     """Raises HTTP 400 if page or page size are not in valid range."""

View File

@@ -72,3 +72,11 @@ class SearchResultsPhoto(BaseModel):
 class SearchResultsVideo(BaseModel):
     results: List[VideoSearch]
     next_page: Union[str, None]
+
+
+class RandomSearchResultsPhoto(BaseModel):
+    results: List[PhotoSearch]
+
+
+class RandomSearchResultsVideo(BaseModel):
+    results: List[VideoSearch]

View File

@@ -1,5 +1,6 @@
 import re
-from os import makedirs, path, rename
+from os import makedirs, rename
+from pathlib import Path
 from shutil import rmtree
 from typing import Union

@@ -46,14 +47,12 @@ async def album_create(
     if 2 > len(title) > 40:
         raise AlbumIncorrectError("title", "must be >2 and <40 characters.")

-    if col_albums.find_one({"name": name}) is not None:
+    if (await col_albums.find_one({"name": name})) is not None:
         raise AlbumAlreadyExistsError(name)

-    makedirs(
-        path.join("data", "users", current_user.user, "albums", name), exist_ok=True
-    )
+    makedirs(Path(f"data/users/{current_user.user}/albums/{name}"), exist_ok=True)

-    uploaded = col_albums.insert_one(
+    uploaded = await col_albums.insert_one(
         {"user": current_user.user, "name": name, "title": title, "cover": None}
     )

@@ -68,9 +67,10 @@ async def album_find(
     current_user: User = Security(get_current_active_user, scopes=["albums.list"]),
 ):
     output = {"results": []}
-    albums = list(col_albums.find({"user": current_user.user, "name": re.compile(q)}))
-    for album in albums:
+    async for album in col_albums.find(
+        {"user": current_user.user, "name": re.compile(q)}
+    ):
         output["results"].append(
             {
                 "id": album["_id"].__str__(),

@@ -103,18 +103,18 @@ async def album_patch(
     current_user: User = Security(get_current_active_user, scopes=["albums.write"]),
 ):
     try:
-        album = col_albums.find_one({"_id": ObjectId(id)})
+        album = await col_albums.find_one({"_id": ObjectId(id)})
         if album is None:
             raise InvalidId(id)
-    except InvalidId:
-        raise AlbumNotFoundError(id)
+    except InvalidId as exc:
+        raise AlbumNotFoundError(id) from exc

-    if title is not None:
-        if 2 > len(title) > 40:
-            raise AlbumIncorrectError("title", "must be >2 and <40 characters.")
-    else:
+    if title is None:
         title = album["title"]
+    elif 2 > len(title) > 40:
+        raise AlbumIncorrectError("title", "must be >2 and <40 characters.")

     if name is not None:
         if re.search(re.compile("^[a-z,0-9,_]*$"), name) is False:
             raise AlbumIncorrectError(

@@ -123,10 +123,10 @@ async def album_patch(
         if 2 > len(name) > 20:
             raise AlbumIncorrectError("name", "must be >2 and <20 characters.")
         rename(
-            path.join("data", "users", current_user.user, "albums", album["name"]),
-            path.join("data", "users", current_user.user, "albums", name),
+            Path(f"data/users/{current_user.user}/albums/{album['name']}"),
+            Path(f"data/users/{current_user.user}/albums/{name}"),
         )
-        col_photos.update_many(
+        await col_photos.update_many(
             {"user": current_user.user, "album": album["name"]},
             {"$set": {"album": name}},
         )

@@ -134,12 +134,14 @@ async def album_patch(
         name = album["name"]

     if cover is not None:
-        image = col_photos.find_one({"_id": ObjectId(cover), "album": album["name"]})
+        image = await col_photos.find_one(
+            {"_id": ObjectId(cover), "album": album["name"]}
+        )
         cover = image["_id"].__str__() if image is not None else album["cover"]
     else:
         cover = album["cover"]

-    col_albums.update_one(
+    await col_albums.update_one(
         {"_id": ObjectId(id)}, {"$set": {"name": name, "title": title, "cover": cover}}
     )

@@ -167,11 +169,11 @@ async def album_put(
     current_user: User = Security(get_current_active_user, scopes=["albums.write"]),
 ):
     try:
-        album = col_albums.find_one({"_id": ObjectId(id)})
+        album = await col_albums.find_one({"_id": ObjectId(id)})
         if album is None:
             raise InvalidId(id)
-    except InvalidId:
-        raise AlbumNotFoundError(id)
+    except InvalidId as exc:
+        raise AlbumNotFoundError(id) from exc

     if re.search(re.compile("^[a-z,0-9,_]*$"), name) is False:
         raise AlbumIncorrectError("name", "can only contain a-z, 0-9 and _ characters.")

@@ -182,18 +184,18 @@ async def album_put(
     if 2 > len(title) > 40:
         raise AlbumIncorrectError("title", "must be >2 and <40 characters.")

-    image = col_photos.find_one({"_id": ObjectId(cover), "album": album["name"]})
+    image = await col_photos.find_one({"_id": ObjectId(cover), "album": album["name"]})
     cover = image["_id"].__str__() if image is not None else None  # type: ignore

     rename(
-        path.join("data", "users", current_user.user, "albums", album["name"]),
-        path.join("data", "users", current_user.user, "albums", name),
+        Path(f"data/users/{current_user.user}/albums/{album['name']}"),
+        Path(f"data/users/{current_user.user}/albums/{name}"),
     )

-    col_photos.update_many(
+    await col_photos.update_many(
         {"user": current_user.user, "album": album["name"]}, {"$set": {"album": name}}
     )

-    col_albums.update_one(
+    await col_albums.update_one(
         {"_id": ObjectId(id)}, {"$set": {"name": name, "title": title, "cover": cover}}
     )

@@ -214,14 +216,14 @@ async def album_delete(
     current_user: User = Security(get_current_active_user, scopes=["albums.write"]),
 ):
     try:
-        album = col_albums.find_one_and_delete({"_id": ObjectId(id)})
+        album = await col_albums.find_one_and_delete({"_id": ObjectId(id)})
         if album is None:
             raise InvalidId(id)
-    except InvalidId:
-        raise AlbumNotFoundError(id)
+    except InvalidId as exc:
+        raise AlbumNotFoundError(id) from exc

-    col_photos.delete_many({"album": album["name"]})
+    await col_photos.delete_many({"album": album["name"]})

-    rmtree(path.join("data", "users", current_user.user, "albums", album["name"]))
+    rmtree(Path(f"data/users/{current_user.user}/albums/{album['name']}"))

     return Response(status_code=HTTP_204_NO_CONTENT)

View File

@@ -1,4 +1,4 @@
-from os import path
+from pathlib import Path

 import aiofiles
 from fastapi.responses import HTMLResponse, Response

@@ -8,27 +8,21 @@ from modules.app import app

 @app.get("/pages/matter.css", include_in_schema=False)
 async def page_matter():
-    async with aiofiles.open(
-        path.join("pages", "matter.css"), "r", encoding="utf-8"
-    ) as f:
+    async with aiofiles.open(Path("pages/matter.css"), "r", encoding="utf-8") as f:
         output = await f.read()
     return Response(content=output)


 @app.get("/pages/{page}/{file}", include_in_schema=False)
 async def page_assets(page: str, file: str):
-    async with aiofiles.open(
-        path.join("pages", page, file), "r", encoding="utf-8"
-    ) as f:
+    async with aiofiles.open(Path(f"pages/{page}/{file}"), "r", encoding="utf-8") as f:
         output = await f.read()
     return Response(content=output)


 @app.get("/", include_in_schema=False)
 async def page_home():
-    async with aiofiles.open(
-        path.join("pages", "home", "index.html"), "r", encoding="utf-8"
-    ) as f:
+    async with aiofiles.open(Path("pages/home/index.html"), "r", encoding="utf-8") as f:
         output = await f.read()
     return HTMLResponse(content=output)

@@ -36,7 +30,7 @@ async def page_home():
 @app.get("/register", include_in_schema=False)
 async def page_register():
     async with aiofiles.open(
-        path.join("pages", "register", "index.html"), "r", encoding="utf-8"
+        Path("pages/register/index.html"), "r", encoding="utf-8"
     ) as f:
         output = await f.read()
     return HTMLResponse(content=output)

View File

@@ -1,6 +1,9 @@
+import logging
 import re
 from datetime import datetime, timedelta, timezone
 from os import makedirs, path, remove, system
+from pathlib import Path
+from random import randint
 from secrets import token_urlsafe
 from shutil import move
 from threading import Thread

@@ -24,10 +27,16 @@ from classes.exceptions import (
     AlbumNameNotFoundError,
     PhotoNotFoundError,
     PhotoSearchQueryEmptyError,
+    SearchLimitInvalidError,
     SearchPageInvalidError,
     SearchTokenInvalidError,
 )
-from classes.models import Photo, PhotoPublic, SearchResultsPhoto
+from classes.models import (
+    Photo,
+    PhotoPublic,
+    RandomSearchResultsPhoto,
+    SearchResultsPhoto,
+)
 from modules.app import app
 from modules.database import col_albums, col_photos, col_tokens
 from modules.exif_reader import extract_location

@@ -42,14 +51,18 @@ from modules.security import (
     get_current_active_user,
     get_user,
 )
-from modules.utils import configGet, logWrite
+from modules.utils import configGet
+
+logger = logging.getLogger(__name__)


 async def compress_image(image_path: str):
     image_type = Magic(mime=True).from_file(image_path)

     if image_type not in ["image/jpeg", "image/png"]:
-        logWrite(f"Not compressing {image_path} because its mime is '{image_type}'")
+        logger.info(
+            "Not compressing %s because its mime is '%s'", image_path, image_type
+        )
         return

     size_before = path.getsize(image_path) / 1024

@@ -65,12 +78,15 @@ async def compress_image(image_path: str):
         return

     task.start()
-    logWrite(f"Compressing '{path.split(image_path)[-1]}'...")
+    logger.info("Compressing '%s'...", Path(image_path).name)
     task.join()

     size_after = path.getsize(image_path) / 1024
-    logWrite(
-        f"Compressed '{path.split(image_path)[-1]}' from {size_before} Kb to {size_after} Kb"
+    logger.info(
+        "Compressed '%s' from %s Kb to %s Kb",
+        Path(image_path).name,
+        size_before,
+        size_after,
     )

@@ -106,18 +122,14 @@ async def photo_upload(
     caption: Union[str, None] = None,
     current_user: User = Security(get_current_active_user, scopes=["photos.write"]),
 ):
-    if col_albums.find_one({"user": current_user.user, "name": album}) is None:
+    if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
         raise AlbumNameNotFoundError(album)

-    makedirs(
-        path.join("data", "users", current_user.user, "albums", album), exist_ok=True
-    )
+    makedirs(Path(f"data/users/{current_user.user}/albums/{album}"), exist_ok=True)

     filename = file.filename

-    if path.exists(
-        path.join("data", "users", current_user.user, "albums", album, file.filename)
-    ):
+    if Path(f"data/users/{current_user.user}/albums/{album}/{file.filename}").exists():
         base_name = file.filename.split(".")[:-1]
         extension = file.filename.split(".")[-1]
         filename = (

@@ -125,20 +137,18 @@ async def photo_upload(
         )

     async with aiofiles.open(
-        path.join("data", "users", current_user.user, "albums", album, filename), "wb"
+        Path(f"data/users/{current_user.user}/albums/{album}/{filename}"), "wb"
     ) as f:
-        f.write(await file.read())
+        await f.write(await file.read())

     file_hash = await get_phash(
-        path.join("data", "users", current_user.user, "albums", album, filename)
+        Path(f"data/users/{current_user.user}/albums/{album}/{filename}")
     )
     duplicates = await get_duplicates(file_hash, album)

-    if len(duplicates) > 0 and ignore_duplicates is False:
+    if len(duplicates) > 0 and not ignore_duplicates:
         if configGet("media_token_access") is True:
-            duplicates_ids = []
-            for entry in duplicates:
-                duplicates_ids.append(entry["id"])
+            duplicates_ids = [entry["id"] for entry in duplicates]
             access_token = create_access_token(
                 data={
                     "sub": current_user.user,

@@ -148,7 +158,7 @@ async def photo_upload(
                 expires_delta=timedelta(hours=configGet("media_token_valid_hours")),
             )
             access_token_short = uuid4().hex[:12].lower()
-            col_tokens.insert_one(
+            await col_tokens.insert_one(
                 {
                     "short": access_token_short,
                     "access_token": access_token,

@@ -168,12 +178,12 @@ async def photo_upload(
     try:
         coords = extract_location(
-            path.join("data", "users", current_user.user, "albums", album, filename)
+            Path(f"data/users/{current_user.user}/albums/{album}/{filename}")
         )
     except (UnpackError, ValueError):
         coords = {"lng": 0.0, "lat": 0.0, "alt": 0.0}

-    uploaded = col_photos.insert_one(
+    uploaded = await col_photos.insert_one(
         {
             "user": current_user.user,
             "album": album,

@@ -188,14 +198,12 @@ async def photo_upload(
         }
     )

-    if compress is True:
+    if compress:
         scheduler.add_job(
             compress_image,
             trigger="date",
             run_date=datetime.now() + timedelta(seconds=1),
-            args=[
-                path.join("data", "users", current_user.user, "albums", album, filename)
-            ],
+            args=[Path(f"data/users/{current_user.user}/albums/{album}/{filename}")],
         )

     return UJSONResponse(

@@ -223,7 +231,7 @@ if configGet("media_token_access") is True:
         responses=photo_get_token_responses,
     )
     async def photo_get_token(token: str, id: int):
-        db_entry = col_tokens.find_one({"short": token})
+        db_entry = await col_tokens.find_one({"short": token})

         if db_entry is None:
             raise AccessTokenInvalidError()

@@ -238,24 +246,23 @@ if configGet("media_token_access") is True:
                 raise AccessTokenInvalidError()
             token_scopes = payload.get("scopes", [])
             token_data = TokenData(scopes=token_scopes, user=user)
-        except (JWTError, ValidationError) as exp:
-            print(exp, flush=True)
-            raise AccessTokenInvalidError()
+        except (JWTError, ValidationError) as exc:
+            raise AccessTokenInvalidError() from exc

-        user = get_user(user=token_data.user)
+        user_record = await get_user(user=token_data.user)

         if id not in payload.get("allowed", []):
             raise AccessTokenInvalidError()

         try:
-            image = col_photos.find_one({"_id": ObjectId(id)})
+            image = await col_photos.find_one({"_id": ObjectId(id)})
             if image is None:
                 raise InvalidId(id)
-        except InvalidId:
-            raise PhotoNotFoundError(id)
+        except InvalidId as exc:
+            raise PhotoNotFoundError(id) from exc

-        image_path = path.join(
-            "data", "users", user.user, "albums", image["album"], image["filename"]
+        image_path = Path(
+            f"data/users/{user_record.user}/albums/{image['album']}/{image['filename']}"
         )

         mime = Magic(mime=True).from_file(image_path)

@@ -267,7 +274,17 @@ if configGet("media_token_access") is True:

 photo_get_responses = {
-    200: {"content": {"image/*": {}}},
+    200: {
+        "content": {
+            "application/octet-stream": {
+                "schema": {
+                    "type": "string",
+                    "format": "binary",
+                    "contentMediaType": "image/*",
+                }
+            }
+        }
+    },
     404: PhotoNotFoundError("id").openapi,
 }

@@ -283,14 +300,14 @@ async def photo_get(
     current_user: User = Security(get_current_active_user, scopes=["photos.read"]),
 ):
     try:
-        image = col_photos.find_one({"_id": ObjectId(id)})
+        image = await col_photos.find_one({"_id": ObjectId(id)})
         if image is None:
             raise InvalidId(id)
-    except InvalidId:
-        raise PhotoNotFoundError(id)
+    except InvalidId as exc:
+        raise PhotoNotFoundError(id) from exc

-    image_path = path.join(
-        "data", "users", current_user.user, "albums", image["album"], image["filename"]
+    image_path = Path(
+        f"data/users/{current_user.user}/albums/{image['album']}/{image['filename']}"
     )

     mime = Magic(mime=True).from_file(image_path)

@@ -316,20 +333,18 @@ async def photo_move(
     current_user: User = Security(get_current_active_user, scopes=["photos.write"]),
 ):
     try:
-        image = col_photos.find_one({"_id": ObjectId(id)})
+        image = await col_photos.find_one({"_id": ObjectId(id)})
         if image is None:
             raise InvalidId(id)
-    except InvalidId:
-        raise PhotoNotFoundError(id)
+    except InvalidId as exc:
+        raise PhotoNotFoundError(id) from exc

-    if col_albums.find_one({"user": current_user.user, "name": album}) is None:
+    if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
         raise AlbumNameNotFoundError(album)

-    if path.exists(
-        path.join(
-            "data", "users", current_user.user, "albums", album, image["filename"]
-        )
-    ):
+    if Path(
+        f"data/users/{current_user.user}/albums/{album}/{image['filename']}"
+    ).exists():
         base_name = image["filename"].split(".")[:-1]
         extension = image["filename"].split(".")[-1]
         filename = (

@@ -338,7 +353,7 @@ async def photo_move(
     else:
         filename = image["filename"]

-    col_photos.find_one_and_update(
+    await col_photos.find_one_and_update(
         {"_id": ObjectId(id)},
         {
             "$set": {

@@ -350,15 +365,10 @@ async def photo_move(
     )

     move(
-        path.join(
-            "data",
-            "users",
-            current_user.user,
-            "albums",
-            image["album"],
-            image["filename"],
+        Path(
+            f"data/users/{current_user.user}/albums/{image['album']}/{image['filename']}"
         ),
-        path.join("data", "users", current_user.user, "albums", album, filename),
+        Path(f"data/users/{current_user.user}/albums/{album}/{filename}"),
     )

     return UJSONResponse(

@@ -385,13 +395,13 @@ async def photo_patch(
     current_user: User = Security(get_current_active_user, scopes=["photos.write"]),
 ):
     try:
-        image = col_photos.find_one({"_id": ObjectId(id)})
+        image = await col_photos.find_one({"_id": ObjectId(id)})
         if image is None:
             raise InvalidId(id)
-    except InvalidId:
-        raise PhotoNotFoundError(id)
+    except InvalidId as exc:
+        raise PhotoNotFoundError(id) from exc

-    col_photos.find_one_and_update(
+    await col_photos.find_one_and_update(
         {"_id": ObjectId(id)},
         {"$set": {"caption": caption, "dates.modified": datetime.now(tz=timezone.utc)}},
     )

@@ -419,31 +429,87 @@ async def photo_delete(
     current_user: User = Security(get_current_active_user, scopes=["photos.write"]),
 ):
     try:
-        image = col_photos.find_one_and_delete({"_id": ObjectId(id)})
+        image = await col_photos.find_one_and_delete({"_id": ObjectId(id)})
         if image is None:
             raise InvalidId(id)
-    except InvalidId:
-        raise PhotoNotFoundError(id)
+    except InvalidId as exc:
+        raise PhotoNotFoundError(id) from exc

-    album = col_albums.find_one({"name": image["album"]})
+    album = await col_albums.find_one({"name": image["album"]})

     if album is not None and album["cover"] == image["_id"].__str__():
-        col_albums.update_one({"name": image["album"]}, {"$set": {"cover": None}})
+        await col_albums.update_one({"name": image["album"]}, {"$set": {"cover": None}})

     remove(
-        path.join(
-            "data",
-            "users",
-            current_user.user,
-            "albums",
-            image["album"],
-            image["filename"],
+        Path(
+            f"data/users/{current_user.user}/albums/{image['album']}/{image['filename']}"
         )
     )

     return Response(status_code=HTTP_204_NO_CONTENT)


+photo_random_responses = {
+    400: SearchLimitInvalidError().openapi,
+    404: AlbumNameNotFoundError("name").openapi,
+}
+
+
+@app.get(
+    "/albums/{album}/photos/random",
+    description="Get one random photo, optionally by caption",
+    response_class=UJSONResponse,
+    response_model=RandomSearchResultsPhoto,
+    responses=photo_random_responses,
+)
+async def photo_random(
+    album: str,
+    caption: Union[str, None] = None,
+    limit: int = 100,
+    current_user: User = Security(get_current_active_user, scopes=["photos.list"]),
+):
+    if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
+        raise AlbumNameNotFoundError(album)
+
+    if limit <= 0:
+        raise SearchLimitInvalidError()
+
+    output = {"results": []}
+
+    db_query = (
+        {
+            "user": current_user.user,
+            "album": album,
+            "caption": re.compile(caption),
+        }
+        if caption is not None
+        else {
+            "user": current_user.user,
+            "album": album,
+        }
+    )
+
+    documents_count = await col_photos.count_documents(db_query)
+    skip = randint(0, documents_count - 1) if documents_count > 1 else 0
+
+    async for image in col_photos.aggregate(
+        [
+            {"$match": db_query},
+            {"$skip": skip},
+            {"$limit": limit},
+        ]
+    ):
+        output["results"].append(
+            {
+                "id": image["_id"].__str__(),
+                "filename": image["filename"],
+                "caption": image["caption"],
+            }
+        )
+
+    return UJSONResponse(output)
+
+
 photo_find_responses = {
     400: SearchPageInvalidError().openapi,
     401: SearchTokenInvalidError().openapi,

@@ -472,7 +538,7 @@ async def photo_find(
     current_user: User = Security(get_current_active_user, scopes=["photos.list"]),
 ):
     if token is not None:
-        found_record = col_tokens.find_one({"token": token})
+        found_record = await col_tokens.find_one({"token": token})

         if found_record is None:
             raise SearchTokenInvalidError()

@@ -489,7 +555,7 @@ async def photo_find(
             current_user=current_user,
         )

-    if col_albums.find_one({"user": current_user.user, "name": album}) is None:
+    if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
         raise AlbumNameNotFoundError(album)

     if page <= 0 or page_size <= 0:

@@ -518,7 +584,7 @@ async def photo_find(
         }
     elif q is None and caption is None:
         raise PhotoSearchQueryEmptyError()
-    elif q is None and caption is not None:
+    elif q is None:
         db_query = {
             "user": current_user.user,
             "album": album,

@@ -529,7 +595,7 @@ async def photo_find(
             "album": album,
             "caption": re.compile(caption),
         }
-    elif q is not None and caption is None:
+    elif caption is None:
         db_query = {
             "user": current_user.user,
             "album": album,

@@ -541,16 +607,22 @@ async def photo_find(
             "filename": re.compile(q),
         }
     else:
-        db_query = {"user": current_user.user, "album": album, "filename": re.compile(q), "caption": re.compile(caption)}  # type: ignore
-        db_query_count = {"user": current_user.user, "album": album, "filename": re.compile(q), "caption": re.compile(caption)}  # type: ignore
+        db_query = {
+            "user": current_user.user,
+            "album": album,
+            "filename": re.compile(q),
+            "caption": re.compile(caption),
+        }
+        db_query_count = {
+            "user": current_user.user,
+            "album": album,
+            "filename": re.compile(q),
+            "caption": re.compile(caption),
+        }

-    images = list(
-        col_photos.find(db_query, limit=page_size, skip=skip).sort(
-            "dates.uploaded", DESCENDING
-        )
-    )
-    for image in images:
+    async for image in col_photos.find(db_query, limit=page_size, skip=skip).sort(
+        "dates.uploaded", direction=DESCENDING
+    ):
         output["results"].append(
             {
                 "id": image["_id"].__str__(),

@@ -559,9 +631,9 @@ async def photo_find(
             }
         )

-    if col_photos.count_documents(db_query_count) > page * page_size:
+    if (await col_photos.count_documents(db_query_count)) > page * page_size:
         token = str(token_urlsafe(32))
-        col_tokens.insert_one(
+        await col_tokens.insert_one(
             {
                 "token": token,
                 "query": q,

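The new /albums/{album}/photos/random route above can be exercised with any HTTP client once a bearer token carrying the photos.list scope is available. Below is a minimal usage sketch with the requests library; the base URL, album name and token are placeholders and not part of this changeset:

    # Sketch: calling the random-photos route added in this diff.
    # BASE_URL, TOKEN and the album name "family" are assumptions for illustration.
    import requests

    BASE_URL = "http://localhost:8000"          # assumed local PhotosAPI instance
    TOKEN = "<bearer token with photos.list>"   # obtained from POST /token

    response = requests.get(
        f"{BASE_URL}/albums/family/photos/random",
        params={"limit": 5},                    # must be >= 1, otherwise HTTP 400 (SearchLimitInvalidError)
        headers={"Authorization": f"Bearer {TOKEN}"},
        timeout=10,
    )
    response.raise_for_status()

    # RandomSearchResultsPhoto shape: {"results": [{"id": ..., "filename": ..., "caption": ...}, ...]}
    for photo in response.json()["results"]:
        print(photo["id"], photo["filename"], photo["caption"])

Note that the handler picks a single random offset with randint and then returns up to limit consecutive documents from that offset, so the results are a random window into the album rather than limit independently sampled photos.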
View File

@@ -17,7 +17,7 @@ token_post_responses = {401: UserCredentialsInvalid().openapi}

 @app.post("/token", response_model=Token, responses=token_post_responses)
 async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends()):
-    user = authenticate_user(form_data.username, form_data.password)
+    user = await authenticate_user(form_data.username, form_data.password)
     if not user:
         raise UserCredentialsInvalid()
     access_token_expires = timedelta(days=ACCESS_TOKEN_EXPIRE_DAYS)

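The /token route above implements the OAuth2 password flow via OAuth2PasswordRequestForm, so clients send form-encoded username and password fields. A hedged sketch of obtaining a token, assuming the conventional access_token/token_type response shape of FastAPI's Token model; the base URL and credentials are placeholders:

    # Sketch: obtaining a bearer token from POST /token (OAuth2 password flow).
    # BASE_URL, "some_user" and "some_password" are assumptions for illustration.
    import requests

    BASE_URL = "http://localhost:8000"

    token_response = requests.post(
        f"{BASE_URL}/token",
        data={"username": "some_user", "password": "some_password"},  # form-encoded, per OAuth2PasswordRequestForm
        timeout=10,
    )
    token_response.raise_for_status()

    # Assumed conventional response shape: {"access_token": "...", "token_type": "bearer"}
    access_token = token_response.json()["access_token"]

    # The token is then sent as "Authorization: Bearer <token>" on subsequent requests.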
View File

@@ -1,3 +1,4 @@
+import logging
 from datetime import datetime, timedelta
 from uuid import uuid1

@@ -21,7 +22,9 @@ from modules.security import (
     get_user,
     verify_password,
 )
-from modules.utils import configGet, logWrite
+from modules.utils import configGet
+
+logger = logging.getLogger(__name__)


 async def send_confirmation(user: str, email: str):

@@ -38,12 +41,14 @@ async def send_confirmation(user: str, email: str):
                 + f"/users/{user}/confirm?code={confirmation_code}"
             ),
         )
-        col_emails.insert_one(
+        await col_emails.insert_one(
             {"user": user, "email": email, "used": False, "code": confirmation_code}
         )
-        logWrite(f"Sent confirmation email to '{email}' with code {confirmation_code}")
-    except Exception as exp:
-        logWrite(f"Could not send confirmation email to '{email}' due to: {exp}")
+        logger.info(
+            "Sent confirmation email to '%s' with code %s", email, confirmation_code
+        )
+    except Exception as exc:
+        logger.error("Could not send confirmation email to '%s' due to: %s", email, exc)


 @app.get("/users/me/", response_model=User)

@@ -75,15 +80,15 @@ if configGet("registration_requires_confirmation") is True:
         responses=user_confirm_responses,
     )
     async def user_confirm(user: str, code: str):
-        confirm_record = col_emails.find_one(
+        confirm_record = await col_emails.find_one(
             {"user": user, "code": code, "used": False}
         )

         if confirm_record is None:
             raise UserEmailCodeInvalid()

-        col_emails.find_one_and_update(
+        await col_emails.find_one_and_update(
             {"_id": confirm_record["_id"]}, {"$set": {"used": True}}
         )
-        col_users.find_one_and_update(
+        await col_users.find_one_and_update(
             {"user": confirm_record["user"]}, {"$set": {"disabled": False}}
         )

         return UJSONResponse({"detail": configGet("email_confirmed", "messages")})

@@ -98,9 +103,9 @@ if configGet("registration_enabled") is True:
     async def user_create(
         user: str = Form(), email: str = Form(), password: str = Form()
     ):
-        if col_users.find_one({"user": user}) is not None:
+        if (await col_users.find_one({"user": user})) is not None:
             raise UserAlreadyExists()
-        col_users.insert_one(
+        await col_users.insert_one(
             {
                 "user": user,
                 "email": email,

@@ -127,14 +132,14 @@ user_delete_responses = {401: UserCredentialsInvalid().openapi}
 async def user_delete(
     password: str = Form(), current_user: User = Depends(get_current_active_user)
 ):
-    user = get_user(current_user.user)
+    user = await get_user(current_user.user)
     if not user:
         return False
     if not verify_password(password, user.hash):
         raise UserCredentialsInvalid()
-    col_users.delete_many({"user": current_user.user})
-    col_emails.delete_many({"user": current_user.user})
-    col_photos.delete_many({"user": current_user.user})
-    col_videos.delete_many({"user": current_user.user})
-    col_albums.delete_many({"user": current_user.user})
+    await col_users.delete_many({"user": current_user.user})
+    await col_emails.delete_many({"user": current_user.user})
+    await col_photos.delete_many({"user": current_user.user})
+    await col_videos.delete_many({"user": current_user.user})
+    await col_albums.delete_many({"user": current_user.user})
     return Response(status_code=HTTP_204_NO_CONTENT)

View File

@@ -1,6 +1,8 @@
 import re
 from datetime import datetime, timezone
-from os import makedirs, path, remove
+from os import makedirs, remove
+from pathlib import Path
+from random import randint
 from secrets import token_urlsafe
 from shutil import move
 from typing import Union

@@ -16,12 +18,18 @@ from starlette.status import HTTP_204_NO_CONTENT
 from classes.exceptions import (
     AlbumNameNotFoundError,
+    SearchLimitInvalidError,
     SearchPageInvalidError,
     SearchTokenInvalidError,
     VideoNotFoundError,
     VideoSearchQueryEmptyError,
 )
-from classes.models import SearchResultsVideo, Video, VideoPublic
+from classes.models import (
+    RandomSearchResultsVideo,
+    SearchResultsVideo,
+    Video,
+    VideoPublic,
+)
 from modules.app import app
 from modules.database import col_albums, col_tokens, col_videos
 from modules.security import User, get_current_active_user

@@ -42,18 +50,14 @@ async def video_upload(
     caption: Union[str, None] = None,
     current_user: User = Security(get_current_active_user, scopes=["videos.write"]),
 ):
-    if col_albums.find_one({"user": current_user.user, "name": album}) is None:
+    if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
         raise AlbumNameNotFoundError(album)

-    makedirs(
-        path.join("data", "users", current_user.user, "albums", album), exist_ok=True
-    )
+    makedirs(Path(f"data/users/{current_user.user}/albums/{album}"), exist_ok=True)

     filename = file.filename

-    if path.exists(
-        path.join("data", "users", current_user.user, "albums", album, file.filename)
-    ):
+    if Path(f"data/users/{current_user.user}/albums/{album}/{file.filename}").exists():
         base_name = file.filename.split(".")[:-1]
         extension = file.filename.split(".")[-1]
         filename = (

@@ -61,7 +65,7 @@ async def video_upload(
         )

     async with aiofiles.open(
-        path.join("data", "users", current_user.user, "albums", album, filename), "wb"
+        Path(f"data/users/{current_user.user}/albums/{album}/{filename}"), "wb"
     ) as f:
         await f.write(await file.read())

@@ -69,7 +73,7 @@ async def video_upload(
     # Coords extraction should be here

-    uploaded = col_videos.insert_one(
+    uploaded = await col_videos.insert_one(
         {
             "user": current_user.user,
             "album": album,

@@ -93,7 +97,17 @@ async def video_upload(

 video_get_responses = {
-    200: {"content": {"video/*": {}}},
+    200: {
+        "content": {
+            "application/octet-stream": {
+                "schema": {
+                    "type": "string",
+                    "format": "binary",
+                    "contentMediaType": "video/*",
+                }
+            }
+        }
+    },
     404: VideoNotFoundError("id").openapi,
 }

@@ -109,14 +123,14 @@ async def video_get(
     current_user: User = Security(get_current_active_user, scopes=["videos.read"]),
 ):
     try:
-        video = col_videos.find_one({"_id": ObjectId(id)})
+        video = await col_videos.find_one({"_id": ObjectId(id)})
         if video is None:
             raise InvalidId(id)
-    except InvalidId:
-        raise VideoNotFoundError(id)
+    except InvalidId as exc:
+        raise VideoNotFoundError(id) from exc

-    video_path = path.join(
-        "data", "users", current_user.user, "albums", video["album"], video["filename"]
+    video_path = Path(
+        f"data/users/{current_user.user}/albums/{video['album']}/{video['filename']}"
     )

     mime = Magic(mime=True).from_file(video_path)

@@ -142,20 +156,18 @@ async def video_move(
     current_user: User = Security(get_current_active_user, scopes=["videos.write"]),
 ):
     try:
-        video = col_videos.find_one({"_id": ObjectId(id)})
+        video = await col_videos.find_one({"_id": ObjectId(id)})
         if video is None:
             raise InvalidId(id)
-    except InvalidId:
-        raise VideoNotFoundError(id)
+    except InvalidId as exc:
+        raise VideoNotFoundError(id) from exc

-    if col_albums.find_one({"user": current_user.user, "name": album}) is None:
+    if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
         raise AlbumNameNotFoundError(album)

-    if path.exists(
-        path.join(
-            "data", "users", current_user.user, "albums", album, video["filename"]
-        )
-    ):
+    if Path(
+        f"data/users/{current_user.user}/albums/{album}/{video['filename']}"
+    ).exists():
         base_name = video["filename"].split(".")[:-1]
         extension = video["filename"].split(".")[-1]
         filename = (

@@ -164,7 +176,7 @@ async def video_move(
     else:
         filename = video["filename"]

-    col_videos.find_one_and_update(
+    await col_videos.find_one_and_update(
         {"_id": ObjectId(id)},
         {
             "$set": {

@@ -176,15 +188,10 @@ async def video_move(
     )

     move(
-        path.join(
-            "data",
-            "users",
-            current_user.user,
-            "albums",
-            video["album"],
-            video["filename"],
+        Path(
+            f"data/users/{current_user.user}/albums/{video['album']}/{video['filename']}"
         ),
-        path.join("data", "users", current_user.user, "albums", album, filename),
+        Path(f"data/users/{current_user.user}/albums/{album}/{filename}"),
     )

     return UJSONResponse(

@@ -211,13 +218,13 @@ async def video_patch(
     current_user: User = Security(get_current_active_user, scopes=["videos.write"]),
 ):
     try:
-        video = col_videos.find_one({"_id": ObjectId(id)})
+        video = await col_videos.find_one({"_id": ObjectId(id)})
         if video is None:
             raise InvalidId(id)
-    except InvalidId:
-        raise VideoNotFoundError(id)
+    except InvalidId as exc:
+        raise VideoNotFoundError(id) from exc

-    col_videos.find_one_and_update(
+    await col_videos.find_one_and_update(
         {"_id": ObjectId(id)},
         {"$set": {"caption": caption, "dates.modified": datetime.now(tz=timezone.utc)}},
     )

@@ -245,28 +252,84 @@ async def video_delete(
     current_user: User = Security(get_current_active_user, scopes=["videos.write"]),
 ):
     try:
-        video = col_videos.find_one_and_delete({"_id": ObjectId(id)})
+        video = await col_videos.find_one_and_delete({"_id": ObjectId(id)})
         if video is None:
             raise InvalidId(id)
-    except InvalidId:
-        raise VideoNotFoundError(id)
+    except InvalidId as exc:
+        raise VideoNotFoundError(id) from exc

-    album = col_albums.find_one({"name": video["album"]})
+    album = await col_albums.find_one({"name": video["album"]})

     remove(
-        path.join(
-            "data",
-            "users",
-            current_user.user,
-            "albums",
-            video["album"],
-            video["filename"],
+        Path(
+            f"data/users/{current_user.user}/albums/{video['album']}/{video['filename']}"
        )
     )

     return Response(status_code=HTTP_204_NO_CONTENT)


+video_random_responses = {
+    400: SearchLimitInvalidError().openapi,
+    404: AlbumNameNotFoundError("name").openapi,
+}
+
+
+@app.get(
+    "/albums/{album}/videos/random",
+    description="Get one random video, optionally by caption",
+    response_class=UJSONResponse,
+    response_model=RandomSearchResultsVideo,
+    responses=video_random_responses,
+)
+async def video_random(
+    album: str,
+    caption: Union[str, None] = None,
+    limit: int = 100,
+    current_user: User = Security(get_current_active_user, scopes=["videos.list"]),
+):
+    if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
+        raise AlbumNameNotFoundError(album)
+
+    if limit <= 0:
+        raise SearchLimitInvalidError()
+
+    output = {"results": []}
+
+    db_query = (
+        {
+            "user": current_user.user,
+            "album": album,
+            "caption": re.compile(caption),
+        }
+        if caption is not None
+        else {
+            "user": current_user.user,
+            "album": album,
+        }
+    )
+
+    documents_count = await col_videos.count_documents(db_query)
+    skip = randint(0, documents_count - 1) if documents_count > 1 else 0
+
+    async for video in col_videos.aggregate(
+        [
+            {"$match": db_query},
+            {"$skip": skip},
+            {"$limit": limit},
+        ]
+    ):
+        output["results"].append(
+            {
+                "id": video["_id"].__str__(),
+                "filename": video["filename"],
+                "caption": video["caption"],
+            }
+        )
+
+    return UJSONResponse(output)
+
+
 video_find_responses = {
     400: SearchPageInvalidError().openapi,
     401: SearchTokenInvalidError().openapi,

@@ -292,7 +355,7 @@ async def video_find(
     current_user: User = Security(get_current_active_user, scopes=["videos.list"]),
 ):
     if token is not None:
-        found_record = col_tokens.find_one({"token": token})
+        found_record = await col_tokens.find_one({"token": token})

         if found_record is None:
             raise SearchTokenInvalidError()

@@ -306,7 +369,7 @@ async def video_find(
             current_user=current_user,
         )

-    if col_albums.find_one({"user": current_user.user, "name": album}) is None:
+    if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
         raise AlbumNameNotFoundError(album)

     if page <= 0 or page_size <= 0:

@@ -318,7 +381,7 @@ async def video_find(
     if q is None and caption is None:
         raise VideoSearchQueryEmptyError()

-    if q is None and caption is not None:
+    if q is None:
         db_query = {
             "user": current_user.user,
             "album": album,

@@ -329,30 +392,34 @@ async def video_find(
             "album": album,
             "caption": re.compile(caption),
         }
-    elif q is not None and caption is None:
-        db_query = list(
-            col_videos.find(
-                {"user": current_user.user, "album": album, "filename": re.compile(q)},
-                limit=page_size,
-                skip=skip,
-            ).sort("dates.uploaded", DESCENDING)
-        )
+    elif caption is None:
+        db_query = {
+            "user": current_user.user,
+            "album": album,
+            "filename": re.compile(q),
+        }
         db_query_count = {
             "user": current_user.user,
             "album": album,
             "caption": re.compile(q),
         }
     else:
-        db_query = list(col_videos.find({"user": current_user.user, "album": album, "filename": re.compile(q), "caption": re.compile(caption)}, limit=page_size, skip=skip).sort("dates.uploaded", DESCENDING))  # type: ignore
-        db_query_count = {"user": current_user.user, "album": album, "filename": re.compile(q), "caption": re.compile(caption)}  # type: ignore
+        db_query = {
+            "user": current_user.user,
+            "album": album,
+            "filename": re.compile(q),
+            "caption": re.compile(caption),
+        }
+        db_query_count = {
+            "user": current_user.user,
+            "album": album,
+            "filename": re.compile(q),
+            "caption": re.compile(caption),
+        }

-    videos = list(
-        col_videos.find(db_query, limit=page_size, skip=skip).sort(
-            "dates.uploaded", DESCENDING
-        )
-    )
-    for video in videos:
+    async for video in col_videos.find(db_query, limit=page_size, skip=skip).sort(
+        "dates.uploaded", direction=DESCENDING
+    ):
         output["results"].append(
             {
                 "id": video["_id"].__str__(),

@@ -361,9 +428,9 @@ async def video_find(
             }
         )

-    if col_videos.count_documents(db_query_count) > page * page_size:
+    if (await col_videos.count_documents(db_query_count)) > page * page_size:
         token = str(token_urlsafe(32))
-        col_tokens.insert_one(
+        await col_tokens.insert_one(
             {
                 "token": token,
                 "query": q,

View File

@@ -1,14 +1,14 @@
 from fastapi import FastAPI
 from fastapi.openapi.docs import get_redoc_html, get_swagger_ui_html

-app = FastAPI(title="END PLAY Photos", docs_url=None, redoc_url=None, version="0.3")
+app = FastAPI(title="END PLAY Photos", docs_url=None, redoc_url=None, version="0.5")


 @app.get("/docs", include_in_schema=False)
 async def custom_swagger_ui_html():
     return get_swagger_ui_html(
-        openapi_url=app.openapi_url,  # type: ignore
-        title=app.title + " - Documentation",
+        openapi_url=app.openapi_url,
+        title=f"{app.title} - Documentation",
         swagger_favicon_url="/favicon.ico",
     )

@@ -16,7 +16,7 @@ async def custom_swagger_ui_html():
 @app.get("/redoc", include_in_schema=False)
 async def custom_redoc_html():
     return get_redoc_html(
-        openapi_url=app.openapi_url,  # type: ignore
-        title=app.title + " - Documentation",
+        openapi_url=app.openapi_url,
+        title=f"{app.title} - Documentation",
         redoc_favicon_url="/favicon.ico",
     )

View File

@@ -1,3 +1,4 @@
+from async_pymongo import AsyncClient
 from pymongo import GEOSPHERE, MongoClient

 from modules.utils import configGet

@@ -17,16 +18,11 @@ else:
         db_config["host"], db_config["port"], db_config["name"]
     )

-db_client = MongoClient(con_string)
+db_client = AsyncClient(con_string)
+db_client_sync = MongoClient(con_string)
 db = db_client.get_database(name=db_config["name"])

-collections = db.list_collection_names()
-
-for collection in ["users", "albums", "photos", "videos", "tokens", "emails"]:
-    if not collection in collections:
-        db.create_collection(collection)
-
 col_users = db.get_collection("users")
 col_albums = db.get_collection("albums")
 col_photos = db.get_collection("photos")

@@ -34,4 +30,4 @@ col_videos = db.get_collection("videos")
 col_tokens = db.get_collection("tokens")
 col_emails = db.get_collection("emails")

-col_photos.create_index([("location", GEOSPHERE)])
+db_client_sync[db_config["name"]]["photos"].create_index([("location", GEOSPHERE)])

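After this change the request handlers work against async collections, while a separate synchronous MongoClient remains only for creating the geospatial index at import time. A minimal sketch of the calling convention the rest of the changeset relies on, assuming async_pymongo mirrors pymongo's method names exactly as used in these diffs; the connection string and database name are placeholders:

    # Sketch of the async collection usage pattern introduced by this migration.
    # The connection string and database name are assumptions for illustration.
    import asyncio

    from async_pymongo import AsyncClient

    async def main():
        client = AsyncClient("mongodb://localhost:27017")   # assumed connection string
        db = client.get_database(name="photos_api")         # assumed database name
        col_albums = db.get_collection("albums")

        # Reads and writes are now awaited instead of being called synchronously:
        album = await col_albums.find_one({"name": "family"})
        print(album)

        # Cursors are consumed with async iteration rather than wrapping them in list():
        async for document in col_albums.find({"user": "some_user"}):
            print(document["_id"], document["name"])

    asyncio.run(main())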
View File

@@ -1,3 +1,7 @@
+import contextlib
+from pathlib import Path
+from typing import Mapping, Union
+
 from exif import Image

@@ -12,12 +16,14 @@ def decimal_coords(coords: float, ref: str) -> float:
         * float: Decimal degrees
     """
     decimal_degrees = coords[0] + coords[1] / 60 + coords[2] / 3600
-    if ref == "S" or ref == "W":
+
+    if ref in {"S", "W"}:
         decimal_degrees = -decimal_degrees
+
     return round(decimal_degrees, 5)


-def extract_location(filepath: str) -> dict:
+def extract_location(filepath: Union[str, Path]) -> Mapping[str, float]:
     """Get location data from image

     ### Args:

@@ -35,11 +41,9 @@ def extract_location(filepath: str) -> dict:
     if img.has_exif is False:
         return output

-    try:
+    with contextlib.suppress(AttributeError):
         output["lng"] = decimal_coords(img.gps_longitude, img.gps_longitude_ref)
         output["lat"] = decimal_coords(img.gps_latitude, img.gps_latitude_ref)
         output["alt"] = img.gps_altitude
-    except AttributeError:
-        pass

     return output

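The reworked extract_location keeps its contract (a mapping with lng, lat and alt keys, as consumed by the photo upload handler) while now accepting Path objects as well as strings. A small usage sketch; the file path below is a placeholder:

    # Sketch: reading GPS coordinates from a photo with the exif-based helper.
    # The module path "modules.exif_reader" matches the import used elsewhere in this changeset;
    # the photo path is a placeholder.
    from pathlib import Path

    from modules.exif_reader import extract_location

    coords = extract_location(Path("data/users/some_user/albums/family/photo.jpg"))
    print(coords["lng"], coords["lat"], coords["alt"])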
View File

@@ -1,5 +1,7 @@
 from importlib.util import module_from_spec, spec_from_file_location
 from os import getcwd, path, walk
+from pathlib import Path
+from typing import Union

 # =================================================================================

@@ -10,17 +12,21 @@ def get_py_files(src):
     cwd = getcwd()  # Current Working directory
     py_files = []

     for root, dirs, files in walk(src):
-        for file in files:
-            if file.endswith(".py"):
-                py_files.append(path.join(cwd, root, file))
+        py_files.extend(
+            Path(f"{cwd}/{root}/{file}") for file in files if file.endswith(".py")
+        )

     return py_files


-def dynamic_import(module_name, py_path):
+def dynamic_import(module_name: str, py_path: str):
     try:
         module_spec = spec_from_file_location(module_name, py_path)
-        module = module_from_spec(module_spec)  # type: ignore
-        module_spec.loader.exec_module(module)  # type: ignore
+        if module_spec is None:
+            raise RuntimeError(
+                f"Module spec from module name {module_name} and path {py_path} is None"
+            )
+        module = module_from_spec(module_spec)
+        module_spec.loader.exec_module(module)
         return module
     except SyntaxError:
         print(

@@ -28,15 +34,15 @@ def dynamic_import(module_name, py_path):
             flush=True,
         )
         return
-    except Exception as exp:
-        print(f"Could not load extension {module_name} due to {exp}", flush=True)
+    except Exception as exc:
+        print(f"Could not load extension {module_name} due to {exc}", flush=True)
         return


-def dynamic_import_from_src(src, star_import=False):
+def dynamic_import_from_src(src: Union[str, Path], star_import=False):
     my_py_files = get_py_files(src)
     for py_file in my_py_files:
-        module_name = path.split(py_file)[-1][:-3]
+        module_name = Path(py_file).stem
         print(f"Importing {module_name} extension...", flush=True)
         imported_module = dynamic_import(module_name, py_file)
         if imported_module != None:

View File

@@ -1,3 +1,6 @@
+from pathlib import Path
+from typing import Any, List, Mapping, Union
+
 import cv2
 import numpy as np
 from numpy.typing import NDArray
@@ -6,7 +9,7 @@ from scipy import spatial
 from modules.database import col_photos


-def hash_array_to_hash_hex(hash_array):
+def hash_array_to_hash_hex(hash_array) -> str:
     # convert hash array of 0 or 1 to hash string in hex
     hash_array = np.array(hash_array, dtype=np.uint8)
     hash_str = "".join(str(i) for i in 1 * hash_array.flatten())
@@ -17,18 +20,18 @@ def hash_hex_to_hash_array(hash_hex) -> NDArray:
     # convert hash string in hex to hash values of 0 or 1
     hash_str = int(hash_hex, 16)
     array_str = bin(hash_str)[2:]
-    return np.array([i for i in array_str], dtype=np.float32)
+    return np.array(list(array_str), dtype=np.float32)


-def get_duplicates_cache(album: str) -> dict:
-    output = {}
-    for photo in col_photos.find({"album": album}):
-        output[photo["filename"]] = [photo["_id"].__str__(), photo["hash"]]
-    return output
+async def get_duplicates_cache(album: str) -> Mapping[str, Any]:
+    return {
+        photo["filename"]: [photo["_id"].__str__(), photo["hash"]]
+        async for photo in col_photos.find({"album": album})
+    }


-async def get_phash(filepath: str) -> str:
-    img = cv2.imread(filepath)
+async def get_phash(filepath: Union[str, Path]) -> str:
+    img = cv2.imread(str(filepath))
     # resize image and convert to gray scale
     img = cv2.resize(img, (64, 64))
     img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
@@ -49,14 +52,14 @@ async def get_phash(filepath: str) -> str:
     return hash_array_to_hash_hex(dct_block.flatten())


-async def get_duplicates(hash: str, album: str) -> list:
+async def get_duplicates(hash_string: str, album: str) -> List[Mapping[str, Any]]:
     duplicates = []
-    cache = get_duplicates_cache(album)
-    for image_name in cache.keys():
+    cache = await get_duplicates_cache(album)
+    for image_name, image_object in cache.items():
         try:
             distance = spatial.distance.hamming(
                 hash_hex_to_hash_array(cache[image_name][1]),
-                hash_hex_to_hash_array(hash),
+                hash_hex_to_hash_array(hash_string),
             )
         except ValueError:
             continue
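
For orientation, a hedged end-to-end sketch of how these helpers fit together: compute a perceptual hash for a new file, pull the album cache from MongoDB, and rank entries by Hamming distance, mirroring get_duplicates above. The modules.hasher import path and the 0.25 threshold are assumptions; the decision threshold actually used falls outside the lines shown here.

    from scipy import spatial

    from modules.hasher import get_duplicates_cache, get_phash, hash_hex_to_hash_array


    async def find_similar(filepath: str, album: str, threshold: float = 0.25) -> list:
        # pHash of the candidate file: 64x64 grayscale image -> DCT block -> hex string
        new_hash = await get_phash(filepath)
        # {filename: [object id, stored hash]} for every photo in the album
        cache = await get_duplicates_cache(album)
        similar = []
        for filename, (photo_id, stored_hash) in cache.items():
            try:
                distance = spatial.distance.hamming(
                    hash_hex_to_hash_array(stored_hash),
                    hash_hex_to_hash_array(new_hash),
                )
            except ValueError:
                # hashes of different lengths cannot be compared, as above
                continue
            if distance <= threshold:
                similar.append({"name": filename, "id": photo_id, "distance": distance})
        return sorted(similar, key=lambda entry: entry["distance"])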


@@ -1,8 +1,11 @@
+import logging
 from smtplib import SMTP, SMTP_SSL
 from ssl import create_default_context
 from traceback import print_exc

-from modules.utils import configGet, logWrite
+from modules.utils import configGet
+
+logger = logging.getLogger(__name__)

 try:
     if configGet("use_ssl", "mailer", "smtp") is True:
@@ -10,7 +13,7 @@ try:
             configGet("host", "mailer", "smtp"),
             configGet("port", "mailer", "smtp"),
         )
-        logWrite(f"Initialized SMTP SSL connection")
+        logger.info("Initialized SMTP SSL connection")
     elif configGet("use_tls", "mailer", "smtp") is True:
         mail_sender = SMTP(
             configGet("host", "mailer", "smtp"),
@@ -18,21 +21,21 @@ try:
         )
         mail_sender.starttls(context=create_default_context())
         mail_sender.ehlo()
-        logWrite(f"Initialized SMTP TLS connection")
+        logger.info("Initialized SMTP TLS connection")
     else:
         mail_sender = SMTP(
             configGet("host", "mailer", "smtp"), configGet("port", "mailer", "smtp")
         )
         mail_sender.ehlo()
-        logWrite(f"Initialized SMTP connection")
-except Exception as exp:
-    logWrite(f"Could not initialize SMTP connection to: {exp}")
+        logger.info("Initialized SMTP connection")
+except Exception as exc:
+    logger.error("Could not initialize SMTP connection to: %s", exc)
     print_exc()

 try:
     mail_sender.login(
         configGet("login", "mailer", "smtp"), configGet("password", "mailer", "smtp")
     )
-    logWrite(f"Successfully initialized mailer")
-except Exception as exp:
-    logWrite(f"Could not login into provided SMTP account due to: {exp}")
+    logger.info("Successfully initialized mailer")
+except Exception as exc:
+    logger.error("Could not login into provided SMTP account due to: %s", exc)


@@ -54,16 +54,20 @@ oauth2_scheme = OAuth2PasswordBearer(
 )


-def verify_password(plain_password, hashed_password):
+def verify_password(plain_password, hashed_password) -> bool:
     return pwd_context.verify(plain_password, hashed_password)


-def get_password_hash(password):
+def get_password_hash(password) -> str:
     return pwd_context.hash(password)


-def get_user(user: str):
-    found_user = col_users.find_one({"user": user})
+async def get_user(user: str) -> UserInDB:
+    found_user = await col_users.find_one({"user": user})
+
+    if found_user is None:
+        raise RuntimeError(f"User {user} does not exist")
+
     return UserInDB(
         user=found_user["user"],
         email=found_user["email"],
@@ -72,16 +76,16 @@ def get_user(user: str):
     )


-def authenticate_user(user_name: str, password: str):
-    user = get_user(user_name)
-    if not user:
-        return False
-    if not verify_password(password, user.hash):
-        return False
-    return user
+async def authenticate_user(user_name: str, password: str) -> Union[UserInDB, bool]:
+    if user := await get_user(user_name):
+        return user if verify_password(password, user.hash) else False
+    else:
+        return False


-def create_access_token(data: dict, expires_delta: Union[timedelta, None] = None):
+def create_access_token(
+    data: dict, expires_delta: Union[timedelta, None] = None
+) -> str:
     to_encode = data.copy()
     if expires_delta:
         expire = datetime.now(tz=timezone.utc) + expires_delta
@@ -89,14 +93,13 @@ def create_access_token(data: dict, expires_delta: Union[timedelta, None] = None
         expire = datetime.now(tz=timezone.utc) + timedelta(
             days=ACCESS_TOKEN_EXPIRE_DAYS
         )
-    to_encode.update({"exp": expire})
-    encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
-    return encoded_jwt
+    to_encode["exp"] = expire
+    return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)


 async def get_current_user(
     security_scopes: SecurityScopes, token: str = Depends(oauth2_scheme)
-):
+) -> UserInDB:
     if security_scopes.scopes:
         authenticate_value = f'Bearer scope="{security_scopes.scope_str}"'
     else:
@@ -115,12 +118,12 @@ async def get_current_user(
             raise credentials_exception
         token_scopes = payload.get("scopes", [])
         token_data = TokenData(scopes=token_scopes, user=user)
-    except (JWTError, ValidationError):
-        raise credentials_exception
+    except (JWTError, ValidationError) as exc:
+        raise credentials_exception from exc

-    user = get_user(user=token_data.user)
+    user_record = await get_user(user=token_data.user)

-    if user is None:
+    if user_record is None:
         raise credentials_exception

     for scope in security_scopes.scopes:
@@ -130,7 +133,7 @@ async def get_current_user(
                 detail="Not enough permissions",
                 headers={"WWW-Authenticate": authenticate_value},
             )
-    return user
+    return user_record


 async def get_current_active_user(
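
A hedged sketch of how these helpers would typically be wired into a FastAPI token route: authenticate_user now has to be awaited, and create_access_token returns the encoded JWT directly. The /token path, the claim names, and the modules.security import path are assumptions, not taken from the lines shown here.

    from datetime import timedelta

    from fastapi import Depends, HTTPException, status
    from fastapi.security import OAuth2PasswordRequestForm

    from modules.app import app
    from modules.security import authenticate_user, create_access_token


    @app.post("/token")
    async def login(form_data: OAuth2PasswordRequestForm = Depends()):
        user = await authenticate_user(form_data.username, form_data.password)
        if user is False:
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Incorrect user or password",
            )
        access_token = create_access_token(
            data={"sub": user.user, "scopes": form_data.scopes},
            expires_delta=timedelta(days=1),
        )
        return {"access_token": access_token, "token_type": "bearer"}

Note that with get_user now raising RuntimeError for an unknown username, the False branch of authenticate_user effectively only covers a wrong password.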


@@ -1,21 +1,18 @@
-from traceback import print_exc
+import logging
+from pathlib import Path
+from traceback import format_exc
 from typing import Any, Union

 from ujson import JSONDecodeError, dumps, loads

-
-# Print to stdout and then to log
-def logWrite(message: str, debug: bool = False) -> None:
-    # save to log file and rotation is to be done
-    # logAppend(f'{message}', debug=debug)
-    print(f"{message}", flush=True)
+logger = logging.getLogger(__name__)


-def jsonLoad(filepath: str) -> Any:
+def jsonLoad(filepath: Union[str, Path]) -> Any:
     """Load json file

     ### Args:
-        * filepath (`str`): Path to input file
+        * filepath (`Union[str, Path]`): Path to input file

     ### Returns:
         * `Any`: Some json deserializable
@@ -24,32 +21,36 @@ def jsonLoad(filepath: str) -> Any:
     try:
         output = loads(file.read())
     except JSONDecodeError:
-        logWrite(
-            f"Could not load json file {filepath}: file seems to be incorrect!\n{print_exc()}"
+        logger.error(
+            "Could not load json file %s: file seems to be incorrect!\n%s",
+            filepath,
+            format_exc(),
         )
         raise
     except FileNotFoundError:
-        logWrite(
-            f"Could not load json file {filepath}: file does not seem to exist!\n{print_exc()}"
+        logger.error(
+            "Could not load json file %s: file does not seem to exist!\n%s",
+            filepath,
+            format_exc(),
        )
         raise
     file.close()
     return output


-def jsonSave(contents: Union[list, dict], filepath: str) -> None:
+def jsonSave(contents: Union[list, dict], filepath: Union[str, Path]) -> None:
     """Save contents into json file

     ### Args:
         * contents (`Union[list, dict]`): Some json serializable
-        * filepath (`str`): Path to output file
+        * filepath (`Union[str, Path]`): Path to output file
     """
     try:
         with open(filepath, "w", encoding="utf8") as file:
             file.write(dumps(contents, ensure_ascii=False, indent=4))
             file.close()
-    except Exception as exp:
-        logWrite(f"Could not save json file {filepath}: {exp}\n{print_exc()}")
+    except Exception as exc:
+        logger.error("Could not save json file %s: %s\n%s", filepath, exc, format_exc())
     return
@@ -63,7 +64,7 @@ def configGet(key: str, *args: str) -> Any:
     ### Returns:
         * `Any`: Value of provided key
     """
-    this_dict = jsonLoad("config.json")
+    this_dict = jsonLoad(Path("config.json"))
     this_key = this_dict
     for dict_key in args:
         this_key = this_key[dict_key]
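
As used in the mailer module above, configGet walks nested dictionaries loaded from config.json: the trailing *args select the sub-section and the first argument is the leaf key, so configGet("host", "mailer", "smtp") reads config["mailer"]["smtp"]["host"]. The final lookup falls just outside the hunk shown, but the calls above imply it. An illustrative use against an assumed config.json shape (values are placeholders):

    from modules.utils import configGet

    # Assumed config.json excerpt:
    # {"mailer": {"smtp": {"host": "smtp.example.com", "port": 587,
    #                      "use_ssl": false, "use_tls": true}}}

    host = configGet("host", "mailer", "smtp")        # -> "smtp.example.com"
    use_tls = configGet("use_tls", "mailer", "smtp")  # -> True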


@@ -1,13 +1,20 @@
-from os import makedirs, path
+import logging
+from os import makedirs
+from pathlib import Path

 from fastapi.responses import FileResponse

 from modules.app import app
 from modules.extensions_loader import dynamic_import_from_src
 from modules.scheduler import scheduler
-from modules.utils import *

-makedirs(path.join("data", "users"), exist_ok=True)
+makedirs(Path("data/users"), exist_ok=True)
+
+logging.basicConfig(
+    level=logging.INFO,
+    format="%(name)s.%(funcName)s | %(levelname)s | %(message)s",
+    datefmt="[%X]",
+)


 @app.get("/favicon.ico", response_class=FileResponse, include_in_schema=False)


@@ -1,11 +1,13 @@
-aiofiles==23.1.0
+aiofiles==23.2.1
 apscheduler~=3.10.1
 exif==1.6.0
-fastapi[all]==0.97.0
-opencv-python~=4.7.0.72
+fastapi[all]==0.103.1
+opencv-python~=4.8.0.74
 passlib~=1.7.4
-pymongo==4.4.0
+pymongo>=4.3.3
 python-jose[cryptography]~=3.3.0
 python-magic~=0.4.27
-scipy~=1.10.1
+scipy~=1.11.0
 ujson~=5.8.0
+--extra-index-url https://git.end-play.xyz/api/packages/profitroll/pypi/simple
+async_pymongo==0.1.4
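
Note on the last two added lines: pip reads --extra-index-url directly from a requirements file, so a plain pip install -r requirements.txt will consult the git.end-play.xyz package index in addition to PyPI when resolving the pinned async_pymongo==0.1.4.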