Fixes and cleanups #11

Merged
profitroll merged 7 commits from dev into master 2023-06-22 14:52:23 +03:00
3 changed files with 28 additions and 17 deletions
Showing only changes of commit 47435c6128

View File

@@ -1,3 +1,4 @@
+import aiofiles
 from os import path
 from modules.app import app
 from fastapi.responses import HTMLResponse, Response
@@ -5,27 +6,35 @@ from fastapi.responses import HTMLResponse, Response
 @app.get("/pages/matter.css", include_in_schema=False)
 async def page_matter():
-    with open(path.join("pages", "matter.css"), "r", encoding="utf-8") as f:
-        output = f.read()
+    async with aiofiles.open(
+        path.join("pages", "matter.css"), "r", encoding="utf-8"
+    ) as f:
+        output = await f.read()
     return Response(content=output)
 @app.get("/pages/{page}/{file}", include_in_schema=False)
 async def page_assets(page: str, file: str):
-    with open(path.join("pages", page, file), "r", encoding="utf-8") as f:
-        output = f.read()
+    async with aiofiles.open(
+        path.join("pages", page, file), "r", encoding="utf-8"
+    ) as f:
+        output = await f.read()
     return Response(content=output)
 @app.get("/", include_in_schema=False)
 async def page_home():
-    with open(path.join("pages", "home", "index.html"), "r", encoding="utf-8") as f:
-        output = f.read()
+    async with aiofiles.open(
+        path.join("pages", "home", "index.html"), "r", encoding="utf-8"
+    ) as f:
+        output = await f.read()
     return HTMLResponse(content=output)
 @app.get("/register", include_in_schema=False)
 async def page_register():
-    with open(path.join("pages", "register", "index.html"), "r", encoding="utf-8") as f:
-        output = f.read()
+    async with aiofiles.open(
+        path.join("pages", "register", "index.html"), "r", encoding="utf-8"
+    ) as f:
+        output = await f.read()
     return HTMLResponse(content=output)
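
The same read logic repeats in all four page routes above. As a rough illustration only (not code from this PR; the helper name and route are made up), the aiofiles pattern could be factored into one helper:

    import aiofiles
    from os import path
    from fastapi import FastAPI
    from fastapi.responses import HTMLResponse

    app = FastAPI()  # stand-in for the app imported from modules.app

    async def read_page(*parts: str) -> str:
        # Read a file under "pages" without blocking the event loop.
        async with aiofiles.open(path.join("pages", *parts), "r", encoding="utf-8") as f:
            return await f.read()

    @app.get("/example", include_in_schema=False)
    async def page_example():
        return HTMLResponse(content=await read_page("example", "index.html"))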

View File

@@ -1,3 +1,4 @@
+import aiofiles
 import re
 import pickle
 from secrets import token_urlsafe
@@ -130,7 +131,7 @@ async def photo_upload(
         ".".join(base_name) + f"_{int(datetime.now().timestamp())}." + extension
     )
-    with open(
+    async with aiofiles.open(
         path.join("data", "users", current_user.user, "albums", album, filename), "wb"
     ) as f:
         f.write(await file.read())
@@ -266,8 +267,8 @@ if configGet("media_token_access") is True:
        mime = Magic(mime=True).from_file(image_path)
-        with open(image_path, "rb") as f:
-            image_file = f.read()
+        async with aiofiles.open(image_path, "rb") as f:
+            image_file = await f.read()
        return Response(image_file, media_type=mime)
@@ -293,8 +294,8 @@ async def photo_get(
     mime = Magic(mime=True).from_file(image_path)
-    with open(image_path, "rb") as f:
-        image_file = f.read()
+    async with aiofiles.open(image_path, "rb") as f:
+        image_file = await f.read()
     return Response(image_file, media_type=mime)
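
For the media endpoints, the converted pattern is a non-blocking binary read followed by a plain Response carrying the sniffed MIME type. A minimal sketch of that pattern, assuming python-magic and an already-validated path (the function name is hypothetical, not part of this PR):

    import aiofiles
    from magic import Magic
    from fastapi.responses import Response

    async def serve_media(media_path: str) -> Response:
        # MIME sniffing stays synchronous; only the file read is awaited.
        mime = Magic(mime=True).from_file(media_path)
        async with aiofiles.open(media_path, "rb") as f:
            data = await f.read()
        return Response(data, media_type=mime)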

View File

@@ -1,3 +1,4 @@
+import aiofiles
 import re
 import pickle
 from secrets import token_urlsafe
@@ -59,10 +60,10 @@ async def video_upload(
         ".".join(base_name) + f"_{int(datetime.now().timestamp())}." + extension
     )
-    with open(
+    async with aiofiles.open(
         path.join("data", "users", current_user.user, "albums", album, filename), "wb"
     ) as f:
-        f.write(await file.read())
+        await f.write(await file.read())
     # Hashing and duplicates check should be here
@@ -112,8 +113,8 @@ async def video_get(
     mime = Magic(mime=True).from_file(video_path)
-    with open(video_path, "rb") as f:
-        video_file = f.read()
+    async with aiofiles.open(video_path, "rb") as f:
+        video_file = await f.read()
     return Response(video_file, media_type=mime)
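
The upload side pairs an aiofiles handle with FastAPI's UploadFile; note that with aiofiles the write itself returns a coroutine and must be awaited, as the video_upload hunk above does. A minimal sketch of that pattern (hypothetical function and path argument, not code from this PR):

    import aiofiles
    from fastapi import UploadFile

    async def save_upload(file: UploadFile, target_path: str) -> None:
        # Both the read from the request and the write to disk are awaited.
        async with aiofiles.open(target_path, "wb") as f:
            await f.write(await file.read())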