Files zipping update

Profitroll 2023-01-19 13:35:27 +01:00
parent 3c245d8671
commit aa8be6006c
4 changed files with 84 additions and 64 deletions

View File

@@ -9,6 +9,7 @@
     "locations": {
         "data": "data"
     },
+    "compression": 5,
     "messages": {
         "key_expired": "API key expired",
         "key_invalid": "Invalid API key",
@@ -19,5 +20,25 @@
         "user_already_exists": "User with this username already exists.",
         "email_confirmed": "Email confirmed. You can now log in.",
         "email_code_invalid": "Confirmation code is invalid."
+    },
+    "external_address": "localhost",
+    "registration_enabled": false,
+    "registration_requires_confirmation": false,
+    "mailer": {
+        "smtp": {
+            "host": "",
+            "port": 0,
+            "sender": "",
+            "login": "",
+            "password": "",
+            "use_ssl": true,
+            "use_tls": false
+        },
+        "messages": {
+            "registration_confirmation": {
+                "subject": "Email confirmation",
+                "message": "To confirm your email please follow this link: {0}"
+            }
+        }
     }
 }
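Note: the new "compression" value feeds zipfile's compresslevel when saves are packed (see zip_saves below). A minimal standalone sketch of how the setting is meant to be used; the config file's real name is not shown in this view, so "config.json" is a placeholder:

import json
from zipfile import ZipFile, ZIP_DEFLATED

# Placeholder path: the diff does not show where this config actually lives.
with open("config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

# "compression" is the new top-level key (5 above); for ZIP_DEFLATED the level
# ranges from 0 (no compression) to 9 (best compression, slowest).
with ZipFile("Farm_123456789.svsave", "w", ZIP_DEFLATED, compresslevel=config["compression"]) as zf:
    zf.writestr("SaveGameInfo", b"<Farmer>...</Farmer>")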

View File

@@ -1,19 +1,35 @@
 from datetime import datetime
 from io import BytesIO
 from urllib.parse import quote_plus
-from os import path, remove
-from typing import Dict, List, Union
-from zipfile import ZipFile
+from os import makedirs, path, remove
+from typing import Dict, List, Tuple, Union
+from uuid import uuid4
+from zipfile import ZipFile, ZIP_DEFLATED
 from xmltodict import parse
 from models.saves import StardewSave
 from modules.app import app, get_api_key, user_by_key
-from modules.utils import saveFile
+from modules.utils import configGet, saveFile
 from modules.database import col_devices, col_saves
 from fastapi import HTTPException, Depends, UploadFile
 from fastapi.responses import UJSONResponse, FileResponse, Response
 from fastapi.openapi.models import APIKey
 from starlette.status import HTTP_204_NO_CONTENT, HTTP_404_NOT_FOUND, HTTP_406_NOT_ACCEPTABLE
 
+def zip_saves(save_filename: str, save_bytes: bytes, saveinfo_bytes: bytes) -> Tuple[str, str]:
+    save_uuid = str(uuid4())
+    makedirs(path.join(configGet("data", "locations"), "files", save_uuid))
+    #saveFile(save_bytes, filename=save_filename, dirname=zipname)
+    #saveFile(saveinfo_bytes, filename="SaveGameInfo", dirname=zipname)
+    with ZipFile(path.join(configGet("data", "locations"), "files", save_uuid, save_filename+".svsave"), 'w', ZIP_DEFLATED, compresslevel=configGet("compression")) as ziph:
+        ziph.writestr("SaveGameInfo", saveinfo_bytes)
+        ziph.writestr(save_filename, save_bytes)
+    return save_uuid, path.join(configGet("data", "locations"), "files", save_uuid, save_filename+".svsave")
+
 def zipfiles(filenames, save_name: str) -> Response:
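The new zip_saves() helper replaces the two loose files previously written by saveFile() with a single deflated .svsave archive under <data>/files/<uuid>/ and returns the uuid plus the archive path. A self-contained sketch of the same idea, with plain parameters standing in for the configGet("data", "locations") and configGet("compression") lookups:

from os import makedirs, path
from uuid import uuid4
from zipfile import ZipFile, ZIP_DEFLATED

def pack_save(save_filename: str, save_bytes: bytes, saveinfo_bytes: bytes,
              data_dir: str = "data", level: int = 5):
    # One directory per upload, named by a fresh UUID, mirroring the commit's layout.
    save_uuid = str(uuid4())
    target_dir = path.join(data_dir, "files", save_uuid)
    makedirs(target_dir)
    archive = path.join(target_dir, save_filename + ".svsave")
    # The save file and its SaveGameInfo end up inside one deflated archive.
    with ZipFile(archive, "w", ZIP_DEFLATED, compresslevel=level) as ziph:
        ziph.writestr("SaveGameInfo", saveinfo_bytes)
        ziph.writestr(save_filename, save_bytes)
    return save_uuid, archive

save_uuid, archive_path = pack_save("Farm_123456789", b"<SaveGame/>", b"<Farmer/>")
with ZipFile(archive_path) as zf:
    print(zf.namelist())  # ['SaveGameInfo', 'Farm_123456789']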
@@ -29,7 +45,7 @@ def zipfiles(filenames, save_name: str) -> Response:
         # Add file, at correct path
         for entry in (list(col_saves.find({"files.save.uuid": fname})) + list(col_saves.find({"files.saveinfo.uuid": fname}))):
-            filename = entry["files"]["save"]["name"] if (entry["files"]["save"]["uuid"] == fname) else entry["files"]["saveinfo"]["name"]
+            filename = entry["file"]["save"]["name"] if (entry["file"]["save"]["uuid"] == fname) else entry["file"]["saveinfo"]["name"]
         zf.write(fpath, filename)
 
     # Must close zip for all contents to be written
@@ -60,7 +76,7 @@ async def saves_get(device: Union[str, None] = None, apikey: APIKey = Depends(ge
         out_entry = entry
         del out_entry["_id"]
         del out_entry["user"]
-        del out_entry["files"]
+        del out_entry["file"]
         output.append(out_entry)
 
     return UJSONResponse(output)
@@ -79,7 +95,7 @@ async def saves_get_by_id(id: int, device: Union[str, None] = None, apikey: APIK
     for entry in saves_entries:
         out_entry = entry
         del out_entry["_id"]
-        del out_entry["files"]
+        del out_entry["file"]
         del out_entry["user"]
         output.append(out_entry)
@@ -91,7 +107,7 @@ async def saves_get_by_both_ids(id: int, save_date: int, device: Union[str, None
     saves_entry = col_saves.find_one({"user": user_by_key(apikey), "id": id, "date": save_date}) if device is None else col_saves.find_one({"user": user_by_key(apikey), "id": id, "device": device, "date": save_date})
     if saves_entry is not None:
         del saves_entry["_id"]
-        del saves_entry["files"]
+        del saves_entry["file"]
         return UJSONResponse(saves_entry)
     else:
         raise HTTPException(HTTP_404_NOT_FOUND, detail="Could not find save with such id.")
@@ -103,8 +119,7 @@ async def saves_delete_by_id(id: int, apikey: APIKey = Depends(get_api_key)):
     if col_saves.count_documents({"user": user, "id": id}) > 0:
         saves_entries = list(col_saves.find({"user": user, "id": id}))
         for entry in saves_entries:
-            remove(entry["files"]["save"]["path"])
-            remove(entry["files"]["saveinfo"]["path"])
+            remove(entry["file"]["path"])
         col_saves.delete_many({"user": user, "id": id})
         return Response(status_code=HTTP_204_NO_CONTENT)
     else:
@@ -115,18 +130,26 @@ async def saves_delete_by_id(id: int, apikey: APIKey = Depends(get_api_key)):
 async def saves_delete_by_both_ids(id: int, save_date: int, apikey: APIKey = Depends(get_api_key)):
     saves_entry = col_saves.find_one_and_delete({"id": id, "date": save_date})
     if saves_entry is not None:
-        remove(saves_entry["files"]["save"]["path"])
-        remove(saves_entry["files"]["saveinfo"]["path"])
+        remove(saves_entry["file"]["path"])
         return Response(status_code=HTTP_204_NO_CONTENT)
     else:
         raise HTTPException(HTTP_404_NOT_FOUND, detail="Could not find save with such id.")
 
 @app.get("/saves/{id}/{save_date}/download", response_class=FileResponse, description="Get game save as .svsave file by its id and save date")
-async def saves_download(id: int, save_date: int, apikey: APIKey = Depends(get_api_key)):
-    saves_entry = col_saves.find_one({"user": user_by_key(apikey), "id": id, "date": save_date})
+async def saves_download(id: int, save_date: int, device: Union[str, None] = None, apikey: APIKey = Depends(get_api_key)):
+    saves_entry = col_saves.find_one({"user": user_by_key(apikey), "id": id, "date": save_date}) if device is None else col_saves.find_one({"user": user_by_key(apikey), "id": id, "device": device, "date": save_date})
     if saves_entry is not None: # type: ignore
-        return zipfiles([saves_entry["files"]["save"]["path"], saves_entry["files"]["saveinfo"]["path"]], save_name=f'{saves_entry["data"]["farmer"]}_{saves_entry["id"]}')
+        with open(saves_entry["file"]["path"], "rb") as file:
+            response = Response(
+                file.read(),
+                media_type="application/x-zip-compressed",
+                headers={
+                    'Content-Disposition': f'attachment;filename={quote_plus(saves_entry["file"]["name"])}.svsave'
+                }
+            )
+        return response
+        # return zipfiles([saves_entry["file"]["save"]["path"], saves_entry["file"]["saveinfo"]["path"]], save_name=f'{saves_entry["data"]["farmer"]}_{saves_entry["id"]}')
     else:
         raise HTTPException(HTTP_404_NOT_FOUND, detail="Could not find save with such id.")
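With zipping moved to upload time, the download route no longer rebuilds an archive per request; it returns the stored .svsave bytes and advertises the filename via Content-Disposition. A rough client-side sketch using requests; the host, port, id, date and key below are placeholders, and how the API key is actually passed (header vs. query string) is defined in modules.app, not in this diff:

import requests
from urllib.parse import unquote_plus

resp = requests.get(
    "http://localhost:8000/saves/1/1674132927/download",  # placeholder host/id/date
    params={"device": "laptop", "apikey": "<key>"},       # placeholder auth scheme
)
resp.raise_for_status()

# The body is the stored zip itself; the suggested filename comes from the header set above.
disposition = resp.headers.get("Content-Disposition", "attachment;filename=save.svsave")
filename = unquote_plus(disposition.split("filename=", 1)[1])
with open(filename, "wb") as f:
    f.write(resp.content)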
@@ -147,26 +170,26 @@ async def saves_post(device: str, files: List[UploadFile], apikey: APIKey = Depe
     if col_devices.find_one({"user": user, "name": device}) is None:
         raise HTTPException(HTTP_404_NOT_FOUND, detail="Could not find device with that name.")
 
-    save_info = save_data = save_info_file = save_data_filename = save_data_file = save_info_file_id = save_data_file_id = None
+    save_info = save_data = save_info_file = save_data_filename = save_data_file = None
 
     for file in files:
         if file.filename == "SaveGameInfo":
             save_info_file = await file.read()
-            save_info_file_id = saveFile(save_info_file)
             save_info = parse(save_info_file.decode("utf-8"))
             if "Farmer" not in save_info:
                 return error_return
         else:
             save_data_filename = file.filename
             save_data_file = await file.read()
-            save_data_file_id = saveFile(save_data_file)
             save_data = parse(save_data_file.decode("utf-8"))
             if "SaveGame" not in save_data:
                 return error_return
 
-    if save_info is None or save_data is None or save_info_file is None or save_data_filename is None or save_data_file is None or save_info_file_id is None or save_data_file_id is None:
+    if save_info is None or save_data is None or save_info_file is None or save_data_filename is None or save_data_file is None:
         return error_return
 
+    zipped = zip_saves(save_data_filename, save_data_file, save_info_file)
+
     save_date = int(datetime.utcnow().timestamp())
 
     index = {
@@ -183,17 +206,10 @@ async def saves_post(device: str, files: List[UploadFile], apikey: APIKey = Depe
             "season": int(save_info["Farmer"]["seasonForSaveGame"]),
             "day": int(save_info["Farmer"]["dayOfMonthForSaveGame"])
         },
-        "files": {
-            "save": {
-                "name": save_data_filename,
-                "uuid": save_data_file_id[0],
-                "path": save_data_file_id[1]
-            },
-            "saveinfo": {
-                "name": "SaveGameInfo",
-                "uuid": save_info_file_id[0],
-                "path": save_info_file_id[1]
-            }
+        "file": {
+            "name": save_data_filename,
+            "uuid": zipped[0],
+            "path": zipped[1]
         }
     }
@@ -202,7 +218,7 @@ async def saves_post(device: str, files: List[UploadFile], apikey: APIKey = Depe
     del save_info, save_data
 
     del index["user"]
-    del index["files"]
+    del index["file"]
     del index["_id"]
 
     col_devices.find_one_and_update({"user": user, "name": device}, {"$set": {"last_save": save_date}})
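For reference, saves documents now carry one flat "file" subdocument (name, uuid, path) instead of the old files.save / files.saveinfo pair, which is why the delete and download handlers above touch exactly one path. A sketch of the resulting shape; every value is made up, including the form of the "user" field:

from os import path

entry = {
    "user": "example_user",
    "device": "laptop",
    "id": 123456789,
    "date": 1674132927,
    "data": {"farmer": "Farmer", "year": 1, "season": 1, "day": 19},
    "file": {
        "name": "Farm_123456789",                        # save_data_filename
        "uuid": "1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed",  # zipped[0]
        "path": path.join("data", "files",
                          "1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed",
                          "Farm_123456789.svsave"),      # zipped[1]
    },
}

# Deleting a save is now a single remove(entry["file"]["path"]) on disk plus the Mongo delete.
print(entry["file"]["path"])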

View File

@@ -62,7 +62,7 @@ def configGet(key: str, *args: str) -> Any:
         this_key = this_key[dict_key]
     return this_key[key]
 
-def saveFile(filebytes: bytes) -> Tuple[str, str]:
+def saveFile(filebytes: bytes, filename: Union[str, None] = None, dirname: Union[str, None] = None) -> Tuple[str, str]:
     """Save some bytedata into random file and return its ID
 
     ### Args:
@@ -71,8 +71,11 @@ def saveFile(filebytes: bytes) -> Tuple[str, str]:
     ### Returns:
         * `Tuple[str, str]`: Tuple where first item is an ID and the second is an absolute path to file
     """
-    makedirs(path.join(configGet("data", "locations"), "files"), exist_ok=True)
-    filename = str(uuid4())
-    with open(path.join(configGet("data", "locations"), "files", filename), "wb") as file:
+    pathlist = [configGet("data", "locations"), "files"]
+    if dirname is not None:
+        pathlist.append(dirname)
+    makedirs(path.join(pathlist), exist_ok=True)
+    filename = str(uuid4()) if filename is None else filename
+    with open(path.join(pathlist+[filename], "wb")) as file:
         file.write(filebytes)
-    return filename, path.join(configGet("data", "locations"), "files", filename)
+    return filename, path.join(pathlist+[filename])
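One thing worth flagging in the reworked saveFile(): os.path.join() takes path components as separate string arguments, so passing the pathlist list directly raises a TypeError, and in the open() call the "wb" mode has ended up inside the join. A corrected sketch of the same helper, offered as a suggestion rather than what this commit ships; the data_dir parameter stands in for configGet("data", "locations") to keep it self-contained:

from os import makedirs, path
from typing import Tuple, Union
from uuid import uuid4

def saveFile(filebytes: bytes, filename: Union[str, None] = None,
             dirname: Union[str, None] = None, data_dir: str = "data") -> Tuple[str, str]:
    pathlist = [data_dir, "files"]
    if dirname is not None:
        pathlist.append(dirname)
    makedirs(path.join(*pathlist), exist_ok=True)               # unpack the list into join()
    filename = str(uuid4()) if filename is None else filename
    with open(path.join(*pathlist, filename), "wb") as file:    # mode belongs to open(), not join()
        file.write(filebytes)
    return filename, path.join(*pathlist, filename)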

View File

@@ -13,15 +13,10 @@
             "data.year",
             "data.season",
             "data.day",
-            "files",
-            "files.save",
-            "files.save.name",
-            "files.save.uuid",
-            "files.save.path",
-            "files.saveinfo",
-            "files.saveinfo.name",
-            "files.saveinfo.uuid",
-            "files.saveinfo.path"
+            "file",
+            "file.name",
+            "file.uuid",
+            "file.path"
         ],
         "properties": {
             "id": {
@@ -60,31 +55,16 @@
             "data.day": {
                 "bsonType": "int"
             },
-            "files": {
+            "file": {
                 "bsonType": "object"
             },
-            "files.save": {
-                "bsonType": "object"
-            },
-            "files.save.name": {
+            "file.name": {
                 "bsonType": "string"
             },
-            "files.save.uuid": {
+            "file.uuid": {
                 "bsonType": "string"
             },
-            "files.save.path": {
-                "bsonType": "string"
-            },
-            "files.saveinfo": {
-                "bsonType": "object"
-            },
-            "files.saveinfo.name": {
-                "bsonType": "string"
-            },
-            "files.saveinfo.uuid": {
-                "bsonType": "string"
-            },
-            "files.saveinfo.path": {
+            "file.path": {
                 "bsonType": "string"
             }
         }
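This last file is the MongoDB JSON-schema validator for the saves collection, tightened to the flat file.name / file.uuid / file.path layout. If the collection already exists, the new validator has to be applied with collMod; a hedged pymongo sketch, where the connection string, database name and the validator's on-disk path are placeholders and the JSON file is assumed to hold the complete validator document (including the $jsonSchema wrapper):

import json
from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")  # placeholder connection string
db = client["example_db"]                          # placeholder database name

with open("saves_schema.json", "r", encoding="utf-8") as f:  # placeholder file name
    validator = json.load(f)

# collMod swaps in the updated validator; with validationLevel "moderate", documents
# written before the change are only re-checked when they are next modified.
db.command({"collMod": "saves", "validator": validator, "validationLevel": "moderate"})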