Migrated from main API
parent ae19e362b7
commit 0067a5915e
157 .gitignore vendored Normal file
@@ -0,0 +1,157 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

# Custom
.vscode
config.json
16 config_example.json Normal file
@@ -0,0 +1,16 @@
{
    "database": {
        "name": "photos",
        "host": "127.0.0.1",
        "port": 27017,
        "user": null,
        "password": null
    },
    "messages": {
        "key_expired": "API key expired",
        "key_invalid": "Invalid API key",
        "key_valid": "Valid API key",
        "bad_request": "Bad request. Read the docs at photos.end-play.xyz/docs",
        "ip_blacklisted": "Your IP is blacklisted. Make sure you are using the correct API address."
    }
}
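config.json itself is gitignored, so presumably config_example.json is meant to be copied to config.json and filled in before the first run. Note that modules/app.py and requests/albums.py also read a data_location key that this example file does not define; a hypothetical entry (the value is only a guess, loosely based on the hard-coded data/ paths in requests/photos.py):

    "data_location": "data"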
BIN favicon.ico Normal file
Binary file not shown. Size: 11 KiB
79 modules/app.py Normal file
@@ -0,0 +1,79 @@
from os import sep
from fastapi import FastAPI, Security, HTTPException
from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN
from fastapi.security import APIKeyQuery, APIKeyHeader, APIKeyCookie
from fastapi.openapi.docs import get_swagger_ui_html, get_redoc_html
from fastapi.openapi.models import APIKey

from modules.utils import configGet, jsonLoad


app = FastAPI(title="END PLAY Photos", docs_url=None, redoc_url=None, version="2.0")

api_key_query = APIKeyQuery(name="apikey", auto_error=False)
api_key_header = APIKeyHeader(name="apikey", auto_error=False)
api_key_cookie = APIKeyCookie(name="apikey", auto_error=False)


def get_all_api_keys():
    return jsonLoad(f'{configGet("data_location")}{sep}api_keys.json')

def get_all_expired_keys():
    return jsonLoad(f'{configGet("data_location")}{sep}expired_keys.json')

def check_project_key(project: str, apikey: APIKey) -> bool:
    # api_keys.json maps each key to the list of projects it may access
    keys = jsonLoad(f'{configGet("data_location")}{sep}api_keys.json')
    if apikey in keys:
        if keys[apikey] != []:
            if project in keys[apikey]:
                return True
            else:
                return False
        else:
            return False
    else:
        return False


async def get_api_key(
    api_key_query: str = Security(api_key_query),
    api_key_header: str = Security(api_key_header),
    api_key_cookie: str = Security(api_key_cookie),
):
    # Accept the key from the query string, a header, or a cookie, in that order
    keys = get_all_api_keys()
    expired = get_all_expired_keys()

    def is_valid(key):
        if (key in keys) or (key == "publickey"):
            return True
        else:
            return False

    if is_valid(api_key_query):
        return api_key_query
    elif is_valid(api_key_header):
        return api_key_header
    elif is_valid(api_key_cookie):
        return api_key_cookie
    else:
        if (api_key_query in expired) or (api_key_header in expired) or (api_key_cookie in expired):
            raise HTTPException(status_code=HTTP_403_FORBIDDEN, detail=configGet("key_expired", "messages"))
        else:
            raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail=configGet("key_invalid", "messages"))


@app.get("/docs", include_in_schema=False)
async def custom_swagger_ui_html():
    return get_swagger_ui_html(
        openapi_url=app.openapi_url,  # type: ignore
        title=app.title + " - Documentation",
        swagger_favicon_url="/favicon.ico"
    )

@app.get("/redoc", include_in_schema=False)
async def custom_redoc_html():
    return get_redoc_html(
        openapi_url=app.openapi_url,  # type: ignore
        title=app.title + " - Documentation",
        redoc_favicon_url="/favicon.ico"
    )
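Neither api_keys.json nor expired_keys.json is part of this commit; both are read from the data_location directory. Judging from check_project_key and get_api_key, their shapes are presumably along these lines (all key strings and project names below are placeholders):

    api_keys.json:
    {
        "example-key-123": ["photos"]
    }

    expired_keys.json:
    ["example-old-key"]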
33 modules/database.py Normal file
@@ -0,0 +1,33 @@
from modules.utils import configGet
from pymongo import MongoClient

db_config = configGet("database")

if db_config["user"] is not None and db_config["password"] is not None:
    con_string = 'mongodb://{0}:{1}@{2}:{3}/{4}'.format(
        db_config["user"],
        db_config["password"],
        db_config["host"],
        db_config["port"],
        db_config["name"]
    )
else:
    con_string = 'mongodb://{0}:{1}/{2}'.format(
        db_config["host"],
        db_config["port"],
        db_config["name"]
    )

db_client = MongoClient(con_string)

db = db_client.get_database(name=db_config["name"])

collections = db.list_collection_names()

for collection in ["albums", "photos", "tokens"]:
    if collection not in collections:
        db.create_collection(collection)

col_albums = db.get_collection("albums")
col_photos = db.get_collection("photos")
col_tokens = db.get_collection("tokens")
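With the defaults from config_example.json (no user or password), the else branch produces a plain local connection string:

    mongodb://127.0.0.1:27017/photos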
47 modules/extensions_loader.py Normal file
@@ -0,0 +1,47 @@
from importlib.util import module_from_spec, spec_from_file_location
from os import getcwd, path, walk

#=================================================================================

# Import functions
# Taken from https://stackoverflow.com/a/57892961
def get_py_files(src):
    cwd = getcwd()  # current working directory
    py_files = []
    for root, dirs, files in walk(src):
        for file in files:
            if file.endswith(".py"):
                py_files.append(path.join(cwd, root, file))
    return py_files


def dynamic_import(module_name, py_path):
    try:
        module_spec = spec_from_file_location(module_name, py_path)
        module = module_from_spec(module_spec)  # type: ignore
        module_spec.loader.exec_module(module)  # type: ignore
        return module
    except SyntaxError:
        print(f"Could not load extension {module_name} due to invalid syntax. Check logs/errors.log for details.", flush=True)
        return
    except Exception as exp:
        print(f"Could not load extension {module_name} due to {exp}", flush=True)
        return


def dynamic_import_from_src(src, star_import=False):
    my_py_files = get_py_files(src)
    for py_file in my_py_files:
        module_name = path.split(py_file)[-1][:-3]
        print(f"Importing {module_name} extension...", flush=True)
        imported_module = dynamic_import(module_name, py_file)
        if imported_module is not None:
            if star_import:
                for obj in dir(imported_module):
                    globals()[obj] = imported_module.__dict__[obj]
            else:
                globals()[module_name] = imported_module
            print(f"Successfully loaded {module_name} extension", flush=True)
    return

#=================================================================================
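photos_api.py calls dynamic_import_from_src("requests", star_import=True), so every .py file in the requests/ folder is imported at startup and its route decorators register against the shared app object. A minimal, purely hypothetical extension file would therefore look like this (requests/ping.py does not exist in this commit):

    # requests/ping.py (hypothetical)
    from modules.app import app

    @app.get("/ping", include_in_schema=False)
    async def ping():
        return {"status": "ok"}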
55 modules/hasher.py Normal file
@@ -0,0 +1,55 @@
from modules.database import col_photos
import numpy as np
from numpy.typing import NDArray
from scipy import spatial
import cv2

def hash_array_to_hash_hex(hash_array):
    # convert hash array of 0 or 1 to hash string in hex
    hash_array = np.array(hash_array, dtype=np.uint8)
    hash_str = ''.join(str(i) for i in 1 * hash_array.flatten())
    return hex(int(hash_str, 2))

def hash_hex_to_hash_array(hash_hex) -> NDArray:
    # convert hash string in hex to hash values of 0 or 1
    hash_str = int(hash_hex, 16)
    # pad to 64 bits so hashes with leading zero bits keep a fixed length
    array_str = bin(hash_str)[2:].zfill(64)
    return np.array([i for i in array_str], dtype=np.float32)

def get_duplicates_cache(album: str) -> dict:
    # map filename -> [id, phash] for every photo already stored in the album
    output = {}
    for photo in col_photos.find( {"album": album} ):
        output[photo["filename"]] = [photo["_id"].__str__(), photo["hash"]]
    return output

async def get_phash(filepath: str) -> str:
    img = cv2.imread(filepath)
    # resize image and convert to gray scale
    img = cv2.resize(img, (64, 64))
    img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    img = np.array(img, dtype=np.float32)
    # calculate dct of image
    dct = cv2.dct(img)
    # to reduce hash length take only 8*8 top-left block
    # as this block has more information than the rest
    dct_block = dct[:8, :8]
    # calculate mean of dct block excluding first term, i.e. dct(0, 0)
    dct_average = (dct_block.mean() * dct_block.size - dct_block[0, 0]) / (dct_block.size - 1)
    # convert dct block to binary values based on dct_average
    dct_block[dct_block < dct_average] = 0.0
    dct_block[dct_block != 0] = 1.0
    # store hash value
    return hash_array_to_hash_hex(dct_block.flatten())

async def get_duplicates(hash: str, album: str) -> list:
    duplicates = []
    cache = get_duplicates_cache(album)
    for image_name in cache.keys():
        distance = spatial.distance.hamming(
            hash_hex_to_hash_array(cache[image_name][1]),
            hash_hex_to_hash_array(hash)
        )
        print("{0:<30} {1}".format(image_name, distance), flush=True)
        if distance <= 0.25:
            duplicates.append({"id": cache[image_name][0], "filename": image_name, "difference": distance})
    return duplicates
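For illustration, the two async helpers combine like this in a standalone check; the file path and album name are placeholders, and the sketch assumes config.json and MongoDB are already set up (importing modules.hasher connects to the database). A distance of at most 0.25 means at most 16 of the 64 hash bits differ.

    import asyncio
    from modules.hasher import get_phash, get_duplicates

    async def main():
        phash = await get_phash("some_image.jpg")      # hypothetical local file
        print(await get_duplicates(phash, "some_album"))

    asyncio.run(main())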
75 modules/utils.py Normal file
@@ -0,0 +1,75 @@
from typing import Any, Union
from ujson import loads, dumps, JSONDecodeError
from traceback import print_exc

# Print to stdout and then to log
def logWrite(message: str, debug: bool = False) -> None:
    # TODO: save to log file with rotation
    # logAppend(f'{message}', debug=debug)
    print(f"{message}", flush=True)

def jsonLoad(filepath: str) -> Any:
    """Load json file

    ### Args:
        * filepath (`str`): Path to input file

    ### Returns:
        * `Any`: Some json deserializable
    """
    try:
        with open(filepath, "r", encoding='utf8') as file:
            output = loads(file.read())
    except JSONDecodeError:
        logWrite(f"Could not load json file {filepath}: file seems to be incorrect!\n{print_exc()}")
        raise
    except FileNotFoundError:
        logWrite(f"Could not load json file {filepath}: file does not seem to exist!\n{print_exc()}")
        raise
    return output

def jsonSave(contents: Union[list, dict], filepath: str) -> None:
    """Save contents into json file

    ### Args:
        * contents (`Union[list, dict]`): Some json serializable
        * filepath (`str`): Path to output file
    """
    try:
        with open(filepath, "w", encoding='utf8') as file:
            file.write(dumps(contents, ensure_ascii=False, indent=4))
    except Exception as exp:
        logWrite(f"Could not save json file {filepath}: {exp}\n{print_exc()}")
    return

def configGet(key: str, *args: str) -> Any:
    """Get value of the config key

    ### Args:
        * key (`str`): The last key of the keys path.
        * *args (`str`): Path to key like: dict[args][key].

    ### Returns:
        * `Any`: Value of provided key
    """
    this_dict = jsonLoad("config.json")
    this_key = this_dict
    for dict_key in args:
        this_key = this_key[dict_key]
    return this_key[key]

# Handlers in the style of http.server; not referenced anywhere in this commit
def apiKeyInvalid(obj):
    obj.send_response(401)
    obj.send_header('Content-type', 'application/json; charset=utf-8')
    obj.end_headers()
    obj.wfile.write(b'{"code":401, "message": "Invalid API key"}')
    return

def apiKeyExpired(obj):
    obj.send_response(403)
    obj.send_header('Content-type', 'application/json; charset=utf-8')
    obj.end_headers()
    obj.wfile.write(b'{"code":403, "message": "API key expired"}')
    return
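configGet walks the extra positional arguments first and reads the named key last, so with config_example.json copied to config.json:

    from modules.utils import configGet

    configGet("host", "database")         # -> "127.0.0.1"
    configGet("key_invalid", "messages")  # -> "Invalid API key"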
17 photos_api.py Normal file
@@ -0,0 +1,17 @@
from os import makedirs, sep
from modules.app import app
from modules.utils import *
from modules.extensions_loader import dynamic_import_from_src
from fastapi.responses import FileResponse

makedirs(f"data{sep}users", exist_ok=True)


@app.get("/favicon.ico", response_class=FileResponse, include_in_schema=False)
async def favicon():
    return FileResponse("favicon.ico")


#=================================================================================
dynamic_import_from_src("requests", star_import=True)
#=================================================================================
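photos_api.py only assembles the app; how it is served is not part of this commit. A typical way to run it would be an ASGI server such as uvicorn, which is installed with fastapi[all]; the launcher below is a sketch and its host/port are assumptions, not project settings.

    # run.py (hypothetical launcher)
    import uvicorn

    if __name__ == "__main__":
        uvicorn.run("photos_api:app", host="127.0.0.1", port=8000)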
159 requests/albums.py Normal file
@@ -0,0 +1,159 @@
import re
from os import makedirs, rename, sep
from shutil import rmtree
from typing import Union
from modules.utils import configGet
from modules.app import app, check_project_key, get_api_key
from modules.database import col_photos, col_albums
from bson.objectid import ObjectId
from bson.errors import InvalidId

from fastapi import HTTPException, Depends
from fastapi.responses import UJSONResponse, Response
from fastapi.openapi.models import APIKey
from starlette.status import HTTP_204_NO_CONTENT, HTTP_401_UNAUTHORIZED, HTTP_404_NOT_FOUND, HTTP_406_NOT_ACCEPTABLE, HTTP_409_CONFLICT

@app.post("/albums", response_class=UJSONResponse, include_in_schema=True)
async def album_create(name: str, title: str, apikey: APIKey = Depends(get_api_key)):

    if check_project_key("photos", apikey):

        if re.search(r"^[a-z0-9_]*$", name) is None:
            raise HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album name can only contain: a-z, 0-9 and _ characters.")

        if not 2 <= len(name) <= 20:
            raise HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album name must be between 2 and 20 characters.")

        if not 2 <= len(title) <= 40:
            raise HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album title must be between 2 and 40 characters.")

        if col_albums.find_one( {"name": name} ) is not None:
            raise HTTPException(status_code=HTTP_409_CONFLICT, detail=f"Album with name '{name}' already exists.")

        makedirs(f'{configGet("data_location")}{sep}photos{sep}images{sep}{name}', exist_ok=True)

        uploaded = col_albums.insert_one( {"name": name, "title": title} )

        return UJSONResponse(
            {
                "id": uploaded.inserted_id.__str__(),
                "name": name,
                "title": title
            }
        )

    else:
        raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail=configGet("key_invalid", "messages"))

@app.get("/albums", response_class=UJSONResponse, include_in_schema=True)
async def album_find(q: str, apikey: APIKey = Depends(get_api_key)):

    if check_project_key("photos", apikey):

        output = {"results": []}
        albums = list(col_albums.find( {"name": re.compile(q)} ))

        for album in albums:
            output["results"].append( {"id": album["_id"].__str__(), "name": album["name"]} )

        return UJSONResponse(output)

    else:
        raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail=configGet("key_invalid", "messages"))

@app.patch("/albums/{id}", response_class=UJSONResponse, include_in_schema=True)
async def album_patch(id: str, name: Union[str, None] = None, title: Union[str, None] = None, apikey: APIKey = Depends(get_api_key)):

    if check_project_key("photos", apikey):

        try:
            album = col_albums.find_one( {"_id": ObjectId(id)} )
            if album is None:
                raise InvalidId(id)
        except InvalidId:
            raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find an album with such id.")

        if title is not None:
            if not 2 <= len(title) <= 40:
                raise HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album title must be between 2 and 40 characters.")
        else:
            title = album["title"]

        if name is not None:
            if re.search(r"^[a-z0-9_]*$", name) is None:
                raise HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album name can only contain: a-z, 0-9 and _ characters.")
            if not 2 <= len(name) <= 20:
                raise HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album name must be between 2 and 20 characters.")
            rename(f'{configGet("data_location")}{sep}photos{sep}images{sep}{album["name"]}', f'{configGet("data_location")}{sep}photos{sep}images{sep}{name}')
            col_photos.update_many( {"album": album["name"]}, {"$set": {"album": name}} )
        else:
            name = album["name"]

        col_albums.update_one( {"_id": ObjectId(id)}, {"$set": {"name": name, "title": title}} )

        return UJSONResponse(
            {
                "name": name,
                "title": title
            }
        )

    else:
        raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail=configGet("key_invalid", "messages"))

@app.put("/albums/{id}", response_class=UJSONResponse, include_in_schema=True)
async def album_put(id: str, name: str, title: str, apikey: APIKey = Depends(get_api_key)):

    if check_project_key("photos", apikey):

        try:
            album = col_albums.find_one( {"_id": ObjectId(id)} )
            if album is None:
                raise InvalidId(id)
        except InvalidId:
            raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find an album with such id.")

        if re.search(r"^[a-z0-9_]*$", name) is None:
            raise HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album name can only contain: a-z, 0-9 and _ characters.")

        if not 2 <= len(name) <= 20:
            raise HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album name must be between 2 and 20 characters.")

        if not 2 <= len(title) <= 40:
            raise HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album title must be between 2 and 40 characters.")

        rename(f'{configGet("data_location")}{sep}photos{sep}images{sep}{album["name"]}', f'{configGet("data_location")}{sep}photos{sep}images{sep}{name}')
        col_photos.update_many( {"album": album["name"]}, {"$set": {"album": name}} )

        col_albums.update_one( {"_id": ObjectId(id)}, {"$set": {"name": name, "title": title}} )

        return UJSONResponse(
            {
                "name": name,
                "title": title
            }
        )

    else:
        raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail=configGet("key_invalid", "messages"))

# NOTE: this path is singular ("/album/{id}"), unlike the other album endpoints
@app.delete("/album/{id}", response_class=UJSONResponse, include_in_schema=True)
async def album_delete(id: str, apikey: APIKey = Depends(get_api_key)):

    if check_project_key("photos", apikey):

        try:
            album = col_albums.find_one_and_delete( {"_id": ObjectId(id)} )
            if album is None:
                raise InvalidId(id)
        except InvalidId:
            raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find an album with such id.")

        col_photos.delete_many( {"album": album["name"]} )

        rmtree(f'{configGet("data_location")}{sep}photos{sep}images{sep}{album["name"]}')

        return Response(status_code=HTTP_204_NO_CONTENT)

    else:
        raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail=configGet("key_invalid", "messages"))
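Since name, title and apikey are plain query parameters, creating an album from a client boils down to a single POST. The sketch below is for illustration only: the base URL, album values and key are placeholders, and it assumes httpx (installed alongside fastapi[all]) or any other HTTP client.

    import httpx

    resp = httpx.post(
        "http://127.0.0.1:8000/albums",
        params={"name": "holiday_2022", "title": "Holiday 2022", "apikey": "example-key"},
    )
    print(resp.status_code, resp.json())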
145 requests/photos.py Normal file
@@ -0,0 +1,145 @@
import re
from secrets import token_urlsafe
from magic import Magic
from datetime import datetime
from os import makedirs, sep, path, remove
from modules.hasher import get_phash, get_duplicates
from modules.utils import configGet
from modules.app import app, check_project_key, get_api_key
from modules.database import col_photos, col_albums, col_tokens
from bson.objectid import ObjectId
from bson.errors import InvalidId

from fastapi import HTTPException, Depends, UploadFile
from fastapi.responses import UJSONResponse, Response
from fastapi.openapi.models import APIKey
from starlette.status import HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED, HTTP_404_NOT_FOUND, HTTP_406_NOT_ACCEPTABLE, HTTP_409_CONFLICT

# NOTE: photos are stored under data/users/sample_user/albums/..., while the album
# endpoints in requests/albums.py manage directories under configGet("data_location")
@app.post("/albums/{album}/photos", response_class=UJSONResponse, include_in_schema=True)
async def photo_upload(file: UploadFile, album: str, ignore_duplicates: bool = False, apikey: APIKey = Depends(get_api_key)):

    if check_project_key("photos", apikey):

        if col_albums.find_one( {"name": album} ) is None:
            raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=f"Provided album '{album}' does not exist.")

        # if not file.content_type.startswith("image"):
        #     raise HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Provided file is not an image, not accepting.")

        makedirs(f'data{sep}users{sep}sample_user{sep}albums{sep}{album}', exist_ok=True)

        filename = file.filename

        # avoid overwriting an existing file by appending a timestamp to the name
        if path.exists(f'data{sep}users{sep}sample_user{sep}albums{sep}{album}{sep}{file.filename}'):
            base_name = file.filename.split(".")[:-1]
            extension = file.filename.split(".")[-1]
            filename = ".".join(base_name)+f"_{int(datetime.now().timestamp())}."+extension

        with open(f'data{sep}users{sep}sample_user{sep}albums{sep}{album}{sep}{filename}', "wb") as f:
            f.write(await file.read())

        file_hash = await get_phash(f'data{sep}users{sep}sample_user{sep}albums{sep}{album}{sep}{filename}')
        duplicates = await get_duplicates(file_hash, album)

        if len(duplicates) > 0 and ignore_duplicates is False:
            return UJSONResponse(
                {
                    "detail": "Image duplicates found. Pass 'ignore_duplicates=true' to ignore.",
                    "duplicates": duplicates
                },
                status_code=HTTP_409_CONFLICT
            )

        uploaded = col_photos.insert_one( {"album": album, "hash": file_hash, "filename": filename} )

        return UJSONResponse(
            {
                "id": uploaded.inserted_id.__str__(),
                "album": album,
                "hash": file_hash,
                "filename": filename
            }
        )

    else:
        raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail=configGet("key_invalid", "messages"))

@app.get("/photos/{id}", include_in_schema=True)
async def photo_get(id: str, apikey: APIKey = Depends(get_api_key)):

    if check_project_key("photos", apikey):

        try:
            image = col_photos.find_one( {"_id": ObjectId(id)} )
            if image is None:
                raise InvalidId(id)
        except InvalidId:
            raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find an image with such id.")

        image_path = f'data{sep}users{sep}sample_user{sep}albums{sep}{image["album"]}{sep}{image["filename"]}'

        mime = Magic(mime=True).from_file(image_path)

        with open(image_path, "rb") as f:
            image_file = f.read()

        return Response(image_file, media_type=mime)

    else:
        raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail=configGet("key_invalid", "messages"))

@app.delete("/photos/{id}", include_in_schema=True)
async def photo_delete(id: str, apikey: APIKey = Depends(get_api_key)):

    if check_project_key("photos", apikey):

        try:
            image = col_photos.find_one_and_delete( {"_id": ObjectId(id)} )
            if image is None:
                raise InvalidId(id)
        except InvalidId:
            raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find an image with such id.")

        remove(f'data{sep}users{sep}sample_user{sep}albums{sep}{image["album"]}{sep}{image["filename"]}')

        return Response(status_code=HTTP_204_NO_CONTENT)

    else:
        raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail=configGet("key_invalid", "messages"))

@app.get("/albums/{album}/photos", response_class=UJSONResponse, include_in_schema=True)
async def photo_find(q: str, album: str, page: int = 1, page_size: int = 100, apikey: APIKey = Depends(get_api_key)):

    if check_project_key("photos", apikey):

        if col_albums.find_one( {"name": album} ) is None:
            raise HTTPException(status_code=HTTP_404_NOT_FOUND, detail=f"Provided album '{album}' does not exist.")

        if page <= 0 or page_size <= 0:
            raise HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Parameters 'page' and 'page_size' must be greater than or equal to 1.")

        output = {"results": []}
        skip = (page-1)*page_size
        images = list(col_photos.find({"album": album, "filename": re.compile(q)}, limit=page_size, skip=skip))

        for image in images:
            output["results"].append({"id": image["_id"].__str__(), "filename": image["filename"]})

        # if there are more matches than fit on this page, store a continuation token
        if col_photos.count_documents( {"album": album, "filename": re.compile(q)} ) > page*page_size:
            token = str(token_urlsafe(32))
            col_tokens.insert_one( {"token": token, "query": q, "album": album, "page": page+1, "page_size": page_size, "apikey": apikey} )
            # NOTE: this URL does not match any route in this commit; the token route below is /photos/token/{token}
            output["next_page"] = f"https://api.end-play.xyz/photoFindToken?token={token}"  # type: ignore

        return UJSONResponse(output)

    else:
        raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail=configGet("key_invalid", "messages"))

@app.get("/photos/token/{token}", response_class=UJSONResponse, include_in_schema=True)
async def photo_find_token(token: str):

    found_record = col_tokens.find_one( {"token": token} )

    if found_record is None:
        raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail="Invalid search token.")

    return await photo_find(q=found_record["query"], album=found_record["album"], page=found_record["page"], page_size=found_record["page_size"], apikey=found_record["apikey"])
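Uploads are plain multipart requests against the album path; the query string carries the key. A hedged client sketch (base URL, file name, album and key are placeholders; httpx assumed as above):

    import httpx

    with open("example.jpg", "rb") as f:
        resp = httpx.post(
            "http://127.0.0.1:8000/albums/holiday_2022/photos",
            params={"apikey": "example-key"},
            files={"file": ("example.jpg", f, "image/jpeg")},
        )
    print(resp.status_code, resp.json())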
6 requirements.txt Normal file
@@ -0,0 +1,6 @@
fastapi[all]
pymongo==4.3.3
ujson~=5.6.0
scipy~=1.9.3
python-magic~=0.4.27
opencv-python~=4.6.0.66