Compare commits

...

2 Commits

Author SHA1 Message Date
ea1b92015d Support for geo search 2023-01-02 15:08:46 +01:00
075f08a8c1 Added EXIF location extractor 2023-01-02 13:10:10 +01:00
4 changed files with 73 additions and 8 deletions

View File

@ -1,10 +1,12 @@
import re import re
import pickle import pickle
from secrets import token_urlsafe from secrets import token_urlsafe
from typing import List, Union
from magic import Magic from magic import Magic
from datetime import datetime, timedelta from datetime import datetime, timedelta
from os import makedirs, path, remove, system from os import makedirs, path, remove, system
from classes.models import Photo, SearchResults from classes.models import Photo, SearchResults
from modules.exif_reader import extract_location
from modules.hasher import get_phash, get_duplicates from modules.hasher import get_phash, get_duplicates
from modules.scheduler import scheduler from modules.scheduler import scheduler
from modules.security import User, get_current_active_user from modules.security import User, get_current_active_user
@ -15,7 +17,7 @@ from bson.errors import InvalidId
from fastapi import HTTPException, UploadFile, Security from fastapi import HTTPException, UploadFile, Security
from fastapi.responses import UJSONResponse, Response from fastapi.responses import UJSONResponse, Response
from starlette.status import HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT from starlette.status import HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT, HTTP_422_UNPROCESSABLE_ENTITY
from modules.utils import logWrite from modules.utils import logWrite
@ -72,7 +74,8 @@ async def photo_upload(file: UploadFile, album: str, ignore_duplicates: bool = F
status_code=HTTP_409_CONFLICT status_code=HTTP_409_CONFLICT
) )
uploaded = col_photos.insert_one( {"user": current_user.user, "album": album, "hash": file_hash, "filename": filename} ) coords = extract_location(path.join("data", "users", current_user.user, "albums", album, filename))
uploaded = col_photos.insert_one( {"user": current_user.user, "album": album, "hash": file_hash, "filename": filename, "location": [coords["lng"], coords["lat"], coords["alt"]]} )
if compress is True: if compress is True:
scheduler.add_job(compress_image, trigger="date", run_date=datetime.now()+timedelta(seconds=1), args=[path.join("data", "users", current_user.user, "albums", album, filename)]) scheduler.add_job(compress_image, trigger="date", run_date=datetime.now()+timedelta(seconds=1), args=[path.join("data", "users", current_user.user, "albums", album, filename)])
@ -124,7 +127,7 @@ async def photo_delete(id: str, current_user: User = Security(get_current_active
return Response(status_code=HTTP_204_NO_CONTENT) return Response(status_code=HTTP_204_NO_CONTENT)
@app.get("/albums/{album}/photos", response_class=UJSONResponse, response_model=SearchResults, description="Find a photo by filename") @app.get("/albums/{album}/photos", response_class=UJSONResponse, response_model=SearchResults, description="Find a photo by filename")
async def photo_find(q: str, album: str, page: int = 1, page_size: int = 100, current_user: User = Security(get_current_active_user, scopes=["photos.list"])): async def photo_find(album: str, q: Union[str, None] = None, page: int = 1, page_size: int = 100, lat: Union[float, None] = None, lng: Union[float, None] = None, radius: Union[int, None] = None, current_user: User = Security(get_current_active_user, scopes=["photos.list"])):
if col_albums.find_one( {"user": current_user.user, "name": album} ) is None: if col_albums.find_one( {"user": current_user.user, "name": album} ) is None:
return HTTPException(status_code=HTTP_404_NOT_FOUND, detail=f"Provided album '{album}' does not exist.") return HTTPException(status_code=HTTP_404_NOT_FOUND, detail=f"Provided album '{album}' does not exist.")
@ -134,12 +137,24 @@ async def photo_find(q: str, album: str, page: int = 1, page_size: int = 100, cu
output = {"results": []} output = {"results": []}
skip = (page-1)*page_size skip = (page-1)*page_size
images = list(col_photos.find({"user": current_user.user, "album": album, "filename": re.compile(q)}, limit=page_size, skip=skip))
radius = 5000 if radius is None else radius
if (lat is not None) and (lng is not None):
db_query = {"user": current_user.user, "album": album, "location": { "$nearSphere": {"$geometry": {"type": "Point", "coordinates": [lng, lat]}, "$maxDistance": radius} } }
db_query_count = {"user": current_user.user, "album": album, "location": { "$geoWithin": { "$centerSphere": [ [lng, lat], radius ] } } }
elif q is None:
raise HTTPException(status_code=HTTP_422_UNPROCESSABLE_ENTITY, detail="You must provide query or coordinates to look for photos")
else:
db_query = {"user": current_user.user, "album": album, "filename": re.compile(q)}
db_query_count = {"user": current_user.user, "album": album, "filename": re.compile(q)}
images = list(col_photos.find(db_query, limit=page_size, skip=skip))
for image in images: for image in images:
output["results"].append({"id": image["_id"].__str__(), "filename": image["filename"]}) output["results"].append({"id": image["_id"].__str__(), "filename": image["filename"]})
if col_photos.count_documents( {"user": current_user.user, "album": album, "filename": re.compile(q)} ) > page*page_size: if col_photos.count_documents( db_query_count ) > page*page_size:
token = str(token_urlsafe(32)) token = str(token_urlsafe(32))
col_tokens.insert_one( {"token": token, "query": q, "album": album, "page": page+1, "page_size": page_size, "user": pickle.dumps(current_user)} ) col_tokens.insert_one( {"token": token, "query": q, "album": album, "page": page+1, "page_size": page_size, "user": pickle.dumps(current_user)} )
output["next_page"] = f"/albums/{album}/photos/token?token={token}" # type: ignore output["next_page"] = f"/albums/{album}/photos/token?token={token}" # type: ignore

View File

@ -1,5 +1,5 @@
from modules.utils import configGet from modules.utils import configGet
from pymongo import MongoClient from pymongo import MongoClient, GEOSPHERE
db_config = configGet("database") db_config = configGet("database")
@ -32,4 +32,6 @@ col_users = db.get_collection("users")
col_albums = db.get_collection("albums") col_albums = db.get_collection("albums")
col_photos = db.get_collection("photos") col_photos = db.get_collection("photos")
col_videos = db.get_collection("videos") col_videos = db.get_collection("videos")
col_tokens = db.get_collection("tokens") col_tokens = db.get_collection("tokens")
col_photos.create_index([("location", GEOSPHERE)])

47
modules/exif_reader.py Normal file
View File

@ -0,0 +1,47 @@
from exif import Image
def decimal_coords(coords: tuple, ref: str) -> float:
    """Convert EXIF DMS (degrees/minutes/seconds) GPS data to decimal degrees.

    ### Args:
        * coords (`tuple`): Three numbers — degrees, minutes, seconds — as
          provided by EXIF tags such as `gps_latitude`/`gps_longitude`.
        * ref (`str`): Hemisphere reference letter: "N", "S", "E" or "W".

    ### Returns:
        * float: Decimal degrees, negated for southern/western hemispheres,
          rounded to 5 decimal places (roughly 1 m of precision).
    """
    decimal_degrees = coords[0] + coords[1] / 60 + coords[2] / 3600

    # South and West are negative in signed decimal notation
    if ref in ("S", "W"):
        decimal_degrees = -decimal_degrees

    return round(decimal_degrees, 5)
def extract_location(filepath: str) -> dict:
    """Read GPS location data from an image's EXIF metadata.

    ### Args:
        * filepath (`str`): Path to the image file.

    ### Returns:
        * dict: `{ "lng": float, "lat": float, "alt": float }`. All three
          values are `0.0` when the image carries no EXIF data or no usable
          GPS coordinate tags.
    """
    output = {
        "lng": 0.0,
        "lat": 0.0,
        "alt": 0.0
    }

    with open(filepath, "rb") as src:
        img = Image(src)

    if img.has_exif is False:
        return output

    # Resolve both coordinates before storing either one: the original code
    # assigned them sequentially inside one try block, so a missing latitude
    # tag could leave a longitude with no latitude — a bogus half-location.
    try:
        lng = decimal_coords(img.gps_longitude, img.gps_longitude_ref)
        lat = decimal_coords(img.gps_latitude, img.gps_latitude_ref)
    except AttributeError:
        # No complete coordinate pair available — keep the zeroed defaults
        return output

    output["lng"] = lng
    output["lat"] = lat

    # Altitude is optional; a missing tag must not discard valid coordinates
    try:
        output["alt"] = img.gps_altitude
    except AttributeError:
        pass

    return output

View File

@ -6,4 +6,5 @@ python-magic~=0.4.27
opencv-python~=4.6.0.66 opencv-python~=4.6.0.66
python-jose[cryptography]~=3.3.0 python-jose[cryptography]~=3.3.0
passlib~=1.7.4 passlib~=1.7.4
apscheduler~=3.9.1.post1 apscheduler~=3.9.1.post1
exif==1.4.2