Compare commits: 0f1c6edf0f...v0.6.1
233 Commits
Author | SHA1 | Date | |
---|---|---|---|
00af3433e9 | |||
31da6b29ca | |||
2884d989a8 | |||
ae984a20e5 | |||
c62359c9c2 | |||
560ef5c986 | |||
f0587d0422 | |||
381c6fd0d3 | |||
b8a0ff79ec | |||
51253d82af | |||
6cdc804d30 | |||
fe4cd2dc48 | |||
b7def0fe64 | |||
921119ee75 | |||
bebf1f8541 | |||
16afeb5a95 | |||
764dfe540c | |||
df32ee393e | |||
936a86728a | |||
0438bd0483 | |||
ff3a18691b | |||
5a3e2fffe9 | |||
f248433e11 | |||
8c26ffc7be | |||
8a7f53d63b | |||
2c2704cd91 | |||
1a7fe795b4 | |||
d01842913d | |||
50c629c157 | |||
95bfb1fe2f | |||
b53743fb19 | |||
2103d6b003 | |||
126070bfdc | |||
b660a12e23 | |||
e1bb3e9af9 | |||
8a58902f8c | |||
8ba45faf50 | |||
78f24bc423 | |||
a148b9ac30 | |||
af936b522d | |||
50da0f78c7 | |||
bda26f5ebf | |||
3df80337b7 | |||
6955f1860a | |||
22818c4cae | |||
7e5f6c3cb7 | |||
46b6ed0e22 | |||
89cdd8ca3b | |||
50dfccef1a | |||
a84cd1c5ed | |||
2830a8e21a | |||
eee7e3a73f | |||
0de1f7f08d | |||
5227c7cd14 | |||
6fea31e3fb | |||
e8312a99d0 | |||
ab8b9bacde | |||
296b0014d5 | |||
adcf25ff15 | |||
508984442b | |||
37a6b4634c | |||
5796c6ea40 | |||
b488bbee0a | |||
43597db03c | |||
e6f3b7c4b1 | |||
e91a6ad10e | |||
aa6c5b99b1 | |||
0991734377 | |||
d4aa6558d6 | |||
186fddacef | |||
bf006a0734 | |||
9d70724d64 | |||
8b30afdf6a | |||
0db4661658 | |||
bf4fbe2302 | |||
3bb24f786d | |||
7adf849150 | |||
e6c0a53742 | |||
fa09dbc9b2 | |||
c3a9a2f40a | |||
58933a9279 | |||
bcf74089f9 | |||
891dc81271 | |||
25c902c194 | |||
5a794f7dc6 | |||
848b2f1a8e | |||
539b3b42c9 | |||
f01d2d177b | |||
5129cb449e | |||
4d6efac3c4 | |||
88b820e90d | |||
afefea6f68 | |||
e5fad5ba92 | |||
5174602c31 | |||
0043abdbad | |||
0f423166f1 | |||
b2146b965a | |||
3aa171869b | |||
126c66637e | |||
d0d127d9c0 | |||
728917b4b9 | |||
b1eb8f9aac | |||
0a30512dbc | |||
14b09d7062 | |||
ac8f2b2ba6 | |||
eab19e6783 | |||
8347a4c779 | |||
ec5d0585a2 | |||
ee53a77691 | |||
10ee56be9e | |||
91d5032fd2 | |||
3569de9363 | |||
c966a6de07 | |||
7011baff0f | |||
a1acaed6dd | |||
80ec8eb4f3 | |||
bcc7012744 | |||
e3038e4224 | |||
3b4d108d45 | |||
16fe8235f4 | |||
6cc0d3814e | |||
b0c46e0c1e | |||
7c725bf04a | |||
cff6ed17a7 | |||
e6fae57679 | |||
dfdfebe155 | |||
01b6222f6b | |||
10fb021162 | |||
4545e26f32 | |||
ab2bfd10d5 | |||
e9f3237fbb | |||
1bcca0f812 | |||
b3c9a972c8 | |||
42f125716a | |||
5e3df74052 | |||
2ff4623d5f | |||
737b4c57c0 | |||
d723bb6b80 | |||
2a7870620c | |||
b003712358 | |||
d29dfa4d3e | |||
d688d766da | |||
5cc10367b2 | |||
4b43e76822 | |||
23467a88ef | |||
88d8a38444 | |||
a5cd6a215f | |||
a6002a5e60 | |||
917048a333 | |||
6be51c5aaa | |||
840e3022b3 | |||
24f4773dd7 | |||
00d3d62762 | |||
2a29b85ad2 | |||
9bdc788078 | |||
5a5103ea9c | |||
ccf4c43bb9 | |||
19e0531a24 | |||
b46f3fb0fd | |||
d2f3d7e687 | |||
83dd4b6746 | |||
47435c6128 | |||
db77f62459 | |||
b51026b200 | |||
b30547eca8 | |||
782b489db2 | |||
d085a0e639 | |||
30d72c84ed | |||
e1e42fdb60 | |||
36169b0e77 | |||
5de935cd21 | |||
1e6afc6b0c | |||
f9e6ee9c72 | |||
f512df408f | |||
aa083811dc | |||
4d24696d3d | |||
c7cb4a6dff | |||
4060aae038 | |||
4eea82a160 | |||
4ce4264580 | |||
6feed4359a | |||
2afc82cf01 | |||
bf0046c3d5 | |||
c55a2d0d44 | |||
a380da81bb | |||
e858e7d7f4 | |||
fcbbd4f2bf | |||
77efd3be89 | |||
735a1e9261 | |||
f9df399682 | |||
47ae594079 | |||
ca9a9ce5d8 | |||
09ec0f4620 | |||
c272342b4b | |||
f6c2002811 | |||
f1a190f030 | |||
fe2ef49c74 | |||
3520912aae | |||
b285fc0668 | |||
7580478ac3 | |||
e2633a01e5 | |||
80897dd79c | |||
fa3aca30c2 | |||
c353a4a4df | |||
3bae6ef40e | |||
0722a26fb3 | |||
873e506c7d | |||
dddb5dbc12 | |||
cb6d7d9433 | |||
1cbfd6abe8 | |||
1f867630f4 | |||
96f4ab2eb9 | |||
2a100db981 | |||
ddca4a4a3d | |||
e14b3e7db3 | |||
9a48835fe4 | |||
d800bbbda5 | |||
073e26fef0 | |||
7a477876e0 | |||
29ef2cfe2b | |||
8cf78f4409 | |||
75b99251eb | |||
225c80f2f9 | |||
c693756a43 | |||
906674fcdb | |||
19b1ae6158 | |||
31d42f3ce7 | |||
6df2d274b1 | |||
ea1b92015d | |||
075f08a8c1 | |||
aa15d17883 | |||
fca317c45f | |||
91d16dab6e | |||

3 .gitignore vendored
@@ -153,5 +153,6 @@ cython_debug/
#.idea/

# Custom
.vscode
data/
.vscode/
config.json
20 .renovaterc Normal file
@@ -0,0 +1,20 @@
{
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
  "extends": [
    "config:base"
  ],
  "baseBranches": [
    "dev"
  ],
  "packageRules": [
    {
      "matchUpdateTypes": [
        "minor",
        "patch",
        "pin",
        "digest"
      ],
      "automerge": true
    }
  ]
}
113 README.md Normal file
@@ -0,0 +1,113 @@
<h1 align="center">Photos API</h1>

<p align="center">
<a href="https://git.end-play.xyz/profitroll/PhotosAPI/src/branch/master/README.md"><img alt="License: GPL" src="https://img.shields.io/badge/License-GPL-blue"></a>
<a href="https://git.end-play.xyz/profitroll/PhotosAPI"><img alt="Code style: black" src="https://img.shields.io/badge/code%20style-black-000000.svg"></a>
</p>

Small and simple API server for saving photos and videos.

## Dependencies

* [Python 3.8+](https://www.python.org) (3.9+ recommended)
* [MongoDB](https://www.mongodb.com)
* [exiftool](https://exiftool.org)
* [jpegoptim](https://github.com/tjko/jpegoptim)
* [optipng](https://optipng.sourceforge.net)

## Installation

First you need a Python interpreter, MongoDB and, optionally, git. You can also skip git and simply download the source code; that works fine too. After that you're ready to go.

> In this README I assume that you're using the default python in your
> system and that your system's PATH contains it. If your default python
> is `python3` or for example `/home/user/.local/bin/python3.9` - use it instead.

1. Install Mongo:

   Please follow the [official installation manual](https://www.mongodb.com/docs/manual/installation) for that.

2. Download Photos API:

   1. `git clone https://git.end-play.xyz/profitroll/PhotosAPI.git` (if you're using git)
   2. `cd PhotosAPI`

3. Create a virtual environment [Optional yet recommended]:

   1. Install the virtualenv module: `pip install virtualenv`
   2. Create the venv: `python -m venv .venv`
   3. Activate it using `source .venv/bin/activate` on Linux, `.venv\Scripts\activate.bat` in CMD or `.venv\Scripts\Activate.ps1` in PowerShell.

4. Install the project's dependencies:

   `python -m pip install -r requirements.txt`

5. Configure your API:

   1. Copy the file `config_example.json` to `config.json`
   2. Open `config.json` using your favorite text editor, for example `nano config.json`
   3. Change the `"database"` keys to match your MongoDB setup
   4. Set the key `"secret"` to your JWT secret. You can type in anything, but long secrets are recommended. You can also set the environment variable `PHOTOSAPI_SECRET` as an alternative
   5. Change `"external_address"` to the IP/HTTP address that should appear in responses. By default it's `"localhost"`. This is extremely useful when running behind a reverse proxy.

   After configuring everything listed above your API will be able to boot, but further configuration is possible. You can read about it in the [repository's wiki](https://git.end-play.xyz/profitroll/PhotosAPI/wiki/Configuration). There's no need to focus on that now; it makes more sense to configure it afterwards.

6. Start your API:

   You can run your API with the following command:

   `uvicorn photos_api:app --host 127.0.0.1 --port 8054`

   Learn more about available uvicorn arguments using `uvicorn --help`

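Once the server is up, you can poke it from Python. This is only a minimal sketch: the endpoint paths and query parameters follow the album and photo routes added in this changeset, while the Bearer header and the placeholder token are assumptions (obtain a real access token from the API's login flow first).

```python
import requests

BASE_URL = "http://127.0.0.1:8054"
TOKEN = "<access token>"  # assumed to come from the API's login flow

headers = {"Authorization": f"Bearer {TOKEN}"}

# Create an album; name and title are query parameters of POST /albums.
album = requests.post(
    f"{BASE_URL}/albums",
    params={"name": "landscapes", "title": "Landscapes"},
    headers=headers,
).json()
print(album)

# Upload a photo into it via POST /albums/{album}/photos.
with open("mountain.jpg", "rb") as photo:
    uploaded = requests.post(
        f"{BASE_URL}/albums/landscapes/photos",
        files={"file": photo},
        headers=headers,
    ).json()
print(uploaded)
```
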
## Upgrading

When a new version comes out, sometimes you want to upgrade your instance right away. Here's a checklist of what to do:

1. Carefully read the patch notes of the version you want to update to and of all the versions released between your current version and the one you want to upgrade to.
   Breaking changes will be marked as such, and config updates will also be described in the patch notes.
2. Make a backup of your currently working instance. This includes both the PhotosAPI and the database.
3. Download the latest version using git (`git pull` if you cloned the repo in the past) or from the releases.
4. Reconfigure the config if needed and apply the changes from the patch notes.
5. Upgrade the dependencies in your virtual environment using `pip install -r requirements.txt`.
6. Start the migration using `python photos_api.py --migrate` from your virtual environment.
7. Test that everything works, and troubleshoot or roll back if not.

## Using as a service

It's a good practice to run your API as a systemd service on Linux. Here's a quick overview of how that can be done.

1. Create a user and move your API

   You don't always need to do so, but that's a cleaner way to deploy a service.

   1. Create the service user `photosapi` using `sudo useradd -r -U photosapi`
   2. Assuming you are still in the directory `PhotosAPI`, use `cd ..` to go up a level and then move your API to a dedicated folder, for example `/opt/`: `sudo mv ./PhotosAPI /opt/`
   3. Make the user and its group own the directory using `sudo chown -R photosapi:photosapi /opt/PhotosAPI`

2. Configure the service

   Here's an example service file for PhotosAPI that uses the virtual environment:

   ```systemd
   [Unit]
   Description=Photos API
   After=network.target mongod.service
   Wants=network-online.target mongod.service

   [Service]
   Restart=always
   Type=simple
   ExecStart=/bin/bash -c 'source .venv/bin/activate && .venv/bin/uvicorn photos_api:app --port 8054'
   WorkingDirectory=/opt/PhotosAPI
   User=photosapi
   Group=photosapi

   [Install]
   WantedBy=multi-user.target
   ```

   1. Create the service by pasting the code above into `/etc/systemd/system/photos-api.service`
   2. Enable your service to start on system boot using `sudo systemctl enable photos-api.service`
   3. Start your service now using `sudo systemctl start photos-api.service`
   4. Check if it's running using `sudo systemctl status photos-api.service`
   5. If something goes wrong, check the API's logs using `sudo journalctl -u photos-api.service`
308 classes/exceptions.py Normal file
@@ -0,0 +1,308 @@
|
||||
from typing import Literal
|
||||
|
||||
from fastapi import HTTPException
|
||||
|
||||
|
||||
class AlbumNotFoundError(HTTPException):
|
||||
"""Raises HTTP 404 if no album with this ID found."""
|
||||
|
||||
def __init__(self, id: str):
|
||||
self.id = id
|
||||
self.openapi = {
|
||||
"description": "Album Does Not Exist",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Could not find album with id '{id}'."}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
class AlbumNameNotFoundError(HTTPException):
|
||||
"""Raises HTTP 404 if no album with this name found."""
|
||||
|
||||
def __init__(self, name: str):
|
||||
self.name = name
|
||||
self.openapi = {
|
||||
"description": "Album Does Not Exist",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Could not find album with name '{name}'."}
|
||||
}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=404,
|
||||
detail=self.openapi["content"]["application/json"]["example"][
|
||||
"detail"
|
||||
].format(name=self.name),
|
||||
)
|
||||
|
||||
|
||||
class AlbumAlreadyExistsError(HTTPException):
|
||||
"""Raises HTTP 409 if album with this name already exists."""
|
||||
|
||||
def __init__(self, name: str):
|
||||
self.name = name
|
||||
self.openapi = {
|
||||
"description": "Album Already Exists",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Album with name '{name}' already exists."}
|
||||
}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=409,
|
||||
detail=self.openapi["content"]["application/json"]["example"][
|
||||
"detail"
|
||||
].format(name=self.name),
|
||||
)
|
||||
|
||||
|
||||
class AlbumIncorrectError(HTTPException):
|
||||
"""Raises HTTP 406 if album's title or name is invalid."""
|
||||
|
||||
def __init__(self, place: Literal["name", "title"], error: str) -> None:
|
||||
self.place = place
|
||||
self.error = error
|
||||
self.openapi = {
|
||||
"description": "Album Name/Title Invalid",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Album {place} invalid: {error}"}
|
||||
}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=406,
|
||||
detail=self.openapi["content"]["application/json"]["example"][
|
||||
"detail"
|
||||
].format(place=self.place, error=self.error),
|
||||
)
|
||||
|
||||
|
||||
class PhotoNotFoundError(HTTPException):
|
||||
"""Raises HTTP 404 if no photo with this ID found."""
|
||||
|
||||
def __init__(self, id: str):
|
||||
self.id = id
|
||||
self.openapi = {
|
||||
"description": "Photo Does Not Exist",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Could not find photo with id '{id}'."}
|
||||
}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=404,
|
||||
detail=self.openapi["content"]["application/json"]["example"][
|
||||
"detail"
|
||||
].format(id=self.id),
|
||||
)
|
||||
|
||||
|
||||
class PhotoSearchQueryEmptyError(HTTPException):
|
||||
"""Raises HTTP 422 if no photo search query provided."""
|
||||
|
||||
def __init__(self):
|
||||
self.openapi = {
|
||||
"description": "Invalid Query",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": "You must provide query, caption or coordinates to look for photos."
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=422,
|
||||
detail=self.openapi["content"]["application/json"]["example"]["detail"],
|
||||
)
|
||||
|
||||
|
||||
class VideoNotFoundError(HTTPException):
|
||||
"""Raises HTTP 404 if no video with this ID found."""
|
||||
|
||||
def __init__(self, id: str):
|
||||
self.id = id
|
||||
self.openapi = {
|
||||
"description": "Video Does Not Exist",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Could not find video with id '{id}'."}
|
||||
}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=404,
|
||||
detail=self.openapi["content"]["application/json"]["example"][
|
||||
"detail"
|
||||
].format(id=self.id),
|
||||
)
|
||||
|
||||
|
||||
class VideoSearchQueryEmptyError(HTTPException):
|
||||
"""Raises HTTP 422 if no video search query provided."""
|
||||
|
||||
def __init__(self):
|
||||
self.openapi = {
|
||||
"description": "Invalid Query",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": "You must provide query or caption to look for videos."
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=422,
|
||||
detail=self.openapi["content"]["application/json"]["example"]["detail"],
|
||||
)
|
||||
|
||||
|
||||
class SearchLimitInvalidError(HTTPException):
|
||||
"""Raises HTTP 400 if search results limit not in valid range."""
|
||||
|
||||
def __init__(self):
|
||||
self.openapi = {
|
||||
"description": "Invalid Limit",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": "Parameter 'limit' must be greater or equal to 1."
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=400,
|
||||
detail=self.openapi["content"]["application/json"]["example"]["detail"],
|
||||
)
|
||||
|
||||
|
||||
class SearchPageInvalidError(HTTPException):
|
||||
"""Raises HTTP 400 if page or page size are not in valid range."""
|
||||
|
||||
def __init__(self):
|
||||
self.openapi = {
|
||||
"description": "Invalid Page",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": "Parameters 'page' and 'page_size' must be greater or equal to 1."
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=400,
|
||||
detail=self.openapi["content"]["application/json"]["example"]["detail"],
|
||||
)
|
||||
|
||||
|
||||
class SearchTokenInvalidError(HTTPException):
|
||||
"""Raises HTTP 401 if search token is not valid."""
|
||||
|
||||
def __init__(self):
|
||||
self.openapi = {
|
||||
"description": "Invalid Token",
|
||||
"content": {
|
||||
"application/json": {"example": {"detail": "Invalid search token."}}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=401,
|
||||
detail=self.openapi["content"]["application/json"]["example"]["detail"],
|
||||
)
|
||||
|
||||
|
||||
class UserEmailCodeInvalid(HTTPException):
|
||||
"""Raises HTTP 400 if email confirmation code is not valid."""
|
||||
|
||||
def __init__(self):
|
||||
self.openapi = {
|
||||
"description": "Invalid Email Code",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "Confirmation code is invalid."}
|
||||
}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=400,
|
||||
detail=self.openapi["content"]["application/json"]["example"]["detail"],
|
||||
)
|
||||
|
||||
|
||||
class UserAlreadyExists(HTTPException):
|
||||
"""Raises HTTP 409 if user with this name already exists."""
|
||||
|
||||
def __init__(self):
|
||||
self.openapi = {
|
||||
"description": "User Already Exists",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": "User with this username already exists."}
|
||||
}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=409,
|
||||
detail=self.openapi["content"]["application/json"]["example"]["detail"],
|
||||
)
|
||||
|
||||
|
||||
class AccessTokenInvalidError(HTTPException):
|
||||
"""Raises HTTP 401 if access token is not valid."""
|
||||
|
||||
def __init__(self):
|
||||
self.openapi = {
|
||||
"description": "Invalid Access Token",
|
||||
"content": {
|
||||
"application/json": {"example": {"detail": "Invalid access token."}}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=401,
|
||||
detail=self.openapi["content"]["application/json"]["example"]["detail"],
|
||||
)
|
||||
|
||||
|
||||
class UserCredentialsInvalid(HTTPException):
|
||||
"""Raises HTTP 401 if user credentials are not valid."""
|
||||
|
||||
def __init__(self):
|
||||
self.openapi = {
|
||||
"description": "Invalid Credentials",
|
||||
"content": {
|
||||
"application/json": {"example": {"detail": "Invalid credentials."}}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=401,
|
||||
detail=self.openapi["content"]["application/json"]["example"]["detail"],
|
||||
)
|
||||
|
||||
|
||||
class UserMediaQuotaReached(HTTPException):
|
||||
"""Raises HTTP 403 if user's quota has been reached."""
|
||||
|
||||
def __init__(self):
|
||||
self.openapi = {
|
||||
"description": "Media Quota Reached",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": "Media quota has been reached, media upload impossible."
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
super().__init__(
|
||||
status_code=403,
|
||||
detail=self.openapi["content"]["application/json"]["example"]["detail"],
|
||||
)
|
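All of these classes follow one pattern: the instance's `openapi` dict documents the error body for the interactive docs, and the raised exception is turned into the actual JSON response by a handler registered elsewhere. A condensed sketch of how the pieces fit together, modelled on the route and handler code later in this comparison (only the 404 album case shown):

```python
from fastapi import FastAPI, Request
from fastapi.responses import UJSONResponse

from classes.exceptions import AlbumNotFoundError

app = FastAPI()

# The .openapi dict built in __init__ documents the error in the OpenAPI schema...
album_get_responses = {404: AlbumNotFoundError("id").openapi}


@app.get("/albums/{id}", responses=album_get_responses)
async def album_get(id: str):
    # ...and raising the class defers the actual response to the handler below.
    raise AlbumNotFoundError(id)


@app.exception_handler(AlbumNotFoundError)
async def album_not_found_handler(request: Request, exc: AlbumNotFoundError):
    return UJSONResponse(
        status_code=404,
        content={"detail": f"Could not find album with id '{exc.id}'."},
    )
```
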
@@ -1,27 +1,82 @@
from typing import Union
from typing import List, Union

from pydantic import BaseModel


class Photo(BaseModel):
    id: str
    album: str
    hash: str
    filename: str


class PhotoPublic(BaseModel):
    id: str
    caption: str
    filename: str


class PhotoSearch(BaseModel):
    id: str
    filename: str
    caption: Union[str, None]


class Video(BaseModel):
    id: str
    album: str
    hash: str
    filename: str


class VideoPublic(BaseModel):
    id: str
    caption: str
    filename: str


class VideoSearch(BaseModel):
    id: str
    filename: str
    caption: Union[str, None]


class Album(BaseModel):
    id: str
    name: str
    title: str

class AlbumModified(BaseModel):

class AlbumSearch(BaseModel):
    id: str
    name: str
    title: str

class SearchResults(BaseModel):
    results: list
    next_page: Union[str, None] = None

class AlbumModified(BaseModel):
    name: str
    title: str
    cover: Union[str, None]


class SearchResultsAlbum(BaseModel):
    results: List[Album]
    next_page: Union[str, None]


class SearchResultsPhoto(BaseModel):
    results: List[PhotoSearch]
    next_page: Union[str, None]


class SearchResultsVideo(BaseModel):
    results: List[VideoSearch]
    next_page: Union[str, None]


class RandomSearchResultsPhoto(BaseModel):
    results: List[PhotoSearch]


class RandomSearchResultsVideo(BaseModel):
    results: List[VideoSearch]
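For reference, a minimal sketch (not part of the changeset) of how the new typed search models get populated; the IDs and filenames here are made up:

```python
from classes.models import PhotoSearch, SearchResultsPhoto

page = SearchResultsPhoto(
    results=[
        PhotoSearch(id="652f8a0c9d1e2f3a4b5c6d7e", filename="mountains.jpg", caption="Sunrise"),
        PhotoSearch(id="652f8a0c9d1e2f3a4b5c6d7f", filename="river.jpg", caption=None),
    ],
    next_page=None,  # None on the last page, otherwise the token for the next one
)
print(page.dict())
```
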
@@ -6,11 +6,31 @@
        "user": null,
        "password": null
    },
    "secret": "",
    "messages": {
        "key_expired": "API key expired",
        "key_invalid": "Invalid API key",
        "key_valid": "Valid API key",
        "bad_request": "Bad request. Read the docs at photos.end-play.xyz/docs",
        "ip_blacklisted": "Your IP is blacklisted. Make sure you are using correct API address."
        "email_confirmed": "Email confirmed. You can now log in."
    },
    "external_address": "localhost",
    "media_token_access": false,
    "media_token_valid_hours": 12,
    "registration_enabled": true,
    "registration_requires_confirmation": false,
    "default_user_quota": 10000,
    "mailer": {
        "smtp": {
            "host": "",
            "port": 0,
            "sender": "",
            "login": "",
            "password": "",
            "use_ssl": true,
            "use_tls": false
        },
        "messages": {
            "registration_confirmation": {
                "subject": "Email confirmation",
                "message": "To confirm your email please follow this link: {0}"
            }
        }
    }
}
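Keys such as `media_token_access` and `media_token_valid_hours` are read in the route code further down through `configGet` from `modules.utils`, which is not included in this comparison. Purely as an illustration, a top-level lookup could be as small as the following hypothetical helper:

```python
import json
from pathlib import Path
from typing import Any


def configGet(key: str) -> Any:
    """Hypothetical sketch: return one top-level key from config.json."""
    with open(Path("config.json"), "r", encoding="utf-8") as f:
        return json.load(f)[key]


# With the defaults above, configGet("media_token_valid_hours") would return 12.
```

The real helper may well support nested keys or caching; this is only to show where values like the 12-hour media-token lifetime come from.
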
@@ -1,151 +1,229 @@
|
||||
import re
|
||||
from os import makedirs, path, rename
|
||||
from os import makedirs, rename
|
||||
from pathlib import Path
|
||||
from shutil import rmtree
|
||||
from typing import Union
|
||||
from classes.models import Album, AlbumModified, SearchResults
|
||||
from modules.app import app
|
||||
from modules.database import col_photos, col_albums
|
||||
from modules.security import User, get_current_active_user
|
||||
from bson.objectid import ObjectId
|
||||
|
||||
from bson.errors import InvalidId
|
||||
from bson.objectid import ObjectId
|
||||
from fastapi import Security
|
||||
from fastapi.responses import Response, UJSONResponse
|
||||
from starlette.status import HTTP_204_NO_CONTENT
|
||||
|
||||
from fastapi import HTTPException, Security
|
||||
from fastapi.responses import UJSONResponse, Response
|
||||
from starlette.status import HTTP_204_NO_CONTENT, HTTP_404_NOT_FOUND, HTTP_406_NOT_ACCEPTABLE, HTTP_409_CONFLICT
|
||||
from classes.exceptions import (
|
||||
AlbumAlreadyExistsError,
|
||||
AlbumIncorrectError,
|
||||
AlbumNotFoundError,
|
||||
)
|
||||
from classes.models import Album, AlbumModified, SearchResultsAlbum
|
||||
from modules.app import app
|
||||
from modules.database import col_albums, col_photos
|
||||
from modules.security import User, get_current_active_user
|
||||
|
||||
@app.post("/albums", response_class=UJSONResponse, response_model=Album, description="Create album with name and title")
|
||||
async def album_create(name: str, title: str, current_user: User = Security(get_current_active_user, scopes=["albums.write"])):
|
||||
album_create_responses = {
|
||||
406: AlbumIncorrectError("name", "error").openapi,
|
||||
409: AlbumAlreadyExistsError("name").openapi,
|
||||
}
|
||||
|
||||
|
||||
@app.post(
|
||||
"/albums",
|
||||
description="Create album with name and title",
|
||||
response_class=UJSONResponse,
|
||||
response_model=Album,
|
||||
responses=album_create_responses,
|
||||
)
|
||||
async def album_create(
|
||||
name: str,
|
||||
title: str,
|
||||
current_user: User = Security(get_current_active_user, scopes=["albums.write"]),
|
||||
):
|
||||
if re.search(re.compile("^[a-z,0-9,_]*$"), name) is False:
|
||||
raise AlbumIncorrectError("name", "can only contain a-z, 0-9 and _ characters.")
|
||||
|
||||
if re.search(re.compile('^[a-z,0-9,_]*$'), name) is False:
|
||||
return HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album name can only contain: a-z, 0-9 and _ characters.")
|
||||
|
||||
if 2 > len(name) > 20:
|
||||
return HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album name must be >2 and <20 characters.")
|
||||
raise AlbumIncorrectError("name", "must be >2 and <20 characters.")
|
||||
|
||||
if 2 > len(title) > 40:
|
||||
return HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album title must be >2 and <40 characters.")
|
||||
raise AlbumIncorrectError("title", "must be >2 and <40 characters.")
|
||||
|
||||
if col_albums.find_one( {"name": name} ) is not None:
|
||||
return HTTPException(status_code=HTTP_409_CONFLICT, detail=f"Album with name '{name}' already exists.")
|
||||
if (await col_albums.find_one({"name": name})) is not None:
|
||||
raise AlbumAlreadyExistsError(name)
|
||||
|
||||
makedirs(path.join("data", "users", current_user.user, "albums", name), exist_ok=True)
|
||||
makedirs(Path(f"data/users/{current_user.user}/albums/{name}"), exist_ok=True)
|
||||
|
||||
uploaded = col_albums.insert_one( {"user": current_user.user, "name": name, "title": title, "cover": None} )
|
||||
|
||||
return UJSONResponse(
|
||||
{
|
||||
"id": uploaded.inserted_id.__str__(),
|
||||
"name": name,
|
||||
"title": title
|
||||
}
|
||||
uploaded = await col_albums.insert_one(
|
||||
{"user": current_user.user, "name": name, "title": title, "cover": None}
|
||||
)
|
||||
|
||||
@app.get("/albums", response_model=SearchResults, description="Find album by name")
|
||||
async def album_find(q: str, current_user: User = Security(get_current_active_user, scopes=["albums.list"])):
|
||||
return UJSONResponse(
|
||||
{"id": uploaded.inserted_id.__str__(), "name": name, "title": title}
|
||||
)
|
||||
|
||||
|
||||
@app.get("/albums", description="Find album by name", response_model=SearchResultsAlbum)
|
||||
async def album_find(
|
||||
q: str,
|
||||
current_user: User = Security(get_current_active_user, scopes=["albums.list"]),
|
||||
):
|
||||
output = {"results": []}
|
||||
albums = list(col_albums.find( {"user": current_user.user, "name": re.compile(q)} ))
|
||||
|
||||
for album in albums:
|
||||
output["results"].append( {"id": album["_id"].__str__(), "name": album["name"]} )
|
||||
async for album in col_albums.find(
|
||||
{"user": current_user.user, "name": re.compile(q)}
|
||||
):
|
||||
output["results"].append(
|
||||
{
|
||||
"id": album["_id"].__str__(),
|
||||
"name": album["name"],
|
||||
"title": album["title"],
|
||||
}
|
||||
)
|
||||
|
||||
return UJSONResponse(output)
|
||||
|
||||
@app.patch("/albums/{id}", response_class=UJSONResponse, response_model=AlbumModified, description="Modify album's name or title by id")
|
||||
async def album_patch(id: str, name: Union[str, None] = None, title: Union[str, None] = None, cover: Union[str, None] = None, current_user: User = Security(get_current_active_user, scopes=["albums.write"])):
|
||||
|
||||
album_patch_responses = {
|
||||
404: AlbumNotFoundError("id").openapi,
|
||||
406: AlbumIncorrectError("name", "error").openapi,
|
||||
}
|
||||
|
||||
|
||||
@app.patch(
|
||||
"/albums/{id}",
|
||||
description="Modify album's name or title by id",
|
||||
response_class=UJSONResponse,
|
||||
response_model=AlbumModified,
|
||||
responses=album_patch_responses,
|
||||
)
|
||||
async def album_patch(
|
||||
id: str,
|
||||
name: Union[str, None] = None,
|
||||
title: Union[str, None] = None,
|
||||
cover: Union[str, None] = None,
|
||||
current_user: User = Security(get_current_active_user, scopes=["albums.write"]),
|
||||
):
|
||||
try:
|
||||
album = col_albums.find_one( {"_id": ObjectId(id)} )
|
||||
album = await col_albums.find_one({"_id": ObjectId(id)})
|
||||
if album is None:
|
||||
raise InvalidId(id)
|
||||
except InvalidId:
|
||||
return HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find an album with such id.")
|
||||
except InvalidId as exc:
|
||||
raise AlbumNotFoundError(id) from exc
|
||||
|
||||
if title is not None:
|
||||
if 2 > len(title) > 40:
|
||||
return HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album title must be >2 and <40 characters.")
|
||||
else:
|
||||
if title is None:
|
||||
title = album["title"]
|
||||
|
||||
elif 2 > len(title) > 40:
|
||||
raise AlbumIncorrectError("title", "must be >2 and <40 characters.")
|
||||
|
||||
if name is not None:
|
||||
if re.search(re.compile('^[a-z,0-9,_]*$'), name) is False:
|
||||
return HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album name can only contain: a-z, 0-9 and _ characters.")
|
||||
if re.search(re.compile("^[a-z,0-9,_]*$"), name) is False:
|
||||
raise AlbumIncorrectError(
|
||||
"name", "can only contain a-z, 0-9 and _ characters."
|
||||
)
|
||||
if 2 > len(name) > 20:
|
||||
return HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album name must be >2 and <20 characters.")
|
||||
raise AlbumIncorrectError("name", "must be >2 and <20 characters.")
|
||||
rename(
|
||||
path.join("data", "users", current_user.user, "albums", album["name"]),
|
||||
path.join("data", "users", current_user.user, "albums", name)
|
||||
Path(f"data/users/{current_user.user}/albums/{album['name']}"),
|
||||
Path(f"data/users/{current_user.user}/albums/{name}"),
|
||||
)
|
||||
await col_photos.update_many(
|
||||
{"user": current_user.user, "album": album["name"]},
|
||||
{"$set": {"album": name}},
|
||||
)
|
||||
col_photos.update_many( {"user": current_user.user, "album": album["name"]}, {"$set": {"album": name}} )
|
||||
else:
|
||||
name = album["name"]
|
||||
|
||||
if cover is not None:
|
||||
image = col_photos.find_one( {"_id": ObjectId(cover), "album": album["name"]} )
|
||||
image = await col_photos.find_one(
|
||||
{"_id": ObjectId(cover), "album": album["name"]}
|
||||
)
|
||||
cover = image["_id"].__str__() if image is not None else album["cover"]
|
||||
else:
|
||||
cover = album["cover"]
|
||||
|
||||
col_albums.update_one( {"_id": ObjectId(id)}, {"$set": {"name": name, "title": title, "cover": cover}} )
|
||||
|
||||
return UJSONResponse(
|
||||
{
|
||||
"name": name,
|
||||
"title": title,
|
||||
"cover": cover
|
||||
}
|
||||
await col_albums.update_one(
|
||||
{"_id": ObjectId(id)}, {"$set": {"name": name, "title": title, "cover": cover}}
|
||||
)
|
||||
|
||||
@app.put("/albums/{id}", response_class=UJSONResponse, response_model=AlbumModified, description="Modify album's name and title by id")
|
||||
async def album_put(id: str, name: str, title: str, cover: str, current_user: User = Security(get_current_active_user, scopes=["albums.write"])):
|
||||
return UJSONResponse({"name": name, "title": title, "cover": cover})
|
||||
|
||||
|
||||
album_put_responses = {
|
||||
404: AlbumNotFoundError("id").openapi,
|
||||
406: AlbumIncorrectError("name", "error").openapi,
|
||||
}
|
||||
|
||||
|
||||
@app.put(
|
||||
"/albums/{id}",
|
||||
description="Modify album's name and title by id",
|
||||
response_class=UJSONResponse,
|
||||
response_model=AlbumModified,
|
||||
responses=album_put_responses,
|
||||
)
|
||||
async def album_put(
|
||||
id: str,
|
||||
name: str,
|
||||
title: str,
|
||||
cover: str,
|
||||
current_user: User = Security(get_current_active_user, scopes=["albums.write"]),
|
||||
):
|
||||
try:
|
||||
album = col_albums.find_one( {"_id": ObjectId(id)} )
|
||||
album = await col_albums.find_one({"_id": ObjectId(id)})
|
||||
if album is None:
|
||||
raise InvalidId(id)
|
||||
except InvalidId:
|
||||
return HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find an album with such id.")
|
||||
except InvalidId as exc:
|
||||
raise AlbumNotFoundError(id) from exc
|
||||
|
||||
if re.search(re.compile('^[a-z,0-9,_]*$'), name) is False:
|
||||
return HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album name can only contain: a-z, 0-9 and _ characters.")
|
||||
if re.search(re.compile("^[a-z,0-9,_]*$"), name) is False:
|
||||
raise AlbumIncorrectError("name", "can only contain a-z, 0-9 and _ characters.")
|
||||
|
||||
if 2 > len(name) > 20:
|
||||
return HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album name must be >2 and <20 characters.")
|
||||
raise AlbumIncorrectError("name", "must be >2 and <20 characters.")
|
||||
|
||||
if 2 > len(title) > 40:
|
||||
return HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Album title must be >2 and <40 characters.")
|
||||
raise AlbumIncorrectError("title", "must be >2 and <40 characters.")
|
||||
|
||||
image = await col_photos.find_one({"_id": ObjectId(cover), "album": album["name"]})
|
||||
cover = image["_id"].__str__() if image is not None else None # type: ignore
|
||||
|
||||
image = col_photos.find_one( {"_id": ObjectId(cover), "album": album["name"]} )
|
||||
cover = image["_id"].__str__() if image is not None else None # type: ignore
|
||||
|
||||
rename(
|
||||
path.join("data", "users", current_user.user, "albums", album["name"]),
|
||||
path.join("data", "users", current_user.user, "albums", name)
|
||||
Path(f"data/users/{current_user.user}/albums/{album['name']}"),
|
||||
Path(f"data/users/{current_user.user}/albums/{name}"),
|
||||
)
|
||||
|
||||
col_photos.update_many( {"user": current_user.user, "album": album["name"]}, {"$set": {"album": name}} )
|
||||
col_albums.update_one( {"_id": ObjectId(id)}, {"$set": {"name": name, "title": title, "cover": cover}} )
|
||||
|
||||
return UJSONResponse(
|
||||
{
|
||||
"name": name,
|
||||
"title": title,
|
||||
"cover": cover
|
||||
}
|
||||
await col_photos.update_many(
|
||||
{"user": current_user.user, "album": album["name"]}, {"$set": {"album": name}}
|
||||
)
|
||||
await col_albums.update_one(
|
||||
{"_id": ObjectId(id)}, {"$set": {"name": name, "title": title, "cover": cover}}
|
||||
)
|
||||
|
||||
@app.delete("/album/{id}", response_class=UJSONResponse, description="Delete album by id")
|
||||
async def album_delete(id: str, current_user: User = Security(get_current_active_user, scopes=["albums.write"])):
|
||||
return UJSONResponse({"name": name, "title": title, "cover": cover})
|
||||
|
||||
|
||||
album_delete_responses = {404: AlbumNotFoundError("id").openapi}
|
||||
|
||||
|
||||
@app.delete(
|
||||
"/album/{id}",
|
||||
description="Delete album by id",
|
||||
status_code=HTTP_204_NO_CONTENT,
|
||||
responses=album_delete_responses,
|
||||
)
|
||||
async def album_delete(
|
||||
id: str,
|
||||
current_user: User = Security(get_current_active_user, scopes=["albums.write"]),
|
||||
):
|
||||
try:
|
||||
album = col_albums.find_one_and_delete( {"_id": ObjectId(id)} )
|
||||
album = await col_albums.find_one_and_delete({"_id": ObjectId(id)})
|
||||
if album is None:
|
||||
raise InvalidId(id)
|
||||
except InvalidId:
|
||||
return HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find an album with such id.")
|
||||
|
||||
col_photos.delete_many( {"album": album["name"]} )
|
||||
except InvalidId as exc:
|
||||
raise AlbumNotFoundError(id) from exc
|
||||
|
||||
rmtree(path.join("data", "users", current_user.user, "albums", album["name"]))
|
||||
await col_photos.delete_many({"album": album["name"]})
|
||||
|
||||
return Response(status_code=HTTP_204_NO_CONTENT)
|
||||
rmtree(Path(f"data/users/{current_user.user}/albums/{album['name']}"))
|
||||
|
||||
return Response(status_code=HTTP_204_NO_CONTENT)
|
||||
|
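A pattern running through the album routes above (and the rest of this changeset) is the switch from blocking calls like `col_albums.find_one(...)` to awaited ones, with cursors consumed via `async for`. The database module itself is not part of this comparison, so the sketch below assumes Motor, MongoDB's asyncio driver, purely to illustrate the calling convention:

```python
import asyncio

from motor.motor_asyncio import AsyncIOMotorClient


async def main() -> None:
    # Connection string and database/collection names are placeholders.
    client = AsyncIOMotorClient("mongodb://localhost:27017")
    col_albums = client["photosapi"]["albums"]

    # Single lookups are awaited instead of returning a document directly.
    album = await col_albums.find_one({"name": "landscapes"})
    print(album)

    # Cursors are consumed with `async for` rather than wrapping them in list().
    async for doc in col_albums.find({"user": "demo"}):
        print(doc["name"])


asyncio.run(main())
```
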
182 extensions/exceptions.py Normal file
@@ -0,0 +1,182 @@
|
||||
from fastapi import Request
|
||||
from fastapi.responses import UJSONResponse
|
||||
from starlette.status import (
|
||||
HTTP_400_BAD_REQUEST,
|
||||
HTTP_401_UNAUTHORIZED,
|
||||
HTTP_403_FORBIDDEN,
|
||||
HTTP_404_NOT_FOUND,
|
||||
HTTP_406_NOT_ACCEPTABLE,
|
||||
HTTP_409_CONFLICT,
|
||||
HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
)
|
||||
|
||||
from classes.exceptions import (
|
||||
AccessTokenInvalidError,
|
||||
AlbumAlreadyExistsError,
|
||||
AlbumIncorrectError,
|
||||
AlbumNotFoundError,
|
||||
PhotoNotFoundError,
|
||||
PhotoSearchQueryEmptyError,
|
||||
SearchLimitInvalidError,
|
||||
SearchPageInvalidError,
|
||||
SearchTokenInvalidError,
|
||||
UserAlreadyExists,
|
||||
UserCredentialsInvalid,
|
||||
UserEmailCodeInvalid,
|
||||
UserMediaQuotaReached,
|
||||
VideoNotFoundError,
|
||||
VideoSearchQueryEmptyError,
|
||||
)
|
||||
from modules.app import app
|
||||
|
||||
|
||||
@app.exception_handler(AlbumNotFoundError)
|
||||
async def album_not_found_exception_handler(request: Request, exc: AlbumNotFoundError):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_404_NOT_FOUND,
|
||||
content={"detail": f"Could not find album with id '{exc.id}'."},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(AlbumAlreadyExistsError)
|
||||
async def album_already_exists_exception_handler(
|
||||
request: Request, exc: AlbumAlreadyExistsError
|
||||
):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_409_CONFLICT,
|
||||
content={"detail": f"Album with name '{exc.name}' already exists."},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(AlbumIncorrectError)
|
||||
async def album_incorrect_exception_handler(request: Request, exc: AlbumIncorrectError):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_406_NOT_ACCEPTABLE,
|
||||
content={"detail": f"Album {exc.place} invalid: {exc.error}"},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(PhotoNotFoundError)
|
||||
async def photo_not_found_exception_handler(request: Request, exc: PhotoNotFoundError):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_404_NOT_FOUND,
|
||||
content={"detail": f"Could not find photo with id '{exc.id}'."},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(PhotoSearchQueryEmptyError)
|
||||
async def photo_search_query_empty_exception_handler(
|
||||
request: Request, exc: PhotoSearchQueryEmptyError
|
||||
):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
content={
|
||||
"detail": "You must provide query, caption or coordinates to look for photos."
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(VideoNotFoundError)
|
||||
async def video_not_found_exception_handler(request: Request, exc: VideoNotFoundError):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_404_NOT_FOUND,
|
||||
content={"detail": f"Could not find video with id '{exc.id}'."},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(VideoSearchQueryEmptyError)
|
||||
async def video_search_query_empty_exception_handler(
|
||||
request: Request, exc: VideoSearchQueryEmptyError
|
||||
):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
content={
|
||||
"detail": "You must provide query, caption or coordinates to look for photos."
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(SearchLimitInvalidError)
|
||||
async def search_limit_invalid_exception_handler(
|
||||
request: Request, exc: SearchLimitInvalidError
|
||||
):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_400_BAD_REQUEST,
|
||||
content={
|
||||
"detail": "Parameter 'limit' must be greater or equal to 1."
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(SearchPageInvalidError)
|
||||
async def search_page_invalid_exception_handler(
|
||||
request: Request, exc: SearchPageInvalidError
|
||||
):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_401_UNAUTHORIZED,
|
||||
content={
|
||||
"detail": "Parameters 'page' and 'page_size' must be greater or equal to 1."
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(SearchTokenInvalidError)
|
||||
async def search_token_invalid_exception_handler(
|
||||
request: Request, exc: SearchTokenInvalidError
|
||||
):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_401_UNAUTHORIZED,
|
||||
content={
|
||||
"detail": "Invalid search token."
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(UserEmailCodeInvalid)
|
||||
async def user_email_code_invalid_exception_handler(
|
||||
request: Request, exc: UserEmailCodeInvalid
|
||||
):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_400_BAD_REQUEST,
|
||||
content={"detail": "Confirmation code is invalid."},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(UserAlreadyExists)
|
||||
async def user_already_exists_exception_handler(
|
||||
request: Request, exc: UserAlreadyExists
|
||||
):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_409_CONFLICT,
|
||||
content={"detail": "User with this username already exists."},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(AccessTokenInvalidError)
|
||||
async def access_token_invalid_exception_handler(
|
||||
request: Request, exc: AccessTokenInvalidError
|
||||
):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_401_UNAUTHORIZED,
|
||||
content={"detail": "Invalid access token."},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(UserCredentialsInvalid)
|
||||
async def user_credentials_invalid_exception_handler(
|
||||
request: Request, exc: UserCredentialsInvalid
|
||||
):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_401_UNAUTHORIZED,
|
||||
content={"detail": "Invalid credentials."},
|
||||
)
|
||||
|
||||
|
||||
@app.exception_handler(UserMediaQuotaReached)
|
||||
async def user_media_quota_reached_exception_handler(
|
||||
request: Request, exc: UserMediaQuotaReached
|
||||
):
|
||||
return UJSONResponse(
|
||||
status_code=HTTP_403_FORBIDDEN,
|
||||
content={"detail": "Media quota has been reached, media upload impossible."},
|
||||
)
|
@@ -1,27 +1,36 @@
from os import path
from modules.app import app
from pathlib import Path

import aiofiles
from fastapi.responses import HTMLResponse, Response

from modules.app import app


@app.get("/pages/matter.css", include_in_schema=False)
async def page_matter():
    with open(path.join("pages", "matter.css"), "r", encoding="utf-8") as f:
        output = f.read()
    async with aiofiles.open(Path("pages/matter.css"), "r", encoding="utf-8") as f:
        output = await f.read()
    return Response(content=output)


@app.get("/pages/{page}/{file}", include_in_schema=False)
async def page_assets(page:str, file: str):
    with open(path.join("pages", page, file), "r", encoding="utf-8") as f:
        output = f.read()
async def page_assets(page: str, file: str):
    async with aiofiles.open(Path(f"pages/{page}/{file}"), "r", encoding="utf-8") as f:
        output = await f.read()
    return Response(content=output)


@app.get("/", include_in_schema=False)
async def page_home():
    with open(path.join("pages", "home", "index.html"), "r", encoding="utf-8") as f:
        output = f.read()
    async with aiofiles.open(Path("pages/home/index.html"), "r", encoding="utf-8") as f:
        output = await f.read()
    return HTMLResponse(content=output)


@app.get("/register", include_in_schema=False)
async def page_register():
    with open(path.join("pages", "register", "index.html"), "r", encoding="utf-8") as f:
        output = f.read()
    return HTMLResponse(content=output)
    async with aiofiles.open(
        Path("pages/register/index.html"), "r", encoding="utf-8"
    ) as f:
        output = await f.read()
    return HTMLResponse(content=output)
@@ -1,159 +1,661 @@
|
||||
import logging
|
||||
import re
|
||||
import pickle
|
||||
from secrets import token_urlsafe
|
||||
from magic import Magic
|
||||
from datetime import datetime, timedelta
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from os import makedirs, path, remove, system
|
||||
from classes.models import Photo, SearchResults
|
||||
from modules.hasher import get_phash, get_duplicates
|
||||
from modules.scheduler import scheduler
|
||||
from modules.security import User, get_current_active_user
|
||||
from modules.app import app
|
||||
from modules.database import col_photos, col_albums, col_tokens
|
||||
from bson.objectid import ObjectId
|
||||
from pathlib import Path
|
||||
from random import randint
|
||||
from secrets import token_urlsafe
|
||||
from shutil import move
|
||||
from threading import Thread
|
||||
from typing import Union
|
||||
from uuid import uuid4
|
||||
|
||||
import aiofiles
|
||||
from bson.errors import InvalidId
|
||||
from bson.objectid import ObjectId
|
||||
from fastapi import Security, UploadFile
|
||||
from fastapi.responses import Response, UJSONResponse
|
||||
from jose import JWTError, jwt
|
||||
from magic import Magic
|
||||
from plum.exceptions import UnpackError
|
||||
from pydantic import ValidationError
|
||||
from pymongo import DESCENDING
|
||||
from starlette.status import HTTP_204_NO_CONTENT, HTTP_409_CONFLICT
|
||||
|
||||
from fastapi import HTTPException, UploadFile, Security
|
||||
from fastapi.responses import UJSONResponse, Response
|
||||
from starlette.status import HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED, HTTP_404_NOT_FOUND, HTTP_409_CONFLICT
|
||||
from classes.exceptions import (
|
||||
AccessTokenInvalidError,
|
||||
AlbumNameNotFoundError,
|
||||
PhotoNotFoundError,
|
||||
PhotoSearchQueryEmptyError,
|
||||
SearchLimitInvalidError,
|
||||
SearchPageInvalidError,
|
||||
SearchTokenInvalidError,
|
||||
UserMediaQuotaReached,
|
||||
)
|
||||
from classes.models import (
|
||||
Photo,
|
||||
PhotoPublic,
|
||||
RandomSearchResultsPhoto,
|
||||
SearchResultsPhoto,
|
||||
)
|
||||
from modules.app import app
|
||||
from modules.database import col_albums, col_photos, col_tokens, col_videos
|
||||
from modules.exif_reader import extract_location
|
||||
from modules.hasher import get_duplicates, get_phash
|
||||
from modules.scheduler import scheduler
|
||||
from modules.security import (
|
||||
ALGORITHM,
|
||||
SECRET_KEY,
|
||||
TokenData,
|
||||
User,
|
||||
create_access_token,
|
||||
get_current_active_user,
|
||||
get_user,
|
||||
)
|
||||
from modules.utils import configGet
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
from modules.utils import logWrite
|
||||
|
||||
async def compress_image(image_path: str):
|
||||
|
||||
image_type = Magic(mime=True).from_file(image_path)
|
||||
|
||||
if image_type not in ["image/jpeg", "image/png"]:
|
||||
logWrite(f"Not compressing {image_path} because its mime is '{image_type}'")
|
||||
logger.info(
|
||||
"Not compressing %s because its mime is '%s'", image_path, image_type
|
||||
)
|
||||
return
|
||||
|
||||
size_before = path.getsize(image_path) / 1024
|
||||
|
||||
system(f"exiftool -overwrite_original -all:all= -tagsFromFile @ -exif:Orientation {image_path}")
|
||||
|
||||
if image_type == "image/jpeg":
|
||||
system(f"jpegoptim {image_path} -o --max=55 -p")
|
||||
task = Thread(
|
||||
target=system,
|
||||
kwargs={"command": f'jpegoptim "{image_path}" -o --max=55 -p --strip-none'},
|
||||
)
|
||||
elif image_type == "image/png":
|
||||
system(f"optipng -o3 {image_path}")
|
||||
task = Thread(target=system, kwargs={"command": f'optipng -o3 "{image_path}"'})
|
||||
else:
|
||||
return
|
||||
|
||||
task.start()
|
||||
logger.info("Compressing '%s'...", Path(image_path).name)
|
||||
task.join()
|
||||
|
||||
size_after = path.getsize(image_path) / 1024
|
||||
logWrite(f"Compressed '{path.split(image_path)[-1]}' from {size_before} Kb to {size_after} Kb")
|
||||
logger.info(
|
||||
"Compressed '%s' from %s Kb to %s Kb",
|
||||
Path(image_path).name,
|
||||
size_before,
|
||||
size_after,
|
||||
)
|
||||
|
||||
@app.post("/albums/{album}/photos", response_class=UJSONResponse, response_model=Photo, description="Upload a photo to album")
|
||||
async def photo_upload(file: UploadFile, album: str, ignore_duplicates: bool = False, compress: bool = True, current_user: User = Security(get_current_active_user, scopes=["photos.write"])):
|
||||
|
||||
if col_albums.find_one( {"user": current_user.user, "name": album} ) is None:
|
||||
return HTTPException(status_code=HTTP_404_NOT_FOUND, detail=f"Provided album '{album}' does not exist.")
|
||||
photo_post_responses = {
|
||||
403: UserMediaQuotaReached().openapi,
|
||||
404: AlbumNameNotFoundError("name").openapi,
|
||||
409: {
|
||||
"description": "Image Duplicates Found",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {
|
||||
"detail": "Image duplicates found. Pass 'ignore_duplicates=true' to ignore.",
|
||||
"duplicates": ["string"],
|
||||
"access_token": "string",
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
# if not file.content_type.startswith("image"):
|
||||
# return HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Provided file is not an image, not accepting.")
|
||||
|
||||
makedirs(path.join("data", "users", current_user.user, "albums", album), exist_ok=True)
|
||||
@app.post(
|
||||
"/albums/{album}/photos",
|
||||
description="Upload a photo to album",
|
||||
response_class=UJSONResponse,
|
||||
response_model=Photo,
|
||||
responses=photo_post_responses,
|
||||
)
|
||||
async def photo_upload(
|
||||
file: UploadFile,
|
||||
album: str,
|
||||
ignore_duplicates: bool = False,
|
||||
compress: bool = True,
|
||||
caption: Union[str, None] = None,
|
||||
current_user: User = Security(get_current_active_user, scopes=["photos.write"]),
|
||||
):
|
||||
if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
|
||||
raise AlbumNameNotFoundError(album)
|
||||
|
||||
user_media_count = (
|
||||
await col_photos.count_documents({"user": current_user.user})
|
||||
) + (await col_videos.count_documents({"user": current_user.user}))
|
||||
|
||||
if user_media_count >= current_user.quota and not current_user.quota == -1: # type: ignore
|
||||
raise UserMediaQuotaReached()
|
||||
|
||||
makedirs(Path(f"data/users/{current_user.user}/albums/{album}"), exist_ok=True)
|
||||
|
||||
filename = file.filename
|
||||
|
||||
if path.exists(path.join("data", "users", current_user.user, "albums", album, file.filename)):
|
||||
if Path(f"data/users/{current_user.user}/albums/{album}/{file.filename}").exists():
|
||||
base_name = file.filename.split(".")[:-1]
|
||||
extension = file.filename.split(".")[-1]
|
||||
filename = ".".join(base_name)+f"_{int(datetime.now().timestamp())}."+extension
|
||||
filename = (
|
||||
".".join(base_name) + f"_{int(datetime.now().timestamp())}." + extension
|
||||
)
|
||||
|
||||
with open(path.join("data", "users", current_user.user, "albums", album, filename), "wb") as f:
|
||||
f.write(await file.read())
|
||||
async with aiofiles.open(
|
||||
Path(f"data/users/{current_user.user}/albums/{album}/{filename}"), "wb"
|
||||
) as f:
|
||||
await f.write(await file.read())
|
||||
|
||||
file_hash = await get_phash(path.join("data", "users", current_user.user, "albums", album, filename))
|
||||
file_hash = await get_phash(
|
||||
Path(f"data/users/{current_user.user}/albums/{album}/{filename}")
|
||||
)
|
||||
duplicates = await get_duplicates(file_hash, album)
|
||||
|
||||
if len(duplicates) > 0 and ignore_duplicates is False:
|
||||
if len(duplicates) > 0 and not ignore_duplicates:
|
||||
if configGet("media_token_access") is True:
|
||||
duplicates_ids = [entry["id"] for entry in duplicates]
|
||||
access_token = create_access_token(
|
||||
data={
|
||||
"sub": current_user.user,
|
||||
"scopes": ["me", "photos.read"],
|
||||
"allowed": duplicates_ids,
|
||||
},
|
||||
expires_delta=timedelta(hours=configGet("media_token_valid_hours")),
|
||||
)
|
||||
access_token_short = uuid4().hex[:12].lower()
|
||||
await col_tokens.insert_one(
|
||||
{
|
||||
"short": access_token_short,
|
||||
"access_token": access_token,
|
||||
"photos": duplicates_ids,
|
||||
}
|
||||
)
|
||||
else:
|
||||
access_token_short = None
|
||||
return UJSONResponse(
|
||||
{
|
||||
"detail": "Image duplicates found. Pass 'ignore_duplicates=true' to ignore.",
|
||||
"duplicates": duplicates
|
||||
"duplicates": duplicates,
|
||||
"access_token": access_token_short,
|
||||
},
|
||||
status_code=HTTP_409_CONFLICT
|
||||
status_code=HTTP_409_CONFLICT,
|
||||
)
|
||||
|
||||
uploaded = col_photos.insert_one( {"user": current_user.user, "album": album, "hash": file_hash, "filename": filename} )
|
||||
try:
|
||||
coords = extract_location(
|
||||
Path(f"data/users/{current_user.user}/albums/{album}/{filename}")
|
||||
)
|
||||
except (UnpackError, ValueError):
|
||||
coords = {"lng": 0.0, "lat": 0.0, "alt": 0.0}
|
||||
|
||||
if compress is True:
|
||||
scheduler.add_job(compress_image, trigger="date", run_date=datetime.now()+timedelta(seconds=1), args=[path.join("data", "users", current_user.user, "albums", album, filename)])
|
||||
uploaded = await col_photos.insert_one(
|
||||
{
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"hash": file_hash,
|
||||
"filename": filename,
|
||||
"dates": {
|
||||
"uploaded": datetime.now(tz=timezone.utc),
|
||||
"modified": datetime.now(tz=timezone.utc),
|
||||
},
|
||||
"location": [coords["lng"], coords["lat"], coords["alt"]],
|
||||
"caption": caption,
|
||||
}
|
||||
)
|
||||
|
||||
if compress:
|
||||
scheduler.add_job(
|
||||
compress_image,
|
||||
trigger="date",
|
||||
run_date=datetime.now() + timedelta(seconds=1),
|
||||
args=[Path(f"data/users/{current_user.user}/albums/{album}/{filename}")],
|
||||
)
|
||||
|
||||
return UJSONResponse(
|
||||
{
|
||||
"id": uploaded.inserted_id.__str__(),
|
||||
"album": album,
|
||||
"hash": file_hash,
|
||||
"filename": filename
|
||||
"filename": filename,
|
||||
}
|
||||
)
|
||||
|
||||
@app.get("/photos/{id}", description="Get a photo by id")
|
||||
async def photo_get(id: str, current_user: User = Security(get_current_active_user, scopes=["photos.view"])):
|
||||
|
||||
# Access to photos by token generated for example by
|
||||
# upload method when duplicates are found. Is disabled
|
||||
# by default and should remain so if not really needed.
|
||||
if configGet("media_token_access") is True:
|
||||
photo_get_token_responses = {
|
||||
401: AccessTokenInvalidError().openapi,
|
||||
404: PhotoNotFoundError("id").openapi,
|
||||
}
|
||||
|
||||
@app.get(
|
||||
"/token/photo/{token}",
|
||||
description="Get a photo by its duplicate token",
|
||||
responses=photo_get_token_responses,
|
||||
)
|
||||
async def photo_get_token(token: str, id: int):
|
||||
db_entry = await col_tokens.find_one({"short": token})
|
||||
|
||||
if db_entry is None:
|
||||
raise AccessTokenInvalidError()
|
||||
|
||||
token = db_entry["access_token"]
|
||||
id = db_entry["photos"][id]
|
||||
|
||||
try:
|
||||
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
|
||||
user: str = payload.get("sub")
|
||||
if user is None:
|
||||
raise AccessTokenInvalidError()
|
||||
token_scopes = payload.get("scopes", [])
|
||||
token_data = TokenData(scopes=token_scopes, user=user)
|
||||
except (JWTError, ValidationError) as exc:
|
||||
raise AccessTokenInvalidError() from exc
|
||||
|
||||
user_record = await get_user(user=token_data.user)
|
||||
|
||||
if id not in payload.get("allowed", []):
|
||||
raise AccessTokenInvalidError()
|
||||
|
||||
try:
|
||||
image = await col_photos.find_one({"_id": ObjectId(id)})
|
||||
if image is None:
|
||||
raise InvalidId(id)
|
||||
except InvalidId as exc:
|
||||
raise PhotoNotFoundError(id) from exc
|
||||
|
||||
image_path = Path(
|
||||
f"data/users/{user_record.user}/albums/{image['album']}/{image['filename']}"
|
||||
)
|
||||
|
||||
mime = Magic(mime=True).from_file(image_path)
|
||||
|
||||
async with aiofiles.open(image_path, "rb") as f:
|
||||
image_file = await f.read()
|
||||
|
||||
return Response(image_file, media_type=mime)
|
||||
|
||||
|
||||
photo_get_responses = {
|
||||
200: {
|
||||
"content": {
|
||||
"application/octet-stream": {
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"format": "binary",
|
||||
"contentMediaType": "image/*",
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
404: PhotoNotFoundError("id").openapi,
|
||||
}
|
||||
|
||||
|
||||
@app.get(
|
||||
"/photos/{id}",
|
||||
description="Get a photo by id",
|
||||
responses=photo_get_responses,
|
||||
response_class=Response,
|
||||
)
|
||||
async def photo_get(
|
||||
id: str,
|
||||
current_user: User = Security(get_current_active_user, scopes=["photos.read"]),
|
||||
):
|
||||
try:
|
||||
image = col_photos.find_one( {"_id": ObjectId(id)} )
|
||||
image = await col_photos.find_one({"_id": ObjectId(id)})
|
||||
if image is None:
|
||||
raise InvalidId(id)
|
||||
except InvalidId:
|
||||
return HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find an image with such id.")
|
||||
except InvalidId as exc:
|
||||
raise PhotoNotFoundError(id) from exc
|
||||
|
||||
image_path = path.join("data", "users", current_user.user, "albums", image["album"], image["filename"])
|
||||
image_path = Path(
|
||||
f"data/users/{current_user.user}/albums/{image['album']}/{image['filename']}"
|
||||
)
|
||||
|
||||
mime = Magic(mime=True).from_file(image_path)
|
||||
|
||||
with open(image_path, "rb") as f: image_file = f.read()
|
||||
async with aiofiles.open(image_path, "rb") as f:
|
||||
image_file = await f.read()
|
||||
|
||||
return Response(image_file, media_type=mime)
|
||||
|
||||
@app.delete("/photos/{id}", description="Delete a photo by id")
|
||||
async def photo_delete(id: str, current_user: User = Security(get_current_active_user, scopes=["photos.write"])):
|
||||
|
||||
photo_move_responses = {404: PhotoNotFoundError("id").openapi}
|
||||
|
||||
|
||||
@app.put(
|
||||
"/photos/{id}",
|
||||
description="Move a photo to another album",
|
||||
response_model=PhotoPublic,
|
||||
responses=photo_move_responses,
|
||||
)
|
||||
async def photo_move(
|
||||
id: str,
|
||||
album: str,
|
||||
current_user: User = Security(get_current_active_user, scopes=["photos.write"]),
|
||||
):
|
||||
try:
|
||||
image = col_photos.find_one_and_delete( {"_id": ObjectId(id)} )
|
||||
image = await col_photos.find_one({"_id": ObjectId(id)})
|
||||
if image is None:
|
||||
raise InvalidId(id)
|
||||
except InvalidId:
|
||||
return HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find an image with such id.")
|
||||
except InvalidId as exc:
|
||||
raise PhotoNotFoundError(id) from exc
|
||||
|
||||
album = col_albums.find_one( {"name": image["album"]} )
|
||||
if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
|
||||
raise AlbumNameNotFoundError(album)
|
||||
|
||||
if Path(
|
||||
f"data/users/{current_user.user}/albums/{album}/{image['filename']}"
|
||||
).exists():
|
||||
base_name = image["filename"].split(".")[:-1]
|
||||
extension = image["filename"].split(".")[-1]
|
||||
filename = (
|
||||
".".join(base_name) + f"_{int(datetime.now().timestamp())}." + extension
|
||||
)
|
||||
else:
|
||||
filename = image["filename"]
|
||||
|
||||
await col_photos.find_one_and_update(
|
||||
{"_id": ObjectId(id)},
|
||||
{
|
||||
"$set": {
|
||||
"album": album,
|
||||
"filename": filename,
|
||||
"dates.modified": datetime.now(tz=timezone.utc),
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
move(
|
||||
Path(
|
||||
f"data/users/{current_user.user}/albums/{image['album']}/{image['filename']}"
|
||||
),
|
||||
Path(f"data/users/{current_user.user}/albums/{album}/{filename}"),
|
||||
)
|
||||
|
||||
return UJSONResponse(
|
||||
{
|
||||
"id": image["_id"].__str__(),
|
||||
"caption": image["caption"],
|
||||
"filename": filename,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
photo_patch_responses = {404: PhotoNotFoundError("id").openapi}
|
||||
|
||||
|
||||
@app.patch(
|
||||
"/photos/{id}",
|
||||
description="Change properties of a photo",
|
||||
response_model=PhotoPublic,
|
||||
responses=photo_patch_responses,
|
||||
)
|
||||
async def photo_patch(
|
||||
id: str,
|
||||
caption: str,
|
||||
current_user: User = Security(get_current_active_user, scopes=["photos.write"]),
|
||||
):
|
||||
try:
|
||||
image = await col_photos.find_one({"_id": ObjectId(id)})
|
||||
if image is None:
|
||||
raise InvalidId(id)
|
||||
except InvalidId as exc:
|
||||
raise PhotoNotFoundError(id) from exc
|
||||
|
||||
await col_photos.find_one_and_update(
|
||||
{"_id": ObjectId(id)},
|
||||
{"$set": {"caption": caption, "dates.modified": datetime.now(tz=timezone.utc)}},
|
||||
)
|
||||
|
||||
return UJSONResponse(
|
||||
{
|
||||
"id": image["_id"].__str__(),
|
||||
"caption": caption,
|
||||
"filename": image["filename"],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
photo_delete_responses = {404: PhotoNotFoundError("id").openapi}
|
||||
|
||||
|
||||
@app.delete(
|
||||
"/photos/{id}",
|
||||
description="Delete a photo by id",
|
||||
status_code=HTTP_204_NO_CONTENT,
|
||||
responses=photo_delete_responses,
|
||||
)
|
||||
async def photo_delete(
|
||||
id: str,
|
||||
current_user: User = Security(get_current_active_user, scopes=["photos.write"]),
|
||||
):
|
||||
try:
|
||||
image = await col_photos.find_one_and_delete({"_id": ObjectId(id)})
|
||||
if image is None:
|
||||
raise InvalidId(id)
|
||||
except InvalidId as exc:
|
||||
raise PhotoNotFoundError(id) from exc
|
||||
|
||||
album = await col_albums.find_one({"name": image["album"]})
|
||||
|
||||
if album is not None and album["cover"] == image["_id"].__str__():
|
||||
col_albums.update_one( {"name": image["album"]}, {"$set": {"cover": None}} )
|
||||
await col_albums.update_one({"name": image["album"]}, {"$set": {"cover": None}})
|
||||
|
||||
remove(path.join("data", "users", current_user.user, "albums", image["album"], image["filename"]))
|
||||
remove(
|
||||
Path(
|
||||
f"data/users/{current_user.user}/albums/{image['album']}/{image['filename']}"
|
||||
)
|
||||
)
|
||||
|
||||
return Response(status_code=HTTP_204_NO_CONTENT)
|
||||
|
||||
@app.get("/albums/{album}/photos", response_class=UJSONResponse, response_model=SearchResults, description="Find a photo by filename")
|
||||
async def photo_find(q: str, album: str, page: int = 1, page_size: int = 100, current_user: User = Security(get_current_active_user, scopes=["photos.list"])):
|
||||
|
||||
if col_albums.find_one( {"user": current_user.user, "name": album} ) is None:
|
||||
return HTTPException(status_code=HTTP_404_NOT_FOUND, detail=f"Provided album '{album}' does not exist.")
|
||||
photo_random_responses = {
|
||||
400: SearchLimitInvalidError().openapi,
|
||||
404: AlbumNameNotFoundError("name").openapi,
|
||||
}
|
||||
|
||||
if page <= 0 or page_size <= 0:
|
||||
return HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Parameters 'page' and 'page_size' must be greater or equal to 1.")
|
||||
|
||||
@app.get(
|
||||
"/albums/{album}/photos/random",
|
||||
description="Get one random photo, optionally by caption",
|
||||
response_class=UJSONResponse,
|
||||
response_model=RandomSearchResultsPhoto,
|
||||
responses=photo_random_responses,
|
||||
)
|
||||
async def photo_random(
|
||||
album: str,
|
||||
caption: Union[str, None] = None,
|
||||
limit: int = 100,
|
||||
current_user: User = Security(get_current_active_user, scopes=["photos.list"]),
|
||||
):
|
||||
if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
|
||||
raise AlbumNameNotFoundError(album)
|
||||
|
||||
if limit <= 0:
|
||||
raise SearchLimitInvalidError()
|
||||
|
||||
output = {"results": []}
|
||||
skip = (page-1)*page_size
|
||||
images = list(col_photos.find({"user": current_user.user, "album": album, "filename": re.compile(q)}, limit=page_size, skip=skip))
|
||||
|
||||
for image in images:
|
||||
output["results"].append({"id": image["_id"].__str__(), "filename": image["filename"]})
|
||||
db_query = (
|
||||
{
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"caption": re.compile(caption),
|
||||
}
|
||||
if caption is not None
|
||||
else {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
}
|
||||
)
|
||||
|
||||
if col_photos.count_documents( {"user": current_user.user, "album": album, "filename": re.compile(q)} ) > page*page_size:
|
||||
token = str(token_urlsafe(32))
|
||||
col_tokens.insert_one( {"token": token, "query": q, "album": album, "page": page+1, "page_size": page_size, "user": pickle.dumps(current_user)} )
|
||||
output["next_page"] = f"/albums/{album}/photos/token?token={token}" # type: ignore
|
||||
else:
|
||||
output["next_page"] = None # type: ignore
|
||||
documents_count = await col_photos.count_documents(db_query)
|
||||
skip = randint(0, documents_count - 1) if documents_count > 1 else 0
|
||||
|
||||
async for image in col_photos.aggregate(
|
||||
[
|
||||
{"$match": db_query},
|
||||
{"$skip": skip},
|
||||
{"$limit": limit},
|
||||
]
|
||||
):
|
||||
output["results"].append(
|
||||
{
|
||||
"id": image["_id"].__str__(),
|
||||
"filename": image["filename"],
|
||||
"caption": image["caption"],
|
||||
}
|
||||
)
|
||||
|
||||
return UJSONResponse(output)
|
||||
|
||||
@app.get("/albums/{album}/photos/token", response_class=UJSONResponse, response_model=SearchResults, description="Find a photo by token")
|
||||
async def photo_find_token(token: str):
|
||||
|
||||
found_record = col_tokens.find_one( {"token": token} )
|
||||
photo_find_responses = {
|
||||
400: SearchPageInvalidError().openapi,
|
||||
401: SearchTokenInvalidError().openapi,
|
||||
404: AlbumNameNotFoundError("name").openapi,
|
||||
422: PhotoSearchQueryEmptyError().openapi,
|
||||
}
|
||||
|
||||
if found_record is None:
|
||||
return HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail="Invalid search token.")
|
||||
|
||||
return await photo_find(q=found_record["query"], album=found_record["album"], page=found_record["page"], page_size=found_record["page_size"], current_user=pickle.loads(found_record["user"]))
|
||||
@app.get(
|
||||
"/albums/{album}/photos",
|
||||
description="Find a photo by filename, caption, location or token",
|
||||
response_class=UJSONResponse,
|
||||
response_model=SearchResultsPhoto,
|
||||
responses=photo_find_responses,
|
||||
)
|
||||
async def photo_find(
|
||||
album: str,
|
||||
q: Union[str, None] = None,
|
||||
caption: Union[str, None] = None,
|
||||
token: Union[str, None] = None,
|
||||
page: int = 1,
|
||||
page_size: int = 100,
|
||||
lat: Union[float, None] = None,
|
||||
lng: Union[float, None] = None,
|
||||
radius: Union[int, None] = None,
|
||||
current_user: User = Security(get_current_active_user, scopes=["photos.list"]),
|
||||
):
|
||||
if token is not None:
|
||||
found_record = await col_tokens.find_one({"token": token})
|
||||
|
||||
if found_record is None:
|
||||
raise SearchTokenInvalidError()
|
||||
|
||||
return await photo_find(
|
||||
album=album,
|
||||
q=found_record["query"],
|
||||
caption=found_record["caption"],
|
||||
lat=found_record["lat"],
|
||||
lng=found_record["lng"],
|
||||
radius=found_record["radius"],
|
||||
page=found_record["page"],
|
||||
page_size=found_record["page_size"],
|
||||
current_user=current_user,
|
||||
)
|
||||
|
||||
if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
|
||||
raise AlbumNameNotFoundError(album)
|
||||
|
||||
if page <= 0 or page_size <= 0:
|
||||
raise SearchPageInvalidError()
|
||||
|
||||
output = {"results": []}
|
||||
skip = (page - 1) * page_size
|
||||
|
||||
radius = 5000 if radius is None else radius
|
||||
|
||||
if (lat is not None) and (lng is not None):
|
||||
db_query = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"location": {
|
||||
"$nearSphere": {
|
||||
"$geometry": {"type": "Point", "coordinates": [lng, lat]},
|
||||
"$maxDistance": radius,
|
||||
}
|
||||
},
|
||||
}
|
||||
db_query_count = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"location": {"$geoWithin": {"$centerSphere": [[lng, lat], radius]}},
|
||||
}
|
||||
elif q is None and caption is None:
|
||||
raise PhotoSearchQueryEmptyError()
|
||||
elif q is None:
|
||||
db_query = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"caption": re.compile(caption),
|
||||
}
|
||||
db_query_count = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"caption": re.compile(caption),
|
||||
}
|
||||
elif caption is None:
|
||||
db_query = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"filename": re.compile(q),
|
||||
}
|
||||
db_query_count = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"filename": re.compile(q),
|
||||
}
|
||||
else:
|
||||
db_query = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"filename": re.compile(q),
|
||||
"caption": re.compile(caption),
|
||||
}
|
||||
db_query_count = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"filename": re.compile(q),
|
||||
"caption": re.compile(caption),
|
||||
}
|
||||
|
||||
async for image in col_photos.find(db_query, limit=page_size, skip=skip).sort(
|
||||
"dates.uploaded", direction=DESCENDING
|
||||
):
|
||||
output["results"].append(
|
||||
{
|
||||
"id": image["_id"].__str__(),
|
||||
"filename": image["filename"],
|
||||
"caption": image["caption"],
|
||||
}
|
||||
)
|
||||
|
||||
if (await col_photos.count_documents(db_query_count)) > page * page_size:
|
||||
token = str(token_urlsafe(32))
|
||||
await col_tokens.insert_one(
|
||||
{
|
||||
"token": token,
|
||||
"query": q,
|
||||
"caption": caption,
|
||||
"lat": lat,
|
||||
"lng": lng,
|
||||
"radius": radius,
|
||||
"page": page + 1,
|
||||
"page_size": page_size,
|
||||
}
|
||||
)
|
||||
output["next_page"] = f"/albums/{album}/photos/?token={token}" # type: ignore
|
||||
else:
|
||||
output["next_page"] = None # type: ignore
|
||||
|
||||
return UJSONResponse(output)
|
||||
|
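A hedged client-side sketch of following the "next_page" search token returned by the reworked photo search above; the host, album and query values are assumptions.
# hypothetical pagination loop over /albums/{album}/photos
import requests

base = "https://photos.example.com"
url = f"{base}/albums/trip/photos"
params = {"q": "beach", "page_size": 50}
headers = {"Authorization": "Bearer <access token>"}

while url is not None:
    data = requests.get(url, params=params, headers=headers).json()
    for photo in data["results"]:
        print(photo["id"], photo["filename"])
    # "next_page" is either None or a relative URL carrying a one-shot search token
    url = f'{base}{data["next_page"]}' if data["next_page"] is not None else None
    params = {}  # the token already encodes query, page and page_size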
@@ -1,55 +1,28 @@
|
||||
from datetime import timedelta
|
||||
from modules.database import col_users
|
||||
|
||||
from fastapi import Depends
|
||||
from fastapi.security import OAuth2PasswordRequestForm
|
||||
|
||||
from classes.exceptions import UserCredentialsInvalid
|
||||
from modules.app import app
|
||||
|
||||
from fastapi import Depends, HTTPException, Response
|
||||
from starlette.status import HTTP_204_NO_CONTENT
|
||||
from fastapi.security import (
|
||||
OAuth2PasswordRequestForm,
|
||||
)
|
||||
|
||||
from modules.security import (
|
||||
ACCESS_TOKEN_EXPIRE_DAYS,
|
||||
Token,
|
||||
User,
|
||||
authenticate_user,
|
||||
create_access_token,
|
||||
get_current_active_user,
|
||||
get_password_hash
|
||||
)
|
||||
|
||||
token_post_responses = {401: UserCredentialsInvalid().openapi}
|
||||
|
||||
@app.post("/token", response_model=Token)
|
||||
|
||||
@app.post("/token", response_model=Token, responses=token_post_responses)
|
||||
async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends()):
|
||||
user = authenticate_user(form_data.username, form_data.password)
|
||||
user = await authenticate_user(form_data.username, form_data.password)
|
||||
if not user:
|
||||
raise HTTPException(status_code=400, detail="Incorrect user or password")
|
||||
raise UserCredentialsInvalid()
|
||||
access_token_expires = timedelta(days=ACCESS_TOKEN_EXPIRE_DAYS)
|
||||
access_token = create_access_token(
|
||||
data={"sub": user.user, "scopes": form_data.scopes},
|
||||
expires_delta=access_token_expires,
|
||||
)
|
||||
return {"access_token": access_token, "token_type": "bearer"}
|
||||
|
||||
|
||||
@app.get("/users/me/", response_model=User)
|
||||
async def read_users_me(current_user: User = Depends(get_current_active_user)):
|
||||
return current_user
|
||||
|
||||
|
||||
@app.post("/users", response_class=Response)
|
||||
async def create_users(user: str, email: str, password: str):
|
||||
col_users.insert_one( {"user": user, "email": email, "hash": get_password_hash(password), "disabled": True} )
|
||||
return Response(status_code=HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
# @app.get("/users/me/items/")
|
||||
# async def read_own_items(
|
||||
# current_user: User = Security(get_current_active_user, scopes=["items"])
|
||||
# ):
|
||||
# return [{"item_id": "Foo", "owner": current_user.user}]
|
||||
|
||||
|
||||
# @app.get("/status/")
|
||||
# async def read_system_status(current_user: User = Depends(get_current_user)):
|
||||
# return {"status": "ok"}
|
146
extensions/users.py
Normal file
@@ -0,0 +1,146 @@
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from uuid import uuid1
|
||||
|
||||
from fastapi import Depends, Form
|
||||
from fastapi.responses import Response, UJSONResponse
|
||||
from starlette.status import HTTP_204_NO_CONTENT
|
||||
|
||||
from classes.exceptions import (
|
||||
UserAlreadyExists,
|
||||
UserCredentialsInvalid,
|
||||
UserEmailCodeInvalid,
|
||||
)
|
||||
from modules.app import app
|
||||
from modules.database import col_albums, col_emails, col_photos, col_users, col_videos
|
||||
from modules.mailer import mail_sender
|
||||
from modules.scheduler import scheduler
|
||||
from modules.security import (
|
||||
User,
|
||||
get_current_active_user,
|
||||
get_password_hash,
|
||||
get_user,
|
||||
verify_password,
|
||||
)
|
||||
from modules.utils import configGet
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def send_confirmation(user: str, email: str):
|
||||
confirmation_code = str(uuid1())
|
||||
try:
|
||||
mail_sender.sendmail(
|
||||
from_addr=configGet("sender", "mailer", "smtp"),
|
||||
to_addrs=email,
|
||||
msg=f'From: {configGet("sender", "mailer", "smtp")}\nSubject: Email confirmation\n\n'
|
||||
+ configGet(
|
||||
"message", "mailer", "messages", "registration_confirmation"
|
||||
).format(
|
||||
configGet("external_address")
|
||||
+ f"/users/{user}/confirm?code={confirmation_code}"
|
||||
),
|
||||
)
|
||||
await col_emails.insert_one(
|
||||
{"user": user, "email": email, "used": False, "code": confirmation_code}
|
||||
)
|
||||
logger.info(
|
||||
"Sent confirmation email to '%s' with code %s", email, confirmation_code
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.error("Could not send confirmation email to '%s' due to: %s", email, exc)
|
||||
|
||||
|
||||
@app.get("/users/me/", response_model=User)
|
||||
async def user_me(current_user: User = Depends(get_current_active_user)):
|
||||
return current_user
|
||||
|
||||
|
||||
user_confirm_responses = {
|
||||
200: {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"example": {"detail": configGet("email_confirmed", "messages")}
|
||||
}
|
||||
},
|
||||
},
|
||||
400: UserEmailCodeInvalid().openapi,
|
||||
}
|
||||
if configGet("registration_requires_confirmation") is True:
|
||||
|
||||
@app.get(
|
||||
"/users/{user}/confirm",
|
||||
response_class=UJSONResponse,
|
||||
responses=user_confirm_responses,
|
||||
)
|
||||
@app.patch(
|
||||
"/users/{user}/confirm",
|
||||
response_class=UJSONResponse,
|
||||
responses=user_confirm_responses,
|
||||
)
|
||||
async def user_confirm(user: str, code: str):
|
||||
confirm_record = await col_emails.find_one(
|
||||
{"user": user, "code": code, "used": False}
|
||||
)
|
||||
if confirm_record is None:
|
||||
raise UserEmailCodeInvalid()
|
||||
await col_emails.find_one_and_update(
|
||||
{"_id": confirm_record["_id"]}, {"$set": {"used": True}}
|
||||
)
|
||||
await col_users.find_one_and_update(
|
||||
{"user": confirm_record["user"]}, {"$set": {"disabled": False}}
|
||||
)
|
||||
return UJSONResponse({"detail": configGet("email_confirmed", "messages")})
|
||||
|
||||
|
||||
user_create_responses = {409: UserAlreadyExists().openapi}
|
||||
if configGet("registration_enabled") is True:
|
||||
|
||||
@app.post(
|
||||
"/users", status_code=HTTP_204_NO_CONTENT, responses=user_create_responses
|
||||
)
|
||||
async def user_create(
|
||||
user: str = Form(), email: str = Form(), password: str = Form()
|
||||
):
|
||||
if (await col_users.find_one({"user": user})) is not None:
|
||||
raise UserAlreadyExists()
|
||||
await col_users.insert_one(
|
||||
{
|
||||
"user": user,
|
||||
"email": email,
|
||||
"quota": None,
|
||||
"hash": get_password_hash(password),
|
||||
"disabled": configGet("registration_requires_confirmation"),
|
||||
}
|
||||
)
|
||||
if configGet("registration_requires_confirmation") is True:
|
||||
scheduler.add_job(
|
||||
send_confirmation,
|
||||
trigger="date",
|
||||
run_date=datetime.now() + timedelta(seconds=1),
|
||||
kwargs={"user": user, "email": email},
|
||||
)
|
||||
return Response(status_code=HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
user_delete_responses = {401: UserCredentialsInvalid().openapi}
|
||||
|
||||
|
||||
@app.delete(
|
||||
"/users/me/", status_code=HTTP_204_NO_CONTENT, responses=user_delete_responses
|
||||
)
|
||||
async def user_delete(
|
||||
password: str = Form(), current_user: User = Depends(get_current_active_user)
|
||||
):
|
||||
user = await get_user(current_user.user)
|
||||
if not user:
|
||||
return False
|
||||
if not verify_password(password, user.hash):
|
||||
raise UserCredentialsInvalid()
|
||||
await col_users.delete_many({"user": current_user.user})
|
||||
await col_emails.delete_many({"user": current_user.user})
|
||||
await col_photos.delete_many({"user": current_user.user})
|
||||
await col_videos.delete_many({"user": current_user.user})
|
||||
await col_albums.delete_many({"user": current_user.user})
|
||||
return Response(status_code=HTTP_204_NO_CONTENT)
|
@@ -1,129 +1,457 @@
|
||||
import re
|
||||
import pickle
|
||||
from datetime import datetime, timezone
|
||||
from os import makedirs, remove
|
||||
from pathlib import Path
|
||||
from random import randint
|
||||
from secrets import token_urlsafe
|
||||
from magic import Magic
|
||||
from datetime import datetime
|
||||
from os import makedirs, path, remove
|
||||
from classes.models import Video, SearchResults
|
||||
from modules.security import User, get_current_active_user
|
||||
from modules.app import app
|
||||
from modules.database import col_videos, col_albums, col_tokens
|
||||
from bson.objectid import ObjectId
|
||||
from shutil import move
|
||||
from typing import Union
|
||||
|
||||
import aiofiles
|
||||
from bson.errors import InvalidId
|
||||
from bson.objectid import ObjectId
|
||||
from fastapi import Security, UploadFile
|
||||
from fastapi.responses import Response, UJSONResponse
|
||||
from magic import Magic
|
||||
from pymongo import DESCENDING
|
||||
from starlette.status import HTTP_204_NO_CONTENT
|
||||
|
||||
from fastapi import HTTPException, UploadFile, Security
|
||||
from fastapi.responses import UJSONResponse, Response
|
||||
from starlette.status import HTTP_204_NO_CONTENT, HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED, HTTP_404_NOT_FOUND
|
||||
from classes.exceptions import (
|
||||
AlbumNameNotFoundError,
|
||||
SearchLimitInvalidError,
|
||||
SearchPageInvalidError,
|
||||
SearchTokenInvalidError,
|
||||
UserMediaQuotaReached,
|
||||
VideoNotFoundError,
|
||||
VideoSearchQueryEmptyError,
|
||||
)
|
||||
from classes.models import (
|
||||
RandomSearchResultsVideo,
|
||||
SearchResultsVideo,
|
||||
Video,
|
||||
VideoPublic,
|
||||
)
|
||||
from modules.app import app
|
||||
from modules.database import col_albums, col_photos, col_tokens, col_videos
|
||||
from modules.security import User, get_current_active_user
|
||||
|
||||
video_post_responses = {
|
||||
403: UserMediaQuotaReached().openapi,
|
||||
404: AlbumNameNotFoundError("name").openapi,
|
||||
}
|
||||
|
||||
|
||||
@app.post("/albums/{album}/videos", response_class=UJSONResponse, response_model=Video, description="Upload a video to album")
|
||||
async def video_upload(file: UploadFile, album: str, current_user: User = Security(get_current_active_user, scopes=["videos.write"])):
|
||||
@app.post(
|
||||
"/albums/{album}/videos",
|
||||
description="Upload a video to album",
|
||||
response_class=UJSONResponse,
|
||||
response_model=Video,
|
||||
responses=video_post_responses,
|
||||
)
|
||||
async def video_upload(
|
||||
file: UploadFile,
|
||||
album: str,
|
||||
caption: Union[str, None] = None,
|
||||
current_user: User = Security(get_current_active_user, scopes=["videos.write"]),
|
||||
):
|
||||
if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
|
||||
raise AlbumNameNotFoundError(album)
|
||||
|
||||
if col_albums.find_one( {"user": current_user.user, "name": album} ) is None:
|
||||
return HTTPException(status_code=HTTP_404_NOT_FOUND, detail=f"Provided album '{album}' does not exist.")
|
||||
user_media_count = (
|
||||
await col_videos.count_documents({"user": current_user.user})
|
||||
) + (await col_photos.count_documents({"user": current_user.user}))
|
||||
|
||||
# if not file.content_type.startswith("video"):
|
||||
# return HTTPException(status_code=HTTP_406_NOT_ACCEPTABLE, detail="Provided file is not a video, not accepting.")
|
||||
if user_media_count >= current_user.quota and not current_user.quota == -1: # type: ignore
|
||||
raise UserMediaQuotaReached()
|
||||
|
||||
makedirs(path.join("data", "users", current_user.user, "albums", album), exist_ok=True)
|
||||
makedirs(Path(f"data/users/{current_user.user}/albums/{album}"), exist_ok=True)
|
||||
|
||||
filename = file.filename
|
||||
|
||||
if path.exists(path.join("data", "users", current_user.user, "albums", album, file.filename)):
|
||||
if Path(f"data/users/{current_user.user}/albums/{album}/{file.filename}").exists():
|
||||
base_name = file.filename.split(".")[:-1]
|
||||
extension = file.filename.split(".")[-1]
|
||||
filename = ".".join(base_name)+f"_{int(datetime.now().timestamp())}."+extension
|
||||
filename = (
|
||||
".".join(base_name) + f"_{int(datetime.now().timestamp())}." + extension
|
||||
)
|
||||
|
||||
with open(path.join("data", "users", current_user.user, "albums", album, filename), "wb") as f:
|
||||
f.write(await file.read())
|
||||
async with aiofiles.open(
|
||||
Path(f"data/users/{current_user.user}/albums/{album}/{filename}"), "wb"
|
||||
) as f:
|
||||
await f.write(await file.read())
|
||||
|
||||
# file_hash = await get_phash(path.join("data", "users", current_user.user, "albums", album, filename))
|
||||
# duplicates = await get_duplicates(file_hash, album)
|
||||
# Hashing and duplicates check should be here
|
||||
|
||||
# if len(duplicates) > 0 and ignore_duplicates is False:
|
||||
# return UJSONResponse(
|
||||
# {
|
||||
# "detail": "video duplicates found. Pass 'ignore_duplicates=true' to ignore.",
|
||||
# "duplicates": duplicates
|
||||
# },
|
||||
# status_code=HTTP_409_CONFLICT
|
||||
# )
|
||||
# Coords extraction should be here
|
||||
|
||||
uploaded = col_videos.insert_one( {"user": current_user.user, "album": album, "filename": filename} )
|
||||
uploaded = await col_videos.insert_one(
|
||||
{
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"filename": filename,
|
||||
"dates": {
|
||||
"uploaded": datetime.now(tz=timezone.utc),
|
||||
"modified": datetime.now(tz=timezone.utc),
|
||||
},
|
||||
"caption": caption,
|
||||
}
|
||||
)
|
||||
|
||||
return UJSONResponse(
|
||||
{
|
||||
"id": uploaded.inserted_id.__str__(),
|
||||
"album": album,
|
||||
"filename": filename
|
||||
"hash": "", # SHOULD BE DONE
|
||||
"filename": filename,
|
||||
}
|
||||
)
|
||||
|
||||
@app.get("/videos/{id}", description="Get a video by id")
|
||||
async def video_get(id: str, current_user: User = Security(get_current_active_user, scopes=["videos.view"])):
|
||||
|
||||
video_get_responses = {
|
||||
200: {
|
||||
"content": {
|
||||
"application/octet-stream": {
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"format": "binary",
|
||||
"contentMediaType": "video/*",
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
404: VideoNotFoundError("id").openapi,
|
||||
}
|
||||
|
||||
|
||||
@app.get(
|
||||
"/videos/{id}",
|
||||
description="Get a video by id",
|
||||
responses=video_get_responses,
|
||||
response_class=Response,
|
||||
)
|
||||
async def video_get(
|
||||
id: str,
|
||||
current_user: User = Security(get_current_active_user, scopes=["videos.read"]),
|
||||
):
|
||||
try:
|
||||
video = col_videos.find_one( {"_id": ObjectId(id)} )
|
||||
video = await col_videos.find_one({"_id": ObjectId(id)})
|
||||
if video is None:
|
||||
raise InvalidId(id)
|
||||
except InvalidId:
|
||||
return HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find a video with such id.")
|
||||
except InvalidId as exc:
|
||||
raise VideoNotFoundError(id) from exc
|
||||
|
||||
video_path = path.join("data", "users", current_user.user, "albums", video["album"], video["filename"])
|
||||
video_path = Path(
|
||||
f"data/users/{current_user.user}/albums/{video['album']}/{video['filename']}"
|
||||
)
|
||||
|
||||
mime = Magic(mime=True).from_file(video_path)
|
||||
|
||||
with open(video_path, "rb") as f: video_file = f.read()
|
||||
async with aiofiles.open(video_path, "rb") as f:
|
||||
video_file = await f.read()
|
||||
|
||||
return Response(video_file, media_type=mime)
|
||||
return Response(content=video_file, media_type=mime)
|
||||
|
||||
@app.delete("/videos/{id}", description="Delete a video by id")
|
||||
async def video_delete(id: str, current_user: User = Security(get_current_active_user, scopes=["videos.write"])):
|
||||
|
||||
video_move_responses = {404: VideoNotFoundError("id").openapi}
|
||||
|
||||
|
||||
@app.put(
|
||||
"/videos/{id}",
|
||||
description="Move a video into another album",
|
||||
response_model=VideoPublic,
|
||||
responses=video_move_responses,
|
||||
)
|
||||
async def video_move(
|
||||
id: str,
|
||||
album: str,
|
||||
current_user: User = Security(get_current_active_user, scopes=["videos.write"]),
|
||||
):
|
||||
try:
|
||||
video = col_videos.find_one_and_delete( {"_id": ObjectId(id)} )
|
||||
video = await col_videos.find_one({"_id": ObjectId(id)})
|
||||
if video is None:
|
||||
raise InvalidId(id)
|
||||
except InvalidId:
|
||||
return HTTPException(status_code=HTTP_404_NOT_FOUND, detail="Could not find a video with such id.")
|
||||
except InvalidId as exc:
|
||||
raise VideoNotFoundError(id) from exc
|
||||
|
||||
album = col_albums.find_one( {"name": video["album"]} )
|
||||
if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
|
||||
raise AlbumNameNotFoundError(album)
|
||||
|
||||
remove(path.join("data", "users", current_user.user, "albums", video["album"], video["filename"]))
|
||||
if Path(
|
||||
f"data/users/{current_user.user}/albums/{album}/{video['filename']}"
|
||||
).exists():
|
||||
base_name = video["filename"].split(".")[:-1]
|
||||
extension = video["filename"].split(".")[-1]
|
||||
filename = (
|
||||
".".join(base_name) + f"_{int(datetime.now().timestamp())}." + extension
|
||||
)
|
||||
else:
|
||||
filename = video["filename"]
|
||||
|
||||
await col_videos.find_one_and_update(
|
||||
{"_id": ObjectId(id)},
|
||||
{
|
||||
"$set": {
|
||||
"album": album,
|
||||
"filename": filename,
|
||||
"dates.modified": datetime.now(tz=timezone.utc),
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
move(
|
||||
Path(
|
||||
f"data/users/{current_user.user}/albums/{video['album']}/{video['filename']}"
|
||||
),
|
||||
Path(f"data/users/{current_user.user}/albums/{album}/{filename}"),
|
||||
)
|
||||
|
||||
return UJSONResponse(
|
||||
{
|
||||
"id": video["_id"].__str__(),
|
||||
"caption": video["caption"],
|
||||
"filename": filename,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
video_patch_responses = {404: VideoNotFoundError("id").openapi}
|
||||
|
||||
|
||||
@app.patch(
|
||||
"/videos/{id}",
|
||||
description="Change properties of a video",
|
||||
response_model=VideoPublic,
|
||||
responses=video_patch_responses,
|
||||
)
|
||||
async def video_patch(
|
||||
id: str,
|
||||
caption: str,
|
||||
current_user: User = Security(get_current_active_user, scopes=["videos.write"]),
|
||||
):
|
||||
try:
|
||||
video = await col_videos.find_one({"_id": ObjectId(id)})
|
||||
if video is None:
|
||||
raise InvalidId(id)
|
||||
except InvalidId as exc:
|
||||
raise VideoNotFoundError(id) from exc
|
||||
|
||||
await col_videos.find_one_and_update(
|
||||
{"_id": ObjectId(id)},
|
||||
{"$set": {"caption": caption, "dates.modified": datetime.now(tz=timezone.utc)}},
|
||||
)
|
||||
|
||||
return UJSONResponse(
|
||||
{
|
||||
"id": video["_id"].__str__(),
|
||||
"caption": video["caption"],
|
||||
"filename": video["filename"],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
video_delete_responses = {404: VideoNotFoundError("id").openapi}
|
||||
|
||||
|
||||
@app.delete(
|
||||
"/videos/{id}",
|
||||
description="Delete a video by id",
|
||||
status_code=HTTP_204_NO_CONTENT,
|
||||
responses=video_delete_responses,
|
||||
)
|
||||
async def video_delete(
|
||||
id: str,
|
||||
current_user: User = Security(get_current_active_user, scopes=["videos.write"]),
|
||||
):
|
||||
try:
|
||||
video = await col_videos.find_one_and_delete({"_id": ObjectId(id)})
|
||||
if video is None:
|
||||
raise InvalidId(id)
|
||||
except InvalidId as exc:
|
||||
raise VideoNotFoundError(id) from exc
|
||||
|
||||
album = await col_albums.find_one({"name": video["album"]})
|
||||
|
||||
remove(
|
||||
Path(
|
||||
f"data/users/{current_user.user}/albums/{video['album']}/{video['filename']}"
|
||||
)
|
||||
)
|
||||
|
||||
return Response(status_code=HTTP_204_NO_CONTENT)
|
||||
|
||||
@app.get("/albums/{album}/videos", response_class=UJSONResponse, response_model=SearchResults, description="Find a video by filename")
|
||||
async def video_find(q: str, album: str, page: int = 1, page_size: int = 100, current_user: User = Security(get_current_active_user, scopes=["videos.list"])):
|
||||
|
||||
if col_albums.find_one( {"user": current_user.user, "name": album} ) is None:
|
||||
return HTTPException(status_code=HTTP_404_NOT_FOUND, detail=f"Provided album '{album}' does not exist.")
|
||||
video_random_responses = {
|
||||
400: SearchLimitInvalidError().openapi,
|
||||
404: AlbumNameNotFoundError("name").openapi,
|
||||
}
|
||||
|
||||
if page <= 0 or page_size <= 0:
|
||||
return HTTPException(status_code=HTTP_400_BAD_REQUEST, detail="Parameters 'page' and 'page_size' must be greater or equal to 1.")
|
||||
|
||||
@app.get(
|
||||
"/albums/{album}/videos/random",
|
||||
description="Get one random video, optionally by caption",
|
||||
response_class=UJSONResponse,
|
||||
response_model=RandomSearchResultsVideo,
|
||||
responses=video_random_responses,
|
||||
)
|
||||
async def video_random(
|
||||
album: str,
|
||||
caption: Union[str, None] = None,
|
||||
limit: int = 100,
|
||||
current_user: User = Security(get_current_active_user, scopes=["videos.list"]),
|
||||
):
|
||||
if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
|
||||
raise AlbumNameNotFoundError(album)
|
||||
|
||||
if limit <= 0:
|
||||
raise SearchLimitInvalidError()
|
||||
|
||||
output = {"results": []}
|
||||
skip = (page-1)*page_size
|
||||
videos = list(col_videos.find({"user": current_user.user, "album": album, "filename": re.compile(q)}, limit=page_size, skip=skip))
|
||||
|
||||
for video in videos:
|
||||
output["results"].append({"id": video["_id"].__str__(), "filename": video["filename"]})
|
||||
db_query = (
|
||||
{
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"caption": re.compile(caption),
|
||||
}
|
||||
if caption is not None
|
||||
else {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
}
|
||||
)
|
||||
|
||||
if col_videos.count_documents( {"user": current_user.user, "album": album, "filename": re.compile(q)} ) > page*page_size:
|
||||
token = str(token_urlsafe(32))
|
||||
col_tokens.insert_one( {"token": token, "query": q, "album": album, "page": page+1, "page_size": page_size, "user": pickle.dumps(current_user)} )
|
||||
output["next_page"] = f"/albums/{album}/videos/token?token={token}" # type: ignore
|
||||
else:
|
||||
output["next_page"] = None # type: ignore
|
||||
documents_count = await col_videos.count_documents(db_query)
|
||||
skip = randint(0, documents_count - 1) if documents_count > 1 else 0
|
||||
|
||||
async for video in col_videos.aggregate(
|
||||
[
|
||||
{"$match": db_query},
|
||||
{"$skip": skip},
|
||||
{"$limit": limit},
|
||||
]
|
||||
):
|
||||
output["results"].append(
|
||||
{
|
||||
"id": video["_id"].__str__(),
|
||||
"filename": video["filename"],
|
||||
"caption": video["caption"],
|
||||
}
|
||||
)
|
||||
|
||||
return UJSONResponse(output)
|
||||
|
||||
@app.get("/albums/{album}/videos/token", response_class=UJSONResponse, response_model=SearchResults, description="Find a video by token")
|
||||
async def video_find_token(token: str):
|
||||
|
||||
found_record = col_tokens.find_one( {"token": token} )
|
||||
video_find_responses = {
|
||||
400: SearchPageInvalidError().openapi,
|
||||
401: SearchTokenInvalidError().openapi,
|
||||
404: AlbumNameNotFoundError("name").openapi,
|
||||
422: VideoSearchQueryEmptyError().openapi,
|
||||
}
|
||||
|
||||
if found_record is None:
|
||||
return HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail="Invalid search token.")
|
||||
|
||||
return await video_find(q=found_record["query"], album=found_record["album"], page=found_record["page"], page_size=found_record["page_size"], current_user=pickle.loads(found_record["user"]))
|
||||
@app.get(
|
||||
"/albums/{album}/videos",
|
||||
description="Find a video by filename, caption or token",
|
||||
response_class=UJSONResponse,
|
||||
response_model=SearchResultsVideo,
|
||||
responses=video_find_responses,
|
||||
)
|
||||
async def video_find(
|
||||
album: str,
|
||||
q: Union[str, None] = None,
|
||||
caption: Union[str, None] = None,
|
||||
token: Union[str, None] = None,
|
||||
page: int = 1,
|
||||
page_size: int = 100,
|
||||
current_user: User = Security(get_current_active_user, scopes=["videos.list"]),
|
||||
):
|
||||
if token is not None:
|
||||
found_record = await col_tokens.find_one({"token": token})
|
||||
|
||||
if found_record is None:
|
||||
raise SearchTokenInvalidError()
|
||||
|
||||
return await video_find(
|
||||
album=album,
|
||||
q=found_record["query"],
|
||||
caption=found_record["caption"],
|
||||
page=found_record["page"],
|
||||
page_size=found_record["page_size"],
|
||||
current_user=current_user,
|
||||
)
|
||||
|
||||
if (await col_albums.find_one({"user": current_user.user, "name": album})) is None:
|
||||
raise AlbumNameNotFoundError(album)
|
||||
|
||||
if page <= 0 or page_size <= 0:
|
||||
raise SearchPageInvalidError()
|
||||
|
||||
output = {"results": []}
|
||||
skip = (page - 1) * page_size
|
||||
|
||||
if q is None and caption is None:
|
||||
raise VideoSearchQueryEmptyError()
|
||||
|
||||
if q is None:
|
||||
db_query = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"caption": re.compile(caption),
|
||||
}
|
||||
db_query_count = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"caption": re.compile(caption),
|
||||
}
|
||||
elif caption is None:
|
||||
db_query = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"filename": re.compile(q),
|
||||
}
|
||||
db_query_count = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"filename": re.compile(q),
|
||||
}
|
||||
else:
|
||||
db_query = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"filename": re.compile(q),
|
||||
"caption": re.compile(caption),
|
||||
}
|
||||
db_query_count = {
|
||||
"user": current_user.user,
|
||||
"album": album,
|
||||
"filename": re.compile(q),
|
||||
"caption": re.compile(caption),
|
||||
}
|
||||
|
||||
async for video in col_videos.find(db_query, limit=page_size, skip=skip).sort(
|
||||
"dates.uploaded", direction=DESCENDING
|
||||
):
|
||||
output["results"].append(
|
||||
{
|
||||
"id": video["_id"].__str__(),
|
||||
"filename": video["filename"],
|
||||
"caption": video["caption"],
|
||||
}
|
||||
)
|
||||
|
||||
if (await col_videos.count_documents(db_query_count)) > page * page_size:
|
||||
token = str(token_urlsafe(32))
|
||||
await col_tokens.insert_one(
|
||||
{
|
||||
"token": token,
|
||||
"query": q,
|
||||
"caption": caption,
|
||||
"page": page + 1,
|
||||
"page_size": page_size,
|
||||
}
|
||||
)
|
||||
output["next_page"] = f"/albums/{album}/videos/?token={token}" # type: ignore
|
||||
else:
|
||||
output["next_page"] = None # type: ignore
|
||||
|
||||
return UJSONResponse(output)
|
||||
|
9
migrations/202311251700.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from mongodb_migrations.base import BaseMigration
|
||||
|
||||
|
||||
class Migration(BaseMigration):
|
||||
def upgrade(self):
|
||||
self.db.users.update_many({}, {"$set": {"quota": None}})
|
||||
|
||||
def downgrade(self):
|
||||
self.db.users.update_many({}, {"$unset": {"quota": None}})
|
@@ -1,79 +1,22 @@
|
||||
from os import sep
|
||||
from fastapi import FastAPI, Security, HTTPException
|
||||
from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN
|
||||
from fastapi.security import APIKeyQuery, APIKeyHeader, APIKeyCookie
|
||||
from fastapi.openapi.docs import get_swagger_ui_html, get_redoc_html
|
||||
from starlette.status import HTTP_401_UNAUTHORIZED
|
||||
from fastapi.openapi.models import APIKey
|
||||
from fastapi import FastAPI
|
||||
from fastapi.openapi.docs import get_redoc_html, get_swagger_ui_html
|
||||
|
||||
from modules.utils import configGet, jsonLoad
|
||||
app = FastAPI(title="END PLAY Photos", docs_url=None, redoc_url=None, version="0.6")
|
||||
|
||||
app = FastAPI(title="END PLAY Photos", docs_url=None, redoc_url=None, version="0.1")
|
||||
|
||||
api_key_query = APIKeyQuery(name="apikey", auto_error=False)
|
||||
api_key_header = APIKeyHeader(name="apikey", auto_error=False)
|
||||
api_key_cookie = APIKeyCookie(name="apikey", auto_error=False)
|
||||
|
||||
|
||||
def get_all_api_keys():
|
||||
return jsonLoad(f'{configGet("data_location")}{sep}api_keys.json')
|
||||
|
||||
def get_all_expired_keys():
|
||||
return jsonLoad(f'{configGet("data_location")}{sep}expired_keys.json')
|
||||
|
||||
def check_project_key(project: str, apikey: APIKey) -> bool:
|
||||
keys = jsonLoad(f'{configGet("data_location")}{sep}api_keys.json')
|
||||
if apikey in keys:
|
||||
if keys[apikey] != []:
|
||||
if project in keys[apikey]:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
else:
|
||||
return False
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
async def get_api_key(
|
||||
api_key_query: str = Security(api_key_query),
|
||||
api_key_header: str = Security(api_key_header),
|
||||
api_key_cookie: str = Security(api_key_cookie),
|
||||
):
|
||||
|
||||
keys = get_all_api_keys()
|
||||
expired = get_all_expired_keys()
|
||||
|
||||
def is_valid(key):
|
||||
if (key in keys) or (key == "publickey"):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
if is_valid(api_key_query):
|
||||
return api_key_query
|
||||
elif is_valid(api_key_header):
|
||||
return api_key_header
|
||||
elif is_valid(api_key_cookie):
|
||||
return api_key_cookie
|
||||
else:
|
||||
if (api_key_query in expired) or (api_key_header in expired) or (api_key_cookie in expired):
|
||||
raise HTTPException(status_code=HTTP_403_FORBIDDEN, detail=configGet("key_expired", "messages"))
|
||||
else:
|
||||
raise HTTPException(status_code=HTTP_401_UNAUTHORIZED, detail=configGet("key_invalid", "messages"))
|
||||
|
||||
@app.get("/docs", include_in_schema=False)
|
||||
async def custom_swagger_ui_html():
|
||||
return get_swagger_ui_html(
|
||||
openapi_url=app.openapi_url, # type: ignore
|
||||
title=app.title + " - Documentation",
|
||||
swagger_favicon_url="/favicon.ico"
|
||||
openapi_url=app.openapi_url,
|
||||
title=f"{app.title} - Documentation",
|
||||
swagger_favicon_url="/favicon.ico",
|
||||
)
|
||||
|
||||
|
||||
@app.get("/redoc", include_in_schema=False)
|
||||
async def custom_redoc_html():
|
||||
return get_redoc_html(
|
||||
openapi_url=app.openapi_url, # type: ignore
|
||||
title=app.title + " - Documentation",
|
||||
redoc_favicon_url="/favicon.ico"
|
||||
)
|
||||
openapi_url=app.openapi_url,
|
||||
title=f"{app.title} - Documentation",
|
||||
redoc_favicon_url="/favicon.ico",
|
||||
)
|
||||
|
@@ -1,35 +1,33 @@
|
||||
from async_pymongo import AsyncClient
|
||||
from pymongo import GEOSPHERE, MongoClient
|
||||
|
||||
from modules.utils import configGet
|
||||
from pymongo import MongoClient
|
||||
|
||||
db_config = configGet("database")
|
||||
|
||||
if db_config["user"] is not None and db_config["password"] is not None:
|
||||
con_string = 'mongodb://{0}:{1}@{2}:{3}/{4}'.format(
|
||||
con_string = "mongodb://{0}:{1}@{2}:{3}/{4}".format(
|
||||
db_config["user"],
|
||||
db_config["password"],
|
||||
db_config["host"],
|
||||
db_config["port"],
|
||||
db_config["name"]
|
||||
db_config["name"],
|
||||
)
|
||||
else:
|
||||
con_string = 'mongodb://{0}:{1}/{2}'.format(
|
||||
db_config["host"],
|
||||
db_config["port"],
|
||||
db_config["name"]
|
||||
con_string = "mongodb://{0}:{1}/{2}".format(
|
||||
db_config["host"], db_config["port"], db_config["name"]
|
||||
)
|
||||
|
||||
db_client = MongoClient(con_string)
|
||||
db_client = AsyncClient(con_string)
|
||||
db_client_sync = MongoClient(con_string)
|
||||
|
||||
db = db_client.get_database(name=db_config["name"])
|
||||
|
||||
collections = db.list_collection_names()
|
||||
|
||||
for collection in ["users", "albums", "photos", "videos", "tokens"]:
|
||||
if not collection in collections:
|
||||
db.create_collection(collection)
|
||||
|
||||
col_users = db.get_collection("users")
|
||||
col_albums = db.get_collection("albums")
|
||||
col_photos = db.get_collection("photos")
|
||||
col_videos = db.get_collection("videos")
|
||||
col_tokens = db.get_collection("tokens")
|
||||
col_tokens = db.get_collection("tokens")
|
||||
col_emails = db.get_collection("emails")
|
||||
|
||||
db_client_sync[db_config["name"]]["photos"].create_index([("location", GEOSPHERE)])
|
||||
|
49
modules/exif_reader.py
Normal file
@@ -0,0 +1,49 @@
|
||||
import contextlib
|
||||
from pathlib import Path
|
||||
from typing import Mapping, Union
|
||||
|
||||
from exif import Image
|
||||
|
||||
|
||||
def decimal_coords(coords: float, ref: str) -> float:
|
||||
"""Get latitude/longitude from coord and direction reference
|
||||
|
||||
### Args:
|
||||
* coords (`float`): Coordinate as a (degrees, minutes, seconds) sequence
|
||||
* ref (`str`): Hemisphere reference ("N", "S", "E" or "W")
|
||||
|
||||
### Returns:
|
||||
* float: Decimal degrees
|
||||
"""
|
||||
decimal_degrees = coords[0] + coords[1] / 60 + coords[2] / 3600
|
||||
|
||||
if ref in {"S", "W"}:
|
||||
decimal_degrees = -decimal_degrees
|
||||
|
||||
return round(decimal_degrees, 5)
|
||||
|
||||
|
||||
def extract_location(filepath: Union[str, Path]) -> Mapping[str, float]:
|
||||
"""Get location data from image
|
||||
|
||||
### Args:
|
||||
* filepath (`str | Path`): Path to the image file
|
||||
|
||||
### Returns:
|
||||
* dict: `{ "lng": float, "lat": float, "alt": float }`
|
||||
"""
|
||||
|
||||
output = {"lng": 0.0, "lat": 0.0, "alt": 0.0}
|
||||
|
||||
with open(filepath, "rb") as src:
|
||||
img = Image(src)
|
||||
|
||||
if img.has_exif is False:
|
||||
return output
|
||||
|
||||
with contextlib.suppress(AttributeError):
|
||||
output["lng"] = decimal_coords(img.gps_longitude, img.gps_longitude_ref)
|
||||
output["lat"] = decimal_coords(img.gps_latitude, img.gps_latitude_ref)
|
||||
output["alt"] = img.gps_altitude
|
||||
|
||||
return output
|
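A minimal usage sketch of the new EXIF helpers; the sample coordinates and file name are hypothetical.
from modules.exif_reader import decimal_coords, extract_location

# (50°27'0" N, 30°31'12" E) expressed as (degrees, minutes, seconds) tuples
lat = decimal_coords((50.0, 27.0, 0.0), "N")   # -> 50.45
lng = decimal_coords((30.0, 31.0, 12.0), "E")  # -> 30.52

# full extraction from a file; returns zeroes when the image has no EXIF GPS data
coords = extract_location("example.jpg")  # {"lng": ..., "lat": ..., "alt": ...}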
@@ -1,38 +1,48 @@
|
||||
from importlib.util import module_from_spec, spec_from_file_location
|
||||
from os import getcwd, path, walk
|
||||
from pathlib import Path
|
||||
from typing import Union
|
||||
|
||||
# =================================================================================
|
||||
|
||||
#=================================================================================
|
||||
|
||||
# Import functions
|
||||
# Taken from https://stackoverflow.com/a/57892961
|
||||
def get_py_files(src):
|
||||
cwd = getcwd() # Current Working directory
|
||||
py_files = []
|
||||
cwd = getcwd() # Current Working directory
|
||||
py_files = []
|
||||
for root, dirs, files in walk(src):
|
||||
for file in files:
|
||||
if file.endswith(".py"):
|
||||
py_files.append(path.join(cwd, root, file))
|
||||
py_files.extend(
|
||||
Path(f"{cwd}/{root}/{file}") for file in files if file.endswith(".py")
|
||||
)
|
||||
return py_files
|
||||
|
||||
|
||||
def dynamic_import(module_name, py_path):
|
||||
def dynamic_import(module_name: str, py_path: str):
|
||||
try:
|
||||
module_spec = spec_from_file_location(module_name, py_path)
|
||||
module = module_from_spec(module_spec) # type: ignore
|
||||
module_spec.loader.exec_module(module) # type: ignore
|
||||
if module_spec is None:
|
||||
raise RuntimeError(
|
||||
f"Module spec from module name {module_name} and path {py_path} is None"
|
||||
)
|
||||
module = module_from_spec(module_spec)
|
||||
module_spec.loader.exec_module(module)
|
||||
return module
|
||||
except SyntaxError:
|
||||
print(f"Could not load extension {module_name} due to invalid syntax. Check logs/errors.log for details.", flush=True)
|
||||
print(
|
||||
f"Could not load extension {module_name} due to invalid syntax. Check logs/errors.log for details.",
|
||||
flush=True,
|
||||
)
|
||||
return
|
||||
except Exception as exp:
|
||||
print(f"Could not load extension {module_name} due to {exp}", flush=True)
|
||||
except Exception as exc:
|
||||
print(f"Could not load extension {module_name} due to {exc}", flush=True)
|
||||
return
|
||||
|
||||
|
||||
def dynamic_import_from_src(src, star_import = False):
|
||||
def dynamic_import_from_src(src: Union[str, Path], star_import=False):
|
||||
my_py_files = get_py_files(src)
|
||||
for py_file in my_py_files:
|
||||
module_name = path.split(py_file)[-1][:-3]
|
||||
module_name = Path(py_file).stem
|
||||
print(f"Importing {module_name} extension...", flush=True)
|
||||
imported_module = dynamic_import(module_name, py_file)
|
||||
if imported_module != None:
|
||||
@@ -44,4 +54,5 @@ def dynamic_import_from_src(src, star_import = False):
|
||||
print(f"Successfully loaded {module_name} extension", flush=True)
|
||||
return
|
||||
|
||||
#=================================================================================
|
||||
|
||||
# =================================================================================
|
||||
|
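A hedged sketch of how the refactored loader is presumably invoked at startup to pull in everything under extensions/; the call site and the loader's own module path are not shown in this hunk, so both are assumptions.
# assuming dynamic_import_from_src (defined above) is imported from its module
dynamic_import_from_src("extensions", star_import=True)  # imports every .py file found under extensions/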
@@ -1,55 +1,75 @@
|
||||
from modules.database import col_photos
|
||||
from pathlib import Path
|
||||
from typing import Any, List, Mapping, Union
|
||||
|
||||
import cv2
|
||||
import numpy as np
|
||||
from numpy.typing import NDArray
|
||||
from scipy import spatial
|
||||
import cv2
|
||||
|
||||
def hash_array_to_hash_hex(hash_array):
|
||||
# convert hash array of 0 or 1 to hash string in hex
|
||||
hash_array = np.array(hash_array, dtype = np.uint8)
|
||||
hash_str = ''.join(str(i) for i in 1 * hash_array.flatten())
|
||||
return (hex(int(hash_str, 2)))
|
||||
from modules.database import col_photos
|
||||
|
||||
|
||||
def hash_array_to_hash_hex(hash_array) -> str:
|
||||
# convert hash array of 0 or 1 to hash string in hex
|
||||
hash_array = np.array(hash_array, dtype=np.uint8)
|
||||
hash_str = "".join(str(i) for i in 1 * hash_array.flatten())
|
||||
return hex(int(hash_str, 2))
|
||||
|
||||
|
||||
def hash_hex_to_hash_array(hash_hex) -> NDArray:
|
||||
# convert hash string in hex to hash values of 0 or 1
|
||||
hash_str = int(hash_hex, 16)
|
||||
array_str = bin(hash_str)[2:]
|
||||
return np.array([i for i in array_str], dtype = np.float32)
|
||||
# convert hash string in hex to hash values of 0 or 1
|
||||
hash_str = int(hash_hex, 16)
|
||||
array_str = bin(hash_str)[2:]
|
||||
return np.array(list(array_str), dtype=np.float32)
|
||||
|
||||
def get_duplicates_cache(album: str) -> dict:
|
||||
output = {}
|
||||
for photo in col_photos.find( {"album": album} ):
|
||||
output[photo["filename"]] = [photo["_id"].__str__(), photo["hash"]]
|
||||
return output
|
||||
|
||||
async def get_phash(filepath: str) -> str:
|
||||
img = cv2.imread(filepath)
|
||||
async def get_duplicates_cache(album: str) -> Mapping[str, Any]:
|
||||
return {
|
||||
photo["filename"]: [photo["_id"].__str__(), photo["hash"]]
|
||||
async for photo in col_photos.find({"album": album})
|
||||
}
|
||||
|
||||
|
||||
async def get_phash(filepath: Union[str, Path]) -> str:
|
||||
img = cv2.imread(str(filepath))
|
||||
# resize image and convert to gray scale
|
||||
img = cv2.resize(img, (64, 64))
|
||||
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
|
||||
img = np.array(img, dtype = np.float32)
|
||||
# calculate dct of image
|
||||
img = np.array(img, dtype=np.float32)
|
||||
# calculate dct of image
|
||||
dct = cv2.dct(img)
|
||||
# to reduce hash length take only 8*8 top-left block
|
||||
# to reduce hash length take only 8*8 top-left block
|
||||
# as this block has more information than the rest
|
||||
dct_block = dct[: 8, : 8]
|
||||
dct_block = dct[:8, :8]
|
||||
# calculate mean of dct block excluding first term, i.e. dct(0, 0)
|
||||
dct_average = (dct_block.mean() * dct_block.size - dct_block[0, 0]) / (dct_block.size - 1)
|
||||
dct_average = (dct_block.mean() * dct_block.size - dct_block[0, 0]) / (
|
||||
dct_block.size - 1
|
||||
)
|
||||
# convert dct block to binary values based on dct_average
|
||||
dct_block[dct_block < dct_average] = 0.0
|
||||
dct_block[dct_block != 0] = 1.0
|
||||
# store hash value
|
||||
return hash_array_to_hash_hex(dct_block.flatten())
|
||||
|
||||
async def get_duplicates(hash: str, album: str) -> list:
|
||||
|
||||
async def get_duplicates(hash_string: str, album: str) -> List[Mapping[str, Any]]:
|
||||
duplicates = []
|
||||
cache = get_duplicates_cache(album)
|
||||
for image_name in cache.keys():
|
||||
distance = spatial.distance.hamming(
|
||||
hash_hex_to_hash_array(cache[image_name][1]),
|
||||
hash_hex_to_hash_array(hash)
|
||||
)
|
||||
print("{0:<30} {1}".format(image_name, distance), flush=True)
|
||||
if distance <= 0.25:
|
||||
duplicates.append({"id": cache[image_name][0], "filename": image_name, "difference": distance})
|
||||
return duplicates
|
||||
cache = await get_duplicates_cache(album)
|
||||
for image_name, image_object in cache.items():
|
||||
try:
|
||||
distance = spatial.distance.hamming(
|
||||
hash_hex_to_hash_array(cache[image_name][1]),
|
||||
hash_hex_to_hash_array(hash_string),
|
||||
)
|
||||
except ValueError:
|
||||
continue
|
||||
# print("{0:<30} {1}".format(image_name, distance), flush=True)
|
||||
if distance <= 0.1:
|
||||
duplicates.append(
|
||||
{
|
||||
"id": cache[image_name][0],
|
||||
"filename": image_name,
|
||||
"difference": distance,
|
||||
}
|
||||
)
|
||||
return duplicates
|
||||
|
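A hedged sketch of how the reworked hashing helpers might be called from an async upload handler; the path and album name are made up, and the helpers' module path is not visible in this hunk.
# assuming get_phash and get_duplicates (defined above) are in scope
from pathlib import Path

file_hash = await get_phash(Path("data/users/alice/albums/trip/beach.jpg"))
duplicates = await get_duplicates(file_hash, "trip")
if duplicates:
    # e.g. [{"id": "...", "filename": "beach_1700000000.jpg", "difference": 0.03}]
    print(f"{len(duplicates)} possible duplicate(s) found")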
41
modules/mailer.py
Normal file
@@ -0,0 +1,41 @@
|
||||
import logging
|
||||
from smtplib import SMTP, SMTP_SSL
|
||||
from ssl import create_default_context
|
||||
from traceback import print_exc
|
||||
|
||||
from modules.utils import configGet
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
if configGet("use_ssl", "mailer", "smtp") is True:
|
||||
mail_sender = SMTP_SSL(
|
||||
configGet("host", "mailer", "smtp"),
|
||||
configGet("port", "mailer", "smtp"),
|
||||
)
|
||||
logger.info("Initialized SMTP SSL connection")
|
||||
elif configGet("use_tls", "mailer", "smtp") is True:
|
||||
mail_sender = SMTP(
|
||||
configGet("host", "mailer", "smtp"),
|
||||
configGet("port", "mailer", "smtp"),
|
||||
)
|
||||
mail_sender.starttls(context=create_default_context())
|
||||
mail_sender.ehlo()
|
||||
logger.info("Initialized SMTP TLS connection")
|
||||
else:
|
||||
mail_sender = SMTP(
|
||||
configGet("host", "mailer", "smtp"), configGet("port", "mailer", "smtp")
|
||||
)
|
||||
mail_sender.ehlo()
|
||||
logger.info("Initialized SMTP connection")
|
||||
except Exception as exc:
|
||||
logger.error("Could not initialize SMTP connection due to: %s", exc)
|
||||
print_exc()
|
||||
|
||||
try:
|
||||
mail_sender.login(
|
||||
configGet("login", "mailer", "smtp"), configGet("password", "mailer", "smtp")
|
||||
)
|
||||
logger.info("Successfully initialized mailer")
|
||||
except Exception as exc:
|
||||
logger.error("Could not login into provided SMTP account due to: %s", exc)
|
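The configGet calls above imply roughly this nesting for the mailer settings; the exact config file format is not part of this diff, so treat the snippet below as an assumed shape with placeholder values.
# assumed config fragment, mirroring configGet("host", "mailer", "smtp") etc.
mailer_config = {
    "smtp": {
        "host": "smtp.example.com",
        "port": 465,
        "use_ssl": True,
        "use_tls": False,
        "login": "photos@example.com",
        "password": "app-password",
        "sender": "photos@example.com",
    },
    "messages": {
        "registration_confirmation": {"message": "Confirm your address: {0}"},
    },
}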
23
modules/migrator.py
Normal file
@@ -0,0 +1,23 @@
from typing import Any, Mapping

from mongodb_migrations.cli import MigrationManager
from mongodb_migrations.config import Configuration

from modules.utils import configGet


def migrate_database() -> None:
    """Apply migrations from folder `migrations/` to the database"""
    db_config: Mapping[str, Any] = configGet("database")

    manager_config = Configuration(
        {
            "mongo_host": db_config["host"],
            "mongo_port": db_config["port"],
            "mongo_database": db_config["name"],
            "mongo_username": db_config["user"],
            "mongo_password": db_config["password"],
        }
    )
    manager = MigrationManager(manager_config)
    manager.run()
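
migrate_database() simply points mongodb-migrations at the configured database and runs whatever scripts live in the default migrations/ folder. A hypothetical example of such a migration file, assuming the library's usual BaseMigration convention; the file name, collection, and fields are invented for illustration:

# migrations/20230101000000_add_quota_field.py (hypothetical file name)
from mongodb_migrations.base import BaseMigration


class Migration(BaseMigration):
    def upgrade(self):
        # give every existing user an explicit quota field
        self.db.users.update_many({"quota": {"$exists": False}}, {"$set": {"quota": None}})

    def downgrade(self):
        self.db.users.update_many({}, {"$unset": {"quota": ""}})
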
@@ -1,3 +1,3 @@
from apscheduler.schedulers.asyncio import AsyncIOScheduler

scheduler = AsyncIOScheduler()
scheduler = AsyncIOScheduler()

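The shared AsyncIOScheduler instance is started from the main module, so extensions only need to register jobs on it. A small illustrative sketch; the job itself is made up:

from modules.scheduler import scheduler


async def cleanup_expired_tokens() -> None:
    # placeholder job body; a real job would touch the database here
    print("running scheduled cleanup")


# runs once per hour after scheduler.start() has been called in the main module
scheduler.add_job(cleanup_expired_tokens, "interval", hours=1)
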
@@ -1,19 +1,34 @@
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from os import getenv
from typing import List, Union
from modules.database import col_users

from fastapi import Depends, HTTPException, Security, status
from fastapi.security import (
    OAuth2PasswordBearer,
    SecurityScopes,
)
from fastapi.security import OAuth2PasswordBearer, SecurityScopes
from jose import JWTError, jwt
from passlib.context import CryptContext
from pydantic import BaseModel, ValidationError

from modules.database import col_users
from modules.utils import configGet

try:
    configGet("secret")
except KeyError as exc:
    raise KeyError(
        "PhotosAPI secret is not set. Secret key handling has changed in PhotosAPI 0.6.0, so you need to add the config key 'secret' to your config file."
    ) from exc

if configGet("secret") == "" and getenv("PHOTOSAPI_SECRET") is None:
    raise KeyError(
        "PhotosAPI secret is not set. Set the config key 'secret' or provide the environment variable 'PHOTOSAPI_SECRET' containing a secret string."
    )

SECRET_KEY = (
    getenv("PHOTOSAPI_SECRET")
    if getenv("PHOTOSAPI_SECRET") is not None
    else configGet("secret")
)

with open("secret_key", "r", encoding="utf-8") as f:
    SECRET_KEY = f.read()
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_DAYS = 180

@@ -31,6 +46,7 @@ class TokenData(BaseModel):
class User(BaseModel):
    user: str
    email: Union[str, None] = None
    quota: Union[int, None] = None
    disabled: Union[bool, None] = None


@@ -52,46 +68,63 @@ oauth2_scheme = OAuth2PasswordBearer(
        "photos.write": "Modify photos.",
        "videos.list": "List videos.",
        "videos.read": "View videos.",
        "videos.write": "Modify videos."
        "videos.write": "Modify videos.",
    },
)


def verify_password(plain_password, hashed_password):
def verify_password(plain_password, hashed_password) -> bool:
    return pwd_context.verify(plain_password, hashed_password)


def get_password_hash(password):
def get_password_hash(password) -> str:
    return pwd_context.hash(password)


def get_user(user: str):
    found_user = col_users.find_one( {"user": user} )
    return UserInDB(user=found_user["user"], email=found_user["email"], disabled=found_user["disabled"], hash=found_user["hash"])
async def get_user(user: str) -> UserInDB:
    found_user = await col_users.find_one({"user": user})

    if found_user is None:
        raise RuntimeError(f"User {user} does not exist")

    return UserInDB(
        user=found_user["user"],
        email=found_user["email"],
        quota=found_user["quota"]
        if found_user["quota"] is not None
        else configGet("default_user_quota"),
        disabled=found_user["disabled"],
        hash=found_user["hash"],
    )


def authenticate_user(user_name: str, password: str):
    user = get_user(user_name)
    if not user:
        return False
    if not verify_password(password, user.hash):
        return False
    return user


def create_access_token( data: dict, expires_delta: Union[timedelta, None] = None ):
    to_encode = data.copy()
    if expires_delta:
        expire = datetime.utcnow() + expires_delta
async def authenticate_user(user_name: str, password: str) -> Union[UserInDB, bool]:
    if user := await get_user(user_name):
        return user if verify_password(password, user.hash) else False
    else:
        expire = datetime.utcnow() + timedelta(days=ACCESS_TOKEN_EXPIRE_DAYS)
    to_encode.update({"exp": expire})
    encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
    return encoded_jwt
        return False


async def get_current_user( security_scopes: SecurityScopes, token: str = Depends(oauth2_scheme) ):
def create_access_token(
    data: dict, expires_delta: Union[timedelta, None] = None
) -> str:
    to_encode = data.copy()

    if expires_delta:
        expire = datetime.now(tz=timezone.utc) + expires_delta
    else:
        expire = datetime.now(tz=timezone.utc) + timedelta(
            days=ACCESS_TOKEN_EXPIRE_DAYS
        )

    to_encode["exp"] = expire

    return jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)


async def get_current_user(
    security_scopes: SecurityScopes, token: str = Depends(oauth2_scheme)
) -> UserInDB:
    if security_scopes.scopes:
        authenticate_value = f'Bearer scope="{security_scopes.scope_str}"'
    else:
@@ -106,16 +139,18 @@ async def get_current_user( security_scopes: SecurityScopes, token: str = Depend
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        user: str = payload.get("sub")

        if user is None:
            raise credentials_exception

        token_scopes = payload.get("scopes", [])
        token_data = TokenData(scopes=token_scopes, user=user)
    except (JWTError, ValidationError):
        raise credentials_exception

    user = get_user(user=token_data.user)
    except (JWTError, ValidationError) as exc:
        raise credentials_exception from exc

    if user is None:
    user_record = await get_user(user=token_data.user)

    if user_record is None:
        raise credentials_exception

    for scope in security_scopes.scopes:
@@ -125,10 +160,14 @@ async def get_current_user( security_scopes: SecurityScopes, token: str = Depend
                detail="Not enough permissions",
                headers={"WWW-Authenticate": authenticate_value},
            )
    return user

    return user_record


async def get_current_active_user( current_user: User = Security(get_current_user, scopes=["me"]) ):
async def get_current_active_user(
    current_user: User = Security(get_current_user, scopes=["me"])
):
    if current_user.disabled:
        raise HTTPException(status_code=400, detail="Inactive user")
    return current_user

    return current_user

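create_access_token() now stores a timezone-aware expiry and signs it with the configured secret. A short sketch of issuing and decoding a token outside the request flow, assuming this hunk lives in modules/security.py; the module path, username, and scope below are assumptions made for illustration:

from datetime import timedelta

from jose import jwt

from modules.security import ALGORITHM, SECRET_KEY, create_access_token

# issue a token for a hypothetical user with one scope, valid for an hour
token = create_access_token(
    data={"sub": "example_user", "scopes": ["photos.read"]},
    expires_delta=timedelta(hours=1),
)

# decoding yields the subject, scopes, and the "exp" claim set above
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
print(payload["sub"], payload["scopes"])
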
@@ -1,49 +1,59 @@
import logging
from pathlib import Path
from traceback import format_exc
from typing import Any, Union
from ujson import loads, dumps, JSONDecodeError
from traceback import print_exc

# Print to stdout and then to log
def logWrite(message: str, debug: bool = False) -> None:
    # save to log file and rotation is to be done
    # logAppend(f'{message}', debug=debug)
    print(f"{message}", flush=True)
from ujson import JSONDecodeError, dumps, loads

def jsonLoad(filepath: str) -> Any:
logger = logging.getLogger(__name__)


def jsonLoad(filepath: Union[str, Path]) -> Any:
    """Load json file

    ### Args:
        * filepath (`str`): Path to input file
        * filepath (`Union[str, Path]`): Path to input file

    ### Returns:
        * `Any`: Some json deserializable
    """
    with open(filepath, "r", encoding='utf8') as file:
    """
    with open(filepath, "r", encoding="utf8") as file:
        try:
            output = loads(file.read())
        except JSONDecodeError:
            logWrite(f"Could not load json file {filepath}: file seems to be incorrect!\n{print_exc()}")
            logger.error(
                "Could not load json file %s: file seems to be incorrect!\n%s",
                filepath,
                format_exc(),
            )
            raise
        except FileNotFoundError:
            logWrite(f"Could not load json file {filepath}: file does not seem to exist!\n{print_exc()}")
            logger.error(
                "Could not load json file %s: file does not seem to exist!\n%s",
                filepath,
                format_exc(),
            )
            raise
    file.close()
    return output

def jsonSave(contents: Union[list, dict], filepath: str) -> None:

def jsonSave(contents: Union[list, dict], filepath: Union[str, Path]) -> None:
    """Save contents into json file

    ### Args:
        * contents (`Union[list, dict]`): Some json serializable
        * filepath (`str`): Path to output file
    """
        * filepath (`Union[str, Path]`): Path to output file
    """
    try:
        with open(filepath, "w", encoding='utf8') as file:
        with open(filepath, "w", encoding="utf8") as file:
            file.write(dumps(contents, ensure_ascii=False, indent=4))
        file.close()
    except Exception as exp:
        logWrite(f"Could not save json file {filepath}: {exp}\n{print_exc()}")
    except Exception as exc:
        logger.error("Could not save json file %s: %s\n%s", filepath, exc, format_exc())
    return


def configGet(key: str, *args: str) -> Any:
    """Get value of the config key

@@ -53,23 +63,25 @@ def configGet(key: str, *args: str) -> Any:

    ### Returns:
        * `Any`: Value of provided key
    """
    this_dict = jsonLoad("config.json")
    """
    this_dict = jsonLoad(Path("config.json"))
    this_key = this_dict
    for dict_key in args:
        this_key = this_key[dict_key]
    return this_key[key]


def apiKeyInvalid(obj):
    obj.send_response(401)
    obj.send_header('Content-type', 'application/json; charset=utf-8')
    obj.send_header("Content-type", "application/json; charset=utf-8")
    obj.end_headers()
    obj.wfile.write(b'{"code":401, "message": "Invalid API key"}')
    return


def apiKeyExpired(obj):
    obj.send_response(403)
    obj.send_header('Content-type', 'application/json; charset=utf-8')
    obj.send_header("Content-type", "application/json; charset=utf-8")
    obj.end_headers()
    obj.wfile.write(b'{"code":403, "message": "API key expired"}')
    return
    return

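configGet() takes the leaf key first and then the chain of parent keys inside config.json. A brief illustration against a made-up config excerpt, using the same keys modules/mailer.py asks for:

# config.json (illustrative excerpt)
# {
#     "mailer": {
#         "smtp": {"host": "smtp.example.com", "port": 465, "use_ssl": true}
#     }
# }

from modules.utils import configGet

# leaf key first, then the path of parent keys from the top of config.json
host = configGet("host", "mailer", "smtp")        # -> "smtp.example.com"
use_ssl = configGet("use_ssl", "mailer", "smtp")  # -> True
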
@@ -1,3 +1,5 @@
@import url('https://fonts.cdnfonts.com/css/google-sans');

/* Material Customization */
:root {
    --pure-material-primary-rgb: 255, 191, 0;
@@ -15,15 +17,15 @@ body {
.registration {
    position: relative;
    border-radius: 8px;
    padding: 16px 48px;
    padding: 16px 48px 30px 30px;
    box-shadow: 0 3px 1px -2px rgba(0, 0, 0, 0.2), 0 2px 2px 0 rgba(0, 0, 0, 0.14), 0 1px 5px 0 rgba(0, 0, 0, 0.12);
    overflow: hidden;
    background-color: white;
}

h1 {
.registration h1 {
    margin: 32px 0;
    font-family: "Roboto", "Segoe UI", BlinkMacSystemFont, system-ui, -apple-system;
    font-family: 'Product Sans', sans-serif;
    font-weight: normal;
    text-align: center;
}
@@ -35,114 +37,69 @@ h1 {
}

p {
    font-family: "Roboto", "Segoe UI", BlinkMacSystemFont, system-ui, -apple-system;
    font-family: 'Product Sans', sans-serif;
    font-weight: normal;
    text-align: center;
    color: #2b2b2b;
}

a.matter-button-contained {
    font-family: 'Product Sans', sans-serif;
    text-decoration: none;
}

a {
    color: rgb(var(--pure-material-primary-rgb));
    font-family: 'Product Sans', sans-serif;
    text-decoration: none;
}

a:hover {
    text-decoration: underline;
}

button {
    display: block !important;
    margin: 32px auto;
}

.done,
.progress {
    position: absolute;
    top: 0;
    bottom: 0;
    left: 0;
    right: 0;
    display: flex;
    flex-direction: column;
    align-items: center;
    justify-content: center;
    background-color: white;
    visibility: hidden;
}

.done {
    transition: visibility 0s 1s;
}

.signed > .done {
    visibility: visible;
}

.done > a {
    display: inline-block;
    text-decoration: none;
}

.progress {
    opacity: 0;
.matter-button-contained {
    margin-top: 10px;
}

.signed > .progress {
    animation: loading 4s;
}
@media only screen and (max-width: 980px) {

@keyframes loading {
    0% {
        visibility: visible;
    .registration {
        position: fixed;
        border-radius: none;
        padding: auto;
        box-shadow: none;
        overflow: visible;
    }
    12.5% {
        opacity: 0;

    .registration {
        width: 95%;
        height: auto;
    }
    25% {
        opacity: 1;

    .registration h1 {
        font-size: 64px;
    }
    87.5% {
        opacity: 1;

    body {
        background: none;
        min-height: 85vh;
    }
    100% {
        opacity: 0;

    p {
        font-size: 42px;
    }
}

.left-footer,
.right-footer {
    position: fixed;
    padding: 14px;
    bottom: 14px;
    color: #555;
    background-color: #eee;
    font-family: "Roboto", "Segoe UI", BlinkMacSystemFont, system-ui, -apple-system;
    font-size: 14px;
    line-height: 1.5;
    box-shadow: 0 3px 1px -2px rgba(0, 0, 0, 0.2), 0 2px 2px 0 rgba(0, 0, 0, 0.14), 0 1px 5px 0 rgba(0, 0, 0, 0.12);
}
.matter-button-contained {
    padding-top: 0.5em;
    padding-bottom: 1.3em;
    padding-left: 0.65em;
    padding-right: 0.65em;
    border-radius: 7px;
}

.left-footer {
    left: 0;
    border-radius: 0 4px 4px 0;
    text-align: left;
}
a.matter-button-contained {
    font-size: 3.2em;
}

.right-footer {
    right: 0;
    border-radius: 4px 0 0 4px;
    text-align: right;
}

.left-footer > a,
.right-footer > a {
    color: black;
}

.left-footer > a:hover,
.right-footer > a:hover {
    text-decoration: underline;
}
@@ -4,26 +4,28 @@
    <meta charset="UTF-8">
    <title>END PLAY Photos API • Sign Up</title>
    <link href="/pages/matter.css" rel="stylesheet">
    <link rel="stylesheet" href="/pages/home/style.css">
    <link rel="stylesheet" href="/pages/register/style.css">
</head>

<body>
    <!-- partial:index.partial.html -->
    <form class="registration" method="post">
    <iframe name="hiddenFrame" width="0" height="0" border="0" style="display: none;"></iframe>

    <form class="registration" method="post" action="/users" enctype="application/x-www-form-urlencoded" target="hiddenFrame">
        <h1>👋 Welcome!</h1>

        <label class="matter-textfield-outlined">
            <input placeholder=" " type="text" alt="You won't be able to change it later!" required>
            <input placeholder=" " type="text" alt="You won't be able to change it later!" id="user" name="user" required>
            <span>Username</span>
        </label>

        <label class="matter-textfield-outlined">
            <input placeholder=" " type="email" required>
            <input placeholder=" " type="email" id="email" name="email" required>
            <span>Email</span>
        </label>

        <label class="matter-textfield-outlined">
            <input placeholder=" " type="password" required>
            <input placeholder=" " type="password" id="password" name="password" required>
            <span>Password</span>
        </label>

@@ -32,11 +34,14 @@
            <span>I agree to the <a href="https://codepen.io/collection/nZKBZe/" target="_blank" title="Actually not a Terms of Service">Terms of Service</a></span>
        </label>

        <button class="matter-button-contained" type="submit">Sign Up</button>
        <center><a class="matter-button-contained" href="https://photos.end-play.xyz/register" type="submit" value="Submit">Sign Up</a></center>
        <!-- <button class="matter-button-contained" type="submit" value="Submit">Sign Up</button> -->

        <div class="done">
            <h1>👌 You're all set!</h1>
            <a class="matter-button-text" href="javascript:window.location.reload(true)">Again</a>
            <p>You should now receive an email with activation link.</p>
            <p>Activate your account and proceed to docs if you'd like to learn how to use API now.</p>
            <a class="matter-button-text" href="/docs">Docs</a>
        </div>
        <div class="progress">
            <progress class="matter-progress-circular" />
@@ -44,7 +49,7 @@
    </form>

    <!-- partial -->
    <script src="./script.js"></script>
    <script src="/pages/register/script.js"></script>

</body>
</html>

@@ -3,4 +3,5 @@ var form = document.querySelector('form');
form.onsubmit = function (event) {
    event.preventDefault();
    form.classList.add('signed');
    form.submit();
};
@@ -61,6 +61,9 @@ button {
    justify-content: center;
    background-color: white;
    visibility: hidden;
    padding-left: 40px;
    padding-right: 40px;
    text-align: center;
}

.done {
@@ -81,7 +84,7 @@ button {
}

.signed > .progress {
    animation: loading 4s;
    animation: loading 3s;
}

@keyframes loading {
@@ -135,4 +138,55 @@ button {
.left-footer > a:hover,
.right-footer > a:hover {
    text-decoration: underline;
}

@media only screen and (max-width: 980px) {

    .registration {
        width: 90%;
        height: auto;
        position: fixed;
        border-radius: none;
        padding: auto;
        box-shadow: none;
        overflow: visible;
    }

    body {
        background: none;
        min-height: 85vh;
    }

    p {
        font-size: 36px;
    }

    .registration h1 {
        font-size: 64px;
    }

    .matter-button-contained {
        padding-top: 0.5em;
        padding-bottom: 1.3em;
        padding-left: 0.65em;
        padding-right: 0.65em;
        border-radius: 7px;
    }

    .matter-button-text {
        padding-top: 0.4em;
        padding-bottom: 1.3em;
        padding-left: 0.65em;
        padding-right: 0.65em;
        border-radius: 7px;
    }

    a.matter-button-contained {
        font-size: 3.2em;
    }

    a.matter-button-text {
        font-size: 2.6em;
    }

}
@@ -1,11 +1,22 @@
from os import makedirs, sep
from modules.app import app
from modules.utils import *
from modules.scheduler import scheduler
from modules.extensions_loader import dynamic_import_from_src
import logging
from argparse import ArgumentParser
from os import makedirs
from pathlib import Path

from fastapi.responses import FileResponse

makedirs(f"data{sep}users", exist_ok=True)
from modules.app import app
from modules.extensions_loader import dynamic_import_from_src
from modules.migrator import migrate_database
from modules.scheduler import scheduler

makedirs(Path("data/users"), exist_ok=True)

logging.basicConfig(
    level=logging.INFO,
    format="%(name)s.%(funcName)s | %(levelname)s | %(message)s",
    datefmt="[%X]",
)


@app.get("/favicon.ico", response_class=FileResponse, include_in_schema=False)
@@ -13,8 +24,20 @@ async def favicon():
    return FileResponse("favicon.ico")


#=================================================================================
dynamic_import_from_src("extensions", star_import = True)
#=================================================================================
# =================================================================================
dynamic_import_from_src("extensions", star_import=True)
# =================================================================================

scheduler.start()
scheduler.start()

parser = ArgumentParser(
    prog="PhotosAPI",
    description="Small and simple API server for saving photos and videos.",
)

parser.add_argument("--migrate", action="store_true")

args, unknown = parser.parse_known_args()

if args.migrate:
    migrate_database()

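The parser uses parse_known_args() so the module can be imported while the ASGI server's own command-line flags are present; unrecognized options are returned separately instead of aborting startup. A small stand-alone demonstration of that behaviour; the extra --host/--port flags passed in are only examples:

from argparse import ArgumentParser

parser = ArgumentParser(prog="PhotosAPI")
parser.add_argument("--migrate", action="store_true")

# parse_known_args() splits recognised flags from everything else
args, unknown = parser.parse_known_args(["--migrate", "--host", "0.0.0.0", "--port", "8054"])
print(args.migrate)  # True
print(unknown)       # ['--host', '0.0.0.0', '--port', '8054']
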
@@ -1,9 +1,13 @@
fastapi[all]
pymongo==4.3.3
ujson~=5.6.0
scipy~=1.9.3
python-magic~=0.4.27
opencv-python~=4.6.0.66
python-jose[cryptography]~=3.3.0
aiofiles==24.1.0
apscheduler~=3.11.0
async_pymongo==0.1.11
exif==1.6.1
fastapi[all]==0.115.6
mongodb-migrations==1.3.1
opencv-python~=4.10.0.82
passlib~=1.7.4
apscheduler~=3.9.1.post1
pymongo>=4.3.3
python-jose[cryptography]~=3.3.0
python-magic~=0.4.27
scipy~=1.13.0,<1.14.0
ujson~=5.10.0