Compare commits
No commits in common. "dev" and "v0.2.0-rc.1" have entirely different histories.
dev ... v0.2.0-rc.1

classes/cache/__init__.py (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
+from .holo_cache import HoloCache
+from .holo_cache_memcached import HoloCacheMemcached
+from .holo_cache_redis import HoloCacheRedis

classes/cache/holo_cache.py (vendored, new file, 44 lines)
@@ -0,0 +1,44 @@
+from abc import ABC, abstractmethod
+from typing import Any, Dict
+
+import pymemcache
+import redis
+
+
+class HoloCache(ABC):
+    client: pymemcache.Client | redis.Redis
+
+    @classmethod
+    @abstractmethod
+    def from_config(cls, engine_config: Dict[str, Any]) -> Any:
+        pass
+
+    @abstractmethod
+    def get_json(self, key: str) -> Any | None:
+        # TODO This method must also carry out ObjectId conversion!
+        pass
+
+    @abstractmethod
+    def get_string(self, key: str) -> str | None:
+        pass
+
+    @abstractmethod
+    def get_object(self, key: str) -> Any | None:
+        pass
+
+    @abstractmethod
+    def set_json(self, key: str, value: Any) -> None:
+        # TODO This method must also carry out ObjectId conversion!
+        pass
+
+    @abstractmethod
+    def set_string(self, key: str, value: str) -> None:
+        pass
+
+    @abstractmethod
+    def set_object(self, key: str, value: Any) -> None:
+        pass
+
+    @abstractmethod
+    def delete(self, key: str) -> None:
+        pass
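
Worth noting what the abstract base buys: callers can type against HoloCache and stay agnostic of the backend. A minimal, hypothetical sketch (not part of the diff; the helper name is made up):

from typing import Any

from classes.cache import HoloCache


def cached_document(cache: HoloCache, key: str) -> Any | None:
    # Works the same whether `cache` is HoloCacheMemcached or HoloCacheRedis
    return cache.get_json(key)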

classes/cache/holo_cache_memcached.py (vendored, new file, 89 lines)
@@ -0,0 +1,89 @@
+import logging
+from logging import Logger
+from typing import Dict, Any
+
+from pymemcache import Client
+
+from modules.cache_utils import string_to_json, json_to_string
+from . import HoloCache
+
+logger: Logger = logging.getLogger(__name__)
+
+
+class HoloCacheMemcached(HoloCache):
+    client: Client
+
+    def __init__(self, client: Client):
+        self.client = client
+
+        logger.info("Initialized Memcached for caching")
+
+    @classmethod
+    def from_config(cls, engine_config: Dict[str, Any]) -> "HoloCacheMemcached":
+        if "uri" not in engine_config:
+            raise KeyError(
+                "Cache configuration is invalid. Please check if all keys are set (engine: memcached)"
+            )
+
+        return cls(Client(engine_config["uri"], default_noreply=True))
+
+    def get_json(self, key: str) -> Any | None:
+        try:
+            result: Any | None = self.client.get(key, None)
+
+            logger.debug(
+                "Got json cache key '%s'%s",
+                key,
+                "" if result is not None else " (not found)",
+            )
+        except Exception as exc:
+            logger.error("Could not get json cache key '%s' due to: %s", key, exc)
+            return None
+
+        return None if result is None else string_to_json(result)
+
+    def get_string(self, key: str) -> str | None:
+        try:
+            result: str | None = self.client.get(key, None)
+
+            logger.debug(
+                "Got string cache key '%s'%s",
+                key,
+                "" if result is not None else " (not found)",
+            )
+
+            return result
+        except Exception as exc:
+            logger.error("Could not get string cache key '%s' due to: %s", key, exc)
+            return None
+
+    # TODO Implement binary deserialization
+    def get_object(self, key: str) -> Any | None:
+        raise NotImplementedError()
+
+    def set_json(self, key: str, value: Any) -> None:
+        try:
+            self.client.set(key, json_to_string(value))
+            logger.debug("Set json cache key '%s'", key)
+        except Exception as exc:
+            logger.error("Could not set json cache key '%s' due to: %s", key, exc)
+            return None
+
+    def set_string(self, key: str, value: str) -> None:
+        try:
+            self.client.set(key, value)
+            logger.debug("Set string cache key '%s'", key)
+        except Exception as exc:
+            logger.error("Could not set string cache key '%s' due to: %s", key, exc)
+            return None
+
+    # TODO Implement binary serialization
+    def set_object(self, key: str, value: Any) -> None:
+        raise NotImplementedError()
+
+    def delete(self, key: str) -> None:
+        try:
+            self.client.delete(key)
+            logger.debug("Deleted cache key '%s'", key)
+        except Exception as exc:
+            logger.error("Could not delete cache key '%s' due to: %s", key, exc)
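
A hedged usage sketch for the class above (not part of the diff); it assumes a Memcached server reachable at 127.0.0.1:11211 and the project root on the import path:

from classes.cache.holo_cache_memcached import HoloCacheMemcached

cache = HoloCacheMemcached.from_config({"uri": "127.0.0.1:11211"})

cache.set_json("user_123", {"id": 123, "custom_role": None})
print(cache.get_json("user_123"))  # {'id': 123, 'custom_role': None} on a hit, None on a miss or error
cache.delete("user_123")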

classes/cache/holo_cache_redis.py (vendored, new file, 89 lines)
@@ -0,0 +1,89 @@
+import logging
+from logging import Logger
+from typing import Dict, Any
+
+from redis import Redis
+
+from classes.cache import HoloCache
+from modules.cache_utils import string_to_json, json_to_string
+
+logger: Logger = logging.getLogger(__name__)
+
+
+class HoloCacheRedis(HoloCache):
+    client: Redis
+
+    def __init__(self, client: Redis):
+        self.client = client
+
+        logger.info("Initialized Redis for caching")
+
+    @classmethod
+    def from_config(cls, engine_config: Dict[str, Any]) -> Any:
+        if "uri" not in engine_config:
+            raise KeyError(
+                "Cache configuration is invalid. Please check if all keys are set (engine: redis)"
+            )
+
+        return cls(Redis.from_url(engine_config["uri"]))
+
+    def get_json(self, key: str) -> Any | None:
+        try:
+            result: Any | None = self.client.get(key)
+
+            logger.debug(
+                "Got json cache key '%s'%s",
+                key,
+                "" if result is not None else " (not found)",
+            )
+        except Exception as exc:
+            logger.error("Could not get json cache key '%s' due to: %s", key, exc)
+            return None
+
+        return None if result is None else string_to_json(result)
+
+    def get_string(self, key: str) -> str | None:
+        try:
+            result: str | None = self.client.get(key)
+
+            logger.debug(
+                "Got string cache key '%s'%s",
+                key,
+                "" if result is not None else " (not found)",
+            )
+
+            return result
+        except Exception as exc:
+            logger.error("Could not get string cache key '%s' due to: %s", key, exc)
+            return None
+
+    # TODO Implement binary deserialization
+    def get_object(self, key: str) -> Any | None:
+        raise NotImplementedError()
+
+    def set_json(self, key: str, value: Any) -> None:
+        try:
+            self.client.set(key, json_to_string(value))
+            logger.debug("Set json cache key '%s'", key)
+        except Exception as exc:
+            logger.error("Could not set json cache key '%s' due to: %s", key, exc)
+            return None
+
+    def set_string(self, key: str, value: str) -> None:
+        try:
+            self.client.set(key, value)
+            logger.debug("Set string cache key '%s'", key)
+        except Exception as exc:
+            logger.error("Could not set string cache key '%s' due to: %s", key, exc)
+            return None
+
+    # TODO Implement binary serialization
+    def set_object(self, key: str, value: Any) -> None:
+        raise NotImplementedError()
+
+    def delete(self, key: str) -> None:
+        try:
+            self.client.delete(key)
+            logger.debug("Deleted cache key '%s'", key)
+        except Exception as exc:
+            logger.error("Could not delete cache key '%s' due to: %s", key, exc)

@@ -1,15 +1,17 @@
 import logging
 from logging import Logger
 
-from libbot.cache.classes import CacheMemcached, CacheRedis
-from libbot.cache.manager import create_cache_client
 from libbot.pycord.classes import PycordBot
+
+from classes.cache.holo_cache_memcached import HoloCacheMemcached
+from classes.cache.holo_cache_redis import HoloCacheRedis
+from modules.cache_manager import create_cache_client
 
 logger: Logger = logging.getLogger(__name__)
 
 
 class HoloBot(PycordBot):
-    cache: CacheMemcached | CacheRedis | None = None
+    cache: HoloCacheMemcached | HoloCacheRedis | None = None
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)

@@ -1,15 +1,16 @@
 import logging
 from logging import Logger
-from typing import Any, Dict, Optional
+from typing import Any, Dict
 
 from bson import ObjectId
 from discord import User, Member
-from libbot.cache.classes import Cache
 from libbot.utils import config_get
 from pymongo.results import InsertOneResult
+from typing_extensions import deprecated
 
+from classes.cache import HoloCache
 from errors import UserNotFoundError
-from modules.database import col_users
+from modules.database import col_warnings, col_users
 
 logger: Logger = logging.getLogger(__name__)
 

@@ -33,14 +34,14 @@ class HoloUser:
         cls,
         user: User | Member,
         allow_creation: bool = True,
-        cache: Optional[Cache] = None,
+        cache: HoloCache | None = None,
     ) -> "HoloUser":
         """Get an object that has a proper binding between Discord ID and database
 
         ### Args:
             * `user` (User | Member): Object from which an ID can be extracted
             * `allow_creation` (bool, optional): Whether to allow creation of a new user record if none found. Defaults to True.
-            * `cache` (Cache, optional): Cache engine to get the cache from
+            * `cache` (HoloCache | None, optional): Cache engine to get the cache from
 
         ### Raises:
             * `UserNotFoundError`: User with such ID does not seem to exist in database

@@ -70,15 +71,48 @@ class HoloUser:
 
     @classmethod
     async def from_id(cls, user_id: int) -> "HoloUser":
-        raise NotImplementedError()
+        return NotImplemented
 
-    async def _set(self, key: str, value: Any, cache: Optional[Cache] = None) -> None:
+    # TODO Deprecate and remove warnings
+    @deprecated("Warnings are deprecated")
+    async def get_warnings(self) -> int:
+        """Get number of warnings user has
+
+        ### Returns:
+            * `int`: Number of warnings
+        """
+        warns: Dict[str, Any] | None = await col_warnings.find_one({"id": self.id})
+
+        return 0 if warns is None else warns["warns"]
+
+    # TODO Deprecate and remove warnings
+    @deprecated("Warnings are deprecated")
+    async def warn(self, count: int = 1, reason: str = "Reason not provided") -> None:
+        """Warn and add count to warns number
+
+        ### Args:
+            * `count` (int, optional): Count of warnings to be added. Defaults to 1.
+            * `reason` (str, optional): Reason for the warning. Defaults to "Reason not provided".
+        """
+        warns: Dict[str, Any] | None = await col_warnings.find_one({"id": self.id})
+
+        if warns is not None:
+            await col_warnings.update_one(
+                {"_id": self._id},
+                {"$set": {"warns": warns["warns"] + count}},
+            )
+        else:
+            await col_warnings.insert_one(document={"id": self.id, "warns": count})
+
+        logger.info("User %s was warned %s times due to: %s", self.id, count, reason)
+
+    async def _set(self, key: str, value: Any, cache: HoloCache | None = None) -> None:
         """Set attribute data and save it into the database
 
         ### Args:
             * `key` (str): Attribute to be changed
             * `value` (Any): Value to set
-            * `cache` (Cache, optional): Cache engine to write the update into
+            * `cache` (HoloCache | None, optional): Cache engine to write the update into
         """
         if not hasattr(self, key):
             raise AttributeError()

@@ -93,12 +127,12 @@ class HoloUser:
 
         logger.info("Set attribute '%s' of user %s to '%s'", key, self.id, value)
 
-    async def _remove(self, key: str, cache: Optional[Cache] = None) -> None:
+    async def _remove(self, key: str, cache: HoloCache | None = None) -> None:
         """Remove attribute data and save it into the database
 
         ### Args:
             * `key` (str): Attribute to be removed
-            * `cache` (Cache, optional): Cache engine to write the update into
+            * `cache` (HoloCache | None, optional): Cache engine to write the update into
         """
         if not hasattr(self, key):
             raise AttributeError()

@@ -118,7 +152,7 @@ class HoloUser:
     def _get_cache_key(self) -> str:
         return f"user_{self.id}"
 
-    def _update_cache(self, cache: Optional[Cache] = None) -> None:
+    def _update_cache(self, cache: HoloCache | None = None) -> None:
         if cache is None:
             return
 

@@ -129,7 +163,7 @@ class HoloUser:
         else:
             self._delete_cache(cache)
 
-    def _delete_cache(self, cache: Optional[Cache] = None) -> None:
+    def _delete_cache(self, cache: HoloCache | None = None) -> None:
         if cache is None:
             return
 

@@ -159,26 +193,26 @@ class HoloUser:
         }
 
     async def set_custom_channel(
-        self, channel_id: int, cache: Optional[Cache] = None
+        self, channel_id: int, cache: HoloCache | None = None
     ) -> None:
         await self._set("custom_channel", channel_id, cache=cache)
 
     async def set_custom_role(
-        self, role_id: int, cache: Optional[Cache] = None
+        self, role_id: int, cache: HoloCache | None = None
     ) -> None:
         await self._set("custom_role", role_id, cache=cache)
 
-    async def remove_custom_channel(self, cache: Optional[Cache] = None) -> None:
+    async def remove_custom_channel(self, cache: HoloCache | None = None) -> None:
         await self._remove("custom_channel", cache=cache)
 
-    async def remove_custom_role(self, cache: Optional[Cache] = None) -> None:
+    async def remove_custom_role(self, cache: HoloCache | None = None) -> None:
         await self._remove("custom_role", cache=cache)
 
-    async def purge(self, cache: Optional[Cache] = None) -> None:
-        """Completely remove user data from database. Only removes the user record from users collection.
+    async def purge(self, cache: HoloCache | None = None) -> None:
+        """Completely remove user data from database. Will not remove transaction logs and warnings.
 
         ### Args:
-            * `cache` (Cache, optional): Cache engine to write the update into
+            * `cache` (HoloCache | None, optional): Cache engine to write the update into
         """
         await col_users.delete_one({"_id": self._id})
         self._delete_cache(cache)

@@ -96,7 +96,6 @@ class Data(commands.Cog):
 
         # Temporary file must be written synchronously,
         # otherwise it will not be there when ctx.respond() is called
-        # TODO Find a way to give this file to Pycord without FS operations
         json_write(users, Path(f"tmp/{uuid}"))
 
         await ctx.respond(file=File(Path(f"tmp/{uuid}"), filename="users.json"))

modules/cache_manager.py (new file, 29 lines)
@@ -0,0 +1,29 @@
+from typing import Dict, Any, Literal
+
+from classes.cache.holo_cache_memcached import HoloCacheMemcached
+from classes.cache.holo_cache_redis import HoloCacheRedis
+
+
+def create_cache_client(
+    config: Dict[str, Any],
+    engine: Literal["memcached", "redis"] | None = None,
+) -> HoloCacheMemcached | HoloCacheRedis:
+    if engine not in ["memcached", "redis"] or engine is None:
+        raise KeyError(
+            f"Incorrect cache engine provided. Expected 'memcached' or 'redis', got '{engine}'"
+        )
+
+    if "cache" not in config or engine not in config["cache"]:
+        raise KeyError(
+            f"Cache configuration is invalid. Please check if all keys are set (engine: '{engine}')"
+        )
+
+    match engine:
+        case "memcached":
+            return HoloCacheMemcached.from_config(config["cache"][engine])
+        case "redis":
+            return HoloCacheRedis.from_config(config["cache"][engine])
+        case _:
+            raise KeyError(
+                f"Cache implementation for the engine '{engine}' is not present."
+            )
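
A small sketch of how the factory might be called; the config layout is inferred from the key checks above, and the Redis URI is an assumption:

from modules.cache_manager import create_cache_client

config = {"cache": {"redis": {"uri": "redis://127.0.0.1:6379/0"}}}

cache = create_cache_client(config, engine="redis")  # returns HoloCacheRedis
cache.set_string("greeting", "hello")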

modules/cache_utils.py (new file, 25 lines)
@@ -0,0 +1,25 @@
+from copy import deepcopy
+from typing import Any
+
+from bson import ObjectId
+from ujson import dumps, loads
+
+
+def json_to_string(json_object: Any) -> str:
+    json_object_copy: Any = deepcopy(json_object)
+
+    if isinstance(json_object_copy, dict) and "_id" in json_object_copy:
+        json_object_copy["_id"] = str(json_object_copy["_id"])
+
+    return dumps(
+        json_object_copy, ensure_ascii=False, indent=0, escape_forward_slashes=False
+    )
+
+
+def string_to_json(json_string: str) -> Any:
+    json_object: Any = loads(json_string)
+
+    if "_id" in json_object:
+        json_object["_id"] = ObjectId(json_object["_id"])
+
+    return json_object
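
A quick round-trip example showing the ObjectId handling these helpers exist for (illustrative only; the ObjectId value is arbitrary):

from bson import ObjectId

from modules.cache_utils import json_to_string, string_to_json

document = {"_id": ObjectId("65f0c6e2a4e8b9d1c0a1b2c3"), "id": 123}

raw = json_to_string(document)   # "_id" is serialized as its hex string
restored = string_to_json(raw)   # "_id" is converted back into an ObjectId

assert restored == document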

@@ -2,6 +2,9 @@ from typing import Dict, Any
 
 from async_pymongo import AsyncClient, AsyncCollection, AsyncDatabase
 from libbot.utils import config_get
+from pymongo import MongoClient
+from pymongo.synchronous.collection import Collection
+from pymongo.synchronous.database import Database
 
 db_config: Dict[str, Any] = config_get("database")
 

@@ -20,12 +23,21 @@ con_string: str = (
 )
 
 db_client: AsyncClient = AsyncClient(con_string)
+db_client_sync: MongoClient = MongoClient(con_string)
 
 # Async declarations per default
 db: AsyncDatabase = db_client.get_database(name=db_config["name"])
 
 col_users: AsyncCollection = db.get_collection("users")
+col_warnings: AsyncCollection = db.get_collection("warnings")
 col_analytics: AsyncCollection = db.get_collection("analytics")
 
+# Sync declarations as a fallback
+sync_db: Database = db_client_sync.get_database(name=db_config["name"])
+
+sync_col_users: Collection = sync_db.get_collection("users")
+sync_col_warnings: Collection = sync_db.get_collection("warnings")
+sync_col_analytics: Collection = sync_db.get_collection("analytics")
+
 # Update indexes
-db.dispatch.get_collection("users").create_index("id", unique=True)
+sync_col_users.create_index(["id"], unique=True)

@@ -5,7 +5,7 @@ requests>=2.32.2
 aiofiles~=24.1.0
 apscheduler>=3.10.0
 async_pymongo==0.1.11
-libbot[speed,pycord,cache]==4.1.0
+libbot[speed,pycord]==4.0.2
 mongodb-migrations==1.3.1
 pymemcache~=4.0.0
 redis~=5.2.1

validation/warnings.json (new file, 18 lines)
@@ -0,0 +1,18 @@
+{
+    "$jsonSchema": {
+        "required": [
+            "user_id",
+            "warnings"
+        ],
+        "properties": {
+            "user_id": {
+                "bsonType": "long",
+                "description": "Discord ID of user"
+            },
+            "warnings": {
+                "bsonType": "int",
+                "description": "Number of warnings on count"
+            }
+        }
+    }
+}
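
The diff does not show how this schema is applied; one possibility, assuming PyMongo and a local MongoDB instance, is to attach it as a collection validator via collMod (connection string and database name below are placeholders):

import json

from pymongo import MongoClient

client = MongoClient("mongodb://127.0.0.1:27017")
db = client["holo"]

with open("validation/warnings.json", encoding="utf-8") as file:
    validator = json.load(file)

# Attach or update the $jsonSchema validator on the "warnings" collection
db.command("collMod", "warnings", validator=validator)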