Merge pull request 'v4.1.0' (#189) from dev into main
Some checks failed
Analysis / SonarCloud (push) Successful in 46s
Tests / Build and Test (3.11) (push) Successful in 1m9s
Tests / Build and Test (3.12) (push) Successful in 1m7s
Tests / Build and Test (3.13) (push) Successful in 1m4s
Upload Python Package / release-build (release) Successful in 18s
Upload Python Package / gitea-publish (release) Failing after 20s
Upload Python Package / pypi-publish (release) Failing after 11s

Reviewed-on: #189
Profitroll 2025-02-16 18:37:38 +02:00
commit fdb8db4782
21 changed files with 381 additions and 68 deletions


@ -6,19 +6,18 @@ on:
- main
- dev
pull_request:
-    types: [opened, synchronize, reopened]
+    types: [ opened, synchronize, reopened ]
jobs:
sonarcloud:
name: SonarCloud
-    runs-on: ubuntu-latest
-    container: catthehacker/ubuntu:act-latest
+    runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
-      - name: SonarCloud Scan
-        uses: SonarSource/sonarcloud-github-action@master
+      - name: SonarQube Scan
+        uses: SonarSource/sonarqube-scan-action@v4.2.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}


@ -9,81 +9,58 @@ permissions:
jobs:
release-build:
-    runs-on: ubuntu-latest
-    container: catthehacker/ubuntu:act-latest
+    runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- name: Build release distributions
run: |
python -m pip install build
python -m build
- name: Upload distributions
uses: christopherhx/gitea-upload-artifact@v4
with:
name: release-dists
path: dist/
gitea-publish:
-    runs-on: ubuntu-latest
-    container: catthehacker/ubuntu:act-latest
-    needs:
-      - release-build
+    runs-on: ubuntu-24.04
+    needs: release-build
permissions:
id-token: write
environment:
name: gitea
url: https://git.end-play.xyz/profitroll/-/packages/pypi/libbot
env:
GITHUB_WORKFLOW_REF: ${{ gitea.workflow_ref }}
INPUT_REPOSITORY_URL: https://git.end-play.xyz/api/packages/profitroll/pypi
steps:
- name: Retrieve release distributions
uses: christopherhx/gitea-download-artifact@v4
with:
name: release-dists
path: dist/
      - name: Publish package distributions to Gitea
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.PYPI_GITEA_API_TOKEN }}
repository-url: https://git.end-play.xyz/api/packages/profitroll/pypi
pypi-publish:
-    runs-on: ubuntu-latest
-    container: catthehacker/ubuntu:act-latest
-    needs:
-      - release-build
+    runs-on: ubuntu-24.04
+    needs: release-build
permissions:
id-token: write
environment:
name: pypi
env:
GITHUB_WORKFLOW_REF: ${{ gitea.workflow_ref }}
steps:
- name: Retrieve release distributions
uses: christopherhx/gitea-download-artifact@v4
with:
name: release-dists
path: dist/
      - name: Publish package distributions to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:


@ -11,18 +11,18 @@ on:
jobs:
test:
name: Build and Test
-    runs-on: ubuntu-latest
-    container: catthehacker/ubuntu:act-latest
+    runs-on: ubuntu-24.04
strategy:
matrix:
python-version: [ "3.11", "3.12", "3.13" ]
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
cache: 'pip'
cache-dependency-path: './requirements/*'
env:
AGENT_TOOLSDIRECTORY: /opt/hostedtoolcache
- name: Install dependencies


@ -31,6 +31,7 @@ dev = { file = "requirements/dev.txt" }
pycord = { file = "requirements/pycord.txt" }
pyrogram = { file = "requirements/pyrogram.txt" }
speed = { file = "requirements/speed.txt" }
cache = { file = "requirements/cache.txt" }
[project.urls]
Source = "https://git.end-play.xyz/profitroll/LibBotUniversal"

requirements/cache.txt Normal file

@ -0,0 +1,2 @@
pymemcache~=4.0.0
redis~=5.2.1


@ -1,12 +1,12 @@
-black==24.10.0
+black==25.1.0
build==1.2.2.post1
isort==5.13.2
-mypy==1.14.1
-pylint==3.3.3
-pytest-asyncio==0.25.1
+mypy==1.15.0
+pylint==3.3.4
+pytest-asyncio==0.25.3
pytest-cov==6.0.0
pytest==8.3.4
-tox==4.23.2
-twine==6.0.1
+tox==4.24.0
+twine==6.1.0
types-aiofiles==24.1.0.20241221
types-ujson==5.10.0.20240515


@ -1,4 +1,4 @@
__version__ = "4.0.2"
__version__ = "4.1.0"
__license__ = "GPL3"
__author__ = "Profitroll"

src/libbot/cache/__init__.py vendored Normal file

@ -0,0 +1,2 @@
# This file is left empty on purpose
# Adding imports here will cause import errors when libbot[cache] is not installed

src/libbot/cache/classes/__init__.py vendored Normal file

@ -0,0 +1,3 @@
from .cache import Cache
from .cache_memcached import CacheMemcached
from .cache_redis import CacheRedis

src/libbot/cache/classes/cache.py vendored Normal file

@ -0,0 +1,44 @@
from abc import ABC, abstractmethod
from typing import Any, Dict
import pymemcache
import redis
class Cache(ABC):
client: pymemcache.Client | redis.Redis
@classmethod
@abstractmethod
def from_config(cls, engine_config: Dict[str, Any]) -> Any:
pass
@abstractmethod
def get_json(self, key: str) -> Any | None:
# TODO This method must also carry out ObjectId conversion!
pass
@abstractmethod
def get_string(self, key: str) -> str | None:
pass
@abstractmethod
def get_object(self, key: str) -> Any | None:
pass
@abstractmethod
def set_json(self, key: str, value: Any) -> None:
# TODO This method must also carry out ObjectId conversion!
pass
@abstractmethod
def set_string(self, key: str, value: str) -> None:
pass
@abstractmethod
def set_object(self, key: str, value: Any) -> None:
pass
@abstractmethod
def delete(self, key: str) -> None:
pass
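Not part of the diff: a minimal usage sketch of the interface defined above, assuming one of the concrete backends added below and a locally reachable Redis instance. The key names and payload are made up for illustration.

from libbot.cache.classes import Cache, CacheRedis

cache: Cache = CacheRedis.from_config({"uri": "redis://127.0.0.1:6379/0"})

cache.set_json("user:alice", {"name": "Alice", "locale": "en"})  # stored as a JSON string
restored = cache.get_json("user:alice")                          # None on a miss or on an error

cache.set_string("greeting", "hello")
cache.delete("greeting")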

src/libbot/cache/classes/cache_memcached.py vendored Normal file

@ -0,0 +1,89 @@
import logging
from logging import Logger
from typing import Dict, Any
from pymemcache import Client
from .cache import Cache
from ..utils._objects import _json_to_string, _string_to_json
logger: Logger = logging.getLogger(__name__)
class CacheMemcached(Cache):
client: Client
def __init__(self, client: Client):
self.client = client
logger.info("Initialized Memcached for caching")
@classmethod
def from_config(cls, engine_config: Dict[str, Any]) -> "CacheMemcached":
if "uri" not in engine_config:
raise KeyError(
"Cache configuration is invalid. Please check if all keys are set (engine: memcached)"
)
return cls(Client(engine_config["uri"], default_noreply=True))
def get_json(self, key: str) -> Any | None:
try:
result: Any | None = self.client.get(key, None)
logger.debug(
"Got json cache key '%s'%s",
key,
"" if result is not None else " (not found)",
)
except Exception as exc:
logger.error("Could not get json cache key '%s' due to: %s", key, exc)
return None
return None if result is None else _string_to_json(result)
def get_string(self, key: str) -> str | None:
try:
result: str | None = self.client.get(key, None)
logger.debug(
"Got string cache key '%s'%s",
key,
"" if result is not None else " (not found)",
)
return result
except Exception as exc:
logger.error("Could not get string cache key '%s' due to: %s", key, exc)
return None
# TODO Implement binary deserialization
def get_object(self, key: str) -> Any | None:
raise NotImplementedError()
def set_json(self, key: str, value: Any) -> None:
try:
self.client.set(key, _json_to_string(value))
logger.debug("Set json cache key '%s'", key)
except Exception as exc:
logger.error("Could not set json cache key '%s' due to: %s", key, exc)
return None
def set_string(self, key: str, value: str) -> None:
try:
self.client.set(key, value)
logger.debug("Set string cache key '%s'", key)
except Exception as exc:
logger.error("Could not set string cache key '%s' due to: %s", key, exc)
return None
# TODO Implement binary serialization
def set_object(self, key: str, value: Any) -> None:
raise NotImplementedError()
def delete(self, key: str) -> None:
try:
self.client.delete(key)
logger.debug("Deleted cache key '%s'", key)
except Exception as exc:
logger.error("Could not delete cache key '%s' due to: %s", key, exc)

src/libbot/cache/classes/cache_redis.py vendored Normal file

@ -0,0 +1,89 @@
import logging
from logging import Logger
from typing import Dict, Any
from redis import Redis
from .cache import Cache
from ..utils._objects import _string_to_json, _json_to_string
logger: Logger = logging.getLogger(__name__)
class CacheRedis(Cache):
client: Redis
def __init__(self, client: Redis):
self.client = client
logger.info("Initialized Redis for caching")
@classmethod
    def from_config(cls, engine_config: Dict[str, Any]) -> "CacheRedis":
if "uri" not in engine_config:
raise KeyError(
"Cache configuration is invalid. Please check if all keys are set (engine: memcached)"
)
return cls(Redis.from_url(engine_config["uri"]))
def get_json(self, key: str) -> Any | None:
try:
result: Any | None = self.client.get(key)
logger.debug(
"Got json cache key '%s'%s",
key,
"" if result is not None else " (not found)",
)
except Exception as exc:
logger.error("Could not get json cache key '%s' due to: %s", key, exc)
return None
return None if result is None else _string_to_json(result)
def get_string(self, key: str) -> str | None:
try:
result: str | None = self.client.get(key)
logger.debug(
"Got string cache key '%s'%s",
key,
"" if result is not None else " (not found)",
)
return result
except Exception as exc:
logger.error("Could not get string cache key '%s' due to: %s", key, exc)
return None
# TODO Implement binary deserialization
def get_object(self, key: str) -> Any | None:
raise NotImplementedError()
def set_json(self, key: str, value: Any) -> None:
try:
self.client.set(key, _json_to_string(value))
logger.debug("Set json cache key '%s'", key)
except Exception as exc:
logger.error("Could not set json cache key '%s' due to: %s", key, exc)
return None
def set_string(self, key: str, value: str) -> None:
try:
self.client.set(key, value)
logger.debug("Set string cache key '%s'", key)
except Exception as exc:
logger.error("Could not set string cache key '%s' due to: %s", key, exc)
return None
# TODO Implement binary serialization
def set_object(self, key: str, value: Any) -> None:
raise NotImplementedError()
def delete(self, key: str) -> None:
try:
self.client.delete(key)
logger.debug("Deleted cache key '%s'", key)
except Exception as exc:
logger.error("Could not delete cache key '%s' due to: %s", key, exc)

src/libbot/cache/manager/__init__.py vendored Normal file

@ -0,0 +1 @@
from .manager import create_cache_client

src/libbot/cache/manager/manager.py vendored Normal file

@ -0,0 +1,24 @@
from typing import Dict, Any, Literal
from ..classes import CacheMemcached, CacheRedis
def create_cache_client(
config: Dict[str, Any],
engine: Literal["memcached", "redis"] | None = None,
) -> CacheMemcached | CacheRedis:
    if engine is None or engine not in ("memcached", "redis"):
raise KeyError(f"Incorrect cache engine provided. Expected 'memcached' or 'redis', got '{engine}'")
if "cache" not in config or engine not in config["cache"]:
raise KeyError(
f"Cache configuration is invalid. Please check if all keys are set (engine: '{engine}')"
)
match engine:
case "memcached":
return CacheMemcached.from_config(config["cache"][engine])
case "redis":
return CacheRedis.from_config(config["cache"][engine])
case _:
raise KeyError(f"Cache implementation for the engine '{engine}' is not present.")

src/libbot/cache/utils/__init__.py vendored Normal file (empty)

src/libbot/cache/utils/_objects.py vendored Normal file

@ -0,0 +1,42 @@
import logging
from copy import deepcopy
from logging import Logger
from typing import Any
try:
    from ujson import dumps, loads
    # escape_forward_slashes is a ujson-only option; stdlib json.dumps() rejects it
    _JSON_KWARGS: dict = {"escape_forward_slashes": False}
except ImportError:
    from json import dumps, loads
    _JSON_KWARGS = {}
logger: Logger = logging.getLogger(__name__)
try:
from bson import ObjectId
except ImportError:
logger.warning(
"Could not import bson.ObjectId. PyMongo conversions will not be supported by the cache. It's safe to ignore this message if you do not use MongoDB."
)
def _json_to_string(json_object: Any) -> str:
json_object_copy: Any = deepcopy(json_object)
if isinstance(json_object_copy, dict) and "_id" in json_object_copy:
json_object_copy["_id"] = str(json_object_copy["_id"])
    return dumps(json_object_copy, ensure_ascii=False, indent=0, **_JSON_KWARGS)
def _string_to_json(json_string: str) -> Any:
json_object: Any = loads(json_string)
if "_id" in json_object:
try:
json_object["_id"] = ObjectId(json_object["_id"])
except NameError:
logger.debug(
"Tried to convert attribute '_id' with value '%s' but bson.ObjectId is not present, skipping the conversion.",
json_object["_id"],
)
return json_object
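To illustrate the two helpers above, a small round-trip sketch, assuming bson (shipped with pymongo) is installed; the id value is made up:

from bson import ObjectId

from libbot.cache.utils._objects import _json_to_string, _string_to_json

document = {"_id": ObjectId("65f0c1e2a4b5c6d7e8f90a1b"), "name": "Alice"}

as_string = _json_to_string(document)  # "_id" is stringified before dumping
restored = _string_to_json(as_string)  # and converted back to ObjectId on load

assert restored["_id"] == document["_id"]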


@ -20,7 +20,7 @@ def config_get(key: str, *path: str, config_file: str | Path = DEFAULT_CONFIG_LO
### Args:
* key (`str`): Key that contains the value
-    * *path (`str`): Path to the key that contains the value
+    * *path (`str`): Path to the key that contains the value (omit to get a value at the top/root level)
* config_file (`str | Path`, *optional*): Path-like object or path as a string of a location of the config file. Defaults to `"config.json"`
### Returns:
@ -59,7 +59,7 @@ async def config_get(key: str, *path: str, config_file: str | Path = DEFAULT_CON
### Args:
* key (`str`): Key that contains the value
-    * *path (`str`): Path to the key that contains the value
+    * *path (`str`): Path to the key that contains the value (omit to get a value at the top/root level)
* config_file (`str | Path`, *optional*): Path-like object or path as a string of a location of the config file. Defaults to `"config.json"`
### Returns:
@ -98,7 +98,7 @@ def config_set(key: str, value: Any, *path: str, config_file: str | Path = DEFAU
### Args:
* key (`str`): Key that leads to the value
* value (`Any`): Any JSON serializable data
-    * *path (`str`): Path to the key of the target
+    * *path (`str`): Path to the key of the target (omit to set a value at the top/root level)
* config_file (`str | Path`, *optional*): Path-like object or path as a string of a location of the config file. Defaults to `"config.json"`
### Raises:
@ -116,7 +116,7 @@ async def config_set(
### Args:
* key (`str`): Key that leads to the value
* value (`Any`): Any JSON serializable data
-    * *path (`str`): Path to the key of the target
+    * *path (`str`): Path to the key of the target (omit to set a value at the top/root level)
* config_file (`str | Path`, *optional*): Path-like object or path as a string of a location of the config file. Defaults to `"config.json"`
### Raises:
@ -136,7 +136,7 @@ def config_delete(
### Args:
* key (`str`): Key to delete
-    * *path (`str`): Path to the key of the target
+    * *path (`str`): Path to the key of the target (omit to delete a key at the top/root level)
* missing_ok (`bool`): Do not raise an exception if the key is missing. Defaults to `False`
* config_file (`str | Path`, *optional*): Path-like object or path as a string of a location of the config file. Defaults to `"config.json"`
@ -165,7 +165,7 @@ async def config_delete(
### Args:
* key (`str`): Key to delete
-    * *path (`str`): Path to the key of the target
+    * *path (`str`): Path to the key of the target (omit to delete a key at the top/root level)
* missing_ok (`bool`): Do not raise an exception if the key is missing. Defaults to `False`
* config_file (`str | Path`, *optional*): Path-like object or path as a string of a location of the config file. Defaults to `"config.json"`
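To make the clarified *path convention concrete, a hedged sketch against the sample config further down. The synchronous variants are shown, and the import path is an assumption; adjust it to the actual package layout.

from libbot import config_get, config_set  # assumed import path

# Top/root level: pass no *path arguments at all
locale = config_get("locale")

# Nested key: the key comes first, followed by the path to it, outermost level first
token = config_get("bot_token", "bot")

# The same convention applies to writes
config_set("bot_token", "new_token", "bot")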


@ -3,11 +3,11 @@ from typing import Any, Dict
from typing import Callable
-def supports_argument(func: Callable, arg_name: str) -> bool:
+def supports_argument(func: Callable[..., Any], arg_name: str) -> bool:
    """Check whether a function has a specific argument
    ### Args:
-    * func (`Callable`): Function to be inspected
+    * func (`Callable[..., Any]`): Function to be inspected
* arg_name (`str`): Argument to be checked
### Returns:
@ -24,11 +24,13 @@ def supports_argument(func: Callable, arg_name: str) -> bool:
return False
-def nested_set(target: dict, value: Any, *path: str, create_missing=True) -> Dict[str, Any]:
+def nested_set(
+    target: Dict[str, Any], value: Any, *path: str, create_missing: bool = True
+) -> Dict[str, Any]:
    """Set the key by its path to the value
    ### Args:
-    * target (`dict`): Dictionary to perform modifications on
+    * target (`Dict[str, Any]`): Dictionary to perform modifications on
* value (`Any`): Any data
* *path (`str`): Path to the key of the target
* create_missing (`bool`, *optional*): Create keys on the way if they're missing. Defaults to `True`
@ -39,29 +41,29 @@ def nested_set(target: dict, value: Any, *path: str, create_missing=True) -> Dic
### Returns:
* `Dict[str, Any]`: Changed dictionary
"""
-    d = target
+    target_copy: Dict[str, Any] = target
    for key in path[:-1]:
-        if key in d:
-            d = d[key]
+        if key in target_copy:
+            target_copy = target_copy[key]
        elif create_missing:
-            d = d.setdefault(key, {})
+            target_copy = target_copy.setdefault(key, {})
else:
raise KeyError(
f"Key '{key}' is not found under path provided ({path}) and create_missing is False"
)
-    if path[-1] in d or create_missing:
-        d[path[-1]] = value
+    if path[-1] in target_copy or create_missing:
+        target_copy[path[-1]] = value
return target
-def nested_delete(target: dict, *path: str) -> Dict[str, Any]:
+def nested_delete(target: Dict[str, Any], *path: str) -> Dict[str, Any]:
    """Delete the key by its path
    ### Args:
-    * target (`dict`): Dictionary to perform modifications on
+    * target (`Dict[str, Any]`): Dictionary to perform modifications on
### Raises:
* `KeyError`: Key is not found under path provided
@ -69,16 +71,16 @@ def nested_delete(target: dict, *path: str) -> Dict[str, Any]:
### Returns:
`Dict[str, Any]`: Changed dictionary
"""
-    d = target
+    target_copy: Dict[str, Any] = target
    for key in path[:-1]:
-        if key in d:
-            d = d[key]
+        if key in target_copy:
+            target_copy = target_copy[key]
else:
raise KeyError(f"Key '{key}' is not found under path provided ({path})")
-    if path[-1] in d:
-        del d[path[-1]]
+    if path[-1] in target_copy:
+        del target_copy[path[-1]]
else:
raise KeyError(f"Key '{path[-1]}' is not found under path provided ({path})")


@ -2,5 +2,14 @@
"locale": "en",
"bot": {
"bot_token": "sample_token"
},
"cache": {
"type": "memcached",
"memcached": {
"uri": "127.0.0.1:11211"
},
"redis": {
"uri": "redis://127.0.0.1:6379/0"
}
}
}

tests/test_cache.py Normal file

@ -0,0 +1,28 @@
from pathlib import Path
from libbot.cache.classes import Cache
from libbot.cache.manager import create_cache_client
try:
from ujson import JSONDecodeError, dumps, loads
except ImportError:
from json import JSONDecodeError, dumps, loads
from typing import Any, Dict
import pytest
@pytest.mark.parametrize(
"engine",
[
"memcached",
"redis",
],
)
def test_cache_creation(engine: str, location_config: Path):
with open(location_config, "r", encoding="utf-8") as file:
config: Dict[str, Any] = loads(file.read())
cache: Cache = create_cache_client(config, engine)
assert isinstance(cache, Cache)
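The test relies on a location_config fixture that is not part of this diff. A hypothetical conftest.py sketch that would satisfy it, assuming the sample config shown above lives next to the tests:

from pathlib import Path

import pytest


@pytest.fixture()
def location_config() -> Path:
    # Assumed location of the sample config.json shown above
    return Path(__file__).parent / "config.json"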


@ -10,13 +10,14 @@ python =
3.13: py313
[testenv]
setenv =
setenv =
PYTHONPATH = {toxinidir}
deps =
deps =
-r{toxinidir}/requirements/_.txt
-r{toxinidir}/requirements/dev.txt
-r{toxinidir}/requirements/pycord.txt
-r{toxinidir}/requirements/pyrogram.txt
-r{toxinidir}/requirements/speed.txt
-r{toxinidir}/requirements/cache.txt
commands =
pytest --basetemp={envtmpdir} --cov=libbot