Initial commit
8
garbage_api_client/__init__.py
Normal file
@@ -0,0 +1,8 @@
""" A client library for accessing GarbageReminder API """

from .client import AuthenticatedClient, Client

__all__ = (
    "AuthenticatedClient",
    "Client",
)
1
garbage_api_client/api/__init__.py
Normal file
@@ -0,0 +1 @@
""" Contains methods for accessing the API """
0
garbage_api_client/api/default/__init__.py
Normal file
@@ -0,0 +1,271 @@
from http import HTTPStatus
from typing import Any, Dict, Optional, Union, cast

import httpx

from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.entry_find_locations_location_entries_get_garbage_type_type_0 import (
    EntryFindLocationsLocationEntriesGetGarbageTypeType0,
)
from ...models.search_results_collection_entry import SearchResultsCollectionEntry
from ...types import UNSET, Response, Unset


def _get_kwargs(
    location: int,
    *,
    garbage_type: Union[
        EntryFindLocationsLocationEntriesGetGarbageTypeType0, None, Unset
    ] = UNSET,
    date_start: Union[Unset, str] = "2024-04-13T15:39:14.714927",
    date_end: Union[Unset, str] = "2024-05-13T15:39:14.714945",
    page: Union[Unset, int] = 1,
    page_size: Union[Unset, int] = 100,
) -> Dict[str, Any]:

    params: Dict[str, Any] = {}

    json_garbage_type: Union[None, Unset, int]
    if isinstance(garbage_type, Unset):
        json_garbage_type = UNSET
    elif isinstance(garbage_type, EntryFindLocationsLocationEntriesGetGarbageTypeType0):
        json_garbage_type = garbage_type.value
    else:
        json_garbage_type = garbage_type
    params["garbage_type"] = json_garbage_type

    params["date_start"] = date_start

    params["date_end"] = date_end

    params["page"] = page

    params["page_size"] = page_size

    params = {k: v for k, v in params.items() if v is not UNSET and v is not None}

    _kwargs: Dict[str, Any] = {
        "method": "get",
        "url": "/locations/{location}/entries".format(
            location=location,
        ),
        "params": params,
    }

    return _kwargs


def _parse_response(
    *, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Optional[Union[Any, SearchResultsCollectionEntry]]:
    if response.status_code == HTTPStatus.OK:
        response_200 = SearchResultsCollectionEntry.from_dict(response.json())

        return response_200
    if response.status_code == HTTPStatus.BAD_REQUEST:
        response_400 = cast(Any, None)
        return response_400
    if response.status_code == HTTPStatus.NOT_FOUND:
        response_404 = cast(Any, None)
        return response_404
    if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY:
        response_422 = cast(Any, None)
        return response_422
    if client.raise_on_unexpected_status:
        raise errors.UnexpectedStatus(response.status_code, response.content)
    else:
        return None


def _build_response(
    *, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Response[Union[Any, SearchResultsCollectionEntry]]:
    return Response(
        status_code=HTTPStatus(response.status_code),
        content=response.content,
        headers=response.headers,
        parsed=_parse_response(client=client, response=response),
    )


def sync_detailed(
    location: int,
    *,
    client: Union[AuthenticatedClient, Client],
    garbage_type: Union[
        EntryFindLocationsLocationEntriesGetGarbageTypeType0, None, Unset
    ] = UNSET,
    date_start: Union[Unset, str] = "2024-04-13T15:39:14.714927",
    date_end: Union[Unset, str] = "2024-05-13T15:39:14.714945",
    page: Union[Unset, int] = 1,
    page_size: Union[Unset, int] = 100,
) -> Response[Union[Any, SearchResultsCollectionEntry]]:
    """Entry Find

    Find entries by date(s) or type

    Args:
        location (int):
        garbage_type (Union[EntryFindLocationsLocationEntriesGetGarbageTypeType0, None, Unset]):
        date_start (Union[Unset, str]): Default: '2024-04-13T15:39:14.714927'.
        date_end (Union[Unset, str]): Default: '2024-05-13T15:39:14.714945'.
        page (Union[Unset, int]): Default: 1.
        page_size (Union[Unset, int]): Default: 100.

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Union[Any, SearchResultsCollectionEntry]]
    """

    kwargs = _get_kwargs(
        location=location,
        garbage_type=garbage_type,
        date_start=date_start,
        date_end=date_end,
        page=page,
        page_size=page_size,
    )

    response = client.get_httpx_client().request(
        **kwargs,
    )

    return _build_response(client=client, response=response)


def sync(
    location: int,
    *,
    client: Union[AuthenticatedClient, Client],
    garbage_type: Union[
        EntryFindLocationsLocationEntriesGetGarbageTypeType0, None, Unset
    ] = UNSET,
    date_start: Union[Unset, str] = "2024-04-13T15:39:14.714927",
    date_end: Union[Unset, str] = "2024-05-13T15:39:14.714945",
    page: Union[Unset, int] = 1,
    page_size: Union[Unset, int] = 100,
) -> Optional[Union[Any, SearchResultsCollectionEntry]]:
    """Entry Find

    Find entries by date(s) or type

    Args:
        location (int):
        garbage_type (Union[EntryFindLocationsLocationEntriesGetGarbageTypeType0, None, Unset]):
        date_start (Union[Unset, str]): Default: '2024-04-13T15:39:14.714927'.
        date_end (Union[Unset, str]): Default: '2024-05-13T15:39:14.714945'.
        page (Union[Unset, int]): Default: 1.
        page_size (Union[Unset, int]): Default: 100.

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Union[Any, SearchResultsCollectionEntry]
    """

    return sync_detailed(
        location=location,
        client=client,
        garbage_type=garbage_type,
        date_start=date_start,
        date_end=date_end,
        page=page,
        page_size=page_size,
    ).parsed


async def asyncio_detailed(
    location: int,
    *,
    client: Union[AuthenticatedClient, Client],
    garbage_type: Union[
        EntryFindLocationsLocationEntriesGetGarbageTypeType0, None, Unset
    ] = UNSET,
    date_start: Union[Unset, str] = "2024-04-13T15:39:14.714927",
    date_end: Union[Unset, str] = "2024-05-13T15:39:14.714945",
    page: Union[Unset, int] = 1,
    page_size: Union[Unset, int] = 100,
) -> Response[Union[Any, SearchResultsCollectionEntry]]:
    """Entry Find

    Find entries by date(s) or type

    Args:
        location (int):
        garbage_type (Union[EntryFindLocationsLocationEntriesGetGarbageTypeType0, None, Unset]):
        date_start (Union[Unset, str]): Default: '2024-04-13T15:39:14.714927'.
        date_end (Union[Unset, str]): Default: '2024-05-13T15:39:14.714945'.
        page (Union[Unset, int]): Default: 1.
        page_size (Union[Unset, int]): Default: 100.

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Union[Any, SearchResultsCollectionEntry]]
    """

    kwargs = _get_kwargs(
        location=location,
        garbage_type=garbage_type,
        date_start=date_start,
        date_end=date_end,
        page=page,
        page_size=page_size,
    )

    response = await client.get_async_httpx_client().request(**kwargs)

    return _build_response(client=client, response=response)


async def asyncio(
    location: int,
    *,
    client: Union[AuthenticatedClient, Client],
    garbage_type: Union[
        EntryFindLocationsLocationEntriesGetGarbageTypeType0, None, Unset
    ] = UNSET,
    date_start: Union[Unset, str] = "2024-04-13T15:39:14.714927",
    date_end: Union[Unset, str] = "2024-05-13T15:39:14.714945",
    page: Union[Unset, int] = 1,
    page_size: Union[Unset, int] = 100,
) -> Optional[Union[Any, SearchResultsCollectionEntry]]:
    """Entry Find

    Find entries by date(s) or type

    Args:
        location (int):
        garbage_type (Union[EntryFindLocationsLocationEntriesGetGarbageTypeType0, None, Unset]):
        date_start (Union[Unset, str]): Default: '2024-04-13T15:39:14.714927'.
        date_end (Union[Unset, str]): Default: '2024-05-13T15:39:14.714945'.
        page (Union[Unset, int]): Default: 1.
        page_size (Union[Unset, int]): Default: 100.

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Union[Any, SearchResultsCollectionEntry]
    """

    return (
        await asyncio_detailed(
            location=location,
            client=client,
            garbage_type=garbage_type,
            date_start=date_start,
            date_end=date_end,
            page=page,
            page_size=page_size,
        )
    ).parsed
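A minimal usage sketch for the endpoint module above. The import path is an assumption (the file header for this hunk was lost; the module name is inferred from the generated function and enum names), and the base URL, location id and paging values are placeholders.

from garbage_api_client import Client
# Module name assumed from the generated names in this hunk; the original file header is missing.
from garbage_api_client.api.default import entry_find_locations_location_entries_get as entry_find

# Hypothetical base URL and location id, for illustration only.
client = Client(base_url="https://garbage.example.com")

with client as c:
    # sync_detailed() returns the full Response (status_code, headers, content, parsed).
    detailed = entry_find.sync_detailed(location=1, client=c, page=1, page_size=10)
    print(detailed.status_code)

    # sync() is the shortcut that returns only the parsed body (None on 400/404/422).
    results = entry_find.sync(location=1, client=c)
    if results is not None:
        for entry in results.results:
            print(entry.date, entry.garbage_type)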
268
garbage_api_client/api/default/location_find_locations_get.py
Normal file
@@ -0,0 +1,268 @@
from http import HTTPStatus
from typing import Any, Dict, Optional, Union, cast

import httpx

from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.search_results_location import SearchResultsLocation
from ...types import UNSET, Response, Unset


def _get_kwargs(
    *,
    q: Union[None, Unset, str] = UNSET,
    page: Union[Unset, int] = 1,
    page_size: Union[Unset, int] = 100,
    lat: Union[None, Unset, float] = UNSET,
    lng: Union[None, Unset, float] = UNSET,
    radius: Union[None, Unset, int] = UNSET,
) -> Dict[str, Any]:

    params: Dict[str, Any] = {}

    json_q: Union[None, Unset, str]
    if isinstance(q, Unset):
        json_q = UNSET
    else:
        json_q = q
    params["q"] = json_q

    params["page"] = page

    params["page_size"] = page_size

    json_lat: Union[None, Unset, float]
    if isinstance(lat, Unset):
        json_lat = UNSET
    else:
        json_lat = lat
    params["lat"] = json_lat

    json_lng: Union[None, Unset, float]
    if isinstance(lng, Unset):
        json_lng = UNSET
    else:
        json_lng = lng
    params["lng"] = json_lng

    json_radius: Union[None, Unset, int]
    if isinstance(radius, Unset):
        json_radius = UNSET
    else:
        json_radius = radius
    params["radius"] = json_radius

    params = {k: v for k, v in params.items() if v is not UNSET and v is not None}

    _kwargs: Dict[str, Any] = {
        "method": "get",
        "url": "/locations",
        "params": params,
    }

    return _kwargs


def _parse_response(
    *, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Optional[Union[Any, SearchResultsLocation]]:
    if response.status_code == HTTPStatus.OK:
        response_200 = SearchResultsLocation.from_dict(response.json())

        return response_200
    if response.status_code == HTTPStatus.BAD_REQUEST:
        response_400 = cast(Any, None)
        return response_400
    if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY:
        response_422 = cast(Any, None)
        return response_422
    if client.raise_on_unexpected_status:
        raise errors.UnexpectedStatus(response.status_code, response.content)
    else:
        return None


def _build_response(
    *, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Response[Union[Any, SearchResultsLocation]]:
    return Response(
        status_code=HTTPStatus(response.status_code),
        content=response.content,
        headers=response.headers,
        parsed=_parse_response(client=client, response=response),
    )


def sync_detailed(
    *,
    client: Union[AuthenticatedClient, Client],
    q: Union[None, Unset, str] = UNSET,
    page: Union[Unset, int] = 1,
    page_size: Union[Unset, int] = 100,
    lat: Union[None, Unset, float] = UNSET,
    lng: Union[None, Unset, float] = UNSET,
    radius: Union[None, Unset, int] = UNSET,
) -> Response[Union[Any, SearchResultsLocation]]:
    """Location Find

    Find a location by name or coordinates

    Args:
        q (Union[None, Unset, str]):
        page (Union[Unset, int]): Default: 1.
        page_size (Union[Unset, int]): Default: 100.
        lat (Union[None, Unset, float]):
        lng (Union[None, Unset, float]):
        radius (Union[None, Unset, int]):

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Union[Any, SearchResultsLocation]]
    """

    kwargs = _get_kwargs(
        q=q,
        page=page,
        page_size=page_size,
        lat=lat,
        lng=lng,
        radius=radius,
    )

    response = client.get_httpx_client().request(
        **kwargs,
    )

    return _build_response(client=client, response=response)


def sync(
    *,
    client: Union[AuthenticatedClient, Client],
    q: Union[None, Unset, str] = UNSET,
    page: Union[Unset, int] = 1,
    page_size: Union[Unset, int] = 100,
    lat: Union[None, Unset, float] = UNSET,
    lng: Union[None, Unset, float] = UNSET,
    radius: Union[None, Unset, int] = UNSET,
) -> Optional[Union[Any, SearchResultsLocation]]:
    """Location Find

    Find a location by name or coordinates

    Args:
        q (Union[None, Unset, str]):
        page (Union[Unset, int]): Default: 1.
        page_size (Union[Unset, int]): Default: 100.
        lat (Union[None, Unset, float]):
        lng (Union[None, Unset, float]):
        radius (Union[None, Unset, int]):

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Union[Any, SearchResultsLocation]
    """

    return sync_detailed(
        client=client,
        q=q,
        page=page,
        page_size=page_size,
        lat=lat,
        lng=lng,
        radius=radius,
    ).parsed


async def asyncio_detailed(
    *,
    client: Union[AuthenticatedClient, Client],
    q: Union[None, Unset, str] = UNSET,
    page: Union[Unset, int] = 1,
    page_size: Union[Unset, int] = 100,
    lat: Union[None, Unset, float] = UNSET,
    lng: Union[None, Unset, float] = UNSET,
    radius: Union[None, Unset, int] = UNSET,
) -> Response[Union[Any, SearchResultsLocation]]:
    """Location Find

    Find a location by name or coordinates

    Args:
        q (Union[None, Unset, str]):
        page (Union[Unset, int]): Default: 1.
        page_size (Union[Unset, int]): Default: 100.
        lat (Union[None, Unset, float]):
        lng (Union[None, Unset, float]):
        radius (Union[None, Unset, int]):

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Union[Any, SearchResultsLocation]]
    """

    kwargs = _get_kwargs(
        q=q,
        page=page,
        page_size=page_size,
        lat=lat,
        lng=lng,
        radius=radius,
    )

    response = await client.get_async_httpx_client().request(**kwargs)

    return _build_response(client=client, response=response)


async def asyncio(
    *,
    client: Union[AuthenticatedClient, Client],
    q: Union[None, Unset, str] = UNSET,
    page: Union[Unset, int] = 1,
    page_size: Union[Unset, int] = 100,
    lat: Union[None, Unset, float] = UNSET,
    lng: Union[None, Unset, float] = UNSET,
    radius: Union[None, Unset, int] = UNSET,
) -> Optional[Union[Any, SearchResultsLocation]]:
    """Location Find

    Find a location by name or coordinates

    Args:
        q (Union[None, Unset, str]):
        page (Union[Unset, int]): Default: 1.
        page_size (Union[Unset, int]): Default: 100.
        lat (Union[None, Unset, float]):
        lng (Union[None, Unset, float]):
        radius (Union[None, Unset, int]):

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Union[Any, SearchResultsLocation]
    """

    return (
        await asyncio_detailed(
            client=client,
            q=q,
            page=page,
            page_size=page_size,
            lat=lat,
            lng=lng,
            radius=radius,
        )
    ).parsed
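An illustrative sketch of the location search above, assuming the same placeholder base URL; the query text, coordinates and radius are made-up values, and the radius unit is whatever the API defines.

from garbage_api_client import Client
from garbage_api_client.api.default import location_find_locations_get

client = Client(base_url="https://garbage.example.com")  # placeholder URL

# Search by free-text name; q, lat, lng and radius default to UNSET, and the
# query-string builder drops UNSET/None values, so only parameters you pass are sent.
by_name = location_find_locations_get.sync(client=client, q="Springfield")

# Or search by coordinates within a radius.
by_coords = location_find_locations_get.sync(
    client=client, lat=52.37, lng=4.89, radius=5
)

if by_name is not None:
    for loc in by_name.results:
        print(loc.id, loc.name, loc.timezone)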
169
garbage_api_client/api/default/location_get_locations_id_get.py
Normal file
@@ -0,0 +1,169 @@
from http import HTTPStatus
from typing import Any, Dict, Optional, Union, cast

import httpx

from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.http_validation_error import HTTPValidationError
from ...models.location import Location
from ...types import Response


def _get_kwargs(
    id: int,
) -> Dict[str, Any]:

    _kwargs: Dict[str, Any] = {
        "method": "get",
        "url": "/locations/{id}".format(
            id=id,
        ),
    }

    return _kwargs


def _parse_response(
    *, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Optional[Union[Any, HTTPValidationError, Location]]:
    if response.status_code == HTTPStatus.OK:
        response_200 = Location.from_dict(response.json())

        return response_200
    if response.status_code == HTTPStatus.NOT_FOUND:
        response_404 = cast(Any, None)
        return response_404
    if response.status_code == HTTPStatus.UNPROCESSABLE_ENTITY:
        response_422 = HTTPValidationError.from_dict(response.json())

        return response_422
    if client.raise_on_unexpected_status:
        raise errors.UnexpectedStatus(response.status_code, response.content)
    else:
        return None


def _build_response(
    *, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Response[Union[Any, HTTPValidationError, Location]]:
    return Response(
        status_code=HTTPStatus(response.status_code),
        content=response.content,
        headers=response.headers,
        parsed=_parse_response(client=client, response=response),
    )


def sync_detailed(
    id: int,
    *,
    client: Union[AuthenticatedClient, Client],
) -> Response[Union[Any, HTTPValidationError, Location]]:
    """Location Get

    Get a location by id

    Args:
        id (int):

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Union[Any, HTTPValidationError, Location]]
    """

    kwargs = _get_kwargs(
        id=id,
    )

    response = client.get_httpx_client().request(
        **kwargs,
    )

    return _build_response(client=client, response=response)


def sync(
    id: int,
    *,
    client: Union[AuthenticatedClient, Client],
) -> Optional[Union[Any, HTTPValidationError, Location]]:
    """Location Get

    Get a location by id

    Args:
        id (int):

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Union[Any, HTTPValidationError, Location]
    """

    return sync_detailed(
        id=id,
        client=client,
    ).parsed


async def asyncio_detailed(
    id: int,
    *,
    client: Union[AuthenticatedClient, Client],
) -> Response[Union[Any, HTTPValidationError, Location]]:
    """Location Get

    Get a location by id

    Args:
        id (int):

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Response[Union[Any, HTTPValidationError, Location]]
    """

    kwargs = _get_kwargs(
        id=id,
    )

    response = await client.get_async_httpx_client().request(**kwargs)

    return _build_response(client=client, response=response)


async def asyncio(
    id: int,
    *,
    client: Union[AuthenticatedClient, Client],
) -> Optional[Union[Any, HTTPValidationError, Location]]:
    """Location Get

    Get a location by id

    Args:
        id (int):

    Raises:
        errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
        httpx.TimeoutException: If the request takes longer than Client.timeout.

    Returns:
        Union[Any, HTTPValidationError, Location]
    """

    return (
        await asyncio_detailed(
            id=id,
            client=client,
        )
    ).parsed
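Because sync() collapses a 404 to None, callers that need to distinguish outcomes can use sync_detailed() and inspect the Response. A small sketch, with a placeholder base URL and id:

from garbage_api_client import Client
from garbage_api_client.api.default import location_get_locations_id_get
from garbage_api_client.models import HTTPValidationError, Location

client = Client(base_url="https://garbage.example.com")  # placeholder URL

response = location_get_locations_id_get.sync_detailed(id=42, client=client)

if isinstance(response.parsed, Location):
    print(response.parsed.name)
elif isinstance(response.parsed, HTTPValidationError):
    print("validation error:", response.parsed.detail)
else:
    # A 404 is parsed to None, so fall back to the raw status code.
    print("lookup failed with HTTP", response.status_code)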
286
garbage_api_client/client.py
Normal file
@@ -0,0 +1,286 @@
import ssl
from typing import Any, Dict, Optional, Union

import httpx
from attrs import define, evolve, field


@define
class Client:
    """A class for keeping track of data related to the API

    The following are accepted as keyword arguments and will be used to construct httpx Clients internally:

        ``base_url``: The base URL for the API, all requests are made to a relative path to this URL

        ``cookies``: A dictionary of cookies to be sent with every request

        ``headers``: A dictionary of headers to be sent with every request

        ``timeout``: The maximum amount of time a request can take. API functions will raise
        httpx.TimeoutException if this is exceeded.

        ``verify_ssl``: Whether or not to verify the SSL certificate of the API server. This should be True in production,
        but can be set to False for testing purposes.

        ``follow_redirects``: Whether or not to follow redirects. Default value is False.

        ``httpx_args``: A dictionary of additional arguments to be passed to the ``httpx.Client`` and ``httpx.AsyncClient`` constructor.

    Attributes:
        raise_on_unexpected_status: Whether or not to raise an errors.UnexpectedStatus if the API returns a
            status code that was not documented in the source OpenAPI document. Can also be provided as a keyword
            argument to the constructor.
    """

    raise_on_unexpected_status: bool = field(default=False, kw_only=True)
    _base_url: str = field(alias="base_url")
    _cookies: Dict[str, str] = field(factory=dict, kw_only=True, alias="cookies")
    _headers: Dict[str, str] = field(factory=dict, kw_only=True, alias="headers")
    _timeout: Optional[httpx.Timeout] = field(
        default=None, kw_only=True, alias="timeout"
    )
    _verify_ssl: Union[str, bool, ssl.SSLContext] = field(
        default=True, kw_only=True, alias="verify_ssl"
    )
    _follow_redirects: bool = field(
        default=False, kw_only=True, alias="follow_redirects"
    )
    _httpx_args: Dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args")
    _client: Optional[httpx.Client] = field(default=None, init=False)
    _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False)

    def with_headers(self, headers: Dict[str, str]) -> "Client":
        """Get a new client matching this one with additional headers"""
        if self._client is not None:
            self._client.headers.update(headers)
        if self._async_client is not None:
            self._async_client.headers.update(headers)
        return evolve(self, headers={**self._headers, **headers})

    def with_cookies(self, cookies: Dict[str, str]) -> "Client":
        """Get a new client matching this one with additional cookies"""
        if self._client is not None:
            self._client.cookies.update(cookies)
        if self._async_client is not None:
            self._async_client.cookies.update(cookies)
        return evolve(self, cookies={**self._cookies, **cookies})

    def with_timeout(self, timeout: httpx.Timeout) -> "Client":
        """Get a new client matching this one with a new timeout (in seconds)"""
        if self._client is not None:
            self._client.timeout = timeout
        if self._async_client is not None:
            self._async_client.timeout = timeout
        return evolve(self, timeout=timeout)

    def set_httpx_client(self, client: httpx.Client) -> "Client":
        """Manually set the underlying httpx.Client

        **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout.
        """
        self._client = client
        return self

    def get_httpx_client(self) -> httpx.Client:
        """Get the underlying httpx.Client, constructing a new one if not previously set"""
        if self._client is None:
            self._client = httpx.Client(
                base_url=self._base_url,
                cookies=self._cookies,
                headers=self._headers,
                timeout=self._timeout,
                verify=self._verify_ssl,
                follow_redirects=self._follow_redirects,
                **self._httpx_args,
            )
        return self._client

    def __enter__(self) -> "Client":
        """Enter a context manager for self.client—you cannot enter twice (see httpx docs)"""
        self.get_httpx_client().__enter__()
        return self

    def __exit__(self, *args: Any, **kwargs: Any) -> None:
        """Exit a context manager for internal httpx.Client (see httpx docs)"""
        self.get_httpx_client().__exit__(*args, **kwargs)

    def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "Client":
        """Manually set the underlying httpx.AsyncClient

        **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout.
        """
        self._async_client = async_client
        return self

    def get_async_httpx_client(self) -> httpx.AsyncClient:
        """Get the underlying httpx.AsyncClient, constructing a new one if not previously set"""
        if self._async_client is None:
            self._async_client = httpx.AsyncClient(
                base_url=self._base_url,
                cookies=self._cookies,
                headers=self._headers,
                timeout=self._timeout,
                verify=self._verify_ssl,
                follow_redirects=self._follow_redirects,
                **self._httpx_args,
            )
        return self._async_client

    async def __aenter__(self) -> "Client":
        """Enter a context manager for underlying httpx.AsyncClient—you cannot enter twice (see httpx docs)"""
        await self.get_async_httpx_client().__aenter__()
        return self

    async def __aexit__(self, *args: Any, **kwargs: Any) -> None:
        """Exit a context manager for underlying httpx.AsyncClient (see httpx docs)"""
        await self.get_async_httpx_client().__aexit__(*args, **kwargs)


@define
class AuthenticatedClient:
    """A Client which has been authenticated for use on secured endpoints

    The following are accepted as keyword arguments and will be used to construct httpx Clients internally:

        ``base_url``: The base URL for the API, all requests are made to a relative path to this URL

        ``cookies``: A dictionary of cookies to be sent with every request

        ``headers``: A dictionary of headers to be sent with every request

        ``timeout``: The maximum amount of time a request can take. API functions will raise
        httpx.TimeoutException if this is exceeded.

        ``verify_ssl``: Whether or not to verify the SSL certificate of the API server. This should be True in production,
        but can be set to False for testing purposes.

        ``follow_redirects``: Whether or not to follow redirects. Default value is False.

        ``httpx_args``: A dictionary of additional arguments to be passed to the ``httpx.Client`` and ``httpx.AsyncClient`` constructor.

    Attributes:
        raise_on_unexpected_status: Whether or not to raise an errors.UnexpectedStatus if the API returns a
            status code that was not documented in the source OpenAPI document. Can also be provided as a keyword
            argument to the constructor.
        token: The token to use for authentication
        prefix: The prefix to use for the Authorization header
        auth_header_name: The name of the Authorization header
    """

    raise_on_unexpected_status: bool = field(default=False, kw_only=True)
    _base_url: str = field(alias="base_url")
    _cookies: Dict[str, str] = field(factory=dict, kw_only=True, alias="cookies")
    _headers: Dict[str, str] = field(factory=dict, kw_only=True, alias="headers")
    _timeout: Optional[httpx.Timeout] = field(
        default=None, kw_only=True, alias="timeout"
    )
    _verify_ssl: Union[str, bool, ssl.SSLContext] = field(
        default=True, kw_only=True, alias="verify_ssl"
    )
    _follow_redirects: bool = field(
        default=False, kw_only=True, alias="follow_redirects"
    )
    _httpx_args: Dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args")
    _client: Optional[httpx.Client] = field(default=None, init=False)
    _async_client: Optional[httpx.AsyncClient] = field(default=None, init=False)

    token: str
    prefix: str = "Bearer"
    auth_header_name: str = "Authorization"

    def with_headers(self, headers: Dict[str, str]) -> "AuthenticatedClient":
        """Get a new client matching this one with additional headers"""
        if self._client is not None:
            self._client.headers.update(headers)
        if self._async_client is not None:
            self._async_client.headers.update(headers)
        return evolve(self, headers={**self._headers, **headers})

    def with_cookies(self, cookies: Dict[str, str]) -> "AuthenticatedClient":
        """Get a new client matching this one with additional cookies"""
        if self._client is not None:
            self._client.cookies.update(cookies)
        if self._async_client is not None:
            self._async_client.cookies.update(cookies)
        return evolve(self, cookies={**self._cookies, **cookies})

    def with_timeout(self, timeout: httpx.Timeout) -> "AuthenticatedClient":
        """Get a new client matching this one with a new timeout (in seconds)"""
        if self._client is not None:
            self._client.timeout = timeout
        if self._async_client is not None:
            self._async_client.timeout = timeout
        return evolve(self, timeout=timeout)

    def set_httpx_client(self, client: httpx.Client) -> "AuthenticatedClient":
        """Manually set the underlying httpx.Client

        **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout.
        """
        self._client = client
        return self

    def get_httpx_client(self) -> httpx.Client:
        """Get the underlying httpx.Client, constructing a new one if not previously set"""
        if self._client is None:
            self._headers[self.auth_header_name] = (
                f"{self.prefix} {self.token}" if self.prefix else self.token
            )
            self._client = httpx.Client(
                base_url=self._base_url,
                cookies=self._cookies,
                headers=self._headers,
                timeout=self._timeout,
                verify=self._verify_ssl,
                follow_redirects=self._follow_redirects,
                **self._httpx_args,
            )
        return self._client

    def __enter__(self) -> "AuthenticatedClient":
        """Enter a context manager for self.client—you cannot enter twice (see httpx docs)"""
        self.get_httpx_client().__enter__()
        return self

    def __exit__(self, *args: Any, **kwargs: Any) -> None:
        """Exit a context manager for internal httpx.Client (see httpx docs)"""
        self.get_httpx_client().__exit__(*args, **kwargs)

    def set_async_httpx_client(
        self, async_client: httpx.AsyncClient
    ) -> "AuthenticatedClient":
        """Manually set the underlying httpx.AsyncClient

        **NOTE**: This will override any other settings on the client, including cookies, headers, and timeout.
        """
        self._async_client = async_client
        return self

    def get_async_httpx_client(self) -> httpx.AsyncClient:
        """Get the underlying httpx.AsyncClient, constructing a new one if not previously set"""
        if self._async_client is None:
            self._headers[self.auth_header_name] = (
                f"{self.prefix} {self.token}" if self.prefix else self.token
            )
            self._async_client = httpx.AsyncClient(
                base_url=self._base_url,
                cookies=self._cookies,
                headers=self._headers,
                timeout=self._timeout,
                verify=self._verify_ssl,
                follow_redirects=self._follow_redirects,
                **self._httpx_args,
            )
        return self._async_client

    async def __aenter__(self) -> "AuthenticatedClient":
        """Enter a context manager for underlying httpx.AsyncClient—you cannot enter twice (see httpx docs)"""
        await self.get_async_httpx_client().__aenter__()
        return self

    async def __aexit__(self, *args: Any, **kwargs: Any) -> None:
        """Exit a context manager for underlying httpx.AsyncClient (see httpx docs)"""
        await self.get_async_httpx_client().__aexit__(*args, **kwargs)
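A short construction sketch for both client types, assuming a placeholder base URL and token; it also shows the async context manager reusing one httpx.AsyncClient across calls.

import asyncio

from garbage_api_client import AuthenticatedClient, Client
from garbage_api_client.api.default import location_find_locations_get

# Plain client; base_url is the only required argument (placeholder URL shown).
client = Client(base_url="https://garbage.example.com", raise_on_unexpected_status=True)

# Token-based client; the token value is a placeholder.
auth_client = AuthenticatedClient(
    base_url="https://garbage.example.com",
    token="my-api-token",
)


async def main() -> None:
    # Entering the async context manager keeps one httpx.AsyncClient open for all calls.
    async with auth_client as c:
        page = await location_find_locations_get.asyncio(client=c, page=1)
        print(page)


asyncio.run(main())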
16
garbage_api_client/errors.py
Normal file
@@ -0,0 +1,16 @@
""" Contains shared error types that can be raised from API functions """


class UnexpectedStatus(Exception):
    """Raised by api functions when the response has an undocumented status and Client.raise_on_unexpected_status is True"""

    def __init__(self, status_code: int, content: bytes):
        self.status_code = status_code
        self.content = content

        super().__init__(
            f"Unexpected status code: {status_code}\n\nResponse content:\n{content.decode(errors='ignore')}"
        )


__all__ = ["UnexpectedStatus"]
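A hedged sketch of how UnexpectedStatus is meant to be handled when raise_on_unexpected_status is enabled; the base URL and id are placeholders.

from garbage_api_client import Client
from garbage_api_client.errors import UnexpectedStatus
from garbage_api_client.api.default import location_get_locations_id_get

# With raise_on_unexpected_status=True, any status code not listed in the OpenAPI
# document (for example a 500) raises instead of silently returning None.
client = Client(base_url="https://garbage.example.com", raise_on_unexpected_status=True)

try:
    location_get_locations_id_get.sync(id=42, client=client)
except UnexpectedStatus as exc:
    print(exc.status_code)
    print(exc.content.decode(errors="ignore"))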
23
garbage_api_client/models/__init__.py
Normal file
@@ -0,0 +1,23 @@
""" Contains all the data models used in inputs/outputs """

from .collection_entry import CollectionEntry
from .collection_entry_garbage_type import CollectionEntryGarbageType
from .entry_find_locations_location_entries_get_garbage_type_type_0 import (
    EntryFindLocationsLocationEntriesGetGarbageTypeType0,
)
from .http_validation_error import HTTPValidationError
from .location import Location
from .search_results_collection_entry import SearchResultsCollectionEntry
from .search_results_location import SearchResultsLocation
from .validation_error import ValidationError

__all__ = (
    "CollectionEntry",
    "CollectionEntryGarbageType",
    "EntryFindLocationsLocationEntriesGetGarbageTypeType0",
    "HTTPValidationError",
    "Location",
    "SearchResultsCollectionEntry",
    "SearchResultsLocation",
    "ValidationError",
)
76
garbage_api_client/models/collection_entry.py
Normal file
@@ -0,0 +1,76 @@
from typing import Any, Dict, List, Type, TypeVar, cast

from attrs import define as _attrs_define
from attrs import field as _attrs_field

from ..models.collection_entry_garbage_type import CollectionEntryGarbageType

T = TypeVar("T", bound="CollectionEntry")


@_attrs_define
class CollectionEntry:
    """
    Attributes:
        locations (List[int]):
        garbage_type (CollectionEntryGarbageType):
        date (str):
    """

    locations: List[int]
    garbage_type: CollectionEntryGarbageType
    date: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        locations = self.locations

        garbage_type = self.garbage_type.value

        date = self.date

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "locations": locations,
                "garbage_type": garbage_type,
                "date": date,
            }
        )

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        d = src_dict.copy()
        locations = cast(List[int], d.pop("locations"))

        garbage_type = CollectionEntryGarbageType(d.pop("garbage_type"))

        date = d.pop("date")

        collection_entry = cls(
            locations=locations,
            garbage_type=garbage_type,
            date=date,
        )

        collection_entry.additional_properties = d
        return collection_entry

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
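A small round-trip sketch for this model; the field values are illustrative and the meaning of the garbage type codes is not documented in this commit.

from garbage_api_client.models import CollectionEntry, CollectionEntryGarbageType

# Illustrative values only.
entry = CollectionEntry(
    locations=[1, 2],
    garbage_type=CollectionEntryGarbageType.VALUE_1,
    date="2024-05-01",
)

payload = entry.to_dict()  # {'locations': [1, 2], 'garbage_type': 1, 'date': '2024-05-01'}
restored = CollectionEntry.from_dict(payload)

# Unknown keys survive the round trip via additional_properties.
payload["note"] = "extra field"
with_extra = CollectionEntry.from_dict(payload)
assert with_extra["note"] == "extra field"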
13
garbage_api_client/models/collection_entry_garbage_type.py
Normal file
@@ -0,0 +1,13 @@
from enum import IntEnum


class CollectionEntryGarbageType(IntEnum):
    VALUE_0 = 0
    VALUE_1 = 1
    VALUE_2 = 2
    VALUE_3 = 3
    VALUE_4 = 4
    VALUE_5 = 5

    def __str__(self) -> str:
        return str(self.value)
13
garbage_api_client/models/entry_find_locations_location_entries_get_garbage_type_type_0.py
Normal file
@@ -0,0 +1,13 @@
from enum import IntEnum


class EntryFindLocationsLocationEntriesGetGarbageTypeType0(IntEnum):
    VALUE_0 = 0
    VALUE_1 = 1
    VALUE_2 = 2
    VALUE_3 = 3
    VALUE_4 = 4
    VALUE_5 = 5

    def __str__(self) -> str:
        return str(self.value)
74
garbage_api_client/models/http_validation_error.py
Normal file
@@ -0,0 +1,74 @@
from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union

from attrs import define as _attrs_define
from attrs import field as _attrs_field

from ..types import UNSET, Unset

if TYPE_CHECKING:
    from ..models.validation_error import ValidationError


T = TypeVar("T", bound="HTTPValidationError")


@_attrs_define
class HTTPValidationError:
    """
    Attributes:
        detail (Union[Unset, List['ValidationError']]):
    """

    detail: Union[Unset, List["ValidationError"]] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        detail: Union[Unset, List[Dict[str, Any]]] = UNSET
        if not isinstance(self.detail, Unset):
            detail = []
            for detail_item_data in self.detail:
                detail_item = detail_item_data.to_dict()
                detail.append(detail_item)

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update({})
        if detail is not UNSET:
            field_dict["detail"] = detail

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        from ..models.validation_error import ValidationError

        d = src_dict.copy()
        detail = []
        _detail = d.pop("detail", UNSET)
        for detail_item_data in _detail or []:
            detail_item = ValidationError.from_dict(detail_item_data)

            detail.append(detail_item)

        http_validation_error = cls(
            detail=detail,
        )

        http_validation_error.additional_properties = d
        return http_validation_error

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
90
garbage_api_client/models/location.py
Normal file
@@ -0,0 +1,90 @@
from typing import Any, Dict, List, Type, TypeVar, cast

from attrs import define as _attrs_define
from attrs import field as _attrs_field

T = TypeVar("T", bound="Location")


@_attrs_define
class Location:
    """
    Attributes:
        id (int):
        name (str):
        location (List[int]):
        country (int):
        timezone (str):
    """

    id: int
    name: str
    location: List[int]
    country: int
    timezone: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        id = self.id

        name = self.name

        location = self.location

        country = self.country

        timezone = self.timezone

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "id": id,
                "name": name,
                "location": location,
                "country": country,
                "timezone": timezone,
            }
        )

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        d = src_dict.copy()
        id = d.pop("id")

        name = d.pop("name")

        location = cast(List[int], d.pop("location"))

        country = d.pop("country")

        timezone = d.pop("timezone")

        location = cls(
            id=id,
            name=name,
            location=location,
            country=country,
            timezone=timezone,
        )

        location.additional_properties = d
        return location

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
72
garbage_api_client/models/search_results_collection_entry.py
Normal file
@@ -0,0 +1,72 @@
from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar

from attrs import define as _attrs_define
from attrs import field as _attrs_field

if TYPE_CHECKING:
    from ..models.collection_entry import CollectionEntry


T = TypeVar("T", bound="SearchResultsCollectionEntry")


@_attrs_define
class SearchResultsCollectionEntry:
    """
    Attributes:
        results (List['CollectionEntry']):
    """

    results: List["CollectionEntry"]
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        results = []
        for results_item_data in self.results:
            results_item = results_item_data.to_dict()
            results.append(results_item)

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "results": results,
            }
        )

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        from ..models.collection_entry import CollectionEntry

        d = src_dict.copy()
        results = []
        _results = d.pop("results")
        for results_item_data in _results:
            results_item = CollectionEntry.from_dict(results_item_data)

            results.append(results_item)

        search_results_collection_entry = cls(
            results=results,
        )

        search_results_collection_entry.additional_properties = d
        return search_results_collection_entry

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
72
garbage_api_client/models/search_results_location.py
Normal file
@@ -0,0 +1,72 @@
from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar

from attrs import define as _attrs_define
from attrs import field as _attrs_field

if TYPE_CHECKING:
    from ..models.location import Location


T = TypeVar("T", bound="SearchResultsLocation")


@_attrs_define
class SearchResultsLocation:
    """
    Attributes:
        results (List['Location']):
    """

    results: List["Location"]
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        results = []
        for results_item_data in self.results:
            results_item = results_item_data.to_dict()
            results.append(results_item)

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "results": results,
            }
        )

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        from ..models.location import Location

        d = src_dict.copy()
        results = []
        _results = d.pop("results")
        for results_item_data in _results:
            results_item = Location.from_dict(results_item_data)

            results.append(results_item)

        search_results_location = cls(
            results=results,
        )

        search_results_location.additional_properties = d
        return search_results_location

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
87
garbage_api_client/models/validation_error.py
Normal file
@@ -0,0 +1,87 @@
from typing import Any, Dict, List, Type, TypeVar, Union, cast

from attrs import define as _attrs_define
from attrs import field as _attrs_field

T = TypeVar("T", bound="ValidationError")


@_attrs_define
class ValidationError:
    """
    Attributes:
        loc (List[Union[int, str]]):
        msg (str):
        type (str):
    """

    loc: List[Union[int, str]]
    msg: str
    type: str
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        loc = []
        for loc_item_data in self.loc:
            loc_item: Union[int, str]
            loc_item = loc_item_data
            loc.append(loc_item)

        msg = self.msg

        type = self.type

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        field_dict.update(
            {
                "loc": loc,
                "msg": msg,
                "type": type,
            }
        )

        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        d = src_dict.copy()
        loc = []
        _loc = d.pop("loc")
        for loc_item_data in _loc:

            def _parse_loc_item(data: object) -> Union[int, str]:
                return cast(Union[int, str], data)

            loc_item = _parse_loc_item(loc_item_data)

            loc.append(loc_item)

        msg = d.pop("msg")

        type = d.pop("type")

        validation_error = cls(
            loc=loc,
            msg=msg,
            type=type,
        )

        validation_error.additional_properties = d
        return validation_error

    @property
    def additional_keys(self) -> List[str]:
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
1
garbage_api_client/py.typed
Normal file
@@ -0,0 +1 @@
# Marker file for PEP 561
45
garbage_api_client/types.py
Normal file
@@ -0,0 +1,45 @@
""" Contains some shared types for properties """

from http import HTTPStatus
from typing import BinaryIO, Generic, Literal, MutableMapping, Optional, Tuple, TypeVar

from attrs import define


class Unset:
    def __bool__(self) -> Literal[False]:
        return False


UNSET: Unset = Unset()

FileJsonType = Tuple[Optional[str], BinaryIO, Optional[str]]


@define
class File:
    """Contains information for file uploads"""

    payload: BinaryIO
    file_name: Optional[str] = None
    mime_type: Optional[str] = None

    def to_tuple(self) -> FileJsonType:
        """Return a tuple representation that httpx will accept for multipart/form-data"""
        return self.file_name, self.payload, self.mime_type


T = TypeVar("T")


@define
class Response(Generic[T]):
    """A response from an endpoint"""

    status_code: HTTPStatus
    content: bytes
    headers: MutableMapping[str, str]
    parsed: Optional[T]


__all__ = ["File", "Response", "FileJsonType", "Unset", "UNSET"]
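A brief sketch of why Unset is distinct from None; describe() is a hypothetical helper, not part of the generated package.

from garbage_api_client.types import UNSET, Unset


def describe(radius):
    # UNSET means "not provided at all" and is falsy, while None is an explicit null;
    # the query-string builders above drop both, but the distinction is kept in models.
    if isinstance(radius, Unset):
        return "radius omitted"
    if radius is None:
        return "radius explicitly null"
    return f"radius = {radius}"


print(describe(UNSET))  # radius omitted
print(describe(None))   # radius explicitly null
print(describe(5))      # radius = 5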