Compare commits


No commits in common. "c92398118b9b973a295bffdabd4ff9fa7a595eb9" and "7fb3aa0f428a8c8ff7d4d0559cd024767efc61b1" have entirely different histories.

12 changed files with 202 additions and 269 deletions

View file

@@ -18,5 +18,4 @@
],
"python.testing.unittestEnabled": false,
"python.testing.pytestEnabled": true,
"black-formatter.importStrategy": "fromEnvironment",
}

View file

@@ -21,8 +21,8 @@ class CalDAV:
password=SETTINGS.caldav.password,
)
@classmethod
@property
@classmethod
def principal(cls) -> Principal:
"""
Gets the `Principal` object of the main CalDAV client.
@@ -31,7 +31,6 @@ class CalDAV:
_logger.debug("principal")
return cls._caldav_client.principal()
@classmethod
@property
@AsyncTTL(
time_to_live=SETTINGS.caldav.cache_ttl,
@@ -39,6 +38,7 @@
skip_args=1,
)
@asyncify
@classmethod
def calendars(cls) -> list[str]:
"""
Asynchronously lists all calendars using the main WebDAV client.
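
A note on the decorator reshuffle above: chaining `@classmethod` with `@property` was deprecated in Python 3.11 and removed in 3.13, so `principal` and `calendars` become plain (cached, asyncified) classmethods and callers now invoke them with parentheses. A minimal stdlib-only sketch of the resulting shape (the `Example` class is illustrative, not part of this changeset):

import asyncio


class Example:
    _items = ["work", "family"]

    @classmethod
    async def items(cls) -> list[str]:
        # formerly exposed as a bare attribute via @classmethod + @property;
        # now an ordinary (async) classmethod, called as Example.items()
        return list(cls._items)


print(asyncio.run(Example.items()))  # ['work', 'family']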

View file

@@ -123,7 +123,7 @@ class Settings(BaseSettings):
caldav: DAVSettings = DAVSettings()
@model_validator(mode="before")
@model_validator(mode='before')
@classmethod
def validate_dav_settings(cls, data) -> dict[str, Any]:
assert isinstance(data, dict)
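
For reference, a minimal sketch of the `@model_validator(mode="before")` pattern retained above: in pydantic v2 a "before" validator receives the raw input mapping ahead of field parsing and must return it. The field names mirror the snippet, but the defaults are illustrative and `BaseModel` stands in for `BaseSettings` to keep the sketch self-contained:

from typing import Any

from pydantic import BaseModel, model_validator


class DAVSettings(BaseModel):
    url: str = "https://dav.example.invalid"


class Settings(BaseModel):
    caldav: DAVSettings = DAVSettings()

    @model_validator(mode="before")
    @classmethod
    def validate_dav_settings(cls, data: Any) -> dict[str, Any]:
        # runs on the raw dict before any field is parsed
        assert isinstance(data, dict)
        data.setdefault("caldav", {})
        return data


print(Settings.model_validate({}))  # caldav=DAVSettings(url='https://dav.example.invalid')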

View file

@@ -7,7 +7,7 @@ This file: Main API router definition.
from fastapi import APIRouter
# from . import aggregate, calendar, file, image, misc, text, ticker
from . import calendar, file, image, misc, text, ticker
from . import file, image, misc, text, ticker
router = APIRouter(prefix="/api/v1")
@@ -18,7 +18,7 @@ router.include_router(ticker.router)
router.include_router(image.router)
router.include_router(file.router)
router.include_router(calendar.router)
# router.include_router(calendar.router)
# router.include_router(aggregate.router)
__all__ = ["router"]

View file

@@ -3,17 +3,19 @@ Dependables for defining Routers.
"""
import re
from dataclasses import dataclass, field
from dataclasses import dataclass
from logging import getLogger
from typing import Awaitable, Callable, Generic, ParamSpec, Self, TypeVar
from typing import Awaitable, Callable, Generic, ParamSpec, TypeVar
from fastapi import Depends, HTTPException, params, status
from fastapi import HTTPException, status
from webdav3.exceptions import RemoteResourceNotFound
from ...core.caldav import CalDAV
from ...core.config import get_config
from ...core.webdav import WebDAV
# from ...core.caldav import CalDAV
# from ...core.config import Config, get_config
_logger = getLogger(__name__)
_RESPONSE_OK = {
@@ -25,114 +27,50 @@ _RESPONSE_OK = {
Params = ParamSpec("Params")
Return = TypeVar("Return")
type DependableFn[**Params, Return] = Callable[Params, Awaitable[Return]]
@dataclass(slots=True, frozen=True)
class Dependable(Generic[Params, Return]):
func: DependableFn[Params, Return]
responses: dict = field(default_factory=lambda: _RESPONSE_OK.copy())
func: Callable[Params, Awaitable[Return]]
responses: dict
@dataclass(slots=True, frozen=True)
class ListManager:
lister: Dependable[[], list[str]]
filter: Dependable[[str], list[str]]
getter: Dependable[[str], str]
@classmethod
def from_lister(cls, lister: Dependable[[], list[str]]) -> Self:
async def _filter_fn(
prefix: str,
names: list[str] = Depends(lister.func),
) -> list[str]:
if isinstance(names, params.Depends):
names = await lister.func()
_logger.debug("filter %s from %s", repr(prefix), repr(names))
return [item for item in names if item.lower().startswith(prefix.lower())]
async def _getter_fn(
prefix: str,
names: list[str] = Depends(_filter_fn),
) -> str:
if isinstance(names, params.Depends):
names = await _filter_fn(prefix)
_logger.debug("get %s from %s", repr(prefix), repr(names))
match names:
case [name]:
return name
case []:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
case _:
raise HTTPException(status_code=status.HTTP_409_CONFLICT)
return cls(
lister=lister,
filter=Dependable(_filter_fn),
getter=Dependable(
func=_getter_fn,
responses={
**_RESPONSE_OK,
status.HTTP_404_NOT_FOUND: {
"description": "Prefix not found",
"content": None,
},
status.HTTP_409_CONFLICT: {
"description": "Ambiguous prefix",
"content": None,
},
},
),
)
@classmethod
def from_lister_fn(cls, lister_fn: DependableFn[[], list[str]]) -> Self:
return cls.from_lister(Dependable(lister_fn))
type _NDependable[Return] = Dependable[[], Return]
def get_remote_path(
path_name: str,
) -> DependableFn[[], str]:
) -> _NDependable[str]:
async def _get_remote_path() -> str:
cfg = await get_config()
return getattr(cfg, path_name)
return _get_remote_path
return Dependable(
func=_get_remote_path,
responses={**_RESPONSE_OK},
)
RP_FILE = get_remote_path("file_dir")
RP_IMAGE = get_remote_path("image_dir")
RP_TEXT = get_remote_path("text_dir")
def get_file_lister(
rp: DependableFn[[], str],
def list_files(
*,
path_name: str,
re: re.Pattern[str],
) -> Dependable[[], list[str]]:
) -> _NDependable[list[str]]:
"""
List files in remote `path` matching the RegEx `re`
"""
async def _list_files(
remote_path: str = Depends(rp),
) -> list[str]:
if isinstance(remote_path, params.Depends):
remote_path = await rp()
_logger.debug("list %s", repr(remote_path))
async def _list_files() -> list[str]:
cfg = await get_config()
path = getattr(cfg, path_name)
try:
return await WebDAV.list_files(remote_path, regex=re)
return await WebDAV.list_files(path, regex=re)
except RemoteResourceNotFound:
_logger.error("WebDAV path %s lost!", repr(remote_path))
_logger.error(
"WebDAV path %s lost!",
repr(path),
)
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
return Dependable(
@@ -140,35 +78,18 @@ def get_file_lister(
responses={
**_RESPONSE_OK,
status.HTTP_404_NOT_FOUND: {
"description": "Remote path not found",
"description": f"{path_name!r} not found",
"content": None,
},
},
)
LM_FILE = ListManager.from_lister(
get_file_lister(rp=RP_FILE, re=re.compile(r"[^/]$", flags=re.IGNORECASE))
)
LM_IMAGE = ListManager.from_lister(
get_file_lister(
rp=RP_IMAGE, re=re.compile(r"\.(gif|jpe?g|tiff?|png|bmp)$", flags=re.IGNORECASE)
)
)
LM_TEXT = ListManager.from_lister(
get_file_lister(rp=RP_TEXT, re=re.compile(r"\.(txt|md)$", flags=re.IGNORECASE))
)
async def list_calendar_names() -> list[str]:
"""
List calendar names
"""
return await CalDAV.calendars
LM_CALENDARS = ListManager.from_lister_fn(list_calendar_names)
# async def list_calendar_names() -> list[str]:
# """
# List calendar names
# """
# return await CalDAV.calendars
# async def list_aggregate_names(
@@ -177,8 +98,70 @@ LM_CALENDARS = ListManager.from_lister_fn(list_calendar_names)
# """
# List aggregate calendar names
# """
# return list(cfg.calendar.aggregates.keys())
# LM_AGGREGATES = ListManager.from_lister_fn(list_aggregate_names)
def filter_prefix(
src: _NDependable[list[str]],
) -> Dependable[[str], list[str]]:
"""
Filter names from an async source `src` for names starting with a given prefix.
"""
async def _filter_prefix(
prefix: str,
) -> list[str]:
return list(
item for item in (await src.func()) if item.lower().startswith(prefix.lower())
)
return Dependable(
func=_filter_prefix,
responses={
**_RESPONSE_OK,
status.HTTP_404_NOT_FOUND: {
"description": f"Failure in lister {src.__class__.__name__!r}",
"content": None,
},
},
)
def filter_prefix_unique(
src: Dependable[[str], list[str]],
) -> Dependable[[str], str]:
"""
Determines if a given prefix is unique in the list produced by the async source `src`.
On success, produces the unique name with that prefix. Otherwise, raises an HTTPException.
"""
async def _filter_prefix_unique(
prefix: str,
) -> str:
names = await src.func(prefix)
match names:
case [name]:
return name
case []:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
case _:
raise HTTPException(status_code=status.HTTP_409_CONFLICT)
return Dependable(
func=_filter_prefix_unique,
responses={
**_RESPONSE_OK,
status.HTTP_404_NOT_FOUND: {
"description": "Prefix not found",
"content": None,
},
status.HTTP_409_CONFLICT: {
"description": "Ambiguous prefix",
"content": None,
},
},
)
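
Taken together, `list_files`, `filter_prefix` and `filter_prefix_unique` form a small pipeline: list the remote names, narrow them to a prefix, then insist on exactly one match. A self-contained sketch of that chain using in-memory data instead of WebDAV (all names are illustrative):

import asyncio


async def _list_names() -> list[str]:
    # stands in for list_files / WebDAV.list_files
    return ["alpha.txt", "beta.txt", "beach.md"]


async def _filter(prefix: str) -> list[str]:
    names = await _list_names()
    return [n for n in names if n.lower().startswith(prefix.lower())]


async def _unique(prefix: str) -> str:
    match await _filter(prefix):
        case [name]:
            return name
        case []:
            raise LookupError("prefix not found")  # real code: HTTP 404
        case _:
            raise LookupError("ambiguous prefix")  # real code: HTTP 409


print(asyncio.run(_unique("al")))  # alpha.txt; "be" would raise "ambiguous prefix"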

View file

@@ -7,56 +7,54 @@ Router "calendar" provides:
"""
from logging import getLogger
from typing import Iterator
from fastapi import APIRouter, Depends
from ...core.caldav import CalDAV, CalEvent
from ...core.config import CalendarUIConfig, Config, get_config
from ._common import LM_CALENDARS
from ...config import CalendarUIConfig, Config
from ...dav_calendar import CalEvent, DavCalendar
from ._common import CalendarNameLister, PrefixFinder, PrefixUnique
_logger = getLogger(__name__)
router = APIRouter(prefix="/calendar", tags=["calendar"])
calendar_lister = CalendarNameLister()
calendar_finder = PrefixFinder(calendar_lister)
calendar_unique = PrefixUnique(calendar_finder)
@router.on_event("startup")
async def start_router() -> None:
_logger.debug(f"{router.prefix} router starting.")
@router.get(
"/list",
responses=LM_CALENDARS.lister.responses,
)
async def list_all_calendars(
names: list[str] = Depends(LM_CALENDARS.lister.func),
@router.get("/list", response_model=list[str])
async def list_calendars(
names: Iterator[str] = Depends(calendar_lister),
) -> list[str]:
return names
return list(names)
@router.get(
"/find/{prefix}",
responses=LM_CALENDARS.filter.responses,
)
@router.get("/find/{prefix}", response_model=list[str])
async def find_calendars(
names: list[str] = Depends(LM_CALENDARS.filter.func),
names: Iterator[str] = Depends(calendar_finder),
) -> list[str]:
return names
return list(names)
@router.get("/get/{prefix}", response_model=list[CalEvent])
async def get_calendar(
name: str = Depends(calendar_unique),
) -> list[CalEvent]:
return list(await DavCalendar(name).events)
@router.get(
"/get/{prefix}",
responses=LM_CALENDARS.getter.responses,
"/config",
response_model=CalendarUIConfig,
)
async def get_calendar(
name: str = Depends(LM_CALENDARS.getter.func),
cfg: Config = Depends(get_config),
) -> list[CalEvent]:
return CalDAV.get_events(name, cfg)
@router.get("/config")
async def get_ui_config(
cfg: Config = Depends(get_config),
cfg: Config = Depends(Config.get),
) -> CalendarUIConfig:
return cfg.calendar

View file

@@ -6,6 +6,7 @@ Router "file" provides:
- getting files by name prefix
"""
import re
from io import BytesIO
from logging import getLogger
@@ -15,19 +16,32 @@ from magic import Magic
from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
from ...core.webdav import WebDAV
from ._common import LM_FILE, RP_FILE
from ._common import filter_prefix, filter_prefix_unique, get_remote_path, list_files
_logger = getLogger(__name__)
_magic = Magic(mime=True)
_PATH_NAME = "file_dir"
router = APIRouter(prefix="/file", tags=["file"])
_ls = list_files(
path_name=_PATH_NAME,
re=re.compile(
r"[^/]$",
flags=re.IGNORECASE,
),
)
_rp = get_remote_path(path_name=_PATH_NAME)
_fp = filter_prefix(_ls)
_fpu = filter_prefix_unique(_fp)
@router.on_event("startup")
async def start_router() -> None:
_logger.debug(f"{router.prefix} router starting.")
remote_path = await RP_FILE()
remote_path = await _rp.func()
if not webdav_ensure_path(remote_path):
webdav_ensure_files(
remote_path,
@@ -38,32 +52,33 @@ async def start_router() -> None:
@router.get(
"/list",
responses=LM_FILE.lister.responses,
responses=_ls.responses,
)
async def list_all_files(
names: list[str] = Depends(LM_FILE.lister.func),
names: list[str] = Depends(_ls.func),
) -> list[str]:
return names
@router.get(
"/find/{prefix}",
responses=LM_FILE.filter.responses,
responses=_fp.responses,
)
async def find_files_by_prefix(
names: list[str] = Depends(LM_FILE.filter.func),
names: list[str] = Depends(_fp.func),
) -> list[str]:
return names
@router.get(
"/get/{prefix}",
responses=LM_FILE.getter.responses,
responses=_fpu.responses,
response_class=StreamingResponse,
)
async def get_file_by_prefix(
remote_path: str = Depends(RP_FILE),
name: str = Depends(LM_FILE.getter.func),
prefix: str,
remote_path: str = Depends(_rp.func),
name: str = Depends(_fpu.func),
) -> StreamingResponse:
buffer = BytesIO(await WebDAV.read_bytes(f"{remote_path}/{name}"))
@@ -73,5 +88,5 @@ async def get_file_by_prefix(
return StreamingResponse(
content=buffer,
media_type=mime,
headers={"Content-Disposition": f"filename={name}"},
headers={"Content-Disposition": f"filename={prefix}"},
)
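
The file router above (and the image and text routers that follow) wires those dependables into FastAPI in one pattern: module-level `_ls` / `_rp` / `_fp` / `_fpu` objects, with each route taking `Depends(x.func)` and advertising `responses=x.responses`. A stripped-down sketch of that wiring (the demo router and lister are illustrative, not from the diff):

from fastapi import APIRouter, Depends, FastAPI

router = APIRouter(prefix="/demo", tags=["demo"])


async def _demo_lister() -> list[str]:
    # stands in for _ls.func, which lists files via WebDAV
    return ["alpha.txt", "beta.txt"]


@router.get("/list", responses={200: {"description": "OK"}})
async def list_all(names: list[str] = Depends(_demo_lister)) -> list[str]:
    return names


app = FastAPI()
app.include_router(router)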

View file

@@ -6,6 +6,7 @@ Router "image" provides:
- getting image files in a uniform format by name prefix
"""
import re
from io import BytesIO
from logging import getLogger
@@ -16,19 +17,31 @@ from PIL import Image
from ...core.config import Config, ImageUIConfig, get_config
from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
from ...core.webdav import WebDAV
from ._common import LM_IMAGE, RP_IMAGE
from ._common import filter_prefix, filter_prefix_unique, get_remote_path, list_files
_logger = getLogger(__name__)
_PATH_NAME = "image_dir"
router = APIRouter(prefix="/image", tags=["image"])
_ls = list_files(
path_name=_PATH_NAME,
re=re.compile(
r"\.(gif|jpe?g|tiff?|png|bmp)$",
flags=re.IGNORECASE,
),
)
_rp = get_remote_path(path_name=_PATH_NAME)
_fp = filter_prefix(_ls)
_fpu = filter_prefix_unique(_fp)
@router.on_event("startup")
async def start_router() -> None:
_logger.debug(f"{router.prefix} router starting.")
remote_path = await RP_IMAGE()
remote_path = await _rp.func()
if not webdav_ensure_path(remote_path):
webdav_ensure_files(
remote_path,
@@ -40,32 +53,33 @@ async def start_router() -> None:
@router.get(
"/list",
responses=LM_IMAGE.lister.responses,
responses=_ls.responses,
)
async def list_all_images(
names: list[str] = Depends(LM_IMAGE.lister.func),
names: list[str] = Depends(_ls.func),
) -> list[str]:
return names
@router.get(
"/find/{prefix}",
responses=LM_IMAGE.filter.responses,
responses=_fp.responses,
)
async def find_images_by_prefix(
names: list[str] = Depends(LM_IMAGE.filter.func),
names: list[str] = Depends(_fp.func),
) -> list[str]:
return names
@router.get(
"/get/{prefix}",
responses=LM_IMAGE.getter.responses,
responses=_fpu.responses,
response_class=StreamingResponse,
)
async def get_image_by_prefix(
remote_path: str = Depends(RP_IMAGE),
name: str = Depends(LM_IMAGE.getter.func),
prefix: str,
remote_path: str = Depends(_rp.func),
name: str = Depends(_fpu.func),
) -> StreamingResponse:
cfg = await get_config()
img = Image.open(BytesIO(await WebDAV.read_bytes(f"{remote_path}/{name}")))
@@ -77,7 +91,7 @@ async def get_image_by_prefix(
return StreamingResponse(
content=img_buffer,
media_type="image/jpeg",
headers={"Content-Disposition": f"filename={name}.jpg"},
headers={"Content-Disposition": f"filename={prefix}.jpg"},
)

View file

@@ -7,6 +7,7 @@ Router "text" provides:
- getting text file HTML content by name prefix (using Markdown)
"""
import re
from logging import getLogger
from fastapi import APIRouter, Depends
@@ -14,19 +15,31 @@ from markdown import markdown
from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
from ...core.webdav import WebDAV
from ._common import LM_TEXT, RP_TEXT
from ._common import filter_prefix, filter_prefix_unique, get_remote_path, list_files
_logger = getLogger(__name__)
_PATH_NAME = "text_dir"
router = APIRouter(prefix="/text", tags=["text"])
_ls = list_files(
path_name=_PATH_NAME,
re=re.compile(
r"\.(txt|md)$",
flags=re.IGNORECASE,
),
)
_rp = get_remote_path(path_name=_PATH_NAME)
_fp = filter_prefix(_ls)
_fpu = filter_prefix_unique(_fp)
@router.on_event("startup")
async def start_router() -> None:
_logger.debug(f"{router.prefix} router starting.")
remote_path = await RP_TEXT()
remote_path = await _rp.func()
if not webdav_ensure_path(remote_path):
webdav_ensure_files(
remote_path,
@@ -38,34 +51,34 @@ async def start_router() -> None:
@router.get(
"/list",
responses=LM_TEXT.lister.responses,
responses=_ls.responses,
)
async def list_all_texts(
names: list[str] = Depends(LM_TEXT.lister.func),
names: list[str] = Depends(_ls.func),
) -> list[str]:
return names
@router.get(
"/find/{prefix}",
responses=LM_TEXT.filter.responses,
responses=_fp.responses,
)
async def find_texts_by_prefix(
names: list[str] = Depends(LM_TEXT.filter.func),
names: list[str] = Depends(_fp.func),
) -> list[str]:
return names
async def _get_raw_text_by_prefix(
remote_path: str = Depends(RP_TEXT),
name: str = Depends(LM_TEXT.getter.func),
remote_path: str = Depends(_rp.func),
name: str = Depends(_fpu.func),
) -> str:
return await WebDAV.read_str(f"{remote_path}/{name}")
@router.get(
"/get/raw/{prefix}",
responses=LM_TEXT.getter.responses,
responses=_fpu.responses,
)
async def get_raw_text_by_prefix(
text: str = Depends(_get_raw_text_by_prefix),
@@ -75,7 +88,7 @@ async def get_raw_text_by_prefix(
@router.get(
"/get/html/{prefix}",
responses=LM_TEXT.getter.responses,
responses=_fpu.responses,
)
async def get_html_by_prefix(
text: str = Depends(_get_raw_text_by_prefix),

View file

@@ -15,7 +15,7 @@ from markdown import markdown
from ...core.config import Config, TickerUIConfig, get_config
from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
from ...core.webdav import WebDAV
from ._common import LM_TEXT, RP_TEXT
from .text import _fpu, _rp
_logger = getLogger(__name__)
@@ -26,7 +26,7 @@ router = APIRouter(prefix="/ticker", tags=["text"])
async def start_router() -> None:
_logger.debug(f"{router.prefix} router starting.")
remote_path = await RP_TEXT()
remote_path = await _rp.func()
if not webdav_ensure_path(remote_path):
webdav_ensure_files(
remote_path,
@@ -36,8 +36,8 @@ async def start_router() -> None:
async def get_ticker_lines() -> Iterator[str]:
cfg = await get_config()
file_name = await LM_TEXT.getter.func(cfg.ticker.file_name)
remote_path = await RP_TEXT()
file_name = await _fpu.func(cfg.ticker.file_name)
remote_path = await _rp.func()
ticker = await WebDAV.read_str(f"{remote_path}/{file_name}")

api/poetry.lock generated
View file

@@ -56,46 +56,6 @@ files = [
funkify = ">=0.4.0,<0.5.0"
xtyping = ">=0.5.0"
[[package]]
name = "black"
version = "23.10.1"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.8"
files = [
{file = "black-23.10.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:ec3f8e6234c4e46ff9e16d9ae96f4ef69fa328bb4ad08198c8cee45bb1f08c69"},
{file = "black-23.10.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:1b917a2aa020ca600483a7b340c165970b26e9029067f019e3755b56e8dd5916"},
{file = "black-23.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c74de4c77b849e6359c6f01987e94873c707098322b91490d24296f66d067dc"},
{file = "black-23.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:7b4d10b0f016616a0d93d24a448100adf1699712fb7a4efd0e2c32bbb219b173"},
{file = "black-23.10.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:b15b75fc53a2fbcac8a87d3e20f69874d161beef13954747e053bca7a1ce53a0"},
{file = "black-23.10.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:e293e4c2f4a992b980032bbd62df07c1bcff82d6964d6c9496f2cd726e246ace"},
{file = "black-23.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d56124b7a61d092cb52cce34182a5280e160e6aff3137172a68c2c2c4b76bcb"},
{file = "black-23.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f157a8945a7b2d424da3335f7ace89c14a3b0625e6593d21139c2d8214d55ce"},
{file = "black-23.10.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:cfcce6f0a384d0da692119f2d72d79ed07c7159879d0bb1bb32d2e443382bf3a"},
{file = "black-23.10.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:33d40f5b06be80c1bbce17b173cda17994fbad096ce60eb22054da021bf933d1"},
{file = "black-23.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:840015166dbdfbc47992871325799fd2dc0dcf9395e401ada6d88fe11498abad"},
{file = "black-23.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:037e9b4664cafda5f025a1728c50a9e9aedb99a759c89f760bd83730e76ba884"},
{file = "black-23.10.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:7cb5936e686e782fddb1c73f8aa6f459e1ad38a6a7b0e54b403f1f05a1507ee9"},
{file = "black-23.10.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:7670242e90dc129c539e9ca17665e39a146a761e681805c54fbd86015c7c84f7"},
{file = "black-23.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed45ac9a613fb52dad3b61c8dea2ec9510bf3108d4db88422bacc7d1ba1243d"},
{file = "black-23.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:6d23d7822140e3fef190734216cefb262521789367fbdc0b3f22af6744058982"},
{file = "black-23.10.1-py3-none-any.whl", hash = "sha256:d431e6739f727bb2e0495df64a6c7a5310758e87505f5f8cde9ff6c0f2d7e4fe"},
{file = "black-23.10.1.tar.gz", hash = "sha256:1f8ce316753428ff68749c65a5f7844631aa18c8679dfd3ca9dc1a289979c258"},
]
[package.dependencies]
click = ">=8.0.0"
mypy-extensions = ">=0.4.3"
packaging = ">=22.0"
pathspec = ">=0.9.0"
platformdirs = ">=2"
[package.extras]
colorama = ["colorama (>=0.4.3)"]
d = ["aiohttp (>=3.7.4)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "caldav"
version = "1.3.6"
@@ -553,39 +513,6 @@ files = [
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
]
[[package]]
name = "mypy-extensions"
version = "1.0.0"
description = "Type system extensions for programs checked with the mypy type checker."
optional = false
python-versions = ">=3.5"
files = [
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
[[package]]
name = "packaging"
version = "23.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
{file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
{file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
]
[[package]]
name = "pathspec"
version = "0.11.2"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.7"
files = [
{file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"},
{file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"},
]
[[package]]
name = "pillow"
version = "10.1.0"
@@ -653,21 +580,6 @@ files = [
docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"]
tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"]
[[package]]
name = "platformdirs"
version = "3.11.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
optional = false
python-versions = ">=3.7"
files = [
{file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"},
{file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"},
]
[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
[[package]]
name = "pycodestyle"
version = "2.11.1"
@@ -1396,4 +1308,4 @@ typing-extensions = ">=4.4.0"
[metadata]
lock-version = "2.0"
python-versions = "^3.12"
content-hash = "b0c5ea283c2b087215c864dcdfd271d6aadddbedf9e86247db99dc412dfa8fa4"
content-hash = "367a94d4b3b395034ee18e3e383c2f6bd127ccf0f33218c6eb13c669310b5a9f"

View file

@@ -22,7 +22,6 @@ asyncify = "^0.9.2"
[tool.poetry.group.dev.dependencies]
flake8 = "^6.1.0"
flake8-isort = "^6.1.0"
black = "^23.10.1"
[build-system]
build-backend = "poetry.core.masonry.api"