wip: functionify routers._common

Jörn-Michael Miehe 2023-10-23 23:32:25 +02:00
parent e6509d85fc
commit 78b1359603
5 changed files with 210 additions and 178 deletions

routers/_common.py (View file)

@@ -3,16 +3,19 @@ Dependables for defining Routers.
 """

 import re
+from dataclasses import dataclass
 from logging import getLogger
-from typing import Awaitable, Callable
+from typing import Awaitable, Callable, ParamSpec, TypeVar

-from fastapi import Depends, HTTPException, status
+from fastapi import HTTPException, status
 from webdav3.exceptions import RemoteResourceNotFound

-from ...core.caldav import CalDAV
-from ...core.config import Config, get_config
+from ...core.config import get_config
 from ...core.webdav import WebDAV

+# from ...core.caldav import CalDAV
+# from ...core.config import Config, get_config
+
 _logger = getLogger(__name__)
@@ -22,35 +25,42 @@ _RESPONSE_OK = {
     },
 }

-async def get_remote_path(
-    path_name: str,
-    *,
-    cfg: Config = Depends(get_config),
-) -> str:
-    return getattr(cfg, path_name)
+Params = ParamSpec("Params")
+Return = TypeVar("Return")
+
+
+@dataclass(slots=True, frozen=True)
+class Dependable[**Params, Return]:
+    func: Callable[Params, Return]
+    responses: dict
+
+
+def get_remote_path(
+    path_name: str,
+) -> Dependable[[], Awaitable[str]]:
+    async def _get_remote_path() -> str:
+        cfg = await get_config()
+        return getattr(cfg, path_name)
+
+    return Dependable(
+        func=_get_remote_path,
+        responses={**_RESPONSE_OK},
+    )


-def get_lf_responses(
-    path: str = Depends(get_remote_path),
-) -> dict:
-    return {
-        **_RESPONSE_OK,
-        status.HTTP_404_NOT_FOUND: {
-            "description": f"{path!r} not found",
-            "content": None,
-        },
-    }
-
-
-async def list_files(
-    re: re.Pattern[str],
+def list_files(
     *,
-    path: str = Depends(get_remote_path),
-) -> list[str]:
+    path_name: str,
+    re: re.Pattern[str],
+) -> Dependable[[], Awaitable[list[str]]]:
     """
     List files in remote `path` matching the RegEx `re`
     """

+    async def _list_files() -> list[str]:
+        cfg = await get_config()
+        path = getattr(cfg, path_name)
+
         try:
             return await WebDAV.list_files(path, regex=re)
@@ -61,72 +71,72 @@ async def list_files(
         )
         raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)

+    return Dependable(
+        func=_list_files,
+        responses={
+            **_RESPONSE_OK,
+            status.HTTP_404_NOT_FOUND: {
+                "description": f"{path_name!r} not found",
+                "content": None,
+            },
+        },
+    )
+

-async def list_calendar_names() -> list[str]:
-    """
-    List calendar names
-    """
-    return await CalDAV.calendars
+# async def list_calendar_names() -> list[str]:
+#     """
+#     List calendar names
+#     """
+#     return await CalDAV.calendars


-async def list_aggregate_names(
-    cfg: Config = Depends(get_config),
-) -> list[str]:
-    """
-    List aggregate calendar names
-    """
-    return list(cfg.calendar.aggregates.keys())
+# async def list_aggregate_names(
+#     cfg: Config = Depends(get_config),
+# ) -> list[str]:
+#     """
+#     List aggregate calendar names
+#     """
+#     return list(cfg.calendar.aggregates.keys())


-def get_fp_responses(
+def filter_prefix(
     src: Callable[[], Awaitable[list[str]]],
-) -> dict:
-    return {
-        **_RESPONSE_OK,
-        status.HTTP_404_NOT_FOUND: {
-            "description": f"Failure in lister {src.__name__!r}",
-            "content": None,
-        },
-    }
-
-
-async def filter_prefix(
-    src: Callable[[], Awaitable[list[str]]],
-    prefix: str = "",
-) -> list[str]:
+) -> Dependable[[str], Awaitable[list[str]]]:
     """
     Filter names from an async source `src` for names starting with a given prefix.
     """
-    return list(
-        item for item in (await src()) if item.lower().startswith(prefix.lower())
-    )

+    async def _filter_prefix(
+        prefix: str,
+    ) -> list[str]:
+        return list(
+            item for item in (await src()) if item.lower().startswith(prefix.lower())
+        )
+
+    return Dependable(
+        func=_filter_prefix,
+        responses={
+            **_RESPONSE_OK,
+            status.HTTP_404_NOT_FOUND: {
+                "description": f"Failure in lister {src.__name__!r}",
+                "content": None,
+            },
+        },
+    )


-def get_fpu_responses() -> dict:
-    return {
-        **_RESPONSE_OK,
-        status.HTTP_404_NOT_FOUND: {
-            "description": "Prefix not found",
-            "content": None,
-        },
-        status.HTTP_409_CONFLICT: {
-            "description": "Ambiguous prefix",
-            "content": None,
-        },
-    }
-
-
-async def filter_prefix_unique(
+def filter_prefix_unique(
     src: Callable[[str], Awaitable[list[str]]],
-    prefix: str = "",
-) -> str:
+) -> Dependable[[str], Awaitable[str]]:
     """
     Determines if a given prefix is unique in the list produced by the async source `src`.
    On success, produces the unique name with that prefix. Otherwise, throws a HTTPException.
     """

+    async def _filter_prefix_unique(
+        prefix: str,
+    ) -> str:
         names = await src(prefix)

         match names:
@@ -138,3 +148,18 @@ async def filter_prefix_unique(
             case _:
                 raise HTTPException(status_code=status.HTTP_409_CONFLICT)
+
+    return Dependable(
+        func=_filter_prefix_unique,
+        responses={
+            **_RESPONSE_OK,
+            status.HTTP_404_NOT_FOUND: {
+                "description": "Prefix not found",
+                "content": None,
+            },
+            status.HTTP_409_CONFLICT: {
+                "description": "Ambiguous prefix",
+                "content": None,
+            },
+        },
+    )
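
Note (not part of the commit): the pattern above packs a request-time coroutine and its OpenAPI responses dict into one Dependable object, so each router can pass dep.func to Depends() and dep.responses to the route decorator. The minimal sketch below mirrors how the file/image/text routers further down consume it. It drops the ParamSpec/TypeVar generics and replaces the config/WebDAV lookup with a stub; all names in the sketch are illustrative.

import re
from dataclasses import dataclass
from typing import Awaitable, Callable

from fastapi import APIRouter, Depends, FastAPI


@dataclass(frozen=True)
class Dependable:
    """Pairs a dependency callable with the OpenAPI `responses` it documents."""

    func: Callable[..., Awaitable[list[str]]]
    responses: dict


def list_files(*, path_name: str, re: re.Pattern[str]) -> Dependable:
    """Factory: builds a dependency closed over `path_name` and `re`."""

    async def _list_files() -> list[str]:
        # Stand-in for the config + WebDAV lookup done by the real factory.
        return [name for name in ("logo.svg", "thw.svg") if re.search(name)]

    return Dependable(func=_list_files, responses={})


_ls = list_files(path_name="file_dir", re=re.compile(r"\.svg$", flags=re.IGNORECASE))

router = APIRouter(prefix="/file", tags=["file"])


@router.get("/list", response_model=list[str], responses=_ls.responses)
async def list_all_files(names: list[str] = Depends(_ls.func)) -> list[str]:
    # FastAPI awaits `_ls.func` per request and injects its result.
    return names


app = FastAPI()
app.include_router(router)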

routers/file.py (View file)

@@ -9,7 +9,6 @@ Router "file" provides:
 import re
 from io import BytesIO
 from logging import getLogger
-from typing import Iterator

 from fastapi import APIRouter, Depends
 from fastapi.responses import StreamingResponse
@@ -17,32 +16,35 @@ from magic import Magic
 from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
 from ...core.webdav import WebDAV
-from ._common import FileNameLister, PrefixFinder, PrefixUnique
+from ._common import filter_prefix, filter_prefix_unique, get_remote_path, list_files

 _logger = getLogger(__name__)
 _magic = Magic(mime=True)

+_PATH_NAME = "file_dir"
+
 router = APIRouter(prefix="/file", tags=["file"])

-file_lister = FileNameLister(
-    path_name="file_dir",
+_ls = list_files(
+    path_name=_PATH_NAME,
     re=re.compile(
         r"[^/]$",
         flags=re.IGNORECASE,
     ),
 )

-file_finder = PrefixFinder(file_lister)
-file_unique = PrefixUnique(file_finder)
+_rp = get_remote_path(path_name=_PATH_NAME)
+_fp = filter_prefix(_ls.func)
+_fpu = filter_prefix_unique(_fp.func)


 @router.on_event("startup")
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")

-    if not webdav_ensure_path(await file_lister.remote_path):
+    remote_path = await _rp.func()
+    if not webdav_ensure_path(remote_path):
         webdav_ensure_files(
-            await file_lister.remote_path,
+            remote_path,
             "logo.svg",
             "thw.svg",
         )
@@ -51,35 +53,36 @@ async def start_router() -> None:
 @router.get(
     "/list",
     response_model=list[str],
-    responses=file_lister.responses,
+    responses=_ls.responses,
 )
-async def list_files(
-    names: Iterator[str] = Depends(file_lister),
+async def list_all_files(
+    names: list[str] = Depends(_ls.func),
 ) -> list[str]:
-    return list(names)
+    return names


 @router.get(
     "/find/{prefix}",
     response_model=list[str],
-    responses=file_finder.responses,
+    responses=_fp.responses,
 )
-async def find_files(
-    names: Iterator[str] = Depends(file_finder),
+async def find_files_by_prefix(
+    names: list[str] = Depends(_fp.func),
 ) -> list[str]:
-    return list(names)
+    return names


 @router.get(
     "/get/{prefix}",
     response_class=StreamingResponse,
-    responses=file_unique.responses,
+    responses=_fpu.responses,
 )
-async def get_file(
+async def get_file_by_prefix(
     prefix: str,
-    name: str = Depends(file_unique),
+    remote_path: str = Depends(_rp.func),
+    name: str = Depends(_fpu.func),
 ) -> StreamingResponse:
-    buffer = BytesIO(await WebDAV.read_bytes(f"{await file_lister.remote_path}/{name}"))
+    buffer = BytesIO(await WebDAV.read_bytes(f"{remote_path}/{name}"))
     mime = _magic.from_buffer(buffer.read(2048))
     buffer.seek(0)

routers/image.py (View file)

@@ -9,7 +9,6 @@ Router "image" provides:
 import re
 from io import BytesIO
 from logging import getLogger
-from typing import Iterator

 from fastapi import APIRouter, Depends
 from fastapi.responses import StreamingResponse
@@ -18,31 +17,34 @@ from PIL import Image
 from ...core.config import Config, ImageUIConfig, get_config
 from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
 from ...core.webdav import WebDAV
-from ._common import FileNameLister, PrefixFinder, PrefixUnique
+from ._common import filter_prefix, filter_prefix_unique, get_remote_path, list_files

 _logger = getLogger(__name__)

+_PATH_NAME = "image_dir"
+
 router = APIRouter(prefix="/image", tags=["image"])

-image_lister = FileNameLister(
-    path_name="image_dir",
+_ls = list_files(
+    path_name=_PATH_NAME,
     re=re.compile(
         r"\.(gif|jpe?g|tiff?|png|bmp)$",
         flags=re.IGNORECASE,
     ),
 )

-image_finder = PrefixFinder(image_lister)
-image_unique = PrefixUnique(image_finder)
+_rp = get_remote_path(path_name=_PATH_NAME)
+_fp = filter_prefix(_ls.func)
+_fpu = filter_prefix_unique(_fp.func)


 @router.on_event("startup")
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")

-    if not webdav_ensure_path(await image_lister.remote_path):
+    remote_path = await _rp.func()
+    if not webdav_ensure_path(remote_path):
         webdav_ensure_files(
-            await image_lister.remote_path,
+            remote_path,
             "img1.jpg",
             "img2.jpg",
             "img3.jpg",
@@ -52,38 +54,37 @@ async def start_router() -> None:
 @router.get(
     "/list",
     response_model=list[str],
-    responses=image_lister.responses,
+    responses=_ls.responses,
 )
-async def list_images(
-    names: Iterator[str] = Depends(image_lister),
+async def list_all_images(
+    names: list[str] = Depends(_ls.func),
 ) -> list[str]:
-    return list(names)
+    return names


 @router.get(
     "/find/{prefix}",
     response_model=list[str],
-    responses=image_finder.responses,
+    responses=_fp.responses,
 )
-async def find_images(
-    names: Iterator[str] = Depends(image_finder),
+async def find_images_by_prefix(
+    names: list[str] = Depends(_fp.func),
 ) -> list[str]:
-    return list(names)
+    return names


 @router.get(
     "/get/{prefix}",
     response_class=StreamingResponse,
-    responses=image_unique.responses,
+    responses=_fpu.responses,
 )
-async def get_image(
+async def get_image_by_prefix(
     prefix: str,
-    name: str = Depends(image_unique),
+    remote_path: str = Depends(_rp.func),
+    name: str = Depends(_fpu.func),
 ) -> StreamingResponse:
     cfg = await get_config()

-    img = Image.open(
-        BytesIO(await WebDAV.read_bytes(f"{await image_lister.remote_path}/{name}"))
-    )
+    img = Image.open(BytesIO(await WebDAV.read_bytes(f"{remote_path}/{name}")))

     img_buffer = BytesIO()
     img.save(img_buffer, **cfg.image.save_params)

routers/text.py (View file)

@@ -9,39 +9,40 @@ Router "text" provides:
 import re
 from logging import getLogger
-from typing import Iterator

 from fastapi import APIRouter, Depends
 from markdown import markdown

 from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
 from ...core.webdav import WebDAV
-from ._common import FileNameLister, PrefixFinder, PrefixUnique
+from ._common import filter_prefix, filter_prefix_unique, get_remote_path, list_files

 _logger = getLogger(__name__)

+_PATH_NAME = "text_dir"
+
 router = APIRouter(prefix="/text", tags=["text"])

-text_lister = FileNameLister(
-    path_name="text_dir",
+_ls = list_files(
+    path_name=_PATH_NAME,
     re=re.compile(
         r"\.(txt|md)$",
         flags=re.IGNORECASE,
     ),
 )

-text_finder = PrefixFinder(text_lister)
-text_unique = PrefixUnique(text_finder)
+_rp = get_remote_path(path_name=_PATH_NAME)
+_fp = filter_prefix(_ls.func)
+_fpu = filter_prefix_unique(_fp.func)


 @router.on_event("startup")
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")

-    webdav_ensure_path(await text_lister.remote_path)
+    remote_path = await _rp.func()
+    if not webdav_ensure_path(remote_path):
         webdav_ensure_files(
-            await text_lister.remote_path,
+            remote_path,
             "message.txt",
             "title.txt",
             "ticker.txt",
@@ -51,48 +52,49 @@ async def start_router() -> None:
 @router.get(
     "/list",
     response_model=list[str],
-    responses=text_lister.responses,
+    responses=_ls.responses,
 )
-async def list_texts(
-    names: Iterator[str] = Depends(text_lister),
+async def list_all_texts(
+    names: list[str] = Depends(_ls.func),
 ) -> list[str]:
-    return list(names)
+    return names


 @router.get(
     "/find/{prefix}",
     response_model=list[str],
-    responses=text_finder.responses,
+    responses=_fp.responses,
 )
-async def find_texts(
-    names: Iterator[str] = Depends(text_finder),
+async def find_texts_by_prefix(
+    names: list[str] = Depends(_fp.func),
 ) -> list[str]:
-    return list(names)
+    return names


-async def get_text_content(
-    name: str = Depends(text_unique),
+async def _get_raw_text_by_prefix(
+    remote_path: str = Depends(_rp.func),
+    name: str = Depends(_fpu.func),
 ) -> str:
-    return await WebDAV.read_str(f"{await text_lister.remote_path}/{name}")
-
-
-@router.get(
-    "/get/html/{prefix}",
-    response_model=str,
-    responses=text_unique.responses,
-)
-async def get_text(
-    text: str = Depends(get_text_content),
-) -> str:
-    return markdown(text)
+    return await WebDAV.read_str(f"{remote_path}/{name}")


 @router.get(
     "/get/raw/{prefix}",
     response_model=str,
-    responses=text_unique.responses,
+    responses=_fpu.responses,
 )
-async def get_raw_text(
-    text: str = Depends(get_text_content),
+async def get_raw_text_by_prefix(
+    text: str = Depends(_get_raw_text_by_prefix),
 ) -> str:
     return text
+
+
+@router.get(
+    "/get/html/{prefix}",
+    response_model=str,
+    responses=_fpu.responses,
+)
+async def get_html_by_prefix(
+    text: str = Depends(_get_raw_text_by_prefix),
+) -> str:
+    return markdown(text)

routers/ticker.py (View file)

@@ -15,7 +15,7 @@ from markdown import markdown
 from ...core.config import Config, TickerUIConfig, get_config
 from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
 from ...core.webdav import WebDAV
-from .text import text_lister, text_unique
+from .text import _fpu, _rp

 _logger = getLogger(__name__)
@@ -26,19 +26,20 @@ router = APIRouter(prefix="/ticker", tags=["text"])
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")

-    webdav_ensure_path(await text_lister.remote_path)
+    remote_path = await _rp.func()
+    if not webdav_ensure_path(remote_path):
         webdav_ensure_files(
-            await text_lister.remote_path,
+            remote_path,
             "ticker.txt",
         )


 async def get_ticker_lines() -> Iterator[str]:
     cfg = await get_config()
-    file_name = await text_unique(cfg.ticker.file_name)
+    file_name = await _fpu.func(cfg.ticker.file_name)
+    remote_path = await _rp.func()

-    ticker = await WebDAV.read_str(f"{await text_lister.remote_path}/{file_name}")
+    ticker = await WebDAV.read_str(f"{remote_path}/{file_name}")

     return (line.strip() for line in ticker.split("\n") if line.strip())
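
Note (not part of the commit): get_ticker_lines above also shows the second way a Dependable gets used. Outside a request handler there is no dependency injection, so the code awaits the wrapped coroutine directly via .func(...). A small self-contained sketch of that direct call, using the same simplified non-generic Dependable as in the earlier note and a stubbed prefix source; every name here is illustrative.

import asyncio
from dataclasses import dataclass
from typing import Awaitable, Callable


@dataclass(frozen=True)
class Dependable:
    func: Callable[..., Awaitable[str]]
    responses: dict


def filter_prefix_unique(src: Callable[[str], Awaitable[list[str]]]) -> Dependable:
    async def _filter_prefix_unique(prefix: str) -> str:
        names = await src(prefix)
        if len(names) != 1:
            # The real factory raises HTTPException 404/409 instead.
            raise LookupError(f"prefix {prefix!r} is missing or ambiguous")
        return names[0]

    return Dependable(func=_filter_prefix_unique, responses={})


async def _fake_find(prefix: str) -> list[str]:
    # Stand-in for the prefix lookup against WebDAV.
    return [n for n in ("ticker.txt", "title.txt") if n.startswith(prefix)]


_fpu = filter_prefix_unique(_fake_find)


async def main() -> None:
    # Same object, no Depends(): awaited directly, as get_ticker_lines does.
    print(await _fpu.func("tick"))  # -> ticker.txt


asyncio.run(main())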