wip: functionify routers._common

Jörn-Michael Miehe 2023-10-23 23:44:09 +02:00
parent 78b1359603
commit b7292af6ad
5 changed files with 26 additions and 21 deletions

View file

@@ -18,7 +18,6 @@ from ...core.webdav import WebDAV
 _logger = getLogger(__name__)
 _RESPONSE_OK = {
     status.HTTP_200_OK: {
         "description": "Operation successful",
@@ -31,13 +30,19 @@ Return = TypeVar("Return")
 @dataclass(slots=True, frozen=True)
 class Dependable[**Params, Return]:
-    func: Callable[Params, Return]
+    func: Callable[Params, Awaitable[Return]]
     responses: dict
+    async def __call__(self, *args: Params.args, **kwds: Params.kwargs) -> Return:
+        return await self.func(*args, **kwds)
+type _NDependable[Return] = Dependable[[], Return]
 def get_remote_path(
     path_name: str,
-) -> Dependable[[], Awaitable[str]]:
+) -> _NDependable[str]:
     async def _get_remote_path() -> str:
         cfg = await get_config()
         return getattr(cfg, path_name)
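
The net effect of this hunk: `Dependable` now explicitly wraps a coroutine function (`Callable[Params, Awaitable[Return]]`), is itself awaitable via `__call__`, and `_NDependable[Return]` abbreviates the common zero-argument case. Reassembled with indentation, a minimal sketch of the new shape; imports and surrounding module details are assumptions based on what the diff shows:

# Sketch only; imports and module layout are inferred from the diff context.
from collections.abc import Awaitable, Callable
from dataclasses import dataclass


@dataclass(slots=True, frozen=True)
class Dependable[**Params, Return]:
    # The wrapped coroutine function plus the FastAPI `responses` metadata
    # that routes using this dependency can advertise.
    func: Callable[Params, Awaitable[Return]]
    responses: dict

    async def __call__(self, *args: Params.args, **kwds: Params.kwargs) -> Return:
        # Awaiting the Dependable forwards to the wrapped function,
        # so callers no longer need to reach for `.func`.
        return await self.func(*args, **kwds)


# Shorthand for a Dependable that takes no arguments.
type _NDependable[Return] = Dependable[[], Return]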
@@ -52,7 +57,7 @@ def list_files(
     *,
     path_name: str,
     re: re.Pattern[str],
-) -> Dependable[[], Awaitable[list[str]]]:
+) -> _NDependable[list[str]]:
     """
     List files in remote `path` matching the RegEx `re`
     """
@@ -100,8 +105,8 @@ def list_files(
 def filter_prefix(
-    src: Callable[[], Awaitable[list[str]]],
-) -> Dependable[[str], Awaitable[list[str]]]:
+    src: _NDependable[list[str]],
+) -> Dependable[[str], list[str]]:
     """
     Filter names from an async source `src` for names starting with a given prefix.
     """
@@ -118,7 +123,7 @@ def filter_prefix(
         responses={
             **_RESPONSE_OK,
             status.HTTP_404_NOT_FOUND: {
-                "description": f"Failure in lister {src.__name__!r}",
+                "description": f"Failure in lister {src.__class__.__name__!r}",
                 "content": None,
             },
         },
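
`filter_prefix` now receives the whole `Dependable` rather than its bare `.func`, which is why the 404 description switches from `src.__name__` (a function attribute) to `src.__class__.__name__` (the wrapper's class name). A sketch of how the function plausibly reads after the change; only the signature and the `responses` block appear in the diff, so the inner body below is an assumption:

def filter_prefix(
    src: _NDependable[list[str]],
) -> Dependable[[str], list[str]]:
    """
    Filter names from an async source `src` for names starting with a given prefix.
    """

    async def _filter_prefix(prefix: str) -> list[str]:
        # Await the source Dependable directly; no more `src.func()`.
        return [name for name in await src() if name.startswith(prefix)]

    return Dependable(
        func=_filter_prefix,
        responses={
            **_RESPONSE_OK,
            status.HTTP_404_NOT_FOUND: {
                "description": f"Failure in lister {src.__class__.__name__!r}",
                "content": None,
            },
        },
    )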
@@ -126,8 +131,8 @@ def filter_prefix(
 def filter_prefix_unique(
-    src: Callable[[str], Awaitable[list[str]]],
-) -> Dependable[[str], Awaitable[str]]:
+    src: Dependable[[str], list[str]],
+) -> Dependable[[str], str]:
     """
     Determines if a given prefix is unique in the list produced by the async source `src`.

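`filter_prefix_unique` gets the same treatment: it consumes the `Dependable` produced by `filter_prefix` and yields a `Dependable[[str], str]`. The body below is an assumption based on the docstring; only the signature appears in the diff, and the `HTTPException` on a non-unique prefix plus the reuse of `src.responses` are guesses:

def filter_prefix_unique(
    src: Dependable[[str], list[str]],
) -> Dependable[[str], str]:
    """
    Determines if a given prefix is unique in the list produced by the async source `src`.
    """

    async def _filter_prefix_unique(prefix: str) -> str:
        names = await src(prefix)
        # Assumed behavior: accept the prefix only if it resolves to exactly one name.
        if len(names) != 1:
            raise HTTPException(status.HTTP_404_NOT_FOUND)
        return names[0]

    return Dependable(
        func=_filter_prefix_unique,
        # Assumption: pass the upstream response metadata through unchanged.
        responses=src.responses,
    )
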
View file

@@ -33,15 +33,15 @@ _ls = list_files(
 )
 _rp = get_remote_path(path_name=_PATH_NAME)
-_fp = filter_prefix(_ls.func)
-_fpu = filter_prefix_unique(_fp.func)
+_fp = filter_prefix(_ls)
+_fpu = filter_prefix_unique(_fp)
 @router.on_event("startup")
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")
-    remote_path = await _rp.func()
+    remote_path = await _rp()
     if not webdav_ensure_path(remote_path):
         webdav_ensure_files(
             remote_path,

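The router modules now compose the Dependables directly instead of unwrapping `.func`; the same wiring change repeats in the next two files. A condensed sketch of the pattern after this commit; `_PATH_NAME`, the regex, and everything below the remote-path lookup are placeholders or unchanged code not shown here:

# Placeholder constants; the real path name and pattern live earlier in each router module.
_ls = list_files(path_name=_PATH_NAME, re=re.compile(r"\.txt$"))
_rp = get_remote_path(path_name=_PATH_NAME)

# Dependables compose directly now; the `.func` unwrapping is gone.
_fp = filter_prefix(_ls)
_fpu = filter_prefix_unique(_fp)


@router.on_event("startup")
async def start_router() -> None:
    _logger.debug(f"{router.prefix} router starting.")

    # Awaiting the Dependable itself resolves the configured remote path;
    # the WebDAV bootstrap that follows is unchanged by this commit.
    remote_path = await _rp()
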
View file

@@ -33,15 +33,15 @@ _ls = list_files(
 )
 _rp = get_remote_path(path_name=_PATH_NAME)
-_fp = filter_prefix(_ls.func)
-_fpu = filter_prefix_unique(_fp.func)
+_fp = filter_prefix(_ls)
+_fpu = filter_prefix_unique(_fp)
 @router.on_event("startup")
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")
-    remote_path = await _rp.func()
+    remote_path = await _rp()
     if not webdav_ensure_path(remote_path):
         webdav_ensure_files(
             remote_path,

View file

@@ -31,15 +31,15 @@ _ls = list_files(
 )
 _rp = get_remote_path(path_name=_PATH_NAME)
-_fp = filter_prefix(_ls.func)
-_fpu = filter_prefix_unique(_fp.func)
+_fp = filter_prefix(_ls)
+_fpu = filter_prefix_unique(_fp)
 @router.on_event("startup")
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")
-    remote_path = await _rp.func()
+    remote_path = await _rp()
     if not webdav_ensure_path(remote_path):
         webdav_ensure_files(
             remote_path,

View file

@@ -26,7 +26,7 @@ router = APIRouter(prefix="/ticker", tags=["text"])
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")
-    remote_path = await _rp.func()
+    remote_path = await _rp()
     if not webdav_ensure_path(remote_path):
         webdav_ensure_files(
             remote_path,
@@ -36,8 +36,8 @@ async def start_router() -> None:
 async def get_ticker_lines() -> Iterator[str]:
     cfg = await get_config()
-    file_name = await _fpu.func(cfg.ticker.file_name)
-    remote_path = await _rp.func()
+    file_name = await _fpu(cfg.ticker.file_name)
+    remote_path = await _rp()
     ticker = await WebDAV.read_str(f"{remote_path}/{file_name}")
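
Inside the ticker endpoint helpers the calling convention is the same. A sketch of `get_ticker_lines` after the change; only the awaits and the WebDAV read appear in the diff, so the final line-splitting step is an assumption:

async def get_ticker_lines() -> Iterator[str]:
    cfg = await get_config()

    # Resolve the unique ticker file for the configured prefix, then the remote path,
    # by awaiting the Dependables directly.
    file_name = await _fpu(cfg.ticker.file_name)
    remote_path = await _rp()

    ticker = await WebDAV.read_str(f"{remote_path}/{file_name}")
    # Assumed: split the raw text into lines to satisfy the Iterator[str] return type.
    return iter(ticker.splitlines())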