Mirror of https://code.lenaisten.de/Lenaisten/advent22.git (synced 2024-11-23 00:03:07 +00:00)

better async webdav implementation

commit d1cde05be7, parent 5e5b2b164e
5 changed files with 137 additions and 62 deletions

@@ -84,10 +84,15 @@ async def list_images_auto() -> list[str]:
     Finde alle Bilddateien im "automatisch"-Verzeichnis
     """
 
-    return await WebDAV.list_files(
-        directory="/images_auto",
+    __DIR = "/images_auto"
+
+    return [
+        f"{__DIR}/{file}"
+        for file in await WebDAV.list_files(
+            directory=__DIR,
             regex=re.compile(r"\.(gif|jpe?g|tiff?|png|bmp)$", flags=re.IGNORECASE),
         )
+    ]
 
 
 async def load_image(file_name: str) -> Image.Image:
@@ -95,7 +100,7 @@ async def load_image(file_name: str) -> Image.Image:
     Versuche, Bild aus Datei zu laden
     """
 
-    if not await WebDAV.file_exists(file_name):
+    if not await WebDAV.exists(file_name):
         raise RuntimeError(f"DAV-File {file_name} does not exist!")
 
     return Image.open(BytesIO(await WebDAV.read_bytes(file_name)))
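
The hunks above also reflect a contract change: `WebDAV.list_files` now returns entries relative to `directory` (see the `-18,55` hunk further down), so callers such as `list_images_auto` prepend the directory themselves. Below is a minimal, self-contained sketch of that caller-side composition; the file names are made up for illustration and are not taken from the repository.

    import re

    # Hypothetical listing as now returned by WebDAV.list_files("/images_auto", ...):
    # bare names, no directory prefix.
    listing = ["01.jpg", "02.PNG", "notes.txt"]

    IMAGE_RE = re.compile(r"\.(gif|jpe?g|tiff?|png|bmp)$", flags=re.IGNORECASE)

    # list_files applies the regex filter; the caller builds the full DAV paths.
    matches = [name for name in listing if IMAGE_RE.search(name)]
    full_paths = [f"/images_auto/{name}" for name in matches]

    assert full_paths == ["/images_auto/01.jpg", "/images_auto/02.PNG"]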

@@ -20,6 +20,7 @@ class DavSettings(BaseModel):
     password: str = "password"
 
     cache_ttl: int = 60 * 30
+    cache_size: int = 1024
     config_filename: str = "config.toml"
 
     @property

@@ -1,16 +1,44 @@
-import asyncio
+import functools
+import logging
 import re
 from io import BytesIO
 
-from cache import AsyncTTL
-from cache.key import KEY
+import requests
+from asyncify import asyncify
+from cachetools import TTLCache, cachedmethod
+from cachetools.keys import hashkey
 from webdav3.client import Client as WebDAVclient
 
 from .settings import SETTINGS
 
+_logger = logging.getLogger(__name__)
+
+
+def davkey(name, _, *args, **kwargs):
+    """Return a cache key for use with cached methods."""
+
+    return hashkey(name, *args, **kwargs)
+
 
 class WebDAV:
-    _webdav_client = WebDAVclient(
+    class __WebDAVclient(WebDAVclient):
+        def execute_request(
+            self,
+            action,
+            path,
+            data=None,
+            headers_ext=None,
+        ) -> requests.Response:
+            res = super().execute_request(action, path, data, headers_ext)
+
+            # the "Content-Length" header can randomly be missing on txt files,
+            # this should fix that (probably serverside bug)
+            if action == "download" and "Content-Length" not in res.headers:
+                res.headers["Content-Length"] = str(len(res.text))
+
+            return res
+
+    _webdav_client = __WebDAVclient(
         {
             "webdav_hostname": SETTINGS.webdav.url,
             "webdav_login": SETTINGS.webdav.username,
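
A short, self-contained sketch (not from the repository) of what the new `davkey` helper is for: `cachetools.cachedmethod` passes the bound object, here the `WebDAV` class itself, as the first argument to the key function, which `davkey` discards via `_`, while `functools.partial(davkey, "<method>")` namespaces each key by method name so every method can share the single class-level cache. This assumes the cachetools 5.3 behaviour pinned in poetry.lock below.

    import functools

    from cachetools.keys import hashkey


    def davkey(name, _, *args, **kwargs):
        """Return a cache key from the method name and call arguments, ignoring cls."""
        return hashkey(name, *args, **kwargs)


    key_fn = functools.partial(davkey, "read_bytes")

    # The first positional argument (the class passed in by cachedmethod) is dropped,
    # so the key only depends on the method name and the path.
    assert key_fn(object(), "/spec.txt") == hashkey("read_bytes", "/spec.txt")

    # Different methods get different keys for the same path, so entries in the
    # shared TTLCache cannot collide.
    assert key_fn(object(), "/spec.txt") != hashkey("exists", "/spec.txt")
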
@@ -18,55 +46,60 @@ class WebDAV:
         }
     )
 
+    _cache = TTLCache(
+        ttl=SETTINGS.webdav.cache_ttl,
+        maxsize=SETTINGS.webdav.cache_size,
+    )
+
     @classmethod
-    @AsyncTTL(time_to_live=SETTINGS.webdav.cache_ttl, skip_args=1)
-    async def list_files(
+    @asyncify
+    @cachedmethod(
+        cache=lambda cls: cls._cache,
+        key=functools.partial(davkey, "list_files"),
+    )
+    def list_files(
         cls,
         directory: str = "",
         *,
         regex: re.Pattern[str] = re.compile(""),
     ) -> list[str]:
         """
-        Liste aller Dateien im Ordner `directory`, die zur RegEx `regex` passen
+        List files in directory `directory` matching RegEx `regex`
         """
 
-        loop = asyncio.get_running_loop()
-        ls = await loop.run_in_executor(
-            None,
-            cls._webdav_client.list,
-            directory,
-        )
+        _logger.debug(f"list_files {directory!r}")
+        ls = cls._webdav_client.list(directory)
 
-        return [f"{directory}/{path}" for path in ls if regex.search(path)]
+        return [path for path in ls if regex.search(path)]
 
     @classmethod
-    @AsyncTTL(time_to_live=SETTINGS.webdav.cache_ttl, skip_args=1)
-    async def file_exists(cls, path: str) -> bool:
+    @asyncify
+    @cachedmethod(
+        cache=lambda cls: cls._cache,
+        key=functools.partial(davkey, "exists"),
+    )
+    def exists(cls, path: str) -> bool:
         """
-        `True`, wenn an Pfad `path` eine Datei existiert
+        `True` iff there is a WebDAV resource at `path`
         """
 
-        loop = asyncio.get_running_loop()
-        return await loop.run_in_executor(
-            None,
-            cls._webdav_client.check,
-            path,
-        )
+        _logger.debug(f"file_exists {path!r}")
+        return cls._webdav_client.check(path)
 
     @classmethod
-    @(_rb_ttl := AsyncTTL(time_to_live=SETTINGS.webdav.cache_ttl, skip_args=1))
-    async def read_bytes(cls, path: str) -> bytes:
+    @asyncify
+    @cachedmethod(
+        cache=lambda cls: cls._cache,
+        key=functools.partial(davkey, "read_bytes"),
+    )
+    def read_bytes(cls, path: str) -> bytes:
         """
-        Datei aus Pfad `path` als bytes laden
+        Load WebDAV file from `path` as bytes
         """
 
+        _logger.debug(f"read_bytes {path!r}")
         buffer = BytesIO()
-        loop = asyncio.get_running_loop()
-        await loop.run_in_executor(
-            None,
-            cls._webdav_client.resource(path).write_to,
-            buffer,
-        )
+        cls._webdav_client.download_from(buffer, path)
         buffer.seek(0)
 
         return buffer.read()
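
The decorator stacks above read bottom-up: `cachedmethod` memoizes the blocking WebDAV call in the shared `TTLCache`, `asyncify` then exposes that cached function as an awaitable, and `classmethod` binds the class. The sketch below mirrors that ordering with a stand-in `asyncify_stub` built on `asyncio.to_thread`, since the internals of the asyncify package are not shown in this commit; `FakeDAV` and its method body are made up for illustration.

    import asyncio
    import functools

    from cachetools import TTLCache, cachedmethod
    from cachetools.keys import hashkey


    def asyncify_stub(func):
        """Stand-in for asyncify: run the wrapped blocking function in a worker thread."""
        @functools.wraps(func)
        async def wrapper(*args, **kwargs):
            return await asyncio.to_thread(func, *args, **kwargs)
        return wrapper


    def davkey(name, _, *args, **kwargs):
        return hashkey(name, *args, **kwargs)


    class FakeDAV:
        _cache = TTLCache(maxsize=8, ttl=30)
        calls = 0

        @classmethod
        @asyncify_stub
        @cachedmethod(cache=lambda cls: cls._cache, key=functools.partial(davkey, "read_bytes"))
        def read_bytes(cls, path: str) -> bytes:
            cls.calls += 1  # stands in for the blocking webdav3 download
            return f"contents of {path}".encode()


    async def main() -> None:
        assert await FakeDAV.read_bytes("/a.txt") == b"contents of /a.txt"
        assert await FakeDAV.read_bytes("/a.txt") == b"contents of /a.txt"
        assert FakeDAV.calls == 1  # second await was answered from the TTL cache


    asyncio.run(main())
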
@@ -74,37 +107,30 @@
     @classmethod
     async def read_str(cls, path: str, encoding="utf-8") -> str:
         """
-        Datei aus Pfad `path` als string laden
+        Load WebDAV file from `path` as string
         """
 
+        _logger.debug(f"read_str {path!r}")
         return (await cls.read_bytes(path)).decode(encoding=encoding).strip()
 
     @classmethod
-    async def write_bytes(cls, path: str, buffer: bytes) -> None:
+    @asyncify
+    def write_bytes(cls, path: str, buffer: bytes) -> None:
         """
-        Bytes `buffer` in Datei in Pfad `path` schreiben
+        Write bytes from `buffer` into WebDAV file at `path`
         """
 
-        loop = asyncio.get_running_loop()
-        await loop.run_in_executor(
-            None,
-            cls._webdav_client.resource(path).read_from,
-            buffer,
-        )
+        _logger.debug(f"write_bytes {path!r}")
+        cls._webdav_client.upload_to(buffer, path)
 
-        try:
-            # hack: zugehörigen Cache-Eintrag entfernen
-            # -> AsyncTTL._TTL.__contains__
-            del cls._rb_ttl.ttl[KEY((path,), {})]
-
-        except KeyError:
-            # Cache-Eintrag existierte nicht
-            pass
+        # invalidate cache entry
+        cls._cache.pop(hashkey("read_bytes", path))
 
     @classmethod
     async def write_str(cls, path: str, content: str, encoding="utf-8") -> None:
         """
-        String `content` in Datei in Pfad `path` schreiben
+        Write string from `content` into WebDAV file at `path`
         """
 
+        _logger.debug(f"write_str {path!r}")
         await cls.write_bytes(path, content.encode(encoding=encoding))
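
One behavioural detail of the new invalidation in `write_bytes`: cachetools caches follow the usual mapping semantics, so `pop` with a single argument raises `KeyError` when the entry is absent (for example when a path is written before it has ever been read), which is the case the removed try/except used to absorb; passing a default such as `None` keeps the old tolerance. A small sketch with made-up paths:

    from cachetools import TTLCache
    from cachetools.keys import hashkey

    cache = TTLCache(maxsize=4, ttl=30)
    cache[hashkey("read_bytes", "/a.txt")] = b"stale"

    # Dropping the entry for a path that was read before is fine.
    cache.pop(hashkey("read_bytes", "/a.txt"))

    # For a never-read path, pop() needs a default to avoid KeyError.
    cache.pop(hashkey("read_bytes", "/never-read.txt"), None)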

api/poetry.lock (generated, 54 lines changed)
@@ -32,13 +32,29 @@ test = ["anyio[trio]", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=
 trio = ["trio (<0.22)"]
 
 [[package]]
-name = "async-cache"
-version = "1.1.1"
-description = "An asyncio Cache"
+name = "asyncify"
+version = "0.9.2"
+description = "sync 2 async"
 optional = false
-python-versions = ">=3.3"
+python-versions = ">=3.7,<4.0"
 files = [
-    {file = "async-cache-1.1.1.tar.gz", hash = "sha256:81aa9ccd19fb06784aaf30bd5f2043dc0a23fc3e998b93d0c2c17d1af9803393"},
+    {file = "asyncify-0.9.2-py3-none-any.whl", hash = "sha256:ee7efe8ecc11f348d4f25d4d1c5fb2f56a187aaa907aea3608106359728a2cdd"},
+    {file = "asyncify-0.9.2.tar.gz", hash = "sha256:5f06016a5d805354505e98e9c009595cba7905ceb767ed7cd61bf60f2341d896"},
 ]
 
+[package.dependencies]
+funkify = ">=0.4.0,<0.5.0"
+xtyping = ">=0.5.0"
+
+[[package]]
+name = "cachetools"
+version = "5.3.2"
+description = "Extensible memoizing collections and decorators"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"},
+    {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"},
+]
+
 [[package]]
@@ -197,6 +213,17 @@ mccabe = ">=0.7.0,<0.8.0"
 pycodestyle = ">=2.11.0,<2.12.0"
 pyflakes = ">=3.1.0,<3.2.0"
 
 [[package]]
+name = "funkify"
+version = "0.4.5"
+description = "Funkify modules so that they are callable"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+    {file = "funkify-0.4.5-py3-none-any.whl", hash = "sha256:43f1e6c27263468a60ba560dfc13e6e4df57aa75376438a62f741ffc7c83cdfe"},
+    {file = "funkify-0.4.5.tar.gz", hash = "sha256:42df845f4afa63e0e66239a986d26b6572ab0b7ad600d7d6365d44d8a0cff3d5"},
+]
+
+[[package]]
 name = "h11"
 version = "0.14.0"
@@ -1106,7 +1133,22 @@ files = [
     {file = "websockets-11.0.3.tar.gz", hash = "sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016"},
 ]
 
+[[package]]
+name = "xtyping"
+version = "0.7.0"
+description = "xtyping = typing + typing_extensions"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+    {file = "xtyping-0.7.0-py3-none-any.whl", hash = "sha256:5b72b08d5b4775c1ff34a8b7bbdfaae92249aaa11c53b33f26a0a788ca209fda"},
+    {file = "xtyping-0.7.0.tar.gz", hash = "sha256:441e597b227fcb51645e33de7cb47b7b23c014ee7c487a996b312652b8cacde0"},
+]
+
+[package.dependencies]
+annotated-types = ">=0.5.0"
+typing-extensions = ">=4.4.0"
+
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.11,<3.13"
-content-hash = "5b956fd0da3635be7d6b62b220cb962ba676b0673b9d7f520f49da35c524d2c3"
+content-hash = "5fe0d4c2bffe2fed7a521efa95c416944bc413714eb69a1e13caa8dbff20d1ca"

@@ -10,7 +10,8 @@ version = "0.1.0"
 
 [tool.poetry.dependencies]
 Pillow = "^10.0.1"
-async-cache = "^1.1.1"
+asyncify = "^0.9.2"
+cachetools = "^5.3.2"
 fastapi = "^0.103.1"
 numpy = "^1.26.0"
 pydantic-settings = "^2.0.3"