Merge branch 'feature/python3.12' into develop
Commit d9479eb2bc: 67 changed files with 4423 additions and 3680 deletions

Dockerfile (72 changed lines)
@@ -2,41 +2,49 @@
 # build ui #
 ############

-FROM node:lts-alpine AS build-ui
+FROM node:lts AS build-ui

-# some dir for our code
-WORKDIR /app
-
-# install dependencies
-COPY ui/package*.json ui/yarn*.lock ./
-RUN yarn --production=false
-
-# copy code
-COPY ui .
-RUN yarn build
-
-##############
-# webservice #
-##############
-
-FROM antonapetrov/uvicorn-gunicorn:python3.9-alpine3.13 AS production
-
-RUN set -ex; \
-# prerequisites
-apk add --no-cache \
-libmagic \
-;
-
 # env setup
-ENV \
+WORKDIR /usr/local/src/ovdashboard_ui
+
+# install ovdashboard_ui dependencies
+COPY ui/package*.json ui/yarn*.lock ./
+RUN yarn install --production false
+
+# copy and build ovdashboard_ui
+COPY ui ./
+RUN yarn build --dest /tmp/ovdashboard_ui/html
+
+###########
+# web app #
+###########
+
+FROM tiangolo/uvicorn-gunicorn:python3.12-slim AS production
+
+# add prepared ovdashboard_ui
+COPY --from=build-ui /tmp/ovdashboard_ui /usr/local/share/ovdashboard_ui
+
+# env setup
+WORKDIR /usr/local/src/ovdashboard_api
+ENV \
 PRODUCTION_MODE="true" \
-APP_MODULE="ovdashboard_api:app"
+PORT="8000" \
+MODULE_NAME="ovdashboard_api.app"
+EXPOSE 8000

-# install API
-COPY api /usr/src/ovdashboard_api
+COPY api ./
 RUN set -ex; \
-pip3 --no-cache-dir install /usr/src/ovdashboard_api;
+# install libs
+export DEBIAN_FRONTEND=noninteractive; \
+apt-get update; apt-get install --yes --no-install-recommends \
+libmagic1 \
+; rm -rf /var/lib/apt/lists/*; \
+\
+# remove example app
+rm -rf /app; \
+\
+# install ovdashboard_api
+python -m pip --no-cache-dir install ./

-# install UI
-COPY --from=build-ui /app/dist /html
+# run as unprivileged user
+USER nobody
@@ -1,12 +1,28 @@
-# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.224.2/containers/python-3/.devcontainer/base.Dockerfile
+# See here for image contents: https://github.com/devcontainers/images/blob/main/src/python/.devcontainer/Dockerfile

-# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster
-ARG VARIANT="3.10-bullseye"
-FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT}
+# [Choice] Python version (use -bookworm or -bullseye variants on local arm64/Apple Silicon):
+# - 3, 3.12, 3.11, 3.10, 3.9, 3.8
+# - 3-bookworm, 3.12-bookworm, 3.11-bookworm, 3.10-bookworm, 3.9-bookworm, 3.8-bookworm
+# - 3-bullseye, 3.12-bullseye, 3.11-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye
+# - 3-buster, 3.12-buster, 3.11-buster, 3.10-buster, 3.9-buster, 3.8-buster
+ARG VARIANT="3.12-bookworm"
+FROM mcr.microsoft.com/vscode/devcontainers/python:1-${VARIANT}

-# [Choice] Node.js version: none, lts/*, 16, 14, 12, 10
+# Add "Poetry": https://python-poetry.org
+ARG POETRY_HOME="/usr/local"
+ENV POETRY_HOME="${POETRY_HOME}"
+RUN set -ex; \
+\
+curl -sSL https://install.python-poetry.org | python3 -; \
+poetry self add poetry-plugin-up;

+# [Choice] Node.js version: none, lts/*, 18, 16, 14, 12, 10
 ARG NODE_VERSION="none"
-RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi
+RUN set -ex; \
+\
+if [ "${NODE_VERSION}" != "none" ]; then \
+su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; \
+fi

 # [Optional] If your pip requirements rarely change, uncomment this section to add them to the image.
 # COPY requirements.txt /tmp/pip-tmp/
@@ -20,13 +36,10 @@ RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/
 RUN set -ex; \
 \
 export DEBIAN_FRONTEND=noninteractive; \
-apt-get update; apt-get -y install --no-install-recommends \
+apt-get update; apt-get install --yes --no-install-recommends \
 git-flow \
 libmagic1 \
 ; rm -rf /var/lib/apt/lists/*;

 # [Optional] Uncomment this line to install global node packages.
 # RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1

-USER vscode
-
-RUN curl -sSL https://install.python-poetry.org | python3 -
@@ -1,46 +1,51 @@
-// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
-// https://github.com/microsoft/vscode-dev-containers/tree/v0.224.2/containers/python-3
+// For format details, see https://aka.ms/devcontainer.json. For config options, see the
+// README at: https://github.com/devcontainers/templates/tree/main/src/python
 {
-"name": "Python 3",
+"name": "OVD API",
 "build": {
 "dockerfile": "Dockerfile",
 "context": "..",
 "args": {
-// Update 'VARIANT' to pick a Python version: 3, 3.10, 3.9, 3.8, 3.7, 3.6
+// Update 'VARIANT' to pick a Python version.
 // Append -bullseye or -buster to pin to an OS version.
 // Use -bullseye variants on local on arm64/Apple Silicon.
-"VARIANT": "3.9",
+// "VARIANT": "3.11-bullseye",
 // Options
 "NODE_VERSION": "none"
 }
 },
-// Set *default* container specific settings.json values on container create.
-"settings": {
-"terminal.integrated.defaultProfile.linux": "zsh",
-"python.defaultInterpreterPath": "/usr/local/bin/python",
-"python.linting.enabled": true,
-"python.linting.pylintEnabled": true,
-"python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
-"python.formatting.blackPath": "/usr/local/py-utils/bin/black",
-"python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
-"python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
-"python.linting.flake8Path": "/usr/local/py-utils/bin/flake8",
-"python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
-"python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
-"python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
-"python.linting.pylintPath": "/usr/local/py-utils/bin/pylint"
+"containerEnv": {
+"TZ": "Europe/Berlin"
 },
-// Add the IDs of extensions you want installed when the container is created.
-"extensions": [
-"ms-python.python",
-"ms-python.vscode-pylance",
-"be5invis.toml"
-],
+// Configure tool-specific properties.
+"customizations": {
+// Configure properties specific to VS Code.
+"vscode": {
+// Set *default* container specific settings.json values on container create.
+"settings": {
+"python.defaultInterpreterPath": "/usr/local/bin/python",
+"terminal.integrated.defaultProfile.linux": "zsh"
+},
+// Add the IDs of extensions you want installed when the container is created.
+"extensions": [
+"be5invis.toml",
+"mhutchie.git-graph",
+"ms-python.python",
+"ms-python.black-formatter",
+"ms-python.flake8",
+"ms-python.isort",
+"ms-python.vscode-pylance"
+]
+}
+},
+// Use 'postStartCommand' to run commands after the container is started.
+"postStartCommand": "poetry install"
+// Features to add to the dev container. More info: https://containers.dev/features.
+// "features": {},
 // Use 'forwardPorts' to make a list of ports inside the container available locally.
 // "forwardPorts": [],
 // Use 'postCreateCommand' to run commands after the container is created.
 // "postCreateCommand": "pip3 install --user -r requirements.txt",
-"postStartCommand": "poetry install",
-// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
-"remoteUser": "vscode"
+// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
+// "remoteUser": "root"
 }
api/.flake8 (new file, +4)
@@ -0,0 +1,4 @@
+[flake8]
+max-line-length = 80
+select = C,E,F,I,W,B,B950
+extend-ignore = E203, E501
api/.vscode/launch.json (vendored, 14 changed lines)
@@ -8,7 +8,19 @@
 "name": "Main Module",
 "type": "python",
 "request": "launch",
-"module": "ovdashboard_api",
+"module": "ovdashboard_api.main",
+"pythonArgs": [
+"-Xfrozen_modules=off",
+],
+"env": {
+"PYDEVD_DISABLE_FILE_VALIDATION": "1",
+"LOG_LEVEL": "DEBUG",
+"WEBDAV__CACHE_TTL": "30",
+"CALDAV__CACHE_TTL": "30",
+// "PRODUCTION_MODE": "true",
+// "WEBDAV__RETRIES": "5",
+// "WEBDAV__RETRY_DELAY": "1",
+},
 "justMyCode": true
 }
 ]
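Side note on the `WEBDAV__*` and `CALDAV__*` variables in this launch configuration: they rely on the nested environment delimiter that the new `core/settings.py` further down sets via `env_nested_delimiter="__"`. A minimal sketch of that mapping, assuming pydantic-settings v2; the field and class names here are illustrative, not copied from the repository:

```python
# Minimal sketch of how "WEBDAV__CACHE_TTL" style variables are consumed.
# Assumes pydantic-settings v2; field names are illustrative.
import os

from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict


class WebDAVSettings(BaseModel):
    cache_ttl: int = 600  # seconds


class Settings(BaseSettings):
    model_config = SettingsConfigDict(env_nested_delimiter="__")

    webdav: WebDAVSettings = WebDAVSettings()


os.environ["WEBDAV__CACHE_TTL"] = "30"  # same shape as in launch.json above
print(Settings().webdav.cache_ttl)      # -> 30
```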
api/.vscode/settings.json (vendored, 21 changed lines)
@@ -1,17 +1,20 @@
 {
-"python.testing.pytestArgs": [
-"tests"
-],
-"python.testing.unittestEnabled": false,
-"python.testing.pytestEnabled": true,
-"python.linting.enabled": true,
-"python.linting.pylintEnabled": false,
-"python.linting.flake8Enabled": true,
 "python.languageServer": "Pylance",
 "editor.formatOnSave": true,
+"[python]": {
+"editor.defaultFormatter": "ms-python.black-formatter"
+},
 "editor.codeActionsOnSave": {
 "source.organizeImports": true
 },
 "git.closeDiffOnOperation": true,
-"python.analysis.typeCheckingMode": "basic"
+"python.analysis.typeCheckingMode": "basic",
+"python.analysis.diagnosticMode": "workspace",
+"python.testing.pytestArgs": [
+"test"
+],
+"python.testing.unittestEnabled": false,
+"python.testing.pytestEnabled": true,
+"black-formatter.importStrategy": "fromEnvironment",
+"flake8.importStrategy": "fromEnvironment",
 }
@@ -7,43 +7,34 @@ This file: Sets up logging.

 from logging.config import dictConfig

-from pydantic import BaseModel
+from .core.settings import SETTINGS

-from .app import app
-from .settings import SETTINGS
-
-__all__ = ["app"]
-
-
-class LogConfig(BaseModel):
-    """
-    Logging configuration to be set for the server.
-    https://stackoverflow.com/a/67937084
-    """
-
-    # Logging config
-    version = 1
-    disable_existing_loggers = False
-    formatters = {
+# Logging configuration to be set for the server.
+# https://stackoverflow.com/a/67937084
+
+LOG_CONFIG = dict(
+    version=1,
+    disable_existing_loggers=False,
+    formatters={
         "default": {
             "()": "uvicorn.logging.DefaultFormatter",
             "fmt": "%(levelprefix)s [%(asctime)s] %(name)s: %(message)s",
             "datefmt": "%Y-%m-%d %H:%M:%S",
         },
-    }
-    handlers = {
+    },
+    handlers={
         "default": {
             "formatter": "default",
             "class": "logging.StreamHandler",
             "stream": "ext://sys.stderr",
         },
-    }
-    loggers = {
+    },
+    loggers={
         "ovdashboard_api": {
             "handlers": ["default"],
             "level": SETTINGS.log_level,
         },
-    }
+    },
+)

-dictConfig(LogConfig().dict())
+dictConfig(LOG_CONFIG)
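For reference, the dict-based configuration above is passed straight to `logging.config.dictConfig`. A self-contained sketch of the same pattern, with the uvicorn formatter replaced by a plain format string and the log level hard-coded instead of coming from `SETTINGS.log_level`:

```python
# Stand-alone sketch of the LOG_CONFIG pattern used above.
import logging
from logging.config import dictConfig

LOG_CONFIG = dict(
    version=1,
    disable_existing_loggers=False,
    formatters={
        "default": {
            "format": "%(levelname)s [%(asctime)s] %(name)s: %(message)s",
            "datefmt": "%Y-%m-%d %H:%M:%S",
        },
    },
    handlers={
        "default": {
            "formatter": "default",
            "class": "logging.StreamHandler",
            "stream": "ext://sys.stderr",
        },
    },
    loggers={
        "ovdashboard_api": {"handlers": ["default"], "level": "DEBUG"},
    },
)

dictConfig(LOG_CONFIG)
logging.getLogger("ovdashboard_api").info("logging configured")
```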
@@ -6,13 +6,18 @@ Main script for `ovdashboard_api` module.
 Creates the main `FastAPI` app.
 """

+import logging
+import time
+
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.staticfiles import StaticFiles

-from .dav_common import webdav_check
+from .core.settings import SETTINGS
+from .core.webdav import WebDAV
 from .routers import v1_router
-from .settings import SETTINGS
+
+_logger = logging.getLogger(__name__)

 app = FastAPI(
     title="OVDashboard API",
@@ -30,33 +35,53 @@ app = FastAPI(
     redoc_url=SETTINGS.redoc_url,
 )

-app.add_event_handler("startup", webdav_check)
-
-
-@app.on_event("startup")
-async def add_middlewares() -> None:
-    if SETTINGS.production_mode:
-        # Mount frontend in production mode
-        app.mount(
-            path="/",
-            app=StaticFiles(
-                directory=SETTINGS.ui_directory,
-                html=True,
-            ),
-            name="frontend",
-        )
-
-    else:
-        # Allow CORS in debug mode
-        app.add_middleware(
-            CORSMiddleware,
-            allow_origins=[
-                "*",
-            ],
-            allow_credentials=True,
-            allow_methods=["*"],
-            allow_headers=["*"],
-            expose_headers=["*"],
-        )
-
 app.include_router(v1_router)

+_logger.info(
+    "Production mode is %s.",
+    "enabled" if SETTINGS.production_mode else "disabled",
+)
+
+if SETTINGS.production_mode:
+    # Mount frontend in production mode
+    app.mount(
+        path="/",
+        app=StaticFiles(
+            directory=SETTINGS.ui_directory,
+            html=True,
+        ),
+        name="frontend",
+    )
+
+    def check_webdav(retry: int) -> bool | None:
+        if WebDAV._webdav_client.check(""):
+            return True
+
+        _logger.warning(
+            "WebDAV connection to %s failed (try %d of %d)",
+            repr(SETTINGS.webdav.url),
+            retry + 1,
+            SETTINGS.webdav.retries,
+        )
+
+        if retry < SETTINGS.webdav.retries:
+            _logger.debug("Retrying in %d seconds ...", SETTINGS.webdav.retry_delay)
+            time.sleep(SETTINGS.webdav.retry_delay)
+
+    if not any(check_webdav(n) for n in range(SETTINGS.webdav.retries)):
+        raise ConnectionError("WebDAV connection failed")
+
+else:
+    assert WebDAV._webdav_client.check("")
+
+    # Allow CORS in debug mode
+    app.add_middleware(
+        CORSMiddleware,
+        allow_credentials=True,
+        allow_headers=["*"],
+        allow_methods=["*"],
+        allow_origins=["*"],
+        expose_headers=["*"],
+    )
+
+_logger.debug("WebDAV connection ok.")
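A note on the retry logic added above: `any(check_webdav(n) for n in range(...))` consumes the generator lazily, so it stops on the first successful check and only sleeps between failed attempts. A minimal sketch of that short-circuit behaviour with the WebDAV call stubbed out; the stub and the numbers are illustrative:

```python
# Illustrates the lazy short-circuit used by the startup check above.
import time

RETRIES = 5
RETRY_DELAY = 1  # seconds; the real values come from SETTINGS.webdav


def check_stub(retry: int) -> bool | None:
    ok = retry >= 2  # pretend the server becomes reachable on the third try
    if ok:
        return True
    print(f"connection failed (try {retry + 1} of {RETRIES})")
    if retry < RETRIES:
        time.sleep(RETRY_DELAY)


if not any(check_stub(n) for n in range(RETRIES)):
    raise ConnectionError("WebDAV connection failed")

print("connection ok")  # reached after the third simulated attempt
```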
@@ -1,29 +0,0 @@
-"""
-Some useful helpers for working in async contexts.
-"""
-
-from asyncio import get_running_loop
-from functools import partial, wraps
-from typing import Awaitable, Callable, TypeVar
-
-RT = TypeVar("RT")
-
-
-def run_in_executor(
-    function: Callable[..., RT]
-) -> Callable[..., Awaitable[RT]]:
-    """
-    Decorator to make blocking a function call asyncio compatible.
-    https://stackoverflow.com/questions/41063331/how-to-use-asyncio-with-existing-blocking-library/
-    https://stackoverflow.com/a/53719009
-    """
-
-    @wraps(function)
-    async def wrapper(*args, **kwargs) -> RT:
-        loop = get_running_loop()
-        return await loop.run_in_executor(
-            None,
-            partial(function, *args, **kwargs),
-        )
-
-    return wrapper
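The deleted `run_in_executor` helper above is replaced in this branch by the `asyncify` decorator (see the `from asyncify import asyncify` imports in the new `core/caldav.py` and `core/webdav.py` below). A rough sketch of the intended equivalence; treat the decorator's exact behaviour as an assumption rather than a documented guarantee:

```python
# Rough equivalence between the removed helper and the asyncify decorator.
import asyncio
import time

from asyncify import asyncify  # replaces the hand-rolled run_in_executor


@asyncify
def blocking_read(delay: float) -> str:
    time.sleep(delay)  # stands in for a blocking WebDAV/CalDAV call
    return "done"


async def main() -> None:
    # the blocking call runs off the event loop, keeping it responsive
    print(await blocking_read(0.1))


asyncio.run(main())
```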
100
api/ovdashboard_api/core/caldav.py
Normal file
100
api/ovdashboard_api/core/caldav.py
Normal file
|
@ -0,0 +1,100 @@
|
||||||
|
import functools
|
||||||
|
import logging
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import cast
|
||||||
|
|
||||||
|
from asyncify import asyncify
|
||||||
|
from cachetools import TTLCache, cachedmethod
|
||||||
|
from caldav import Calendar, DAVClient, Event, Principal
|
||||||
|
from vobject.base import Component, toVName
|
||||||
|
|
||||||
|
from .calevent import CalEvent
|
||||||
|
from .config import Config
|
||||||
|
from .settings import SETTINGS
|
||||||
|
from .webdav import davkey
|
||||||
|
|
||||||
|
_logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class CalDAV:
|
||||||
|
_caldav_client = DAVClient(
|
||||||
|
url=SETTINGS.caldav.url,
|
||||||
|
username=SETTINGS.caldav.username,
|
||||||
|
password=SETTINGS.caldav.password,
|
||||||
|
)
|
||||||
|
|
||||||
|
_cache = TTLCache(
|
||||||
|
ttl=SETTINGS.caldav.cache_ttl,
|
||||||
|
maxsize=SETTINGS.caldav.cache_size,
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@property
|
||||||
|
def principal(cls) -> Principal:
|
||||||
|
"""
|
||||||
|
Gets the `Principal` object of the main CalDAV client.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return cls._caldav_client.principal()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@property
|
||||||
|
@asyncify
|
||||||
|
@cachedmethod(
|
||||||
|
cache=lambda cls: cls._cache,
|
||||||
|
key=functools.partial(davkey, "calendars"),
|
||||||
|
)
|
||||||
|
def calendars(cls) -> list[str]:
|
||||||
|
"""
|
||||||
|
Asynchroneously lists all calendars using the main WebDAV client.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_logger.debug("calendars")
|
||||||
|
return [str(cal.name) for cal in cls.principal.calendars()]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@asyncify
|
||||||
|
@cachedmethod(
|
||||||
|
cache=lambda cls: cls._cache,
|
||||||
|
key=functools.partial(davkey, "get_calendar"),
|
||||||
|
)
|
||||||
|
def get_calendar(cls, calendar_name: str) -> Calendar:
|
||||||
|
"""
|
||||||
|
Get a calendar by name using the CalDAV principal object.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return cls.principal.calendar(calendar_name)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@asyncify
|
||||||
|
@cachedmethod(
|
||||||
|
cache=lambda cls: cls._cache,
|
||||||
|
key=functools.partial(davkey, "get_events"),
|
||||||
|
)
|
||||||
|
def get_events(cls, calendar_name: str, cfg: Config) -> list[CalEvent]:
|
||||||
|
"""
|
||||||
|
Get a sorted list of events by CalDAV calendar name.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_logger.info(f"downloading {calendar_name!r} ...")
|
||||||
|
|
||||||
|
dt_start = datetime.combine(
|
||||||
|
datetime.now().date(),
|
||||||
|
datetime.min.time(),
|
||||||
|
)
|
||||||
|
dt_end = dt_start + timedelta(days=cfg.calendar.future_days)
|
||||||
|
|
||||||
|
search_result = cls.principal.calendar(calendar_name).search(
|
||||||
|
start=dt_start,
|
||||||
|
end=dt_end,
|
||||||
|
expand=True,
|
||||||
|
comp_class=Event,
|
||||||
|
split_expanded=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
vevents = []
|
||||||
|
for event in search_result:
|
||||||
|
vobject = cast(Component, event.vobject_instance)
|
||||||
|
vevents.extend(vobject.contents[toVName("vevent")])
|
||||||
|
|
||||||
|
return sorted(CalEvent.from_vevent(vevent) for vevent in vevents)
|
83
api/ovdashboard_api/core/calevent.py
Normal file
83
api/ovdashboard_api/core/calevent.py
Normal file
|
@ -0,0 +1,83 @@
|
||||||
|
"""
|
||||||
|
Definition of an asyncio compatible CalDAV calendar.
|
||||||
|
|
||||||
|
Caches events using `timed_alru_cache`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import functools
|
||||||
|
import logging
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Annotated, Self
|
||||||
|
|
||||||
|
from pydantic import BaseModel, ConfigDict, StringConstraints
|
||||||
|
from vobject.base import Component
|
||||||
|
|
||||||
|
_logger = logging.getLogger(__name__)
|
||||||
|
type StrippedStr = Annotated[str, StringConstraints(strip_whitespace=True)]
|
||||||
|
|
||||||
|
|
||||||
|
@functools.total_ordering
|
||||||
|
class CalEvent(BaseModel):
|
||||||
|
"""
|
||||||
|
A CalDAV calendar event.
|
||||||
|
|
||||||
|
Properties are to be named as in the EVENT component of
|
||||||
|
RFC5545 (iCalendar).
|
||||||
|
|
||||||
|
https://icalendar.org/iCalendar-RFC-5545/3-6-1-event-component.html
|
||||||
|
"""
|
||||||
|
|
||||||
|
model_config = ConfigDict(frozen=True)
|
||||||
|
|
||||||
|
summary: StrippedStr = ""
|
||||||
|
description: StrippedStr = ""
|
||||||
|
dtstart: datetime = datetime.now()
|
||||||
|
dtend: datetime = datetime.now()
|
||||||
|
|
||||||
|
def __lt__(self, other: Self) -> bool:
|
||||||
|
"""
|
||||||
|
Order Events by start time.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return self.dtstart < other.dtstart
|
||||||
|
|
||||||
|
def __eq__(self, other: Self) -> bool:
|
||||||
|
"""
|
||||||
|
Compare all properties.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return self.model_dump() == other.model_dump()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_vevent(cls, event: Component) -> Self:
|
||||||
|
"""
|
||||||
|
Create a CalEvent instance from a `VObject.VEvent` object.
|
||||||
|
"""
|
||||||
|
|
||||||
|
data = {}
|
||||||
|
keys = ("summary", "description", "dtstart", "dtend", "duration")
|
||||||
|
|
||||||
|
for key in keys:
|
||||||
|
try:
|
||||||
|
data[key] = event.contents[key][0].value # type: ignore
|
||||||
|
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if "dtend" not in data:
|
||||||
|
data["dtend"] = data["dtstart"]
|
||||||
|
|
||||||
|
if "duration" in data:
|
||||||
|
try:
|
||||||
|
data["dtend"] += data["duration"]
|
||||||
|
|
||||||
|
except (ValueError, TypeError, AttributeError):
|
||||||
|
_logger.warn(
|
||||||
|
"Could not add duration %s to %s",
|
||||||
|
repr(data["duration"]),
|
||||||
|
repr(data["dtstart"]),
|
||||||
|
)
|
||||||
|
|
||||||
|
del data["duration"]
|
||||||
|
|
||||||
|
return cls.model_validate(data)
|
|
@@ -2,20 +2,9 @@
 Python representation of the "config.txt" file inside the WebDAV directory.
 """

-from io import BytesIO
-from logging import getLogger
 from typing import Any

 from pydantic import BaseModel
-from tomli import loads as toml_loads
-from tomli_w import dump as toml_dump
-from webdav3.exceptions import RemoteResourceNotFound
-
-from .dav_common import caldav_list
-from .dav_file import DavFile
-from .settings import SETTINGS
-
-_logger = getLogger(__name__)


 class TickerUIConfig(BaseModel):
@@ -100,6 +89,13 @@ class Config(BaseModel):
     Main representation of "config.txt".
     """

+    def __hash__(self) -> int:
+        """
+        Fake hash (the config is always the config)
+        """
+
+        return hash("config")
+
     image_dir: str = "image"
     text_dir: str = "text"
     file_dir: str = "file"
@@ -109,31 +105,3 @@ class Config(BaseModel):
     server: ServerUIConfig = ServerUIConfig()
     ticker: TickerConfig = TickerConfig()
     calendar: CalendarConfig = CalendarConfig()
-
-    @classmethod
-    async def get(cls) -> "Config":
-        """
-        Load the configuration instance from the server using `TOML`.
-        """
-
-        dav_file = DavFile(SETTINGS.config_path)
-
-        try:
-            cfg = cls.parse_obj(
-                toml_loads(await dav_file.as_string)
-            )
-
-        except RemoteResourceNotFound:
-            _logger.warning(
-                f"Config file {SETTINGS.config_path!r} not found, creating ..."
-            )
-
-            cfg = cls()
-            cfg.calendar.aggregates["All Events"] = list(await caldav_list())
-
-            buffer = BytesIO()
-            toml_dump(cfg.dict(), buffer)
-            buffer.seek(0)
-            await dav_file.write(buffer.read())
-
-        return cfg
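The `Config.get` classmethod removed here reappears as the `get_config` dependable near the end of this diff, now built on the standard-library `tomllib` plus `tomli_w` and pydantic v2's `model_validate`/`model_dump`. A minimal round-trip sketch of that pattern; the model fields below are a reduced, illustrative subset:

```python
# TOML round-trip in the style of the new get_config() dependable.
import tomllib  # Python 3.11+

import tomli_w
from pydantic import BaseModel


class CalendarConfig(BaseModel):
    future_days: int = 14


class Config(BaseModel):
    text_dir: str = "text"
    calendar: CalendarConfig = CalendarConfig()


# serialize defaults to TOML (what gets written to config.txt on first run) ...
toml_text = tomli_w.dumps(Config().model_dump())

# ... and parse it back into a validated Config instance
cfg = Config.model_validate(tomllib.loads(toml_text))
print(cfg.calendar.future_days)  # -> 14
```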
60
api/ovdashboard_api/core/dav_common.py
Normal file
60
api/ovdashboard_api/core/dav_common.py
Normal file
|
@ -0,0 +1,60 @@
|
||||||
|
"""
|
||||||
|
Definition of WebDAV and CalDAV clients.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from os import path
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from .. import __file__ as OVD_INIT
|
||||||
|
from .webdav import WebDAV
|
||||||
|
|
||||||
|
_logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def webdav_ensure_path(remote_path: str) -> bool:
|
||||||
|
if WebDAV._webdav_client.check(remote_path):
|
||||||
|
_logger.debug(
|
||||||
|
"WebDAV path %s found.",
|
||||||
|
repr(remote_path),
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
|
||||||
|
_logger.info(
|
||||||
|
"WebDAV path %s not found, creating ...",
|
||||||
|
repr(remote_path),
|
||||||
|
)
|
||||||
|
WebDAV._webdav_client.mkdir(remote_path)
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def get_skel_path(skel_file: str) -> Path:
|
||||||
|
skel_path = path.dirname(Path(OVD_INIT).absolute())
|
||||||
|
return Path(skel_path).joinpath("skel", skel_file)
|
||||||
|
|
||||||
|
|
||||||
|
def webdav_upload_skel(remote_path: str, *skel_files: str) -> None:
|
||||||
|
for skel_file in skel_files:
|
||||||
|
_logger.debug(
|
||||||
|
"Creating WebDAV file %s ...",
|
||||||
|
repr(skel_file),
|
||||||
|
)
|
||||||
|
|
||||||
|
WebDAV._webdav_client.upload_file(
|
||||||
|
f"{remote_path}/{skel_file}",
|
||||||
|
get_skel_path(skel_file),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def webdav_ensure_files(remote_path: str, *file_names: str) -> None:
|
||||||
|
missing_files = (
|
||||||
|
file_name
|
||||||
|
for file_name in file_names
|
||||||
|
if not WebDAV._webdav_client.check(f"{remote_path}/{file_name}")
|
||||||
|
)
|
||||||
|
|
||||||
|
webdav_upload_skel(
|
||||||
|
remote_path,
|
||||||
|
*missing_files,
|
||||||
|
)
|
162
api/ovdashboard_api/core/settings.py
Normal file
162
api/ovdashboard_api/core/settings.py
Normal file
|
@ -0,0 +1,162 @@
|
||||||
|
"""
|
||||||
|
Configuration definition.
|
||||||
|
|
||||||
|
Converts per-run (environment) variables and config files into the
|
||||||
|
"python world" using `pydantic`.
|
||||||
|
|
||||||
|
Pydantic models might have convenience methods attached.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from pydantic import BaseModel, model_validator
|
||||||
|
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||||
|
|
||||||
|
|
||||||
|
class DAVSettings(BaseModel):
|
||||||
|
"""
|
||||||
|
Connection to a DAV server.
|
||||||
|
"""
|
||||||
|
|
||||||
|
protocol: str | None = None
|
||||||
|
host: str | None = None
|
||||||
|
path: str | None = None
|
||||||
|
|
||||||
|
username: str | None = None
|
||||||
|
password: str | None = None
|
||||||
|
|
||||||
|
cache_ttl: int = 60 * 10
|
||||||
|
cache_size: int = 1024
|
||||||
|
|
||||||
|
@property
|
||||||
|
def url(self) -> str:
|
||||||
|
"""
|
||||||
|
Combined DAV URL.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return f"{self.protocol}://{self.host}{self.path}"
|
||||||
|
|
||||||
|
|
||||||
|
class WebDAVSettings(DAVSettings):
|
||||||
|
"""
|
||||||
|
Connection to a WebDAV server.
|
||||||
|
"""
|
||||||
|
|
||||||
|
protocol: str = "https"
|
||||||
|
host: str = "example.com"
|
||||||
|
path: str = "/remote.php/dav"
|
||||||
|
prefix: str = "/ovdashboard"
|
||||||
|
|
||||||
|
username: str = "ovd_user"
|
||||||
|
password: str = "password"
|
||||||
|
|
||||||
|
config_filename: str = "config.txt"
|
||||||
|
|
||||||
|
disable_check: bool = False
|
||||||
|
retries: int = 20
|
||||||
|
retry_delay: int = 30
|
||||||
|
prefix: str = "/ovdashboard"
|
||||||
|
|
||||||
|
@property
|
||||||
|
def url(self) -> str:
|
||||||
|
"""
|
||||||
|
Combined DAV URL.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return f"{self.protocol}://{self.host}{self.path}{self.prefix}"
|
||||||
|
|
||||||
|
|
||||||
|
class Settings(BaseSettings):
|
||||||
|
"""
|
||||||
|
Per-run settings.
|
||||||
|
"""
|
||||||
|
|
||||||
|
model_config = SettingsConfigDict(
|
||||||
|
env_file=".env",
|
||||||
|
env_file_encoding="utf-8",
|
||||||
|
env_nested_delimiter="__",
|
||||||
|
)
|
||||||
|
|
||||||
|
#####
|
||||||
|
# general settings
|
||||||
|
#####
|
||||||
|
|
||||||
|
log_level: str = "INFO"
|
||||||
|
production_mode: bool = False
|
||||||
|
ui_directory: str = "/usr/local/share/ovdashboard_ui/html"
|
||||||
|
|
||||||
|
# doesn't even have to be reachable
|
||||||
|
ping_host: str = "1.0.0.0"
|
||||||
|
ping_port: int = 1
|
||||||
|
|
||||||
|
#####
|
||||||
|
# openapi settings
|
||||||
|
#####
|
||||||
|
|
||||||
|
def __dev_value[T](self, value: T) -> T | None:
|
||||||
|
if self.production_mode:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def openapi_url(self) -> str | None:
|
||||||
|
return self.__dev_value("/api/openapi.json")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def docs_url(self) -> str | None:
|
||||||
|
return self.__dev_value("/api/docs")
|
||||||
|
|
||||||
|
@property
|
||||||
|
def redoc_url(self) -> str | None:
|
||||||
|
return self.__dev_value("/api/redoc")
|
||||||
|
|
||||||
|
#####
|
||||||
|
# webdav settings
|
||||||
|
#####
|
||||||
|
|
||||||
|
webdav: WebDAVSettings = WebDAVSettings()
|
||||||
|
|
||||||
|
#####
|
||||||
|
# caldav settings
|
||||||
|
#####
|
||||||
|
|
||||||
|
caldav: DAVSettings = DAVSettings()
|
||||||
|
|
||||||
|
@model_validator(mode="before")
|
||||||
|
@classmethod
|
||||||
|
def validate_dav_settings(cls, data) -> dict[str, Any]:
|
||||||
|
assert isinstance(data, dict)
|
||||||
|
|
||||||
|
# ensure both settings dicts are created
|
||||||
|
for key in ("webdav", "caldav"):
|
||||||
|
if key not in data:
|
||||||
|
data[key] = {}
|
||||||
|
|
||||||
|
default_dav = DAVSettings(
|
||||||
|
protocol="https",
|
||||||
|
host="example.com",
|
||||||
|
username="ovdashboard",
|
||||||
|
password="secret",
|
||||||
|
).model_dump()
|
||||||
|
|
||||||
|
for key in default_dav:
|
||||||
|
# if "webdav" value is not specified, use default
|
||||||
|
if key not in data["webdav"] or data["webdav"][key] is None:
|
||||||
|
data["webdav"][key] = default_dav[key]
|
||||||
|
|
||||||
|
# if "caldav" value is not specified, use "webdav" value
|
||||||
|
if key not in data["caldav"] or data["caldav"][key] is None:
|
||||||
|
data["caldav"][key] = data["webdav"][key]
|
||||||
|
|
||||||
|
# add default "path"s if None
|
||||||
|
if data["webdav"]["path"] is None:
|
||||||
|
data["webdav"]["path"] = "/remote.php/webdav"
|
||||||
|
|
||||||
|
if data["caldav"]["path"] is None:
|
||||||
|
data["caldav"]["path"] = "/remote.php/dav"
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
|
||||||
|
SETTINGS = Settings()
|
137
api/ovdashboard_api/core/webdav.py
Normal file
137
api/ovdashboard_api/core/webdav.py
Normal file
|
@ -0,0 +1,137 @@
|
||||||
|
import functools
|
||||||
|
import logging
|
||||||
|
import re
|
||||||
|
from io import BytesIO
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from asyncify import asyncify
|
||||||
|
from cachetools import TTLCache, cachedmethod
|
||||||
|
from cachetools.keys import hashkey
|
||||||
|
from webdav3.client import Client as WebDAVclient
|
||||||
|
|
||||||
|
from .settings import SETTINGS
|
||||||
|
|
||||||
|
_logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def davkey(name, _, *args, **kwargs):
|
||||||
|
"""Return a cache key for use with cached methods."""
|
||||||
|
|
||||||
|
return hashkey(name, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class WebDAV:
|
||||||
|
class __WebDAVclient(WebDAVclient):
|
||||||
|
def execute_request(
|
||||||
|
self,
|
||||||
|
action,
|
||||||
|
path,
|
||||||
|
data=None,
|
||||||
|
headers_ext=None,
|
||||||
|
) -> requests.Response:
|
||||||
|
res = super().execute_request(action, path, data, headers_ext)
|
||||||
|
|
||||||
|
# the "Content-Length" header can randomly be missing on txt files,
|
||||||
|
# this should fix that (probably serverside bug)
|
||||||
|
if action == "download" and "Content-Length" not in res.headers:
|
||||||
|
res.headers["Content-Length"] = str(len(res.text))
|
||||||
|
|
||||||
|
return res
|
||||||
|
|
||||||
|
_webdav_client = __WebDAVclient(
|
||||||
|
{
|
||||||
|
"webdav_hostname": SETTINGS.webdav.url,
|
||||||
|
"webdav_login": SETTINGS.webdav.username,
|
||||||
|
"webdav_password": SETTINGS.webdav.password,
|
||||||
|
"disable_check": SETTINGS.webdav.disable_check,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
_cache = TTLCache(
|
||||||
|
ttl=SETTINGS.webdav.cache_ttl,
|
||||||
|
maxsize=SETTINGS.webdav.cache_size,
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@asyncify
|
||||||
|
@cachedmethod(
|
||||||
|
cache=lambda cls: cls._cache,
|
||||||
|
key=functools.partial(davkey, "list_files"),
|
||||||
|
)
|
||||||
|
def list_files(
|
||||||
|
cls,
|
||||||
|
directory: str = "",
|
||||||
|
*,
|
||||||
|
regex: re.Pattern[str] = re.compile(""),
|
||||||
|
) -> list[str]:
|
||||||
|
"""
|
||||||
|
List files in directory `directory` matching RegEx `regex`
|
||||||
|
"""
|
||||||
|
|
||||||
|
_logger.debug(f"list_files {directory!r}")
|
||||||
|
ls = cls._webdav_client.list(directory)
|
||||||
|
|
||||||
|
return [path for path in ls if regex.search(path)]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@asyncify
|
||||||
|
@cachedmethod(
|
||||||
|
cache=lambda cls: cls._cache,
|
||||||
|
key=functools.partial(davkey, "exists"),
|
||||||
|
)
|
||||||
|
def exists(cls, path: str) -> bool:
|
||||||
|
"""
|
||||||
|
`True` iff there is a WebDAV resource at `path`
|
||||||
|
"""
|
||||||
|
|
||||||
|
_logger.debug(f"file_exists {path!r}")
|
||||||
|
return cls._webdav_client.check(path)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@asyncify
|
||||||
|
@cachedmethod(
|
||||||
|
cache=lambda cls: cls._cache,
|
||||||
|
key=functools.partial(davkey, "read_bytes"),
|
||||||
|
)
|
||||||
|
def read_bytes(cls, path: str) -> bytes:
|
||||||
|
"""
|
||||||
|
Load WebDAV file from `path` as bytes
|
||||||
|
"""
|
||||||
|
|
||||||
|
_logger.debug(f"read_bytes {path!r}")
|
||||||
|
buffer = BytesIO()
|
||||||
|
cls._webdav_client.download_from(buffer, path)
|
||||||
|
buffer.seek(0)
|
||||||
|
|
||||||
|
return buffer.read()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def read_str(cls, path: str, encoding="utf-8") -> str:
|
||||||
|
"""
|
||||||
|
Load WebDAV file from `path` as string
|
||||||
|
"""
|
||||||
|
|
||||||
|
_logger.debug(f"read_str {path!r}")
|
||||||
|
return (await cls.read_bytes(path)).decode(encoding=encoding).strip()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
@asyncify
|
||||||
|
def write_bytes(cls, path: str, buffer: bytes) -> None:
|
||||||
|
"""
|
||||||
|
Write bytes from `buffer` into WebDAV file at `path`
|
||||||
|
"""
|
||||||
|
|
||||||
|
_logger.debug(f"write_bytes {path!r}")
|
||||||
|
cls._webdav_client.upload_to(buffer, path)
|
||||||
|
|
||||||
|
# invalidate cache entry
|
||||||
|
cls._cache.pop(hashkey("read_bytes", path))
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def write_str(cls, path: str, content: str, encoding="utf-8") -> None:
|
||||||
|
"""
|
||||||
|
Write string from `content` into WebDAV file at `path`
|
||||||
|
"""
|
||||||
|
|
||||||
|
_logger.debug(f"write_str {path!r}")
|
||||||
|
await cls.write_bytes(path, content.encode(encoding=encoding))
|
|
@ -1,213 +0,0 @@
|
||||||
"""
|
|
||||||
Definition of an asyncio compatible CalDAV calendar.
|
|
||||||
|
|
||||||
Caches events using `timed_alru_cache`.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from functools import total_ordering
|
|
||||||
from logging import getLogger
|
|
||||||
from typing import Iterator
|
|
||||||
|
|
||||||
from cache import AsyncTTL
|
|
||||||
from caldav import Calendar
|
|
||||||
from caldav.lib.error import ReportError
|
|
||||||
from pydantic import BaseModel, validator
|
|
||||||
from vobject.base import Component
|
|
||||||
|
|
||||||
from .async_helpers import run_in_executor
|
|
||||||
from .config import Config
|
|
||||||
from .dav_common import caldav_principal
|
|
||||||
from .settings import SETTINGS
|
|
||||||
|
|
||||||
_logger = getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def _string_strip(in_str: str) -> str:
|
|
||||||
"""
|
|
||||||
Wrapper for str.strip().
|
|
||||||
|
|
||||||
Used to define `pydantic` validators.
|
|
||||||
"""
|
|
||||||
return in_str.strip()
|
|
||||||
|
|
||||||
|
|
||||||
@total_ordering
|
|
||||||
class CalEvent(BaseModel):
|
|
||||||
"""
|
|
||||||
A CalDAV calendar event.
|
|
||||||
|
|
||||||
Properties are to be named as in the EVENT component of
|
|
||||||
RFC5545 (iCalendar).
|
|
||||||
|
|
||||||
https://icalendar.org/iCalendar-RFC-5545/3-6-1-event-component.html
|
|
||||||
"""
|
|
||||||
|
|
||||||
summary: str = ""
|
|
||||||
description: str = ""
|
|
||||||
dtstart: datetime = datetime.utcnow()
|
|
||||||
dtend: datetime = datetime.utcnow()
|
|
||||||
|
|
||||||
class Config:
|
|
||||||
frozen = True
|
|
||||||
|
|
||||||
def __lt__(self, other: "CalEvent") -> bool:
|
|
||||||
"""
|
|
||||||
Order Events by start time.
|
|
||||||
"""
|
|
||||||
|
|
||||||
return self.dtstart < other.dtstart
|
|
||||||
|
|
||||||
def __eq__(self, other: "CalEvent") -> bool:
|
|
||||||
"""
|
|
||||||
Compare all properties.
|
|
||||||
"""
|
|
||||||
|
|
||||||
return self.dict() == other.dict()
|
|
||||||
|
|
||||||
_validate_summary = validator(
|
|
||||||
"summary",
|
|
||||||
allow_reuse=True,
|
|
||||||
)(_string_strip)
|
|
||||||
|
|
||||||
_validate_description = validator(
|
|
||||||
"description",
|
|
||||||
allow_reuse=True,
|
|
||||||
)(_string_strip)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def from_vevent(cls, event: Component) -> "CalEvent":
|
|
||||||
"""
|
|
||||||
Create a CalEvent instance from a `VObject.VEvent` object.
|
|
||||||
"""
|
|
||||||
|
|
||||||
data = {}
|
|
||||||
keys = ("summary", "description", "dtstart", "dtend", "duration")
|
|
||||||
|
|
||||||
for key in keys:
|
|
||||||
try:
|
|
||||||
data[key] = event.contents[key][0].value # type: ignore
|
|
||||||
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if "dtend" not in data:
|
|
||||||
data["dtend"] = data["dtstart"]
|
|
||||||
|
|
||||||
if "duration" in data:
|
|
||||||
try:
|
|
||||||
data["dtend"] += data["duration"]
|
|
||||||
|
|
||||||
except (ValueError, TypeError, AttributeError):
|
|
||||||
_logger.warn(
|
|
||||||
"Could not add duration %s to %s",
|
|
||||||
repr(data["duration"]),
|
|
||||||
repr(data["dtstart"]),
|
|
||||||
)
|
|
||||||
|
|
||||||
del data["duration"]
|
|
||||||
|
|
||||||
return cls.parse_obj(data)
|
|
||||||
|
|
||||||
|
|
||||||
@AsyncTTL(time_to_live=SETTINGS.cache_time, maxsize=SETTINGS.cache_size)
|
|
||||||
async def _get_calendar(
|
|
||||||
calendar_name: str,
|
|
||||||
) -> Calendar:
|
|
||||||
"""
|
|
||||||
Get a calendar by name using the CalDAV principal object.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@run_in_executor
|
|
||||||
def _inner() -> Calendar:
|
|
||||||
return caldav_principal().calendar(calendar_name)
|
|
||||||
|
|
||||||
return await _inner()
|
|
||||||
|
|
||||||
|
|
||||||
@AsyncTTL(time_to_live=SETTINGS.cache_time, maxsize=SETTINGS.cache_size)
|
|
||||||
async def _get_calendar_events(
|
|
||||||
calendar_name: str,
|
|
||||||
) -> list[CalEvent]:
|
|
||||||
"""
|
|
||||||
Get a sorted list of events by CalDAV calendar name.
|
|
||||||
|
|
||||||
Do not return an iterator here - this result is cached and
|
|
||||||
an iterator would get consumed.
|
|
||||||
"""
|
|
||||||
|
|
||||||
cfg = await Config.get()
|
|
||||||
search_span = timedelta(days=cfg.calendar.future_days)
|
|
||||||
|
|
||||||
@run_in_executor
|
|
||||||
def _inner() -> Iterator[Component]:
|
|
||||||
"""
|
|
||||||
Get events by CalDAV calendar name.
|
|
||||||
|
|
||||||
This can return an iterator - only the outer function is
|
|
||||||
cached.
|
|
||||||
"""
|
|
||||||
_logger.info(f"downloading {calendar_name!r} ...")
|
|
||||||
|
|
||||||
calendar = caldav_principal().calendar(calendar_name)
|
|
||||||
|
|
||||||
date_start = datetime.utcnow().date()
|
|
||||||
time_min = datetime.min.time()
|
|
||||||
dt_start = datetime.combine(date_start, time_min)
|
|
||||||
dt_end = dt_start + search_span
|
|
||||||
|
|
||||||
try:
|
|
||||||
search_result = calendar.date_search(
|
|
||||||
start=dt_start,
|
|
||||||
end=dt_end,
|
|
||||||
expand=True,
|
|
||||||
verify_expand=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
except ReportError:
|
|
||||||
_logger.warning("CalDAV server does not support expanded search")
|
|
||||||
|
|
||||||
search_result = calendar.date_search(
|
|
||||||
start=dt_start,
|
|
||||||
end=dt_end,
|
|
||||||
expand=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
for event in search_result:
|
|
||||||
vobject: Component = event.vobject_instance # type: ignore
|
|
||||||
yield from vobject.vevent_list
|
|
||||||
|
|
||||||
return sorted([
|
|
||||||
CalEvent.from_vevent(vevent)
|
|
||||||
for vevent in await _inner()
|
|
||||||
])
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
|
||||||
class DavCalendar:
|
|
||||||
"""
|
|
||||||
Object representation of a CalDAV calendar.
|
|
||||||
"""
|
|
||||||
|
|
||||||
calendar_name: str
|
|
||||||
|
|
||||||
@property
|
|
||||||
async def calendar(self) -> Calendar:
|
|
||||||
"""
|
|
||||||
Calendar as `caldav` library representation.
|
|
||||||
"""
|
|
||||||
|
|
||||||
return await _get_calendar(
|
|
||||||
calendar_name=self.calendar_name,
|
|
||||||
)
|
|
||||||
|
|
||||||
@property
|
|
||||||
async def events(self) -> list[CalEvent]:
|
|
||||||
"""
|
|
||||||
Calendar events in object representation.
|
|
||||||
"""
|
|
||||||
|
|
||||||
return await _get_calendar_events(
|
|
||||||
calendar_name=self.calendar_name,
|
|
||||||
)
|
|
|
@ -1,175 +0,0 @@
|
||||||
"""
|
|
||||||
Definition of WebDAV and CalDAV clients.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from functools import lru_cache
|
|
||||||
from logging import getLogger
|
|
||||||
from os import path
|
|
||||||
from pathlib import Path
|
|
||||||
from time import sleep
|
|
||||||
from typing import Any, Iterator
|
|
||||||
|
|
||||||
from caldav import DAVClient as CalDAVclient
|
|
||||||
from caldav import Principal as CalDAVPrincipal
|
|
||||||
from webdav3.client import Client as WebDAVclient
|
|
||||||
from webdav3.client import Resource as WebDAVResource
|
|
||||||
|
|
||||||
from . import __file__ as OVD_INIT
|
|
||||||
from .async_helpers import run_in_executor
|
|
||||||
from .settings import SETTINGS
|
|
||||||
|
|
||||||
_WEBDAV_CLIENT = WebDAVclient({
|
|
||||||
"webdav_hostname": SETTINGS.webdav.url,
|
|
||||||
"webdav_login": SETTINGS.webdav.username,
|
|
||||||
"webdav_password": SETTINGS.webdav.password,
|
|
||||||
"disable_check": SETTINGS.webdav_disable_check,
|
|
||||||
})
|
|
||||||
|
|
||||||
_logger = getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def webdav_check() -> None:
|
|
||||||
"""
|
|
||||||
Checks if base resources are available.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_logger.info(
|
|
||||||
"Production mode is %s.",
|
|
||||||
"enabled" if SETTINGS.production_mode else "disabled",
|
|
||||||
)
|
|
||||||
|
|
||||||
if SETTINGS.production_mode:
|
|
||||||
for _ in range(SETTINGS.webdav_retries):
|
|
||||||
if _WEBDAV_CLIENT.check(""):
|
|
||||||
break
|
|
||||||
|
|
||||||
_logger.warning(
|
|
||||||
"Waiting for WebDAV connection to %s ...",
|
|
||||||
repr(SETTINGS.webdav.url),
|
|
||||||
)
|
|
||||||
sleep(30)
|
|
||||||
|
|
||||||
_logger.debug("WebDAV connection ok.")
|
|
||||||
|
|
||||||
elif not _WEBDAV_CLIENT.check(""):
|
|
||||||
_logger.error(
|
|
||||||
"WebDAV connection to %s FAILED!",
|
|
||||||
repr(SETTINGS.webdav.url),
|
|
||||||
)
|
|
||||||
raise ConnectionError(SETTINGS.webdav.url)
|
|
||||||
|
|
||||||
_logger.debug("WebDAV connection ok.")
|
|
||||||
|
|
||||||
if not _WEBDAV_CLIENT.check(SETTINGS.webdav_prefix):
|
|
||||||
_logger.error(
|
|
||||||
"WebDAV prefix directory %s NOT FOUND, please create it!",
|
|
||||||
repr(SETTINGS.webdav_prefix),
|
|
||||||
)
|
|
||||||
raise FileNotFoundError(SETTINGS.webdav_prefix)
|
|
||||||
|
|
||||||
_logger.debug("WebDAV prefix directory found.")
|
|
||||||
|
|
||||||
|
|
||||||
def webdav_ensure_path(remote_path: str) -> bool:
|
|
||||||
remote_path = f"{SETTINGS.webdav_prefix}/{remote_path}"
|
|
||||||
|
|
||||||
if _WEBDAV_CLIENT.check(remote_path):
|
|
||||||
_logger.debug(
|
|
||||||
"WebDAV path %s found.",
|
|
||||||
repr(remote_path),
|
|
||||||
)
|
|
||||||
return True
|
|
||||||
|
|
||||||
_logger.info(
|
|
||||||
"WebDAV path %s not found, creating ...",
|
|
||||||
repr(remote_path),
|
|
||||||
)
|
|
||||||
_WEBDAV_CLIENT.mkdir(remote_path)
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def get_skel_path(skel_file: str) -> Path:
|
|
||||||
skel_path = path.dirname(Path(OVD_INIT).absolute())
|
|
||||||
return Path(skel_path).joinpath("skel", skel_file)
|
|
||||||
|
|
||||||
|
|
||||||
def webdav_upload_skel(remote_path: str, *skel_files: str) -> None:
|
|
||||||
remote_path = f"{SETTINGS.webdav_prefix}/{remote_path}"
|
|
||||||
|
|
||||||
for skel_file in skel_files:
|
|
||||||
_logger.debug(
|
|
||||||
"Creating WebDAV file %s ...",
|
|
||||||
repr(skel_file),
|
|
||||||
)
|
|
||||||
|
|
||||||
_WEBDAV_CLIENT.upload_file(
|
|
||||||
f"{remote_path}/{skel_file}",
|
|
||||||
get_skel_path(skel_file),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def webdav_ensure_files(remote_path: str, *file_names: str) -> None:
|
|
||||||
missing_files = (
|
|
||||||
file_name
|
|
||||||
for file_name in file_names
|
|
||||||
if not _WEBDAV_CLIENT.check(path.join(
|
|
||||||
SETTINGS.webdav_prefix,
|
|
||||||
remote_path,
|
|
||||||
file_name,
|
|
||||||
))
|
|
||||||
)
|
|
||||||
|
|
||||||
webdav_upload_skel(
|
|
||||||
remote_path,
|
|
||||||
*missing_files,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@lru_cache(maxsize=SETTINGS.cache_size)
|
|
||||||
def webdav_resource(remote_path: Any) -> WebDAVResource:
|
|
||||||
"""
|
|
||||||
Gets a resource using the main WebDAV client.
|
|
||||||
"""
|
|
||||||
|
|
||||||
return _WEBDAV_CLIENT.resource(
|
|
||||||
f"{SETTINGS.webdav_prefix}/{remote_path}"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@run_in_executor
|
|
||||||
def webdav_list(remote_path: str) -> list[str]:
|
|
||||||
"""
|
|
||||||
Asynchronously lists a WebDAV path using the main WebDAV client.
|
|
||||||
"""
|
|
||||||
|
|
||||||
return _WEBDAV_CLIENT.list(
|
|
||||||
f"{SETTINGS.webdav_prefix}/{remote_path}"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
_CALDAV_CLIENT = CalDAVclient(
|
|
||||||
url=SETTINGS.caldav.url,
|
|
||||||
username=SETTINGS.caldav.username,
|
|
||||||
password=SETTINGS.caldav.password,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def caldav_principal() -> CalDAVPrincipal:
|
|
||||||
"""
|
|
||||||
Gets the `Principal` object of the main CalDAV client.
|
|
||||||
"""
|
|
||||||
|
|
||||||
return _CALDAV_CLIENT.principal()
|
|
||||||
|
|
||||||
|
|
||||||
@run_in_executor
|
|
||||||
def caldav_list() -> Iterator[str]:
|
|
||||||
"""
|
|
||||||
Asynchronously lists all calendars using the main WebDAV client.
|
|
||||||
"""
|
|
||||||
|
|
||||||
return (
|
|
||||||
str(cal.name)
|
|
||||||
for cal in caldav_principal().calendars()
|
|
||||||
)
|
|
|
@ -1,98 +0,0 @@
|
||||||
"""
|
|
||||||
Definition of an asyncio compatible WebDAV file.
|
|
||||||
|
|
||||||
Caches files using `timed_alru_cache`.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from io import BytesIO
|
|
||||||
from logging import getLogger
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from cache import AsyncTTL
|
|
||||||
from webdav3.client import Resource
|
|
||||||
|
|
||||||
from .async_helpers import run_in_executor
|
|
||||||
from .dav_common import webdav_resource
|
|
||||||
from .settings import SETTINGS
|
|
||||||
|
|
||||||
_logger = getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
@AsyncTTL(time_to_live=SETTINGS.cache_time, maxsize=SETTINGS.cache_size)
|
|
||||||
async def _get_buffer(
|
|
||||||
remote_path: Any,
|
|
||||||
) -> BytesIO:
|
|
||||||
"""
|
|
||||||
Download file contents into a new `BytesIO` object.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@run_in_executor
|
|
||||||
def _inner() -> BytesIO:
|
|
||||||
_logger.info(f"downloading {remote_path!r} ...")
|
|
||||||
|
|
||||||
resource = webdav_resource(remote_path)
|
|
||||||
buffer = BytesIO()
|
|
||||||
resource.write_to(buffer)
|
|
||||||
return buffer
|
|
||||||
|
|
||||||
return await _inner()
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
|
||||||
class DavFile:
|
|
||||||
"""
|
|
||||||
Object representation of a WebDAV file.
|
|
||||||
"""
|
|
||||||
|
|
||||||
remote_path: str
|
|
||||||
|
|
||||||
@property
|
|
||||||
def resource(self) -> Resource:
|
|
||||||
"""
|
|
||||||
WebDAV file handle.
|
|
||||||
"""
|
|
||||||
|
|
||||||
return webdav_resource(self.remote_path)
|
|
||||||
|
|
||||||
@property
|
|
||||||
async def __buffer(self) -> BytesIO:
|
|
||||||
"""
|
|
||||||
File contents as binary stream.
|
|
||||||
"""
|
|
||||||
|
|
||||||
return await _get_buffer(
|
|
||||||
remote_path=self.remote_path,
|
|
||||||
)
|
|
||||||
|
|
||||||
@property
|
|
||||||
async def as_bytes(self) -> bytes:
|
|
||||||
"""
|
|
||||||
File contents as binary data.
|
|
||||||
"""
|
|
||||||
|
|
||||||
buffer = await self.__buffer
|
|
||||||
|
|
||||||
buffer.seek(0)
|
|
||||||
return buffer.read()
|
|
||||||
|
|
||||||
@property
|
|
||||||
async def as_string(self) -> str:
|
|
||||||
"""
|
|
||||||
File contents as string.
|
|
||||||
"""
|
|
||||||
|
|
||||||
bytes = await self.as_bytes
|
|
||||||
return bytes.decode(encoding="utf-8")
|
|
||||||
|
|
||||||
async def write(self, content: bytes) -> None:
|
|
||||||
"""
|
|
||||||
Write bytes into file.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@run_in_executor
|
|
||||||
def _inner() -> None:
|
|
||||||
buffer = BytesIO(content)
|
|
||||||
self.resource.read_from(buffer)
|
|
||||||
|
|
||||||
await _inner()
|
|
|
@@ -1,6 +1,6 @@
 from uvicorn import run as uvicorn_run

-from .settings import SETTINGS
+from .core.settings import SETTINGS


 def main() -> None:
@@ -9,7 +9,7 @@ def main() -> None:
     """

     uvicorn_run(
-        app="ovdashboard_api:app",
+        app="ovdashboard_api.app:app",
         host="0.0.0.0",
         port=8000,
         reload=not SETTINGS.production_mode,
@@ -19,3 +19,5 @@ router.include_router(file.router)

 router.include_router(calendar.router)
 router.include_router(aggregate.router)
+
+__all__ = ["router"]
@@ -2,28 +2,21 @@
 Dependables for defining Routers.
 """

+import logging
 import re
-from dataclasses import dataclass
-from logging import getLogger
-from typing import Iterator, Protocol
+import tomllib

-from fastapi import HTTPException, status
+import tomli_w
+from fastapi import Depends, HTTPException, params, status
 from webdav3.exceptions import RemoteResourceNotFound

-from ...config import Config
-from ...dav_common import caldav_list, webdav_list
-
-_logger = getLogger(__name__)
-
-
-class NameLister(Protocol):
-    """
-    Can be called to create an iterator containing some names.
-    """
-
-    async def __call__(self) -> Iterator[str]:
-        ...
+from ...core.caldav import CalDAV
+from ...core.config import Config
+from ...core.settings import SETTINGS
+from ...core.webdav import WebDAV
+from ._list_manager import Dependable, DependableFn, ListManager

+_logger = logging.getLogger(__name__)

 _RESPONSE_OK = {
     status.HTTP_200_OK: {
@ -32,139 +25,117 @@ _RESPONSE_OK = {
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
async def get_config() -> Config:
|
||||||
class FileNameLister:
|
|
||||||
"""
|
"""
|
||||||
Can be called to create an iterator containing file names.
|
Load the configuration instance from the server using `TOML`.
|
||||||
|
|
||||||
File names listed will be in `remote_path` and will match the RegEx `re`.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
path_name: str
|
try:
|
||||||
re: re.Pattern[str]
|
cfg_str = await WebDAV.read_str(SETTINGS.webdav.config_filename)
|
||||||
|
cfg = Config.model_validate(tomllib.loads(cfg_str))
|
||||||
|
|
||||||
@property
|
except RemoteResourceNotFound:
|
||||||
def responses(self) -> dict:
|
_logger.warning(
|
||||||
return {
|
f"Config file {SETTINGS.webdav.config_filename!r} not found, creating ..."
|
||||||
**_RESPONSE_OK,
|
|
||||||
status.HTTP_404_NOT_FOUND: {
|
|
||||||
"description": f"{self.path_name!r} not found",
|
|
||||||
"content": None,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
@property
|
|
||||||
async def remote_path(self) -> str:
|
|
||||||
cfg = await Config.get()
|
|
||||||
|
|
||||||
return str(cfg.dict()[self.path_name])
|
|
||||||
|
|
||||||
async def __call__(self) -> Iterator[str]:
|
|
||||||
try:
|
|
||||||
file_names = await webdav_list(await self.remote_path)
|
|
||||||
|
|
||||||
return (
|
|
||||||
name
|
|
||||||
for name in file_names
|
|
||||||
if self.re.search(name)
|
|
||||||
)
|
|
||||||
|
|
||||||
except RemoteResourceNotFound:
|
|
||||||
_logger.error(
|
|
||||||
"WebDAV path %s lost!",
|
|
||||||
repr(await self.remote_path),
|
|
||||||
)
|
|
||||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
|
||||||
class CalendarNameLister:
|
|
||||||
"""
|
|
||||||
Can be called to create an iterator containing calendar names.
|
|
||||||
"""
|
|
||||||
|
|
||||||
async def __call__(self) -> Iterator[str]:
|
|
||||||
return await caldav_list()
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
|
||||||
class AggregateNameLister:
|
|
||||||
"""
|
|
||||||
Can be called to create an iterator containing aggregate calendar names.
|
|
||||||
"""
|
|
||||||
|
|
||||||
async def __call__(self) -> Iterator[str]:
|
|
||||||
cfg = await Config.get()
|
|
||||||
|
|
||||||
return iter(cfg.calendar.aggregates.keys())
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
|
||||||
class PrefixFinder:
|
|
||||||
"""
|
|
||||||
Can be called to create an iterator containing some names, all starting
|
|
||||||
with a given prefix.
|
|
||||||
|
|
||||||
All names will be taken from the list produced by the called `lister`.
|
|
||||||
"""
|
|
||||||
|
|
||||||
lister: NameLister
|
|
||||||
|
|
||||||
@property
|
|
||||||
def responses(self) -> dict:
|
|
||||||
return {
|
|
||||||
**_RESPONSE_OK,
|
|
||||||
status.HTTP_404_NOT_FOUND: {
|
|
||||||
"description": "Failure in lister " +
|
|
||||||
repr(self.lister.__class__.__name__),
|
|
||||||
"content": None,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
async def __call__(self, prefix: str) -> Iterator[str]:
|
|
||||||
return (
|
|
||||||
file_name
|
|
||||||
for file_name in (await self.lister())
|
|
||||||
if file_name.lower().startswith(prefix.lower())
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
cfg = Config()
|
||||||
|
cfg.calendar.aggregates["All Events"] = list(await CalDAV.calendars)
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
await WebDAV.write_str(
|
||||||
class PrefixUnique:
|
SETTINGS.webdav.config_filename,
|
||||||
|
tomli_w.dumps(cfg.model_dump()),
|
||||||
|
)
|
||||||
|
|
||||||
|
return cfg
|
||||||
|
|
||||||
|
|
||||||
|
def get_remote_path(
|
||||||
|
path_name: str,
|
||||||
|
) -> DependableFn[[], str]:
|
||||||
|
async def _get_remote_path() -> str:
|
||||||
|
cfg = await get_config()
|
||||||
|
return getattr(cfg, path_name)
|
||||||
|
|
||||||
|
return _get_remote_path
|
||||||
|
|
||||||
|
|
||||||
|
RP_FILE = get_remote_path("file_dir")
|
||||||
|
RP_IMAGE = get_remote_path("image_dir")
|
||||||
|
RP_TEXT = get_remote_path("text_dir")
|
||||||
|
|
||||||
|
|
||||||
|
def get_file_lister(
|
||||||
|
rp: DependableFn[[], str],
|
||||||
|
*,
|
||||||
|
re: re.Pattern[str],
|
||||||
|
) -> Dependable[[], list[str]]:
|
||||||
"""
|
"""
|
||||||
Can be called to determine if a given prefix is unique in the list
|
List files in remote `path` matching the RegEx `re`
|
||||||
produced by the called `finder`.
|
|
||||||
|
|
||||||
On success, produces the unique name with that prefix. Otherwise,
|
|
||||||
throws a HTTPException.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
finder: PrefixFinder
|
async def _list_files(
|
||||||
|
remote_path: str = Depends(rp),
|
||||||
|
) -> list[str]:
|
||||||
|
if isinstance(remote_path, params.Depends):
|
||||||
|
remote_path = await rp()
|
||||||
|
|
||||||
@property
|
_logger.debug("list %s", repr(remote_path))
|
||||||
def responses(self) -> dict:
|
|
||||||
return {
|
|
||||||
**_RESPONSE_OK,
|
|
||||||
status.HTTP_404_NOT_FOUND: {
|
|
||||||
"description": "Prefix not found",
|
|
||||||
"content": None,
|
|
||||||
},
|
|
||||||
status.HTTP_409_CONFLICT: {
|
|
||||||
"description": "Ambiguous prefix",
|
|
||||||
"content": None,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
async def __call__(self, prefix: str) -> str:
|
|
||||||
names = await self.finder(prefix)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
name = next(names)
|
return await WebDAV.list_files(remote_path, regex=re)
|
||||||
|
|
||||||
except StopIteration:
|
except RemoteResourceNotFound:
|
||||||
|
_logger.error("WebDAV path %s lost!", repr(remote_path))
|
||||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
|
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
|
||||||
|
|
||||||
if any(True for _ in names):
|
return Dependable(
|
||||||
raise HTTPException(status_code=status.HTTP_409_CONFLICT)
|
func=_list_files,
|
||||||
|
responses={
|
||||||
|
**_RESPONSE_OK,
|
||||||
|
status.HTTP_404_NOT_FOUND: {
|
||||||
|
"description": "Remote path not found",
|
||||||
|
"content": None,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
return name
|
|
||||||
|
LM_FILE = ListManager.from_lister(
|
||||||
|
get_file_lister(rp=RP_FILE, re=re.compile(r"[^/]$", flags=re.IGNORECASE))
|
||||||
|
)
|
||||||
|
LM_IMAGE = ListManager.from_lister(
|
||||||
|
get_file_lister(
|
||||||
|
rp=RP_IMAGE, re=re.compile(r"\.(gif|jpe?g|tiff?|png|bmp)$", flags=re.IGNORECASE)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
LM_TEXT = ListManager.from_lister(
|
||||||
|
get_file_lister(rp=RP_TEXT, re=re.compile(r"\.(txt|md)$", flags=re.IGNORECASE))
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def list_calendar_names() -> list[str]:
|
||||||
|
"""
|
||||||
|
List calendar names
|
||||||
|
"""
|
||||||
|
|
||||||
|
return await CalDAV.calendars
|
||||||
|
|
||||||
|
|
||||||
|
LM_CALENDAR = ListManager.from_lister_fn(list_calendar_names)
|
||||||
|
|
||||||
|
|
||||||
|
async def list_aggregate_names(
|
||||||
|
cfg: Config = Depends(get_config),
|
||||||
|
) -> list[str]:
|
||||||
|
"""
|
||||||
|
List aggregate calendar names
|
||||||
|
"""
|
||||||
|
|
||||||
|
if isinstance(cfg, params.Depends):
|
||||||
|
cfg = await get_config()
|
||||||
|
|
||||||
|
return list(cfg.calendar.aggregates.keys())
|
||||||
|
|
||||||
|
|
||||||
|
LM_AGGREGATE = ListManager.from_lister_fn(list_aggregate_names)
|
||||||
|
|
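Aside (illustration, not part of this diff): the dependables above carry an `isinstance(x, params.Depends)` guard because they are used in two ways. When FastAPI resolves a route, every `Depends(...)` default is replaced with a real value; when the same coroutine is awaited directly (for example from a startup hook), the default is still the `params.Depends` sentinel and has to be resolved by hand. A minimal sketch, with made-up names (`load_names`, `filter_names`):

from fastapi import Depends, params


async def load_names() -> list[str]:
    # stand-in for a dependency that talks to WebDAV/CalDAV
    return ["Calendar One", "Calendar Two"]


async def filter_names(
    prefix: str,
    names: list[str] = Depends(load_names),
) -> list[str]:
    if isinstance(names, params.Depends):
        # called outside the FastAPI dependency machinery:
        # the default was never resolved, so resolve it ourselves
        names = await load_names()

    return [n for n in names if n.lower().startswith(prefix.lower())]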
88 api/ovdashboard_api/routers/v1/_list_manager.py (new file)
@@ -0,0 +1,88 @@
import logging
from dataclasses import dataclass, field
from typing import Awaitable, Callable, Generic, ParamSpec, Self, TypeVar

from fastapi import Depends, HTTPException, params, status

_logger = logging.getLogger(__name__)

_RESPONSE_OK = {
    status.HTTP_200_OK: {"description": "Operation successful"},
}

Params = ParamSpec("Params")
Return = TypeVar("Return")

type DependableFn[**Params, Return] = Callable[Params, Awaitable[Return]]


@dataclass(slots=True, frozen=True)
class Dependable(Generic[Params, Return]):
    func: DependableFn[Params, Return]
    responses: dict = field(default_factory=lambda: _RESPONSE_OK.copy())


@dataclass(slots=True, frozen=True)
class ListManager:
    lister: Dependable[[], list[str]]
    filter: Dependable[[str], list[str]]
    getter: Dependable[[str], str]

    @classmethod
    def from_lister(cls, lister: Dependable[[], list[str]]) -> Self:
        async def _filter_fn(
            prefix: str,
            names: list[str] = Depends(lister.func),
        ) -> list[str]:
            """
            Filters `names` from an async source for names starting with a given prefix.
            """

            if isinstance(names, params.Depends):
                names = await lister.func()

            # _logger.debug("filter %s from %s", repr(prefix), repr(names))

            return [item for item in names if item.lower().startswith(prefix.lower())]

        async def _getter_fn(
            prefix: str,
            names: list[str] = Depends(_filter_fn),
        ) -> str:
            """
            Determines if a given prefix is unique in the async produced list `names`.

            On success, produces the unique name with that prefix. Otherwise, throws a HTTPException.
            """

            if isinstance(names, params.Depends):
                names = await _filter_fn(prefix)

            _logger.debug("get %s from %s", repr(prefix), repr(names))

            match names:
                case [name]:
                    return name

                case []:
                    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)

                case _:
                    raise HTTPException(status_code=status.HTTP_409_CONFLICT)

        return cls(
            lister=lister,
            filter=Dependable(_filter_fn),
            getter=Dependable(
                func=_getter_fn,
                responses={
                    **_RESPONSE_OK,
                    status.HTTP_404_NOT_FOUND: {"description": "Prefix not found"},
                    status.HTTP_409_CONFLICT: {"description": "Ambiguous prefix"},
                },
            ),
        )

    @classmethod
    def from_lister_fn(cls, lister_fn: DependableFn[[], list[str]]) -> Self:
        return cls.from_lister(Dependable(lister_fn))
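Aside (illustration, not part of this diff): a minimal sketch of how a ListManager built from a plain lister function plugs into a router. The "fruit" names and the import path are assumptions for the example, everything else follows the class above:

import asyncio

from fastapi import APIRouter, Depends

from ovdashboard_api.routers.v1._list_manager import ListManager  # assumed path


async def list_fruit() -> list[str]:
    return ["apple", "apricot", "banana"]


LM_FRUIT = ListManager.from_lister_fn(list_fruit)

router = APIRouter(prefix="/fruit")


@router.get("/get/{prefix}", responses=LM_FRUIT.getter.responses)
async def get_fruit(name: str = Depends(LM_FRUIT.getter.func)) -> str:
    # "/get/ap" -> 409 (ambiguous), "/get/x" -> 404, "/get/ban" -> "banana"
    return name


if __name__ == "__main__":
    # the dependables can also be awaited directly, outside FastAPI
    print(asyncio.run(LM_FRUIT.filter.func("ap")))  # ['apple', 'apricot']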
@@ -6,58 +6,57 @@ Router "aggregate" provides:
 - getting aggregate calendar events by name prefix
 """

-from logging import getLogger
-from typing import Iterator
+import logging

 from fastapi import APIRouter, Depends
-from ovdashboard_api.config import Config
-
-from ...dav_calendar import CalEvent, DavCalendar
-from ._common import AggregateNameLister, PrefixFinder, PrefixUnique
-from .calendar import calendar_unique
+from ...core.caldav import CalDAV
+from ...core.calevent import CalEvent
+from ...core.config import Config
+from ._common import LM_AGGREGATE, LM_CALENDAR, get_config

-_logger = getLogger(__name__)
+_logger = logging.getLogger(__name__)

 router = APIRouter(prefix="/aggregate", tags=["calendar"])

-aggregate_lister = AggregateNameLister()
-aggregate_finder = PrefixFinder(aggregate_lister)
-aggregate_unique = PrefixUnique(aggregate_finder)
-

 @router.on_event("startup")
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")


-@router.get("/list", response_model=list[str])
+@router.get(
+    "/list",
+    responses=LM_AGGREGATE.lister.responses,
+)
 async def list_aggregate_calendars(
-    names: Iterator[str] = Depends(aggregate_lister),
+    names: list[str] = Depends(LM_AGGREGATE.lister.func),
 ) -> list[str]:
-    return list(names)
+    return names


-@router.get("/find/{prefix}", response_model=list[str])
+@router.get(
+    "/find/{prefix}",
+    responses=LM_AGGREGATE.filter.responses,
+)
 async def find_aggregate_calendars(
-    names: Iterator[str] = Depends(aggregate_finder),
+    names: list[str] = Depends(LM_AGGREGATE.filter.func),
 ) -> list[str]:
-    return list(names)
+    return names


-@router.get("/get/{prefix}", response_model=list[CalEvent])
+@router.get(
+    "/get/{prefix}",
+    responses=LM_AGGREGATE.getter.responses,
+)
 async def get_aggregate_calendar(
-    name: str = Depends(aggregate_unique),
+    cfg: Config = Depends(get_config),
+    name: str = Depends(LM_AGGREGATE.getter.func),
 ) -> list[CalEvent]:
-    cfg = await Config.get()
-    aggregate = cfg.calendar.aggregates[name]
-
-    calendars = (
-        DavCalendar(await calendar_unique(cal_prefix))
-        for cal_prefix in aggregate
-    )
-
-    return sorted([
-        event
-        async for calendar in calendars  # type: ignore
-        for event in (await calendar.events)
-    ])
+    events: list[CalEvent] = []
+
+    for cal_prefix in cfg.calendar.aggregates[name]:
+        cal_name = await LM_CALENDAR.getter.func(cal_prefix)
+        events.extend(await CalDAV.get_events(cal_name, cfg))
+
+    return sorted(events)
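Aside (illustration, not part of this diff): `get_aggregate_calendar` flattens events from several calendars and then calls `sorted(events)`, so it relies on `CalEvent` instances being orderable. The actual `CalEvent` definition is not visible in this hunk; the sketch below uses a hypothetical stand-in ordered by start time to show the idea:

import datetime as dt
from dataclasses import dataclass, field


@dataclass(frozen=True, order=True)
class Event:  # hypothetical stand-in for CalEvent
    start: dt.datetime
    summary: str = field(compare=False)


events = [
    Event(dt.datetime(2024, 1, 2, 9, 0), "second"),
    Event(dt.datetime(2024, 1, 1, 9, 0), "first"),
]
print([e.summary for e in sorted(events)])  # ['first', 'second']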
@@ -6,55 +6,57 @@ Router "calendar" provides:
 - getting calendar events by calendar name prefix
 """

-from logging import getLogger
-from typing import Iterator
+import logging

 from fastapi import APIRouter, Depends

-from ...config import CalendarUIConfig, Config
-from ...dav_calendar import CalEvent, DavCalendar
-from ._common import CalendarNameLister, PrefixFinder, PrefixUnique
+from ...core.caldav import CalDAV, CalEvent
+from ...core.config import CalendarUIConfig, Config
+from ._common import LM_CALENDAR, get_config

-_logger = getLogger(__name__)
+_logger = logging.getLogger(__name__)

 router = APIRouter(prefix="/calendar", tags=["calendar"])

-calendar_lister = CalendarNameLister()
-calendar_finder = PrefixFinder(calendar_lister)
-calendar_unique = PrefixUnique(calendar_finder)
-

 @router.on_event("startup")
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")

(The "/list" and "/find/{prefix}" endpoints change exactly as in the aggregate router: they declare responses=LM_CALENDAR.lister.responses / .filter.responses, take names: list[str] = Depends(LM_CALENDAR.lister.func / .filter.func) and return names directly.)

-@router.get("/get/{prefix}", response_model=list[CalEvent])
+@router.get(
+    "/get/{prefix}",
+    responses=LM_CALENDAR.getter.responses,
+)
 async def get_calendar(
-    name: str = Depends(calendar_unique),
+    name: str = Depends(LM_CALENDAR.getter.func),
+    cfg: Config = Depends(get_config),
 ) -> list[CalEvent]:
-    return list(await DavCalendar(name).events)
+    return await CalDAV.get_events(name, cfg)


-@router.get(
-    "/config",
-    response_model=CalendarUIConfig,
-)
+@router.get("/config")
 async def get_ui_config(
-    cfg: Config = Depends(Config.get),
+    cfg: Config = Depends(get_config),
 ) -> CalendarUIConfig:
     return cfg.calendar
@@ -6,43 +6,31 @@ Router "file" provides:
 - getting files by name prefix
 """

-import re
+import logging
 from io import BytesIO
-from logging import getLogger
-from typing import Iterator

 from fastapi import APIRouter, Depends
 from fastapi.responses import StreamingResponse
 from magic import Magic

-from ...dav_common import webdav_ensure_files, webdav_ensure_path
-from ...dav_file import DavFile
-from ._common import FileNameLister, PrefixFinder, PrefixUnique
+from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
+from ...core.webdav import WebDAV
+from ._common import LM_FILE, RP_FILE

-_logger = getLogger(__name__)
+_logger = logging.getLogger(__name__)
 _magic = Magic(mime=True)

 router = APIRouter(prefix="/file", tags=["file"])

-file_lister = FileNameLister(
-    path_name="file_dir",
-    re=re.compile(
-        r"[^/]$",
-        flags=re.IGNORECASE,
-    ),
-)
-
-file_finder = PrefixFinder(file_lister)
-file_unique = PrefixUnique(file_finder)
-

 @router.on_event("startup")
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")

-    if not webdav_ensure_path(await file_lister.remote_path):
+    remote_path = await RP_FILE()
+    if not webdav_ensure_path(remote_path):
         webdav_ensure_files(
-            await file_lister.remote_path,
+            remote_path,
             "logo.svg",
             "thw.svg",
         )
@@ -50,37 +38,34 @@ async def start_router() -> None:

(The "/list" and "/find/{prefix}" endpoints switch to responses=LM_FILE.lister.responses / .filter.responses and names: list[str] = Depends(LM_FILE.lister.func / .filter.func); find_files is renamed find_files_by_prefix.)

 @router.get(
     "/get/{prefix}",
+    responses=LM_FILE.getter.responses,
     response_class=StreamingResponse,
-    responses=file_unique.responses,
 )
-async def get_file(
-    prefix: str,
-    name: str = Depends(file_unique),
+async def get_file_by_prefix(
+    remote_path: str = Depends(RP_FILE),
+    name: str = Depends(LM_FILE.getter.func),
 ) -> StreamingResponse:
-    dav_file = DavFile(f"{await file_lister.remote_path}/{name}")
-    buffer = BytesIO(await dav_file.as_bytes)
+    buffer = BytesIO(await WebDAV.read_bytes(f"{remote_path}/{name}"))

     mime = _magic.from_buffer(buffer.read(2048))
     buffer.seek(0)
@@ -88,7 +73,5 @@ async def get_file(
     return StreamingResponse(
         content=buffer,
         media_type=mime,
-        headers={
-            "Content-Disposition": f"filename={prefix}"
-        },
+        headers={"Content-Disposition": f"filename={name}"},
     )
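Aside (illustration, not part of this diff): a rough client-side sketch of the endpoint above. The base URL and the "/api/v1" prefix are assumptions (they are not visible in this hunk); the headers are the ones set by get_file_by_prefix:

import httpx

with httpx.Client(base_url="http://localhost:8000/api/v1") as client:
    resp = client.get("/file/get/logo")  # "logo" is a file name prefix
    resp.raise_for_status()

    print(resp.headers["content-type"])         # MIME type sniffed via python-magic
    print(resp.headers["content-disposition"])  # e.g. filename=logo.svg
    with open("logo.svg", "wb") as fp:
        fp.write(resp.content)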
@@ -6,43 +6,31 @@ Router "image" provides:
 - getting image files in a uniform format by name prefix
 """

-import re
+import logging
 from io import BytesIO
-from logging import getLogger
-from typing import Iterator

 from fastapi import APIRouter, Depends
 from fastapi.responses import StreamingResponse
 from PIL import Image

-from ...config import Config, ImageUIConfig
-from ...dav_common import webdav_ensure_files, webdav_ensure_path
-from ...dav_file import DavFile
-from ._common import FileNameLister, PrefixFinder, PrefixUnique
+from ...core.config import Config, ImageUIConfig
+from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
+from ...core.webdav import WebDAV
+from ._common import LM_IMAGE, RP_IMAGE, get_config

-_logger = getLogger(__name__)
+_logger = logging.getLogger(__name__)

 router = APIRouter(prefix="/image", tags=["image"])

-image_lister = FileNameLister(
-    path_name="image_dir",
-    re=re.compile(
-        r"\.(gif|jpe?g|tiff?|png|bmp)$",
-        flags=re.IGNORECASE,
-    ),
-)
-
-image_finder = PrefixFinder(image_lister)
-image_unique = PrefixUnique(image_finder)
-

 @router.on_event("startup")
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")

-    if not webdav_ensure_path(await image_lister.remote_path):
+    remote_path = await RP_IMAGE()
+    if not webdav_ensure_path(remote_path):
         webdav_ensure_files(
-            await image_lister.remote_path,
+            remote_path,
             "img1.jpg",
             "img2.jpg",
             "img3.jpg",
@@ -51,43 +39,35 @@ async def start_router() -> None:

(The "/list" and "/find/{prefix}" endpoints switch to LM_IMAGE.lister / LM_IMAGE.filter as above; find_images is renamed find_images_by_prefix.)

 @router.get(
     "/get/{prefix}",
+    responses=LM_IMAGE.getter.responses,
     response_class=StreamingResponse,
-    responses=image_unique.responses,
 )
-async def get_image(
-    prefix: str,
-    name: str = Depends(image_unique),
+async def get_image_by_prefix(
+    remote_path: str = Depends(RP_IMAGE),
+    name: str = Depends(LM_IMAGE.getter.func),
 ) -> StreamingResponse:
-    cfg = await Config.get()
-
-    dav_file = DavFile(f"{await image_lister.remote_path}/{name}")
-    img = Image.open(
-        BytesIO(await dav_file.as_bytes)
-    ).convert(
-        cfg.image.mode
-    )
+    cfg = await get_config()
+    img = Image.open(BytesIO(await WebDAV.read_bytes(f"{remote_path}/{name}")))

     img_buffer = BytesIO()
     img.save(img_buffer, **cfg.image.save_params)
@@ -96,17 +76,12 @@ async def get_image(
     return StreamingResponse(
         content=img_buffer,
         media_type="image/jpeg",
-        headers={
-            "Content-Disposition": f"filename={prefix}.jpg"
-        },
+        headers={"Content-Disposition": f"filename={name}.jpg"},
     )


-@router.get(
-    "/config",
-    response_model=ImageUIConfig,
-)
+@router.get("/config")
 async def get_ui_config(
-    cfg: Config = Depends(Config.get),
+    cfg: Config = Depends(get_config),
 ) -> ImageUIConfig:
     return cfg.image
@@ -5,16 +5,17 @@ Router "misc" provides:
 - getting the device IP
 """

-from importlib.metadata import version
-from logging import getLogger
+import importlib.metadata
+import logging
 from socket import AF_INET, SOCK_DGRAM, socket

 from fastapi import APIRouter, Depends

-from ...config import Config, LogoUIConfig, ServerUIConfig
-from ...settings import SETTINGS
+from ...core.config import Config, LogoUIConfig, ServerUIConfig
+from ...core.settings import SETTINGS
+from ._common import get_config

-_logger = getLogger(__name__)
+_logger = logging.getLogger(__name__)

 router = APIRouter(prefix="/misc", tags=["misc"])
@@ -25,13 +26,13 @@ async def start_router() -> None:

 @router.get("/lanip")
-async def get_ip() -> str:
+async def get_lan_ip() -> str:
     with socket(
         family=AF_INET,
         type=SOCK_DGRAM,
     ) as s:
-        s.settimeout(0)
         try:
+            s.settimeout(0)
             s.connect((SETTINGS.ping_host, SETTINGS.ping_port))
             IP = s.getsockname()[0]
@@ -42,25 +43,19 @@ async def get_lan_ip() -> str:

 @router.get("/version")
-async def get_version() -> str:
-    return version("ovdashboard-api")
+async def get_server_api_version() -> str:
+    return importlib.metadata.version("ovdashboard_api")


-@router.get(
-    "/config/server",
-    response_model=ServerUIConfig,
-)
+@router.get("/config/server")
 async def get_server_ui_config(
-    cfg: Config = Depends(Config.get),
+    cfg: Config = Depends(get_config),
 ) -> ServerUIConfig:
     return cfg.server


-@router.get(
-    "/config/logo",
-    response_model=LogoUIConfig,
-)
+@router.get("/config/logo")
 async def get_logo_ui_config(
-    cfg: Config = Depends(Config.get),
+    cfg: Config = Depends(get_config),
 ) -> LogoUIConfig:
     return cfg.logo
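Aside (illustration, not part of this diff): why get_lan_ip works at all. Connecting a UDP socket sends no packets; it only asks the kernel to pick a route (and therefore a local source address) for the given destination, which is why SETTINGS.ping_host does not need to be reachable. A standalone sketch of the same trick, with the fallback value being my own choice rather than the router's elided except branch:

from socket import AF_INET, SOCK_DGRAM, socket


def lan_ip(probe_host: str = "10.0.0.0", probe_port: int = 1) -> str:
    with socket(family=AF_INET, type=SOCK_DGRAM) as s:
        s.settimeout(0)
        try:
            s.connect((probe_host, probe_port))  # no traffic is sent for UDP
            return s.getsockname()[0]
        except OSError:
            return "127.0.0.1"  # fallback (assumption, not from the diff)


print(lan_ip())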
@@ -7,94 +7,76 @@ Router "text" provides:
 - getting text file HTML content by name prefix (using Markdown)
 """

-import re
-from logging import getLogger
-from typing import Iterator
+import logging

+import markdown
 from fastapi import APIRouter, Depends
-from markdown import markdown

-from ...dav_common import webdav_ensure_files, webdav_ensure_path
-from ...dav_file import DavFile
-from ._common import FileNameLister, PrefixFinder, PrefixUnique
+from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
+from ...core.webdav import WebDAV
+from ._common import LM_TEXT, RP_TEXT

-_logger = getLogger(__name__)
+_logger = logging.getLogger(__name__)

 router = APIRouter(prefix="/text", tags=["text"])

-text_lister = FileNameLister(
-    path_name="text_dir",
-    re=re.compile(
-        r"\.(txt|md)$",
-        flags=re.IGNORECASE,
-    ),
-)
-
-text_finder = PrefixFinder(text_lister)
-text_unique = PrefixUnique(text_finder)
-

 @router.on_event("startup")
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")

-    webdav_ensure_path(await text_lister.remote_path)
+    remote_path = await RP_TEXT()
+    if not webdav_ensure_path(remote_path):
         webdav_ensure_files(
-            await text_lister.remote_path,
+            remote_path,
             "message.txt",
             "title.txt",
             "ticker.txt",
         )

(The "/list" and "/find/{prefix}" endpoints switch to LM_TEXT.lister / LM_TEXT.filter as in the other routers; find_texts is renamed find_texts_by_prefix.)

-async def get_text_content(
-    name: str = Depends(text_unique),
+async def _get_raw_text_by_prefix(
+    remote_path: str = Depends(RP_TEXT),
+    name: str = Depends(LM_TEXT.getter.func),
 ) -> str:
-    return await DavFile(
-        f"{await text_lister.remote_path}/{name}",
-    ).as_string
+    return await WebDAV.read_str(f"{remote_path}/{name}")


-@router.get(
-    "/get/html/{prefix}",
-    response_model=str,
-    responses=text_unique.responses,
-)
-async def get_text(
-    text: str = Depends(get_text_content),
-) -> str:
-    return markdown(text)
-
-
 @router.get(
     "/get/raw/{prefix}",
-    response_model=str,
-    responses=text_unique.responses,
+    responses=LM_TEXT.getter.responses,
 )
-async def get_raw_text(
-    text: str = Depends(get_text_content),
+async def get_raw_text_by_prefix(
+    text: str = Depends(_get_raw_text_by_prefix),
 ) -> str:
     return text


+@router.get(
+    "/get/html/{prefix}",
+    responses=LM_TEXT.getter.responses,
+)
+async def get_html_by_prefix(
+    text: str = Depends(_get_raw_text_by_prefix),
+) -> str:
+    return markdown.markdown(text)
@@ -6,18 +6,18 @@ Router "ticker" provides:
 - getting the ticker's UI config
 """

-from logging import getLogger
+import logging
 from typing import Iterator

+import markdown
 from fastapi import APIRouter, Depends
-from markdown import markdown

-from ...config import Config, TickerUIConfig
-from ...dav_common import webdav_ensure_files, webdav_ensure_path
-from ...dav_file import DavFile
-from .text import text_lister, text_unique
+from ...core.config import Config, TickerUIConfig
+from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
+from ...core.webdav import WebDAV
+from ._common import LM_TEXT, RP_TEXT, get_config

-_logger = getLogger(__name__)
+_logger = logging.getLogger(__name__)

 router = APIRouter(prefix="/ticker", tags=["text"])
@@ -26,38 +26,31 @@ router = APIRouter(prefix="/ticker", tags=["text"])
 async def start_router() -> None:
     _logger.debug(f"{router.prefix} router starting.")

-    webdav_ensure_path(await text_lister.remote_path)
+    remote_path = await RP_TEXT()
+    if not webdav_ensure_path(remote_path):
         webdav_ensure_files(
-            await text_lister.remote_path,
+            remote_path,
             "ticker.txt",
         )


 async def get_ticker_lines() -> Iterator[str]:
-    cfg = await Config.get()
-    file_name = await text_unique(cfg.ticker.file_name)
+    cfg = await get_config()
+    file_name = await LM_TEXT.getter.func(cfg.ticker.file_name)
+    remote_path = await RP_TEXT()

-    ticker = await DavFile(
-        f"{await text_lister.remote_path}/{file_name}",
-    ).as_string
+    ticker = await WebDAV.read_str(f"{remote_path}/{file_name}")

-    return (
-        line.strip()
-        for line in ticker.split("\n")
-        if line.strip()
-    )
+    return (line.strip() for line in ticker.split("\n") if line.strip())


 async def get_ticker_content_lines(
     ticker_lines: Iterator[str] = Depends(get_ticker_lines),
 ) -> Iterator[str]:
-    cfg = await Config.get()
+    cfg = await get_config()

     return (
-        line
-        for line in ticker_lines
-        if not line.startswith(cfg.ticker.comment_marker)
+        line for line in ticker_lines if not line.startswith(cfg.ticker.comment_marker)
     )
@@ -68,7 +61,7 @@ async def get_ticker_content(
     if len(ticker_content_padded) == 2:
         return ""

-    cfg = await Config.get()
+    cfg = await get_config()
     ticker_content = cfg.ticker.separator.join(
         ticker_content_padded,
     )
@@ -80,7 +73,7 @@ async def get_ticker_content(
 async def get_ticker(
     ticker_content: str = Depends(get_ticker_content),
 ) -> str:
-    return markdown(ticker_content)
+    return markdown.markdown(ticker_content)


 @router.get("/raw")
@@ -90,11 +83,8 @@ async def get_raw_ticker(
     return ticker_content


-@router.get(
-    "/config",
-    response_model=TickerUIConfig,
-)
+@router.get("/config")
 async def get_ui_config(
-    cfg: Config = Depends(Config.get),
+    cfg: Config = Depends(get_config),
 ) -> TickerUIConfig:
     return cfg.ticker
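Aside (illustration, not part of this diff): the ticker pipeline in isolation, read the ticker text, drop blank and comment lines, join the rest with a separator and render the result with Markdown. A minimal sketch with hard-coded stand-ins for the cfg.ticker.* values:

import markdown

raw = "first entry\n# just a comment\n\nsecond entry\n"
comment_marker, separator = "#", "  +++  "  # stand-ins for cfg.ticker.*

lines = (line.strip() for line in raw.split("\n") if line.strip())
content = separator.join(
    line for line in lines if not line.startswith(comment_marker)
)

print(markdown.markdown(content))
# <p>first entry  +++  second entry</p>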
@@ -1,117 +0,0 @@ (file removed: the old pydantic v1 settings module)
"""
Configuration definition.

Converts per-run (environment) variables and config files into the
"python world" using `pydantic`.

Pydantic models might have convenience methods attached.
"""

from typing import Any, Optional

from pydantic import BaseModel, BaseSettings, root_validator


class DavSettings(BaseModel):
    """
    Connection to a DAV server.
    """

    protocol: Optional[str] = None
    host: Optional[str] = None
    username: Optional[str] = None
    password: Optional[str] = None
    path: Optional[str] = None

    @property
    def url(self) -> str:
        """
        Combined DAV URL.
        """

        return f"{self.protocol}://{self.host}{self.path}"


class Settings(BaseSettings):
    """
    Per-run settings.
    """

    # general settings
    production_mode: bool = False
    log_level: str = "INFO" if production_mode else "DEBUG"
    ui_directory: str = "/html"
    cache_time: int = 30
    cache_size: int = 30

    # doesn't even have to be reachable
    ping_host: str = "10.0.0.0"
    ping_port: int = 1

    # openapi settings
    openapi_url: str = "/openapi.json"
    docs_url: Optional[str] = None if production_mode else "/docs"
    redoc_url: Optional[str] = None if production_mode else "/redoc"

    # webdav settings
    webdav: DavSettings = DavSettings()
    webdav_disable_check: bool = False
    webdav_retries: int = 20
    webdav_prefix: str = "/ovdashboard"
    config_path: str = "config.txt"

    # caldav settings
    caldav: DavSettings = DavSettings()

    class Config:
        env_file = ".env"
        env_file_encoding = "utf-8"
        env_nested_delimiter = "__"

    @root_validator(pre=True)
    @classmethod
    def validate_dav_settings(cls, values: dict[str, Any]) -> dict[str, Any]:
        # ensure both settings dicts are created
        for key in ("webdav", "caldav"):
            if key not in values:
                values[key] = {}

        default_dav = DavSettings(
            protocol="https",
            host="example.com",
            username="ovdashboard",
            password="secret",
        ).dict()

        for key in default_dav:
            # if "webdav" value is not specified, use default
            if key not in values["webdav"] or values["webdav"][key] is None:
                values["webdav"][key] = default_dav[key]

            # if "caldav" value is not specified, use "webdav" value
            if key not in values["caldav"] or values["caldav"][key] is None:
                values["caldav"][key] = values["webdav"][key]

        # add default "path"s if None
        if values["webdav"]["path"] is None:
            values["webdav"]["path"] = "/remote.php/webdav"

        if values["caldav"]["path"] is None:
            values["caldav"]["path"] = "/remote.php/dav"

        return values


SETTINGS = Settings()
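Aside (illustration, not part of this diff): the replacement settings module is not visible in this hunk, but with pydantic-settings 2.x (now listed in pyproject.toml) the equivalent of the removed BaseSettings class would look roughly like the sketch below. Field names are copied from the old module; everything else, including the exact field set, is an assumption:

from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict


class DavSettings(BaseModel):
    protocol: str | None = None
    host: str | None = None
    username: str | None = None
    password: str | None = None
    path: str | None = None
    # _common.py reads SETTINGS.webdav.config_filename, so the new WebDAV
    # settings model presumably carries such a field (assumption):
    config_filename: str = "config.txt"


class Settings(BaseSettings):
    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        env_nested_delimiter="__",
    )

    production_mode: bool = False
    ping_host: str = "10.0.0.0"
    ping_port: int = 1
    webdav: DavSettings = DavSettings()
    caldav: DavSettings = DavSettings()


SETTINGS = Settings()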
1491 api/poetry.lock (generated) — file diff suppressed because it is too large.
@@ -2,28 +2,28 @@
 authors = ["Jörn-Michael Miehe <jmm@yavook.de>"]
 description = ""
 include = ["ovdashboard_api/skel/*"]
-name = "ovdashboard-api"
+name = "ovdashboard_api"
 version = "0.1.0"

 [tool.poetry.dependencies]
-Markdown = "^3.4.1"
-Pillow = "^9.2.0"
-caldav = "^0.9.1"
-fastapi = "^0.81.0"
-pydantic = {extras = ["dotenv"], version = "^1.9.2"}
-python = "^3.9"
+Markdown = "^3.5"
+Pillow = "^10.1.0"
+asyncify = "^0.9.2"
+cachetools = "^5.3.2"
+caldav = "^1.3.6"
+fastapi = "^0.103.2"
+pydantic-settings = "^2.0.3"
+python = "^3.12"
 python-magic = "^0.4.27"
-tomli = "^2.0.1"
 tomli-w = "^1.0.0"
-uvicorn = "^0.18.3"
-webdavclient3 = "3.14.5"
-async-cache = "^1.1.1"
+uvicorn = {extras = ["standard"], version = "^0.23.2"}
+webdavclient3 = "^3.14.6"

-[tool.poetry.dev-dependencies]
-# pytest = "^5.2"
-
-[tool.poetry.scripts]
-ovdashboard-api = "ovdashboard_api.__main__:main"
+[tool.poetry.group.dev.dependencies]
+black = "^23.10.1"
+flake8 = "^6.1.0"
+flake8-isort = "^6.1.0"
+types-cachetools = "^5.3.0.6"

 [build-system]
 build-backend = "poetry.core.masonry.api"
@@ -1,11 +1,20 @@
 # [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
-ARG VARIANT=16-bullseye
-FROM mcr.microsoft.com/vscode/devcontainers/javascript-node:0-${VARIANT}
+ARG VARIANT=16-bookworm
+FROM mcr.microsoft.com/vscode/devcontainers/javascript-node:1-${VARIANT}

 # [Optional] Uncomment this section to install additional OS packages.
 # RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
 #     && apt-get -y install --no-install-recommends <your-package-list-here>

+RUN set -ex; \
+    \
+    export DEBIAN_FRONTEND=noninteractive; \
+    apt-get update; apt-get install --yes --no-install-recommends \
+        git-flow \
+        git-lfs \
+    ; rm -rf /var/lib/apt/lists/*; \
+    \
+    su node -c "git lfs install"
+
 # [Optional] Uncomment if you want to install an additional version of node using nvm
 # ARG EXTRA_NODE_VERSION=10
 # RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}"
@@ -1,28 +1,35 @@
 // For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
 // https://github.com/microsoft/vscode-dev-containers/tree/v0.245.2/containers/javascript-node
 {
-  "name": "Node.js",
+  "name": "OVD UI",
   "build": {
     "dockerfile": "Dockerfile",
+    "context": "..",
     // Update 'VARIANT' to pick a Node version: 18, 16, 14.
     // Append -bullseye or -buster to pin to an OS version.
     // Use -bullseye variants on local arm64/Apple Silicon.
     "args": {
-      "VARIANT": "18-bullseye"
+      "VARIANT": "20-bookworm"
     }
   },
-  // Set *default* container specific settings.json values on container create.
-  "settings": {
-    "terminal.integrated.defaultProfile.linux": "zsh"
+  "containerEnv": {
+    "TZ": "Europe/Berlin"
   },
   // Configure tool-specific properties.
   "customizations": {
     // Configure properties specific to VS Code.
     "vscode": {
+      // Set *default* container specific settings.json values on container create.
+      "settings": {
+        "terminal.integrated.defaultProfile.linux": "zsh"
+      },
       // Add the IDs of extensions you want installed when the container is created.
       "extensions": [
         "dbaeumer.vscode-eslint",
-        "octref.vetur"
+        "esbenp.prettier-vscode",
+        "mhutchie.git-graph",
+        "Syler.sass-indented",
+        "Vue.volar"
       ]
     }
   },
@@ -30,7 +37,7 @@
   // "forwardPorts": [],
   // Use 'postCreateCommand' to run commands after the container is created.
   // "postCreateCommand": "yarn install",
-  "postStartCommand": "yarn install",
+  "postStartCommand": "yarn install --production false",
   // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
   "remoteUser": "node"
 }
@@ -1,18 +1,18 @@
 module.exports = {
   root: true,
   env: {
-    node: true
+    node: true,
   },
-  'extends': [
-    'plugin:vue/essential',
-    'eslint:recommended',
-    '@vue/typescript/recommended'
+  extends: [
+    "plugin:vue/essential",
+    "eslint:recommended",
+    "@vue/typescript/recommended",
   ],
   parserOptions: {
-    ecmaVersion: 2020
+    ecmaVersion: 2020,
   },
   rules: {
-    'no-console': process.env.NODE_ENV === 'production' ? 'warn' : 'off',
-    'no-debugger': process.env.NODE_ENV === 'production' ? 'warn' : 'off'
-  }
-}
+    "no-console": process.env.NODE_ENV === "production" ? "warn" : "off",
+    "no-debugger": process.env.NODE_ENV === "production" ? "warn" : "off",
+  },
+};
15 ui/.vscode/settings.json (vendored)
@@ -1,8 +1,21 @@
 {
   "editor.formatOnSave": true,
+  "[vue]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode"
+  },
+  "[typescript]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode"
+  },
+  "[javascript]": {
+    "editor.defaultFormatter": "esbenp.prettier-vscode"
+  },
   "editor.codeActionsOnSave": {
     "source.organizeImports": true
   },
   "git.closeDiffOnOperation": true,
-  "editor.tabSize": 2
+  "editor.tabSize": 2,
+  "sass.disableAutoIndent": true,
+  "sass.format.convert": false,
+  "sass.format.deleteWhitespace": true,
+  "prettier.trailingComma": "all",
 }
@@ -1,5 +1,3 @@
 module.exports = {
-  presets: [
-    '@vue/cli-plugin-babel/preset'
-  ]
-}
+  presets: ["@vue/cli-plugin-babel/preset"],
+};
@@ -7,35 +7,34 @@
     "build": "vue-cli-service build",
     "lint": "vue-cli-service lint"
   },
-  "dependencies": {
-    "axios": "^0.27.2",
-    "color": "^4.2.3",
-    "core-js": "^3.8.3",
-    "luxon": "^3.0.3",
-    "register-service-worker": "^1.7.2",
-    "vue": "^2.6.14",
-    "vue-class-component": "^7.2.3",
-    "vue-property-decorator": "^9.1.2",
-    "vuetify": "^2.6.0"
-  },
   "devDependencies": {
     "@types/color": "^3.0.3",
     "@types/luxon": "^3.0.1",
-    "@typescript-eslint/eslint-plugin": "^5.4.0",
-    "@typescript-eslint/parser": "^5.4.0",
+    "@typescript-eslint/eslint-plugin": "^6.9.0",
+    "@typescript-eslint/parser": "^6.9.0",
     "@vue/cli-plugin-babel": "~5.0.0",
     "@vue/cli-plugin-eslint": "~5.0.0",
     "@vue/cli-plugin-pwa": "~5.0.0",
     "@vue/cli-plugin-typescript": "~5.0.0",
     "@vue/cli-service": "~5.0.0",
-    "@vue/eslint-config-typescript": "^9.1.0",
-    "eslint": "^7.32.0",
-    "eslint-plugin-vue": "^8.0.3",
-    "sass": "~1.32.0",
-    "sass-loader": "^10.0.0",
-    "typescript": "~4.5.5",
+    "@vue/eslint-config-typescript": "^12.0.0",
+    "axios": "^1.6.0",
+    "color": "^4.2.3",
+    "core-js": "^3.8.3",
+    "eslint": "^8.52.0",
+    "eslint-plugin-vue": "^9.18.0",
+    "luxon": "^3.0.3",
+    "prettier": "^3.0.3",
+    "register-service-worker": "^1.7.2",
+    "sass": "~1.69.5",
+    "sass-loader": "^13.3.2",
+    "typescript": "~5.2.2",
+    "vue": "^2.7.15",
+    "vue-class-component": "^7.2.3",
     "vue-cli-plugin-vuetify": "^2.5.5",
+    "vue-property-decorator": "^9.1.2",
     "vue-template-compiler": "^2.6.14",
+    "vuetify": "^2.7.1",
     "vuetify-loader": "^1.7.0"
   }
 }
@@ -1,17 +1,27 @@
-<!DOCTYPE html>
+<!doctype html>
 <html lang="en">
   <head>
-    <meta charset="utf-8">
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <meta name="viewport" content="width=device-width,initial-scale=1.0">
-    <link rel="icon" href="<%= BASE_URL %>favicon.ico">
+    <meta charset="utf-8" />
+    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
+    <meta name="viewport" content="width=device-width,initial-scale=1.0" />
+    <link rel="icon" href="<%= BASE_URL %>favicon.ico" />
     <title><%= htmlWebpackPlugin.options.title %></title>
-    <link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:100,300,400,500,700,900">
-    <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@mdi/font@latest/css/materialdesignicons.min.css">
+    <link
+      rel="stylesheet"
+      href="https://fonts.googleapis.com/css?family=Roboto:100,300,400,500,700,900"
+    />
+    <link
+      rel="stylesheet"
+      href="https://cdn.jsdelivr.net/npm/@mdi/font@latest/css/materialdesignicons.min.css"
+    />
   </head>
   <body>
     <noscript>
-      <strong>We're sorry but <%= htmlWebpackPlugin.options.title %> doesn't work properly without JavaScript enabled. Please enable it to continue.</strong>
+      <strong
+        >We're sorry but <%= htmlWebpackPlugin.options.title %> doesn't work
+        properly without JavaScript enabled. Please enable it to
+        continue.</strong
+      >
     </noscript>
     <div id="app"></div>
     <!-- built files will be auto injected -->
@@ -18,5 +18,4 @@ import { Component, Vue } from "vue-property-decorator";
 export default class Dashboard extends Vue {}
 </script>

-<style>
-</style>
+<style></style>
@@ -18,10 +18,10 @@ import { Component, Vue } from "@/ovd-vue";

 @Component
 export default class DashboardInfo extends Vue {
-  private server_host = "https://oekzident.de";
-  private server_name = "OEKZident";
-  private version = "0.0.1";
-  private lan_ip = "0.0.0.0";
+  public server_host = "https://oekzident.de";
+  public server_name = "OEKZident";
+  public version = "0.0.1";
+  public lan_ip = "0.0.0.0";

   public created(): void {
     super.created();
@@ -43,7 +43,7 @@ export default class DashboardInfo extends Vue {
       (data) => {
         this.server_host = data.host;
         this.server_name = data.name;
-      }
+      },
     );

     // Update Version
@@ -57,4 +57,4 @@ export default class DashboardInfo extends Vue {
     });
   }
 }
 </script>
@@ -22,10 +22,10 @@ import { Component, Vue } from "@/ovd-vue";

 @Component
 export default class ImageCarousel extends Vue {
-  private urls: string[] = require("@/assets/image_testdata.json");
-  private height = 300;
-  private contain = false;
-  private speed = 10000;
+  public urls: string[] = require("@/assets/image_testdata.json");
+  public height = 300;
+  public contain = false;
+  public speed = 10000;

   public created(): void {
     super.created();
@@ -39,7 +39,7 @@ export default class ImageCarousel extends Vue {
     // Update Images
     this.$ovdashboard.api_get_list("image/list", (names) => {
       this.urls = names.map((name: string) =>
-        this.$ovdashboard.api_url(`image/get/${name}`)
+        this.$ovdashboard.api_url(`image/get/${name}`),
       );
     });
@@ -71,4 +71,4 @@ export default class ImageCarousel extends Vue {
   }
 }
 </style>
@@ -7,7 +7,7 @@ import { Component, Vue } from "@/ovd-vue";
 
 @Component
 export default class Message extends Vue {
-  private html = require("@/assets/message_testdata.json");
+  public html = require("@/assets/message_testdata.json");
 
   public created(): void {
     super.created();
@@ -21,7 +21,7 @@ export default class Message extends Vue {
     // Update Message
     this.$ovdashboard.api_get_string(
       "text/get/html/message",
-      (data) => (this.html = data)
+      (data) => (this.html = data),
     );
   }
 }
@@ -59,4 +59,4 @@ div:deep() {
     font-weight: bold;
   }
 }
 </style>
@@ -5,7 +5,7 @@ export class Model {
     // source: https://gist.github.com/hyamamoto/fd435505d29ebfa3d9716fd2be8d42f0?permalink_comment_id=2775538#gistcomment-2775538
     let hash = 0;
     for (let i = 0; i < str.length; i++)
-      hash = Math.imul(31, hash) + str.charCodeAt(i) | 0;
+      hash = (Math.imul(31, hash) + str.charCodeAt(i)) | 0;
 
     return new Uint32Array([hash])[0].toString(36);
   }
@@ -15,23 +15,20 @@ import Color from "color";
 
 @Component
 export default class TickerBar extends Vue {
-  private content = "<p>changeme</p>";
+  public content = "<p>changeme</p>";
 
-  private color = "primary";
+  public color = "primary";
 
-  @Ref("content")
-  private readonly _content!: HTMLDivElement;
-
   @Ref("marquee")
   private readonly _marquee!: HTMLSpanElement;
 
-  private get is_dark(): boolean {
+  public get is_dark(): boolean {
     return this.footer_color.isDark();
   }
 
   private get footer_color(): Color {
     // try getting from vuetify theme
-    let color = this.$vuetify.theme.themes.light[this.color];
+    const color = this.$vuetify.theme.themes.light[this.color];
 
     if (typeof color === "string") {
       return Color(color);
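For context on the getters touched above: the "color" package wraps a CSS color value and offers checks such as isDark(), which TickerBar exposes through its is_dark getter (presumably to pick contrasting text). A rough sketch of that check, assuming a plain hex string instead of the Vuetify theme lookup:

import Color from "color";

// Hypothetical helper: choose a readable text color for a given background.
function text_color_for(background: string): string {
  const bg = Color(background);
  // isDark() is a luminance-based check provided by the "color" package.
  return bg.isDark() ? "white" : "black";
}

console.log(text_color_for("#003399")); // "white"
console.log(text_color_for("#ffcc00")); // "black"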
@@ -4,11 +4,11 @@
       {{ title }}
     </span>
     <template v-for="(event, index) in events">
-      <EventItem :event="event" :key="event.hash" />
+      <EventItem :event="event" :key="`event-${index}`" />
       <v-divider
         v-if="index < events.length - 1"
         class="mx-5"
-        :key="`${event.hash}-div`"
+        :key="`event-div-${index}`"
       />
     </template>
   </v-list>
@@ -16,8 +16,8 @@
 
 <script lang="ts">
 import { Component, Prop, Vue } from "vue-property-decorator";
-import { EventData } from "./EventModel";
 import EventItem from "./EventItem.vue";
+import { EventData } from "./EventModel";
 
 @Component({
   components: {
@@ -26,10 +26,10 @@ import EventItem from "./EventItem.vue";
 })
 export default class Calendar extends Vue {
   @Prop({ default: "CALENDAR" })
-  private readonly title!: string;
+  public readonly title!: string;
 
   @Prop({ default: () => [] })
-  private readonly events!: EventData[];
+  public readonly events!: EventData[];
 }
 </script>
 
@@ -37,4 +37,4 @@ export default class Calendar extends Vue {
 .v-list .v-divider {
   border-color: rgba(0, 0, 0, 0.25);
 }
 </style>
@@ -29,7 +29,7 @@ export default class CalendarCarousel extends Vue {
   private interval?: number;
 
   private data: CalendarData[] = require("@/assets/calendar_testdata.json");
-  private speed = 10000;
+  public speed = 10000;
 
   @Ref("main")
   private readonly _main?: Vue;
@@ -57,7 +57,7 @@ export default class CalendarCarousel extends Vue {
             events: calendars[i],
           });
         }
-      }
+      },
     );
   });
 
@@ -70,7 +70,7 @@ export default class CalendarCarousel extends Vue {
       "calendar/config",
       (data) => {
         this.speed = data.speed;
-      }
+      },
     );
   }
 
@@ -98,8 +98,8 @@ export default class CalendarCarousel extends Vue {
     this.interval = setInterval(this.update_height, 10000);
   }
 
-  private get calendars(): CalendarModel[] {
-    let arr = [];
+  public get calendars(): CalendarModel[] {
+    const arr = [];
 
     for (const json_data of this.data) {
       arr.push(new CalendarModel(json_data));
@@ -131,4 +131,4 @@ export default class CalendarCarousel extends Vue {
   }
 }
 }
 </style>
@@ -13,11 +13,11 @@ export class CalendarModel extends Model {
   public constructor(json_data: CalendarData) {
     super();
 
-    this.title = json_data.title
+    this.title = json_data.title;
 
     this.events = [];
     for (const event_data of json_data.events) {
-      this.events.push(new EventModel(event_data))
+      this.events.push(new EventModel(event_data));
     }
   }
 }
@@ -17,23 +17,23 @@
 </template>
 
 <script lang="ts">
-import { Component, Prop, Vue } from "vue-property-decorator";
 import { DateTime } from "luxon";
+import { Component, Prop, Vue } from "vue-property-decorator";
 
 @Component
 export default class EventDate extends Vue {
   @Prop()
   private readonly date!: DateTime;
 
-  private get day(): string {
+  public get day(): string {
     return this.date.toFormat("dd.");
   }
 
-  private get month(): string {
+  public get month(): string {
     return this.date.toFormat("MM.");
   }
 
-  private get time(): string {
+  public get time(): string {
     return this.date.toLocaleString(DateTime.TIME_24_SIMPLE);
   }
 }
@@ -49,4 +49,4 @@ export default class EventDate extends Vue {
     min-width: 130px;
   }
 }
 </style>
@@ -12,15 +12,7 @@
         {{ event.description }}
       </v-list-item-subtitle>
       <v-list-item-subtitle
-        class="
-          d-inline-block
-          text-truncate
-          thw-heading-font
-          blue-grey--text
-          text--darken-1
-          font-weight-bold
-          ma-0
-        "
+        class="d-inline-block text-truncate thw-heading-font blue-grey--text text--darken-1 font-weight-bold ma-0"
       >
         {{ data_string }}
       </v-list-item-subtitle>
@@ -29,10 +21,10 @@
 </template>
 
 <script lang="ts">
-import { Component, Prop, Vue } from "vue-property-decorator";
 import { DateTime, DurationLikeObject } from "luxon";
-import { EventModel } from "./EventModel";
+import { Component, Prop, Vue } from "vue-property-decorator";
 import EventDate from "./EventDate.vue";
+import { EventModel } from "./EventModel";
 
 @Component({
   components: {
@@ -41,15 +33,15 @@ import EventDate from "./EventDate.vue";
 })
 export default class EventItem extends Vue {
   @Prop()
-  private readonly event!: EventModel;
+  public readonly event!: EventModel;
 
-  private get data_string(): string {
+  public get data_string(): string {
     const locale_string = this.event.start.toLocaleString(
-      DateTime.DATETIME_MED_WITH_WEEKDAY
+      DateTime.DATETIME_MED_WITH_WEEKDAY,
     );
 
     // decide which duration units to include
-    let units: (keyof DurationLikeObject)[] = ["hours"];
+    const units: (keyof DurationLikeObject)[] = ["hours"];
 
     if (this.event.duration.as("days") >= 1) {
       // include days if duration is at least one day
@@ -72,5 +64,4 @@ export default class EventItem extends Vue {
 }
 </script>
 
-<style>
-</style>
+<style></style>
@@ -19,13 +19,11 @@ export class EventModel extends Model {
 
     this.summary = json_data.summary;
     this.description = json_data.description;
-    this.start = DateTime
-      .fromISO(json_data.dtstart)
-      .setLocale(navigator.language);
-    const end = DateTime
-      .fromISO(json_data.dtend)
-      .setLocale(navigator.language);
+    this.start = DateTime.fromISO(json_data.dtstart).setLocale(
+      navigator.language,
+    );
+    const end = DateTime.fromISO(json_data.dtend).setLocale(navigator.language);
 
     this.duration = end.diff(this.start);
   }
 }
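The reflow above is formatting only; the parsing logic is unchanged. As a reminder of what those luxon calls do, a minimal sketch (the literal timestamps and the hard-coded locale are illustrative; the component itself uses navigator.language and the dtstart/dtend fields from the calendar API):

import { DateTime } from "luxon";

// Parse ISO timestamps and localise them.
const start = DateTime.fromISO("2024-05-04T18:00:00").setLocale("de");
const end = DateTime.fromISO("2024-05-04T20:30:00").setLocale("de");

// diff() returns a luxon Duration; as("hours") converts it to a plain number.
const duration = end.diff(start);
console.log(duration.as("hours")); // 2.5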
@@ -3,12 +3,12 @@
 </template>
 
 <script lang="ts">
-import { Component, Prop, Vue } from "vue-property-decorator";
 import { DateTime } from "luxon";
+import { Component, Prop, Vue } from "vue-property-decorator";
 
 @Component
 export default class Clock extends Vue {
-  private formatted = "";
+  public formatted = "";
   private interval?: number;
 
   @Prop({ required: true })
@@ -23,10 +23,10 @@ import { Component, Vue } from "@/ovd-vue";
 
 @Component
 export default class THWLogo extends Vue {
-  private above = "Technisches Hilfswerk";
-  private below = "OV Musterstadt";
+  public above = "Technisches Hilfswerk";
+  public below = "OV Musterstadt";
 
-  private get logo_url(): string {
+  public get logo_url(): string {
     return this.$ovdashboard.api_url("file/get/logo");
   }
 
@@ -38,7 +38,7 @@ import THWLogo from "./THWLogo.vue";
   },
 })
 export default class TitleBar extends Vue {
-  private title = "<h1>TITLE</h1>";
+  public title = "<h1>TITLE</h1>";
 
   public created(): void {
     super.created();
ui/src/d.ts/shims-ovdashboard.d.ts (vendored, 5 changes)
@@ -1,10 +1,9 @@
 import { OVDashboardPlugin } from "@/plugins/ovdashboard";
 
-declare module 'vue/types/vue' {
+declare module "vue/types/vue" {
   interface Vue {
     $ovdashboard: OVDashboardPlugin;
   }
 }
 
-export { };
-
+export {};
|
|
4
ui/src/d.ts/shims-tsx.d.ts
vendored
4
ui/src/d.ts/shims-tsx.d.ts
vendored
|
@ -1,11 +1,11 @@
|
||||||
import Vue, { VNode } from 'vue'
|
import Vue, { VNode } from "vue";
|
||||||
|
|
||||||
declare global {
|
declare global {
|
||||||
namespace JSX {
|
namespace JSX {
|
||||||
interface Element extends VNode {}
|
interface Element extends VNode {}
|
||||||
interface ElementClass extends Vue {}
|
interface ElementClass extends Vue {}
|
||||||
interface IntrinsicElements {
|
interface IntrinsicElements {
|
||||||
[elem: string]: any
|
[elem: string]: any;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
ui/src/d.ts/shims-vue.d.ts (vendored, 6 changes)
@@ -1,4 +1,4 @@
-declare module '*.vue' {
-  import Vue from 'vue'
-  export default Vue
+declare module "*.vue" {
+  import Vue from "vue";
+  export default Vue;
 }
ui/src/d.ts/shims-vuetify.d.ts (vendored, 6 changes)
@@ -1,4 +1,4 @@
-declare module 'vuetify/lib/framework' {
-  import Vuetify from 'vuetify'
-  export default Vuetify
+declare module "vuetify/lib/framework" {
+  import Vuetify from "vuetify";
+  export default Vuetify;
 }
@@ -1,16 +1,16 @@
-import Vue from "vue"
+import Vue from "vue";
 
-import "@/registerServiceWorker"
-import "@/sass/fonts.scss"
+import "@/registerServiceWorker";
+import "@/sass/fonts.scss";
 
-import App from "@/App.vue"
-import ovdashboard from "@/plugins/ovdashboard"
-import vuetify from "@/plugins/vuetify"
+import App from "@/App.vue";
+import ovdashboard from "@/plugins/ovdashboard";
+import vuetify from "@/plugins/vuetify";
 
-Vue.config.productionTip = false
-Vue.use(ovdashboard)
+Vue.config.productionTip = false;
+Vue.use(ovdashboard);
 
 new Vue({
   vuetify,
-  render: h => h(App)
-}).$mount('#app')
+  render: (h) => h(App),
+}).$mount("#app");
@@ -1,5 +1,5 @@
-import axios, { AxiosInstance, AxiosPromise } from 'axios';
-import Vue from 'vue';
+import axios, { AxiosInstance, AxiosPromise } from "axios";
+import Vue from "vue";
 
 export class OVDashboardPlugin {
   private axios: AxiosInstance;
@@ -28,7 +28,6 @@ export class OVDashboardPlugin {
   private get api_baseurl(): string {
     if (process.env.NODE_ENV === "production") {
       return `//${window.location.host}/api`;
-
     } else if (process.env.NODE_ENV !== "development") {
       console.warn("Unexpected NODE_ENV value");
     }
@@ -52,10 +51,7 @@ export class OVDashboardPlugin {
     return this.axios.get<T>(this.api_url(endpoint));
   }
 
-  private api_get<T>(
-    endpoint: string,
-    on_success: (data: T) => void
-  ): void {
+  private api_get<T>(endpoint: string, on_success: (data: T) => void): void {
     this.api_get_prepare<T>(endpoint)
       .then((response) => on_success(response.data))
       .catch(this.fail(endpoint));
@@ -63,7 +59,7 @@ export class OVDashboardPlugin {
 
   public api_get_string(
     endpoint: string,
-    on_success: (data: string) => void
+    on_success: (data: string) => void,
   ): void {
     this.api_get<string>(endpoint, (data) => {
       if (typeof data !== "string") {
@@ -84,7 +80,7 @@ export class OVDashboardPlugin {
 
   public api_get_list(
     endpoint: string,
-    on_success: (data: string[]) => void
+    on_success: (data: string[]) => void,
   ): void {
     this.api_get(endpoint, (data) => {
       if (!this.check_array<string>(data)) {
@@ -105,7 +101,7 @@ export class OVDashboardPlugin {
 
   public api_get_object<Type extends object>(
     endpoint: string,
-    on_success: (data: Type) => void
+    on_success: (data: Type) => void,
   ): void {
     this.api_get<Type>(endpoint, (data) => {
       if (!this.check_object(data)) {
@@ -119,9 +115,11 @@ export class OVDashboardPlugin {
 
   public api_get_object_multi<Type extends object>(
     endpoints: string[],
-    on_success: (data: Type[]) => void
+    on_success: (data: Type[]) => void,
   ): void {
-    const promises = endpoints.map((endpoint) => this.api_get_prepare<Type>(endpoint));
+    const promises = endpoints.map((endpoint) =>
+      this.api_get_prepare<Type>(endpoint),
+    );
 
     Promise.all(promises)
       .then((responses) => {
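The reformatted api_get_object_multi above keeps the same pattern: map every endpoint to a pending request, then resolve them together with Promise.all. A rough standalone sketch of that pattern with plain axios (the fetch_all helper and the endpoint URLs are made up for illustration):

import axios from "axios";

// Fire one GET per endpoint and return all payloads in the original order.
async function fetch_all<T>(endpoints: string[]): Promise<T[]> {
  const promises = endpoints.map((endpoint) => axios.get<T>(endpoint));
  const responses = await Promise.all(promises);
  return responses.map((response) => response.data);
}

// Usage: load the configuration of several calendars in one go.
fetch_all<object>(["/api/calendar/config/1", "/api/calendar/config/2"])
  .then((configs) => console.log(configs.length)) // 2
  .catch((error) => console.error(error));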
@@ -1,5 +1,5 @@
-import Vue from 'vue';
-import Vuetify from 'vuetify/lib/framework';
+import Vue from "vue";
+import Vuetify from "vuetify/lib/framework";
 
 Vue.use(Vuetify);
 
@@ -1,32 +1,34 @@
 /* eslint-disable no-console */
 
-import { register } from 'register-service-worker'
+import { register } from "register-service-worker";
 
-if (process.env.NODE_ENV === 'production') {
+if (process.env.NODE_ENV === "production") {
   register(`${process.env.BASE_URL}service-worker.js`, {
-    ready () {
+    ready() {
       console.log(
-        'App is being served from cache by a service worker.\n' +
-        'For more details, visit https://goo.gl/AFskqB'
-      )
+        "App is being served from cache by a service worker.\n" +
+          "For more details, visit https://goo.gl/AFskqB",
+      );
     },
-    registered () {
-      console.log('Service worker has been registered.')
+    registered() {
+      console.log("Service worker has been registered.");
     },
-    cached () {
-      console.log('Content has been cached for offline use.')
+    cached() {
+      console.log("Content has been cached for offline use.");
     },
-    updatefound () {
-      console.log('New content is downloading.')
+    updatefound() {
+      console.log("New content is downloading.");
     },
-    updated () {
-      console.log('New content is available; please refresh.')
+    updated() {
+      console.log("New content is available; please refresh.");
     },
-    offline () {
-      console.log('No internet connection found. App is running in offline mode.')
+    offline() {
+      console.log(
+        "No internet connection found. App is running in offline mode.",
+      );
     },
-    error (error) {
-      console.error('Error during service worker registration:', error)
-    }
-  })
+    error(error) {
+      console.error("Error during service worker registration:", error);
+    },
+  });
 }
@@ -1,11 +1,17 @@
 @font-face {
   font-family: "Lubalin Graph";
   src: url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.eot");
-  src: url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.eot?#iefix") format("embedded-opentype"),
-    url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.woff2") format("woff2"),
-    url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.woff") format("woff"),
-    url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.ttf") format("truetype"),
-    url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.svg#Lubalin BQ") format("svg");
+  src:
+    url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.eot?#iefix")
+      format("embedded-opentype"),
+    url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.woff2")
+      format("woff2"),
+    url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.woff")
+      format("woff"),
+    url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.ttf")
+      format("truetype"),
+    url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.svg#Lubalin BQ")
+      format("svg");
   font-weight: bold;
   font-style: normal;
 }
@@ -13,11 +19,17 @@
 @font-face {
   font-family: "Lubalin Graph";
   src: url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.eot");
-  src: url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.eot?#iefix") format("embedded-opentype"),
-    url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.woff2") format("woff2"),
-    url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.woff") format("woff"),
-    url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.ttf") format("truetype"),
-    url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.svg#LubalinGraph-Book") format("svg");
+  src:
+    url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.eot?#iefix")
+      format("embedded-opentype"),
+    url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.woff2")
+      format("woff2"),
+    url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.woff")
+      format("woff"),
+    url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.ttf")
+      format("truetype"),
+    url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.svg#LubalinGraph-Book")
+      format("svg");
   font-weight: normal;
   font-style: normal;
 }
@@ -25,11 +37,17 @@
 @font-face {
   font-family: "Neue Praxis";
   src: url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.eot");
-  src: url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.eot?#iefix") format("embedded-opentype"),
-    url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.woff2") format("woff2"),
-    url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.woff") format("woff"),
-    url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.ttf") format("truetype"),
-    url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.svg#PraxisEF") format("svg");
+  src:
+    url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.eot?#iefix")
+      format("embedded-opentype"),
+    url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.woff2")
+      format("woff2"),
+    url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.woff")
+      format("woff"),
+    url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.ttf")
+      format("truetype"),
+    url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.svg#PraxisEF")
+      format("svg");
   font-weight: bold;
   font-style: normal;
 }
@@ -37,11 +55,17 @@
 @font-face {
   font-family: "Neue Praxis";
   src: url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.eot");
-  src: url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.eot?#iefix") format("embedded-opentype"),
-    url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.woff2") format("woff2"),
-    url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.woff") format("woff"),
-    url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.ttf") format("truetype"),
-    url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.svg#PraxisEF") format("svg");
+  src:
+    url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.eot?#iefix")
+      format("embedded-opentype"),
+    url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.woff2")
+      format("woff2"),
+    url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.woff")
+      format("woff"),
+    url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.ttf")
+      format("truetype"),
+    url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.svg#PraxisEF")
+      format("svg");
  font-weight: normal;
  font-style: normal;
 }
@@ -49,11 +73,17 @@
 @font-face {
   font-family: "Neue Demos";
   src: url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.eot");
-  src: url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.eot?#iefix") format("embedded-opentype"),
-    url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.woff2") format("woff2"),
-    url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.woff") format("woff"),
-    url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.ttf") format("truetype"),
-    url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.svg#DemosEF") format("svg");
+  src:
+    url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.eot?#iefix")
+      format("embedded-opentype"),
+    url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.woff2")
+      format("woff2"),
+    url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.woff")
+      format("woff"),
+    url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.ttf")
+      format("truetype"),
+    url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.svg#DemosEF")
+      format("svg");
   font-weight: normal;
   font-style: normal;
 }
@@ -61,11 +91,17 @@
 @font-face {
   font-family: "Neue Demos";
   src: url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.eot");
-  src: url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.eot?#iefix") format("embedded-opentype"),
-    url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.woff2") format("woff2"),
-    url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.woff") format("woff"),
-    url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.ttf") format("truetype"),
-    url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.svg#DemosEF") format("svg");
+  src:
+    url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.eot?#iefix")
+      format("embedded-opentype"),
+    url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.woff2")
+      format("woff2"),
+    url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.woff")
+      format("woff"),
+    url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.ttf")
+      format("truetype"),
+    url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.svg#DemosEF")
+      format("svg");
   font-weight: normal;
   font-style: italic;
 }
@@ -91,4 +127,4 @@
   @extend .thw-text-font;
 
   font-style: italic !important;
 }
@@ -1,2 +1,2 @@
 $heading-font-family: "Neue Praxis", "Roboto", sans-serif;
 $body-font-family: "Neue Demos", serif;
@@ -16,20 +16,11 @@
     "useDefineForClassFields": true,
     "sourceMap": true,
     "baseUrl": ".",
-    "types": [
-      "webpack-env"
-    ],
+    "types": ["webpack-env"],
     "paths": {
-      "@/*": [
-        "src/*"
-      ]
+      "@/*": ["src/*"]
     },
-    "lib": [
-      "esnext",
-      "dom",
-      "dom.iterable",
-      "scripthost"
-    ]
+    "lib": ["esnext", "dom", "dom.iterable", "scripthost"]
   },
   "include": [
     "src/**/*.ts",
@@ -38,7 +29,5 @@
     "tests/**/*.ts",
     "tests/**/*.tsx"
   ],
-  "exclude": [
-    "node_modules"
-  ]
-}
+  "exclude": ["node_modules"]
+}
@@ -1,6 +1,5 @@
-const { defineConfig } = require('@vue/cli-service')
+const { defineConfig } = require("@vue/cli-service");
+
 module.exports = defineConfig({
-  transpileDependencies: [
-    'vuetify'
-  ]
-})
+  transpileDependencies: ["vuetify"],
+});
ui/yarn.lock (3740 changes): file diff suppressed because it is too large.