Merge branch 'feature/python3.12' into develop
This commit is contained in:
commit
d9479eb2bc
67 changed files with 4423 additions and 3680 deletions
72
Dockerfile
72
Dockerfile
|
@ -2,41 +2,49 @@
|
|||
# build ui #
|
||||
############
|
||||
|
||||
FROM node:lts-alpine AS build-ui
|
||||
|
||||
# some dir for our code
|
||||
WORKDIR /app
|
||||
|
||||
# install dependencies
|
||||
COPY ui/package*.json ui/yarn*.lock ./
|
||||
RUN yarn --production=false
|
||||
|
||||
# copy code
|
||||
COPY ui .
|
||||
RUN yarn build
|
||||
|
||||
|
||||
##############
|
||||
# webservice #
|
||||
##############
|
||||
|
||||
FROM antonapetrov/uvicorn-gunicorn:python3.9-alpine3.13 AS production
|
||||
|
||||
RUN set -ex; \
|
||||
# prerequisites
|
||||
apk add --no-cache \
|
||||
libmagic \
|
||||
;
|
||||
FROM node:lts AS build-ui
|
||||
|
||||
# env setup
|
||||
ENV \
|
||||
WORKDIR /usr/local/src/ovdashboard_ui
|
||||
|
||||
# install ovdashboard_ui dependencies
|
||||
COPY ui/package*.json ui/yarn*.lock ./
|
||||
RUN yarn install --production false
|
||||
|
||||
# copy and build ovdashboard_ui
|
||||
COPY ui ./
|
||||
RUN yarn build --dest /tmp/ovdashboard_ui/html
|
||||
|
||||
###########
|
||||
# web app #
|
||||
###########
|
||||
|
||||
FROM tiangolo/uvicorn-gunicorn:python3.12-slim AS production
|
||||
|
||||
# add prepared ovdashboard_ui
|
||||
COPY --from=build-ui /tmp/ovdashboard_ui /usr/local/share/ovdashboard_ui
|
||||
|
||||
# env setup
|
||||
WORKDIR /usr/local/src/ovdashboard_api
|
||||
ENV \
|
||||
PRODUCTION_MODE="true" \
|
||||
APP_MODULE="ovdashboard_api:app"
|
||||
PORT="8000" \
|
||||
MODULE_NAME="ovdashboard_api.app"
|
||||
EXPOSE 8000
|
||||
|
||||
# install API
|
||||
COPY api /usr/src/ovdashboard_api
|
||||
COPY api ./
|
||||
RUN set -ex; \
|
||||
pip3 --no-cache-dir install /usr/src/ovdashboard_api;
|
||||
# install libs
|
||||
export DEBIAN_FRONTEND=noninteractive; \
|
||||
apt-get update; apt-get install --yes --no-install-recommends \
|
||||
libmagic1 \
|
||||
; rm -rf /var/lib/apt/lists/*; \
|
||||
\
|
||||
# remove example app
|
||||
rm -rf /app; \
|
||||
\
|
||||
# install ovdashboard_api
|
||||
python -m pip --no-cache-dir install ./
|
||||
|
||||
# install UI
|
||||
COPY --from=build-ui /app/dist /html
|
||||
# run as unprivileged user
|
||||
USER nobody
|
||||
|
|
|
@ -1,12 +1,28 @@
|
|||
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.224.2/containers/python-3/.devcontainer/base.Dockerfile
|
||||
# See here for image contents: https://github.com/devcontainers/images/blob/main/src/python/.devcontainer/Dockerfile
|
||||
|
||||
# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster
|
||||
ARG VARIANT="3.10-bullseye"
|
||||
FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT}
|
||||
# [Choice] Python version (use -bookworm or -bullseye variants on local arm64/Apple Silicon):
|
||||
# - 3, 3.12, 3.11, 3.10, 3.9, 3.8
|
||||
# - 3-bookworm, 3.12-bookworm, 3.11-bookworm, 3.10-bookworm, 3.9-bookworm, 3.8-bookworm
|
||||
# - 3-bullseye, 3.12-bullseye, 3.11-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye
|
||||
# - 3-buster, 3.12-buster, 3.11-buster, 3.10-buster, 3.9-buster, 3.8-buster
|
||||
ARG VARIANT="3.12-bookworm"
|
||||
FROM mcr.microsoft.com/vscode/devcontainers/python:1-${VARIANT}
|
||||
|
||||
# [Choice] Node.js version: none, lts/*, 16, 14, 12, 10
|
||||
# Add "Poetry": https://python-poetry.org
|
||||
ARG POETRY_HOME="/usr/local"
|
||||
ENV POETRY_HOME="${POETRY_HOME}"
|
||||
RUN set -ex; \
|
||||
\
|
||||
curl -sSL https://install.python-poetry.org | python3 -; \
|
||||
poetry self add poetry-plugin-up;
|
||||
|
||||
# [Choice] Node.js version: none, lts/*, 18, 16, 14, 12, 10
|
||||
ARG NODE_VERSION="none"
|
||||
RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi
|
||||
RUN set -ex; \
|
||||
\
|
||||
if [ "${NODE_VERSION}" != "none" ]; then \
|
||||
su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; \
|
||||
fi
|
||||
|
||||
# [Optional] If your pip requirements rarely change, uncomment this section to add them to the image.
|
||||
# COPY requirements.txt /tmp/pip-tmp/
|
||||
|
@ -20,13 +36,10 @@ RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/
|
|||
RUN set -ex; \
|
||||
\
|
||||
export DEBIAN_FRONTEND=noninteractive; \
|
||||
apt-get update; apt-get -y install --no-install-recommends \
|
||||
apt-get update; apt-get install --yes --no-install-recommends \
|
||||
git-flow \
|
||||
libmagic1 \
|
||||
; rm -rf /var/lib/apt/lists/*;
|
||||
|
||||
# [Optional] Uncomment this line to install global node packages.
|
||||
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
|
||||
|
||||
USER vscode
|
||||
RUN curl -sSL https://install.python-poetry.org | python3 -
|
||||
|
|
|
@ -1,46 +1,51 @@
|
|||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
|
||||
// https://github.com/microsoft/vscode-dev-containers/tree/v0.224.2/containers/python-3
|
||||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
|
||||
// README at: https://github.com/devcontainers/templates/tree/main/src/python
|
||||
{
|
||||
"name": "Python 3",
|
||||
"name": "OVD API",
|
||||
"build": {
|
||||
"dockerfile": "Dockerfile",
|
||||
"context": "..",
|
||||
"args": {
|
||||
// Update 'VARIANT' to pick a Python version: 3, 3.10, 3.9, 3.8, 3.7, 3.6
|
||||
// Update 'VARIANT' to pick a Python version.
|
||||
// Append -bullseye or -buster to pin to an OS version.
|
||||
// Use -bullseye variants on local on arm64/Apple Silicon.
|
||||
"VARIANT": "3.9",
|
||||
// "VARIANT": "3.11-bullseye",
|
||||
// Options
|
||||
"NODE_VERSION": "none"
|
||||
}
|
||||
},
|
||||
// Set *default* container specific settings.json values on container create.
|
||||
"settings": {
|
||||
"terminal.integrated.defaultProfile.linux": "zsh",
|
||||
"python.defaultInterpreterPath": "/usr/local/bin/python",
|
||||
"python.linting.enabled": true,
|
||||
"python.linting.pylintEnabled": true,
|
||||
"python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
|
||||
"python.formatting.blackPath": "/usr/local/py-utils/bin/black",
|
||||
"python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
|
||||
"python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
|
||||
"python.linting.flake8Path": "/usr/local/py-utils/bin/flake8",
|
||||
"python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
|
||||
"python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
|
||||
"python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
|
||||
"python.linting.pylintPath": "/usr/local/py-utils/bin/pylint"
|
||||
"containerEnv": {
|
||||
"TZ": "Europe/Berlin"
|
||||
},
|
||||
// Add the IDs of extensions you want installed when the container is created.
|
||||
"extensions": [
|
||||
"ms-python.python",
|
||||
"ms-python.vscode-pylance",
|
||||
"be5invis.toml"
|
||||
],
|
||||
// Configure tool-specific properties.
|
||||
"customizations": {
|
||||
// Configure properties specific to VS Code.
|
||||
"vscode": {
|
||||
// Set *default* container specific settings.json values on container create.
|
||||
"settings": {
|
||||
"python.defaultInterpreterPath": "/usr/local/bin/python",
|
||||
"terminal.integrated.defaultProfile.linux": "zsh"
|
||||
},
|
||||
// Add the IDs of extensions you want installed when the container is created.
|
||||
"extensions": [
|
||||
"be5invis.toml",
|
||||
"mhutchie.git-graph",
|
||||
"ms-python.python",
|
||||
"ms-python.black-formatter",
|
||||
"ms-python.flake8",
|
||||
"ms-python.isort",
|
||||
"ms-python.vscode-pylance"
|
||||
]
|
||||
}
|
||||
},
|
||||
// Use 'postStartCommand' to run commands after the container is started.
|
||||
"postStartCommand": "poetry install"
|
||||
// Features to add to the dev container. More info: https://containers.dev/features.
|
||||
// "features": {},
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
// "forwardPorts": [],
|
||||
// Use 'postCreateCommand' to run commands after the container is created.
|
||||
// "postCreateCommand": "pip3 install --user -r requirements.txt",
|
||||
"postStartCommand": "poetry install",
|
||||
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
|
||||
"remoteUser": "vscode"
|
||||
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
|
||||
// "remoteUser": "root"
|
||||
}
|
4
api/.flake8
Normal file
4
api/.flake8
Normal file
|
@ -0,0 +1,4 @@
|
|||
[flake8]
|
||||
max-line-length = 80
|
||||
select = C,E,F,I,W,B,B950
|
||||
extend-ignore = E203, E501
|
14
api/.vscode/launch.json
vendored
14
api/.vscode/launch.json
vendored
|
@ -8,7 +8,19 @@
|
|||
"name": "Main Module",
|
||||
"type": "python",
|
||||
"request": "launch",
|
||||
"module": "ovdashboard_api",
|
||||
"module": "ovdashboard_api.main",
|
||||
"pythonArgs": [
|
||||
"-Xfrozen_modules=off",
|
||||
],
|
||||
"env": {
|
||||
"PYDEVD_DISABLE_FILE_VALIDATION": "1",
|
||||
"LOG_LEVEL": "DEBUG",
|
||||
"WEBDAV__CACHE_TTL": "30",
|
||||
"CALDAV__CACHE_TTL": "30",
|
||||
// "PRODUCTION_MODE": "true",
|
||||
// "WEBDAV__RETRIES": "5",
|
||||
// "WEBDAV__RETRY_DELAY": "1",
|
||||
},
|
||||
"justMyCode": true
|
||||
}
|
||||
]
|
||||
|
|
21
api/.vscode/settings.json
vendored
21
api/.vscode/settings.json
vendored
|
@ -1,17 +1,20 @@
|
|||
{
|
||||
"python.testing.pytestArgs": [
|
||||
"tests"
|
||||
],
|
||||
"python.testing.unittestEnabled": false,
|
||||
"python.testing.pytestEnabled": true,
|
||||
"python.linting.enabled": true,
|
||||
"python.linting.pylintEnabled": false,
|
||||
"python.linting.flake8Enabled": true,
|
||||
"python.languageServer": "Pylance",
|
||||
"editor.formatOnSave": true,
|
||||
"[python]": {
|
||||
"editor.defaultFormatter": "ms-python.black-formatter"
|
||||
},
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": true
|
||||
},
|
||||
"git.closeDiffOnOperation": true,
|
||||
"python.analysis.typeCheckingMode": "basic"
|
||||
"python.analysis.typeCheckingMode": "basic",
|
||||
"python.analysis.diagnosticMode": "workspace",
|
||||
"python.testing.pytestArgs": [
|
||||
"test"
|
||||
],
|
||||
"python.testing.unittestEnabled": false,
|
||||
"python.testing.pytestEnabled": true,
|
||||
"black-formatter.importStrategy": "fromEnvironment",
|
||||
"flake8.importStrategy": "fromEnvironment",
|
||||
}
|
|
@ -7,43 +7,34 @@ This file: Sets up logging.
|
|||
|
||||
from logging.config import dictConfig
|
||||
|
||||
from pydantic import BaseModel
|
||||
from .core.settings import SETTINGS
|
||||
|
||||
from .app import app
|
||||
from .settings import SETTINGS
|
||||
# Logging configuration to be set for the server.
|
||||
# https://stackoverflow.com/a/67937084
|
||||
|
||||
__all__ = ["app"]
|
||||
|
||||
|
||||
class LogConfig(BaseModel):
|
||||
"""
|
||||
Logging configuration to be set for the server.
|
||||
https://stackoverflow.com/a/67937084
|
||||
"""
|
||||
|
||||
# Logging config
|
||||
version = 1
|
||||
disable_existing_loggers = False
|
||||
formatters = {
|
||||
LOG_CONFIG = dict(
|
||||
version=1,
|
||||
disable_existing_loggers=False,
|
||||
formatters={
|
||||
"default": {
|
||||
"()": "uvicorn.logging.DefaultFormatter",
|
||||
"fmt": "%(levelprefix)s [%(asctime)s] %(name)s: %(message)s",
|
||||
"datefmt": "%Y-%m-%d %H:%M:%S",
|
||||
},
|
||||
}
|
||||
handlers = {
|
||||
},
|
||||
handlers={
|
||||
"default": {
|
||||
"formatter": "default",
|
||||
"class": "logging.StreamHandler",
|
||||
"stream": "ext://sys.stderr",
|
||||
},
|
||||
}
|
||||
loggers = {
|
||||
},
|
||||
loggers={
|
||||
"ovdashboard_api": {
|
||||
"handlers": ["default"],
|
||||
"level": SETTINGS.log_level,
|
||||
},
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
dictConfig(LogConfig().dict())
|
||||
dictConfig(LOG_CONFIG)
|
||||
|
|
|
@ -6,13 +6,18 @@ Main script for `ovdashboard_api` module.
|
|||
Creates the main `FastAPI` app.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import time
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
|
||||
from .dav_common import webdav_check
|
||||
from .core.settings import SETTINGS
|
||||
from .core.webdav import WebDAV
|
||||
from .routers import v1_router
|
||||
from .settings import SETTINGS
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
app = FastAPI(
|
||||
title="OVDashboard API",
|
||||
|
@ -30,33 +35,53 @@ app = FastAPI(
|
|||
redoc_url=SETTINGS.redoc_url,
|
||||
)
|
||||
|
||||
app.add_event_handler("startup", webdav_check)
|
||||
|
||||
|
||||
@app.on_event("startup")
|
||||
async def add_middlewares() -> None:
|
||||
if SETTINGS.production_mode:
|
||||
# Mount frontend in production mode
|
||||
app.mount(
|
||||
path="/",
|
||||
app=StaticFiles(
|
||||
directory=SETTINGS.ui_directory,
|
||||
html=True,
|
||||
),
|
||||
name="frontend",
|
||||
)
|
||||
|
||||
else:
|
||||
# Allow CORS in debug mode
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=[
|
||||
"*",
|
||||
],
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
expose_headers=["*"],
|
||||
)
|
||||
|
||||
app.include_router(v1_router)
|
||||
|
||||
_logger.info(
|
||||
"Production mode is %s.",
|
||||
"enabled" if SETTINGS.production_mode else "disabled",
|
||||
)
|
||||
|
||||
if SETTINGS.production_mode:
|
||||
# Mount frontend in production mode
|
||||
app.mount(
|
||||
path="/",
|
||||
app=StaticFiles(
|
||||
directory=SETTINGS.ui_directory,
|
||||
html=True,
|
||||
),
|
||||
name="frontend",
|
||||
)
|
||||
|
||||
def check_webdav(retry: int) -> bool | None:
|
||||
if WebDAV._webdav_client.check(""):
|
||||
return True
|
||||
|
||||
_logger.warning(
|
||||
"WebDAV connection to %s failed (try %d of %d)",
|
||||
repr(SETTINGS.webdav.url),
|
||||
retry + 1,
|
||||
SETTINGS.webdav.retries,
|
||||
)
|
||||
|
||||
if retry < SETTINGS.webdav.retries:
|
||||
_logger.debug("Retrying in %d seconds ...", SETTINGS.webdav.retry_delay)
|
||||
time.sleep(SETTINGS.webdav.retry_delay)
|
||||
|
||||
if not any(check_webdav(n) for n in range(SETTINGS.webdav.retries)):
|
||||
raise ConnectionError("WebDAV connection failed")
|
||||
|
||||
else:
|
||||
assert WebDAV._webdav_client.check("")
|
||||
|
||||
# Allow CORS in debug mode
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_credentials=True,
|
||||
allow_headers=["*"],
|
||||
allow_methods=["*"],
|
||||
allow_origins=["*"],
|
||||
expose_headers=["*"],
|
||||
)
|
||||
|
||||
_logger.debug("WebDAV connection ok.")
|
||||
|
|
|
@ -1,29 +0,0 @@
|
|||
"""
|
||||
Some useful helpers for working in async contexts.
|
||||
"""
|
||||
|
||||
from asyncio import get_running_loop
|
||||
from functools import partial, wraps
|
||||
from typing import Awaitable, Callable, TypeVar
|
||||
|
||||
RT = TypeVar("RT")
|
||||
|
||||
|
||||
def run_in_executor(
|
||||
function: Callable[..., RT]
|
||||
) -> Callable[..., Awaitable[RT]]:
|
||||
"""
|
||||
Decorator to make blocking a function call asyncio compatible.
|
||||
https://stackoverflow.com/questions/41063331/how-to-use-asyncio-with-existing-blocking-library/
|
||||
https://stackoverflow.com/a/53719009
|
||||
"""
|
||||
|
||||
@wraps(function)
|
||||
async def wrapper(*args, **kwargs) -> RT:
|
||||
loop = get_running_loop()
|
||||
return await loop.run_in_executor(
|
||||
None,
|
||||
partial(function, *args, **kwargs),
|
||||
)
|
||||
|
||||
return wrapper
|
100
api/ovdashboard_api/core/caldav.py
Normal file
100
api/ovdashboard_api/core/caldav.py
Normal file
|
@ -0,0 +1,100 @@
|
|||
import functools
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import cast
|
||||
|
||||
from asyncify import asyncify
|
||||
from cachetools import TTLCache, cachedmethod
|
||||
from caldav import Calendar, DAVClient, Event, Principal
|
||||
from vobject.base import Component, toVName
|
||||
|
||||
from .calevent import CalEvent
|
||||
from .config import Config
|
||||
from .settings import SETTINGS
|
||||
from .webdav import davkey
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CalDAV:
|
||||
_caldav_client = DAVClient(
|
||||
url=SETTINGS.caldav.url,
|
||||
username=SETTINGS.caldav.username,
|
||||
password=SETTINGS.caldav.password,
|
||||
)
|
||||
|
||||
_cache = TTLCache(
|
||||
ttl=SETTINGS.caldav.cache_ttl,
|
||||
maxsize=SETTINGS.caldav.cache_size,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@property
|
||||
def principal(cls) -> Principal:
|
||||
"""
|
||||
Gets the `Principal` object of the main CalDAV client.
|
||||
"""
|
||||
|
||||
return cls._caldav_client.principal()
|
||||
|
||||
@classmethod
|
||||
@property
|
||||
@asyncify
|
||||
@cachedmethod(
|
||||
cache=lambda cls: cls._cache,
|
||||
key=functools.partial(davkey, "calendars"),
|
||||
)
|
||||
def calendars(cls) -> list[str]:
|
||||
"""
|
||||
Asynchroneously lists all calendars using the main WebDAV client.
|
||||
"""
|
||||
|
||||
_logger.debug("calendars")
|
||||
return [str(cal.name) for cal in cls.principal.calendars()]
|
||||
|
||||
@classmethod
|
||||
@asyncify
|
||||
@cachedmethod(
|
||||
cache=lambda cls: cls._cache,
|
||||
key=functools.partial(davkey, "get_calendar"),
|
||||
)
|
||||
def get_calendar(cls, calendar_name: str) -> Calendar:
|
||||
"""
|
||||
Get a calendar by name using the CalDAV principal object.
|
||||
"""
|
||||
|
||||
return cls.principal.calendar(calendar_name)
|
||||
|
||||
@classmethod
|
||||
@asyncify
|
||||
@cachedmethod(
|
||||
cache=lambda cls: cls._cache,
|
||||
key=functools.partial(davkey, "get_events"),
|
||||
)
|
||||
def get_events(cls, calendar_name: str, cfg: Config) -> list[CalEvent]:
|
||||
"""
|
||||
Get a sorted list of events by CalDAV calendar name.
|
||||
"""
|
||||
|
||||
_logger.info(f"downloading {calendar_name!r} ...")
|
||||
|
||||
dt_start = datetime.combine(
|
||||
datetime.now().date(),
|
||||
datetime.min.time(),
|
||||
)
|
||||
dt_end = dt_start + timedelta(days=cfg.calendar.future_days)
|
||||
|
||||
search_result = cls.principal.calendar(calendar_name).search(
|
||||
start=dt_start,
|
||||
end=dt_end,
|
||||
expand=True,
|
||||
comp_class=Event,
|
||||
split_expanded=False,
|
||||
)
|
||||
|
||||
vevents = []
|
||||
for event in search_result:
|
||||
vobject = cast(Component, event.vobject_instance)
|
||||
vevents.extend(vobject.contents[toVName("vevent")])
|
||||
|
||||
return sorted(CalEvent.from_vevent(vevent) for vevent in vevents)
|
83
api/ovdashboard_api/core/calevent.py
Normal file
83
api/ovdashboard_api/core/calevent.py
Normal file
|
@ -0,0 +1,83 @@
|
|||
"""
|
||||
Definition of an asyncio compatible CalDAV calendar.
|
||||
|
||||
Caches events using `timed_alru_cache`.
|
||||
"""
|
||||
|
||||
import functools
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Annotated, Self
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, StringConstraints
|
||||
from vobject.base import Component
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
type StrippedStr = Annotated[str, StringConstraints(strip_whitespace=True)]
|
||||
|
||||
|
||||
@functools.total_ordering
|
||||
class CalEvent(BaseModel):
|
||||
"""
|
||||
A CalDAV calendar event.
|
||||
|
||||
Properties are to be named as in the EVENT component of
|
||||
RFC5545 (iCalendar).
|
||||
|
||||
https://icalendar.org/iCalendar-RFC-5545/3-6-1-event-component.html
|
||||
"""
|
||||
|
||||
model_config = ConfigDict(frozen=True)
|
||||
|
||||
summary: StrippedStr = ""
|
||||
description: StrippedStr = ""
|
||||
dtstart: datetime = datetime.now()
|
||||
dtend: datetime = datetime.now()
|
||||
|
||||
def __lt__(self, other: Self) -> bool:
|
||||
"""
|
||||
Order Events by start time.
|
||||
"""
|
||||
|
||||
return self.dtstart < other.dtstart
|
||||
|
||||
def __eq__(self, other: Self) -> bool:
|
||||
"""
|
||||
Compare all properties.
|
||||
"""
|
||||
|
||||
return self.model_dump() == other.model_dump()
|
||||
|
||||
@classmethod
|
||||
def from_vevent(cls, event: Component) -> Self:
|
||||
"""
|
||||
Create a CalEvent instance from a `VObject.VEvent` object.
|
||||
"""
|
||||
|
||||
data = {}
|
||||
keys = ("summary", "description", "dtstart", "dtend", "duration")
|
||||
|
||||
for key in keys:
|
||||
try:
|
||||
data[key] = event.contents[key][0].value # type: ignore
|
||||
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
if "dtend" not in data:
|
||||
data["dtend"] = data["dtstart"]
|
||||
|
||||
if "duration" in data:
|
||||
try:
|
||||
data["dtend"] += data["duration"]
|
||||
|
||||
except (ValueError, TypeError, AttributeError):
|
||||
_logger.warn(
|
||||
"Could not add duration %s to %s",
|
||||
repr(data["duration"]),
|
||||
repr(data["dtstart"]),
|
||||
)
|
||||
|
||||
del data["duration"]
|
||||
|
||||
return cls.model_validate(data)
|
|
@ -2,20 +2,9 @@
|
|||
Python representation of the "config.txt" file inside the WebDAV directory.
|
||||
"""
|
||||
|
||||
from io import BytesIO
|
||||
from logging import getLogger
|
||||
from typing import Any
|
||||
|
||||
from pydantic import BaseModel
|
||||
from tomli import loads as toml_loads
|
||||
from tomli_w import dump as toml_dump
|
||||
from webdav3.exceptions import RemoteResourceNotFound
|
||||
|
||||
from .dav_common import caldav_list
|
||||
from .dav_file import DavFile
|
||||
from .settings import SETTINGS
|
||||
|
||||
_logger = getLogger(__name__)
|
||||
|
||||
|
||||
class TickerUIConfig(BaseModel):
|
||||
|
@ -100,6 +89,13 @@ class Config(BaseModel):
|
|||
Main representation of "config.txt".
|
||||
"""
|
||||
|
||||
def __hash__(self) -> int:
|
||||
"""
|
||||
Fake hash (the config is always the config)
|
||||
"""
|
||||
|
||||
return hash("config")
|
||||
|
||||
image_dir: str = "image"
|
||||
text_dir: str = "text"
|
||||
file_dir: str = "file"
|
||||
|
@ -109,31 +105,3 @@ class Config(BaseModel):
|
|||
server: ServerUIConfig = ServerUIConfig()
|
||||
ticker: TickerConfig = TickerConfig()
|
||||
calendar: CalendarConfig = CalendarConfig()
|
||||
|
||||
@classmethod
|
||||
async def get(cls) -> "Config":
|
||||
"""
|
||||
Load the configuration instance from the server using `TOML`.
|
||||
"""
|
||||
|
||||
dav_file = DavFile(SETTINGS.config_path)
|
||||
|
||||
try:
|
||||
cfg = cls.parse_obj(
|
||||
toml_loads(await dav_file.as_string)
|
||||
)
|
||||
|
||||
except RemoteResourceNotFound:
|
||||
_logger.warning(
|
||||
f"Config file {SETTINGS.config_path!r} not found, creating ..."
|
||||
)
|
||||
|
||||
cfg = cls()
|
||||
cfg.calendar.aggregates["All Events"] = list(await caldav_list())
|
||||
|
||||
buffer = BytesIO()
|
||||
toml_dump(cfg.dict(), buffer)
|
||||
buffer.seek(0)
|
||||
await dav_file.write(buffer.read())
|
||||
|
||||
return cfg
|
60
api/ovdashboard_api/core/dav_common.py
Normal file
60
api/ovdashboard_api/core/dav_common.py
Normal file
|
@ -0,0 +1,60 @@
|
|||
"""
|
||||
Definition of WebDAV and CalDAV clients.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from os import path
|
||||
from pathlib import Path
|
||||
|
||||
from .. import __file__ as OVD_INIT
|
||||
from .webdav import WebDAV
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def webdav_ensure_path(remote_path: str) -> bool:
|
||||
if WebDAV._webdav_client.check(remote_path):
|
||||
_logger.debug(
|
||||
"WebDAV path %s found.",
|
||||
repr(remote_path),
|
||||
)
|
||||
return True
|
||||
|
||||
_logger.info(
|
||||
"WebDAV path %s not found, creating ...",
|
||||
repr(remote_path),
|
||||
)
|
||||
WebDAV._webdav_client.mkdir(remote_path)
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def get_skel_path(skel_file: str) -> Path:
|
||||
skel_path = path.dirname(Path(OVD_INIT).absolute())
|
||||
return Path(skel_path).joinpath("skel", skel_file)
|
||||
|
||||
|
||||
def webdav_upload_skel(remote_path: str, *skel_files: str) -> None:
|
||||
for skel_file in skel_files:
|
||||
_logger.debug(
|
||||
"Creating WebDAV file %s ...",
|
||||
repr(skel_file),
|
||||
)
|
||||
|
||||
WebDAV._webdav_client.upload_file(
|
||||
f"{remote_path}/{skel_file}",
|
||||
get_skel_path(skel_file),
|
||||
)
|
||||
|
||||
|
||||
def webdav_ensure_files(remote_path: str, *file_names: str) -> None:
|
||||
missing_files = (
|
||||
file_name
|
||||
for file_name in file_names
|
||||
if not WebDAV._webdav_client.check(f"{remote_path}/{file_name}")
|
||||
)
|
||||
|
||||
webdav_upload_skel(
|
||||
remote_path,
|
||||
*missing_files,
|
||||
)
|
162
api/ovdashboard_api/core/settings.py
Normal file
162
api/ovdashboard_api/core/settings.py
Normal file
|
@ -0,0 +1,162 @@
|
|||
"""
|
||||
Configuration definition.
|
||||
|
||||
Converts per-run (environment) variables and config files into the
|
||||
"python world" using `pydantic`.
|
||||
|
||||
Pydantic models might have convenience methods attached.
|
||||
"""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pydantic import BaseModel, model_validator
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class DAVSettings(BaseModel):
|
||||
"""
|
||||
Connection to a DAV server.
|
||||
"""
|
||||
|
||||
protocol: str | None = None
|
||||
host: str | None = None
|
||||
path: str | None = None
|
||||
|
||||
username: str | None = None
|
||||
password: str | None = None
|
||||
|
||||
cache_ttl: int = 60 * 10
|
||||
cache_size: int = 1024
|
||||
|
||||
@property
|
||||
def url(self) -> str:
|
||||
"""
|
||||
Combined DAV URL.
|
||||
"""
|
||||
|
||||
return f"{self.protocol}://{self.host}{self.path}"
|
||||
|
||||
|
||||
class WebDAVSettings(DAVSettings):
|
||||
"""
|
||||
Connection to a WebDAV server.
|
||||
"""
|
||||
|
||||
protocol: str = "https"
|
||||
host: str = "example.com"
|
||||
path: str = "/remote.php/dav"
|
||||
prefix: str = "/ovdashboard"
|
||||
|
||||
username: str = "ovd_user"
|
||||
password: str = "password"
|
||||
|
||||
config_filename: str = "config.txt"
|
||||
|
||||
disable_check: bool = False
|
||||
retries: int = 20
|
||||
retry_delay: int = 30
|
||||
prefix: str = "/ovdashboard"
|
||||
|
||||
@property
|
||||
def url(self) -> str:
|
||||
"""
|
||||
Combined DAV URL.
|
||||
"""
|
||||
|
||||
return f"{self.protocol}://{self.host}{self.path}{self.prefix}"
|
||||
|
||||
|
||||
class Settings(BaseSettings):
|
||||
"""
|
||||
Per-run settings.
|
||||
"""
|
||||
|
||||
model_config = SettingsConfigDict(
|
||||
env_file=".env",
|
||||
env_file_encoding="utf-8",
|
||||
env_nested_delimiter="__",
|
||||
)
|
||||
|
||||
#####
|
||||
# general settings
|
||||
#####
|
||||
|
||||
log_level: str = "INFO"
|
||||
production_mode: bool = False
|
||||
ui_directory: str = "/usr/local/share/ovdashboard_ui/html"
|
||||
|
||||
# doesn't even have to be reachable
|
||||
ping_host: str = "1.0.0.0"
|
||||
ping_port: int = 1
|
||||
|
||||
#####
|
||||
# openapi settings
|
||||
#####
|
||||
|
||||
def __dev_value[T](self, value: T) -> T | None:
|
||||
if self.production_mode:
|
||||
return None
|
||||
|
||||
return value
|
||||
|
||||
@property
|
||||
def openapi_url(self) -> str | None:
|
||||
return self.__dev_value("/api/openapi.json")
|
||||
|
||||
@property
|
||||
def docs_url(self) -> str | None:
|
||||
return self.__dev_value("/api/docs")
|
||||
|
||||
@property
|
||||
def redoc_url(self) -> str | None:
|
||||
return self.__dev_value("/api/redoc")
|
||||
|
||||
#####
|
||||
# webdav settings
|
||||
#####
|
||||
|
||||
webdav: WebDAVSettings = WebDAVSettings()
|
||||
|
||||
#####
|
||||
# caldav settings
|
||||
#####
|
||||
|
||||
caldav: DAVSettings = DAVSettings()
|
||||
|
||||
@model_validator(mode="before")
|
||||
@classmethod
|
||||
def validate_dav_settings(cls, data) -> dict[str, Any]:
|
||||
assert isinstance(data, dict)
|
||||
|
||||
# ensure both settings dicts are created
|
||||
for key in ("webdav", "caldav"):
|
||||
if key not in data:
|
||||
data[key] = {}
|
||||
|
||||
default_dav = DAVSettings(
|
||||
protocol="https",
|
||||
host="example.com",
|
||||
username="ovdashboard",
|
||||
password="secret",
|
||||
).model_dump()
|
||||
|
||||
for key in default_dav:
|
||||
# if "webdav" value is not specified, use default
|
||||
if key not in data["webdav"] or data["webdav"][key] is None:
|
||||
data["webdav"][key] = default_dav[key]
|
||||
|
||||
# if "caldav" value is not specified, use "webdav" value
|
||||
if key not in data["caldav"] or data["caldav"][key] is None:
|
||||
data["caldav"][key] = data["webdav"][key]
|
||||
|
||||
# add default "path"s if None
|
||||
if data["webdav"]["path"] is None:
|
||||
data["webdav"]["path"] = "/remote.php/webdav"
|
||||
|
||||
if data["caldav"]["path"] is None:
|
||||
data["caldav"]["path"] = "/remote.php/dav"
|
||||
|
||||
return data
|
||||
|
||||
|
||||
SETTINGS = Settings()
|
137
api/ovdashboard_api/core/webdav.py
Normal file
137
api/ovdashboard_api/core/webdav.py
Normal file
|
@ -0,0 +1,137 @@
|
|||
import functools
|
||||
import logging
|
||||
import re
|
||||
from io import BytesIO
|
||||
|
||||
import requests
|
||||
from asyncify import asyncify
|
||||
from cachetools import TTLCache, cachedmethod
|
||||
from cachetools.keys import hashkey
|
||||
from webdav3.client import Client as WebDAVclient
|
||||
|
||||
from .settings import SETTINGS
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def davkey(name, _, *args, **kwargs):
|
||||
"""Return a cache key for use with cached methods."""
|
||||
|
||||
return hashkey(name, *args, **kwargs)
|
||||
|
||||
|
||||
class WebDAV:
|
||||
class __WebDAVclient(WebDAVclient):
|
||||
def execute_request(
|
||||
self,
|
||||
action,
|
||||
path,
|
||||
data=None,
|
||||
headers_ext=None,
|
||||
) -> requests.Response:
|
||||
res = super().execute_request(action, path, data, headers_ext)
|
||||
|
||||
# the "Content-Length" header can randomly be missing on txt files,
|
||||
# this should fix that (probably serverside bug)
|
||||
if action == "download" and "Content-Length" not in res.headers:
|
||||
res.headers["Content-Length"] = str(len(res.text))
|
||||
|
||||
return res
|
||||
|
||||
_webdav_client = __WebDAVclient(
|
||||
{
|
||||
"webdav_hostname": SETTINGS.webdav.url,
|
||||
"webdav_login": SETTINGS.webdav.username,
|
||||
"webdav_password": SETTINGS.webdav.password,
|
||||
"disable_check": SETTINGS.webdav.disable_check,
|
||||
}
|
||||
)
|
||||
|
||||
_cache = TTLCache(
|
||||
ttl=SETTINGS.webdav.cache_ttl,
|
||||
maxsize=SETTINGS.webdav.cache_size,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@asyncify
|
||||
@cachedmethod(
|
||||
cache=lambda cls: cls._cache,
|
||||
key=functools.partial(davkey, "list_files"),
|
||||
)
|
||||
def list_files(
|
||||
cls,
|
||||
directory: str = "",
|
||||
*,
|
||||
regex: re.Pattern[str] = re.compile(""),
|
||||
) -> list[str]:
|
||||
"""
|
||||
List files in directory `directory` matching RegEx `regex`
|
||||
"""
|
||||
|
||||
_logger.debug(f"list_files {directory!r}")
|
||||
ls = cls._webdav_client.list(directory)
|
||||
|
||||
return [path for path in ls if regex.search(path)]
|
||||
|
||||
@classmethod
|
||||
@asyncify
|
||||
@cachedmethod(
|
||||
cache=lambda cls: cls._cache,
|
||||
key=functools.partial(davkey, "exists"),
|
||||
)
|
||||
def exists(cls, path: str) -> bool:
|
||||
"""
|
||||
`True` iff there is a WebDAV resource at `path`
|
||||
"""
|
||||
|
||||
_logger.debug(f"file_exists {path!r}")
|
||||
return cls._webdav_client.check(path)
|
||||
|
||||
@classmethod
@asyncify
@cachedmethod(
    cache=lambda cls: cls._cache,
    key=functools.partial(davkey, "read_bytes"),
)
def read_bytes(cls, path: str) -> bytes:
    """
    Load WebDAV file from `path` as bytes
    """

    _logger.debug(f"read_bytes {path!r}")

    # download into an in-memory buffer, then hand back its contents
    buffer = BytesIO()
    cls._webdav_client.download_from(buffer, path)
    return buffer.getvalue()
|
||||
@classmethod
async def read_str(cls, path: str, encoding="utf-8") -> str:
    """
    Load WebDAV file from `path` as string
    """

    _logger.debug(f"read_str {path!r}")

    # reuse the cached byte reader, then decode and trim whitespace
    raw = await cls.read_bytes(path)
    return raw.decode(encoding=encoding).strip()
|
||||
@classmethod
@asyncify
def write_bytes(cls, path: str, buffer: bytes) -> None:
    """
    Write bytes from `buffer` into WebDAV file at `path`
    """

    _logger.debug(f"write_bytes {path!r}")
    cls._webdav_client.upload_to(buffer, path)

    # invalidate cache entry
    # NOTE(review): read_bytes caches under functools.partial(davkey,
    # "read_bytes") -- this pop assumes davkey("read_bytes", ..., path)
    # equals hashkey("read_bytes", path). Confirm; otherwise a stale
    # read_bytes entry survives the write until its TTL expires.
    cls._cache.pop(hashkey("read_bytes", path))
|
||||
@classmethod
async def write_str(cls, path: str, content: str, encoding="utf-8") -> None:
    """
    Write string from `content` into WebDAV file at `path`
    """

    _logger.debug(f"write_str {path!r}")

    # encode first, then delegate to the byte writer
    encoded = content.encode(encoding=encoding)
    await cls.write_bytes(path, encoded)
|
@ -1,213 +0,0 @@
|
|||
"""
|
||||
Definition of an asyncio compatible CalDAV calendar.
|
||||
|
||||
Caches events using `timed_alru_cache`.
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
from functools import total_ordering
|
||||
from logging import getLogger
|
||||
from typing import Iterator
|
||||
|
||||
from cache import AsyncTTL
|
||||
from caldav import Calendar
|
||||
from caldav.lib.error import ReportError
|
||||
from pydantic import BaseModel, validator
|
||||
from vobject.base import Component
|
||||
|
||||
from .async_helpers import run_in_executor
|
||||
from .config import Config
|
||||
from .dav_common import caldav_principal
|
||||
from .settings import SETTINGS
|
||||
|
||||
_logger = getLogger(__name__)
|
||||
|
||||
|
||||
def _string_strip(in_str: str) -> str:
|
||||
"""
|
||||
Wrapper for str.strip().
|
||||
|
||||
Used to define `pydantic` validators.
|
||||
"""
|
||||
return in_str.strip()
|
||||
|
||||
|
||||
@total_ordering
class CalEvent(BaseModel):
    """
    A CalDAV calendar event.

    Properties are to be named as in the EVENT component of
    RFC5545 (iCalendar).

    https://icalendar.org/iCalendar-RFC-5545/3-6-1-event-component.html
    """

    summary: str = ""
    description: str = ""
    # NOTE(review): these defaults are evaluated once at import time, so
    # every default-constructed event shares the same timestamp -- confirm
    # this is intended (a default_factory would give per-instance values).
    dtstart: datetime = datetime.utcnow()
    dtend: datetime = datetime.utcnow()

    class Config:
        # events are immutable (and therefore usable in sorted containers)
        frozen = True

    def __lt__(self, other: "CalEvent") -> bool:
        """
        Order events by start time; @total_ordering derives the rest.
        """

        return self.dtstart < other.dtstart

    def __eq__(self, other: "CalEvent") -> bool:
        """
        Compare all properties.
        """

        return self.dict() == other.dict()

    # strip surrounding whitespace from the text fields on validation
    _validate_summary = validator(
        "summary",
        allow_reuse=True,
    )(_string_strip)

    _validate_description = validator(
        "description",
        allow_reuse=True,
    )(_string_strip)

    @classmethod
    def from_vevent(cls, event: Component) -> "CalEvent":
        """
        Create a CalEvent instance from a `VObject.VEvent` object.

        Missing optional components are simply skipped; a missing DTEND
        falls back to DTSTART (zero-length event), optionally shifted by
        DURATION when present.
        """

        data = {}
        keys = ("summary", "description", "dtstart", "dtend", "duration")

        for key in keys:
            try:
                data[key] = event.contents[key][0].value  # type: ignore

            except KeyError:
                pass

        if "dtend" not in data:
            data["dtend"] = data["dtstart"]

        if "duration" in data:
            try:
                data["dtend"] += data["duration"]

            except (ValueError, TypeError, AttributeError):
                # BUGFIX: Logger.warn is a deprecated alias of warning
                _logger.warning(
                    "Could not add duration %s to %s",
                    repr(data["duration"]),
                    repr(data["dtstart"]),
                )

            # DURATION is not a CalEvent field; drop it before parsing
            del data["duration"]

        return cls.parse_obj(data)
|
||||
|
||||
@AsyncTTL(time_to_live=SETTINGS.cache_time, maxsize=SETTINGS.cache_size)
async def _get_calendar(
    calendar_name: str,
) -> Calendar:
    """
    Get a calendar by name using the CalDAV principal object.

    The result is cached (AsyncTTL) for `SETTINGS.cache_time` seconds.
    """

    @run_in_executor
    def _inner() -> Calendar:
        # blocking caldav lookup, pushed onto the executor thread pool
        return caldav_principal().calendar(calendar_name)

    return await _inner()
|
||||
|
||||
@AsyncTTL(time_to_live=SETTINGS.cache_time, maxsize=SETTINGS.cache_size)
async def _get_calendar_events(
    calendar_name: str,
) -> list[CalEvent]:
    """
    Get a sorted list of events by CalDAV calendar name.

    Do not return an iterator here - this result is cached and
    an iterator would get consumed.
    """

    cfg = await Config.get()
    # how far into the future events are fetched
    search_span = timedelta(days=cfg.calendar.future_days)

    @run_in_executor
    def _inner() -> Iterator[Component]:
        """
        Get events by CalDAV calendar name.

        This can return an iterator - only the outer function is
        cached.
        """
        _logger.info(f"downloading {calendar_name!r} ...")

        calendar = caldav_principal().calendar(calendar_name)

        # search window: start of today (UTC) .. today + future_days
        date_start = datetime.utcnow().date()
        time_min = datetime.min.time()
        dt_start = datetime.combine(date_start, time_min)
        dt_end = dt_start + search_span

        try:
            # preferred: let the server expand recurring events
            search_result = calendar.date_search(
                start=dt_start,
                end=dt_end,
                expand=True,
                verify_expand=True,
            )

        except ReportError:
            # fallback: recurrences come back unexpanded
            _logger.warning("CalDAV server does not support expanded search")

            search_result = calendar.date_search(
                start=dt_start,
                end=dt_end,
                expand=False,
            )

        # flatten: each CalDAV result may carry several VEVENTs
        for event in search_result:
            vobject: Component = event.vobject_instance  # type: ignore
            yield from vobject.vevent_list

    # sorted by dtstart (CalEvent.__lt__)
    return sorted([
        CalEvent.from_vevent(vevent)
        for vevent in await _inner()
    ])
|
||||
|
||||
@dataclass(frozen=True)
class DavCalendar:
    """
    Object representation of a CalDAV calendar.
    """

    # name of the calendar on the CalDAV server
    calendar_name: str

    @property
    async def calendar(self) -> Calendar:
        """
        Calendar as `caldav` library representation.

        Delegates to the TTL-cached `_get_calendar`.
        """

        return await _get_calendar(
            calendar_name=self.calendar_name,
        )

    @property
    async def events(self) -> list[CalEvent]:
        """
        Calendar events in object representation.

        Delegates to the TTL-cached `_get_calendar_events`.
        """

        return await _get_calendar_events(
            calendar_name=self.calendar_name,
        )
@ -1,175 +0,0 @@
|
|||
"""
|
||||
Definition of WebDAV and CalDAV clients.
|
||||
"""
|
||||
|
||||
from functools import lru_cache
|
||||
from logging import getLogger
|
||||
from os import path
|
||||
from pathlib import Path
|
||||
from time import sleep
|
||||
from typing import Any, Iterator
|
||||
|
||||
from caldav import DAVClient as CalDAVclient
|
||||
from caldav import Principal as CalDAVPrincipal
|
||||
from webdav3.client import Client as WebDAVclient
|
||||
from webdav3.client import Resource as WebDAVResource
|
||||
|
||||
from . import __file__ as OVD_INIT
|
||||
from .async_helpers import run_in_executor
|
||||
from .settings import SETTINGS
|
||||
|
||||
# main WebDAV client, configured from application settings
_WEBDAV_CLIENT = WebDAVclient({
    "webdav_hostname": SETTINGS.webdav.url,
    "webdav_login": SETTINGS.webdav.username,
    "webdav_password": SETTINGS.webdav.password,
    # NOTE(review): flat attribute here vs the nested SETTINGS.webdav.*
    # above -- confirm the settings model really exposes
    # `webdav_disable_check` at the top level
    "disable_check": SETTINGS.webdav_disable_check,
})

_logger = getLogger(__name__)
||||
|
||||
|
||||
def webdav_check() -> None:
    """
    Checks if base resources are available.

    In production mode, retries the WebDAV connection up to
    `SETTINGS.webdav_retries` times (30 s apart); otherwise a single
    failed check raises immediately.

    Raises:
        ConnectionError: the WebDAV server is unreachable.
        FileNotFoundError: the prefix directory does not exist.
    """

    _logger.info(
        "Production mode is %s.",
        "enabled" if SETTINGS.production_mode else "disabled",
    )

    if SETTINGS.production_mode:
        for _ in range(SETTINGS.webdav_retries):
            if _WEBDAV_CLIENT.check(""):
                break

            _logger.warning(
                "Waiting for WebDAV connection to %s ...",
                repr(SETTINGS.webdav.url),
            )
            sleep(30)

        else:
            # BUGFIX: previously fell through silently after exhausting
            # all retries; fail loudly like the non-production branch.
            _logger.error(
                "WebDAV connection to %s FAILED!",
                repr(SETTINGS.webdav.url),
            )
            raise ConnectionError(SETTINGS.webdav.url)

        _logger.debug("WebDAV connection ok.")

    elif not _WEBDAV_CLIENT.check(""):
        _logger.error(
            "WebDAV connection to %s FAILED!",
            repr(SETTINGS.webdav.url),
        )
        raise ConnectionError(SETTINGS.webdav.url)

    _logger.debug("WebDAV connection ok.")

    if not _WEBDAV_CLIENT.check(SETTINGS.webdav_prefix):
        _logger.error(
            "WebDAV prefix directory %s NOT FOUND, please create it!",
            repr(SETTINGS.webdav_prefix),
        )
        raise FileNotFoundError(SETTINGS.webdav_prefix)

    _logger.debug("WebDAV prefix directory found.")
|
||||
|
||||
def webdav_ensure_path(remote_path: str) -> bool:
    """
    Ensure the directory `remote_path` (below the WebDAV prefix) exists.

    Returns True if it was already present, False if it had to be created.
    """

    full_path = f"{SETTINGS.webdav_prefix}/{remote_path}"

    if not _WEBDAV_CLIENT.check(full_path):
        _logger.info(
            "WebDAV path %s not found, creating ...",
            repr(full_path),
        )
        _WEBDAV_CLIENT.mkdir(full_path)
        return False

    _logger.debug(
        "WebDAV path %s found.",
        repr(full_path),
    )
    return True
|
||||
|
||||
def get_skel_path(skel_file: str) -> Path:
    """
    Absolute path of `skel_file` inside the package's "skel" directory.
    """
    package_dir = Path(OVD_INIT).absolute().parent
    return package_dir / "skel" / skel_file
|
||||
|
||||
def webdav_upload_skel(remote_path: str, *skel_files: str) -> None:
    """
    Upload the given skeleton files into `remote_path` below the prefix.
    """

    target_dir = f"{SETTINGS.webdav_prefix}/{remote_path}"

    for skel_file in skel_files:
        _logger.debug(
            "Creating WebDAV file %s ...",
            repr(skel_file),
        )

        _WEBDAV_CLIENT.upload_file(
            f"{target_dir}/{skel_file}",
            get_skel_path(skel_file),
        )
|
||||
|
||||
def webdav_ensure_files(remote_path: str, *file_names: str) -> None:
    """
    Upload skeleton versions of any of `file_names` missing below
    `remote_path`.
    """

    missing_files = [
        file_name
        for file_name in file_names
        if not _WEBDAV_CLIENT.check(
            path.join(SETTINGS.webdav_prefix, remote_path, file_name)
        )
    ]

    webdav_upload_skel(remote_path, *missing_files)
|
||||
|
||||
@lru_cache(maxsize=SETTINGS.cache_size)
def webdav_resource(remote_path: Any) -> WebDAVResource:
    """
    Gets a resource using the main WebDAV client.

    Memoized per `remote_path` (lru_cache, no expiry).
    """

    return _WEBDAV_CLIENT.resource(
        f"{SETTINGS.webdav_prefix}/{remote_path}"
    )


@run_in_executor
def webdav_list(remote_path: str) -> list[str]:
    """
    Asynchronously lists a WebDAV path using the main WebDAV client.
    """

    return _WEBDAV_CLIENT.list(
        f"{SETTINGS.webdav_prefix}/{remote_path}"
    )
|
||||
|
||||
# main CalDAV client, configured from application settings
_CALDAV_CLIENT = CalDAVclient(
    url=SETTINGS.caldav.url,
    username=SETTINGS.caldav.username,
    password=SETTINGS.caldav.password,
)


def caldav_principal() -> CalDAVPrincipal:
    """
    Gets the `Principal` object of the main CalDAV client.
    """

    return _CALDAV_CLIENT.principal()


@run_in_executor
def caldav_list() -> Iterator[str]:
    """
    Asynchronously lists all calendars using the main WebDAV client.

    Returns a generator of calendar names (consumed by the caller).
    """

    return (
        str(cal.name)
        for cal in caldav_principal().calendars()
    )
@ -1,98 +0,0 @@
|
|||
"""
|
||||
Definition of an asyncio compatible WebDAV file.
|
||||
|
||||
Caches files using `timed_alru_cache`.
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from io import BytesIO
|
||||
from logging import getLogger
|
||||
from typing import Any
|
||||
|
||||
from cache import AsyncTTL
|
||||
from webdav3.client import Resource
|
||||
|
||||
from .async_helpers import run_in_executor
|
||||
from .dav_common import webdav_resource
|
||||
from .settings import SETTINGS
|
||||
|
||||
_logger = getLogger(__name__)
|
||||
|
||||
|
||||
@AsyncTTL(time_to_live=SETTINGS.cache_time, maxsize=SETTINGS.cache_size)
async def _get_buffer(
    remote_path: Any,
) -> BytesIO:
    """
    Download file contents into a new `BytesIO` object.

    Cached per `remote_path` for `SETTINGS.cache_time` seconds; callers
    share the returned buffer object while the entry is cached.
    """

    @run_in_executor
    def _inner() -> BytesIO:
        _logger.info(f"downloading {remote_path!r} ...")

        resource = webdav_resource(remote_path)
        buffer = BytesIO()
        # blocking download, run on the executor thread pool
        resource.write_to(buffer)
        return buffer

    return await _inner()
|
||||
|
||||
@dataclass(frozen=True)
class DavFile:
    """
    Object representation of a WebDAV file.
    """

    # path of the file, relative to the WebDAV prefix
    remote_path: str

    @property
    def resource(self) -> Resource:
        """
        WebDAV file handle.
        """

        return webdav_resource(self.remote_path)

    @property
    async def __buffer(self) -> BytesIO:
        """
        File contents as binary stream.

        Delegates to the TTL-cached `_get_buffer`; the same BytesIO
        object may be handed to multiple callers until the cache expires.
        """

        return await _get_buffer(
            remote_path=self.remote_path,
        )

    @property
    async def as_bytes(self) -> bytes:
        """
        File contents as binary data.
        """

        buffer = await self.__buffer

        # rewind first: the cached buffer may already have been read
        buffer.seek(0)
        return buffer.read()

    @property
    async def as_string(self) -> str:
        """
        File contents as string (decoded as UTF-8).
        """

        bytes = await self.as_bytes
        return bytes.decode(encoding="utf-8")

    async def write(self, content: bytes) -> None:
        """
        Write bytes into file.

        NOTE(review): the cached download buffer for this path is not
        invalidated here -- reads may stay stale until the TTL expires.
        """

        @run_in_executor
        def _inner() -> None:
            buffer = BytesIO(content)
            # webdav3 Resource.read_from consumes the buffer as upload body
            self.resource.read_from(buffer)

        await _inner()
|
@ -1,6 +1,6 @@
|
|||
from uvicorn import run as uvicorn_run
|
||||
|
||||
from .settings import SETTINGS
|
||||
from .core.settings import SETTINGS
|
||||
|
||||
|
||||
def main() -> None:
|
||||
|
@ -9,7 +9,7 @@ def main() -> None:
|
|||
"""
|
||||
|
||||
uvicorn_run(
|
||||
app="ovdashboard_api:app",
|
||||
app="ovdashboard_api.app:app",
|
||||
host="0.0.0.0",
|
||||
port=8000,
|
||||
reload=not SETTINGS.production_mode,
|
|
@ -19,3 +19,5 @@ router.include_router(file.router)
|
|||
|
||||
router.include_router(calendar.router)
|
||||
router.include_router(aggregate.router)
|
||||
|
||||
__all__ = ["router"]
|
||||
|
|
|
@ -2,28 +2,21 @@
|
|||
Dependables for defining Routers.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
from logging import getLogger
|
||||
from typing import Iterator, Protocol
|
||||
import tomllib
|
||||
|
||||
from fastapi import HTTPException, status
|
||||
import tomli_w
|
||||
from fastapi import Depends, HTTPException, params, status
|
||||
from webdav3.exceptions import RemoteResourceNotFound
|
||||
|
||||
from ...config import Config
|
||||
from ...dav_common import caldav_list, webdav_list
|
||||
|
||||
_logger = getLogger(__name__)
|
||||
|
||||
|
||||
class NameLister(Protocol):
|
||||
"""
|
||||
Can be called to create an iterator containing some names.
|
||||
"""
|
||||
|
||||
async def __call__(self) -> Iterator[str]:
|
||||
...
|
||||
from ...core.caldav import CalDAV
|
||||
from ...core.config import Config
|
||||
from ...core.settings import SETTINGS
|
||||
from ...core.webdav import WebDAV
|
||||
from ._list_manager import Dependable, DependableFn, ListManager
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
_RESPONSE_OK = {
|
||||
status.HTTP_200_OK: {
|
||||
|
@ -32,139 +25,117 @@ _RESPONSE_OK = {
|
|||
}
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class FileNameLister:
|
||||
async def get_config() -> Config:
|
||||
"""
|
||||
Can be called to create an iterator containing file names.
|
||||
|
||||
File names listed will be in `remote_path` and will match the RegEx `re`.
|
||||
Load the configuration instance from the server using `TOML`.
|
||||
"""
|
||||
|
||||
path_name: str
|
||||
re: re.Pattern[str]
|
||||
try:
|
||||
cfg_str = await WebDAV.read_str(SETTINGS.webdav.config_filename)
|
||||
cfg = Config.model_validate(tomllib.loads(cfg_str))
|
||||
|
||||
@property
|
||||
def responses(self) -> dict:
|
||||
return {
|
||||
**_RESPONSE_OK,
|
||||
status.HTTP_404_NOT_FOUND: {
|
||||
"description": f"{self.path_name!r} not found",
|
||||
"content": None,
|
||||
},
|
||||
}
|
||||
|
||||
@property
|
||||
async def remote_path(self) -> str:
|
||||
cfg = await Config.get()
|
||||
|
||||
return str(cfg.dict()[self.path_name])
|
||||
|
||||
async def __call__(self) -> Iterator[str]:
|
||||
try:
|
||||
file_names = await webdav_list(await self.remote_path)
|
||||
|
||||
return (
|
||||
name
|
||||
for name in file_names
|
||||
if self.re.search(name)
|
||||
)
|
||||
|
||||
except RemoteResourceNotFound:
|
||||
_logger.error(
|
||||
"WebDAV path %s lost!",
|
||||
repr(await self.remote_path),
|
||||
)
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class CalendarNameLister:
|
||||
"""
|
||||
Can be called to create an iterator containing calendar names.
|
||||
"""
|
||||
|
||||
async def __call__(self) -> Iterator[str]:
|
||||
return await caldav_list()
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class AggregateNameLister:
|
||||
"""
|
||||
Can be called to create an iterator containing aggregate calendar names.
|
||||
"""
|
||||
|
||||
async def __call__(self) -> Iterator[str]:
|
||||
cfg = await Config.get()
|
||||
|
||||
return iter(cfg.calendar.aggregates.keys())
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class PrefixFinder:
|
||||
"""
|
||||
Can be called to create an iterator containing some names, all starting
|
||||
with a given prefix.
|
||||
|
||||
All names will be taken from the list produced by the called `lister`.
|
||||
"""
|
||||
|
||||
lister: NameLister
|
||||
|
||||
@property
|
||||
def responses(self) -> dict:
|
||||
return {
|
||||
**_RESPONSE_OK,
|
||||
status.HTTP_404_NOT_FOUND: {
|
||||
"description": "Failure in lister " +
|
||||
repr(self.lister.__class__.__name__),
|
||||
"content": None,
|
||||
},
|
||||
}
|
||||
|
||||
async def __call__(self, prefix: str) -> Iterator[str]:
|
||||
return (
|
||||
file_name
|
||||
for file_name in (await self.lister())
|
||||
if file_name.lower().startswith(prefix.lower())
|
||||
except RemoteResourceNotFound:
|
||||
_logger.warning(
|
||||
f"Config file {SETTINGS.webdav.config_filename!r} not found, creating ..."
|
||||
)
|
||||
|
||||
cfg = Config()
|
||||
cfg.calendar.aggregates["All Events"] = list(await CalDAV.calendars)
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class PrefixUnique:
|
||||
await WebDAV.write_str(
|
||||
SETTINGS.webdav.config_filename,
|
||||
tomli_w.dumps(cfg.model_dump()),
|
||||
)
|
||||
|
||||
return cfg
|
||||
|
||||
|
||||
def get_remote_path(
|
||||
path_name: str,
|
||||
) -> DependableFn[[], str]:
|
||||
async def _get_remote_path() -> str:
|
||||
cfg = await get_config()
|
||||
return getattr(cfg, path_name)
|
||||
|
||||
return _get_remote_path
|
||||
|
||||
|
||||
RP_FILE = get_remote_path("file_dir")
|
||||
RP_IMAGE = get_remote_path("image_dir")
|
||||
RP_TEXT = get_remote_path("text_dir")
|
||||
|
||||
|
||||
def get_file_lister(
|
||||
rp: DependableFn[[], str],
|
||||
*,
|
||||
re: re.Pattern[str],
|
||||
) -> Dependable[[], list[str]]:
|
||||
"""
|
||||
Can be called to determine if a given prefix is unique in the list
|
||||
produced by the called `finder`.
|
||||
|
||||
On success, produces the unique name with that prefix. Otherwise,
|
||||
throws a HTTPException.
|
||||
List files in remote `path` matching the RegEx `re`
|
||||
"""
|
||||
|
||||
finder: PrefixFinder
|
||||
async def _list_files(
|
||||
remote_path: str = Depends(rp),
|
||||
) -> list[str]:
|
||||
if isinstance(remote_path, params.Depends):
|
||||
remote_path = await rp()
|
||||
|
||||
@property
|
||||
def responses(self) -> dict:
|
||||
return {
|
||||
**_RESPONSE_OK,
|
||||
status.HTTP_404_NOT_FOUND: {
|
||||
"description": "Prefix not found",
|
||||
"content": None,
|
||||
},
|
||||
status.HTTP_409_CONFLICT: {
|
||||
"description": "Ambiguous prefix",
|
||||
"content": None,
|
||||
},
|
||||
}
|
||||
|
||||
async def __call__(self, prefix: str) -> str:
|
||||
names = await self.finder(prefix)
|
||||
_logger.debug("list %s", repr(remote_path))
|
||||
|
||||
try:
|
||||
name = next(names)
|
||||
return await WebDAV.list_files(remote_path, regex=re)
|
||||
|
||||
except StopIteration:
|
||||
except RemoteResourceNotFound:
|
||||
_logger.error("WebDAV path %s lost!", repr(remote_path))
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
|
||||
|
||||
if any(True for _ in names):
|
||||
raise HTTPException(status_code=status.HTTP_409_CONFLICT)
|
||||
return Dependable(
|
||||
func=_list_files,
|
||||
responses={
|
||||
**_RESPONSE_OK,
|
||||
status.HTTP_404_NOT_FOUND: {
|
||||
"description": "Remote path not found",
|
||||
"content": None,
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
return name
|
||||
|
||||
LM_FILE = ListManager.from_lister(
|
||||
get_file_lister(rp=RP_FILE, re=re.compile(r"[^/]$", flags=re.IGNORECASE))
|
||||
)
|
||||
LM_IMAGE = ListManager.from_lister(
|
||||
get_file_lister(
|
||||
rp=RP_IMAGE, re=re.compile(r"\.(gif|jpe?g|tiff?|png|bmp)$", flags=re.IGNORECASE)
|
||||
)
|
||||
)
|
||||
LM_TEXT = ListManager.from_lister(
|
||||
get_file_lister(rp=RP_TEXT, re=re.compile(r"\.(txt|md)$", flags=re.IGNORECASE))
|
||||
)
|
||||
|
||||
|
||||
async def list_calendar_names() -> list[str]:
|
||||
"""
|
||||
List calendar names
|
||||
"""
|
||||
|
||||
return await CalDAV.calendars
|
||||
|
||||
|
||||
LM_CALENDAR = ListManager.from_lister_fn(list_calendar_names)
|
||||
|
||||
|
||||
async def list_aggregate_names(
|
||||
cfg: Config = Depends(get_config),
|
||||
) -> list[str]:
|
||||
"""
|
||||
List aggregate calendar names
|
||||
"""
|
||||
|
||||
if isinstance(cfg, params.Depends):
|
||||
cfg = await get_config()
|
||||
|
||||
return list(cfg.calendar.aggregates.keys())
|
||||
|
||||
|
||||
LM_AGGREGATE = ListManager.from_lister_fn(list_aggregate_names)
|
||||
|
|
88
api/ovdashboard_api/routers/v1/_list_manager.py
Normal file
88
api/ovdashboard_api/routers/v1/_list_manager.py
Normal file
|
@ -0,0 +1,88 @@
|
|||
import logging
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Awaitable, Callable, Generic, ParamSpec, Self, TypeVar
|
||||
|
||||
from fastapi import Depends, HTTPException, params, status
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
# default OpenAPI "responses" documentation for successful routes
_RESPONSE_OK = {
    status.HTTP_200_OK: {"description": "Operation successful"},
}

# module-level type variables, used by Dependable's Generic[...] below
Params = ParamSpec("Params")
Return = TypeVar("Return")

# an async FastAPI dependency function
# (the alias declares its own [**Params, Return] scoped parameters,
# independent of the module-level names above)
type DependableFn[**Params, Return] = Callable[Params, Awaitable[Return]]
|
||||
|
||||
@dataclass(slots=True, frozen=True)
class Dependable(Generic[Params, Return]):
    """
    A FastAPI dependency function bundled with its OpenAPI documentation.
    """

    # the dependency function itself
    func: DependableFn[Params, Return]
    # "responses" dict for routes that use this dependable (copy, so
    # instances can be extended without mutating the shared default)
    responses: dict = field(default_factory=lambda: _RESPONSE_OK.copy())
|
||||
|
||||
@dataclass(slots=True, frozen=True)
class ListManager:
    """
    Bundle of the three standard "list / filter by prefix / get unique"
    dependables built on top of a single name lister.
    """

    # produces all names
    lister: Dependable[[], list[str]]
    # produces the names starting with a given prefix
    filter: Dependable[[str], list[str]]
    # resolves a prefix to the unique matching name (or raises)
    getter: Dependable[[str], str]

    @classmethod
    def from_lister(cls, lister: Dependable[[], list[str]]) -> Self:
        async def _filter_fn(
            prefix: str,
            names: list[str] = Depends(lister.func),
        ) -> list[str]:
            """
            Filters `names` from an async source for names starting with a given prefix.
            """

            # support direct calls outside FastAPI: when not resolved by
            # the DI system, `names` is still the Depends sentinel object
            if isinstance(names, params.Depends):
                names = await lister.func()

            # _logger.debug("filter %s from %s", repr(prefix), repr(names))

            # case-insensitive prefix match
            return [item for item in names if item.lower().startswith(prefix.lower())]

        async def _getter_fn(
            prefix: str,
            names: list[str] = Depends(_filter_fn),
        ) -> str:
            """
            Determines if a given prefix is unique in the async produced list `names`.

            On success, produces the unique name with that prefix. Otherwise, throws a HTTPException.
            """

            # same direct-call support as in _filter_fn above
            if isinstance(names, params.Depends):
                names = await _filter_fn(prefix)

            _logger.debug("get %s from %s", repr(prefix), repr(names))

            # exactly one match -> name; none -> 404; several -> 409
            match names:
                case [name]:
                    return name

                case []:
                    raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)

                case _:
                    raise HTTPException(status_code=status.HTTP_409_CONFLICT)

        return cls(
            lister=lister,
            filter=Dependable(_filter_fn),
            getter=Dependable(
                func=_getter_fn,
                responses={
                    **_RESPONSE_OK,
                    status.HTTP_404_NOT_FOUND: {"description": "Prefix not found"},
                    status.HTTP_409_CONFLICT: {"description": "Ambiguous prefix"},
                },
            ),
        )

    @classmethod
    def from_lister_fn(cls, lister_fn: DependableFn[[], list[str]]) -> Self:
        # convenience wrapper: accept a bare function with default responses
        return cls.from_lister(Dependable(lister_fn))
|
|
@ -6,58 +6,57 @@ Router "aggregate" provides:
|
|||
- getting aggregate calendar events by name prefix
|
||||
"""
|
||||
|
||||
from logging import getLogger
|
||||
from typing import Iterator
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from ovdashboard_api.config import Config
|
||||
|
||||
from ...dav_calendar import CalEvent, DavCalendar
|
||||
from ._common import AggregateNameLister, PrefixFinder, PrefixUnique
|
||||
from .calendar import calendar_unique
|
||||
from ...core.caldav import CalDAV
|
||||
from ...core.calevent import CalEvent
|
||||
from ...core.config import Config
|
||||
from ._common import LM_AGGREGATE, LM_CALENDAR, get_config
|
||||
|
||||
_logger = getLogger(__name__)
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/aggregate", tags=["calendar"])
|
||||
|
||||
aggregate_lister = AggregateNameLister()
|
||||
aggregate_finder = PrefixFinder(aggregate_lister)
|
||||
aggregate_unique = PrefixUnique(aggregate_finder)
|
||||
|
||||
|
||||
@router.on_event("startup")
|
||||
async def start_router() -> None:
|
||||
_logger.debug(f"{router.prefix} router starting.")
|
||||
|
||||
|
||||
@router.get("/list", response_model=list[str])
|
||||
@router.get(
|
||||
"/list",
|
||||
responses=LM_AGGREGATE.lister.responses,
|
||||
)
|
||||
async def list_aggregate_calendars(
|
||||
names: Iterator[str] = Depends(aggregate_lister),
|
||||
names: list[str] = Depends(LM_AGGREGATE.lister.func),
|
||||
) -> list[str]:
|
||||
return list(names)
|
||||
return names
|
||||
|
||||
|
||||
@router.get("/find/{prefix}", response_model=list[str])
|
||||
@router.get(
|
||||
"/find/{prefix}",
|
||||
responses=LM_AGGREGATE.filter.responses,
|
||||
)
|
||||
async def find_aggregate_calendars(
|
||||
names: Iterator[str] = Depends(aggregate_finder),
|
||||
names: list[str] = Depends(LM_AGGREGATE.filter.func),
|
||||
) -> list[str]:
|
||||
return list(names)
|
||||
return names
|
||||
|
||||
|
||||
@router.get("/get/{prefix}", response_model=list[CalEvent])
|
||||
@router.get(
|
||||
"/get/{prefix}",
|
||||
responses=LM_AGGREGATE.getter.responses,
|
||||
)
|
||||
async def get_aggregate_calendar(
|
||||
name: str = Depends(aggregate_unique),
|
||||
cfg: Config = Depends(get_config),
|
||||
name: str = Depends(LM_AGGREGATE.getter.func),
|
||||
) -> list[CalEvent]:
|
||||
cfg = await Config.get()
|
||||
aggregate = cfg.calendar.aggregates[name]
|
||||
events: list[CalEvent] = []
|
||||
|
||||
calendars = (
|
||||
DavCalendar(await calendar_unique(cal_prefix))
|
||||
for cal_prefix in aggregate
|
||||
)
|
||||
for cal_prefix in cfg.calendar.aggregates[name]:
|
||||
cal_name = await LM_CALENDAR.getter.func(cal_prefix)
|
||||
events.extend(await CalDAV.get_events(cal_name, cfg))
|
||||
|
||||
return sorted([
|
||||
event
|
||||
async for calendar in calendars # type: ignore
|
||||
for event in (await calendar.events)
|
||||
])
|
||||
return sorted(events)
|
||||
|
|
|
@ -6,55 +6,57 @@ Router "calendar" provides:
|
|||
- getting calendar events by calendar name prefix
|
||||
"""
|
||||
|
||||
from logging import getLogger
|
||||
from typing import Iterator
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from ...config import CalendarUIConfig, Config
|
||||
from ...dav_calendar import CalEvent, DavCalendar
|
||||
from ._common import CalendarNameLister, PrefixFinder, PrefixUnique
|
||||
from ...core.caldav import CalDAV, CalEvent
|
||||
from ...core.config import CalendarUIConfig, Config
|
||||
from ._common import LM_CALENDAR, get_config
|
||||
|
||||
_logger = getLogger(__name__)
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/calendar", tags=["calendar"])
|
||||
|
||||
calendar_lister = CalendarNameLister()
|
||||
calendar_finder = PrefixFinder(calendar_lister)
|
||||
calendar_unique = PrefixUnique(calendar_finder)
|
||||
|
||||
|
||||
@router.on_event("startup")
|
||||
async def start_router() -> None:
|
||||
_logger.debug(f"{router.prefix} router starting.")
|
||||
|
||||
|
||||
@router.get("/list", response_model=list[str])
|
||||
@router.get(
|
||||
"/list",
|
||||
responses=LM_CALENDAR.lister.responses,
|
||||
)
|
||||
async def list_calendars(
|
||||
names: Iterator[str] = Depends(calendar_lister),
|
||||
names: list[str] = Depends(LM_CALENDAR.lister.func),
|
||||
) -> list[str]:
|
||||
return list(names)
|
||||
|
||||
|
||||
@router.get("/find/{prefix}", response_model=list[str])
|
||||
async def find_calendars(
|
||||
names: Iterator[str] = Depends(calendar_finder),
|
||||
) -> list[str]:
|
||||
return list(names)
|
||||
|
||||
|
||||
@router.get("/get/{prefix}", response_model=list[CalEvent])
|
||||
async def get_calendar(
|
||||
name: str = Depends(calendar_unique),
|
||||
) -> list[CalEvent]:
|
||||
return list(await DavCalendar(name).events)
|
||||
return names
|
||||
|
||||
|
||||
@router.get(
|
||||
"/config",
|
||||
response_model=CalendarUIConfig,
|
||||
"/find/{prefix}",
|
||||
responses=LM_CALENDAR.filter.responses,
|
||||
)
|
||||
async def find_calendars(
|
||||
names: list[str] = Depends(LM_CALENDAR.filter.func),
|
||||
) -> list[str]:
|
||||
return names
|
||||
|
||||
|
||||
@router.get(
|
||||
"/get/{prefix}",
|
||||
responses=LM_CALENDAR.getter.responses,
|
||||
)
|
||||
async def get_calendar(
|
||||
name: str = Depends(LM_CALENDAR.getter.func),
|
||||
cfg: Config = Depends(get_config),
|
||||
) -> list[CalEvent]:
|
||||
return await CalDAV.get_events(name, cfg)
|
||||
|
||||
|
||||
@router.get("/config")
|
||||
async def get_ui_config(
|
||||
cfg: Config = Depends(Config.get),
|
||||
cfg: Config = Depends(get_config),
|
||||
) -> CalendarUIConfig:
|
||||
return cfg.calendar
|
||||
|
|
|
@ -6,43 +6,31 @@ Router "file" provides:
|
|||
- getting files by name prefix
|
||||
"""
|
||||
|
||||
import re
|
||||
import logging
|
||||
from io import BytesIO
|
||||
from logging import getLogger
|
||||
from typing import Iterator
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from fastapi.responses import StreamingResponse
|
||||
from magic import Magic
|
||||
|
||||
from ...dav_common import webdav_ensure_files, webdav_ensure_path
|
||||
from ...dav_file import DavFile
|
||||
from ._common import FileNameLister, PrefixFinder, PrefixUnique
|
||||
from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
|
||||
from ...core.webdav import WebDAV
|
||||
from ._common import LM_FILE, RP_FILE
|
||||
|
||||
_logger = getLogger(__name__)
|
||||
_logger = logging.getLogger(__name__)
|
||||
_magic = Magic(mime=True)
|
||||
|
||||
router = APIRouter(prefix="/file", tags=["file"])
|
||||
|
||||
file_lister = FileNameLister(
|
||||
path_name="file_dir",
|
||||
re=re.compile(
|
||||
r"[^/]$",
|
||||
flags=re.IGNORECASE,
|
||||
),
|
||||
)
|
||||
|
||||
file_finder = PrefixFinder(file_lister)
|
||||
file_unique = PrefixUnique(file_finder)
|
||||
|
||||
|
||||
@router.on_event("startup")
|
||||
async def start_router() -> None:
|
||||
_logger.debug(f"{router.prefix} router starting.")
|
||||
|
||||
if not webdav_ensure_path(await file_lister.remote_path):
|
||||
remote_path = await RP_FILE()
|
||||
if not webdav_ensure_path(remote_path):
|
||||
webdav_ensure_files(
|
||||
await file_lister.remote_path,
|
||||
remote_path,
|
||||
"logo.svg",
|
||||
"thw.svg",
|
||||
)
|
||||
|
@ -50,37 +38,34 @@ async def start_router() -> None:
|
|||
|
||||
@router.get(
|
||||
"/list",
|
||||
response_model=list[str],
|
||||
responses=file_lister.responses,
|
||||
responses=LM_FILE.lister.responses,
|
||||
)
|
||||
async def list_files(
|
||||
names: Iterator[str] = Depends(file_lister),
|
||||
names: list[str] = Depends(LM_FILE.lister.func),
|
||||
) -> list[str]:
|
||||
return list(names)
|
||||
return names
|
||||
|
||||
|
||||
@router.get(
|
||||
"/find/{prefix}",
|
||||
response_model=list[str],
|
||||
responses=file_finder.responses,
|
||||
responses=LM_FILE.filter.responses,
|
||||
)
|
||||
async def find_files(
|
||||
names: Iterator[str] = Depends(file_finder),
|
||||
async def find_files_by_prefix(
|
||||
names: list[str] = Depends(LM_FILE.filter.func),
|
||||
) -> list[str]:
|
||||
return list(names)
|
||||
return names
|
||||
|
||||
|
||||
@router.get(
|
||||
"/get/{prefix}",
|
||||
responses=LM_FILE.getter.responses,
|
||||
response_class=StreamingResponse,
|
||||
responses=file_unique.responses,
|
||||
)
|
||||
async def get_file(
|
||||
prefix: str,
|
||||
name: str = Depends(file_unique),
|
||||
async def get_file_by_prefix(
|
||||
remote_path: str = Depends(RP_FILE),
|
||||
name: str = Depends(LM_FILE.getter.func),
|
||||
) -> StreamingResponse:
|
||||
dav_file = DavFile(f"{await file_lister.remote_path}/{name}")
|
||||
buffer = BytesIO(await dav_file.as_bytes)
|
||||
buffer = BytesIO(await WebDAV.read_bytes(f"{remote_path}/{name}"))
|
||||
|
||||
mime = _magic.from_buffer(buffer.read(2048))
|
||||
buffer.seek(0)
|
||||
|
@ -88,7 +73,5 @@ async def get_file(
|
|||
return StreamingResponse(
|
||||
content=buffer,
|
||||
media_type=mime,
|
||||
headers={
|
||||
"Content-Disposition": f"filename={prefix}"
|
||||
},
|
||||
headers={"Content-Disposition": f"filename={name}"},
|
||||
)
|
||||
|
|
|
@ -6,43 +6,31 @@ Router "image" provides:
|
|||
- getting image files in a uniform format by name prefix
|
||||
"""
|
||||
|
||||
import re
|
||||
import logging
|
||||
from io import BytesIO
|
||||
from logging import getLogger
|
||||
from typing import Iterator
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from fastapi.responses import StreamingResponse
|
||||
from PIL import Image
|
||||
|
||||
from ...config import Config, ImageUIConfig
|
||||
from ...dav_common import webdav_ensure_files, webdav_ensure_path
|
||||
from ...dav_file import DavFile
|
||||
from ._common import FileNameLister, PrefixFinder, PrefixUnique
|
||||
from ...core.config import Config, ImageUIConfig
|
||||
from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
|
||||
from ...core.webdav import WebDAV
|
||||
from ._common import LM_IMAGE, RP_IMAGE, get_config
|
||||
|
||||
_logger = getLogger(__name__)
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/image", tags=["image"])
|
||||
|
||||
image_lister = FileNameLister(
|
||||
path_name="image_dir",
|
||||
re=re.compile(
|
||||
r"\.(gif|jpe?g|tiff?|png|bmp)$",
|
||||
flags=re.IGNORECASE,
|
||||
),
|
||||
)
|
||||
|
||||
image_finder = PrefixFinder(image_lister)
|
||||
image_unique = PrefixUnique(image_finder)
|
||||
|
||||
|
||||
@router.on_event("startup")
|
||||
async def start_router() -> None:
|
||||
_logger.debug(f"{router.prefix} router starting.")
|
||||
|
||||
if not webdav_ensure_path(await image_lister.remote_path):
|
||||
remote_path = await RP_IMAGE()
|
||||
if not webdav_ensure_path(remote_path):
|
||||
webdav_ensure_files(
|
||||
await image_lister.remote_path,
|
||||
remote_path,
|
||||
"img1.jpg",
|
||||
"img2.jpg",
|
||||
"img3.jpg",
|
||||
|
@ -51,43 +39,35 @@ async def start_router() -> None:
|
|||
|
||||
@router.get(
|
||||
"/list",
|
||||
response_model=list[str],
|
||||
responses=image_lister.responses,
|
||||
responses=LM_IMAGE.lister.responses,
|
||||
)
|
||||
async def list_images(
|
||||
names: Iterator[str] = Depends(image_lister),
|
||||
names: list[str] = Depends(LM_IMAGE.lister.func),
|
||||
) -> list[str]:
|
||||
return list(names)
|
||||
return names
|
||||
|
||||
|
||||
@router.get(
|
||||
"/find/{prefix}",
|
||||
response_model=list[str],
|
||||
responses=image_finder.responses,
|
||||
responses=LM_IMAGE.filter.responses,
|
||||
)
|
||||
async def find_images(
|
||||
names: Iterator[str] = Depends(image_finder),
|
||||
async def find_images_by_prefix(
|
||||
names: list[str] = Depends(LM_IMAGE.filter.func),
|
||||
) -> list[str]:
|
||||
return list(names)
|
||||
return names
|
||||
|
||||
|
||||
@router.get(
|
||||
"/get/{prefix}",
|
||||
responses=LM_IMAGE.getter.responses,
|
||||
response_class=StreamingResponse,
|
||||
responses=image_unique.responses,
|
||||
)
|
||||
async def get_image(
|
||||
prefix: str,
|
||||
name: str = Depends(image_unique),
|
||||
async def get_image_by_prefix(
|
||||
remote_path: str = Depends(RP_IMAGE),
|
||||
name: str = Depends(LM_IMAGE.getter.func),
|
||||
) -> StreamingResponse:
|
||||
cfg = await Config.get()
|
||||
|
||||
dav_file = DavFile(f"{await image_lister.remote_path}/{name}")
|
||||
img = Image.open(
|
||||
BytesIO(await dav_file.as_bytes)
|
||||
).convert(
|
||||
cfg.image.mode
|
||||
)
|
||||
cfg = await get_config()
|
||||
img = Image.open(BytesIO(await WebDAV.read_bytes(f"{remote_path}/{name}")))
|
||||
|
||||
img_buffer = BytesIO()
|
||||
img.save(img_buffer, **cfg.image.save_params)
|
||||
|
@ -96,17 +76,12 @@ async def get_image(
|
|||
return StreamingResponse(
|
||||
content=img_buffer,
|
||||
media_type="image/jpeg",
|
||||
headers={
|
||||
"Content-Disposition": f"filename={prefix}.jpg"
|
||||
},
|
||||
headers={"Content-Disposition": f"filename={name}.jpg"},
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/config",
|
||||
response_model=ImageUIConfig,
|
||||
)
|
||||
@router.get("/config")
|
||||
async def get_ui_config(
|
||||
cfg: Config = Depends(Config.get),
|
||||
cfg: Config = Depends(get_config),
|
||||
) -> ImageUIConfig:
|
||||
return cfg.image
|
||||
|
|
|
@ -5,16 +5,17 @@ Router "misc" provides:
|
|||
- getting the device IP
|
||||
"""
|
||||
|
||||
from importlib.metadata import version
|
||||
from logging import getLogger
|
||||
import importlib.metadata
|
||||
import logging
|
||||
from socket import AF_INET, SOCK_DGRAM, socket
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from ...config import Config, LogoUIConfig, ServerUIConfig
|
||||
from ...settings import SETTINGS
|
||||
from ...core.config import Config, LogoUIConfig, ServerUIConfig
|
||||
from ...core.settings import SETTINGS
|
||||
from ._common import get_config
|
||||
|
||||
_logger = getLogger(__name__)
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/misc", tags=["misc"])
|
||||
|
||||
|
@ -25,13 +26,13 @@ async def start_router() -> None:
|
|||
|
||||
|
||||
@router.get("/lanip")
|
||||
async def get_ip() -> str:
|
||||
async def get_lan_ip() -> str:
|
||||
with socket(
|
||||
family=AF_INET,
|
||||
type=SOCK_DGRAM,
|
||||
) as s:
|
||||
s.settimeout(0)
|
||||
try:
|
||||
s.settimeout(0)
|
||||
s.connect((SETTINGS.ping_host, SETTINGS.ping_port))
|
||||
IP = s.getsockname()[0]
|
||||
|
||||
|
@ -42,25 +43,19 @@ async def get_ip() -> str:
|
|||
|
||||
|
||||
@router.get("/version")
|
||||
async def get_version() -> str:
|
||||
return version("ovdashboard-api")
|
||||
async def get_server_api_version() -> str:
|
||||
return importlib.metadata.version("ovdashboard_api")
|
||||
|
||||
|
||||
@router.get(
|
||||
"/config/server",
|
||||
response_model=ServerUIConfig,
|
||||
)
|
||||
@router.get("/config/server")
|
||||
async def get_server_ui_config(
|
||||
cfg: Config = Depends(Config.get),
|
||||
cfg: Config = Depends(get_config),
|
||||
) -> ServerUIConfig:
|
||||
return cfg.server
|
||||
|
||||
|
||||
@router.get(
|
||||
"/config/logo",
|
||||
response_model=LogoUIConfig,
|
||||
)
|
||||
@router.get("/config/logo")
|
||||
async def get_logo_ui_config(
|
||||
cfg: Config = Depends(Config.get),
|
||||
cfg: Config = Depends(get_config),
|
||||
) -> LogoUIConfig:
|
||||
return cfg.logo
|
||||
|
|
|
@ -7,94 +7,76 @@ Router "text" provides:
|
|||
- getting text file HTML content by name prefix (using Markdown)
|
||||
"""
|
||||
|
||||
import re
|
||||
from logging import getLogger
|
||||
from typing import Iterator
|
||||
import logging
|
||||
|
||||
import markdown
|
||||
from fastapi import APIRouter, Depends
|
||||
from markdown import markdown
|
||||
|
||||
from ...dav_common import webdav_ensure_files, webdav_ensure_path
|
||||
from ...dav_file import DavFile
|
||||
from ._common import FileNameLister, PrefixFinder, PrefixUnique
|
||||
from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
|
||||
from ...core.webdav import WebDAV
|
||||
from ._common import LM_TEXT, RP_TEXT
|
||||
|
||||
_logger = getLogger(__name__)
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/text", tags=["text"])
|
||||
|
||||
text_lister = FileNameLister(
|
||||
path_name="text_dir",
|
||||
re=re.compile(
|
||||
r"\.(txt|md)$",
|
||||
flags=re.IGNORECASE,
|
||||
),
|
||||
)
|
||||
|
||||
text_finder = PrefixFinder(text_lister)
|
||||
text_unique = PrefixUnique(text_finder)
|
||||
|
||||
|
||||
@router.on_event("startup")
|
||||
async def start_router() -> None:
|
||||
_logger.debug(f"{router.prefix} router starting.")
|
||||
|
||||
webdav_ensure_path(await text_lister.remote_path)
|
||||
|
||||
webdav_ensure_files(
|
||||
await text_lister.remote_path,
|
||||
"message.txt",
|
||||
"title.txt",
|
||||
"ticker.txt",
|
||||
)
|
||||
remote_path = await RP_TEXT()
|
||||
if not webdav_ensure_path(remote_path):
|
||||
webdav_ensure_files(
|
||||
remote_path,
|
||||
"message.txt",
|
||||
"title.txt",
|
||||
"ticker.txt",
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/list",
|
||||
response_model=list[str],
|
||||
responses=text_lister.responses,
|
||||
responses=LM_TEXT.lister.responses,
|
||||
)
|
||||
async def list_texts(
|
||||
names: Iterator[str] = Depends(text_lister),
|
||||
names: list[str] = Depends(LM_TEXT.lister.func),
|
||||
) -> list[str]:
|
||||
return list(names)
|
||||
return names
|
||||
|
||||
|
||||
@router.get(
|
||||
"/find/{prefix}",
|
||||
response_model=list[str],
|
||||
responses=text_finder.responses,
|
||||
responses=LM_TEXT.filter.responses,
|
||||
)
|
||||
async def find_texts(
|
||||
names: Iterator[str] = Depends(text_finder),
|
||||
async def find_texts_by_prefix(
|
||||
names: list[str] = Depends(LM_TEXT.filter.func),
|
||||
) -> list[str]:
|
||||
return list(names)
|
||||
return names
|
||||
|
||||
|
||||
async def get_text_content(
|
||||
name: str = Depends(text_unique),
|
||||
async def _get_raw_text_by_prefix(
|
||||
remote_path: str = Depends(RP_TEXT),
|
||||
name: str = Depends(LM_TEXT.getter.func),
|
||||
) -> str:
|
||||
return await DavFile(
|
||||
f"{await text_lister.remote_path}/{name}",
|
||||
).as_string
|
||||
|
||||
|
||||
@router.get(
|
||||
"/get/html/{prefix}",
|
||||
response_model=str,
|
||||
responses=text_unique.responses,
|
||||
)
|
||||
async def get_text(
|
||||
text: str = Depends(get_text_content),
|
||||
) -> str:
|
||||
return markdown(text)
|
||||
return await WebDAV.read_str(f"{remote_path}/{name}")
|
||||
|
||||
|
||||
@router.get(
|
||||
"/get/raw/{prefix}",
|
||||
response_model=str,
|
||||
responses=text_unique.responses,
|
||||
responses=LM_TEXT.getter.responses,
|
||||
)
|
||||
async def get_raw_text(
|
||||
text: str = Depends(get_text_content),
|
||||
async def get_raw_text_by_prefix(
|
||||
text: str = Depends(_get_raw_text_by_prefix),
|
||||
) -> str:
|
||||
return text
|
||||
|
||||
|
||||
@router.get(
|
||||
"/get/html/{prefix}",
|
||||
responses=LM_TEXT.getter.responses,
|
||||
)
|
||||
async def get_html_by_prefix(
|
||||
text: str = Depends(_get_raw_text_by_prefix),
|
||||
) -> str:
|
||||
return markdown.markdown(text)
|
||||
|
|
|
@ -6,18 +6,18 @@ Router "ticker" provides:
|
|||
- getting the ticker's UI config
|
||||
"""
|
||||
|
||||
from logging import getLogger
|
||||
import logging
|
||||
from typing import Iterator
|
||||
|
||||
import markdown
|
||||
from fastapi import APIRouter, Depends
|
||||
from markdown import markdown
|
||||
|
||||
from ...config import Config, TickerUIConfig
|
||||
from ...dav_common import webdav_ensure_files, webdav_ensure_path
|
||||
from ...dav_file import DavFile
|
||||
from .text import text_lister, text_unique
|
||||
from ...core.config import Config, TickerUIConfig
|
||||
from ...core.dav_common import webdav_ensure_files, webdav_ensure_path
|
||||
from ...core.webdav import WebDAV
|
||||
from ._common import LM_TEXT, RP_TEXT, get_config
|
||||
|
||||
_logger = getLogger(__name__)
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/ticker", tags=["text"])
|
||||
|
||||
|
@ -26,38 +26,31 @@ router = APIRouter(prefix="/ticker", tags=["text"])
|
|||
async def start_router() -> None:
|
||||
_logger.debug(f"{router.prefix} router starting.")
|
||||
|
||||
webdav_ensure_path(await text_lister.remote_path)
|
||||
|
||||
webdav_ensure_files(
|
||||
await text_lister.remote_path,
|
||||
"ticker.txt",
|
||||
)
|
||||
remote_path = await RP_TEXT()
|
||||
if not webdav_ensure_path(remote_path):
|
||||
webdav_ensure_files(
|
||||
remote_path,
|
||||
"ticker.txt",
|
||||
)
|
||||
|
||||
|
||||
async def get_ticker_lines() -> Iterator[str]:
|
||||
cfg = await Config.get()
|
||||
file_name = await text_unique(cfg.ticker.file_name)
|
||||
cfg = await get_config()
|
||||
file_name = await LM_TEXT.getter.func(cfg.ticker.file_name)
|
||||
remote_path = await RP_TEXT()
|
||||
|
||||
ticker = await DavFile(
|
||||
f"{await text_lister.remote_path}/{file_name}",
|
||||
).as_string
|
||||
ticker = await WebDAV.read_str(f"{remote_path}/{file_name}")
|
||||
|
||||
return (
|
||||
line.strip()
|
||||
for line in ticker.split("\n")
|
||||
if line.strip()
|
||||
)
|
||||
return (line.strip() for line in ticker.split("\n") if line.strip())
|
||||
|
||||
|
||||
async def get_ticker_content_lines(
|
||||
ticker_lines: Iterator[str] = Depends(get_ticker_lines),
|
||||
) -> Iterator[str]:
|
||||
cfg = await Config.get()
|
||||
cfg = await get_config()
|
||||
|
||||
return (
|
||||
line
|
||||
for line in ticker_lines
|
||||
if not line.startswith(cfg.ticker.comment_marker)
|
||||
line for line in ticker_lines if not line.startswith(cfg.ticker.comment_marker)
|
||||
)
|
||||
|
||||
|
||||
|
@ -68,7 +61,7 @@ async def get_ticker_content(
|
|||
if len(ticker_content_padded) == 2:
|
||||
return ""
|
||||
|
||||
cfg = await Config.get()
|
||||
cfg = await get_config()
|
||||
ticker_content = cfg.ticker.separator.join(
|
||||
ticker_content_padded,
|
||||
)
|
||||
|
@ -80,7 +73,7 @@ async def get_ticker_content(
|
|||
async def get_ticker(
|
||||
ticker_content: str = Depends(get_ticker_content),
|
||||
) -> str:
|
||||
return markdown(ticker_content)
|
||||
return markdown.markdown(ticker_content)
|
||||
|
||||
|
||||
@router.get("/raw")
|
||||
|
@ -90,11 +83,8 @@ async def get_raw_ticker(
|
|||
return ticker_content
|
||||
|
||||
|
||||
@router.get(
|
||||
"/config",
|
||||
response_model=TickerUIConfig,
|
||||
)
|
||||
@router.get("/config")
|
||||
async def get_ui_config(
|
||||
cfg: Config = Depends(Config.get),
|
||||
cfg: Config = Depends(get_config),
|
||||
) -> TickerUIConfig:
|
||||
return cfg.ticker
|
||||
|
|
|
@ -1,117 +0,0 @@
|
|||
"""
|
||||
Configuration definition.
|
||||
|
||||
Converts per-run (environment) variables and config files into the
|
||||
"python world" using `pydantic`.
|
||||
|
||||
Pydantic models might have convenience methods attached.
|
||||
"""
|
||||
|
||||
from typing import Any, Optional
|
||||
|
||||
from pydantic import BaseModel, BaseSettings, root_validator
|
||||
|
||||
|
||||
class DavSettings(BaseModel):
|
||||
"""
|
||||
Connection to a DAV server.
|
||||
"""
|
||||
|
||||
protocol: Optional[str] = None
|
||||
host: Optional[str] = None
|
||||
username: Optional[str] = None
|
||||
password: Optional[str] = None
|
||||
path: Optional[str] = None
|
||||
|
||||
@property
|
||||
def url(self) -> str:
|
||||
"""
|
||||
Combined DAV URL.
|
||||
"""
|
||||
|
||||
return f"{self.protocol}://{self.host}{self.path}"
|
||||
|
||||
|
||||
class Settings(BaseSettings):
|
||||
"""
|
||||
Per-run settings.
|
||||
"""
|
||||
|
||||
#####
|
||||
# general settings
|
||||
#####
|
||||
|
||||
production_mode: bool = False
|
||||
log_level: str = "INFO" if production_mode else "DEBUG"
|
||||
ui_directory: str = "/html"
|
||||
cache_time: int = 30
|
||||
cache_size: int = 30
|
||||
|
||||
# doesn't even have to be reachable
|
||||
ping_host: str = "10.0.0.0"
|
||||
ping_port: int = 1
|
||||
|
||||
#####
|
||||
# openapi settings
|
||||
#####
|
||||
|
||||
openapi_url: str = "/openapi.json"
|
||||
docs_url: Optional[str] = None if production_mode else "/docs"
|
||||
redoc_url: Optional[str] = None if production_mode else "/redoc"
|
||||
|
||||
#####
|
||||
# webdav settings
|
||||
#####
|
||||
|
||||
webdav: DavSettings = DavSettings()
|
||||
webdav_disable_check: bool = False
|
||||
webdav_retries: int = 20
|
||||
webdav_prefix: str = "/ovdashboard"
|
||||
config_path: str = "config.txt"
|
||||
|
||||
#####
|
||||
# caldav settings
|
||||
#####
|
||||
|
||||
caldav: DavSettings = DavSettings()
|
||||
|
||||
class Config:
|
||||
env_file = ".env"
|
||||
env_file_encoding = "utf-8"
|
||||
env_nested_delimiter = "__"
|
||||
|
||||
@root_validator(pre=True)
|
||||
@classmethod
|
||||
def validate_dav_settings(cls, values: dict[str, Any]) -> dict[str, Any]:
|
||||
# ensure both settings dicts are created
|
||||
for key in ("webdav", "caldav"):
|
||||
if key not in values:
|
||||
values[key] = {}
|
||||
|
||||
default_dav = DavSettings(
|
||||
protocol="https",
|
||||
host="example.com",
|
||||
username="ovdashboard",
|
||||
password="secret",
|
||||
).dict()
|
||||
|
||||
for key in default_dav:
|
||||
# if "webdav" value is not specified, use default
|
||||
if key not in values["webdav"] or values["webdav"][key] is None:
|
||||
values["webdav"][key] = default_dav[key]
|
||||
|
||||
# if "caldav" value is not specified, use "webdav" value
|
||||
if key not in values["caldav"] or values["caldav"][key] is None:
|
||||
values["caldav"][key] = values["webdav"][key]
|
||||
|
||||
# add default "path"s if None
|
||||
if values["webdav"]["path"] is None:
|
||||
values["webdav"]["path"] = "/remote.php/webdav"
|
||||
|
||||
if values["caldav"]["path"] is None:
|
||||
values["caldav"]["path"] = "/remote.php/dav"
|
||||
|
||||
return values
|
||||
|
||||
|
||||
SETTINGS = Settings()
|
1491
api/poetry.lock
generated
1491
api/poetry.lock
generated
File diff suppressed because it is too large
Load diff
|
@ -2,28 +2,28 @@
|
|||
authors = ["Jörn-Michael Miehe <jmm@yavook.de>"]
|
||||
description = ""
|
||||
include = ["ovdashboard_api/skel/*"]
|
||||
name = "ovdashboard-api"
|
||||
name = "ovdashboard_api"
|
||||
version = "0.1.0"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
Markdown = "^3.4.1"
|
||||
Pillow = "^9.2.0"
|
||||
caldav = "^0.9.1"
|
||||
fastapi = "^0.81.0"
|
||||
pydantic = {extras = ["dotenv"], version = "^1.9.2"}
|
||||
python = "^3.9"
|
||||
Markdown = "^3.5"
|
||||
Pillow = "^10.1.0"
|
||||
asyncify = "^0.9.2"
|
||||
cachetools = "^5.3.2"
|
||||
caldav = "^1.3.6"
|
||||
fastapi = "^0.103.2"
|
||||
pydantic-settings = "^2.0.3"
|
||||
python = "^3.12"
|
||||
python-magic = "^0.4.27"
|
||||
tomli = "^2.0.1"
|
||||
tomli-w = "^1.0.0"
|
||||
uvicorn = "^0.18.3"
|
||||
webdavclient3 = "3.14.5"
|
||||
async-cache = "^1.1.1"
|
||||
uvicorn = {extras = ["standard"], version = "^0.23.2"}
|
||||
webdavclient3 = "^3.14.6"
|
||||
|
||||
[tool.poetry.dev-dependencies]
|
||||
# pytest = "^5.2"
|
||||
|
||||
[tool.poetry.scripts]
|
||||
ovdashboard-api = "ovdashboard_api.__main__:main"
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
black = "^23.10.1"
|
||||
flake8 = "^6.1.0"
|
||||
flake8-isort = "^6.1.0"
|
||||
types-cachetools = "^5.3.0.6"
|
||||
|
||||
[build-system]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
|
|
|
@ -1,11 +1,20 @@
|
|||
# [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
|
||||
ARG VARIANT=16-bullseye
|
||||
FROM mcr.microsoft.com/vscode/devcontainers/javascript-node:0-${VARIANT}
|
||||
ARG VARIANT=16-bookworm
|
||||
FROM mcr.microsoft.com/vscode/devcontainers/javascript-node:1-${VARIANT}
|
||||
|
||||
# [Optional] Uncomment this section to install additional OS packages.
|
||||
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
|
||||
# && apt-get -y install --no-install-recommends <your-package-list-here>
|
||||
|
||||
RUN set -ex; \
|
||||
\
|
||||
export DEBIAN_FRONTEND=noninteractive; \
|
||||
apt-get update; apt-get install --yes --no-install-recommends \
|
||||
git-flow \
|
||||
git-lfs \
|
||||
; rm -rf /var/lib/apt/lists/*; \
|
||||
su node -c "git lfs install"
|
||||
|
||||
# [Optional] Uncomment if you want to install an additional version of node using nvm
|
||||
# ARG EXTRA_NODE_VERSION=10
|
||||
# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}"
|
||||
|
|
|
@ -1,28 +1,35 @@
|
|||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
|
||||
// https://github.com/microsoft/vscode-dev-containers/tree/v0.245.2/containers/javascript-node
|
||||
{
|
||||
"name": "Node.js",
|
||||
"name": "OVD UI",
|
||||
"build": {
|
||||
"dockerfile": "Dockerfile",
|
||||
"context": "..",
|
||||
// Update 'VARIANT' to pick a Node version: 18, 16, 14.
|
||||
// Append -bullseye or -buster to pin to an OS version.
|
||||
// Use -bullseye variants on local arm64/Apple Silicon.
|
||||
"args": {
|
||||
"VARIANT": "18-bullseye"
|
||||
"VARIANT": "20-bookworm"
|
||||
}
|
||||
},
|
||||
// Set *default* container specific settings.json values on container create.
|
||||
"settings": {
|
||||
"terminal.integrated.defaultProfile.linux": "zsh"
|
||||
"containerEnv": {
|
||||
"TZ": "Europe/Berlin"
|
||||
},
|
||||
// Configure tool-specific properties.
|
||||
"customizations": {
|
||||
// Configure properties specific to VS Code.
|
||||
"vscode": {
|
||||
// Set *default* container specific settings.json values on container create.
|
||||
"settings": {
|
||||
"terminal.integrated.defaultProfile.linux": "zsh"
|
||||
},
|
||||
// Add the IDs of extensions you want installed when the container is created.
|
||||
"extensions": [
|
||||
"dbaeumer.vscode-eslint",
|
||||
"octref.vetur"
|
||||
"esbenp.prettier-vscode",
|
||||
"mhutchie.git-graph",
|
||||
"Syler.sass-indented",
|
||||
"Vue.volar"
|
||||
]
|
||||
}
|
||||
},
|
||||
|
@ -30,7 +37,7 @@
|
|||
// "forwardPorts": [],
|
||||
// Use 'postCreateCommand' to run commands after the container is created.
|
||||
// "postCreateCommand": "yarn install",
|
||||
"postStartCommand": "yarn install",
|
||||
"postStartCommand": "yarn install --production false",
|
||||
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
|
||||
"remoteUser": "node"
|
||||
}
|
|
@ -1,18 +1,18 @@
|
|||
module.exports = {
|
||||
root: true,
|
||||
env: {
|
||||
node: true
|
||||
node: true,
|
||||
},
|
||||
'extends': [
|
||||
'plugin:vue/essential',
|
||||
'eslint:recommended',
|
||||
'@vue/typescript/recommended'
|
||||
extends: [
|
||||
"plugin:vue/essential",
|
||||
"eslint:recommended",
|
||||
"@vue/typescript/recommended",
|
||||
],
|
||||
parserOptions: {
|
||||
ecmaVersion: 2020
|
||||
ecmaVersion: 2020,
|
||||
},
|
||||
rules: {
|
||||
'no-console': process.env.NODE_ENV === 'production' ? 'warn' : 'off',
|
||||
'no-debugger': process.env.NODE_ENV === 'production' ? 'warn' : 'off'
|
||||
}
|
||||
}
|
||||
"no-console": process.env.NODE_ENV === "production" ? "warn" : "off",
|
||||
"no-debugger": process.env.NODE_ENV === "production" ? "warn" : "off",
|
||||
},
|
||||
};
|
||||
|
|
15
ui/.vscode/settings.json
vendored
15
ui/.vscode/settings.json
vendored
|
@ -1,8 +1,21 @@
|
|||
{
|
||||
"editor.formatOnSave": true,
|
||||
"[vue]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[javascript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": true
|
||||
},
|
||||
"git.closeDiffOnOperation": true,
|
||||
"editor.tabSize": 2
|
||||
"editor.tabSize": 2,
|
||||
"sass.disableAutoIndent": true,
|
||||
"sass.format.convert": false,
|
||||
"sass.format.deleteWhitespace": true,
|
||||
"prettier.trailingComma": "all",
|
||||
}
|
|
@ -1,5 +1,3 @@
|
|||
module.exports = {
|
||||
presets: [
|
||||
'@vue/cli-plugin-babel/preset'
|
||||
]
|
||||
}
|
||||
presets: ["@vue/cli-plugin-babel/preset"],
|
||||
};
|
||||
|
|
|
@ -7,35 +7,34 @@
|
|||
"build": "vue-cli-service build",
|
||||
"lint": "vue-cli-service lint"
|
||||
},
|
||||
"dependencies": {
|
||||
"axios": "^0.27.2",
|
||||
"color": "^4.2.3",
|
||||
"core-js": "^3.8.3",
|
||||
"luxon": "^3.0.3",
|
||||
"register-service-worker": "^1.7.2",
|
||||
"vue": "^2.6.14",
|
||||
"vue-class-component": "^7.2.3",
|
||||
"vue-property-decorator": "^9.1.2",
|
||||
"vuetify": "^2.6.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/color": "^3.0.3",
|
||||
"@types/luxon": "^3.0.1",
|
||||
"@typescript-eslint/eslint-plugin": "^5.4.0",
|
||||
"@typescript-eslint/parser": "^5.4.0",
|
||||
"@typescript-eslint/eslint-plugin": "^6.9.0",
|
||||
"@typescript-eslint/parser": "^6.9.0",
|
||||
"@vue/cli-plugin-babel": "~5.0.0",
|
||||
"@vue/cli-plugin-eslint": "~5.0.0",
|
||||
"@vue/cli-plugin-pwa": "~5.0.0",
|
||||
"@vue/cli-plugin-typescript": "~5.0.0",
|
||||
"@vue/cli-service": "~5.0.0",
|
||||
"@vue/eslint-config-typescript": "^9.1.0",
|
||||
"eslint": "^7.32.0",
|
||||
"eslint-plugin-vue": "^8.0.3",
|
||||
"sass": "~1.32.0",
|
||||
"sass-loader": "^10.0.0",
|
||||
"typescript": "~4.5.5",
|
||||
"@vue/eslint-config-typescript": "^12.0.0",
|
||||
"axios": "^1.6.0",
|
||||
"color": "^4.2.3",
|
||||
"core-js": "^3.8.3",
|
||||
"eslint": "^8.52.0",
|
||||
"eslint-plugin-vue": "^9.18.0",
|
||||
"luxon": "^3.0.3",
|
||||
"prettier": "^3.0.3",
|
||||
"register-service-worker": "^1.7.2",
|
||||
"sass": "~1.69.5",
|
||||
"sass-loader": "^13.3.2",
|
||||
"typescript": "~5.2.2",
|
||||
"vue": "^2.7.15",
|
||||
"vue-class-component": "^7.2.3",
|
||||
"vue-cli-plugin-vuetify": "^2.5.5",
|
||||
"vue-property-decorator": "^9.1.2",
|
||||
"vue-template-compiler": "^2.6.14",
|
||||
"vuetify": "^2.7.1",
|
||||
"vuetify-loader": "^1.7.0"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,17 +1,27 @@
|
|||
<!DOCTYPE html>
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width,initial-scale=1.0">
|
||||
<link rel="icon" href="<%= BASE_URL %>favicon.ico">
|
||||
<meta charset="utf-8" />
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
|
||||
<meta name="viewport" content="width=device-width,initial-scale=1.0" />
|
||||
<link rel="icon" href="<%= BASE_URL %>favicon.ico" />
|
||||
<title><%= htmlWebpackPlugin.options.title %></title>
|
||||
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:100,300,400,500,700,900">
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/@mdi/font@latest/css/materialdesignicons.min.css">
|
||||
<link
|
||||
rel="stylesheet"
|
||||
href="https://fonts.googleapis.com/css?family=Roboto:100,300,400,500,700,900"
|
||||
/>
|
||||
<link
|
||||
rel="stylesheet"
|
||||
href="https://cdn.jsdelivr.net/npm/@mdi/font@latest/css/materialdesignicons.min.css"
|
||||
/>
|
||||
</head>
|
||||
<body>
|
||||
<noscript>
|
||||
<strong>We're sorry but <%= htmlWebpackPlugin.options.title %> doesn't work properly without JavaScript enabled. Please enable it to continue.</strong>
|
||||
<strong
|
||||
>We're sorry but <%= htmlWebpackPlugin.options.title %> doesn't work
|
||||
properly without JavaScript enabled. Please enable it to
|
||||
continue.</strong
|
||||
>
|
||||
</noscript>
|
||||
<div id="app"></div>
|
||||
<!-- built files will be auto injected -->
|
||||
|
|
|
@ -18,5 +18,4 @@ import { Component, Vue } from "vue-property-decorator";
|
|||
export default class Dashboard extends Vue {}
|
||||
</script>
|
||||
|
||||
<style>
|
||||
</style>
|
||||
<style></style>
|
||||
|
|
|
@ -18,10 +18,10 @@ import { Component, Vue } from "@/ovd-vue";
|
|||
|
||||
@Component
|
||||
export default class DashboardInfo extends Vue {
|
||||
private server_host = "https://oekzident.de";
|
||||
private server_name = "OEKZident";
|
||||
private version = "0.0.1";
|
||||
private lan_ip = "0.0.0.0";
|
||||
public server_host = "https://oekzident.de";
|
||||
public server_name = "OEKZident";
|
||||
public version = "0.0.1";
|
||||
public lan_ip = "0.0.0.0";
|
||||
|
||||
public created(): void {
|
||||
super.created();
|
||||
|
@ -43,7 +43,7 @@ export default class DashboardInfo extends Vue {
|
|||
(data) => {
|
||||
this.server_host = data.host;
|
||||
this.server_name = data.name;
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
// Update Version
|
||||
|
@ -57,4 +57,4 @@ export default class DashboardInfo extends Vue {
|
|||
});
|
||||
}
|
||||
}
|
||||
</script>
|
||||
</script>
|
||||
|
|
|
@ -22,10 +22,10 @@ import { Component, Vue } from "@/ovd-vue";
|
|||
|
||||
@Component
|
||||
export default class ImageCarousel extends Vue {
|
||||
private urls: string[] = require("@/assets/image_testdata.json");
|
||||
private height = 300;
|
||||
private contain = false;
|
||||
private speed = 10000;
|
||||
public urls: string[] = require("@/assets/image_testdata.json");
|
||||
public height = 300;
|
||||
public contain = false;
|
||||
public speed = 10000;
|
||||
|
||||
public created(): void {
|
||||
super.created();
|
||||
|
@ -39,7 +39,7 @@ export default class ImageCarousel extends Vue {
|
|||
// Update Images
|
||||
this.$ovdashboard.api_get_list("image/list", (names) => {
|
||||
this.urls = names.map((name: string) =>
|
||||
this.$ovdashboard.api_url(`image/get/${name}`)
|
||||
this.$ovdashboard.api_url(`image/get/${name}`),
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -71,4 +71,4 @@ export default class ImageCarousel extends Vue {
|
|||
}
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</style>
|
||||
|
|
|
@ -7,7 +7,7 @@ import { Component, Vue } from "@/ovd-vue";
|
|||
|
||||
@Component
|
||||
export default class Message extends Vue {
|
||||
private html = require("@/assets/message_testdata.json");
|
||||
public html = require("@/assets/message_testdata.json");
|
||||
|
||||
public created(): void {
|
||||
super.created();
|
||||
|
@ -21,7 +21,7 @@ export default class Message extends Vue {
|
|||
// Update Message
|
||||
this.$ovdashboard.api_get_string(
|
||||
"text/get/html/message",
|
||||
(data) => (this.html = data)
|
||||
(data) => (this.html = data),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -59,4 +59,4 @@ div:deep() {
|
|||
font-weight: bold;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</style>
|
||||
|
|
|
@ -5,7 +5,7 @@ export class Model {
|
|||
// source: https://gist.github.com/hyamamoto/fd435505d29ebfa3d9716fd2be8d42f0?permalink_comment_id=2775538#gistcomment-2775538
|
||||
let hash = 0;
|
||||
for (let i = 0; i < str.length; i++)
|
||||
hash = Math.imul(31, hash) + str.charCodeAt(i) | 0;
|
||||
hash = (Math.imul(31, hash) + str.charCodeAt(i)) | 0;
|
||||
|
||||
return new Uint32Array([hash])[0].toString(36);
|
||||
}
|
||||
|
|
|
@ -15,23 +15,20 @@ import Color from "color";
|
|||
|
||||
@Component
|
||||
export default class TickerBar extends Vue {
|
||||
private content = "<p>changeme</p>";
|
||||
public content = "<p>changeme</p>";
|
||||
|
||||
private color = "primary";
|
||||
|
||||
@Ref("content")
|
||||
private readonly _content!: HTMLDivElement;
|
||||
public color = "primary";
|
||||
|
||||
@Ref("marquee")
|
||||
private readonly _marquee!: HTMLSpanElement;
|
||||
|
||||
private get is_dark(): boolean {
|
||||
public get is_dark(): boolean {
|
||||
return this.footer_color.isDark();
|
||||
}
|
||||
|
||||
private get footer_color(): Color {
|
||||
// try getting from vuetify theme
|
||||
let color = this.$vuetify.theme.themes.light[this.color];
|
||||
const color = this.$vuetify.theme.themes.light[this.color];
|
||||
|
||||
if (typeof color === "string") {
|
||||
return Color(color);
|
||||
|
|
|
@ -4,11 +4,11 @@
|
|||
{{ title }}
|
||||
</span>
|
||||
<template v-for="(event, index) in events">
|
||||
<EventItem :event="event" :key="event.hash" />
|
||||
<EventItem :event="event" :key="`event-${index}`" />
|
||||
<v-divider
|
||||
v-if="index < events.length - 1"
|
||||
class="mx-5"
|
||||
:key="`${event.hash}-div`"
|
||||
:key="`event-div-${index}`"
|
||||
/>
|
||||
</template>
|
||||
</v-list>
|
||||
|
@ -16,8 +16,8 @@
|
|||
|
||||
<script lang="ts">
|
||||
import { Component, Prop, Vue } from "vue-property-decorator";
|
||||
import { EventData } from "./EventModel";
|
||||
import EventItem from "./EventItem.vue";
|
||||
import { EventData } from "./EventModel";
|
||||
|
||||
@Component({
|
||||
components: {
|
||||
|
@ -26,10 +26,10 @@ import EventItem from "./EventItem.vue";
|
|||
})
|
||||
export default class Calendar extends Vue {
|
||||
@Prop({ default: "CALENDAR" })
|
||||
private readonly title!: string;
|
||||
public readonly title!: string;
|
||||
|
||||
@Prop({ default: () => [] })
|
||||
private readonly events!: EventData[];
|
||||
public readonly events!: EventData[];
|
||||
}
|
||||
</script>
|
||||
|
||||
|
@ -37,4 +37,4 @@ export default class Calendar extends Vue {
|
|||
.v-list .v-divider {
|
||||
border-color: rgba(0, 0, 0, 0.25);
|
||||
}
|
||||
</style>
|
||||
</style>
|
||||
|
|
|
@ -29,7 +29,7 @@ export default class CalendarCarousel extends Vue {
|
|||
private interval?: number;
|
||||
|
||||
private data: CalendarData[] = require("@/assets/calendar_testdata.json");
|
||||
private speed = 10000;
|
||||
public speed = 10000;
|
||||
|
||||
@Ref("main")
|
||||
private readonly _main?: Vue;
|
||||
|
@ -57,7 +57,7 @@ export default class CalendarCarousel extends Vue {
|
|||
events: calendars[i],
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -70,7 +70,7 @@ export default class CalendarCarousel extends Vue {
|
|||
"calendar/config",
|
||||
(data) => {
|
||||
this.speed = data.speed;
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -98,8 +98,8 @@ export default class CalendarCarousel extends Vue {
|
|||
this.interval = setInterval(this.update_height, 10000);
|
||||
}
|
||||
|
||||
private get calendars(): CalendarModel[] {
|
||||
let arr = [];
|
||||
public get calendars(): CalendarModel[] {
|
||||
const arr = [];
|
||||
|
||||
for (const json_data of this.data) {
|
||||
arr.push(new CalendarModel(json_data));
|
||||
|
@ -131,4 +131,4 @@ export default class CalendarCarousel extends Vue {
|
|||
}
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</style>
|
||||
|
|
|
@ -13,11 +13,11 @@ export class CalendarModel extends Model {
|
|||
public constructor(json_data: CalendarData) {
|
||||
super();
|
||||
|
||||
this.title = json_data.title
|
||||
this.title = json_data.title;
|
||||
|
||||
this.events = [];
|
||||
for (const event_data of json_data.events) {
|
||||
this.events.push(new EventModel(event_data))
|
||||
this.events.push(new EventModel(event_data));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -17,23 +17,23 @@
|
|||
</template>
|
||||
|
||||
<script lang="ts">
|
||||
import { Component, Prop, Vue } from "vue-property-decorator";
|
||||
import { DateTime } from "luxon";
|
||||
import { Component, Prop, Vue } from "vue-property-decorator";
|
||||
|
||||
@Component
|
||||
export default class EventDate extends Vue {
|
||||
@Prop()
|
||||
private readonly date!: DateTime;
|
||||
|
||||
private get day(): string {
|
||||
public get day(): string {
|
||||
return this.date.toFormat("dd.");
|
||||
}
|
||||
|
||||
private get month(): string {
|
||||
public get month(): string {
|
||||
return this.date.toFormat("MM.");
|
||||
}
|
||||
|
||||
private get time(): string {
|
||||
public get time(): string {
|
||||
return this.date.toLocaleString(DateTime.TIME_24_SIMPLE);
|
||||
}
|
||||
}
|
||||
|
@ -49,4 +49,4 @@ export default class EventDate extends Vue {
|
|||
min-width: 130px;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</style>
|
||||
|
|
|
@ -12,15 +12,7 @@
|
|||
{{ event.description }}
|
||||
</v-list-item-subtitle>
|
||||
<v-list-item-subtitle
|
||||
class="
|
||||
d-inline-block
|
||||
text-truncate
|
||||
thw-heading-font
|
||||
blue-grey--text
|
||||
text--darken-1
|
||||
font-weight-bold
|
||||
ma-0
|
||||
"
|
||||
class="d-inline-block text-truncate thw-heading-font blue-grey--text text--darken-1 font-weight-bold ma-0"
|
||||
>
|
||||
{{ data_string }}
|
||||
</v-list-item-subtitle>
|
||||
|
@ -29,10 +21,10 @@
|
|||
</template>
|
||||
|
||||
<script lang="ts">
|
||||
import { Component, Prop, Vue } from "vue-property-decorator";
|
||||
import { DateTime, DurationLikeObject } from "luxon";
|
||||
import { EventModel } from "./EventModel";
|
||||
import { Component, Prop, Vue } from "vue-property-decorator";
|
||||
import EventDate from "./EventDate.vue";
|
||||
import { EventModel } from "./EventModel";
|
||||
|
||||
@Component({
|
||||
components: {
|
||||
|
@ -41,15 +33,15 @@ import EventDate from "./EventDate.vue";
|
|||
})
|
||||
export default class EventItem extends Vue {
|
||||
@Prop()
|
||||
private readonly event!: EventModel;
|
||||
public readonly event!: EventModel;
|
||||
|
||||
private get data_string(): string {
|
||||
public get data_string(): string {
|
||||
const locale_string = this.event.start.toLocaleString(
|
||||
DateTime.DATETIME_MED_WITH_WEEKDAY
|
||||
DateTime.DATETIME_MED_WITH_WEEKDAY,
|
||||
);
|
||||
|
||||
// decide which duration units to include
|
||||
let units: (keyof DurationLikeObject)[] = ["hours"];
|
||||
const units: (keyof DurationLikeObject)[] = ["hours"];
|
||||
|
||||
if (this.event.duration.as("days") >= 1) {
|
||||
// include days if duration is at least one day
|
||||
|
@ -72,5 +64,4 @@ export default class EventItem extends Vue {
|
|||
}
|
||||
</script>
|
||||
|
||||
<style>
|
||||
</style>
|
||||
<style></style>
|
||||
|
|
|
@ -19,13 +19,11 @@ export class EventModel extends Model {
|
|||
|
||||
this.summary = json_data.summary;
|
||||
this.description = json_data.description;
|
||||
this.start = DateTime
|
||||
.fromISO(json_data.dtstart)
|
||||
.setLocale(navigator.language);
|
||||
const end = DateTime
|
||||
.fromISO(json_data.dtend)
|
||||
.setLocale(navigator.language);
|
||||
this.start = DateTime.fromISO(json_data.dtstart).setLocale(
|
||||
navigator.language,
|
||||
);
|
||||
const end = DateTime.fromISO(json_data.dtend).setLocale(navigator.language);
|
||||
|
||||
this.duration = end.diff(this.start);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,12 +3,12 @@
|
|||
</template>
|
||||
|
||||
<script lang="ts">
|
||||
import { Component, Prop, Vue } from "vue-property-decorator";
|
||||
import { DateTime } from "luxon";
|
||||
import { Component, Prop, Vue } from "vue-property-decorator";
|
||||
|
||||
@Component
|
||||
export default class Clock extends Vue {
|
||||
private formatted = "";
|
||||
public formatted = "";
|
||||
private interval?: number;
|
||||
|
||||
@Prop({ required: true })
|
||||
|
|
|
@ -23,10 +23,10 @@ import { Component, Vue } from "@/ovd-vue";
|
|||
|
||||
@Component
|
||||
export default class THWLogo extends Vue {
|
||||
private above = "Technisches Hilfswerk";
|
||||
private below = "OV Musterstadt";
|
||||
public above = "Technisches Hilfswerk";
|
||||
public below = "OV Musterstadt";
|
||||
|
||||
private get logo_url(): string {
|
||||
public get logo_url(): string {
|
||||
return this.$ovdashboard.api_url("file/get/logo");
|
||||
}
|
||||
|
||||
|
|
|
@ -38,7 +38,7 @@ import THWLogo from "./THWLogo.vue";
|
|||
},
|
||||
})
|
||||
export default class TitleBar extends Vue {
|
||||
private title = "<h1>TITLE</h1>";
|
||||
public title = "<h1>TITLE</h1>";
|
||||
|
||||
public created(): void {
|
||||
super.created();
|
||||
|
|
5
ui/src/d.ts/shims-ovdashboard.d.ts
vendored
5
ui/src/d.ts/shims-ovdashboard.d.ts
vendored
|
@ -1,10 +1,9 @@
|
|||
import { OVDashboardPlugin } from "@/plugins/ovdashboard";
|
||||
|
||||
declare module 'vue/types/vue' {
|
||||
declare module "vue/types/vue" {
|
||||
interface Vue {
|
||||
$ovdashboard: OVDashboardPlugin;
|
||||
}
|
||||
}
|
||||
|
||||
export { };
|
||||
|
||||
export {};
|
||||
|
|
4
ui/src/d.ts/shims-tsx.d.ts
vendored
4
ui/src/d.ts/shims-tsx.d.ts
vendored
|
@ -1,11 +1,11 @@
|
|||
import Vue, { VNode } from 'vue'
|
||||
import Vue, { VNode } from "vue";
|
||||
|
||||
declare global {
|
||||
namespace JSX {
|
||||
interface Element extends VNode {}
|
||||
interface ElementClass extends Vue {}
|
||||
interface IntrinsicElements {
|
||||
[elem: string]: any
|
||||
[elem: string]: any;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
6
ui/src/d.ts/shims-vue.d.ts
vendored
6
ui/src/d.ts/shims-vue.d.ts
vendored
|
@ -1,4 +1,4 @@
|
|||
declare module '*.vue' {
|
||||
import Vue from 'vue'
|
||||
export default Vue
|
||||
declare module "*.vue" {
|
||||
import Vue from "vue";
|
||||
export default Vue;
|
||||
}
|
||||
|
|
6
ui/src/d.ts/shims-vuetify.d.ts
vendored
6
ui/src/d.ts/shims-vuetify.d.ts
vendored
|
@ -1,4 +1,4 @@
|
|||
declare module 'vuetify/lib/framework' {
|
||||
import Vuetify from 'vuetify'
|
||||
export default Vuetify
|
||||
declare module "vuetify/lib/framework" {
|
||||
import Vuetify from "vuetify";
|
||||
export default Vuetify;
|
||||
}
|
||||
|
|
|
@ -1,16 +1,16 @@
|
|||
import Vue from "vue"
|
||||
import Vue from "vue";
|
||||
|
||||
import "@/registerServiceWorker"
|
||||
import "@/sass/fonts.scss"
|
||||
import "@/registerServiceWorker";
|
||||
import "@/sass/fonts.scss";
|
||||
|
||||
import App from "@/App.vue"
|
||||
import ovdashboard from "@/plugins/ovdashboard"
|
||||
import vuetify from "@/plugins/vuetify"
|
||||
import App from "@/App.vue";
|
||||
import ovdashboard from "@/plugins/ovdashboard";
|
||||
import vuetify from "@/plugins/vuetify";
|
||||
|
||||
Vue.config.productionTip = false
|
||||
Vue.use(ovdashboard)
|
||||
Vue.config.productionTip = false;
|
||||
Vue.use(ovdashboard);
|
||||
|
||||
new Vue({
|
||||
vuetify,
|
||||
render: h => h(App)
|
||||
}).$mount('#app')
|
||||
render: (h) => h(App),
|
||||
}).$mount("#app");
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import axios, { AxiosInstance, AxiosPromise } from 'axios';
|
||||
import Vue from 'vue';
|
||||
import axios, { AxiosInstance, AxiosPromise } from "axios";
|
||||
import Vue from "vue";
|
||||
|
||||
export class OVDashboardPlugin {
|
||||
private axios: AxiosInstance;
|
||||
|
@ -28,7 +28,6 @@ export class OVDashboardPlugin {
|
|||
private get api_baseurl(): string {
|
||||
if (process.env.NODE_ENV === "production") {
|
||||
return `//${window.location.host}/api`;
|
||||
|
||||
} else if (process.env.NODE_ENV !== "development") {
|
||||
console.warn("Unexpected NODE_ENV value");
|
||||
}
|
||||
|
@ -52,10 +51,7 @@ export class OVDashboardPlugin {
|
|||
return this.axios.get<T>(this.api_url(endpoint));
|
||||
}
|
||||
|
||||
private api_get<T>(
|
||||
endpoint: string,
|
||||
on_success: (data: T) => void
|
||||
): void {
|
||||
private api_get<T>(endpoint: string, on_success: (data: T) => void): void {
|
||||
this.api_get_prepare<T>(endpoint)
|
||||
.then((response) => on_success(response.data))
|
||||
.catch(this.fail(endpoint));
|
||||
|
@ -63,7 +59,7 @@ export class OVDashboardPlugin {
|
|||
|
||||
public api_get_string(
|
||||
endpoint: string,
|
||||
on_success: (data: string) => void
|
||||
on_success: (data: string) => void,
|
||||
): void {
|
||||
this.api_get<string>(endpoint, (data) => {
|
||||
if (typeof data !== "string") {
|
||||
|
@ -84,7 +80,7 @@ export class OVDashboardPlugin {
|
|||
|
||||
public api_get_list(
|
||||
endpoint: string,
|
||||
on_success: (data: string[]) => void
|
||||
on_success: (data: string[]) => void,
|
||||
): void {
|
||||
this.api_get(endpoint, (data) => {
|
||||
if (!this.check_array<string>(data)) {
|
||||
|
@ -105,7 +101,7 @@ export class OVDashboardPlugin {
|
|||
|
||||
public api_get_object<Type extends object>(
|
||||
endpoint: string,
|
||||
on_success: (data: Type) => void
|
||||
on_success: (data: Type) => void,
|
||||
): void {
|
||||
this.api_get<Type>(endpoint, (data) => {
|
||||
if (!this.check_object(data)) {
|
||||
|
@ -119,9 +115,11 @@ export class OVDashboardPlugin {
|
|||
|
||||
public api_get_object_multi<Type extends object>(
|
||||
endpoints: string[],
|
||||
on_success: (data: Type[]) => void
|
||||
on_success: (data: Type[]) => void,
|
||||
): void {
|
||||
const promises = endpoints.map((endpoint) => this.api_get_prepare<Type>(endpoint));
|
||||
const promises = endpoints.map((endpoint) =>
|
||||
this.api_get_prepare<Type>(endpoint),
|
||||
);
|
||||
|
||||
Promise.all(promises)
|
||||
.then((responses) => {
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import Vue from 'vue';
|
||||
import Vuetify from 'vuetify/lib/framework';
|
||||
import Vue from "vue";
|
||||
import Vuetify from "vuetify/lib/framework";
|
||||
|
||||
Vue.use(Vuetify);
|
||||
|
||||
|
|
|
@ -1,32 +1,34 @@
|
|||
/* eslint-disable no-console */
|
||||
|
||||
import { register } from 'register-service-worker'
|
||||
import { register } from "register-service-worker";
|
||||
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
if (process.env.NODE_ENV === "production") {
|
||||
register(`${process.env.BASE_URL}service-worker.js`, {
|
||||
ready () {
|
||||
ready() {
|
||||
console.log(
|
||||
'App is being served from cache by a service worker.\n' +
|
||||
'For more details, visit https://goo.gl/AFskqB'
|
||||
)
|
||||
"App is being served from cache by a service worker.\n" +
|
||||
"For more details, visit https://goo.gl/AFskqB",
|
||||
);
|
||||
},
|
||||
registered () {
|
||||
console.log('Service worker has been registered.')
|
||||
registered() {
|
||||
console.log("Service worker has been registered.");
|
||||
},
|
||||
cached () {
|
||||
console.log('Content has been cached for offline use.')
|
||||
cached() {
|
||||
console.log("Content has been cached for offline use.");
|
||||
},
|
||||
updatefound () {
|
||||
console.log('New content is downloading.')
|
||||
updatefound() {
|
||||
console.log("New content is downloading.");
|
||||
},
|
||||
updated () {
|
||||
console.log('New content is available; please refresh.')
|
||||
updated() {
|
||||
console.log("New content is available; please refresh.");
|
||||
},
|
||||
offline () {
|
||||
console.log('No internet connection found. App is running in offline mode.')
|
||||
offline() {
|
||||
console.log(
|
||||
"No internet connection found. App is running in offline mode.",
|
||||
);
|
||||
},
|
||||
error (error) {
|
||||
console.error('Error during service worker registration:', error)
|
||||
}
|
||||
})
|
||||
error(error) {
|
||||
console.error("Error during service worker registration:", error);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
|
@ -1,11 +1,17 @@
|
|||
@font-face {
|
||||
font-family: "Lubalin Graph";
|
||||
src: url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.eot");
|
||||
src: url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.eot?#iefix") format("embedded-opentype"),
|
||||
url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.woff2") format("woff2"),
|
||||
url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.woff") format("woff"),
|
||||
url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.ttf") format("truetype"),
|
||||
url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.svg#Lubalin BQ") format("svg");
|
||||
src:
|
||||
url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.eot?#iefix")
|
||||
format("embedded-opentype"),
|
||||
url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.woff2")
|
||||
format("woff2"),
|
||||
url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.woff")
|
||||
format("woff"),
|
||||
url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.ttf")
|
||||
format("truetype"),
|
||||
url("//db.onlinewebfonts.com/t/60eaf3171fce0c04eb9b3e08bba9bf05.svg#Lubalin BQ")
|
||||
format("svg");
|
||||
font-weight: bold;
|
||||
font-style: normal;
|
||||
}
|
||||
|
@ -13,11 +19,17 @@
|
|||
@font-face {
|
||||
font-family: "Lubalin Graph";
|
||||
src: url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.eot");
|
||||
src: url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.eot?#iefix") format("embedded-opentype"),
|
||||
url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.woff2") format("woff2"),
|
||||
url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.woff") format("woff"),
|
||||
url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.ttf") format("truetype"),
|
||||
url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.svg#LubalinGraph-Book") format("svg");
|
||||
src:
|
||||
url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.eot?#iefix")
|
||||
format("embedded-opentype"),
|
||||
url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.woff2")
|
||||
format("woff2"),
|
||||
url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.woff")
|
||||
format("woff"),
|
||||
url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.ttf")
|
||||
format("truetype"),
|
||||
url("//db.onlinewebfonts.com/t/ad42b6e73cbf720f172faa6355b69ec8.svg#LubalinGraph-Book")
|
||||
format("svg");
|
||||
font-weight: normal;
|
||||
font-style: normal;
|
||||
}
|
||||
|
@ -25,11 +37,17 @@
|
|||
@font-face {
|
||||
font-family: "Neue Praxis";
|
||||
src: url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.eot");
|
||||
src: url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.eot?#iefix") format("embedded-opentype"),
|
||||
url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.woff2") format("woff2"),
|
||||
url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.woff") format("woff"),
|
||||
url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.ttf") format("truetype"),
|
||||
url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.svg#PraxisEF") format("svg");
|
||||
src:
|
||||
url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.eot?#iefix")
|
||||
format("embedded-opentype"),
|
||||
url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.woff2")
|
||||
format("woff2"),
|
||||
url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.woff")
|
||||
format("woff"),
|
||||
url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.ttf")
|
||||
format("truetype"),
|
||||
url("//db.onlinewebfonts.com/t/95d43d14f7d8f0f4692f507c86a29e25.svg#PraxisEF")
|
||||
format("svg");
|
||||
font-weight: bold;
|
||||
font-style: normal;
|
||||
}
|
||||
|
@ -37,11 +55,17 @@
|
|||
@font-face {
|
||||
font-family: "Neue Praxis";
|
||||
src: url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.eot");
|
||||
src: url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.eot?#iefix") format("embedded-opentype"),
|
||||
url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.woff2") format("woff2"),
|
||||
url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.woff") format("woff"),
|
||||
url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.ttf") format("truetype"),
|
||||
url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.svg#PraxisEF") format("svg");
|
||||
src:
|
||||
url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.eot?#iefix")
|
||||
format("embedded-opentype"),
|
||||
url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.woff2")
|
||||
format("woff2"),
|
||||
url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.woff")
|
||||
format("woff"),
|
||||
url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.ttf")
|
||||
format("truetype"),
|
||||
url("//db.onlinewebfonts.com/t/3d62d4fffdd20ba4608e1b29e0f6fb42.svg#PraxisEF")
|
||||
format("svg");
|
||||
font-weight: normal;
|
||||
font-style: normal;
|
||||
}
|
||||
|
@ -49,11 +73,17 @@
|
|||
@font-face {
|
||||
font-family: "Neue Demos";
|
||||
src: url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.eot");
|
||||
src: url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.eot?#iefix") format("embedded-opentype"),
|
||||
url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.woff2") format("woff2"),
|
||||
url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.woff") format("woff"),
|
||||
url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.ttf") format("truetype"),
|
||||
url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.svg#DemosEF") format("svg");
|
||||
src:
|
||||
url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.eot?#iefix")
|
||||
format("embedded-opentype"),
|
||||
url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.woff2")
|
||||
format("woff2"),
|
||||
url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.woff")
|
||||
format("woff"),
|
||||
url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.ttf")
|
||||
format("truetype"),
|
||||
url("//db.onlinewebfonts.com/t/57c19d4b8c4d1632fc97994508a35f5d.svg#DemosEF")
|
||||
format("svg");
|
||||
font-weight: normal;
|
||||
font-style: normal;
|
||||
}
|
||||
|
@ -61,11 +91,17 @@
|
|||
@font-face {
|
||||
font-family: "Neue Demos";
|
||||
src: url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.eot");
|
||||
src: url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.eot?#iefix") format("embedded-opentype"),
|
||||
url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.woff2") format("woff2"),
|
||||
url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.woff") format("woff"),
|
||||
url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.ttf") format("truetype"),
|
||||
url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.svg#DemosEF") format("svg");
|
||||
src:
|
||||
url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.eot?#iefix")
|
||||
format("embedded-opentype"),
|
||||
url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.woff2")
|
||||
format("woff2"),
|
||||
url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.woff")
|
||||
format("woff"),
|
||||
url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.ttf")
|
||||
format("truetype"),
|
||||
url("//db.onlinewebfonts.com/t/ad75fa70682671bbf5a5cec5f6df1470.svg#DemosEF")
|
||||
format("svg");
|
||||
font-weight: normal;
|
||||
font-style: italic;
|
||||
}
|
||||
|
@ -91,4 +127,4 @@
|
|||
@extend .thw-text-font;
|
||||
|
||||
font-style: italic !important;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
$heading-font-family: "Neue Praxis", "Roboto", sans-serif;
|
||||
$body-font-family: "Neue Demos", serif;
|
||||
$body-font-family: "Neue Demos", serif;
|
||||
|
|
|
@ -16,20 +16,11 @@
|
|||
"useDefineForClassFields": true,
|
||||
"sourceMap": true,
|
||||
"baseUrl": ".",
|
||||
"types": [
|
||||
"webpack-env"
|
||||
],
|
||||
"types": ["webpack-env"],
|
||||
"paths": {
|
||||
"@/*": [
|
||||
"src/*"
|
||||
]
|
||||
"@/*": ["src/*"]
|
||||
},
|
||||
"lib": [
|
||||
"esnext",
|
||||
"dom",
|
||||
"dom.iterable",
|
||||
"scripthost"
|
||||
]
|
||||
"lib": ["esnext", "dom", "dom.iterable", "scripthost"]
|
||||
},
|
||||
"include": [
|
||||
"src/**/*.ts",
|
||||
|
@ -38,7 +29,5 @@
|
|||
"tests/**/*.ts",
|
||||
"tests/**/*.tsx"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
}
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
const { defineConfig } = require('@vue/cli-service')
|
||||
const { defineConfig } = require("@vue/cli-service");
|
||||
|
||||
module.exports = defineConfig({
|
||||
transpileDependencies: [
|
||||
'vuetify'
|
||||
]
|
||||
})
|
||||
transpileDependencies: ["vuetify"],
|
||||
});
|
||||
|
|
3740
ui/yarn.lock
3740
ui/yarn.lock
File diff suppressed because it is too large
Load diff
Loading…
Reference in a new issue