refactoring/new_python #3
@@ -0,0 +1,10 @@
|
||||
.git
|
||||
.gitignore
|
||||
.pytest_cache
|
||||
.venv
|
||||
__pycache__
|
||||
*.pyc
|
||||
data
|
||||
openapi
|
||||
src
|
||||
|
||||
@@ -0,0 +1,32 @@
|
||||
# Required: bootstrap and core app settings.
|
||||
# These values should be set before the container starts.
|
||||
APP_NAME=Home Automation Backend (Python)
|
||||
APP_ENV=production
|
||||
APP_HOSTNAME=home-automation.example.com
|
||||
APP_DATABASE_URL=sqlite:////app/data/app.db
|
||||
LOCATION_DATABASE_URL=sqlite:////app/data/locationRecorder.db
|
||||
POO_DATABASE_URL=sqlite:////app/data/pooRecorder.db
|
||||
AUTH_BOOTSTRAP_USERNAME=admin
|
||||
AUTH_BOOTSTRAP_PASSWORD=change-me
|
||||
|
||||
# Optional: runtime overrides.
|
||||
# Leave these commented out to use the application's built-in defaults.
|
||||
# APP_DEBUG=
|
||||
# AUTH_SESSION_COOKIE_NAME=
|
||||
# AUTH_SESSION_TTL_HOURS=
|
||||
# AUTH_COOKIE_SECURE_OVERRIDE=
|
||||
|
||||
# Optional: Home Assistant integration.
|
||||
# Leave these empty when Home Assistant integration is not needed.
|
||||
HOME_ASSISTANT_BASE_URL=
|
||||
HOME_ASSISTANT_AUTH_TOKEN=
|
||||
POO_WEBHOOK_ID=
|
||||
POO_SENSOR_ENTITY_NAME=
|
||||
POO_SENSOR_FRIENDLY_NAME=
|
||||
|
||||
# Optional: TickTick integration.
|
||||
# APP_HOSTNAME is used to derive the OAuth callback URI automatically.
|
||||
TICKTICK_CLIENT_ID=
|
||||
TICKTICK_CLIENT_SECRET=
|
||||
TICKTICK_TOKEN=
|
||||
HOME_ASSISTANT_ACTION_TASK_PROJECT_ID=
|
||||
@@ -1,22 +0,0 @@
|
||||
name: Run nightly tests
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '0 20 * * *' # Every day at 20:00 UTC
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
jobs:
|
||||
nightly-tests:
|
||||
runs-on: [ubuntu-latest, cloud]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: '1.23'
|
||||
|
||||
- name: Test
|
||||
working-directory: ./src
|
||||
run: go test -v --short ./...
|
||||
@@ -0,0 +1,31 @@
|
||||
# CI: run the pytest suite on every push, pull request, and manual dispatch.
name: pytest

on:
  push:
    branches:
      - "**"  # all branches, including feature branches
  pull_request:
  workflow_dispatch:

jobs:
  test:
    runs-on: ubuntu-latest

    steps:
      - name: Check out repository
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.13"
          cache: pip
          # Invalidate the pip cache when either requirements file changes.
          cache-dependency-path: |
            requirements.txt
            dev-requirements.txt

      - name: Install dependencies
        # dev-requirements.txt is the pip-compile output that includes pytest.
        run: python -m pip install -r dev-requirements.txt

      - name: Run pytest
        run: python -m pytest
|
||||
@@ -1,21 +0,0 @@
|
||||
name: Run short tests
|
||||
|
||||
on:
|
||||
push:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
run-tests:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: '1.24'
|
||||
|
||||
- name: Run short tests with coverage
|
||||
working-directory: ./src
|
||||
run: | # TODO: at this moment only Home Assistant component is tested
|
||||
go test -v --short ./components/homeassistant/... -cover -coverprofile=cover.out
|
||||
+5
-35
@@ -1,37 +1,7 @@
|
||||
# If you prefer the allow list template instead of the deny list, see community template:
|
||||
# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
|
||||
#
|
||||
# Binaries for programs and plugins
|
||||
*.exe
|
||||
*.exe~
|
||||
*.dll
|
||||
*.so
|
||||
*.dylib
|
||||
|
||||
# Test binary, built with `go test -c`
|
||||
*.test
|
||||
|
||||
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||
*.out
|
||||
|
||||
# Dependency directories (remove the comment below to include it)
|
||||
# vendor/
|
||||
|
||||
# Go workspace file
|
||||
go.work
|
||||
go.work.sum
|
||||
|
||||
# env file
|
||||
.codex
|
||||
.env
|
||||
|
||||
temp_data/
|
||||
|
||||
# py file for branch switching
|
||||
.venv
|
||||
__pycache__/
|
||||
.pytest_cache/
|
||||
config.yaml
|
||||
bin/
|
||||
*.db
|
||||
|
||||
cover.html
|
||||
.venv/
|
||||
__pycache__/
|
||||
*.pyc
|
||||
data/
|
||||
|
||||
Vendored
+12
-27
@@ -1,35 +1,20 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Launch Package",
|
||||
"type": "go",
|
||||
"name": "Launch Python App",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}"
|
||||
},
|
||||
{
|
||||
"name": "Launch Poo Reverse",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}/src/helper/poo_recorder_helper/main.go",
|
||||
"module": "uvicorn",
|
||||
"args": [
|
||||
"reverse"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Launch Home Automation",
|
||||
"type": "go",
|
||||
"request": "launch",
|
||||
"mode": "auto",
|
||||
"program": "${workspaceFolder}/src/main.go",
|
||||
"args": [
|
||||
"serve"
|
||||
]
|
||||
"app.main:app",
|
||||
"--reload",
|
||||
"--host",
|
||||
"0.0.0.0",
|
||||
"--port",
|
||||
"8000"
|
||||
],
|
||||
"jinja": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
+25
@@ -0,0 +1,25 @@
|
||||
# Runtime image for the Home Automation backend (FastAPI + SQLite).
FROM python:3.12-slim

# Don't write .pyc files; flush stdout/stderr immediately (container logs).
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

WORKDIR /app

# Install dependencies first so this layer is cached across code-only changes.
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

# Application code plus the three per-database Alembic environments.
COPY app ./app
COPY alembic_app ./alembic_app
COPY alembic_app.ini ./
COPY alembic_location ./alembic_location
COPY alembic_location.ini ./
COPY alembic_poo ./alembic_poo
COPY alembic_poo.ini ./
COPY scripts ./scripts
COPY docker ./docker
COPY README.md ./
# SQLite data directory; expected to be volume-mounted for persistence.
RUN mkdir -p /app/data

EXPOSE 8000

# NOTE(review): entrypoint.sh presumably runs migrations and starts uvicorn
# on port 8000 — confirm against docker/entrypoint.sh.
ENTRYPOINT ["/app/docker/entrypoint.sh"]
|
||||
@@ -1,3 +1,296 @@
|
||||
# Home Automation Backend
|
||||
|
||||

|
||||
这是当前 `home-automation` 项目的首个 Python 版本。
|
||||
|
||||
当前系统已经包含:
|
||||
|
||||
- FastAPI Web 应用与服务端模板页面
|
||||
- SQLite + SQLAlchemy + Alembic 的三库结构
|
||||
- username/password + server-side session 鉴权
|
||||
- runtime config 页面与 app DB 持久化
|
||||
- location recorder
|
||||
- poo recorder
|
||||
- Home Assistant inbound / outbound integration
|
||||
- TickTick OAuth 与 action task 集成
|
||||
- pytest 测试与 OpenAPI 导出脚本
|
||||
- Docker / Compose 部署入口
|
||||
|
||||
当前明确不包含:
|
||||
|
||||
- Notion 模块
|
||||
|
||||
## 当前配置现实
|
||||
|
||||
当前系统仍然是三个独立的 SQLite 数据库文件,而不是单一数据库:
|
||||
|
||||
- `app` 级共享数据使用自己的 DB 文件
|
||||
- `location` 模块使用自己的 DB 文件
|
||||
- `poo` 模块使用自己的 DB 文件
|
||||
|
||||
当前阶段明确不借这次重构把这些 DB 合并。配置层已经显式反映这一点:
|
||||
|
||||
- `APP_DATABASE_URL`
|
||||
- `LOCATION_DATABASE_URL`
|
||||
- `POO_DATABASE_URL`
|
||||
|
||||
目前 auth、`location` 和 `poo` 都已经接到各自独立的数据库文件。
|
||||
|
||||
其中 `app` 级共享 DB 当前主要用于:
|
||||
|
||||
- 单个 admin 用户
|
||||
- server-side session
|
||||
- runtime config 持久化
|
||||
|
||||
这部分现在也使用 Alembic 管理:
|
||||
|
||||
- `app db` 不会在应用启动时自动创建
|
||||
- 需要先运行 `python scripts/app_db_adopt.py`
|
||||
- 这个脚本会创建新 DB 并建好 schema
|
||||
|
||||
## 当前目录
|
||||
|
||||
主要目录如下:
|
||||
|
||||
- `app/`: FastAPI 应用代码
|
||||
- `alembic_app/`: App DB 的 Alembic migration 环境
|
||||
- `alembic_location/`: Location DB 的 Alembic migration 环境
|
||||
- `alembic_poo/`: Poo DB 的 Alembic migration 环境
|
||||
- `tests/`: pytest 测试
|
||||
- `docs/`: 当前系统说明文档
|
||||
- `scripts/`: 辅助脚本,例如 OpenAPI 导出
|
||||
|
||||
## 依赖管理
|
||||
|
||||
项目现在采用 `pip-tools` 管理依赖:
|
||||
|
||||
- 生产依赖源文件:`requirements.in`
|
||||
- 开发依赖源文件:`dev-requirements.in`
|
||||
- 编译产物:
|
||||
- `requirements.txt`
|
||||
- `dev-requirements.txt`
|
||||
|
||||
更新依赖时建议使用:
|
||||
|
||||
```bash
|
||||
python -m venv .venv
|
||||
source .venv/bin/activate
|
||||
pip install pip-tools
|
||||
pip-compile requirements.in
|
||||
pip-compile dev-requirements.in
|
||||
```
|
||||
|
||||
如果要升级某个依赖,可以用:
|
||||
|
||||
```bash
|
||||
pip-compile --upgrade-package fastapi requirements.in
|
||||
pip-compile dev-requirements.in
|
||||
```
|
||||
|
||||
## 本地启动
|
||||
|
||||
建议使用 Python 3.11 或以上版本。
|
||||
|
||||
1. 创建虚拟环境并安装依赖
|
||||
|
||||
```bash
|
||||
python -m venv .venv
|
||||
source .venv/bin/activate
|
||||
pip install -r dev-requirements.txt
|
||||
```
|
||||
|
||||
2. 准备环境变量
|
||||
|
||||
```bash
|
||||
cp .env.example .env
|
||||
```
|
||||
|
||||
3. 初始化数据库
|
||||
|
||||
```bash
|
||||
python scripts/app_db_adopt.py
|
||||
python scripts/location_db_adopt.py
|
||||
python scripts/poo_db_adopt.py
|
||||
```
|
||||
|
||||
4. 启动服务
|
||||
|
||||
```bash
|
||||
uvicorn app.main:app --reload --host 0.0.0.0 --port 8000
|
||||
```
|
||||
|
||||
启动后可访问:
|
||||
|
||||
- 应用首页:`http://localhost:8000/`
|
||||
- 健康检查:`http://localhost:8000/status`
|
||||
- Swagger UI:`http://localhost:8000/docs`
|
||||
- ReDoc:`http://localhost:8000/redoc`
|
||||
|
||||
## 数据库与 Alembic
|
||||
|
||||
当前默认使用 SQLite,并区分三个数据库文件:
|
||||
|
||||
- App DB:`sqlite:///./data/app.db`
|
||||
- Location DB:`sqlite:///./data/locationRecorder.db`
|
||||
- Poo DB:`sqlite:///./data/pooRecorder.db`
|
||||
- 数据目录:`./data/`
|
||||
|
||||
初始化 migration 环境后,可继续添加模型并生成迁移:
|
||||
|
||||
当前 `app`、`location` 和 `poo` 都已经有各自独立的 Alembic 链路。
|
||||
|
||||
- App Alembic 环境:`alembic_app.ini` + `alembic_app/`
|
||||
- Location Alembic 环境:`alembic_location.ini` + `alembic_location/`
|
||||
- Poo Alembic 环境:`alembic_poo.ini` + `alembic_poo/`
|
||||
- App DB 初始化:`python scripts/app_db_adopt.py`
|
||||
- Location DB 接管 / 初始化:`python scripts/location_db_adopt.py`
|
||||
- Poo DB 接管 / 初始化:`python scripts/poo_db_adopt.py`
|
||||
|
||||
## 基础鉴权
|
||||
|
||||
当前项目提供一个单用户 admin 鉴权层,用于保护配置页面与管理能力。
|
||||
|
||||
- 认证模型:`username/password`
|
||||
- 会话模型:server-side session + cookie
|
||||
- 当前主要受保护页面:`/config`
|
||||
- 当前公开页面:`/login`
|
||||
- 当前公开 API:现有业务 API 暂未在这一轮统一收口到 auth 下
|
||||
|
||||
安全实现的当前边界:
|
||||
|
||||
- 密码使用 Argon2 做哈希存储
|
||||
- session cookie 使用 `HttpOnly`
|
||||
- `Secure` 默认随 `APP_ENV` 切换:非 development 时默认开启
|
||||
- `SameSite=Lax`
|
||||
- 登录表单和登出表单都有基础 CSRF 防护
|
||||
|
||||
首次启动时,如果 `APP_DATABASE_URL` 对应的 auth DB 里还没有用户,应用会使用:
|
||||
|
||||
- `AUTH_BOOTSTRAP_USERNAME`
|
||||
- `AUTH_BOOTSTRAP_PASSWORD`
|
||||
|
||||
创建初始 admin 用户。当前默认就是:
|
||||
|
||||
- username: `admin`
|
||||
- password: `admin`
|
||||
|
||||
首次登录后会被要求立即修改密码。这个 bootstrap 只用于首个用户落库,不是后续的完整配置管理方案。
|
||||
|
||||
当前前端主要有两条页面路径:
|
||||
|
||||
- `/login`
|
||||
- `/config`
|
||||
|
||||
无论是本地 `host:port` 还是反向代理后的域名访问,登录成功后都使用相对路径跳转到 `/config`。
|
||||
|
||||
## Config 持久化
|
||||
|
||||
当前 config 页面不会把修改写回 `.env`。
|
||||
|
||||
当前原则是:
|
||||
|
||||
- `.env` 只负责 bootstrap / fallback
|
||||
- app 启动先从 `.env` 读取数据库地址等基础配置
|
||||
- 请求期读取配置时,优先使用 app DB 中的 `app_config` 表
|
||||
- 如果数据库里没有对应值,再 fallback 到 `.env`
|
||||
|
||||
这意味着:
|
||||
|
||||
- location / poo / app DB 地址仍然属于 bootstrap 范畴
|
||||
- 运行时可编辑配置主要通过 `app_config` 表持久化
|
||||
- token / secret 这类运行时必须可取回的配置,目前允许明文存储在 config 表中
|
||||
- 登录密码仍然单独使用 Argon2 哈希,不走 config 表明文存储
|
||||
|
||||
## OpenAPI
|
||||
|
||||
可使用下面的脚本重新导出当前 API 定义:
|
||||
|
||||
```bash
|
||||
python scripts/export_openapi.py
|
||||
```
|
||||
|
||||
导出结果会写入:
|
||||
|
||||
- `openapi/openapi.json`
|
||||
- `openapi/openapi.yaml`
|
||||
|
||||
## Docker Compose
|
||||
|
||||
当前默认 Compose 服务名为 `app`,容器名固定为 `home-automation-app`。
|
||||
|
||||
启动方式:
|
||||
|
||||
```bash
|
||||
docker compose up -d --build
|
||||
```
|
||||
|
||||
持续查看日志:
|
||||
|
||||
```bash
|
||||
docker compose logs -f app
|
||||
```
|
||||
|
||||
## 运行测试
|
||||
|
||||
```bash
|
||||
pytest
|
||||
```
|
||||
|
||||
当前测试包含:
|
||||
|
||||
- app 基本启动测试
|
||||
- `/status` endpoint 测试
|
||||
- 登录 / session 基础流程测试
|
||||
|
||||
## OpenAPI 导出
|
||||
|
||||
FastAPI 默认会暴露 OpenAPI。若需要导出静态 schema 文件,可运行:
|
||||
|
||||
```bash
|
||||
python scripts/export_openapi.py
|
||||
```
|
||||
|
||||
输出文件会写到:
|
||||
|
||||
- `openapi/openapi.json`
|
||||
- `openapi/openapi.yaml`
|
||||
|
||||
`openapi/` 当前纳入版本控制。接口发生变更时,应重新运行导出脚本并同步提交生成的 schema 文件。
|
||||
|
||||
## 容器启动
|
||||
|
||||
1. 准备环境变量文件
|
||||
|
||||
```bash
|
||||
cp .env.example .env
|
||||
```
|
||||
|
||||
2. 启动容器
|
||||
|
||||
```bash
|
||||
docker compose up --build
|
||||
```
|
||||
|
||||
默认端口:
|
||||
|
||||
- `8000:8000`
|
||||
|
||||
SQLite 持久化目录:
|
||||
|
||||
- 本地 `./data`
|
||||
- 容器内 `/app/data`
|
||||
|
||||
## 后续迁移建议
|
||||
|
||||
后续可以在当前骨架上继续迁移这些模块:
|
||||
|
||||
- TickTick integration
|
||||
- Home Assistant integration
|
||||
- poo records
|
||||
|
||||
建议继续参考:
|
||||
|
||||
- [当前系统盘点](docs/current-system-inventory.md)
|
||||
- [Python 重构方案](docs/python-rewrite-plan.md)
|
||||
- [迁移风险清单](docs/migration-risks.md)
|
||||
- [Location Recorder 接管说明](docs/location-recorder.md)
|
||||
- [基础鉴权说明](docs/auth.md)
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
[alembic]
|
||||
script_location = alembic_app
|
||||
prepend_sys_path = .
|
||||
path_separator = os
|
||||
sqlalchemy.url = sqlite:///./data/app.db
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
@@ -0,0 +1,49 @@
|
||||
# Alembic migration environment for the application ("app") database
# (auth users/sessions and the runtime app_config table).
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

from app.auth_db import AuthBase
from app.config import get_settings
# Model imports are required so their tables register on AuthBase.metadata
# before autogenerate runs; they are otherwise unused here.
from app.models.config import AppConfigEntry  # noqa: F401
from app.models.auth import AuthSession, AuthUser  # noqa: F401

# Alembic Config object; exposes values from alembic_app.ini.
config = context.config

if config.config_file_name is not None:
    fileConfig(config.config_file_name)

settings = get_settings()
# Prefer the URL from application settings unless alembic_app.ini was
# explicitly pointed somewhere other than the checked-in default.
configured_url = config.get_main_option("sqlalchemy.url")
if not configured_url or configured_url == "sqlite:///./data/app.db":
    config.set_main_option("sqlalchemy.url", settings.app_database_url)

target_metadata = AuthBase.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode: emit SQL without a DB connection."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(url=url, target_metadata=target_metadata, literal_binds=True)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode: connect to the DB and apply them."""
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # migrations need no connection pooling
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
@@ -0,0 +1,25 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
@@ -0,0 +1,56 @@
|
||||
"""app auth baseline
|
||||
|
||||
Revision ID: 20260420_03_app_auth_baseline
|
||||
Revises:
|
||||
Create Date: 2026-04-20 00:00:00.000000
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
revision: str = "20260420_03_app_auth_baseline"
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.create_table(
|
||||
"auth_users",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("username", sa.String(length=255), nullable=False),
|
||||
sa.Column("password_hash", sa.String(length=255), nullable=False),
|
||||
sa.Column("is_active", sa.Boolean(), nullable=False),
|
||||
sa.Column("force_password_change", sa.Boolean(), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_index(op.f("ix_auth_users_username"), "auth_users", ["username"], unique=True)
|
||||
|
||||
op.create_table(
|
||||
"auth_sessions",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("user_id", sa.Integer(), nullable=False),
|
||||
sa.Column("token_hash", sa.String(length=64), nullable=False),
|
||||
sa.Column("csrf_token", sa.String(length=128), nullable=False),
|
||||
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column("expires_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.Column("revoked_at", sa.DateTime(timezone=True), nullable=True),
|
||||
sa.ForeignKeyConstraint(["user_id"], ["auth_users.id"]),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_index(op.f("ix_auth_sessions_expires_at"), "auth_sessions", ["expires_at"], unique=False)
|
||||
op.create_index(op.f("ix_auth_sessions_token_hash"), "auth_sessions", ["token_hash"], unique=True)
|
||||
op.create_index(op.f("ix_auth_sessions_user_id"), "auth_sessions", ["user_id"], unique=False)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_index(op.f("ix_auth_sessions_user_id"), table_name="auth_sessions")
|
||||
op.drop_index(op.f("ix_auth_sessions_token_hash"), table_name="auth_sessions")
|
||||
op.drop_index(op.f("ix_auth_sessions_expires_at"), table_name="auth_sessions")
|
||||
op.drop_table("auth_sessions")
|
||||
op.drop_index(op.f("ix_auth_users_username"), table_name="auth_users")
|
||||
op.drop_table("auth_users")
|
||||
@@ -0,0 +1,34 @@
|
||||
"""app config table
|
||||
|
||||
Revision ID: 20260420_04_app_config_table
|
||||
Revises: 20260420_03_app_auth_baseline
|
||||
Create Date: 2026-04-20 00:00:01.000000
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
revision: str = "20260420_04_app_config_table"
|
||||
down_revision: Union[str, None] = "20260420_03_app_auth_baseline"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.create_table(
|
||||
"app_config",
|
||||
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
|
||||
sa.Column("key", sa.String(length=255), nullable=False),
|
||||
sa.Column("value", sa.String(), nullable=False),
|
||||
sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
|
||||
sa.PrimaryKeyConstraint("id"),
|
||||
)
|
||||
op.create_index(op.f("ix_app_config_key"), "app_config", ["key"], unique=True)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_index(op.f("ix_app_config_key"), table_name="app_config")
|
||||
op.drop_table("app_config")
|
||||
@@ -0,0 +1,37 @@
|
||||
[alembic]
|
||||
script_location = alembic_location
|
||||
prepend_sys_path = .
|
||||
path_separator = os
|
||||
sqlalchemy.url = sqlite:///./data/locationRecorder.db
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
@@ -0,0 +1,2 @@
|
||||
This directory contains the Alembic migration environment for the Python rewrite skeleton.
|
||||
|
||||
@@ -0,0 +1,48 @@
|
||||
# Alembic migration environment for the location-recorder database.
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

from app.config import get_settings
# Importing the model registers its table on Base.metadata for autogenerate;
# it is otherwise unused here.
from app.models import Location  # noqa: F401
from app.models.base import Base

# Alembic Config object; exposes values from alembic_location.ini.
config = context.config

if config.config_file_name is not None:
    fileConfig(config.config_file_name)

settings = get_settings()
# Prefer the URL from application settings unless alembic_location.ini was
# explicitly pointed somewhere other than the checked-in default.
configured_url = config.get_main_option("sqlalchemy.url")
if not configured_url or configured_url == "sqlite:///./data/locationRecorder.db":
    config.set_main_option("sqlalchemy.url", settings.location_database_url)

target_metadata = Base.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode: emit SQL without a DB connection."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(url=url, target_metadata=target_metadata, literal_binds=True)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode: connect to the DB and apply them."""
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # migrations need no connection pooling
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
@@ -0,0 +1,26 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
"""location baseline
|
||||
|
||||
Revision ID: 20260419_01_location_baseline
|
||||
Revises:
|
||||
Create Date: 2026-04-19 00:00:00.000000
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
revision: str = "20260419_01_location_baseline"
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.create_table(
|
||||
"location",
|
||||
sa.Column("person", sa.Text(), nullable=False),
|
||||
sa.Column("datetime", sa.Text(), nullable=False),
|
||||
sa.Column("latitude", sa.Float(), nullable=False),
|
||||
sa.Column("longitude", sa.Float(), nullable=False),
|
||||
sa.Column("altitude", sa.Float(), nullable=True),
|
||||
sa.PrimaryKeyConstraint("person", "datetime"),
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_table("location")
|
||||
@@ -0,0 +1,37 @@
|
||||
[alembic]
|
||||
script_location = alembic_poo
|
||||
prepend_sys_path = .
|
||||
path_separator = os
|
||||
sqlalchemy.url = sqlite:///./data/pooRecorder.db
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
@@ -0,0 +1,48 @@
|
||||
# Alembic migration environment for the poo-recorder database.
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

from app.config import get_settings
# Importing the model registers its table on PooBase.metadata for
# autogenerate; it is otherwise unused here.
from app.models.poo import PooRecord  # noqa: F401
from app.poo_db import PooBase

# Alembic Config object; exposes values from alembic_poo.ini.
config = context.config

if config.config_file_name is not None:
    fileConfig(config.config_file_name)

settings = get_settings()
# Prefer the URL from application settings unless alembic_poo.ini was
# explicitly pointed somewhere other than the checked-in default.
configured_url = config.get_main_option("sqlalchemy.url")
if not configured_url or configured_url == "sqlite:///./data/pooRecorder.db":
    config.set_main_option("sqlalchemy.url", settings.poo_database_url)

target_metadata = PooBase.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode: emit SQL without a DB connection."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(url=url, target_metadata=target_metadata, literal_binds=True)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode: connect to the DB and apply them."""
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # migrations need no connection pooling
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
@@ -0,0 +1,32 @@
|
||||
"""poo baseline
|
||||
|
||||
Revision ID: 20260420_01_poo_baseline
|
||||
Revises:
|
||||
Create Date: 2026-04-20 00:00:00.000000
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
revision: str = "20260420_01_poo_baseline"
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.create_table(
|
||||
"poo_records",
|
||||
sa.Column("timestamp", sa.Text(), nullable=False),
|
||||
sa.Column("status", sa.Text(), nullable=False),
|
||||
sa.Column("latitude", sa.Float(), nullable=False),
|
||||
sa.Column("longitude", sa.Float(), nullable=False),
|
||||
sa.PrimaryKeyConstraint("timestamp"),
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_table("poo_records")
|
||||
@@ -0,0 +1,2 @@
|
||||
"""Application package for the home automation backend."""
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
"""API package."""
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
"""Route modules."""
|
||||
|
||||
@@ -0,0 +1,234 @@
|
||||
# Auth routes: login page/form, password change, and logout for the single
# admin user. Uses server-side sessions plus CSRF protection on all forms.
import logging
from pathlib import Path

from fastapi import APIRouter, Depends, Form, Request, status
from fastapi.responses import HTMLResponse, RedirectResponse, Response
from fastapi.templating import Jinja2Templates
from sqlalchemy.orm import Session

from app.config import Settings
from app.dependencies import get_app_settings, get_auth_db, get_current_auth_session
from app.services.auth import (
    AuthenticatedSession,
    authenticate_user,
    change_password,
    create_session,
    AuthPasswordChangeError,
    issue_login_csrf_token,
    revoke_session,
    validate_csrf_token,
)
from app.services.config_page import build_config_sections, is_ticktick_oauth_ready

logger = logging.getLogger(__name__)
# Templates directory lives two levels above this module (package root).
templates = Jinja2Templates(directory=str(Path(__file__).resolve().parents[2] / "templates"))
router = APIRouter(tags=["auth"])

# Cookie carrying the double-submit CSRF token for the (pre-auth) login form.
LOGIN_CSRF_COOKIE_NAME = "login_csrf"
|
||||
|
||||
|
||||
@router.get("/login", response_class=HTMLResponse)
|
||||
def login_page(
|
||||
request: Request,
|
||||
settings: Settings = Depends(get_app_settings),
|
||||
current_auth: AuthenticatedSession | None = Depends(get_current_auth_session),
|
||||
) -> Response:
|
||||
if current_auth is not None:
|
||||
return RedirectResponse(url="/config", status_code=status.HTTP_303_SEE_OTHER)
|
||||
|
||||
csrf_token = issue_login_csrf_token()
|
||||
response = templates.TemplateResponse(
|
||||
request,
|
||||
"login.html",
|
||||
{
|
||||
"app_name": settings.app_name,
|
||||
"app_env": settings.app_env,
|
||||
"csrf_token": csrf_token,
|
||||
"error_message": None,
|
||||
},
|
||||
)
|
||||
_set_login_csrf_cookie(response, settings=settings, token=csrf_token)
|
||||
return response
|
||||
|
||||
|
||||
@router.post("/login", response_class=HTMLResponse)
|
||||
def login_submit(
|
||||
request: Request,
|
||||
username: str = Form(),
|
||||
password: str = Form(),
|
||||
csrf_token: str = Form(),
|
||||
session: Session = Depends(get_auth_db),
|
||||
settings: Settings = Depends(get_app_settings),
|
||||
) -> Response:
|
||||
cookie_csrf_token = request.cookies.get(LOGIN_CSRF_COOKIE_NAME)
|
||||
if not validate_csrf_token(expected=cookie_csrf_token, actual=csrf_token):
|
||||
logger.warning("Rejected login attempt due to CSRF validation failure")
|
||||
return _render_login_error(
|
||||
request,
|
||||
settings=settings,
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
error_message="invalid login request",
|
||||
)
|
||||
|
||||
user = authenticate_user(session, username=username, password=password)
|
||||
if user is None:
|
||||
return _render_login_error(
|
||||
request,
|
||||
settings=settings,
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
error_message="invalid username or password",
|
||||
)
|
||||
|
||||
auth_session, raw_token = create_session(session, user=user, settings=settings)
|
||||
response = RedirectResponse(url="/config", status_code=status.HTTP_303_SEE_OTHER)
|
||||
response.delete_cookie(LOGIN_CSRF_COOKIE_NAME, path="/login")
|
||||
response.set_cookie(
|
||||
key=settings.auth_session_cookie_name,
|
||||
value=raw_token,
|
||||
max_age=settings.auth_session_ttl_hours * 3600,
|
||||
httponly=True,
|
||||
secure=settings.auth_cookie_secure,
|
||||
samesite="lax",
|
||||
path="/",
|
||||
)
|
||||
logger.info("Created authenticated session for user '%s'", user.username)
|
||||
return response
|
||||
|
||||
|
||||
@router.post("/config/change-password", response_class=HTMLResponse)
def change_password_submit(
    request: Request,
    current_password: str = Form(),
    new_password: str = Form(),
    confirm_password: str = Form(),
    csrf_token: str = Form(),
    session: Session = Depends(get_auth_db),
    settings: Settings = Depends(get_app_settings),
    current_auth: AuthenticatedSession | None = Depends(get_current_auth_session),
) -> Response:
    """Handle the password-change form on the config page.

    Requires an authenticated session and a CSRF token bound to that
    session. On success redirects back to /config; on failure re-renders
    the config page with an error and HTTP 400.
    """
    if current_auth is None:
        return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)

    # CSRF token is tied to the session rather than a cookie here.
    if not validate_csrf_token(expected=current_auth.session.csrf_token, actual=csrf_token):
        logger.warning("Rejected password change attempt due to CSRF validation failure")
        return _render_config_page(
            request,
            settings=settings,
            auth_db_session=session,
            current_auth=current_auth,
            status_code=status.HTTP_400_BAD_REQUEST,
            password_change_error="invalid password change request",
        )

    try:
        change_password(
            session,
            user=current_auth.user,
            current_password=current_password,
            new_password=new_password,
            confirm_password=confirm_password,
        )
    except AuthPasswordChangeError as exc:
        # The specific reason goes to the log; the page shows a generic
        # message so validation details are not leaked to the browser.
        logger.info(
            "Rejected password change for user '%s': %s",
            current_auth.user.username,
            exc,
        )
        return _render_config_page(
            request,
            settings=settings,
            auth_db_session=session,
            current_auth=current_auth,
            status_code=status.HTTP_400_BAD_REQUEST,
            password_change_error="password change failed",
        )

    logger.info("Password updated for user '%s'", current_auth.user.username)
    return RedirectResponse(url="/config", status_code=status.HTTP_303_SEE_OTHER)
|
||||
|
||||
|
||||
@router.post("/logout")
def logout(
    request: Request,
    csrf_token: str = Form(),
    session: Session = Depends(get_auth_db),
    settings: Settings = Depends(get_app_settings),
    current_auth: AuthenticatedSession | None = Depends(get_current_auth_session),
) -> RedirectResponse:
    """Revoke the current session and redirect to the login page.

    The session is only revoked when the caller is authenticated and posts
    the correct session-bound CSRF token; otherwise the attempt is logged.
    Either way the session cookie is cleared and the user lands on /login.
    """
    if current_auth is not None and validate_csrf_token(
        expected=current_auth.session.csrf_token, actual=csrf_token
    ):
        revoke_session(session, auth_session=current_auth.session)
        logger.info("Revoked authenticated session for user '%s'", current_auth.user.username)
    else:
        logger.warning("Rejected logout request due to missing session or invalid CSRF token")

    # Always clear the cookie, even if no server-side session was revoked.
    response = RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)
    response.delete_cookie(settings.auth_session_cookie_name, path="/")
    return response
|
||||
|
||||
|
||||
def _render_login_error(
    request: Request,
    *,
    settings: Settings,
    status_code: int,
    error_message: str,
) -> HTMLResponse:
    """Render the login page with an error banner and a fresh CSRF token.

    A new login CSRF token is issued on every render (and stored in the
    /login-scoped cookie) so a failed attempt cannot reuse the old token.
    """
    csrf_token = issue_login_csrf_token()
    response = templates.TemplateResponse(
        request,
        "login.html",
        {
            "app_name": settings.app_name,
            "app_env": settings.app_env,
            "csrf_token": csrf_token,
            "error_message": error_message,
        },
        status_code=status_code,
    )
    _set_login_csrf_cookie(response, settings=settings, token=csrf_token)
    return response
|
||||
|
||||
|
||||
def _set_login_csrf_cookie(response: HTMLResponse, *, settings: Settings, token: str) -> None:
    """Attach the login CSRF token cookie to a login-page response.

    The cookie is scoped to /login, HTTP-only, and expires after 30 minutes
    (max_age=1800), which bounds how long a rendered login form stays valid.
    """
    response.set_cookie(
        key=LOGIN_CSRF_COOKIE_NAME,
        value=token,
        max_age=1800,
        httponly=True,
        secure=settings.auth_cookie_secure,
        samesite="lax",
        path="/login",
    )
|
||||
|
||||
|
||||
def _render_config_page(
    request: Request,
    *,
    settings: Settings,
    auth_db_session: Session,
    current_auth: AuthenticatedSession,
    status_code: int,
    password_change_error: str | None,
) -> HTMLResponse:
    """Render config.html for error paths of the password-change flow.

    Builds the full template context expected by config.html; all fields
    unrelated to the password-change error are reset to their neutral
    values (no config error, nothing saved, no TickTick OAuth messages).
    """
    return templates.TemplateResponse(
        request,
        "config.html",
        {
            "app_name": settings.app_name,
            "app_env": settings.app_env,
            "current_username": current_auth.user.username,
            "csrf_token": current_auth.session.csrf_token,
            "force_password_change": current_auth.user.force_password_change,
            "password_change_error": password_change_error,
            "config_error": None,
            "config_saved": False,
            "config_sections": build_config_sections(auth_db_session, settings),
            "ticktick_oauth_ready": is_ticktick_oauth_ready(settings),
            "ticktick_redirect_uri": settings.ticktick_redirect_uri,
            "ticktick_oauth_notice": None,
            "ticktick_oauth_error": None,
        },
        status_code=status_code,
    )
|
||||
@@ -0,0 +1,58 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends, Request, status
|
||||
from fastapi.responses import PlainTextResponse, Response
|
||||
from pydantic import ValidationError
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.dependencies import get_db, get_ticktick_client
|
||||
from app.integrations.ticktick import TickTickClient, TickTickConfigError, TickTickRequestError
|
||||
from app.schemas.homeassistant import HomeAssistantPublishEnvelope
|
||||
from app.services.homeassistant_inbound import (
|
||||
UnsupportedHomeAssistantMessage,
|
||||
handle_homeassistant_message,
|
||||
)
|
||||
|
||||
router = APIRouter(tags=["homeassistant"])
|
||||
logger = logging.getLogger(__name__)
|
||||
BAD_REQUEST_MESSAGE = "bad request"
|
||||
INTERNAL_SERVER_ERROR_MESSAGE = "internal server error"
|
||||
|
||||
|
||||
@router.post("/homeassistant/publish")
async def publish_from_homeassistant(
    request: Request,
    db: Session = Depends(get_db),
    ticktick_client: TickTickClient = Depends(get_ticktick_client),
) -> Response:
    """Accept a Home Assistant publish envelope and dispatch it.

    The raw body is parsed and validated manually (rather than via a typed
    body parameter) so malformed input can be answered with plain-text 400
    responses instead of FastAPI's default 422 JSON.
    """
    try:
        raw_payload = await request.body()
        data = json.loads(raw_payload)
        envelope = HomeAssistantPublishEnvelope.model_validate(data)
        handle_homeassistant_message(db, envelope, ticktick_client)
    except json.JSONDecodeError as exc:
        logger.warning("Rejected Home Assistant publish request due to invalid JSON: %s", exc)
        return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST)
    except ValidationError as exc:
        logger.warning(
            "Rejected Home Assistant publish request due to validation failure: %s", exc
        )
        return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST)
    except UnsupportedHomeAssistantMessage as exc:
        # An unknown target/action is treated as a server-side problem (500),
        # not a client error.
        logger.warning("Home Assistant publish target/action unsupported: %s", exc)
        return PlainTextResponse(
            INTERNAL_SERVER_ERROR_MESSAGE,
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
    except (TickTickConfigError, TickTickRequestError, RuntimeError) as exc:
        logger.warning("Home Assistant publish request failed during TickTick handling: %s", exc)
        return PlainTextResponse(
            INTERNAL_SERVER_ERROR_MESSAGE,
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
    except ValueError as exc:
        # Must come after JSONDecodeError/ValidationError, which subclass it.
        logger.warning("Rejected Home Assistant publish request due to invalid content: %s", exc)
        return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST)

    return Response(status_code=status.HTTP_200_OK)
|
||||
@@ -0,0 +1,35 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends, Request, status
|
||||
from fastapi.responses import PlainTextResponse, Response
|
||||
from pydantic import ValidationError
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.dependencies import get_db
|
||||
from app.schemas.location import LocationRecordRequest
|
||||
from app.services.location import record_location
|
||||
|
||||
router = APIRouter(tags=["location"])
|
||||
logger = logging.getLogger(__name__)
|
||||
BAD_REQUEST_MESSAGE = "bad request"
|
||||
|
||||
|
||||
@router.post("/location/record")
async def create_location_record(request: Request, db: Session = Depends(get_db)) -> Response:
    """Store one location record posted as raw JSON.

    Parsing and validation happen manually so any malformed payload is
    answered with a plain-text 400 instead of FastAPI's default 422.
    """
    try:
        raw_payload = await request.body()
        data = json.loads(raw_payload)
        payload = LocationRecordRequest.model_validate(data)
        record_location(db, payload)
    except json.JSONDecodeError as exc:
        logger.warning("Rejected location request due to invalid JSON: %s", exc)
        return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST)
    except ValidationError as exc:
        logger.warning("Rejected location request due to payload validation failure: %s", exc)
        return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST)
    except ValueError as exc:
        # Must come after the two subclasses above; catches bad numeric input.
        logger.warning("Rejected location request due to invalid numeric input: %s", exc)
        return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST)

    return Response(status_code=status.HTTP_200_OK)
|
||||
@@ -0,0 +1,151 @@
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import APIRouter, Depends, Request, status
|
||||
from fastapi.responses import HTMLResponse, RedirectResponse, Response
|
||||
from fastapi.templating import Jinja2Templates
|
||||
|
||||
from app.config import Settings, get_settings
|
||||
from app.dependencies import get_app_settings, get_auth_db, get_current_auth_session
|
||||
from app.services.auth import AuthenticatedSession
|
||||
from app.services.config_page import (
|
||||
ConfigSaveError,
|
||||
build_config_sections,
|
||||
is_ticktick_oauth_ready,
|
||||
save_config_updates,
|
||||
)
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
templates = Jinja2Templates(directory=str(Path(__file__).resolve().parents[2] / "templates"))
|
||||
router = APIRouter(tags=["pages"])
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _ticktick_oauth_notice(status_value: str | None) -> tuple[str | None, str | None]:
|
||||
if status_value == "success":
|
||||
return "TickTick authorization completed successfully.", None
|
||||
if status_value == "invalid-state":
|
||||
return None, "TickTick authorization failed due to invalid OAuth state. Start the flow again."
|
||||
if status_value == "invalid-callback":
|
||||
return None, "TickTick authorization callback was missing required parameters."
|
||||
if status_value == "failed":
|
||||
return None, "TickTick authorization failed. Check server logs for the provider response and verify TickTick app credentials and redirect URI."
|
||||
return None, None
|
||||
|
||||
|
||||
@router.get("/", response_class=HTMLResponse)
def home(
    request: Request,
    current_auth: AuthenticatedSession | None = Depends(get_current_auth_session),
) -> RedirectResponse:
    """Root page: send authenticated users to /config, others to /login."""
    target = "/config" if current_auth is not None else "/login"
    return RedirectResponse(url=target, status_code=status.HTTP_303_SEE_OTHER)
|
||||
|
||||
|
||||
@router.get("/admin", response_class=HTMLResponse)
def admin_redirect(
    request: Request,
    current_auth: AuthenticatedSession | None = Depends(get_current_auth_session),
) -> RedirectResponse:
    """Legacy /admin path: behaves exactly like the root redirect."""
    target = "/config" if current_auth is not None else "/login"
    return RedirectResponse(url=target, status_code=status.HTTP_303_SEE_OTHER)
|
||||
|
||||
|
||||
@router.get("/config", response_class=HTMLResponse)
def config_page(
    request: Request,
    auth_db_session: Session = Depends(get_auth_db),
    settings: Settings = Depends(get_app_settings),
    current_auth: AuthenticatedSession | None = Depends(get_current_auth_session),
) -> Response:
    """Render the configuration page for an authenticated user.

    Unauthenticated visitors are redirected to /login. Query parameters
    feed two banners: ``saved=1`` shows a "saved" confirmation and
    ``ticktick_oauth=...`` shows the OAuth flow outcome.
    """
    if current_auth is None:
        return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)

    ticktick_oauth_notice, ticktick_oauth_error = _ticktick_oauth_notice(
        request.query_params.get("ticktick_oauth")
    )

    context = {
        "app_name": settings.app_name,
        "app_env": settings.app_env,
        "current_username": current_auth.user.username,
        "csrf_token": current_auth.session.csrf_token,
        "force_password_change": current_auth.user.force_password_change,
        "password_change_error": None,
        "config_error": None,
        "config_saved": request.query_params.get("saved") == "1",
        "config_sections": build_config_sections(auth_db_session, settings),
        "ticktick_oauth_ready": is_ticktick_oauth_ready(settings),
        "ticktick_redirect_uri": settings.ticktick_redirect_uri,
        "ticktick_oauth_notice": ticktick_oauth_notice,
        "ticktick_oauth_error": ticktick_oauth_error,
    }
    return templates.TemplateResponse(request, "config.html", context)
|
||||
|
||||
|
||||
@router.post("/config", response_class=HTMLResponse)
async def config_submit(
    request: Request,
    auth_db_session: Session = Depends(get_auth_db),
    settings: Settings = Depends(get_app_settings),
    current_auth: AuthenticatedSession | None = Depends(get_current_auth_session),
) -> Response:
    """Persist configuration updates submitted from the /config form.

    Requires an authenticated session and a CSRF token matching the one
    bound to that session. On success redirects to /config?saved=1; on CSRF
    or validation failure re-renders the page with an error banner and
    HTTP 400. The two failure paths previously duplicated the 14-key
    template context; it is now built once in a local helper.
    """
    if current_auth is None:
        return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)

    def _invalid_submission(active_settings: Settings, config_error: str) -> Response:
        # Shared error rendering for both the CSRF-failure and
        # save-failure paths; only the settings object and message differ.
        context = {
            "app_name": active_settings.app_name,
            "app_env": active_settings.app_env,
            "current_username": current_auth.user.username,
            "csrf_token": current_auth.session.csrf_token,
            "force_password_change": current_auth.user.force_password_change,
            "password_change_error": None,
            "config_error": config_error,
            "config_saved": False,
            "config_sections": build_config_sections(auth_db_session, active_settings),
            "ticktick_oauth_ready": is_ticktick_oauth_ready(active_settings),
            "ticktick_redirect_uri": active_settings.ticktick_redirect_uri,
            "ticktick_oauth_notice": None,
            "ticktick_oauth_error": None,
        }
        return templates.TemplateResponse(
            request,
            "config.html",
            context,
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    form = await request.form()
    csrf_token = form.get("csrf_token")
    if csrf_token != current_auth.session.csrf_token:
        logger.warning("Rejected config update due to CSRF validation failure")
        return _invalid_submission(settings, "invalid config update request")

    try:
        save_config_updates(auth_db_session, dict(form), settings)
    except ConfigSaveError:
        logger.warning("Rejected config update due to invalid submitted values")
        # Re-read settings so the re-rendered page reflects current values
        # (matches the original behavior of refreshing after a failed save).
        return _invalid_submission(get_settings(), "invalid config submission")

    return RedirectResponse(url="/config?saved=1", status_code=status.HTTP_303_SEE_OTHER)
|
||||
@@ -0,0 +1,76 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends, Request, status
|
||||
from fastapi.responses import PlainTextResponse, Response
|
||||
from pydantic import ValidationError
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.config import Settings
|
||||
from app.dependencies import get_app_settings, get_homeassistant_client, get_poo_db
|
||||
from app.integrations.homeassistant import HomeAssistantClient
|
||||
from app.schemas.poo import PooRecordRequest
|
||||
from app.services.poo import publish_latest_poo_status, record_poo
|
||||
|
||||
router = APIRouter(tags=["poo"])
|
||||
logger = logging.getLogger(__name__)
|
||||
BAD_REQUEST_MESSAGE = "bad request"
|
||||
INTERNAL_SERVER_ERROR_MESSAGE = "internal server error"
|
||||
|
||||
|
||||
@router.post("/poo/record")
async def create_poo_record(
    request: Request,
    db: Session = Depends(get_poo_db),
    settings: Settings = Depends(get_app_settings),
    homeassistant_client: HomeAssistantClient = Depends(get_homeassistant_client),
) -> Response:
    """Store one poo record posted as raw JSON and notify Home Assistant.

    Manual parse/validate keeps error responses as plain-text 400s rather
    than FastAPI's default 422; any other failure (including Home Assistant
    publishing inside record_poo) is mapped to a plain-text 500.
    """
    try:
        raw_payload = await request.body()
        data = json.loads(raw_payload)
        payload = PooRecordRequest.model_validate(data)
        record_poo(
            db,
            payload,
            settings=settings,
            homeassistant_client=homeassistant_client,
        )
    except json.JSONDecodeError as exc:
        logger.warning("Rejected poo record request due to invalid JSON: %s", exc)
        return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST)
    except ValidationError as exc:
        logger.warning("Rejected poo record request due to validation failure: %s", exc)
        return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST)
    except ValueError as exc:
        # After the two ValueError subclasses above; catches bad numerics.
        logger.warning("Rejected poo record request due to invalid numeric input: %s", exc)
        return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST)
    except Exception as exc:
        # Broad catch-all: storage/publish failures become a 500 instead of
        # an unhandled server error.
        logger.warning("Failed to store poo record: %s", exc)
        return PlainTextResponse(
            INTERNAL_SERVER_ERROR_MESSAGE,
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )

    return Response(status_code=status.HTTP_200_OK)
|
||||
|
||||
|
||||
@router.get("/poo/latest")
def notify_latest_poo(
    db: Session = Depends(get_poo_db),
    settings: Settings = Depends(get_app_settings),
    homeassistant_client: HomeAssistantClient = Depends(get_homeassistant_client),
) -> Response:
    """Re-publish the latest stored poo status to Home Assistant.

    Returns an empty 200 on success; any failure is logged and mapped to a
    plain-text 500.
    """
    try:
        publish_latest_poo_status(
            session=db,
            settings=settings,
            homeassistant_client=homeassistant_client,
        )
    except Exception as exc:
        logger.warning("Failed to publish latest poo status: %s", exc)
        return PlainTextResponse(
            INTERNAL_SERVER_ERROR_MESSAGE,
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )

    return Response(status_code=status.HTTP_200_OK)
|
||||
@@ -0,0 +1,11 @@
|
||||
from fastapi import APIRouter
|
||||
|
||||
from app.schemas.health import StatusResponse
|
||||
|
||||
router = APIRouter(tags=["system"])
|
||||
|
||||
|
||||
@router.get("/status", response_model=StatusResponse)
def get_status() -> StatusResponse:
    """Liveness probe: always reports the service as up."""
    return StatusResponse(status="ok")
|
||||
|
||||
@@ -0,0 +1,79 @@
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends, Request, status
|
||||
from fastapi.responses import PlainTextResponse, RedirectResponse, Response
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.config import Settings
|
||||
from app.dependencies import (
|
||||
get_app_settings,
|
||||
get_auth_db,
|
||||
get_current_auth_session,
|
||||
get_ticktick_client,
|
||||
)
|
||||
from app.integrations.ticktick import TickTickAuthError, TickTickClient, TickTickConfigError, TickTickRequestError
|
||||
from app.services.auth import AuthenticatedSession
|
||||
from app.services.config_page import save_config_value
|
||||
|
||||
router = APIRouter(tags=["ticktick"])
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@router.get("/ticktick/auth/start")
def start_ticktick_auth(
    current_auth: AuthenticatedSession | None = Depends(get_current_auth_session),
    ticktick_client: TickTickClient = Depends(get_ticktick_client),
) -> Response:
    """Begin the TickTick OAuth flow by redirecting to the provider.

    Requires an authenticated session; if the TickTick client id/secret are
    not configured, responds with a plain-text 400 instead.
    """
    if current_auth is None:
        return RedirectResponse(url="/login", status_code=status.HTTP_303_SEE_OTHER)

    try:
        authorization_url = ticktick_client.build_authorization_url()
    except TickTickConfigError as exc:
        logger.warning("Rejected TickTick OAuth start due to incomplete configuration: %s", exc)
        # Use the named status constant for consistency with the rest of
        # the module (was a bare 400 literal).
        return PlainTextResponse(
            "TickTick integration is not configured",
            status_code=status.HTTP_400_BAD_REQUEST,
        )

    return RedirectResponse(url=authorization_url, status_code=status.HTTP_303_SEE_OTHER)
|
||||
|
||||
|
||||
@router.get("/ticktick/auth/code")
def handle_ticktick_auth_code(
    request: Request,
    auth_db_session: Session = Depends(get_auth_db),
    settings: Settings = Depends(get_app_settings),
    ticktick_client: TickTickClient = Depends(get_ticktick_client),
) -> Response:
    """TickTick OAuth callback: exchange the authorization code for a token.

    On success the token is persisted under TICKTICK_TOKEN. Every outcome
    redirects back to /config with a ``ticktick_oauth`` status flag that the
    config page turns into a notice or error banner.
    """
    code = request.query_params.get("code", "")
    state = request.query_params.get("state", "")

    if not code or not state:
        return RedirectResponse(
            url="/config?ticktick_oauth=invalid-callback",
            status_code=status.HTTP_303_SEE_OTHER,
        )

    try:
        token = ticktick_client.exchange_authorization_code(code=code, state=state)
        save_config_value(
            auth_db_session,
            env_name="TICKTICK_TOKEN",
            value=token,
            bootstrap_settings=settings,
        )
    except TickTickAuthError as exc:
        # State mismatch: likely CSRF or an expired flow; user must restart.
        logger.warning("Rejected TickTick OAuth callback due to invalid state: %s", exc)
        return RedirectResponse(
            url="/config?ticktick_oauth=invalid-state",
            status_code=status.HTTP_303_SEE_OTHER,
        )
    except (TickTickConfigError, TickTickRequestError, ValueError) as exc:
        logger.warning("TickTick OAuth callback failed: %s", exc)
        return RedirectResponse(
            url="/config?ticktick_oauth=failed",
            status_code=status.HTTP_303_SEE_OTHER,
        )

    return RedirectResponse(
        url="/config?ticktick_oauth=success",
        status_code=status.HTTP_303_SEE_OTHER,
    )
|
||||
@@ -0,0 +1,53 @@
|
||||
from collections.abc import Generator
|
||||
from functools import lru_cache
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import DeclarativeBase, Session, sessionmaker
|
||||
|
||||
from app.config import get_settings
|
||||
|
||||
|
||||
class AuthBase(DeclarativeBase):
    """Declarative base for ORM models stored in the auth/app database."""

    pass
|
||||
|
||||
|
||||
def _build_connect_args(database_url: str) -> dict[str, object]:
|
||||
connect_args: dict[str, object] = {}
|
||||
if database_url.startswith("sqlite"):
|
||||
connect_args["check_same_thread"] = False
|
||||
return connect_args
|
||||
|
||||
|
||||
@lru_cache
def _get_auth_engine(database_url: str):
    # Cached per URL so repeated lookups reuse one engine (and its pool).
    return create_engine(database_url, connect_args=_build_connect_args(database_url))
|
||||
|
||||
|
||||
@lru_cache
def _get_auth_session_local(database_url: str):
    # Cached per URL; one sessionmaker per engine.
    engine = _get_auth_engine(database_url)
    return sessionmaker(bind=engine, autoflush=False, autocommit=False, class_=Session)
|
||||
|
||||
|
||||
def get_auth_engine():
    """Return the (cached) engine for the configured app database URL."""
    return _get_auth_engine(get_settings().app_database_url)
|
||||
|
||||
|
||||
def get_auth_session_local():
    """Return the (cached) sessionmaker for the configured app database URL."""
    return _get_auth_session_local(get_settings().app_database_url)
|
||||
|
||||
|
||||
def reset_auth_db_caches() -> None:
    """Drop the cached engine and sessionmaker (e.g. after settings change)."""
    _get_auth_session_local.cache_clear()
    _get_auth_engine.cache_clear()
|
||||
|
||||
|
||||
def get_auth_db_session() -> Generator[Session, None, None]:
    """Yield an auth-database session, closing it when the caller is done.

    Written as a generator so it can back a FastAPI dependency: the session
    is closed in ``finally`` even if the request handler raises.
    """
    session_local = get_auth_session_local()
    session = session_local()
    try:
        yield session
    finally:
        session.close()
|
||||
@@ -0,0 +1,96 @@
|
||||
from functools import lru_cache
|
||||
from pathlib import Path
|
||||
|
||||
from pydantic import computed_field
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Application settings loaded from the environment / .env file."""

    # Core application identity and environment.
    app_name: str = "Home Automation Backend (Python)"
    app_env: str = "production"
    app_debug: bool = False
    app_hostname: str = "localhost:8000"
    app_database_url: str = "sqlite:///./data/app.db"

    # Per-feature database URLs.
    location_database_url: str = "sqlite:///./data/locationRecorder.db"
    poo_database_url: str = "sqlite:///./data/pooRecorder.db"

    # TickTick integration credentials (empty = integration disabled).
    ticktick_client_id: str = ""
    ticktick_client_secret: str = ""
    ticktick_token: str = ""

    # Home Assistant integration (empty base URL/token = disabled).
    home_assistant_base_url: str = ""
    home_assistant_auth_token: str = ""
    home_assistant_timeout_seconds: float = 1.0
    home_assistant_action_task_project_id: str = ""
    poo_webhook_id: str = ""
    poo_sensor_entity_name: str = "sensor.test_poo_status"
    poo_sensor_friendly_name: str = "Poo Status"

    # Authentication bootstrap and session behavior.
    auth_bootstrap_username: str = "admin"
    auth_bootstrap_password: str = "admin"
    auth_session_cookie_name: str = "home_automation_session"
    auth_session_ttl_hours: int = 12
    # Fixed: default to None so the environment-based fallback in
    # auth_cookie_secure ("secure unless development") actually applies.
    # Defaulting to True made that fallback unreachable and contradicted
    # the documented "leave unset for built-in defaults" behavior.
    auth_cookie_secure_override: bool | None = None

    model_config = SettingsConfigDict(
        env_file=".env",
        env_file_encoding="utf-8",
        case_sensitive=False,
        extra="ignore",
    )

    @computed_field
    @property
    def is_development(self) -> bool:
        """True when APP_ENV is 'development' (case-insensitive)."""
        return self.app_env.lower() == "development"

    @computed_field
    @property
    def app_base_url(self) -> str:
        """Public base URL derived from APP_HOSTNAME; http only in development."""
        hostname = self.app_hostname.strip().rstrip("/")
        if not hostname:
            return ""
        scheme = "http" if self.is_development else "https"
        return f"{scheme}://{hostname}"

    @computed_field
    @property
    def ticktick_redirect_uri(self) -> str:
        """OAuth callback URI derived from the base URL ('' when no hostname)."""
        if not self.app_base_url:
            return ""
        return f"{self.app_base_url}/ticktick/auth/code"

    @staticmethod
    def _sqlite_path_from_url(database_url: str) -> Path | None:
        """Extract the filesystem path from a sqlite:/// URL, else None."""
        prefix = "sqlite:///"
        if not database_url.startswith(prefix):
            return None
        raw_path = database_url[len(prefix) :]
        return Path(raw_path)

    @computed_field
    @property
    def location_sqlite_path(self) -> Path | None:
        """SQLite file path of the location database (None if not SQLite)."""
        return self._sqlite_path_from_url(self.location_database_url)

    @computed_field
    @property
    def app_sqlite_path(self) -> Path | None:
        """SQLite file path of the app database (None if not SQLite)."""
        return self._sqlite_path_from_url(self.app_database_url)

    @computed_field
    @property
    def poo_sqlite_path(self) -> Path | None:
        """SQLite file path of the poo database (None if not SQLite)."""
        return self._sqlite_path_from_url(self.poo_database_url)

    @computed_field
    @property
    def auth_cookie_secure(self) -> bool:
        """Whether session cookies require HTTPS.

        An explicit AUTH_COOKIE_SECURE_OVERRIDE wins; otherwise cookies are
        secure everywhere except development.
        """
        if self.auth_cookie_secure_override is not None:
            return self.auth_cookie_secure_override
        return not self.is_development
|
||||
|
||||
|
||||
@lru_cache
def get_settings() -> Settings:
    """Return the process-wide Settings instance (constructed once)."""
    return Settings()
|
||||
@@ -0,0 +1,28 @@
|
||||
from collections.abc import Generator
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import DeclarativeBase, Session, sessionmaker
|
||||
|
||||
from app.config import get_settings
|
||||
|
||||
|
||||
class Base(DeclarativeBase):
    """Declarative base for ORM models stored in the location database."""

    pass
|
||||
|
||||
|
||||
# Module-level engine and session factory for the location-recorder database.
settings = get_settings()

# SQLite enforces single-thread use by default; disable the check so
# sessions can be used from worker threads.
connect_args: dict[str, object] = {}
if settings.location_database_url.startswith("sqlite"):
    connect_args["check_same_thread"] = False

engine = create_engine(settings.location_database_url, connect_args=connect_args)
SessionLocal = sessionmaker(bind=engine, autoflush=False, autocommit=False, class_=Session)
|
||||
|
||||
|
||||
def get_db_session() -> Generator[Session, None, None]:
    """Yield a location-database session, closing it when the caller is done."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
|
||||
@@ -0,0 +1,46 @@
|
||||
from collections.abc import Generator
|
||||
|
||||
from fastapi import Depends, Request
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.auth_db import get_auth_db_session
|
||||
from app.config import Settings, get_settings
|
||||
from app.db import get_db_session
|
||||
from app.integrations.homeassistant import HomeAssistantClient
|
||||
from app.integrations.ticktick import TickTickClient
|
||||
from app.poo_db import get_poo_db_session
|
||||
from app.services.auth import AuthenticatedSession, get_authenticated_session
|
||||
from app.services.config_page import build_runtime_settings
|
||||
|
||||
|
||||
def get_auth_db() -> Generator[Session, None, None]:
    """FastAPI dependency: yield an auth-database session."""
    yield from get_auth_db_session()
|
||||
|
||||
|
||||
def get_app_settings(session: Session = Depends(get_auth_db)) -> Settings:
    """FastAPI dependency: bootstrap settings merged with DB-stored overrides."""
    return build_runtime_settings(session, get_settings())
|
||||
|
||||
|
||||
def get_db() -> Generator[Session, None, None]:
    """FastAPI dependency: yield a location-database session."""
    yield from get_db_session()
|
||||
|
||||
|
||||
def get_poo_db() -> Generator[Session, None, None]:
    """FastAPI dependency: yield a poo-database session."""
    yield from get_poo_db_session()
|
||||
|
||||
|
||||
def get_homeassistant_client(settings: Settings = Depends(get_app_settings)) -> HomeAssistantClient:
    """FastAPI dependency: a Home Assistant client built from runtime settings."""
    return HomeAssistantClient(settings)
|
||||
|
||||
|
||||
def get_ticktick_client(settings: Settings = Depends(get_app_settings)) -> TickTickClient:
    """FastAPI dependency: a TickTick client built from runtime settings."""
    return TickTickClient(settings)
|
||||
|
||||
|
||||
def get_current_auth_session(
    request: Request,
    session: Session = Depends(get_auth_db),
    settings: Settings = Depends(get_app_settings),
) -> AuthenticatedSession | None:
    """FastAPI dependency: resolve the session cookie to an authenticated
    session, or None when the cookie is absent or invalid."""
    raw_token = request.cookies.get(settings.auth_session_cookie_name)
    return get_authenticated_session(session, raw_token=raw_token)
|
||||
@@ -0,0 +1,2 @@
|
||||
"""External integration placeholders for future migration."""
|
||||
|
||||
@@ -0,0 +1,108 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any
|
||||
from urllib import error, parse, request
|
||||
|
||||
from app.config import Settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
SUCCESS_STATUS_CODES = {200, 201}
|
||||
|
||||
|
||||
class HomeAssistantConfigError(RuntimeError):
    """Raised when required Home Assistant outbound configuration is missing."""
|
||||
|
||||
|
||||
class HomeAssistantRequestError(RuntimeError):
    """Raised when a Home Assistant outbound HTTP request fails."""
|
||||
|
||||
|
||||
@dataclass(slots=True)
class HomeAssistantClient:
    """Thin urllib-based client for Home Assistant's REST API.

    Requires ``home_assistant_base_url`` and ``home_assistant_auth_token``
    in settings; all requests are bearer-authenticated JSON POSTs.
    """

    settings: Settings
    # None means "use settings.home_assistant_timeout_seconds" (see below).
    timeout_seconds: float | None = field(default=None)

    def __post_init__(self) -> None:
        # Resolve the default timeout from settings when not given explicitly.
        if self.timeout_seconds is None:
            self.timeout_seconds = self.settings.home_assistant_timeout_seconds

    def is_configured(self) -> bool:
        """True when both the base URL and auth token are set."""
        return bool(self.settings.home_assistant_base_url and self.settings.home_assistant_auth_token)

    def publish_sensor(
        self,
        *,
        entity_id: str,
        state: str,
        attributes: dict[str, Any] | None = None,
    ) -> None:
        """Create or update a sensor state via POST /api/states/{entity_id}.

        Raises HomeAssistantConfigError when unconfigured, ValueError for an
        empty entity_id, and HomeAssistantRequestError on HTTP failure.
        """
        self._require_config()
        if not entity_id:
            raise ValueError("entity_id must not be empty")

        payload = {
            "entity_id": entity_id,
            "state": state,
            "attributes": attributes or {},
        }
        self._post_json(f"/api/states/{entity_id}", payload, operation="publish_sensor")

    def trigger_webhook(self, *, webhook_id: str, body: Any) -> None:
        """Fire a Home Assistant webhook via POST /api/webhook/{webhook_id}.

        Raises HomeAssistantConfigError when unconfigured, ValueError for an
        empty webhook_id, and HomeAssistantRequestError on HTTP failure.
        """
        self._require_config()
        if not webhook_id:
            raise ValueError("webhook_id must not be empty")

        self._post_json(f"/api/webhook/{webhook_id}", body, operation="trigger_webhook")

    def _require_config(self) -> None:
        # Fail loudly (rather than silently no-op) when the integration is
        # not configured.
        if self.is_configured():
            return
        raise HomeAssistantConfigError(
            "Home Assistant outbound integration is not configured. "
            "Set HOME_ASSISTANT_BASE_URL and HOME_ASSISTANT_AUTH_TOKEN."
        )

    def _post_json(self, path: str, payload: Any, *, operation: str) -> None:
        # Shared POST helper: serializes payload, sends the bearer-auth
        # request, and normalizes all failures to HomeAssistantRequestError.
        url = self._build_url(path)
        body = json.dumps(payload).encode("utf-8")
        req = request.Request(url, data=body, method="POST")
        req.add_header("Content-Type", "application/json")
        req.add_header("Authorization", f"Bearer {self.settings.home_assistant_auth_token}")

        try:
            with request.urlopen(req, timeout=self.timeout_seconds) as response:
                status_code = response.getcode()
        except error.HTTPError as exc:
            # Server answered with a non-2xx HTTP status.
            logger.warning(
                "Home Assistant outbound %s failed with HTTP %s for %s",
                operation,
                exc.code,
                url,
            )
            raise HomeAssistantRequestError(
                f"Home Assistant outbound {operation} failed with HTTP {exc.code}"
            ) from exc
        except error.URLError as exc:
            # Network-level failure (DNS, refused connection, timeout, ...).
            logger.warning("Home Assistant outbound %s failed for %s: %s", operation, url, exc)
            raise HomeAssistantRequestError(
                f"Home Assistant outbound {operation} failed to reach Home Assistant"
            ) from exc

        # urlopen only raises for >=400; still verify we got 200/201.
        if status_code not in SUCCESS_STATUS_CODES:
            logger.warning(
                "Home Assistant outbound %s returned unexpected status %s for %s",
                operation,
                status_code,
                url,
            )
            raise HomeAssistantRequestError(
                f"Home Assistant outbound {operation} returned unexpected status {status_code}"
            )

    def _build_url(self, path: str) -> str:
        # Percent-encode the path (keeping '/' separators) and join it to the
        # normalized base URL.
        base_url = self.settings.home_assistant_base_url.rstrip("/")
        quoted_path = parse.quote(path.lstrip("/"), safe="/")
        return f"{base_url}/{quoted_path}"
|
||||
@@ -0,0 +1,301 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import secrets
|
||||
import base64
|
||||
from dataclasses import asdict, dataclass, field, fields
|
||||
from typing import Any
|
||||
from urllib import error, parse, request
|
||||
|
||||
from app.config import Settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
TICKTICK_AUTH_URL = "https://ticktick.com/oauth/authorize"
|
||||
TICKTICK_TOKEN_URL = "https://ticktick.com/oauth/token"
|
||||
TICKTICK_OPEN_API_BASE_URL = "https://api.ticktick.com/open/v1"
|
||||
TICKTICK_DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z"
|
||||
AUTH_SCOPE = "tasks:read tasks:write"
|
||||
|
||||
|
||||
class TickTickConfigError(RuntimeError):
    """Raised when TickTick is missing required runtime configuration.

    Covers missing client credentials, missing APP_HOSTNAME (redirect URI),
    and a missing TICKTICK_TOKEN.
    """
|
||||
|
||||
|
||||
class TickTickAuthError(RuntimeError):
    """Raised when TickTick OAuth state validation fails (CSRF protection)."""
|
||||
|
||||
|
||||
class TickTickRequestError(RuntimeError):
    """Raised when a TickTick API request fails or returns an invalid payload."""
|
||||
|
||||
|
||||
@dataclass(slots=True)
class TickTickProject:
    """A TickTick project as returned by the Open API.

    Field names are intentionally camelCase to mirror the API JSON keys so
    payloads can be coerced directly via ``_coerce_dataclass_payload``.
    """

    id: str
    name: str
    color: str | None = None
    sortOrder: int | None = None
    closed: bool | None = None
    groupId: str | None = None
    viewMode: str | None = None
    permission: str | None = None
    kind: str | None = None
|
||||
|
||||
|
||||
@dataclass(slots=True)
class TickTickTask:
    """A TickTick task as exchanged with the Open API.

    Field names are camelCase to match the API JSON (see
    ``_coerce_dataclass_payload`` and ``create_task``, which serialize via
    ``asdict`` dropping ``None`` values). Only ``projectId`` and ``title``
    are required.
    """

    projectId: str
    title: str
    id: str | None = None
    isAllDay: bool | None = None
    completedTime: str | None = None
    content: str | None = None
    desc: str | None = None
    dueDate: str | None = None
    items: list[Any] | None = None
    priority: int | None = None
    reminders: list[str] | None = None
    repeatFlag: str | None = None
    sortOrder: int | None = None
    startDate: str | None = None
    status: int | None = None
    timeZone: str | None = None
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class TickTickAuthStateStore:
|
||||
pending_state: str | None = None
|
||||
|
||||
def issue_state(self) -> str:
|
||||
self.pending_state = secrets.token_hex(6)
|
||||
return self.pending_state
|
||||
|
||||
def matches_state(self, state: str) -> bool:
|
||||
return bool(self.pending_state and state == self.pending_state)
|
||||
|
||||
def consume_state(self, state: str) -> bool:
|
||||
if not self.pending_state or state != self.pending_state:
|
||||
return False
|
||||
self.pending_state = None
|
||||
return True
|
||||
|
||||
def clear(self) -> None:
|
||||
self.pending_state = None
|
||||
|
||||
|
||||
default_auth_state_store = TickTickAuthStateStore()
|
||||
|
||||
|
||||
def _coerce_dataclass_payload(model_type: type, payload: dict[str, Any]) -> Any:
|
||||
allowed_field_names = {item.name for item in fields(model_type)}
|
||||
filtered_payload = {
|
||||
key: value for key, value in payload.items() if key in allowed_field_names
|
||||
}
|
||||
return model_type(**filtered_payload)
|
||||
|
||||
|
||||
@dataclass(slots=True)
class TickTickClient:
    """Client for the TickTick OAuth flow and Open API (projects and tasks).

    Shares a module-level auth-state store by default so the pending OAuth
    ``state`` survives across request-scoped client instances.
    """

    settings: Settings
    auth_state_store: TickTickAuthStateStore = field(default_factory=lambda: default_auth_state_store)
    # Per-request timeout passed to urlopen.
    timeout_seconds: float = 10.0

    def is_configured(self) -> bool:
        """True when both OAuth client credentials are present (after strip)."""
        return bool(self._client_id() and self._client_secret())

    def has_token(self) -> bool:
        """True when an OAuth access token is stored in settings."""
        return bool(self.settings.ticktick_token)

    def build_authorization_url(self) -> str:
        """Build the TickTick authorization URL, issuing a fresh CSRF state."""
        self._require_auth_config()
        state = self.auth_state_store.issue_state()
        params = parse.urlencode(
            {
                "client_id": self._client_id(),
                "response_type": "code",
                "redirect_uri": self._redirect_uri(),
                "state": state,
                "scope": AUTH_SCOPE,
            }
        )
        return f"{TICKTICK_AUTH_URL}?{params}"

    def exchange_authorization_code(self, *, code: str, state: str) -> str:
        """Exchange an OAuth authorization code for an access token.

        Raises ValueError for empty inputs, TickTickAuthError when ``state``
        does not match the pending value, and TickTickRequestError when the
        token endpoint fails or the response lacks ``access_token``.
        """
        self._require_auth_config()
        if not code:
            raise ValueError("code must not be empty")
        if not state:
            raise ValueError("state must not be empty")
        if not self.auth_state_store.matches_state(state):
            raise TickTickAuthError("Invalid state")

        body = parse.urlencode(
            {
                "code": code,
                "grant_type": "authorization_code",
                "scope": AUTH_SCOPE,
                "redirect_uri": self._redirect_uri(),
            }
        ).encode("utf-8")
        # The token endpoint authenticates with HTTP Basic (client_id:secret),
        # not the Bearer token used by the Open API.
        req = request.Request(TICKTICK_TOKEN_URL, data=body, method="POST")
        req.add_header("Content-Type", "application/x-www-form-urlencoded")
        req.add_header("Authorization", self._basic_auth_header())
        payload = self._send_json_request(req, operation="exchange_authorization_code")
        # State is single-use: drop it once the exchange succeeded.
        self.auth_state_store.clear()
        # NOTE(review): payload can be None for an empty response body, which
        # would raise AttributeError here rather than TickTickRequestError —
        # confirm whether the token endpoint can ever return an empty body.
        token = payload.get("access_token")
        if not isinstance(token, str) or not token:
            raise TickTickRequestError("TickTick token response did not include access_token")
        return token

    def get_projects(self) -> list[TickTickProject]:
        """Fetch all projects; raises TickTickRequestError on a non-list payload."""
        self._require_token()
        payload = self._authorized_json_request(
            method="GET",
            path="/project/",
            operation="get_projects",
        )
        if not isinstance(payload, list):
            raise TickTickRequestError("TickTick get_projects returned an unexpected payload")
        return [_coerce_dataclass_payload(TickTickProject, project) for project in payload]

    def get_tasks(self, project_id: str) -> list[TickTickTask]:
        """Fetch the tasks of one project; an unknown project (404) yields []."""
        self._require_token()
        if not project_id:
            raise ValueError("project_id must not be empty")
        payload = self._authorized_json_request(
            method="GET",
            path=f"/project/{parse.quote(project_id, safe='')}/data",
            operation="get_tasks",
            # 404 is accepted so a missing project maps to "no tasks".
            accepted_status_codes={200, 404},
        )
        if payload is None:
            return []
        if not isinstance(payload, dict):
            raise TickTickRequestError("TickTick get_tasks returned an unexpected payload")
        tasks = payload.get("tasks", [])
        if not isinstance(tasks, list):
            raise TickTickRequestError("TickTick get_tasks returned an invalid tasks payload")
        return [_coerce_dataclass_payload(TickTickTask, task) for task in tasks]

    def has_duplicate_task(self, *, project_id: str, task_title: str) -> bool:
        """True when any task in the project already has exactly this title."""
        if not task_title:
            raise ValueError("task_title must not be empty")
        return any(task.title == task_title for task in self.get_tasks(project_id))

    def create_task(self, task: TickTickTask) -> None:
        """Create ``task`` unless a task with the same title already exists.

        The duplicate check and the create are separate requests, so a
        concurrent creator could still race in between.
        """
        self._require_token()
        if not task.projectId:
            raise ValueError("task.projectId must not be empty")
        if not task.title:
            raise ValueError("task.title must not be empty")
        if self.has_duplicate_task(project_id=task.projectId, task_title=task.title):
            return

        # Omit None fields so the API applies its own defaults.
        payload = {key: value for key, value in asdict(task).items() if value is not None}
        self._authorized_json_request(
            method="POST",
            path="/task",
            operation="create_task",
            body=payload,
            accepted_status_codes={200},
        )

    def _authorized_json_request(
        self,
        *,
        method: str,
        path: str,
        operation: str,
        body: Any | None = None,
        accepted_status_codes: set[int] | None = None,
    ) -> Any:
        """Send a Bearer-authorized Open API request and decode the JSON reply."""
        url = f"{TICKTICK_OPEN_API_BASE_URL}{path}"
        encoded_body = None if body is None else json.dumps(body).encode("utf-8")
        req = request.Request(url, data=encoded_body, method=method)
        req.add_header("Authorization", f"Bearer {self.settings.ticktick_token}")
        if body is not None:
            req.add_header("Content-Type", "application/json")
        return self._send_json_request(
            req,
            operation=operation,
            accepted_status_codes=accepted_status_codes,
        )

    def _send_json_request(
        self,
        req: request.Request,
        *,
        operation: str,
        accepted_status_codes: set[int] | None = None,
    ) -> Any:
        """Execute ``req`` and return the parsed JSON body (None when empty).

        Status codes outside ``accepted_status_codes`` (default {200}) raise
        TickTickRequestError, as do transport errors and invalid JSON.
        """
        accepted_codes = accepted_status_codes or {200}
        try:
            with request.urlopen(req, timeout=self.timeout_seconds) as response:
                status_code = response.getcode()
                if status_code not in accepted_codes:
                    raise TickTickRequestError(
                        f"TickTick {operation} returned unexpected status {status_code}"
                    )
                raw_body = response.read()
        except error.HTTPError as exc:
            # urllib raises HTTPError for >=400, but e.g. 404 may be an
            # accepted outcome (see get_tasks) — read its body in that case.
            if exc.code in accepted_codes:
                raw_body = exc.read()
            else:
                logger.warning(
                    "TickTick %s failed with HTTP %s for %s",
                    operation,
                    exc.code,
                    req.full_url,
                )
                raise TickTickRequestError(
                    f"TickTick {operation} failed with HTTP {exc.code}"
                ) from exc
        except error.URLError as exc:
            logger.warning("TickTick %s failed for %s: %s", operation, req.full_url, exc)
            raise TickTickRequestError(
                f"TickTick {operation} failed to reach TickTick API"
            ) from exc

        if not raw_body:
            return None
        try:
            return json.loads(raw_body)
        except json.JSONDecodeError as exc:
            raise TickTickRequestError(
                f"TickTick {operation} returned invalid JSON"
            ) from exc

    def _basic_auth_header(self) -> str:
        """HTTP Basic header value built from the OAuth client credentials."""
        raw_credentials = f"{self._client_id()}:{self._client_secret()}"
        token = base64.b64encode(raw_credentials.encode("utf-8")).decode("ascii")
        return f"Basic {token}"

    def _client_id(self) -> str:
        # Strip whitespace so stray spaces in env values don't break auth.
        return self.settings.ticktick_client_id.strip()

    def _client_secret(self) -> str:
        return self.settings.ticktick_client_secret.strip()

    def _redirect_uri(self) -> str:
        # Derived from APP_HOSTNAME by Settings (per the config comments).
        return self.settings.ticktick_redirect_uri

    def _require_auth_config(self) -> None:
        """Raise TickTickConfigError unless credentials and redirect URI exist."""
        if not self.is_configured():
            raise TickTickConfigError(
                "TickTick integration is not configured. Set TICKTICK_CLIENT_ID and "
                "TICKTICK_CLIENT_SECRET."
            )
        if not self._redirect_uri():
            raise TickTickConfigError(
                "TickTick integration is missing APP_HOSTNAME for OAuth callback generation."
            )

    def _require_token(self) -> None:
        """Raise TickTickConfigError unless the OAuth flow has produced a token."""
        self._require_auth_config()
        if self.has_token():
            return
        raise TickTickConfigError(
            "TickTick integration is missing TICKTICK_TOKEN. Complete the OAuth flow first."
        )
|
||||
|
||||
+104
@@ -0,0 +1,104 @@
|
||||
from contextlib import asynccontextmanager
|
||||
from pathlib import Path
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app import models # noqa: F401
|
||||
from app.api.routes.auth import router as auth_router
|
||||
from app.api.routes import pages, status
|
||||
import app.auth_db as auth_db
|
||||
from app.api.routes.homeassistant import router as homeassistant_router
|
||||
from app.api.routes.location import router as location_router
|
||||
from app.api.routes.poo import router as poo_router
|
||||
from app.api.routes.ticktick import router as ticktick_router
|
||||
from app.config import get_settings
|
||||
from app.services.auth import AuthBootstrapError, initialize_auth_schema
|
||||
from app.services.config_page import seed_missing_config_from_bootstrap, sync_app_hostname_from_bootstrap
|
||||
from scripts.app_db_adopt import AppDatabaseAdoptionError, validate_app_runtime_db
|
||||
from scripts.location_db_adopt import LocationDatabaseAdoptionError, validate_location_runtime_db
|
||||
from scripts.poo_db_adopt import PooDatabaseAdoptionError, validate_poo_runtime_db
|
||||
|
||||
|
||||
def ensure_auth_db_ready() -> None:
    """Validate the app DB and bootstrap the auth schema/config at startup.

    Raises RuntimeError when the database cannot be adopted or the auth
    bootstrap configuration is incomplete, so startup fails loudly.
    """
    settings = get_settings()
    session_local = auth_db.get_auth_session_local()
    session: Session = session_local()
    try:
        validate_app_runtime_db(settings.app_database_url)
        initialize_auth_schema(session, settings)
        seed_missing_config_from_bootstrap(session, settings)
        sync_app_hostname_from_bootstrap(session, settings)
    except (AppDatabaseAdoptionError, AuthBootstrapError) as exc:
        # Both failure modes are fatal startup errors; surface them uniformly.
        raise RuntimeError(str(exc)) from exc
    finally:
        session.close()
|
||||
|
||||
|
||||
def ensure_location_db_ready() -> None:
    """Validate the location-recorder DB at startup.

    Skipped entirely when no sqlite path is configured (recorder disabled).
    Raises RuntimeError when the existing database cannot be adopted.
    """
    settings = get_settings()
    if settings.location_sqlite_path is None:
        return

    try:
        validate_location_runtime_db(settings.location_database_url)
    except LocationDatabaseAdoptionError as exc:
        raise RuntimeError(str(exc)) from exc
|
||||
|
||||
|
||||
def ensure_poo_db_ready() -> None:
    """Validate the poo-recorder DB at startup.

    Skipped entirely when no sqlite path is configured (recorder disabled).
    Raises RuntimeError when the existing database cannot be adopted.
    """
    settings = get_settings()
    if settings.poo_sqlite_path is None:
        return

    try:
        validate_poo_runtime_db(settings.poo_database_url)
    except PooDatabaseAdoptionError as exc:
        raise RuntimeError(str(exc)) from exc
|
||||
|
||||
|
||||
def ensure_runtime_dirs() -> None:
    """Create the parent directory of every configured sqlite database file."""
    settings = get_settings()
    candidate_paths = (
        settings.app_sqlite_path,
        settings.location_sqlite_path,
        settings.poo_sqlite_path,
    )
    for sqlite_path in candidate_paths:
        # A None path means the corresponding database is not file-backed.
        if sqlite_path is None:
            continue
        sqlite_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(_: FastAPI):
    """FastAPI lifespan hook: prepare data dirs and validate all databases.

    Runs before the app starts serving; any RuntimeError raised by the
    ensure_* helpers aborts startup.
    """
    ensure_runtime_dirs()
    ensure_auth_db_ready()
    ensure_location_db_ready()
    ensure_poo_db_ready()
    yield
|
||||
|
||||
|
||||
def create_app() -> FastAPI:
    """Build the FastAPI application with static files and all routers mounted."""
    settings = get_settings()
    app = FastAPI(
        title=settings.app_name,
        debug=settings.app_debug,
        version="0.1.0",
        lifespan=lifespan,
        description=(
            "Home automation backend with auth, runtime config, Home Assistant "
            "integrations, TickTick integration, and SQLite-backed recorders."
        ),
    )

    # Serve bundled assets from the package-local "static" directory.
    app.mount(
        "/static",
        StaticFiles(directory=Path(__file__).parent / "static"),
        name="static",
    )

    # Registration order is preserved from the original wiring.
    routers = (
        status.router,
        auth_router,
        pages.router,
        homeassistant_router,
        location_router,
        poo_router,
        ticktick_router,
    )
    for router in routers:
        app.include_router(router)
    return app
|
||||
|
||||
|
||||
app = create_app()
|
||||
@@ -0,0 +1,7 @@
|
||||
"""SQLAlchemy models package."""
|
||||
|
||||
from app.models.auth import AuthSession, AuthUser
|
||||
from app.models.config import AppConfigEntry
|
||||
from app.models.location import Location
|
||||
|
||||
__all__ = ["AppConfigEntry", "AuthSession", "AuthUser", "Location"]
|
||||
@@ -0,0 +1,33 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Boolean, DateTime, ForeignKey, Integer, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.auth_db import AuthBase
|
||||
|
||||
|
||||
class AuthUser(AuthBase):
    """A login account stored in the auth database."""

    __tablename__ = "auth_users"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Unique login name; indexed for the lookup performed on every login.
    username: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True)
    # Password hash (Argon2 per the auth service); plaintext is never stored.
    password_hash: Mapped[str] = mapped_column(String(255), nullable=False)
    is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    # When True the user must rotate their password (set for bootstrap users).
    force_password_change: Mapped[bool] = mapped_column(Boolean, nullable=False, default=True)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)

    sessions: Mapped[list["AuthSession"]] = relationship(back_populates="user")
|
||||
|
||||
|
||||
class AuthSession(AuthBase):
    """A browser login session; a row exists per issued session cookie."""

    __tablename__ = "auth_sessions"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    user_id: Mapped[int] = mapped_column(ForeignKey("auth_users.id"), nullable=False, index=True)
    # SHA-256 hex digest of the raw cookie token (see services.auth._hash_token);
    # the raw token itself is never persisted.
    token_hash: Mapped[str] = mapped_column(String(64), unique=True, nullable=False, index=True)
    # Per-session CSRF token paired with the session cookie.
    csrf_token: Mapped[str] = mapped_column(String(128), nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False, index=True)
    # NULL while the session is live; set on logout or expiry.
    revoked_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)

    user: Mapped[AuthUser] = relationship(back_populates="sessions")
|
||||
@@ -0,0 +1,4 @@
|
||||
from app.db import Base
|
||||
|
||||
__all__ = ["Base"]
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import DateTime, Integer, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from app.auth_db import AuthBase
|
||||
|
||||
|
||||
class AppConfigEntry(AuthBase):
    """A single key/value runtime-config override persisted in the auth DB."""

    __tablename__ = "app_config"

    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    # Environment-variable style key, e.g. "APP_HOSTNAME"; one row per key.
    key: Mapped[str] = mapped_column(String(255), unique=True, nullable=False, index=True)
    value: Mapped[str] = mapped_column(String, nullable=False)
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)
|
||||
@@ -0,0 +1,15 @@
|
||||
from sqlalchemy import Float, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from app.db import Base
|
||||
|
||||
|
||||
class Location(Base):
    """A recorded GPS fix, keyed by (person, datetime)."""

    __tablename__ = "location"

    person: Mapped[str] = mapped_column(String, primary_key=True)
    # Timestamp stored as a string (schema inherited from the recorder DB);
    # part of the composite primary key.
    datetime: Mapped[str] = mapped_column(String, primary_key=True)
    latitude: Mapped[float] = mapped_column(Float, nullable=False)
    longitude: Mapped[float] = mapped_column(Float, nullable=False)
    altitude: Mapped[float | None] = mapped_column(Float, nullable=True)
|
||||
|
||||
@@ -0,0 +1,13 @@
|
||||
from sqlalchemy import Float, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
|
||||
from app.poo_db import PooBase
|
||||
|
||||
|
||||
class PooRecord(PooBase):
    """A single poo-recorder event, keyed by its timestamp string."""

    __tablename__ = "poo_records"

    # Timestamp stored as a string (schema inherited from the recorder DB).
    timestamp: Mapped[str] = mapped_column(String, primary_key=True)
    status: Mapped[str] = mapped_column(String, nullable=False)
    latitude: Mapped[float] = mapped_column(Float, nullable=False)
    longitude: Mapped[float] = mapped_column(Float, nullable=False)
|
||||
@@ -0,0 +1,28 @@
|
||||
from collections.abc import Generator
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import DeclarativeBase, Session, sessionmaker
|
||||
|
||||
from app.config import get_settings
|
||||
|
||||
|
||||
class PooBase(DeclarativeBase):
    """Declarative base for models bound to the poo-recorder database."""

    pass
|
||||
|
||||
|
||||
settings = get_settings()

# SQLite connections are thread-bound by default; FastAPI may use the session
# from worker threads, so relax check_same_thread for sqlite URLs only.
connect_args: dict[str, object] = {}
if settings.poo_database_url.startswith("sqlite"):
    connect_args["check_same_thread"] = False

# Module-level engine/session factory shared by all poo-recorder requests.
poo_engine = create_engine(settings.poo_database_url, connect_args=connect_args)
PooSessionLocal = sessionmaker(bind=poo_engine, autoflush=False, autocommit=False, class_=Session)
|
||||
|
||||
|
||||
def get_poo_db_session() -> Generator[Session, None, None]:
    """FastAPI dependency: yield a poo-recorder DB session, always closing it."""
    db_session = PooSessionLocal()
    try:
        yield db_session
    finally:
        db_session.close()
|
||||
@@ -0,0 +1,2 @@
|
||||
"""Pydantic schemas package."""
|
||||
|
||||
@@ -0,0 +1,6 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class StatusResponse(BaseModel):
    """Health-check response body (a single status string)."""

    status: str
|
||||
|
||||
@@ -0,0 +1,9 @@
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
|
||||
class HomeAssistantPublishEnvelope(BaseModel):
    """Inbound payload describing a Home Assistant publish action."""

    target: str
    action: str
    content: str

    # Reject unknown keys so malformed payloads fail validation loudly.
    model_config = ConfigDict(extra="forbid")
|
||||
@@ -0,0 +1,10 @@
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
|
||||
class LocationRecordRequest(BaseModel):
    """Inbound payload for recording one GPS fix.

    Coordinates arrive as strings (presumably as sent by the client device);
    numeric conversion happens downstream — TODO confirm against the route.
    """

    person: str
    latitude: str
    longitude: str
    altitude: str | None = None

    # Reject unknown keys so malformed payloads fail validation loudly.
    model_config = ConfigDict(extra="forbid")
|
||||
@@ -0,0 +1,9 @@
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
|
||||
|
||||
class PooRecordRequest(BaseModel):
    """Inbound payload for recording one poo event.

    Coordinates arrive as strings (presumably as sent by the client device);
    numeric conversion happens downstream — TODO confirm against the route.
    """

    status: str
    latitude: str
    longitude: str

    # Reject unknown keys so malformed payloads fail validation loudly.
    model_config = ConfigDict(extra="forbid")
|
||||
@@ -0,0 +1,9 @@
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
|
||||
|
||||
class TickTickActionTaskRequest(BaseModel):
    """Inbound payload for creating a TickTick task from an action."""

    title: str | None = None
    action: str
    # The original declared Field(alias="due_hour"), aliasing the field to its
    # own name — a no-op; a plain field is equivalent and simpler.
    due_hour: int

    # Reject unknown keys; populate_by_name kept for interface compatibility.
    model_config = ConfigDict(extra="forbid", populate_by_name=True)
|
||||
@@ -0,0 +1,2 @@
|
||||
"""Service layer package."""
|
||||
|
||||
@@ -0,0 +1,192 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
import secrets
|
||||
from dataclasses import dataclass
|
||||
from datetime import UTC, datetime, timedelta
|
||||
|
||||
from argon2 import PasswordHasher
|
||||
from argon2.exceptions import InvalidHashError, VerificationError, VerifyMismatchError
|
||||
from sqlalchemy import Select, select
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.config import Settings
|
||||
from app.models.auth import AuthSession, AuthUser
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
password_hasher = PasswordHasher()
|
||||
|
||||
|
||||
class AuthBootstrapError(RuntimeError):
    """Raised when the auth system cannot be safely initialized.

    Emitted by ``initialize_auth_schema`` when the user table is empty and
    the bootstrap credentials are not configured.
    """
|
||||
|
||||
|
||||
class AuthPasswordChangeError(ValueError):
    """Raised when a password change request is invalid.

    The message is a human-readable reason (see ``change_password``).
    """
|
||||
|
||||
|
||||
@dataclass(slots=True)
class AuthenticatedSession:
    """A validated (user, session) pair returned by ``get_authenticated_session``."""

    user: AuthUser
    session: AuthSession
|
||||
|
||||
|
||||
def initialize_auth_schema(session: Session, settings: Settings) -> None:
    """Create the bootstrap admin account when the auth DB has no users yet.

    No-op when any user already exists. Raises AuthBootstrapError when the
    table is empty and the bootstrap credentials are not configured.
    """
    has_any_user = session.scalar(select(AuthUser.id).limit(1)) is not None
    if has_any_user:
        return

    if not settings.auth_bootstrap_username or not settings.auth_bootstrap_password:
        raise AuthBootstrapError(
            "Auth DB has no users. Set AUTH_BOOTSTRAP_USERNAME and "
            "AUTH_BOOTSTRAP_PASSWORD before starting the app."
        )

    # force_password_change makes the bootstrap user rotate the env-provided
    # password on first login.
    bootstrap_user = AuthUser(
        username=settings.auth_bootstrap_username,
        password_hash=hash_password(settings.auth_bootstrap_password),
        is_active=True,
        force_password_change=True,
        created_at=_utc_now(),
    )
    session.add(bootstrap_user)
    session.commit()
    logger.warning(
        "Bootstrapped initial auth user '%s'. Rotate AUTH_BOOTSTRAP_PASSWORD after first setup.",
        bootstrap_user.username,
    )
|
||||
|
||||
|
||||
def hash_password(password: str) -> str:
    """Hash ``password`` with the module-level Argon2 hasher."""
    return password_hasher.hash(password)
|
||||
|
||||
|
||||
def verify_password(password: str, stored_hash: str) -> bool:
    """Return True when ``password`` matches the Argon2 ``stored_hash``.

    All verification failures — mismatch, malformed hash, or any other
    verification error — are treated as a non-match rather than raised,
    so callers get a plain boolean.
    """
    try:
        return password_hasher.verify(stored_hash, password)
    except (VerifyMismatchError, InvalidHashError, VerificationError):
        # The original had two except arms with identical bodies; merged.
        return False
|
||||
|
||||
|
||||
def authenticate_user(session: Session, *, username: str, password: str) -> AuthUser | None:
    """Return the matching active user, or None on any authentication failure.

    Failures are logged at info level; unknown, inactive, and wrong-password
    cases all return None so callers cannot distinguish them.
    """
    user = session.scalar(select(AuthUser).where(AuthUser.username == username).limit(1))
    if user is None or not user.is_active:
        logger.info("Failed login for unknown or inactive user '%s'", username)
        return None

    if not verify_password(password, user.password_hash):
        logger.info("Failed login due to invalid password for user '%s'", username)
        return None

    return user
|
||||
|
||||
|
||||
def create_session(session: Session, *, user: AuthUser, settings: Settings) -> tuple[AuthSession, str]:
    """Create a session row for ``user`` and return it with the raw token.

    Only a SHA-256 hash of the token is persisted; the raw token is returned
    exactly once so the caller can set it as the session cookie.
    """
    raw_token = secrets.token_urlsafe(32)
    auth_session = AuthSession(
        user_id=user.id,
        token_hash=_hash_token(raw_token),
        csrf_token=secrets.token_urlsafe(24),
        created_at=_utc_now(),
        expires_at=_utc_now() + timedelta(hours=settings.auth_session_ttl_hours),
        revoked_at=None,
    )
    session.add(auth_session)
    session.commit()
    # Refresh to populate DB-generated fields (e.g. the autoincrement id).
    session.refresh(auth_session)
    return auth_session, raw_token
|
||||
|
||||
|
||||
def get_authenticated_session(session: Session, *, raw_token: str | None) -> AuthenticatedSession | None:
    """Resolve a raw cookie token to a live (user, session) pair.

    Returns None when the token is missing/unknown, the session is revoked
    or expired, or the user is inactive. A newly-expired session is marked
    revoked as a side effect.
    """
    if not raw_token:
        return None

    # Look up by token hash — the raw token is never stored.
    stmt: Select[tuple[AuthSession, AuthUser]] = (
        select(AuthSession, AuthUser)
        .join(AuthUser, AuthSession.user_id == AuthUser.id)
        .where(AuthSession.token_hash == _hash_token(raw_token))
        .limit(1)
    )
    result = session.execute(stmt).first()
    if result is None:
        return None

    auth_session, user = result
    now = _utc_now()
    # Normalize to aware-UTC so comparisons with `now` are valid even if the
    # DB returned naive datetimes.
    expires_at = _as_utc(auth_session.expires_at)
    revoked_at = _as_utc(auth_session.revoked_at)
    if expires_at is None:
        logger.warning("Auth session %s has no expires_at; treating it as invalid", auth_session.id)
        return None

    if revoked_at is not None or expires_at <= now or not user.is_active:
        # Lazily revoke sessions that expired but were never explicitly revoked.
        if revoked_at is None and expires_at <= now:
            auth_session.revoked_at = now
            session.commit()
        return None

    return AuthenticatedSession(user=user, session=auth_session)
|
||||
|
||||
|
||||
def revoke_session(session: Session, *, auth_session: AuthSession) -> None:
    """Mark ``auth_session`` as revoked now; no-op when already revoked."""
    if auth_session.revoked_at is None:
        auth_session.revoked_at = _utc_now()
        session.commit()
|
||||
|
||||
|
||||
def change_password(
    session: Session,
    *,
    user: AuthUser,
    current_password: str,
    new_password: str,
    confirm_password: str,
) -> None:
    """Validate and apply a password change for ``user``.

    Raises AuthPasswordChangeError when: the current password is wrong, the
    new password is empty, the confirmation mismatches, the new password is
    shorter than 8 characters, or it equals the current password. On success
    the hash is replaced and force_password_change is cleared.
    """
    if not verify_password(current_password, user.password_hash):
        raise AuthPasswordChangeError("current password is invalid")

    if not new_password:
        raise AuthPasswordChangeError("new password must not be empty")

    if new_password != confirm_password:
        raise AuthPasswordChangeError("new password confirmation does not match")

    if len(new_password) < 8:
        raise AuthPasswordChangeError("new password must be at least 8 characters long")

    if verify_password(new_password, user.password_hash):
        raise AuthPasswordChangeError("new password must be different from the current password")

    user.password_hash = hash_password(new_password)
    # The (possibly bootstrap-forced) rotation requirement is now satisfied.
    user.force_password_change = False
    session.commit()
|
||||
|
||||
|
||||
def issue_login_csrf_token() -> str:
    """Generate a random CSRF token for the (pre-auth) login form."""
    return secrets.token_urlsafe(24)
|
||||
|
||||
|
||||
def validate_csrf_token(*, expected: str | None, actual: str | None) -> bool:
    """Constant-time comparison of the expected and submitted CSRF tokens.

    Missing values on either side count as a failed validation.
    """
    if expected and actual:
        return secrets.compare_digest(expected, actual)
    return False
|
||||
|
||||
|
||||
def _hash_token(raw_token: str) -> str:
|
||||
return hashlib.sha256(raw_token.encode("utf-8")).hexdigest()
|
||||
|
||||
|
||||
def _utc_now() -> datetime:
|
||||
return datetime.now(UTC)
|
||||
|
||||
|
||||
def _as_utc(value: datetime | None) -> datetime | None:
|
||||
if value is None:
|
||||
return None
|
||||
if value.tzinfo is None:
|
||||
return value.replace(tzinfo=UTC)
|
||||
return value.astimezone(UTC)
|
||||
@@ -0,0 +1,271 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import UTC, datetime
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.auth_db import reset_auth_db_caches
|
||||
from app.config import Settings, get_settings
|
||||
from app.models.config import AppConfigEntry
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
class ConfigField:
    """Describes one editable runtime-config entry shown on the config page."""

    # Display group on the config page (e.g. "System", "TickTick").
    section: str
    # Environment-variable name used as the persistence key.
    env_name: str
    # Matching attribute name on the Settings object.
    setting_attr: str
    # Human-readable label for the form.
    label: str
    # Secret values should be masked in the UI.
    secret: bool = False
    input_type: str = "text"
|
||||
|
||||
|
||||
# Ordered registry of every runtime-editable setting. Order matters: the
# config page groups consecutive entries by `section` while iterating.
CONFIG_FIELDS: tuple[ConfigField, ...] = (
    ConfigField("System", "APP_NAME", "app_name", "App Name"),
    ConfigField("System", "APP_ENV", "app_env", "App Env"),
    ConfigField("System", "APP_DEBUG", "app_debug", "App Debug"),
    ConfigField("System", "APP_HOSTNAME", "app_hostname", "App Hostname"),
    ConfigField(
        "Authentication",
        "AUTH_SESSION_COOKIE_NAME",
        "auth_session_cookie_name",
        "Session Cookie Name",
    ),
    ConfigField("Authentication", "AUTH_SESSION_TTL_HOURS", "auth_session_ttl_hours", "Session TTL Hours"),
    ConfigField(
        "Authentication",
        "AUTH_COOKIE_SECURE_OVERRIDE",
        "auth_cookie_secure_override",
        "Cookie Secure Override",
    ),
    ConfigField("Poo", "POO_WEBHOOK_ID", "poo_webhook_id", "Poo Webhook ID", secret=True),
    ConfigField(
        "Poo",
        "POO_SENSOR_ENTITY_NAME",
        "poo_sensor_entity_name",
        "Poo Sensor Entity Name",
    ),
    ConfigField(
        "Poo",
        "POO_SENSOR_FRIENDLY_NAME",
        "poo_sensor_friendly_name",
        "Poo Sensor Friendly Name",
    ),
    ConfigField("TickTick", "TICKTICK_CLIENT_ID", "ticktick_client_id", "TickTick Client ID"),
    ConfigField(
        "TickTick",
        "TICKTICK_CLIENT_SECRET",
        "ticktick_client_secret",
        "TickTick Client Secret",
        secret=True,
    ),
    ConfigField("TickTick", "TICKTICK_TOKEN", "ticktick_token", "TickTick Token", secret=True),
    ConfigField(
        "Home Assistant",
        "HOME_ASSISTANT_BASE_URL",
        "home_assistant_base_url",
        "Home Assistant Base URL",
    ),
    ConfigField(
        "Home Assistant",
        "HOME_ASSISTANT_AUTH_TOKEN",
        "home_assistant_auth_token",
        "Home Assistant Auth Token",
        secret=True,
    ),
    ConfigField(
        "Home Assistant",
        "HOME_ASSISTANT_TIMEOUT_SECONDS",
        "home_assistant_timeout_seconds",
        "Home Assistant Timeout Seconds",
    ),
    ConfigField(
        "Home Assistant",
        "HOME_ASSISTANT_ACTION_TASK_PROJECT_ID",
        "home_assistant_action_task_project_id",
        "Home Assistant Action Task Project ID",
    ),
)
|
||||
|
||||
|
||||
class ConfigSaveError(ValueError):
    """Raised when the submitted config payload is invalid.

    Subclasses ValueError so callers that already handle validation errors
    keep working; raised by the validation helpers after pydantic rejects
    the merged settings payload.
    """
|
||||
|
||||
|
||||
def seed_missing_config_from_bootstrap(session: Session, bootstrap_settings: Settings) -> None:
    """Backfill config keys that are not yet persisted, using bootstrap values.

    Keys already present in the database are left untouched; when nothing is
    missing the database is not written at all.
    """
    persisted = _read_config_values(session)
    seeded = {
        field.env_name: _stringify(getattr(bootstrap_settings, field.setting_attr))
        for field in CONFIG_FIELDS
        if field.env_name not in persisted
    }
    if seeded:
        _persist_config_values(session, {**persisted, **seeded})
|
||||
|
||||
|
||||
def sync_app_hostname_from_bootstrap(session: Session, bootstrap_settings: Settings) -> None:
    """Force the persisted APP_HOSTNAME to match the bootstrap environment value.

    No-op when the stored hostname already matches; otherwise the new value is
    persisted and the settings/auth caches are invalidated so it takes effect
    immediately.
    """
    stored = _read_config_values(session)
    hostname = _stringify(bootstrap_settings.app_hostname)
    if stored.get("APP_HOSTNAME") == hostname:
        return

    stored["APP_HOSTNAME"] = hostname
    _persist_config_values(session, stored)
    # Cached Settings/auth handles still see the old hostname; drop them.
    get_settings.cache_clear()
    reset_auth_db_caches()
|
||||
|
||||
|
||||
def build_runtime_settings(session: Session, bootstrap_settings: Settings) -> Settings:
    """Return Settings with persisted DB overrides layered over bootstrap values.

    When no overrides exist the bootstrap settings object is returned as-is.
    """
    overrides = _read_config_values(session)
    if not overrides:
        return bootstrap_settings

    payload = _settings_payload(bootstrap_settings)
    payload.update(
        {
            field.setting_attr: overrides[field.env_name]
            for field in CONFIG_FIELDS
            if field.env_name in overrides
        }
    )
    # _env_file=None: the DB overrides must win over any .env file on disk.
    return Settings(_env_file=None, **payload)
|
||||
|
||||
|
||||
def build_config_sections(session: Session, bootstrap_settings: Settings) -> list[dict[str, Any]]:
    """Build the section/field structure rendered by the config page.

    Fields are grouped by their declared section, preserving CONFIG_FIELDS
    order. Secret fields never echo their current value back to the browser;
    instead they carry a "configured" flag so the template can show whether
    something is already set (persisted or supplied via bootstrap env).
    """
    runtime_settings = build_runtime_settings(session, bootstrap_settings)
    persisted_values = _read_config_values(session)
    sections: list[dict[str, Any]] = []

    for field in CONFIG_FIELDS:
        # Start a new section whenever the section name changes.
        if not sections or sections[-1]["name"] != field.section:
            sections.append({"name": field.section, "fields": []})

        runtime_value = _stringify(getattr(runtime_settings, field.setting_attr))
        bootstrap_value = _stringify(getattr(bootstrap_settings, field.setting_attr))
        sections[-1]["fields"].append(
            {
                "env_name": field.env_name,
                "label": field.label,
                # Secrets are never echoed back in the form.
                "value": "" if field.secret else runtime_value,
                "secret": field.secret,
                "input_type": "password" if field.secret else field.input_type,
                "configured": field.env_name in persisted_values or bool(bootstrap_value),
            }
        )

    return sections
|
||||
|
||||
|
||||
def save_config_updates(session: Session, form_data: dict[str, str], bootstrap_settings: Settings) -> None:
    """Validate and persist a full config-form submission.

    Secret fields submitted as blank keep their previously stored value;
    non-secret fields are always overwritten with the submitted text.

    Raises:
        ConfigSaveError: when the merged values fail Settings validation.
    """
    merged_values = dict(_read_config_values(session))

    for field in CONFIG_FIELDS:
        submitted_value = form_data.get(field.env_name, "")
        # A blank secret means "keep the current value"; everything else is stored verbatim.
        if submitted_value or not field.secret:
            merged_values[field.env_name] = submitted_value

    _validate_config_values(merged_values, bootstrap_settings)
    _persist_config_values(session, merged_values)
    # Invalidate cached settings/auth handles so the new values take effect.
    get_settings.cache_clear()
    reset_auth_db_caches()
|
||||
|
||||
|
||||
def save_config_value(
    session: Session,
    *,
    env_name: str,
    value: str,
    bootstrap_settings: Settings,
) -> None:
    """Validate and persist a single config key/value pair.

    Raises:
        ConfigSaveError: when the resulting config fails Settings validation.
    """
    values = _read_config_values(session)
    values[env_name] = value
    _validate_config_values(values, bootstrap_settings)
    _persist_config_values(session, values)
    # Invalidate cached settings/auth handles so the new value takes effect.
    get_settings.cache_clear()
    reset_auth_db_caches()
|
||||
|
||||
|
||||
def is_ticktick_oauth_ready(settings: Settings) -> bool:
    """Return True when every setting required to start TickTick OAuth is present."""
    required = (
        settings.app_hostname,
        settings.ticktick_client_id,
        settings.ticktick_client_secret,
    )
    return all(required)
|
||||
|
||||
|
||||
def _read_config_values(session: Session) -> dict[str, str]:
    """Load all persisted config entries as an env-name -> value mapping."""
    stmt = select(AppConfigEntry).order_by(AppConfigEntry.key)
    entries = session.execute(stmt).scalars().all()
    return {entry.key: entry.value for entry in entries}
|
||||
|
||||
|
||||
def _validate_config_values(config_values: dict[str, str], bootstrap_settings: Settings) -> None:
    """Ensure the candidate config produces a constructible Settings object.

    Raises:
        ConfigSaveError: when pydantic rejects the merged payload.
    """
    payload = _settings_payload(bootstrap_settings)
    payload.update(
        {
            field.setting_attr: config_values[field.env_name]
            for field in CONFIG_FIELDS
            if field.env_name in config_values
        }
    )
    try:
        Settings(_env_file=None, **payload)
    except Exception as exc:  # any construction failure means the submission is invalid
        raise ConfigSaveError("invalid config submission") from exc
|
||||
|
||||
|
||||
def _persist_config_values(session: Session, config_values: dict[str, str]) -> None:
    """Upsert the given key/value pairs into the app config table and commit.

    Existing rows are updated in place; keys absent from ``config_values`` are
    deliberately left untouched (this is a merge, not a replace).
    """
    rows = session.execute(select(AppConfigEntry)).scalars().all()
    rows_by_key = {row.key: row for row in rows}
    now = datetime.now(UTC)

    for env_name, value in config_values.items():
        row = rows_by_key.get(env_name)
        if row is not None:
            row.value = value
            row.updated_at = now
        else:
            session.add(AppConfigEntry(key=env_name, value=value, updated_at=now))

    session.commit()
|
||||
|
||||
|
||||
def _stringify(value: Any) -> str:
|
||||
if value is None:
|
||||
return ""
|
||||
if isinstance(value, bool):
|
||||
return str(value).lower()
|
||||
return str(value)
|
||||
|
||||
|
||||
def _settings_payload(settings: Settings) -> dict[str, Any]:
    """Snapshot every Settings attribute managed by the config layer.

    The attribute list must stay in sync with the Settings model; the dict is
    used to rebuild a Settings instance with overrides applied.
    """
    attrs = (
        "app_name",
        "app_env",
        "app_debug",
        "app_hostname",
        "app_database_url",
        "location_database_url",
        "poo_database_url",
        "ticktick_client_id",
        "ticktick_client_secret",
        "ticktick_token",
        "home_assistant_base_url",
        "home_assistant_auth_token",
        "home_assistant_timeout_seconds",
        "home_assistant_action_task_project_id",
        "poo_webhook_id",
        "poo_sensor_entity_name",
        "poo_sensor_friendly_name",
        "auth_bootstrap_username",
        "auth_bootstrap_password",
        "auth_session_cookie_name",
        "auth_session_ttl_hours",
        "auth_cookie_secure_override",
    )
    return {name: getattr(settings, name) for name in attrs}
|
||||
@@ -0,0 +1,79 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from datetime import UTC, datetime, time, timedelta
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.integrations.ticktick import TICKTICK_DATETIME_FORMAT, TickTickClient, TickTickTask
|
||||
from app.schemas.homeassistant import HomeAssistantPublishEnvelope
|
||||
from app.schemas.location import LocationRecordRequest
|
||||
from app.schemas.ticktick import TickTickActionTaskRequest
|
||||
from app.services.location import record_location
|
||||
|
||||
|
||||
class UnsupportedHomeAssistantMessage(RuntimeError):
    """Raised when the inbound gateway receives a target/action that is not supported yet.

    The message text includes the offending target/action pair so the caller
    can surface a useful error.
    """
|
||||
|
||||
|
||||
def handle_homeassistant_message(
    session: Session,
    envelope: HomeAssistantPublishEnvelope,
    ticktick_client: TickTickClient | None = None,
) -> None:
    """Dispatch an inbound Home Assistant envelope to the matching target handler.

    Raises:
        UnsupportedHomeAssistantMessage: for any target without a handler.
    """
    # Dispatch table keyed by envelope target; extend here for new targets.
    dispatch = {
        "location_recorder": lambda: _handle_location_message(session, envelope),
        "ticktick": lambda: _handle_ticktick_message(envelope, ticktick_client),
    }

    handler = dispatch.get(envelope.target)
    if handler is None:
        raise UnsupportedHomeAssistantMessage(
            f"Unsupported Home Assistant target/action: {envelope.target}/{envelope.action}"
        )
    handler()
|
||||
|
||||
|
||||
def _handle_location_message(session: Session, envelope: HomeAssistantPublishEnvelope) -> None:
    """Record a location update delivered through the Home Assistant gateway.

    Raises:
        UnsupportedHomeAssistantMessage: when the action is not "record".
    """
    import ast

    if envelope.action != "record":
        raise UnsupportedHomeAssistantMessage(
            f"Unsupported Home Assistant target/action: {envelope.target}/{envelope.action}"
        )

    # Home Assistant templates may emit Python-dict repr (single quotes)
    # instead of JSON. Try strict JSON first, then fall back to a safe
    # literal parse. The previous blind replace("'", '"') corrupted any
    # payload whose values contained an apostrophe.
    try:
        content = json.loads(envelope.content)
    except json.JSONDecodeError:
        content = ast.literal_eval(envelope.content)

    payload = LocationRecordRequest.model_validate(content)
    record_location(session, payload)
|
||||
|
||||
|
||||
def _handle_ticktick_message(
    envelope: HomeAssistantPublishEnvelope,
    ticktick_client: TickTickClient | None,
) -> None:
    """Create a TickTick "action task" from a Home Assistant envelope.

    Raises:
        UnsupportedHomeAssistantMessage: for unknown actions or when no
            TickTick client is available.
        RuntimeError: when HOME_ASSISTANT_ACTION_TASK_PROJECT_ID is unset.
    """
    import ast

    if envelope.action != "create_action_task":
        raise UnsupportedHomeAssistantMessage(
            f"Unsupported Home Assistant target/action: {envelope.target}/{envelope.action}"
        )
    if ticktick_client is None:
        raise UnsupportedHomeAssistantMessage("TickTick client is unavailable")

    # Home Assistant templates may emit Python-dict repr (single quotes)
    # instead of JSON. Try strict JSON first, then fall back to a safe
    # literal parse. The previous blind replace("'", '"') corrupted any
    # payload whose values contained an apostrophe.
    try:
        content = json.loads(envelope.content)
    except json.JSONDecodeError:
        content = ast.literal_eval(envelope.content)

    payload = TickTickActionTaskRequest.model_validate(content)
    project_id = ticktick_client.settings.home_assistant_action_task_project_id
    if not project_id:
        raise RuntimeError(
            "TickTick action task integration is missing HOME_ASSISTANT_ACTION_TASK_PROJECT_ID"
        )

    ticktick_client.create_task(
        TickTickTask(
            projectId=project_id,
            title=payload.action,
            dueDate=build_action_task_due_date(datetime.now().astimezone(), payload.due_hour),
        )
    )
|
||||
|
||||
|
||||
def build_action_task_due_date(now: datetime, due_hour: int) -> str:
    """Compute the TickTick due date string for an action task.

    The due moment is ``now`` (in local time) shifted by ``due_hour`` hours,
    rounded up to the next local midnight, then rendered in UTC using
    TickTick's datetime format.
    """
    local = now.astimezone()
    shifted = local + timedelta(hours=due_hour)
    # Midnight of the day AFTER the shifted moment, in the local timezone.
    midnight_after = datetime.combine(
        shifted.date() + timedelta(days=1), time.min, tzinfo=local.tzinfo
    )
    return midnight_after.astimezone(UTC).strftime(TICKTICK_DATETIME_FORMAT)
|
||||
@@ -0,0 +1,42 @@
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from sqlalchemy import insert
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.models.location import Location
|
||||
from app.schemas.location import LocationRecordRequest
|
||||
|
||||
|
||||
def _parse_optional_float_compat(value: str | None) -> float:
|
||||
try:
|
||||
return float(value)
|
||||
except (TypeError, ValueError):
|
||||
return 0.0
|
||||
|
||||
|
||||
def _parse_required_float(value: str, field_name: str) -> float:
|
||||
try:
|
||||
return float(value)
|
||||
except (TypeError, ValueError) as exc:
|
||||
raise ValueError(f"Invalid numeric value for {field_name}") from exc
|
||||
|
||||
|
||||
def _utc_now_rfc3339() -> str:
|
||||
now = datetime.now(timezone.utc).replace(microsecond=0)
|
||||
return now.isoformat().replace("+00:00", "Z")
|
||||
|
||||
|
||||
def record_location(session: Session, payload: LocationRecordRequest) -> None:
    """Insert one location row stamped with the current UTC time and commit.

    latitude/longitude must parse as floats (ValueError otherwise); altitude
    falls back to 0.0. ``OR IGNORE`` is a SQLite conflict clause that silently
    drops inserts that would violate a constraint, keeping the call idempotent.
    """
    row = {
        "person": payload.person,
        "datetime": _utc_now_rfc3339(),
        "latitude": _parse_required_float(payload.latitude, "latitude"),
        "longitude": _parse_required_float(payload.longitude, "longitude"),
        "altitude": _parse_optional_float_compat(payload.altitude),
    }
    session.execute(insert(Location).prefix_with("OR IGNORE").values(**row))
    session.commit()
|
||||
@@ -0,0 +1,112 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
import logging
|
||||
|
||||
from sqlalchemy import desc, insert, select
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from app.config import Settings
|
||||
from app.integrations.homeassistant import (
|
||||
HomeAssistantClient,
|
||||
HomeAssistantConfigError,
|
||||
HomeAssistantRequestError,
|
||||
)
|
||||
from app.models.poo import PooRecord
|
||||
from app.schemas.poo import PooRecordRequest
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass(slots=True)
class LatestPooRecord:
    """Plain snapshot of the most recent poo record, decoupled from the ORM row."""

    # Minute-precision UTC string as produced by _utc_now_minute_precision,
    # e.g. "2024-01-02T03:04Z".
    timestamp: str
    status: str
    latitude: float
    longitude: float
|
||||
|
||||
|
||||
def _parse_required_float(value: str, field_name: str) -> float:
|
||||
try:
|
||||
return float(value)
|
||||
except (TypeError, ValueError) as exc:
|
||||
raise ValueError(f"Invalid numeric value for {field_name}") from exc
|
||||
|
||||
|
||||
def _utc_now_minute_precision() -> str:
|
||||
now = datetime.now(timezone.utc).replace(second=0, microsecond=0)
|
||||
return now.strftime("%Y-%m-%dT%H:%MZ")
|
||||
|
||||
|
||||
def record_poo(
    session: Session,
    payload: PooRecordRequest,
    *,
    settings: Settings,
    homeassistant_client: HomeAssistantClient,
) -> None:
    """Persist a poo record, then best-effort notify Home Assistant.

    The insert uses SQLite's ``OR IGNORE`` so conflicting rows are dropped
    silently. Both the sensor publish and the optional webhook trigger are
    best-effort: Home Assistant failures are logged as warnings and never
    fail the request.
    """
    session.execute(
        insert(PooRecord)
        .prefix_with("OR IGNORE")
        .values(
            timestamp=_utc_now_minute_precision(),
            status=payload.status,
            latitude=_parse_required_float(payload.latitude, "latitude"),
            longitude=_parse_required_float(payload.longitude, "longitude"),
        )
    )
    session.commit()

    # Best-effort: push the fresh state to the Home Assistant sensor.
    try:
        publish_latest_poo_status(
            session=session,
            settings=settings,
            homeassistant_client=homeassistant_client,
        )
    except (HomeAssistantConfigError, HomeAssistantRequestError) as exc:
        logger.warning("Failed to publish latest poo status to Home Assistant: %s", exc)

    # Webhook is optional; skip entirely when not configured.
    if not settings.poo_webhook_id:
        return
    try:
        homeassistant_client.trigger_webhook(
            webhook_id=settings.poo_webhook_id,
            body={"status": payload.status},
        )
    except (HomeAssistantConfigError, HomeAssistantRequestError) as exc:
        logger.warning("Failed to trigger poo webhook on Home Assistant: %s", exc)
|
||||
|
||||
|
||||
def get_latest_poo_record(session: Session) -> LatestPooRecord | None:
    """Fetch the newest poo record, or None when the table is empty."""
    stmt = select(PooRecord).order_by(desc(PooRecord.timestamp)).limit(1)
    row = session.execute(stmt).scalar_one_or_none()
    if row is None:
        logger.info("No poo record is available yet")
        return None

    return LatestPooRecord(
        timestamp=row.timestamp,
        status=row.status,
        latitude=row.latitude,
        longitude=row.longitude,
    )
|
||||
|
||||
|
||||
def publish_latest_poo_status(
    *,
    session: Session,
    settings: Settings,
    homeassistant_client: HomeAssistantClient,
) -> LatestPooRecord | None:
    """Push the latest poo record to the configured Home Assistant sensor.

    Returns:
        The record that was published, or None when no record exists yet.
    """
    latest = get_latest_poo_record(session)
    if latest is None:
        logger.info("Skipping Home Assistant poo sensor publish because no poo record exists yet")
        return None

    # Stored timestamps end in "Z"; normalize to an explicit offset so
    # fromisoformat accepts it, then convert to local time for display.
    local_time = datetime.fromisoformat(latest.timestamp.replace("Z", "+00:00")).astimezone()

    homeassistant_client.publish_sensor(
        entity_id=settings.poo_sensor_entity_name,
        state=latest.status,
        attributes={
            "last_poo": local_time.strftime("%a | %Y-%m-%d | %H:%M"),
            "friendly_name": settings.poo_sensor_friendly_name,
        },
    )
    return latest
|
||||
@@ -0,0 +1,6 @@
|
||||
from app.config import Settings
|
||||
|
||||
|
||||
def build_status_payload(settings: Settings) -> dict[str, str]:
    """Build the JSON payload returned by the /status health endpoint."""
    payload = {"status": "ok"}
    payload["environment"] = settings.app_env
    return payload
|
||||
|
||||
@@ -0,0 +1,245 @@
|
||||
:root {
|
||||
--bg: #f4f1ea;
|
||||
--panel: rgba(255, 255, 255, 0.88);
|
||||
--text: #1f2933;
|
||||
--muted: #5b6875;
|
||||
--accent: #2d6a4f;
|
||||
--border: rgba(31, 41, 51, 0.08);
|
||||
}
|
||||
|
||||
* {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
min-height: 100vh;
|
||||
font-family: "Iowan Old Style", "Palatino Linotype", "Book Antiqua", serif;
|
||||
color: var(--text);
|
||||
background:
|
||||
radial-gradient(circle at top left, rgba(45, 106, 79, 0.18), transparent 28%),
|
||||
linear-gradient(160deg, #f7f4ee 0%, #ece6d8 100%);
|
||||
}
|
||||
|
||||
.shell {
|
||||
width: min(880px, calc(100% - 32px));
|
||||
margin: 48px auto;
|
||||
}
|
||||
|
||||
.panel {
|
||||
padding: 32px;
|
||||
border: 1px solid var(--border);
|
||||
border-radius: 24px;
|
||||
background: var(--panel);
|
||||
backdrop-filter: blur(12px);
|
||||
box-shadow: 0 20px 60px rgba(31, 41, 51, 0.12);
|
||||
}
|
||||
|
||||
.eyebrow {
|
||||
margin: 0 0 8px;
|
||||
font-size: 0.85rem;
|
||||
letter-spacing: 0.12em;
|
||||
text-transform: uppercase;
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
h1 {
|
||||
margin: 0 0 16px;
|
||||
font-size: clamp(2rem, 4vw, 3.2rem);
|
||||
}
|
||||
|
||||
.lead {
|
||||
margin: 0 0 24px;
|
||||
line-height: 1.7;
|
||||
color: var(--muted);
|
||||
}
|
||||
|
||||
.meta {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(180px, 1fr));
|
||||
gap: 16px;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.single-column {
|
||||
grid-template-columns: minmax(180px, 320px);
|
||||
margin-bottom: 24px;
|
||||
}
|
||||
|
||||
.meta div {
|
||||
padding: 16px;
|
||||
border-radius: 16px;
|
||||
background: rgba(255, 255, 255, 0.7);
|
||||
border: 1px solid rgba(31, 41, 51, 0.06);
|
||||
}
|
||||
|
||||
.meta dt {
|
||||
margin-bottom: 8px;
|
||||
font-size: 0.9rem;
|
||||
color: var(--muted);
|
||||
}
|
||||
|
||||
.meta dd {
|
||||
margin: 0;
|
||||
font-size: 1.05rem;
|
||||
}
|
||||
|
||||
a {
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.auth-panel {
|
||||
max-width: 520px;
|
||||
margin-inline: auto;
|
||||
}
|
||||
|
||||
.auth-form,
|
||||
.logout-form {
|
||||
display: grid;
|
||||
gap: 16px;
|
||||
}
|
||||
|
||||
.auth-form label {
|
||||
display: grid;
|
||||
gap: 8px;
|
||||
font-size: 0.95rem;
|
||||
color: var(--muted);
|
||||
}
|
||||
|
||||
.auth-form input {
|
||||
width: 100%;
|
||||
padding: 12px 14px;
|
||||
border: 1px solid rgba(31, 41, 51, 0.14);
|
||||
border-radius: 12px;
|
||||
background: rgba(255, 255, 255, 0.92);
|
||||
color: var(--text);
|
||||
font: inherit;
|
||||
}
|
||||
|
||||
button {
|
||||
width: fit-content;
|
||||
min-width: 120px;
|
||||
padding: 12px 18px;
|
||||
border: none;
|
||||
border-radius: 999px;
|
||||
background: var(--accent);
|
||||
color: white;
|
||||
font: inherit;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
button:hover {
|
||||
filter: brightness(1.04);
|
||||
}
|
||||
|
||||
.alert {
|
||||
margin-bottom: 16px;
|
||||
padding: 12px 14px;
|
||||
border-radius: 12px;
|
||||
background: rgba(157, 37, 37, 0.08);
|
||||
border: 1px solid rgba(157, 37, 37, 0.14);
|
||||
color: #8b2a2a;
|
||||
}
|
||||
|
||||
.notice {
|
||||
margin-bottom: 16px;
|
||||
padding: 12px 14px;
|
||||
border-radius: 12px;
|
||||
background: rgba(45, 106, 79, 0.08);
|
||||
border: 1px solid rgba(45, 106, 79, 0.14);
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.config-block + .config-block {
|
||||
margin-top: 28px;
|
||||
}
|
||||
|
||||
.config-block h2 {
|
||||
margin: 0 0 16px;
|
||||
font-size: 1.25rem;
|
||||
}
|
||||
|
||||
.config-form {
|
||||
display: grid;
|
||||
gap: 20px;
|
||||
}
|
||||
|
||||
.config-section {
|
||||
margin: 0;
|
||||
padding: 18px;
|
||||
border: 1px solid rgba(31, 41, 51, 0.08);
|
||||
border-radius: 16px;
|
||||
display: grid;
|
||||
gap: 14px;
|
||||
}
|
||||
|
||||
.config-section legend {
|
||||
padding: 0 8px;
|
||||
color: var(--accent);
|
||||
}
|
||||
|
||||
.config-form label small {
|
||||
color: var(--muted);
|
||||
}
|
||||
|
||||
.integration-action-row {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
gap: 16px;
|
||||
padding-top: 8px;
|
||||
border-top: 1px solid rgba(31, 41, 51, 0.08);
|
||||
}
|
||||
|
||||
.integration-action-title {
|
||||
margin: 0 0 6px;
|
||||
font-weight: 600;
|
||||
color: var(--text);
|
||||
}
|
||||
|
||||
.integration-action-copy {
|
||||
margin: 0;
|
||||
color: var(--muted);
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
.button-link {
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
width: fit-content;
|
||||
min-width: 120px;
|
||||
padding: 12px 18px;
|
||||
border: none;
|
||||
border-radius: 999px;
|
||||
background: var(--accent);
|
||||
color: white;
|
||||
text-decoration: none;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.button-link:hover {
|
||||
filter: brightness(1.04);
|
||||
}
|
||||
|
||||
.button-link.disabled {
|
||||
background: rgba(91, 104, 117, 0.28);
|
||||
color: rgba(31, 41, 51, 0.72);
|
||||
cursor: not-allowed;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
@media (max-width: 640px) {
|
||||
.shell {
|
||||
margin: 24px auto;
|
||||
}
|
||||
|
||||
.panel {
|
||||
padding: 24px;
|
||||
}
|
||||
|
||||
.integration-action-row {
|
||||
align-items: stretch;
|
||||
flex-direction: column;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,16 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="zh-CN">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<title>{% block title %}{{ app_name }}{% endblock %}</title>
|
||||
<link rel="icon" href="data:,">
|
||||
<link rel="stylesheet" href="/static/styles.css">
|
||||
</head>
|
||||
<body>
|
||||
<main class="shell">
|
||||
{% block content %}{% endblock %}
|
||||
</main>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -0,0 +1,117 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}Config · {{ app_name }}{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<section class="panel">
|
||||
<p class="eyebrow">Configuration</p>
|
||||
<h1>Config</h1>
|
||||
|
||||
{% if force_password_change %}
|
||||
<div class="alert">
|
||||
首次登录后需要先修改密码。完成后再继续长期使用当前配置页面。
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if password_change_error %}
|
||||
<div class="alert">{{ password_change_error }}</div>
|
||||
{% endif %}
|
||||
|
||||
{% if config_error %}
|
||||
<div class="alert">{{ config_error }}</div>
|
||||
{% endif %}
|
||||
|
||||
{% if config_saved %}
|
||||
<div class="notice">Config saved to the app database. Some changes may require an app restart.</div>
|
||||
{% endif %}
|
||||
|
||||
{% if ticktick_oauth_error %}
|
||||
<div class="alert">{{ ticktick_oauth_error }}</div>
|
||||
{% endif %}
|
||||
|
||||
{% if ticktick_oauth_notice %}
|
||||
<div class="notice">{{ ticktick_oauth_notice }}</div>
|
||||
{% endif %}
|
||||
|
||||
<div class="meta single-column">
|
||||
<div>
|
||||
<dt>当前用户</dt>
|
||||
<dd>admin</dd>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<section class="config-block">
|
||||
<h2>Change Password</h2>
|
||||
<form class="auth-form" method="post" action="/config/change-password">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token }}">
|
||||
|
||||
<label>
|
||||
<span>Current Password</span>
|
||||
<input type="password" name="current_password" autocomplete="current-password" required>
|
||||
</label>
|
||||
|
||||
<label>
|
||||
<span>New Password</span>
|
||||
<input type="password" name="new_password" autocomplete="new-password" required>
|
||||
</label>
|
||||
|
||||
<label>
|
||||
<span>Confirm New Password</span>
|
||||
<input type="password" name="confirm_password" autocomplete="new-password" required>
|
||||
</label>
|
||||
|
||||
<button type="submit">修改密码</button>
|
||||
</form>
|
||||
</section>
|
||||
|
||||
<section class="config-block">
|
||||
<h2>Config</h2>
|
||||
<form class="config-form" method="post" action="/config">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token }}">
|
||||
|
||||
{% for section in config_sections %}
|
||||
<fieldset class="config-section">
|
||||
<legend>{{ section.name }}</legend>
|
||||
{% for field in section.fields %}
|
||||
<label>
|
||||
<span>{{ field.label }}</span>
|
||||
{% if field.secret %}
|
||||
<input type="{{ field.input_type }}" name="{{ field.env_name }}" value="" placeholder="leave blank to keep current value">
|
||||
<small>{% if field.configured %}configured{% else %}not configured{% endif %}</small>
|
||||
{% else %}
|
||||
<input type="{{ field.input_type }}" name="{{ field.env_name }}" value="{{ field.value }}">
|
||||
{% endif %}
|
||||
</label>
|
||||
{% endfor %}
|
||||
|
||||
{% if section.name == "TickTick" %}
|
||||
<div class="integration-action-row">
|
||||
<div>
|
||||
<p class="integration-action-title">TickTick OAuth</p>
|
||||
<p class="integration-action-copy">Redirect URI: {{ ticktick_redirect_uri or "configure APP_HOSTNAME to generate the callback URI" }}</p>
|
||||
{% if ticktick_oauth_ready %}
|
||||
<p class="integration-action-copy">Use the saved TickTick client settings to start the authorization flow.</p>
|
||||
{% else %}
|
||||
<p class="integration-action-copy">Fill in App Hostname, TickTick Client ID, and TickTick Client Secret before starting OAuth.</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% if ticktick_oauth_ready %}
|
||||
<a class="button-link" href="/ticktick/auth/start">Authorize TickTick</a>
|
||||
{% else %}
|
||||
<span class="button-link disabled" aria-disabled="true">Authorize TickTick</span>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</fieldset>
|
||||
{% endfor %}
|
||||
|
||||
<button type="submit">Save Config</button>
|
||||
</form>
|
||||
</section>
|
||||
|
||||
<form class="logout-form" method="post" action="/logout">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token }}">
|
||||
<button type="submit">登出</button>
|
||||
</form>
|
||||
</section>
|
||||
{% endblock %}
|
||||
@@ -0,0 +1,36 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{{ app_name }}{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<section class="panel">
|
||||
<p class="eyebrow">Python Rewrite Skeleton</p>
|
||||
<h1>{{ app_name }}</h1>
|
||||
<p class="lead">
|
||||
这是当前 Go 后端的 Python 重构基础骨架。此阶段仅提供应用入口、配置、数据库、
|
||||
测试、模板和容器化基础,不包含业务逻辑迁移。
|
||||
</p>
|
||||
<dl class="meta">
|
||||
<div>
|
||||
<dt>运行环境</dt>
|
||||
<dd>{{ app_env }}</dd>
|
||||
</div>
|
||||
<div>
|
||||
<dt>健康检查</dt>
|
||||
<dd><a href="/status">/status</a></dd>
|
||||
</div>
|
||||
<div>
|
||||
<dt>OpenAPI</dt>
|
||||
<dd><a href="/docs">/docs</a></dd>
|
||||
</div>
|
||||
<div>
|
||||
<dt>登录</dt>
|
||||
<dd><a href="/login">/login</a></dd>
|
||||
</div>
|
||||
<div>
|
||||
<dt>Notion</dt>
|
||||
<dd>{{ notion_status }}</dd>
|
||||
</div>
|
||||
</dl>
|
||||
</section>
|
||||
{% endblock %}
|
||||
@@ -0,0 +1,33 @@
|
||||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}登录 · {{ app_name }}{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<section class="panel auth-panel">
|
||||
<p class="eyebrow">Authentication</p>
|
||||
<h1>登录</h1>
|
||||
<p class="lead">
|
||||
登录成功后会进入受保护的 config 页面。
|
||||
</p>
|
||||
|
||||
{% if error_message %}
|
||||
<div class="alert">{{ error_message }}</div>
|
||||
{% endif %}
|
||||
|
||||
<form class="auth-form" method="post" action="/login">
|
||||
<input type="hidden" name="csrf_token" value="{{ csrf_token }}">
|
||||
|
||||
<label>
|
||||
<span>Username</span>
|
||||
<input type="text" name="username" autocomplete="username" required>
|
||||
</label>
|
||||
|
||||
<label>
|
||||
<span>Password</span>
|
||||
<input type="password" name="password" autocomplete="current-password" required>
|
||||
</label>
|
||||
|
||||
<button type="submit">登录</button>
|
||||
</form>
|
||||
</section>
|
||||
{% endblock %}
|
||||
@@ -0,0 +1,6 @@
|
||||
-r requirements.in
|
||||
|
||||
httpx>=0.28,<1.0
|
||||
pip-tools>=7.4,<8.0
|
||||
pytest>=8.3,<9.0
|
||||
|
||||
@@ -0,0 +1,128 @@
|
||||
#
|
||||
# This file is autogenerated by pip-compile with Python 3.13
|
||||
# by the following command:
|
||||
#
|
||||
# pip-compile dev-requirements.in
|
||||
#
|
||||
alembic==1.18.4
|
||||
# via -r requirements.in
|
||||
annotated-types==0.7.0
|
||||
# via pydantic
|
||||
argon2-cffi==25.1.0
|
||||
# via -r requirements.in
|
||||
argon2-cffi-bindings==25.1.0
|
||||
# via argon2-cffi
|
||||
anyio==4.13.0
|
||||
# via
|
||||
# httpx
|
||||
# starlette
|
||||
# watchfiles
|
||||
build==1.4.3
|
||||
# via pip-tools
|
||||
certifi==2026.2.25
|
||||
# via
|
||||
# httpcore
|
||||
# httpx
|
||||
cffi==2.0.0
|
||||
# via argon2-cffi-bindings
|
||||
click==8.3.2
|
||||
# via
|
||||
# pip-tools
|
||||
# uvicorn
|
||||
fastapi==0.115.14
|
||||
# via -r requirements.in
|
||||
greenlet==3.4.0
|
||||
# via sqlalchemy
|
||||
h11==0.16.0
|
||||
# via
|
||||
# httpcore
|
||||
# uvicorn
|
||||
httpcore==1.0.9
|
||||
# via httpx
|
||||
httptools==0.7.1
|
||||
# via uvicorn
|
||||
httpx==0.28.1
|
||||
# via -r dev-requirements.in
|
||||
idna==3.11
|
||||
# via
|
||||
# anyio
|
||||
# httpx
|
||||
iniconfig==2.3.0
|
||||
# via pytest
|
||||
jinja2==3.1.6
|
||||
# via -r requirements.in
|
||||
mako==1.3.11
|
||||
# via alembic
|
||||
markupsafe==3.0.3
|
||||
# via
|
||||
# jinja2
|
||||
# mako
|
||||
packaging==26.1
|
||||
# via
|
||||
# build
|
||||
# pytest
|
||||
# wheel
|
||||
pip-tools==7.5.3
|
||||
# via -r dev-requirements.in
|
||||
pluggy==1.6.0
|
||||
# via pytest
|
||||
pydantic==2.13.2
|
||||
# via
|
||||
# fastapi
|
||||
# pydantic-settings
|
||||
pydantic-core==2.46.2
|
||||
# via pydantic
|
||||
pydantic-settings==2.13.1
|
||||
# via -r requirements.in
|
||||
pygments==2.20.0
|
||||
# via pytest
|
||||
pyproject-hooks==1.2.0
|
||||
# via
|
||||
# build
|
||||
# pip-tools
|
||||
pytest==8.4.2
|
||||
# via -r dev-requirements.in
|
||||
python-dotenv==1.2.2
|
||||
# via
|
||||
# pydantic-settings
|
||||
# uvicorn
|
||||
python-multipart==0.0.26
|
||||
# via -r requirements.in
|
||||
pycparser==2.23
|
||||
# via cffi
|
||||
pyyaml==6.0.3
|
||||
# via
|
||||
# -r requirements.in
|
||||
# uvicorn
|
||||
sqlalchemy==2.0.49
|
||||
# via
|
||||
# -r requirements.in
|
||||
# alembic
|
||||
starlette==0.46.2
|
||||
# via fastapi
|
||||
typing-extensions==4.15.0
|
||||
# via
|
||||
# alembic
|
||||
# fastapi
|
||||
# pydantic
|
||||
# pydantic-core
|
||||
# sqlalchemy
|
||||
# typing-inspection
|
||||
typing-inspection==0.4.2
|
||||
# via
|
||||
# pydantic
|
||||
# pydantic-settings
|
||||
uvicorn[standard]==0.44.0
|
||||
# via -r requirements.in
|
||||
uvloop==0.22.1
|
||||
# via uvicorn
|
||||
watchfiles==1.1.1
|
||||
# via uvicorn
|
||||
websockets==16.0
|
||||
# via uvicorn
|
||||
wheel==0.46.3
|
||||
# via pip-tools
|
||||
|
||||
# The following packages are considered to be unsafe in a requirements file:
|
||||
# pip
|
||||
# setuptools
|
||||
@@ -0,0 +1,12 @@
|
||||
services:
|
||||
app:
|
||||
container_name: home-automation-app
|
||||
build: .
|
||||
user: "1000:1000"
|
||||
restart: unless-stopped
|
||||
init: true
|
||||
ports:
|
||||
- "127.0.0.1:8881:8000"
|
||||
volumes:
|
||||
- ./data:/app/data
|
||||
- ./.env:/app/.env:ro
|
||||
Executable
+9
@@ -0,0 +1,9 @@
|
||||
#!/bin/sh
|
||||
|
||||
set -eu
|
||||
|
||||
python scripts/app_db_adopt.py
|
||||
python scripts/location_db_adopt.py
|
||||
python scripts/poo_db_adopt.py
|
||||
|
||||
exec uvicorn app.main:app --host 0.0.0.0 --port 8000
|
||||
@@ -0,0 +1,94 @@
|
||||
# Python 骨架架构概览
|
||||
|
||||
本文档说明当前 Python skeleton 的职责边界与目录组织。它描述的是“后续迁移承载体”,不是完整业务实现。
|
||||
|
||||
## 当前目标
|
||||
|
||||
这一轮的目标是提供一个稳定、轻量、可持续扩展的基础工程,使后续可以逐步迁移:
|
||||
|
||||
- TickTick integration
|
||||
- Home Assistant integration
|
||||
- poo records
|
||||
- location / life trajectory
|
||||
|
||||
## 目录设计
|
||||
|
||||
### `app/`
|
||||
|
||||
应用核心代码目录。
|
||||
|
||||
- `main.py`
|
||||
- FastAPI app factory
|
||||
- lifespan
|
||||
- 基础路由注册
|
||||
- `config.py`
|
||||
- 环境变量驱动的 settings
|
||||
- `auth_db.py`
|
||||
- app 级共享 auth 数据库
|
||||
- `db.py`
|
||||
- SQLAlchemy engine / session / Base
|
||||
- `dependencies.py`
|
||||
- 通用依赖注入
|
||||
- `api/`
|
||||
- HTTP routes
|
||||
- 当前已迁入 `/login`、`/logout`、`/admin`
|
||||
- 当前已迁入 `POST /homeassistant/publish` 第一版入口
|
||||
- 当前已迁入 `POST /poo/record` 与 `GET /poo/latest`
|
||||
- `models/`
|
||||
- SQLAlchemy models
|
||||
- 当前 `auth`、`location` 与 `poo` 使用各自独立的数据库 base
|
||||
- `schemas/`
|
||||
- Pydantic schemas
|
||||
- `services/`
|
||||
- 业务服务层
|
||||
- 当前已迁入 config page 的 DB 持久化逻辑
|
||||
- `integrations/`
|
||||
- 外部系统适配层
|
||||
- 当前已迁入 Home Assistant outbound adapter
|
||||
- `templates/`
|
||||
- Jinja2 模板
|
||||
- `static/`
|
||||
- 极简静态资源
|
||||
|
||||
### `alembic_location/`
|
||||
|
||||
Location DB 的 migration 基础设施。
|
||||
|
||||
### `alembic_app/`
|
||||
|
||||
App DB 的 migration 基础设施。
|
||||
|
||||
### `alembic_poo/`
|
||||
|
||||
Poo DB 的 migration 基础设施。
|
||||
|
||||
### `tests/`
|
||||
|
||||
pytest 测试目录。后续可以在这里自然扩展:
|
||||
|
||||
- unit tests
|
||||
- mock tests
|
||||
- integration tests
|
||||
|
||||
### `scripts/`
|
||||
|
||||
辅助脚本目录。当前包含 OpenAPI 导出脚本。
|
||||
|
||||
## 当前约束
|
||||
|
||||
- 当前只搭骨架,不迁业务逻辑
|
||||
- 当前数据库继续使用 SQLite
|
||||
- 当前不引入前后端分离
|
||||
- 当前不设计 Notion 模块
|
||||
|
||||
## 关于 Notion
|
||||
|
||||
Notion 在 Go 版本中仍是现状模块,但在 Python 重构中已经明确属于 removed scope。
|
||||
|
||||
因此当前 Python skeleton:
|
||||
|
||||
- 不提供 Notion integration 模块
|
||||
- 不提供 Notion schema
|
||||
- 不预留 Notion 相关业务流
|
||||
|
||||
如果未来需要回顾其历史作用,应继续参考 Go 版本和现有迁移盘点文档,而不是在 Python 骨架中保留它。
|
||||
+120
@@ -0,0 +1,120 @@
|
||||
# 基础鉴权说明
|
||||
|
||||
本文档说明当前 Python 重构项目里已经落地的第一版鉴权基座。
|
||||
|
||||
这一轮只解决:
|
||||
|
||||
- 登录页
|
||||
- 登录 / 登出流程
|
||||
- server-side session
|
||||
- 一个最小受保护页面
|
||||
|
||||
这一轮明确不解决:
|
||||
|
||||
- 完整 config persistence
|
||||
- 完整 config CRUD
|
||||
- 多用户权限系统
|
||||
- OAuth / SSO / RBAC
|
||||
|
||||
## 当前 auth 模型
|
||||
|
||||
- 认证方式:`username/password`
|
||||
- 会话方式:server-side session
|
||||
- 客户端凭据:session cookie
|
||||
- 页面形态:Jinja server-side template
|
||||
|
||||
## 当前持久化
|
||||
|
||||
当前新增一个共享 App DB:
|
||||
|
||||
- `APP_DATABASE_URL`
|
||||
- 默认值:`sqlite:///./data/app.db`
|
||||
|
||||
当前 auth 相关数据存放在这个 DB 中:
|
||||
|
||||
- `auth_users`
|
||||
- `auth_sessions`
|
||||
- `app_config`
|
||||
|
||||
当前没有把 auth 数据和 `location` / `poo` DB 混放。
|
||||
|
||||
当前这部分现在也走 Alembic 管理:
|
||||
|
||||
- Alembic 环境:`alembic_app.ini` + `alembic_app/`
|
||||
- 初始化脚本:`python scripts/app_db_adopt.py`
|
||||
|
||||
当前没有 legacy app DB,所以这一版脚本只负责初始化新库,不负责 legacy adoption。
|
||||
|
||||
`app_config` 现在承接运行时配置持久化。
|
||||
|
||||
其中:
|
||||
|
||||
- `.env` 负责 bootstrap / fallback
|
||||
- `app_config` 表负责运行时配置覆盖
|
||||
- 登录密码仍然属于认证数据,使用 Argon2 哈希,不存进 `app_config`
|
||||
|
||||
## 首次启动与 bootstrap
|
||||
|
||||
如果 auth DB 中还没有任何用户,应用启动时会要求:
|
||||
|
||||
- `AUTH_BOOTSTRAP_USERNAME`
|
||||
- `AUTH_BOOTSTRAP_PASSWORD`
|
||||
|
||||
并创建首个 admin 用户。
|
||||
|
||||
当前默认 bootstrap 值就是:
|
||||
|
||||
- username: `admin`
|
||||
- password: `admin`
|
||||
|
||||
首次登录后,系统会强制要求修改密码。
|
||||
|
||||
如果你希望在首次启动前就覆盖默认值,可以直接设置环境变量:
|
||||
|
||||
- `AUTH_BOOTSTRAP_USERNAME`
|
||||
- `AUTH_BOOTSTRAP_PASSWORD`
|
||||
|
||||
建议流程是:
|
||||
|
||||
1. 配好 `.env`
|
||||
2. 运行 `python scripts/app_db_adopt.py`
|
||||
3. 启动应用
|
||||
4. 用 `admin / admin` 首次登录
|
||||
5. 立即修改密码
|
||||
|
||||
## 安全设计
|
||||
|
||||
当前这版已经落实的基础安全点:
|
||||
|
||||
- 密码不明文存储,使用 Argon2 哈希
|
||||
- session cookie 为 `HttpOnly`
|
||||
- cookie 使用 `SameSite=Lax`
|
||||
- `Secure` cookie 在非 `development` 环境默认开启
|
||||
- 登录表单与登出表单都有基础 CSRF 校验
|
||||
- session token 为随机生成,服务端只持久化 token hash
|
||||
- session 有过期时间与显式失效机制
|
||||
|
||||
## 当前受保护范围
|
||||
|
||||
当前这轮只保护了页面入口:
|
||||
|
||||
- `GET /config`
|
||||
- `POST /config`
|
||||
- `POST /config/change-password`
|
||||
- `POST /logout`
|
||||
|
||||
相关流程:
|
||||
|
||||
- `GET /login`
|
||||
- `POST /login`
|
||||
|
||||
未登录访问 `/config` 时会被重定向到 `/login`。
|
||||
|
||||
## 下一步不在本轮内
|
||||
|
||||
后续可以在这个基座上继续做:
|
||||
|
||||
- 配置页面接入
|
||||
- config persistence
|
||||
- 更细的受保护路由范围
|
||||
- 用户初始化 / 密码轮换的更正式 runbook
|
||||
@@ -0,0 +1,67 @@
|
||||
# Home Assistant Inbound Gateway
|
||||
|
||||
本文档说明当前 Python 项目中已经迁入的 Home Assistant inbound gateway 第一版。
|
||||
|
||||
这里的 inbound 指:
|
||||
|
||||
- Home Assistant 主动调用当前 app 的入口
|
||||
|
||||
当前已恢复的入口是:
|
||||
|
||||
- `POST /homeassistant/publish`
|
||||
|
||||
## Request Envelope
|
||||
|
||||
当前沿用 legacy Go 的 envelope 形状:
|
||||
|
||||
```json
|
||||
{
|
||||
"target": "location_recorder",
|
||||
"action": "record",
|
||||
"content": "{'person': 'alice', 'latitude': '1.23', 'longitude': '4.56'}"
|
||||
}
|
||||
```
|
||||
|
||||
说明:
|
||||
|
||||
- `target`、`action`、`content` 均为必填
|
||||
- unknown field 会被拒绝
|
||||
- `content` 当前仍兼容 legacy 常见的单引号 JSON 字符串风格
|
||||
|
||||
## 当前已支持的 Target / Action
|
||||
|
||||
当前已接回的路径:
|
||||
|
||||
- `location_recorder / record`
|
||||
- `ticktick / create_action_task`
|
||||
|
||||
其中:
|
||||
|
||||
- `location_recorder / record` 会把 `content` 解析为 location recorder 请求,并直接走当前 Python 项目里的 location 写入逻辑
|
||||
- `ticktick / create_action_task` 会沿用 legacy 行为,把 `content` 解析为:
|
||||
- `action: string`
|
||||
- `due_hour: int`
|
||||
- 可选 `title` 字段会被忽略
|
||||
- TickTick task title 仍使用 `action`
|
||||
- due date 仍按 legacy 语义计算:先取 `now + due_hour`,再落到该日期的“次日零点”,最后转成 UTC 后写给 TickTick
|
||||
- 具体 project 仍由 `HOME_ASSISTANT_ACTION_TASK_PROJECT_ID` 提供
|
||||
|
||||
## 当前尚未接回
|
||||
|
||||
以下 legacy 路径在当前阶段还没有迁入:
|
||||
|
||||
- `poo_recorder / get_latest`
|
||||
- 其他未定义 target/action
|
||||
|
||||
这些请求当前会返回:
|
||||
|
||||
- `500 internal server error`
|
||||
|
||||
## 错误处理
|
||||
|
||||
当前策略保持简洁:
|
||||
|
||||
- envelope 非法、缺字段、unknown field、`content` 非法:返回 `400 bad request`
|
||||
- target/action 当前未迁入:返回 `500 internal server error`
|
||||
|
||||
对 caller 的响应体保持简洁,不暴露过多内部细节;更详细原因只写日志。
|
||||
@@ -0,0 +1,51 @@
|
||||
# Home Assistant Outbound Integration
|
||||
|
||||
本文档说明当前 Python 项目中已经迁入的 Home Assistant outbound integration layer。
|
||||
|
||||
这里的 outbound 指:
|
||||
|
||||
- 由当前 app 主动调用 Home Assistant
|
||||
|
||||
当前不包含:
|
||||
|
||||
- `/homeassistant/publish`
|
||||
- Home Assistant inbound command gateway
|
||||
- Home Assistant 驱动当前 app 的入站消息路由
|
||||
|
||||
## 当前已支持能力
|
||||
|
||||
当前 `app/integrations/homeassistant.py` 提供一个轻量的 `HomeAssistantClient`,已支持:
|
||||
|
||||
- 发布 / 更新 sensor state
|
||||
- `POST /api/states/{entity_id}`
|
||||
- 触发 Home Assistant webhook
|
||||
- `POST /api/webhook/{webhook_id}`
|
||||
|
||||
这两项能力是按 legacy Go 中 `util/homeassistantutil/homeassistantutil.go` 的出站行为迁入的。
|
||||
|
||||
## 当前配置
|
||||
|
||||
当前 outbound adapter 依赖以下配置:
|
||||
|
||||
- `HOME_ASSISTANT_BASE_URL`
|
||||
- `HOME_ASSISTANT_AUTH_TOKEN`
|
||||
- `HOME_ASSISTANT_TIMEOUT_SECONDS`
|
||||
|
||||
如果缺少必要配置,client 会直接抛出配置错误,而不是静默跳过。
|
||||
|
||||
## 错误处理策略
|
||||
|
||||
当前策略保持保守和简单:
|
||||
|
||||
- 配置缺失:抛出 `HomeAssistantConfigError`
|
||||
- 参数明显非法:抛出 `ValueError`
|
||||
- Home Assistant 返回非 200/201:抛出 `HomeAssistantRequestError`
|
||||
- 网络请求失败:抛出 `HomeAssistantRequestError`
|
||||
|
||||
当前还没有做:
|
||||
|
||||
- 自动重试
|
||||
- 熔断
|
||||
- 更复杂的 backoff 策略
|
||||
|
||||
这一轮重点是先把 app -> Home Assistant 的出站契约和可复用结构迁进来。
|
||||
@@ -0,0 +1,176 @@
|
||||
# Location Recorder
|
||||
|
||||
本文档说明 `location recorder` 在 Python 项目中的当前数据库接管策略,以及 legacy SQLite 接管 runbook。
|
||||
|
||||
当前 Python 版本的 `POST /location/record` 请求校验策略是:
|
||||
|
||||
- `latitude` 和 `longitude` 为必填,缺失或无法解析成合法数值时返回 `400 bad request`
|
||||
- `altitude` 为可选,缺失或非法时按 `0` 处理
|
||||
- unknown field 仍返回 `400 bad request`
|
||||
- 对 caller 的错误响应保持简洁,不直接暴露底层校验细节;详细原因只写日志
|
||||
|
||||
## Legacy 事实基线
|
||||
|
||||
当前 legacy SQLite 中 `location` 表的真实 schema 为:
|
||||
|
||||
```sql
|
||||
CREATE TABLE location (
|
||||
person TEXT NOT NULL,
|
||||
datetime TEXT NOT NULL,
|
||||
latitude REAL NOT NULL,
|
||||
longitude REAL NOT NULL,
|
||||
altitude REAL,
|
||||
PRIMARY KEY (person, datetime)
|
||||
);
|
||||
```
|
||||
|
||||
历史上 legacy Go 实现使用:
|
||||
|
||||
```sql
|
||||
PRAGMA user_version = 2;
|
||||
```
|
||||
|
||||
这代表旧系统曾依赖 `user_version` 管理 location 数据库版本,但这不再是 Python 项目的长期 migration 机制。
|
||||
|
||||
## 当前策略
|
||||
|
||||
当前采用的最小必要接管方案是:
|
||||
|
||||
1. 把上述 `location` schema 视为 Alembic baseline
|
||||
2. 新数据库通过 Alembic `upgrade head` 初始化
|
||||
3. 已有 legacy SQLite 数据库,只要确认 schema 与 baseline 一致,再通过 `alembic stamp` 接管
|
||||
4. 如果数据库已经存在 `alembic_version`,则必须先确认当前 revision 与项目预期 baseline 一致
|
||||
5. 只有 revision 一致时,才视为该库已经被正确接管
|
||||
6. 未来不再以 `PRAGMA user_version` 作为主 migration 机制
|
||||
|
||||
当前 baseline revision 是:
|
||||
|
||||
- `20260419_01_location_baseline`
|
||||
|
||||
当前提供的最小脚本入口是:
|
||||
|
||||
```bash
|
||||
python scripts/location_db_adopt.py
|
||||
```
|
||||
|
||||
如果你更喜欢模块方式运行,也可以用:
|
||||
|
||||
```bash
|
||||
python -m scripts.location_db_adopt
|
||||
```
|
||||
|
||||
它只针对 `LOCATION_DATABASE_URL` 工作,并且遵守保守接管原则:
|
||||
|
||||
- 本地已有 DB 文件:先校验,再接管
|
||||
- 本地没有 DB 文件:按新库初始化
|
||||
- 任一校验不通过:立即报错并停止
|
||||
|
||||
应用本身在启动时不会自动替你初始化 `location` 数据库。
|
||||
应用启动时会对 `LOCATION_DATABASE_URL` 做只读校验:
|
||||
|
||||
- 文件不存在:直接报错,并提示先运行接管脚本
|
||||
- 文件存在但还没有 `alembic_version`:直接报错,要求先完成 legacy 接管
|
||||
- 文件已被 Alembic 管理但 revision 不匹配:直接报错并拒绝启动
|
||||
|
||||
这是有意为之,用来避免应用在错误路径上静默创建新库,或带着错误数据库版本继续跑业务。
|
||||
|
||||
## 新数据库初始化
|
||||
|
||||
如果本地不存在 `LOCATION_DATABASE_URL` 指向的 DB 文件:
|
||||
|
||||
- 脚本会先创建父目录
|
||||
- 然后执行 Alembic `upgrade head`
|
||||
- 最终建立 `location` 表与 `alembic_version` 表
|
||||
|
||||
手工执行时也等价于:
|
||||
|
||||
```bash
|
||||
alembic upgrade head
|
||||
```
|
||||
|
||||
这会创建与 legacy 相同的 `location` 表结构,并在库中建立 Alembic revision 记录。
|
||||
|
||||
## 旧数据库接管
|
||||
|
||||
对于已经存在的 legacy SQLite 数据库:
|
||||
|
||||
1. 先确认 DB 文件存在
|
||||
2. 如果已经存在 `alembic_version` 表,则先读取当前 revision
|
||||
3. 如果 revision 等于 `20260419_01_location_baseline`,则视为该库已经被 Alembic 正确接管
|
||||
4. 如果 revision 不匹配,立即报错并停止,不做任何自动修复
|
||||
5. 如果还没有 `alembic_version` 表,则读取当前 DB 中 `location` 表的实际 schema
|
||||
6. 与 baseline schema 做严格比对
|
||||
7. 再检查 `PRAGMA user_version`
|
||||
8. 只有 schema 匹配且 `user_version = 2` 时,才执行 Alembic `stamp`
|
||||
9. 接管完成后,后续 migration 才交给 Alembic 管理
|
||||
|
||||
示例:
|
||||
|
||||
```bash
|
||||
LOCATION_DATABASE_URL=sqlite:///./data/locationRecorder.db alembic stamp 20260419_01_location_baseline
|
||||
```
|
||||
|
||||
或直接执行脚本:
|
||||
|
||||
```bash
|
||||
LOCATION_DATABASE_URL=sqlite:///./data/locationRecorder.db python scripts/location_db_adopt.py
|
||||
```
|
||||
|
||||
这样做的含义是:
|
||||
|
||||
- 告诉 Alembic:这个数据库已经处于 baseline 结构
|
||||
- 不修改已有 `location` 表数据
|
||||
- 后续 migration 由 Alembic 接管
|
||||
|
||||
## Fail Closed 原则
|
||||
|
||||
当前策略是保守接管,不做未知 legacy 状态的自动修复。
|
||||
|
||||
如果出现以下任一情况,脚本会直接报错并停止:
|
||||
|
||||
- 找不到 `location` 表
|
||||
- `location` 表 schema 与 baseline 不一致
|
||||
- `PRAGMA user_version` 不等于 `2`
|
||||
- 已有 `alembic_version`,但 revision 与预期 baseline 不一致
|
||||
- 目标 DB 不是 SQLite URL
|
||||
|
||||
当前不会尝试:
|
||||
|
||||
- 自动修表
|
||||
- 自动调整 `user_version`
|
||||
- 自动推断未知 legacy 状态
|
||||
|
||||
如果发生这些情况,应先人工确认数据库状态,再决定是否需要单独迁移或修复。
|
||||
|
||||
## 关于 `data/locationRecorder.db`
|
||||
|
||||
你本地放在 `data/locationRecorder.db` 的 legacy 样本库,可以用于:
|
||||
|
||||
- 人工核对 schema
|
||||
- 手动验证 `stamp` 接管流程
|
||||
- 做开发时的兼容性确认
|
||||
|
||||
但当前代码不应硬依赖这个文件存在。
|
||||
|
||||
## 测试样本的安全使用方式
|
||||
|
||||
如果要用 legacy SQLite 样本做测试或验证,应遵守:
|
||||
|
||||
1. 不直接在原始样本文件上跑测试
|
||||
2. 先复制到临时路径
|
||||
3. 所有 `stamp`、写入、实验性 migration 都只针对副本执行
|
||||
|
||||
自动化测试里当前采用的方式是:
|
||||
|
||||
- 构造一个“legacy 风格”的临时 SQLite 文件
|
||||
- 建出同样的 `location` 表
|
||||
- 设置 `PRAGMA user_version = 2`
|
||||
- 再执行接管脚本中的 adopt 逻辑
|
||||
|
||||
同时也覆盖:
|
||||
|
||||
- DB 文件不存在时的新库初始化路径
|
||||
- schema 不匹配时的失败路径
|
||||
- `user_version` 不匹配时的失败路径
|
||||
|
||||
这样可以验证接管路径,同时不污染真实样本库。
|
||||
@@ -0,0 +1,140 @@
|
||||
# Poo Recorder
|
||||
|
||||
本文档说明 `poo recorder` 在 Python 项目中的当前行为边界,以及 poo SQLite 的 Alembic 接管策略。
|
||||
|
||||
## 当前基线
|
||||
|
||||
当前生产版本中的真实 SQLite schema 为:
|
||||
|
||||
```sql
|
||||
CREATE TABLE poo_records (
|
||||
timestamp TEXT NOT NULL,
|
||||
status TEXT NOT NULL,
|
||||
latitude REAL NOT NULL,
|
||||
longitude REAL NOT NULL,
|
||||
PRIMARY KEY (timestamp)
|
||||
);
|
||||
```
|
||||
|
||||
历史上 legacy Go 实现使用:
|
||||
|
||||
```sql
|
||||
PRAGMA user_version = 1;
|
||||
```
|
||||
|
||||
当前 Python 迁移以这套 schema 为事实基线,不重新设计表结构。
|
||||
|
||||
## 当前已迁入的 API
|
||||
|
||||
当前 Python 项目已经接入:
|
||||
|
||||
- `POST /poo/record`
|
||||
- `GET /poo/latest`
|
||||
|
||||
### `POST /poo/record`
|
||||
|
||||
用途:
|
||||
|
||||
- 记录一条 poo event
|
||||
- 最佳努力地刷新 Home Assistant sensor
|
||||
- 如果配置了 `POO_WEBHOOK_ID`,最佳努力地触发 Home Assistant webhook
|
||||
|
||||
请求体:
|
||||
|
||||
```json
|
||||
{
|
||||
"status": "done",
|
||||
"latitude": "1.23",
|
||||
"longitude": "4.56"
|
||||
}
|
||||
```
|
||||
|
||||
当前策略:
|
||||
|
||||
- unknown field:`400 bad request`
|
||||
- 数值非法:`400 bad request`
|
||||
- 记录成功后,即使 Home Assistant side effect 失败,也不会回滚本地 DB 写入
|
||||
|
||||
### `GET /poo/latest`
|
||||
|
||||
用途:
|
||||
|
||||
- 读取最新一条 poo 记录
|
||||
- 将其重新发布到 Home Assistant sensor
|
||||
|
||||
当前外部行为与 legacy 保持一致:
|
||||
|
||||
- 成功:空响应体,HTTP 200
|
||||
- 如果当前 DB 里还没有任何 poo 记录:仍返回空响应体,HTTP 200,但不会发布 sensor
|
||||
- 真正的发布失败:简洁 `internal server error`
|
||||
|
||||
## Home Assistant side effects
|
||||
|
||||
当前已复用 Python 项目中已有的 Home Assistant outbound adapter。
|
||||
|
||||
当前支持:
|
||||
|
||||
- 发布 / 更新 poo status sensor
|
||||
- 可选触发 webhook
|
||||
|
||||
相关配置:
|
||||
|
||||
- `HOME_ASSISTANT_BASE_URL`
|
||||
- `HOME_ASSISTANT_AUTH_TOKEN`
|
||||
- `HOME_ASSISTANT_TIMEOUT_SECONDS`
|
||||
- `POO_SENSOR_ENTITY_NAME`
|
||||
- `POO_SENSOR_FRIENDLY_NAME`
|
||||
- `POO_WEBHOOK_ID`
|
||||
|
||||
## Alembic 接管策略
|
||||
|
||||
poo 的接管逻辑刻意保持与 location 一致。
|
||||
|
||||
当前 baseline revision:
|
||||
|
||||
- `20260420_01_poo_baseline`
|
||||
|
||||
当前提供的脚本入口:
|
||||
|
||||
```bash
|
||||
python scripts/poo_db_adopt.py
|
||||
```
|
||||
|
||||
或:
|
||||
|
||||
```bash
|
||||
python -m scripts.poo_db_adopt
|
||||
```
|
||||
|
||||
规则如下:
|
||||
|
||||
1. 如果本地不存在 poo DB 文件:
|
||||
- 视为新库初始化
|
||||
- 通过 `alembic_poo upgrade head` 创建新库
|
||||
2. 如果本地已经存在 legacy DB:
|
||||
- 先检查 `poo_records` 表 schema
|
||||
- 再检查 `PRAGMA user_version = 1`
|
||||
- 只有完全匹配,才通过 Alembic `stamp` 接管
|
||||
3. 如果 schema 或 `user_version` 不匹配:
|
||||
- 直接失败
|
||||
- 不自动修复
|
||||
4. 如果数据库已经存在 `alembic_version`:
|
||||
- 只有 revision 与当前 baseline 一致才接受
|
||||
- 否则直接失败
|
||||
|
||||
同时,应用启动时也会对 `POO_DATABASE_URL` 做只读校验:
|
||||
|
||||
- 文件不存在:拒绝启动
|
||||
- DB 尚未被 Alembic 接管:拒绝启动
|
||||
- revision 不匹配:拒绝启动
|
||||
|
||||
## 明确移除 Notion
|
||||
|
||||
这一轮不会迁入任何 Notion 逻辑。
|
||||
|
||||
也就是说,当前 Python 版的 poo recorder:
|
||||
|
||||
- 不保留 Notion adapter
|
||||
- 不保留 Notion sync
|
||||
- 不保留 `tableId` 依赖
|
||||
- 不因为 legacy 中存在 Notion 就继续保留兼容层
|
||||
@@ -0,0 +1,43 @@
|
||||
# TickTick Integration
|
||||
|
||||
当前 Python 项目里的 TickTick 迁移先恢复 legacy 的最核心能力,不额外扩成更大的集成层。
|
||||
|
||||
## 当前已支持
|
||||
|
||||
- 运行时从 config 表读取 TickTick 配置,缺失时仍可 fallback `.env`
|
||||
- `GET /ticktick/auth/start`
|
||||
- 需要已登录 session
|
||||
- 生成 OAuth `state`
|
||||
- 直接重定向到 TickTick 授权页
|
||||
- `GET /ticktick/auth/code`
|
||||
- 校验进程内保存的 `state`
|
||||
- 用 authorization code 换取 access token
|
||||
- 将 `TICKTICK_TOKEN` 持久化到 `app_config` 表
|
||||
- TickTick Open API 基础调用:
|
||||
- 列 project
|
||||
- 列 project 下 task
|
||||
- 创建 task
|
||||
- 按 title 精确匹配做重复创建保护
|
||||
- Home Assistant inbound 已重新接回 `ticktick / create_action_task`
|
||||
|
||||
## 当前配置项
|
||||
|
||||
- `APP_HOSTNAME`
|
||||
- `TICKTICK_CLIENT_ID`
|
||||
- `TICKTICK_CLIENT_SECRET`
|
||||
- `TICKTICK_TOKEN`
|
||||
- `HOME_ASSISTANT_ACTION_TASK_PROJECT_ID`
|
||||
|
||||
## 兼容性说明
|
||||
|
||||
- 仍保留 legacy 的 OAuth authorization code flow
|
||||
- OAuth callback URI 现在由 `APP_HOSTNAME` 和当前环境自动推导:`development` 使用 `http`,其他环境使用 `https`
|
||||
- `state` 仍是进程内临时状态;如果服务在 start 和 callback 之间重启,本轮实现下授权需要重新开始
|
||||
- 不再把 token 写回 `.env` 或其他配置文件,统一写入 config 表
|
||||
- 当前没有引入 legacy 的第三方 TickTick 库,先用标准库完成兼容行为
|
||||
|
||||
## 后续适合单独拆分的工作
|
||||
|
||||
- 给 config 页面增加明确的 TickTick 授权入口
|
||||
- 增加 project 探测或选择能力,减少手工填写 `HOME_ASSISTANT_ACTION_TASK_PROJECT_ID`
|
||||
- 如果后续发现 OAuth/token 生命周期需要更强健,再补 refresh token 或持久化 auth state
|
||||
@@ -1,15 +0,0 @@
|
||||
[program:home_automation_backend]
|
||||
command=
|
||||
directory=
|
||||
user=
|
||||
group=
|
||||
environment=
|
||||
autostart=true
|
||||
autorestart=true
|
||||
startsecs=15
|
||||
startretries=100
|
||||
stopwaitsecs=30
|
||||
redirect_stderr=true
|
||||
stdout_logfile=/var/log/supervisor/%(program_name)s.log
|
||||
stdout_logfile_maxbytes=5MB
|
||||
stdout_logfile_backups=5
|
||||
@@ -1,100 +0,0 @@
|
||||
#!/usr/bin/bash
#
# Manage the home automation backend as a supervisor-controlled service:
# build the Go binary, render the supervisor config template, and register
# (or remove) the program with supervisord.

# --- Argument parsing -------------------------------------------------------
# Exactly one flag is required.
if [[ $# -ne 1 ]]; then
    # Fixed: the bracket list previously omitted --update even though the
    # option is documented right below; a usage error now also exits non-zero
    # instead of 0.
    echo "Usage: $0 [--install|--uninstall|--update|--help]"
    echo "  --install     Install the automation backend"
    echo "  --uninstall   Uninstall the automation backend"
    echo "  --update      Update the installation"
    echo "  --help        Show this help message"
    exit 1
fi

key="$1"
case $key in
    --install)
        INSTALL=true
        ;;
    --uninstall)
        UNINSTALL=true
        ;;
    --update)
        UPDATE=true
        ;;
    --help)
        echo "Usage: $0 [--install|--uninstall|--update|--help]"
        echo "  --install     Install the automation backend"
        echo "  --uninstall   Uninstall the automation backend"
        echo "  --update      Update the installation"
        echo "  --help        Show this help message"
        exit 0
        ;;
    *)
        echo "Invalid argument: $key"
        exit 1
        ;;
esac

# --- Configuration ----------------------------------------------------------
TARGET_DIR="$HOME/.local/home-automation-backend"
SUPERVISOR_CFG_NAME="home_automation_backend"
APP_NAME="home-automation-backend"
SUPERVISOR_CFG="$SUPERVISOR_CFG_NAME.conf"
BASEDIR=$(dirname "$(realpath "$0")")

# Build the binary, render the supervisor config from its template, install
# it under /etc/supervisor/conf.d, and start the program.
install_backend() {
    echo "Installing..."

    # Stop a previously registered instance; ignore the failure that
    # supervisorctl reports on a fresh install (program not yet known).
    sudo supervisorctl stop "$SUPERVISOR_CFG_NAME" || true

    mkdir -p "$TARGET_DIR"
    # Build inside a subshell so the working-directory change does not leak
    # into the rest of this function.
    (cd "$BASEDIR/../src/" && go build -o "$TARGET_DIR/$APP_NAME")

    # Render the template by filling in the empty keys.
    cp "$BASEDIR/${SUPERVISOR_CFG_NAME}_template.conf" "$BASEDIR/$SUPERVISOR_CFG"

    sed -i "s+command=+command=$TARGET_DIR/$APP_NAME serve+g" "$BASEDIR/$SUPERVISOR_CFG"
    sed -i "s+directory=+directory=$TARGET_DIR+g" "$BASEDIR/$SUPERVISOR_CFG"
    sed -i "s+user=+user=$USER+g" "$BASEDIR/$SUPERVISOR_CFG"
    sed -i "s+group=+group=$USER+g" "$BASEDIR/$SUPERVISOR_CFG"
    sed -i "s+environment=+environment=HOME=\"$HOME\"+g" "$BASEDIR/$SUPERVISOR_CFG"

    sudo mv "$BASEDIR/$SUPERVISOR_CFG" "/etc/supervisor/conf.d/$SUPERVISOR_CFG"

    sudo supervisorctl reread
    sudo supervisorctl update
    sudo supervisorctl start "$SUPERVISOR_CFG_NAME"

    echo "Installation complete."
}

# Stop and deregister the program, remove its config and install directory.
# Persistent data under ~/.config/home-automation is intentionally kept.
uninstall_backend() {
    echo "Uninstalling..."

    sudo supervisorctl stop "$SUPERVISOR_CFG_NAME"

    sudo supervisorctl remove "$SUPERVISOR_CFG_NAME"

    sudo rm "/etc/supervisor/conf.d/$SUPERVISOR_CFG"

    # ${TARGET_DIR:?} aborts instead of expanding to "/" if the variable is
    # ever empty, so this can never become "rm -rf /".
    rm -rf "${TARGET_DIR:?}/"

    echo "Uninstallation complete."
    echo "Config files and db is stored in $HOME/.config/home-automation"
}

# Update is a plain uninstall followed by a fresh install.
update_backend() {
    uninstall_backend
    install_backend
}

if [[ $INSTALL ]]; then
    install_backend
elif [[ $UNINSTALL ]]; then
    uninstall_backend
elif [[ $UPDATE ]]; then
    update_backend
else
    echo "Invalid argument: $key"
    exit 1
fi
|
||||
@@ -0,0 +1,494 @@
|
||||
{
|
||||
"openapi": "3.1.0",
|
||||
"info": {
|
||||
"title": "Home Automation Backend (Python)",
|
||||
"description": "Home automation backend with auth, runtime config, Home Assistant integrations, TickTick integration, and SQLite-backed recorders.",
|
||||
"version": "0.1.0"
|
||||
},
|
||||
"paths": {
|
||||
"/status": {
|
||||
"get": {
|
||||
"tags": [
|
||||
"system"
|
||||
],
|
||||
"summary": "Get Status",
|
||||
"operationId": "get_status_status_get",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/StatusResponse"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/login": {
|
||||
"get": {
|
||||
"tags": [
|
||||
"auth"
|
||||
],
|
||||
"summary": "Login Page",
|
||||
"operationId": "login_page_login_get",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"text/html": {
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"post": {
|
||||
"tags": [
|
||||
"auth"
|
||||
],
|
||||
"summary": "Login Submit",
|
||||
"operationId": "login_submit_login_post",
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/x-www-form-urlencoded": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Body_login_submit_login_post"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": true
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"text/html": {
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/config/change-password": {
|
||||
"post": {
|
||||
"tags": [
|
||||
"auth"
|
||||
],
|
||||
"summary": "Change Password Submit",
|
||||
"operationId": "change_password_submit_config_change_password_post",
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/x-www-form-urlencoded": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Body_change_password_submit_config_change_password_post"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": true
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"text/html": {
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/logout": {
|
||||
"post": {
|
||||
"tags": [
|
||||
"auth"
|
||||
],
|
||||
"summary": "Logout",
|
||||
"operationId": "logout_logout_post",
|
||||
"requestBody": {
|
||||
"content": {
|
||||
"application/x-www-form-urlencoded": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Body_logout_logout_post"
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": true
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {}
|
||||
}
|
||||
}
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/": {
|
||||
"get": {
|
||||
"tags": [
|
||||
"pages"
|
||||
],
|
||||
"summary": "Home",
|
||||
"operationId": "home__get",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"text/html": {
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/admin": {
|
||||
"get": {
|
||||
"tags": [
|
||||
"pages"
|
||||
],
|
||||
"summary": "Admin Redirect",
|
||||
"operationId": "admin_redirect_admin_get",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"text/html": {
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/config": {
|
||||
"get": {
|
||||
"tags": [
|
||||
"pages"
|
||||
],
|
||||
"summary": "Config Page",
|
||||
"operationId": "config_page_config_get",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"text/html": {
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"post": {
|
||||
"tags": [
|
||||
"pages"
|
||||
],
|
||||
"summary": "Config Submit",
|
||||
"operationId": "config_submit_config_post",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"text/html": {
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/homeassistant/publish": {
|
||||
"post": {
|
||||
"tags": [
|
||||
"homeassistant"
|
||||
],
|
||||
"summary": "Publish From Homeassistant",
|
||||
"operationId": "publish_from_homeassistant_homeassistant_publish_post",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/location/record": {
|
||||
"post": {
|
||||
"tags": [
|
||||
"location"
|
||||
],
|
||||
"summary": "Create Location Record",
|
||||
"operationId": "create_location_record_location_record_post",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/poo/record": {
|
||||
"post": {
|
||||
"tags": [
|
||||
"poo"
|
||||
],
|
||||
"summary": "Create Poo Record",
|
||||
"operationId": "create_poo_record_poo_record_post",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/poo/latest": {
|
||||
"get": {
|
||||
"tags": [
|
||||
"poo"
|
||||
],
|
||||
"summary": "Notify Latest Poo",
|
||||
"operationId": "notify_latest_poo_poo_latest_get",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/ticktick/auth/start": {
|
||||
"get": {
|
||||
"tags": [
|
||||
"ticktick"
|
||||
],
|
||||
"summary": "Start Ticktick Auth",
|
||||
"operationId": "start_ticktick_auth_ticktick_auth_start_get",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/ticktick/auth/code": {
|
||||
"get": {
|
||||
"tags": [
|
||||
"ticktick"
|
||||
],
|
||||
"summary": "Handle Ticktick Auth Code",
|
||||
"operationId": "handle_ticktick_auth_code_ticktick_auth_code_get",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Body_change_password_submit_config_change_password_post": {
|
||||
"properties": {
|
||||
"current_password": {
|
||||
"type": "string",
|
||||
"title": "Current Password"
|
||||
},
|
||||
"new_password": {
|
||||
"type": "string",
|
||||
"title": "New Password"
|
||||
},
|
||||
"confirm_password": {
|
||||
"type": "string",
|
||||
"title": "Confirm Password"
|
||||
},
|
||||
"csrf_token": {
|
||||
"type": "string",
|
||||
"title": "Csrf Token"
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": [
|
||||
"current_password",
|
||||
"new_password",
|
||||
"confirm_password",
|
||||
"csrf_token"
|
||||
],
|
||||
"title": "Body_change_password_submit_config_change_password_post"
|
||||
},
|
||||
"Body_login_submit_login_post": {
|
||||
"properties": {
|
||||
"username": {
|
||||
"type": "string",
|
||||
"title": "Username"
|
||||
},
|
||||
"password": {
|
||||
"type": "string",
|
||||
"title": "Password"
|
||||
},
|
||||
"csrf_token": {
|
||||
"type": "string",
|
||||
"title": "Csrf Token"
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": [
|
||||
"username",
|
||||
"password",
|
||||
"csrf_token"
|
||||
],
|
||||
"title": "Body_login_submit_login_post"
|
||||
},
|
||||
"Body_logout_logout_post": {
|
||||
"properties": {
|
||||
"csrf_token": {
|
||||
"type": "string",
|
||||
"title": "Csrf Token"
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": [
|
||||
"csrf_token"
|
||||
],
|
||||
"title": "Body_logout_logout_post"
|
||||
},
|
||||
"HTTPValidationError": {
|
||||
"properties": {
|
||||
"detail": {
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ValidationError"
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Detail"
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"title": "HTTPValidationError"
|
||||
},
|
||||
"StatusResponse": {
|
||||
"properties": {
|
||||
"status": {
|
||||
"type": "string",
|
||||
"title": "Status"
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": [
|
||||
"status"
|
||||
],
|
||||
"title": "StatusResponse"
|
||||
},
|
||||
"ValidationError": {
|
||||
"properties": {
|
||||
"loc": {
|
||||
"items": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "integer"
|
||||
}
|
||||
]
|
||||
},
|
||||
"type": "array",
|
||||
"title": "Location"
|
||||
},
|
||||
"msg": {
|
||||
"type": "string",
|
||||
"title": "Message"
|
||||
},
|
||||
"type": {
|
||||
"type": "string",
|
||||
"title": "Error Type"
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": [
|
||||
"loc",
|
||||
"msg",
|
||||
"type"
|
||||
],
|
||||
"title": "ValidationError"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,317 @@
|
||||
openapi: 3.1.0
|
||||
info:
|
||||
title: Home Automation Backend (Python)
|
||||
description: Home automation backend with auth, runtime config, Home Assistant integrations,
|
||||
TickTick integration, and SQLite-backed recorders.
|
||||
version: 0.1.0
|
||||
paths:
|
||||
/status:
|
||||
get:
|
||||
tags:
|
||||
- system
|
||||
summary: Get Status
|
||||
operationId: get_status_status_get
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/StatusResponse'
|
||||
/login:
|
||||
get:
|
||||
tags:
|
||||
- auth
|
||||
summary: Login Page
|
||||
operationId: login_page_login_get
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
text/html:
|
||||
schema:
|
||||
type: string
|
||||
post:
|
||||
tags:
|
||||
- auth
|
||||
summary: Login Submit
|
||||
operationId: login_submit_login_post
|
||||
requestBody:
|
||||
content:
|
||||
application/x-www-form-urlencoded:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Body_login_submit_login_post'
|
||||
required: true
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
text/html:
|
||||
schema:
|
||||
type: string
|
||||
'422':
|
||||
description: Validation Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/HTTPValidationError'
|
||||
/config/change-password:
|
||||
post:
|
||||
tags:
|
||||
- auth
|
||||
summary: Change Password Submit
|
||||
operationId: change_password_submit_config_change_password_post
|
||||
requestBody:
|
||||
content:
|
||||
application/x-www-form-urlencoded:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Body_change_password_submit_config_change_password_post'
|
||||
required: true
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
text/html:
|
||||
schema:
|
||||
type: string
|
||||
'422':
|
||||
description: Validation Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/HTTPValidationError'
|
||||
/logout:
|
||||
post:
|
||||
tags:
|
||||
- auth
|
||||
summary: Logout
|
||||
operationId: logout_logout_post
|
||||
requestBody:
|
||||
content:
|
||||
application/x-www-form-urlencoded:
|
||||
schema:
|
||||
$ref: '#/components/schemas/Body_logout_logout_post'
|
||||
required: true
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
application/json:
|
||||
schema: {}
|
||||
'422':
|
||||
description: Validation Error
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/HTTPValidationError'
|
||||
/:
|
||||
get:
|
||||
tags:
|
||||
- pages
|
||||
summary: Home
|
||||
operationId: home__get
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
text/html:
|
||||
schema:
|
||||
type: string
|
||||
/admin:
|
||||
get:
|
||||
tags:
|
||||
- pages
|
||||
summary: Admin Redirect
|
||||
operationId: admin_redirect_admin_get
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
text/html:
|
||||
schema:
|
||||
type: string
|
||||
/config:
|
||||
get:
|
||||
tags:
|
||||
- pages
|
||||
summary: Config Page
|
||||
operationId: config_page_config_get
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
text/html:
|
||||
schema:
|
||||
type: string
|
||||
post:
|
||||
tags:
|
||||
- pages
|
||||
summary: Config Submit
|
||||
operationId: config_submit_config_post
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
text/html:
|
||||
schema:
|
||||
type: string
|
||||
/homeassistant/publish:
|
||||
post:
|
||||
tags:
|
||||
- homeassistant
|
||||
summary: Publish From Homeassistant
|
||||
operationId: publish_from_homeassistant_homeassistant_publish_post
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
application/json:
|
||||
schema: {}
|
||||
/location/record:
|
||||
post:
|
||||
tags:
|
||||
- location
|
||||
summary: Create Location Record
|
||||
operationId: create_location_record_location_record_post
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
application/json:
|
||||
schema: {}
|
||||
/poo/record:
|
||||
post:
|
||||
tags:
|
||||
- poo
|
||||
summary: Create Poo Record
|
||||
operationId: create_poo_record_poo_record_post
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
application/json:
|
||||
schema: {}
|
||||
/poo/latest:
|
||||
get:
|
||||
tags:
|
||||
- poo
|
||||
summary: Notify Latest Poo
|
||||
operationId: notify_latest_poo_poo_latest_get
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
application/json:
|
||||
schema: {}
|
||||
/ticktick/auth/start:
|
||||
get:
|
||||
tags:
|
||||
- ticktick
|
||||
summary: Start Ticktick Auth
|
||||
operationId: start_ticktick_auth_ticktick_auth_start_get
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
application/json:
|
||||
schema: {}
|
||||
/ticktick/auth/code:
|
||||
get:
|
||||
tags:
|
||||
- ticktick
|
||||
summary: Handle Ticktick Auth Code
|
||||
operationId: handle_ticktick_auth_code_ticktick_auth_code_get
|
||||
responses:
|
||||
'200':
|
||||
description: Successful Response
|
||||
content:
|
||||
application/json:
|
||||
schema: {}
|
||||
components:
|
||||
schemas:
|
||||
Body_change_password_submit_config_change_password_post:
|
||||
properties:
|
||||
current_password:
|
||||
type: string
|
||||
title: Current Password
|
||||
new_password:
|
||||
type: string
|
||||
title: New Password
|
||||
confirm_password:
|
||||
type: string
|
||||
title: Confirm Password
|
||||
csrf_token:
|
||||
type: string
|
||||
title: Csrf Token
|
||||
type: object
|
||||
required:
|
||||
- current_password
|
||||
- new_password
|
||||
- confirm_password
|
||||
- csrf_token
|
||||
title: Body_change_password_submit_config_change_password_post
|
||||
Body_login_submit_login_post:
|
||||
properties:
|
||||
username:
|
||||
type: string
|
||||
title: Username
|
||||
password:
|
||||
type: string
|
||||
title: Password
|
||||
csrf_token:
|
||||
type: string
|
||||
title: Csrf Token
|
||||
type: object
|
||||
required:
|
||||
- username
|
||||
- password
|
||||
- csrf_token
|
||||
title: Body_login_submit_login_post
|
||||
Body_logout_logout_post:
|
||||
properties:
|
||||
csrf_token:
|
||||
type: string
|
||||
title: Csrf Token
|
||||
type: object
|
||||
required:
|
||||
- csrf_token
|
||||
title: Body_logout_logout_post
|
||||
HTTPValidationError:
|
||||
properties:
|
||||
detail:
|
||||
items:
|
||||
$ref: '#/components/schemas/ValidationError'
|
||||
type: array
|
||||
title: Detail
|
||||
type: object
|
||||
title: HTTPValidationError
|
||||
StatusResponse:
|
||||
properties:
|
||||
status:
|
||||
type: string
|
||||
title: Status
|
||||
type: object
|
||||
required:
|
||||
- status
|
||||
title: StatusResponse
|
||||
ValidationError:
|
||||
properties:
|
||||
loc:
|
||||
items:
|
||||
anyOf:
|
||||
- type: string
|
||||
- type: integer
|
||||
type: array
|
||||
title: Location
|
||||
msg:
|
||||
type: string
|
||||
title: Message
|
||||
type:
|
||||
type: string
|
||||
title: Error Type
|
||||
type: object
|
||||
required:
|
||||
- loc
|
||||
- msg
|
||||
- type
|
||||
title: ValidationError
|
||||
@@ -0,0 +1,28 @@
|
||||
[build-system]
|
||||
requires = ["setuptools>=68", "wheel"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "home-automation-python"
|
||||
version = "0.1.0"
|
||||
description = "Home automation backend with auth, integrations, and SQLite-backed services."
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
|
||||
[tool.setuptools]
|
||||
packages = [
|
||||
"app",
|
||||
"app.api",
|
||||
"app.api.routes",
|
||||
"app.integrations",
|
||||
"app.models",
|
||||
"app.schemas",
|
||||
"app.services",
|
||||
]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = ["tests"]
|
||||
pythonpath = ["."]
|
||||
|
||||
[tool.ruff]
|
||||
line-length = 100
|
||||
@@ -0,0 +1,9 @@
|
||||
alembic>=1.14,<2.0
|
||||
argon2-cffi>=25.1,<26.0
|
||||
fastapi>=0.115,<0.116
|
||||
jinja2>=3.1,<4.0
|
||||
pydantic-settings>=2.6,<3.0
|
||||
python-multipart>=0.0.12,<1.0
|
||||
pyyaml>=6.0,<7.0
|
||||
sqlalchemy>=2.0,<3.0
|
||||
uvicorn[standard]>=0.32,<1.0
|
||||
@@ -0,0 +1,86 @@
|
||||
#
|
||||
# This file is autogenerated by pip-compile with Python 3.13
|
||||
# by the following command:
|
||||
#
|
||||
# pip-compile requirements.in
|
||||
#
|
||||
alembic==1.18.4
|
||||
# via -r requirements.in
|
||||
annotated-types==0.7.0
|
||||
# via pydantic
|
||||
argon2-cffi==25.1.0
|
||||
# via -r requirements.in
|
||||
argon2-cffi-bindings==25.1.0
|
||||
# via argon2-cffi
|
||||
anyio==4.13.0
|
||||
# via
|
||||
# starlette
|
||||
# watchfiles
|
||||
cffi==2.0.0
|
||||
# via argon2-cffi-bindings
|
||||
click==8.3.2
|
||||
# via uvicorn
|
||||
fastapi==0.115.14
|
||||
# via -r requirements.in
|
||||
greenlet==3.4.0
|
||||
# via sqlalchemy
|
||||
h11==0.16.0
|
||||
# via uvicorn
|
||||
httptools==0.7.1
|
||||
# via uvicorn
|
||||
idna==3.11
|
||||
# via anyio
|
||||
jinja2==3.1.6
|
||||
# via -r requirements.in
|
||||
mako==1.3.11
|
||||
# via alembic
|
||||
markupsafe==3.0.3
|
||||
# via
|
||||
# jinja2
|
||||
# mako
|
||||
pydantic==2.13.2
|
||||
# via
|
||||
# fastapi
|
||||
# pydantic-settings
|
||||
pydantic-core==2.46.2
|
||||
# via pydantic
|
||||
pydantic-settings==2.13.1
|
||||
# via -r requirements.in
|
||||
python-dotenv==1.2.2
|
||||
# via
|
||||
# pydantic-settings
|
||||
# uvicorn
|
||||
python-multipart==0.0.26
|
||||
# via -r requirements.in
|
||||
pycparser==2.23
|
||||
# via cffi
|
||||
pyyaml==6.0.3
|
||||
# via
|
||||
# -r requirements.in
|
||||
# uvicorn
|
||||
sqlalchemy==2.0.49
|
||||
# via
|
||||
# -r requirements.in
|
||||
# alembic
|
||||
starlette==0.46.2
|
||||
# via fastapi
|
||||
typing-extensions==4.15.0
|
||||
# via
|
||||
# alembic
|
||||
# fastapi
|
||||
# pydantic
|
||||
# pydantic-core
|
||||
# sqlalchemy
|
||||
# typing-inspection
|
||||
typing-inspection==0.4.2
|
||||
# via
|
||||
# pydantic
|
||||
# pydantic-settings
|
||||
uvicorn[standard]==0.44.0
|
||||
# via -r requirements.in
|
||||
uvloop==0.22.1
|
||||
# via uvicorn
|
||||
watchfiles==1.1.1
|
||||
# via uvicorn
|
||||
websockets==16.0
|
||||
# via uvicorn
|
||||
@@ -0,0 +1 @@
|
||||
"""Project helper scripts."""
|
||||
@@ -0,0 +1,134 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from alembic import command
|
||||
from alembic.config import Config
|
||||
|
||||
PROJECT_ROOT = Path(__file__).resolve().parents[1]
|
||||
if str(PROJECT_ROOT) not in sys.path:
|
||||
sys.path.insert(0, str(PROJECT_ROOT))
|
||||
|
||||
from app.config import get_settings
|
||||
|
||||
APP_BASELINE_REVISION = "20260420_04_app_config_table"
|
||||
|
||||
|
||||
class AppDatabaseAdoptionError(RuntimeError):
|
||||
"""Raised when the app database is missing or not managed as expected."""
|
||||
|
||||
|
||||
def _database_path_from_url(database_url: str) -> Path:
|
||||
prefix = "sqlite:///"
|
||||
if not database_url.startswith(prefix):
|
||||
raise AppDatabaseAdoptionError(
|
||||
f"Only sqlite URLs are supported for app DB initialization, got: {database_url}"
|
||||
)
|
||||
return Path(database_url[len(prefix) :])
|
||||
|
||||
|
||||
def _make_alembic_config(database_url: str) -> Config:
    """Build an Alembic Config for the app DB, pointed at *database_url*.

    ``alembic_app.ini`` lives in the project root; its sqlalchemy.url is
    overridden so migrations target the configured database file.
    """
    alembic_config = Config("alembic_app.ini")
    alembic_config.set_main_option("sqlalchemy.url", database_url)
    return alembic_config
|
||||
|
||||
|
||||
def _alembic_version_table_exists(database_path: Path) -> bool:
|
||||
conn = sqlite3.connect(database_path)
|
||||
try:
|
||||
row = conn.execute(
|
||||
"SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = 'alembic_version'"
|
||||
).fetchone()
|
||||
return row is not None
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _fetch_alembic_revision(database_path: Path) -> str:
|
||||
conn = sqlite3.connect(database_path)
|
||||
try:
|
||||
row = conn.execute("SELECT version_num FROM alembic_version").fetchone()
|
||||
if row is None:
|
||||
raise AppDatabaseAdoptionError("Alembic version table exists but contains no revision")
|
||||
return row[0]
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _list_user_tables(database_path: Path) -> list[str]:
|
||||
conn = sqlite3.connect(database_path)
|
||||
try:
|
||||
rows = conn.execute(
|
||||
"""
|
||||
SELECT name
|
||||
FROM sqlite_master
|
||||
WHERE type = 'table'
|
||||
AND name NOT LIKE 'sqlite_%'
|
||||
"""
|
||||
).fetchall()
|
||||
return sorted(row[0] for row in rows)
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def validate_app_runtime_db(database_url: str) -> None:
    """Fail fast unless the app DB exists and sits at the expected revision.

    Intended as a startup guard: raises AppDatabaseAdoptionError with a
    remediation hint (run ``scripts/app_db_adopt.py``) whenever the DB file
    is absent, not yet Alembic-managed, or at an unexpected revision.
    """
    database_path = _database_path_from_url(database_url)

    if not database_path.exists():
        raise AppDatabaseAdoptionError(
            "App DB file was not found. Run 'python scripts/app_db_adopt.py' first to "
            "initialize the app DB before starting the app."
        )
    if not _alembic_version_table_exists(database_path):
        raise AppDatabaseAdoptionError(
            "App DB exists but is not yet Alembic-managed. Run "
            "'python scripts/app_db_adopt.py' first before starting the app."
        )

    current_revision = _fetch_alembic_revision(database_path)
    if current_revision == APP_BASELINE_REVISION:
        return
    raise AppDatabaseAdoptionError(
        "App DB revision mismatch. Refusing to start the app: "
        f"expected {APP_BASELINE_REVISION}, got {current_revision}"
    )
|
||||
|
||||
|
||||
def adopt_or_initialize_app_db(database_url: str) -> str:
    """Bring the app DB under Alembic management, creating it if needed.

    Returns one of:
      - "already_managed": DB exists and is at the expected baseline revision.
      - "upgraded": DB was Alembic-managed at another revision and has been
        migrated to head.
      - "initialized": no DB file existed; a fresh one was created via
        'upgrade head'.

    Raises AppDatabaseAdoptionError when an existing DB has user tables but
    no alembic_version table (there is no legacy adoption path for the app DB).
    """
    database_path = _database_path_from_url(database_url)
    alembic_config = _make_alembic_config(database_url)

    if database_path.exists():
        if _alembic_version_table_exists(database_path):
            current_revision = _fetch_alembic_revision(database_path)
            if current_revision == APP_BASELINE_REVISION:
                return "already_managed"
            # NOTE(review): unlike the location/poo adoption scripts, a
            # revision mismatch here is resolved by upgrading to head rather
            # than refusing — presumably intentional (the app DB owns real
            # migrations), but worth confirming.
            command.upgrade(alembic_config, "head")
            return "upgraded"

        # DB file exists but Alembic has never touched it; refuse rather than
        # risk stamping over unknown schema.
        existing_tables = _list_user_tables(database_path)
        if existing_tables:
            raise AppDatabaseAdoptionError(
                "App DB exists with unmanaged tables. Refusing to continue because there is "
                "no legacy app DB adoption path in this revision."
            )

    # Fresh (or empty) database: ensure the parent directory exists and run
    # the full migration chain.
    database_path.parent.mkdir(parents=True, exist_ok=True)
    command.upgrade(alembic_config, "head")
    return "initialized"
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: adopt/initialize the app DB and report the outcome."""
    settings = get_settings()
    result = adopt_or_initialize_app_db(settings.app_database_url)
    messages = {
        "initialized": "Initialized a new app DB via Alembic upgrade head.",
        "upgraded": "Upgraded existing app DB to the expected Alembic head revision.",
    }
    default_message = "App DB is already Alembic-managed at the expected baseline revision."
    print(messages.get(result, default_message))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,32 @@
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
|
||||
PROJECT_ROOT = Path(__file__).resolve().parents[1]
|
||||
if str(PROJECT_ROOT) not in sys.path:
|
||||
sys.path.insert(0, str(PROJECT_ROOT))
|
||||
|
||||
from app.main import create_app
|
||||
|
||||
|
||||
def main() -> None:
    """Render the FastAPI OpenAPI schema to openapi/openapi.{json,yaml}."""
    application = create_app()
    output_dir = PROJECT_ROOT / "openapi"
    output_dir.mkdir(parents=True, exist_ok=True)

    schema = application.openapi()

    json_path = output_dir / "openapi.json"
    json_path.write_text(json.dumps(schema, ensure_ascii=False, indent=2), encoding="utf-8")

    # sort_keys=False keeps the YAML in the same order FastAPI emits.
    yaml_path = output_dir / "openapi.yaml"
    yaml_path.write_text(
        yaml.safe_dump(schema, allow_unicode=True, sort_keys=False),
        encoding="utf-8",
    )

    print(f"Wrote {json_path}")
    print(f"Wrote {yaml_path}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,177 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from alembic import command
|
||||
from alembic.config import Config
|
||||
|
||||
PROJECT_ROOT = Path(__file__).resolve().parents[1]
|
||||
if str(PROJECT_ROOT) not in sys.path:
|
||||
sys.path.insert(0, str(PROJECT_ROOT))
|
||||
|
||||
from app.config import get_settings
|
||||
|
||||
LOCATION_BASELINE_REVISION = "20260419_01_location_baseline"
|
||||
EXPECTED_USER_VERSION = 2
|
||||
EXPECTED_LOCATION_TABLE_INFO = [
|
||||
(0, "person", "TEXT", 1, None, 1),
|
||||
(1, "datetime", "TEXT", 1, None, 2),
|
||||
(2, "latitude", "REAL", 1, None, 0),
|
||||
(3, "longitude", "REAL", 1, None, 0),
|
||||
(4, "altitude", "REAL", 0, None, 0),
|
||||
]
|
||||
|
||||
|
||||
class LocationDatabaseAdoptionError(RuntimeError):
|
||||
"""Raised when a legacy location database does not match the expected baseline."""
|
||||
|
||||
|
||||
def _database_path_from_url(database_url: str) -> Path:
|
||||
prefix = "sqlite:///"
|
||||
if not database_url.startswith(prefix):
|
||||
raise LocationDatabaseAdoptionError(
|
||||
f"Only sqlite URLs are supported for location DB adoption, got: {database_url}"
|
||||
)
|
||||
return Path(database_url[len(prefix) :])
|
||||
|
||||
|
||||
def _make_alembic_config(database_url: str) -> Config:
|
||||
config = Config("alembic_location.ini")
|
||||
config.set_main_option("sqlalchemy.url", database_url)
|
||||
return config
|
||||
|
||||
|
||||
def _location_table_exists(database_path: Path) -> bool:
|
||||
conn = sqlite3.connect(database_path)
|
||||
try:
|
||||
row = conn.execute(
|
||||
"SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = 'location'"
|
||||
).fetchone()
|
||||
return row is not None
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _alembic_version_table_exists(database_path: Path) -> bool:
|
||||
conn = sqlite3.connect(database_path)
|
||||
try:
|
||||
row = conn.execute(
|
||||
"SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = 'alembic_version'"
|
||||
).fetchone()
|
||||
return row is not None
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _fetch_alembic_revision(database_path: Path) -> str:
|
||||
conn = sqlite3.connect(database_path)
|
||||
try:
|
||||
row = conn.execute("SELECT version_num FROM alembic_version").fetchone()
|
||||
if row is None:
|
||||
raise LocationDatabaseAdoptionError(
|
||||
"Alembic version table exists but contains no revision"
|
||||
)
|
||||
return row[0]
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _fetch_location_table_info(database_path: Path) -> list[tuple]:
|
||||
conn = sqlite3.connect(database_path)
|
||||
try:
|
||||
return list(conn.execute("PRAGMA table_info(location)"))
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _fetch_user_version(database_path: Path) -> int:
|
||||
conn = sqlite3.connect(database_path)
|
||||
try:
|
||||
return conn.execute("PRAGMA user_version").fetchone()[0]
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def validate_legacy_location_db(database_url: str) -> None:
    """Verify a pre-Alembic location DB matches the expected baseline.

    Checks, in order: the file exists, the 'location' table is present, its
    PRAGMA table_info matches EXPECTED_LOCATION_TABLE_INFO exactly, and the
    DB's PRAGMA user_version equals EXPECTED_USER_VERSION. Raises
    LocationDatabaseAdoptionError on the first failed check; returns None
    when the DB is safe to stamp with the Alembic baseline revision.
    """
    database_path = _database_path_from_url(database_url)
    if not database_path.exists():
        raise LocationDatabaseAdoptionError(f"Location DB file does not exist: {database_path}")

    if not _location_table_exists(database_path):
        raise LocationDatabaseAdoptionError("Expected table 'location' was not found in the DB")

    # Exact tuple comparison: any column rename, type change, or nullability
    # difference from the known legacy schema fails adoption.
    table_info = _fetch_location_table_info(database_path)
    if table_info != EXPECTED_LOCATION_TABLE_INFO:
        raise LocationDatabaseAdoptionError(
            "Location table schema does not match the expected baseline schema"
        )

    # user_version was the legacy migration marker before Alembic took over.
    user_version = _fetch_user_version(database_path)
    if user_version != EXPECTED_USER_VERSION:
        raise LocationDatabaseAdoptionError(
            f"Expected PRAGMA user_version = {EXPECTED_USER_VERSION}, got {user_version}"
        )
|
||||
|
||||
|
||||
def validate_location_runtime_db(database_url: str) -> None:
|
||||
database_path = _database_path_from_url(database_url)
|
||||
if not database_path.exists():
|
||||
raise LocationDatabaseAdoptionError(
|
||||
"Location DB file was not found. Run 'python scripts/location_db_adopt.py' "
|
||||
"first to initialize or adopt the location DB before starting the app."
|
||||
)
|
||||
|
||||
if not _alembic_version_table_exists(database_path):
|
||||
raise LocationDatabaseAdoptionError(
|
||||
"Location DB exists but is not yet Alembic-managed. Run "
|
||||
"'python scripts/location_db_adopt.py' first to adopt the legacy DB "
|
||||
"before starting the app."
|
||||
)
|
||||
|
||||
current_revision = _fetch_alembic_revision(database_path)
|
||||
if current_revision != LOCATION_BASELINE_REVISION:
|
||||
raise LocationDatabaseAdoptionError(
|
||||
"Location DB revision mismatch. Refusing to start the app: "
|
||||
f"expected {LOCATION_BASELINE_REVISION}, got {current_revision}"
|
||||
)
|
||||
|
||||
|
||||
def adopt_or_initialize_location_db(database_url: str) -> str:
    """Adopt a legacy location DB into Alembic, or create a fresh one.

    Returns one of:
      - "already_managed": DB is Alembic-managed at the expected baseline.
      - "adopted": legacy DB validated against the baseline schema and
        stamped with the baseline revision (no schema changes performed).
      - "initialized": no DB file existed; a fresh one was created via
        'upgrade head'.

    Raises LocationDatabaseAdoptionError when a managed DB is at an
    unexpected revision, or when legacy validation fails.
    """
    database_path = _database_path_from_url(database_url)
    alembic_config = _make_alembic_config(database_url)

    if database_path.exists():
        if _alembic_version_table_exists(database_path):
            # Already managed: only the exact baseline revision is accepted;
            # anything else means a mismatched migration history.
            current_revision = _fetch_alembic_revision(database_path)
            if current_revision != LOCATION_BASELINE_REVISION:
                raise LocationDatabaseAdoptionError(
                    "Location DB is already Alembic-managed but revision does not match "
                    f"the expected baseline: expected {LOCATION_BASELINE_REVISION}, "
                    f"got {current_revision}"
                )
            return "already_managed"

        # Legacy DB: validate the schema first, then stamp (not upgrade) —
        # the data and tables are kept exactly as they are.
        validate_legacy_location_db(database_url)
        command.stamp(alembic_config, LOCATION_BASELINE_REVISION)
        return "adopted"

    # No DB file at all: create the directory and run the migration chain.
    database_path.parent.mkdir(parents=True, exist_ok=True)
    command.upgrade(alembic_config, "head")
    return "initialized"
|
||||
|
||||
|
||||
def main() -> None:
|
||||
settings = get_settings()
|
||||
result = adopt_or_initialize_location_db(settings.location_database_url)
|
||||
if result == "initialized":
|
||||
print("Initialized a new location DB via Alembic upgrade head.")
|
||||
elif result == "already_managed":
|
||||
print("Location DB is already Alembic-managed at the expected baseline revision.")
|
||||
else:
|
||||
print("Validated legacy location DB and stamped Alembic baseline successfully.")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,172 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from alembic import command
|
||||
from alembic.config import Config
|
||||
|
||||
PROJECT_ROOT = Path(__file__).resolve().parents[1]
|
||||
if str(PROJECT_ROOT) not in sys.path:
|
||||
sys.path.insert(0, str(PROJECT_ROOT))
|
||||
|
||||
from app.config import get_settings
|
||||
|
||||
POO_BASELINE_REVISION = "20260420_01_poo_baseline"
|
||||
EXPECTED_USER_VERSION = 1
|
||||
EXPECTED_POO_TABLE_INFO = [
|
||||
(0, "timestamp", "TEXT", 1, None, 1),
|
||||
(1, "status", "TEXT", 1, None, 0),
|
||||
(2, "latitude", "REAL", 1, None, 0),
|
||||
(3, "longitude", "REAL", 1, None, 0),
|
||||
]
|
||||
|
||||
|
||||
class PooDatabaseAdoptionError(RuntimeError):
|
||||
"""Raised when a legacy poo database does not match the expected baseline."""
|
||||
|
||||
|
||||
def _database_path_from_url(database_url: str) -> Path:
|
||||
prefix = "sqlite:///"
|
||||
if not database_url.startswith(prefix):
|
||||
raise PooDatabaseAdoptionError(
|
||||
f"Only sqlite URLs are supported for poo DB adoption, got: {database_url}"
|
||||
)
|
||||
return Path(database_url[len(prefix) :])
|
||||
|
||||
|
||||
def _make_alembic_config(database_url: str) -> Config:
|
||||
config = Config("alembic_poo.ini")
|
||||
config.set_main_option("sqlalchemy.url", database_url)
|
||||
return config
|
||||
|
||||
|
||||
def _poo_table_exists(database_path: Path) -> bool:
|
||||
conn = sqlite3.connect(database_path)
|
||||
try:
|
||||
row = conn.execute(
|
||||
"SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = 'poo_records'"
|
||||
).fetchone()
|
||||
return row is not None
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _alembic_version_table_exists(database_path: Path) -> bool:
|
||||
conn = sqlite3.connect(database_path)
|
||||
try:
|
||||
row = conn.execute(
|
||||
"SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = 'alembic_version'"
|
||||
).fetchone()
|
||||
return row is not None
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _fetch_alembic_revision(database_path: Path) -> str:
|
||||
conn = sqlite3.connect(database_path)
|
||||
try:
|
||||
row = conn.execute("SELECT version_num FROM alembic_version").fetchone()
|
||||
if row is None:
|
||||
raise PooDatabaseAdoptionError("Alembic version table exists but contains no revision")
|
||||
return row[0]
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _fetch_poo_table_info(database_path: Path) -> list[tuple]:
|
||||
conn = sqlite3.connect(database_path)
|
||||
try:
|
||||
return list(conn.execute("PRAGMA table_info(poo_records)"))
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def _fetch_user_version(database_path: Path) -> int:
|
||||
conn = sqlite3.connect(database_path)
|
||||
try:
|
||||
return conn.execute("PRAGMA user_version").fetchone()[0]
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
|
||||
def validate_legacy_poo_db(database_url: str) -> None:
    """Verify a pre-Alembic poo DB matches the expected baseline.

    Checks, in order: the file exists, the 'poo_records' table is present,
    its PRAGMA table_info matches EXPECTED_POO_TABLE_INFO exactly, and the
    DB's PRAGMA user_version equals EXPECTED_USER_VERSION. Raises
    PooDatabaseAdoptionError on the first failed check; returns None when
    the DB is safe to stamp with the Alembic baseline revision.
    """
    database_path = _database_path_from_url(database_url)
    if not database_path.exists():
        raise PooDatabaseAdoptionError(f"Poo DB file does not exist: {database_path}")

    if not _poo_table_exists(database_path):
        raise PooDatabaseAdoptionError("Expected table 'poo_records' was not found in the DB")

    # Exact tuple comparison against the known legacy column layout.
    table_info = _fetch_poo_table_info(database_path)
    if table_info != EXPECTED_POO_TABLE_INFO:
        raise PooDatabaseAdoptionError("Poo table schema does not match the expected baseline")

    # user_version was the legacy migration marker before Alembic took over.
    user_version = _fetch_user_version(database_path)
    if user_version != EXPECTED_USER_VERSION:
        raise PooDatabaseAdoptionError(
            f"Expected PRAGMA user_version = {EXPECTED_USER_VERSION}, got {user_version}"
        )
|
||||
|
||||
|
||||
def validate_poo_runtime_db(database_url: str) -> None:
|
||||
database_path = _database_path_from_url(database_url)
|
||||
if not database_path.exists():
|
||||
raise PooDatabaseAdoptionError(
|
||||
"Poo DB file was not found. Run 'python scripts/poo_db_adopt.py' first to "
|
||||
"initialize or adopt the poo DB before starting the app."
|
||||
)
|
||||
|
||||
if not _alembic_version_table_exists(database_path):
|
||||
raise PooDatabaseAdoptionError(
|
||||
"Poo DB exists but is not yet Alembic-managed. Run "
|
||||
"'python scripts/poo_db_adopt.py' first to adopt the legacy DB "
|
||||
"before starting the app."
|
||||
)
|
||||
|
||||
current_revision = _fetch_alembic_revision(database_path)
|
||||
if current_revision != POO_BASELINE_REVISION:
|
||||
raise PooDatabaseAdoptionError(
|
||||
"Poo DB revision mismatch. Refusing to start the app: "
|
||||
f"expected {POO_BASELINE_REVISION}, got {current_revision}"
|
||||
)
|
||||
|
||||
|
||||
def adopt_or_initialize_poo_db(database_url: str) -> str:
    """Adopt a legacy poo DB into Alembic, or create a fresh one.

    Returns one of:
      - "already_managed": DB is Alembic-managed at the expected baseline.
      - "adopted": legacy DB validated against the baseline schema and
        stamped with the baseline revision (no schema changes performed).
      - "initialized": no DB file existed; a fresh one was created via
        'upgrade head'.

    Raises PooDatabaseAdoptionError when a managed DB is at an unexpected
    revision, or when legacy validation fails.
    """
    database_path = _database_path_from_url(database_url)
    alembic_config = _make_alembic_config(database_url)

    if database_path.exists():
        if _alembic_version_table_exists(database_path):
            # Already managed: only the exact baseline revision is accepted.
            current_revision = _fetch_alembic_revision(database_path)
            if current_revision != POO_BASELINE_REVISION:
                raise PooDatabaseAdoptionError(
                    "Poo DB is already Alembic-managed but revision does not match "
                    f"the expected baseline: expected {POO_BASELINE_REVISION}, "
                    f"got {current_revision}"
                )
            return "already_managed"

        # Legacy DB: validate the schema first, then stamp (not upgrade) —
        # existing data and tables are kept exactly as they are.
        validate_legacy_poo_db(database_url)
        command.stamp(alembic_config, POO_BASELINE_REVISION)
        return "adopted"

    # No DB file at all: create the directory and run the migration chain.
    database_path.parent.mkdir(parents=True, exist_ok=True)
    command.upgrade(alembic_config, "head")
    return "initialized"
|
||||
|
||||
|
||||
def main() -> None:
|
||||
settings = get_settings()
|
||||
result = adopt_or_initialize_poo_db(settings.poo_database_url)
|
||||
if result == "initialized":
|
||||
print("Initialized a new poo DB via Alembic upgrade head.")
|
||||
elif result == "already_managed":
|
||||
print("Poo DB is already Alembic-managed at the expected baseline revision.")
|
||||
else:
|
||||
print("Validated legacy poo DB and stamped Alembic baseline successfully.")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -1,41 +0,0 @@
|
||||
/*
|
||||
Copyright © 2024 Tianyu Liu
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// rootCmd represents the base command when called without any subcommands
|
||||
var rootCmd = &cobra.Command{
|
||||
Use: "home-automation-backend",
|
||||
Short: "This is the entry point of the home automation backend",
|
||||
Long: `Home automation backend is a RESTful API server that provides
|
||||
automation features for may devices.`,
|
||||
// Uncomment the following line if your bare application
|
||||
// has an action associated with it:
|
||||
// Run: func(cmd *cobra.Command, args []string) { },
|
||||
}
|
||||
|
||||
// Execute adds all child commands to the root command and sets flags appropriately.
|
||||
// This is called by main.main(). It only needs to happen once to the rootCmd.
|
||||
func Execute() {
|
||||
err := rootCmd.Execute()
|
||||
if err != nil {
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
func init() {
|
||||
// Here you will define your flags and configuration settings.
|
||||
// Cobra supports persistent flags, which, if defined here,
|
||||
// will be global for your application.
|
||||
|
||||
// rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.home-automation-backend.yaml)")
|
||||
|
||||
// Cobra also supports local flags, which will only run
|
||||
// when this action is called directly.
|
||||
}
|
||||
@@ -1,161 +0,0 @@
|
||||
/*
|
||||
Copyright © 2024 Tianyu Liu
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/go-co-op/gocron/v2"
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/viper"
|
||||
"github.com/t-liu93/home-automation-backend/components/homeassistant"
|
||||
"github.com/t-liu93/home-automation-backend/components/locationRecorder"
|
||||
"github.com/t-liu93/home-automation-backend/components/pooRecorder"
|
||||
"github.com/t-liu93/home-automation-backend/util/notion"
|
||||
"github.com/t-liu93/home-automation-backend/util/ticktickutil"
|
||||
)
|
||||
|
||||
var (
|
||||
port string
|
||||
scheduler gocron.Scheduler
|
||||
ticktick ticktickutil.TicktickUtil
|
||||
ha *homeassistant.HomeAssistant
|
||||
)
|
||||
|
||||
// serveCmd represents the serve command
|
||||
var serveCmd = &cobra.Command{
|
||||
Use: "serve",
|
||||
Short: "Server automation backend",
|
||||
Run: serve,
|
||||
}
|
||||
|
||||
func initUtil() {
|
||||
// init notion
|
||||
if viper.InConfig("notion.token") {
|
||||
notion.Init(viper.GetString("notion.token"))
|
||||
} else {
|
||||
slog.Error("Notion token not found in config file, exiting..")
|
||||
os.Exit(1)
|
||||
}
|
||||
// init ticktick
|
||||
ticktick = ticktickutil.Init()
|
||||
}
|
||||
|
||||
// initComponent starts the feature components. Order matters: the poo
// recorder registers jobs on the shared scheduler, and the Home Assistant
// handler needs the TickTick util produced earlier by initUtil().
func initComponent() {
	// init pooRecorder
	pooRecorder.Init(&scheduler)
	// init location recorder
	locationRecorder.Init()
	// init homeassistant
	ha = homeassistant.NewHomeAssistant(ticktick)
}
|
||||
|
||||
func serve(cmd *cobra.Command, args []string) {
|
||||
slog.Info("Starting server..")
|
||||
|
||||
viper.SetConfigName("config") // name of config file (without extension)
|
||||
viper.SetConfigType("yaml")
|
||||
viper.AddConfigPath(".") // . is used for dev
|
||||
viper.AddConfigPath("$HOME/.config/home-automation")
|
||||
err := viper.ReadInConfig()
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintf("Cannot read config file, %s, exiting..", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
viper.WatchConfig()
|
||||
viper.SetDefault("logLevel", "info")
|
||||
logLevelCfg := viper.GetString("logLevel")
|
||||
switch logLevelCfg {
|
||||
case "debug":
|
||||
slog.SetLogLoggerLevel(slog.LevelDebug)
|
||||
case "info":
|
||||
slog.SetLogLoggerLevel(slog.LevelInfo)
|
||||
case "warn":
|
||||
slog.SetLogLoggerLevel(slog.LevelWarn)
|
||||
case "error":
|
||||
slog.SetLogLoggerLevel(slog.LevelError)
|
||||
}
|
||||
|
||||
if viper.InConfig("port") {
|
||||
port = viper.GetString("port")
|
||||
} else {
|
||||
slog.Error("Port not found in config file, exiting..")
|
||||
os.Exit(1)
|
||||
}
|
||||
scheduler, err = gocron.NewScheduler()
|
||||
defer scheduler.Shutdown()
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintf("Cannot create scheduler, %s, exiting..", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
initUtil()
|
||||
initComponent()
|
||||
scheduler.Start()
|
||||
|
||||
// routing
|
||||
router := mux.NewRouter()
|
||||
router.HandleFunc("/status", func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Write([]byte("OK"))
|
||||
}).Methods("GET")
|
||||
|
||||
router.HandleFunc("/poo/latest", pooRecorder.HandleNotifyLatestPoo).Methods("GET")
|
||||
router.HandleFunc("/poo/record", pooRecorder.HandleRecordPoo).Methods("POST")
|
||||
router.HandleFunc("/homeassistant/publish", ha.HandleHaMessage).Methods("POST")
|
||||
|
||||
router.HandleFunc("/location/record", locationRecorder.HandleRecordLocation).Methods("POST")
|
||||
|
||||
router.HandleFunc("/ticktick/auth/code", ticktick.HandleAuthCode).Methods("GET")
|
||||
|
||||
srv := &http.Server{
|
||||
Addr: ":" + port,
|
||||
Handler: router,
|
||||
}
|
||||
|
||||
stop := make(chan os.Signal, 1)
|
||||
signal.Notify(stop, syscall.SIGINT, syscall.SIGTERM)
|
||||
|
||||
go func() {
|
||||
if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed {
|
||||
slog.Error(fmt.Sprintf("ListenAndServe error: %v", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
}()
|
||||
|
||||
slog.Info(fmt.Sprintln("Server started on port", port))
|
||||
|
||||
<-stop
|
||||
|
||||
slog.Info(fmt.Sprintln("Shutting down the server..."))
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
defer cancel()
|
||||
|
||||
if err := srv.Shutdown(ctx); err != nil {
|
||||
slog.Error(fmt.Sprintf("Server Shutdown Failed:%+v", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
slog.Info(fmt.Sprintln("Server gracefully stopped"))
|
||||
}
|
||||
|
||||
// init registers the serve subcommand on the root command and declares its
// local --port/-p flag (default "18881").
func init() {
	rootCmd.AddCommand(serveCmd)

	// Here you will define your flags and configuration settings.

	// Cobra supports Persistent Flags which will work for this command
	// and all subcommands, e.g.:
	// serveCmd.PersistentFlags().String("foo", "", "A help for foo")

	// Cobra supports local flags which will only run when this command
	// is called directly, e.g.:
	// serveCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
	serveCmd.Flags().StringVarP(&port, "port", "p", "18881", "Port to listen on")
}
|
||||
@@ -1,152 +0,0 @@
|
||||
package homeassistant
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/spf13/viper"
|
||||
"github.com/t-liu93/home-automation-backend/util/ticktickutil"
|
||||
)
|
||||
|
||||
// haMessage is the envelope Home Assistant POSTs to /homeassistant/publish:
// Target selects the component ("poo_recorder", "location_recorder",
// "ticktick"), Action selects the operation within it, and Content carries an
// action-specific payload (JSON-ish text using single quotes — the handlers
// convert ' to " before decoding).
type haMessage struct {
	Target  string `json:"target"`
	Action  string `json:"action"`
	Content string `json:"content"`
}

// HomeAssistant dispatches incoming Home Assistant messages to the backend
// components; it holds the TickTick helper used for task-creation actions.
type HomeAssistant struct {
	ticktickUtil ticktickutil.TicktickUtil
}

// actionTask is the decoded Content of a "create_action_task" message:
// Action becomes the task title, DueHour is an offset in hours from now
// used to pick the due day.
type actionTask struct {
	Action  string `json:"action"`
	DueHour int    `json:"due_hour"`
}
|
||||
|
||||
func NewHomeAssistant(ticktick ticktickutil.TicktickUtil) *HomeAssistant {
|
||||
return &HomeAssistant{
|
||||
ticktickUtil: ticktick,
|
||||
}
|
||||
}
|
||||
|
||||
func (ha *HomeAssistant) HandleHaMessage(w http.ResponseWriter, r *http.Request) {
|
||||
var message haMessage
|
||||
decoder := json.NewDecoder(r.Body)
|
||||
decoder.DisallowUnknownFields()
|
||||
err := decoder.Decode(&message)
|
||||
if err != nil {
|
||||
slog.Warn(fmt.Sprintln("homeassistant.HandleHaMessage: Error decoding request body", err))
|
||||
http.Error(w, "", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
switch message.Target {
|
||||
case "poo_recorder":
|
||||
res := ha.handlePooRecorderMsg(message)
|
||||
if !res {
|
||||
slog.Warn(fmt.Sprintln("homeassistant.HandleHaMessage: Error handling poo recorder message"))
|
||||
http.Error(w, "", http.StatusInternalServerError)
|
||||
}
|
||||
case "location_recorder":
|
||||
res := ha.handleLocationRecorderMsg(message)
|
||||
if !res {
|
||||
slog.Warn(fmt.Sprintln("homeassistant.HandleHaMessage: Error handling location recorder message"))
|
||||
http.Error(w, "", http.StatusInternalServerError)
|
||||
}
|
||||
case "ticktick":
|
||||
res := ha.handleTicktickMsg(message)
|
||||
if !res {
|
||||
slog.Warn(fmt.Sprintln("homeassistant.HandleHaMessage: Error handling ticktick message"))
|
||||
http.Error(w, "", http.StatusInternalServerError)
|
||||
}
|
||||
default:
|
||||
slog.Warn(fmt.Sprintln("homeassistant.HandleHaMessage: Unknown target", message.Target))
|
||||
http.Error(w, "", http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
|
||||
func (ha *HomeAssistant) handlePooRecorderMsg(message haMessage) bool {
|
||||
switch message.Action {
|
||||
case "get_latest":
|
||||
return ha.handleGetLatestPoo()
|
||||
default:
|
||||
slog.Warn(fmt.Sprintln("homeassistant.handlePooRecorderMsg: Unknown action", message.Action))
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func (ha *HomeAssistant) handleLocationRecorderMsg(message haMessage) bool {
|
||||
if message.Action == "record" {
|
||||
port := viper.GetString("port")
|
||||
client := &http.Client{
|
||||
Timeout: time.Second * 1,
|
||||
}
|
||||
_, err := client.Post("http://localhost:"+port+"/location/record", "application/json", strings.NewReader(strings.ReplaceAll(message.Content, "'", "\"")))
|
||||
if err != nil {
|
||||
slog.Warn(fmt.Sprintln("homeassistant.handleLocationRecorderMsg: Error sending request to location recorder", err))
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
slog.Warn(fmt.Sprintln("homeassistant.handleLocationRecorderMsg: Unknown action", message.Action))
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func (ha *HomeAssistant) handleTicktickMsg(message haMessage) bool {
|
||||
switch message.Action {
|
||||
case "create_action_task":
|
||||
return ha.createActionTask(message)
|
||||
default:
|
||||
slog.Warn(fmt.Sprintln("homeassistant.handleTicktickMsg: Unknown action", message.Action))
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func (ha *HomeAssistant) handleGetLatestPoo() bool {
|
||||
client := &http.Client{
|
||||
Timeout: time.Second * 1,
|
||||
}
|
||||
port := viper.GetString("port")
|
||||
_, err := client.Get("http://localhost:" + port + "/poo/latest")
|
||||
if err != nil {
|
||||
slog.Warn(fmt.Sprintln("homeassistant.handleGetLatestPoo: Error sending request to poo recorder", err))
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// createActionTask turns a Home Assistant "create_action_task" message into a
// TickTick task in the configured project. The task title is the decoded
// Action; the due date is the midnight after (now + DueHour hours), local
// time, converted to UTC in TickTick's layout. Returns false on missing
// config, bad payload, or TickTick API failure.
func (ha *HomeAssistant) createActionTask(message haMessage) bool {
	if !viper.IsSet("homeassistant.actionTaskProjectId") {
		slog.Warn("homeassistant.createActionTask: actionTaskProjectId not found in config file")
		return false
	}
	projectId := viper.GetString("homeassistant.actionTaskProjectId")
	// Content arrives single-quoted (presumably from an HA template);
	// rewrite to valid JSON before decoding.
	detail := strings.ReplaceAll(message.Content, "'", "\"")
	var task actionTask
	err := json.Unmarshal([]byte(detail), &task)
	if err != nil {
		slog.Warn(fmt.Sprintln("homeassistant.createActionTask: Error unmarshalling", err))
		return false
	}
	dueHour := task.DueHour
	// Shift "now" by the requested hours, then snap the due date to the next
	// local midnight of that shifted day.
	due := time.Now().Add(time.Hour * time.Duration(dueHour))
	dueNextMidnight := time.Date(due.Year(), due.Month(), due.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, 1)
	dueTicktick := dueNextMidnight.UTC().Format(ticktickutil.DateTimeLayout)
	ticktickTask := ticktickutil.Task{
		ProjectId: projectId,
		Title:     task.Action,
		DueDate:   dueTicktick,
	}
	err = ha.ticktickUtil.CreateTask(ticktickTask)
	if err != nil {
		slog.Warn(fmt.Sprintf("homeassistant.createActionTask: Error creating task %s", err))
		return false
	}
	return true
}
|
||||
@@ -1,280 +0,0 @@
|
||||
package homeassistant
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/spf13/viper"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
"github.com/t-liu93/home-automation-backend/util/ticktickutil"
|
||||
)
|
||||
|
||||
var (
	// loggerText captures slog output during tests; assertions inspect it and
	// loggerSetupTeardown's teardown resets it between tests.
	loggerText = new(bytes.Buffer)
)
|
||||
|
||||
// MockTicktickUtil is a testify mock implementing ticktickutil.TicktickUtil
// for the HomeAssistant tests.
type MockTicktickUtil struct {
	mock.Mock
}
|
||||
|
||||
// HandleAuthCode records the call; no behavior is mocked beyond that.
func (m *MockTicktickUtil) HandleAuthCode(w http.ResponseWriter, r *http.Request) {
	m.Called(w, r)
}
|
||||
|
||||
// GetTasks returns whatever slice the test configured via On("GetTasks", ...).
func (m *MockTicktickUtil) GetTasks(projectId string) []ticktickutil.Task {
	args := m.Called(projectId)
	return args.Get(0).([]ticktickutil.Task)
}
|
||||
|
||||
func (m *MockTicktickUtil) HasDuplicateTask(projectId string, taskTitile string) bool {
|
||||
args := m.Called(projectId, taskTitile)
|
||||
return args.Bool(0)
|
||||
}
|
||||
|
||||
// CreateTask records the task and returns the error the test configured via
// On("CreateTask", ...).
func (m *MockTicktickUtil) CreateTask(task ticktickutil.Task) error {
	args := m.Called(task)
	return args.Error(0)
}
|
||||
|
||||
// SetupTearDown prepares a test: it redirects slog into loggerText, builds a
// HomeAssistant with a fresh (unconfigured) mock TickTick util, and returns a
// teardown that restores the logger and resets viper. The *testing.T
// parameter is currently unused but kept for call-site uniformity.
func SetupTearDown(t *testing.T) (func(), *HomeAssistant) {
	loggertearDown := loggerSetupTeardown()
	mockTicktick := &MockTicktickUtil{}
	ha := NewHomeAssistant(mockTicktick)

	return func() {
		loggertearDown()
		viper.Reset()
	}, ha
}
|
||||
|
||||
// loggerSetupTeardown swaps the process-wide slog default for a text handler
// writing into loggerText, and returns a function that restores the previous
// default logger and clears the captured output.
func loggerSetupTeardown() func() {
	logger := slog.New(slog.NewTextHandler(loggerText, nil))
	defaultLogger := slog.Default()
	slog.SetDefault(logger)

	return func() {
		slog.SetDefault(defaultLogger)
		loggerText.Reset()
	}
}
|
||||
|
||||
// TestHandleHaMessageJsonDecodeError verifies that a malformed JSON body
// yields 500 and logs the decode error.
func TestHandleHaMessageJsonDecodeError(t *testing.T) {
	teardown, ha := SetupTearDown(t)
	defer teardown()

	// The "content" value is an unterminated string literal.
	invalidRequestBody := ` { "target": "poo_recorder", "action": "get_latest", "content": " }`
	req := httptest.NewRequest(http.MethodPost, "/homeassistant/publish", strings.NewReader(invalidRequestBody))
	w := httptest.NewRecorder()

	ha.HandleHaMessage(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, loggerText.String(), "homeassistant.HandleHaMessage: Error decoding request body")
}
|
||||
|
||||
// TestHandlePooRecorderMsgGetLatest verifies the happy path: a get_latest
// message triggers a GET /poo/latest against the configured port (pointed at
// a local stub server here) and the handler answers 200 without logging.
func TestHandlePooRecorderMsgGetLatest(t *testing.T) {
	teardown, ha := SetupTearDown(t)
	defer teardown()
	requestBody := `{"target": "poo_recorder", "action": "get_latest", "content": ""}`
	req := httptest.NewRequest(http.MethodPost, "/homeassistant/publish", strings.NewReader(requestBody))
	w := httptest.NewRecorder()
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		assert.Equal(t, http.MethodGet, r.Method)
		assert.Equal(t, "/poo/latest", r.URL.Path)
	}))
	defer server.Close()
	// server.URL is "http://127.0.0.1:PORT"; index 2 is the port.
	port := strings.Split(server.URL, ":")[2]
	viper.Set("port", port)

	ha.HandleHaMessage(w, req)
	assert.Equal(t, http.StatusOK, w.Code)
	assert.Empty(t, loggerText.String())
}
|
||||
|
||||
// TestHandlePooRecorderMsgUnknownAction verifies that an unsupported poo
// recorder action yields 500 and logs the unknown-action warning.
func TestHandlePooRecorderMsgUnknownAction(t *testing.T) {
	teardown, ha := SetupTearDown(t)
	defer teardown()

	requestBody := `{"target": "poo_recorder", "action": "unknown_action", "content": ""}`
	req := httptest.NewRequest(http.MethodPost, "/homeassistant/publish", strings.NewReader(requestBody))
	w := httptest.NewRecorder()

	ha.HandleHaMessage(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, loggerText.String(), "homeassistant.handlePooRecorderMsg: Unknown action")
}
|
||||
|
||||
// TestHandlePooRecorderMsgGetLatestError verifies that a failing internal
// GET (forced via an unusable port value) yields 500 and logs the error.
func TestHandlePooRecorderMsgGetLatestError(t *testing.T) {
	teardown, ha := SetupTearDown(t)
	defer teardown()

	requestBody := `{"target": "poo_recorder", "action": "get_latest", "content": ""}`
	req := httptest.NewRequest(http.MethodPost, "/homeassistant/publish", strings.NewReader(requestBody))
	w := httptest.NewRecorder()

	// An invalid port makes the internal http.Get fail immediately.
	port := "invalid port"
	viper.Set("port", port)

	ha.HandleHaMessage(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, loggerText.String(), "homeassistant.handleGetLatestPoo: Error sending request to poo recorder")
}
|
||||
|
||||
// TestHandleLocationRecorderMsg verifies the happy path: a record message is
// forwarded as a JSON POST to /location/record (a local stub server here)
// and the handler answers 200 without logging.
func TestHandleLocationRecorderMsg(t *testing.T) {
	teardown, ha := SetupTearDown(t)
	defer teardown()

	// Content uses single quotes; the handler rewrites them to double quotes.
	requestBody := `{"target": "location_recorder", "action": "record", "content": "{'person': 'test', 'latitude': '1.0', 'longitude': '2.0', 'altitude': '3.0'}"}`
	req := httptest.NewRequest(http.MethodPost, "/homeassistant/publish", strings.NewReader(requestBody))
	w := httptest.NewRecorder()
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		assert.Equal(t, http.MethodPost, r.Method)
		assert.Equal(t, "/location/record", r.URL.Path)
		assert.Equal(t, "application/json", r.Header.Get("Content-Type"))
	}))
	defer server.Close()

	port := strings.Split(server.URL, ":")[2]
	viper.Set("port", port)

	ha.HandleHaMessage(w, req)
	assert.Equal(t, http.StatusOK, w.Code)
	assert.Empty(t, loggerText.String())
}
|
||||
|
||||
// TestHandleLocationRecorderMsgUnknownAction verifies that an unsupported
// location recorder action yields 500 and logs the unknown-action warning.
func TestHandleLocationRecorderMsgUnknownAction(t *testing.T) {
	teardown, ha := SetupTearDown(t)
	defer teardown()

	requestBody := `{"target": "location_recorder", "action": "unknown_action", "content": ""}`
	req := httptest.NewRequest(http.MethodPost, "/homeassistant/publish", strings.NewReader(requestBody))
	w := httptest.NewRecorder()

	ha.HandleHaMessage(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, loggerText.String(), "homeassistant.handleLocationRecorderMsg: Unknown action")
}
|
||||
|
||||
// TestHandleLocationRecorderMsgRequestErr verifies that a failing internal
// POST (forced via an unusable port value) yields 500 and logs the error.
func TestHandleLocationRecorderMsgRequestErr(t *testing.T) {
	teardown, ha := SetupTearDown(t)
	defer teardown()

	requestBody := `{"target": "location_recorder", "action": "record", "content": "{'person': 'test', 'latitude': '1.0', 'longitude': '2.0', 'altitude': '3.0'}"}`
	req := httptest.NewRequest(http.MethodPost, "/homeassistant/publish", strings.NewReader(requestBody))
	w := httptest.NewRecorder()

	// An invalid port makes the internal http.Post fail immediately.
	port := "invalid port"
	viper.Set("port", port)

	ha.HandleHaMessage(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, loggerText.String(), "homeassistant.handleLocationRecorderMsg: Error sending request to location recorder")
}
|
||||
|
||||
// TestHandleTicktickMsgCreateActionTask verifies the happy path: a
// create_action_task message causes exactly one CreateTask call with the
// expected project id, title, and due date (next midnight after now+12h,
// mirroring the production date math).
func TestHandleTicktickMsgCreateActionTask(t *testing.T) {
	teardown, _ := SetupTearDown(t)
	defer teardown()
	const expectedProjectId = "test_project_id"
	const dueHour = 12
	// Recompute the due date exactly as createActionTask does.
	due := time.Now().Add(time.Hour * time.Duration(dueHour))
	dueNextMidnight := time.Date(due.Year(), due.Month(), due.Day(), 0, 0, 0, 0, time.Local).AddDate(0, 0, 1)
	dueTicktick := dueNextMidnight.UTC().Format(ticktickutil.DateTimeLayout)

	requestBody := `{"target": "ticktick", "action": "create_action_task", "content": "{'title': 'test', 'action': 'test_action', 'due_hour': 12}"}`
	req := httptest.NewRequest(http.MethodPost, "/homeassistant/publish", strings.NewReader(requestBody))
	w := httptest.NewRecorder()

	// A dedicated mock (not the one from SetupTearDown) so CreateTask can be
	// stubbed and asserted against.
	mockTicktick := &MockTicktickUtil{}
	mockTicktick.On("CreateTask", mock.Anything).Return(nil)
	ha := NewHomeAssistant(mockTicktick)
	viper.Set("homeassistant.actionTaskProjectId", expectedProjectId)
	ha.HandleHaMessage(w, req)
	expectedTask := ticktickutil.Task{
		Title:     "test_action",
		DueDate:   dueTicktick,
		ProjectId: expectedProjectId,
	}
	mockTicktick.AssertCalled(t, "CreateTask", expectedTask)
	mockTicktick.AssertNumberOfCalls(t, "CreateTask", 1)
	assert.Equal(t, http.StatusOK, w.Code)
	assert.Empty(t, loggerText.String())
}
|
||||
|
||||
// TestHandleTicktickMsgUnknownAction verifies that an unsupported ticktick
// action yields 500 and logs the unknown-action warning.
func TestHandleTicktickMsgUnknownAction(t *testing.T) {
	teardown, ha := SetupTearDown(t)
	defer teardown()

	requestBody := `{"target": "ticktick", "action": "unknown_action", "content": ""}`
	req := httptest.NewRequest(http.MethodPost, "/homeassistant/publish", strings.NewReader(requestBody))
	w := httptest.NewRecorder()

	ha.HandleHaMessage(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, loggerText.String(), "homeassistant.handleTicktickMsg: Unknown action")
}
|
||||
|
||||
// TestHandleTicktickMsgProjectIdUnset verifies that a missing
// actionTaskProjectId config key yields 500 and logs the missing-key warning.
func TestHandleTicktickMsgProjectIdUnset(t *testing.T) {
	teardown, ha := SetupTearDown(t)
	defer teardown()

	requestBody := `{"target": "ticktick", "action": "create_action_task", "content": "{'title': 'test', 'action': 'test_action', 'due_hour': 12}"}`
	req := httptest.NewRequest(http.MethodPost, "/homeassistant/publish", strings.NewReader(requestBody))
	w := httptest.NewRecorder()

	ha.HandleHaMessage(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, loggerText.String(), "homeassistant.createActionTask: actionTaskProjectId not found in config file")
}
|
||||
|
||||
// TestHandleTicktickMsgJsonError verifies that a content payload that is not
// valid JSON (after quote rewriting) yields 500 and logs the unmarshal error.
func TestHandleTicktickMsgJsonError(t *testing.T) {
	teardown, ha := SetupTearDown(t)
	defer teardown()

	// 'tes is an unterminated value inside content.
	invalidRequestBody := ` { "target": "ticktick", "action": "create_action_task", "content": "{'title': 'tes, 'action': 'test_action', 'due_hour': 12}"}`
	req := httptest.NewRequest(http.MethodPost, "/homeassistant/publish", strings.NewReader(invalidRequestBody))
	w := httptest.NewRecorder()
	viper.Set("homeassistant.actionTaskProjectId", "some project id")
	ha.HandleHaMessage(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, loggerText.String(), "homeassistant.createActionTask: Error unmarshalling")
}
|
||||
|
||||
// TestHandleTicktickMsgTicktickUtilErr verifies that a CreateTask failure
// from the TickTick util yields 500 and logs the creation error.
func TestHandleTicktickMsgTicktickUtilErr(t *testing.T) {
	teardown, _ := SetupTearDown(t)
	defer teardown()

	requestBody := `{"target": "ticktick", "action": "create_action_task", "content": "{'title': 'test', 'action': 'test_action', 'due_hour': 12}"}`
	req := httptest.NewRequest(http.MethodPost, "/homeassistant/publish", strings.NewReader(requestBody))
	w := httptest.NewRecorder()
	// Dedicated mock whose CreateTask is stubbed to fail.
	mockedTicktickUtil := &MockTicktickUtil{}
	viper.Set("homeassistant.actionTaskProjectId", "some project id")

	mockedTicktickUtil.On("CreateTask", mock.Anything).Return(errors.New("some error"))

	ha := NewHomeAssistant(mockedTicktickUtil)

	ha.HandleHaMessage(w, req)

	mockedTicktickUtil.AssertCalled(t, "CreateTask", mock.Anything)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, loggerText.String(), "homeassistant.createActionTask: Error creating task")
}
|
||||
|
||||
// TestHandleHaMessageUnknownTarget verifies that a message with an
// unrecognized target yields 500 and logs the unknown-target warning.
func TestHandleHaMessageUnknownTarget(t *testing.T) {
	teardown, ha := SetupTearDown(t)
	defer teardown()

	requestBody := `{"target": "unknown_target", "action": "record", "content": ""}`
	req := httptest.NewRequest(http.MethodPost, "/homeassistant/publish", strings.NewReader(requestBody))
	w := httptest.NewRecorder()

	ha.HandleHaMessage(w, req)
	assert.Equal(t, http.StatusInternalServerError, w.Code)
	assert.Contains(t, loggerText.String(), "homeassistant.HandleHaMessage: Unknown target")
}
|
||||
@@ -1,194 +0,0 @@
|
||||
package locationRecorder
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"os"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
var (
	// db is the location recorder's SQLite handle, opened in initDb().
	db *sql.DB
)
|
||||
|
||||
const (
	// currentDBVersion is the schema version (SQLite user_version) this build
	// expects after migrations have run.
	currentDBVersion = 2
)
|
||||
|
||||
// Location mirrors a row of the location table. DateTime is stored as an
// RFC3339 UTC string; Altitude is nullable in the schema, hence
// sql.NullFloat64.
type Location struct {
	Person    string          `json:"person"`
	DateTime  string          `json:"datetime"`
	Latitude  float64         `json:"latitude"`
	Longitude float64         `json:"longitude"`
	Altitude  sql.NullFloat64 `json:"altitude,omitempty"`
}
|
||||
|
||||
// LocationContent is the wire format accepted by /location/record. All
// coordinates arrive as strings and are parsed to float64 by the handler;
// Altitude is optional.
type LocationContent struct {
	Person    string `json:"person"`
	Latitude  string `json:"latitude"`
	Longitude string `json:"longitude"`
	Altitude  string `json:"altitude,omitempty"`
}
|
||||
|
||||
// Init prepares the location recorder: it opens the SQLite database and runs
// any pending schema migrations.
func Init() {
	initDb()
}
|
||||
|
||||
func HandleRecordLocation(w http.ResponseWriter, r *http.Request) {
|
||||
var location LocationContent
|
||||
|
||||
decoder := json.NewDecoder(r.Body)
|
||||
decoder.DisallowUnknownFields()
|
||||
err := decoder.Decode(&location)
|
||||
if err != nil {
|
||||
slog.Warn(fmt.Sprintln("HandleRecordLocation Error decoding request body", err))
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
latiF64, _ := strconv.ParseFloat(location.Latitude, 64)
|
||||
longiF64, _ := strconv.ParseFloat(location.Longitude, 64)
|
||||
altiF64, _ := strconv.ParseFloat(location.Altitude, 64)
|
||||
InsertLocationNow(location.Person, latiF64, longiF64, altiF64)
|
||||
}
|
||||
|
||||
// InsertLocation stores one location row, normalizing the timestamp to UTC
// RFC3339. INSERT OR IGNORE makes duplicate (person, datetime) pairs no-ops;
// other insert errors are logged but not propagated.
func InsertLocation(person string, datetime time.Time, latitude float64, longitude float64, altitude float64) {
	_, err := db.Exec(`INSERT OR IGNORE INTO location (person, datetime, latitude, longitude, altitude) VALUES (?, ?, ?, ?, ?)`,
		person, datetime.UTC().Format(time.RFC3339), latitude, longitude, altitude)
	if err != nil {
		slog.Error(fmt.Sprintln("LocationRecorder.InsertLocation Error inserting location", err))
	}
}
|
||||
|
||||
// InsertLocationNow stores a location row timestamped with the current time.
func InsertLocationNow(person string, latitude float64, longitude float64, altitude float64) {
	InsertLocation(person, time.Now(), latitude, longitude, altitude)
}
|
||||
|
||||
// initDb opens (or creates) the SQLite database at locationRecorder.dbPath
// (default "location_recorder.db"), verifies connectivity with a ping, and
// runs migrations. Any failure aborts the process.
func initDb() {
	if !viper.InConfig("locationRecorder.dbPath") {
		slog.Info("LocationRecorderInit dbPath not found in config file, using default: location_recorder.db")
		viper.SetDefault("locationRecorder.dbPath", "location_recorder.db")
	}

	dbPath := viper.GetString("locationRecorder.dbPath")
	// Predeclare err so `db, err = ...` assigns the package-level db rather
	// than shadowing it with :=.
	err := error(nil)
	db, err = sql.Open("sqlite", dbPath)
	if err != nil {
		slog.Error(fmt.Sprintln("LocationRecorderInit Error opening database", err))
		os.Exit(1)
	}
	err = db.Ping()
	if err != nil {
		slog.Error(fmt.Sprintln("LocationRecorderInit Error pinging database", err))
		os.Exit(1)
	}
	migrateDb()
}
|
||||
|
||||
// migrateDb steps the schema forward from the SQLite user_version pragma,
// applying each migration in sequence (0→1, 1→2). If the resulting version is
// not currentDBVersion the database is unusable and the process exits.
func migrateDb() {
	var userVersion int
	err := db.QueryRow("PRAGMA user_version").Scan(&userVersion)
	if err != nil {
		slog.Error(fmt.Sprintln("LocationRecorderInit Error getting db user version", err))
		os.Exit(1)
	}
	if userVersion == 0 {
		migrateDb0To1(&userVersion)
	}
	if userVersion == 1 {
		migrateDb1To2(&userVersion)
	}
	if userVersion != currentDBVersion {
		slog.Error(fmt.Sprintln("LocationRecorderInit Error unsupported database version", userVersion))
		os.Exit(1)
	}
}
|
||||
|
||||
// migrateDb0To1 creates the initial location table (version 0 means a fresh
// database) and bumps user_version to 1. Failures abort the process.
func migrateDb0To1(userVersion *int) {
	// this is actually create new db
	slog.Info("Creating location recorder database version 1..")
	_, err := db.Exec(`CREATE TABLE IF NOT EXISTS location (
		person TEXT NOT NULL,
		datetime TEXT NOT NULL,
		latitude REAL NOT NULL,
		longitude REAL NOT NULL,
		altitude REAL,
		PRIMARY KEY (person, datetime))`)
	if err != nil {
		slog.Error(fmt.Sprintln("LocationRecorderInit DB0To1 Error creating table", err))
		os.Exit(1)
	}
	_, err = db.Exec(`PRAGMA user_version = 1`)
	if err != nil {
		slog.Error(fmt.Sprintln("LocationRecorderInit DB0To1 Error setting user version to 1", err))
		os.Exit(1)
	}
	*userVersion = 1
}
|
||||
|
||||
func migrateDb1To2(userVersion *int) {
|
||||
// this will change the datetime format into Real RFC3339
|
||||
slog.Info("Migrating location recorder database version 1 to 2..")
|
||||
dbTx, err := db.Begin()
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintln("LocationRecorderInit DB1To2 Error beginning transaction", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
fail := func(err error, step string) {
|
||||
slog.Error(fmt.Sprintf("LocationRecorderInit DB1To2 Error %s: %s", step, err))
|
||||
dbTx.Rollback()
|
||||
os.Exit(1)
|
||||
}
|
||||
_, err = dbTx.Exec(`ALTER TABLE location RENAME TO location_old`)
|
||||
if err != nil {
|
||||
fail(err, "renaming table")
|
||||
}
|
||||
_, err = dbTx.Exec(`CREATE TABLE IF NOT EXISTS location (
|
||||
person TEXT NOT NULL,
|
||||
datetime TEXT NOT NULL,
|
||||
latitude REAL NOT NULL,
|
||||
longitude REAL NOT NULL,
|
||||
altitude REAL,
|
||||
PRIMARY KEY (person, datetime))`)
|
||||
if err != nil {
|
||||
fail(err, "creating new table")
|
||||
}
|
||||
row, err := dbTx.Query(`SELECT person, datetime, latitude, longitude, altitude FROM location_old`)
|
||||
if err != nil {
|
||||
fail(err, "selecting from old table")
|
||||
}
|
||||
defer row.Close()
|
||||
for row.Next() {
|
||||
var location Location
|
||||
err = row.Scan(&location.Person, &location.DateTime, &location.Latitude, &location.Longitude, &location.Altitude)
|
||||
if err != nil {
|
||||
fail(err, "scanning row")
|
||||
}
|
||||
dateTime, err := time.Parse("2006-01-02T15:04:05-0700", location.DateTime)
|
||||
if err != nil {
|
||||
fail(err, "parsing datetime")
|
||||
}
|
||||
_, err = dbTx.Exec(`INSERT INTO location (person, datetime, latitude, longitude, altitude) VALUES (?, ?, ?, ?, ?)`, location.Person, dateTime.UTC().Format(time.RFC3339), location.Latitude, location.Longitude, location.Altitude)
|
||||
if err != nil {
|
||||
fail(err, "inserting new row")
|
||||
}
|
||||
}
|
||||
|
||||
_, err = dbTx.Exec(`DROP TABLE location_old`)
|
||||
if err != nil {
|
||||
fail(err, "dropping old table")
|
||||
}
|
||||
|
||||
_, err = dbTx.Exec(`PRAGMA user_version = 2`)
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintln("LocationRecorderInit Error setting user version to 2", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
dbTx.Commit()
|
||||
*userVersion = 2
|
||||
}
|
||||
@@ -1,366 +0,0 @@
|
||||
package pooRecorder
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"log/slog"
|
||||
|
||||
"github.com/go-co-op/gocron/v2"
|
||||
"github.com/jomei/notionapi"
|
||||
"github.com/spf13/viper"
|
||||
"github.com/t-liu93/home-automation-backend/util/homeassistantutil"
|
||||
"github.com/t-liu93/home-automation-backend/util/notion"
|
||||
_ "modernc.org/sqlite"
|
||||
)
|
||||
|
||||
var (
	// db is the poo recorder's SQLite handle, opened in initDb().
	db *sql.DB
	// scheduler points at the application-wide gocron scheduler, set by Init.
	scheduler *gocron.Scheduler
)
|
||||
|
||||
// recordDetail is the wire format accepted by /poo/record: a status string
// plus the recording position, with coordinates sent as strings.
type recordDetail struct {
	Status    string `json:"status"`
	Latitude  string `json:"latitude"`
	Longitude string `json:"longitude"`
}
|
||||
|
||||
type pooStatusSensorAttributes struct {
|
||||
LastPoo string `json:"last_poo"`
|
||||
FriendlyName string `json:"friendly_name,"`
|
||||
}
|
||||
|
||||
// pooStatusWebhookBody is the JSON body sent to the Home Assistant webhook
// after a new record is stored.
type pooStatusWebhookBody struct {
	Status string `json:"status"`
}
|
||||
|
||||
// pooStatusDbEntry mirrors a row of the poo_records table; Timestamp is the
// stored text timestamp (minute precision, see publishLatestPooSensor).
type pooStatusDbEntry struct {
	Timestamp string
	Status    string
	Latitude  float64
	Longitude float64
}
|
||||
|
||||
// Init prepares the poo recorder: opens/migrates the database, registers the
// nightly Notion sync on the shared scheduler, runs one sync immediately, and
// publishes the current sensor state. Errors from the two trailing calls are
// intentionally not fatal here (they log internally).
func Init(mainScheduler *gocron.Scheduler) {
	initDb()
	initScheduler(mainScheduler)
	notionDbSync()
	publishLatestPooSensor()
}
|
||||
|
||||
// HandleRecordPoo stores a new poo record posted as recordDetail JSON, then
// republishes the latest-status sensor and fires the configured Home
// Assistant webhook. Requires pooRecorder.tableId in config (500 otherwise);
// a missing webhookId is only logged. Responds 400 on malformed JSON and 500
// on storage failure.
func HandleRecordPoo(w http.ResponseWriter, r *http.Request) {
	var record recordDetail
	if !viper.InConfig("pooRecorder.tableId") {
		slog.Warn("HandleRecordPoo Table ID not found in config file")
		http.Error(w, "Table ID not found in config file", http.StatusInternalServerError)
		return
	}
	decoder := json.NewDecoder(r.Body)
	decoder.DisallowUnknownFields()
	err := decoder.Decode(&record)
	if err != nil {
		slog.Warn(fmt.Sprintln("HandleRecordPoo Error decoding request body", err))
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	now := time.Now()
	err = storeStatus(record, now)
	if err != nil {
		slog.Warn(fmt.Sprintln("HandleRecordPoo Error storing status", err))
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	// NOTE(review): a publish failure here is ignored (the sensor just goes
	// stale); confirm that is intended.
	publishLatestPooSensor()
	if viper.InConfig("pooRecorder.webhookId") {
		homeassistantutil.TriggerWebhook(viper.GetString("pooRecorder.webhookId"), pooStatusWebhookBody{Status: record.Status})
	} else {
		slog.Warn("HandleRecordPoo Webhook ID not found in config file")
	}
}
|
||||
|
||||
func HandleNotifyLatestPoo(w http.ResponseWriter, r *http.Request) {
|
||||
err := publishLatestPooSensor()
|
||||
if err != nil {
|
||||
slog.Warn(fmt.Sprintln("HandleNotifyLatestPoo Error publishing latest poo", err))
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
slog.Debug(fmt.Sprintln("HandleGetLatestPoo Latest poo"))
|
||||
}
|
||||
|
||||
// publishLatestPooSensor reads the most recent poo record from the database
// and publishes it to Home Assistant as an HTTP sensor whose state is the
// status and whose attributes carry the formatted local record time. Returns
// an error when the query or timestamp parse fails (including sql.ErrNoRows
// on an empty table).
func publishLatestPooSensor() error {
	var latest pooStatusDbEntry
	err := db.QueryRow(`SELECT timestamp, status, latitude, longitude FROM poo_records ORDER BY timestamp DESC LIMIT 1`).Scan(&latest.Timestamp, &latest.Status, &latest.Latitude, &latest.Longitude)
	if err != nil {
		slog.Warn(fmt.Sprintln("HandleGetLatestPoo Error getting latest poo", err))
		return err
	}
	// Minute-precision layout — assumes this matches what storeStatus writes;
	// storeStatus is not visible in this chunk, TODO confirm.
	recordTime, err := time.Parse("2006-01-02T15:04Z07:00", latest.Timestamp)
	if err != nil {
		slog.Warn(fmt.Sprintln("HandleGetLatestPoo Error parsing timestamp", err))
		return err
	}
	viper.SetDefault("pooRecorder.sensorEntityName", "sensor.test_poo_status")
	viper.SetDefault("pooRecorder.sensorFriendlyName", "Poo Status")
	sensorEntityName := viper.GetString("pooRecorder.sensorEntityName")
	sensorFriendlyName := viper.GetString("pooRecorder.sensorFriendlyName")
	// Display the record time in the server's local zone.
	recordTime = recordTime.Local()
	pooStatus := homeassistantutil.HttpSensor{
		EntityId: sensorEntityName,
		State:    latest.Status,
		Attributes: pooStatusSensorAttributes{
			LastPoo:      recordTime.Format("Mon | 2006-01-02 | 15:04"),
			FriendlyName: sensorFriendlyName,
		},
	}
	homeassistantutil.PublishSensor(pooStatus)
	return nil
}
|
||||
|
||||
func initDb() {
|
||||
if !viper.InConfig("pooRecorder.dbPath") {
|
||||
slog.Info("PooRecorderInit dbPath not found in config file, using default: pooRecorder.db")
|
||||
viper.SetDefault("pooRecorder.dbPath", "pooRecorder.db")
|
||||
}
|
||||
|
||||
dbPath := viper.GetString("pooRecorder.dbPath")
|
||||
err := error(nil)
|
||||
db, err = sql.Open("sqlite", dbPath)
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintln("PooRecorderInit Error opening database", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
err = db.Ping()
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintln("PooRecorderInit Error pinging database", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
migrateDb()
|
||||
}
|
||||
|
||||
func migrateDb() {
|
||||
var userVersion int
|
||||
err := db.QueryRow("PRAGMA user_version").Scan(&userVersion)
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintln("PooRecorderInit Error getting db user version", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
if userVersion == 0 {
|
||||
migrateDb0To1(&userVersion)
|
||||
}
|
||||
}
|
||||
|
||||
func migrateDb0To1(userVersion *int) {
|
||||
// this is actually create new db
|
||||
slog.Info("Creating database version 1..")
|
||||
_, err := db.Exec(`CREATE TABLE IF NOT EXISTS poo_records (
|
||||
timestamp TEXT NOT NULL,
|
||||
status TEXT NOT NULL,
|
||||
latitude REAL NOT NULL,
|
||||
longitude REAL NOT NULL,
|
||||
PRIMARY KEY (timestamp))`)
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintln("PooRecorderInit Error creating table", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
_, err = db.Exec(`PRAGMA user_version = 1`)
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintln("PooRecorderInit Error setting user version to 1", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
*userVersion = 1
|
||||
}
|
||||
|
||||
func initScheduler(mainScheduler *gocron.Scheduler) {
|
||||
scheduler = mainScheduler
|
||||
_, err := (*scheduler).NewJob(gocron.CronJob("0 5 * * *", false), gocron.NewTask(
|
||||
notionDbSync,
|
||||
))
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintln("PooRecorderInit Error creating scheduled task", err))
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
func notionDbSync() {
|
||||
slog.Info("PooRecorder Running DB sync with Notion..")
|
||||
if !viper.InConfig("pooRecorder.tableId") {
|
||||
slog.Warn("PooRecorder Table ID not found in config file, sync aborted")
|
||||
return
|
||||
}
|
||||
tableId := viper.GetString("pooRecorder.tableId")
|
||||
rowsNotion, err := notion.GetAllTableRows(tableId)
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintln("PooRecorderSyncDb Failed to get table header", err))
|
||||
return
|
||||
}
|
||||
header := rowsNotion[0]
|
||||
rowsNotion = rowsNotion[1:] // remove header
|
||||
rowsDb, err := db.Query(`SELECT * FROM poo_records`)
|
||||
rowsDbMap := make(map[string]pooStatusDbEntry)
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintln("PooRecorderSyncDb Failed to get db rows", err))
|
||||
return
|
||||
}
|
||||
defer rowsDb.Close()
|
||||
for rowsDb.Next() {
|
||||
var row pooStatusDbEntry
|
||||
err = rowsDb.Scan(&row.Timestamp, &row.Status, &row.Latitude, &row.Longitude)
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintln("PooRecorderSyncDb Failed to scan db row", err))
|
||||
return
|
||||
}
|
||||
rowsDbMap[row.Timestamp] = row
|
||||
}
|
||||
// notion to db
|
||||
syncNotionToDb(rowsNotion, rowsDbMap)
|
||||
|
||||
// db to notion
|
||||
syncDbToNotion(header.GetID().String(), tableId, rowsNotion)
|
||||
|
||||
}
|
||||
|
||||
// syncNotionToDb inserts into the local poo_records table every Notion row
// that is not already present in the database.
//
// rowsNotion holds the Notion table rows with the header already removed;
// rowsDbMap is keyed by the DB timestamp string ("2006-01-02T15:04Z07:00",
// UTC). Notion keeps date and time in the first two cells in local time, so
// they are joined and converted to UTC before the membership check.
//
// NOTE(review): cell accesses like Cells[0][0] assume every row has non-empty
// date/time/status/location cells — an empty cell would panic; confirm
// against how the Notion table is populated.
func syncNotionToDb(rowsNotion []notionapi.TableRowBlock, rowsDbMap map[string]pooStatusDbEntry) {
	counter := 0
	for _, rowNotion := range rowsNotion {
		// Cells[0] = date ("2006-01-02"), Cells[1] = time ("15:04"), local time.
		rowNotionTimestamp := rowNotion.TableRow.Cells[0][0].PlainText + "T" + rowNotion.TableRow.Cells[1][0].PlainText
		rowNotionTime, err := time.ParseInLocation("2006-01-02T15:04", rowNotionTimestamp, time.Now().Location())
		if err != nil {
			// A single malformed Notion timestamp aborts the whole sync pass.
			slog.Warn(fmt.Sprintln("PooRecorderSyncDb Failed to parse timestamp", err))
			return
		}
		// Convert to the UTC key format used by the DB to test membership.
		rowNotionTimeInDbFormat := rowNotionTime.UTC().Format("2006-01-02T15:04Z07:00")
		_, exists := rowsDbMap[rowNotionTimeInDbFormat]
		if !exists {
			// Cells[3] holds "latitude,longitude"; Cells[2] holds the status.
			locationNotion := rowNotion.TableRow.Cells[3][0].PlainText
			latitude, err := strconv.ParseFloat(strings.Split(locationNotion, ",")[0], 64)
			if err != nil {
				slog.Error(fmt.Sprintln("PooRecorderSyncDb Failed to parse latitude to float", err))
				return
			}
			longitude, err := strconv.ParseFloat(strings.Split(locationNotion, ",")[1], 64)
			if err != nil {
				slog.Error(fmt.Sprintln("PooRecorderSyncDb Failed to parse longitude to float", err))
				return
			}
			_, err = db.Exec(`INSERT INTO poo_records (timestamp, status, latitude, longitude) VALUES (?, ?, ?, ?)`,
				rowNotionTimeInDbFormat, rowNotion.TableRow.Cells[2][0].PlainText, latitude, longitude)
			if err != nil {
				slog.Warn(fmt.Sprintln("PooRecorderSyncDb Failed to insert new row", err))
				return
			}
			counter++
		}
	}
	slog.Info(fmt.Sprintln("PooRecorderSyncDb Inserted", counter, "new rows from Notion to DB"))
}
|
||||
|
||||
func syncDbToNotion(headerId string, tableId string, rowsNotion []notionapi.TableRowBlock) {
|
||||
counter := 0
|
||||
var rowsDbSlice []pooStatusDbEntry
|
||||
rowsDb, err := db.Query(`SELECT * FROM poo_records ORDER BY timestamp DESC`)
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintln("PooRecorderSyncDb Failed to get db rows", err))
|
||||
return
|
||||
}
|
||||
defer rowsDb.Close()
|
||||
for rowsDb.Next() {
|
||||
var row pooStatusDbEntry
|
||||
err = rowsDb.Scan(&row.Timestamp, &row.Status, &row.Latitude, &row.Longitude)
|
||||
if err != nil {
|
||||
slog.Error(fmt.Sprintln("PooRecorderSyncDb Failed to scan db row", err))
|
||||
return
|
||||
}
|
||||
rowsDbSlice = append(rowsDbSlice, row)
|
||||
}
|
||||
startFromId := headerId
|
||||
for iNotion, iDb := 0, 0; iNotion < len(rowsNotion) && iDb < len(rowsDbSlice); {
|
||||
notionTimeStamp := rowsNotion[iNotion].TableRow.Cells[0][0].PlainText + "T" + rowsNotion[iNotion].TableRow.Cells[1][0].PlainText
|
||||
notionTime, err := time.ParseInLocation("2006-01-02T15:04", notionTimeStamp, time.Now().Location())
|
||||
if err != nil {
|
||||
slog.Warn(fmt.Sprintln("PooRecorderSyncDb Failed to parse notion timestamp", err))
|
||||
return
|
||||
}
|
||||
notionTimeStampInDbFormat := notionTime.UTC().Format("2006-01-02T15:04Z07:00")
|
||||
dbTimeStamp := rowsDbSlice[iDb].Timestamp
|
||||
dbTime, err := time.Parse("2006-01-02T15:04Z07:00", dbTimeStamp)
|
||||
if err != nil {
|
||||
slog.Warn(fmt.Sprintln("PooRecorderSyncDb Failed to parse db timestamp", err))
|
||||
return
|
||||
}
|
||||
dbTimeLocal := dbTime.Local()
|
||||
dbTimeDate := dbTimeLocal.Format("2006-01-02")
|
||||
dbTimeTime := dbTimeLocal.Format("15:04")
|
||||
if notionTimeStampInDbFormat == dbTimeStamp {
|
||||
startFromId = rowsNotion[iNotion].GetID().String()
|
||||
iNotion++
|
||||
iDb++
|
||||
continue
|
||||
}
|
||||
if iNotion != len(rowsNotion)-1 {
|
||||
notionNextTimeStamp := rowsNotion[iNotion+1].TableRow.Cells[0][0].PlainText + "T" + rowsNotion[iNotion+1].TableRow.Cells[1][0].PlainText
|
||||
notionNextTime, err := time.ParseInLocation("2006-01-02T15:04", notionNextTimeStamp, time.Now().Location())
|
||||
if err != nil {
|
||||
slog.Warn(fmt.Sprintln("PooRecorderSyncDb Failed to parse next notion timestamp", err))
|
||||
return
|
||||
}
|
||||
if notionNextTime.After(notionTime) {
|
||||
slog.Error(fmt.Sprintf("PooRecorderSyncDb Notion timestamp %s is after next timestamp %s, checking, aborting", notionTimeStamp, notionNextTimeStamp))
|
||||
return
|
||||
}
|
||||
}
|
||||
id, err := notion.WriteTableRow([]string{
|
||||
dbTimeDate,
|
||||
dbTimeTime,
|
||||
rowsDbSlice[iDb].Status,
|
||||
fmt.Sprintf("%s,%s",
|
||||
strconv.FormatFloat(rowsDbSlice[iDb].Latitude, 'f', -1, 64),
|
||||
strconv.FormatFloat(rowsDbSlice[iDb].Longitude, 'f', -1, 64))},
|
||||
tableId,
|
||||
startFromId)
|
||||
if err != nil {
|
||||
slog.Warn(fmt.Sprintln("PooRecorderSyncDb Failed to write row to Notion", err))
|
||||
return
|
||||
}
|
||||
startFromId = id
|
||||
iDb++
|
||||
counter++
|
||||
time.Sleep(400 * time.Millisecond)
|
||||
}
|
||||
slog.Info(fmt.Sprintln("PooRecorderSyncDb Inserted", counter, "new rows from DB to Notion"))
|
||||
}
|
||||
|
||||
// storeStatus persists a poo record to the local SQLite database and, in a
// fire-and-forget goroutine, appends it as a new row to the configured Notion
// table.
//
// The DB write is the source of truth: it runs synchronously and its error is
// returned. Timestamps are stored in UTC ("2006-01-02T15:04Z07:00"), and
// INSERT OR IGNORE makes a duplicate timestamp a silent no-op. Notion write
// failures are only logged, never surfaced to the caller.
//
// NOTE(review): record.Latitude/Longitude are concatenated as strings below,
// so recordDetail presumably carries them as strings — confirm against the
// struct definition.
func storeStatus(record recordDetail, timestamp time.Time) error {
	tableId := viper.GetString("pooRecorder.tableId")
	// Notion rows store date and time in separate cells, in local time.
	recordDate := timestamp.Format("2006-01-02")
	recordTime := timestamp.Format("15:04")
	slog.Debug(fmt.Sprintln("Recording poo", record.Status, "at", record.Latitude, record.Longitude))
	_, err := db.Exec(`INSERT OR IGNORE INTO poo_records (timestamp, status, latitude, longitude) VALUES (?, ?, ?, ?)`,
		timestamp.UTC().Format("2006-01-02T15:04Z07:00"), record.Status, record.Latitude, record.Longitude)
	if err != nil {
		return err
	}
	// Best-effort mirror to Notion; no lifecycle management for this goroutine.
	go func() {
		// The first row is the table header; new rows are inserted after it.
		header, err := notion.GetTableRows(tableId, 1, "")
		if err != nil {
			slog.Warn(fmt.Sprintln("HandleRecordPoo Failed to get table header", err))
			return
		}
		if len(header) == 0 {
			slog.Warn("HandleRecordPoo Table header not found")
			return
		}
		headerId := header[0].GetID()
		_, err = notion.WriteTableRow([]string{recordDate, recordTime, record.Status, record.Latitude + "," + record.Longitude}, tableId, headerId.String())
		if err != nil {
			slog.Warn(fmt.Sprintln("HandleRecordPoo Failed to write table row", err))
		}
	}()
	return nil
}
|
||||
-54
@@ -1,54 +0,0 @@
|
||||
module github.com/t-liu93/home-automation-backend
|
||||
|
||||
go 1.23.0
|
||||
|
||||
require (
|
||||
github.com/go-co-op/gocron/v2 v2.11.0
|
||||
github.com/gorilla/mux v1.8.1
|
||||
github.com/jomei/notionapi v1.13.2
|
||||
github.com/spf13/cobra v1.8.1
|
||||
github.com/spf13/viper v1.19.0
|
||||
github.com/stretchr/testify v1.10.0
|
||||
golang.org/x/term v0.24.0
|
||||
modernc.org/sqlite v1.33.1
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/fsnotify/fsnotify v1.7.0 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/jonboulle/clockwork v0.4.0 // indirect
|
||||
github.com/magiconair/properties v1.8.7 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/robfig/cron/v3 v3.0.1 // indirect
|
||||
github.com/sagikazarmark/locafero v0.4.0 // indirect
|
||||
github.com/sagikazarmark/slog-shim v0.1.0 // indirect
|
||||
github.com/sourcegraph/conc v0.3.0 // indirect
|
||||
github.com/spf13/afero v1.11.0 // indirect
|
||||
github.com/spf13/cast v1.6.0 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
github.com/stretchr/objx v0.5.2 // indirect
|
||||
github.com/subosito/gotenv v1.6.0 // indirect
|
||||
go.uber.org/atomic v1.9.0 // indirect
|
||||
go.uber.org/multierr v1.9.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8 // indirect
|
||||
golang.org/x/sys v0.25.0 // indirect
|
||||
golang.org/x/text v0.14.0 // indirect
|
||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6 // indirect
|
||||
modernc.org/libc v1.55.3 // indirect
|
||||
modernc.org/mathutil v1.6.0 // indirect
|
||||
modernc.org/memory v1.8.0 // indirect
|
||||
modernc.org/strutil v1.2.0 // indirect
|
||||
modernc.org/token v1.1.0 // indirect
|
||||
)
|
||||
-140
@@ -1,140 +0,0 @@
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
|
||||
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
|
||||
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
|
||||
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
|
||||
github.com/go-co-op/gocron/v2 v2.11.0 h1:IOowNA6SzwdRFnD4/Ol3Kj6G2xKfsoiiGq2Jhhm9bvE=
|
||||
github.com/go-co-op/gocron/v2 v2.11.0/go.mod h1:xY7bJxGazKam1cz04EebrlP4S9q4iWdiAylMGP3jY9w=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd h1:gbpYu9NMq8jhDVbvlGkMFWCjLFlqqEZjEmObmhUy6Vo=
|
||||
github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
|
||||
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
|
||||
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/jomei/notionapi v1.13.2 h1:YpHKNpkoTMlUfWTlVIodOmQDgRKjfwmtSNVa6/6yC9E=
|
||||
github.com/jomei/notionapi v1.13.2/go.mod h1:BqzP6JBddpBnXvMSIxiR5dCoCjKngmz5QNl1ONDlDoM=
|
||||
github.com/jonboulle/clockwork v0.4.0 h1:p4Cf1aMWXnXAUh8lVfewRBx1zaTSYKrKMF2g3ST4RZ4=
|
||||
github.com/jonboulle/clockwork v0.4.0/go.mod h1:xgRqUGwRcjKCO1vbZUEtSLrqKoPSsUpK7fnezOII0kc=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
|
||||
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
|
||||
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
|
||||
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
|
||||
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
||||
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
||||
github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
|
||||
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ=
|
||||
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
|
||||
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
|
||||
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
|
||||
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
|
||||
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
|
||||
github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
|
||||
github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
|
||||
github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0=
|
||||
github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
|
||||
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
|
||||
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
|
||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI=
|
||||
github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
|
||||
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
|
||||
go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
|
||||
go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
|
||||
go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
|
||||
golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8 h1:yixxcjnhBmY0nkL253HFVIm0JsFHwrHdT3Yh6szTnfY=
|
||||
golang.org/x/exp v0.0.0-20240613232115-7f521ea00fb8/go.mod h1:jj3sYF3dwk5D+ghuXyeI3r5MFf+NT2An6/9dOA95KSI=
|
||||
golang.org/x/mod v0.18.0 h1:5+9lSbEzPSdWkH32vYPBwEpX8KwDbM52Ud9xBUvNlb0=
|
||||
golang.org/x/mod v0.18.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
|
||||
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
|
||||
golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM=
|
||||
golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8=
|
||||
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/tools v0.22.0 h1:gqSGLZqv+AI9lIQzniJ0nZDRG5GBPsSi+DRNHWNz6yA=
|
||||
golang.org/x/tools v0.22.0/go.mod h1:aCwcsjqvq7Yqt6TNyX7QMU2enbQ/Gt0bo6krSeEri+c=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
|
||||
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ=
|
||||
modernc.org/cc/v4 v4.21.4/go.mod h1:HM7VJTZbUCR3rV8EYBi9wxnJ0ZBRiGE5OeGXNA0IsLQ=
|
||||
modernc.org/ccgo/v4 v4.19.2 h1:lwQZgvboKD0jBwdaeVCTouxhxAyN6iawF3STraAal8Y=
|
||||
modernc.org/ccgo/v4 v4.19.2/go.mod h1:ysS3mxiMV38XGRTTcgo0DQTeTmAO4oCmJl1nX9VFI3s=
|
||||
modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE=
|
||||
modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ=
|
||||
modernc.org/gc/v2 v2.4.1 h1:9cNzOqPyMJBvrUipmynX0ZohMhcxPtMccYgGOJdOiBw=
|
||||
modernc.org/gc/v2 v2.4.1/go.mod h1:wzN5dK1AzVGoH6XOzc3YZ+ey/jPgYHLuVckd62P0GYU=
|
||||
modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6 h1:5D53IMaUuA5InSeMu9eJtlQXS2NxAhyWQvkKEgXZhHI=
|
||||
modernc.org/gc/v3 v3.0.0-20240107210532-573471604cb6/go.mod h1:Qz0X07sNOR1jWYCrJMEnbW/X55x206Q7Vt4mz6/wHp4=
|
||||
modernc.org/libc v1.55.3 h1:AzcW1mhlPNrRtjS5sS+eW2ISCgSOLLNyFzRh/V3Qj/U=
|
||||
modernc.org/libc v1.55.3/go.mod h1:qFXepLhz+JjFThQ4kzwzOjA/y/artDeg+pcYnY+Q83w=
|
||||
modernc.org/mathutil v1.6.0 h1:fRe9+AmYlaej+64JsEEhoWuAYBkOtQiMEU7n/XgfYi4=
|
||||
modernc.org/mathutil v1.6.0/go.mod h1:Ui5Q9q1TR2gFm0AQRqQUaBWFLAhQpCwNcuhBOSedWPo=
|
||||
modernc.org/memory v1.8.0 h1:IqGTL6eFMaDZZhEWwcREgeMXYwmW83LYW8cROZYkg+E=
|
||||
modernc.org/memory v1.8.0/go.mod h1:XPZ936zp5OMKGWPqbD3JShgd/ZoQ7899TUuQqxY+peU=
|
||||
modernc.org/opt v0.1.3 h1:3XOZf2yznlhC+ibLltsDGzABUGVx8J6pnFMS3E4dcq4=
|
||||
modernc.org/opt v0.1.3/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0=
|
||||
modernc.org/sortutil v1.2.0 h1:jQiD3PfS2REGJNzNCMMaLSp/wdMNieTbKX920Cqdgqc=
|
||||
modernc.org/sortutil v1.2.0/go.mod h1:TKU2s7kJMf1AE84OoiGppNHJwvB753OYfNl2WRb++Ss=
|
||||
modernc.org/sqlite v1.33.1 h1:trb6Z3YYoeM9eDL1O8do81kP+0ejv+YzgyFo+Gwy0nM=
|
||||
modernc.org/sqlite v1.33.1/go.mod h1:pXV2xHxhzXZsgT/RtTFAPY6JJDEvOTcTdwADQCCWD4k=
|
||||
modernc.org/strutil v1.2.0 h1:agBi9dp1I+eOnxXeiZawM8F4LawKv4NzGWSaLfyeNZA=
|
||||
modernc.org/strutil v1.2.0/go.mod h1:/mdcBmfOibveCTBxUl5B5l6W+TTH1FXPLHZE6bTosX0=
|
||||
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
|
||||
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
|
||||
@@ -1,40 +0,0 @@
|
||||
/*
|
||||
Copyright © 2024 Tianyu Liu
|
||||
|
||||
*/
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// addgpxCmd represents the addgpx command
|
||||
var addgpxCmd = &cobra.Command{
|
||||
Use: "addgpx",
|
||||
Short: "A brief description of your command",
|
||||
Long: `A longer description that spans multiple lines and likely contains examples
|
||||
and usage of using your command. For example:
|
||||
|
||||
Cobra is a CLI library for Go that empowers applications.
|
||||
This application is a tool to generate the needed files
|
||||
to quickly create a Cobra application.`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
fmt.Println("addgpx called")
|
||||
},
|
||||
}
|
||||
|
||||
func init() {
|
||||
rootCmd.AddCommand(addgpxCmd)
|
||||
|
||||
// Here you will define your flags and configuration settings.
|
||||
|
||||
// Cobra supports Persistent Flags which will work for this command
|
||||
// and all subcommands, e.g.:
|
||||
// addgpxCmd.PersistentFlags().String("foo", "", "A help for foo")
|
||||
|
||||
// Cobra supports local flags which will only run when this command
|
||||
// is called directly, e.g.:
|
||||
// addgpxCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user