Migrate poo recorder and align Alembic naming

This commit is contained in:
2026-04-20 11:48:48 +02:00
parent e334df992f
commit 044b47c573
34 changed files with 1138 additions and 31 deletions
+3
View File
@@ -5,6 +5,9 @@ APP_HOST=0.0.0.0
APP_PORT=8000
LOCATION_DATABASE_URL=sqlite:///./data/locationRecorder.db
POO_DATABASE_URL=sqlite:///./data/pooRecorder.db
POO_WEBHOOK_ID=
POO_SENSOR_ENTITY_NAME=sensor.test_poo_status
POO_SENSOR_FRIENDLY_NAME=Poo Status
TICKTICK_CLIENT_ID=
TICKTICK_CLIENT_SECRET=
TICKTICK_REDIRECT_URI=http://localhost:8000/ticktick/auth/callback
+4 -2
View File
@@ -9,8 +9,10 @@ COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt
COPY app ./app
COPY alembic ./alembic
COPY alembic.ini ./
COPY alembic_location ./alembic_location
COPY alembic_location.ini ./
COPY alembic_poo ./alembic_poo
COPY alembic_poo.ini ./
COPY scripts ./scripts
COPY README.md ./
RUN mkdir -p /app/data
+10 -9
View File
@@ -11,6 +11,7 @@
- SQLite + SQLAlchemy + Alembic 基础设施
- 极简 server-side templates
- location recorder 第一版迁移
- poo recorder 第一版迁移
- Home Assistant outbound integration layer
- Home Assistant inbound gateway 第一版
- pytest 测试基础
@@ -20,7 +21,6 @@
当前阶段明确不包含:
- TickTick 业务逻辑迁移
- poo records 业务迁移
- Notion 模块
当前 Home Assistant inbound gateway 仅接回第一版:
@@ -42,21 +42,22 @@ Notion 在 Go 版本中仍然存在,但已被明确视为 legacy / removed sco
当前系统仍然是两个独立的 SQLite 数据库文件,而不是单一数据库:
- `location` 模块使用自己的 DB 文件
- `poo` 模块未来也将使用自己的 DB 文件
- `poo` 模块使用自己的 DB 文件
当前阶段明确不借这次重构把两个 DB 合并。配置层已经显式反映这一点:
- `LOCATION_DATABASE_URL`
- `POO_DATABASE_URL`
目前真正接入的是 `location` 对应的数据库;`poo` 先保留配置占位,等模块迁入时再接上
目前 `location` `poo` 都已经接到各自独立的数据库文件
## 当前目录
Python 骨架的主要目录如下:
- `app/`: FastAPI 应用代码
- `alembic/`: Alembic migration 环境
- `alembic_location/`: Location DB 的 Alembic migration 环境
- `alembic_poo/`: Poo DB 的 Alembic migration 环境
- `tests/`: pytest 测试
- `docs/`: 架构说明与迁移文档
- `scripts/`: 辅助脚本,例如 OpenAPI 导出
@@ -129,12 +130,12 @@ uvicorn app.main:app --reload --host 0.0.0.0 --port 8000
初始化 migration 环境后,可继续添加模型并生成迁移:
```bash
alembic revision --autogenerate -m "init tables"
alembic upgrade head
```
当前 `location``poo` 都已经有各自独立的 Alembic baseline / 接管链路。
当前 Alembic 只接管 `location` 这条链路;`poo` 相关数据库与 migration 还没有迁入。
- Location Alembic 环境:`alembic_location.ini` + `alembic_location/`
- Poo Alembic 环境:`alembic_poo.ini` + `alembic_poo/`
- Location DB 接管 / 初始化:`python scripts/location_db_adopt.py`
- Poo DB 接管 / 初始化:`python scripts/poo_db_adopt.py`
## 运行测试
+1 -1
View File
@@ -1,5 +1,5 @@
[alembic]
script_location = alembic
script_location = alembic_location
prepend_sys_path = .
path_separator = os
sqlalchemy.url = sqlite:///./data/locationRecorder.db
+37
View File
@@ -0,0 +1,37 @@
[alembic]
script_location = alembic_poo
prepend_sys_path = .
path_separator = os
sqlalchemy.url = sqlite:///./data/pooRecorder.db
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers = console
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
+48
View File
@@ -0,0 +1,48 @@
from logging.config import fileConfig
from alembic import context
from sqlalchemy import engine_from_config, pool
from app.config import get_settings
from app.models.poo import PooRecord # noqa: F401
from app.poo_db import PooBase
config = context.config

if config.config_file_name is not None:
    fileConfig(config.config_file_name)

settings = get_settings()

# Keep any explicit URL override, but replace the ini-file default
# (sqlite:///./data/pooRecorder.db) with the runtime-configured poo DB URL.
configured_url = config.get_main_option("sqlalchemy.url")
if not configured_url or configured_url == "sqlite:///./data/pooRecorder.db":
    config.set_main_option("sqlalchemy.url", settings.poo_database_url)

# Autogenerate diffs against the poo-specific declarative base only.
target_metadata = PooBase.metadata
def run_migrations_offline() -> None:
    """Run Alembic migrations in offline (SQL-script emission) mode."""
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run Alembic migrations through a live engine connection."""
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with engine.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Alembic invokes this module directly; pick the mode it requested.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
@@ -0,0 +1,32 @@
"""poo baseline
Revision ID: 20260420_01_poo_baseline
Revises:
Create Date: 2026-04-20 00:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = "20260420_01_poo_baseline"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    # Baseline schema for the poo recorder DB; mirrors the legacy SQLite
    # table exactly (TEXT timestamp primary key, REAL coordinates).
    op.create_table(
        "poo_records",
        sa.Column("timestamp", sa.Text(), nullable=False),
        sa.Column("status", sa.Text(), nullable=False),
        sa.Column("latitude", sa.Float(), nullable=False),
        sa.Column("longitude", sa.Float(), nullable=False),
        sa.PrimaryKeyConstraint("timestamp"),
    )
def downgrade() -> None:
    # Reverting the baseline removes the table (and its data) entirely.
    op.drop_table("poo_records")
+76
View File
@@ -0,0 +1,76 @@
import json
import logging
from fastapi import APIRouter, Depends, Request, status
from fastapi.responses import PlainTextResponse, Response
from pydantic import ValidationError
from sqlalchemy.orm import Session
from app.config import Settings
from app.dependencies import get_app_settings, get_homeassistant_client, get_poo_db
from app.integrations.homeassistant import HomeAssistantClient
from app.schemas.poo import PooRecordRequest
from app.services.poo import publish_latest_poo_status, record_poo
router = APIRouter(tags=["poo"])
logger = logging.getLogger(__name__)

# Legacy-compatible plain-text bodies returned on error responses.
BAD_REQUEST_MESSAGE = "bad request"
INTERNAL_SERVER_ERROR_MESSAGE = "internal server error"
@router.post("/poo/record")
async def create_poo_record(
    request: Request,
    db: Session = Depends(get_poo_db),
    settings: Settings = Depends(get_app_settings),
    homeassistant_client: HomeAssistantClient = Depends(get_homeassistant_client),
) -> Response:
    """Store one poo event and best-effort notify Home Assistant.

    Mirrors legacy behavior: any bad input -> plain-text 400, storage
    failure -> plain-text 500, success -> empty 200 body.
    """
    try:
        # Parse the body by hand so malformed JSON and schema violations
        # both map to the same plain-text 400 response.
        raw_payload = await request.body()
        data = json.loads(raw_payload)
        payload = PooRecordRequest.model_validate(data)
        record_poo(
            db,
            payload,
            settings=settings,
            homeassistant_client=homeassistant_client,
        )
    except json.JSONDecodeError as exc:
        logger.warning("Rejected poo record request due to invalid JSON: %s", exc)
        return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST)
    except ValidationError as exc:
        logger.warning("Rejected poo record request due to validation failure: %s", exc)
        return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST)
    except ValueError as exc:
        # Raised by record_poo when latitude/longitude are not parseable floats.
        logger.warning("Rejected poo record request due to invalid numeric input: %s", exc)
        return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST)
    except Exception as exc:
        logger.warning("Failed to store poo record: %s", exc)
        return PlainTextResponse(
            INTERNAL_SERVER_ERROR_MESSAGE,
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
    return Response(status_code=status.HTTP_200_OK)
@router.get("/poo/latest")
def notify_latest_poo(
    db: Session = Depends(get_poo_db),
    settings: Settings = Depends(get_app_settings),
    homeassistant_client: HomeAssistantClient = Depends(get_homeassistant_client),
) -> Response:
    """Re-publish the latest poo record to the Home Assistant sensor.

    Returns an empty 200 on success — including when no record exists yet
    (the publish is then skipped). A real publish failure yields a
    plain-text 500.
    """
    try:
        publish_latest_poo_status(
            session=db,
            settings=settings,
            homeassistant_client=homeassistant_client,
        )
    except Exception as exc:
        logger.warning("Failed to publish latest poo status: %s", exc)
        return PlainTextResponse(
            INTERNAL_SERVER_ERROR_MESSAGE,
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
        )
    return Response(status_code=status.HTTP_200_OK)
+3
View File
@@ -24,6 +24,9 @@ class Settings(BaseSettings):
home_assistant_auth_token: str = ""
home_assistant_timeout_seconds: float = 1.0
home_assistant_action_task_project_id: str = ""
poo_webhook_id: str = ""
poo_sensor_entity_name: str = "sensor.test_poo_status"
poo_sensor_friendly_name: str = "Poo Status"
model_config = SettingsConfigDict(
env_file=".env",
+9
View File
@@ -4,6 +4,8 @@ from sqlalchemy.orm import Session
from app.config import Settings, get_settings
from app.db import get_db_session
from app.integrations.homeassistant import HomeAssistantClient
from app.poo_db import get_poo_db_session
def get_app_settings() -> Settings:
@@ -13,3 +15,10 @@ def get_app_settings() -> Settings:
def get_db() -> Generator[Session, None, None]:
yield from get_db_session()
def get_poo_db() -> Generator[Session, None, None]:
yield from get_poo_db_session()
def get_homeassistant_client() -> HomeAssistantClient:
return HomeAssistantClient(get_settings())
+15
View File
@@ -8,8 +8,10 @@ from app import models # noqa: F401
from app.api.routes import pages, status
from app.api.routes.homeassistant import router as homeassistant_router
from app.api.routes.location import router as location_router
from app.api.routes.poo import router as poo_router
from app.config import get_settings
from scripts.location_db_adopt import LocationDatabaseAdoptionError, validate_location_runtime_db
from scripts.poo_db_adopt import PooDatabaseAdoptionError, validate_poo_runtime_db
def ensure_location_db_ready() -> None:
@@ -23,6 +25,17 @@ def ensure_location_db_ready() -> None:
raise RuntimeError(str(exc)) from exc
def ensure_poo_db_ready() -> None:
settings = get_settings()
if settings.poo_sqlite_path is None:
return
try:
validate_poo_runtime_db(settings.poo_database_url)
except PooDatabaseAdoptionError as exc:
raise RuntimeError(str(exc)) from exc
def ensure_runtime_dirs() -> None:
settings = get_settings()
for path in (settings.location_sqlite_path, settings.poo_sqlite_path):
@@ -34,6 +47,7 @@ def ensure_runtime_dirs() -> None:
async def lifespan(_: FastAPI):
ensure_runtime_dirs()
ensure_location_db_ready()
ensure_poo_db_ready()
yield
@@ -57,6 +71,7 @@ def create_app() -> FastAPI:
app.include_router(pages.router)
app.include_router(homeassistant_router)
app.include_router(location_router)
app.include_router(poo_router)
return app
+13
View File
@@ -0,0 +1,13 @@
from sqlalchemy import Float, String
from sqlalchemy.orm import Mapped, mapped_column
from app.poo_db import PooBase
class PooRecord(PooBase):
    """One poo event row; columns mirror the legacy SQLite baseline."""

    __tablename__ = "poo_records"

    # Minute-precision UTC timestamp string doubles as the primary key, so
    # repeated submissions within the same minute collapse to one row
    # (the service inserts with OR IGNORE).
    timestamp: Mapped[str] = mapped_column(String, primary_key=True)
    status: Mapped[str] = mapped_column(String, nullable=False)
    latitude: Mapped[float] = mapped_column(Float, nullable=False)
    longitude: Mapped[float] = mapped_column(Float, nullable=False)
+28
View File
@@ -0,0 +1,28 @@
from collections.abc import Generator
from sqlalchemy import create_engine
from sqlalchemy.orm import DeclarativeBase, Session, sessionmaker
from app.config import get_settings
class PooBase(DeclarativeBase):
    """Declarative base dedicated to the poo SQLite database."""

    pass
settings = get_settings()

# SQLite forbids using a connection from a thread other than its creator by
# default; relax that check for sqlite URLs so pooled connections can be
# shared across request-handling threads.
connect_args: dict[str, object] = {}
if settings.poo_database_url.startswith("sqlite"):
    connect_args["check_same_thread"] = False

poo_engine = create_engine(settings.poo_database_url, connect_args=connect_args)
PooSessionLocal = sessionmaker(bind=poo_engine, autoflush=False, autocommit=False, class_=Session)
def get_poo_db_session() -> Generator[Session, None, None]:
    """Yield a poo-DB session and guarantee it is closed afterwards."""
    db_session = PooSessionLocal()
    try:
        yield db_session
    finally:
        db_session.close()
+9
View File
@@ -0,0 +1,9 @@
from pydantic import BaseModel, ConfigDict
class PooRecordRequest(BaseModel):
    """Inbound payload for POST /poo/record; unknown fields are rejected."""

    status: str
    # Coordinates arrive as strings (matches the documented legacy payload)
    # and are parsed to floats in the service layer.
    latitude: str
    longitude: str

    model_config = ConfigDict(extra="forbid")
+112
View File
@@ -0,0 +1,112 @@
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime, timezone
import logging
from sqlalchemy import desc, insert, select
from sqlalchemy.orm import Session
from app.config import Settings
from app.integrations.homeassistant import (
HomeAssistantClient,
HomeAssistantConfigError,
HomeAssistantRequestError,
)
from app.models.poo import PooRecord
from app.schemas.poo import PooRecordRequest
logger = logging.getLogger(__name__)
@dataclass(slots=True)
class LatestPooRecord:
    """Plain snapshot of the most recent poo_records row."""

    timestamp: str
    status: str
    latitude: float
    longitude: float
def _parse_required_float(value: str, field_name: str) -> float:
try:
return float(value)
except (TypeError, ValueError) as exc:
raise ValueError(f"Invalid numeric value for {field_name}") from exc
def _utc_now_minute_precision() -> str:
now = datetime.now(timezone.utc).replace(second=0, microsecond=0)
return now.strftime("%Y-%m-%dT%H:%MZ")
def record_poo(
    session: Session,
    payload: PooRecordRequest,
    *,
    settings: Settings,
    homeassistant_client: HomeAssistantClient,
) -> None:
    """Persist one poo event, then best-effort notify Home Assistant.

    Raises ValueError when latitude/longitude cannot be parsed as floats.
    Home Assistant failures are logged and never roll back the DB write.
    """
    # OR IGNORE: the minute-precision timestamp is the primary key, so a
    # second submission within the same minute becomes a silent no-op.
    stmt = insert(PooRecord).prefix_with("OR IGNORE").values(
        timestamp=_utc_now_minute_precision(),
        status=payload.status,
        latitude=_parse_required_float(payload.latitude, "latitude"),
        longitude=_parse_required_float(payload.longitude, "longitude"),
    )
    session.execute(stmt)
    # Commit before any outbound call so side-effect failures cannot undo
    # the local write.
    session.commit()
    try:
        publish_latest_poo_status(
            session=session,
            settings=settings,
            homeassistant_client=homeassistant_client,
        )
    except (HomeAssistantConfigError, HomeAssistantRequestError) as exc:
        logger.warning("Failed to publish latest poo status to Home Assistant: %s", exc)
    # Optional webhook trigger — also best-effort, only when configured.
    if settings.poo_webhook_id:
        try:
            homeassistant_client.trigger_webhook(
                webhook_id=settings.poo_webhook_id,
                body={"status": payload.status},
            )
        except (HomeAssistantConfigError, HomeAssistantRequestError) as exc:
            logger.warning("Failed to trigger poo webhook on Home Assistant: %s", exc)
def get_latest_poo_record(session: Session) -> LatestPooRecord | None:
    """Return the newest poo row as a LatestPooRecord, or None when empty."""
    query = select(PooRecord).order_by(desc(PooRecord.timestamp)).limit(1)
    row = session.execute(query).scalar_one_or_none()
    if row is None:
        logger.info("No poo record is available yet")
        return None
    return LatestPooRecord(
        timestamp=row.timestamp,
        status=row.status,
        latitude=row.latitude,
        longitude=row.longitude,
    )
def publish_latest_poo_status(
    *,
    session: Session,
    settings: Settings,
    homeassistant_client: HomeAssistantClient,
) -> LatestPooRecord | None:
    """Push the newest poo record to the configured Home Assistant sensor.

    Returns the published record, or None (without publishing) when the
    table is still empty. Propagates Home Assistant client errors.
    """
    latest = get_latest_poo_record(session)
    if latest is None:
        logger.info("Skipping Home Assistant poo sensor publish because no poo record exists yet")
        return None
    # Stored timestamps use a trailing 'Z'; normalize to '+00:00' so
    # fromisoformat accepts it, then render in the server's local zone.
    record_time = datetime.fromisoformat(latest.timestamp.replace("Z", "+00:00")).astimezone()
    homeassistant_client.publish_sensor(
        entity_id=settings.poo_sensor_entity_name,
        state=latest.status,
        attributes={
            "last_poo": record_time.strftime("%a | %Y-%m-%d | %H:%M"),
            "friendly_name": settings.poo_sensor_friendly_name,
        },
    )
    return latest
+8 -2
View File
@@ -30,8 +30,10 @@
- `api/`
- HTTP routes
- 当前已迁入 `POST /homeassistant/publish` 第一版入口
- 当前已迁入 `POST /poo/record``GET /poo/latest`
- `models/`
- SQLAlchemy models
- 当前 `location``poo` 使用各自独立的数据库 base
- `schemas/`
- Pydantic schemas
- `services/`
@@ -44,9 +46,13 @@
- `static/`
- 极简静态资源
### `alembic/`
### `alembic_location/`
数据库 migration 基础设施。当前尚未迁入业务表,但迁移链路已就绪。
Location DB 的 migration 基础设施。
### `alembic_poo/`
Poo DB 的 migration 基础设施。
### `tests/`
+48 -6
View File
@@ -12,6 +12,7 @@
- 建立 Docker / Compose 基础骨架
- 建立 OpenAPI 导出脚本
- 迁入 `location recorder` 第一版
- 迁入 `poo recorder` 第一版
## 数据库配置现状
@@ -25,13 +26,12 @@
其中:
- `location` 模块已经实际接到 `LOCATION_DATABASE_URL`
- `poo` 目前只保留 `POO_DATABASE_URL` 配置占位,等待模块迁入
- `poo` 模块已经实际接到 `POO_DATABASE_URL`
## 当前阶段未做内容
- 未迁移 TickTick 业务逻辑
- 未迁移 Home Assistant 业务逻辑
- 未迁移 poo records
- 未迁移 Home Assistant inbound / outbound 之外的其他业务逻辑
- 未实现真实 OAuth 流程
- 未做数据迁移
@@ -74,15 +74,57 @@ CREATE TABLE location (
- DB 尚未被 Alembic 接管时拒绝启动
- DB revision 与当前应用预期不一致时拒绝启动
## Poo recorder 说明
当前 Python 项目已经接入:
- `POST /poo/record`
- `GET /poo/latest`
并对齐当前真实 baseline schema
```sql
CREATE TABLE poo_records (
timestamp TEXT NOT NULL,
status TEXT NOT NULL,
latitude REAL NOT NULL,
longitude REAL NOT NULL,
PRIMARY KEY (timestamp)
);
```
历史上 legacy Go 实现使用:
```sql
PRAGMA user_version = 1;
```
当前已经补上与 location 一致风格的 Alembic baseline / 接管策略:
- `poo_records` 当前 schema 被视为 Alembic baseline
- 新数据库通过 `alembic_poo upgrade head` 初始化
- 已有 legacy SQLite 数据库通过 `alembic stamp` 接管
- `PRAGMA user_version = 1` 仅保留为历史事实,不再作为新的主 migration 机制
同时这一轮明确移除了 Notion
- 不迁 Notion sync
- 不迁 Notion adapter
- `POST /poo/record` 不再依赖 `tableId` 才能写入
详见:
- [poo-recorder.md](poo-recorder.md)
## 后续建议顺序
建议继续沿用既有迁移文档中的顺序:
1. 先迁 `location recorder`
2. 再迁 Home Assistant 出站适配层
3. 再迁 TickTick adapter
4. 再迁 Home Assistant 命令网关
5. 最后迁 `poo recorder`
3. 再迁 Home Assistant 命令网关
4. 再迁 `poo recorder`
5. 最后迁 TickTick adapter
## 开发约束提醒
+140
View File
@@ -0,0 +1,140 @@
# Poo Recorder
本文档说明 `poo recorder` 在 Python 项目中的当前行为边界,以及 poo SQLite 的 Alembic 接管策略。
## 当前基线
当前生产版本中的真实 SQLite schema 为:
```sql
CREATE TABLE poo_records (
timestamp TEXT NOT NULL,
status TEXT NOT NULL,
latitude REAL NOT NULL,
longitude REAL NOT NULL,
PRIMARY KEY (timestamp)
);
```
历史上 legacy Go 实现使用:
```sql
PRAGMA user_version = 1;
```
当前 Python 迁移以这套 schema 为事实基线,不重新设计表结构。
## 当前已迁入的 API
当前 Python 项目已经接入:
- `POST /poo/record`
- `GET /poo/latest`
### `POST /poo/record`
用途:
- 记录一条 poo event
- 最佳努力地刷新 Home Assistant sensor
- 如果配置了 `POO_WEBHOOK_ID`,最佳努力地触发 Home Assistant webhook
请求体:
```json
{
"status": "done",
"latitude": "1.23",
"longitude": "4.56"
}
```
当前策略:
- unknown field:`400 bad request`
- 数值非法:`400 bad request`
- 记录成功后,即使 Home Assistant side effect 失败,也不会回滚本地 DB 写入
### `GET /poo/latest`
用途:
- 读取最新一条 poo 记录
- 将其重新发布到 Home Assistant sensor
当前外部行为与 legacy 保持一致:
- 成功:空响应体,HTTP 200
- 如果当前 DB 里还没有任何 poo 记录:仍返回空响应体,HTTP 200,但不会发布 sensor
- 真正的发布失败:简洁 `internal server error`
## Home Assistant side effects
当前已复用 Python 项目中已有的 Home Assistant outbound adapter。
当前支持:
- 发布 / 更新 poo status sensor
- 可选触发 webhook
相关配置:
- `HOME_ASSISTANT_BASE_URL`
- `HOME_ASSISTANT_AUTH_TOKEN`
- `HOME_ASSISTANT_TIMEOUT_SECONDS`
- `POO_SENSOR_ENTITY_NAME`
- `POO_SENSOR_FRIENDLY_NAME`
- `POO_WEBHOOK_ID`
## Alembic 接管策略
poo 的接管逻辑刻意保持与 location 一致。
当前 baseline revision:
- `20260420_01_poo_baseline`
当前提供的脚本入口:
```bash
python scripts/poo_db_adopt.py
```
或:
```bash
python -m scripts.poo_db_adopt
```
规则如下:
1. 如果本地不存在 poo DB 文件:
- 视为新库初始化
- 通过 `alembic_poo upgrade head` 创建新库
2. 如果本地已经存在 legacy DB:
- 先检查 `poo_records` 表 schema
- 再检查 `PRAGMA user_version = 1`
- 只有完全匹配,才通过 Alembic `stamp` 接管
3. 如果 schema 或 `user_version` 不匹配:
- 直接失败
- 不自动修复
4. 如果数据库已经存在 `alembic_version`
- 只有 revision 与当前 baseline 一致才接受
- 否则直接失败
同时,应用启动时也会对 `POO_DATABASE_URL` 做只读校验:
- 文件不存在:拒绝启动
- DB 尚未被 Alembic 接管:拒绝启动
- revision 不匹配:拒绝启动
## 明确移除 Notion
这一轮不会迁入任何 Notion 逻辑。
也就是说,当前 Python 版的 poo recorder
- 不保留 Notion adapter
- 不保留 Notion sync
- 不保留 `tableId` 依赖
- 不因为 legacy 中存在 Notion 就继续保留兼容层
+1 -1
View File
@@ -13,6 +13,6 @@
原则上:
- 新的 Python 实现继续在仓库根目录的 `app/``tests/``alembic/` 等目录演进
- 新的 Python 实现继续在仓库根目录的 `app/``tests/``alembic_location/``alembic_poo/` 等目录演进
- 旧 Go 代码只作为迁移参考,不再作为新实现的结构基础
- 当 Python 重构完成并验证稳定后,可以考虑整块删除 `legacy/go-backend/`
+38
View File
@@ -85,6 +85,44 @@
}
}
}
},
"/poo/record": {
"post": {
"tags": [
"poo"
],
"summary": "Create Poo Record",
"operationId": "create_poo_record_poo_record_post",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
}
}
}
},
"/poo/latest": {
"get": {
"tags": [
"poo"
],
"summary": "Notify Latest Poo",
"operationId": "notify_latest_poo_poo_latest_get",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {}
}
}
}
}
}
}
},
"components": {
+24
View File
@@ -55,6 +55,30 @@ paths:
content:
application/json:
schema: {}
/poo/record:
post:
tags:
- poo
summary: Create Poo Record
operationId: create_poo_record_poo_record_post
responses:
'200':
description: Successful Response
content:
application/json:
schema: {}
/poo/latest:
get:
tags:
- poo
summary: Notify Latest Poo
operationId: notify_latest_poo_poo_latest_get
responses:
'200':
description: Successful Response
content:
application/json:
schema: {}
components:
schemas:
StatusResponse:
+1 -1
View File
@@ -38,7 +38,7 @@ def _database_path_from_url(database_url: str) -> Path:
def _make_alembic_config(database_url: str) -> Config:
config = Config("alembic.ini")
config = Config("alembic_location.ini")
config.set_main_option("sqlalchemy.url", database_url)
return config
+172
View File
@@ -0,0 +1,172 @@
from __future__ import annotations
import sqlite3
import sys
from pathlib import Path
from alembic import command
from alembic.config import Config
# Allow running as a plain script (python scripts/poo_db_adopt.py) by making
# the project root importable before pulling in app modules.
PROJECT_ROOT = Path(__file__).resolve().parents[1]
if str(PROJECT_ROOT) not in sys.path:
    sys.path.insert(0, str(PROJECT_ROOT))

from app.config import get_settings

POO_BASELINE_REVISION = "20260420_01_poo_baseline"
# PRAGMA user_version value the legacy Go implementation stamped.
EXPECTED_USER_VERSION = 1
# Exact PRAGMA table_info rows (cid, name, type, notnull, dflt_value, pk)
# a legacy baseline poo_records table must produce.
EXPECTED_POO_TABLE_INFO = [
    (0, "timestamp", "TEXT", 1, None, 1),
    (1, "status", "TEXT", 1, None, 0),
    (2, "latitude", "REAL", 1, None, 0),
    (3, "longitude", "REAL", 1, None, 0),
]
class PooDatabaseAdoptionError(RuntimeError):
    """Raised when a legacy poo database does not match the expected baseline.

    Also raised by the runtime validation (missing file, unmanaged DB,
    revision mismatch) so the app can refuse to start with one error type.
    """
def _database_path_from_url(database_url: str) -> Path:
prefix = "sqlite:///"
if not database_url.startswith(prefix):
raise PooDatabaseAdoptionError(
f"Only sqlite URLs are supported for poo DB adoption, got: {database_url}"
)
return Path(database_url[len(prefix) :])
def _make_alembic_config(database_url: str) -> Config:
    """Build an Alembic Config for alembic_poo.ini pointed at *database_url*."""
    config = Config("alembic_poo.ini")
    config.set_main_option("sqlalchemy.url", database_url)
    return config
def _poo_table_exists(database_path: Path) -> bool:
conn = sqlite3.connect(database_path)
try:
row = conn.execute(
"SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = 'poo_records'"
).fetchone()
return row is not None
finally:
conn.close()
def _alembic_version_table_exists(database_path: Path) -> bool:
conn = sqlite3.connect(database_path)
try:
row = conn.execute(
"SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = 'alembic_version'"
).fetchone()
return row is not None
finally:
conn.close()
def _fetch_alembic_revision(database_path: Path) -> str:
conn = sqlite3.connect(database_path)
try:
row = conn.execute("SELECT version_num FROM alembic_version").fetchone()
if row is None:
raise PooDatabaseAdoptionError("Alembic version table exists but contains no revision")
return row[0]
finally:
conn.close()
def _fetch_poo_table_info(database_path: Path) -> list[tuple]:
conn = sqlite3.connect(database_path)
try:
return list(conn.execute("PRAGMA table_info(poo_records)"))
finally:
conn.close()
def _fetch_user_version(database_path: Path) -> int:
conn = sqlite3.connect(database_path)
try:
return conn.execute("PRAGMA user_version").fetchone()[0]
finally:
conn.close()
def validate_legacy_poo_db(database_url: str) -> None:
    """Verify a pre-Alembic poo DB matches the legacy baseline exactly.

    Checks, in order: file existence, presence of 'poo_records', exact
    column layout, then PRAGMA user_version. Raises PooDatabaseAdoptionError
    on the first mismatch; never mutates the database.
    """
    database_path = _database_path_from_url(database_url)
    if not database_path.exists():
        raise PooDatabaseAdoptionError(f"Poo DB file does not exist: {database_path}")
    if not _poo_table_exists(database_path):
        raise PooDatabaseAdoptionError("Expected table 'poo_records' was not found in the DB")
    table_info = _fetch_poo_table_info(database_path)
    if table_info != EXPECTED_POO_TABLE_INFO:
        raise PooDatabaseAdoptionError("Poo table schema does not match the expected baseline")
    user_version = _fetch_user_version(database_path)
    if user_version != EXPECTED_USER_VERSION:
        raise PooDatabaseAdoptionError(
            f"Expected PRAGMA user_version = {EXPECTED_USER_VERSION}, got {user_version}"
        )
def validate_poo_runtime_db(database_url: str) -> None:
    """Read-only startup check that the poo DB is Alembic-managed at baseline.

    Raises PooDatabaseAdoptionError when the file is missing, the DB lacks
    an alembic_version table, or the stored revision differs from the
    expected baseline. Never attempts to repair anything.
    """
    database_path = _database_path_from_url(database_url)
    if not database_path.exists():
        raise PooDatabaseAdoptionError(
            "Poo DB file was not found. Run 'python scripts/poo_db_adopt.py' first to "
            "initialize or adopt the poo DB before starting the app."
        )
    if not _alembic_version_table_exists(database_path):
        raise PooDatabaseAdoptionError(
            "Poo DB exists but is not yet Alembic-managed. Run "
            "'python scripts/poo_db_adopt.py' first to adopt the legacy DB "
            "before starting the app."
        )
    current_revision = _fetch_alembic_revision(database_path)
    if current_revision != POO_BASELINE_REVISION:
        raise PooDatabaseAdoptionError(
            "Poo DB revision mismatch. Refusing to start the app: "
            f"expected {POO_BASELINE_REVISION}, got {current_revision}"
        )
def adopt_or_initialize_poo_db(database_url: str) -> str:
    """Bring the poo DB under Alembic control.

    Returns one of:
    - "already_managed": alembic_version exists at the expected baseline;
    - "adopted": a valid legacy DB was stamped with the baseline revision;
    - "initialized": no file existed and a fresh DB was created via upgrade head.

    Raises PooDatabaseAdoptionError on any schema/revision mismatch rather
    than attempting an automatic repair.
    """
    database_path = _database_path_from_url(database_url)
    alembic_config = _make_alembic_config(database_url)
    if database_path.exists():
        if _alembic_version_table_exists(database_path):
            current_revision = _fetch_alembic_revision(database_path)
            if current_revision != POO_BASELINE_REVISION:
                raise PooDatabaseAdoptionError(
                    "Poo DB is already Alembic-managed but revision does not match "
                    f"the expected baseline: expected {POO_BASELINE_REVISION}, "
                    f"got {current_revision}"
                )
            return "already_managed"
        # Legacy file without Alembic bookkeeping: verify it matches the
        # known baseline exactly, then stamp instead of re-creating tables.
        validate_legacy_poo_db(database_url)
        command.stamp(alembic_config, POO_BASELINE_REVISION)
        return "adopted"
    database_path.parent.mkdir(parents=True, exist_ok=True)
    command.upgrade(alembic_config, "head")
    return "initialized"
def main() -> None:
    """CLI entry point: adopt or initialize the configured poo DB and report."""
    outcome = adopt_or_initialize_poo_db(get_settings().poo_database_url)
    messages = {
        "initialized": "Initialized a new poo DB via Alembic upgrade head.",
        "already_managed": "Poo DB is already Alembic-managed at the expected baseline revision.",
    }
    default_message = "Validated legacy poo DB and stamped Alembic baseline successfully."
    print(messages.get(outcome, default_message))
if __name__ == "__main__":
main()
+34 -3
View File
@@ -13,7 +13,13 @@ from app.main import create_app
def _make_alembic_config(database_url: str) -> Config:
config = Config("alembic.ini")
config = Config("alembic_location.ini")
config.set_main_option("sqlalchemy.url", database_url)
return config
def _make_poo_alembic_config(database_url: str) -> Config:
    """Alembic Config for the poo migration chain, pointed at *database_url*."""
    config = Config("alembic_poo.ini")
    config.set_main_option("sqlalchemy.url", database_url)
    return config
@@ -47,7 +53,13 @@ def ready_location_database(test_database_urls):
@pytest.fixture
def app(ready_location_database):
def ready_poo_database(test_database_urls):
command.upgrade(_make_poo_alembic_config(test_database_urls["poo_url"]), "head")
return test_database_urls
@pytest.fixture
def app(ready_location_database, ready_poo_database):
yield create_app()
@@ -58,7 +70,7 @@ def client(app):
@pytest.fixture
def location_client(ready_location_database, monkeypatch: pytest.MonkeyPatch):
def location_client(ready_location_database, ready_poo_database, monkeypatch: pytest.MonkeyPatch):
database_url = ready_location_database["location_url"]
engine = create_engine(database_url, connect_args={"check_same_thread": False})
@@ -72,3 +84,22 @@ def location_client(ready_location_database, monkeypatch: pytest.MonkeyPatch):
yield client, engine
engine.dispose()
@pytest.fixture
def poo_client(ready_location_database, ready_poo_database, monkeypatch: pytest.MonkeyPatch):
    """TestClient wired to a throwaway, migrated poo test database.

    Yields (client, engine); the engine is disposed on teardown.
    """
    database_url = ready_poo_database["poo_url"]
    engine = create_engine(database_url, connect_args={"check_same_thread": False})
    session_local = sessionmaker(bind=engine, autoflush=False, autocommit=False)
    # Patch the module-level engine/session factory that get_poo_db_session
    # uses, so requests hit the test DB instead of the configured one.
    import app.poo_db as poo_db
    monkeypatch.setattr(poo_db, "poo_engine", engine)
    monkeypatch.setattr(poo_db, "PooSessionLocal", session_local)
    fastapi_app = create_app()
    with TestClient(fastapi_app) as client:
        yield client, engine
    engine.dispose()
+13 -4
View File
@@ -7,7 +7,7 @@ from fastapi.testclient import TestClient
from app.config import get_settings
from app.main import create_app
from tests.conftest import _make_alembic_config
from tests.conftest import _make_alembic_config, _make_poo_alembic_config
async def _run_lifespan(app) -> None:
@@ -29,8 +29,11 @@ def test_status_endpoint(client: TestClient) -> None:
def test_app_start_fails_when_location_db_missing(
tmp_path, monkeypatch: pytest.MonkeyPatch
) -> None:
poo_database_path = tmp_path / "poo_ready.db"
command.upgrade(_make_poo_alembic_config(f"sqlite:///{poo_database_path}"), "head")
monkeypatch.setenv("LOCATION_DATABASE_URL", f"sqlite:///{tmp_path / 'missing.db'}")
monkeypatch.setenv("POO_DATABASE_URL", f"sqlite:///{tmp_path / 'poo_placeholder.db'}")
monkeypatch.setenv("POO_DATABASE_URL", f"sqlite:///{poo_database_path}")
get_settings.cache_clear()
app = create_app()
@@ -43,6 +46,9 @@ def test_app_start_fails_when_location_db_missing(
def test_app_start_fails_when_location_db_exists_but_is_not_adopted(
tmp_path, monkeypatch: pytest.MonkeyPatch
) -> None:
poo_database_path = tmp_path / "poo_ready.db"
command.upgrade(_make_poo_alembic_config(f"sqlite:///{poo_database_path}"), "head")
database_path = tmp_path / "legacy_only.db"
conn = sqlite3.connect(database_path)
conn.execute(
@@ -62,7 +68,7 @@ def test_app_start_fails_when_location_db_exists_but_is_not_adopted(
conn.close()
monkeypatch.setenv("LOCATION_DATABASE_URL", f"sqlite:///{database_path}")
monkeypatch.setenv("POO_DATABASE_URL", f"sqlite:///{tmp_path / 'poo_placeholder.db'}")
monkeypatch.setenv("POO_DATABASE_URL", f"sqlite:///{poo_database_path}")
get_settings.cache_clear()
app = create_app()
@@ -75,6 +81,9 @@ def test_app_start_fails_when_location_db_exists_but_is_not_adopted(
def test_app_start_fails_when_location_db_revision_mismatches(
tmp_path, monkeypatch: pytest.MonkeyPatch
) -> None:
poo_database_path = tmp_path / "poo_ready.db"
command.upgrade(_make_poo_alembic_config(f"sqlite:///{poo_database_path}"), "head")
database_path = tmp_path / "wrong_revision.db"
command.upgrade(_make_alembic_config(f"sqlite:///{database_path}"), "head")
@@ -84,7 +93,7 @@ def test_app_start_fails_when_location_db_revision_mismatches(
conn.close()
monkeypatch.setenv("LOCATION_DATABASE_URL", f"sqlite:///{database_path}")
monkeypatch.setenv("POO_DATABASE_URL", f"sqlite:///{tmp_path / 'poo_placeholder.db'}")
monkeypatch.setenv("POO_DATABASE_URL", f"sqlite:///{poo_database_path}")
get_settings.cache_clear()
app = create_app()
+6
View File
@@ -4,6 +4,9 @@ from app.config import Settings
def test_settings_support_two_independent_database_urls(monkeypatch) -> None:
monkeypatch.setenv("LOCATION_DATABASE_URL", "sqlite:///./data/locationRecorder.db")
monkeypatch.setenv("POO_DATABASE_URL", "sqlite:///./data/pooRecorder.db")
monkeypatch.setenv("POO_WEBHOOK_ID", "poo-hook")
monkeypatch.setenv("POO_SENSOR_ENTITY_NAME", "sensor.test_poo_status")
monkeypatch.setenv("POO_SENSOR_FRIENDLY_NAME", "Poo Status")
monkeypatch.setenv("HOME_ASSISTANT_BASE_URL", "http://ha.local:8123")
monkeypatch.setenv("HOME_ASSISTANT_AUTH_TOKEN", "token")
monkeypatch.setenv("HOME_ASSISTANT_TIMEOUT_SECONDS", "2.5")
@@ -12,6 +15,9 @@ def test_settings_support_two_independent_database_urls(monkeypatch) -> None:
assert settings.location_database_url == "sqlite:///./data/locationRecorder.db"
assert settings.poo_database_url == "sqlite:///./data/pooRecorder.db"
assert settings.poo_webhook_id == "poo-hook"
assert settings.poo_sensor_entity_name == "sensor.test_poo_status"
assert settings.poo_sensor_friendly_name == "Poo Status"
assert settings.home_assistant_base_url == "http://ha.local:8123"
assert settings.home_assistant_auth_token == "token"
assert settings.home_assistant_timeout_seconds == 2.5
+1 -1
View File
@@ -97,7 +97,7 @@ def test_homeassistant_client_raises_on_http_error(monkeypatch: pytest.MonkeyPat
def test_homeassistant_client_raises_when_not_configured() -> None:
client = HomeAssistantClient(settings=Settings())
client = HomeAssistantClient(settings=Settings(_env_file=None))
with pytest.raises(HomeAssistantConfigError, match="not configured"):
client.publish_sensor(entity_id="sensor.test_status", state="ok")
+4 -1
View File
@@ -16,10 +16,11 @@ from scripts.location_db_adopt import (
LocationDatabaseAdoptionError,
adopt_or_initialize_location_db,
)
from tests.conftest import _make_poo_alembic_config
def _make_alembic_config(database_url: str) -> Config:
config = Config("alembic.ini")
config = Config("alembic_location.ini")
config.set_main_option("sqlalchemy.url", database_url)
return config
@@ -201,6 +202,7 @@ def test_legacy_style_location_db_can_be_stamped_and_adopted(
) -> None:
database_path = test_database_urls["location_path"]
database_url = test_database_urls["location_url"]
poo_database_url = test_database_urls["poo_url"]
conn = sqlite3.connect(database_path)
conn.execute(
@@ -220,6 +222,7 @@ def test_legacy_style_location_db_can_be_stamped_and_adopted(
conn.close()
command.stamp(_make_alembic_config(database_url), LOCATION_BASELINE_REVISION)
command.upgrade(_make_poo_alembic_config(poo_database_url), "head")
engine = create_engine(database_url, connect_args={"check_same_thread": False})
session_local = sessionmaker(bind=engine, autoflush=False, autocommit=False)
+248
View File
@@ -0,0 +1,248 @@
from pathlib import Path
import sqlite3
import pytest
from sqlalchemy import text
from app.config import Settings, get_settings
from app.dependencies import get_app_settings, get_homeassistant_client
from scripts.poo_db_adopt import (
EXPECTED_USER_VERSION,
POO_BASELINE_REVISION,
PooDatabaseAdoptionError,
adopt_or_initialize_poo_db,
)
class _FakeHomeAssistantClient:
def __init__(self) -> None:
self.sensor_calls: list[dict] = []
self.webhook_calls: list[dict] = []
def publish_sensor(self, *, entity_id: str, state: str, attributes: dict | None = None) -> None:
self.sensor_calls.append(
{"entity_id": entity_id, "state": state, "attributes": attributes or {}}
)
def trigger_webhook(self, *, webhook_id: str, body) -> None:
self.webhook_calls.append({"webhook_id": webhook_id, "body": body})
@pytest.fixture
def poo_client_with_overrides(poo_client):
    """Poo API test client wired to a fake Home Assistant and fixed poo settings.

    Yields ``(client, engine, fake_ha)`` where ``fake_ha`` records all outbound
    Home Assistant calls. Dependency overrides are cleared and the settings
    cache reset on teardown so other tests see pristine state.
    """
    client, engine = poo_client
    fake_ha = _FakeHomeAssistantClient()
    # _env_file=None keeps the fixture hermetic: otherwise a developer's local
    # .env would be read and leak unrelated configuration into these tests.
    settings = Settings(
        _env_file=None,
        poo_webhook_id="poo-hook",
        poo_sensor_entity_name="sensor.test_poo_status",
        poo_sensor_friendly_name="Poo Status",
    )
    client.app.dependency_overrides[get_homeassistant_client] = lambda: fake_ha
    client.app.dependency_overrides[get_app_settings] = lambda: settings
    try:
        yield client, engine, fake_ha
    finally:
        client.app.dependency_overrides.clear()
        get_settings.cache_clear()
def test_poo_record_endpoint_writes_row_and_notifies_homeassistant(
    poo_client_with_overrides,
) -> None:
    """Recording a poo event persists one row and pushes sensor + webhook updates."""
    client, engine, fake_ha = poo_client_with_overrides

    payload = {"status": "done", "latitude": "1.23", "longitude": "4.56"}
    response = client.post("/poo/record", json=payload)

    # The endpoint answers 200 with an intentionally empty body.
    assert response.status_code == 200
    assert response.text == ""

    # The newest row must carry exactly the posted values.
    latest_query = text(
        "SELECT status, latitude, longitude FROM poo_records "
        "ORDER BY timestamp DESC LIMIT 1"
    )
    with engine.connect() as conn:
        stored = conn.execute(latest_query).one()
    assert stored.status == "done"
    assert stored.latitude == pytest.approx(1.23)
    assert stored.longitude == pytest.approx(4.56)

    # Exactly one sensor update and one webhook trigger reach Home Assistant.
    assert len(fake_ha.sensor_calls) == 1
    sensor_call = fake_ha.sensor_calls[0]
    assert sensor_call["entity_id"] == "sensor.test_poo_status"
    assert sensor_call["state"] == "done"
    assert sensor_call["attributes"]["friendly_name"] == "Poo Status"
    assert fake_ha.webhook_calls == [
        {"webhook_id": "poo-hook", "body": {"status": "done"}}
    ]
def test_poo_latest_endpoint_publishes_latest_status(poo_client_with_overrides) -> None:
    """GET /poo/latest republishes the most recent record's status as a sensor."""
    client, engine, fake_ha = poo_client_with_overrides

    # Seed one record directly so the endpoint has something to republish.
    seed_row = {
        "timestamp": "2026-04-20T10:05Z",
        "status": "urgent",
        "latitude": 3.21,
        "longitude": 6.54,
    }
    insert_stmt = text(
        "INSERT INTO poo_records (timestamp, status, latitude, longitude) "
        "VALUES (:timestamp, :status, :latitude, :longitude)"
    )
    with engine.begin() as conn:
        conn.execute(insert_stmt, seed_row)

    response = client.get("/poo/latest")

    assert response.status_code == 200
    assert response.text == ""
    assert len(fake_ha.sensor_calls) == 1
    published = fake_ha.sensor_calls[0]
    assert published["state"] == "urgent"
    # The attribute must be present and truthy; its exact format is not pinned.
    assert published["attributes"]["last_poo"]
def test_poo_record_endpoint_rejects_unknown_fields(poo_client_with_overrides) -> None:
    """Payloads carrying unexpected keys are rejected with a plain-text 400."""
    client, _, _ = poo_client_with_overrides
    payload = {
        "status": "done",
        "latitude": "1.23",
        "longitude": "4.56",
        "extra": "nope",
    }
    response = client.post("/poo/record", json=payload)
    assert response.status_code == 400
    assert response.text == "bad request"
def test_poo_record_endpoint_rejects_invalid_latitude(poo_client_with_overrides) -> None:
    """A latitude that does not parse as a number yields a plain-text 400."""
    client, _, _ = poo_client_with_overrides
    payload = {
        "status": "done",
        "latitude": "oops",
        "longitude": "4.56",
    }
    response = client.post("/poo/record", json=payload)
    assert response.status_code == 400
    assert response.text == "bad request"
def test_poo_latest_endpoint_returns_ok_when_no_record_exists(poo_client_with_overrides) -> None:
    """An empty table is not an error: /poo/latest still answers 200 with no body."""
    client, _, _ = poo_client_with_overrides
    response = client.get("/poo/latest")
    assert response.status_code == 200
    assert response.text == ""
def test_poo_db_adoption_initializes_new_db(tmp_path: Path) -> None:
    """Adopting a missing DB file creates it, builds the schema, and stamps Alembic."""
    database_path = tmp_path / "new_poo.db"

    outcome = adopt_or_initialize_poo_db(f"sqlite:///{database_path}")

    assert outcome == "initialized"
    assert database_path.exists()
    # Inspect the fresh file directly with sqlite3 rather than through SQLAlchemy.
    conn = sqlite3.connect(database_path)
    try:
        stamped_revision = conn.execute(
            "SELECT version_num FROM alembic_version"
        ).fetchone()[0]
        table_row = conn.execute(
            "SELECT name FROM sqlite_master WHERE type = 'table' AND name = 'poo_records'"
        ).fetchone()
    finally:
        conn.close()
    assert stamped_revision == POO_BASELINE_REVISION
    assert table_row is not None
def test_poo_db_adoption_validates_and_stamps_legacy_db(tmp_path: Path) -> None:
    """A legacy DB whose schema and user_version check out is stamped, not rebuilt."""
    database_path = tmp_path / "legacy_poo.db"
    database_url = f"sqlite:///{database_path}"

    # Build a pre-Alembic database shaped like the legacy application left it.
    legacy_conn = sqlite3.connect(database_path)
    legacy_conn.execute(
        """
        CREATE TABLE poo_records (
            timestamp TEXT NOT NULL,
            status TEXT NOT NULL,
            latitude REAL NOT NULL,
            longitude REAL NOT NULL,
            PRIMARY KEY (timestamp)
        )
        """
    )
    legacy_conn.execute(f"PRAGMA user_version = {EXPECTED_USER_VERSION}")
    legacy_conn.commit()
    legacy_conn.close()

    assert adopt_or_initialize_poo_db(database_url) == "adopted"

    # Adoption must have stamped the baseline revision into alembic_version.
    verify_conn = sqlite3.connect(database_path)
    try:
        stamped_revision = verify_conn.execute(
            "SELECT version_num FROM alembic_version"
        ).fetchone()[0]
    finally:
        verify_conn.close()
    assert stamped_revision == POO_BASELINE_REVISION
def test_poo_db_adoption_fails_closed_on_schema_mismatch(tmp_path: Path) -> None:
    """A legacy DB with a divergent schema must be refused, never silently adopted."""
    database_path = tmp_path / "bad_poo_schema.db"
    database_url = f"sqlite:///{database_path}"

    # The longitude column is deliberately missing from this table.
    legacy_conn = sqlite3.connect(database_path)
    legacy_conn.execute(
        """
        CREATE TABLE poo_records (
            timestamp TEXT NOT NULL,
            status TEXT NOT NULL,
            latitude REAL NOT NULL,
            PRIMARY KEY (timestamp)
        )
        """
    )
    legacy_conn.execute(f"PRAGMA user_version = {EXPECTED_USER_VERSION}")
    legacy_conn.commit()
    legacy_conn.close()

    with pytest.raises(PooDatabaseAdoptionError, match="schema does not match"):
        adopt_or_initialize_poo_db(database_url)
def test_poo_db_adoption_fails_closed_on_user_version_mismatch(tmp_path: Path) -> None:
    """A correct schema with the wrong PRAGMA user_version must still be refused."""
    database_path = tmp_path / "bad_poo_user_version.db"
    database_url = f"sqlite:///{database_path}"

    legacy_conn = sqlite3.connect(database_path)
    legacy_conn.execute(
        """
        CREATE TABLE poo_records (
            timestamp TEXT NOT NULL,
            status TEXT NOT NULL,
            latitude REAL NOT NULL,
            longitude REAL NOT NULL,
            PRIMARY KEY (timestamp)
        )
        """
    )
    # 999 is an arbitrary value that does not match EXPECTED_USER_VERSION.
    legacy_conn.execute("PRAGMA user_version = 999")
    legacy_conn.commit()
    legacy_conn.close()

    with pytest.raises(PooDatabaseAdoptionError, match="Expected PRAGMA user_version"):
        adopt_or_initialize_poo_db(database_url)