5 Commits

Author SHA1 Message Date
tliu93 c9af7530e5 Merge pull request 'change adoption to separate step' (#4) from feature/add_separate_migration_container into main
pytest / test (push) Failing after 44s
docker-image / build-and-push (push) Successful in 3m40s
Reviewed-on: #4
2026-04-22 13:28:30 +02:00
tliu93 a76d6bfb71 change adoption to separate step
pytest / test (push) Failing after 46s
pytest / test (pull_request) Failing after 45s
2026-04-22 13:28:00 +02:00
tliu93 35aee79d93 Restore legacy poo inbound dispatch
pytest / test (push) Successful in 43s
docker-image / build-and-push (push) Successful in 3m38s
2026-04-20 23:33:57 +02:00
tliu93 b9e7f51d51 Split compose dev build from registry deploy
pytest / test (push) Successful in 44s
2026-04-20 23:16:13 +02:00
tliu93 94747c75dd Align image publishing with repository path
pytest / test (push) Successful in 43s
docker-image / build-and-push (push) Successful in 3m37s
2026-04-20 23:05:27 +02:00
16 changed files with 627 additions and 35 deletions
+7 -3
View File
@@ -5,6 +5,10 @@ on:
tags: tags:
- "v*" - "v*"
env:
REGISTRY_HOST: code.wanderingbadger.dev
IMAGE_NAME: ${{ github.repository }}
jobs: jobs:
build-and-push: build-and-push:
runs-on: ubuntu-latest runs-on: ubuntu-latest
@@ -24,7 +28,7 @@ jobs:
- name: Log in to Gitea Container Registry - name: Log in to Gitea Container Registry
uses: docker/login-action@v3 uses: docker/login-action@v3
with: with:
registry: code.wanderingbadger.dev registry: ${{ env.REGISTRY_HOST }}
username: ${{ secrets.REGISTRY_USERNAME }} username: ${{ secrets.REGISTRY_USERNAME }}
password: ${{ secrets.REGISTRY_TOKEN }} password: ${{ secrets.REGISTRY_TOKEN }}
@@ -35,5 +39,5 @@ jobs:
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
push: true push: true
tags: | tags: |
code.wanderingbadger.dev/tliu93/home-automation:${{ github.ref_name }} ${{ env.REGISTRY_HOST }}/${{ env.IMAGE_NAME }}:${{ github.ref_name }}
code.wanderingbadger.dev/tliu93/home-automation:latest ${{ env.REGISTRY_HOST }}/${{ env.IMAGE_NAME }}:latest
+1
View File
@@ -23,3 +23,4 @@ RUN mkdir -p /app/data
EXPOSE 8000 EXPOSE 8000
ENTRYPOINT ["/app/docker/entrypoint.sh"] ENTRYPOINT ["/app/docker/entrypoint.sh"]
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
+28 -5
View File
@@ -107,9 +107,7 @@ cp .env.example .env
3. 初始化数据库 3. 初始化数据库
```bash ```bash
python scripts/app_db_adopt.py python -m scripts.run_migrations
python scripts/location_db_adopt.py
python scripts/poo_db_adopt.py
``` ```
4. 启动服务 4. 启动服务
@@ -141,6 +139,7 @@ uvicorn app.main:app --reload --host 0.0.0.0 --port 8000
- App Alembic 环境:`alembic_app.ini` + `alembic_app/` - App Alembic 环境:`alembic_app.ini` + `alembic_app/`
- Location Alembic 环境:`alembic_location.ini` + `alembic_location/` - Location Alembic 环境:`alembic_location.ini` + `alembic_location/`
- Poo Alembic 环境:`alembic_poo.ini` + `alembic_poo/` - Poo Alembic 环境:`alembic_poo.ini` + `alembic_poo/`
- 统一 migration job:`python -m scripts.run_migrations`
- App DB 初始化:`python scripts/app_db_adopt.py` - App DB 初始化:`python scripts/app_db_adopt.py`
- Location DB 接管 / 初始化:`python scripts/location_db_adopt.py` - Location DB 接管 / 初始化:`python scripts/location_db_adopt.py`
- Poo DB 接管 / 初始化:`python scripts/poo_db_adopt.py` - Poo DB 接管 / 初始化:`python scripts/poo_db_adopt.py`
@@ -217,12 +216,26 @@ python scripts/export_openapi.py
当前默认 Compose 服务名为 `app`,容器名固定为 `home-automation-app` 当前默认 Compose 服务名为 `app`,容器名固定为 `home-automation-app`
启动方式 当前 Compose 分成两层
- `docker-compose.yml`:默认使用 registry image,适合部署 / 生产拉取
- `docker-compose.override.yml`:仅为本地开发追加 `build: .`
本地开发启动方式:
```bash ```bash
docker compose up -d --build docker compose up -d --build
``` ```
上面的命令会自动叠加 `docker-compose.override.yml`,因此本地仍然会按当前工作目录重新 build。
如果要按生产方式直接从 registry 拉取并启动,显式只使用基础 compose 文件:
```bash
docker compose -f docker-compose.yml pull
docker compose -f docker-compose.yml up -d
```
持续查看日志: 持续查看日志:
```bash ```bash
@@ -236,7 +249,17 @@ docker compose logs -f app
- workflow 文件:`.github/workflows/docker-image.yml` - workflow 文件:`.github/workflows/docker-image.yml`
- 触发条件:push 匹配 `v*` 的 tag,例如 `v1.0.0` - 触发条件:push 匹配 `v*` 的 tag,例如 `v1.0.0`
- registry`code.wanderingbadger.dev` - registry`code.wanderingbadger.dev`
- image`code.wanderingbadger.dev/tliu93/home-automation` - image`code.wanderingbadger.dev/<owner>/<repo>`
`docker-compose.yml` 中生产默认使用的 app image 当前为:
- `code.wanderingbadger.dev/tliu93/home-automation:latest`
当前 workflow 不再把 image name 硬编码到特定 user package 路径,而是直接使用当前仓库标识生成镜像路径:
- `code.wanderingbadger.dev/${github.repository}:${tag}`
在 Gitea 这里,package 更贴近 repo 归属的语义,主要体现在镜像命名路径本身,而不是额外的“绑定”动作。也就是说,当前发布方式是按仓库路径约定来对齐 repo/package 语义。
这个 workflow 会构建并推送 multi-arch image 这个 workflow 会构建并推送 multi-arch image
+32 -4
View File
@@ -6,7 +6,19 @@ from fastapi.responses import PlainTextResponse, Response
from pydantic import ValidationError from pydantic import ValidationError
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from app.dependencies import get_db, get_ticktick_client from app.config import Settings
from app.dependencies import (
get_app_settings,
get_db,
get_homeassistant_client,
get_poo_db,
get_ticktick_client,
)
from app.integrations.homeassistant import (
HomeAssistantClient,
HomeAssistantConfigError,
HomeAssistantRequestError,
)
from app.integrations.ticktick import TickTickClient, TickTickConfigError, TickTickRequestError from app.integrations.ticktick import TickTickClient, TickTickConfigError, TickTickRequestError
from app.schemas.homeassistant import HomeAssistantPublishEnvelope from app.schemas.homeassistant import HomeAssistantPublishEnvelope
from app.services.homeassistant_inbound import ( from app.services.homeassistant_inbound import (
@@ -24,13 +36,23 @@ INTERNAL_SERVER_ERROR_MESSAGE = "internal server error"
async def publish_from_homeassistant( async def publish_from_homeassistant(
request: Request, request: Request,
db: Session = Depends(get_db), db: Session = Depends(get_db),
poo_db: Session = Depends(get_poo_db),
settings: Settings = Depends(get_app_settings),
homeassistant_client: HomeAssistantClient = Depends(get_homeassistant_client),
ticktick_client: TickTickClient = Depends(get_ticktick_client), ticktick_client: TickTickClient = Depends(get_ticktick_client),
) -> Response: ) -> Response:
try: try:
raw_payload = await request.body() raw_payload = await request.body()
data = json.loads(raw_payload) data = json.loads(raw_payload)
envelope = HomeAssistantPublishEnvelope.model_validate(data) envelope = HomeAssistantPublishEnvelope.model_validate(data)
handle_homeassistant_message(db, envelope, ticktick_client) handle_homeassistant_message(
db,
envelope,
ticktick_client=ticktick_client,
poo_session=poo_db,
settings=settings,
homeassistant_client=homeassistant_client,
)
except json.JSONDecodeError as exc: except json.JSONDecodeError as exc:
logger.warning("Rejected Home Assistant publish request due to invalid JSON: %s", exc) logger.warning("Rejected Home Assistant publish request due to invalid JSON: %s", exc)
return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST) return PlainTextResponse(BAD_REQUEST_MESSAGE, status_code=status.HTTP_400_BAD_REQUEST)
@@ -45,8 +67,14 @@ async def publish_from_homeassistant(
INTERNAL_SERVER_ERROR_MESSAGE, INTERNAL_SERVER_ERROR_MESSAGE,
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
) )
except (TickTickConfigError, TickTickRequestError, RuntimeError) as exc: except (
logger.warning("Home Assistant publish request failed during TickTick handling: %s", exc) TickTickConfigError,
TickTickRequestError,
HomeAssistantConfigError,
HomeAssistantRequestError,
RuntimeError,
) as exc:
logger.warning("Home Assistant publish request failed during integration handling: %s", exc)
return PlainTextResponse( return PlainTextResponse(
INTERNAL_SERVER_ERROR_MESSAGE, INTERNAL_SERVER_ERROR_MESSAGE,
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+37
View File
@@ -4,11 +4,14 @@ import json
from datetime import UTC, datetime, time, timedelta from datetime import UTC, datetime, time, timedelta
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from app.config import Settings
from app.integrations.homeassistant import HomeAssistantClient
from app.integrations.ticktick import TICKTICK_DATETIME_FORMAT, TickTickClient, TickTickTask from app.integrations.ticktick import TICKTICK_DATETIME_FORMAT, TickTickClient, TickTickTask
from app.schemas.homeassistant import HomeAssistantPublishEnvelope from app.schemas.homeassistant import HomeAssistantPublishEnvelope
from app.schemas.location import LocationRecordRequest from app.schemas.location import LocationRecordRequest
from app.schemas.ticktick import TickTickActionTaskRequest from app.schemas.ticktick import TickTickActionTaskRequest
from app.services.location import record_location from app.services.location import record_location
from app.services.poo import publish_latest_poo_status
class UnsupportedHomeAssistantMessage(RuntimeError): class UnsupportedHomeAssistantMessage(RuntimeError):
@@ -19,11 +22,23 @@ def handle_homeassistant_message(
session: Session, session: Session,
envelope: HomeAssistantPublishEnvelope, envelope: HomeAssistantPublishEnvelope,
ticktick_client: TickTickClient | None = None, ticktick_client: TickTickClient | None = None,
poo_session: Session | None = None,
settings: Settings | None = None,
homeassistant_client: HomeAssistantClient | None = None,
) -> None: ) -> None:
if envelope.target == "location_recorder": if envelope.target == "location_recorder":
_handle_location_message(session, envelope) _handle_location_message(session, envelope)
return return
if envelope.target == "poo_recorder":
_handle_poo_message(
envelope,
poo_session=poo_session,
settings=settings,
homeassistant_client=homeassistant_client,
)
return
if envelope.target == "ticktick": if envelope.target == "ticktick":
_handle_ticktick_message(envelope, ticktick_client) _handle_ticktick_message(envelope, ticktick_client)
return return
@@ -44,6 +59,28 @@ def _handle_location_message(session: Session, envelope: HomeAssistantPublishEnv
record_location(session, payload) record_location(session, payload)
def _handle_poo_message(
    envelope: HomeAssistantPublishEnvelope,
    *,
    poo_session: Session | None,
    settings: Settings | None,
    homeassistant_client: HomeAssistantClient | None,
) -> None:
    """Handle a Home Assistant message addressed to the poo recorder.

    Only the ``get_latest`` action is supported: it republishes the latest
    poo status back to Home Assistant via ``publish_latest_poo_status``.

    Raises:
        UnsupportedHomeAssistantMessage: for any action other than ``get_latest``.
        RuntimeError: when any of the required collaborators was not injected.
    """
    if envelope.action != "get_latest":
        raise UnsupportedHomeAssistantMessage(
            f"Unsupported Home Assistant target/action: {envelope.target}/{envelope.action}"
        )
    # All three collaborators are optional at the signature level (the dispatcher
    # can be called without them for other targets), so fail explicitly here.
    if poo_session is None or settings is None or homeassistant_client is None:
        raise RuntimeError("Poo recorder integration is unavailable")
    publish_latest_poo_status(
        session=poo_session,
        settings=settings,
        homeassistant_client=homeassistant_client,
    )
def _handle_ticktick_message( def _handle_ticktick_message(
envelope: HomeAssistantPublishEnvelope, envelope: HomeAssistantPublishEnvelope,
ticktick_client: TickTickClient | None, ticktick_client: TickTickClient | None,
+6
View File
@@ -0,0 +1,6 @@
services:
migration:
build: .
app:
build: .
+15 -1
View File
@@ -1,10 +1,24 @@
services: services:
migration:
container_name: home-automation-migration
image: code.wanderingbadger.dev/tliu93/home-automation:latest
user: "1000:1000"
restart: "no"
init: true
command: ["python", "-m", "scripts.run_migrations"]
volumes:
- ./data:/app/data
- ./.env:/app/.env:ro
app: app:
container_name: home-automation-app container_name: home-automation-app
build: . image: code.wanderingbadger.dev/tliu93/home-automation:latest
user: "1000:1000" user: "1000:1000"
restart: unless-stopped restart: unless-stopped
init: true init: true
depends_on:
migration:
condition: service_completed_successfully
ports: ports:
- "127.0.0.1:8881:8000" - "127.0.0.1:8881:8000"
volumes: volumes:
+1 -5
View File
@@ -2,8 +2,4 @@
set -eu set -eu
python scripts/app_db_adopt.py exec "$@"
python scripts/location_db_adopt.py
python scripts/poo_db_adopt.py
exec uvicorn app.main:app --host 0.0.0.0 --port 8000
+31 -3
View File
@@ -6,6 +6,8 @@ from pathlib import Path
from alembic import command from alembic import command
from alembic.config import Config from alembic.config import Config
from alembic.script import ScriptDirectory
from alembic.util.exc import CommandError
PROJECT_ROOT = Path(__file__).resolve().parents[1] PROJECT_ROOT = Path(__file__).resolve().parents[1]
if str(PROJECT_ROOT) not in sys.path: if str(PROJECT_ROOT) not in sys.path:
@@ -35,6 +37,24 @@ def _make_alembic_config(database_url: str) -> Config:
return config return config
def _expected_head_revision(alembic_config: Config) -> str:
    """Return the single Alembic head revision configured for the app DB.

    Raises AppDatabaseAdoptionError when the migration tree has zero or
    multiple heads, since adoption logic requires an unambiguous target.
    """
    heads = ScriptDirectory.from_config(alembic_config).get_heads()
    if len(heads) == 1:
        return heads[0]
    raise AppDatabaseAdoptionError(
        f"Expected exactly one Alembic head for app DB, got {len(heads)}"
    )


def _is_known_revision(alembic_config: Config, revision: str) -> bool:
    """Return True when *revision* exists in the app migration script directory."""
    script = ScriptDirectory.from_config(alembic_config)
    try:
        resolved = script.get_revision(revision)
    except CommandError:
        # Alembic raises CommandError for identifiers it cannot resolve at all.
        return False
    return resolved is not None
def _alembic_version_table_exists(database_path: Path) -> bool: def _alembic_version_table_exists(database_path: Path) -> bool:
conn = sqlite3.connect(database_path) conn = sqlite3.connect(database_path)
try: try:
@@ -75,6 +95,8 @@ def _list_user_tables(database_path: Path) -> list[str]:
def validate_app_runtime_db(database_url: str) -> None: def validate_app_runtime_db(database_url: str) -> None:
database_path = _database_path_from_url(database_url) database_path = _database_path_from_url(database_url)
alembic_config = _make_alembic_config(database_url)
expected_revision = _expected_head_revision(alembic_config)
if not database_path.exists(): if not database_path.exists():
raise AppDatabaseAdoptionError( raise AppDatabaseAdoptionError(
"App DB file was not found. Run 'python scripts/app_db_adopt.py' first to " "App DB file was not found. Run 'python scripts/app_db_adopt.py' first to "
@@ -88,22 +110,28 @@ def validate_app_runtime_db(database_url: str) -> None:
) )
current_revision = _fetch_alembic_revision(database_path) current_revision = _fetch_alembic_revision(database_path)
if current_revision != APP_BASELINE_REVISION: if current_revision != expected_revision:
raise AppDatabaseAdoptionError( raise AppDatabaseAdoptionError(
"App DB revision mismatch. Refusing to start the app: " "App DB revision mismatch. Refusing to start the app: "
f"expected {APP_BASELINE_REVISION}, got {current_revision}" f"expected {expected_revision}, got {current_revision}"
) )
def adopt_or_initialize_app_db(database_url: str) -> str: def adopt_or_initialize_app_db(database_url: str) -> str:
database_path = _database_path_from_url(database_url) database_path = _database_path_from_url(database_url)
alembic_config = _make_alembic_config(database_url) alembic_config = _make_alembic_config(database_url)
expected_revision = _expected_head_revision(alembic_config)
if database_path.exists(): if database_path.exists():
if _alembic_version_table_exists(database_path): if _alembic_version_table_exists(database_path):
current_revision = _fetch_alembic_revision(database_path) current_revision = _fetch_alembic_revision(database_path)
if current_revision == APP_BASELINE_REVISION: if current_revision == expected_revision:
return "already_managed" return "already_managed"
if not _is_known_revision(alembic_config, current_revision):
raise AppDatabaseAdoptionError(
"App DB is already Alembic-managed but revision does not match "
f"a known migration revision: got {current_revision}"
)
command.upgrade(alembic_config, "head") command.upgrade(alembic_config, "head")
return "upgraded" return "upgraded"
+34 -6
View File
@@ -6,6 +6,8 @@ from pathlib import Path
from alembic import command from alembic import command
from alembic.config import Config from alembic.config import Config
from alembic.script import ScriptDirectory
from alembic.util.exc import CommandError
PROJECT_ROOT = Path(__file__).resolve().parents[1] PROJECT_ROOT = Path(__file__).resolve().parents[1]
if str(PROJECT_ROOT) not in sys.path: if str(PROJECT_ROOT) not in sys.path:
@@ -43,6 +45,24 @@ def _make_alembic_config(database_url: str) -> Config:
return config return config
def _expected_head_revision(alembic_config: Config) -> str:
    """Return the single Alembic head revision configured for the location DB.

    Raises LocationDatabaseAdoptionError when the migration tree has zero or
    multiple heads, since adoption logic requires an unambiguous target.
    """
    heads = ScriptDirectory.from_config(alembic_config).get_heads()
    if len(heads) == 1:
        return heads[0]
    raise LocationDatabaseAdoptionError(
        f"Expected exactly one Alembic head for location DB, got {len(heads)}"
    )


def _is_known_revision(alembic_config: Config, revision: str) -> bool:
    """Return True when *revision* exists in the location migration scripts."""
    script = ScriptDirectory.from_config(alembic_config)
    try:
        resolved = script.get_revision(revision)
    except CommandError:
        # Alembic raises CommandError for identifiers it cannot resolve at all.
        return False
    return resolved is not None
def _location_table_exists(database_path: Path) -> bool: def _location_table_exists(database_path: Path) -> bool:
conn = sqlite3.connect(database_path) conn = sqlite3.connect(database_path)
try: try:
@@ -117,6 +137,8 @@ def validate_legacy_location_db(database_url: str) -> None:
def validate_location_runtime_db(database_url: str) -> None: def validate_location_runtime_db(database_url: str) -> None:
database_path = _database_path_from_url(database_url) database_path = _database_path_from_url(database_url)
alembic_config = _make_alembic_config(database_url)
expected_revision = _expected_head_revision(alembic_config)
if not database_path.exists(): if not database_path.exists():
raise LocationDatabaseAdoptionError( raise LocationDatabaseAdoptionError(
"Location DB file was not found. Run 'python scripts/location_db_adopt.py' " "Location DB file was not found. Run 'python scripts/location_db_adopt.py' "
@@ -131,30 +153,36 @@ def validate_location_runtime_db(database_url: str) -> None:
) )
current_revision = _fetch_alembic_revision(database_path) current_revision = _fetch_alembic_revision(database_path)
if current_revision != LOCATION_BASELINE_REVISION: if current_revision != expected_revision:
raise LocationDatabaseAdoptionError( raise LocationDatabaseAdoptionError(
"Location DB revision mismatch. Refusing to start the app: " "Location DB revision mismatch. Refusing to start the app: "
f"expected {LOCATION_BASELINE_REVISION}, got {current_revision}" f"expected {expected_revision}, got {current_revision}"
) )
def adopt_or_initialize_location_db(database_url: str) -> str: def adopt_or_initialize_location_db(database_url: str) -> str:
database_path = _database_path_from_url(database_url) database_path = _database_path_from_url(database_url)
alembic_config = _make_alembic_config(database_url) alembic_config = _make_alembic_config(database_url)
expected_revision = _expected_head_revision(alembic_config)
if database_path.exists(): if database_path.exists():
if _alembic_version_table_exists(database_path): if _alembic_version_table_exists(database_path):
current_revision = _fetch_alembic_revision(database_path) current_revision = _fetch_alembic_revision(database_path)
if current_revision != LOCATION_BASELINE_REVISION: if current_revision == expected_revision:
return "already_managed"
if not _is_known_revision(alembic_config, current_revision):
raise LocationDatabaseAdoptionError( raise LocationDatabaseAdoptionError(
"Location DB is already Alembic-managed but revision does not match " "Location DB is already Alembic-managed but revision does not match "
f"the expected baseline: expected {LOCATION_BASELINE_REVISION}, " f"a known migration revision: got {current_revision}"
f"got {current_revision}"
) )
return "already_managed" command.upgrade(alembic_config, "head")
return "upgraded"
validate_legacy_location_db(database_url) validate_legacy_location_db(database_url)
command.stamp(alembic_config, LOCATION_BASELINE_REVISION) command.stamp(alembic_config, LOCATION_BASELINE_REVISION)
if LOCATION_BASELINE_REVISION != expected_revision:
command.upgrade(alembic_config, "head")
return "upgraded"
return "adopted" return "adopted"
database_path.parent.mkdir(parents=True, exist_ok=True) database_path.parent.mkdir(parents=True, exist_ok=True)
+34 -6
View File
@@ -6,6 +6,8 @@ from pathlib import Path
from alembic import command from alembic import command
from alembic.config import Config from alembic.config import Config
from alembic.script import ScriptDirectory
from alembic.util.exc import CommandError
PROJECT_ROOT = Path(__file__).resolve().parents[1] PROJECT_ROOT = Path(__file__).resolve().parents[1]
if str(PROJECT_ROOT) not in sys.path: if str(PROJECT_ROOT) not in sys.path:
@@ -42,6 +44,24 @@ def _make_alembic_config(database_url: str) -> Config:
return config return config
def _expected_head_revision(alembic_config: Config) -> str:
    """Return the single Alembic head revision configured for the poo DB.

    Raises PooDatabaseAdoptionError when the migration tree has zero or
    multiple heads, since adoption logic requires an unambiguous target.
    """
    heads = ScriptDirectory.from_config(alembic_config).get_heads()
    if len(heads) == 1:
        return heads[0]
    raise PooDatabaseAdoptionError(
        f"Expected exactly one Alembic head for poo DB, got {len(heads)}"
    )


def _is_known_revision(alembic_config: Config, revision: str) -> bool:
    """Return True when *revision* exists in the poo migration scripts."""
    script = ScriptDirectory.from_config(alembic_config)
    try:
        resolved = script.get_revision(revision)
    except CommandError:
        # Alembic raises CommandError for identifiers it cannot resolve at all.
        return False
    return resolved is not None
def _poo_table_exists(database_path: Path) -> bool: def _poo_table_exists(database_path: Path) -> bool:
conn = sqlite3.connect(database_path) conn = sqlite3.connect(database_path)
try: try:
@@ -112,6 +132,8 @@ def validate_legacy_poo_db(database_url: str) -> None:
def validate_poo_runtime_db(database_url: str) -> None: def validate_poo_runtime_db(database_url: str) -> None:
database_path = _database_path_from_url(database_url) database_path = _database_path_from_url(database_url)
alembic_config = _make_alembic_config(database_url)
expected_revision = _expected_head_revision(alembic_config)
if not database_path.exists(): if not database_path.exists():
raise PooDatabaseAdoptionError( raise PooDatabaseAdoptionError(
"Poo DB file was not found. Run 'python scripts/poo_db_adopt.py' first to " "Poo DB file was not found. Run 'python scripts/poo_db_adopt.py' first to "
@@ -126,30 +148,36 @@ def validate_poo_runtime_db(database_url: str) -> None:
) )
current_revision = _fetch_alembic_revision(database_path) current_revision = _fetch_alembic_revision(database_path)
if current_revision != POO_BASELINE_REVISION: if current_revision != expected_revision:
raise PooDatabaseAdoptionError( raise PooDatabaseAdoptionError(
"Poo DB revision mismatch. Refusing to start the app: " "Poo DB revision mismatch. Refusing to start the app: "
f"expected {POO_BASELINE_REVISION}, got {current_revision}" f"expected {expected_revision}, got {current_revision}"
) )
def adopt_or_initialize_poo_db(database_url: str) -> str: def adopt_or_initialize_poo_db(database_url: str) -> str:
database_path = _database_path_from_url(database_url) database_path = _database_path_from_url(database_url)
alembic_config = _make_alembic_config(database_url) alembic_config = _make_alembic_config(database_url)
expected_revision = _expected_head_revision(alembic_config)
if database_path.exists(): if database_path.exists():
if _alembic_version_table_exists(database_path): if _alembic_version_table_exists(database_path):
current_revision = _fetch_alembic_revision(database_path) current_revision = _fetch_alembic_revision(database_path)
if current_revision != POO_BASELINE_REVISION: if current_revision == expected_revision:
return "already_managed"
if not _is_known_revision(alembic_config, current_revision):
raise PooDatabaseAdoptionError( raise PooDatabaseAdoptionError(
"Poo DB is already Alembic-managed but revision does not match " "Poo DB is already Alembic-managed but revision does not match "
f"the expected baseline: expected {POO_BASELINE_REVISION}, " f"a known migration revision: got {current_revision}"
f"got {current_revision}"
) )
return "already_managed" command.upgrade(alembic_config, "head")
return "upgraded"
validate_legacy_poo_db(database_url) validate_legacy_poo_db(database_url)
command.stamp(alembic_config, POO_BASELINE_REVISION) command.stamp(alembic_config, POO_BASELINE_REVISION)
if POO_BASELINE_REVISION != expected_revision:
command.upgrade(alembic_config, "head")
return "upgraded"
return "adopted" return "adopted"
database_path.parent.mkdir(parents=True, exist_ok=True) database_path.parent.mkdir(parents=True, exist_ok=True)
+25
View File
@@ -0,0 +1,25 @@
from __future__ import annotations

from app.config import get_settings
from scripts.app_db_adopt import adopt_or_initialize_app_db
from scripts.location_db_adopt import adopt_or_initialize_location_db
from scripts.poo_db_adopt import adopt_or_initialize_poo_db


def run_all_migrations() -> dict[str, str]:
    """Adopt or initialize every application database.

    Returns a mapping of database name ("app", "location", "poo") to the
    outcome string reported by the corresponding adopt/initialize helper.
    """
    settings = get_settings()
    outcomes: dict[str, str] = {}
    outcomes["app"] = adopt_or_initialize_app_db(settings.app_database_url)
    outcomes["location"] = adopt_or_initialize_location_db(settings.location_database_url)
    outcomes["poo"] = adopt_or_initialize_poo_db(settings.poo_database_url)
    return outcomes


def main() -> None:
    """CLI entry point: run all migrations and print one result line per DB."""
    for database_name, result in run_all_migrations().items():
        print(f"{database_name}: {result}")


if __name__ == "__main__":
    main()
+4 -1
View File
@@ -37,12 +37,13 @@ def test_status_endpoint(client: TestClient) -> None:
def test_app_start_fails_when_app_db_missing(tmp_path, monkeypatch: pytest.MonkeyPatch) -> None: def test_app_start_fails_when_app_db_missing(tmp_path, monkeypatch: pytest.MonkeyPatch) -> None:
missing_app_path = tmp_path / "missing_app.db"
poo_database_path = tmp_path / "poo_ready.db" poo_database_path = tmp_path / "poo_ready.db"
location_database_path = tmp_path / "location_ready.db" location_database_path = tmp_path / "location_ready.db"
command.upgrade(_make_poo_alembic_config(f"sqlite:///{poo_database_path}"), "head") command.upgrade(_make_poo_alembic_config(f"sqlite:///{poo_database_path}"), "head")
command.upgrade(_make_alembic_config(f"sqlite:///{location_database_path}"), "head") command.upgrade(_make_alembic_config(f"sqlite:///{location_database_path}"), "head")
monkeypatch.setenv("APP_DATABASE_URL", f"sqlite:///{tmp_path / 'missing_app.db'}") monkeypatch.setenv("APP_DATABASE_URL", f"sqlite:///{missing_app_path}")
monkeypatch.setenv("AUTH_BOOTSTRAP_USERNAME", "admin") monkeypatch.setenv("AUTH_BOOTSTRAP_USERNAME", "admin")
monkeypatch.setenv("AUTH_BOOTSTRAP_PASSWORD", "test-password") monkeypatch.setenv("AUTH_BOOTSTRAP_PASSWORD", "test-password")
monkeypatch.setenv("LOCATION_DATABASE_URL", f"sqlite:///{location_database_path}") monkeypatch.setenv("LOCATION_DATABASE_URL", f"sqlite:///{location_database_path}")
@@ -54,6 +55,8 @@ def test_app_start_fails_when_app_db_missing(tmp_path, monkeypatch: pytest.Monke
with pytest.raises(RuntimeError, match="Run 'python scripts/app_db_adopt.py' first"): with pytest.raises(RuntimeError, match="Run 'python scripts/app_db_adopt.py' first"):
anyio.run(_run_lifespan, app) anyio.run(_run_lifespan, app)
assert not missing_app_path.exists()
get_settings.cache_clear() get_settings.cache_clear()
reset_auth_db_caches() reset_auth_db_caches()
+213
View File
@@ -0,0 +1,213 @@
from pathlib import Path
import sqlite3
import anyio
import pytest
import yaml
from alembic import command
from app.auth_db import reset_auth_db_caches
from app.config import get_settings
from app.main import create_app
from scripts.app_db_adopt import APP_BASELINE_REVISION
from scripts.location_db_adopt import EXPECTED_USER_VERSION as LOCATION_USER_VERSION
from scripts.location_db_adopt import LOCATION_BASELINE_REVISION
from scripts.poo_db_adopt import EXPECTED_USER_VERSION as POO_USER_VERSION
from scripts.poo_db_adopt import POO_BASELINE_REVISION
from scripts.run_migrations import run_all_migrations
from tests.conftest import _make_alembic_config, _make_poo_alembic_config
def _read_yaml(path: str) -> dict:
    """Parse the YAML document at *path* into a dict."""
    raw_text = Path(path).read_text()
    return yaml.safe_load(raw_text)


async def _run_lifespan(app) -> None:
    """Enter and immediately exit the FastAPI app's lifespan context."""
    async with app.router.lifespan_context(app):
        return None
def _configure_database_env(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> dict[str, Path | str]:
    """Point all three database URLs at files under *tmp_path* and reset caches.

    Returns a mapping with both the filesystem path ("<name>_path") and the
    sqlite URL ("<name>_url") for each of the app/location/poo databases.
    """
    env_vars = (
        ("app", "APP_DATABASE_URL"),
        ("location", "LOCATION_DATABASE_URL"),
        ("poo", "POO_DATABASE_URL"),
    )
    info: dict[str, Path | str] = {}
    for name, env_var in env_vars:
        db_path = tmp_path / f"{name}.db"
        db_url = f"sqlite:///{db_path}"
        monkeypatch.setenv(env_var, db_url)
        info[f"{name}_path"] = db_path
        info[f"{name}_url"] = db_url
    monkeypatch.setenv("AUTH_BOOTSTRAP_USERNAME", "admin")
    monkeypatch.setenv("AUTH_BOOTSTRAP_PASSWORD", "test-password")
    monkeypatch.setenv("AUTH_COOKIE_SECURE_OVERRIDE", "false")
    # Settings and auth DB handles are cached at module level; drop the caches
    # so the new environment variables actually take effect.
    get_settings.cache_clear()
    reset_auth_db_caches()
    return info
def _create_legacy_location_db(database_path: Path) -> None:
    """Create a pre-Alembic location DB: legacy schema, one row, legacy user_version."""
    conn = sqlite3.connect(database_path)
    try:
        conn.execute(
            """
            CREATE TABLE location (
                person TEXT NOT NULL,
                datetime TEXT NOT NULL,
                latitude REAL NOT NULL,
                longitude REAL NOT NULL,
                altitude REAL,
                PRIMARY KEY (person, datetime)
            )
            """
        )
        conn.execute(
            "INSERT INTO location (person, datetime, latitude, longitude, altitude) VALUES (?, ?, ?, ?, ?)",
            ("alice", "2026-04-22T10:00:00Z", 1.23, 4.56, 7.89),
        )
        # Stamp the legacy schema marker the adoption code looks for.
        conn.execute(f"PRAGMA user_version = {LOCATION_USER_VERSION}")
        conn.commit()
    finally:
        conn.close()


def _create_legacy_poo_db(database_path: Path) -> None:
    """Create a pre-Alembic poo DB: legacy schema, one row, legacy user_version."""
    conn = sqlite3.connect(database_path)
    try:
        conn.execute(
            """
            CREATE TABLE poo_records (
                timestamp TEXT NOT NULL,
                status TEXT NOT NULL,
                latitude REAL NOT NULL,
                longitude REAL NOT NULL,
                PRIMARY KEY (timestamp)
            )
            """
        )
        conn.execute(
            "INSERT INTO poo_records (timestamp, status, latitude, longitude) VALUES (?, ?, ?, ?)",
            ("2026-04-22T11:00:00Z", "complete", 9.87, 6.54),
        )
        # Stamp the legacy schema marker the adoption code looks for.
        conn.execute(f"PRAGMA user_version = {POO_USER_VERSION}")
        conn.commit()
    finally:
        conn.close()
def test_compose_uses_migration_job_before_app() -> None:
    """Compose must run the migration job to completion before starting the app.

    Checks the base compose file for the migration service wiring and the
    override file for the local-dev build directives.
    """
    # Resolve compose files relative to the repository root instead of a
    # hard-coded absolute path (/home/tianyu/...), so the test passes on any
    # checkout and on CI runners.
    repo_root = Path(__file__).resolve().parents[1]
    compose = _read_yaml(str(repo_root / "docker-compose.yml"))
    override = _read_yaml(str(repo_root / "docker-compose.override.yml"))
    migration_service = compose["services"]["migration"]
    app_service = compose["services"]["app"]
    assert migration_service["command"] == ["python", "-m", "scripts.run_migrations"]
    assert migration_service["restart"] == "no"
    assert app_service["depends_on"]["migration"]["condition"] == "service_completed_successfully"
    assert override["services"]["migration"]["build"] == "."
    assert override["services"]["app"]["build"] == "."
def test_image_defaults_to_uvicorn_only() -> None:
    """The image default command is uvicorn only; DB adoption moved out of the entrypoint."""
    # Resolve files relative to the repository root instead of a hard-coded
    # absolute path (/home/tianyu/...), so the test passes on any checkout
    # and on CI runners.
    repo_root = Path(__file__).resolve().parents[1]
    dockerfile = (repo_root / "Dockerfile").read_text()
    entrypoint = (repo_root / "docker" / "entrypoint.sh").read_text()
    assert 'CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]' in dockerfile
    assert 'exec "$@"' in entrypoint
    assert "app_db_adopt" not in entrypoint
    assert "location_db_adopt" not in entrypoint
    assert "poo_db_adopt" not in entrypoint
def test_migration_runner_initializes_and_is_idempotent(
    tmp_path: Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """Fresh databases are initialized on first run; a rerun is a no-op."""
    database_urls = _configure_database_env(tmp_path, monkeypatch)
    first_run = run_all_migrations()
    second_run = run_all_migrations()
    # First pass creates all three databases from scratch; the second pass
    # must detect them as already managed and leave them untouched.
    assert first_run == {"app": "initialized", "location": "initialized", "poo": "initialized"}
    assert second_run == {
        "app": "already_managed",
        "location": "already_managed",
        "poo": "already_managed",
    }
    conn = sqlite3.connect(database_urls["app_path"])
    try:
        assert conn.execute("SELECT version_num FROM alembic_version").fetchone()[0] == APP_BASELINE_REVISION
        tables = {
            row[0]
            for row in conn.execute(
                "SELECT name FROM sqlite_master WHERE type = 'table' AND name NOT LIKE 'sqlite_%'"
            ).fetchall()
        }
    finally:
        conn.close()
    # The app schema must contain at least the core tables plus Alembic's bookkeeping table.
    assert {"auth_users", "auth_sessions", "app_config", "alembic_version"} <= tables
    conn = sqlite3.connect(database_urls["location_path"])
    try:
        assert conn.execute("SELECT version_num FROM alembic_version").fetchone()[0] == LOCATION_BASELINE_REVISION
    finally:
        conn.close()
    conn = sqlite3.connect(database_urls["poo_path"])
    try:
        assert conn.execute("SELECT version_num FROM alembic_version").fetchone()[0] == POO_BASELINE_REVISION
    finally:
        conn.close()
    # Undo the cached-settings mutation introduced via the env vars above so
    # later tests see a clean state.
    get_settings.cache_clear()
    reset_auth_db_caches()
def test_migration_runner_adopts_legacy_sqlite_without_data_loss(
    tmp_path: Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """Pre-existing (legacy) SQLite files are adopted in place, keeping their rows."""
    database_urls = _configure_database_env(tmp_path, monkeypatch)
    _create_legacy_location_db(database_urls["location_path"])
    _create_legacy_poo_db(database_urls["poo_path"])
    try:
        results = run_all_migrations()
        # Legacy DBs are "adopted" (stamped) rather than re-initialized.
        assert results == {"app": "initialized", "location": "adopted", "poo": "adopted"}
        conn = sqlite3.connect(database_urls["location_path"])
        try:
            assert (
                conn.execute("SELECT version_num FROM alembic_version").fetchone()[0]
                == LOCATION_BASELINE_REVISION
            )
            # The single legacy row must survive adoption.
            assert conn.execute("SELECT COUNT(*) FROM location").fetchone()[0] == 1
        finally:
            conn.close()
        conn = sqlite3.connect(database_urls["poo_path"])
        try:
            assert (
                conn.execute("SELECT version_num FROM alembic_version").fetchone()[0]
                == POO_BASELINE_REVISION
            )
            assert conn.execute("SELECT COUNT(*) FROM poo_records").fetchone()[0] == 1
        finally:
            conn.close()
    finally:
        # Reset process-wide caches even when an assertion fails, so this
        # test cannot leak its database environment into subsequent tests.
        get_settings.cache_clear()
        reset_auth_db_caches()
def test_app_startup_still_fails_closed_without_running_adoption(
    tmp_path: Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """App startup refuses to run, and creates no app DB file, if adoption never ran."""
    database_urls = _configure_database_env(tmp_path, monkeypatch)
    missing_app_path = database_urls["app_path"]
    # Location and poo DBs are migrated; only the app DB is left untouched.
    command.upgrade(_make_alembic_config(database_urls["location_url"]), "head")
    command.upgrade(_make_poo_alembic_config(database_urls["poo_url"]), "head")
    try:
        app = create_app()
        with pytest.raises(RuntimeError, match="Run 'python scripts/app_db_adopt.py' first"):
            anyio.run(_run_lifespan, app)
        # Fail-closed: startup must not create the missing app DB as a side effect.
        assert not Path(missing_app_path).exists()
    finally:
        # Reset process-wide caches even when an assertion fails, so this
        # test cannot leak its database environment into subsequent tests.
        get_settings.cache_clear()
        reset_auth_db_caches()
+158
View File
@@ -1,5 +1,21 @@
from sqlalchemy import text
import app.db as app_db
import app.poo_db as poo_db
from app.config import Settings, get_settings
from app.dependencies import get_app_settings, get_homeassistant_client
from app.main import create_app
class _FakeHomeAssistantClient:
def __init__(self) -> None:
self.sensor_calls: list[dict] = []
def publish_sensor(self, *, entity_id: str, state: str, attributes: dict | None = None) -> None:
self.sensor_calls.append(
{"entity_id": entity_id, "state": state, "attributes": attributes or {}}
)
def test_homeassistant_publish_records_location(location_client) -> None:
    client, engine = location_client
@@ -141,6 +157,148 @@ def test_homeassistant_publish_rejects_invalid_ticktick_content(location_client)
assert response.text == "bad request" assert response.text == "bad request"
def test_homeassistant_publish_poo_get_latest_publishes_latest_status(
    ready_location_database,
    ready_poo_database,
    auth_database,
    monkeypatch,
) -> None:
    """POST /homeassistant/publish with target=poo_recorder / action=get_latest
    pushes the newest poo record to the (faked) Home Assistant sensor.
    """
    # Build engines/sessionmakers against the per-test databases.
    location_engine = app_db.create_engine(
        ready_location_database["location_url"],
        connect_args={"check_same_thread": False},
    )
    location_session_local = app_db.sessionmaker(
        bind=location_engine,
        autoflush=False,
        autocommit=False,
    )
    poo_engine = poo_db.create_engine(
        ready_poo_database["poo_url"],
        connect_args={"check_same_thread": False},
    )
    poo_session_local = poo_db.sessionmaker(
        bind=poo_engine,
        autoflush=False,
        autocommit=False,
    )
    fake_ha = _FakeHomeAssistantClient()
    settings = Settings(
        poo_sensor_entity_name="sensor.test_poo_status",
        poo_sensor_friendly_name="Poo Status",
    )
    # Patch the module-level DB handles BEFORE create_app(), presumably so the
    # app wires itself to the per-test engines — confirm against create_app().
    monkeypatch.setattr(app_db, "engine", location_engine)
    monkeypatch.setattr(app_db, "SessionLocal", location_session_local)
    monkeypatch.setattr(poo_db, "poo_engine", poo_engine)
    monkeypatch.setattr(poo_db, "PooSessionLocal", poo_session_local)
    test_app = create_app()
    # Route HA publishes to the fake client and pin the sensor settings.
    test_app.dependency_overrides[get_homeassistant_client] = lambda: fake_ha
    test_app.dependency_overrides[get_app_settings] = lambda: settings
    # Seed a single poo record that get_latest should pick up.
    with poo_engine.begin() as conn:
        conn.execute(
            text(
                "INSERT INTO poo_records (timestamp, status, latitude, longitude) "
                "VALUES (:timestamp, :status, :latitude, :longitude)"
            ),
            {
                "timestamp": "2026-04-20T10:05Z",
                "status": "done",
                "latitude": 1.23,
                "longitude": 4.56,
            },
        )
    try:
        from fastapi.testclient import TestClient

        with TestClient(test_app) as client:
            response = client.post(
                "/homeassistant/publish",
                json={
                    "target": "poo_recorder",
                    "action": "get_latest",
                    "content": "",
                },
            )
            # Endpoint answers 200 with an empty body...
            assert response.status_code == 200
            assert response.text == ""
            # ...and publishes exactly one sensor update with the seeded status.
            assert len(fake_ha.sensor_calls) == 1
            assert fake_ha.sensor_calls[0]["entity_id"] == "sensor.test_poo_status"
            assert fake_ha.sensor_calls[0]["state"] == "done"
            assert fake_ha.sensor_calls[0]["attributes"]["friendly_name"] == "Poo Status"
            assert fake_ha.sensor_calls[0]["attributes"]["last_poo"]
    finally:
        # Undo overrides and release engines regardless of assertion outcome.
        test_app.dependency_overrides.clear()
        get_settings.cache_clear()
        location_engine.dispose()
        poo_engine.dispose()
def test_homeassistant_publish_returns_internal_error_for_unknown_poo_action(
    ready_location_database,
    ready_poo_database,
    auth_database,
    monkeypatch,
) -> None:
    """An unrecognized poo_recorder action yields a 500 and publishes nothing."""
    # Same wiring as the get_latest test: per-test engines/sessionmakers.
    location_engine = app_db.create_engine(
        ready_location_database["location_url"],
        connect_args={"check_same_thread": False},
    )
    location_session_local = app_db.sessionmaker(
        bind=location_engine,
        autoflush=False,
        autocommit=False,
    )
    poo_engine = poo_db.create_engine(
        ready_poo_database["poo_url"],
        connect_args={"check_same_thread": False},
    )
    poo_session_local = poo_db.sessionmaker(
        bind=poo_engine,
        autoflush=False,
        autocommit=False,
    )
    fake_ha = _FakeHomeAssistantClient()
    settings = Settings(
        poo_sensor_entity_name="sensor.test_poo_status",
        poo_sensor_friendly_name="Poo Status",
    )
    # Patch the module-level DB handles BEFORE create_app(), presumably so the
    # app wires itself to the per-test engines — confirm against create_app().
    monkeypatch.setattr(app_db, "engine", location_engine)
    monkeypatch.setattr(app_db, "SessionLocal", location_session_local)
    monkeypatch.setattr(poo_db, "poo_engine", poo_engine)
    monkeypatch.setattr(poo_db, "PooSessionLocal", poo_session_local)
    test_app = create_app()
    test_app.dependency_overrides[get_homeassistant_client] = lambda: fake_ha
    test_app.dependency_overrides[get_app_settings] = lambda: settings
    try:
        from fastapi.testclient import TestClient

        with TestClient(test_app) as client:
            response = client.post(
                "/homeassistant/publish",
                json={
                    "target": "poo_recorder",
                    "action": "unknown_action",
                    "content": "",
                },
            )
            assert response.status_code == 500
            assert response.text == "internal server error"
            # The fake HA client must not be called for an unknown action.
            assert fake_ha.sensor_calls == []
    finally:
        # Undo overrides and release engines regardless of assertion outcome.
        test_app.dependency_overrides.clear()
        get_settings.cache_clear()
        location_engine.dispose()
        poo_engine.dispose()
def test_homeassistant_publish_returns_not_implemented_for_unknown_location_action(
    location_client,
) -> None:
+1 -1
View File
@@ -343,7 +343,7 @@ def test_location_db_adoption_fails_closed_on_alembic_revision_mismatch(
    conn.commit()
    conn.close()
    with pytest.raises(LocationDatabaseAdoptionError, match="known migration revision"):
        adopt_or_initialize_location_db(f"sqlite:///{database_path}")