Change adoption to a separate step
This commit is contained in:
+4
-1
@@ -37,12 +37,13 @@ def test_status_endpoint(client: TestClient) -> None:
|
||||
|
||||
|
||||
def test_app_start_fails_when_app_db_missing(tmp_path, monkeypatch: pytest.MonkeyPatch) -> None:
|
||||
missing_app_path = tmp_path / "missing_app.db"
|
||||
poo_database_path = tmp_path / "poo_ready.db"
|
||||
location_database_path = tmp_path / "location_ready.db"
|
||||
command.upgrade(_make_poo_alembic_config(f"sqlite:///{poo_database_path}"), "head")
|
||||
command.upgrade(_make_alembic_config(f"sqlite:///{location_database_path}"), "head")
|
||||
|
||||
monkeypatch.setenv("APP_DATABASE_URL", f"sqlite:///{tmp_path / 'missing_app.db'}")
|
||||
monkeypatch.setenv("APP_DATABASE_URL", f"sqlite:///{missing_app_path}")
|
||||
monkeypatch.setenv("AUTH_BOOTSTRAP_USERNAME", "admin")
|
||||
monkeypatch.setenv("AUTH_BOOTSTRAP_PASSWORD", "test-password")
|
||||
monkeypatch.setenv("LOCATION_DATABASE_URL", f"sqlite:///{location_database_path}")
|
||||
@@ -54,6 +55,8 @@ def test_app_start_fails_when_app_db_missing(tmp_path, monkeypatch: pytest.Monke
|
||||
with pytest.raises(RuntimeError, match="Run 'python scripts/app_db_adopt.py' first"):
|
||||
anyio.run(_run_lifespan, app)
|
||||
|
||||
assert not missing_app_path.exists()
|
||||
|
||||
get_settings.cache_clear()
|
||||
reset_auth_db_caches()
|
||||
|
||||
|
||||
@@ -0,0 +1,213 @@
|
||||
from pathlib import Path
|
||||
import sqlite3
|
||||
|
||||
import anyio
|
||||
import pytest
|
||||
import yaml
|
||||
from alembic import command
|
||||
|
||||
from app.auth_db import reset_auth_db_caches
|
||||
from app.config import get_settings
|
||||
from app.main import create_app
|
||||
from scripts.app_db_adopt import APP_BASELINE_REVISION
|
||||
from scripts.location_db_adopt import EXPECTED_USER_VERSION as LOCATION_USER_VERSION
|
||||
from scripts.location_db_adopt import LOCATION_BASELINE_REVISION
|
||||
from scripts.poo_db_adopt import EXPECTED_USER_VERSION as POO_USER_VERSION
|
||||
from scripts.poo_db_adopt import POO_BASELINE_REVISION
|
||||
from scripts.run_migrations import run_all_migrations
|
||||
from tests.conftest import _make_alembic_config, _make_poo_alembic_config
|
||||
|
||||
|
||||
def _read_yaml(path: str) -> dict:
    """Parse the YAML file at *path* and return its contents as a dict."""
    contents = Path(path).read_text()
    return yaml.safe_load(contents)
|
||||
|
||||
|
||||
async def _run_lifespan(app) -> None:
|
||||
async with app.router.lifespan_context(app):
|
||||
return None
|
||||
|
||||
|
||||
def _configure_database_env(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> dict[str, Path | str]:
    """Point the three database env vars at fresh files under *tmp_path*.

    Also sets the bootstrap auth credentials, disables the secure-cookie
    override, and clears the cached settings/auth state so the new env vars
    take effect. Returns each database's path and sqlite URL.
    """
    paths = {name: tmp_path / f"{name}.db" for name in ("app", "location", "poo")}
    urls = {name: f"sqlite:///{path}" for name, path in paths.items()}

    monkeypatch.setenv("APP_DATABASE_URL", urls["app"])
    monkeypatch.setenv("LOCATION_DATABASE_URL", urls["location"])
    monkeypatch.setenv("POO_DATABASE_URL", urls["poo"])
    monkeypatch.setenv("AUTH_BOOTSTRAP_USERNAME", "admin")
    monkeypatch.setenv("AUTH_BOOTSTRAP_PASSWORD", "test-password")
    monkeypatch.setenv("AUTH_COOKIE_SECURE_OVERRIDE", "false")
    # Drop any settings/auth state cached under the previous env values.
    get_settings.cache_clear()
    reset_auth_db_caches()

    return {
        "app_path": paths["app"],
        "app_url": urls["app"],
        "location_path": paths["location"],
        "location_url": urls["location"],
        "poo_path": paths["poo"],
        "poo_url": urls["poo"],
    }
|
||||
|
||||
|
||||
def _create_legacy_location_db(database_path: Path) -> None:
    """Create a pre-Alembic location database seeded with one row.

    Mimics a legacy deployment: the schema exists and ``PRAGMA user_version``
    is set, but there is no ``alembic_version`` table, so the migration
    runner must adopt the file rather than initialize it from scratch.
    """
    conn = sqlite3.connect(database_path)
    try:  # fix: close the connection even if schema setup raises
        conn.execute(
            """
            CREATE TABLE location (
                person TEXT NOT NULL,
                datetime TEXT NOT NULL,
                latitude REAL NOT NULL,
                longitude REAL NOT NULL,
                altitude REAL,
                PRIMARY KEY (person, datetime)
            )
            """
        )
        conn.execute(
            "INSERT INTO location (person, datetime, latitude, longitude, altitude) VALUES (?, ?, ?, ?, ?)",
            ("alice", "2026-04-22T10:00:00Z", 1.23, 4.56, 7.89),
        )
        conn.execute(f"PRAGMA user_version = {LOCATION_USER_VERSION}")
        conn.commit()
    finally:
        conn.close()
|
||||
|
||||
|
||||
def _create_legacy_poo_db(database_path: Path) -> None:
    """Create a pre-Alembic poo database seeded with one row.

    Like ``_create_legacy_location_db``: the schema and ``PRAGMA
    user_version`` are present but there is no ``alembic_version`` table,
    forcing the migration runner down its adoption path.
    """
    conn = sqlite3.connect(database_path)
    try:  # fix: close the connection even if schema setup raises
        conn.execute(
            """
            CREATE TABLE poo_records (
                timestamp TEXT NOT NULL,
                status TEXT NOT NULL,
                latitude REAL NOT NULL,
                longitude REAL NOT NULL,
                PRIMARY KEY (timestamp)
            )
            """
        )
        conn.execute(
            "INSERT INTO poo_records (timestamp, status, latitude, longitude) VALUES (?, ?, ?, ?)",
            ("2026-04-22T11:00:00Z", "complete", 9.87, 6.54),
        )
        conn.execute(f"PRAGMA user_version = {POO_USER_VERSION}")
        conn.commit()
    finally:
        conn.close()
|
||||
|
||||
|
||||
def test_compose_uses_migration_job_before_app() -> None:
    """Compose must run the migration job to completion before the app starts."""
    # Fix: resolve the repo root relative to this test file instead of a
    # hard-coded absolute path under /home/tianyu, which only exists on one
    # machine. Assumes this file lives one directory below the repo root
    # (tests/) -- TODO confirm.
    repo_root = Path(__file__).resolve().parents[1]
    compose = _read_yaml(str(repo_root / "docker-compose.yml"))
    override = _read_yaml(str(repo_root / "docker-compose.override.yml"))

    migration_service = compose["services"]["migration"]
    app_service = compose["services"]["app"]

    # One-shot migration job, with the app gated on its successful completion.
    assert migration_service["command"] == ["python", "-m", "scripts.run_migrations"]
    assert migration_service["restart"] == "no"
    assert app_service["depends_on"]["migration"]["condition"] == "service_completed_successfully"
    assert override["services"]["migration"]["build"] == "."
    assert override["services"]["app"]["build"] == "."
|
||||
|
||||
|
||||
def test_image_defaults_to_uvicorn_only() -> None:
    """The image must default to running uvicorn only -- no adoption scripts."""
    # Fix: resolve the repo root relative to this test file instead of a
    # hard-coded absolute path under /home/tianyu, which only exists on one
    # machine. Assumes this file lives one directory below the repo root
    # (tests/) -- TODO confirm.
    repo_root = Path(__file__).resolve().parents[1]
    dockerfile = (repo_root / "Dockerfile").read_text()
    entrypoint = (repo_root / "docker" / "entrypoint.sh").read_text()

    assert 'CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]' in dockerfile
    assert 'exec "$@"' in entrypoint
    # Adoption moved to a separate step; the entrypoint must not run it.
    assert "app_db_adopt" not in entrypoint
    assert "location_db_adopt" not in entrypoint
    assert "poo_db_adopt" not in entrypoint
|
||||
|
||||
|
||||
def test_migration_runner_initializes_and_is_idempotent(
    tmp_path: Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """The first run initializes all three databases; a second run is a no-op."""
    database_urls = _configure_database_env(tmp_path, monkeypatch)

    first_run = run_all_migrations()
    second_run = run_all_migrations()

    assert first_run == {"app": "initialized", "location": "initialized", "poo": "initialized"}
    assert second_run == {
        "app": "already_managed",
        "location": "already_managed",
        "poo": "already_managed",
    }

    def _alembic_version(db_path: Path) -> str:
        # Read the stamped Alembic revision out of a sqlite file.
        connection = sqlite3.connect(db_path)
        try:
            return connection.execute("SELECT version_num FROM alembic_version").fetchone()[0]
        finally:
            connection.close()

    connection = sqlite3.connect(database_urls["app_path"])
    try:
        assert (
            connection.execute("SELECT version_num FROM alembic_version").fetchone()[0]
            == APP_BASELINE_REVISION
        )
        tables = {
            row[0]
            for row in connection.execute(
                "SELECT name FROM sqlite_master WHERE type = 'table' AND name NOT LIKE 'sqlite_%'"
            ).fetchall()
        }
    finally:
        connection.close()

    assert {"auth_users", "auth_sessions", "app_config", "alembic_version"} <= tables

    assert _alembic_version(database_urls["location_path"]) == LOCATION_BASELINE_REVISION
    assert _alembic_version(database_urls["poo_path"]) == POO_BASELINE_REVISION

    get_settings.cache_clear()
    reset_auth_db_caches()
|
||||
|
||||
|
||||
def test_migration_runner_adopts_legacy_sqlite_without_data_loss(
    tmp_path: Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """Legacy sqlite files are adopted in place and keep their existing rows."""
    database_urls = _configure_database_env(tmp_path, monkeypatch)
    _create_legacy_location_db(database_urls["location_path"])
    _create_legacy_poo_db(database_urls["poo_path"])

    results = run_all_migrations()

    assert results == {"app": "initialized", "location": "adopted", "poo": "adopted"}

    def _assert_adopted(db_path: Path, expected_revision: str, table: str) -> None:
        # The database must be stamped at the baseline revision and still
        # hold the single row seeded before adoption.
        connection = sqlite3.connect(db_path)
        try:
            revision = connection.execute("SELECT version_num FROM alembic_version").fetchone()[0]
            row_count = connection.execute(f"SELECT COUNT(*) FROM {table}").fetchone()[0]
        finally:
            connection.close()
        assert revision == expected_revision
        assert row_count == 1

    _assert_adopted(database_urls["location_path"], LOCATION_BASELINE_REVISION, "location")
    _assert_adopted(database_urls["poo_path"], POO_BASELINE_REVISION, "poo_records")

    get_settings.cache_clear()
    reset_auth_db_caches()
|
||||
|
||||
|
||||
def test_app_startup_still_fails_closed_without_running_adoption(
    tmp_path: Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """With location/poo migrated but no app db, startup must refuse to run
    and must not create the app database as a side effect."""
    database_urls = _configure_database_env(tmp_path, monkeypatch)
    app_db_path = database_urls["app_path"]
    # Bring the other two databases fully up to date; only the app db is absent.
    command.upgrade(_make_alembic_config(database_urls["location_url"]), "head")
    command.upgrade(_make_poo_alembic_config(database_urls["poo_url"]), "head")

    application = create_app()
    with pytest.raises(RuntimeError, match="Run 'python scripts/app_db_adopt.py' first"):
        anyio.run(_run_lifespan, application)

    assert not Path(app_db_path).exists()

    get_settings.cache_clear()
    reset_auth_db_caches()
|
||||
@@ -343,7 +343,7 @@ def test_location_db_adoption_fails_closed_on_alembic_revision_mismatch(
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
with pytest.raises(LocationDatabaseAdoptionError, match="revision does not match"):
|
||||
with pytest.raises(LocationDatabaseAdoptionError, match="known migration revision"):
|
||||
adopt_or_initialize_location_db(f"sqlite:///{database_path}")
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user