Compare commits: 64a2726c73 ... 616232b76d

2 Commits:
- 616232b76d
- 738df559cb
29 changes: .github/workflows/backend-ci.yml (vendored, new file)
@@ -0,0 +1,29 @@
+name: Backend CI
+
+on:
+  push:
+  pull_request:
+  workflow_dispatch:
+
+jobs:
+  unit-test:
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        working-directory: backend
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.11'
+
+      - name: Install deps
+        run: pip install -r dev-requirements.txt
+
+      - name: Run tests
+        run: |
+          pytest -q
@@ -4,7 +4,17 @@ line-length = 144
 [lint]
 select = ["ALL"]
 fixable = ["UP034", "I001"]
-ignore = ["T201", "D", "ANN101", "TD002", "TD003"]
+ignore = [
+    "T201",
+    "D",
+    "ANN101",
+    "TD002",
+    "TD003",
+    "TRY003",
+    "EM101",
+    "EM102",
+    "PLC0405",
+]
 
 [lint.extend-per-file-ignores]
 "test*.py" = ["S101"]
@@ -64,7 +64,7 @@ def test_rollback_on_exception() -> None:
         s.exec(text("CREATE TABLE IF NOT EXISTS t_rb (id INTEGER PRIMARY KEY, val TEXT);"))
         s.exec(text("INSERT INTO t_rb (val) VALUES (:v)").bindparams(v="will_rollback"))
         # simulate handler error -> should trigger rollback in get_session
-        raise RuntimeError("simulated failure")  # noqa: TRY003, EM101
+        raise RuntimeError("simulated failure")

    # New session should not see the inserted row
    with session_ctx(db) as s2:
77 changes: backend/tests/test_db_migration.py (new file)
@@ -0,0 +1,77 @@
+import pytest
+from sqlalchemy import text
+from sqlalchemy.pool import StaticPool
+from sqlmodel import create_engine
+
+from trading_journal import db_migration
+
+
+def _base_type_of(compiled: str) -> str:
+    """Return base type name (e.g. VARCHAR from VARCHAR(13)), upper-cased."""
+    return compiled.split("(")[0].strip().upper()
+
+
+def test_run_migrations_0_to_1(monkeypatch: pytest.MonkeyPatch) -> None:
+    # in-memory engine that preserves the same connection (StaticPool)
+    engine = create_engine(
+        "sqlite:///:memory:",
+        connect_args={"check_same_thread": False},
+        poolclass=StaticPool,
+    )
+
+    # ensure target is the LATEST_VERSION we expect for the test
+    monkeypatch.setattr(db_migration, "LATEST_VERSION", 1)
+
+    # run real migrations (will import trading_journal.models_v1 inside _mig_0_1)
+    final_version = db_migration.run_migrations(engine)
+    assert final_version == 1
+
+    # import snapshot models to validate schema
+    from trading_journal import models_v1
+
+    expected_tables = {
+        "trades": models_v1.Trades.__table__,
+        "cycles": models_v1.Cycles.__table__,
+    }
+
+    with engine.connect() as conn:
+        # check tables exist
+        rows = conn.execute(
+            text("SELECT name FROM sqlite_master WHERE type='table'")
+        ).fetchall()
+        found_tables = {r[0] for r in rows}
+        assert set(expected_tables.keys()).issubset(found_tables), (
+            f"missing tables: {set(expected_tables.keys()) - found_tables}"
+        )
+
+        # check user_version
+        uv = conn.execute(text("PRAGMA user_version")).fetchone()
+        assert uv is not None
+        assert int(uv[0]) == 1
+
+        # validate columns and (base) types for each expected table
+        dialect = conn.dialect
+        for tbl_name, table in expected_tables.items():
+            info_rows = conn.execute(text(f"PRAGMA table_info({tbl_name})")).fetchall()
+            # build mapping: column name -> declared type (upper)
+            actual_cols = {r[1]: (r[2] or "").upper() for r in info_rows}
+            for col in table.columns:
+                assert col.name in actual_cols, (
+                    f"column {col.name} missing in table {tbl_name}"
+                )
+                # compile expected type against this dialect
+                try:
+                    compiled = col.type.compile(
+                        dialect=dialect
+                    )  # e.g. VARCHAR(13), DATETIME
+                except Exception:
+                    compiled = str(col.type)
+                expected_base = _base_type_of(compiled)
+                actual_type = actual_cols[col.name]
+                actual_base = _base_type_of(actual_type) if actual_type else ""
+                # accept either direction (some dialect vs sqlite naming differences)
+                assert (expected_base in actual_base) or (
+                    actual_base in expected_base
+                ), (
+                    f"type mismatch for {tbl_name}.{col.name}: expected {expected_base}, got {actual_base}"
+                )
@@ -7,7 +7,7 @@ from sqlalchemy import event
 from sqlalchemy.pool import StaticPool
 from sqlmodel import Session, create_engine
 
-import trading_journal.db_migration
+from trading_journal import db_migration
 
 if TYPE_CHECKING:
     from collections.abc import Generator
@@ -33,6 +33,8 @@ class Database:
         def _enable_sqlite_pragmas(dbapi_conn: DBAPIConnection, _connection_record: object) -> None:
             try:
                 cur = dbapi_conn.cursor()
+                cur.execute("PRAGMA journal_mode=WAL;")
+                cur.execute("PRAGMA synchronous=NORMAL;")
                 cur.execute("PRAGMA foreign_keys=ON;")
                 cur.execute("PRAGMA busy_timeout=30000;")
                 cur.close()
@@ -43,7 +45,7 @@ class Database:
         event.listen(self._engine, "connect", _enable_sqlite_pragmas)
 
     def init_db(self) -> None:
-        pass
+        db_migration.run_migrations(self._engine)
 
    def get_session(self) -> Generator[Session, None, None]:
        session = Session(self._engine)
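Note: the rollback behavior that `test_rollback_on_exception` exercises lives in `get_session`, whose body is only partially visible in the hunk above. A minimal sketch of the assumed pattern (only the `session = Session(self._engine)` line appears in the diff; the try/except/finally structure is an assumption):

```python
# Hypothetical sketch of the assumed get_session generator; not part of the diff.
from collections.abc import Generator

from sqlmodel import Session


class Database:
    ...

    def get_session(self) -> Generator[Session, None, None]:
        session = Session(self._engine)
        try:
            yield session
            session.commit()
        except Exception:
            session.rollback()  # the behavior test_rollback_on_exception relies on
            raise
        finally:
            session.close()
```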
@@ -1,12 +1,13 @@
 from __future__ import annotations
 
-from typing import Callable
+from typing import TYPE_CHECKING, Callable
 
 from sqlalchemy import text
-from sqlalchemy.engine import Connection, Engine
 from sqlmodel import SQLModel
 
 # Latest schema version number
+if TYPE_CHECKING:
+    from sqlalchemy.engine import Connection, Engine
 
 LATEST_VERSION = 1
 
@@ -17,6 +18,8 @@ def _mig_0_1(engine: Engine) -> None:
     """
     # Ensure all models are imported before this is called (import side-effect registers tables)
     # e.g. trading_journal.models is imported in the caller / app startup.
+    from trading_journal import models_v1  # noqa: PLC0415, F401
+
     SQLModel.metadata.create_all(bind=engine)
 
 
@@ -31,7 +34,7 @@ def _get_sqlite_user_version(conn: Connection) -> int:
     return int(row[0]) if row and row[0] is not None else 0
 
 
-def _set_sqlite_user_version(conn, v: int) -> None:
+def _set_sqlite_user_version(conn: Connection, v: int) -> None:
     conn.execute(text(f"PRAGMA user_version = {int(v)}"))
 
 
@@ -54,23 +57,4 @@ def run_migrations(engine: Engine, target_version: int | None = None) -> int:
                 _set_sqlite_user_version(conn, cur_version + 1)
                 cur_version += 1
             return cur_version
-        else:
-            # generic migrations table for non-sqlite
-            conn.execute(
-                text("""
-                CREATE TABLE IF NOT EXISTS migrations (
-                    version INTEGER PRIMARY KEY,
-                    applied_at TEXT DEFAULT CURRENT_TIMESTAMP
-                )
-                """)
-            )
-            row = conn.execute(text("SELECT MAX(version) FROM migrations")).fetchone()
-            cur_version = int(row[0]) if row and row[0] is not None else 0
-            while cur_version < target:
-                fn = MIGRATIONS.get(cur_version)
-                if fn is None:
-                    raise RuntimeError(f"No migration from {cur_version} -> {cur_version + 1}")
-                fn(engine)
-                conn.execute(text("INSERT INTO migrations(version) VALUES (:v)"), {"v": cur_version + 1})
-                cur_version += 1
-            return cur_version
     return -1  # unknown / unsupported driver; no-op
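Note: the `MIGRATIONS` registry that `run_migrations` consults via `MIGRATIONS.get(cur_version)` is not part of this diff. A minimal sketch of its assumed shape, with names mirroring `db_migration.py` (the dict literal and the simplified `_mig_0_1` body are assumptions, not taken from the source):

```python
# Hypothetical sketch, not from the diff: assumed shape of the MIGRATIONS registry.
from typing import Callable

from sqlalchemy.engine import Engine
from sqlmodel import SQLModel


def _mig_0_1(engine: Engine) -> None:
    # Version 0 -> 1: create every table registered on SQLModel.metadata
    # (the real module first imports trading_journal.models_v1 so the
    # snapshot models register themselves as an import side effect).
    SQLModel.metadata.create_all(bind=engine)


# Key = schema version being upgraded FROM; value = function that upgrades to key + 1.
MIGRATIONS: dict[int, Callable[[Engine], None]] = {0: _mig_0_1}
```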
@@ -2,7 +2,6 @@ from __future__ import annotations
 
 from datetime import date, datetime  # noqa: TC003
 from enum import Enum
-from typing import TYPE_CHECKING
 
 from sqlmodel import Column, DateTime, Field, Relationship, SQLModel
 from sqlmodel import Enum as SQLEnum
@@ -15,6 +14,13 @@ class TradeType(str, Enum):
     EXERCISE_CALL = "EXERCISE_CALL"
 
 
+class TradeStrategy(str, Enum):
+    WHEELS = "WHEEL"
+    FX = "FX"
+    SPOT = "SPOT"
+    OTHER = "OTHER"
+
+
 class CycleStatus(str, Enum):
     OPEN = "OPEN"
     CLOSED = "CLOSED"
@@ -33,6 +39,7 @@ class Trades(SQLModel, table=True):
     symbol: str
     underlying_currency: str
     trade_type: TradeType = Field(sa_column=Column(SQLEnum(TradeType, name="trade_type_enum"), nullable=False))
+    trade_strategy: TradeStrategy = Field(sa_column=Column(SQLEnum(TradeStrategy, name="trade_strategy_enum"), nullable=False))
     trade_time_utc: datetime = Field(sa_column=Column(DateTime(timezone=True), nullable=False))
     expiry_date: date | None = Field(default=None, nullable=True)
     strike_price_cents: int | None = Field(default=None, nullable=True)
68 changes: backend/trading_journal/models_v1.py (new file)
@@ -0,0 +1,68 @@
+from __future__ import annotations
+
+from datetime import date, datetime  # noqa: TC003
+from enum import Enum
+
+from sqlmodel import Column, DateTime, Field, Relationship, SQLModel
+from sqlmodel import Enum as SQLEnum
+
+
+class TradeType(str, Enum):
+    SELL_PUT = "SELL_PUT"
+    ASSIGNMENT = "ASSIGNMENT"
+    SELL_CALL = "SELL_CALL"
+    EXERCISE_CALL = "EXERCISE_CALL"
+
+
+class TradeStrategy(str, Enum):
+    WHEELS = "WHEEL"
+    FX = "FX"
+    SPOT = "SPOT"
+    OTHER = "OTHER"
+
+
+class CycleStatus(str, Enum):
+    OPEN = "OPEN"
+    CLOSED = "CLOSED"
+
+
+class FundingSource(str, Enum):
+    CASH = "CASH"
+    MARGIN = "MARGIN"
+    MIXED = "MIXED"
+
+
+class Trades(SQLModel, table=True):
+    __tablename__ = "trades"
+    id: str | None = Field(default=None, primary_key=True)
+    user_id: str
+    symbol: str
+    underlying_currency: str
+    trade_type: TradeType = Field(sa_column=Column(SQLEnum(TradeType, name="trade_type_enum"), nullable=False))
+    trade_strategy: TradeStrategy = Field(sa_column=Column(SQLEnum(TradeStrategy, name="trade_strategy_enum"), nullable=False))
+    trade_time_utc: datetime = Field(sa_column=Column(DateTime(timezone=True), nullable=False))
+    expiry_date: date | None = Field(default=None, nullable=True)
+    strike_price_cents: int | None = Field(default=None, nullable=True)
+    quantity: int
+    price_cents: int
+    gross_cash_flow_cents: int
+    commission_cents: int
+    net_cash_flow_cents: int
+    cycle_id: str | None = Field(default=None, foreign_key="cycles.id", nullable=True)
+    cycle: Cycles | None = Relationship(back_populates="trades")
+
+
+class Cycles(SQLModel, table=True):
+    __tablename__ = "cycles"
+    id: str | None = Field(default=None, primary_key=True)
+    user_id: str
+    symbol: str
+    underlying_currency: str
+    start_date: date
+    end_date: date | None = Field(default=None, nullable=True)
+    status: CycleStatus = Field(sa_column=Column(SQLEnum(CycleStatus, name="cycle_status_enum"), nullable=False))
+    funding_source: FundingSource = Field(sa_column=Column(SQLEnum(FundingSource, name="funding_source_enum"), nullable=False))
+    capital_exposure_cents: int
+    loan_amount_cents: int | None = Field(default=None, nullable=True)
+    loan_interest_rate_bps: int | None = Field(default=None, nullable=True)
+    trades: list[Trades] = Relationship(back_populates="cycle")
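For orientation, a hedged usage sketch of the snapshot models above, showing the new `trade_strategy` column against an in-memory SQLite database (the id, symbol, and cent amounts are illustrative, not taken from the diff):

```python
# Hypothetical usage sketch, not part of the diff: create the v1 schema and
# insert one trade that uses the new trade_strategy enum column.
from datetime import datetime, timezone

from sqlmodel import Session, SQLModel, create_engine

from trading_journal.models_v1 import Trades, TradeStrategy, TradeType

engine = create_engine("sqlite:///:memory:")
SQLModel.metadata.create_all(engine)  # same call _mig_0_1 performs for version 0 -> 1

with Session(engine) as session:
    trade = Trades(
        id="t-1",  # illustrative primary key
        user_id="u-1",
        symbol="AAPL",
        underlying_currency="USD",
        trade_type=TradeType.SELL_PUT,
        trade_strategy=TradeStrategy.WHEELS,  # new column added in this change
        trade_time_utc=datetime.now(timezone.utc),
        quantity=1,
        price_cents=15000,
        gross_cash_flow_cents=15000,
        commission_cents=100,
        net_cash_flow_cents=14900,
    )
    session.add(trade)
    session.commit()
```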