feat(l4d2-web): script overlay schema — add overlay.script + last_build_status, drop globals tables

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
mwiegand 2026-05-08 15:33:04 +02:00
parent 78ead0b41d
commit 43dc9b0ccf
No known key found for this signature in database
4 changed files with 186 additions and 63 deletions

View file

@@ -0,0 +1,79 @@
"""script overlays
Revision ID: 0005_script_overlays
Revises: 0004_drop_legacy_external_overlay_type
Create Date: 2026-05-08
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = "0005_script_overlays"
down_revision: Union[str, Sequence[str], None] = "0004_drop_legacy_external_overlay_type"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Replace the global-overlay subsystem with per-overlay build scripts.

    Wipes legacy 'l4d2center_maps'/'cedapug_maps' overlay rows (and all rows
    referencing them), drops the global_overlay_* tables, and adds the new
    ``script`` and ``last_build_status`` columns to ``overlays``.
    """
    legacy_types = "('l4d2center_maps', 'cedapug_maps')"

    # 1. Delete referencing rows first, then the legacy overlay rows themselves.
    for dependent_table in ("jobs", "blueprint_overlays", "overlay_workshop_items"):
        op.execute(
            f"DELETE FROM {dependent_table} "
            "WHERE overlay_id IN (SELECT id FROM overlays "
            f"WHERE type IN {legacy_types})"
        )
    op.execute(f"DELETE FROM overlays WHERE type IN {legacy_types}")

    # 2. Drop globals tables children-first so FK dependencies are respected
    #    (item_files -> items -> sources), each index before its table.
    for index_name, table_name in (
        ("ix_global_overlay_item_files_item", "global_overlay_item_files"),
        ("ix_global_overlay_items_source", "global_overlay_items"),
        ("ix_global_overlay_sources_type", "global_overlay_sources"),
    ):
        op.drop_index(index_name, table_name=table_name)
        op.drop_table(table_name)

    # 3. New NOT NULL columns need a server_default so existing rows backfill;
    #    batch mode keeps this working on SQLite.
    with op.batch_alter_table("overlays") as batch_op:
        for column_name, column_type in (
            ("script", sa.Text()),
            ("last_build_status", sa.String(length=16)),
        ):
            batch_op.add_column(
                sa.Column(
                    column_name,
                    column_type,
                    nullable=False,
                    server_default="",
                )
            )
def downgrade() -> None:
    """No-op: this migration is intentionally irreversible.

    The legacy overlay rows and global_overlay_* tables are deleted on
    upgrade, so there is no data left to restore.
    """

View file

@@ -59,69 +59,8 @@ class Overlay(Base):
path: Mapped[str] = mapped_column(String(512), nullable=False) path: Mapped[str] = mapped_column(String(512), nullable=False)
type: Mapped[str] = mapped_column(String(16), nullable=False, default="workshop") type: Mapped[str] = mapped_column(String(16), nullable=False, default="workshop")
user_id: Mapped[int | None] = mapped_column(ForeignKey("users.id"), nullable=True) user_id: Mapped[int | None] = mapped_column(ForeignKey("users.id"), nullable=True)
created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False) script: Mapped[str] = mapped_column(Text, default="", nullable=False)
updated_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False) last_build_status: Mapped[str] = mapped_column(String(16), default="", nullable=False)
class GlobalOverlaySource(Base):
    """Upstream source backing a global overlay (one source per overlay row)."""

    __tablename__ = "global_overlay_sources"
    # Lookups filter by source_type, hence the index.
    __table_args__ = (Index("ix_global_overlay_sources_type", "source_type"),)
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # One-to-one with overlays (unique FK); deleting the overlay cascades here.
    overlay_id: Mapped[int] = mapped_column(
        ForeignKey("overlays.id", ondelete="CASCADE"), unique=True, nullable=False
    )
    # Globally unique identifier for the source; source_url is where its
    # manifest/content is fetched from.
    source_key: Mapped[str] = mapped_column(String(64), unique=True, nullable=False)
    source_type: Mapped[str] = mapped_column(String(32), nullable=False)
    source_url: Mapped[str] = mapped_column(Text, nullable=False)
    # Hash of the last seen manifest — presumably used to skip unchanged
    # refreshes; empty string until first refresh. TODO confirm against refresher.
    last_manifest_hash: Mapped[str] = mapped_column(String(64), default="", nullable=False)
    last_refreshed_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
    # Last refresh error text; empty string means the last refresh succeeded
    # (or none has run yet).
    last_error: Mapped[str] = mapped_column(Text, default="", nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False)
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False)
class GlobalOverlayItem(Base):
    """One downloadable item advertised by a GlobalOverlaySource."""

    __tablename__ = "global_overlay_items"
    __table_args__ = (
        # item_key is unique only within its source, not globally.
        UniqueConstraint("source_id", "item_key", name="uq_global_overlay_item_source_key"),
        Index("ix_global_overlay_items_source", "source_id"),
    )
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Owning source; cascade-deleted with it.
    source_id: Mapped[int] = mapped_column(
        ForeignKey("global_overlay_sources.id", ondelete="CASCADE"), nullable=False
    )
    item_key: Mapped[str] = mapped_column(String(255), nullable=False)
    display_name: Mapped[str] = mapped_column(String(255), default="", nullable=False)
    download_url: Mapped[str] = mapped_column(Text, nullable=False)
    # Expected file metadata published by the source; empty/NULL when unknown.
    expected_vpk_name: Mapped[str] = mapped_column(String(255), default="", nullable=False)
    expected_size: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    expected_md5: Mapped[str] = mapped_column(String(32), default="", nullable=False)
    # etag/last_modified/content_length look like HTTP conditional-request
    # validators for re-download checks — TODO confirm against downloader.
    etag: Mapped[str] = mapped_column(String(255), default="", nullable=False)
    last_modified: Mapped[str] = mapped_column(String(255), default="", nullable=False)
    content_length: Mapped[int | None] = mapped_column(BigInteger, nullable=True)
    last_downloaded_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
    # Last download error text; empty string means no recorded failure.
    last_error: Mapped[str] = mapped_column(Text, default="", nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False)
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False)
class GlobalOverlayItemFile(Base):
    """A single cached VPK file belonging to a GlobalOverlayItem."""

    __tablename__ = "global_overlay_item_files"
    __table_args__ = (
        # A given item cannot record the same vpk_name twice.
        UniqueConstraint("item_id", "vpk_name", name="uq_global_overlay_item_file_name"),
        Index("ix_global_overlay_item_files_item", "item_id"),
    )
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Owning item; cascade-deleted with it.
    item_id: Mapped[int] = mapped_column(
        ForeignKey("global_overlay_items.id", ondelete="CASCADE"), nullable=False
    )
    vpk_name: Mapped[str] = mapped_column(String(255), nullable=False)
    # Filesystem location of the cached copy.
    cache_path: Mapped[str] = mapped_column(Text, nullable=False)
    size: Mapped[int] = mapped_column(BigInteger, nullable=False)
    # Empty string when no checksum was recorded — presumably optional upstream.
    md5: Mapped[str] = mapped_column(String(32), default="", nullable=False)
created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False)
updated_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False) updated_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False)

View file

@@ -0,0 +1,96 @@
"""Tests for the alembic migration history.
The 0005 migration adds `script` and `last_build_status` columns to `overlays`,
drops the global_overlay_* tables, and wipes legacy l4d2center_maps/cedapug_maps
overlay rows. This module pins those behaviors.
"""
from pathlib import Path
import pytest
from alembic import command
from alembic.config import Config
from sqlalchemy import create_engine, inspect, text
_ALEMBIC_DIR = Path(__file__).resolve().parents[1] / "alembic"
def _alembic_config(db_url: str) -> Config:
    """Build an alembic Config pointing at this project's migration scripts."""
    config = Config()
    for option, value in (
        ("script_location", str(_ALEMBIC_DIR)),
        ("sqlalchemy.url", db_url),
    ):
        config.set_main_option(option, value)
    return config
@pytest.fixture
def db_url(tmp_path, monkeypatch):
    """Yield a sqlite URL for a temp-file DB, mirrored into DATABASE_URL."""
    database_file = tmp_path / "alembic.db"
    sqlite_url = f"sqlite:///{database_file}"
    monkeypatch.setenv("DATABASE_URL", sqlite_url)
    yield sqlite_url
def test_upgrade_0005_adds_script_columns(db_url) -> None:
    """Upgrading to 0005 adds the script columns, drops the globals tables,
    and wipes legacy l4d2center_maps/cedapug_maps overlay rows while leaving
    ordinary workshop overlays intact with empty-string defaults.
    """
    cfg = _alembic_config(db_url)
    command.upgrade(cfg, "0004_drop_legacy_external_overlay_type")
    engine = create_engine(db_url)
    try:
        with engine.begin() as conn:
            # Seed two legacy global-type overlays (must be wiped by 0005)
            # and one workshop overlay (must survive). Bound parameters keep
            # the seed values out of the SQL string.
            for name, path, overlay_type in (
                ("legacy-l4d2center", "1", "l4d2center_maps"),
                ("legacy-cedapug", "2", "cedapug_maps"),
                ("keep-workshop", "3", "workshop"),
            ):
                conn.execute(
                    text(
                        "INSERT INTO overlays "
                        "(name, path, type, created_at, updated_at) "
                        "VALUES (:name, :path, :type, '2026-01-01', '2026-01-01')"
                    ),
                    {"name": name, "path": path, "type": overlay_type},
                )
        command.upgrade(cfg, "0005_script_overlays")
        inspector = inspect(engine)
        # New NOT NULL columns must exist on overlays.
        overlay_cols = {c["name"]: c for c in inspector.get_columns("overlays")}
        assert "script" in overlay_cols
        assert "last_build_status" in overlay_cols
        assert overlay_cols["script"]["nullable"] is False
        assert overlay_cols["last_build_status"]["nullable"] is False
        # The global_overlay_* tables are gone.
        table_names = set(inspector.get_table_names())
        assert "global_overlay_sources" not in table_names
        assert "global_overlay_items" not in table_names
        assert "global_overlay_item_files" not in table_names
        with engine.connect() as conn:
            # Only the workshop overlay survives the legacy wipe.
            rows = conn.execute(
                text("SELECT name, type FROM overlays ORDER BY name")
            ).all()
            assert rows == [("keep-workshop", "workshop")]
            # Pre-existing rows are backfilled with the empty-string defaults.
            defaults = conn.execute(
                text(
                    "SELECT script, last_build_status FROM overlays "
                    "WHERE name = 'keep-workshop'"
                )
            ).one()
            assert defaults == ("", "")
    finally:
        # Dispose the pool so the sqlite file handle is released promptly;
        # the original leaked the engine until garbage collection.
        engine.dispose()
def test_downgrade_0005_skipped() -> None:
    """Project convention (established in 0004): destructive migrations stay
    one-way, so no downgrade path is exercised or maintained for 0005."""
    pytest.skip("0005 is one-way: globals data is gone after upgrade")

View file

@@ -38,6 +38,15 @@ def test_overlay_has_type_and_user_id(db) -> None:
assert row.user_id is None assert row.user_id is None
def test_overlay_has_script_columns(db) -> None:
    """A newly created overlay gets empty-string defaults for both
    script and last_build_status."""
    with session_scope() as session:
        session.add(Overlay(name="defaulted", path="1"))
        session.flush()
        stored = session.query(Overlay).filter_by(name="defaulted").one()
        assert stored.script == ""
        assert stored.last_build_status == ""
def test_two_system_overlays_with_same_name_are_rejected(db) -> None: def test_two_system_overlays_with_same_name_are_rejected(db) -> None:
with session_scope() as s: with session_scope() as s:
s.add(Overlay(name="shared", path="shared", type="l4d2center_maps", user_id=None)) s.add(Overlay(name="shared", path="shared", type="l4d2center_maps", user_id=None))