feat(l4d2-web): typed overlays + workshop schema migration

Adds Overlay.type and Overlay.user_id with two partial unique indexes
(externals globally unique by name; user overlays unique per user).
Adds WorkshopItem registry keyed on steam_id and a pure many-to-many
overlay_workshop_items association. Adds Job.overlay_id for build_overlay
job tracking. Switches overlays.id to AUTOINCREMENT so deleted IDs are
never reused.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
mwiegand 2026-05-07 16:35:13 +02:00
parent b46f52258d
commit 2543a05c12
No known key found for this signature in database
3 changed files with 424 additions and 3 deletions

View file

@@ -0,0 +1,174 @@
"""workshop overlays
Revision ID: 0002_workshop_overlays
Revises: b2c684fddbd3
Create Date: 2026-05-07
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = "0002_workshop_overlays"
down_revision: Union[str, Sequence[str], None] = "b2c684fddbd3"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def _existing_overlays_table() -> sa.Table:
    """Pre-migration shape of ``overlays``, used as ``copy_from`` so
    batch_alter_table rebuilds the table without the inline UNIQUE on
    ``name`` (replaced by partial unique indexes after the recreate)."""
    columns = (
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("name", sa.String(length=128), nullable=False),
        sa.Column("path", sa.String(length=512), nullable=False),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
    )
    return sa.Table("overlays", sa.MetaData(), *columns)
def upgrade() -> None:
    """Apply the workshop-overlay schema: typed overlays, partial unique
    name indexes, the ``workshop_items`` registry, the overlay/item
    association table, and ``jobs.overlay_id``.
    """
    # 1. Recreate `overlays` with `type`, `user_id`, autoincrement, and no inline UNIQUE on name.
    # recreate="always" with copy_from is needed on SQLite: removing the
    # inline UNIQUE on `name` requires a full table rebuild, and copy_from
    # supplies the pre-migration shape instead of relying on reflection.
    with op.batch_alter_table(
        "overlays",
        recreate="always",
        copy_from=_existing_overlays_table(),
        table_kwargs={"sqlite_autoincrement": True},  # deleted IDs never reused
    ) as batch_op:
        # server_default="external" backfills `type` for pre-existing rows
        # during the rebuild; the default is dropped again right below.
        batch_op.add_column(
            sa.Column(
                "type",
                sa.String(length=16),
                nullable=False,
                server_default="external",
            )
        )
        batch_op.add_column(
            sa.Column(
                "user_id",
                sa.Integer(),
                sa.ForeignKey("users.id", name="fk_overlays_user_id_users"),
                nullable=True,
            )
        )
        batch_op.create_index("ix_overlays_type_user_id", ["type", "user_id"])
    # Drop the temporary server_default once existing rows are backfilled.
    with op.batch_alter_table("overlays") as batch_op:
        batch_op.alter_column("type", server_default=None)
    # 2. Partial unique indexes for name uniqueness:
    #    - system overlays (user_id IS NULL): globally unique by name
    #    - user overlays (user_id IS NOT NULL): unique per user by name
    op.create_index(
        "uq_overlay_name_system",
        "overlays",
        ["name"],
        unique=True,
        sqlite_where=sa.text("user_id IS NULL"),
    )
    op.create_index(
        "uq_overlay_name_per_user",
        "overlays",
        ["name", "user_id"],
        unique=True,
        sqlite_where=sa.text("user_id IS NOT NULL"),
    )
    # 3. workshop_items registry (global, deduplicated by steam_id).
    op.create_table(
        "workshop_items",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("steam_id", sa.String(length=20), nullable=False, unique=True),
        sa.Column("title", sa.String(length=255), nullable=False, server_default=""),
        sa.Column("filename", sa.String(length=255), nullable=False, server_default=""),
        sa.Column("file_url", sa.Text(), nullable=False, server_default=""),
        sa.Column("file_size", sa.BigInteger(), nullable=False, server_default="0"),
        sa.Column("time_updated", sa.Integer(), nullable=False, server_default="0"),
        sa.Column("preview_url", sa.Text(), nullable=False, server_default=""),
        sa.Column("last_downloaded_at", sa.DateTime(), nullable=True),
        sa.Column("last_error", sa.Text(), nullable=False, server_default=""),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
    )
    # 4. overlay_workshop_items association.
    op.create_table(
        "overlay_workshop_items",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column(
            "overlay_id",
            sa.Integer(),
            # CASCADE: association rows die with their overlay.
            sa.ForeignKey("overlays.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column(
            "workshop_item_id",
            sa.Integer(),
            # RESTRICT: a registry item cannot be deleted while referenced.
            sa.ForeignKey("workshop_items.id", ondelete="RESTRICT"),
            nullable=False,
        ),
        sa.UniqueConstraint(
            "overlay_id", "workshop_item_id", name="uq_overlay_workshop_item"
        ),
    )
    op.create_index(
        "ix_owi_workshop_item",
        "overlay_workshop_items",
        ["workshop_item_id"],
    )
    # 5. Add overlay_id to jobs for build_overlay tracking.
    with op.batch_alter_table("jobs") as batch_op:
        batch_op.add_column(
            sa.Column(
                "overlay_id",
                sa.Integer(),
                sa.ForeignKey("overlays.id", name="fk_jobs_overlay_id_overlays"),
                nullable=True,
            )
        )
def downgrade() -> None:
    """Revert the workshop-overlay schema changes, mirroring upgrade() in
    reverse order."""
    # Reverse of step 5: remove jobs.overlay_id.
    with op.batch_alter_table("jobs") as batch_op:
        batch_op.drop_column("overlay_id")
    # Reverse of steps 3-4: association table first (it references
    # workshop_items), then the registry itself.
    op.drop_index("ix_owi_workshop_item", table_name="overlay_workshop_items")
    op.drop_table("overlay_workshop_items")
    op.drop_table("workshop_items")
    # Reverse of step 2: drop the partial unique indexes before rebuilding.
    op.drop_index("uq_overlay_name_per_user", table_name="overlays")
    op.drop_index("uq_overlay_name_system", table_name="overlays")
    op.drop_index("ix_overlays_type_user_id", table_name="overlays")
    # Recreate `overlays` to drop type/user_id and restore single-column UNIQUE on name.
    # `current_overlays` mirrors the post-upgrade on-disk shape so copy_from
    # matches what is actually in the database at this point.
    current_overlays = sa.Table(
        "overlays",
        sa.MetaData(),
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("name", sa.String(length=128), nullable=False),
        sa.Column("path", sa.String(length=512), nullable=False),
        sa.Column("type", sa.String(length=16), nullable=False),
        sa.Column(
            "user_id",
            sa.Integer(),
            sa.ForeignKey("users.id", name="fk_overlays_user_id_users"),
            nullable=True,
        ),
        sa.Column("created_at", sa.DateTime(), nullable=False),
        sa.Column("updated_at", sa.DateTime(), nullable=False),
    )
    with op.batch_alter_table(
        "overlays",
        recreate="always",
        copy_from=current_overlays,
    ) as batch_op:
        batch_op.drop_column("user_id")
        batch_op.drop_column("type")
        batch_op.create_unique_constraint("uq_overlays_name", ["name"])

View file

@@ -1,6 +1,17 @@
from datetime import UTC, datetime
from sqlalchemy import Boolean, DateTime, ForeignKey, Integer, String, Text
from sqlalchemy import (
BigInteger,
Boolean,
DateTime,
ForeignKey,
Index,
Integer,
String,
Text,
UniqueConstraint,
text,
)
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
@@ -25,14 +36,66 @@ class User(Base):
class Overlay(Base):
    """An overlay row: either a system/"external" overlay (user_id NULL) or
    a per-user overlay. Name uniqueness is enforced by the two partial
    indexes below instead of a plain UNIQUE on ``name``."""

    __tablename__ = "overlays"
    __table_args__ = (
        # Externals (no owner) are globally unique by name.
        Index(
            "uq_overlay_name_system",
            "name",
            unique=True,
            sqlite_where=text("user_id IS NULL"),
        ),
        # User-owned overlays are unique per (name, owner) pair.
        Index(
            "uq_overlay_name_per_user",
            "name",
            "user_id",
            unique=True,
            sqlite_where=text("user_id IS NOT NULL"),
        ),
        Index("ix_overlays_type_user_id", "type", "user_id"),
        # SQLite AUTOINCREMENT: deleted overlay IDs are never reused.
        {"sqlite_autoincrement": True},
    )
    id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True)
    name: Mapped[str] = mapped_column(String(128), nullable=False)
    path: Mapped[str] = mapped_column(String(512), nullable=False)
    # Overlay kind; "external" and "workshop" are the values used elsewhere
    # in this commit.
    type: Mapped[str] = mapped_column(String(16), nullable=False, default="external")
    # NULL for system/external overlays; the owning user otherwise.
    user_id: Mapped[int | None] = mapped_column(ForeignKey("users.id"), nullable=True)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False)
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False)
class WorkshopItem(Base):
    """Global registry of Steam Workshop items, deduplicated by steam_id."""

    __tablename__ = "workshop_items"
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Workshop item identifier, stored as a string; globally unique.
    steam_id: Mapped[str] = mapped_column(String(20), unique=True, nullable=False)
    title: Mapped[str] = mapped_column(String(255), default="", nullable=False)
    filename: Mapped[str] = mapped_column(String(255), default="", nullable=False)
    file_url: Mapped[str] = mapped_column(Text, default="", nullable=False)
    file_size: Mapped[int] = mapped_column(BigInteger, default=0, nullable=False)
    # Presumably Steam's last-update epoch timestamp — TODO confirm against
    # the workshop API client.
    time_updated: Mapped[int] = mapped_column(Integer, default=0, nullable=False)
    preview_url: Mapped[str] = mapped_column(Text, default="", nullable=False)
    # NULL until the item has been downloaded at least once.
    last_downloaded_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True)
    # Last failure message, if any (defaults to empty).
    last_error: Mapped[str] = mapped_column(Text, default="", nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False)
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=now_utc, nullable=False)
class OverlayWorkshopItem(Base):
    """Pure many-to-many association between overlays and workshop items."""

    __tablename__ = "overlay_workshop_items"
    __table_args__ = (
        # Each item may be linked to a given overlay only once.
        UniqueConstraint("overlay_id", "workshop_item_id", name="uq_overlay_workshop_item"),
        Index("ix_owi_workshop_item", "workshop_item_id"),
    )
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # CASCADE: association rows are removed when their overlay is deleted.
    overlay_id: Mapped[int] = mapped_column(
        ForeignKey("overlays.id", ondelete="CASCADE"), nullable=False
    )
    # RESTRICT: a registry item cannot be deleted while still referenced.
    workshop_item_id: Mapped[int] = mapped_column(
        ForeignKey("workshop_items.id", ondelete="RESTRICT"), nullable=False
    )
class Blueprint(Base):
__tablename__ = "blueprints"
@@ -78,6 +141,7 @@ class Job(Base):
id: Mapped[int] = mapped_column(Integer, primary_key=True)
user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), nullable=False)
server_id: Mapped[int | None] = mapped_column(ForeignKey("servers.id"), nullable=True)
overlay_id: Mapped[int | None] = mapped_column(ForeignKey("overlays.id"), nullable=True)
operation: Mapped[str] = mapped_column(String(32), nullable=False)
state: Mapped[str] = mapped_column(String(16), default="queued", nullable=False)
exit_code: Mapped[int | None] = mapped_column(Integer, nullable=True)

View file

@@ -0,0 +1,183 @@
"""Tests for the workshop-overlay schema additions: typed Overlay, partial
unique indexes, WorkshopItem registry, and overlay_workshop_items association.
"""
import pytest
from sqlalchemy.exc import IntegrityError
from l4d2web.db import init_db, session_scope
from l4d2web.models import (
Job,
Overlay,
OverlayWorkshopItem,
User,
WorkshopItem,
)
@pytest.fixture
def db(tmp_path, monkeypatch):
    """Point DATABASE_URL at a throwaway SQLite file and initialise the schema."""
    db_file = tmp_path / "workshop.db"
    monkeypatch.setenv("DATABASE_URL", f"sqlite:///{db_file}")
    init_db()
    yield
def _make_user(username: str) -> int:
    """Insert a user row and return its primary key."""
    with session_scope() as session:
        created = User(username=username, password_digest="x")
        session.add(created)
        session.flush()
        return created.id
def test_overlay_has_type_and_user_id(db) -> None:
    """A freshly inserted overlay defaults to type='external' with no owner."""
    with session_scope() as session:
        session.add(Overlay(name="standard", path="standard"))
        session.flush()
        fetched = session.query(Overlay).filter_by(name="standard").one()
        assert fetched.type == "external"
        assert fetched.user_id is None
def test_two_externals_with_same_name_are_rejected(db) -> None:
    """The partial unique index over user_id IS NULL rejects a second
    external overlay reusing an existing name."""
    with session_scope() as session:
        session.add(Overlay(name="shared", path="shared", type="external", user_id=None))
        session.flush()
    with pytest.raises(IntegrityError), session_scope() as session:
        session.add(Overlay(name="shared", path="other", type="external", user_id=None))
        session.flush()
def test_two_users_can_share_workshop_overlay_name(db) -> None:
    """Different owners may each have an overlay with the same name."""
    owner_ids = [_make_user(name) for name in ("alice", "bob")]
    with session_scope() as session:
        for owner_id, path in zip(owner_ids, ("1", "2")):
            session.add(
                Overlay(name="my-maps", path=path, type="workshop", user_id=owner_id)
            )
        session.flush()
    with session_scope() as session:
        found = session.query(Overlay).filter_by(name="my-maps").all()
        assert {row.user_id for row in found} == set(owner_ids)
def test_same_user_cannot_have_duplicate_workshop_name(db) -> None:
    """The per-user partial unique index rejects a duplicate (name, owner)."""
    owner_id = _make_user("alice")
    with session_scope() as session:
        session.add(Overlay(name="dupe", path="1", type="workshop", user_id=owner_id))
        session.flush()
    with pytest.raises(IntegrityError), session_scope() as session:
        session.add(Overlay(name="dupe", path="2", type="workshop", user_id=owner_id))
        session.flush()
def test_workshop_item_steam_id_is_unique(db) -> None:
    """steam_id carries a UNIQUE constraint; a second insert must fail."""
    with session_scope() as session:
        session.add(WorkshopItem(steam_id="123", title="Map A"))
        session.flush()
    with pytest.raises(IntegrityError), session_scope() as session:
        session.add(WorkshopItem(steam_id="123", title="Map A duplicate"))
        session.flush()
def test_overlay_workshop_item_unique_per_overlay(db) -> None:
    """uq_overlay_workshop_item forbids linking the same workshop item to
    one overlay twice."""
    owner_id = _make_user("alice")
    with session_scope() as session:
        overlay = Overlay(name="my-maps", path="1", type="workshop", user_id=owner_id)
        item = WorkshopItem(steam_id="555", title="A")
        session.add_all([overlay, item])
        session.flush()
        session.add(OverlayWorkshopItem(overlay_id=overlay.id, workshop_item_id=item.id))
        session.flush()
        # Capture the PKs while the instances are still attached.
        link = (overlay.id, item.id)
    with pytest.raises(IntegrityError), session_scope() as session:
        session.add(OverlayWorkshopItem(overlay_id=link[0], workshop_item_id=link[1]))
        session.flush()
def test_deleting_overlay_cascades_associations_but_not_workshop_items(db) -> None:
    """Deleting an overlay must remove its association rows (ON DELETE
    CASCADE) while leaving the shared workshop_items registry row intact
    (the RESTRICT FK points the other way)."""
    user_id = _make_user("alice")
    with session_scope() as s:
        ov = Overlay(name="my-maps", path="1", type="workshop", user_id=user_id)
        wi = WorkshopItem(steam_id="777", title="A")
        s.add_all([ov, wi])
        s.flush()
        s.add(OverlayWorkshopItem(overlay_id=ov.id, workshop_item_id=wi.id))
        s.flush()
        overlay_id = ov.id
    # Delete via raw connection to actually exercise ON DELETE CASCADE / RESTRICT.
    from l4d2web.db import get_engine

    engine = get_engine()
    with engine.begin() as conn:
        # SQLite keeps FK enforcement off by default; enable it per-connection.
        conn.exec_driver_sql("PRAGMA foreign_keys=ON")
        # Fix: parameterized statement instead of f-string interpolation —
        # never build SQL from Python values, even in test code.
        conn.exec_driver_sql("DELETE FROM overlays WHERE id = ?", (overlay_id,))
    with session_scope() as s:
        assert s.query(OverlayWorkshopItem).count() == 0
        assert s.query(WorkshopItem).filter_by(steam_id="777").count() == 1
def test_job_has_overlay_id_column(db) -> None:
    """A build_overlay job can reference its overlay via the overlay_id FK."""
    owner_id = _make_user("alice")
    with session_scope() as session:
        overlay = Overlay(name="my-maps", path="1", type="workshop", user_id=owner_id)
        session.add(overlay)
        session.flush()
        job = Job(
            user_id=owner_id,
            server_id=None,
            overlay_id=overlay.id,
            operation="build_overlay",
            state="queued",
        )
        session.add(job)
        session.flush()
    with session_scope() as session:
        stored = session.query(Job).filter_by(operation="build_overlay").one()
        assert stored.overlay_id is not None
        assert stored.server_id is None
def test_overlay_id_does_not_reuse_after_delete(db) -> None:
    """SQLite AUTOINCREMENT must guarantee deleted IDs are never reused."""
    with session_scope() as session:
        for overlay_name, overlay_path in (("first", "1"), ("second", "2")):
            session.add(
                Overlay(name=overlay_name, path=overlay_path, type="external", user_id=None)
            )
        session.flush()
        last_id = max(row.id for row in session.query(Overlay).all())
    with session_scope() as session:
        doomed = session.query(Overlay).filter_by(id=last_id).one()
        session.delete(doomed)
        session.flush()
    with session_scope() as session:
        session.add(Overlay(name="third", path="3", type="external", user_id=None))
        session.flush()
        new_id = session.query(Overlay).filter_by(name="third").one().id
        assert new_id > last_id, (
            f"AUTOINCREMENT should never reuse IDs, but got {new_id} after deleting {last_id}"
        )