left4me/l4d2web/services/l4d2_facade.py
mwiegand 92d6ebbe82
feat(l4d2-web): managed global map overlays with daily refresh
Adds two managed system overlays (l4d2center-maps, cedapug-maps) that
fetch curated map archives from upstream sources and reconcile addons
symlinks for non-Steam maps. A daily systemd timer enqueues a coalesced
refresh_global_overlays worker job; downloads, extraction, and rebuilds
run in the existing job worker and surface in the job log UI.

Schema: GlobalOverlaySource / GlobalOverlayItem / GlobalOverlayItemFile
plus nullable Job.user_id so system jobs render as "system" in the UI.
The new builder reconciles symlinks against the per-source vpk cache
and leaves foreign symlinks untouched. An initialize-time guard refuses
to mount a partial overlay if any expected vpk is missing from the cache.

Refresh service uses shutil.move to handle EXDEV when /tmp and the
cache live on different filesystems.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-08 08:05:14 +02:00

254 lines
9 KiB
Python

from dataclasses import dataclass
import json
from pathlib import Path
from sqlalchemy import select
from l4d2web.db import session_scope
from l4d2web.models import (
Blueprint,
BlueprintOverlay,
GlobalOverlayItem,
GlobalOverlayItemFile,
GlobalOverlaySource,
Overlay,
OverlayWorkshopItem,
Server,
WorkshopItem,
)
from l4d2web.services import host_commands
from l4d2web.services.global_map_cache import global_overlay_cache_root
from l4d2web.services.spec_yaml import write_temp_spec
from l4d2web.services.workshop_paths import cache_path
@dataclass(slots=True)
class ServerStatus:
    """Normalized status for a managed server, built by :func:`server_status`
    from the JSON payload emitted by ``l4d2ctl status --json``.
    """

    # High-level state string; "unknown" when the payload lacks the key.
    state: str
    # Verbatim "raw_active_state" value from the l4d2ctl payload —
    # presumably systemd's ActiveState; confirm against l4d2ctl.
    raw_active_state: str
    # Verbatim "raw_sub_state" value from the l4d2ctl payload —
    # presumably systemd's SubState; confirm against l4d2ctl.
    raw_sub_state: str
def build_server_spec_payload(server: Server, blueprint: Blueprint, overlay_refs: list[str]) -> dict:
    """Assemble the spec dict handed to ``l4d2ctl initialize``.

    The blueprint stores ``arguments`` and ``config`` as JSON strings,
    so both are decoded here before being embedded in the payload.
    """
    payload = {"port": server.port, "overlays": overlay_refs}
    payload["arguments"] = json.loads(blueprint.arguments)
    payload["config"] = json.loads(blueprint.config)
    return payload
def load_server_blueprint_bundle(server_id: int) -> tuple[Server, Blueprint, list[str]]:
    """Fetch a server, its blueprint, and the ordered overlay path refs.

    Raises ValueError when the server row or its blueprint row is missing.
    """
    with session_scope() as db:
        found_server = db.scalar(select(Server).where(Server.id == server_id))
        if found_server is None:
            raise ValueError("server not found")
        found_blueprint = db.scalar(
            select(Blueprint).where(Blueprint.id == found_server.blueprint_id)
        )
        if found_blueprint is None:
            raise ValueError("blueprint not found")
        # Overlay paths in blueprint position order — this ordering becomes
        # the lowerdir composition order downstream.
        path_values = db.scalars(
            select(Overlay.path)
            .join(BlueprintOverlay, BlueprintOverlay.overlay_id == Overlay.id)
            .where(BlueprintOverlay.blueprint_id == found_blueprint.id)
            .order_by(BlueprintOverlay.position)
        ).all()
        return found_server, found_blueprint, list(path_values)
def install_runtime(on_stdout=None, on_stderr=None, should_cancel=None) -> None:
    """Run ``l4d2ctl install`` on the host, forwarding stream callbacks."""
    command = ["l4d2ctl", "install"]
    host_commands.run_command(
        command,
        on_stdout=on_stdout,
        on_stderr=on_stderr,
        should_cancel=should_cancel,
    )
def initialize_server(server_id: int, on_stdout=None, on_stderr=None, should_cancel=None) -> None:
    """Build overlays, verify caches, then run ``l4d2ctl initialize``.

    Overlay builders run synchronously first so every symlink/dir exists
    before l4d2ctl composes the lowerdirs. The cache checks then fail
    loudly — listing the missing content — rather than silently mounting
    a partial overlay whose builders skipped not-yet-downloaded items.
    """
    server, blueprint, overlay_refs = load_server_blueprint_bundle(server_id)
    _run_blueprint_builders(
        blueprint_id=blueprint.id,
        on_stdout=on_stdout,
        on_stderr=on_stderr,
        should_cancel=should_cancel,
    )
    _check_workshop_overlay_caches(blueprint_id=blueprint.id)
    _check_global_overlay_caches(blueprint_id=blueprint.id)
    spec_file = write_temp_spec(build_server_spec_payload(server, blueprint, overlay_refs))
    try:
        host_commands.run_command(
            ["l4d2ctl", "initialize", server.name, "-f", str(spec_file)],
            on_stdout=on_stdout,
            on_stderr=on_stderr,
            should_cancel=should_cancel,
        )
    finally:
        # The temp spec is throwaway; always clean up, even on failure.
        spec_file.unlink(missing_ok=True)
def _run_blueprint_builders(
    *,
    blueprint_id: int,
    on_stdout=None,
    on_stderr=None,
    should_cancel=None,
) -> None:
    """Synchronously invoke each overlay's builder for the given blueprint.

    Raises ValueError when an attached overlay has no registered builder.
    """
    from l4d2web.services.overlay_builders import BUILDERS

    with session_scope() as db:
        attached = db.scalars(
            select(Overlay)
            .join(BlueprintOverlay, BlueprintOverlay.overlay_id == Overlay.id)
            .where(BlueprintOverlay.blueprint_id == blueprint_id)
            .order_by(BlueprintOverlay.position)
        ).all()
        # Detach so the ORM objects stay usable after the session closes.
        for row in attached:
            db.expunge(row)

    def _ignore(_line):
        return None

    emit_out = on_stdout if on_stdout is not None else _ignore
    emit_err = on_stderr if on_stderr is not None else _ignore
    is_cancelled = should_cancel if should_cancel is not None else (lambda: False)
    for row in attached:
        builder = BUILDERS.get(row.type)
        if builder is None:
            raise ValueError(f"no builder registered for overlay type {row.type!r}")
        builder.build(
            row,
            on_stdout=emit_out,
            on_stderr=emit_err,
            should_cancel=is_cancelled,
        )
def _check_workshop_overlay_caches(*, blueprint_id: int) -> None:
    """Raise RuntimeError if any workshop overlay attached to this blueprint
    has items whose vpk is not yet present in the workshop cache.

    Mounting a partial overlay would leave maps mysteriously missing
    in-game; surface the problem at initialize time instead.
    """
    with session_scope() as db:
        item_rows = db.execute(
            select(Overlay.id, Overlay.name, WorkshopItem.steam_id)
            .join(BlueprintOverlay, BlueprintOverlay.overlay_id == Overlay.id)
            .join(
                OverlayWorkshopItem,
                OverlayWorkshopItem.overlay_id == Overlay.id,
            )
            .join(
                WorkshopItem,
                WorkshopItem.id == OverlayWorkshopItem.workshop_item_id,
            )
            .where(
                BlueprintOverlay.blueprint_id == blueprint_id,
                Overlay.type == "workshop",
            )
        ).all()
    # Group the not-yet-downloaded steam ids per (overlay id, overlay name).
    absent: dict[tuple[int, str], list[str]] = {}
    for ov_id, ov_name, steam_id in item_rows:
        if cache_path(steam_id).exists():
            continue
        absent.setdefault((ov_id, ov_name), []).append(steam_id)
    if not absent:
        return
    fragments = [
        f"overlay {name!r} (id={ov_id}): items {', '.join(ids)} not yet downloaded"
        for (ov_id, name), ids in absent.items()
    ]
    detail = "; ".join(fragments)
    raise RuntimeError(
        f"workshop content missing — {detail}. "
        f"Open the overlay page and click Build (or wait for the auto-rebuild job), "
        f"then retry."
    )
def _check_global_overlay_caches(*, blueprint_id: int) -> None:
    """Raise RuntimeError if any global map overlay attached to this
    blueprint has manifest files absent from the global overlay cache.

    Mirrors the workshop cache check: partial cache state is surfaced at
    initialize time rather than silently mounting an incomplete overlay.
    """
    with session_scope() as db:
        file_rows = db.execute(
            select(
                Overlay.name,
                GlobalOverlayItemFile.vpk_name,
                GlobalOverlayItemFile.cache_path,
            )
            .join(BlueprintOverlay, BlueprintOverlay.overlay_id == Overlay.id)
            .join(GlobalOverlaySource, GlobalOverlaySource.overlay_id == Overlay.id)
            .join(GlobalOverlayItem, GlobalOverlayItem.source_id == GlobalOverlaySource.id)
            .join(GlobalOverlayItemFile, GlobalOverlayItemFile.item_id == GlobalOverlayItem.id)
            .where(BlueprintOverlay.blueprint_id == blueprint_id)
        ).all()
    cache_root = global_overlay_cache_root()
    absent: dict[str, list[str]] = {}
    for ov_name, vpk_name, rel_path in file_rows:
        if not (cache_root / rel_path).exists():
            absent.setdefault(ov_name, []).append(vpk_name)
    if not absent:
        return
    fragments = [
        f"overlay {name!r}: missing {', '.join(sorted(vpks))}"
        for name, vpks in sorted(absent.items())
    ]
    raise RuntimeError("global overlay content missing — " + "; ".join(fragments))
def _run_lifecycle_command(action: str, server_id: int, on_stdout, on_stderr, should_cancel) -> None:
    """Resolve the server's name and run ``l4d2ctl <action> <name>``.

    Shared body of start/stop/delete — they differed only in the verb.
    Raises ValueError (via load_server_blueprint_bundle) when the server
    or its blueprint row is missing.
    """
    server, _, _ = load_server_blueprint_bundle(server_id)
    host_commands.run_command(
        ["l4d2ctl", action, server.name],
        on_stdout=on_stdout,
        on_stderr=on_stderr,
        should_cancel=should_cancel,
    )


def start_server(server_id: int, on_stdout=None, on_stderr=None, should_cancel=None) -> None:
    """Start the server via ``l4d2ctl start``, forwarding stream callbacks."""
    _run_lifecycle_command("start", server_id, on_stdout, on_stderr, should_cancel)


def stop_server(server_id: int, on_stdout=None, on_stderr=None, should_cancel=None) -> None:
    """Stop the server via ``l4d2ctl stop``, forwarding stream callbacks."""
    _run_lifecycle_command("stop", server_id, on_stdout, on_stderr, should_cancel)


def delete_server(server_id: int, on_stdout=None, on_stderr=None, should_cancel=None) -> None:
    """Delete the server via ``l4d2ctl delete``, forwarding stream callbacks."""
    _run_lifecycle_command("delete", server_id, on_stdout, on_stderr, should_cancel)
def server_status(server_name: str) -> ServerStatus:
    """Query ``l4d2ctl status --json`` and normalize it into a ServerStatus.

    Missing keys (or empty stdout) degrade to "unknown" rather than raising.
    """
    result = host_commands.run_command(["l4d2ctl", "status", server_name, "--json"])
    data = json.loads(result.stdout or "{}")

    def _field(key: str) -> str:
        return str(data.get(key, "unknown"))

    return ServerStatus(
        state=_field("state"),
        raw_active_state=_field("raw_active_state"),
        raw_sub_state=_field("raw_sub_state"),
    )
def stream_server_logs(server_name: str, *, lines: int = 200, follow: bool = True):
    """Return a streaming handle over ``l4d2ctl logs`` output for the server."""
    follow_flag = "--follow" if follow else "--no-follow"
    return host_commands.stream_command(
        ["l4d2ctl", "logs", server_name, "--lines", str(lines), follow_flag]
    )