feat(l4d2-web): ScriptBuilder + BUILDERS registry update

Adds ScriptBuilder that runs user-authored bash inside the
left4me-script-sandbox helper via run_command, with a 20 GB post-build
disk cap. Registry now {"workshop", "script"}.
finish_job writes Overlay.last_build_status on build_overlay completion.
Drops GlobalMapOverlayBuilder and the now-unreachable
_check_global_overlay_caches in l4d2_facade.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
mwiegand 2026-05-08 15:39:13 +02:00
parent 43dc9b0ccf
commit d29afa41fa
No known key found for this signature in database
5 changed files with 280 additions and 121 deletions

View file

@@ -540,6 +540,11 @@ def finish_job(job_id: int, state: str, exit_code: int | None, error: str = "")
if server is not None:
server.last_error = "" if state == "succeeded" else error
server.updated_at = now
if job.operation == "build_overlay" and job.overlay_id is not None:
overlay = db.scalar(select(Overlay).where(Overlay.id == job.overlay_id))
if overlay is not None:
overlay.last_build_status = "ok" if state == "succeeded" else "failed"
overlay.updated_at = now
def append_job_log_line(job_id: int, stream: str, line: str, max_chars: int = 4096) -> int:

View file

@@ -8,16 +8,12 @@ from l4d2web.db import session_scope
from l4d2web.models import (
Blueprint,
BlueprintOverlay,
GlobalOverlayItem,
GlobalOverlayItemFile,
GlobalOverlaySource,
Overlay,
OverlayWorkshopItem,
Server,
WorkshopItem,
)
from l4d2web.services import host_commands
from l4d2web.services.global_map_cache import global_overlay_cache_root
from l4d2web.services.spec_yaml import write_temp_spec
from l4d2web.services.workshop_paths import cache_path
@ -83,7 +79,6 @@ def initialize_server(server_id: int, on_stdout=None, on_stderr=None, should_can
# them, but we don't want to mount a partial overlay silently — fail
# loudly with the missing IDs.
_check_workshop_overlay_caches(blueprint_id=blueprint.id)
_check_global_overlay_caches(blueprint_id=blueprint.id)
spec_path = write_temp_spec(build_server_spec_payload(server, blueprint, overlay_refs))
try:
@ -178,36 +173,6 @@ def _check_workshop_overlay_caches(*, blueprint_id: int) -> None:
)
def _check_global_overlay_caches(*, blueprint_id: int) -> None:
    """Fail loudly when a global map overlay attached to this blueprint
    references manifest files not yet present in the global overlay cache.

    Mirrors the workshop cache check so that partial cache state is surfaced
    at initialize time instead of silently mounting an incomplete overlay.
    """
    with session_scope() as db:
        rows = db.execute(
            select(Overlay.name, GlobalOverlayItemFile.vpk_name, GlobalOverlayItemFile.cache_path)
            .join(BlueprintOverlay, BlueprintOverlay.overlay_id == Overlay.id)
            .join(GlobalOverlaySource, GlobalOverlaySource.overlay_id == Overlay.id)
            .join(GlobalOverlayItem, GlobalOverlayItem.source_id == GlobalOverlaySource.id)
            .join(GlobalOverlayItemFile, GlobalOverlayItemFile.item_id == GlobalOverlayItem.id)
            .where(BlueprintOverlay.blueprint_id == blueprint_id)
        ).all()
    cache_root = global_overlay_cache_root()
    # overlay name -> list of vpk names whose cached file is absent on disk
    missing_by_overlay: dict[str, list[str]] = {}
    for overlay_name, vpk_name, rel_cache_path in rows:
        if not (cache_root / rel_cache_path).exists():
            missing_by_overlay.setdefault(overlay_name, []).append(vpk_name)
    if not missing_by_overlay:
        return
    details = [
        f"overlay {overlay_name!r}: missing {', '.join(sorted(names))}"
        for overlay_name, names in sorted(missing_by_overlay.items())
    ]
    raise RuntimeError("global overlay content missing — " + "; ".join(details))
def start_server(server_id: int, on_stdout=None, on_stderr=None, should_cancel=None) -> None:
server, _, _ = load_server_blueprint_bundle(server_id)
host_commands.run_command(

View file

@ -8,6 +8,8 @@ changes to the worker, the mount layer, or the blueprint editor.
from __future__ import annotations
import os
import subprocess
import tempfile
from pathlib import Path
from typing import Callable, Protocol
@ -16,8 +18,8 @@ from sqlalchemy import select
from l4d2host.paths import get_left4me_root
from l4d2web.db import session_scope
from l4d2web.models import GlobalOverlayItem, GlobalOverlayItemFile, GlobalOverlaySource, Overlay, OverlayWorkshopItem, WorkshopItem
from l4d2web.services.global_map_cache import global_overlay_cache_root
from l4d2web.models import Overlay, OverlayWorkshopItem, WorkshopItem
from l4d2web.services.host_commands import run_command
from l4d2web.services.workshop_paths import cache_path, workshop_cache_root
@ -25,6 +27,16 @@ CancelCheck = Callable[[], bool]
LogSink = Callable[[str], None]
SCRIPT_SANDBOX_HELPER = "/usr/local/libexec/left4me/left4me-script-sandbox"
DISK_BUDGET_BYTES = 20 * 1024**3
class BuildError(RuntimeError):
    """Builder-specific build failure (for example: disk-budget exceeded).

    Deliberately distinct from the subprocess-level HostCommandError /
    CommandCancelledError so callers can tell "the command itself failed"
    apart from "the builder rejected the result".
    """
class OverlayBuilder(Protocol):
def build(
self,
@ -40,6 +52,10 @@ def _overlay_root(overlay: Overlay) -> Path:
return get_left4me_root() / "overlays" / overlay.path
def overlay_path_for_id(overlay_id: int) -> Path:
    """Return the on-disk overlay directory for *overlay_id* (it may not
    exist yet)."""
    overlays_root = get_left4me_root() / "overlays"
    return overlays_root / str(overlay_id)
class WorkshopBuilder:
"""Diff-apply symlinks under `left4dead2/addons/` against the overlay's
current `WorkshopItem` associations. Cached items get an absolute symlink
@ -163,8 +179,45 @@ class WorkshopBuilder:
)
class GlobalMapOverlayBuilder:
"""Reconcile symlinks for managed global map overlays."""
def run_sandboxed_script(
    overlay_id: int,
    script_text: str,
    *,
    on_stdout: LogSink,
    on_stderr: LogSink,
    should_cancel: CancelCheck,
) -> None:
    """Persist *script_text* to a temp file and execute it through the
    privileged sandbox helper.

    Shared by ScriptBuilder.build and the wipe route. The temp file is
    removed afterwards even when the command fails or is cancelled.
    """
    with tempfile.NamedTemporaryFile("w", suffix=".sh", delete=False) as tmp:
        tmp.write(script_text or "")
        script_file = Path(tmp.name)
    try:
        run_command(
            ["sudo", "-n", SCRIPT_SANDBOX_HELPER, str(overlay_id), str(script_file)],
            on_stdout=on_stdout,
            on_stderr=on_stderr,
            should_cancel=should_cancel,
        )
    finally:
        # Equivalent to unlink + swallowing FileNotFoundError.
        script_file.unlink(missing_ok=True)
class ScriptBuilder:
"""Run an arbitrary user-authored bash script against the overlay dir
inside a bubblewrap + systemd-run sandbox. The script sees the overlay
dir as RW `/overlay` and a curated host RO mount; everything else is
isolated. After exit, enforce a 20 GB cap on `du -sb /overlay`."""
def build(
self,
@ -174,84 +227,28 @@ class GlobalMapOverlayBuilder:
on_stderr: LogSink,
should_cancel: CancelCheck,
) -> None:
addons_dir = _overlay_root(overlay) / "left4dead2" / "addons"
addons_dir.mkdir(parents=True, exist_ok=True)
# Ensure target dir exists so the helper's bind-mount validation passes.
overlay_path_for_id(overlay.id).mkdir(parents=True, exist_ok=True)
with session_scope() as db:
source = db.scalar(select(GlobalOverlaySource).where(GlobalOverlaySource.overlay_id == overlay.id))
if source is None:
raise ValueError(f"global overlay source for overlay {overlay.id} not found")
rows = db.execute(
select(GlobalOverlayItemFile.vpk_name, GlobalOverlayItemFile.cache_path)
.join(GlobalOverlayItem, GlobalOverlayItem.id == GlobalOverlayItemFile.item_id)
.where(GlobalOverlayItem.source_id == source.id)
).all()
source_key = source.source_key
cache_root = global_overlay_cache_root().resolve()
source_vpk_root = (global_overlay_cache_root() / source_key / "vpks").resolve()
desired: dict[str, Path] = {}
skipped = 0
for vpk_name, cache_path_value in rows:
target = (global_overlay_cache_root() / cache_path_value).resolve()
if not _is_under(target, source_vpk_root) or not target.exists():
on_stderr(f"global overlay {overlay.name!r}: missing cache file for {vpk_name}")
skipped += 1
continue
desired[vpk_name] = target
existing: dict[str, Path] = {}
for entry in os.scandir(addons_dir):
if not entry.is_symlink():
continue
try:
resolved = Path(os.readlink(entry.path)).resolve(strict=False)
except OSError:
continue
if _is_under(resolved, source_vpk_root):
existing[entry.name] = resolved
elif _is_under(resolved, cache_root):
on_stderr(f"global overlay {overlay.name!r}: leaving foreign cache symlink {entry.name}")
created = 0
removed = 0
unchanged = 0
for name, current_target in existing.items():
if should_cancel():
on_stderr("global overlay build cancelled mid-removal")
return
desired_target = desired.get(name)
if desired_target is None:
os.unlink(addons_dir / name)
removed += 1
elif current_target == desired_target:
unchanged += 1
else:
os.unlink(addons_dir / name)
current_names = {
name for name, current_target in existing.items() if name in desired and current_target == desired[name]
}
for name, target in desired.items():
if should_cancel():
on_stderr("global overlay build cancelled mid-creation")
return
if name in current_names:
continue
link_path = addons_dir / name
if link_path.exists() and not link_path.is_symlink():
on_stderr(f"refusing to overwrite non-symlink at {link_path}")
continue
if link_path.is_symlink():
on_stderr(f"refusing to overwrite foreign symlink at {link_path}")
continue
os.symlink(str(target), str(link_path))
created += 1
on_stdout(
f"global overlay {overlay.name!r}: created={created} removed={removed} "
f"unchanged={unchanged} skipped(missing)={skipped}"
run_sandboxed_script(
overlay.id,
overlay.script or "",
on_stdout=on_stdout,
on_stderr=on_stderr,
should_cancel=should_cancel,
)
self._enforce_disk_budget(overlay.id, on_stderr)
def _enforce_disk_budget(self, overlay_id: int, on_stderr: LogSink) -> None:
    """Raise BuildError("disk-cap-exceeded") when the overlay directory
    exceeds DISK_BUDGET_BYTES after a script build.

    Sizes the directory with `du -sb` (cumulative apparent size in bytes
    per GNU du) and parses the leading byte count from its output.
    """
    target = overlay_path_for_id(overlay_id)
    size_output = subprocess.check_output(["du", "-sb", str(target)])
    size_bytes = int(size_output.split()[0])
    if size_bytes <= DISK_BUDGET_BYTES:
        return
    # Derive the human-readable cap from the constant so this log line can
    # never drift out of sync with DISK_BUDGET_BYTES (was hard-coded "20 GB").
    cap_gb = DISK_BUDGET_BYTES // 1024**3
    on_stderr(
        f"overlay exceeded {cap_gb} GB disk cap: {size_bytes} bytes > "
        f"{DISK_BUDGET_BYTES} bytes"
    )
    raise BuildError("disk-cap-exceeded")
def _is_under(path: Path, root: Path) -> bool:
@ -264,6 +261,5 @@ def _is_under(path: Path, root: Path) -> bool:
BUILDERS: dict[str, OverlayBuilder] = {
"workshop": WorkshopBuilder(),
"l4d2center_maps": GlobalMapOverlayBuilder(),
"cedapug_maps": GlobalMapOverlayBuilder(),
"script": ScriptBuilder(),
}

View file

@ -564,6 +564,56 @@ def test_run_worker_once_dispatches_build_overlay(overlay_seeded_worker, monkeyp
assert (addons / "1001.vpk").is_symlink()
def test_build_overlay_writes_last_build_status_ok(
    overlay_seeded_worker, monkeypatch, tmp_path
) -> None:
    """A successful build_overlay job stamps Overlay.last_build_status='ok'."""
    app, ids = overlay_seeded_worker
    monkeypatch.setenv("LEFT4ME_ROOT", str(tmp_path))

    from l4d2web.services import overlay_builders

    class _OkBuilder:
        def build(self, overlay, *, on_stdout, on_stderr, should_cancel):
            on_stdout("stub build ok")

    monkeypatch.setitem(overlay_builders.BUILDERS, "workshop", _OkBuilder())
    job_id = add_job(ids.user, "build_overlay", server_id=None, overlay_id=ids.overlay)

    with app.app_context():
        assert run_worker_once() is True
    assert load_job(job_id).state == "succeeded"
    with session_scope() as s:
        row = s.query(Overlay).filter_by(id=ids.overlay).one()
        assert row.last_build_status == "ok"
def test_build_overlay_writes_last_build_status_failed(
    overlay_seeded_worker, monkeypatch, tmp_path
) -> None:
    """A failing build_overlay job stamps Overlay.last_build_status='failed'."""
    app, ids = overlay_seeded_worker
    monkeypatch.setenv("LEFT4ME_ROOT", str(tmp_path))

    from l4d2web.services import overlay_builders

    class _BoomBuilder:
        def build(self, overlay, *, on_stdout, on_stderr, should_cancel):
            raise RuntimeError("synthetic build failure")

    monkeypatch.setitem(overlay_builders.BUILDERS, "workshop", _BoomBuilder())
    job_id = add_job(ids.user, "build_overlay", server_id=None, overlay_id=ids.overlay)

    with app.app_context():
        assert run_worker_once() is True
    assert load_job(job_id).state == "failed"
    with session_scope() as s:
        row = s.query(Overlay).filter_by(id=ids.overlay).one()
        assert row.last_build_status == "failed"
def test_run_worker_once_dispatches_refresh(overlay_seeded_worker, monkeypatch, tmp_path) -> None:
app, ids = overlay_seeded_worker
monkeypatch.setenv("LEFT4ME_ROOT", str(tmp_path))

View file

@ -1,15 +1,17 @@
"""Tests for overlay builders (registry, WorkshopBuilder)."""
"""Tests for overlay builders (registry, WorkshopBuilder, ScriptBuilder)."""
from __future__ import annotations
import os
from datetime import UTC, datetime
from pathlib import Path
from types import SimpleNamespace
import pytest
from l4d2web.db import init_db, session_scope
from l4d2web.models import Overlay, OverlayWorkshopItem, User, WorkshopItem
from l4d2web.services import overlay_builders
from l4d2web.services.host_commands import CommandCancelledError, CommandResult
@pytest.fixture
@ -61,9 +63,13 @@ def _capture_logs():
return out, err, out.append, err.append
def test_registry_has_workshop() -> None:
    # "workshop" must be registered; the legacy "external" type must not be.
    builders = overlay_builders.BUILDERS
    assert "workshop" in builders
    assert "external" not in builders
def test_builders_registry() -> None:
    # Exactly the two supported overlay types — nothing more, nothing less.
    registered = set(overlay_builders.BUILDERS)
    assert registered == {"workshop", "script"}
def test_registry_excludes_legacy_types() -> None:
    # Dropped builder types must stay gone from the registry.
    legacy_types = ("external", "l4d2center_maps", "cedapug_maps")
    assert all(t not in overlay_builders.BUILDERS for t in legacy_types)
def test_registry_unknown_type_raises_keyerror() -> None:
@ -71,6 +77,13 @@ def test_registry_unknown_type_raises_keyerror() -> None:
overlay_builders.BUILDERS["nope"]
def test_workshop_builder_unchanged() -> None:
    """Regression guard against accidental removal during refactor."""
    workshop = overlay_builders.BUILDERS["workshop"]
    assert isinstance(workshop, overlay_builders.WorkshopBuilder)
    assert hasattr(workshop, "build")
def test_workshop_builder_creates_absolute_symlinks(env: Path) -> None:
_, overlay_id = _create_user_and_overlay("ws", "workshop")
cache_root = env / "workshop_cache"
@ -214,3 +227,133 @@ def test_workshop_builder_honors_should_cancel(env: Path) -> None:
overlay_builders.BUILDERS["workshop"].build(
overlay, on_stdout=lambda _x: None, on_stderr=lambda _x: None, should_cancel=cancel
)
# --- ScriptBuilder ---------------------------------------------------------
def _script_overlay(*, id_: int = 42, script: str = "echo hi") -> SimpleNamespace:
return SimpleNamespace(id=id_, type="script", path=str(id_), script=script)
def test_script_builder_invokes_helper(env, monkeypatch) -> None:
    """ScriptBuilder shells out to the sandbox helper with the overlay id
    and a tmpfile holding the script text, then unlinks the tmpfile."""
    captured: dict = {}

    def fake_run(cmd, *, on_stdout, on_stderr, should_cancel):
        captured["cmd"] = list(cmd)
        # Read via Path so the handle is closed deterministically — the
        # previous bare open(cmd[-1]).read() leaked the file object.
        captured["script_text"] = Path(cmd[-1]).read_text()
        captured["script_path_existed"] = os.path.exists(cmd[-1])
        return CommandResult(returncode=0, stdout="", stderr="")

    monkeypatch.setattr(overlay_builders, "run_command", fake_run)
    monkeypatch.setattr(
        overlay_builders.ScriptBuilder, "_enforce_disk_budget", lambda *a, **kw: None
    )

    overlay = _script_overlay()
    overlay_builders.ScriptBuilder().build(
        overlay,
        on_stdout=lambda _x: None,
        on_stderr=lambda _x: None,
        should_cancel=lambda: False,
    )

    assert captured["cmd"][:4] == [
        "sudo",
        "-n",
        "/usr/local/libexec/left4me/left4me-script-sandbox",
        "42",
    ]
    assert captured["script_text"] == "echo hi"
    assert captured["script_path_existed"] is True
    # Tmpfile is unlinked after build.
    assert not os.path.exists(captured["cmd"][-1])
def test_script_builder_disk_cap(env, monkeypatch) -> None:
    """A ~25 GB `du` result must trip the 20 GB cap and raise BuildError."""
    # Sandbox run "succeeds" …
    monkeypatch.setattr(
        overlay_builders,
        "run_command",
        lambda *a, **kw: CommandResult(returncode=0, stdout="", stderr=""),
    )
    # … but the overlay dir reports 25 GB on disk.
    monkeypatch.setattr(
        overlay_builders.subprocess,
        "check_output",
        lambda *a, **kw: b"25000000000\t/some/path\n",
    )

    stderr_lines: list[str] = []
    with pytest.raises(overlay_builders.BuildError):
        overlay_builders.ScriptBuilder().build(
            _script_overlay(script=""),
            on_stdout=lambda _x: None,
            on_stderr=stderr_lines.append,
            should_cancel=lambda: False,
        )
    assert any("20" in line and "GB" in line for line in stderr_lines), stderr_lines
def test_script_builder_streams_output(env, monkeypatch) -> None:
    """Lines emitted by run_command reach the build's stdout/stderr sinks."""
    def fake_run(cmd, *, on_stdout, on_stderr, should_cancel):
        on_stdout("hello")
        on_stderr("warn")
        return CommandResult(returncode=0, stdout="hello", stderr="warn")

    monkeypatch.setattr(overlay_builders, "run_command", fake_run)
    monkeypatch.setattr(
        overlay_builders.ScriptBuilder, "_enforce_disk_budget", lambda *a, **kw: None
    )

    stdout_lines: list[str] = []
    stderr_lines: list[str] = []
    overlay_builders.ScriptBuilder().build(
        _script_overlay(script=""),
        on_stdout=stdout_lines.append,
        on_stderr=stderr_lines.append,
        should_cancel=lambda: False,
    )
    assert stdout_lines == ["hello"]
    assert stderr_lines == ["warn"]
def test_script_builder_passes_should_cancel_through(env, monkeypatch) -> None:
    """The should_cancel callable handed to build() reaches run_command."""
    seen: dict = {}

    def fake_run(cmd, *, on_stdout, on_stderr, should_cancel):
        seen["should_cancel"] = should_cancel
        raise CommandCancelledError(returncode=1, cmd=cmd, output="", stderr="")

    monkeypatch.setattr(overlay_builders, "run_command", fake_run)
    monkeypatch.setattr(
        overlay_builders.ScriptBuilder, "_enforce_disk_budget", lambda *a, **kw: None
    )

    with pytest.raises(CommandCancelledError):
        overlay_builders.ScriptBuilder().build(
            _script_overlay(script=""),
            on_stdout=lambda _x: None,
            on_stderr=lambda _x: None,
            should_cancel=lambda: True,
        )
    # Calling the captured callable reproduces the caller's cancel signal.
    assert seen["should_cancel"]() is True
def test_script_builder_cleans_up_tmpfile_on_failure(env, monkeypatch) -> None:
    """The script tmpfile is unlinked even when run_command raises."""
    seen: dict = {}

    def fake_run(cmd, *, on_stdout, on_stderr, should_cancel):
        seen["script_path"] = cmd[-1]
        raise CommandCancelledError(returncode=1, cmd=cmd, output="", stderr="")

    monkeypatch.setattr(overlay_builders, "run_command", fake_run)

    with pytest.raises(CommandCancelledError):
        overlay_builders.ScriptBuilder().build(
            _script_overlay(script=""),
            on_stdout=lambda _x: None,
            on_stderr=lambda _x: None,
            should_cancel=lambda: False,
        )
    # The finally-block cleanup must have removed the tmpfile.
    assert not os.path.exists(seen["script_path"])