Adds two managed system overlays (l4d2center-maps, cedapug-maps) that fetch curated map archives from upstream sources and reconcile addons symlinks for non-Steam maps. A daily systemd timer enqueues a coalesced refresh_global_overlays worker job; downloads, extraction, and rebuilds run in the existing job worker and surface in the job log UI. Schema: GlobalOverlaySource / GlobalOverlayItem / GlobalOverlayItemFile plus nullable Job.user_id so system jobs render as "system" in the UI. The new builder reconciles symlinks against the per-source vpk cache and leaves foreign symlinks untouched. Initialize-time guard refuses to mount a partial overlay if any expected vpk is missing from cache. Refresh service uses shutil.move to handle EXDEV when /tmp and the cache live on different filesystems. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
168 lines
7.9 KiB
Python
168 lines
7.9 KiB
Python
from __future__ import annotations

import shutil
import tempfile
from datetime import UTC, datetime
from pathlib import Path

from sqlalchemy import delete, select

from l4d2web.db import session_scope
from l4d2web.models import GlobalOverlayItem, GlobalOverlayItemFile, GlobalOverlaySource, Overlay
from l4d2web.services.global_map_cache import (
    archive_dir,
    download_archive,
    extracted_vpk_md5,
    safe_extract_7z_vpks,
    safe_extract_zip_vpks,
    vpk_dir,
)
from l4d2web.services.global_map_sources import (
    GlobalMapManifestItem,
    fetch_cedapug_manifest,
    fetch_l4d2center_manifest,
)
from l4d2web.services.global_overlays import ensure_global_overlays
|
|
|
|
|
|
def refresh_global_overlays(*, on_stdout, on_stderr, should_cancel) -> list[str]:
    """Refresh every managed global overlay source and rebuild its overlay.

    Ensures the system overlay rows exist, then for each known source:
    fetch the upstream manifest, reconcile DB items and cached vpks, and
    rebuild the overlay's symlinks. Returns the sorted keys of the sources
    that completed. If ``should_cancel()`` fires between sources, the
    partial list of completed keys is returned as-is.
    """
    with session_scope() as db:
        ensure_global_overlays(db)

    sources = (
        ("l4d2center-maps", fetch_l4d2center_manifest),
        ("cedapug-maps", fetch_cedapug_manifest),
    )
    completed: list[str] = []
    for source_key, fetch_manifest in sources:
        # Check for cancellation before each (potentially slow) network fetch.
        if should_cancel():
            on_stderr("global overlay refresh cancelled before manifest fetch")
            return completed
        manifest_hash, manifest_items = fetch_manifest()
        on_stdout(f"{source_key}: fetched manifest with {len(manifest_items)} item(s)")
        overlay = _refresh_source(
            source_key,
            manifest_hash,
            manifest_items,
            on_stdout=on_stdout,
            on_stderr=on_stderr,
            should_cancel=should_cancel,
        )
        build_global_overlay(overlay, on_stdout=on_stdout, on_stderr=on_stderr, should_cancel=should_cancel)
        completed.append(source_key)
    return sorted(completed)
|
|
|
|
|
|
def _refresh_source(source_key: str, manifest_hash: str, manifest_items: list[GlobalMapManifestItem], *, on_stdout, on_stderr, should_cancel) -> Overlay:
    """Reconcile one source's DB items with its upstream manifest, then
    download/refresh each item's files.

    Returns the source's ``Overlay`` row, expunged from the session so its
    already-loaded attributes remain readable after the session closes.

    Raises:
        ValueError: if the ``GlobalOverlaySource`` row or its ``Overlay``
            row is missing (they are created by ``ensure_global_overlays``,
            which the caller runs first).
    """
    now = datetime.now(UTC)
    # Item keys that should exist after this refresh; anything else is stale.
    desired_keys = {item.item_key for item in manifest_items}
    with session_scope() as db:
        source = db.scalar(select(GlobalOverlaySource).where(GlobalOverlaySource.source_key == source_key))
        if source is None:
            raise ValueError(f"global overlay source {source_key!r} not found")
        overlay = db.scalar(select(Overlay).where(Overlay.id == source.overlay_id))
        if overlay is None:
            raise ValueError(f"overlay for source {source_key!r} not found")
        # Snapshot current items keyed by item_key for the diff below.
        existing_items: dict[str, GlobalOverlayItem] = {item.item_key: item for item in db.scalars(select(GlobalOverlayItem).where(GlobalOverlayItem.source_id == source.id)).all()}
        # Drop items that disappeared from the upstream manifest.
        # NOTE(review): associated GlobalOverlayItemFile rows are presumably
        # removed via cascade on the relationship — confirm in the models.
        for old_key, old_item in list(existing_items.items()):
            if old_key not in desired_keys:
                db.delete(old_item)
        # Upsert each manifest item's metadata.
        for manifest_item in manifest_items:
            item = existing_items.get(manifest_item.item_key)
            if item is None:
                item = GlobalOverlayItem(source_id=source.id, item_key=manifest_item.item_key, download_url=manifest_item.download_url)
                db.add(item)
                # Flush so the new row is persisted (and gets its primary key)
                # before downloads re-query it in a later session.
                db.flush()
            item.display_name = manifest_item.display_name
            item.download_url = manifest_item.download_url
            item.expected_vpk_name = manifest_item.expected_vpk_name
            item.expected_size = manifest_item.expected_size
            item.expected_md5 = manifest_item.expected_md5
            item.updated_at = now
        source.last_manifest_hash = manifest_hash
        source.last_refreshed_at = now
        source.last_error = ""
        source.updated_at = now
        # Detach the overlay so the caller can read its attributes after the
        # session commits and closes.
        db.expunge(overlay)

    # Downloads run outside the session above so long network operations do
    # not hold a DB transaction open; each item manages its own sessions.
    for manifest_item in manifest_items:
        if should_cancel():
            on_stderr(f"{source_key}: refresh cancelled during downloads")
            return overlay
        _refresh_item(source_key, manifest_item, on_stdout=on_stdout, on_stderr=on_stderr, should_cancel=should_cancel)
    return overlay
|
|
|
|
|
|
def _refresh_item(source_key: str, manifest_item: GlobalMapManifestItem, *, on_stdout, on_stderr, should_cancel) -> None:
    """Download, verify, and record one manifest item's vpk files.

    Download/extract failures are persisted on the item (``last_error``) and
    logged to the job's stderr instead of propagating, so one bad item does
    not abort the rest of the source refresh.

    Raises:
        ValueError: if the source or item row vanished between the manifest
            sync and this download (not expected in normal operation).
    """
    try:
        files, etag, last_modified, content_length = download_and_extract_item(
            source_key, manifest_item, should_cancel=should_cancel
        )
    except Exception as exc:
        # Best-effort: record the failure on the item so it surfaces in the
        # UI, then continue with the remaining items.
        with session_scope() as db:
            source = db.scalar(select(GlobalOverlaySource).where(GlobalOverlaySource.source_key == source_key))
            if source is not None:
                item = db.scalar(
                    select(GlobalOverlayItem).where(
                        GlobalOverlayItem.source_id == source.id,
                        GlobalOverlayItem.item_key == manifest_item.item_key,
                    )
                )
                if item is not None:
                    item.last_error = str(exc)
        on_stderr(f"{source_key}: {manifest_item.item_key}: {exc}")
        return

    now = datetime.now(UTC)
    with session_scope() as db:
        source = db.scalar(select(GlobalOverlaySource).where(GlobalOverlaySource.source_key == source_key))
        if source is None:
            raise ValueError(f"global overlay source {source_key!r} not found")
        item = db.scalar(
            select(GlobalOverlayItem).where(
                GlobalOverlayItem.source_id == source.id,
                GlobalOverlayItem.item_key == manifest_item.item_key,
            )
        )
        if item is None:
            raise ValueError(f"global overlay item {manifest_item.item_key!r} not found")
        # Replace this item's file rows wholesale. Uses a 2.0-style delete()
        # statement for consistency with the select() usage throughout this
        # module (previously the legacy Query.delete() API).
        db.execute(delete(GlobalOverlayItemFile).where(GlobalOverlayItemFile.item_id == item.id))
        for vpk_name, cache_path, size, md5 in files:
            db.add(GlobalOverlayItemFile(item_id=item.id, vpk_name=vpk_name, cache_path=cache_path, size=size, md5=md5))
        item.etag = etag
        item.last_modified = last_modified
        item.content_length = content_length
        item.last_downloaded_at = now
        item.last_error = ""
        item.updated_at = now
    on_stdout(f"{source_key}: refreshed {manifest_item.item_key} ({len(files)} vpk file(s))")
|
|
|
|
|
|
def download_and_extract_item(source_key: str, item: GlobalMapManifestItem, *, should_cancel) -> tuple[list[tuple[str, str, int, str]], str, str, int | None]:
    """Download an item's archive, extract its vpks, verify, and cache them.

    Returns ``(files, etag, last_modified, content_length)`` where ``files``
    is a list of ``(vpk_name, cache_path, size, md5)`` tuples, one per vpk
    published into the per-source cache.

    Raises:
        ValueError: unsupported archive extension, size/md5 mismatch, or no
            expected ``.vpk`` found in the archive.
    """
    archives = archive_dir(source_key)
    vpks = vpk_dir(source_key)
    archives.mkdir(parents=True, exist_ok=True)
    vpks.mkdir(parents=True, exist_ok=True)

    archive_name = item.download_url.rsplit("/", 1)[-1]
    archive_path = archives / archive_name
    etag, last_modified, content_length = download_archive(item.download_url, archive_path, should_cancel=should_cancel)

    with tempfile.TemporaryDirectory(prefix="left4me-global-map-") as tmp:
        tmp_dir = Path(tmp)
        lowered = archive_name.lower()
        if lowered.endswith(".7z"):
            extracted = safe_extract_7z_vpks(archive_path, tmp_dir)
        elif lowered.endswith(".zip"):
            extracted = safe_extract_zip_vpks(archive_path, tmp_dir)
        else:
            raise ValueError(f"unsupported archive extension for {archive_name}")

        # Phase 1: validate every candidate vpk BEFORE touching the shared
        # cache, so a mismatch on a later file cannot leave the cache
        # partially overwritten (previously each file was moved before the
        # next one was checked).
        validated: list[tuple[Path, int, str]] = []
        for path in extracted:
            if item.expected_vpk_name and path.name != item.expected_vpk_name:
                continue
            size = path.stat().st_size
            md5 = extracted_vpk_md5(path)
            if item.expected_size is not None and size != item.expected_size:
                raise ValueError(f"{path.name} size mismatch: expected {item.expected_size}, got {size}")
            if item.expected_md5 and md5 != item.expected_md5:
                raise ValueError(f"{path.name} md5 mismatch: expected {item.expected_md5}, got {md5}")
            validated.append((path, size, md5))
        if not validated:
            raise ValueError(f"no expected .vpk files extracted from {archive_name}")

        # Phase 2: publish into the per-source cache. shutil.move (rather
        # than Path.rename) handles EXDEV when /tmp and the cache live on
        # different filesystems.
        results: list[tuple[str, str, int, str]] = []
        for path, size, md5 in validated:
            shutil.move(str(path), str(vpks / path.name))
            results.append((path.name, f"{source_key}/vpks/{path.name}", size, md5))
    return results, etag, last_modified, content_length
|
|
|
|
|
|
def build_global_overlay(overlay: Overlay, *, on_stdout, on_stderr, should_cancel) -> None:
    """Dispatch (re)building of *overlay* to the builder registered for its type.

    Raises ValueError when no builder is registered for the overlay's type.
    """
    # Imported lazily to avoid a circular import at module load time.
    from l4d2web.services.overlay_builders import BUILDERS

    if overlay.type not in BUILDERS:
        raise ValueError(f"no builder registered for overlay type {overlay.type!r}")
    BUILDERS[overlay.type].build(overlay, on_stdout=on_stdout, on_stderr=on_stderr, should_cancel=should_cancel)