left4me/l4d2web/tests/test_global_overlay_models.py
mwiegand 92d6ebbe82
feat(l4d2-web): managed global map overlays with daily refresh
Adds two managed system overlays (l4d2center-maps, cedapug-maps) that
fetch curated map archives from upstream sources and reconcile addons
symlinks for non-Steam maps. A daily systemd timer enqueues a coalesced
refresh_global_overlays worker job; downloads, extraction, and rebuilds
run in the existing job worker and surface in the job log UI.

Schema: GlobalOverlaySource / GlobalOverlayItem / GlobalOverlayItemFile
plus nullable Job.user_id so system jobs render as "system" in the UI.
The new builder reconciles symlinks against the per-source vpk cache
and leaves foreign symlinks untouched. Initialize-time guard refuses
to mount a partial overlay if any expected vpk is missing from cache.

Refresh service uses shutil.move to handle EXDEV when /tmp and the
cache live on different filesystems.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-08 08:05:14 +02:00

154 lines
4.7 KiB
Python

from sqlalchemy.exc import IntegrityError
from l4d2web.db import init_db, session_scope
from l4d2web.models import (
GlobalOverlayItem,
GlobalOverlayItemFile,
GlobalOverlaySource,
Job,
Overlay,
User,
)
def test_system_job_allows_null_user_id(tmp_path, monkeypatch):
    """System-enqueued jobs may be persisted with no owning user (user_id NULL)."""
    monkeypatch.setenv("DATABASE_URL", f"sqlite:///{tmp_path/'models.db'}")
    init_db()
    with session_scope() as session:
        # Build the system job with every ownership column explicitly NULL.
        system_job = Job(
            operation="refresh_global_overlays",
            user_id=None,
            server_id=None,
            overlay_id=None,
        )
        session.add(system_job)
        session.flush()
        # Flush assigns a primary key; user_id stays NULL rather than erroring.
        assert system_job.id is not None
        assert system_job.user_id is None
def test_global_overlay_source_uniqueness(tmp_path, monkeypatch):
    """A second GlobalOverlaySource reusing an existing source_key must be rejected."""
    monkeypatch.setenv("DATABASE_URL", f"sqlite:///{tmp_path/'sources.db'}")
    init_db()

    # Seed one source under the key "l4d2center-maps"; this commit must succeed.
    with session_scope() as session:
        first_overlay = Overlay(
            name="l4d2center-maps", path="1", type="l4d2center_maps", user_id=None
        )
        session.add(first_overlay)
        session.flush()
        first_source = GlobalOverlaySource(
            overlay_id=first_overlay.id,
            source_key="l4d2center-maps",
            source_type="l4d2center_csv",
            source_url="https://l4d2center.com/maps/servers/index.csv",
        )
        session.add(first_source)

    # A second source with the same source_key (different overlay/url) must
    # trip the unique constraint when its session commits.
    try:
        with session_scope() as session:
            second_overlay = Overlay(
                name="cedapug-maps", path="2", type="cedapug_maps", user_id=None
            )
            session.add(second_overlay)
            session.flush()
            duplicate_source = GlobalOverlaySource(
                overlay_id=second_overlay.id,
                source_key="l4d2center-maps",
                source_type="l4d2center_csv",
                source_url="https://example.invalid/duplicate",
            )
            session.add(duplicate_source)
    except IntegrityError:
        pass
    else:
        raise AssertionError("duplicate source_key must fail")
def test_global_overlay_items_and_files_are_unique_per_parent(tmp_path, monkeypatch):
    """item_key is unique per source and vpk_name is unique per item."""
    monkeypatch.setenv("DATABASE_URL", f"sqlite:///{tmp_path/'items.db'}")
    init_db()

    # Seed one overlay -> source -> item -> file chain; this commit must succeed.
    with session_scope() as session:
        parent_overlay = Overlay(
            name="cedapug-maps", path="1", type="cedapug_maps", user_id=None
        )
        session.add(parent_overlay)
        session.flush()
        parent_source = GlobalOverlaySource(
            overlay_id=parent_overlay.id,
            source_key="cedapug-maps",
            source_type="cedapug_custom_page",
            source_url="https://cedapug.com/custom",
        )
        session.add(parent_source)
        session.flush()
        seeded_item = GlobalOverlayItem(
            source_id=parent_source.id,
            item_key="FatalFreight.zip",
            display_name="Fatal Freight",
            download_url="https://cedapug.com/maps/FatalFreight.zip",
            expected_vpk_name="FatalFreight.vpk",
        )
        session.add(seeded_item)
        session.flush()
        seeded_file = GlobalOverlayItemFile(
            item_id=seeded_item.id,
            vpk_name="FatalFreight.vpk",
            cache_path="cedapug-maps/vpks/FatalFreight.vpk",
            size=123,
            md5="",
        )
        session.add(seeded_file)
        # Capture the id before the session closes so later scopes can use it.
        seeded_item_id = seeded_item.id

    # Re-adding the same item_key under the same source must hit the
    # per-source unique constraint on commit.
    try:
        with session_scope() as session:
            same_source = (
                session.query(GlobalOverlaySource)
                .filter_by(source_key="cedapug-maps")
                .one()
            )
            duplicate_item = GlobalOverlayItem(
                source_id=same_source.id,
                item_key="FatalFreight.zip",
                display_name="Fatal Freight duplicate",
                download_url="https://cedapug.com/maps/FatalFreight.zip",
                expected_vpk_name="FatalFreight.vpk",
            )
            session.add(duplicate_item)
    except IntegrityError:
        pass
    else:
        raise AssertionError("duplicate item_key per source must fail")

    # Likewise a second file row with the same vpk_name under the same item
    # must hit the per-item unique constraint on commit.
    try:
        with session_scope() as session:
            duplicate_file = GlobalOverlayItemFile(
                item_id=seeded_item_id,
                vpk_name="FatalFreight.vpk",
                cache_path="cedapug-maps/vpks/FatalFreight-copy.vpk",
                size=456,
                md5="",
            )
            session.add(duplicate_file)
    except IntegrityError:
        pass
    else:
        raise AssertionError("duplicate vpk_name per item must fail")
def test_normal_user_rows_still_require_real_users(tmp_path, monkeypatch):
    """User-owned jobs keep pointing at a real User row after the schema change."""
    monkeypatch.setenv("DATABASE_URL", f"sqlite:///{tmp_path/'users.db'}")
    init_db()
    with session_scope() as session:
        alice = User(username="alice", password_digest="digest", admin=False)
        session.add(alice)
        session.flush()
        owned_job = Job(
            user_id=alice.id, server_id=None, operation="install", state="queued"
        )
        session.add(owned_job)
        session.flush()
        # The job persists with its owner's id intact.
        assert owned_job.id is not None
        assert owned_job.user_id == alice.id