"""Schema-constraint tests for global-overlay models and job ownership.

Covers: system jobs with no owning user, uniqueness of
GlobalOverlaySource.source_key, per-parent uniqueness of
GlobalOverlayItem.item_key and GlobalOverlayItemFile.vpk_name,
and ordinary user-owned jobs.
"""

from sqlalchemy.exc import IntegrityError

from l4d2web.db import init_db, session_scope
from l4d2web.models import (
    GlobalOverlayItem,
    GlobalOverlayItemFile,
    GlobalOverlaySource,
    Job,
    Overlay,
    User,
)


def _fresh_db(tmp_path, monkeypatch, db_name):
    """Point DATABASE_URL at a throwaway sqlite file and build the schema."""
    monkeypatch.setenv("DATABASE_URL", f"sqlite:///{tmp_path / db_name}")
    init_db()


def test_system_job_allows_null_user_id(tmp_path, monkeypatch):
    """A system-originated job must persist without an owning user."""
    _fresh_db(tmp_path, monkeypatch, "models.db")
    with session_scope() as session:
        system_job = Job(
            user_id=None,
            server_id=None,
            overlay_id=None,
            operation="refresh_global_overlays",
        )
        session.add(system_job)
        session.flush()
        assert system_job.id is not None
        assert system_job.user_id is None


def test_global_overlay_source_uniqueness(tmp_path, monkeypatch):
    """A second source reusing an existing source_key must be rejected."""
    _fresh_db(tmp_path, monkeypatch, "sources.db")
    with session_scope() as session:
        first_overlay = Overlay(
            name="l4d2center-maps", path="1", type="l4d2center_maps", user_id=None
        )
        session.add(first_overlay)
        session.flush()
        session.add(
            GlobalOverlaySource(
                overlay_id=first_overlay.id,
                source_key="l4d2center-maps",
                source_type="l4d2center_csv",
                source_url="https://l4d2center.com/maps/servers/index.csv",
            )
        )

    # The unique constraint fires when the duplicate commits at scope exit.
    rejected = False
    try:
        with session_scope() as session:
            second_overlay = Overlay(
                name="cedapug-maps", path="2", type="cedapug_maps", user_id=None
            )
            session.add(second_overlay)
            session.flush()
            session.add(
                GlobalOverlaySource(
                    overlay_id=second_overlay.id,
                    source_key="l4d2center-maps",
                    source_type="l4d2center_csv",
                    source_url="https://example.invalid/duplicate",
                )
            )
    except IntegrityError:
        rejected = True
    if not rejected:
        raise AssertionError("duplicate source_key must fail")


def test_global_overlay_items_and_files_are_unique_per_parent(tmp_path, monkeypatch):
    """item_key is unique per source; vpk_name is unique per item."""
    _fresh_db(tmp_path, monkeypatch, "items.db")
    with session_scope() as session:
        overlay = Overlay(
            name="cedapug-maps", path="1", type="cedapug_maps", user_id=None
        )
        session.add(overlay)
        session.flush()
        source = GlobalOverlaySource(
            overlay_id=overlay.id,
            source_key="cedapug-maps",
            source_type="cedapug_custom_page",
            source_url="https://cedapug.com/custom",
        )
        session.add(source)
        session.flush()
        item = GlobalOverlayItem(
            source_id=source.id,
            item_key="FatalFreight.zip",
            display_name="Fatal Freight",
            download_url="https://cedapug.com/maps/FatalFreight.zip",
            expected_vpk_name="FatalFreight.vpk",
        )
        session.add(item)
        session.flush()
        session.add(
            GlobalOverlayItemFile(
                item_id=item.id,
                vpk_name="FatalFreight.vpk",
                cache_path="cedapug-maps/vpks/FatalFreight.vpk",
                size=123,
                md5="",
            )
        )
        # Captured before the session closes; needed for the file-dup check.
        item_id = item.id

    # Re-adding the same item_key under the same source must be rejected.
    duplicate_item_rejected = False
    try:
        with session_scope() as session:
            source = (
                session.query(GlobalOverlaySource)
                .filter_by(source_key="cedapug-maps")
                .one()
            )
            session.add(
                GlobalOverlayItem(
                    source_id=source.id,
                    item_key="FatalFreight.zip",
                    display_name="Fatal Freight duplicate",
                    download_url="https://cedapug.com/maps/FatalFreight.zip",
                    expected_vpk_name="FatalFreight.vpk",
                )
            )
    except IntegrityError:
        duplicate_item_rejected = True
    if not duplicate_item_rejected:
        raise AssertionError("duplicate item_key per source must fail")

    # Likewise a second file row with the same vpk_name under one item.
    duplicate_file_rejected = False
    try:
        with session_scope() as session:
            session.add(
                GlobalOverlayItemFile(
                    item_id=item_id,
                    vpk_name="FatalFreight.vpk",
                    cache_path="cedapug-maps/vpks/FatalFreight-copy.vpk",
                    size=456,
                    md5="",
                )
            )
    except IntegrityError:
        duplicate_file_rejected = True
    if not duplicate_file_rejected:
        raise AssertionError("duplicate vpk_name per item must fail")


def test_normal_user_rows_still_require_real_users(tmp_path, monkeypatch):
    """A user-owned job persists normally when tied to a real User row."""
    _fresh_db(tmp_path, monkeypatch, "users.db")
    with session_scope() as session:
        user = User(username="alice", password_digest="digest", admin=False)
        session.add(user)
        session.flush()
        user_job = Job(
            user_id=user.id, server_id=None, operation="install", state="queued"
        )
        session.add(user_job)
        session.flush()
        assert user_job.id is not None
        assert user_job.user_id == user.id