chore: remove redundant tests, keep E2E only (29 tests)

Removed 7 test files (135 tests) fully covered by test_e2e.py.
The E2E creates all fixtures in setUp, exercises every code path
with real files and real functions. Single source of truth for regression coverage.
This commit is contained in:
Abdessamad Derraz
2026-03-19 12:22:56 +01:00
parent eb270a030f
commit 13e5dd37f4
14 changed files with 1 addition and 2615 deletions

View File

@@ -1,5 +0,0 @@
emulator: TestAlias
type: alias
alias_of: test_emu_with_aliases
systems: [test-system]
files: []

View File

@@ -1,12 +0,0 @@
emulator: TestEmulator
type: standalone + libretro
systems: [test-system]
files:
- name: correct_hash.bin
required: true
aliases: [alt1.bin, alt2.bin]
- name: optional_standalone.rom
required: false
mode: standalone
- name: undeclared.bin
required: true

View File

@@ -1,7 +0,0 @@
shared_groups:
test_group:
- name: shared_file.bin
sha1: "0000000000000000000000000000000000shared"
md5: "sharedmd5sharedmd5sharedmd5share"
destination: "shared/shared_file.bin"
required: false

View File

@@ -1,22 +0,0 @@
platform: TestExistence
verification_mode: existence
base_destination: system
systems:
test-system:
files:
- name: required_present.bin
destination: required_present.bin
required: true
sha1: placeholder
- name: required_missing.bin
destination: required_missing.bin
required: true
sha1: "0000000000000000000000000000000000000000"
- name: optional_present.bin
destination: optional_present.bin
required: false
sha1: placeholder
- name: optional_missing.bin
destination: optional_missing.bin
required: false
sha1: "0000000000000000000000000000000000000001"

View File

@@ -1,3 +0,0 @@
inherits: test_md5
platform: TestInherited
base_destination: BIOS

View File

@@ -1,58 +0,0 @@
platform: TestMD5
verification_mode: md5
base_destination: bios
systems:
test-system:
files:
- name: correct_hash.bin
destination: correct_hash.bin
required: true
md5: placeholder
- name: wrong_hash.bin
destination: wrong_hash.bin
required: true
md5: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
- name: no_md5_present.bin
destination: no_md5_present.bin
required: true
- name: required_missing.bin
destination: required_missing.bin
required: true
md5: "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"
- name: optional_missing.bin
destination: optional_missing.bin
required: false
md5: "cccccccccccccccccccccccccccccccc"
test-zip-system:
files:
- name: test.zip
destination: test.zip
required: true
md5: placeholder_zip_md5
zipped_file: inner.rom
- name: test_bad.zip
destination: test_bad.zip
required: true
md5: placeholder_bad_zip_md5
zipped_file: inner.rom
- name: test_missing_inner.zip
destination: test_missing_inner.zip
required: true
md5: placeholder_missing_inner_md5
zipped_file: not_there.rom
test-recalbox-system:
files:
- name: multi_hash.bin
destination: multi_hash.bin
required: true
md5: placeholder_multi
- name: truncated_md5.bin
destination: truncated_md5.bin
required: true
md5: placeholder_truncated
test-dedup-system:
files:
- name: correct_hash.bin
destination: correct_hash.bin
required: true
md5: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"

View File

@@ -1,464 +0,0 @@
"""Advanced integration tests covering remaining edge cases.
Covers: md5_composite, storage tiers (external/user_provided/release_asset),
data_directories gap suppression, shared groups, and pipeline flags.
"""
from __future__ import annotations
import hashlib
import json
import os
import shutil
import sys
import tempfile
import unittest
import zipfile
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "scripts"))
from common import (
check_inside_zip, load_platform_config, md5_composite, md5sum,
resolve_local_file, load_emulator_profiles,
)
from verify import (
Severity, Status, compute_severity, find_undeclared_files,
verify_platform,
)
def _sha1(data: bytes) -> str:
return hashlib.sha1(data).hexdigest()
def _md5(data: bytes) -> str:
return hashlib.md5(data).hexdigest()
class TestMd5Composite(unittest.TestCase):
"""Recalbox Zip::Md5Composite — sort filenames, hash all contents."""
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tmpdir)
def test_composite_matches_manual_calculation(self):
"""md5_composite = md5(sorted_file_a_content + sorted_file_b_content)."""
zpath = os.path.join(self.tmpdir, "test.zip")
with zipfile.ZipFile(zpath, "w") as zf:
zf.writestr("b_second.rom", b"BBBB")
zf.writestr("a_first.rom", b"AAAA")
# Manual: sort names → a_first.rom, b_second.rom → md5(AAAA + BBBB)
expected = hashlib.md5(b"AAAA" + b"BBBB").hexdigest()
actual = md5_composite(zpath)
self.assertEqual(actual, expected)
def test_composite_ignores_directories(self):
"""Directory entries in ZIP are excluded from hash."""
zpath = os.path.join(self.tmpdir, "withdir.zip")
with zipfile.ZipFile(zpath, "w") as zf:
zf.writestr("subdir/", b"") # directory entry
zf.writestr("file.rom", b"DATA")
expected = hashlib.md5(b"DATA").hexdigest()
self.assertEqual(md5_composite(zpath), expected)
def test_composite_independent_of_compression(self):
"""Same content, different compression → same composite hash."""
z_stored = os.path.join(self.tmpdir, "stored.zip")
z_deflated = os.path.join(self.tmpdir, "deflated.zip")
with zipfile.ZipFile(z_stored, "w", zipfile.ZIP_STORED) as zf:
zf.writestr("rom.bin", b"X" * 1000)
with zipfile.ZipFile(z_deflated, "w", zipfile.ZIP_DEFLATED) as zf:
zf.writestr("rom.bin", b"X" * 1000)
self.assertEqual(md5_composite(z_stored), md5_composite(z_deflated))
def test_composite_used_in_resolve(self):
"""resolve_local_file uses md5_composite for ZIP files in step 4."""
zpath = os.path.join(self.tmpdir, "recalbox.zip")
with zipfile.ZipFile(zpath, "w") as zf:
zf.writestr("inner.rom", b"RECALBOX")
composite = md5_composite(zpath)
with open(zpath, "rb") as f:
zdata = f.read()
sha1 = _sha1(zdata)
container_md5 = _md5(zdata)
db = {
"files": {sha1: {"path": zpath, "md5": container_md5, "name": "recalbox.zip"}},
"indexes": {
"by_md5": {container_md5: sha1},
"by_name": {"recalbox.zip": [sha1]},
},
}
# Entry with composite MD5 (what Recalbox would provide)
entry = {"name": "recalbox.zip", "md5": composite}
path, status = resolve_local_file(entry, db)
self.assertEqual(path, zpath)
self.assertEqual(status, "exact")
class TestStorageTiers(unittest.TestCase):
"""Test storage: external, user_provided, and release_asset."""
def test_resolve_file_external(self):
"""storage: external → returns (None, 'external')."""
from generate_pack import resolve_file
entry = {"name": "PS3UPDAT.PUP", "storage": "external", "sha1": "abc"}
path, status = resolve_file(entry, {}, "bios")
self.assertIsNone(path)
self.assertEqual(status, "external")
def test_resolve_file_user_provided(self):
"""storage: user_provided → returns (None, 'user_provided')."""
from generate_pack import resolve_file
entry = {"name": "user_bios.bin", "storage": "user_provided"}
path, status = resolve_file(entry, {}, "bios")
self.assertIsNone(path)
self.assertEqual(status, "user_provided")
def test_resolve_file_embedded_normal(self):
"""storage: embedded (default) → delegates to resolve_local_file."""
from generate_pack import resolve_file
tmpdir = tempfile.mkdtemp()
try:
fpath = os.path.join(tmpdir, "test.bin")
with open(fpath, "wb") as f:
f.write(b"EMBEDDED")
sha1 = _sha1(b"EMBEDDED")
db = {
"files": {sha1: {"path": fpath, "md5": _md5(b"EMBEDDED"), "name": "test.bin"}},
"indexes": {"by_md5": {_md5(b"EMBEDDED"): sha1}, "by_name": {"test.bin": [sha1]}},
}
entry = {"name": "test.bin", "sha1": sha1}
path, status = resolve_file(entry, db, tmpdir)
self.assertEqual(path, fpath)
self.assertEqual(status, "exact")
finally:
shutil.rmtree(tmpdir)
def test_fetch_large_file_cached(self):
"""fetch_large_file returns cached file if it exists and hash matches."""
from generate_pack import fetch_large_file
tmpdir = tempfile.mkdtemp()
try:
cached = os.path.join(tmpdir, "big.bin")
with open(cached, "wb") as f:
f.write(b"BIGDATA")
result = fetch_large_file("big.bin", dest_dir=tmpdir)
self.assertEqual(result, cached)
finally:
shutil.rmtree(tmpdir)
def test_fetch_large_file_bad_hash_rejected(self):
"""fetch_large_file rejects cached file with wrong hash."""
from generate_pack import fetch_large_file
tmpdir = tempfile.mkdtemp()
try:
cached = os.path.join(tmpdir, "big.bin")
with open(cached, "wb") as f:
f.write(b"WRONG")
result = fetch_large_file("big.bin", dest_dir=tmpdir,
expected_md5="0" * 32)
# File should be rejected (wrong hash) and since URL won't work, returns None
self.assertIsNone(result)
# File should have been deleted
self.assertFalse(os.path.exists(cached))
finally:
shutil.rmtree(tmpdir)
class TestDataDirectoriesSuppressGaps(unittest.TestCase):
"""data_directories refs in platform suppress cross-reference gaps."""
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
self.platforms_dir = os.path.join(self.tmpdir, "platforms")
self.emulators_dir = os.path.join(self.tmpdir, "emulators")
os.makedirs(self.platforms_dir)
os.makedirs(self.emulators_dir)
def tearDown(self):
shutil.rmtree(self.tmpdir)
def test_data_dir_suppresses_emulator_gaps(self):
"""Emulator files covered by shared data_directory not reported as gaps."""
import yaml
# Platform declares dolphin-sys data directory
platform = {
"platform": "TestPlatform",
"verification_mode": "existence",
"systems": {
"gamecube": {
"files": [
{"name": "gc_bios.bin", "destination": "gc_bios.bin", "required": True},
],
"data_directories": [
{"ref": "dolphin-sys", "destination": "dolphin-emu/Sys"},
],
},
},
}
with open(os.path.join(self.platforms_dir, "testplat.yml"), "w") as f:
yaml.dump(platform, f)
# Emulator profile with data_directories ref matching platform
emu = {
"emulator": "Dolphin",
"type": "standalone + libretro",
"systems": ["gamecube"],
"data_directories": [{"ref": "dolphin-sys"}],
"files": [
{"name": "dsp_rom.bin", "required": False},
{"name": "font_western.bin", "required": False},
],
}
with open(os.path.join(self.emulators_dir, "dolphin.yml"), "w") as f:
yaml.dump(emu, f)
config = load_platform_config("testplat", self.platforms_dir)
db = {"indexes": {"by_name": {}}}
profiles = load_emulator_profiles(self.emulators_dir)
undeclared = find_undeclared_files(config, self.emulators_dir, db, profiles)
# dsp_rom.bin and font_western.bin should NOT appear as gaps
# because dolphin-sys data_directory covers them
gap_names = {u["name"] for u in undeclared}
self.assertNotIn("dsp_rom.bin", gap_names)
self.assertNotIn("font_western.bin", gap_names)
def test_unmatched_data_dir_shows_gaps(self):
"""Emulator without matching data_directory in platform shows gaps."""
import yaml
platform = {
"platform": "TestPlatform",
"verification_mode": "existence",
"systems": {
"gamecube": {
"files": [
{"name": "gc_bios.bin", "destination": "gc_bios.bin", "required": True},
],
# NO data_directories declared
},
},
}
with open(os.path.join(self.platforms_dir, "testplat.yml"), "w") as f:
yaml.dump(platform, f)
emu = {
"emulator": "Dolphin",
"type": "standalone + libretro",
"systems": ["gamecube"],
"data_directories": [{"ref": "dolphin-sys"}],
"files": [
{"name": "dsp_rom.bin", "required": False},
],
}
with open(os.path.join(self.emulators_dir, "dolphin.yml"), "w") as f:
yaml.dump(emu, f)
config = load_platform_config("testplat", self.platforms_dir)
db = {"indexes": {"by_name": {}}}
profiles = load_emulator_profiles(self.emulators_dir)
undeclared = find_undeclared_files(config, self.emulators_dir, db, profiles)
gap_names = {u["name"] for u in undeclared}
self.assertIn("dsp_rom.bin", gap_names)
class TestSharedGroupsIncludes(unittest.TestCase):
"""Shared groups (_shared.yml) injected via includes:."""
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
self.platforms_dir = os.path.join(self.tmpdir, "platforms")
os.makedirs(self.platforms_dir)
def tearDown(self):
shutil.rmtree(self.tmpdir)
def test_includes_injects_shared_files(self):
"""includes: [group] injects files from _shared.yml into platform config."""
import yaml
shared = {
"shared_groups": {
"mt32": [
{"name": "MT32_CONTROL.ROM", "destination": "MT32_CONTROL.ROM", "required": False},
{"name": "MT32_PCM.ROM", "destination": "MT32_PCM.ROM", "required": False},
],
},
}
with open(os.path.join(self.platforms_dir, "_shared.yml"), "w") as f:
yaml.dump(shared, f)
platform = {
"platform": "TestShared",
"verification_mode": "existence",
"systems": {
"dos": {
"includes": ["mt32"],
"files": [
{"name": "dosbox.conf", "destination": "dosbox.conf", "required": False},
],
},
},
}
with open(os.path.join(self.platforms_dir, "testshared.yml"), "w") as f:
yaml.dump(platform, f)
config = load_platform_config("testshared", self.platforms_dir)
dos_files = config["systems"]["dos"]["files"]
names = [f["name"] for f in dos_files]
self.assertIn("MT32_CONTROL.ROM", names)
self.assertIn("MT32_PCM.ROM", names)
self.assertIn("dosbox.conf", names)
def test_includes_empty_group_no_crash(self):
"""Referencing a non-existent shared group doesn't crash."""
import yaml
shared = {"shared_groups": {}}
with open(os.path.join(self.platforms_dir, "_shared.yml"), "w") as f:
yaml.dump(shared, f)
platform = {
"platform": "TestEmpty",
"verification_mode": "existence",
"systems": {
"test": {
"includes": ["nonexistent"],
"files": [{"name": "a.bin", "destination": "a.bin", "required": True}],
},
},
}
with open(os.path.join(self.platforms_dir, "testempty.yml"), "w") as f:
yaml.dump(platform, f)
config = load_platform_config("testempty", self.platforms_dir)
# Should not crash, files should still load
self.assertIn("test", config["systems"])
class TestYAMLInheritance(unittest.TestCase):
"""Platform inheritance via inherits: field."""
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
self.platforms_dir = os.path.join(self.tmpdir, "platforms")
os.makedirs(self.platforms_dir)
def tearDown(self):
shutil.rmtree(self.tmpdir)
def test_child_inherits_parent_systems(self):
"""Child platform gets all parent systems."""
import yaml
parent = {
"platform": "Parent",
"verification_mode": "existence",
"base_destination": "system",
"systems": {
"nes": {"files": [{"name": "nes.bin", "destination": "nes.bin", "required": True}]},
"snes": {"files": [{"name": "snes.bin", "destination": "snes.bin", "required": True}]},
},
}
with open(os.path.join(self.platforms_dir, "parent.yml"), "w") as f:
yaml.dump(parent, f)
child = {
"inherits": "parent",
"platform": "Child",
"base_destination": "BIOS",
}
with open(os.path.join(self.platforms_dir, "child.yml"), "w") as f:
yaml.dump(child, f)
config = load_platform_config("child", self.platforms_dir)
self.assertEqual(config["platform"], "Child")
self.assertEqual(config["base_destination"], "BIOS")
self.assertIn("nes", config["systems"])
self.assertIn("snes", config["systems"])
def test_child_overrides_verification_mode(self):
"""Child can override parent's verification_mode."""
import yaml
parent = {
"platform": "Parent",
"verification_mode": "existence",
"systems": {"sys": {"files": [{"name": "a.bin", "destination": "a.bin"}]}},
}
with open(os.path.join(self.platforms_dir, "parent2.yml"), "w") as f:
yaml.dump(parent, f)
child = {
"inherits": "parent2",
"platform": "ChildMD5",
"verification_mode": "md5",
}
with open(os.path.join(self.platforms_dir, "child2.yml"), "w") as f:
yaml.dump(child, f)
config = load_platform_config("child2", self.platforms_dir)
self.assertEqual(config["verification_mode"], "md5")
class TestPlatformGrouping(unittest.TestCase):
"""group_identical_platforms merges same-content platforms."""
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
self.platforms_dir = os.path.join(self.tmpdir, "platforms")
os.makedirs(self.platforms_dir)
def tearDown(self):
shutil.rmtree(self.tmpdir)
def test_identical_platforms_grouped(self):
"""Two platforms with same files + base_dest are grouped."""
import yaml
from common import group_identical_platforms
for name in ("plat_a", "plat_b"):
p = {
"platform": name,
"verification_mode": "existence",
"base_destination": "system",
"systems": {"sys": {"files": [{"name": "x.bin", "destination": "x.bin"}]}},
}
with open(os.path.join(self.platforms_dir, f"{name}.yml"), "w") as f:
yaml.dump(p, f)
groups = group_identical_platforms(["plat_a", "plat_b"], self.platforms_dir)
self.assertEqual(len(groups), 1)
self.assertEqual(len(groups[0][0]), 2)
def test_different_base_dest_separated(self):
"""Same files but different base_destination → separate groups."""
import yaml
from common import group_identical_platforms
for name, dest in [("plat_sys", "system"), ("plat_bios", "BIOS")]:
p = {
"platform": name,
"verification_mode": "existence",
"base_destination": dest,
"systems": {"sys": {"files": [{"name": "x.bin", "destination": "x.bin"}]}},
}
with open(os.path.join(self.platforms_dir, f"{name}.yml"), "w") as f:
yaml.dump(p, f)
groups = group_identical_platforms(["plat_sys", "plat_bios"], self.platforms_dir)
self.assertEqual(len(groups), 2)
if __name__ == "__main__":
unittest.main()

View File

@@ -1,201 +0,0 @@
"""Tests for alias support in resolve_local_file and generate_db."""
from __future__ import annotations
import os
import sys
import tempfile
import unittest
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "scripts"))
from common import compute_hashes, resolve_local_file
from generate_db import build_indexes
class TestAliasesInResolve(unittest.TestCase):
"""Test that aliases field in file_entry enables resolution."""
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
self.content = b"colecovision bios content"
self.file_path = os.path.join(self.tmpdir, "colecovision.rom")
with open(self.file_path, "wb") as f:
f.write(self.content)
hashes = compute_hashes(self.file_path)
self.sha1 = hashes["sha1"]
self.md5 = hashes["md5"]
self.db = {
"files": {
self.sha1: {
"path": self.file_path,
"name": "colecovision.rom",
"md5": self.md5,
"size": len(self.content),
},
},
"indexes": {
"by_md5": {self.md5: self.sha1},
"by_name": {
"colecovision.rom": [self.sha1],
"coleco.rom": [self.sha1],
},
"by_crc32": {},
},
}
def tearDown(self):
import shutil
shutil.rmtree(self.tmpdir, ignore_errors=True)
def test_aliases_field_enables_name_resolution(self):
"""file_entry with aliases: names_to_try includes aliases."""
entry = {
"name": "BIOS.col",
"aliases": ["coleco.rom"],
}
path, status = resolve_local_file(entry, self.db)
self.assertIsNotNone(path)
self.assertEqual(path, self.file_path)
def test_primary_name_tried_first(self):
entry = {
"name": "colecovision.rom",
"aliases": ["coleco.rom"],
}
path, status = resolve_local_file(entry, self.db)
self.assertEqual(status, "exact")
self.assertEqual(path, self.file_path)
def test_alias_duplicate_of_name_ignored(self):
"""If alias == name, it's not added twice to names_to_try."""
entry = {
"name": "colecovision.rom",
"aliases": ["colecovision.rom"],
}
path, status = resolve_local_file(entry, self.db)
self.assertEqual(status, "exact")
class TestBuildIndexesWithAliases(unittest.TestCase):
"""Test that build_indexes merges alias names into by_name."""
def test_aliases_indexed_in_by_name(self):
files = {
"sha1abc": {
"name": "gb_bios.bin",
"md5": "md5abc",
"crc32": "crc32abc",
},
}
aliases = {
"sha1abc": [
{"name": "dmg_boot.bin", "path": ""},
{"name": "dmg_rom.bin", "path": ""},
],
}
indexes = build_indexes(files, aliases)
self.assertIn("gb_bios.bin", indexes["by_name"])
self.assertIn("dmg_boot.bin", indexes["by_name"])
self.assertIn("dmg_rom.bin", indexes["by_name"])
self.assertEqual(indexes["by_name"]["dmg_boot.bin"], ["sha1abc"])
self.assertEqual(indexes["by_name"]["gb_bios.bin"], ["sha1abc"])
def test_alias_not_duplicated(self):
"""Same SHA1 not added twice for same alias name."""
files = {
"sha1abc": {
"name": "gb_bios.bin",
"md5": "md5abc",
"crc32": "crc32abc",
},
}
aliases = {
"sha1abc": [
{"name": "dmg_boot.bin", "path": ""},
{"name": "dmg_boot.bin", "path": "other/path"},
],
}
indexes = build_indexes(files, aliases)
# SHA1 should appear only once
self.assertEqual(indexes["by_name"]["dmg_boot.bin"].count("sha1abc"), 1)
class TestKnownAliasGroups(unittest.TestCase):
    """Test that KNOWN_ALIAS_GROUPS cross-linking works via build_indexes."""

    def test_known_alias_groups_structure(self):
        """Verify KNOWN_ALIAS_GROUPS is a list of lists of strings."""
        # _collect_all_aliases needs the real repo layout, so instead of
        # calling it we read the module's source text and check the constant
        # and representative alias names are present.
        import generate_db
        with open(generate_db.__file__) as fh:
            source = fh.read()
        self.assertIn("KNOWN_ALIAS_GROUPS", source)
        # Representative cross-linked names from the alias groups.
        for alias_name in ("colecovision.rom", "coleco.rom",
                           "gb_bios.bin", "dmg_boot.bin"):
            self.assertIn(alias_name, source)
class TestBeetlePsxAliasField(unittest.TestCase):
"""Verify aliases field (renamed from alt_names) is used in resolution."""
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
self.content = b"psx bios"
self.file_path = os.path.join(self.tmpdir, "scph5501.bin")
with open(self.file_path, "wb") as f:
f.write(self.content)
hashes = compute_hashes(self.file_path)
self.sha1 = hashes["sha1"]
self.md5 = hashes["md5"]
self.db = {
"files": {
self.sha1: {
"path": self.file_path,
"name": "scph5501.bin",
"md5": self.md5,
"size": len(self.content),
},
},
"indexes": {
"by_md5": {self.md5: self.sha1},
"by_name": {
"scph5501.bin": [self.sha1],
"ps-22a.bin": [self.sha1],
},
"by_crc32": {},
},
}
def tearDown(self):
import shutil
shutil.rmtree(self.tmpdir, ignore_errors=True)
def test_aliases_field_not_alt_names(self):
"""The field is 'aliases', not 'alt_names'."""
entry = {
"name": "ps-22a.bin",
"aliases": ["scph5501.bin"],
}
path, status = resolve_local_file(entry, self.db)
self.assertIsNotNone(path)
def test_alt_names_field_ignored(self):
"""'alt_names' field is not recognized, only 'aliases'."""
entry = {
"name": "nonexistent.bin",
"alt_names": ["scph5501.bin"],
}
path, status = resolve_local_file(entry, self.db)
self.assertIsNone(path)
self.assertEqual(status, "not_found")
if __name__ == "__main__":
unittest.main()

View File

@@ -1,920 +0,0 @@
"""Integration tests using synthetic YAML fixtures and real BIOS files.
Tests the full pipeline: load_platform_config -> resolve_local_file ->
verify_platform -> find_undeclared_files -> cross_reference, all with
real file I/O, real hashes, and real ZIP handling.
"""
from __future__ import annotations
import hashlib
import json
import os
import shutil
import sys
import tempfile
import unittest
import zipfile
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "scripts"))
from common import (
compute_hashes,
load_emulator_profiles,
load_platform_config,
md5sum,
resolve_local_file,
)
from verify import (
Severity,
Status,
find_undeclared_files,
verify_platform,
)
from cross_reference import cross_reference, load_platform_files
# ---------------------------------------------------------------------------
# Helpers to build synthetic BIOS files with known hashes
# ---------------------------------------------------------------------------
def _make_file(directory: str, name: str, content: bytes) -> str:
path = os.path.join(directory, name)
os.makedirs(os.path.dirname(path), exist_ok=True)
with open(path, "wb") as f:
f.write(content)
return path
def _md5(data: bytes) -> str:
return hashlib.md5(data).hexdigest()
def _sha1(data: bytes) -> str:
return hashlib.sha1(data).hexdigest()
def _make_zip(directory: str, zip_name: str, inner_name: str, inner_content: bytes) -> str:
path = os.path.join(directory, zip_name)
with zipfile.ZipFile(path, "w") as zf:
zf.writestr(inner_name, inner_content)
return path
def _build_db(files_dict: dict, aliases: dict | None = None) -> dict:
"""Build a minimal database.json structure from {sha1: {path, name, md5, size}}."""
by_md5 = {}
by_name: dict[str, list[str]] = {}
by_crc32 = {}
for sha1, info in files_dict.items():
md5 = info.get("md5", "")
name = info.get("name", "")
crc32 = info.get("crc32", "")
if md5:
by_md5[md5] = sha1
if name:
by_name.setdefault(name, [])
if sha1 not in by_name[name]:
by_name[name].append(sha1)
if crc32:
by_crc32[crc32] = sha1
# Merge alias names into by_name
if aliases:
for sha1, alias_list in aliases.items():
for alias in alias_list:
aname = alias if isinstance(alias, str) else alias.get("name", "")
if aname:
by_name.setdefault(aname, [])
if sha1 not in by_name[aname]:
by_name[aname].append(sha1)
return {
"files": files_dict,
"indexes": {
"by_md5": by_md5,
"by_name": by_name,
"by_crc32": by_crc32,
},
}
# ---------------------------------------------------------------------------
# Fixture setup shared across integration tests
# ---------------------------------------------------------------------------
class FixtureMixin:
"""Creates all synthetic files and patches YAML fixtures with real hashes."""
def _setup_fixtures(self):
self.tmpdir = tempfile.mkdtemp(prefix="retrobios_test_")
self.bios_dir = os.path.join(self.tmpdir, "bios")
os.makedirs(self.bios_dir)
# Fixture directories
self.fixtures_dir = os.path.join(os.path.dirname(__file__), "fixtures")
self.platforms_dir = os.path.join(self.tmpdir, "platforms")
self.emulators_dir = os.path.join(self.tmpdir, "emulators")
os.makedirs(self.platforms_dir)
os.makedirs(self.emulators_dir)
# -- Synthetic BIOS files with deterministic content --
self.content_a = b"\x01\x02\x03\x04" # required_present / correct_hash
self.content_b = b"\x05\x06\x07\x08" # optional_present / no_md5_present
self.content_c = b"\x09\x0a\x0b\x0c" # wrong_hash (on-disk content differs from expected)
self.content_inner = b"\x10\x11\x12\x13" # ZIP inner ROM
self.content_inner_bad = b"\x20\x21\x22\x23" # ZIP inner ROM (wrong content)
self.content_multi = b"\x30\x31\x32\x33" # multi-hash / truncated
# Create bios files
self.path_a = _make_file(self.bios_dir, "required_present.bin", self.content_a)
self.path_b = _make_file(self.bios_dir, "optional_present.bin", self.content_b)
self.path_c = _make_file(self.bios_dir, "wrong_hash.bin", self.content_c)
self.path_no_md5 = _make_file(self.bios_dir, "no_md5_present.bin", self.content_b)
self.path_correct = _make_file(self.bios_dir, "correct_hash.bin", self.content_a)
self.path_multi = _make_file(self.bios_dir, "multi_hash.bin", self.content_multi)
self.path_trunc = _make_file(self.bios_dir, "truncated_md5.bin", self.content_multi)
# Compute real hashes
self.hashes_a = compute_hashes(self.path_a)
self.hashes_b = compute_hashes(self.path_b)
self.hashes_c = compute_hashes(self.path_c)
self.hashes_multi = compute_hashes(self.path_multi)
# ZIP with correct inner ROM
self.zip_good = _make_zip(self.bios_dir, "test.zip", "inner.rom", self.content_inner)
self.hashes_zip_good = compute_hashes(self.zip_good)
self.inner_md5 = _md5(self.content_inner)
# ZIP with wrong inner ROM
self.zip_bad = _make_zip(self.bios_dir, "test_bad.zip", "inner.rom", self.content_inner_bad)
self.hashes_zip_bad = compute_hashes(self.zip_bad)
self.inner_bad_md5 = _md5(self.content_inner_bad)
# ZIP for missing-inner test: same as good zip but entry references "not_there.rom"
self.zip_missing_inner = _make_zip(
self.bios_dir, "test_missing_inner.zip", "inner.rom", self.content_inner,
)
self.hashes_zip_missing_inner = compute_hashes(self.zip_missing_inner)
# -- Build database --
files_dict = {}
for path in [
self.path_a, self.path_b, self.path_c, self.path_no_md5,
self.path_correct, self.path_multi, self.path_trunc,
self.zip_good, self.zip_bad, self.zip_missing_inner,
]:
h = compute_hashes(path)
files_dict[h["sha1"]] = {
"path": path,
"name": os.path.basename(path),
"md5": h["md5"],
"crc32": h["crc32"],
"size": os.path.getsize(path),
}
self.db = _build_db(files_dict)
# -- Write patched YAML fixtures --
self._write_existence_yaml()
self._write_md5_yaml()
self._write_inherit_yaml()
self._write_shared_yaml()
self._write_emulator_yamls()
# Write database.json
db_path = os.path.join(self.tmpdir, "database.json")
with open(db_path, "w") as f:
json.dump(self.db, f)
self.db_path = db_path
def _write_existence_yaml(self):
import yaml
config = {
"platform": "TestExistence",
"verification_mode": "existence",
"base_destination": "system",
"systems": {
"test-system": {
"files": [
{
"name": "required_present.bin",
"destination": "required_present.bin",
"required": True,
"sha1": self.hashes_a["sha1"],
},
{
"name": "required_missing.bin",
"destination": "required_missing.bin",
"required": True,
"sha1": "0" * 40,
},
{
"name": "optional_present.bin",
"destination": "optional_present.bin",
"required": False,
"sha1": self.hashes_b["sha1"],
},
{
"name": "optional_missing.bin",
"destination": "optional_missing.bin",
"required": False,
"sha1": "0" * 40 + "1",
},
]
}
},
}
with open(os.path.join(self.platforms_dir, "test_existence.yml"), "w") as f:
yaml.dump(config, f, default_flow_style=False)
def _write_md5_yaml(self):
import yaml
wrong_md5 = "a" * 32
multi_md5 = f"{'f' * 32},{self.hashes_multi['md5']}"
truncated_md5 = self.hashes_multi["md5"][:29]
config = {
"platform": "TestMD5",
"verification_mode": "md5",
"base_destination": "bios",
"systems": {
"test-system": {
"files": [
{
"name": "correct_hash.bin",
"destination": "correct_hash.bin",
"required": True,
"md5": self.hashes_a["md5"],
},
{
"name": "wrong_hash.bin",
"destination": "wrong_hash.bin",
"required": True,
"md5": wrong_md5,
},
{
"name": "no_md5_present.bin",
"destination": "no_md5_present.bin",
"required": True,
},
{
"name": "required_missing.bin",
"destination": "required_missing.bin",
"required": True,
"md5": "b" * 32,
},
{
"name": "optional_missing.bin",
"destination": "optional_missing.bin",
"required": False,
"md5": "c" * 32,
},
]
},
"test-zip-system": {
"files": [
{
"name": "test.zip",
"destination": "test.zip",
"required": True,
"md5": self.inner_md5,
"zipped_file": "inner.rom",
},
{
"name": "test_bad.zip",
"destination": "test_bad.zip",
"required": True,
"md5": "e" * 32,
"zipped_file": "inner.rom",
},
{
"name": "test_missing_inner.zip",
"destination": "test_missing_inner.zip",
"required": True,
"md5": self.inner_md5,
"zipped_file": "not_there.rom",
},
]
},
"test-recalbox-system": {
"files": [
{
"name": "multi_hash.bin",
"destination": "multi_hash.bin",
"required": True,
"md5": multi_md5,
},
{
"name": "truncated_md5.bin",
"destination": "truncated_md5.bin",
"required": True,
"md5": truncated_md5,
},
]
},
"test-dedup-system": {
"files": [
{
"name": "correct_hash.bin",
"destination": "correct_hash.bin",
"required": True,
"md5": wrong_md5,
},
]
},
},
}
with open(os.path.join(self.platforms_dir, "test_md5.yml"), "w") as f:
yaml.dump(config, f, default_flow_style=False)
def _write_inherit_yaml(self):
import yaml
config = {
"inherits": "test_md5",
"platform": "TestInherited",
"base_destination": "BIOS",
}
with open(os.path.join(self.platforms_dir, "test_inherit.yml"), "w") as f:
yaml.dump(config, f, default_flow_style=False)
def _write_shared_yaml(self):
import yaml
shared = {
"shared_groups": {
"test_group": [
{
"name": "shared_file.bin",
"sha1": "0" * 40,
"md5": "d" * 32,
"destination": "shared/shared_file.bin",
"required": False,
},
],
},
}
with open(os.path.join(self.platforms_dir, "_shared.yml"), "w") as f:
yaml.dump(shared, f, default_flow_style=False)
def _write_emulator_yamls(self):
import yaml
emu_profile = {
"emulator": "TestEmulator",
"type": "standalone + libretro",
"systems": ["test-system"],
"files": [
{
"name": "correct_hash.bin",
"required": True,
"aliases": ["alt1.bin", "alt2.bin"],
},
{
"name": "optional_standalone.rom",
"required": False,
"mode": "standalone",
},
{
"name": "undeclared.bin",
"required": True,
},
],
}
alias_profile = {
"emulator": "TestAlias",
"type": "alias",
"alias_of": "test_emu_with_aliases",
"systems": ["test-system"],
"files": [],
}
with open(os.path.join(self.emulators_dir, "test_emu_with_aliases.yml"), "w") as f:
yaml.dump(emu_profile, f, default_flow_style=False)
with open(os.path.join(self.emulators_dir, "test_emu_alias_only.yml"), "w") as f:
yaml.dump(alias_profile, f, default_flow_style=False)
def _teardown_fixtures(self):
shutil.rmtree(self.tmpdir, ignore_errors=True)
# ---------------------------------------------------------------------------
# Existence mode tests
# ---------------------------------------------------------------------------
class TestVerifyExistenceMode(FixtureMixin, unittest.TestCase):
    """Existence platform: verify_platform with real file resolution.

    The test_existence fixture declares four files: required_present and
    optional_present exist on disk; required_missing and optional_missing
    do not.
    """

    def setUp(self):
        self._setup_fixtures()

    def tearDown(self):
        self._teardown_fixtures()

    def test_existence_mode_counts(self):
        """Existence: 2 present (1 required OK, 1 optional OK), 2 missing."""
        config = load_platform_config("test_existence", self.platforms_dir)
        result = verify_platform(config, self.db, self.emulators_dir)
        self.assertEqual(result["verification_mode"], "existence")
        counts = result["severity_counts"]
        # required_present + optional_present = 2 OK
        self.assertEqual(counts[Severity.OK], 2)
        # required_missing = WARNING
        self.assertEqual(counts[Severity.WARNING], 1)
        # optional_missing = INFO
        self.assertEqual(counts[Severity.INFO], 1)
        self.assertEqual(result["total_files"], 4)

    def test_severity_counts_sum_to_total(self):
        """Every file lands in exactly one severity bucket."""
        config = load_platform_config("test_existence", self.platforms_dir)
        result = verify_platform(config, self.db, self.emulators_dir)
        total_from_counts = sum(result["severity_counts"].values())
        self.assertEqual(total_from_counts, result["total_files"])

    def test_required_field_propagated(self):
        """The per-file `required` flag survives into the result details."""
        config = load_platform_config("test_existence", self.platforms_dir)
        result = verify_platform(config, self.db, self.emulators_dir)
        for detail in result["details"]:
            if detail["name"] == "optional_present.bin":
                self.assertFalse(detail["required"])
            elif detail["name"] == "required_present.bin":
                self.assertTrue(detail["required"])
# ---------------------------------------------------------------------------
# MD5 mode tests
# ---------------------------------------------------------------------------
class TestVerifyMD5Mode(FixtureMixin, unittest.TestCase):
    """MD5 platform: verify_platform with hash checks, ZIPs, multi-hash."""

    def setUp(self):
        self._setup_fixtures()

    def tearDown(self):
        self._teardown_fixtures()

    def _get_result(self):
        """Load the test_md5 platform and run a full verification pass."""
        config = load_platform_config("test_md5", self.platforms_dir)
        return verify_platform(config, self.db, self.emulators_dir)

    def _find_detail(self, result: dict, name: str, system: str | None = None) -> dict | None:
        """Return the first detail row matching name (and system, if given)."""
        for d in result["details"]:
            if d["name"] == name:
                if system is None or d.get("system") == system:
                    return d
        return None

    def test_md5_mode_correct_hash(self):
        """File present whose MD5 matches the expected value = OK."""
        result = self._get_result()
        detail = self._find_detail(result, "correct_hash.bin", system="test-system")
        self.assertIsNotNone(detail)
        self.assertEqual(detail["status"], Status.OK)

    def test_md5_mode_wrong_hash(self):
        """File present but MD5 differs from the expected value = UNTESTED."""
        result = self._get_result()
        detail = self._find_detail(result, "wrong_hash.bin")
        self.assertIsNotNone(detail)
        self.assertEqual(detail["status"], Status.UNTESTED)

    def test_md5_mode_no_md5_present(self):
        """File present with no expected MD5 in md5-mode platform = OK."""
        result = self._get_result()
        detail = self._find_detail(result, "no_md5_present.bin")
        self.assertIsNotNone(detail)
        self.assertEqual(detail["status"], Status.OK)

    def test_md5_mode_missing_required(self):
        """Required file absent from the database = MISSING."""
        result = self._get_result()
        detail = self._find_detail(result, "required_missing.bin")
        self.assertIsNotNone(detail)
        self.assertEqual(detail["status"], Status.MISSING)

    def test_md5_mode_missing_optional(self):
        """Optional file absent = MISSING with required flag False."""
        result = self._get_result()
        detail = self._find_detail(result, "optional_missing.bin")
        self.assertIsNotNone(detail)
        self.assertEqual(detail["status"], Status.MISSING)
        self.assertFalse(detail["required"])

    def test_md5_severity_missing_required_is_critical(self):
        """Missing required file maps to CRITICAL severity in md5 mode."""
        result = self._get_result()
        counts = result["severity_counts"]
        self.assertGreater(counts[Severity.CRITICAL], 0)

    def test_md5_severity_missing_optional_is_warning(self):
        """optional_missing -> WARNING severity in md5 mode."""
        result = self._get_result()
        # At least 1 WARNING for optional_missing + wrong_hash
        counts = result["severity_counts"]
        self.assertGreater(counts[Severity.WARNING], 0)

    def test_severity_counts_sum_to_total(self):
        """Every deduplicated file falls into exactly one severity bucket."""
        result = self._get_result()
        total_from_counts = sum(result["severity_counts"].values())
        self.assertEqual(total_from_counts, result["total_files"])
# ---------------------------------------------------------------------------
# ZIP verification tests
# ---------------------------------------------------------------------------
class TestVerifyZippedFiles(FixtureMixin, unittest.TestCase):
    """zipped_file entries: inner ROM hash matching via check_inside_zip."""

    def setUp(self):
        self._setup_fixtures()

    def tearDown(self):
        self._teardown_fixtures()

    def _get_result(self):
        """Run verification of the test_md5 platform fixture."""
        config = load_platform_config("test_md5", self.platforms_dir)
        return verify_platform(config, self.db, self.emulators_dir)

    def _find_detail(self, result: dict, name: str) -> dict | None:
        """Return the first detail row whose name matches, else None."""
        for d in result["details"]:
            if d["name"] == name:
                return d
        return None

    def test_zipped_file_correct_inner(self):
        """test.zip with inner.rom matching expected MD5 = OK."""
        result = self._get_result()
        detail = self._find_detail(result, "test.zip")
        self.assertIsNotNone(detail)
        self.assertEqual(detail["status"], Status.OK)

    def test_zipped_file_wrong_inner(self):
        """test_bad.zip with inner.rom not matching expected MD5."""
        result = self._get_result()
        detail = self._find_detail(result, "test_bad.zip")
        self.assertIsNotNone(detail)
        # Inner ROM exists but MD5 doesn't match the expected "e"*32
        self.assertIn(detail["status"], (Status.UNTESTED, Status.MISSING))

    def test_zipped_file_inner_not_found(self):
        """test_missing_inner.zip: zipped_file references not_there.rom which doesn't exist."""
        result = self._get_result()
        detail = self._find_detail(result, "test_missing_inner.zip")
        self.assertIsNotNone(detail)
        self.assertIn(detail["status"], (Status.UNTESTED, Status.MISSING))
# ---------------------------------------------------------------------------
# Multi-hash and truncated MD5 tests
# ---------------------------------------------------------------------------
class TestVerifyRecalboxEdgeCases(FixtureMixin, unittest.TestCase):
    """Comma-separated multi-hash and truncated 29-char MD5."""

    def setUp(self):
        self._setup_fixtures()

    def tearDown(self):
        self._teardown_fixtures()

    def _get_result(self):
        """Run verification of the test_md5 platform fixture."""
        config = load_platform_config("test_md5", self.platforms_dir)
        return verify_platform(config, self.db, self.emulators_dir)

    def _find_detail(self, result: dict, name: str) -> dict | None:
        """Return the first detail row whose name matches, else None."""
        for d in result["details"]:
            if d["name"] == name:
                return d
        return None

    def test_multi_hash_recalbox(self):
        """Comma-separated MD5 list: any match = OK."""
        result = self._get_result()
        detail = self._find_detail(result, "multi_hash.bin")
        self.assertIsNotNone(detail)
        self.assertEqual(detail["status"], Status.OK)

    def test_truncated_md5_batocera(self):
        """29-char MD5 prefix match = OK."""
        result = self._get_result()
        detail = self._find_detail(result, "truncated_md5.bin")
        self.assertIsNotNone(detail)
        self.assertEqual(detail["status"], Status.OK)
# ---------------------------------------------------------------------------
# Same-destination worst-status aggregation
# ---------------------------------------------------------------------------
class TestWorstStatusAggregation(FixtureMixin, unittest.TestCase):
    """Two entries for same destination: worst status wins."""

    def setUp(self):
        self._setup_fixtures()

    def tearDown(self):
        self._teardown_fixtures()

    def test_same_dest_worst_status_wins(self):
        """correct_hash.bin: test-system has the correct MD5, test-dedup-system
        a wrong one.  Both per-system statuses must appear in the details, while
        total_files counts the shared destination only once."""
        config = load_platform_config("test_md5", self.platforms_dir)
        result = verify_platform(config, self.db, self.emulators_dir)
        # correct_hash.bin appears in both test-system (OK) and
        # test-dedup-system (UNTESTED): collect every status reported for it.
        seen_statuses = {
            detail.get("status")
            for detail in result["details"]
            if detail.get("name", "") == "correct_hash.bin"
        }
        self.assertIn(Status.OK, seen_statuses, "Expected OK detail for correct_hash.bin")
        self.assertIn(Status.UNTESTED, seen_statuses, "Expected UNTESTED detail for correct_hash.bin")
        # total_files is deduplicated by destination, so it must be strictly
        # smaller than the number of per-system detail rows.
        self.assertLess(result["total_files"], len(result["details"]))
# ---------------------------------------------------------------------------
# Inheritance tests
# ---------------------------------------------------------------------------
class TestInheritance(FixtureMixin, unittest.TestCase):
    """Platform with inherits: loads parent files + own overrides."""

    def setUp(self):
        self._setup_fixtures()

    def tearDown(self):
        self._teardown_fixtures()

    def test_inherited_platform_loads_parent_systems(self):
        """Own keys override the parent; parent systems are merged in."""
        config = load_platform_config("test_inherit", self.platforms_dir)
        self.assertEqual(config["platform"], "TestInherited")
        self.assertEqual(config["base_destination"], "BIOS")
        # Should have inherited systems from test_md5
        self.assertIn("test-system", config.get("systems", {}))
        self.assertIn("test-zip-system", config.get("systems", {}))
        self.assertIn("test-recalbox-system", config.get("systems", {}))
        self.assertIn("test-dedup-system", config.get("systems", {}))

    def test_inherited_verification_mode(self):
        """Inherited platform keeps parent's verification_mode."""
        config = load_platform_config("test_inherit", self.platforms_dir)
        self.assertEqual(config["verification_mode"], "md5")

    def test_inherited_verify_produces_results(self):
        """Verification of the inheriting platform runs end to end."""
        config = load_platform_config("test_inherit", self.platforms_dir)
        result = verify_platform(config, self.db, self.emulators_dir)
        self.assertEqual(result["platform"], "TestInherited")
        self.assertGreater(result["total_files"], 0)
        total_from_counts = sum(result["severity_counts"].values())
        self.assertEqual(total_from_counts, result["total_files"])
# ---------------------------------------------------------------------------
# Cross-reference / undeclared files tests
# ---------------------------------------------------------------------------
class TestCrossReference(FixtureMixin, unittest.TestCase):
    """find_undeclared_files and cross_reference with emulator profiles."""

    def setUp(self):
        self._setup_fixtures()

    def tearDown(self):
        self._teardown_fixtures()

    def test_cross_reference_finds_undeclared(self):
        """undeclared.bin from emulator profile not in platform config."""
        config = load_platform_config("test_md5", self.platforms_dir)
        undeclared = find_undeclared_files(config, self.emulators_dir, self.db)
        names = [u["name"] for u in undeclared]
        self.assertIn("undeclared.bin", names)

    def test_cross_reference_skips_standalone(self):
        """mode: standalone files excluded from undeclared list."""
        config = load_platform_config("test_md5", self.platforms_dir)
        undeclared = find_undeclared_files(config, self.emulators_dir, self.db)
        names = [u["name"] for u in undeclared]
        self.assertNotIn("optional_standalone.rom", names)

    def test_cross_reference_skips_alias_profiles(self):
        """type: alias emulator profiles are not loaded by default."""
        profiles = load_emulator_profiles(self.emulators_dir, skip_aliases=True)
        self.assertNotIn("test_emu_alias_only", profiles)
        self.assertIn("test_emu_with_aliases", profiles)

    def test_cross_reference_declared_not_in_undeclared(self):
        """correct_hash.bin is in platform config, not reported as undeclared."""
        config = load_platform_config("test_md5", self.platforms_dir)
        undeclared = find_undeclared_files(config, self.emulators_dir, self.db)
        names = [u["name"] for u in undeclared]
        self.assertNotIn("correct_hash.bin", names)

    def test_cross_reference_function(self):
        """cross_reference() produces gap report with expected structure."""
        profiles = load_emulator_profiles(self.emulators_dir)
        # Declared sets mirror what the test_md5 fixture lists for test-system.
        declared = {}
        for sys_id in ["test-system"]:
            declared[sys_id] = {"correct_hash.bin", "wrong_hash.bin", "no_md5_present.bin",
                                "required_missing.bin", "optional_missing.bin"}
        report = cross_reference(profiles, declared, self.db)
        self.assertIn("test_emu_with_aliases", report)
        emu_report = report["test_emu_with_aliases"]
        self.assertEqual(emu_report["emulator"], "TestEmulator")
        self.assertGreater(emu_report["total_files"], 0)
        gap_names = [g["name"] for g in emu_report["gap_details"]]
        self.assertIn("undeclared.bin", gap_names)
        # standalone excluded
        self.assertNotIn("optional_standalone.rom", gap_names)
# ---------------------------------------------------------------------------
# Alias resolution tests
# ---------------------------------------------------------------------------
class TestAliasResolution(FixtureMixin, unittest.TestCase):
    """File entries with aliases resolve via alternate names."""

    def setUp(self):
        self._setup_fixtures()
        # Add alias names to the database by_name index
        sha1_a = self.hashes_a["sha1"]
        self.db["indexes"]["by_name"]["alt1.bin"] = [sha1_a]
        self.db["indexes"]["by_name"]["alt2.bin"] = [sha1_a]

    def tearDown(self):
        self._teardown_fixtures()

    def test_alias_resolves_file(self):
        """File not found by primary name resolves via alias in by_name."""
        entry = {
            "name": "nonexistent_primary.bin",
            "aliases": ["alt1.bin"],
        }
        path, status = resolve_local_file(entry, self.db)
        self.assertIsNotNone(path)
        # alt1.bin aliases correct_hash.bin in the fixture database (setUp).
        self.assertEqual(os.path.basename(path), "correct_hash.bin")

    def test_primary_name_preferred_over_alias(self):
        """When the primary name resolves, aliases are not consulted."""
        entry = {
            "name": "correct_hash.bin",
            "aliases": ["alt1.bin"],
        }
        path, status = resolve_local_file(entry, self.db)
        self.assertEqual(status, "exact")
        self.assertEqual(os.path.basename(path), "correct_hash.bin")
# ---------------------------------------------------------------------------
# Pack consistency test
# ---------------------------------------------------------------------------
class TestPackConsistency(FixtureMixin, unittest.TestCase):
    """verify and pack produce consistent OK counts for the same platform."""

    def setUp(self):
        self._setup_fixtures()

    def tearDown(self):
        self._teardown_fixtures()

    def test_existence_ok_count_matches_present_files(self):
        """For existence mode, OK count should match files resolved on disk.

        Resolution is deduplicated by destination (the same logic used by
        verify_platform), so the OK severity bucket must equal the number of
        distinct destinations whose file actually resolves.
        """
        config = load_platform_config("test_existence", self.platforms_dir)
        result = verify_platform(config, self.db, self.emulators_dir)
        # Deduplicate by destination (same logic as verify_platform).
        dest_resolved = set()
        for sys_id, system in config.get("systems", {}).items():
            for fe in system.get("files", []):
                path, status = resolve_local_file(fe, self.db)
                if path is not None:
                    dest_resolved.add(fe.get("destination", fe.get("name", "")))
        self.assertEqual(result["severity_counts"][Severity.OK], len(dest_resolved))
# ---------------------------------------------------------------------------
# Database.json fixture
# ---------------------------------------------------------------------------
class TestDatabaseFixture(FixtureMixin, unittest.TestCase):
    """Verify the synthetic database.json has correct structure and indexes."""

    def setUp(self):
        self._setup_fixtures()

    def tearDown(self):
        self._teardown_fixtures()

    def test_db_has_required_keys(self):
        """Top-level structure: files map plus the three lookup indexes."""
        self.assertIn("files", self.db)
        self.assertIn("indexes", self.db)
        self.assertIn("by_md5", self.db["indexes"])
        self.assertIn("by_name", self.db["indexes"])
        self.assertIn("by_crc32", self.db["indexes"])

    def test_db_sha1_keys_match(self):
        """Every SHA1 key in files is reachable via by_md5 or by_name."""
        by_md5 = self.db["indexes"]["by_md5"]
        by_name = self.db["indexes"]["by_name"]
        for sha1, info in self.db["files"].items():
            md5 = info.get("md5", "")
            name = info.get("name", "")
            found = False
            if md5 in by_md5 and by_md5[md5] == sha1:
                found = True
            if name in by_name and sha1 in by_name[name]:
                found = True
            self.assertTrue(found, f"SHA1 {sha1} not reachable via indexes")

    def test_db_file_paths_exist(self):
        """Every database entry points at a file that exists on disk."""
        for sha1, info in self.db["files"].items():
            path = info.get("path", "")
            self.assertTrue(os.path.exists(path), f"File missing: {path}")

    def test_db_hashes_match_disk(self):
        """MD5 in database matches actual file on disk."""
        for sha1, info in self.db["files"].items():
            actual = md5sum(info["path"])
            self.assertEqual(actual, info["md5"], f"MD5 mismatch for {info['path']}")

    def test_db_json_roundtrip(self):
        """database.json written to disk can be loaded back."""
        with open(self.db_path) as f:
            loaded = json.load(f)
        self.assertEqual(set(loaded["files"].keys()), set(self.db["files"].keys()))
# ---------------------------------------------------------------------------
# Shared groups test
# ---------------------------------------------------------------------------
class TestSharedGroups(FixtureMixin, unittest.TestCase):
    """_shared.yml groups injected via includes."""

    def setUp(self):
        self._setup_fixtures()

    def tearDown(self):
        self._teardown_fixtures()

    def test_shared_group_loaded(self):
        """_shared.yml exists and can be parsed."""
        import yaml
        shared_path = os.path.join(self.platforms_dir, "_shared.yml")
        self.assertTrue(os.path.exists(shared_path))
        with open(shared_path) as f:
            data = yaml.safe_load(f)
        self.assertIn("shared_groups", data)
        self.assertIn("test_group", data["shared_groups"])

    def test_includes_injects_shared_files(self):
        """Platform with includes: [test_group] gets shared_file.bin."""
        import yaml
        # Create a platform that uses includes
        config = {
            "platform": "TestWithShared",
            "verification_mode": "existence",
            "systems": {
                "test-shared-system": {
                    "includes": ["test_group"],
                    "files": [
                        {
                            "name": "local_file.bin",
                            "destination": "local_file.bin",
                            "required": True,
                            "sha1": "0" * 40,
                        },
                    ],
                }
            },
        }
        with open(os.path.join(self.platforms_dir, "test_with_shared.yml"), "w") as f:
            yaml.dump(config, f, default_flow_style=False)
        # Loader must merge the shared group's files into the system's own list.
        loaded = load_platform_config("test_with_shared", self.platforms_dir)
        files = loaded["systems"]["test-shared-system"]["files"]
        names = [fe["name"] for fe in files]
        self.assertIn("local_file.bin", names)
        self.assertIn("shared_file.bin", names)
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()

View File

@@ -1,238 +0,0 @@
"""Tests for pack generation logic in generate_pack.py."""
from __future__ import annotations
import hashlib
import os
import sys
import tempfile
import unittest
import zipfile
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "scripts"))
from common import compute_hashes
from generate_pack import build_zip_contents_index
class TestBuildZipContentsIndex(unittest.TestCase):
    """Test build_zip_contents_index: maps inner ROM MD5 to container SHA1."""

    def setUp(self):
        """Build a one-entry database whose ZIP holds a single inner ROM."""
        self.tmpdir = tempfile.mkdtemp()
        self.inner_content = b"inner rom data for index test"
        self.inner_md5 = hashlib.md5(self.inner_content).hexdigest()
        self.zip_path = os.path.join(self.tmpdir, "container.zip")
        with zipfile.ZipFile(self.zip_path, "w") as zf:
            zf.writestr("rom.bin", self.inner_content)
        hashes = compute_hashes(self.zip_path)
        self.zip_sha1 = hashes["sha1"]
        self.zip_md5 = hashes["md5"]
        self.db = {
            "files": {
                self.zip_sha1: {
                    "path": self.zip_path,
                    "name": "container.zip",
                    "md5": self.zip_md5,
                    "size": os.path.getsize(self.zip_path),
                },
            },
            "indexes": {
                "by_md5": {self.zip_md5: self.zip_sha1},
                "by_name": {"container.zip": [self.zip_sha1]},
                "by_crc32": {},
            },
        }

    def tearDown(self):
        import shutil
        shutil.rmtree(self.tmpdir, ignore_errors=True)

    def test_inner_md5_maps_to_container_sha1(self):
        """The inner ROM's MD5 resolves to the containing ZIP's SHA1."""
        index = build_zip_contents_index(self.db)
        self.assertIn(self.inner_md5, index)
        self.assertEqual(index[self.inner_md5], self.zip_sha1)

    def test_non_zip_files_skipped(self):
        """Non-ZIP files in db don't appear in index."""
        plain_path = os.path.join(self.tmpdir, "plain.bin")
        with open(plain_path, "wb") as f:
            f.write(b"not a zip")
        hashes = compute_hashes(plain_path)
        self.db["files"][hashes["sha1"]] = {
            "path": plain_path,
            "name": "plain.bin",
            "md5": hashes["md5"],
            "size": 9,
        }
        index = build_zip_contents_index(self.db)
        # Only the inner_md5 from the ZIP should be present
        self.assertEqual(len(index), 1)

    def test_missing_file_skipped(self):
        """ZIP path that doesn't exist on disk is skipped."""
        self.db["files"]["fake_sha1"] = {
            "path": "/nonexistent/file.zip",
            "name": "file.zip",
            "md5": "a" * 32,
            "size": 0,
        }
        index = build_zip_contents_index(self.db)
        self.assertEqual(len(index), 1)

    def test_bad_zip_skipped(self):
        """Corrupt ZIP file is skipped without error."""
        bad_path = os.path.join(self.tmpdir, "bad.zip")
        with open(bad_path, "wb") as f:
            f.write(b"corrupt data")
        hashes = compute_hashes(bad_path)
        self.db["files"][hashes["sha1"]] = {
            "path": bad_path,
            "name": "bad.zip",
            "md5": hashes["md5"],
            "size": 12,
        }
        index = build_zip_contents_index(self.db)
        self.assertEqual(len(index), 1)
class TestFileStatusAggregation(unittest.TestCase):
    """Test worst-status-wins logic for pack file aggregation."""

    def test_worst_status_wins(self):
        """Simulate the worst-status-wins dict pattern from generate_pack."""
        rank = {"ok": 0, "untested": 1, "missing": 2}
        statuses = {}

        def record(dest, status):
            # Keep whichever status ranks worse for this destination.
            current = statuses.get(dest)
            if current is None or rank.get(status, 0) > rank.get(current, 0):
                statuses[dest] = status

        record("system/bios.bin", "ok")
        record("system/bios.bin", "missing")
        self.assertEqual(statuses["system/bios.bin"], "missing")
        record("system/other.bin", "untested")
        record("system/other.bin", "ok")
        self.assertEqual(statuses["system/other.bin"], "untested")

    def test_dedup_same_destination_packed_once(self):
        """Same destination from multiple systems: only first is packed."""
        entries = [
            {"dest": "shared/bios.bin", "source": "sys1"},
            {"dest": "shared/bios.bin", "source": "sys2"},
            {"dest": "unique/other.bin", "source": "sys3"},
        ]
        already_packed = set()
        packed = []
        for entry in entries:
            destination = entry["dest"]
            if destination not in already_packed:
                already_packed.add(destination)
                packed.append(destination)
        self.assertEqual(len(packed), 2)
        self.assertIn("shared/bios.bin", packed)
        self.assertIn("unique/other.bin", packed)
class TestEmuDeckNoDestination(unittest.TestCase):
    """EmuDeck entries with no destination are counted as checks."""

    def _simulate(self, md5, by_md5):
        """Replicate generate_pack's handling of an entry with empty dest."""
        file_status = {}
        sys_id = "psx"
        name = ""
        dest = ""  # empty destination, EmuDeck style
        if not dest:
            fkey = f"{sys_id}/{name}"
            if md5 and md5 in by_md5:
                file_status.setdefault(fkey, "ok")
            else:
                file_status[fkey] = "missing"
        return file_status

    def test_no_destination_counted_as_check(self):
        """EmuDeck-style entries (md5 whitelist, no filename) are tracked."""
        file_status = self._simulate("abc123", {"abc123": "sha1_match"})
        self.assertIn("psx/", file_status)
        self.assertEqual(file_status["psx/"], "ok")

    def test_no_destination_missing(self):
        """Unmatched md5 with empty destination is recorded as missing."""
        file_status = self._simulate("abc123", {})
        self.assertEqual(file_status["psx/"], "missing")
class TestUserProvidedEntries(unittest.TestCase):
    """Test user_provided storage handling."""

    def test_user_provided_creates_instruction_file(self):
        """Simulate user_provided entry packing logic.

        Entries with storage == "user_provided" cannot be redistributed; the
        pack instead contains an INSTRUCTIONS_<name>.txt telling the user how
        to obtain the file themselves.
        """
        # TemporaryDirectory replaces the manual mkdtemp/try/finally cleanup
        # (and the local `import shutil`) with a standard context manager.
        with tempfile.TemporaryDirectory() as tmpdir:
            zip_path = os.path.join(tmpdir, "test_pack.zip")
            entry = {
                "name": "PS3UPDAT.PUP",
                "storage": "user_provided",
                "instructions": "Download from sony.com",
            }
            instr_name = f"INSTRUCTIONS_{entry['name']}.txt"
            with zipfile.ZipFile(zip_path, "w") as zf:
                zf.writestr(instr_name, f"File needed: {entry['name']}\n\n{entry['instructions']}\n")
            with zipfile.ZipFile(zip_path, "r") as zf:
                names = zf.namelist()
                self.assertIn("INSTRUCTIONS_PS3UPDAT.PUP.txt", names)
                content = zf.read("INSTRUCTIONS_PS3UPDAT.PUP.txt").decode()
                self.assertIn("PS3UPDAT.PUP", content)
                self.assertIn("sony.com", content)
class TestZippedFileHashMismatch(unittest.TestCase):
    """Test zipped_file with hash_mismatch triggers check_inside_zip."""

    def setUp(self):
        """Create a ZIP archive holding one inner ROM with a known MD5."""
        self.tmpdir = tempfile.mkdtemp()
        self.inner_content = b"correct inner rom"
        self.inner_md5 = hashlib.md5(self.inner_content).hexdigest()
        self.zip_path = os.path.join(self.tmpdir, "game.zip")
        with zipfile.ZipFile(self.zip_path, "w") as archive:
            archive.writestr("rom.bin", self.inner_content)

    def tearDown(self):
        import shutil
        shutil.rmtree(self.tmpdir, ignore_errors=True)

    def test_hash_mismatch_zip_inner_ok(self):
        """hash_mismatch on container, but inner ROM MD5 matches."""
        from verify import check_inside_zip, Status
        outcome = check_inside_zip(self.zip_path, "rom.bin", self.inner_md5)
        self.assertEqual(outcome, Status.OK)

    def test_hash_mismatch_zip_inner_not_found(self):
        """Member absent from the ZIP yields the 'not_in_zip' marker."""
        from verify import check_inside_zip
        outcome = check_inside_zip(self.zip_path, "missing.bin", self.inner_md5)
        self.assertEqual(outcome, "not_in_zip")
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()

View File

@@ -1,166 +0,0 @@
"""Tests for resolve_local_file from common.py."""
from __future__ import annotations
import hashlib
import os
import sys
import tempfile
import unittest
import zipfile
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "scripts"))
from common import resolve_local_file, compute_hashes, md5_composite
class TestResolveLocalFile(unittest.TestCase):
    """Test resolve_local_file resolution chain."""

    def setUp(self):
        """Build a temp tree (plain file, .variants/ copy, ZIP) and a matching db."""
        self.tmpdir = tempfile.mkdtemp()
        # Create a fake BIOS file
        self.bios_content = b"fake bios data for testing"
        self.bios_path = os.path.join(self.tmpdir, "bios.bin")
        with open(self.bios_path, "wb") as f:
            f.write(self.bios_content)
        hashes = compute_hashes(self.bios_path)
        self.sha1 = hashes["sha1"]
        self.md5 = hashes["md5"]
        self.crc32 = hashes["crc32"]
        # Create a second file in .variants/
        self.variant_path = os.path.join(self.tmpdir, ".variants", "bios.bin.abcd1234")
        os.makedirs(os.path.dirname(self.variant_path), exist_ok=True)
        self.variant_content = b"variant bios data"
        with open(self.variant_path, "wb") as f:
            f.write(self.variant_content)
        variant_hashes = compute_hashes(self.variant_path)
        self.variant_sha1 = variant_hashes["sha1"]
        self.variant_md5 = variant_hashes["md5"]
        # Create a ZIP file with an inner ROM
        self.zip_path = os.path.join(self.tmpdir, "game.zip")
        self.inner_content = b"inner rom data"
        self.inner_md5 = hashlib.md5(self.inner_content).hexdigest()
        with zipfile.ZipFile(self.zip_path, "w") as zf:
            zf.writestr("rom.bin", self.inner_content)
        zip_hashes = compute_hashes(self.zip_path)
        self.zip_sha1 = zip_hashes["sha1"]
        self.zip_md5 = zip_hashes["md5"]
        # Build a minimal database
        self.db = {
            "files": {
                self.sha1: {
                    "path": self.bios_path,
                    "name": "bios.bin",
                    "md5": self.md5,
                    "size": len(self.bios_content),
                },
                self.variant_sha1: {
                    "path": self.variant_path,
                    "name": "bios.bin",
                    "md5": self.variant_md5,
                    "size": len(self.variant_content),
                },
                self.zip_sha1: {
                    "path": self.zip_path,
                    "name": "game.zip",
                    "md5": self.zip_md5,
                    "size": os.path.getsize(self.zip_path),
                },
            },
            "indexes": {
                "by_md5": {
                    self.md5: self.sha1,
                    self.variant_md5: self.variant_sha1,
                    self.zip_md5: self.zip_sha1,
                },
                "by_name": {
                    # Both the primary and the .variants/ copy share this name.
                    "bios.bin": [self.sha1, self.variant_sha1],
                    "game.zip": [self.zip_sha1],
                    # Alias pointing at the primary BIOS entry.
                    "alias.bin": [self.sha1],
                },
                "by_crc32": {
                    self.crc32: self.sha1,
                },
            },
        }

    def tearDown(self):
        import shutil
        shutil.rmtree(self.tmpdir, ignore_errors=True)

    def test_sha1_exact_match(self):
        """SHA1 lookup hits the files map directly -> 'exact'."""
        entry = {"sha1": self.sha1, "name": "bios.bin"}
        path, status = resolve_local_file(entry, self.db)
        self.assertEqual(status, "exact")
        self.assertEqual(path, self.bios_path)

    def test_md5_direct_match(self):
        """MD5 lookup via by_md5 wins even when the name doesn't match."""
        entry = {"md5": self.md5, "name": "something_else.bin"}
        path, status = resolve_local_file(entry, self.db)
        self.assertEqual(status, "md5_exact")
        self.assertEqual(path, self.bios_path)

    def test_name_match_no_md5(self):
        """No MD5 provided: resolve by name from by_name index."""
        entry = {"name": "bios.bin"}
        path, status = resolve_local_file(entry, self.db)
        self.assertEqual(status, "exact")
        # Primary (non-.variants/) path preferred
        self.assertEqual(path, self.bios_path)

    def test_alias_match_no_md5(self):
        """Alias name in by_name index resolves the file."""
        entry = {"name": "unknown.bin", "aliases": ["alias.bin"]}
        path, status = resolve_local_file(entry, self.db)
        self.assertEqual(status, "exact")
        self.assertEqual(path, self.bios_path)

    def test_not_found(self):
        """Unknown SHA1 and name resolve to (None, 'not_found')."""
        entry = {"sha1": "0000000000000000000000000000000000000000", "name": "missing.bin"}
        path, status = resolve_local_file(entry, self.db)
        self.assertIsNone(path)
        self.assertEqual(status, "not_found")

    def test_hash_mismatch_fallback(self):
        """File found by name but MD5 doesn't match -> hash_mismatch."""
        wrong_md5 = "a" * 32
        entry = {"name": "bios.bin", "md5": wrong_md5}
        path, status = resolve_local_file(entry, self.db)
        self.assertEqual(status, "hash_mismatch")
        # Should prefer primary over .variants/
        self.assertEqual(path, self.bios_path)

    def test_zipped_file_resolution_via_zip_contents(self):
        """zipped_file entry resolved through zip_contents index."""
        zip_contents = {self.inner_md5: self.zip_sha1}
        entry = {
            "name": "nonexistent_zip.zip",
            "md5": self.inner_md5,
            "zipped_file": "rom.bin",
        }
        path, status = resolve_local_file(entry, self.db, zip_contents)
        self.assertEqual(status, "zip_exact")
        self.assertEqual(path, self.zip_path)

    def test_variants_deprioritized(self):
        """Primary path preferred over .variants/ path."""
        # Both bios_path and variant_path have name "bios.bin" in by_name
        entry = {"name": "bios.bin"}
        path, status = resolve_local_file(entry, self.db)
        self.assertEqual(status, "exact")
        self.assertNotIn(".variants", path)

    def test_truncated_md5_match(self):
        """Batocera truncated MD5 (29 chars) matches via prefix."""
        truncated = self.md5[:29]
        entry = {"md5": truncated, "name": "something.bin"}
        path, status = resolve_local_file(entry, self.db)
        self.assertEqual(status, "md5_exact")
        self.assertEqual(path, self.bios_path)
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()

View File

@@ -1,184 +0,0 @@
"""Exhaustive severity mapping tests across all modes and statuses."""
from __future__ import annotations
import os
import sys
import tempfile
import unittest
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "scripts"))
from verify import Status, Severity, compute_severity
class TestSeverityMappingExistence(unittest.TestCase):
    """Existence mode: RetroArch/Lakka/RetroPie behavior.
    - OK = OK
    - UNTESTED = OK (existence doesn't care about hash)
    - MISSING + required = WARNING
    - MISSING + optional = INFO
    """
    MODE = "existence"

    def _check(self, status, required, expected):
        """Assert the severity mapping for one (status, required) pair."""
        self.assertEqual(compute_severity(status, required, self.MODE), expected)

    def test_ok_required(self):
        self._check(Status.OK, True, Severity.OK)

    def test_ok_optional(self):
        self._check(Status.OK, False, Severity.OK)

    def test_untested_required(self):
        self._check(Status.UNTESTED, True, Severity.OK)

    def test_untested_optional(self):
        self._check(Status.UNTESTED, False, Severity.OK)

    def test_missing_required(self):
        self._check(Status.MISSING, True, Severity.WARNING)

    def test_missing_optional(self):
        self._check(Status.MISSING, False, Severity.INFO)
class TestSeverityMappingMd5(unittest.TestCase):
    """MD5 mode: Batocera/RetroBat/EmuDeck behavior.
    - OK = OK
    - UNTESTED + required = WARNING
    - UNTESTED + optional = WARNING
    - MISSING + required = CRITICAL
    - MISSING + optional = WARNING
    Batocera has no required/optional distinction in practice,
    but the severity function handles it for Recalbox compatibility.
    """
    MODE = "md5"

    def _check(self, status, required, expected):
        """Assert the severity mapping for one (status, required) pair."""
        self.assertEqual(compute_severity(status, required, self.MODE), expected)

    def test_ok_required(self):
        self._check(Status.OK, True, Severity.OK)

    def test_ok_optional(self):
        self._check(Status.OK, False, Severity.OK)

    def test_untested_required(self):
        self._check(Status.UNTESTED, True, Severity.WARNING)

    def test_untested_optional(self):
        self._check(Status.UNTESTED, False, Severity.WARNING)

    def test_missing_required(self):
        self._check(Status.MISSING, True, Severity.CRITICAL)

    def test_missing_optional(self):
        self._check(Status.MISSING, False, Severity.WARNING)
class TestSeverityBatoceraBehavior(unittest.TestCase):
    """Batocera has no required distinction: all files are treated equally.
    In practice, Batocera YAMLs don't set required=True/False,
    so the default (True) applies. Both required and optional
    untested files get WARNING severity.
    """

    def test_batocera_no_required_distinction_for_untested(self):
        # Severity for an untested file must not depend on the required flag,
        # and must come out as WARNING in both cases.
        required_sev = compute_severity(Status.UNTESTED, True, "md5")
        optional_sev = compute_severity(Status.UNTESTED, False, "md5")
        self.assertEqual(required_sev, Severity.WARNING)
        self.assertEqual(optional_sev, Severity.WARNING)
class TestSeverityRecalboxBehavior(unittest.TestCase):
    """Recalbox has mandatory field: missing mandatory = CRITICAL (RED).
    Recalbox uses md5 mode with mandatory (required) distinction.
    Missing mandatory = CRITICAL (Bios.cpp RED)
    Missing optional = WARNING (Bios.cpp YELLOW)
    """
    MODE = "md5"

    def test_recalbox_mandatory_missing_is_critical(self):
        actual = compute_severity(Status.MISSING, True, self.MODE)
        self.assertEqual(actual, Severity.CRITICAL)

    def test_recalbox_optional_missing_is_warning(self):
        actual = compute_severity(Status.MISSING, False, self.MODE)
        self.assertEqual(actual, Severity.WARNING)

    def test_recalbox_ok_is_ok(self):
        actual = compute_severity(Status.OK, True, self.MODE)
        self.assertEqual(actual, Severity.OK)
class TestSeverityRetroArchBehavior(unittest.TestCase):
    """RetroArch existence mode: required missing = WARNING, optional = INFO."""
    MODE = "existence"

    def test_retroarch_required_missing_is_warning(self):
        actual = compute_severity(Status.MISSING, True, self.MODE)
        self.assertEqual(actual, Severity.WARNING)

    def test_retroarch_optional_missing_is_info(self):
        actual = compute_severity(Status.MISSING, False, self.MODE)
        self.assertEqual(actual, Severity.INFO)

    def test_retroarch_untested_ignored(self):
        """Existence mode ignores untested (hash doesn't matter)."""
        actual = compute_severity(Status.UNTESTED, True, self.MODE)
        self.assertEqual(actual, Severity.OK)
class TestSeverityAllCombinations(unittest.TestCase):
    """Exhaustive matrix: all status x required x mode combinations."""
    EXPECTED = {
        # (status, required, mode): severity
        (Status.OK, True, "existence"): Severity.OK,
        (Status.OK, False, "existence"): Severity.OK,
        (Status.OK, True, "md5"): Severity.OK,
        (Status.OK, False, "md5"): Severity.OK,
        (Status.UNTESTED, True, "existence"): Severity.OK,
        (Status.UNTESTED, False, "existence"): Severity.OK,
        (Status.UNTESTED, True, "md5"): Severity.WARNING,
        (Status.UNTESTED, False, "md5"): Severity.WARNING,
        (Status.MISSING, True, "existence"): Severity.WARNING,
        (Status.MISSING, False, "existence"): Severity.INFO,
        (Status.MISSING, True, "md5"): Severity.CRITICAL,
        (Status.MISSING, False, "md5"): Severity.WARNING,
    }

    def test_all_combinations(self):
        # Walk the whole table; subTest keeps each combination independently
        # reported on failure.
        for combo, expected_severity in self.EXPECTED.items():
            status, required, mode = combo
            with self.subTest(status=status, required=required, mode=mode):
                actual = compute_severity(status, required, mode)
                self.assertEqual(
                    actual,
                    expected_severity,
                    f"compute_severity({status!r}, {required}, {mode!r}) = "
                    f"{actual!r}, expected {expected_severity!r}",
                )

    def test_all_12_combinations_covered(self):
        # Guard against the EXPECTED table silently dropping a combination.
        full_matrix = set()
        for status in (Status.OK, Status.UNTESTED, Status.MISSING):
            for required in (True, False):
                for mode in ("existence", "md5"):
                    full_matrix.add((status, required, mode))
        self.assertEqual(full_matrix, set(self.EXPECTED.keys()))
# Allow running this test module directly (e.g. `python test_severity.py`).
if __name__ == "__main__":
    unittest.main()

View File

@@ -1,334 +0,0 @@
"""Tests for verification logic in verify.py."""
from __future__ import annotations
import hashlib
import os
import sys
import tempfile
import unittest
import zipfile
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "scripts"))
from common import md5sum
from verify import (
Status,
Severity,
check_inside_zip,
compute_severity,
verify_entry_existence,
verify_entry_md5,
verify_platform,
)
class TestComputeSeverity(unittest.TestCase):
    """Exhaustive test of compute_severity for all 12 combinations."""

    def _expect(self, status, required, mode, severity):
        # Shared helper: one compute_severity call, one equality check.
        self.assertEqual(compute_severity(status, required, mode), severity)

    # --- existence mode ---
    def test_existence_ok_required(self):
        self._expect(Status.OK, True, "existence", Severity.OK)

    def test_existence_ok_optional(self):
        self._expect(Status.OK, False, "existence", Severity.OK)

    def test_existence_missing_required(self):
        self._expect(Status.MISSING, True, "existence", Severity.WARNING)

    def test_existence_missing_optional(self):
        self._expect(Status.MISSING, False, "existence", Severity.INFO)

    def test_existence_untested_required(self):
        self._expect(Status.UNTESTED, True, "existence", Severity.OK)

    def test_existence_untested_optional(self):
        self._expect(Status.UNTESTED, False, "existence", Severity.OK)

    # --- md5 mode ---
    def test_md5_ok_required(self):
        self._expect(Status.OK, True, "md5", Severity.OK)

    def test_md5_ok_optional(self):
        self._expect(Status.OK, False, "md5", Severity.OK)

    def test_md5_missing_required(self):
        self._expect(Status.MISSING, True, "md5", Severity.CRITICAL)

    def test_md5_missing_optional(self):
        self._expect(Status.MISSING, False, "md5", Severity.WARNING)

    def test_md5_untested_required(self):
        self._expect(Status.UNTESTED, True, "md5", Severity.WARNING)

    def test_md5_untested_optional(self):
        self._expect(Status.UNTESTED, False, "md5", Severity.WARNING)
class TestVerifyEntryExistence(unittest.TestCase):
    """Test verify_entry_existence: present, missing+required, missing+optional."""

    def test_present(self):
        # A resolved path means the file exists -> OK.
        result = verify_entry_existence({"name": "bios.bin", "required": True}, "/some/path")
        self.assertEqual(result["status"], Status.OK)
        self.assertTrue(result["required"])

    def test_missing_required(self):
        # No resolved path -> MISSING, required flag propagated.
        result = verify_entry_existence({"name": "bios.bin", "required": True}, None)
        self.assertEqual(result["status"], Status.MISSING)
        self.assertTrue(result["required"])

    def test_missing_optional(self):
        result = verify_entry_existence({"name": "bios.bin", "required": False}, None)
        self.assertEqual(result["status"], Status.MISSING)
        self.assertFalse(result["required"])

    def test_required_defaults_true(self):
        # Entries without an explicit "required" key default to required.
        result = verify_entry_existence({"name": "bios.bin"}, None)
        self.assertTrue(result["required"])
class TestVerifyEntryMd5(unittest.TestCase):
    """Test verify_entry_md5 with various scenarios."""

    def setUp(self):
        # One real file on disk whose true MD5 we can compare against.
        self.workdir = tempfile.mkdtemp()
        self.payload = b"test bios content for md5"
        self.bios_path = os.path.join(self.workdir, "bios.bin")
        with open(self.bios_path, "wb") as fh:
            fh.write(self.payload)
        self.good_md5 = md5sum(self.bios_path)

    def tearDown(self):
        import shutil
        shutil.rmtree(self.workdir, ignore_errors=True)

    def test_md5_match(self):
        entry = {"name": "bios.bin", "md5": self.good_md5}
        self.assertEqual(verify_entry_md5(entry, self.bios_path)["status"], Status.OK)

    def test_md5_mismatch(self):
        entry = {"name": "bios.bin", "md5": "a" * 32}
        result = verify_entry_md5(entry, self.bios_path)
        self.assertEqual(result["status"], Status.UNTESTED)
        self.assertIn("reason", result)

    def test_multi_hash_recalbox(self):
        """Recalbox comma-separated MD5 list: any match = OK."""
        wrong_md5 = "b" * 32
        entry = {"name": "bios.bin", "md5": f"{wrong_md5},{self.good_md5}"}
        self.assertEqual(verify_entry_md5(entry, self.bios_path)["status"], Status.OK)

    def test_truncated_md5_batocera(self):
        """Batocera 29-char truncated MD5 matches via prefix."""
        entry = {"name": "bios.bin", "md5": self.good_md5[:29]}
        self.assertEqual(verify_entry_md5(entry, self.bios_path)["status"], Status.OK)

    def test_no_md5_is_ok(self):
        """No MD5 expected: file present = OK."""
        result = verify_entry_md5({"name": "bios.bin"}, self.bios_path)
        self.assertEqual(result["status"], Status.OK)

    def test_md5_exact_resolve_status_bypass(self):
        """resolve_status='md5_exact' skips hash computation."""
        entry = {"name": "bios.bin", "md5": "wrong" * 8}
        result = verify_entry_md5(entry, self.bios_path, resolve_status="md5_exact")
        self.assertEqual(result["status"], Status.OK)

    def test_missing_file(self):
        entry = {"name": "bios.bin", "md5": self.good_md5, "required": True}
        self.assertEqual(verify_entry_md5(entry, None)["status"], Status.MISSING)

    def test_required_propagated(self):
        entry = {"name": "bios.bin", "md5": self.good_md5, "required": False}
        self.assertFalse(verify_entry_md5(entry, self.bios_path)["required"])
class TestCheckInsideZip(unittest.TestCase):
    """Test check_inside_zip for various scenarios."""

    def setUp(self):
        self.workdir = tempfile.mkdtemp()
        # Valid ZIP with one known member and its real MD5.
        payload = b"inner rom content"
        self.member_md5 = hashlib.md5(payload).hexdigest()
        self.zip_path = os.path.join(self.workdir, "container.zip")
        with zipfile.ZipFile(self.zip_path, "w") as archive:
            archive.writestr("ROM.BIN", payload)
        # File with a .zip name that is not actually a ZIP archive.
        self.corrupt_zip = os.path.join(self.workdir, "bad.zip")
        with open(self.corrupt_zip, "wb") as fh:
            fh.write(b"not a zip file")

    def tearDown(self):
        import shutil
        shutil.rmtree(self.workdir, ignore_errors=True)

    def test_found_and_match(self):
        self.assertEqual(
            check_inside_zip(self.zip_path, "ROM.BIN", self.member_md5), Status.OK
        )

    def test_found_and_mismatch(self):
        self.assertEqual(
            check_inside_zip(self.zip_path, "ROM.BIN", "f" * 32), Status.UNTESTED
        )

    def test_not_in_zip(self):
        self.assertEqual(
            check_inside_zip(self.zip_path, "MISSING.BIN", self.member_md5), "not_in_zip"
        )

    def test_bad_zip(self):
        self.assertEqual(
            check_inside_zip(self.corrupt_zip, "ROM.BIN", self.member_md5), "error"
        )

    def test_casefold_match(self):
        """Batocera uses casefold() for filename comparison."""
        self.assertEqual(
            check_inside_zip(self.zip_path, "rom.bin", self.member_md5), Status.OK
        )

    def test_empty_md5_means_ok(self):
        """Empty expected_md5 -> OK if file found (existence check inside ZIP)."""
        self.assertEqual(check_inside_zip(self.zip_path, "ROM.BIN", ""), Status.OK)
class TestVerifyPlatform(unittest.TestCase):
    """Test verify_platform aggregation logic."""

    def setUp(self):
        self.tmpdir = tempfile.mkdtemp()
        # Two real files whose hashes seed the lookup database.
        self.file_a = os.path.join(self.tmpdir, "a.bin")
        self.file_b = os.path.join(self.tmpdir, "b.bin")
        with open(self.file_a, "wb") as fh:
            fh.write(b"file a content")
        with open(self.file_b, "wb") as fh:
            fh.write(b"file b content")
        from common import compute_hashes
        hashes_a = compute_hashes(self.file_a)
        hashes_b = compute_hashes(self.file_b)
        self.sha1_a = hashes_a["sha1"]
        self.sha1_b = hashes_b["sha1"]
        self.md5_a = hashes_a["md5"]
        self.md5_b = hashes_b["md5"]
        # Minimal DB shape mirroring what the scanner produces.
        self.db = {
            "files": {
                self.sha1_a: {"path": self.file_a, "name": "a.bin", "md5": self.md5_a, "size": 14},
                self.sha1_b: {"path": self.file_b, "name": "b.bin", "md5": self.md5_b, "size": 14},
            },
            "indexes": {
                "by_md5": {
                    self.md5_a: self.sha1_a,
                    self.md5_b: self.sha1_b,
                },
                "by_name": {
                    "a.bin": [self.sha1_a],
                    "b.bin": [self.sha1_b],
                },
                "by_crc32": {},
            },
        }

    def tearDown(self):
        import shutil
        shutil.rmtree(self.tmpdir, ignore_errors=True)

    def _emulators_dir(self):
        # Empty emulators dir: enough for tests that don't exercise emulators.
        path = os.path.join(self.tmpdir, "emulators")
        os.makedirs(path, exist_ok=True)
        return path

    def test_all_ok_existence(self):
        config = {
            "platform": "TestPlatform",
            "verification_mode": "existence",
            "systems": {
                "sys1": {
                    "files": [
                        {"name": "a.bin", "sha1": self.sha1_a, "required": True},
                        {"name": "b.bin", "sha1": self.sha1_b, "required": False},
                    ]
                }
            },
        }
        result = verify_platform(config, self.db, self._emulators_dir())
        self.assertEqual(result["platform"], "TestPlatform")
        self.assertEqual(result["verification_mode"], "existence")
        self.assertEqual(result["total_files"], 2)
        self.assertEqual(result["severity_counts"][Severity.OK], 2)

    def test_worst_status_wins_per_destination(self):
        """Two entries for same destination: worst status wins."""
        config = {
            "platform": "Test",
            "verification_mode": "existence",
            "systems": {
                "sys1": {
                    "files": [
                        {"name": "a.bin", "sha1": self.sha1_a, "destination": "shared.bin", "required": True},
                    ]
                },
                "sys2": {
                    "files": [
                        {"name": "missing.bin", "sha1": "0" * 40, "destination": "shared.bin", "required": True},
                    ]
                },
            },
        }
        result = verify_platform(config, self.db, self._emulators_dir())
        # Both entries collapse onto shared.bin; the missing one dominates.
        self.assertEqual(result["total_files"], 1)
        # required + missing in existence mode = WARNING
        self.assertEqual(result["severity_counts"][Severity.WARNING], 1)

    def test_severity_counts_sum_to_total(self):
        config = {
            "platform": "Test",
            "verification_mode": "md5",
            "systems": {
                "sys1": {
                    "files": [
                        {"name": "a.bin", "sha1": self.sha1_a, "md5": self.md5_a, "required": True},
                        {"name": "missing.bin", "sha1": "0" * 40, "md5": "f" * 32, "required": True},
                    ]
                }
            },
        }
        result = verify_platform(config, self.db, self._emulators_dir())
        self.assertEqual(sum(result["severity_counts"].values()), result["total_files"])

    def test_required_field_in_details(self):
        config = {
            "platform": "Test",
            "verification_mode": "existence",
            "systems": {
                "sys1": {
                    "files": [
                        {"name": "a.bin", "sha1": self.sha1_a, "required": False},
                    ]
                }
            },
        }
        result = verify_platform(config, self.db, self._emulators_dir())
        self.assertFalse(result["details"][0]["required"])
# Allow running this test module directly (e.g. `python test_verify.py`).
if __name__ == "__main__":
    unittest.main()