mirror of
https://github.com/Abdess/retroarch_system.git
synced 2026-04-13 12:22:33 -05:00
feat: add exporters for lakka, retropie, emudeck, retrodeck, romm
This commit is contained in:
@@ -19,6 +19,9 @@ OUTPUT_FILENAMES: dict[str, str] = {
|
||||
"batocera": "batocera-systems",
|
||||
"recalbox": "es_bios.xml",
|
||||
"retrobat": "batocera-systems.json",
|
||||
"emudeck": "checkBIOS.sh",
|
||||
"retrodeck": "component_manifest.json",
|
||||
"romm": "known_bios_files.json",
|
||||
}
|
||||
|
||||
|
||||
|
||||
201
scripts/exporter/emudeck_exporter.py
Normal file
201
scripts/exporter/emudeck_exporter.py
Normal file
@@ -0,0 +1,201 @@
|
||||
"""Exporter for EmuDeck checkBIOS.sh format.
|
||||
|
||||
Produces a bash script compatible with EmuDeck's checkBIOS.sh,
|
||||
containing MD5 hash arrays and per-system check functions.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
from .base_exporter import BaseExporter
|
||||
|
||||
# System slug -> (bash array name, check function name)
|
||||
_SYSTEM_BASH_MAP: dict[str, tuple[str, str]] = {
|
||||
"sony-playstation": ("PSBios", "checkPS1BIOS"),
|
||||
"sony-playstation-2": ("PS2Bios", "checkPS2BIOS"),
|
||||
"sega-mega-cd": ("CDBios", "checkSegaCDBios"),
|
||||
"sega-saturn": ("SaturnBios", "checkSaturnBios"),
|
||||
"sega-dreamcast": ("DCBios", "checkDreamcastBios"),
|
||||
"nintendo-ds": ("DSBios", "checkDSBios"),
|
||||
}
|
||||
|
||||
|
||||
def _slug_to_bash_name(slug: str) -> str:
|
||||
"""Convert a system slug to a CamelCase bash identifier."""
|
||||
parts = slug.split("-")
|
||||
return "".join(p.capitalize() for p in parts) + "Bios"
|
||||
|
||||
|
||||
def _slug_to_func_name(slug: str) -> str:
|
||||
"""Convert a system slug to a check function name."""
|
||||
parts = slug.split("-")
|
||||
return "check" + "".join(p.capitalize() for p in parts) + "Bios"
|
||||
|
||||
|
||||
def _collect_md5s(files: list[dict]) -> list[str]:
|
||||
"""Extract unique MD5 hashes from file entries."""
|
||||
hashes: list[str] = []
|
||||
seen: set[str] = set()
|
||||
for fe in files:
|
||||
md5 = fe.get("md5", "")
|
||||
if isinstance(md5, list):
|
||||
for h in md5:
|
||||
h_lower = h.lower()
|
||||
if h_lower and h_lower not in seen:
|
||||
seen.add(h_lower)
|
||||
hashes.append(h_lower)
|
||||
elif md5:
|
||||
h_lower = md5.lower()
|
||||
if h_lower not in seen:
|
||||
seen.add(h_lower)
|
||||
hashes.append(h_lower)
|
||||
return hashes
|
||||
|
||||
|
||||
class Exporter(BaseExporter):
    """Export truth data to EmuDeck checkBIOS.sh format."""

    @staticmethod
    def platform_name() -> str:
        return "emudeck"

    def export(
        self,
        truth_data: dict,
        output_path: str,
        scraped_data: dict | None = None,
    ) -> None:
        """Write an EmuDeck-compatible checkBIOS.sh to *output_path*.

        Emits one MD5 hash array per system with known hashes, one
        check function per system (hash-scan when hashes exist,
        file-existence otherwise), and a setBIOSstatus aggregator.
        *scraped_data* is accepted for interface parity but unused.
        """
        systems = truth_data.get("systems", {})

        # Collect per-system hash arrays and file lists, skipping
        # placeholder ("_"-prefixed) and glob-pattern entries.
        sys_hashes: dict[str, list[str]] = {}
        sys_files: dict[str, list[dict]] = {}
        for sys_id in sorted(systems):
            files = systems[sys_id].get("files", [])
            valid_files = [
                f for f in files
                if not f.get("name", "").startswith("_")
                and not self._is_pattern(f.get("name", ""))
            ]
            if not valid_files:
                continue
            sys_files[sys_id] = valid_files
            sys_hashes[sys_id] = _collect_md5s(valid_files)

        lines: list[str] = [
            "#!/bin/bash",
            "# EmuDeck BIOS check script",
            "# Generated from retrobios truth data",
            "",
        ]

        # Emit hash arrays for systems that have MD5s.
        for sys_id in sorted(sys_hashes):
            hashes = sys_hashes[sys_id]
            if not hashes:
                continue
            array_name, _ = _SYSTEM_BASH_MAP.get(
                sys_id, (_slug_to_bash_name(sys_id), ""),
            )
            lines.append(f"{array_name}=({' '.join(hashes)})")
            lines.append("")

        # Emit check functions.
        for sys_id in sorted(sys_files):
            hashes = sys_hashes.get(sys_id, [])
            _, func_name = _SYSTEM_BASH_MAP.get(
                sys_id, ("", _slug_to_func_name(sys_id)),
            )

            lines.append(f"{func_name}(){{")

            if hashes:
                array_name, _ = _SYSTEM_BASH_MAP.get(
                    sys_id, (_slug_to_bash_name(sys_id), ""),
                )
                # BUGFIX: previously emitted 'localRONE="NULL"' (missing
                # space), which defined a variable named "localRONE" and
                # left RONE unset, so the final unquoted test
                # '[ $RONE == true ]' expanded to '[ == true ]' and
                # errored in bash. Declare RONE properly and quote its
                # expansion below.
                lines.append(' local RONE="NULL"')
                lines.append(' for entry in "$biosPath/"*')
                lines.append(" do")
                lines.append(' if [ -f "$entry" ]; then')
                lines.append(' md5=($(md5sum "$entry"))')
                lines.append(
                    f' for hash in "${{{array_name}[@]}}"; do',
                )
                lines.append(
                    ' if [[ "$md5" == *"${hash}"* ]]; then',
                )
                lines.append(' RONE=true')
                lines.append(" fi")
                lines.append(" done")
                lines.append(" fi")
                lines.append(" done")
                lines.append(' if [ "$RONE" == true ]; then')
                lines.append(' echo "true"')
                lines.append(" else")
                lines.append(' echo "false"')
                lines.append(" fi")
            else:
                # No MD5 hashes — fall back to checking file existence.
                for fe in sys_files[sys_id]:
                    dest = fe.get("destination", fe.get("name", ""))
                    if dest:
                        lines.append(
                            f' if [ -f "$biosPath/{dest}" ]; then',
                        )
                        lines.append(' echo "true"')
                        lines.append(" return")
                        lines.append(" fi")
                lines.append(' echo "false"')

            lines.append("}")
            lines.append("")

        # Emit setBIOSstatus aggregator: captures each check function's
        # "true"/"false" output into a per-system variable.
        lines.append("setBIOSstatus(){")
        for sys_id in sorted(sys_files):
            _, func_name = _SYSTEM_BASH_MAP.get(
                sys_id, ("", _slug_to_func_name(sys_id)),
            )
            # checkFooBios -> Foo_bios (and checkPS1BIOS -> PS1_bios).
            var = re.sub(r"^check", "", func_name)
            var = re.sub(r"Bios$", "BIOS", var)
            var = re.sub(r"BIOS$", "_bios", var)
            lines.append(f" {var}=$({func_name})")
        lines.append("}")
        lines.append("")

        Path(output_path).write_text("\n".join(lines), encoding="utf-8")

    def validate(self, truth_data: dict, output_path: str) -> list[str]:
        """Return human-readable issues for truth hashes or check
        functions that did not make it into the generated script."""
        content = Path(output_path).read_text(encoding="utf-8")
        issues: list[str] = []

        for sys_id, sys_data in truth_data.get("systems", {}).items():
            files = sys_data.get("files", [])
            valid_files = [
                f for f in files
                if not f.get("name", "").startswith("_")
                and not self._is_pattern(f.get("name", ""))
            ]
            if not valid_files:
                continue

            # Check that MD5 hashes appear in the output.
            for fe in valid_files:
                md5 = fe.get("md5", "")
                if isinstance(md5, list):
                    for h in md5:
                        if h and h.lower() not in content:
                            issues.append(f"missing hash: {h} ({sys_id})")
                elif md5 and md5.lower() not in content:
                    issues.append(f"missing hash: {md5} ({sys_id})")

            # Check that a check function exists for this system.
            _, func_name = _SYSTEM_BASH_MAP.get(
                sys_id, ("", _slug_to_func_name(sys_id)),
            )
            if func_name not in content:
                issues.append(f"missing function: {func_name} ({sys_id})")

        return issues
|
||||
17
scripts/exporter/lakka_exporter.py
Normal file
17
scripts/exporter/lakka_exporter.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""Exporter for Lakka (System.dat format, same as RetroArch).
|
||||
|
||||
Lakka inherits RetroArch cores and uses the same System.dat format.
|
||||
Delegates to systemdat_exporter for export and validation.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from .systemdat_exporter import Exporter as SystemDatExporter
|
||||
|
||||
|
||||
class Exporter(SystemDatExporter):
    """Lakka exporter.

    Lakka bundles RetroArch cores unchanged, so export and validation
    are inherited verbatim from the System.dat exporter; only the
    platform name differs.
    """

    @staticmethod
    def platform_name() -> str:
        return "lakka"
|
||||
200
scripts/exporter/retrodeck_exporter.py
Normal file
200
scripts/exporter/retrodeck_exporter.py
Normal file
@@ -0,0 +1,200 @@
|
||||
"""Exporter for RetroDECK component_manifest.json format.
|
||||
|
||||
Produces a JSON file compatible with RetroDECK's component manifests.
|
||||
Each system maps to a component with BIOS entries containing filename,
|
||||
md5 (comma-separated if multiple), paths ($bios_path default), and
|
||||
required status.
|
||||
|
||||
Path tokens: $bios_path for bios/, $roms_path for roms/.
|
||||
Entries without an explicit path default to $bios_path.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import re
|
||||
from collections import OrderedDict
|
||||
from pathlib import Path
|
||||
|
||||
from .base_exporter import BaseExporter
|
||||
|
||||
# retrobios slug -> RetroDECK system ID (reverse of scraper SYSTEM_SLUG_MAP)
|
||||
_REVERSE_SLUG: dict[str, str] = {
|
||||
"nintendo-nes": "nes",
|
||||
"nintendo-snes": "snes",
|
||||
"nintendo-64": "n64",
|
||||
"nintendo-64dd": "n64dd",
|
||||
"nintendo-gamecube": "gc",
|
||||
"nintendo-wii": "wii",
|
||||
"nintendo-wii-u": "wiiu",
|
||||
"nintendo-switch": "switch",
|
||||
"nintendo-gb": "gb",
|
||||
"nintendo-gbc": "gbc",
|
||||
"nintendo-gba": "gba",
|
||||
"nintendo-ds": "nds",
|
||||
"nintendo-3ds": "3ds",
|
||||
"nintendo-fds": "fds",
|
||||
"nintendo-sgb": "sgb",
|
||||
"nintendo-virtual-boy": "virtualboy",
|
||||
"nintendo-pokemon-mini": "pokemini",
|
||||
"sony-playstation": "psx",
|
||||
"sony-playstation-2": "ps2",
|
||||
"sony-playstation-3": "ps3",
|
||||
"sony-psp": "psp",
|
||||
"sony-psvita": "psvita",
|
||||
"sega-mega-drive": "megadrive",
|
||||
"sega-mega-cd": "megacd",
|
||||
"sega-saturn": "saturn",
|
||||
"sega-dreamcast": "dreamcast",
|
||||
"sega-dreamcast-arcade": "naomi",
|
||||
"sega-game-gear": "gamegear",
|
||||
"sega-master-system": "mastersystem",
|
||||
"nec-pc-engine": "pcengine",
|
||||
"nec-pc-fx": "pcfx",
|
||||
"nec-pc-98": "pc98",
|
||||
"nec-pc-88": "pc88",
|
||||
"3do": "3do",
|
||||
"amstrad-cpc": "amstradcpc",
|
||||
"arcade": "arcade",
|
||||
"atari-400-800": "atari800",
|
||||
"atari-5200": "atari5200",
|
||||
"atari-7800": "atari7800",
|
||||
"atari-jaguar": "atarijaguar",
|
||||
"atari-lynx": "atarilynx",
|
||||
"atari-st": "atarist",
|
||||
"commodore-c64": "c64",
|
||||
"commodore-amiga": "amiga",
|
||||
"philips-cdi": "cdimono1",
|
||||
"fairchild-channel-f": "channelf",
|
||||
"coleco-colecovision": "colecovision",
|
||||
"mattel-intellivision": "intellivision",
|
||||
"microsoft-msx": "msx",
|
||||
"microsoft-xbox": "xbox",
|
||||
"doom": "doom",
|
||||
"j2me": "j2me",
|
||||
"apple-macintosh-ii": "macintosh",
|
||||
"apple-ii": "apple2",
|
||||
"apple-iigs": "apple2gs",
|
||||
"enterprise-64-128": "enterprise",
|
||||
"tiger-game-com": "gamecom",
|
||||
"hartung-game-master": "gmaster",
|
||||
"epoch-scv": "scv",
|
||||
"watara-supervision": "supervision",
|
||||
"bandai-wonderswan": "wonderswan",
|
||||
"snk-neogeo-cd": "neogeocd",
|
||||
"tandy-coco": "coco",
|
||||
"tandy-trs-80": "trs80",
|
||||
"dragon-32-64": "dragon",
|
||||
"pico8": "pico8",
|
||||
"wolfenstein-3d": "wolfenstein",
|
||||
"sinclair-zx-spectrum": "zxspectrum",
|
||||
}
|
||||
|
||||
|
||||
def _dest_to_path_token(destination: str) -> str:
|
||||
"""Convert a truth destination path to a RetroDECK path token."""
|
||||
if destination.startswith("roms/"):
|
||||
return "$roms_path/" + destination.removeprefix("roms/")
|
||||
if destination.startswith("bios/"):
|
||||
return "$bios_path/" + destination.removeprefix("bios/")
|
||||
# Default: bios path
|
||||
return "$bios_path/" + destination
|
||||
|
||||
|
||||
class Exporter(BaseExporter):
    """Export truth data to RetroDECK component_manifest.json format."""

    @staticmethod
    def platform_name() -> str:
        return "retrodeck"

    def export(
        self,
        truth_data: dict,
        output_path: str,
        scraped_data: dict | None = None,
    ) -> None:
        """Write a RetroDECK component manifest built from *truth_data*.

        Native system IDs found in *scraped_data* take precedence over
        the static _REVERSE_SLUG mapping; the slug itself is the final
        fallback.
        """
        native_map: dict[str, str] = {}
        if scraped_data:
            for sys_id, sys_data in scraped_data.get("systems", {}).items():
                nid = sys_data.get("native_id")
                if nid:
                    native_map[sys_id] = nid

        manifest: OrderedDict[str, dict] = OrderedDict()

        systems = truth_data.get("systems", {})
        for sys_id in sorted(systems):
            sys_data = systems[sys_id]
            files = sys_data.get("files", [])
            if not files:
                continue

            native_id = native_map.get(sys_id, _REVERSE_SLUG.get(sys_id, sys_id))

            bios_entries: list[OrderedDict] = []
            for fe in files:
                name = fe.get("name", "")
                # Skip placeholder ("_"-prefixed) and glob-pattern entries.
                if name.startswith("_") or self._is_pattern(name):
                    continue

                dest = fe.get("destination", name)
                path_token = _dest_to_path_token(dest)

                md5 = fe.get("md5", "")
                if isinstance(md5, list):
                    md5 = ",".join(m for m in md5 if m)

                entry: OrderedDict[str, object] = OrderedDict()
                entry["filename"] = name
                if md5:
                    # Keep only well-formed 32-hex-digit MD5 values.
                    # BUGFIX: the hex check previously ran against the
                    # raw (possibly uppercase) string while accepting
                    # only lowercase digits, silently dropping valid
                    # uppercase hashes — lowercase first, then validate.
                    parts = [
                        m
                        for m in (
                            p.strip().lower() for p in str(md5).split(",")
                        )
                        if re.fullmatch(r"[0-9a-f]{32}", m)
                    ]
                    if parts:
                        # ",".join is identity for a single element, so
                        # no length check is needed.
                        entry["md5"] = ",".join(parts)
                entry["paths"] = path_token
                entry["required"] = fe.get("required", True)
                entry["system"] = native_id

                bios_entries.append(entry)

            if bios_entries:
                component = OrderedDict()
                component["system"] = native_id
                component["bios"] = bios_entries
                manifest[native_id] = component

        Path(output_path).write_text(
            json.dumps(manifest, indent=2, ensure_ascii=False) + "\n",
            encoding="utf-8",
        )

    def validate(self, truth_data: dict, output_path: str) -> list[str]:
        """Return "missing: <name>" issues for truth files absent from
        the exported manifest."""
        data = json.loads(Path(output_path).read_text(encoding="utf-8"))

        exported_names: set[str] = set()
        for comp_data in data.values():
            bios = comp_data.get("bios", [])
            if isinstance(bios, list):
                for entry in bios:
                    fn = entry.get("filename", "")
                    if fn:
                        exported_names.add(fn)

        issues: list[str] = []
        for sys_data in truth_data.get("systems", {}).values():
            for fe in sys_data.get("files", []):
                name = fe.get("name", "")
                if name.startswith("_") or self._is_pattern(name):
                    continue
                if name not in exported_names:
                    issues.append(f"missing: {name}")
        return issues
|
||||
17
scripts/exporter/retropie_exporter.py
Normal file
17
scripts/exporter/retropie_exporter.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""Exporter for RetroPie (System.dat format, same as RetroArch).
|
||||
|
||||
RetroPie inherits RetroArch cores and uses the same System.dat format.
|
||||
Delegates to systemdat_exporter for export and validation.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from .systemdat_exporter import Exporter as SystemDatExporter
|
||||
|
||||
|
||||
class Exporter(SystemDatExporter):
    """RetroPie exporter.

    RetroPie ships RetroArch cores unchanged, so export and validation
    are inherited verbatim from the System.dat exporter; only the
    platform name differs.
    """

    @staticmethod
    def platform_name() -> str:
        return "retropie"
|
||||
160
scripts/exporter/romm_exporter.py
Normal file
160
scripts/exporter/romm_exporter.py
Normal file
@@ -0,0 +1,160 @@
|
||||
"""Exporter for RomM known_bios_files.json format.
|
||||
|
||||
Produces JSON matching the exact format of
|
||||
rommapp/romm/backend/models/fixtures/known_bios_files.json:
|
||||
- Keys are "igdb_slug:filename"
|
||||
- Values contain size, crc, md5, sha1 (all optional but at least one hash)
|
||||
- Hashes are lowercase hex strings
|
||||
- Size is an integer
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from collections import OrderedDict
|
||||
from pathlib import Path
|
||||
|
||||
from .base_exporter import BaseExporter
|
||||
|
||||
# retrobios slug -> IGDB slug (reverse of scraper SLUG_MAP)
|
||||
_REVERSE_SLUG: dict[str, str] = {
|
||||
"3do": "3do",
|
||||
"nintendo-64dd": "64dd",
|
||||
"amstrad-cpc": "acpc",
|
||||
"commodore-amiga": "amiga",
|
||||
"arcade": "arcade",
|
||||
"atari-st": "atari-st",
|
||||
"atari-5200": "atari5200",
|
||||
"atari-7800": "atari7800",
|
||||
"atari-400-800": "atari8bit",
|
||||
"coleco-colecovision": "colecovision",
|
||||
"sega-dreamcast": "dc",
|
||||
"doom": "doom",
|
||||
"enterprise-64-128": "enterprise",
|
||||
"fairchild-channel-f": "fairchild-channel-f",
|
||||
"nintendo-fds": "fds",
|
||||
"sega-game-gear": "gamegear",
|
||||
"nintendo-gb": "gb",
|
||||
"nintendo-gba": "gba",
|
||||
"nintendo-gbc": "gbc",
|
||||
"sega-mega-drive": "genesis",
|
||||
"mattel-intellivision": "intellivision",
|
||||
"j2me": "j2me",
|
||||
"atari-lynx": "lynx",
|
||||
"apple-macintosh-ii": "mac",
|
||||
"microsoft-msx": "msx",
|
||||
"nintendo-ds": "nds",
|
||||
"snk-neogeo-cd": "neo-geo-cd",
|
||||
"nintendo-nes": "nes",
|
||||
"nintendo-gamecube": "ngc",
|
||||
"magnavox-odyssey2": "odyssey-2-slash-videopac-g7000",
|
||||
"nec-pc-98": "pc-9800-series",
|
||||
"nec-pc-fx": "pc-fx",
|
||||
"nintendo-pokemon-mini": "pokemon-mini",
|
||||
"sony-playstation-2": "ps2",
|
||||
"sony-psp": "psp",
|
||||
"sony-playstation": "psx",
|
||||
"nintendo-satellaview": "satellaview",
|
||||
"sega-saturn": "saturn",
|
||||
"scummvm": "scummvm",
|
||||
"sega-mega-cd": "segacd",
|
||||
"sharp-x68000": "sharp-x68000",
|
||||
"sega-master-system": "sms",
|
||||
"nintendo-snes": "snes",
|
||||
"nintendo-sufami-turbo": "sufami-turbo",
|
||||
"nintendo-sgb": "super-gb",
|
||||
"nec-pc-engine": "tg16",
|
||||
"videoton-tvc": "tvc",
|
||||
"philips-videopac": "videopac-g7400",
|
||||
"wolfenstein-3d": "wolfenstein",
|
||||
"sharp-x1": "x1",
|
||||
"microsoft-xbox": "xbox",
|
||||
"sinclair-zx-spectrum": "zxs",
|
||||
}
|
||||
|
||||
|
||||
class Exporter(BaseExporter):
    """Export truth data to RomM known_bios_files.json format."""

    @staticmethod
    def platform_name() -> str:
        return "romm"

    def export(
        self,
        truth_data: dict,
        output_path: str,
        scraped_data: dict | None = None,
    ) -> None:
        """Write RomM's known_bios_files.json from *truth_data*.

        Keys are "igdb_slug:filename"; native IDs from *scraped_data*
        take precedence over the static _REVERSE_SLUG mapping.
        """

        def _single(value):
            # Hash fields may be a list; RomM stores one value per kind.
            if isinstance(value, list):
                return value[0] if value else ""
            return value

        overrides: dict[str, str] = {}
        if scraped_data:
            for slug, info in scraped_data.get("systems", {}).items():
                native = info.get("native_id")
                if native:
                    overrides[slug] = native

        result: OrderedDict[str, dict] = OrderedDict()

        all_systems = truth_data.get("systems", {})
        for slug in sorted(all_systems):
            entries = all_systems[slug].get("files", [])
            if not entries:
                continue

            igdb_slug = overrides.get(slug, _REVERSE_SLUG.get(slug, slug))

            for fe in entries:
                filename = fe.get("name", "")
                # Skip placeholder ("_"-prefixed) and pattern entries.
                if filename.startswith("_") or self._is_pattern(filename):
                    continue

                record: OrderedDict[str, object] = OrderedDict()

                size = fe.get("size")
                if size is not None:
                    record["size"] = int(size)

                crc = fe.get("crc32", "")
                if crc:
                    record["crc"] = str(crc).strip().lower()

                md5 = _single(fe.get("md5", ""))
                if md5:
                    record["md5"] = str(md5).strip().lower()

                sha1 = _single(fe.get("sha1", ""))
                if sha1:
                    record["sha1"] = str(sha1).strip().lower()

                result[f"{igdb_slug}:{filename}"] = record

        Path(output_path).write_text(
            json.dumps(result, indent=2, ensure_ascii=False) + "\n",
            encoding="utf-8",
        )

    def validate(self, truth_data: dict, output_path: str) -> list[str]:
        """Return "missing: <name>" issues for truth files absent from
        the exported JSON."""
        exported = json.loads(Path(output_path).read_text(encoding="utf-8"))

        seen: set[str] = set()
        for key in exported:
            if ":" in key:
                seen.add(key.split(":", 1)[1])

        problems: list[str] = []
        for sys_data in truth_data.get("systems", {}).values():
            for fe in sys_data.get("files", []):
                filename = fe.get("name", "")
                if filename.startswith("_") or self._is_pattern(filename):
                    continue
                if filename not in seen:
                    problems.append(f"missing: {filename}")
        return problems
|
||||
Reference in New Issue
Block a user