mirror of
https://github.com/Abdess/retroarch_system.git
synced 2026-04-13 12:22:33 -05:00
fix: clone resolution in common.py, move clone map to root
Moved _mame_clones.json out of bios/ (it was being indexed by generate_db.py as a BIOS file). Clone resolution now lives in common.py's resolve_local_file, so all tools (verify, pack, cross_reference) resolve clones transparently. Removed the duplicate clone-resolution code from generate_pack.py and added error handling around os.remove in dedup.py. The consistency check now passes for Batocera/EmuDeck/Lakka/RetroArch (4/6 platforms).
This commit is contained in:
@@ -292,9 +292,38 @@ def resolve_local_file(
|
||||
if os.path.exists(path):
|
||||
return path, "zip_exact"
|
||||
|
||||
# MAME clone fallback: if a file was deduped, resolve via canonical
|
||||
clone_map = _get_mame_clone_map()
|
||||
canonical = clone_map.get(name)
|
||||
if canonical and canonical != name:
|
||||
canonical_entry = {"name": canonical}
|
||||
result = resolve_local_file(canonical_entry, db, zip_contents, dest_hint)
|
||||
if result[0]:
|
||||
return result[0], "mame_clone"
|
||||
|
||||
return None, "not_found"
|
||||
|
||||
|
||||
def _get_mame_clone_map() -> dict[str, str]:
    """Load and cache the MAME clone map (clone_name -> canonical_name).

    The map is read from ``_mame_clones.json`` two directories above this
    module (the repository root).  The JSON maps canonical name ->
    ``{"clones": [...]}``; the inverted clone -> canonical dict is built
    at most once and memoized as an attribute on this function.  A missing
    file yields (and caches) an empty mapping.
    """
    cache = getattr(_get_mame_clone_map, "_cache", None)
    if cache is None:
        cache = {}
        clone_path = os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            "_mame_clones.json",
        )
        if os.path.exists(clone_path):
            # Imported lazily so the module loads even without json users.
            import json as _json
            with open(clone_path) as f:
                data = _json.load(f)
            # Invert canonical -> clones into a flat clone -> canonical dict.
            for canonical, info in data.items():
                for clone in info.get("clones", []):
                    cache[clone] = canonical
        _get_mame_clone_map._cache = cache
    return cache
|
||||
|
||||
|
||||
def check_inside_zip(container: str, file_name: str, expected_md5: str) -> str:
|
||||
"""Check a ROM inside a ZIP — replicates Batocera checkInsideZip().
|
||||
|
||||
|
||||
@@ -170,7 +170,11 @@ def deduplicate(bios_dir: str, dry_run: bool = False) -> dict:
|
||||
if dry_run:
|
||||
print(f" WOULD REMOVE: {dup}")
|
||||
else:
|
||||
os.remove(dup)
|
||||
try:
|
||||
os.remove(dup)
|
||||
except OSError as e:
|
||||
print(f" WARNING: cannot remove {dup}: {e}")
|
||||
continue
|
||||
# Clean up empty .variants/ directories
|
||||
parent = os.path.dirname(dup)
|
||||
if os.path.basename(parent) == ".variants" and not os.listdir(parent):
|
||||
@@ -204,7 +208,7 @@ def deduplicate(bios_dir: str, dry_run: bool = False) -> dict:
|
||||
|
||||
# Write MAME clone mapping
|
||||
if mame_clones:
|
||||
clone_path = os.path.join(bios_dir, "_mame_clones.json")
|
||||
clone_path = "_mame_clones.json"
|
||||
if dry_run:
|
||||
print(f"\nWould write MAME clone map: {clone_path}")
|
||||
print(f" {len(mame_clones)} canonical ZIPs with "
|
||||
|
||||
@@ -100,31 +100,6 @@ def _sanitize_path(raw: str) -> str:
|
||||
return "/".join(parts)
|
||||
|
||||
|
||||
def _load_mame_clones(bios_dir: str) -> dict[str, str]:
    """Read ``_mame_clones.json`` under *bios_dir* and invert it.

    The file maps canonical ZIP name -> ``{"clones": [...]}``; the result
    maps each clone name back to its canonical name.  Returns an empty
    dict when the file does not exist.
    """
    mapping_file = os.path.join(bios_dir, "_mame_clones.json")
    if not os.path.exists(mapping_file):
        return {}
    with open(mapping_file) as handle:
        raw = json.load(handle)
    # Flatten canonical -> clones into clone_name -> canonical_name.
    return {
        clone: canonical
        for canonical, info in raw.items()
        for clone in info.get("clones", [])
    }
|
||||
|
||||
|
||||
# Process-wide cache for the inverted clone map; None means "not loaded yet".
_MAME_CLONE_MAP: dict[str, str] | None = None


def _get_mame_clone_map(bios_dir: str) -> dict[str, str]:
    """Return the clone map for *bios_dir*, loading it only on first use."""
    global _MAME_CLONE_MAP
    mapping = _MAME_CLONE_MAP
    if mapping is None:
        mapping = _load_mame_clones(bios_dir)
        _MAME_CLONE_MAP = mapping
    return mapping
|
||||
|
||||
|
||||
def resolve_file(file_entry: dict, db: dict, bios_dir: str,
|
||||
zip_contents: dict | None = None,
|
||||
dest_hint: str = "") -> tuple[str | None, str]:
|
||||
@@ -145,17 +120,8 @@ def resolve_file(file_entry: dict, db: dict, bios_dir: str,
|
||||
if path:
|
||||
return path, status
|
||||
|
||||
# MAME clone fallback: if the file was deduped, resolve via canonical
|
||||
name = file_entry.get("name", "")
|
||||
clone_map = _get_mame_clone_map(bios_dir)
|
||||
canonical = clone_map.get(name)
|
||||
if canonical:
|
||||
canonical_entry = {"name": canonical}
|
||||
cpath, cstatus = resolve_local_file(canonical_entry, db, zip_contents)
|
||||
if cpath:
|
||||
return cpath, "mame_clone"
|
||||
|
||||
# Last resort: large files from GitHub release assets
|
||||
name = file_entry.get("name", "")
|
||||
sha1 = file_entry.get("sha1")
|
||||
md5_raw = file_entry.get("md5", "")
|
||||
md5_list = [m.strip().lower() for m in md5_raw.split(",") if m.strip()] if md5_raw else []
|
||||
|
||||
Reference in New Issue
Block a user