Files
libretro/scripts/scraper/retrobat_scraper.py
Abdessamad Derraz 2466fc4a97 refactor: extract scraper_cli() to base_scraper.py (DRY)
Shared CLI boilerplate for all scrapers: argparse, dry-run, json, yaml output.
4 scrapers (libretro, batocera, retrobat, emudeck) reduced from ~58 lines
main() each to 3 lines calling scraper_cli().

~220 lines of duplicated boilerplate eliminated.
recalbox + coreinfo keep custom main() (extra flags: --full, --compare-db).
2026-03-18 08:17:14 +01:00

167 lines
4.9 KiB
Python

#!/usr/bin/env python3
"""Scraper for RetroBat batocera-systems.json.
Source: https://github.com/RetroBat-Official/emulatorlauncher
Format: JSON with system keys containing biosFiles arrays
Hash: MD5 primary
"""
from __future__ import annotations
import json
import sys
import urllib.request
import urllib.error
try:
from .base_scraper import BaseScraper, BiosRequirement, fetch_github_latest_version
except ImportError:
from base_scraper import BaseScraper, BiosRequirement, fetch_github_latest_version
# Platform identifier used by the scraper registry / CLI output.
PLATFORM_NAME = "retrobat"
# Raw manifest listing per-system BIOS files (RetroBat vendors Batocera's
# batocera-systems.json inside its emulatorlauncher repo).
SOURCE_URL = (
    "https://raw.githubusercontent.com/RetroBat-Official/emulatorlauncher/"
    "master/batocera-systems/Resources/batocera-systems.json"
)
# Repo queried for the latest release tag (the "version" field in the YAML).
GITHUB_REPO = "RetroBat-Official/retrobat"
class Scraper(BaseScraper):
    """Scraper for RetroBat's batocera-systems.json BIOS manifest.

    Downloads the JSON manifest, validates that it is a dict of system
    entries containing ``biosFiles`` arrays, and converts each listed file
    into a :class:`BiosRequirement` (MD5 as the primary hash). The raw
    response and the parsed JSON are cached per instance, so the network
    fetch and the JSON parse each happen at most once.
    """

    def __init__(self, url: str = SOURCE_URL):
        """Create a scraper reading from *url* (defaults to the official manifest)."""
        self.url = url
        self._raw_data: str | None = None  # cached HTTP response body
        self._parsed: dict | None = None   # cached json.loads() result

    def _fetch_raw(self) -> str:
        """Download and cache the manifest text.

        Raises:
            ConnectionError: if the HTTP request fails.
        """
        if self._raw_data is None:
            try:
                req = urllib.request.Request(
                    self.url, headers={"User-Agent": "retrobios-scraper/1.0"}
                )
                with urllib.request.urlopen(req, timeout=30) as resp:
                    self._raw_data = resp.read().decode("utf-8")
            except urllib.error.URLError as e:
                raise ConnectionError(f"Failed to fetch {self.url}: {e}") from e
        return self._raw_data

    def _parse_json(self) -> dict:
        """Parse and cache the manifest JSON.

        Raises:
            ValueError: if the payload is not valid JSON.
        """
        if self._parsed is None:
            raw = self._fetch_raw()
            try:
                self._parsed = json.loads(raw)
            except json.JSONDecodeError as e:
                raise ValueError(f"Failed to parse JSON: {e}") from e
        return self._parsed

    def fetch_requirements(self) -> list[BiosRequirement]:
        """Parse batocera-systems.json and return BIOS requirements.

        Raises:
            ValueError: if the payload is not valid JSON or contains no
                system entry with a ``biosFiles`` key.
        """
        # Parse once and validate the parsed structure directly; the old
        # flow ran json.loads() twice (once in validate_format on the raw
        # string, once in _parse_json).
        data = self._parse_json()
        if not isinstance(data, dict) or not any(
            isinstance(v, dict) and "biosFiles" in v for v in data.values()
        ):
            raise ValueError("batocera-systems.json format validation failed")
        requirements: list[BiosRequirement] = []
        for sys_key, sys_data in data.items():
            if not isinstance(sys_data, dict):
                continue
            bios_files = sys_data.get("biosFiles", [])
            if not isinstance(bios_files, list):
                continue
            for bios in bios_files:
                if not isinstance(bios, dict):
                    continue
                file_path = bios.get("file", "")
                if not file_path:
                    continue
                md5 = bios.get("md5", "")
                # Manifest paths are rooted at "bios/"; destinations are
                # stored relative to the BIOS directory.
                if file_path.startswith("bios/"):
                    file_path = file_path[len("bios/"):]
                requirements.append(BiosRequirement(
                    name=file_path.rsplit("/", 1)[-1],  # basename of the path
                    system=sys_key,
                    md5=md5 or None,  # empty hash -> None (no verification)
                    destination=file_path,
                    required=True,
                ))
        return requirements

    def validate_format(self, raw_data: str) -> bool:
        """Return True if *raw_data* is a JSON object where at least one
        system entry carries a ``biosFiles`` key."""
        try:
            data = json.loads(raw_data)
        except (json.JSONDecodeError, TypeError):
            return False
        if not isinstance(data, dict):
            return False
        return any(
            isinstance(sys_data, dict) and "biosFiles" in sys_data
            for sys_data in data.values()
        )

    def generate_platform_yaml(self) -> dict:
        """Generate a platform YAML config dict from scraped data.

        Groups requirements by system under ``systems`` and stamps the
        latest GitHub release tag as ``version`` ("" when unavailable).
        """
        systems: dict[str, dict] = {}
        for req in self.fetch_requirements():
            entry = {
                "name": req.name,
                "destination": req.destination,
                "required": req.required,
            }
            if req.md5:
                entry["md5"] = req.md5
            systems.setdefault(req.system, {"files": []})["files"].append(entry)
        return {
            "platform": "RetroBat",
            "version": fetch_github_latest_version(GITHUB_REPO) or "",
            "homepage": "https://www.retrobat.org",
            "source": SOURCE_URL,
            "base_destination": "bios",
            "hash_type": "md5",
            "verification_mode": "md5",
            "systems": systems,
        }
def main():
    """CLI entry point: delegate to the shared scraper_cli() helper."""
    # Mirror the module-level import fallback (relative when imported as a
    # package, plain when run as a loose script) instead of the hard-coded
    # "scripts.scraper" path, which breaks when the script is executed
    # directly from its own directory.
    try:
        from .base_scraper import scraper_cli
    except ImportError:
        from base_scraper import scraper_cli
    scraper_cli(Scraper, "Scrape retrobat BIOS requirements")


if __name__ == "__main__":
    main()