mirror of
https://github.com/Abdess/retroarch_system.git
synced 2026-04-13 12:22:33 -05:00
refactor: skip writing generated files when content unchanged
`write_if_changed` in common.py compares content after stripping timestamps (generated_at, "Auto-generated on", "Generated on"). Applied to generate_db, generate_readme, and generate_site. Eliminates timestamp-only diffs in database.json, README.md, mkdocs.yml, and 423 docs pages.
This commit is contained in:
@@ -968,6 +968,39 @@ def expand_platform_declared_names(config: dict, db: dict) -> set[str]:
|
|||||||
return declared
|
return declared
|
||||||
|
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
_TIMESTAMP_PATTERNS = [
|
||||||
|
re.compile(r'"generated_at":\s*"[^"]*"'), # database.json
|
||||||
|
re.compile(r'\*Auto-generated on [^*]*\*'), # README.md
|
||||||
|
re.compile(r'\*Generated on [^*]*\*'), # docs site pages
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def write_if_changed(path: str, content: str) -> bool:
|
||||||
|
"""Write content to path only if the non-timestamp content differs.
|
||||||
|
|
||||||
|
Compares new and existing content after stripping timestamp lines.
|
||||||
|
Returns True if the file was written, False if skipped (unchanged).
|
||||||
|
"""
|
||||||
|
if os.path.exists(path):
|
||||||
|
with open(path) as f:
|
||||||
|
existing = f.read()
|
||||||
|
if _strip_timestamps(existing) == _strip_timestamps(content):
|
||||||
|
return False
|
||||||
|
with open(path, "w") as f:
|
||||||
|
f.write(content)
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _strip_timestamps(text: str) -> str:
|
||||||
|
"""Remove known timestamp patterns for content comparison."""
|
||||||
|
result = text
|
||||||
|
for pattern in _TIMESTAMP_PATTERNS:
|
||||||
|
result = pattern.sub("", result)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
# Validation and mode filtering — extracted to validation.py for separation of concerns.
|
# Validation and mode filtering — extracted to validation.py for separation of concerns.
|
||||||
# Re-exported below for backward compatibility.
|
# Re-exported below for backward compatibility.
|
||||||
|
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ from datetime import datetime, timezone
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(__file__))
|
sys.path.insert(0, os.path.dirname(__file__))
|
||||||
from common import compute_hashes, list_registered_platforms
|
from common import compute_hashes, list_registered_platforms, write_if_changed
|
||||||
|
|
||||||
CACHE_DIR = ".cache"
|
CACHE_DIR = ".cache"
|
||||||
CACHE_FILE = os.path.join(CACHE_DIR, "db_cache.json")
|
CACHE_FILE = os.path.join(CACHE_DIR, "db_cache.json")
|
||||||
@@ -315,14 +315,15 @@ def main():
|
|||||||
"indexes": indexes,
|
"indexes": indexes,
|
||||||
}
|
}
|
||||||
|
|
||||||
with open(args.output, "w") as f:
|
new_content = json.dumps(database, indent=2)
|
||||||
json.dump(database, f, indent=2)
|
written = write_if_changed(args.output, new_content)
|
||||||
|
|
||||||
save_cache(CACHE_FILE, new_cache)
|
save_cache(CACHE_FILE, new_cache)
|
||||||
|
|
||||||
alias_count = sum(len(v) for v in aliases.values())
|
alias_count = sum(len(v) for v in aliases.values())
|
||||||
name_count = len(indexes["by_name"])
|
name_count = len(indexes["by_name"])
|
||||||
print(f"Generated {args.output}: {len(files)} files, {total_size:,} bytes total")
|
status = "Generated" if written else "Unchanged"
|
||||||
|
print(f"{status} {args.output}: {len(files)} files, {total_size:,} bytes total")
|
||||||
print(f" Name index: {name_count} names ({alias_count} aliases)")
|
print(f" Name index: {name_count} names ({alias_count} aliases)")
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ from datetime import datetime, timezone
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(__file__))
|
sys.path.insert(0, os.path.dirname(__file__))
|
||||||
from common import list_registered_platforms, load_database, load_platform_config
|
from common import list_registered_platforms, load_database, load_platform_config, write_if_changed
|
||||||
from verify import verify_platform
|
from verify import verify_platform
|
||||||
|
|
||||||
def compute_coverage(platform_name: str, platforms_dir: str, db: dict) -> dict:
|
def compute_coverage(platform_name: str, platforms_dir: str, db: dict) -> dict:
|
||||||
@@ -316,14 +316,12 @@ def main():
|
|||||||
db = load_database(args.db)
|
db = load_database(args.db)
|
||||||
|
|
||||||
readme = generate_readme(db, args.platforms_dir)
|
readme = generate_readme(db, args.platforms_dir)
|
||||||
with open("README.md", "w") as f:
|
status = "Generated" if write_if_changed("README.md", readme) else "Unchanged"
|
||||||
f.write(readme)
|
print(f"{status} ./README.md")
|
||||||
print(f"Generated ./README.md")
|
|
||||||
|
|
||||||
contributing = generate_contributing()
|
contributing = generate_contributing()
|
||||||
with open("CONTRIBUTING.md", "w") as f:
|
status = "Generated" if write_if_changed("CONTRIBUTING.md", contributing) else "Unchanged"
|
||||||
f.write(contributing)
|
print(f"{status} ./CONTRIBUTING.md")
|
||||||
print(f"Generated ./CONTRIBUTING.md")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ from datetime import datetime, timezone
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
sys.path.insert(0, os.path.dirname(__file__))
|
sys.path.insert(0, os.path.dirname(__file__))
|
||||||
from common import list_registered_platforms, load_database, load_emulator_profiles, load_platform_config, require_yaml
|
from common import list_registered_platforms, load_database, load_emulator_profiles, load_platform_config, require_yaml, write_if_changed
|
||||||
|
|
||||||
yaml = require_yaml()
|
yaml = require_yaml()
|
||||||
from generate_readme import compute_coverage
|
from generate_readme import compute_coverage
|
||||||
@@ -2064,7 +2064,7 @@ def main():
|
|||||||
|
|
||||||
# Generate home
|
# Generate home
|
||||||
print("Generating home page...")
|
print("Generating home page...")
|
||||||
(docs / "index.md").write_text(generate_home(db, coverages, profiles, registry))
|
write_if_changed(str(docs / "index.md"), generate_home(db, coverages, profiles, registry))
|
||||||
|
|
||||||
# Build system_id -> manufacturer page map (needed by all generators)
|
# Build system_id -> manufacturer page map (needed by all generators)
|
||||||
print("Building system cross-reference map...")
|
print("Building system cross-reference map...")
|
||||||
@@ -2074,37 +2074,35 @@ def main():
|
|||||||
|
|
||||||
# Generate platform pages
|
# Generate platform pages
|
||||||
print("Generating platform pages...")
|
print("Generating platform pages...")
|
||||||
(docs / "platforms" / "index.md").write_text(generate_platform_index(coverages))
|
write_if_changed(str(docs / "platforms" / "index.md"), generate_platform_index(coverages))
|
||||||
for name, cov in coverages.items():
|
for name, cov in coverages.items():
|
||||||
(docs / "platforms" / f"{name}.md").write_text(generate_platform_page(name, cov, registry, emulator_files))
|
write_if_changed(str(docs / "platforms" / f"{name}.md"), generate_platform_page(name, cov, registry, emulator_files))
|
||||||
|
|
||||||
# Generate system pages
|
# Generate system pages
|
||||||
print("Generating system pages...")
|
print("Generating system pages...")
|
||||||
|
|
||||||
(docs / "systems" / "index.md").write_text(generate_systems_index(manufacturers))
|
write_if_changed(str(docs / "systems" / "index.md"), generate_systems_index(manufacturers))
|
||||||
for mfr, consoles in manufacturers.items():
|
for mfr, consoles in manufacturers.items():
|
||||||
slug = mfr.lower().replace(" ", "-")
|
slug = mfr.lower().replace(" ", "-")
|
||||||
page = generate_system_page(mfr, consoles, platform_files, emulator_files)
|
page = generate_system_page(mfr, consoles, platform_files, emulator_files)
|
||||||
(docs / "systems" / f"{slug}.md").write_text(page)
|
write_if_changed(str(docs / "systems" / f"{slug}.md"), page)
|
||||||
|
|
||||||
# Generate emulator pages
|
# Generate emulator pages
|
||||||
print("Generating emulator pages...")
|
print("Generating emulator pages...")
|
||||||
(docs / "emulators" / "index.md").write_text(generate_emulators_index(profiles))
|
write_if_changed(str(docs / "emulators" / "index.md"), generate_emulators_index(profiles))
|
||||||
for name, profile in profiles.items():
|
for name, profile in profiles.items():
|
||||||
page = generate_emulator_page(name, profile, db, platform_files)
|
page = generate_emulator_page(name, profile, db, platform_files)
|
||||||
(docs / "emulators" / f"{name}.md").write_text(page)
|
write_if_changed(str(docs / "emulators" / f"{name}.md"), page)
|
||||||
|
|
||||||
# Generate cross-reference page
|
# Generate cross-reference page
|
||||||
print("Generating cross-reference page...")
|
print("Generating cross-reference page...")
|
||||||
(docs / "cross-reference.md").write_text(
|
write_if_changed(str(docs / "cross-reference.md"),
|
||||||
generate_cross_reference(coverages, profiles)
|
generate_cross_reference(coverages, profiles))
|
||||||
)
|
|
||||||
|
|
||||||
# Generate gap analysis page
|
# Generate gap analysis page
|
||||||
print("Generating gap analysis page...")
|
print("Generating gap analysis page...")
|
||||||
(docs / "gaps.md").write_text(
|
write_if_changed(str(docs / "gaps.md"),
|
||||||
generate_gap_analysis(profiles, coverages, db)
|
generate_gap_analysis(profiles, coverages, db))
|
||||||
)
|
|
||||||
|
|
||||||
# Wiki pages: copy manually maintained sources + generate dynamic ones
|
# Wiki pages: copy manually maintained sources + generate dynamic ones
|
||||||
print("Generating wiki pages...")
|
print("Generating wiki pages...")
|
||||||
@@ -2115,11 +2113,11 @@ def main():
|
|||||||
for src_file in wiki_src.glob("*.md"):
|
for src_file in wiki_src.glob("*.md"):
|
||||||
shutil.copy2(src_file, wiki_dest / src_file.name)
|
shutil.copy2(src_file, wiki_dest / src_file.name)
|
||||||
# data-model.md is generated (contains live DB stats)
|
# data-model.md is generated (contains live DB stats)
|
||||||
(wiki_dest / "data-model.md").write_text(generate_wiki_data_model(db, profiles))
|
write_if_changed(str(wiki_dest / "data-model.md"), generate_wiki_data_model(db, profiles))
|
||||||
|
|
||||||
# Generate contributing
|
# Generate contributing
|
||||||
print("Generating contributing page...")
|
print("Generating contributing page...")
|
||||||
(docs / "contributing.md").write_text(generate_contributing())
|
write_if_changed(str(docs / "contributing.md"), generate_contributing())
|
||||||
|
|
||||||
# Update mkdocs.yml nav section only (avoid yaml.dump round-trip mangling quotes)
|
# Update mkdocs.yml nav section only (avoid yaml.dump round-trip mangling quotes)
|
||||||
print("Updating mkdocs.yml nav...")
|
print("Updating mkdocs.yml nav...")
|
||||||
@@ -2173,9 +2171,7 @@ markdown_extensions:
|
|||||||
plugins:
|
plugins:
|
||||||
- search
|
- search
|
||||||
"""
|
"""
|
||||||
with open("mkdocs.yml", "w") as f:
|
write_if_changed("mkdocs.yml", mkdocs_static + nav_yaml)
|
||||||
f.write(mkdocs_static)
|
|
||||||
f.write(nav_yaml)
|
|
||||||
|
|
||||||
total_pages = (
|
total_pages = (
|
||||||
1 # home
|
1 # home
|
||||||
|
|||||||
Reference in New Issue
Block a user