---
# Builds the MkDocs site and deploys it to GitHub Pages.
# Triggered by pushes to main that touch content/scripts/config, or manually.
name: Deploy Site

on:
  push:
    branches: [main]
    paths:
      - "platforms/**"
      - "emulators/**"
      - "wiki/**"
      - "scripts/generate_site.py"
      - "scripts/generate_readme.py"
      - "scripts/verify.py"
      - "scripts/common.py"
      - "database.json"
      - "mkdocs.yml"
  workflow_dispatch:

# Minimal permissions for the Pages OIDC deployment flow.
permissions:
  contents: read
  pages: write
  id-token: write

# Only one deploy at a time; a newer push supersedes an in-flight build.
concurrency:
  group: deploy-site
  cancel-in-progress: true

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v6

      - uses: actions/setup-python@v6
        with:
          # Quoted to avoid the YAML float trap (3.12 stays a string).
          python-version: "3.12"

      - run: pip install pyyaml mkdocs-material pymdown-extensions

      # Large binaries are stored on the "large-files" release rather than in
      # git; restore any that database.json expects but the checkout lacks.
      # Download failures are tolerated (|| true) — the build proceeds without.
      - name: Restore large files from release
        run: |
          mkdir -p .cache/large
          gh release download large-files -D .cache/large/ 2>/dev/null || true
          python3 -c "
          import hashlib, json, os, shutil
          db = json.load(open('database.json'))
          with open('.gitignore') as f:
              ignored = {l.strip() for l in f if l.strip().startswith('bios/')}
          cache = '.cache/large'
          if not os.path.isdir(cache):
              exit(0)
          idx = {}
          for fn in os.listdir(cache):
              fp = os.path.join(cache, fn)
              if os.path.isfile(fp):
                  h = hashlib.sha1(open(fp, 'rb').read()).hexdigest()
                  idx[h] = fp
          restored = 0
          for sha1, entry in db['files'].items():
              path = entry['path']
              if path in ignored and not os.path.exists(path):
                  src = idx.get(sha1)
                  if src:
                      os.makedirs(os.path.dirname(path), exist_ok=True)
                      shutil.copy2(src, path)
                      print(f'Restored: {path}')
                      restored += 1
          print(f'Total: {restored} files restored')
          "
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      # Best-effort refresh; failures here must not block the deploy.
      - name: Refresh data directories
        run: python scripts/refresh_data_dirs.py
        continue-on-error: true

      - name: Generate site
        run: |
          python scripts/generate_site.py
          python scripts/generate_readme.py --db database.json --platforms-dir platforms
          mkdocs build

      - name: Upload artifact
        uses: actions/upload-pages-artifact@v4
        with:
          path: site/

  deploy:
    needs: build
    runs-on: ubuntu-latest
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4