---
# Fetch BYOND Builds — hourly sync workflow.
# Crawls upstream BYOND builds, extracts signatures in chunked matrix jobs,
# and publishes the resulting static site to GitHub Pages.
name: Fetch BYOND Builds

on:
  schedule:
    - cron: "0 */1 * * *" # hourly
  workflow_dispatch:
    inputs:
      force_publish:
        description: 'Ignore early exit check and force publish anyways'
        required: false
        default: false
        type: boolean

# Ensure only one run publishes at a time; cancel overlapping runs
concurrency:
  group: byond-sync-pages
  cancel-in-progress: true

# Default for all jobs; publish_pages widens this with pages/id-token below.
permissions:
  contents: read
jobs:
  # Discovers every BYOND version upstream (majors MIN_MAJOR..latest), keeps
  # those with at least one downloadable ZIP, and emits them as CHUNK_SIZE-sized
  # JSON chunks for the extraction matrix job below.
  fetch_byond_builds:
    runs-on: ubuntu-latest
    env:
      PAGES_BASE: https://spacestation13.github.io/byond-tracy-offset-extractor
      BUILDS_BASE: https://byond-builds.dm-lang.org
      MIN_MAJOR: "515"
      CHUNK_SIZE: "15"
    outputs:
      # Always read from the finalize step so downstream jobs get well-formed
      # values on both the early-skip and the normal path.
      chunks: ${{ steps.finalize.outputs.chunks }}
      count: ${{ steps.finalize.outputs.count }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6

      - name: Tools
        run: |
          set -euo pipefail
          sudo apt-get update -y
          sudo apt-get install -y jq curl

      - name: Determine majors [515..latest] and upstream latest
        id: majors
        run: |
          set -euo pipefail
          UPSTREAM_VER="$(curl -fsS "$BUILDS_BASE/version.txt" | tr -d '\r\n')"
          LATEST_MAJOR="${UPSTREAM_VER%%.*}"
          echo "upstream_ver=$UPSTREAM_VER" >> "$GITHUB_OUTPUT"
          seq "$MIN_MAJOR" "$LATEST_MAJOR" > majors.txt
          echo "Upstream latest: $UPSTREAM_VER"
          echo "Majors: $(tr '\n' ' ' < majors.txt)"

      - name: Early exit if upstream latest is already published
        id: early
        run: |
          set -euo pipefail
          FORCE_PUBLISH="${{ github.event_name == 'workflow_dispatch' && github.event.inputs.force_publish == 'true' }}"
          if [ "$FORCE_PUBLISH" = "true" ]; then
            echo "Manual force publish - bypassing early exit."
            echo "skip=false" >> "$GITHUB_OUTPUT"
            exit 0
          fi
          UPSTREAM_VER="${{ steps.majors.outputs.upstream_ver }}"
          MAJOR="${UPSTREAM_VER%%.*}"
          # Which platforms exist upstream for the latest?
          have_win=0; have_lin=0
          curl -fsI "$BUILDS_BASE/$MAJOR/${UPSTREAM_VER}_byond.zip" >/dev/null 2>&1 && have_win=1 || true
          curl -fsI "$BUILDS_BASE/$MAJOR/${UPSTREAM_VER}_byond_linux.zip" >/dev/null 2>&1 && have_lin=1 || true
          # Consider "up to date" only for platforms that exist upstream
          ok_win=0; ok_lin=0
          if [ "$have_win" -eq 1 ]; then
            curl -fsI "$PAGES_BASE/v/$UPSTREAM_VER/windows.json" >/dev/null 2>&1 && ok_win=1 || ok_win=0
          else
            ok_win=1
          fi
          if [ "$have_lin" -eq 1 ]; then
            curl -fsI "$PAGES_BASE/v/$UPSTREAM_VER/linux.json" >/dev/null 2>&1 && ok_lin=1 || ok_lin=0
          else
            ok_lin=1
          fi
          if [ "$ok_win" -eq 1 ] && [ "$ok_lin" -eq 1 ]; then
            echo "Already published latest ($UPSTREAM_VER) for all upstream platforms; skipping."
            echo "skip=true" >> "$GITHUB_OUTPUT"
            echo "chunks=[]" >> "$GITHUB_OUTPUT"
            echo "count=0" >> "$GITHUB_OUTPUT"
          else
            echo "Not up to date with latest ($UPSTREAM_VER); proceeding."
            echo "skip=false" >> "$GITHUB_OUTPUT"
          fi

      - name: Crawl upstream versions (keep if ≥1 ZIP exists)
        if: ${{ steps.early.outputs.skip != 'true' }}
        run: |
          set -euo pipefail
          have_any_zip() {
            local major="$1" ver="$2"
            local win="$BUILDS_BASE/${major}/${ver}_byond.zip"
            local lin="$BUILDS_BASE/${major}/${ver}_byond_linux.zip"
            # Accept releases where either platform exists (there are win-only / linux-only hotfixes)
            curl -fsI "$win" >/dev/null 2>&1 && return 0
            curl -fsI "$lin" >/dev/null 2>&1 && return 0
            return 1
          }
          : > upstream.sorted.txt
          while read -r MAJOR; do
            HTML="$(curl -fsS "$BUILDS_BASE/$MAJOR/" || true)"
            [ -z "$HTML" ] && continue
            printf '%s\n' "$HTML" \
              | grep -oE "$MAJOR\.[0-9]{4}" \
              | sort -u \
              | while read -r VER; do
                  have_any_zip "$MAJOR" "$VER" && echo "$VER"
                done
          done < majors.txt | sort -u > upstream.sorted.txt
          echo "Found upstream versions: $(wc -l < upstream.sorted.txt)"
          echo "Sample:"; head -n 5 upstream.sorted.txt || true

      - name: Prepare chunks of ${{ env.CHUNK_SIZE }}
        id: prepare_chunks
        if: ${{ steps.early.outputs.skip != 'true' }}
        run: |
          set -euo pipefail
          # Make a JSON array of all upstream versions
          ALL_JSON="$(jq -R -s 'split("\n")[:-1]' upstream.sorted.txt)"
          # CHUNKS is an array of JSON strings; each string is a JSON array of versions:
          # ["[\"515.1646\",\"515.1647\"]","[\"516.1666\",\"516.1667\"]", ...]
          CHUNKS="$(
            jq -c --argjson n "${CHUNK_SIZE}" '
              def chunks($n):
                if length==0 then []
                else [ range(0; length; $n) as $i
                       | ( .[$i:($i+$n)] | @json ) ]
                end;
              . as $all | ($all | chunks($n))
            ' <<< "$ALL_JSON"
          )"
          COUNT="$(jq 'length' <<< "$ALL_JSON")"
          # Single-line outputs (matrix parser hates newlines)
          CHUNKS_ONE_LINE="$(printf %s "$CHUNKS" | tr -d '\n')"
          echo "chunks=$CHUNKS_ONE_LINE" >> "$GITHUB_OUTPUT"
          echo "count=$COUNT" >> "$GITHUB_OUTPUT"
          echo "Prepared $(jq 'length' <<< "$CHUNKS") chunks from $COUNT versions."

      # Runs on both paths (no `if:`) so the job outputs are always populated.
      - name: Finalize job outputs
        id: finalize
        run: |
          set -euo pipefail
          # Choose outputs from early-skip or from prepared chunks
          if [ "${{ steps.early.outputs.skip }}" = "true" ]; then
            echo "chunks=[]" >> "$GITHUB_OUTPUT"
            echo "count=0" >> "$GITHUB_OUTPUT"
            echo "Finalized: skip=true"
          else
            echo "chunks=${{ steps.prepare_chunks.outputs.chunks }}" >> "$GITHUB_OUTPUT"
            echo "count=${{ steps.prepare_chunks.outputs.count }}" >> "$GITHUB_OUTPUT"
            echo "Finalized: skip=false"
          fi
extract_chunks:
needs: fetch_byond_builds
if: ${{ fromJson(needs.fetch_byond_builds.outputs.count) > 0 }}
permissions:
contents: read
strategy:
fail-fast: false
matrix:
versions: ${{ fromJson(needs.fetch_byond_builds.outputs.chunks) }}
max-parallel: 3
uses: ./.github/workflows/extract_sigs.yml
with:
versions: ${{ toJson(matrix.versions) }}
publish_pages:
needs: [fetch_byond_builds, extract_chunks]
if: ${{ fromJson(needs.fetch_byond_builds.outputs.count) > 0 }}
runs-on: ubuntu-latest
permissions:
pages: write
id-token: write
environment:
name: github-pages
steps:
- name: Checkout repo (for pages/index.html and any other assets)
uses: actions/checkout@v6
- name: Download ALL fragments
uses: actions/download-artifact@v8
with:
pattern: site-fragments-*
merge-multiple: true
path: fragments
- name: Build static site files
run: |
set -euo pipefail
mkdir -p public
# Per-version/per-platform → public/v/<version>/<platform>.json
find fragments -name '*.json' -print | sort | while read -r f; do
base=$(basename "$f" .json)
plat=${base%%-*}
ver=${base#*-}
mkdir -p "public/v/$ver"
jq -c '.' "$f" > "public/v/$ver/$plat.json"
done
# data.json = flattened + sorted
if ls public/v/*/*.json >/dev/null 2>&1; then
jq -s '[ .[] ]' public/v/*/*.json \
| jq 'sort_by(.version | split(".") | map(tonumber)) | sort_by(.platform)' \
> public/data.json
else
echo '[]' > public/data.json
fi
# index.json (versions + latest per-platform)
jq -r '.[].version' public/data.json \
| sort -t. -k1,1n -k2,2n \
| uniq > versions.txt || true
latest_linux=$(jq -r '
[.[] | select(.platform=="linux")]
| (max_by(.version | split(".") | map(tonumber)) // empty)
| if . == null then "" else .version end
' public/data.json)
latest_windows=$(jq -r '
[.[] | select(.platform=="windows")]
| (max_by(.version | split(".") | map(tonumber)) // empty)
| if . == null then "" else .version end
' public/data.json)
jq -n --argjson versions "$(jq -R -s 'split("\n")[:-1]' versions.txt 2>/dev/null || echo '[]')" \
--arg linux "$latest_linux" --arg windows "$latest_windows" \
'{schema:1, versions:$versions, latest:{linux:$linux, windows:$windows}}' \
> public/index.json
# latest aliases
if [ -n "$latest_linux" ] && [ -f "public/v/$latest_linux/linux.json" ]; then
jq -c '.' "public/v/$latest_linux/linux.json" > public/latest-linux.json
fi
if [ -n "$latest_windows" ] && [ -f "public/v/$latest_windows/windows.json" ]; then
jq -c '.' "public/v/$latest_windows/windows.json" > public/latest-windows.json
fi
if [ -f public/latest-linux.json ]; then
cp public/latest-linux.json public/latest.json
elif [ -f public/latest-windows.json ]; then
cp public/latest-windows.json public/latest.json
else
echo '{}' > public/latest.json
fi
- name: Copy site assets (index.html etc.)
run: |
mkdir -p public
cp -R pages/* public/ || true
- name: Upload Pages artifact
uses: actions/upload-pages-artifact@v5
with:
path: public
- name: Deploy to GitHub Pages
id: deploy
uses: actions/deploy-pages@v5