#!/usr/bin/env python3
|
|
import json
import os
import re
import subprocess
import sys
from pathlib import Path
from urllib.error import HTTPError
from urllib.request import Request, urlopen
|
|
|
|
GITHUB_API = "https://api.github.com"
|
|
CODEBERG_API = "https://codeberg.org/api/v1"
|
|
|
|
def run(cmd):
|
|
p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
|
|
if p.returncode != 0:
|
|
raise RuntimeError(f"Command failed: {' '.join(cmd)}\n{p.stderr.strip()}")
|
|
return p.stdout.strip()
|
|
|
|
def http_get_json(url, token=None):
|
|
headers = {"Accept": "application/json"}
|
|
if token:
|
|
headers["Authorization"] = f"token {token}"
|
|
req = Request(url, headers=headers)
|
|
with urlopen(req) as resp:
|
|
return json.load(resp)
|
|
|
|
def github_latest_release(owner, repo, token=None):
|
|
url = f"{GITHUB_API}/repos/{owner}/{repo}/releases/latest"
|
|
return http_get_json(url, token=token)
|
|
|
|
def github_latest_commit(owner, repo, token=None):
|
|
url = f"{GITHUB_API}/repos/{owner}/{repo}/commits?per_page=1"
|
|
data = http_get_json(url, token=token)
|
|
return data[0]["sha"]
|
|
|
|
def codeberg_latest_release(owner, repo, token=None):
|
|
url = f"{CODEBERG_API}/repos/{owner}/{repo}/releases/latest"
|
|
return http_get_json(url, token=token)
|
|
|
|
def codeberg_latest_commit(owner, repo, token=None):
|
|
url = f"{CODEBERG_API}/repos/{owner}/{repo}/commits?limit=1"
|
|
data = http_get_json(url, token=token)
|
|
return data[0]["sha"]
|
|
|
|
def nix_hash_to_sri(hash_str):
|
|
# Convert nix-base32 to SRI
|
|
return run(["nix", "hash", "to-sri", "--type", "sha256", hash_str])
|
|
|
|
def prefetch_git(url, rev):
|
|
out = run(["nix-prefetch-git", "--url", url, "--rev", rev, "--fetch-submodules"])
|
|
data = json.loads(out)
|
|
return nix_hash_to_sri(data["sha256"])
|
|
|
|
def prefetch_url(url, unpack=False):
|
|
cmd = ["nix-prefetch-url", url]
|
|
if unpack:
|
|
cmd.insert(1, "--unpack")
|
|
hash_str = run(cmd)
|
|
return nix_hash_to_sri(hash_str)
|
|
|
|
def is_archive_url(url):
|
|
return bool(re.search(r"\.(tar\.gz|tar\.xz|tar\.bz2|zip)$", url))
|
|
|
|
def build_repo_url(location, owner, repo):
|
|
if location == "github":
|
|
return f"https://github.com/{owner}/{repo}.git"
|
|
if location == "codeberg":
|
|
return f"https://codeberg.org/{owner}/{repo}.git"
|
|
raise ValueError(f"Unknown repo location: {location}")
|
|
|
|
def build_release_tarball_url(location, owner, repo, tag):
|
|
if location == "github":
|
|
return f"https://github.com/{owner}/{repo}/archive/refs/tags/{tag}.tar.gz"
|
|
if location == "codeberg":
|
|
return f"https://codeberg.org/{owner}/{repo}/archive/{tag}.tar.gz"
|
|
raise ValueError(f"Unknown repo location: {location}")
|
|
|
|
def update_entry(name, entry, gh_token=None, cb_token=None):
|
|
location = entry.get("location")
|
|
owner = entry.get("owner")
|
|
repo = entry.get("repo")
|
|
url = entry.get("url")
|
|
|
|
if url and (location == "url" or location == "archive"):
|
|
# Direct URL source
|
|
unpack = is_archive_url(url)
|
|
new_hash = prefetch_url(url, unpack=unpack)
|
|
entry["hash"] = new_hash
|
|
return True
|
|
|
|
if location in ("github", "codeberg"):
|
|
if entry.get("tag"):
|
|
# Use latest release tag
|
|
if location == "github":
|
|
rel = github_latest_release(owner, repo, token=gh_token)
|
|
tag = rel["tag_name"]
|
|
else:
|
|
rel = codeberg_latest_release(owner, repo, token=cb_token)
|
|
tag = rel["tag_name"]
|
|
if tag != entry["tag"]:
|
|
entry["tag"] = tag
|
|
tar_url = build_release_tarball_url(location, owner, repo, tag)
|
|
entry["hash"] = prefetch_url(tar_url, unpack=True)
|
|
return True
|
|
|
|
if entry.get("rev"):
|
|
# Use latest commit
|
|
if location == "github":
|
|
sha = github_latest_commit(owner, repo, token=gh_token)
|
|
else:
|
|
sha = codeberg_latest_commit(owner, repo, token=cb_token)
|
|
if sha != entry["rev"]:
|
|
entry["rev"] = sha
|
|
repo_url = build_repo_url(location, owner, repo)
|
|
entry["hash"] = prefetch_git(repo_url, sha)
|
|
return True
|
|
|
|
return False
|
|
|
|
def process_file(path, gh_token=None, cb_token=None):
|
|
data = json.loads(path.read_text())
|
|
changed = False
|
|
for name, entry in data.items():
|
|
try:
|
|
changed = update_entry(name, entry, gh_token=gh_token, cb_token=cb_token)
|
|
except HTTPError as e:
|
|
print(f"[WARN] {path}: {name}: HTTP error {e.code}", file=sys.stderr)
|
|
except Exception as e:
|
|
print(f"[WARN] {path}: {name}: {e}", file=sys.stderr)
|
|
if changed:
|
|
path.write_text(json.dumps(data, indent=2, sort_keys=True) + "\n")
|
|
return changed
|
|
|
|
def main(root):
|
|
gh_token = None
|
|
cb_token = None
|
|
# Optional tokens from environment
|
|
# import os
|
|
# gh_token = os.environ.get("GITHUB_TOKEN")
|
|
# cb_token = os.environ.get("CODEBERG_TOKEN")
|
|
|
|
root = Path(root)
|
|
files = list(root.rglob("version*.json"))
|
|
if not files:
|
|
print("No version*.json files found")
|
|
return 1
|
|
|
|
updated = 0
|
|
for f in files:
|
|
if process_file(f, gh_token=gh_token, cb_token=cb_token):
|
|
print(f"Updated: {f}")
|
|
updated += 1
|
|
|
|
print(f"Done. Updated {updated} file(s).")
|
|
return 0
|
|
|
|
if __name__ == "__main__":
|
|
if len(sys.argv) != 2:
|
|
print(f"Usage: {sys.argv[0]} <root-folder>")
|
|
sys.exit(2)
|
|
sys.exit(main(sys.argv[1])) |