fix avahi

This commit is contained in:
mjallen18
2026-03-31 13:33:42 -05:00
parent 6ca55504f0
commit bd799661b9
59 changed files with 3758 additions and 3829 deletions

245
scripts/hooks.py Normal file
View File

@@ -0,0 +1,245 @@
#!/usr/bin/env python3
"""
Per-package hooks for version management.
Each hook is a callable registered by package name (the relative path under
packages/, e.g. 'raspberrypi/linux-rpi') and source component name.
A hook can override:
- fetch_candidates(comp, merged_vars) -> Candidates
- prefetch_source(comp, merged_vars) -> Optional[str] (not yet needed)
Hooks are invoked by both the CLI updater and the TUI.
Adding a new hook:
1. Define a function or class with the required signature.
2. Register it via register_candidates_hook(pkg_name, src_name, fn) at module
level below.
"""
from __future__ import annotations
import re
from typing import Callable, Dict, Optional, Tuple
from lib import (
Candidates,
Json,
gh_head_commit,
gh_list_tags,
gh_ref_date,
gh_release_date,
http_get_text,
)
# ---------------------------------------------------------------------------
# Hook registry
# ---------------------------------------------------------------------------
# (pkg_name, src_name) -> fn(comp, merged_vars) -> Candidates
_CANDIDATES_HOOKS: Dict[Tuple[str, str], Callable] = {}


def register_candidates_hook(pkg: str, src: str, fn: Callable) -> None:
    """Register `fn` as the candidates hook for the (pkg, src) pair."""
    key = (pkg, src)
    _CANDIDATES_HOOKS[key] = fn


def get_candidates_hook(pkg: str, src: str) -> Optional[Callable]:
    """Return the hook registered for (pkg, src), or None if there is none."""
    return _CANDIDATES_HOOKS.get((pkg, src))
# ---------------------------------------------------------------------------
# Raspberry Pi linux — stable_YYYYMMDD tag selection
# ---------------------------------------------------------------------------
def _rpi_linux_stable_candidates(comp: Json, merged_vars: Json) -> Candidates:
    """Candidate refs for the raspberrypi/linux kernel.

    Two tracking modes:
      - stable_YYYYMMDD tags (used when the current tag is one, or when no
        branch is configured): pick the lexically greatest tag, which is
        also the newest because the date is zero-padded.
      - series tags rpi-X.Y.*: X.Y is derived from modDirVersion.
    When a branch is configured, its HEAD commit is also reported.
    """
    # Local import to avoid a circular import at module load time; the
    # original also pulled in gh_latest_release/gh_latest_tag, which were
    # never used — dropped.
    from lib import render

    c = Candidates()
    owner = comp.get("owner", "raspberrypi")
    repo = comp.get("repo", "linux")
    branch: Optional[str] = comp.get("branch") or None
    tags_all = gh_list_tags(owner, repo)
    rendered = render(comp, merged_vars)
    cur_tag = str(rendered.get("tag") or "")
    if cur_tag.startswith("stable_") or not branch:
        # Pick the most recent stable_YYYYMMDD tag.
        stable_tags = sorted(
            (t for t in tags_all if re.match(r"^stable_\d{8}$", t)),
            reverse=True,
        )
        if stable_tags:
            c.tag = stable_tags[0]
            c.tag_date = gh_ref_date(owner, repo, c.tag)
    else:
        # Series-based tracking: pick the latest rpi-X.Y.* tag.
        mm = str(merged_vars.get("modDirVersion") or "")
        m = re.match(r"^(\d+)\.(\d+)", mm)
        if m:
            base = f"rpi-{m.group(1)}.{m.group(2)}"
            # startswith(f"{base}.") subsumes the original's extra
            # `== f"{base}.y"` and `startswith(f"{base}.y")` tests.
            series = [t for t in tags_all if t.startswith(f"{base}.")]
            # NOTE(review): lexicographic sort — fine while all tags in one
            # X.Y series share suffix shape; would misorder mixed widths.
            series.sort(reverse=True)
            if series:
                c.tag = series[0]
                c.tag_date = gh_ref_date(owner, repo, c.tag)
    if branch:
        commit = gh_head_commit(owner, repo, branch)
        if commit:
            c.commit = commit
            c.commit_date = gh_ref_date(owner, repo, commit)
    return c


register_candidates_hook(
    "raspberrypi/linux-rpi", "stable", _rpi_linux_stable_candidates
)
register_candidates_hook(
    "raspberrypi/linux-rpi", "unstable", _rpi_linux_stable_candidates
)
# ---------------------------------------------------------------------------
# CachyOS linux — version from upstream PKGBUILD / .SRCINFO
# ---------------------------------------------------------------------------
def _parse_cachyos_linux_version(text: str, is_srcinfo: bool) -> Optional[str]:
    """Extract pkgver from a .SRCINFO or PKGBUILD, normalizing '.rc' to '-rc'.

    For PKGBUILDs, simple shell variable assignments are collected so that
    ${var} / $var references inside pkgver can be expanded.
    """
    if is_srcinfo:
        found = re.search(r"^\s*pkgver\s*=\s*([^\s#]+)\s*$", text, re.MULTILINE)
        if found is None:
            return None
        return found.group(1).strip().replace(".rc", "-rc")
    # PKGBUILD: gather name=value assignments (quotes and trailing comments
    # stripped) for later expansion.
    assignments: Dict[str, str] = {}
    for raw_line in text.splitlines():
        stripped = raw_line.strip()
        if not stripped or stripped.startswith("#"):
            continue
        ma = re.match(r"^\s*([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.+)$", stripped)
        if not ma:
            continue
        name = ma.group(1)
        value = re.sub(r"\s+#.*$", "", ma.group(2).strip()).strip()
        is_quoted = (value.startswith('"') and value.endswith('"')) or (
            value.startswith("'") and value.endswith("'")
        )
        if is_quoted:
            value = value[1:-1]
        assignments[name] = value
    m2 = re.search(r"^\s*pkgver\s*=\s*(.+)$", text, re.MULTILINE)
    if not m2:
        return None
    raw = m2.group(1).strip().strip("\"'")

    def expand(s: str) -> str:
        # ${name} form first, then bare $name; unknown names stay verbatim.
        s = re.sub(
            r"\$\{([^}]+)\}", lambda mb: assignments.get(mb.group(1), mb.group(0)), s
        )
        return re.sub(
            r"\$([A-Za-z_][A-Za-z0-9_]*)",
            lambda mu: assignments.get(mu.group(1), mu.group(0)),
            s,
        )

    return expand(raw).strip().replace(".rc", "-rc")
def _cachyos_linux_suffix(variant_name: Optional[str]) -> str:
    """Map a variant name to the linux-cachyos directory suffix ('' if unknown)."""
    if not variant_name:
        return ""
    mapping = {"rc": "-rc", "hardened": "-hardened", "lts": "-lts"}
    return mapping.get(variant_name, "")
def fetch_cachyos_linux_version(suffix: str) -> Optional[str]:
    """Fetch the current linux-cachyos{suffix} pkgver from the upstream repo.

    Tries both repo-name capitalizations, preferring .SRCINFO over PKGBUILD.
    Returns None if nothing could be fetched or parsed.
    """
    bases = [
        "https://raw.githubusercontent.com/CachyOS/linux-cachyos/master",
        "https://raw.githubusercontent.com/cachyos/linux-cachyos/master",
    ]
    attempts = ((".SRCINFO", True), ("PKGBUILD", False))
    for base in bases:
        for fname, is_srcinfo in attempts:
            body = http_get_text(f"{base}/linux-cachyos{suffix}/{fname}")
            if not body:
                continue
            version = _parse_cachyos_linux_version(body, is_srcinfo=is_srcinfo)
            if version:
                return version
    return None
def linux_tarball_url(version: str) -> str:
    """Return the canonical kernel.org tarball URL for a kernel version.

    Release candidates come from git.kernel.org snapshots (.tar.gz); stable
    releases from the CDN (.tar.xz). A trailing '.0' is dropped because
    kernel.org names X.Y.0 tarballs 'linux-X.Y'.
    """
    if "-rc" in version:
        return f"https://git.kernel.org/torvalds/t/linux-{version}.tar.gz"
    parts = version.split(".")
    major = parts[0] if parts else "6"
    if version.endswith(".0"):
        tar_ver = ".".join(parts[:2])
    else:
        tar_ver = version
    return (
        f"https://cdn.kernel.org/pub/linux/kernel/v{major}.x/linux-{tar_ver}.tar.xz"
    )
# Note: linux-cachyos is not yet in the repo, but the hook is defined here
# so it can be activated when that package is added.
def _cachyos_linux_candidates(comp: Json, merged_vars: Json) -> Candidates:
    """Report the upstream linux-cachyos pkgver as the tag candidate."""
    result = Candidates()
    # The variant name is not available here; the TUI/CLI must pass it via
    # merged_vars['_cachyos_suffix'].
    suffix = str(merged_vars.get("_cachyos_suffix") or "")
    latest = fetch_cachyos_linux_version(suffix)
    if latest:
        result.tag = latest  # use tag slot for display consistency
    return result


register_candidates_hook("linux-cachyos", "linux", _cachyos_linux_candidates)
# ---------------------------------------------------------------------------
# CachyOS ZFS — commit pinned in PKGBUILD
# ---------------------------------------------------------------------------
def fetch_cachyos_zfs_commit(suffix: str) -> Optional[str]:
    """Return the ZFS commit SHA pinned in the upstream linux-cachyos PKGBUILD."""
    pin = re.compile(r"git\+https://github\.com/cachyos/zfs\.git#commit=([0-9a-f]+)")
    for base in (
        "https://raw.githubusercontent.com/CachyOS/linux-cachyos/master",
        "https://raw.githubusercontent.com/cachyos/linux-cachyos/master",
    ):
        body = http_get_text(f"{base}/linux-cachyos{suffix}/PKGBUILD")
        if not body:
            continue
        found = pin.search(body)
        if found:
            return found.group(1)
    return None
def _cachyos_zfs_candidates(comp: Json, merged_vars: Json) -> Candidates:
    """Report the PKGBUILD-pinned ZFS commit as the commit candidate."""
    result = Candidates()
    suffix = str(merged_vars.get("_cachyos_suffix") or "")
    sha = fetch_cachyos_zfs_commit(suffix)
    if sha:
        result.commit = sha
        # Only GitHub-hosted sources can be dated via the API.
        source_url = comp.get("url") or ""
        if "github.com" in source_url:
            result.commit_date = gh_ref_date("cachyos", "zfs", sha)
        else:
            result.commit_date = ""
    return result


register_candidates_hook("linux-cachyos", "zfs", _cachyos_zfs_candidates)

857
scripts/lib.py Normal file
View File

@@ -0,0 +1,857 @@
#!/usr/bin/env python3
"""
Shared library for version.json management.
Provides:
- JSON load/save
- Variable template rendering
- Base+variant merge (mirrors lib/versioning/default.nix)
- GitHub/Git candidate fetching
- Nix hash prefetching (fetchFromGitHub, fetchgit, fetchurl, fetchzip, cargo vendor)
- Package scanning
"""
from __future__ import annotations
import json
import os
import re
import subprocess
import sys
import urllib.error
import urllib.parse
import urllib.request
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple
Json = Dict[str, Any]
ROOT = Path(__file__).resolve().parents[1]
PKGS_DIR = ROOT / "packages"
# ---------------------------------------------------------------------------
# I/O
# ---------------------------------------------------------------------------
def load_json(path: Path) -> Json:
    """Parse `path` as UTF-8 JSON and return the result."""
    return json.loads(path.read_text(encoding="utf-8"))
def save_json(path: Path, data: Json) -> None:
    """Atomically write `data` as pretty-printed JSON to `path`.

    Writes to a sibling temp file, then renames over the target so readers
    never observe a half-written file. The temp name appends '.tmp' to the
    full filename — the previous with_suffix(".tmp") would make distinct
    targets sharing a stem (e.g. 'a.json' and 'a.yaml') collide on 'a.tmp'.
    """
    tmp = path.with_name(path.name + ".tmp")
    with tmp.open("w", encoding="utf-8") as f:
        json.dump(data, f, indent=2, ensure_ascii=False)
        f.write("\n")  # trailing newline for POSIX-friendly files
    tmp.replace(path)
def eprint(*args: Any, **kwargs: Any) -> None:
    """Like print(), but writes to stderr so stdout stays machine-readable."""
    print(*args, file=sys.stderr, **kwargs)
# ---------------------------------------------------------------------------
# Template rendering
# ---------------------------------------------------------------------------
def render(value: Any, variables: Dict[str, Any]) -> Any:
    """Recursively substitute ${var} placeholders in strings.

    Dicts and lists are walked recursively; unknown variables are left
    verbatim; non-string scalars pass through unchanged.
    """
    if isinstance(value, dict):
        return {key: render(item, variables) for key, item in value.items()}
    if isinstance(value, list):
        return [render(item, variables) for item in value]
    if not isinstance(value, str):
        return value

    def _sub(m) -> str:
        return str(variables.get(m.group(1), m.group(0)))

    return re.sub(r"\$\{([^}]+)\}", _sub, value)
# ---------------------------------------------------------------------------
# Merge (matches lib/versioning/default.nix)
# ---------------------------------------------------------------------------
def _deep_merge(a: Json, b: Json) -> Json:
    """Recursively merge dict `b` over dict `a`; `b` wins on scalar conflicts."""
    merged = dict(a)
    for key, value in b.items():
        current = merged.get(key)
        if isinstance(current, dict) and isinstance(value, dict):
            merged[key] = _deep_merge(current, value)
        else:
            merged[key] = value
    return merged


def _merge_sources(base: Json, overrides: Json) -> Json:
    """Merge two source maps by name; dict-valued entries deep-merge,
    everything else is replaced by the override."""
    result: Json = {}
    for name in set(base) | set(overrides):
        if name not in overrides:
            result[name] = base[name]
        elif name not in base:
            result[name] = overrides[name]
        else:
            b_val, o_val = base[name], overrides[name]
            if isinstance(b_val, dict) and isinstance(o_val, dict):
                result[name] = _deep_merge(b_val, o_val)
            else:
                result[name] = o_val
    return result
def merged_view(spec: Json, variant_name: Optional[str]) -> Tuple[Json, Json, Json]:
    """
    Return (merged_variables, merged_sources, write_target).
    merged_variables / merged_sources: what to use for display and prefetching.
    write_target: the dict to mutate when saving changes (base spec or the
    variant sub-dict).

    Raises ValueError if `variant_name` is given but not present in the spec.
    """
    base_vars: Json = spec.get("variables") or {}
    base_srcs: Json = spec.get("sources") or {}
    if variant_name:
        vdict = (spec.get("variants") or {}).get(variant_name)
        if not isinstance(vdict, dict):
            raise ValueError(f"Variant '{variant_name}' not found in spec")
        v_vars: Json = vdict.get("variables") or {}
        v_srcs: Json = vdict.get("sources") or {}
        # Variables merge shallowly (variant wins); sources merge per-name
        # with a deep merge — mirrors lib/versioning/default.nix.
        merged_vars = {**base_vars, **v_vars}
        merged_srcs = _merge_sources(base_srcs, v_srcs)
        # Saves for a variant mutate the variant sub-dict, not the base spec.
        return merged_vars, merged_srcs, vdict
    # No variant: return copies so callers can mutate display data freely;
    # saves target the base spec itself.
    return dict(base_vars), dict(base_srcs), spec
# ---------------------------------------------------------------------------
# Shell helpers
# ---------------------------------------------------------------------------
def _run(args: List[str], *, capture_stderr: bool = True) -> Tuple[int, str, str]:
    """Run a command; return (exit_code, stripped_stdout, stripped_stderr)."""
    proc = subprocess.run(
        args,
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE if capture_stderr else None,
        check=False,
    )
    stdout = (proc.stdout or "").strip()
    stderr = (proc.stderr or "").strip()
    return proc.returncode, stdout, stderr


def _run_out(args: List[str]) -> Optional[str]:
    """Run a command; return its stdout, or None (with a stderr log) on failure."""
    code, out, err = _run(args)
    if code == 0:
        return out
    eprint(f"Command failed: {' '.join(args)}\n{err}")
    return None
# ---------------------------------------------------------------------------
# HTTP helpers
# ---------------------------------------------------------------------------
def http_get_json(url: str, token: Optional[str] = None) -> Optional[Any]:
    """GET `url` and parse the body as JSON; return None on any failure.

    Always sends the GitHub v3 Accept header (harmless for other hosts) and
    a Bearer token when provided. Errors are logged to stderr, not raised.
    """
    try:
        req = urllib.request.Request(
            url, headers={"Accept": "application/vnd.github+json"}
        )
        if token:
            req.add_header("Authorization", f"Bearer {token}")
        with urllib.request.urlopen(req, timeout=15) as resp:
            return json.loads(resp.read().decode("utf-8"))
    except urllib.error.HTTPError as e:
        eprint(f"HTTP {e.code} for {url}: {e.reason}")
    except Exception as e:
        # Network errors, JSON decode errors, timeouts — all best-effort.
        eprint(f"Request failed for {url}: {e}")
    return None
def http_get_text(url: str) -> Optional[str]:
    """GET `url` and return the body decoded as UTF-8, or None on any failure.

    Errors are logged to stderr rather than raised.
    """
    try:
        req = urllib.request.Request(
            url, headers={"User-Agent": "nix-version-manager/2.0"}
        )
        with urllib.request.urlopen(req, timeout=15) as resp:
            return resp.read().decode("utf-8")
    except urllib.error.HTTPError as e:
        eprint(f"HTTP {e.code} for {url}: {e.reason}")
    except Exception as e:
        eprint(f"Request failed for {url}: {e}")
    return None
# ---------------------------------------------------------------------------
# GitHub API helpers
# ---------------------------------------------------------------------------
def gh_token() -> Optional[str]:
    """Return the GitHub API token from $GITHUB_TOKEN, or None if unset."""
    token = os.environ.get("GITHUB_TOKEN")
    return token
def gh_latest_release(owner: str, repo: str) -> Optional[str]:
    """Return the tag name of the repo's latest GitHub release, or None."""
    data = http_get_json(
        f"https://api.github.com/repos/{owner}/{repo}/releases/latest", gh_token()
    )
    return data.get("tag_name") if isinstance(data, dict) else None
def gh_latest_tag(
    owner: str, repo: str, *, tag_regex: Optional[str] = None
) -> Optional[str]:
    """Return the most recent tag name (GitHub API order), or None.

    Only the first 100 tags are examined; `tag_regex` is applied as an
    unanchored re.search filter.
    """
    data = http_get_json(
        f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=100", gh_token()
    )
    if not isinstance(data, list):
        return None
    tags = [t["name"] for t in data if isinstance(t, dict) and t.get("name")]
    if tag_regex:
        rx = re.compile(tag_regex)
        tags = [t for t in tags if rx.search(t)]
    # API returns tags newest-first, so the head is the latest.
    return tags[0] if tags else None
def gh_list_tags(owner: str, repo: str) -> List[str]:
    """Return up to 100 tag names for the repo (API order), [] on failure."""
    data = http_get_json(
        f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=100", gh_token()
    )
    if not isinstance(data, list):
        return []
    return [t["name"] for t in data if isinstance(t, dict) and t.get("name")]
def gh_head_commit(
    owner: str, repo: str, branch: Optional[str] = None
) -> Optional[str]:
    """Return the commit SHA at the tip of `branch` (or HEAD) via git ls-remote."""
    ref = f"refs/heads/{branch}" if branch else "HEAD"
    out = _run_out(["git", "ls-remote", f"https://github.com/{owner}/{repo}.git", ref])
    if not out:
        return None
    # ls-remote lines are "<sha>\t<ref>"; take the SHA of the first entry.
    for line in out.splitlines():
        parts = line.split()
        if parts:
            return parts[0]
    return None
def gh_release_tags(owner: str, repo: str) -> List[str]:
    """Return tag names of up to 50 recent releases (newest first), [] on failure."""
    data = http_get_json(
        f"https://api.github.com/repos/{owner}/{repo}/releases?per_page=50", gh_token()
    )
    if not isinstance(data, list):
        return []
    return [r["tag_name"] for r in data if isinstance(r, dict) and r.get("tag_name")]
def _iso_to_date(iso: str) -> str:
    """Truncate an ISO-8601 timestamp to YYYY-MM-DD; '' if empty or too short."""
    if not iso or len(iso) < 10:
        return ""
    return iso[:10]
def gh_ref_date(owner: str, repo: str, ref: str) -> str:
    """Return the commit date (YYYY-MM-DD) for a ref/tag/SHA, or ''.

    Prefers the committer date, falling back to the author date.
    """
    data = http_get_json(
        f"https://api.github.com/repos/{owner}/{repo}/commits/{urllib.parse.quote(ref, safe='')}",
        gh_token(),
    )
    if not isinstance(data, dict):
        return ""
    commit = data.get("commit") or {}
    # The API can return null for committer/author; the previous
    # .get("committer", {}) returned that None and then crashed on .get().
    # `or {}` guards both fields.
    iso = (
        (commit.get("committer") or {}).get("date")
        or (commit.get("author") or {}).get("date")
        or ""
    )
    return _iso_to_date(iso)
def gh_release_date(owner: str, repo: str, tag: str) -> str:
    """Return the publish date (YYYY-MM-DD) of the release for `tag`.

    Falls back to the date of the commit the tag points at when there is no
    release object (or it carries no date).
    """
    data = http_get_json(
        f"https://api.github.com/repos/{owner}/{repo}/releases/tags/{urllib.parse.quote(tag, safe='')}",
        gh_token(),
    )
    if isinstance(data, dict):
        iso = data.get("published_at") or data.get("created_at") or ""
        if iso:
            return _iso_to_date(iso)
    # No release for this tag: use the underlying commit date instead.
    return gh_ref_date(owner, repo, tag)
def git_branch_commit(url: str, branch: Optional[str] = None) -> Optional[str]:
    """Return the tip commit SHA of `branch` (or HEAD) for an arbitrary git URL."""
    target_ref = f"refs/heads/{branch}" if branch else "HEAD"
    listing = _run_out(["git", "ls-remote", url, target_ref])
    if not listing:
        return None
    # Each ls-remote line is "<sha>\t<ref>"; the first SHA is what we want.
    for entry in listing.splitlines():
        fields = entry.split()
        if fields:
            return fields[0]
    return None
def git_commit_date_for_github(url: str, sha: str) -> str:
    """Only works for github.com URLs; returns YYYY-MM-DD or empty string."""
    try:
        parsed = urllib.parse.urlparse(url)
        if parsed.hostname != "github.com":
            return ""
        segments = [seg for seg in parsed.path.split("/") if seg]
        if len(segments) < 2:
            return ""
        gh_owner = segments[0]
        gh_repo = segments[1].removesuffix(".git")
        return gh_ref_date(gh_owner, gh_repo, sha)
    except Exception:
        # Malformed URLs (or API failures) degrade to "no date known".
        return ""
# ---------------------------------------------------------------------------
# Nix prefetch helpers
# ---------------------------------------------------------------------------
def _nix_fakehash_build(expr: str) -> Optional[str]:
    """
    Build a Nix expression that intentionally uses lib.fakeHash, parse the
    correct hash from the 'got:' line in the error output.
    """
    # The build is *expected* to fail with a hash mismatch; the real SRI
    # hash appears on stderr, so check=False and scrape the output.
    p = subprocess.run(
        ["nix", "build", "--impure", "--expr", expr],
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    m = re.search(r"got:\s+(sha256-[A-Za-z0-9+/=]+)", p.stderr)
    if m:
        return m.group(1)
    # No 'got:' line means the build failed for another reason; log the tail.
    eprint(f"nix fakeHash build failed:\n{p.stderr[-800:]}")
    return None
def prefetch_github(
    owner: str, repo: str, rev: str, *, submodules: bool = False
) -> Optional[str]:
    """
    Hash for fetchFromGitHub — NAR hash of unpacked tarball.
    Must use the fakeHash trick; nix store prefetch-file gives the wrong hash.

    Returns the SRI hash or None on build failure.
    """
    # Nix booleans are lowercase literals.
    sub = "true" if submodules else "false"
    expr = (
        f"let pkgs = import <nixpkgs> {{}};\n"
        f"in pkgs.fetchFromGitHub {{\n"
        f' owner = "{owner}";\n'
        f' repo = "{repo}";\n'
        f' rev = "{rev}";\n'
        f" fetchSubmodules = {sub};\n"
        f" hash = pkgs.lib.fakeHash;\n"
        f"}}"
    )
    return _nix_fakehash_build(expr)
def prefetch_url(url: str) -> Optional[str]:
    """
    Flat (non-unpacked) hash for fetchurl.
    Uses nix store prefetch-file; falls back to nix-prefetch-url.
    """
    out = _run_out(
        ["nix", "store", "prefetch-file", "--hash-type", "sha256", "--json", url]
    )
    if out:
        try:
            data = json.loads(out)
            if "hash" in data:
                # Already SRI-formatted (sha256-...).
                return data["hash"]
        except Exception:
            pass  # unparsable output: fall through to the legacy tool
    # Legacy fallback: nix-prefetch-url prints a base32 hash ...
    out = _run_out(["nix-prefetch-url", "--type", "sha256", url])
    if out is None:
        out = _run_out(["nix-prefetch-url", url])
    if out is None:
        return None
    # ... which must be converted to SRI form for version.json.
    return _run_out(["nix", "hash", "to-sri", "--type", "sha256", out])
def prefetch_fetchzip(url: str, *, strip_root: bool = True) -> Optional[str]:
    """Hash for fetchzip — NAR of unpacked archive. Must use the fakeHash trick."""
    # stripRoot must match the package's fetchzip call, or the NAR differs.
    expr = (
        f"let pkgs = import <nixpkgs> {{}};\n"
        f"in pkgs.fetchzip {{\n"
        f' url = "{url}";\n'
        f" stripRoot = {'true' if strip_root else 'false'};\n"
        f" hash = pkgs.lib.fakeHash;\n"
        f"}}"
    )
    return _nix_fakehash_build(expr)
def prefetch_git(url: str, rev: str) -> Optional[str]:
    """Hash for fetchgit.

    Primary path: nix-prefetch-git (JSON or plain-text output) converted to
    SRI. Fallback: builtins.fetchGit + `nix hash path`, which only works when
    `rev` is a full 40-char commit SHA.
    """
    out = _run_out(["nix-prefetch-git", "--no-deepClone", "--rev", rev, url])
    if out is not None:
        base32 = None
        try:
            data = json.loads(out)
            base32 = data.get("sha256") or data.get("hash")
        except Exception:
            # Older nix-prefetch-git prints the hash as the last non-empty line.
            lines = [l for l in out.splitlines() if l.strip()]
            if lines:
                base32 = lines[-1].strip()
        if base32:
            return _run_out(["nix", "hash", "to-sri", "--type", "sha256", base32])
    # Fallback: builtins.fetchGit + nix hash path (commit SHA only)
    if re.match(r"^[0-9a-f]{40}$", rev):
        expr = f'builtins.fetchGit {{ url = "{url}"; rev = "{rev}"; }}'
        store_path = _run_out(["nix", "eval", "--raw", "--expr", expr])
        if store_path:
            return _run_out(["nix", "hash", "path", "--type", "sha256", store_path])
    return None
def prefetch_cargo_vendor(
    fetcher: str,
    src_hash: str,
    *,
    url: str = "",
    owner: str = "",
    repo: str = "",
    rev: str = "",
    subdir: str = "",
) -> Optional[str]:
    """Compute the cargo vendor hash via fetchCargoVendor + fakeHash.

    `fetcher` selects how the source expression is built:
      - "github": requires owner, repo, rev and the source hash.
      - "git": requires url, rev and the source hash; github.com URLs are
        rewritten to fetchFromGitHub, others use fetchgit.
    `subdir` sets sourceRoot for workspaces whose Cargo.toml is nested.
    Returns the SRI vendor hash, or None on bad arguments / build failure.
    """
    if fetcher == "github" and owner and repo and rev and src_hash:
        src_expr = (
            f'pkgs.fetchFromGitHub {{ owner = "{owner}"; repo = "{repo}";'
            f' rev = "{rev}"; hash = "{src_hash}"; }}'
        )
    elif fetcher == "git" and url and rev and src_hash:
        parsed = urllib.parse.urlparse(url)
        parts = [p for p in parsed.path.split("/") if p]
        if parsed.hostname == "github.com" and len(parts) >= 2:
            # Strip a trailing ".git" like git_commit_date_for_github does;
            # fetchFromGitHub wants the bare repo name.
            gh_owner, gh_repo = parts[0], parts[1].removesuffix(".git")
            src_expr = (
                f'pkgs.fetchFromGitHub {{ owner = "{gh_owner}"; repo = "{gh_repo}";'
                f' rev = "{rev}"; hash = "{src_hash}"; }}'
            )
        else:
            src_expr = f'pkgs.fetchgit {{ url = "{url}"; rev = "{rev}"; hash = "{src_hash}"; }}'
    else:
        return None
    subdir_attr = f'sourceRoot = "${{src.name}}/{subdir}";' if subdir else ""
    expr = (
        f"let pkgs = import <nixpkgs> {{}};\n"
        f" src = {src_expr};\n"
        f"in pkgs.rustPlatform.fetchCargoVendor {{\n"
        f" inherit src;\n"
        f" {subdir_attr}\n"
        f" hash = pkgs.lib.fakeHash;\n"
        f"}}"
    )
    # Reuse the shared fakeHash-build helper instead of duplicating the
    # subprocess call and 'got:' stderr parsing.
    return _nix_fakehash_build(expr)
# ---------------------------------------------------------------------------
# Source prefetch dispatch
# ---------------------------------------------------------------------------
def prefetch_source(comp: Json, merged_vars: Json) -> Optional[str]:
    """
    Compute and return the SRI hash for a source component using the correct
    Nix fetcher. Returns None on failure.
    """
    fetcher = comp.get("fetcher", "none")
    # Resolve ${var} templates so tag/rev/url become concrete values.
    rendered = render(comp, merged_vars)
    if fetcher == "github":
        owner = comp.get("owner") or ""
        repo = comp.get("repo") or ""
        # Tag wins over an explicit rev for GitHub sources.
        ref = rendered.get("tag") or rendered.get("rev") or ""
        submodules = bool(comp.get("submodules", False))
        if owner and repo and ref:
            return prefetch_github(owner, repo, ref, submodules=submodules)
    elif fetcher == "git":
        url = comp.get("url") or ""
        # For plain git, an explicit rev wins over a tag.
        rev = rendered.get("rev") or rendered.get("tag") or ""
        if url and rev:
            return prefetch_git(url, rev)
    elif fetcher == "url":
        url = rendered.get("url") or rendered.get("urlTemplate") or ""
        if url:
            extra = comp.get("extra") or {}
            # Archives Nix unpacks need the fetchzip (NAR) hash, not the flat one.
            if extra.get("unpack") == "zip":
                return prefetch_fetchzip(url, strip_root=extra.get("stripRoot", True))
            return prefetch_url(url)
    # Unknown fetcher or required fields missing.
    return None
# ---------------------------------------------------------------------------
# Candidate fetching (what versions are available upstream)
# ---------------------------------------------------------------------------
class Candidates:
    """Latest available refs for a source component.

    All six fields default to "" and hold plain strings:
      release / release_date — latest release tag and its date
      tag / tag_date         — latest (or hook-selected) tag and its date
      commit / commit_date   — branch-head commit SHA and its date
    """

    __slots__ = ("release", "release_date", "tag", "tag_date", "commit", "commit_date")

    def __init__(self) -> None:
        self.release = self.release_date = ""
        self.tag = self.tag_date = ""
        self.commit = self.commit_date = ""

    def __repr__(self) -> str:
        # Debug aid: show every slot so hook output can be inspected in logs.
        fields = ", ".join(f"{name}={getattr(self, name)!r}" for name in self.__slots__)
        return f"{type(self).__name__}({fields})"
def fetch_candidates(comp: Json, merged_vars: Json) -> Candidates:
    """
    Fetch the latest release, tag, and commit for a source component.
    For 'url' fetcher with github variables, fetches the latest release tag.
    """
    c = Candidates()
    fetcher = comp.get("fetcher", "none")
    branch: Optional[str] = comp.get("branch") or None
    if fetcher == "github":
        owner = comp.get("owner") or ""
        repo = comp.get("repo") or ""
        if not (owner and repo):
            return c
        if not branch:
            # Release/tag candidates only matter when not pinned to a branch.
            r = gh_latest_release(owner, repo)
            if r:
                c.release = r
                c.release_date = gh_release_date(owner, repo, r)
            t = gh_latest_tag(owner, repo)
            if t:
                c.tag = t
                c.tag_date = gh_ref_date(owner, repo, t)
        # Branch tip (HEAD when branch is None).
        # NOTE(review): indentation reconstructed — assumed to run for the
        # branch case too, matching the rpi hook's behavior; confirm.
        m = gh_head_commit(owner, repo, branch)
        if m:
            c.commit = m
            c.commit_date = gh_ref_date(owner, repo, m)
    elif fetcher == "git":
        url = comp.get("url") or ""
        if url:
            m = git_branch_commit(url, branch)
            if m:
                c.commit = m
                # Commit dating only works for github.com remotes.
                c.commit_date = git_commit_date_for_github(url, m)
    elif fetcher == "url":
        # Classify the URL: github release asset, pypi, openvsx, or plain.
        url_info = _url_source_info(comp, merged_vars)
        kind = url_info.get("kind")
        if kind == "github":
            owner = url_info["owner"]
            repo = url_info["repo"]
            tags = gh_release_tags(owner, repo)
            # Optional prefix/suffix variables restrict acceptable tags.
            prefix = str(merged_vars.get("releasePrefix") or "")
            suffix = str(merged_vars.get("releaseSuffix") or "")
            if prefix or suffix:
                latest = next(
                    (t for t in tags if t.startswith(prefix) and t.endswith(suffix)),
                    None,
                )
            else:
                latest = tags[0] if tags else None
            if latest:
                c.release = latest
                c.release_date = gh_release_date(owner, repo, latest)
        elif kind == "pypi":
            name = url_info["name"]
            latest = pypi_latest_version(name)
            if latest:
                c.release = latest
        elif kind == "openvsx":
            publisher = url_info["publisher"]
            ext_name = url_info["ext_name"]
            latest = openvsx_latest_version(publisher, ext_name)
            if latest:
                c.release = latest
    return c
# ---------------------------------------------------------------------------
# Non-git upstream version helpers
# ---------------------------------------------------------------------------
def pypi_latest_version(name: str) -> Optional[str]:
    """Return the latest release version reported by the PyPI JSON API, or None."""
    data = http_get_json(f"https://pypi.org/pypi/{urllib.parse.quote(name)}/json")
    if not isinstance(data, dict):
        return None
    return (data.get("info") or {}).get("version") or None
def pypi_hash(name: str, version: str) -> Optional[str]:
    """
    Return the SRI hash for a PyPI release artifact.

    Prefers the sdist, falls back to the first wheel, and hashes it via
    prefetch_url. Returns None when the release or a usable artifact is
    missing. (The previous docstring mentioned a fake-hash Nix fallback;
    the code has no such path.)
    """
    data = http_get_json(
        f"https://pypi.org/pypi/{urllib.parse.quote(name)}/{urllib.parse.quote(version)}/json"
    )
    if not isinstance(data, dict):
        return None
    urls = data.get("urls") or []
    # Prefer sdist; fall back to any wheel
    sdist_url = next((u["url"] for u in urls if u.get("packagetype") == "sdist"), None)
    wheel_url = next(
        (u["url"] for u in urls if u.get("packagetype") == "bdist_wheel"), None
    )
    url = sdist_url or wheel_url
    if not url:
        return None
    return prefetch_url(url)
def openvsx_latest_version(publisher: str, ext_name: str) -> Optional[str]:
    """Return the latest version of an extension from Open VSX Registry."""
    # The bare extension endpoint returns metadata for the newest version.
    data = http_get_json(
        f"https://open-vsx.org/api/{urllib.parse.quote(publisher)}/{urllib.parse.quote(ext_name)}"
    )
    if not isinstance(data, dict):
        return None
    return data.get("version") or None
def _url_source_info(comp: Json, merged_vars: Json) -> Json:
    """
    Classify a url-fetcher source and extract the relevant identifiers.
    Returns a dict with at least 'kind' in:
    'github' — GitHub release asset; includes 'owner', 'repo'
    'pypi' — PyPI package; includes 'name', 'version_var'
    'openvsx' — Open VSX extension; includes 'publisher', 'ext_name', 'version_var'
    'plain' — plain URL with a version variable; includes 'version_var' if found
    'static' — hardcoded URL with no variable parts
    """
    tmpl = comp.get("urlTemplate") or comp.get("url") or ""
    # Check merged_vars for explicit github owner/repo
    owner = str(merged_vars.get("owner") or "")
    repo = str(merged_vars.get("repo") or "")
    if owner and repo:
        return {"kind": "github", "owner": owner, "repo": repo}
    # Detect from URL template
    gh_m = re.search(r"github\.com/([^/\$]+)/([^/\$]+)/releases/download", tmpl)
    if gh_m:
        vvar = _find_version_var(tmpl, merged_vars)
        return {
            "kind": "github",
            "owner": gh_m.group(1),
            "repo": gh_m.group(2),
            "version_var": vvar,
        }
    # Open VSX (open-vsx.org/api/${publisher}/${name}/${version}/...)
    vsx_m = re.search(
        r"open-vsx\.org/api/([^/\$]+)/([^/\$]+)/(?:\$\{[^}]+\}|[^/]+)/file", tmpl
    )
    if not vsx_m:
        # Also match when publisher/name come from variables
        if "open-vsx.org/api/" in tmpl:
            publisher = str(merged_vars.get("publisher") or "")
            ext_name = str(merged_vars.get("name") or "")
            if publisher and ext_name:
                vvar = _find_version_var(tmpl, merged_vars)
                return {
                    "kind": "openvsx",
                    "publisher": publisher,
                    "ext_name": ext_name,
                    "version_var": vvar,
                }
    if vsx_m:
        publisher = vsx_m.group(1)
        ext_name = vsx_m.group(2)
        # publisher/ext_name may be literal or variable refs
        # NOTE(review): the regex groups exclude '$', so these lstrip/rstrip
        # calls look like dead code for variable refs — confirm intent.
        publisher = str(merged_vars.get(publisher.lstrip("${").rstrip("}"), publisher))
        ext_name = str(merged_vars.get(ext_name.lstrip("${").rstrip("}"), ext_name))
        vvar = _find_version_var(tmpl, merged_vars)
        return {
            "kind": "openvsx",
            "publisher": publisher,
            "ext_name": ext_name,
            "version_var": vvar,
        }
    # PyPI: files.pythonhosted.org URLs
    if "files.pythonhosted.org" in tmpl or "pypi.org" in tmpl:
        pypi_name = str(merged_vars.get("name") or "")
        if not pypi_name:
            # Derive the project name from the sdist filename in the path.
            m = re.search(r"/packages/[^/]+/[^/]+/([^/]+)-\d", tmpl)
            pypi_name = m.group(1).replace("_", "-") if m else ""
        vvar = _find_version_var(tmpl, merged_vars)
        return {"kind": "pypi", "name": pypi_name, "version_var": vvar}
    # Plain URL: classified by whether any variable looks like a version.
    vvar = _find_version_var(tmpl, merged_vars)
    if vvar:
        return {"kind": "plain", "version_var": vvar}
    return {"kind": "static"}
def _find_version_var(tmpl: str, merged_vars: Json) -> str:
    """
    Return the name of the variable in merged_vars that looks most like
    a version string and appears in the template, or '' if none found.
    Prefers keys named 'version', then anything whose value looks like a
    semver/calver string.
    """
    present = [key for key in merged_vars if f"${{{key}}}" in tmpl]
    if not present:
        return ""
    if "version" in present:
        return "version"
    looks_like_version = re.compile(r"^\d+[\.\-]\d")
    for key in present:
        if looks_like_version.match(str(merged_vars.get(key, ""))):
            return key
    # Nothing version-shaped: fall back to the first template variable.
    return present[0]
def apply_version_update(
    comp: Json,
    merged_vars: Json,
    target_dict: Json,
    new_version: str,
    version_var: str = "version",
) -> None:
    """
    Write `new_version` into variables.<version_var> of `target_dict` and
    clear the first source hash found so it gets re-prefetched.

    `comp` and `merged_vars` are kept for signature compatibility with the
    hook interface but are not consulted; writes go only through
    `target_dict` (the base spec or the variant sub-dict from merged_view).
    The previous implementation ended in a dead read-only loop — removed.
    """
    # Update the version variable, creating the variables dict if needed.
    target_dict.setdefault("variables", {})[version_var] = new_version
    # Clear the stale hash on the first source that carries one, forcing a
    # re-prefetch with the new version.
    for src in (target_dict.get("sources") or {}).values():
        if isinstance(src, dict) and "hash" in src:
            del src["hash"]
            break
# ---------------------------------------------------------------------------
# Package discovery
# ---------------------------------------------------------------------------
def find_packages() -> List[Tuple[str, Path]]:
    """
    Scan packages/ for version.json files.
    Returns sorted list of (display_name, path) tuples, where display_name
    is the directory path relative to packages/.
    """
    found = [
        (str(path.relative_to(PKGS_DIR).parent), path)
        for path in PKGS_DIR.rglob("version.json")
    ]
    return sorted(found)
# ---------------------------------------------------------------------------
# Source display helper
# ---------------------------------------------------------------------------
def source_ref_label(comp: Json, merged_vars: Json) -> str:
    """Return a short human-readable reference string for a source."""
    fetcher = comp.get("fetcher", "none")
    rendered = render(comp, merged_vars)
    if fetcher == "github":
        tag = rendered.get("tag") or ""
        rev = rendered.get("rev") or ""
        owner = rendered.get("owner") or str(merged_vars.get("owner") or "")
        repo = rendered.get("repo") or str(merged_vars.get("repo") or "")
        # Preference order: owner/repo@tag > tag > owner/repo@shortrev > rev.
        if tag and owner and repo:
            return f"{owner}/{repo}@{tag}"
        if tag:
            return tag
        if rev and owner and repo:
            return f"{owner}/{repo}@{rev[:7]}"
        if rev:
            return rev[:12]
        return ""
    if fetcher == "git":
        ref = rendered.get("tag") or rendered.get("rev") or comp.get("version") or ""
        # Abbreviate full 40-char commit SHAs for display.
        if len(ref) == 40 and all(c in "0123456789abcdef" for c in ref):
            return ref[:12]
        return ref
    if fetcher == "url":
        url = rendered.get("url") or rendered.get("urlTemplate") or ""
        if not url:
            return ""
        if "${" in url:
            # Unresolved variables remain: show the template filename with
            # ${var} rewritten to <var>.
            tmpl = comp.get("urlTemplate") or comp.get("url") or url
            filename = os.path.basename(urllib.parse.urlparse(tmpl).path)
            return re.sub(r"\$\{([^}]+)\}", r"<\1>", filename)
        filename = os.path.basename(urllib.parse.urlparse(url).path)
        # Try to reconstruct an owner/repo@tag label for GitHub release assets
        # from the conventional prefix/base-release/suffix variables.
        owner = str(merged_vars.get("owner") or "")
        repo = str(merged_vars.get("repo") or "")
        rp = str(merged_vars.get("releasePrefix") or "")
        rs = str(merged_vars.get("releaseSuffix") or "")
        base = str(merged_vars.get("base") or "")
        rel = str(merged_vars.get("release") or "")
        tag = f"{rp}{base}-{rel}{rs}" if (base and rel) else ""
        if owner and repo and tag and filename:
            # NOTE(review): the literal '(unknown)' placeholder looks
            # intentional (date not known here) — confirm.
            return f"{owner}/{repo}@{tag} · (unknown)"
        return filename or url
    # Fallback for unknown fetchers: any version-ish field.
    return str(comp.get("version") or comp.get("tag") or comp.get("rev") or "")
# ---------------------------------------------------------------------------
# Deep set helper
# ---------------------------------------------------------------------------
def deep_set(obj: Json, path: List[str], value: Any) -> None:
    """Set obj[path[0]]...[path[-1]] = value, creating nested dicts as needed.

    Non-dict intermediate values along the path are overwritten with fresh
    dicts.
    """
    *parents, leaf = path
    node = obj
    for key in parents:
        child = node.get(key)
        if not isinstance(child, dict):
            child = {}
            node[key] = child
        node = child
    node[leaf] = value

472
scripts/update.py Normal file
View File

@@ -0,0 +1,472 @@
#!/usr/bin/env python3
"""
version.json CLI updater.
Usage examples:
# Update a GitHub source to its latest release tag, then recompute hash
scripts/update.py --file packages/edk2/version.json --github-latest-release --prefetch
# Update a specific component to the latest commit
scripts/update.py --file packages/edk2/version.json --component edk2 --github-latest-commit --prefetch
# Update all URL-based sources in a file (recompute hash only)
scripts/update.py --file packages/uboot/version.json --url-prefetch
# Update a variant's variables
scripts/update.py --file packages/proton-cachyos/version.json --variant cachyos-v4 \\
--set variables.base=10.0 --set variables.release=20260301
# Filter tags with a regex (e.g. only stable_* tags)
scripts/update.py --file packages/raspberrypi/linux-rpi/version.json \\
--component stable --github-latest-tag --tag-regex '^stable_\\d{8}$' --prefetch
# Update a fetchgit source to HEAD
scripts/update.py --file packages/linux-cachyos/version.json --component zfs --git-latest --prefetch
# Dry run (show what would change, don't write)
scripts/update.py --file packages/edk2/version.json --github-latest-release --prefetch --dry-run
"""
from __future__ import annotations
import argparse
import os
import sys
from pathlib import Path
from typing import List, Optional
# Ensure scripts/ is on the path so we can import lib and hooks
sys.path.insert(0, str(Path(__file__).resolve().parent))
import lib
import hooks # noqa: F401 — registers hooks as a side effect
def _apply_set_pairs(target: lib.Json, pairs: List[str]) -> bool:
    """Apply ``--set`` KEY=VALUE dot-path assignments onto *target*.

    Malformed pairs (missing '=') are reported to stderr and skipped.
    Returns True when at least one assignment was made.
    """
    mutated = False
    for raw in pairs:
        key, sep, value = raw.partition("=")
        if not sep:
            lib.eprint(f"--set: expected KEY=VALUE, got: {raw!r}")
            continue
        segments = [seg for seg in key.strip().split(".") if seg]
        lib.deep_set(target, segments, value)
        lib.eprint(f" set {'.'.join(segments)} = {value!r}")
        mutated = True
    return mutated
def update_components(
    spec: lib.Json,
    variant: Optional[str],
    components: Optional[List[str]],
    args: argparse.Namespace,
) -> bool:
    """Apply the update flags in *args* to the selected source components.

    Works on the merged (base + variant) view for reading, but writes results
    into the correct override dict (base sources or the variant's sources).
    Returns True when anything in *spec* was modified.
    """
    changed = False
    merged_vars, merged_srcs, target_dict = lib.merged_view(spec, variant)
    # Writes always land in the selected target (base or variant override).
    target_sources: lib.Json = target_dict.setdefault("sources", {})
    names = (
        list(merged_srcs.keys())
        if not components
        else [c for c in components if c in merged_srcs]
    )
    if components:
        # Warn about requested components that don't exist in the merged view.
        missing = [c for c in components if c not in merged_srcs]
        for m in missing:
            lib.eprint(f" [warn] component '{m}' not found in merged sources")
    for name in names:
        view_comp = merged_srcs[name]
        fetcher = view_comp.get("fetcher", "none")
        comp = target_sources.setdefault(name, {})
        if fetcher == "github":
            owner = view_comp.get("owner") or ""
            repo = view_comp.get("repo") or ""
            if not (owner and repo):
                lib.eprint(f" [{name}] missing owner/repo, skipping")
                continue
            # --set-branch: update branch field and fetch HEAD of that branch
            if args.set_branch is not None:
                new_branch = args.set_branch or None # empty string → clear branch
                if new_branch:
                    comp["branch"] = new_branch
                    lib.eprint(f" [{name}] branch -> {new_branch!r}")
                else:
                    comp.pop("branch", None)
                    lib.eprint(f" [{name}] branch cleared")
                changed = True
                rev = lib.gh_head_commit(owner, repo, new_branch)
                if rev:
                    # Branch tracking pins a commit; a stale tag would win
                    # over rev during merging, so drop it.
                    comp["rev"] = rev
                    comp.pop("tag", None)
                    lib.eprint(f" [{name}] rev -> {rev}")
                    changed = True
                    if args.prefetch:
                        sri = lib.prefetch_github(
                            owner,
                            repo,
                            rev,
                            submodules=bool(view_comp.get("submodules", False)),
                        )
                        if sri:
                            comp["hash"] = sri
                            lib.eprint(f" [{name}] hash -> {sri}")
                            changed = True
                else:
                    lib.eprint(
                        f" [{name}] could not resolve HEAD for branch {new_branch!r}"
                    )
                continue # skip the normal ref-update logic for this component
            # Normal ref update: release tag, newest tag, or HEAD commit.
            new_ref: Optional[str] = None
            ref_kind = ""
            if args.github_latest_release:
                tag = lib.gh_latest_release(owner, repo)
                if tag:
                    new_ref, ref_kind = tag, "tag"
            elif args.github_latest_tag:
                tag = lib.gh_latest_tag(owner, repo, tag_regex=args.tag_regex)
                if tag:
                    new_ref, ref_kind = tag, "tag"
            elif args.github_latest_commit:
                rev = lib.gh_head_commit(owner, repo)
                if rev:
                    new_ref, ref_kind = rev, "rev"
            if new_ref:
                # tag and rev are mutually exclusive; remove the stale one.
                if ref_kind == "tag":
                    comp["tag"] = new_ref
                    comp.pop("rev", None)
                else:
                    comp["rev"] = new_ref
                    comp.pop("tag", None)
                lib.eprint(f" [{name}] {ref_kind} -> {new_ref}")
                changed = True
            if args.prefetch and (new_ref or args.url_prefetch):
                # Use merged view with the updated ref for prefetching
                merged_vars2, merged_srcs2, _ = lib.merged_view(spec, variant)
                view2 = lib.render(merged_srcs2.get(name, view_comp), merged_vars2)
                sri = lib.prefetch_github(
                    owner,
                    repo,
                    view2.get("tag") or view2.get("rev") or new_ref or "",
                    submodules=bool(view_comp.get("submodules", False)),
                )
                if sri:
                    comp["hash"] = sri
                    lib.eprint(f" [{name}] hash -> {sri}")
                    changed = True
        elif fetcher == "git":
            url = view_comp.get("url") or ""
            if not url:
                lib.eprint(f" [{name}] missing url for git fetcher, skipping")
                continue
            # --set-branch: update branch field and fetch HEAD of that branch
            if args.set_branch is not None:
                new_branch = args.set_branch or None
                if new_branch:
                    comp["branch"] = new_branch
                    lib.eprint(f" [{name}] branch -> {new_branch!r}")
                else:
                    comp.pop("branch", None)
                    lib.eprint(f" [{name}] branch cleared")
                changed = True
                rev = lib.git_branch_commit(url, new_branch)
                if rev:
                    comp["rev"] = rev
                    lib.eprint(f" [{name}] rev -> {rev}")
                    changed = True
                    if args.prefetch:
                        sri = lib.prefetch_git(url, rev)
                        if sri:
                            comp["hash"] = sri
                            lib.eprint(f" [{name}] hash -> {sri}")
                            changed = True
                else:
                    lib.eprint(
                        f" [{name}] could not resolve HEAD for branch {new_branch!r}"
                    )
                continue
            if args.git_latest:
                # Resolve HEAD of the tracked branch (or the remote default).
                rev = lib.git_branch_commit(url, view_comp.get("branch"))
                if rev:
                    comp["rev"] = rev
                    lib.eprint(f" [{name}] rev -> {rev}")
                    changed = True
                    if args.prefetch:
                        sri = lib.prefetch_git(url, rev)
                        if sri:
                            comp["hash"] = sri
                            lib.eprint(f" [{name}] hash -> {sri}")
                            changed = True
        elif fetcher == "url":
            if args.latest_version:
                # Classify the url source (github release asset, Open VSX, plain)
                # and find which variable carries the version.
                url_info = lib._url_source_info(view_comp, merged_vars)
                kind = url_info.get("kind", "plain")
                version_var = url_info.get("version_var") or "version"
                new_ver: Optional[str] = None
                if kind == "github":
                    owner = url_info.get("owner", "")
                    repo = url_info.get("repo", "")
                    tags = lib.gh_release_tags(owner, repo) if owner and repo else []
                    prefix = str(merged_vars.get("releasePrefix") or "")
                    suffix = str(merged_vars.get("releaseSuffix") or "")
                    if prefix or suffix:
                        # First tag matching the configured affixes wins.
                        tag = next(
                            (
                                t
                                for t in tags
                                if t.startswith(prefix) and t.endswith(suffix)
                            ),
                            None,
                        )
                    else:
                        tag = tags[0] if tags else None
                    if tag:
                        # Proton-cachyos style: extract base+release from tag
                        mid = tag
                        if prefix and mid.startswith(prefix):
                            mid = mid[len(prefix) :]
                        if suffix and mid.endswith(suffix):
                            mid = mid[: -len(suffix)]
                        parts = mid.split("-")
                        if (
                            len(parts) >= 2
                            and "base" in merged_vars
                            and "release" in merged_vars
                        ):
                            lib.eprint(
                                f" [{name}] latest tag: {tag} (base={parts[0]}, release={parts[-1]})"
                            )
                            vs = target_dict.setdefault("variables", {})
                            vs["base"] = parts[0]
                            vs["release"] = parts[-1]
                            changed = True
                            # NOTE(review): this prefetch is not gated on
                            # args.prefetch (unlike the branch below) — confirm
                            # that is intentional.
                            merged_vars2, merged_srcs2, _ = lib.merged_view(
                                spec, variant
                            )
                            view2 = merged_srcs2.get(name, view_comp)
                            sri = lib.prefetch_source(view2, merged_vars2)
                            if sri:
                                comp["hash"] = sri
                                lib.eprint(f" [{name}] hash -> {sri}")
                                changed = True
                        else:
                            # No base/release split: treat the tag itself as
                            # the version string.
                            new_ver = tag
                            tag = None # avoid fall-through
                elif kind == "openvsx":
                    publisher = url_info.get("publisher", "")
                    ext_name = url_info.get("ext_name", "")
                    new_ver = lib.openvsx_latest_version(publisher, ext_name)
                elif kind == "plain":
                    lib.eprint(
                        f" [{name}] url (plain): cannot auto-detect version; use --set"
                    )
                if new_ver:
                    lib.eprint(f" [{name}] latest version: {new_ver}")
                    vs = target_dict.setdefault("variables", {})
                    vs[version_var] = new_ver
                    changed = True
                    if args.prefetch:
                        # Re-render with updated variable
                        merged_vars2, merged_srcs2, _ = lib.merged_view(spec, variant)
                        view2 = merged_srcs2.get(name, view_comp)
                        sri = lib.prefetch_source(view2, merged_vars2)
                        if sri:
                            comp["hash"] = sri
                            lib.eprint(f" [{name}] hash -> {sri}")
                            changed = True
            elif args.url_prefetch or args.prefetch:
                # Hash-only refresh for url sources.
                rendered = lib.render(view_comp, merged_vars)
                url = rendered.get("url") or rendered.get("urlTemplate") or ""
                if not url:
                    lib.eprint(f" [{name}] no url/urlTemplate for url fetcher")
                else:
                    sri = lib.prefetch_source(view_comp, merged_vars)
                    if sri:
                        comp["hash"] = sri
                        lib.eprint(f" [{name}] hash -> {sri}")
                        changed = True
        elif fetcher == "pypi":
            if args.latest_version:
                pkg_name = view_comp.get("name") or str(merged_vars.get("name") or name)
                new_ver = lib.pypi_latest_version(pkg_name)
                if new_ver:
                    version_var = (
                        lib._url_source_info(view_comp, merged_vars).get("version_var")
                        or "version"
                    )
                    cur_ver = str(merged_vars.get(version_var) or "")
                    if new_ver == cur_ver:
                        lib.eprint(f" [{name}] pypi: already at {new_ver}")
                    else:
                        lib.eprint(f" [{name}] pypi: {cur_ver} -> {new_ver}")
                        vs = target_dict.setdefault("variables", {})
                        vs[version_var] = new_ver
                        changed = True
                        if args.prefetch:
                            sri = lib.pypi_hash(pkg_name, new_ver)
                            if sri:
                                comp["hash"] = sri
                                lib.eprint(f" [{name}] hash -> {sri}")
                                changed = True
                            else:
                                lib.eprint(f" [{name}] pypi hash prefetch failed")
                else:
                    lib.eprint(
                        f" [{name}] pypi: could not fetch latest version for {pkg_name!r}"
                    )
            elif args.url_prefetch or args.prefetch:
                lib.eprint(
                    f" [{name}] pypi: use --latest-version --prefetch to update hash"
                )
    return changed
def main() -> int:
    """CLI entry point: parse flags, apply updates, persist, and return exit code."""
    parser = argparse.ArgumentParser(
        description="Update version.json files",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__.split("\n", 2)[2],  # show the usage block as epilog
    )
    parser.add_argument("--file", required=True, metavar="PATH", help="Path to version.json")
    parser.add_argument("--variant", metavar="NAME", help="Variant to target (default: base)")
    parser.add_argument(
        "--component",
        dest="components",
        action="append",
        metavar="NAME",
        help="Limit to specific component(s); can be repeated",
    )
    parser.add_argument(
        "--github-latest-release",
        action="store_true",
        help="Update GitHub sources to latest release tag",
    )
    parser.add_argument(
        "--github-latest-tag",
        action="store_true",
        help="Update GitHub sources to latest tag",
    )
    parser.add_argument(
        "--github-latest-commit",
        action="store_true",
        help="Update GitHub sources to HEAD commit",
    )
    parser.add_argument(
        "--tag-regex",
        metavar="REGEX",
        help="Filter tags (used with --github-latest-tag)",
    )
    parser.add_argument(
        "--set-branch",
        metavar="BRANCH",
        default=None,
        help=(
            "Set the branch field on github/git sources, resolve its HEAD commit, "
            "and (with --prefetch) recompute the hash. "
            "Pass an empty string ('') to clear the branch and switch back to tag/release tracking."
        ),
    )
    parser.add_argument(
        "--git-latest",
        action="store_true",
        help="Update fetchgit sources to latest HEAD commit",
    )
    parser.add_argument(
        "--latest-version",
        action="store_true",
        help=(
            "Fetch the latest version from upstream (PyPI, Open VSX, GitHub releases) "
            "and update the version variable. Use with --prefetch to also recompute the hash."
        ),
    )
    parser.add_argument(
        "--url-prefetch",
        action="store_true",
        help="Recompute hash for url/urlTemplate sources",
    )
    parser.add_argument(
        "--prefetch",
        action="store_true",
        help="After updating ref, also recompute hash",
    )
    parser.add_argument(
        "--set",
        dest="sets",
        action="append",
        default=[],
        metavar="KEY=VALUE",
        help="Set a field (dot-path relative to base or --variant). Can be repeated.",
    )
    parser.add_argument("--dry-run", action="store_true", help="Show changes without writing")
    parser.add_argument(
        "--print",
        dest="do_print",
        action="store_true",
        help="Print resulting JSON to stdout",
    )
    args = parser.parse_args()

    spec_path = Path(args.file)
    if not spec_path.exists():
        lib.eprint(f"File not found: {spec_path}")
        return 1
    spec = lib.load_json(spec_path)
    lib.eprint(f"Loaded: {spec_path}")

    # --set mutations land on the base spec or the chosen variant override.
    if args.variant:
        mutation_target = spec.setdefault("variants", {}).setdefault(args.variant, {})
    else:
        mutation_target = spec
    dirty = _apply_set_pairs(mutation_target, args.sets)

    # Ref/hash updates driven by the mode flags.
    if update_components(spec, args.variant, args.components, args):
        dirty = True

    if not dirty:
        lib.eprint("No changes.")
    elif args.dry_run:
        lib.eprint("Dry run: no changes written.")
    else:
        lib.save_json(spec_path, spec)
        lib.eprint(f"Saved: {spec_path}")

    if args.do_print:
        import json

        print(json.dumps(spec, indent=2, ensure_ascii=False))
    return 0
# Script entry point: propagate main()'s exit code; 130 (128+SIGINT) on Ctrl-C.
if __name__ == "__main__":
    try:
        sys.exit(main())
    except KeyboardInterrupt:
        sys.exit(130)

View File

@@ -1,416 +0,0 @@
#!/usr/bin/env python3
"""
Unified version.json updater (TUI-friendly core logic).
Improvements:
- Correctly merges base + variant variables and sources (component-wise deep merge)
- Updates are written back into the correct dictionary:
- Base: top-level spec["sources"][name]
- Variant: spec["variants"][variant]["sources"][name] (created if missing)
- Hash prefetch uses the merged view with rendered variables
Supports:
- Updating GitHub components to latest release tag, latest tag, or latest commit
- Updating Git (fetchgit) components to latest commit on default branch
- Recomputing SRI hash for url/urlTemplate, github tarballs, and fetchgit sources
- Setting arbitrary fields (variables.* or sources.*.*) via --set path=value
- Operating on a specific variant or the base (top-level) of a version.json
Requirements:
- nix-prefetch-url (or `nix prefetch-url`) and `nix hash to-sri` for URL hashing
- nix-prefetch-git + `nix hash to-sri` for Git fetchers
- Network access for GitHub API (optional GITHUB_TOKEN env var)
Examples:
scripts/update_versions.py --file packages/edk2/version.json --github-latest-release --prefetch
scripts/update_versions.py --file packages/edk2/version.json --component edk2 --github-latest-commit --prefetch
scripts/update_versions.py --file packages/uboot/version.json --url-prefetch
scripts/update_versions.py --file packages/proton-cachyos/version.json --variant cachyos-v4 --set variables.base=10.0
scripts/update_versions.py --file packages/linux-cachyos/version.json --component zfs --git-latest --prefetch
"""
import argparse
import json
import os
import re
import subprocess
import sys
import urllib.request
import urllib.error
from typing import Any, Dict, List, Optional, Tuple
# Shorthand alias for the JSON-object shape used throughout this script.
Json = Dict[str, Any]
def eprintln(*args, **kwargs):
    """print() clone that writes to stderr, keeping stdout clean for --print."""
    print(*args, file=sys.stderr, **kwargs)
def load_json(path: str) -> Json:
    """Parse and return the JSON document stored at *path* (UTF-8)."""
    with open(path, encoding="utf-8") as handle:
        return json.load(handle)
def save_json(path: str, data: Json):
    """Write *data* to *path* as pretty-printed JSON with a trailing newline."""
    text = json.dumps(data, indent=2, ensure_ascii=False) + "\n"
    with open(path, "w", encoding="utf-8") as handle:
        handle.write(text)
def deep_get(o: Json, path: List[str], default=None):
    """Walk nested dicts along *path*; return *default* if any step is absent."""
    node = o
    for key in path:
        if not isinstance(node, dict) or key not in node:
            return default
        node = node[key]
    return node
def deep_set(o: Json, path: List[str], value: Any):
    """Store *value* at the nested *path* in *o*, creating dict levels as needed."""
    node = o
    for key in path[:-1]:
        nxt = node.get(key)
        if not isinstance(nxt, dict):
            nxt = node[key] = {}
        node = nxt
    node[path[-1]] = value
def parse_set_pair(pair: str) -> Tuple[List[str], str]:
    """Split 'a.b.c=value' into (['a', 'b', 'c'], 'value').

    Raises ValueError when no '=' separator is present.
    """
    key, sep, val = pair.partition("=")
    if not sep:
        raise ValueError(f"--set requires KEY=VALUE, got: {pair}")
    return key.strip().split("."), val
def render_templates(value: Any, variables: Dict[str, Any]) -> Any:
    """Recursively substitute ``${name}`` placeholders in strings.

    Unknown placeholders are left untouched; dicts and lists are rebuilt with
    rendered members, other values pass through unchanged.
    """
    if isinstance(value, dict):
        return {key: render_templates(item, variables) for key, item in value.items()}
    if isinstance(value, list):
        return [render_templates(item, variables) for item in value]
    if isinstance(value, str):
        return re.sub(
            r"\$\{([^}]+)\}",
            lambda m: str(variables.get(m.group(1), m.group(0))),
            value,
        )
    return value
def http_get_json(url: str, token: Optional[str] = None) -> Any:
    """GET *url* and decode the JSON body (GitHub-API-friendly Accept header)."""
    headers = {"Accept": "application/vnd.github+json"}
    if token:
        headers["Authorization"] = f"Bearer {token}"
    request = urllib.request.Request(url, headers=headers)
    with urllib.request.urlopen(request) as resp:
        return json.loads(resp.read().decode("utf-8"))
def github_latest_release_tag(owner: str, repo: str, token: Optional[str] = None) -> Optional[str]:
    """Return the tag name of the newest GitHub release, or None on HTTP error."""
    endpoint = f"https://api.github.com/repos/{owner}/{repo}/releases/latest"
    try:
        payload = http_get_json(endpoint, token)
    except urllib.error.HTTPError as e:
        eprintln(f"GitHub latest release failed: {e}")
        return None
    return payload.get("tag_name")
def github_latest_tag(owner: str, repo: str, token: Optional[str] = None, tag_regex: Optional[str] = None) -> Optional[str]:
    """Return the first tag reported by the GitHub API, optionally regex-filtered.

    Returns None on HTTP error or when no tag matches.
    """
    endpoint = f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=100"
    try:
        payload = http_get_json(endpoint, token)
    except urllib.error.HTTPError as e:
        eprintln(f"GitHub tags failed: {e}")
        return None
    names = [entry.get("name") for entry in payload if "name" in entry]
    if tag_regex:
        pattern = re.compile(tag_regex)
        names = [t for t in names if pattern.search(t)]
    return names[0] if names else None
def github_head_commit(owner: str, repo: str, token: Optional[str] = None) -> Optional[str]:
    """Resolve HEAD of a GitHub repo via `git ls-remote` (avoids API rate limits).

    Returns the commit sha, or None when the lookup fails or yields nothing.
    """
    remote = f"https://github.com/{owner}/{repo}.git"
    try:
        listing = subprocess.check_output(
            ["git", "ls-remote", remote, "HEAD"], text=True
        ).strip()
        if listing:
            return listing.split()[0]
    except Exception as e:
        eprintln(f"git ls-remote failed for {remote}: {e}")
    return None
def run_cmd_get_output(args: List[str]) -> str:
    """Echo the command to stderr, run it, and return its stripped stdout.

    Raises CalledProcessError when the command exits non-zero.
    """
    eprintln(f"Running: {' '.join(args)}")
    completed = subprocess.run(args, check=True, stdout=subprocess.PIPE, text=True)
    return completed.stdout.strip()
def nix_prefetch_url(url: str) -> Optional[str]:
    """Prefetch *url* with nix and return its SRI hash (sha256-...), or None.

    Tries the classic nix-prefetch-url first, then the new-style
    `nix prefetch-url`, then converts the base32 result to SRI form.
    """
    last_err = None
    base32 = None
    for cmd in (
        ["nix-prefetch-url", "--type", "sha256", url],
        ["nix", "prefetch-url", url],
    ):
        try:
            base32 = run_cmd_get_output(cmd)
            break
        except Exception as e:
            last_err = e
    if base32 is None:
        eprintln(f"Failed to prefetch url: {url}: {last_err}")
        return None
    try:
        return run_cmd_get_output(["nix", "hash", "to-sri", "--type", "sha256", base32])
    except Exception as e:
        eprintln(f"Failed to convert base32 to SRI: {e}")
        return None
def github_tarball_url(owner: str, repo: str, ref: str) -> str:
    """Build the codeload tarball URL for owner/repo at *ref* (stable endpoint)."""
    return "/".join(["https://codeload.github.com", owner, repo, "tar.gz", ref])
def nix_prefetch_github_tarball(owner: str, repo: str, ref: str) -> Optional[str]:
    """Prefetch the GitHub tarball for owner/repo@ref and return its SRI hash."""
    return nix_prefetch_url(github_tarball_url(owner, repo, ref))
def nix_prefetch_git(url: str, rev: str) -> Optional[str]:
    """Run nix-prefetch-git for url@rev and return the SRI hash, or None.

    The tool normally emits JSON; when it does not, fall back to treating the
    last output line as the raw hash.
    """
    try:
        raw = run_cmd_get_output(["nix-prefetch-git", "--no-deepClone", "--rev", rev, url])
        try:
            parsed = json.loads(raw)
            base32 = parsed.get("sha256") or parsed.get("hash")
        except Exception:
            base32 = raw.splitlines()[-1].strip()
        if not base32:
            eprintln(f"Could not parse nix-prefetch-git output for {url}@{rev}")
            return None
        return run_cmd_get_output(["nix", "hash", "to-sri", "--type", "sha256", base32])
    except Exception as e:
        eprintln(f"nix-prefetch-git failed for {url}@{rev}: {e}")
        return None
# -------------------- Merging logic (match lib/versioning.nix) --------------------
def deep_merge(a: Dict[str, Any], b: Dict[str, Any]) -> Dict[str, Any]:
    """Return a new dict with *b* layered over *a*, recursing into shared dict values."""
    merged = dict(a)
    for key, override in b.items():
        current = merged.get(key)
        if isinstance(current, dict) and isinstance(override, dict):
            merged[key] = deep_merge(current, override)
        else:
            merged[key] = override
    return merged
def merge_sources(base_sources: Dict[str, Any], overrides: Dict[str, Any]) -> Dict[str, Any]:
    """Merge per-component source dicts.

    When both sides define a component as a dict, they are deep-merged;
    otherwise the override value wins, and components present on only one
    side are passed through unchanged.
    """
    result: Dict[str, Any] = {}
    for name in set(base_sources.keys()) | set(overrides.keys()):
        if name not in overrides:
            result[name] = base_sources[name]
        elif (
            name in base_sources
            and isinstance(base_sources[name], dict)
            and isinstance(overrides[name], dict)
        ):
            result[name] = deep_merge(base_sources[name], overrides[name])
        else:
            result[name] = overrides[name]
    return result
def merged_view(spec: Json, variant: Optional[str]) -> Tuple[Dict[str, Any], Dict[str, Any], Json, List[str]]:
    """Compute the effective (variables, sources) view for *variant* or the base.

    Returns (merged_variables, merged_sources, target_dict_to_write, base_path):
      - merged_*: what callers should display/prefetch with
      - target_dict_to_write: where edits should land (base or variants[variant])
      - base_path: JSON path prefix of the target dict

    Raises ValueError when *variant* does not exist in the spec.
    """
    base_vars = spec.get("variables", {}) or {}
    base_sources = spec.get("sources", {}) or {}
    if not variant:
        return dict(base_vars), dict(base_sources), spec, []
    vdict = spec.get("variants", {}).get(variant)
    if not isinstance(vdict, dict):
        raise ValueError(f"Variant '{variant}' not found")
    merged_vars = {**base_vars, **(vdict.get("variables", {}) or {})}
    merged_srcs = merge_sources(base_sources, vdict.get("sources", {}) or {})
    return merged_vars, merged_srcs, vdict, ["variants", variant]
# -------------------- Update operations --------------------
def update_components(spec: Json,
                      variant: Optional[str],
                      components: Optional[List[str]],
                      args: argparse.Namespace) -> bool:
    """Apply the ref/hash update flags in *args* to the selected components.

    Reads from the merged (base + variant) view but writes into the correct
    target dict (base sources or the variant's source overrides).
    Returns True when anything in *spec* was modified.
    """
    changed = False
    gh_token = os.environ.get("GITHUB_TOKEN")
    merged_vars, merged_srcs, target_dict, base_path = merged_view(spec, variant)
    src_names = list(merged_srcs.keys()) if not components else [c for c in components if c in merged_srcs]
    # Ensure target_dict has a sources dict to write into
    target_sources = target_dict.setdefault("sources", {})
    for name in src_names:
        view_comp = merged_srcs[name]
        fetcher = view_comp.get("fetcher", "none")
        # Ensure a writable component entry exists (always write to the
        # selected target: base or variant override)
        comp = target_sources.setdefault(name, {})
        if not isinstance(comp, dict):
            comp = target_sources[name] = {}
        if fetcher == "github":
            owner = view_comp.get("owner")
            repo = view_comp.get("repo")
            if not owner or not repo:
                eprintln(f"Component {name}: missing owner/repo for github fetcher")
                continue
            new_ref = None
            ref_kind = None
            if args.github_latest_release:
                tag = github_latest_release_tag(owner, repo, gh_token)
                if tag:
                    new_ref = tag
                    ref_kind = "tag"
            elif args.github_latest_tag:
                tag = github_latest_tag(owner, repo, gh_token, args.tag_regex)
                if tag:
                    new_ref = tag
                    ref_kind = "tag"
            elif args.github_latest_commit:
                rev = github_head_commit(owner, repo, gh_token)
                if rev:
                    new_ref = rev
                    ref_kind = "rev"
            if new_ref:
                # tag and rev are mutually exclusive; drop the stale one.
                if ref_kind == "tag":
                    comp["tag"] = new_ref
                    if "rev" in comp:
                        del comp["rev"]
                else:
                    comp["rev"] = new_ref
                    if "tag" in comp:
                        del comp["tag"]
                eprintln(f"Component {name}: set {ref_kind}={new_ref}")
                changed = True
            if args.prefetch:
                ref = comp.get("tag") or comp.get("rev")
                if not ref:
                    # fallback to merged view if not in override
                    ref = view_comp.get("tag") or view_comp.get("rev")
                if ref:
                    sri = nix_prefetch_github_tarball(owner, repo, ref)
                    if sri:
                        comp["hash"] = sri
                        eprintln(f"Component {name}: updated hash={sri}")
                        changed = True
        elif fetcher == "git":
            url = view_comp.get("url")
            if not url:
                eprintln(f"Component {name}: missing url for git fetcher")
                continue
            if args.git_latest:
                # BUGFIX: removed a placeholder github_head_commit("", "") call
                # whose result was discarded — it spawned a doomed
                # `git ls-remote https://github.com/.git` on every run.
                # HEAD is resolved directly via ls-remote below.
                try:
                    out = subprocess.check_output(["git", "ls-remote", url, "HEAD"], text=True).strip()
                    if out:
                        new_rev = out.split()[0]
                        comp["rev"] = new_rev
                        eprintln(f"Component {name}: set rev={new_rev}")
                        changed = True
                        if args.prefetch:
                            sri = nix_prefetch_git(url, new_rev)
                            if sri:
                                comp["hash"] = sri
                                eprintln(f"Component {name}: updated hash={sri}")
                                changed = True
                except Exception as e:
                    eprintln(f"git ls-remote failed for {name}: {e}")
        elif fetcher == "url":
            if args.url_prefetch or args.prefetch:
                rendered_comp = render_templates(view_comp, merged_vars)
                url = rendered_comp.get("url") or rendered_comp.get("urlTemplate")
                if not url:
                    eprintln(f"Component {name}: missing url/urlTemplate for url fetcher")
                else:
                    sri = nix_prefetch_url(url)
                    if sri:
                        comp["hash"] = sri
                        eprintln(f"Component {name}: updated hash={sri}")
                        changed = True
        elif fetcher == "pypi":
            if args.prefetch:
                eprintln(f"Component {name} (pypi): prefetch not implemented; use nix-prefetch-pypi or set hash manually.")
        else:
            # fetcher == "none" or other: no-op unless user --set a value
            pass
    return changed
# -------------------- Main --------------------
def main():
    """CLI entry point: apply --set mutations and flag-driven updates, then save."""
    ap = argparse.ArgumentParser(description="Update unified version.json files")
    ap.add_argument("--file", required=True, help="Path to version.json")
    ap.add_argument("--variant", help="Variant name to update (default: base/top-level)")
    ap.add_argument("--component", dest="components", action="append", help="Limit to specific component(s); can be repeated")
    ap.add_argument("--github-latest-release", action="store_true", help="Update GitHub components to latest release tag")
    ap.add_argument("--github-latest-tag", action="store_true", help="Update GitHub components to latest tag")
    ap.add_argument("--github-latest-commit", action="store_true", help="Update GitHub components to HEAD commit")
    ap.add_argument("--tag-regex", help="Regex to filter tags for --github-latest-tag")
    ap.add_argument("--git-latest", action="store_true", help="Update fetchgit components to latest commit (HEAD)")
    ap.add_argument("--url-prefetch", action="store_true", help="Recompute hash for url/urlTemplate components")
    ap.add_argument("--prefetch", action="store_true", help="After changing refs, recompute hash as needed")
    ap.add_argument("--set", dest="sets", action="append", default=[], help="Set a field: KEY=VALUE (dot path), relative to variant/base. Value is treated as string.")
    ap.add_argument("--dry-run", action="store_true", help="Do not write changes")
    ap.add_argument("--print", dest="do_print", action="store_true", help="Print result JSON to stdout")
    args = ap.parse_args()
    path = args.file
    spec = load_json(path)
    # Apply --set mutations (relative to base or selected variant)
    target = spec if not args.variant else spec.setdefault("variants", {}).setdefault(args.variant, {})
    changed = False
    for pair in args.sets:
        path_tokens, value = parse_set_pair(pair)
        deep_set(target, path_tokens, value)
        eprintln(f"Set {'.'.join((['variants', args.variant] if args.variant else []) + path_tokens)} = {value}")
        changed = True
    # Update refs/hashes based on fetcher type and flags with merged view
    changed = update_components(spec, args.variant, args.components, args) or changed
    # BUGFIX: previously `if changed and not args.dry_run` made a dry run with
    # pending changes fall through to the misleading "No changes made."
    # message. Report the three outcomes distinctly instead.
    if not changed:
        eprintln("No changes made.")
    elif args.dry_run:
        eprintln("Dry run: changes not written.")
    else:
        save_json(path, spec)
        eprintln(f"Wrote changes to {path}")
    if args.do_print:
        print(json.dumps(spec, indent=2, ensure_ascii=False))
# Script entry point: exit 130 (128+SIGINT) on Ctrl-C, per shell convention.
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        sys.exit(130)

File diff suppressed because it is too large Load Diff