This commit is contained in:
mjallen18
2026-03-04 13:43:18 -06:00
parent 5f79421d9e
commit d17d096a97
6 changed files with 487 additions and 150 deletions

View File

@@ -231,6 +231,79 @@ def nix_prefetch_git(url: str, rev: str) -> Optional[str]:
return sri
def nix_prefetch_cargo_vendor(
    fetcher: str,
    src_hash: str,
    *,
    url: str = "",
    owner: str = "",
    repo: str = "",
    rev: str = "",
    subdir: str = "",
) -> Optional[str]:
    """
    Compute the cargo vendor hash for a Rust source using nix build + fakeHash.

    Builds rustPlatform.fetchCargoVendor with lib.fakeHash and extracts the
    real hash from the 'got:' line of nix's error output.

    Args:
        fetcher: "github" or "git"
        src_hash: SRI hash of the source (already known)
        url: git URL (for "git" fetcher)
        owner/repo: GitHub owner and repo (for "github" fetcher)
        rev: tag or commit rev
        subdir: optional subdirectory within the source that contains Cargo.lock

    Returns:
        SRI hash string, or None on failure.
    """
    src_expr: Optional[str] = None
    if fetcher == "github" and owner and repo and rev and src_hash:
        src_expr = (
            f'pkgs.fetchFromGitHub {{ owner = "{owner}"; repo = "{repo}";'
            f' rev = "{rev}"; hash = "{src_hash}"; }}'
        )
    elif fetcher == "git" and url and rev and src_hash:
        # For GitHub-hosted URLs, fetchFromGitHub is more reliable than fetchgit.
        parsed = urlparse(url)
        path_parts = [seg for seg in parsed.path.split("/") if seg]
        if parsed.hostname in ("github.com",) and len(path_parts) >= 2:
            gh_owner, gh_repo = path_parts[0], path_parts[1]
            src_expr = (
                f'pkgs.fetchFromGitHub {{ owner = "{gh_owner}"; repo = "{gh_repo}";'
                f' rev = "{rev}"; hash = "{src_hash}"; }}'
            )
        else:
            src_expr = f'pkgs.fetchgit {{ url = "{url}"; rev = "{rev}"; hash = "{src_hash}"; }}'
    if src_expr is None:
        # Unknown fetcher or missing required fields.
        return None

    subdir_attr = f'sourceRoot = "${{src.name}}/{subdir}";' if subdir else ""
    expr = (
        f"let pkgs = import <nixpkgs> {{}};\n"
        f" src = {src_expr};\n"
        f"in pkgs.rustPlatform.fetchCargoVendor {{\n"
        f" inherit src;\n"
        f" {subdir_attr}\n"
        f" hash = pkgs.lib.fakeHash;\n"
        f"}}"
    )
    proc = subprocess.run(
        ["nix", "build", "--impure", "--expr", expr],
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    # nix reports the real hash on a "got: sha256-..." line when fakeHash fails.
    found = re.search(r"got:\s+(sha256-[A-Za-z0-9+/=]+)", proc.stderr)
    if found:
        return found.group(1)
    eprintln(f"nix_prefetch_cargo_vendor failed:\n{proc.stderr[-600:]}")
    return None
def http_get_json(url: str, token: Optional[str] = None) -> Any:
try:
req = urllib.request.Request(
@@ -312,17 +385,42 @@ def gh_list_tags(owner: str, repo: str, token: Optional[str]) -> List[str]:
return []
def gh_head_commit(
    owner: str, repo: str, branch: Optional[str] = None
) -> Optional[str]:
    """Return the latest commit SHA for a GitHub repo, optionally restricted to a branch.

    Args:
        owner: GitHub repository owner.
        repo: GitHub repository name.
        branch: optional branch name; when None, HEAD is queried.

    Returns:
        The commit SHA string, or None when the remote query fails or
        produces no output.
    """
    try:
        ref = f"refs/heads/{branch}" if branch else "HEAD"
        out = run_get_stdout(
            ["git", "ls-remote", f"https://github.com/{owner}/{repo}.git", ref]
        )
        if not out:
            return None
        # ls-remote can return multiple lines; take the first match
        for line in out.splitlines():
            parts = line.split()
            if parts:
                return parts[0]
        return None
    except Exception as e:
        eprintln(f"head_commit failed for {owner}/{repo} (branch={branch}): {e}")
        return None
def git_branch_commit(url: str, branch: Optional[str] = None) -> Optional[str]:
    """Return the latest commit SHA for a git URL, optionally restricted to a branch."""
    try:
        ref = f"refs/heads/{branch}" if branch else "HEAD"
        out = run_get_stdout(["git", "ls-remote", url, ref])
        if out:
            # ls-remote may emit several lines; return the SHA from the
            # first non-empty one.
            for line in out.splitlines():
                fields = line.split()
                if fields:
                    return fields[0]
        return None
    except Exception as e:
        eprintln(f"git_branch_commit failed for {url} (branch={branch}): {e}")
        return None
@@ -381,16 +479,77 @@ def find_packages() -> List[Tuple[str, Path, bool, bool]]:
if pkg_dir.is_dir():
nix_file = pkg_dir / "default.nix"
if nix_file.exists():
# name is homeassistant/component-name
# Only treat as an HA component if it uses buildHomeAssistantComponent;
# otherwise fall through to Python package handling.
try:
nix_content = nix_file.read_text(encoding="utf-8")
except Exception:
nix_content = ""
rel = pkg_dir.relative_to(PKGS_DIR)
results.append(
(str(rel), nix_file, False, True)
) # (name, path, is_python, is_homeassistant)
if "buildHomeAssistantComponent" in nix_content:
results.append(
(str(rel), nix_file, False, True)
) # (name, path, is_python, is_homeassistant)
else:
# Treat as a Python package instead
results.append((str(rel), nix_file, True, False))
results.sort()
return results
def _extract_brace_block(content: str, keyword: str) -> Optional[str]:
"""
Find 'keyword {' in content and return the text between the matching braces,
handling nested braces (e.g. ${var} inside strings).
Returns None if not found.
"""
idx = content.find(keyword + " {")
if idx == -1:
idx = content.find(keyword + "{")
if idx == -1:
return None
start = content.find("{", idx + len(keyword))
if start == -1:
return None
depth = 0
for i in range(start, len(content)):
c = content[i]
if c == "{":
depth += 1
elif c == "}":
depth -= 1
if depth == 0:
return content[start + 1 : i]
return None
def _resolve_nix_str(value: str, pname: str, version: str) -> str:
"""Resolve simple Nix string interpolations like ${pname} and ${version}."""
value = re.sub(r"\$\{pname\}", pname, value)
value = re.sub(r"\$\{version\}", version, value)
return value
def _extract_nix_attr(block: str, attr: str) -> str:
"""
Extract attribute value from a Nix attribute set block.
Handles:
attr = "quoted string";
attr = unquoted_ident;
Returns empty string if not found.
"""
# Quoted string value
m = re.search(rf'\b{attr}\s*=\s*"([^"]*)"', block)
if m:
return m.group(1)
# Unquoted identifier (e.g. repo = pname;)
m = re.search(rf"\b{attr}\s*=\s*([A-Za-z_][A-Za-z0-9_-]*)\s*;", block)
if m:
return m.group(1)
return ""
def parse_python_package(path: Path) -> Dict[str, Any]:
"""Parse a Python package's default.nix file to extract version and source information."""
with path.open("r", encoding="utf-8") as f:
@@ -404,46 +563,43 @@ def parse_python_package(path: Path) -> Dict[str, Any]:
pname_match = re.search(r'pname\s*=\s*"([^"]+)"', content)
pname = pname_match.group(1) if pname_match else ""
# Check for fetchFromGitHub pattern
fetch_github_match = re.search(
r"src\s*=\s*fetchFromGitHub\s*\{([^}]+)\}", content, re.DOTALL
)
# Check for fetchPypi pattern
fetch_pypi_match = re.search(
r"src\s*=\s*.*fetchPypi\s*\{([^}]+)\}", content, re.DOTALL
)
# Create a structure similar to version.json for compatibility
result = {"variables": {}, "sources": {}}
result: Dict[str, Any] = {"variables": {}, "sources": {}}
# Only add non-empty values to variables
if version:
result["variables"]["version"] = version
# Determine source name - use pname, repo name, or derive from path
source_name = ""
if pname:
source_name = pname.lower()
else:
# Use directory name as source name
source_name = path.parent.name.lower()
# Determine source name - use pname or derive from path
source_name = pname.lower() if pname else path.parent.name.lower()
# Handle fetchFromGitHub pattern
if fetch_github_match:
fetch_block = fetch_github_match.group(1)
# Try to extract brace-balanced fetchFromGitHub block (handles ${var} inside strings)
fetch_block = _extract_brace_block(content, "fetchFromGitHub")
# Extract GitHub info from the fetchFromGitHub block
owner_match = re.search(r'owner\s*=\s*"([^"]+)"', fetch_block)
repo_match = re.search(r'repo\s*=\s*"([^"]+)"', fetch_block)
rev_match = re.search(r'rev\s*=\s*"([^"]+)"', fetch_block)
# Check for fetchPypi pattern (simple [^}]+ is fine here as PyPI blocks lack ${})
fetch_pypi_match = re.search(
r"src\s*=\s*.*fetchPypi\s*\{([^}]+)\}", content, re.DOTALL
)
if fetch_block is not None:
owner_raw = _extract_nix_attr(fetch_block, "owner")
repo_raw = _extract_nix_attr(fetch_block, "repo")
rev_raw = _extract_nix_attr(fetch_block, "rev")
hash_match = re.search(r'(sha256|hash)\s*=\s*"([^"]+)"', fetch_block)
owner = owner_match.group(1) if owner_match else ""
repo = repo_match.group(1) if repo_match else ""
rev = rev_match.group(1) if rev_match else ""
hash_value = hash_match.group(2) if hash_match else ""
def _resolve_nix_ident(raw: str) -> str:
"""Resolve unquoted Nix identifier or string-with-interpolation to its value."""
if raw == "pname":
return pname
if raw == "version":
return version
return _resolve_nix_str(raw, pname, version)
owner = _resolve_nix_ident(owner_raw)
repo = _resolve_nix_ident(repo_raw)
rev = _resolve_nix_ident(rev_raw)
# Create source entry
result["sources"][source_name] = {
"fetcher": "github",
@@ -452,51 +608,39 @@ def parse_python_package(path: Path) -> Dict[str, Any]:
"hash": hash_value,
}
# Handle rev field which might contain a tag or version reference
# Classify rev as tag or commit ref
if rev:
# Check if it's a tag reference (starts with v)
if rev.startswith("v"):
if rev.startswith("v") or "${version}" in rev_raw:
result["sources"][source_name]["tag"] = rev
# Check if it contains ${version} variable
elif "${version}" in rev:
result["sources"][source_name]["tag"] = rev
# Check if it's "master" or a specific branch
elif rev in ["master", "main"]:
elif rev in ("master", "main"):
result["sources"][source_name]["rev"] = rev
# Otherwise treat as a regular revision
else:
result["sources"][source_name]["rev"] = rev
# Handle fetchPypi pattern
elif fetch_pypi_match:
fetch_block = fetch_pypi_match.group(1)
# Extract PyPI info
hash_match = re.search(r'(sha256|hash)\s*=\s*"([^"]+)"', fetch_block)
elif fetch_pypi_match:
fetch_block_pypi = fetch_pypi_match.group(1)
hash_match = re.search(r'(sha256|hash)\s*=\s*"([^"]+)"', fetch_block_pypi)
hash_value = hash_match.group(2) if hash_match else ""
# Look for GitHub info in meta section
homepage_match = re.search(
r'homepage\s*=\s*"https://github.com/([^/]+)/([^"]+)"', content
r'homepage\s*=\s*"https://github\.com/([^/]+)/([^"]+)"', content
)
if homepage_match:
owner = homepage_match.group(1)
repo = homepage_match.group(2)
# Create source entry with GitHub info
result["sources"][source_name] = {
"fetcher": "github",
"owner": owner,
"repo": repo,
"hash": hash_value,
"pypi": True, # Mark as PyPI source
"pypi": True,
}
# Add version as tag if available
if version:
result["sources"][source_name]["tag"] = f"v{version}"
else:
# Create PyPI source entry
result["sources"][source_name] = {
"fetcher": "pypi",
"pname": pname,
@@ -504,32 +648,28 @@ def parse_python_package(path: Path) -> Dict[str, Any]:
"hash": hash_value,
}
else:
# Try to extract standalone GitHub info if present
# Fallback: scan whole file for GitHub or URL info
owner_match = re.search(r'owner\s*=\s*"([^"]+)"', content)
repo_match = re.search(r'repo\s*=\s*"([^"]+)"', content)
rev_match = re.search(r'rev\s*=\s*"([^"]+)"', content)
tag_match = re.search(r'tag\s*=\s*"([^"]+)"', content)
hash_match = re.search(r'(sha256|hash)\s*=\s*"([^"]+)"', content)
url_match = re.search(r'url\s*=\s*"([^"]+)"', content)
homepage_match = re.search(
r'homepage\s*=\s*"https://github\.com/([^/]+)/([^"]+)"', content
)
owner = owner_match.group(1) if owner_match else ""
repo = repo_match.group(1) if repo_match else ""
rev = rev_match.group(1) if rev_match else ""
tag = tag_match.group(1) if tag_match else ""
hash_value = hash_match.group(2) if hash_match else ""
# Try to extract URL if GitHub info is not present
url_match = re.search(r'url\s*=\s*"([^"]+)"', content)
url = url_match.group(1) if url_match else ""
# Check for GitHub homepage in meta section
homepage_match = re.search(
r'homepage\s*=\s*"https://github.com/([^/]+)/([^"]+)"', content
)
if homepage_match and not (owner and repo):
owner = homepage_match.group(1)
repo = homepage_match.group(2)
# Handle GitHub sources
if owner and repo:
result["sources"][source_name] = {
"fetcher": "github",
@@ -537,23 +677,17 @@ def parse_python_package(path: Path) -> Dict[str, Any]:
"repo": repo,
"hash": hash_value,
}
# Handle tag
if tag:
result["sources"][source_name]["tag"] = tag
# Handle rev
elif rev:
result["sources"][source_name]["rev"] = rev
# Handle URL sources
elif url:
result["sources"][source_name] = {
"fetcher": "url",
"url": url,
"hash": hash_value,
}
# Fallback for packages with no clear source info
else:
# Create a minimal source entry so the package shows up in the UI
result["sources"][source_name] = {"fetcher": "unknown", "hash": hash_value}
return result
@@ -662,14 +796,17 @@ def parse_homeassistant_component(path: Path) -> Dict[str, Any]:
if hash_value:
result["sources"][source_name]["hash"] = hash_value
# Handle tag or rev
# Handle tag or rev; resolve ${version} references
if tag:
result["sources"][source_name]["tag"] = tag
result["sources"][source_name]["tag"] = _resolve_nix_str(tag, "", version)
elif rev:
result["sources"][source_name]["rev"] = rev
elif (
version
): # If no tag or rev specified, but version exists, use version as tag
rev_resolved = _resolve_nix_str(rev, "", version)
# If rev was a ${version} template or equals version, treat as tag
if "${version}" in rev or rev_resolved == version:
result["sources"][source_name]["tag"] = rev_resolved
else:
result["sources"][source_name]["rev"] = rev_resolved
elif version: # fallback: use version as tag
result["sources"][source_name]["tag"] = version
else:
# Fallback for components with no clear source info
@@ -1149,15 +1286,15 @@ class PackagesScreen(ScreenBase):
elif ch in (curses.KEY_UP, ord("k")):
self.idx = max(0, self.idx - 1)
elif ch in (curses.KEY_DOWN, ord("j")):
self.idx = min(len(self.packages) - 1, self.idx + 1)
self.idx = min(len(filtered_packages) - 1, self.idx + 1)
elif ch == curses.KEY_PPAGE: # Page Up
self.idx = max(0, self.idx - (h - 4))
elif ch == curses.KEY_NPAGE: # Page Down
self.idx = min(len(self.packages) - 1, self.idx + (h - 4))
self.idx = min(len(filtered_packages) - 1, self.idx + (h - 4))
elif ch == ord("g"): # Go to top
self.idx = 0
elif ch == ord("G"): # Go to bottom
self.idx = len(self.packages) - 1
self.idx = max(0, len(filtered_packages) - 1)
elif ch == ord("f"):
# Cycle through filter modes
if self.filter_mode == "all":
@@ -1250,62 +1387,72 @@ class PackageDetailScreen(ScreenBase):
def fetch_candidates_for(self, name: str):
comp = self.merged_srcs[name]
fetcher = comp.get("fetcher", "none")
branch = comp.get("branch") or None # optional branch override
c = {"release": "", "tag": "", "commit": ""}
if fetcher == "github":
owner = comp.get("owner")
repo = comp.get("repo")
if owner and repo:
r = gh_latest_release(owner, repo, self.gh_token)
if r:
c["release"] = r
t = gh_latest_tag(owner, repo, self.gh_token)
if t:
c["tag"] = t
m = gh_head_commit(owner, repo)
# Only fetch release/tag candidates when not locked to a specific branch
if not branch:
r = gh_latest_release(owner, repo, self.gh_token)
if r:
c["release"] = r
t = gh_latest_tag(owner, repo, self.gh_token)
if t:
c["tag"] = t
m = gh_head_commit(owner, repo, branch)
if m:
c["commit"] = m
# Special-case raspberrypi/linux: prefer latest stable_* tag or series-specific tags
try:
if owner == "raspberrypi" and repo == "linux":
tags_all = gh_list_tags(owner, repo, self.gh_token)
rendered = render_templates(comp, self.merged_vars)
cur_tag = str(rendered.get("tag") or "")
# If current tag uses stable_YYYYMMDD scheme, pick latest stable_* tag
if cur_tag.startswith("stable_"):
stable_tags = sorted(
[x for x in tags_all if re.match(r"^stable_\d{8}$", x)],
reverse=True,
)
if stable_tags:
c["tag"] = stable_tags[0]
else:
# Try to pick a tag matching the current major.minor series if available
mm = str(self.merged_vars.get("modDirVersion") or "")
m2 = re.match(r"^(\d+)\.(\d+)", mm)
if m2:
base = f"rpi-{m2.group(1)}.{m2.group(2)}"
series_tags = [
x
for x in tags_all
if (
x == f"{base}.y"
or x.startswith(f"{base}.y")
or x.startswith(f"{base}.")
)
]
series_tags.sort(reverse=True)
if series_tags:
c["tag"] = series_tags[0]
except Exception as _e:
# Fallback to previously computed values
pass
# (only when not branch-locked, as branch-locked tracks a rolling branch via commit)
if not branch:
try:
if owner == "raspberrypi" and repo == "linux":
tags_all = gh_list_tags(owner, repo, self.gh_token)
rendered = render_templates(comp, self.merged_vars)
cur_tag = str(rendered.get("tag") or "")
# If current tag uses stable_YYYYMMDD scheme, pick latest stable_* tag
if cur_tag.startswith("stable_"):
stable_tags = sorted(
[
x
for x in tags_all
if re.match(r"^stable_\d{8}$", x)
],
reverse=True,
)
if stable_tags:
c["tag"] = stable_tags[0]
else:
# Try to pick a tag matching the current major.minor series if available
mm = str(self.merged_vars.get("modDirVersion") or "")
m2 = re.match(r"^(\d+)\.(\d+)", mm)
if m2:
base = f"rpi-{m2.group(1)}.{m2.group(2)}"
series_tags = [
x
for x in tags_all
if (
x == f"{base}.y"
or x.startswith(f"{base}.y")
or x.startswith(f"{base}.")
)
]
series_tags.sort(reverse=True)
if series_tags:
c["tag"] = series_tags[0]
except Exception as _e:
# Fallback to previously computed values
pass
elif fetcher == "git":
url = comp.get("url")
if url:
out = run_get_stdout(["git", "ls-remote", url, "HEAD"])
if out:
c["commit"] = out.split()[0]
commit = git_branch_commit(url, branch)
if commit:
c["commit"] = commit
elif fetcher == "url":
# Heuristic for GitHub release assets with variables in version.json (e.g., proton-cachyos)
owner = self.merged_vars.get("owner")
@@ -1362,6 +1509,74 @@ class PackageDetailScreen(ScreenBase):
return nix_prefetch_url(url)
return None
def prefetch_cargo_hash_for(self, name: str) -> Optional[str]:
"""
Compute the cargo vendor hash for a source component that carries a
'cargoHash' field (or a linked 'cargoDeps' sibling source).
Uses nix_prefetch_cargo_vendor() which builds fetchCargoVendor with
lib.fakeHash and parses the correct hash from the error output.
"""
comp = self.merged_srcs[name]
fetcher = comp.get("fetcher", "none")
src_hash = comp.get("hash", "")
subdir = comp.get("cargoSubdir", "")
rendered = render_templates(comp, self.merged_vars)
if fetcher == "github":
owner = comp.get("owner", "")
repo = comp.get("repo", "")
ref = rendered.get("tag") or rendered.get("rev") or ""
if owner and repo and ref and src_hash:
return nix_prefetch_cargo_vendor(
"github",
src_hash,
owner=owner,
repo=repo,
rev=ref,
subdir=subdir,
)
elif fetcher == "git":
url = comp.get("url", "")
rev = rendered.get("rev") or rendered.get("tag") or ""
if url and rev and src_hash:
return nix_prefetch_cargo_vendor(
"git",
src_hash,
url=url,
rev=rev,
subdir=subdir,
)
return None
def _source_has_cargo(self, name: str) -> bool:
"""Return True if this source carries a cargoHash field."""
comp = self.merged_srcs.get(name, {})
return "cargoHash" in comp
def _apply_cargo_hash_to_sibling(self, name: str, cargo_hash: str):
"""
Propagate a freshly-computed cargo hash to any sibling source that mirrors
the cargoDeps pattern — a source whose only meaningful field is "hash" and
which is meant to stay in sync with the main source's cargoHash.
Detection heuristic (any match triggers update):
- Sibling is literally named "cargoDeps", OR
- Sibling has no fetcher and its only field is "hash" (pure hash mirror)
"""
ts = self.target_dict.setdefault("sources", {})
for sibling_name, sibling in list(self.merged_srcs.items()):
if sibling_name == name:
continue
has_fetcher = bool(sibling.get("fetcher"))
non_fetcher_keys = [k for k in sibling.keys() if k != "fetcher"]
is_cargo_deps = sibling_name == "cargoDeps"
is_hash_only = not has_fetcher and non_fetcher_keys == ["hash"]
if is_cargo_deps or is_hash_only:
sw = ts.setdefault(sibling_name, {})
sw["hash"] = cargo_hash
def cachyos_suffix(self) -> str:
if self.vidx == 0:
return ""
@@ -1486,8 +1701,20 @@ class PackageDetailScreen(ScreenBase):
compw = ts.setdefault(name, {})
compw["version"] = latest
compw["hash"] = sri
self._refresh_merged()
self.set_status(f"{name}: updated version to {latest} and refreshed hash")
def _refresh_merged(self):
"""Re-compute merged_vars/merged_srcs/target_dict without resetting sidx."""
variant_name = None if self.vidx == 0 else self.variants[self.vidx]
self.merged_vars, self.merged_srcs, self.target_dict = merged_view(
self.spec, variant_name
)
self.snames = sorted(list(self.merged_srcs.keys()))
# Clamp sidx in case source list changed
if self.snames:
self.sidx = min(self.sidx, len(self.snames) - 1)
def set_ref(self, name: str, kind: str, value: str):
# Write to selected target dict (base or variant override)
ts = self.target_dict.setdefault("sources", {})
@@ -1500,6 +1727,8 @@ class PackageDetailScreen(ScreenBase):
comp["rev"] = value
if "tag" in comp:
del comp["tag"]
# Refresh merged_srcs so prefetch_hash_for sees the updated ref
self._refresh_merged()
def save(self):
if self.is_python:
@@ -1700,11 +1929,15 @@ class PackageDetailScreen(ScreenBase):
# Display fetcher with appropriate color
self.stdscr.addstr(4 + i, 24, fetcher, curses.color_pair(fetcher_color))
# Display reference
# Display reference, with optional branch and cargo indicators
branch = comp.get("branch") or ""
branch_suffix = f" [{branch}]" if branch else ""
cargo_suffix = " [cargo]" if "cargoHash" in comp else ""
ref_with_extras = f"ref={ref_short}{branch_suffix}{cargo_suffix}"
self.stdscr.addstr(
4 + i,
32,
f"ref={ref_short}"[: w - 34],
ref_with_extras[: w - 34],
curses.color_pair(COLOR_NORMAL),
)
@@ -1727,11 +1960,17 @@ class PackageDetailScreen(ScreenBase):
):
self.fetch_candidates_for(_sel_name)
# Latest header with decoration
# Latest header with decoration — show branch if locked
_branch = _comp.get("branch") or ""
_latest_hdr = (
f"Latest Versions: (branch: {_branch})"
if _branch
else "Latest Versions:"
)
self.stdscr.addstr(
y_latest + 1,
2,
"Latest Versions:",
_latest_hdr[: w - 4],
curses.color_pair(COLOR_HEADER) | curses.A_BOLD,
)
@@ -1844,7 +2083,7 @@ class PackageDetailScreen(ScreenBase):
)
# Footer instructions with better formatting
footer = "Enter: component actions | r: refresh | h: hash | e: edit | s: save | Backspace: back | q: quit"
footer = "Enter: component actions | r: refresh | h: hash | i: url | e: edit | s: save | ←/→: variant | Backspace: back | q: quit"
footer_x = (w - len(footer)) // 2
self.stdscr.addstr(h - 4, footer_x, footer, curses.color_pair(COLOR_STATUS))
@@ -1857,10 +2096,10 @@ class PackageDetailScreen(ScreenBase):
return None
elif ch == curses.KEY_BACKSPACE or ch == 127:
return "reload"
elif ch in (curses.KEY_LEFT, ord("h")):
elif ch in (curses.KEY_LEFT,):
self.vidx = max(0, self.vidx - 1)
self.select_variant()
elif ch in (curses.KEY_RIGHT, ord("l")):
elif ch in (curses.KEY_RIGHT,):
self.vidx = min(len(self.variants) - 1, self.vidx + 1)
self.select_variant()
elif ch in (curses.KEY_UP, ord("k")):
@@ -1893,8 +2132,10 @@ class PackageDetailScreen(ScreenBase):
else:
self.fetch_candidates_for(name)
cand = self.candidates.get(name, {})
branch = comp.get("branch") or ""
lines = [
f"Candidates for {name}:",
f"Candidates for {name}:"
+ (f" (branch: {branch})" if branch else ""),
f" latest release: {cand.get('release') or '-'}",
f" latest tag : {cand.get('tag') or '-'}",
f" latest commit : {cand.get('commit') or '-'}",
@@ -1920,7 +2161,25 @@ class PackageDetailScreen(ScreenBase):
ts = self.target_dict.setdefault("sources", {})
compw = ts.setdefault(name, {})
compw["hash"] = sri
self.set_status(f"{name}: updated hash")
self._refresh_merged()
# If this source also has a cargoHash, recompute it now
if self._source_has_cargo(name):
self.set_status(
f"{name}: updated hash; computing cargo hash..."
)
self.stdscr.refresh()
cargo_sri = self.prefetch_cargo_hash_for(name)
if cargo_sri:
compw["cargoHash"] = cargo_sri
self._apply_cargo_hash_to_sibling(name, cargo_sri)
self._refresh_merged()
self.set_status(f"{name}: updated hash + cargo hash")
else:
self.set_status(
f"{name}: updated hash; cargo hash failed"
)
else:
self.set_status(f"{name}: updated hash")
else:
self.set_status(f"{name}: hash prefetch failed")
elif ch in (ord("e"),):
@@ -1952,25 +2211,47 @@ class PackageDetailScreen(ScreenBase):
if name not in self.candidates:
self.fetch_candidates_for(name)
cand = self.candidates.get(name, {})
branch = comp.get("branch") or ""
# Present small menu
items = []
if fetcher == "github":
items = [
(
"Use latest release (tag)",
("release", cand.get("release")),
),
("Use latest tag", ("tag", cand.get("tag"))),
("Use latest commit (rev)", ("commit", cand.get("commit"))),
("Recompute hash", ("hash", None)),
("Cancel", ("cancel", None)),
]
# When branch-locked, only offer latest commit (tags are irrelevant)
if branch:
items = [
(
"Use latest commit (rev)",
("commit", cand.get("commit")),
),
("Recompute hash", ("hash", None)),
("Cancel", ("cancel", None)),
]
else:
items = [
(
"Use latest release (tag)",
("release", cand.get("release")),
),
("Use latest tag", ("tag", cand.get("tag"))),
(
"Use latest commit (rev)",
("commit", cand.get("commit")),
),
("Recompute hash", ("hash", None)),
("Cancel", ("cancel", None)),
]
else:
items = [
("Use latest commit (rev)", ("commit", cand.get("commit"))),
("Recompute hash", ("hash", None)),
("Cancel", ("cancel", None)),
]
# Inject cargo hash option before Cancel when applicable
has_cargo = self._source_has_cargo(name)
if has_cargo:
items = [item for item in items if item[1][0] != "cancel"] + [
("Recompute cargo hash", ("cargo_hash", None)),
("Cancel", ("cancel", None)),
]
# Build header with current and available refs
rendered = render_templates(comp, self.merged_vars)
cur_tag = rendered.get("tag") or ""
@@ -1984,10 +2265,17 @@ class PackageDetailScreen(ScreenBase):
current_str = f"current: version={cur_version}"
else:
current_str = "current: -"
if branch:
current_str += f" (branch: {branch})"
cur_cargo = comp.get("cargoHash", "")
header_lines = [
current_str,
f"available: release={cand.get('release') or '-'} tag={cand.get('tag') or '-'} commit={(cand.get('commit') or '')[:12] or '-'}",
]
if has_cargo:
header_lines.append(
f"cargoHash: {cur_cargo[:32] + '...' if len(cur_cargo) > 32 else cur_cargo or '-'}"
)
choice = select_menu(
self.stdscr,
f"Actions for {name}",
@@ -1999,13 +2287,38 @@ class PackageDetailScreen(ScreenBase):
if kind in ("release", "tag", "commit"):
if val:
self.set_ref(name, kind, val)
# update hash
# update src hash
sri = self.prefetch_hash_for(name)
if sri:
ts = self.target_dict.setdefault("sources", {})
compw = ts.setdefault(name, {})
compw["hash"] = sri
self.set_status(f"{name}: set {kind} and updated hash")
self._refresh_merged()
# also update cargo hash if applicable
if has_cargo:
self.set_status(
f"{name}: set {kind}, hashing (src)..."
)
cargo_sri = self.prefetch_cargo_hash_for(name)
if cargo_sri:
ts = self.target_dict.setdefault("sources", {})
compw = ts.setdefault(name, {})
compw["cargoHash"] = cargo_sri
self._apply_cargo_hash_to_sibling(
name, cargo_sri
)
self._refresh_merged()
self.set_status(
f"{name}: set {kind}, updated src + cargo hash"
)
else:
self.set_status(
f"{name}: set {kind}, updated src hash; cargo hash failed"
)
else:
self.set_status(
f"{name}: set {kind} and updated hash"
)
else:
self.set_status(f"No candidate {kind}")
elif kind == "hash":
@@ -2014,9 +2327,25 @@ class PackageDetailScreen(ScreenBase):
ts = self.target_dict.setdefault("sources", {})
compw = ts.setdefault(name, {})
compw["hash"] = sri
self._refresh_merged()
self.set_status(f"{name}: updated hash")
else:
self.set_status("hash prefetch failed")
elif kind == "cargo_hash":
self.set_status(f"{name}: computing cargo hash...")
self.stdscr.refresh()
cargo_sri = self.prefetch_cargo_hash_for(name)
if cargo_sri:
ts = self.target_dict.setdefault("sources", {})
compw = ts.setdefault(name, {})
compw["cargoHash"] = cargo_sri
self._apply_cargo_hash_to_sibling(name, cargo_sri)
self._refresh_merged()
self.set_status(f"{name}: updated cargo hash")
else:
self.set_status(
f"{name}: cargo hash computation failed"
)
else:
pass
elif fetcher == "url":