fix avahi
This commit is contained in:
2
.gitignore
vendored
2
.gitignore
vendored
@@ -6,9 +6,9 @@ result*
|
|||||||
.direnv
|
.direnv
|
||||||
shell.nix
|
shell.nix
|
||||||
.vscode
|
.vscode
|
||||||
**/*/*.py
|
|
||||||
.envrc
|
.envrc
|
||||||
.DS_Store
|
.DS_Store
|
||||||
*.qcow2
|
*.qcow2
|
||||||
keys
|
keys
|
||||||
iso-*
|
iso-*
|
||||||
|
**/*/__pycache__
|
||||||
@@ -76,6 +76,7 @@
|
|||||||
"repo": { "type": "string", "description": "GitHub repository (github fetcher)." },
|
"repo": { "type": "string", "description": "GitHub repository (github fetcher)." },
|
||||||
"tag": { "type": "string", "description": "Git tag (github fetcher). Mutually exclusive with 'rev'." },
|
"tag": { "type": "string", "description": "Git tag (github fetcher). Mutually exclusive with 'rev'." },
|
||||||
"rev": { "type": "string", "description": "Commit revision (github/git fetchers)." },
|
"rev": { "type": "string", "description": "Commit revision (github/git fetchers)." },
|
||||||
|
"branch": { "type": "string", "description": "Branch to track for HEAD-commit updates (github/git fetchers). Stored alongside 'rev' to record which branch the pinned commit came from. Has no effect on the Nix fetcher itself — only used by the version management tooling." },
|
||||||
"submodules": { "type": "boolean", "description": "Whether to fetch submodules (github/git fetchers)." },
|
"submodules": { "type": "boolean", "description": "Whether to fetch submodules (github/git fetchers)." },
|
||||||
|
|
||||||
"url": { "type": "string", "description": "Final URL (url fetcher). May be templated." },
|
"url": { "type": "string", "description": "Final URL (url fetcher). May be templated." },
|
||||||
@@ -157,6 +158,7 @@
|
|||||||
"repo": { "type": "string" },
|
"repo": { "type": "string" },
|
||||||
"tag": { "type": "string" },
|
"tag": { "type": "string" },
|
||||||
"rev": { "type": "string" },
|
"rev": { "type": "string" },
|
||||||
|
"branch": { "type": "string" },
|
||||||
"submodules": { "type": "boolean" },
|
"submodules": { "type": "boolean" },
|
||||||
|
|
||||||
"url": { "type": "string" },
|
"url": { "type": "string" },
|
||||||
|
|||||||
@@ -122,6 +122,14 @@ in
|
|||||||
network.wait-online.enable = false;
|
network.wait-online.enable = false;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
# Restrict Avahi to the configured LAN interface when one is explicitly set.
|
||||||
|
# This prevents Avahi from announcing on virtual/container interfaces (veth*,
|
||||||
|
# podman0, virbr0, etc.) which causes hostname conflicts and suffix mangling
|
||||||
|
# (e.g. "jallen-nas-4.local" instead of "jallen-nas.local").
|
||||||
|
services.avahi = lib.mkIf (cfg.ipv4.interface != "") {
|
||||||
|
allowInterfaces = [ cfg.ipv4.interface ];
|
||||||
|
};
|
||||||
|
|
||||||
networking = {
|
networking = {
|
||||||
hostName = lib.mkForce cfg.hostName;
|
hostName = lib.mkForce cfg.hostName;
|
||||||
|
|
||||||
|
|||||||
@@ -41,14 +41,14 @@ let
|
|||||||
) serviceNames;
|
) serviceNames;
|
||||||
|
|
||||||
hostedServicesByGroup = builtins.groupBy (svc: svc.hostedService.group) (
|
hostedServicesByGroup = builtins.groupBy (svc: svc.hostedService.group) (
|
||||||
builtins.filter (svc: svc.hostedService.enable) (
|
builtins.filter (svc: svc.hostedService != null && svc.hostedService.enable) (
|
||||||
builtins.map (
|
builtins.map (
|
||||||
serviceName:
|
serviceName:
|
||||||
let
|
let
|
||||||
serviceCfg = config.${namespace}.services.${serviceName};
|
serviceCfg = config.${namespace}.services.${serviceName};
|
||||||
in
|
in
|
||||||
{
|
{
|
||||||
inherit (serviceCfg) hostedService;
|
hostedService = serviceCfg.hostedService or null;
|
||||||
}
|
}
|
||||||
) (builtins.attrNames config.${namespace}.services)
|
) (builtins.attrNames config.${namespace}.services)
|
||||||
)
|
)
|
||||||
@@ -349,16 +349,16 @@ let
|
|||||||
first-day-of-week = "sunday";
|
first-day-of-week = "sunday";
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
++ (lib.mkIf cfg.weather.enable {
|
++ lib.optional cfg.weather.enable {
|
||||||
type = "weather";
|
type = "weather";
|
||||||
units = cfg.weather.units;
|
units = cfg.weather.units;
|
||||||
hour-format = cfg.weather.hour-format;
|
hour-format = cfg.weather.hour-format;
|
||||||
location = cfg.weather.location;
|
location = cfg.weather.location;
|
||||||
})
|
}
|
||||||
++ (lib.mkIf (cfg.servers != [ ]) {
|
++ lib.optional (cfg.servers != [ ]) {
|
||||||
type = "server-stats";
|
type = "server-stats";
|
||||||
servers = cfg.servers;
|
servers = cfg.servers;
|
||||||
});
|
};
|
||||||
}
|
}
|
||||||
{
|
{
|
||||||
size = "full";
|
size = "full";
|
||||||
@@ -370,7 +370,7 @@ let
|
|||||||
bangs = cfg.search;
|
bangs = cfg.search;
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
++ (lib.mkIf cfg.hostedServiceGroups (
|
++ lib.optionals cfg.hostedServiceGroups (
|
||||||
builtins.map (
|
builtins.map (
|
||||||
groupName:
|
groupName:
|
||||||
makeMonitorWidget groupName (
|
makeMonitorWidget groupName (
|
||||||
@@ -381,11 +381,11 @@ let
|
|||||||
}) (hostedServicesByGroup.${groupName} or [ ])
|
}) (hostedServicesByGroup.${groupName} or [ ])
|
||||||
)
|
)
|
||||||
) (builtins.attrNames hostedServicesByGroup)
|
) (builtins.attrNames hostedServicesByGroup)
|
||||||
))
|
)
|
||||||
++ (lib.mkIf (!cfg.hostedServiceGroups && cfg.enableHostedServices) [
|
++ lib.optionals (!cfg.hostedServiceGroups && cfg.enableHostedServices) [
|
||||||
(makeMonitorWidget "Services" hostedServiceSites)
|
(makeMonitorWidget "Services" hostedServiceSites)
|
||||||
])
|
]
|
||||||
++ (lib.mkIf (cfg.extraSites != [ ]) (
|
++ lib.optionals (cfg.extraSites != [ ]) (
|
||||||
builtins.map (site: {
|
builtins.map (site: {
|
||||||
type = "monitor";
|
type = "monitor";
|
||||||
cache = "1m";
|
cache = "1m";
|
||||||
@@ -401,17 +401,17 @@ let
|
|||||||
)
|
)
|
||||||
];
|
];
|
||||||
}) cfg.extraSites
|
}) cfg.extraSites
|
||||||
))
|
)
|
||||||
++ (lib.mkIf (cfg.bookmarks != [ ]) {
|
++ lib.optional (cfg.bookmarks != [ ]) {
|
||||||
type = "bookmarks";
|
type = "bookmarks";
|
||||||
groups = cfg.bookmarks;
|
groups = cfg.bookmarks;
|
||||||
})
|
}
|
||||||
++ (lib.mkIf (cfg.reddit != [ ]) (
|
++ lib.optionals (cfg.reddit != [ ]) (
|
||||||
builtins.map (subreddit: {
|
builtins.map (subreddit: {
|
||||||
type = "reddit";
|
type = "reddit";
|
||||||
inherit subreddit;
|
inherit subreddit;
|
||||||
}) cfg.reddit
|
}) cfg.reddit
|
||||||
));
|
);
|
||||||
}
|
}
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,21 +1,26 @@
|
|||||||
{
|
{
|
||||||
buildHomeAssistantComponent,
|
lib,
|
||||||
fetchFromGitHub,
|
|
||||||
pkgs,
|
|
||||||
namespace,
|
namespace,
|
||||||
|
pkgs,
|
||||||
|
buildHomeAssistantComponent,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
buildHomeAssistantComponent rec {
|
let
|
||||||
owner = "adamoutler";
|
inherit (lib.trivial) importJSON;
|
||||||
domain = "anycubic_wifi";
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
version = "HACS-10";
|
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
versionSpec = importJSON ./version.json;
|
||||||
owner = owner;
|
selected = selectVariant versionSpec null null;
|
||||||
repo = "anycubic-homeassistant";
|
sources = mkAllSources pkgs selected;
|
||||||
rev = version;
|
src-meta = selected.sources.anycubic;
|
||||||
hash = "sha256-TfZadwgdEJR11MaL+nfIgEYld3trWg3v6lOHSoxQ98Q=";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
};
|
in
|
||||||
|
buildHomeAssistantComponent {
|
||||||
|
owner = src-meta.owner;
|
||||||
|
domain = "anycubic_wifi";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
src = sources.anycubic;
|
||||||
|
|
||||||
nativeBuildInputs = [ pkgs.${namespace}.uart-wifi ];
|
nativeBuildInputs = [ pkgs.${namespace}.uart-wifi ];
|
||||||
|
|
||||||
|
|||||||
15
packages/homeassistant/ha-anycubic/version.json
Normal file
15
packages/homeassistant/ha-anycubic/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "HACS-10"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"anycubic": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "adamoutler",
|
||||||
|
"repo": "anycubic-homeassistant",
|
||||||
|
"tag": "HACS-10",
|
||||||
|
"hash": "sha256-TfZadwgdEJR11MaL+nfIgEYld3trWg3v6lOHSoxQ98Q="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,20 +1,27 @@
|
|||||||
{
|
{
|
||||||
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
buildHomeAssistantComponent,
|
buildHomeAssistantComponent,
|
||||||
fetchFromGitHub,
|
|
||||||
home-assistant,
|
home-assistant,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
buildHomeAssistantComponent rec {
|
let
|
||||||
owner = "greghesp";
|
inherit (lib.trivial) importJSON;
|
||||||
domain = "bambu_lab";
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
version = "v2.2.21";
|
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
versionSpec = importJSON ./version.json;
|
||||||
owner = owner;
|
selected = selectVariant versionSpec null null;
|
||||||
repo = "ha-bambulab";
|
sources = mkAllSources pkgs selected;
|
||||||
tag = version;
|
src-meta = selected.sources.bambu_lab;
|
||||||
hash = "sha256-56aAJAsmn+PzLZijFQ9DbTfHSrbeNk+OM/ibu32UHtg=";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
};
|
in
|
||||||
|
buildHomeAssistantComponent {
|
||||||
|
owner = src-meta.owner;
|
||||||
|
domain = "bambu_lab";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
src = sources.bambu_lab;
|
||||||
|
|
||||||
nativeBuildInputs = with home-assistant.python.pkgs; [
|
nativeBuildInputs = with home-assistant.python.pkgs; [
|
||||||
beautifulsoup4
|
beautifulsoup4
|
||||||
|
|||||||
15
packages/homeassistant/ha-bambulab/version.json
Normal file
15
packages/homeassistant/ha-bambulab/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "v2.2.21"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"bambu_lab": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "greghesp",
|
||||||
|
"repo": "ha-bambulab",
|
||||||
|
"tag": "v2.2.21",
|
||||||
|
"hash": "sha256-56aAJAsmn+PzLZijFQ9DbTfHSrbeNk+OM/ibu32UHtg="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,20 +1,27 @@
|
|||||||
{
|
{
|
||||||
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
buildHomeAssistantComponent,
|
buildHomeAssistantComponent,
|
||||||
fetchFromGitHub,
|
|
||||||
home-assistant,
|
home-assistant,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
buildHomeAssistantComponent rec {
|
let
|
||||||
owner = "natekspencer";
|
inherit (lib.trivial) importJSON;
|
||||||
domain = "bedjet";
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
version = "2.0.1";
|
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
versionSpec = importJSON ./version.json;
|
||||||
owner = owner;
|
selected = selectVariant versionSpec null null;
|
||||||
repo = "ha-bedjet";
|
sources = mkAllSources pkgs selected;
|
||||||
tag = version;
|
src-meta = selected.sources.bedjet;
|
||||||
hash = "sha256-FAuL3A8wtGwt+GM180A7wMlIvJvGoLmxNLCtnomxV3o=";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
};
|
in
|
||||||
|
buildHomeAssistantComponent {
|
||||||
|
owner = src-meta.owner;
|
||||||
|
domain = "bedjet";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
src = sources.bedjet;
|
||||||
|
|
||||||
nativeBuildInputs = with home-assistant.python.pkgs; [
|
nativeBuildInputs = with home-assistant.python.pkgs; [
|
||||||
beautifulsoup4
|
beautifulsoup4
|
||||||
|
|||||||
15
packages/homeassistant/ha-bedjet/version.json
Normal file
15
packages/homeassistant/ha-bedjet/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "2.0.1"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"bedjet": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "natekspencer",
|
||||||
|
"repo": "ha-bedjet",
|
||||||
|
"tag": "2.0.1",
|
||||||
|
"hash": "sha256-FAuL3A8wtGwt+GM180A7wMlIvJvGoLmxNLCtnomxV3o="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,22 +1,27 @@
|
|||||||
{
|
{
|
||||||
buildHomeAssistantComponent,
|
lib,
|
||||||
fetchFromGitHub,
|
|
||||||
home-assistant,
|
|
||||||
pkgs,
|
|
||||||
namespace,
|
namespace,
|
||||||
|
pkgs,
|
||||||
|
buildHomeAssistantComponent,
|
||||||
|
home-assistant,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
buildHomeAssistantComponent rec {
|
let
|
||||||
owner = "simbaja";
|
inherit (lib.trivial) importJSON;
|
||||||
domain = "ge_home";
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
version = "v2026.2.0";
|
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
versionSpec = importJSON ./version.json;
|
||||||
owner = owner;
|
selected = selectVariant versionSpec null null;
|
||||||
repo = "ha_gehome";
|
sources = mkAllSources pkgs selected;
|
||||||
tag = version;
|
src-meta = selected.sources.ge_home;
|
||||||
hash = "sha256-7c2GfTagNsIsSiT/sCqSV+BZZJMcvlsecDD+ZDZx9BA=";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
};
|
in
|
||||||
|
buildHomeAssistantComponent {
|
||||||
|
owner = src-meta.owner;
|
||||||
|
domain = "ge_home";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
src = sources.ge_home;
|
||||||
|
|
||||||
# gehomesdk and magicattr must be built against HA's Python
|
# gehomesdk and magicattr must be built against HA's Python
|
||||||
dependencies = with pkgs.${namespace}; [
|
dependencies = with pkgs.${namespace}; [
|
||||||
|
|||||||
15
packages/homeassistant/ha-gehome/version.json
Normal file
15
packages/homeassistant/ha-gehome/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "v2026.2.0"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"ge_home": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "simbaja",
|
||||||
|
"repo": "ha_gehome",
|
||||||
|
"tag": "v2026.2.0",
|
||||||
|
"hash": "sha256-7c2GfTagNsIsSiT/sCqSV+BZZJMcvlsecDD+ZDZx9BA="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,29 +1,36 @@
|
|||||||
{
|
{
|
||||||
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
buildHomeAssistantComponent,
|
buildHomeAssistantComponent,
|
||||||
fetchFromGitHub,
|
|
||||||
home-assistant,
|
home-assistant,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
buildHomeAssistantComponent rec {
|
let
|
||||||
owner = "LaggAt";
|
inherit (lib.trivial) importJSON;
|
||||||
domain = "govee";
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
version = "2025.7.1";
|
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
versionSpec = importJSON ./version.json;
|
||||||
owner = owner;
|
selected = selectVariant versionSpec null null;
|
||||||
repo = "hacs-govee";
|
sources = mkAllSources pkgs selected;
|
||||||
rev = version;
|
src-meta = selected.sources.govee;
|
||||||
hash = "sha256-3SnYjjQU2qRBcKs40bCpN75Ad3HqMcn/hRj1faSSeHw=";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
};
|
in
|
||||||
|
buildHomeAssistantComponent {
|
||||||
|
owner = src-meta.owner;
|
||||||
|
domain = "govee";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
src = sources.govee;
|
||||||
|
|
||||||
nativeBuildInputs = with home-assistant.python.pkgs; [
|
nativeBuildInputs = with home-assistant.python.pkgs; [
|
||||||
dacite
|
dacite
|
||||||
];
|
];
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
changelog = "https://github.com/${owner}/hacs-govee/releases/tag/${version}";
|
changelog = "https://github.com/${src-meta.owner}/hacs-govee/releases/tag/${version}";
|
||||||
description = "The Govee integration allows you to control and monitor lights and switches using the Govee API.";
|
description = "The Govee integration allows you to control and monitor lights and switches using the Govee API.";
|
||||||
homepage = "https://github.com/${owner}/hacs-govee";
|
homepage = "https://github.com/${src-meta.owner}/hacs-govee";
|
||||||
maintainers = [ ];
|
maintainers = [ ];
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
15
packages/homeassistant/ha-govee/version.json
Normal file
15
packages/homeassistant/ha-govee/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "2025.7.1"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"govee": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "LaggAt",
|
||||||
|
"repo": "hacs-govee",
|
||||||
|
"tag": "2025.7.1",
|
||||||
|
"hash": "sha256-3SnYjjQU2qRBcKs40bCpN75Ad3HqMcn/hRj1faSSeHw="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,20 +1,27 @@
|
|||||||
{
|
{
|
||||||
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
buildHomeAssistantComponent,
|
buildHomeAssistantComponent,
|
||||||
fetchFromGitHub,
|
|
||||||
home-assistant,
|
home-assistant,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
buildHomeAssistantComponent rec {
|
let
|
||||||
owner = "gcobb321";
|
inherit (lib.trivial) importJSON;
|
||||||
domain = "icloud3";
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
version = "v3.3.4.4";
|
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
versionSpec = importJSON ./version.json;
|
||||||
owner = owner;
|
selected = selectVariant versionSpec null null;
|
||||||
repo = "icloud3";
|
sources = mkAllSources pkgs selected;
|
||||||
rev = "${version}";
|
src-meta = selected.sources.icloud3;
|
||||||
hash = "sha256-B63iY4OC00PGXx/3aq/rkiO0xK11hXz66KaglwmgxIk=";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
};
|
in
|
||||||
|
buildHomeAssistantComponent {
|
||||||
|
owner = src-meta.owner;
|
||||||
|
domain = "icloud3";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
src = sources.icloud3;
|
||||||
|
|
||||||
nativeBuildInputs = with home-assistant.python.pkgs; [
|
nativeBuildInputs = with home-assistant.python.pkgs; [
|
||||||
fido2
|
fido2
|
||||||
|
|||||||
15
packages/homeassistant/ha-icloud3/version.json
Normal file
15
packages/homeassistant/ha-icloud3/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "v3.3.4.4"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"icloud3": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "gcobb321",
|
||||||
|
"repo": "icloud3",
|
||||||
|
"tag": "v3.3.4.4",
|
||||||
|
"hash": "sha256-B63iY4OC00PGXx/3aq/rkiO0xK11hXz66KaglwmgxIk="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,20 +1,27 @@
|
|||||||
{
|
{
|
||||||
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
buildHomeAssistantComponent,
|
buildHomeAssistantComponent,
|
||||||
fetchFromGitHub,
|
|
||||||
home-assistant,
|
home-assistant,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
buildHomeAssistantComponent rec {
|
let
|
||||||
owner = "acon96";
|
inherit (lib.trivial) importJSON;
|
||||||
domain = "llama_conversation";
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
version = "v0.4.6";
|
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
versionSpec = importJSON ./version.json;
|
||||||
owner = owner;
|
selected = selectVariant versionSpec null null;
|
||||||
repo = "home-llm";
|
sources = mkAllSources pkgs selected;
|
||||||
rev = version;
|
src-meta = selected.sources.llama_conversation;
|
||||||
hash = "sha256-QmpyqNRhmnqFNiKPHm8GKuvZhbuYWDLck3eFC9MlIKQ=";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
};
|
in
|
||||||
|
buildHomeAssistantComponent {
|
||||||
|
owner = src-meta.owner;
|
||||||
|
domain = "llama_conversation";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
src = sources.llama_conversation;
|
||||||
|
|
||||||
nativeBuildInputs = with home-assistant.python.pkgs; [
|
nativeBuildInputs = with home-assistant.python.pkgs; [
|
||||||
anthropic
|
anthropic
|
||||||
|
|||||||
15
packages/homeassistant/ha-local-llm/version.json
Normal file
15
packages/homeassistant/ha-local-llm/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "v0.4.6"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"llama_conversation": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "acon96",
|
||||||
|
"repo": "home-llm",
|
||||||
|
"tag": "v0.4.6",
|
||||||
|
"hash": "sha256-QmpyqNRhmnqFNiKPHm8GKuvZhbuYWDLck3eFC9MlIKQ="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,20 +1,27 @@
|
|||||||
{
|
{
|
||||||
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
buildHomeAssistantComponent,
|
buildHomeAssistantComponent,
|
||||||
fetchFromGitHub,
|
|
||||||
home-assistant,
|
home-assistant,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
buildHomeAssistantComponent rec {
|
let
|
||||||
owner = "moralmunky";
|
inherit (lib.trivial) importJSON;
|
||||||
domain = "mail_and_packages";
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
version = "0.5.0";
|
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
versionSpec = importJSON ./version.json;
|
||||||
owner = owner;
|
selected = selectVariant versionSpec null null;
|
||||||
repo = "Home-Assistant-Mail-And-Packages";
|
sources = mkAllSources pkgs selected;
|
||||||
tag = version;
|
src-meta = selected.sources.mail_and_packages;
|
||||||
hash = "sha256-Am3EYkSYCQuYJmm6xdUwCa0h/ldk4hwTxRTxc0BU2j8=";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
};
|
in
|
||||||
|
buildHomeAssistantComponent {
|
||||||
|
owner = src-meta.owner;
|
||||||
|
domain = "mail_and_packages";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
src = sources.mail_and_packages;
|
||||||
|
|
||||||
nativeBuildInputs = with home-assistant.python.pkgs; [
|
nativeBuildInputs = with home-assistant.python.pkgs; [
|
||||||
aioimaplib
|
aioimaplib
|
||||||
|
|||||||
15
packages/homeassistant/ha-mail-and-packages/version.json
Normal file
15
packages/homeassistant/ha-mail-and-packages/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "0.5.0"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"mail_and_packages": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "moralmunky",
|
||||||
|
"repo": "Home-Assistant-Mail-And-Packages",
|
||||||
|
"tag": "0.5.0",
|
||||||
|
"hash": "sha256-Am3EYkSYCQuYJmm6xdUwCa0h/ldk4hwTxRTxc0BU2j8="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,22 +1,38 @@
|
|||||||
{
|
{
|
||||||
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
buildHomeAssistantComponent,
|
buildHomeAssistantComponent,
|
||||||
fetchFromGitHub,
|
|
||||||
home-assistant,
|
home-assistant,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
let
|
let
|
||||||
|
inherit (lib.trivial) importJSON;
|
||||||
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
|
versionSpec = importJSON ./version.json;
|
||||||
|
selected = selectVariant versionSpec null null;
|
||||||
|
sources = mkAllSources pkgs selected;
|
||||||
|
src-meta = selected.sources.nanokvm;
|
||||||
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
|
|
||||||
|
# python-nanokvm must be built against HA's Python interpreter.
|
||||||
|
# Re-use the source from its own version.json to avoid duplication.
|
||||||
|
nanokvm-ver = importJSON ../../python/python-nanokvm/version.json;
|
||||||
|
nanokvm-selected = selectVariant nanokvm-ver null null;
|
||||||
|
nanokvm-sources = mkAllSources pkgs nanokvm-selected;
|
||||||
|
|
||||||
python3Packages = home-assistant.python.pkgs;
|
python3Packages = home-assistant.python.pkgs;
|
||||||
python-nanokvm = python3Packages.buildPythonPackage rec {
|
python-nanokvm = python3Packages.buildPythonPackage {
|
||||||
pname = "nanokvm";
|
pname = "nanokvm";
|
||||||
version = "0.1.0";
|
version =
|
||||||
|
if nanokvm-selected.sources."python-nanokvm" ? tag then
|
||||||
|
nanokvm-selected.sources."python-nanokvm".tag
|
||||||
|
else
|
||||||
|
nanokvm-selected.sources."python-nanokvm".rev;
|
||||||
format = "pyproject";
|
format = "pyproject";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = nanokvm-sources."python-nanokvm";
|
||||||
owner = "puddly";
|
|
||||||
repo = "python-${pname}";
|
|
||||||
rev = "v${version}";
|
|
||||||
sha256 = "sha256-vIxvQtjaInnWQce7syiOWpP2kaw0IVw03HPovnB2J5M=";
|
|
||||||
};
|
|
||||||
|
|
||||||
prePatch = ''
|
prePatch = ''
|
||||||
rm -f pyproject.toml
|
rm -f pyproject.toml
|
||||||
@@ -50,9 +66,7 @@ let
|
|||||||
EOF
|
EOF
|
||||||
'';
|
'';
|
||||||
|
|
||||||
buildInputs = with python3Packages; [
|
buildInputs = with python3Packages; [ setuptools ];
|
||||||
setuptools
|
|
||||||
];
|
|
||||||
|
|
||||||
propagatedBuildInputs = with python3Packages; [
|
propagatedBuildInputs = with python3Packages; [
|
||||||
aiohttp
|
aiohttp
|
||||||
@@ -66,21 +80,14 @@ let
|
|||||||
doCheck = false;
|
doCheck = false;
|
||||||
};
|
};
|
||||||
in
|
in
|
||||||
buildHomeAssistantComponent rec {
|
buildHomeAssistantComponent {
|
||||||
owner = "Wouter0100";
|
owner = src-meta.owner;
|
||||||
domain = "nanokvm";
|
domain = "nanokvm";
|
||||||
version = "v0.0.4";
|
inherit version;
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = sources.nanokvm;
|
||||||
owner = owner;
|
|
||||||
repo = "homeassistant-nanokvm";
|
|
||||||
rev = "bdd2ca39d8050e4b38bb7917ee4034f2fcd49471";
|
|
||||||
hash = "sha256-S6g9mfPEixqeGQkXVK8PZJ/dnEC5ThKtbELAIAhCANM=";
|
|
||||||
};
|
|
||||||
|
|
||||||
propagatedBuildInputs = [
|
propagatedBuildInputs = [ python-nanokvm ];
|
||||||
python-nanokvm
|
|
||||||
];
|
|
||||||
|
|
||||||
postPatch = ''
|
postPatch = ''
|
||||||
substituteInPlace custom_components/nanokvm/manifest.json \
|
substituteInPlace custom_components/nanokvm/manifest.json \
|
||||||
|
|||||||
18
packages/homeassistant/ha-nanokvm/version.json
Normal file
18
packages/homeassistant/ha-nanokvm/version.json
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "v0.0.4"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"nanokvm": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "Wouter0100",
|
||||||
|
"repo": "homeassistant-nanokvm",
|
||||||
|
"rev": "bdd2ca39d8050e4b38bb7917ee4034f2fcd49471",
|
||||||
|
"hash": "sha256-S6g9mfPEixqeGQkXVK8PZJ/dnEC5ThKtbELAIAhCANM="
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"notes": {
|
||||||
|
"hint": "The nanokvm component embeds a vendored copy of python-nanokvm. The dep is tracked separately in packages/python/python-nanokvm/version.json."
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,22 +1,29 @@
|
|||||||
{
|
{
|
||||||
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
buildHomeAssistantComponent,
|
buildHomeAssistantComponent,
|
||||||
fetchFromGitHub,
|
|
||||||
home-assistant,
|
home-assistant,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
buildHomeAssistantComponent rec {
|
let
|
||||||
owner = "HASwitchPlate";
|
inherit (lib.trivial) importJSON;
|
||||||
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
|
versionSpec = importJSON ./version.json;
|
||||||
|
selected = selectVariant versionSpec null null;
|
||||||
|
sources = mkAllSources pkgs selected;
|
||||||
|
src-meta = selected.sources.openhasp;
|
||||||
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
|
in
|
||||||
|
buildHomeAssistantComponent {
|
||||||
|
owner = src-meta.owner;
|
||||||
domain = "openhasp";
|
domain = "openhasp";
|
||||||
version = "0.7.8";
|
inherit version;
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = sources.openhasp;
|
||||||
owner = owner;
|
|
||||||
repo = "openHASP-custom-component";
|
|
||||||
rev = version;
|
|
||||||
hash = "sha256-5h1EqwpnsmWexqB3J/X4OcN9bfBYUxGxLF1Hrmoi5LY=";
|
|
||||||
};
|
|
||||||
|
|
||||||
# Use HA's own Python (3.14) packages to satisfy the manifest check for jsonschema
|
# Use HA's own Python packages to satisfy the manifest check for jsonschema
|
||||||
nativeBuildInputs = [ home-assistant.python.pkgs.jsonschema ];
|
nativeBuildInputs = [ home-assistant.python.pkgs.jsonschema ];
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
|
|||||||
15
packages/homeassistant/ha-openhasp/version.json
Normal file
15
packages/homeassistant/ha-openhasp/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "0.7.8"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"openhasp": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "HASwitchPlate",
|
||||||
|
"repo": "openHASP-custom-component",
|
||||||
|
"tag": "0.7.8",
|
||||||
|
"hash": "sha256-5h1EqwpnsmWexqB3J/X4OcN9bfBYUxGxLF1Hrmoi5LY="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,21 +1,26 @@
|
|||||||
{
|
{
|
||||||
buildHomeAssistantComponent,
|
lib,
|
||||||
fetchFromGitHub,
|
|
||||||
pkgs,
|
|
||||||
namespace,
|
namespace,
|
||||||
|
pkgs,
|
||||||
|
buildHomeAssistantComponent,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
buildHomeAssistantComponent rec {
|
let
|
||||||
owner = "vaparr";
|
inherit (lib.trivial) importJSON;
|
||||||
domain = "overseerr";
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
version = "0.1.42";
|
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
versionSpec = importJSON ./version.json;
|
||||||
owner = owner;
|
selected = selectVariant versionSpec null null;
|
||||||
repo = "ha-overseerr";
|
sources = mkAllSources pkgs selected;
|
||||||
rev = version;
|
src-meta = selected.sources.overseerr;
|
||||||
hash = "sha256-UvUowCgfay9aRV+iC/AQ9vvJzhGZbH+/1kVjxPFBKcI=";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
};
|
in
|
||||||
|
buildHomeAssistantComponent {
|
||||||
|
owner = src-meta.owner;
|
||||||
|
domain = "overseerr";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
src = sources.overseerr;
|
||||||
|
|
||||||
nativeBuildInputs = [ pkgs.${namespace}.pyoverseerr ];
|
nativeBuildInputs = [ pkgs.${namespace}.pyoverseerr ];
|
||||||
|
|
||||||
|
|||||||
15
packages/homeassistant/ha-overseerr/version.json
Normal file
15
packages/homeassistant/ha-overseerr/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "0.1.42"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"overseerr": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "vaparr",
|
||||||
|
"repo": "ha-overseerr",
|
||||||
|
"tag": "0.1.42",
|
||||||
|
"hash": "sha256-UvUowCgfay9aRV+iC/AQ9vvJzhGZbH+/1kVjxPFBKcI="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,15 +1,26 @@
|
|||||||
{ buildHomeAssistantComponent, fetchFromGitHub, ... }:
|
{
|
||||||
buildHomeAssistantComponent rec {
|
lib,
|
||||||
owner = "jjjonesjr33";
|
namespace,
|
||||||
domain = "petlibro";
|
pkgs,
|
||||||
version = "v1.2.30.7";
|
buildHomeAssistantComponent,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
inherit (lib.trivial) importJSON;
|
||||||
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
versionSpec = importJSON ./version.json;
|
||||||
owner = owner;
|
selected = selectVariant versionSpec null null;
|
||||||
repo = "petlibro";
|
sources = mkAllSources pkgs selected;
|
||||||
rev = version;
|
src-meta = selected.sources.petlibro;
|
||||||
hash = "sha256-+zmeUQHRXrBYQ5pEWLAtu9TZ8ELiwCLliRPktKlpI8k=";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
};
|
in
|
||||||
|
buildHomeAssistantComponent {
|
||||||
|
owner = src-meta.owner;
|
||||||
|
domain = "petlibro";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
src = sources.petlibro;
|
||||||
|
|
||||||
meta = {
|
meta = {
|
||||||
changelog = "https://github.com/jjjonesjr33/petlibro/releases/tag/${version}";
|
changelog = "https://github.com/jjjonesjr33/petlibro/releases/tag/${version}";
|
||||||
|
|||||||
15
packages/homeassistant/ha-petlibro/version.json
Normal file
15
packages/homeassistant/ha-petlibro/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "v1.2.30.7"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"petlibro": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "jjjonesjr33",
|
||||||
|
"repo": "petlibro",
|
||||||
|
"tag": "v1.2.30.7",
|
||||||
|
"hash": "sha256-+zmeUQHRXrBYQ5pEWLAtu9TZ8ELiwCLliRPktKlpI8k="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,21 +1,26 @@
|
|||||||
{
|
{
|
||||||
buildHomeAssistantComponent,
|
lib,
|
||||||
fetchFromGitHub,
|
|
||||||
pkgs,
|
|
||||||
namespace,
|
namespace,
|
||||||
|
pkgs,
|
||||||
|
buildHomeAssistantComponent,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
buildHomeAssistantComponent rec {
|
let
|
||||||
owner = "SecKatie";
|
inherit (lib.trivial) importJSON;
|
||||||
domain = "wyzeapi";
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
version = "0.1.36";
|
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
versionSpec = importJSON ./version.json;
|
||||||
owner = owner;
|
selected = selectVariant versionSpec null null;
|
||||||
repo = "ha-wyzeapi";
|
sources = mkAllSources pkgs selected;
|
||||||
rev = version;
|
src-meta = selected.sources.wyzeapi;
|
||||||
hash = "sha256-4i5Ne3LYV7DXn6F6e5MCVZhIdDYR7fe3tT2GeSmYb/k=";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
};
|
in
|
||||||
|
buildHomeAssistantComponent {
|
||||||
|
owner = src-meta.owner;
|
||||||
|
domain = "wyzeapi";
|
||||||
|
inherit version;
|
||||||
|
|
||||||
|
src = sources.wyzeapi;
|
||||||
|
|
||||||
# wyzeapy must be built against HA's Python; pkgs.mjallen.wyzeapy uses home-assistant.python
|
# wyzeapy must be built against HA's Python; pkgs.mjallen.wyzeapy uses home-assistant.python
|
||||||
dependencies = [ pkgs.${namespace}.wyzeapy ];
|
dependencies = [ pkgs.${namespace}.wyzeapy ];
|
||||||
|
|||||||
15
packages/homeassistant/ha-wyzeapi/version.json
Normal file
15
packages/homeassistant/ha-wyzeapi/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "0.1.36"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"wyzeapi": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "SecKatie",
|
||||||
|
"repo": "ha-wyzeapi",
|
||||||
|
"tag": "0.1.36",
|
||||||
|
"hash": "sha256-4i5Ne3LYV7DXn6F6e5MCVZhIdDYR7fe3tT2GeSmYb/k="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,24 +1,38 @@
|
|||||||
{ pkgs, fetchPypi, ... }:
|
{
|
||||||
pkgs.python3Packages.buildPythonPackage rec {
|
lib,
|
||||||
pname = "homeassistant_api";
|
namespace,
|
||||||
version = "5.0.0";
|
pkgs,
|
||||||
format = "pyproject";
|
...
|
||||||
src = fetchPypi {
|
}:
|
||||||
inherit pname version;
|
let
|
||||||
sha256 = "sha256-UNKTtgInrVJtjHb1WVlUbcbhjBOtTX00eHmm54ww0rY=";
|
inherit (lib.trivial) importJSON;
|
||||||
};
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
|
versionSpec = importJSON ./version.json;
|
||||||
|
selected = selectVariant versionSpec null null;
|
||||||
|
sources = mkAllSources pkgs selected;
|
||||||
|
version = selected.variables.version;
|
||||||
|
in
|
||||||
|
pkgs.python3Packages.buildPythonPackage {
|
||||||
|
pname = "homeassistant_api";
|
||||||
|
inherit version;
|
||||||
|
format = "pyproject";
|
||||||
|
|
||||||
|
src = sources.homeassistant_api;
|
||||||
|
|
||||||
# do not run tests
|
|
||||||
doCheck = false;
|
doCheck = false;
|
||||||
|
|
||||||
nativeBuildInputs = with pkgs.python3Packages; [
|
nativeBuildInputs = with pkgs.python3Packages; [
|
||||||
poetry-core
|
poetry-core
|
||||||
requests-cache
|
requests-cache
|
||||||
];
|
];
|
||||||
|
|
||||||
dependencies = with pkgs.python3Packages; [
|
dependencies = with pkgs.python3Packages; [
|
||||||
requests-cache
|
requests-cache
|
||||||
pydantic
|
pydantic
|
||||||
websockets
|
websockets
|
||||||
];
|
];
|
||||||
|
|
||||||
propagatedBuildInputs = with pkgs.python3Packages; [
|
propagatedBuildInputs = with pkgs.python3Packages; [
|
||||||
aiohttp
|
aiohttp
|
||||||
aiohttp-client-cache
|
aiohttp-client-cache
|
||||||
@@ -28,11 +42,13 @@ pkgs.python3Packages.buildPythonPackage rec {
|
|||||||
simplejson
|
simplejson
|
||||||
websockets
|
websockets
|
||||||
];
|
];
|
||||||
|
|
||||||
pythonRelaxDeps = [
|
pythonRelaxDeps = [
|
||||||
"requests-cache"
|
"requests-cache"
|
||||||
"pydantic"
|
"pydantic"
|
||||||
"websockets"
|
"websockets"
|
||||||
];
|
];
|
||||||
|
|
||||||
pythonImportsCheck = [
|
pythonImportsCheck = [
|
||||||
"homeassistant_api"
|
"homeassistant_api"
|
||||||
];
|
];
|
||||||
|
|||||||
13
packages/homeassistant/homeassistant-api/version.json
Normal file
13
packages/homeassistant/homeassistant-api/version.json
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "5.0.0"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"homeassistant_api": {
|
||||||
|
"fetcher": "pypi",
|
||||||
|
"name": "homeassistant_api",
|
||||||
|
"hash": "sha256-UNKTtgInrVJtjHb1WVlUbcbhjBOtTX00eHmm54ww0rY="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -18,12 +18,12 @@
|
|||||||
"cachyos": {
|
"cachyos": {
|
||||||
"variables": {
|
"variables": {
|
||||||
"base": "10.0",
|
"base": "10.0",
|
||||||
"release": "20260227",
|
"release": "20260324",
|
||||||
"tarballSuffix": "-x86_64.tar.xz"
|
"tarballSuffix": "-x86_64.tar.xz"
|
||||||
},
|
},
|
||||||
"sources": {
|
"sources": {
|
||||||
"proton": {
|
"proton": {
|
||||||
"hash": "sha256-kayS0zpBIL2jOM7jxkI0LyhYShQFGCKPdRyiJVOxf6c="
|
"hash": "sha256-vswYkpHuXj/YqfjCj+x779SSOsoOCEeZfr99pi1Mfj0="
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -42,24 +42,24 @@
|
|||||||
"cachyos-v3": {
|
"cachyos-v3": {
|
||||||
"variables": {
|
"variables": {
|
||||||
"base": "10.0",
|
"base": "10.0",
|
||||||
"release": "20260227",
|
"release": "20260324",
|
||||||
"tarballSuffix": "-x86_64_v3.tar.xz"
|
"tarballSuffix": "-x86_64_v3.tar.xz"
|
||||||
},
|
},
|
||||||
"sources": {
|
"sources": {
|
||||||
"proton": {
|
"proton": {
|
||||||
"hash": "sha256-LI3/Hqe7oNYv5dC5jNz7c+HHNzifeON/bnt6jmD2DRA="
|
"hash": "sha256-158b49/TPuYD4kRC9YCd/obVjv1JUBpDIsjjeUP/RRw="
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"cachyos-v4": {
|
"cachyos-v4": {
|
||||||
"variables": {
|
"variables": {
|
||||||
"base": "10.0",
|
"base": "10.0",
|
||||||
"release": "20260227",
|
"release": "20260324",
|
||||||
"tarballSuffix": "-x86_64_v4.tar.xz"
|
"tarballSuffix": "-x86_64_v4.tar.xz"
|
||||||
},
|
},
|
||||||
"sources": {
|
"sources": {
|
||||||
"proton": {
|
"proton": {
|
||||||
"hash": "sha256-kcWSmF+qwClI4qUkv3ShVBQ6plQ8q3jyo59o5uN4ueM="
|
"hash": "sha256-qHNpSh2VneqiwLRYqjR/YRV6HPj1L51u13xNu70tyBw="
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -1,15 +1,25 @@
|
|||||||
{ python3Packages, fetchFromGitHub, ... }:
|
{
|
||||||
python3Packages.buildPythonPackage rec {
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
|
python3Packages,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
inherit (lib.trivial) importJSON;
|
||||||
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
|
versionSpec = importJSON ./version.json;
|
||||||
|
selected = selectVariant versionSpec null null;
|
||||||
|
sources = mkAllSources pkgs selected;
|
||||||
|
src-meta = selected.sources."comfy-aimdo";
|
||||||
|
in
|
||||||
|
python3Packages.buildPythonPackage {
|
||||||
pname = "comfy-aimdo";
|
pname = "comfy-aimdo";
|
||||||
version = "0.1.7";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
format = "pyproject";
|
format = "pyproject";
|
||||||
# Comfy-Org/comfy-aimdo/releases/tag/v0.1.7
|
|
||||||
src = fetchFromGitHub {
|
src = sources."comfy-aimdo";
|
||||||
owner = "Comfy-Org";
|
|
||||||
repo = "comfy-aimdo";
|
|
||||||
rev = "v${version}";
|
|
||||||
sha256 = "sha256-RNORTKtnTHZ4lcEx5gM3jSr+ZffrV8cd+x74NeRhlsM=";
|
|
||||||
};
|
|
||||||
|
|
||||||
buildInputs = with python3Packages; [
|
buildInputs = with python3Packages; [
|
||||||
setuptools
|
setuptools
|
||||||
|
|||||||
15
packages/python/comfy-aimdo/version.json
Normal file
15
packages/python/comfy-aimdo/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "0.1.7"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"comfy-aimdo": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "Comfy-Org",
|
||||||
|
"repo": "comfy-aimdo",
|
||||||
|
"tag": "v0.1.7",
|
||||||
|
"hash": "sha256-RNORTKtnTHZ4lcEx5gM3jSr+ZffrV8cd+x74NeRhlsM="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,12 +1,25 @@
|
|||||||
{ python3Packages, fetchurl, ... }:
|
{
|
||||||
python3Packages.buildPythonPackage rec {
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
|
python3Packages,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
inherit (lib.trivial) importJSON;
|
||||||
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
|
versionSpec = importJSON ./version.json;
|
||||||
|
selected = selectVariant versionSpec null null;
|
||||||
|
sources = mkAllSources pkgs selected;
|
||||||
|
version = selected.variables.version;
|
||||||
|
in
|
||||||
|
python3Packages.buildPythonPackage {
|
||||||
pname = "comfy-kitchen";
|
pname = "comfy-kitchen";
|
||||||
version = "0.2.7";
|
inherit version;
|
||||||
format = "wheel";
|
format = "wheel";
|
||||||
# https://files.pythonhosted.org/packages/f8/65/d483613734d0b9753bd9bfa297ff334cb2c7766e82306099db6b259b4e2c/comfy_kitchen-0.2.7-py3-none-any.whl
|
|
||||||
src = fetchurl {
|
src = sources."comfy-kitchen";
|
||||||
url = "https://files.pythonhosted.org/packages/f8/65/d483613734d0b9753bd9bfa297ff334cb2c7766e82306099db6b259b4e2c/comfy_kitchen-0.2.7-py3-none-any.whl";
|
|
||||||
sha256 = "sha256-+PqlebadMx0vHqwJ6WqVWGwqa5WKVLwZ5/HBp3hS3TY=";
|
|
||||||
};
|
|
||||||
doCheck = false;
|
doCheck = false;
|
||||||
}
|
}
|
||||||
|
|||||||
13
packages/python/comfy-kitchen/version.json
Normal file
13
packages/python/comfy-kitchen/version.json
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "0.2.7"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"comfy-kitchen": {
|
||||||
|
"fetcher": "url",
|
||||||
|
"url": "https://files.pythonhosted.org/packages/f8/65/d483613734d0b9753bd9bfa297ff334cb2c7766e82306099db6b259b4e2c/comfy_kitchen-0.2.7-py3-none-any.whl",
|
||||||
|
"hash": "sha256-+PqlebadMx0vHqwJ6WqVWGwqa5WKVLwZ5/HBp3hS3TY="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,17 +1,25 @@
|
|||||||
{
|
{
|
||||||
lib,
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
home-assistant,
|
home-assistant,
|
||||||
|
...
|
||||||
}:
|
}:
|
||||||
|
let
|
||||||
|
inherit (lib.trivial) importJSON;
|
||||||
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
home-assistant.python.pkgs.buildPythonPackage rec {
|
versionSpec = importJSON ./version.json;
|
||||||
|
selected = selectVariant versionSpec null null;
|
||||||
|
sources = mkAllSources pkgs selected;
|
||||||
|
version = selected.variables.version;
|
||||||
|
in
|
||||||
|
home-assistant.python.pkgs.buildPythonPackage {
|
||||||
pname = "gehomesdk";
|
pname = "gehomesdk";
|
||||||
version = "2026.2.0";
|
inherit version;
|
||||||
pyproject = true;
|
pyproject = true;
|
||||||
|
|
||||||
src = home-assistant.python.pkgs.fetchPypi {
|
src = sources.gehomesdk;
|
||||||
inherit pname version;
|
|
||||||
hash = "sha256-+BWGkUDKd+9QGbdXuLjmJxLm1xUv0dpIRlPlDkUJ25w=";
|
|
||||||
};
|
|
||||||
|
|
||||||
build-system = with home-assistant.python.pkgs; [ setuptools ];
|
build-system = with home-assistant.python.pkgs; [ setuptools ];
|
||||||
|
|
||||||
|
|||||||
13
packages/python/gehomesdk/version.json
Normal file
13
packages/python/gehomesdk/version.json
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "2026.2.0"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"gehomesdk": {
|
||||||
|
"fetcher": "pypi",
|
||||||
|
"name": "gehomesdk",
|
||||||
|
"hash": "sha256-+BWGkUDKd+9QGbdXuLjmJxLm1xUv0dpIRlPlDkUJ25w="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,20 +1,25 @@
|
|||||||
{
|
{
|
||||||
fetchFromGitHub,
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
home-assistant,
|
home-assistant,
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
|
let
|
||||||
|
inherit (lib.trivial) importJSON;
|
||||||
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
home-assistant.python.pkgs.buildPythonPackage rec {
|
versionSpec = importJSON ./version.json;
|
||||||
|
selected = selectVariant versionSpec null null;
|
||||||
|
sources = mkAllSources pkgs selected;
|
||||||
|
src-meta = selected.sources.magicattr;
|
||||||
|
in
|
||||||
|
home-assistant.python.pkgs.buildPythonPackage {
|
||||||
pname = "magicattr";
|
pname = "magicattr";
|
||||||
version = "0.1.6";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
format = "setuptools";
|
format = "setuptools";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = sources.magicattr;
|
||||||
owner = "frmdstryr";
|
|
||||||
repo = pname;
|
|
||||||
rev = "master";
|
|
||||||
sha256 = "sha256-FJtWU5AuunZbdlndGdfD1c9/0s7oRdoTi202pWjuAd8=";
|
|
||||||
};
|
|
||||||
|
|
||||||
build-system = [ home-assistant.python.pkgs.setuptools ];
|
build-system = [ home-assistant.python.pkgs.setuptools ];
|
||||||
doCheck = false;
|
doCheck = false;
|
||||||
|
|||||||
12
packages/python/magicattr/version.json
Normal file
12
packages/python/magicattr/version.json
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"sources": {
|
||||||
|
"magicattr": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "frmdstryr",
|
||||||
|
"repo": "magicattr",
|
||||||
|
"rev": "master",
|
||||||
|
"hash": "sha256-FJtWU5AuunZbdlndGdfD1c9/0s7oRdoTi202pWjuAd8="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,15 +1,25 @@
|
|||||||
{ python3Packages, fetchFromGitHub, ... }:
|
{
|
||||||
python3Packages.buildPythonPackage rec {
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
|
python3Packages,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
inherit (lib.trivial) importJSON;
|
||||||
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
|
versionSpec = importJSON ./version.json;
|
||||||
|
selected = selectVariant versionSpec null null;
|
||||||
|
sources = mkAllSources pkgs selected;
|
||||||
|
src-meta = selected.sources."pipewire-python";
|
||||||
|
in
|
||||||
|
python3Packages.buildPythonPackage {
|
||||||
pname = "pipewire-python";
|
pname = "pipewire-python";
|
||||||
version = "0.2.3";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
format = "pyproject";
|
format = "pyproject";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = sources."pipewire-python";
|
||||||
owner = "pablodz";
|
|
||||||
repo = "pipewire_python";
|
|
||||||
rev = "v${version}";
|
|
||||||
sha256 = "sha256-6UIu7vke40q+n91gU8YxwMV/tWjLT6iDmHCMVqnXdMY=";
|
|
||||||
};
|
|
||||||
|
|
||||||
buildInputs = with python3Packages; [ flit-core ];
|
buildInputs = with python3Packages; [ flit-core ];
|
||||||
nativeBuildInputs = with python3Packages; [
|
nativeBuildInputs = with python3Packages; [
|
||||||
|
|||||||
15
packages/python/pipewire-python/version.json
Normal file
15
packages/python/pipewire-python/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "0.2.3"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"pipewire-python": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "pablodz",
|
||||||
|
"repo": "pipewire_python",
|
||||||
|
"tag": "v0.2.3",
|
||||||
|
"hash": "sha256-6UIu7vke40q+n91gU8YxwMV/tWjLT6iDmHCMVqnXdMY="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,15 +1,25 @@
|
|||||||
{ fetchFromGitHub, home-assistant, ... }:
|
{
|
||||||
home-assistant.python.pkgs.buildPythonPackage rec {
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
|
home-assistant,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
inherit (lib.trivial) importJSON;
|
||||||
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
|
versionSpec = importJSON ./version.json;
|
||||||
|
selected = selectVariant versionSpec null null;
|
||||||
|
sources = mkAllSources pkgs selected;
|
||||||
|
src-meta = selected.sources.pyoverseerr;
|
||||||
|
in
|
||||||
|
home-assistant.python.pkgs.buildPythonPackage {
|
||||||
pname = "pyoverseerr";
|
pname = "pyoverseerr";
|
||||||
version = "0.1.40";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
format = "setuptools";
|
format = "setuptools";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = sources.pyoverseerr;
|
||||||
owner = "vaparr";
|
|
||||||
repo = pname;
|
|
||||||
rev = "master";
|
|
||||||
sha256 = "sha256-sWYe6EV/IO/tGGXcnKiebb47eidIj0xnM/aZUfdZXyY=";
|
|
||||||
};
|
|
||||||
|
|
||||||
build-system = [ home-assistant.python.pkgs.setuptools ];
|
build-system = [ home-assistant.python.pkgs.setuptools ];
|
||||||
doCheck = false; # no tests in the PyPI tarball
|
doCheck = false; # no tests in the PyPI tarball
|
||||||
|
|||||||
12
packages/python/pyoverseerr/version.json
Normal file
12
packages/python/pyoverseerr/version.json
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"sources": {
|
||||||
|
"pyoverseerr": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "vaparr",
|
||||||
|
"repo": "pyoverseerr",
|
||||||
|
"rev": "master",
|
||||||
|
"hash": "sha256-sWYe6EV/IO/tGGXcnKiebb47eidIj0xnM/aZUfdZXyY="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,19 +1,25 @@
|
|||||||
{
|
{
|
||||||
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
python3Packages,
|
python3Packages,
|
||||||
fetchFromGitHub,
|
|
||||||
...
|
...
|
||||||
}:
|
}:
|
||||||
python3Packages.buildPythonPackage rec {
|
let
|
||||||
|
inherit (lib.trivial) importJSON;
|
||||||
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
|
versionSpec = importJSON ./version.json;
|
||||||
|
selected = selectVariant versionSpec null null;
|
||||||
|
sources = mkAllSources pkgs selected;
|
||||||
|
src-meta = selected.sources."python-nanokvm";
|
||||||
|
in
|
||||||
|
python3Packages.buildPythonPackage {
|
||||||
pname = "nanokvm";
|
pname = "nanokvm";
|
||||||
version = "0.1.0";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
format = "pyproject";
|
format = "pyproject";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = sources."python-nanokvm";
|
||||||
owner = "puddly";
|
|
||||||
repo = "python-${pname}";
|
|
||||||
rev = "v${version}";
|
|
||||||
sha256 = "sha256-vIxvQtjaInnWQce7syiOWpP2kaw0IVw03HPovnB2J5M=";
|
|
||||||
};
|
|
||||||
|
|
||||||
prePatch = ''
|
prePatch = ''
|
||||||
rm -f pyproject.toml
|
rm -f pyproject.toml
|
||||||
|
|||||||
15
packages/python/python-nanokvm/version.json
Normal file
15
packages/python/python-nanokvm/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "0.1.0"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"python-nanokvm": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "puddly",
|
||||||
|
"repo": "python-nanokvm",
|
||||||
|
"tag": "v0.1.0",
|
||||||
|
"hash": "sha256-vIxvQtjaInnWQce7syiOWpP2kaw0IVw03HPovnB2J5M="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,14 +1,26 @@
|
|||||||
{ python3Packages, fetchPypi, ... }:
|
{
|
||||||
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
|
python3Packages,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
inherit (lib.trivial) importJSON;
|
||||||
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
python3Packages.buildPythonPackage rec {
|
versionSpec = importJSON ./version.json;
|
||||||
|
selected = selectVariant versionSpec null null;
|
||||||
|
sources = mkAllSources pkgs selected;
|
||||||
|
src-meta = selected.sources."python-steam";
|
||||||
|
version = selected.variables.version;
|
||||||
|
in
|
||||||
|
python3Packages.buildPythonPackage {
|
||||||
pname = "steam";
|
pname = "steam";
|
||||||
version = "1.4.4";
|
inherit version;
|
||||||
pyproject = false;
|
pyproject = false;
|
||||||
|
|
||||||
src = fetchPypi {
|
src = sources."python-steam";
|
||||||
inherit pname version;
|
|
||||||
sha256 = "sha256-K1vWkRwNSnMS9EG40WK52NR8i+u478bIhnOTsDI/pS4=";
|
|
||||||
};
|
|
||||||
|
|
||||||
buildInputs = with python3Packages; [ setuptools ];
|
buildInputs = with python3Packages; [ setuptools ];
|
||||||
|
|
||||||
|
|||||||
13
packages/python/python-steam/version.json
Normal file
13
packages/python/python-steam/version.json
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "1.4.4"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"python-steam": {
|
||||||
|
"fetcher": "pypi",
|
||||||
|
"name": "steam",
|
||||||
|
"hash": "sha256-K1vWkRwNSnMS9EG40WK52NR8i+u478bIhnOTsDI/pS4="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,22 +1,28 @@
|
|||||||
{
|
{
|
||||||
lib,
|
lib,
|
||||||
fetchFromGitHub,
|
namespace,
|
||||||
|
pkgs,
|
||||||
python3Packages,
|
python3Packages,
|
||||||
|
...
|
||||||
}:
|
}:
|
||||||
|
let
|
||||||
|
inherit (lib.trivial) importJSON;
|
||||||
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
python3Packages.buildPythonPackage rec {
|
versionSpec = importJSON ./version.json;
|
||||||
|
selected = selectVariant versionSpec null null;
|
||||||
|
sources = mkAllSources pkgs selected;
|
||||||
|
src-meta = selected.sources.pyvesync;
|
||||||
|
version = selected.variables.version;
|
||||||
|
in
|
||||||
|
python3Packages.buildPythonPackage {
|
||||||
pname = "pyvesync";
|
pname = "pyvesync";
|
||||||
version = "3.4.1";
|
inherit version;
|
||||||
pyproject = true;
|
pyproject = true;
|
||||||
|
|
||||||
disabled = python3Packages.pythonOlder "3.11";
|
disabled = python3Packages.pythonOlder "3.11";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = sources.pyvesync;
|
||||||
owner = "webdjoe";
|
|
||||||
repo = "pyvesync";
|
|
||||||
rev = version;
|
|
||||||
hash = "sha256-iqOKBpP/TYgbs6Tq+eWhxBCu/bHYRELXY7r4zjEXU3Q=";
|
|
||||||
};
|
|
||||||
|
|
||||||
build-system = with python3Packages; [ setuptools ];
|
build-system = with python3Packages; [ setuptools ];
|
||||||
|
|
||||||
@@ -31,7 +37,7 @@ python3Packages.buildPythonPackage rec {
|
|||||||
meta = with lib; {
|
meta = with lib; {
|
||||||
description = "Python library to manage Etekcity Devices and Levoit Air Purifier";
|
description = "Python library to manage Etekcity Devices and Levoit Air Purifier";
|
||||||
homepage = "https://github.com/webdjoe/pyvesync";
|
homepage = "https://github.com/webdjoe/pyvesync";
|
||||||
changelog = "https://github.com/webdjoe/pyvesync/releases/tag/${src.tag}";
|
changelog = "https://github.com/webdjoe/pyvesync/releases/tag/${src-meta.tag}";
|
||||||
license = with licenses; [ mit ];
|
license = with licenses; [ mit ];
|
||||||
maintainers = with maintainers; [ fab ];
|
maintainers = with maintainers; [ fab ];
|
||||||
};
|
};
|
||||||
|
|||||||
15
packages/python/pyvesync/version.json
Normal file
15
packages/python/pyvesync/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "3.4.1"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"pyvesync": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "webdjoe",
|
||||||
|
"repo": "pyvesync",
|
||||||
|
"tag": "3.4.1",
|
||||||
|
"hash": "sha256-iqOKBpP/TYgbs6Tq+eWhxBCu/bHYRELXY7r4zjEXU3Q="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,15 +1,25 @@
|
|||||||
{ fetchFromGitHub, home-assistant, ... }:
|
{
|
||||||
home-assistant.python.pkgs.buildPythonPackage rec {
|
lib,
|
||||||
|
namespace,
|
||||||
|
pkgs,
|
||||||
|
home-assistant,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
let
|
||||||
|
inherit (lib.trivial) importJSON;
|
||||||
|
inherit (lib.${namespace}) selectVariant mkAllSources;
|
||||||
|
|
||||||
|
versionSpec = importJSON ./version.json;
|
||||||
|
selected = selectVariant versionSpec null null;
|
||||||
|
sources = mkAllSources pkgs selected;
|
||||||
|
src-meta = selected.sources.wyzeapy;
|
||||||
|
in
|
||||||
|
home-assistant.python.pkgs.buildPythonPackage {
|
||||||
pname = "wyzeapy";
|
pname = "wyzeapy";
|
||||||
version = "0.5.31";
|
version = if src-meta ? tag then src-meta.tag else src-meta.rev;
|
||||||
format = "pyproject";
|
format = "pyproject";
|
||||||
|
|
||||||
src = fetchFromGitHub {
|
src = sources.wyzeapy;
|
||||||
owner = "SecKatie";
|
|
||||||
repo = "wyzeapy";
|
|
||||||
rev = "v${version}";
|
|
||||||
sha256 = "sha256-KDCd1G5Tj0YWM2WA3DJK9rTf1rMzz4qBSUl8FOUbvdM=";
|
|
||||||
};
|
|
||||||
|
|
||||||
build-system = with home-assistant.python.pkgs; [
|
build-system = with home-assistant.python.pkgs; [
|
||||||
poetry-core
|
poetry-core
|
||||||
|
|||||||
15
packages/python/wyzeapy/version.json
Normal file
15
packages/python/wyzeapy/version.json
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{
|
||||||
|
"schemaVersion": 1,
|
||||||
|
"variables": {
|
||||||
|
"version": "0.5.31"
|
||||||
|
},
|
||||||
|
"sources": {
|
||||||
|
"wyzeapy": {
|
||||||
|
"fetcher": "github",
|
||||||
|
"owner": "SecKatie",
|
||||||
|
"repo": "wyzeapy",
|
||||||
|
"tag": "v0.5.31",
|
||||||
|
"hash": "sha256-KDCd1G5Tj0YWM2WA3DJK9rTf1rMzz4qBSUl8FOUbvdM="
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
245
scripts/hooks.py
Normal file
245
scripts/hooks.py
Normal file
@@ -0,0 +1,245 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Per-package hooks for version management.
|
||||||
|
|
||||||
|
Each hook is a callable registered by package name (the relative path under
|
||||||
|
packages/, e.g. 'raspberrypi/linux-rpi') and source component name.
|
||||||
|
|
||||||
|
A hook can override:
|
||||||
|
- fetch_candidates(comp, merged_vars) -> Candidates
|
||||||
|
- prefetch_source(comp, merged_vars) -> Optional[str] (not yet needed)
|
||||||
|
|
||||||
|
Hooks are invoked by both the CLI updater and the TUI.
|
||||||
|
|
||||||
|
Adding a new hook:
|
||||||
|
1. Define a function or class with the required signature.
|
||||||
|
2. Register it via register_candidates_hook(pkg_name, src_name, fn) at module
|
||||||
|
level below.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import re
|
||||||
|
from typing import Callable, Dict, Optional, Tuple
|
||||||
|
|
||||||
|
from lib import (
|
||||||
|
Candidates,
|
||||||
|
Json,
|
||||||
|
gh_head_commit,
|
||||||
|
gh_list_tags,
|
||||||
|
gh_ref_date,
|
||||||
|
gh_release_date,
|
||||||
|
http_get_text,
|
||||||
|
)
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Hook registry
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
# (pkg_name, src_name) -> fn(comp, merged_vars) -> Candidates
|
||||||
|
_CANDIDATES_HOOKS: Dict[Tuple[str, str], Callable] = {}
|
||||||
|
|
||||||
|
|
||||||
|
def register_candidates_hook(pkg: str, src: str, fn: Callable) -> None:
|
||||||
|
_CANDIDATES_HOOKS[(pkg, src)] = fn
|
||||||
|
|
||||||
|
|
||||||
|
def get_candidates_hook(pkg: str, src: str) -> Optional[Callable]:
|
||||||
|
return _CANDIDATES_HOOKS.get((pkg, src))
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Raspberry Pi linux — stable_YYYYMMDD tag selection
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def _rpi_linux_stable_candidates(comp: Json, merged_vars: Json) -> Candidates:
|
||||||
|
from lib import render, gh_latest_release, gh_latest_tag
|
||||||
|
|
||||||
|
c = Candidates()
|
||||||
|
owner = comp.get("owner", "raspberrypi")
|
||||||
|
repo = comp.get("repo", "linux")
|
||||||
|
branch: Optional[str] = comp.get("branch") or None
|
||||||
|
|
||||||
|
tags_all = gh_list_tags(owner, repo)
|
||||||
|
|
||||||
|
rendered = render(comp, merged_vars)
|
||||||
|
cur_tag = str(rendered.get("tag") or "")
|
||||||
|
|
||||||
|
if cur_tag.startswith("stable_") or not branch:
|
||||||
|
# Pick the most recent stable_YYYYMMDD tag
|
||||||
|
stable_tags = sorted(
|
||||||
|
[t for t in tags_all if re.match(r"^stable_\d{8}$", t)],
|
||||||
|
reverse=True,
|
||||||
|
)
|
||||||
|
if stable_tags:
|
||||||
|
c.tag = stable_tags[0]
|
||||||
|
c.tag_date = gh_ref_date(owner, repo, c.tag)
|
||||||
|
else:
|
||||||
|
# Series-based tracking: pick latest rpi-X.Y.* tag
|
||||||
|
mm = str(merged_vars.get("modDirVersion") or "")
|
||||||
|
m = re.match(r"^(\d+)\.(\d+)", mm)
|
||||||
|
if m:
|
||||||
|
base = f"rpi-{m.group(1)}.{m.group(2)}"
|
||||||
|
series = [
|
||||||
|
t
|
||||||
|
for t in tags_all
|
||||||
|
if t == f"{base}.y"
|
||||||
|
or t.startswith(f"{base}.y")
|
||||||
|
or t.startswith(f"{base}.")
|
||||||
|
]
|
||||||
|
series.sort(reverse=True)
|
||||||
|
if series:
|
||||||
|
c.tag = series[0]
|
||||||
|
c.tag_date = gh_ref_date(owner, repo, c.tag)
|
||||||
|
|
||||||
|
if branch:
|
||||||
|
commit = gh_head_commit(owner, repo, branch)
|
||||||
|
if commit:
|
||||||
|
c.commit = commit
|
||||||
|
c.commit_date = gh_ref_date(owner, repo, commit)
|
||||||
|
|
||||||
|
return c
|
||||||
|
|
||||||
|
|
||||||
|
register_candidates_hook(
|
||||||
|
"raspberrypi/linux-rpi", "stable", _rpi_linux_stable_candidates
|
||||||
|
)
|
||||||
|
register_candidates_hook(
|
||||||
|
"raspberrypi/linux-rpi", "unstable", _rpi_linux_stable_candidates
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# CachyOS linux — version from upstream PKGBUILD / .SRCINFO
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_cachyos_linux_version(text: str, is_srcinfo: bool) -> Optional[str]:
|
||||||
|
if is_srcinfo:
|
||||||
|
m = re.search(r"^\s*pkgver\s*=\s*([^\s#]+)\s*$", text, re.MULTILINE)
|
||||||
|
if m:
|
||||||
|
v = m.group(1).strip().replace(".rc", "-rc")
|
||||||
|
return v
|
||||||
|
return None
|
||||||
|
|
||||||
|
# PKGBUILD
|
||||||
|
env: Dict[str, str] = {}
|
||||||
|
for line in text.splitlines():
|
||||||
|
line = line.strip()
|
||||||
|
if not line or line.startswith("#"):
|
||||||
|
continue
|
||||||
|
ma = re.match(r"^\s*([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.+)$", line)
|
||||||
|
if ma:
|
||||||
|
key, val = ma.group(1), ma.group(2).strip()
|
||||||
|
val = re.sub(r"\s+#.*$", "", val).strip()
|
||||||
|
if (val.startswith('"') and val.endswith('"')) or (
|
||||||
|
val.startswith("'") and val.endswith("'")
|
||||||
|
):
|
||||||
|
val = val[1:-1]
|
||||||
|
env[key] = val
|
||||||
|
|
||||||
|
m2 = re.search(r"^\s*pkgver\s*=\s*(.+)$", text, re.MULTILINE)
|
||||||
|
if not m2:
|
||||||
|
return None
|
||||||
|
raw = m2.group(1).strip().strip("\"'")
|
||||||
|
|
||||||
|
def expand(s: str) -> str:
|
||||||
|
s = re.sub(r"\$\{([^}]+)\}", lambda mb: env.get(mb.group(1), mb.group(0)), s)
|
||||||
|
s = re.sub(
|
||||||
|
r"\$([A-Za-z_][A-Za-z0-9_]*)",
|
||||||
|
lambda mu: env.get(mu.group(1), mu.group(0)),
|
||||||
|
s,
|
||||||
|
)
|
||||||
|
return s
|
||||||
|
|
||||||
|
return expand(raw).strip().replace(".rc", "-rc")
|
||||||
|
|
||||||
|
|
||||||
|
def _cachyos_linux_suffix(variant_name: Optional[str]) -> str:
|
||||||
|
if not variant_name:
|
||||||
|
return ""
|
||||||
|
return {"rc": "-rc", "hardened": "-hardened", "lts": "-lts"}.get(variant_name, "")
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_cachyos_linux_version(suffix: str) -> Optional[str]:
    """Fetch the current linux-cachyos{suffix} version from the upstream repo.

    Tries both owner-name spellings of the GitHub repo and prefers the
    machine-generated .SRCINFO over the hand-written PKGBUILD.
    Returns None when nothing is reachable or parseable.
    """
    bases = [
        "https://raw.githubusercontent.com/CachyOS/linux-cachyos/master",
        "https://raw.githubusercontent.com/cachyos/linux-cachyos/master",
    ]
    for base in bases:
        # .SRCINFO first: generated key=value format, simpler to parse.
        text = http_get_text(f"{base}/linux-cachyos{suffix}/.SRCINFO")
        if text:
            v = _parse_cachyos_linux_version(text, is_srcinfo=True)
            if v:
                return v
        # Fall back to the PKGBUILD with shell-variable expansion.
        text = http_get_text(f"{base}/linux-cachyos{suffix}/PKGBUILD")
        if text:
            v = _parse_cachyos_linux_version(text, is_srcinfo=False)
            if v:
                return v
    return None
|
||||||
|
|
||||||
|
def linux_tarball_url(version: str) -> str:
    """Return the kernel.org tarball URL for *version*.

    Release candidates come from git.kernel.org as .tar.gz; stable
    releases come from the CDN as .tar.xz.  A trailing ".0" is dropped
    because kernel.org names e.g. 6.12.0 as linux-6.12.
    """
    if "-rc" in version:
        return f"https://git.kernel.org/torvalds/t/linux-{version}.tar.gz"

    components = version.split(".")
    major = components[0] if components else "6"
    if version.endswith(".0"):
        tar_version = ".".join(components[:2])
    else:
        tar_version = version
    return f"https://cdn.kernel.org/pub/linux/kernel/v{major}.x/linux-{tar_version}.tar.xz"
|
||||||
|
|
||||||
|
# Note: linux-cachyos is not yet in the repo, but the hook is defined here
# so it can be activated when that package is added.
def _cachyos_linux_candidates(comp: Json, merged_vars: Json) -> Candidates:
    """Candidate hook: latest linux-cachyos version from the upstream PKGBUILDs."""
    result = Candidates()
    # The variant name is not available here; the TUI/CLI must pass it via merged_vars
    variant_suffix = str(merged_vars.get("_cachyos_suffix") or "")
    upstream = fetch_cachyos_linux_version(variant_suffix)
    if upstream:
        result.tag = upstream  # use tag slot for display consistency
    return result
|
||||||
|
|
||||||
|
# Activate the hook for the "linux" source of the linux-cachyos package.
register_candidates_hook("linux-cachyos", "linux", _cachyos_linux_candidates)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# CachyOS ZFS — commit pinned in PKGBUILD
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_cachyos_zfs_commit(suffix: str) -> Optional[str]:
    """Return the cachyos/zfs commit SHA pinned in the upstream PKGBUILD.

    Scans linux-cachyos{suffix}/PKGBUILD (both owner-name spellings) for a
    "git+https://github.com/cachyos/zfs.git#commit=<sha>" source entry.
    Returns None when the PKGBUILD is unreachable or carries no such pin.
    """
    bases = [
        "https://raw.githubusercontent.com/CachyOS/linux-cachyos/master",
        "https://raw.githubusercontent.com/cachyos/linux-cachyos/master",
    ]
    for base in bases:
        text = http_get_text(f"{base}/linux-cachyos{suffix}/PKGBUILD")
        if not text:
            continue
        m = re.search(
            r"git\+https://github\.com/cachyos/zfs\.git#commit=([0-9a-f]+)", text
        )
        if m:
            return m.group(1)
    return None
|
||||||
|
|
||||||
|
def _cachyos_zfs_candidates(comp: Json, merged_vars: Json) -> Candidates:
    """Candidate hook: the ZFS commit currently pinned by the CachyOS PKGBUILD."""
    result = Candidates()
    variant_suffix = str(merged_vars.get("_cachyos_suffix") or "")
    pinned = fetch_cachyos_zfs_commit(variant_suffix)
    if not pinned:
        return result
    result.commit = pinned
    source_url = comp.get("url") or ""
    if "github.com" in source_url:
        result.commit_date = gh_ref_date("cachyos", "zfs", pinned)
    else:
        result.commit_date = ""
    return result
|
||||||
|
|
||||||
|
# Activate the hook for the "zfs" source of the linux-cachyos package.
register_candidates_hook("linux-cachyos", "zfs", _cachyos_zfs_candidates)
|
||||||
857
scripts/lib.py
Normal file
857
scripts/lib.py
Normal file
@@ -0,0 +1,857 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Shared library for version.json management.
|
||||||
|
|
||||||
|
Provides:
|
||||||
|
- JSON load/save
|
||||||
|
- Variable template rendering
|
||||||
|
- Base+variant merge (mirrors lib/versioning/default.nix)
|
||||||
|
- GitHub/Git candidate fetching
|
||||||
|
- Nix hash prefetching (fetchFromGitHub, fetchgit, fetchurl, fetchzip, cargo vendor)
|
||||||
|
- Package scanning
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import urllib.error
|
||||||
|
import urllib.parse
|
||||||
|
import urllib.request
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
# JSON-shaped data is passed around as plain dicts.
Json = Dict[str, Any]

# Repository root; this file lives in <root>/scripts/, hence parents[1].
ROOT = Path(__file__).resolve().parents[1]
# Per-package definitions live under <root>/packages/.
PKGS_DIR = ROOT / "packages"
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# I/O
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def load_json(path: Path) -> Json:
    """Parse *path* as UTF-8 JSON and return the decoded object."""
    return json.loads(path.read_text(encoding="utf-8"))
|
||||||
|
|
||||||
|
def save_json(path: Path, data: Json) -> None:
    """Atomically write *data* as pretty-printed UTF-8 JSON to *path*.

    The payload is written to a sibling temp file and then moved into
    place with Path.replace so readers never observe a partial file.
    The temp name appends ".tmp" to the full filename — the previous
    with_suffix(".tmp") substituted the extension, so two files sharing
    a stem (e.g. "a.json" and "a.lock") collided on the same temp path,
    and a real sibling file named "<stem>.tmp" would be clobbered.
    """
    tmp = path.with_name(path.name + ".tmp")
    with tmp.open("w", encoding="utf-8") as f:
        json.dump(data, f, indent=2, ensure_ascii=False)
        f.write("\n")  # trailing newline keeps the file POSIX-friendly
    tmp.replace(path)
|
||||||
|
|
||||||
|
def eprint(*args: Any, **kwargs: Any) -> None:
    """Like builtin print(), but always writes to stderr."""
    print(*args, **kwargs, file=sys.stderr)
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Template rendering
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def render(value: Any, variables: Dict[str, Any]) -> Any:
    """Recursively substitute ${var} in strings using the given variable map.

    Dicts and lists are rendered element-wise; unknown variables are left
    verbatim; every non-string leaf passes through untouched.
    """
    if isinstance(value, dict):
        return {key: render(sub, variables) for key, sub in value.items()}
    if isinstance(value, list):
        return [render(item, variables) for item in value]
    if not isinstance(value, str):
        return value

    def substitute(match: re.Match) -> str:
        name = match.group(1)
        return str(variables.get(name, match.group(0)))

    return re.sub(r"\$\{([^}]+)\}", substitute, value)
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Merge (matches lib/versioning/default.nix)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def _deep_merge(a: Json, b: Json) -> Json:
|
||||||
|
out = dict(a)
|
||||||
|
for k, v in b.items():
|
||||||
|
if k in out and isinstance(out[k], dict) and isinstance(v, dict):
|
||||||
|
out[k] = _deep_merge(out[k], v)
|
||||||
|
else:
|
||||||
|
out[k] = v
|
||||||
|
return out
|
||||||
|
|
||||||
|
|
||||||
|
def _merge_sources(base: Json, overrides: Json) -> Json:
|
||||||
|
names = set(base) | set(overrides)
|
||||||
|
result: Json = {}
|
||||||
|
for n in names:
|
||||||
|
if n in base and n in overrides:
|
||||||
|
b, o = base[n], overrides[n]
|
||||||
|
result[n] = (
|
||||||
|
_deep_merge(b, o) if isinstance(b, dict) and isinstance(o, dict) else o
|
||||||
|
)
|
||||||
|
elif n in overrides:
|
||||||
|
result[n] = overrides[n]
|
||||||
|
else:
|
||||||
|
result[n] = base[n]
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def merged_view(spec: Json, variant_name: Optional[str]) -> Tuple[Json, Json, Json]:
    """
    Return (merged_variables, merged_sources, write_target).

    merged_variables / merged_sources: what to use for display and prefetching.
    write_target: the dict to mutate when saving changes (base spec or the
    variant sub-dict).

    Raises ValueError when *variant_name* is given but not present in the
    spec's "variants" table.
    """
    base_vars: Json = spec.get("variables") or {}
    base_srcs: Json = spec.get("sources") or {}

    if not variant_name:
        # No variant: shallow copies of the base, writes go to the spec.
        return dict(base_vars), dict(base_srcs), spec

    variant = (spec.get("variants") or {}).get(variant_name)
    if not isinstance(variant, dict):
        raise ValueError(f"Variant '{variant_name}' not found in spec")

    merged_vars = {**base_vars, **(variant.get("variables") or {})}
    merged_srcs = _merge_sources(base_srcs, variant.get("sources") or {})
    return merged_vars, merged_srcs, variant
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Shell helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def _run(args: List[str], *, capture_stderr: bool = True) -> Tuple[int, str, str]:
    """Run a command; return (returncode, stripped stdout, stripped stderr).

    Never raises on a non-zero exit (check=False).  When capture_stderr is
    False the child's stderr passes through to ours and '' is returned in
    its place.
    """
    p = subprocess.run(
        args,
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE if capture_stderr else None,
        check=False,
    )
    # stdout/stderr may be None when not captured; normalize to "".
    return p.returncode, (p.stdout or "").strip(), (p.stderr or "").strip()
|
||||||
|
|
||||||
|
def _run_out(args: List[str]) -> Optional[str]:
    """Run *args*; return stripped stdout, or None (logging to stderr) on failure."""
    returncode, stdout, stderr = _run(args)
    if returncode == 0:
        return stdout
    eprint(f"Command failed: {' '.join(args)}\n{stderr}")
    return None
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# HTTP helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def http_get_json(url: str, token: Optional[str] = None) -> Optional[Any]:
    """GET *url* and decode the body as JSON; None on any error.

    Sends the GitHub-style Accept header plus the same User-Agent that
    http_get_text uses (previously this helper sent no explicit UA, the
    only request helper in the file that didn't).  *token*, when given,
    is sent as a Bearer credential.  Errors are logged to stderr rather
    than raised so callers can just truthiness-test the result.
    """
    try:
        req = urllib.request.Request(
            url,
            headers={
                "Accept": "application/vnd.github+json",
                "User-Agent": "nix-version-manager/2.0",
            },
        )
        if token:
            req.add_header("Authorization", f"Bearer {token}")
        with urllib.request.urlopen(req, timeout=15) as resp:
            return json.loads(resp.read().decode("utf-8"))
    except urllib.error.HTTPError as e:
        eprint(f"HTTP {e.code} for {url}: {e.reason}")
    except Exception as e:
        eprint(f"Request failed for {url}: {e}")
    return None
|
||||||
|
|
||||||
|
def http_get_text(url: str) -> Optional[str]:
    """GET *url* and return the body decoded as UTF-8; None on any error.

    Errors (HTTP status, network, decode) are logged to stderr rather
    than raised so callers can simply truthiness-test the result.
    """
    try:
        req = urllib.request.Request(
            url, headers={"User-Agent": "nix-version-manager/2.0"}
        )
        with urllib.request.urlopen(req, timeout=15) as resp:
            return resp.read().decode("utf-8")
    except urllib.error.HTTPError as e:
        eprint(f"HTTP {e.code} for {url}: {e.reason}")
    except Exception as e:
        eprint(f"Request failed for {url}: {e}")
    return None
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# GitHub API helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def gh_token() -> Optional[str]:
    """Return the GITHUB_TOKEN environment variable, or None when unset."""
    return os.getenv("GITHUB_TOKEN")
|
||||||
|
|
||||||
|
def gh_latest_release(owner: str, repo: str) -> Optional[str]:
    """Return the tag name of the repo's latest GitHub release, or None."""
    data = http_get_json(
        f"https://api.github.com/repos/{owner}/{repo}/releases/latest", gh_token()
    )
    # API failures yield None (not a dict), which maps to None here.
    return data.get("tag_name") if isinstance(data, dict) else None
|
||||||
|
|
||||||
|
def gh_latest_tag(
    owner: str, repo: str, *, tag_regex: Optional[str] = None
) -> Optional[str]:
    """Return the first tag from the GitHub tags listing, or None.

    *tag_regex* (re.search semantics) filters the tag names first.
    NOTE(review): the tags endpoint is assumed here to list the newest
    tag first; GitHub does not document a strict date ordering — confirm
    if exact "latest" semantics matter.
    """
    data = http_get_json(
        f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=100", gh_token()
    )
    if not isinstance(data, list):
        return None
    tags = [t["name"] for t in data if isinstance(t, dict) and t.get("name")]
    if tag_regex:
        rx = re.compile(tag_regex)
        tags = [t for t in tags if rx.search(t)]
    return tags[0] if tags else None
|
||||||
|
|
||||||
|
def gh_list_tags(owner: str, repo: str) -> List[str]:
    """Return up to 100 tag names for the repo ([] on API failure)."""
    data = http_get_json(
        f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=100", gh_token()
    )
    if not isinstance(data, list):
        return []
    # Skip malformed entries defensively.
    return [t["name"] for t in data if isinstance(t, dict) and t.get("name")]
|
||||||
|
|
||||||
|
def gh_head_commit(
    owner: str, repo: str, branch: Optional[str] = None
) -> Optional[str]:
    """Return the tip commit SHA of *branch* (or HEAD) of a GitHub repo.

    Delegates to git_branch_commit: the previous inline `git ls-remote`
    parsing here was a byte-for-byte duplicate of that helper, so the two
    are now kept consistent by construction.  Returns None when the ref
    does not exist or the remote is unreachable.
    """
    return git_branch_commit(f"https://github.com/{owner}/{repo}.git", branch)
|
||||||
|
|
||||||
|
def gh_release_tags(owner: str, repo: str) -> List[str]:
    """Return tag names of up to 50 recent releases ([] on API failure)."""
    data = http_get_json(
        f"https://api.github.com/repos/{owner}/{repo}/releases?per_page=50", gh_token()
    )
    if not isinstance(data, list):
        return []
    # Skip malformed entries defensively.
    return [r["tag_name"] for r in data if isinstance(r, dict) and r.get("tag_name")]
|
||||||
|
|
||||||
|
def _iso_to_date(iso: str) -> str:
|
||||||
|
return iso[:10] if iso and len(iso) >= 10 else ""
|
||||||
|
|
||||||
|
|
||||||
|
def gh_ref_date(owner: str, repo: str, ref: str) -> str:
    """Return the commit date (YYYY-MM-DD) for *ref* via the GitHub API.

    Prefers the committer date, falling back to the author date; returns
    '' when the lookup fails or neither date is present.
    """
    data = http_get_json(
        f"https://api.github.com/repos/{owner}/{repo}/commits/{urllib.parse.quote(ref, safe='')}",
        gh_token(),
    )
    if not isinstance(data, dict):
        return ""
    iso = (
        (data.get("commit") or {}).get("committer", {}).get("date")
        or (data.get("commit") or {}).get("author", {}).get("date")
        or ""
    )
    return _iso_to_date(iso)
|
||||||
|
|
||||||
|
def gh_release_date(owner: str, repo: str, tag: str) -> str:
    """Return the publish date (YYYY-MM-DD) of a GitHub release tag.

    Prefers the release's published_at/created_at timestamps; falls back
    to the tagged commit's date when the release metadata is unusable.
    """
    data = http_get_json(
        f"https://api.github.com/repos/{owner}/{repo}/releases/tags/{urllib.parse.quote(tag, safe='')}",
        gh_token(),
    )
    if isinstance(data, dict):
        iso = data.get("published_at") or data.get("created_at") or ""
        if iso:
            return _iso_to_date(iso)
    # No release (or no timestamp): use the commit date of the tag itself.
    return gh_ref_date(owner, repo, tag)
|
||||||
|
|
||||||
|
def git_branch_commit(url: str, branch: Optional[str] = None) -> Optional[str]:
    """Return the tip commit SHA of *branch* (or HEAD) via `git ls-remote`.

    Works for any git remote, not just GitHub.  Returns None when the
    remote is unreachable or the ref does not exist.
    """
    ref = f"refs/heads/{branch}" if branch else "HEAD"
    out = _run_out(["git", "ls-remote", url, ref])
    if not out:
        return None
    for line in out.splitlines():
        # ls-remote lines are "<sha>\t<ref>"; the first field is the SHA.
        parts = line.split()
        if parts:
            return parts[0]
    return None
|
||||||
|
|
||||||
|
def git_commit_date_for_github(url: str, sha: str) -> str:
    """Resolve *sha*'s commit date (YYYY-MM-DD) via the GitHub API.

    Non-github.com URLs, malformed paths, and any lookup error all yield
    the empty string.
    """
    try:
        parsed = urllib.parse.urlparse(url)
        if parsed.hostname != "github.com":
            return ""
        segments = [seg for seg in parsed.path.split("/") if seg]
        if len(segments) < 2:
            return ""
        repo_owner, repo_segment = segments[0], segments[1]
        return gh_ref_date(repo_owner, repo_segment.removesuffix(".git"), sha)
    except Exception:
        return ""
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Nix prefetch helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def _nix_fakehash_build(expr: str) -> Optional[str]:
    """
    Build a Nix expression that intentionally uses lib.fakeHash, parse the
    correct hash from the 'got:' line in the error output.
    """
    p = subprocess.run(
        ["nix", "build", "--impure", "--expr", expr],
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    # The build is expected to fail with a hash mismatch; the true SRI
    # hash appears on the "got: sha256-..." line of stderr.
    m = re.search(r"got:\s+(sha256-[A-Za-z0-9+/=]+)", p.stderr)
    if m:
        return m.group(1)
    eprint(f"nix fakeHash build failed:\n{p.stderr[-800:]}")
    return None
|
||||||
|
|
||||||
|
def prefetch_github(
    owner: str, repo: str, rev: str, *, submodules: bool = False
) -> Optional[str]:
    """
    Hash for fetchFromGitHub — NAR hash of unpacked tarball.
    Must use the fakeHash trick; nix store prefetch-file gives the wrong hash.
    """
    # Nix booleans are lowercase literals interpolated into the expression.
    sub = "true" if submodules else "false"
    expr = (
        f"let pkgs = import <nixpkgs> {{}};\n"
        f"in pkgs.fetchFromGitHub {{\n"
        f'  owner = "{owner}";\n'
        f'  repo = "{repo}";\n'
        f'  rev = "{rev}";\n'
        f"  fetchSubmodules = {sub};\n"
        f"  hash = pkgs.lib.fakeHash;\n"
        f"}}"
    )
    return _nix_fakehash_build(expr)
|
||||||
|
|
||||||
|
def prefetch_url(url: str) -> Optional[str]:
    """
    Flat (non-unpacked) hash for fetchurl.
    Uses nix store prefetch-file; falls back to nix-prefetch-url.
    """
    out = _run_out(
        ["nix", "store", "prefetch-file", "--hash-type", "sha256", "--json", url]
    )
    if out:
        try:
            data = json.loads(out)
            if "hash" in data:
                return data["hash"]
        except Exception:
            pass  # unparseable prefetch-file output; try the legacy tool

    # Legacy fallback: nix-prefetch-url prints a base32 hash that must be
    # converted to SRI form before it can be used in a fetchurl call.
    out = _run_out(["nix-prefetch-url", "--type", "sha256", url])
    if out is None:
        out = _run_out(["nix-prefetch-url", url])
    if out is None:
        return None
    return _run_out(["nix", "hash", "to-sri", "--type", "sha256", out])
|
||||||
|
|
||||||
|
def prefetch_fetchzip(url: str, *, strip_root: bool = True) -> Optional[str]:
    """Hash for fetchzip — NAR of unpacked archive. Must use the fakeHash trick."""
    # strip_root mirrors fetchzip's stripRoot argument (drop the archive's
    # single top-level directory when true).
    expr = (
        f"let pkgs = import <nixpkgs> {{}};\n"
        f"in pkgs.fetchzip {{\n"
        f'  url = "{url}";\n'
        f"  stripRoot = {'true' if strip_root else 'false'};\n"
        f"  hash = pkgs.lib.fakeHash;\n"
        f"}}"
    )
    return _nix_fakehash_build(expr)
|
||||||
|
|
||||||
|
def prefetch_git(url: str, rev: str) -> Optional[str]:
    """Hash for fetchgit.

    Primary path: nix-prefetch-git (JSON or plain output), converting its
    base32 hash to SRI form.  Fallback for full 40-char commit SHAs:
    builtins.fetchGit + `nix hash path`.
    """
    out = _run_out(["nix-prefetch-git", "--no-deepClone", "--rev", rev, url])
    if out is not None:
        base32 = None
        try:
            data = json.loads(out)
            base32 = data.get("sha256") or data.get("hash")
        except Exception:
            # Non-JSON output: the hash is the last non-empty line.
            lines = [l for l in out.splitlines() if l.strip()]
            if lines:
                base32 = lines[-1].strip()
        if base32:
            return _run_out(["nix", "hash", "to-sri", "--type", "sha256", base32])

    # Fallback: builtins.fetchGit + nix hash path (commit SHA only)
    if re.match(r"^[0-9a-f]{40}$", rev):
        expr = f'builtins.fetchGit {{ url = "{url}"; rev = "{rev}"; }}'
        store_path = _run_out(["nix", "eval", "--raw", "--expr", expr])
        if store_path:
            return _run_out(["nix", "hash", "path", "--type", "sha256", store_path])

    return None
|
||||||
|
|
||||||
|
def prefetch_cargo_vendor(
    fetcher: str,
    src_hash: str,
    *,
    url: str = "",
    owner: str = "",
    repo: str = "",
    rev: str = "",
    subdir: str = "",
) -> Optional[str]:
    """Compute the cargo vendor hash via fetchCargoVendor + fakeHash.

    *src_hash* is the already-known source hash; the source fetch
    expression is rebuilt here so fetchCargoVendor can vendor the Cargo
    dependencies.  Returns None when the fetcher/arguments are incomplete
    or the build produced no hash-mismatch line.
    """
    # Build the Nix source expression matching the component's fetcher.
    if fetcher == "github" and owner and repo and rev and src_hash:
        src_expr = (
            f'pkgs.fetchFromGitHub {{ owner = "{owner}"; repo = "{repo}";'
            f' rev = "{rev}"; hash = "{src_hash}"; }}'
        )
    elif fetcher == "git" and url and rev and src_hash:
        # github.com git URLs are rewritten to fetchFromGitHub.
        parsed = urllib.parse.urlparse(url)
        parts = [p for p in parsed.path.split("/") if p]
        if parsed.hostname == "github.com" and len(parts) >= 2:
            gh_owner, gh_repo = parts[0], parts[1]
            src_expr = (
                f'pkgs.fetchFromGitHub {{ owner = "{gh_owner}"; repo = "{gh_repo}";'
                f' rev = "{rev}"; hash = "{src_hash}"; }}'
            )
        else:
            src_expr = f'pkgs.fetchgit {{ url = "{url}"; rev = "{rev}"; hash = "{src_hash}"; }}'
    else:
        return None

    # Optional sub-crate: point fetchCargoVendor at a subdirectory of src.
    subdir_attr = f'sourceRoot = "${{src.name}}/{subdir}";' if subdir else ""
    expr = (
        f"let pkgs = import <nixpkgs> {{}};\n"
        f"    src = {src_expr};\n"
        f"in pkgs.rustPlatform.fetchCargoVendor {{\n"
        f"  inherit src;\n"
        f"  {subdir_attr}\n"
        f"  hash = pkgs.lib.fakeHash;\n"
        f"}}"
    )
    # Same fakeHash trick as _nix_fakehash_build: the real hash is parsed
    # from the expected "got: sha256-..." mismatch line on stderr.
    p = subprocess.run(
        ["nix", "build", "--impure", "--expr", expr],
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=False,
    )
    m = re.search(r"got:\s+(sha256-[A-Za-z0-9+/=]+)", p.stderr)
    if m:
        return m.group(1)
    eprint(f"cargo vendor prefetch failed:\n{p.stderr[-600:]}")
    return None
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Source prefetch dispatch
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def prefetch_source(comp: Json, merged_vars: Json) -> Optional[str]:
    """
    Compute and return the SRI hash for a source component using the correct
    Nix fetcher. Returns None on failure.
    """
    fetcher = comp.get("fetcher", "none")
    # Render ${var} templates (tag/rev/url) against the merged variables.
    rendered = render(comp, merged_vars)

    if fetcher == "github":
        owner = comp.get("owner") or ""
        repo = comp.get("repo") or ""
        # A tag takes precedence over a pinned rev here.
        ref = rendered.get("tag") or rendered.get("rev") or ""
        submodules = bool(comp.get("submodules", False))
        if owner and repo and ref:
            return prefetch_github(owner, repo, ref, submodules=submodules)

    elif fetcher == "git":
        url = comp.get("url") or ""
        # Note the priority is reversed vs github: rev wins over tag.
        rev = rendered.get("rev") or rendered.get("tag") or ""
        if url and rev:
            return prefetch_git(url, rev)

    elif fetcher == "url":
        url = rendered.get("url") or rendered.get("urlTemplate") or ""
        if url:
            extra = comp.get("extra") or {}
            # Archives marked unpack=zip need fetchzip's NAR hash instead
            # of the flat file hash.
            if extra.get("unpack") == "zip":
                return prefetch_fetchzip(url, strip_root=extra.get("stripRoot", True))
            return prefetch_url(url)

    return None
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Candidate fetching (what versions are available upstream)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class Candidates:
    """Latest available refs for a source component.

    Each field is a string; "" means "not fetched / unknown".
    """

    __slots__ = ("release", "release_date", "tag", "tag_date", "commit", "commit_date")

    def __init__(self) -> None:
        # Every slot starts out empty.
        for slot in self.__slots__:
            setattr(self, slot, "")
|
||||||
|
|
||||||
|
def fetch_candidates(comp: Json, merged_vars: Json) -> Candidates:
    """
    Fetch the latest release, tag, and commit for a source component.
    For 'url' fetcher with github variables, fetches the latest release tag.

    Missing/unresolvable values are left as "" in the returned Candidates.
    """
    c = Candidates()
    fetcher = comp.get("fetcher", "none")
    # "branch" pins updates to a branch's HEAD commit (see schema docs).
    branch: Optional[str] = comp.get("branch") or None

    if fetcher == "github":
        owner = comp.get("owner") or ""
        repo = comp.get("repo") or ""
        if not (owner and repo):
            return c

        # With a pinned branch only the HEAD commit matters; skip the
        # release/tag lookups entirely.
        if not branch:
            r = gh_latest_release(owner, repo)
            if r:
                c.release = r
                c.release_date = gh_release_date(owner, repo, r)
            t = gh_latest_tag(owner, repo)
            if t:
                c.tag = t
                c.tag_date = gh_ref_date(owner, repo, t)

        m = gh_head_commit(owner, repo, branch)
        if m:
            c.commit = m
            c.commit_date = gh_ref_date(owner, repo, m)

    elif fetcher == "git":
        url = comp.get("url") or ""
        if url:
            m = git_branch_commit(url, branch)
            if m:
                c.commit = m
                # Date resolution only works for github.com remotes.
                c.commit_date = git_commit_date_for_github(url, m)

    elif fetcher == "url":
        url_info = _url_source_info(comp, merged_vars)
        kind = url_info.get("kind")

        if kind == "github":
            owner = url_info["owner"]
            repo = url_info["repo"]
            tags = gh_release_tags(owner, repo)
            # Optional prefix/suffix variables narrow the release list
            # (useful for repos with multiple tag namespaces).
            prefix = str(merged_vars.get("releasePrefix") or "")
            suffix = str(merged_vars.get("releaseSuffix") or "")
            if prefix or suffix:
                latest = next(
                    (t for t in tags if t.startswith(prefix) and t.endswith(suffix)),
                    None,
                )
            else:
                latest = tags[0] if tags else None
            if latest:
                c.release = latest
                c.release_date = gh_release_date(owner, repo, latest)

        elif kind == "pypi":
            name = url_info["name"]
            latest = pypi_latest_version(name)
            if latest:
                c.release = latest

        elif kind == "openvsx":
            publisher = url_info["publisher"]
            ext_name = url_info["ext_name"]
            latest = openvsx_latest_version(publisher, ext_name)
            if latest:
                c.release = latest

    return c
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Non-git upstream version helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def pypi_latest_version(name: str) -> Optional[str]:
    """Return the latest stable release version from PyPI."""
    data = http_get_json(f"https://pypi.org/pypi/{urllib.parse.quote(name)}/json")
    if not isinstance(data, dict):
        return None
    # info.version is PyPI's "latest release" field for the project.
    return (data.get("info") or {}).get("version") or None
|
||||||
|
|
||||||
|
def pypi_hash(name: str, version: str) -> Optional[str]:
    """
    Return the SRI hash for the sdist (preferred) or a wheel of a PyPI release.

    Looks up the release's file list via the PyPI JSON API, then delegates
    the actual hashing to prefetch_url.  None when the release or its
    files cannot be resolved.
    """
    data = http_get_json(
        f"https://pypi.org/pypi/{urllib.parse.quote(name)}/{urllib.parse.quote(version)}/json"
    )
    if not isinstance(data, dict):
        return None

    urls = data.get("urls") or []
    # Prefer sdist; fall back to any wheel
    sdist_url = next((u["url"] for u in urls if u.get("packagetype") == "sdist"), None)
    wheel_url = next(
        (u["url"] for u in urls if u.get("packagetype") == "bdist_wheel"), None
    )
    url = sdist_url or wheel_url
    if not url:
        return None
    return prefetch_url(url)
|
||||||
|
|
||||||
|
def openvsx_latest_version(publisher: str, ext_name: str) -> Optional[str]:
    """Return the latest version of an extension from Open VSX Registry."""
    data = http_get_json(
        f"https://open-vsx.org/api/{urllib.parse.quote(publisher)}/{urllib.parse.quote(ext_name)}"
    )
    if not isinstance(data, dict):
        return None
    # The unversioned extension endpoint reports the newest version.
    return data.get("version") or None
|
||||||
|
|
||||||
|
def _url_source_info(comp: Json, merged_vars: Json) -> Json:
|
||||||
|
"""
|
||||||
|
Classify a url-fetcher source and extract the relevant identifiers.
|
||||||
|
Returns a dict with at least 'kind' in:
|
||||||
|
'github' — GitHub release asset; includes 'owner', 'repo'
|
||||||
|
'pypi' — PyPI package; includes 'name', 'version_var'
|
||||||
|
'openvsx' — Open VSX extension; includes 'publisher', 'ext_name', 'version_var'
|
||||||
|
'plain' — plain URL with a version variable; includes 'version_var' if found
|
||||||
|
'static' — hardcoded URL with no variable parts
|
||||||
|
"""
|
||||||
|
tmpl = comp.get("urlTemplate") or comp.get("url") or ""
|
||||||
|
|
||||||
|
# Check merged_vars for explicit github owner/repo
|
||||||
|
owner = str(merged_vars.get("owner") or "")
|
||||||
|
repo = str(merged_vars.get("repo") or "")
|
||||||
|
if owner and repo:
|
||||||
|
return {"kind": "github", "owner": owner, "repo": repo}
|
||||||
|
|
||||||
|
# Detect from URL template
|
||||||
|
gh_m = re.search(r"github\.com/([^/\$]+)/([^/\$]+)/releases/download", tmpl)
|
||||||
|
if gh_m:
|
||||||
|
vvar = _find_version_var(tmpl, merged_vars)
|
||||||
|
return {
|
||||||
|
"kind": "github",
|
||||||
|
"owner": gh_m.group(1),
|
||||||
|
"repo": gh_m.group(2),
|
||||||
|
"version_var": vvar,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Open VSX (open-vsx.org/api/${publisher}/${name}/${version}/...)
|
||||||
|
vsx_m = re.search(
|
||||||
|
r"open-vsx\.org/api/([^/\$]+)/([^/\$]+)/(?:\$\{[^}]+\}|[^/]+)/file", tmpl
|
||||||
|
)
|
||||||
|
if not vsx_m:
|
||||||
|
# Also match when publisher/name come from variables
|
||||||
|
if "open-vsx.org/api/" in tmpl:
|
||||||
|
publisher = str(merged_vars.get("publisher") or "")
|
||||||
|
ext_name = str(merged_vars.get("name") or "")
|
||||||
|
if publisher and ext_name:
|
||||||
|
vvar = _find_version_var(tmpl, merged_vars)
|
||||||
|
return {
|
||||||
|
"kind": "openvsx",
|
||||||
|
"publisher": publisher,
|
||||||
|
"ext_name": ext_name,
|
||||||
|
"version_var": vvar,
|
||||||
|
}
|
||||||
|
if vsx_m:
|
||||||
|
publisher = vsx_m.group(1)
|
||||||
|
ext_name = vsx_m.group(2)
|
||||||
|
# publisher/ext_name may be literal or variable refs
|
||||||
|
publisher = str(merged_vars.get(publisher.lstrip("${").rstrip("}"), publisher))
|
||||||
|
ext_name = str(merged_vars.get(ext_name.lstrip("${").rstrip("}"), ext_name))
|
||||||
|
vvar = _find_version_var(tmpl, merged_vars)
|
||||||
|
return {
|
||||||
|
"kind": "openvsx",
|
||||||
|
"publisher": publisher,
|
||||||
|
"ext_name": ext_name,
|
||||||
|
"version_var": vvar,
|
||||||
|
}
|
||||||
|
|
||||||
|
# PyPI: files.pythonhosted.org URLs
|
||||||
|
if "files.pythonhosted.org" in tmpl or "pypi.org" in tmpl:
|
||||||
|
pypi_name = str(merged_vars.get("name") or "")
|
||||||
|
if not pypi_name:
|
||||||
|
m = re.search(r"/packages/[^/]+/[^/]+/([^/]+)-\d", tmpl)
|
||||||
|
pypi_name = m.group(1).replace("_", "-") if m else ""
|
||||||
|
vvar = _find_version_var(tmpl, merged_vars)
|
||||||
|
return {"kind": "pypi", "name": pypi_name, "version_var": vvar}
|
||||||
|
|
||||||
|
vvar = _find_version_var(tmpl, merged_vars)
|
||||||
|
if vvar:
|
||||||
|
return {"kind": "plain", "version_var": vvar}
|
||||||
|
|
||||||
|
return {"kind": "static"}
|
||||||
|
|
||||||
|
|
||||||
|
def _find_version_var(tmpl: str, merged_vars: Json) -> str:
    """
    Pick the variable from merged_vars that best represents a version.

    Only variables actually referenced as ``${name}`` in *tmpl* are
    considered.  A key literally called 'version' wins outright; otherwise
    the first candidate whose value looks like a semver/calver string is
    chosen; otherwise the first candidate.  Returns '' when none match.
    """
    present = [key for key in merged_vars if f"${{{key}}}" in tmpl]
    if "version" in present:
        return "version"
    # Fall back to the candidate whose value most resembles a version string.
    looks_like_version = re.compile(r"^\d+[\.\-]\d")
    versionish = next(
        (key for key in present if looks_like_version.match(str(merged_vars.get(key, "")))),
        None,
    )
    if versionish is not None:
        return versionish
    return present[0] if present else ""
|
||||||
|
|
||||||
|
|
||||||
|
def apply_version_update(
    comp: Json,
    merged_vars: Json,
    target_dict: Json,
    new_version: str,
    version_var: str = "version",
) -> None:
    """
    Write `new_version` into the correct location in `target_dict`.

    For url sources the version lives in `variables.<version_var>`.
    For pypi sources it also lives in `variables.version` (the name is fixed).
    Clears the stale hash of the first source that carries one so it gets
    re-prefetched.

    `comp` and `merged_vars` are accepted for API symmetry with the other
    update helpers but are read-only here; all writes go through
    `target_dict`.
    """
    # Update the version variable on the target (base or variant) dict.
    variables = target_dict.setdefault("variables", {})
    variables[version_var] = new_version

    # Clear the old hash on the first source that has one so it must be
    # re-prefetched.  (The previous implementation additionally contained a
    # dead `for ... pass` loop over `comp` in the no-source case; it had no
    # effect and has been removed.  Hashes stored on the base spec when
    # `target_dict` is a variant are intentionally left untouched.)
    for src in (target_dict.get("sources") or {}).values():
        if isinstance(src, dict) and "hash" in src:
            src.pop("hash", None)
            break
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Package discovery
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def find_packages() -> List[Tuple[str, Path]]:
    """
    Scan packages/ for version.json files.
    Returns sorted list of (display_name, path) tuples.
    """
    discovered = [
        (str(vjson.relative_to(PKGS_DIR).parent), vjson)
        for vjson in PKGS_DIR.rglob("version.json")
    ]
    return sorted(discovered)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Source display helper
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def source_ref_label(comp: Json, merged_vars: Json) -> str:
    """Return a short human-readable reference string for a source.

    Examples: "owner/repo@v1.2" for github, an abbreviated commit hash for
    git, a (template) filename for url sources, or "" when nothing useful
    can be derived.
    """
    fetcher = comp.get("fetcher", "none")
    rendered = render(comp, merged_vars)

    if fetcher == "github":
        tag = rendered.get("tag") or ""
        rev = rendered.get("rev") or ""
        owner = rendered.get("owner") or str(merged_vars.get("owner") or "")
        repo = rendered.get("repo") or str(merged_vars.get("repo") or "")
        if tag and owner and repo:
            return f"{owner}/{repo}@{tag}"
        if tag:
            return tag
        if rev and owner and repo:
            return f"{owner}/{repo}@{rev[:7]}"
        if rev:
            return rev[:12]
        return ""

    if fetcher == "git":
        ref = rendered.get("tag") or rendered.get("rev") or comp.get("version") or ""
        # Abbreviate full 40-char lowercase-hex commit hashes for display.
        if len(ref) == 40 and all(c in "0123456789abcdef" for c in ref):
            return ref[:12]
        return ref

    if fetcher == "url":
        url = rendered.get("url") or rendered.get("urlTemplate") or ""
        if not url:
            return ""
        if "${" in url:
            # Unresolved template variables remain: show the template's
            # filename with each ${var} rewritten as <var>.
            tmpl = comp.get("urlTemplate") or comp.get("url") or url
            filename = os.path.basename(urllib.parse.urlparse(tmpl).path)
            return re.sub(r"\$\{([^}]+)\}", r"<\1>", filename)
        filename = os.path.basename(urllib.parse.urlparse(url).path)
        owner = str(merged_vars.get("owner") or "")
        repo = str(merged_vars.get("repo") or "")
        rp = str(merged_vars.get("releasePrefix") or "")
        rs = str(merged_vars.get("releaseSuffix") or "")
        base = str(merged_vars.get("base") or "")
        rel = str(merged_vars.get("release") or "")
        tag = f"{rp}{base}-{rel}{rs}" if (base and rel) else ""
        if owner and repo and tag and filename:
            # Fix: previously returned the literal placeholder "(unknown)"
            # here even though `filename` had already been computed.
            return f"{owner}/{repo}@{tag} · {filename}"
        return filename or url

    return str(comp.get("version") or comp.get("tag") or comp.get("rev") or "")
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Deep set helper
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def deep_set(obj: Json, path: List[str], value: Any) -> None:
    """Assign *value* at the nested dot-path *path* inside *obj*,
    creating (or replacing non-dict) intermediate dictionaries as needed."""
    node = obj
    for segment in path[:-1]:
        child = node.get(segment)
        if not isinstance(child, dict):
            child = {}
            node[segment] = child
        node = child
    node[path[-1]] = value
|
||||||
472
scripts/update.py
Normal file
472
scripts/update.py
Normal file
@@ -0,0 +1,472 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
version.json CLI updater.
|
||||||
|
|
||||||
|
Usage examples:
|
||||||
|
# Update a GitHub source to its latest release tag, then recompute hash
|
||||||
|
scripts/update.py --file packages/edk2/version.json --github-latest-release --prefetch
|
||||||
|
|
||||||
|
# Update a specific component to the latest commit
|
||||||
|
scripts/update.py --file packages/edk2/version.json --component edk2 --github-latest-commit --prefetch
|
||||||
|
|
||||||
|
# Update all URL-based sources in a file (recompute hash only)
|
||||||
|
scripts/update.py --file packages/uboot/version.json --url-prefetch
|
||||||
|
|
||||||
|
# Update a variant's variables
|
||||||
|
scripts/update.py --file packages/proton-cachyos/version.json --variant cachyos-v4 \\
|
||||||
|
--set variables.base=10.0 --set variables.release=20260301
|
||||||
|
|
||||||
|
# Filter tags with a regex (e.g. only stable_* tags)
|
||||||
|
scripts/update.py --file packages/raspberrypi/linux-rpi/version.json \\
|
||||||
|
--component stable --github-latest-tag --tag-regex '^stable_\\d{8}$' --prefetch
|
||||||
|
|
||||||
|
# Update a fetchgit source to HEAD
|
||||||
|
scripts/update.py --file packages/linux-cachyos/version.json --component zfs --git-latest --prefetch
|
||||||
|
|
||||||
|
# Dry run (show what would change, don't write)
|
||||||
|
scripts/update.py --file packages/edk2/version.json --github-latest-release --prefetch --dry-run
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
# Ensure scripts/ is on the path so we can import lib and hooks
|
||||||
|
sys.path.insert(0, str(Path(__file__).resolve().parent))
|
||||||
|
|
||||||
|
import lib
|
||||||
|
import hooks # noqa: F401 — registers hooks as a side effect
|
||||||
|
|
||||||
|
|
||||||
|
def _apply_set_pairs(target: lib.Json, pairs: List[str]) -> bool:
    """Apply KEY=VALUE dot-path assignments from --set onto *target*.

    Malformed pairs (no '=') are reported and skipped.  Returns True when
    at least one assignment was written.
    """
    applied = False
    for raw in pairs:
        if "=" not in raw:
            lib.eprint(f"--set: expected KEY=VALUE, got: {raw!r}")
            continue
        dotted, value = raw.split("=", 1)
        segments = [seg for seg in dotted.strip().split(".") if seg]
        lib.deep_set(target, segments, value)
        lib.eprint(f"  set {'.'.join(segments)} = {value!r}")
        applied = True
    return applied
|
||||||
|
|
||||||
|
|
||||||
|
def update_components(
    spec: lib.Json,
    variant: Optional[str],
    components: Optional[List[str]],
    args: argparse.Namespace,
) -> bool:
    """Apply the requested ref/version/hash updates to *spec* in place.

    Iterates the merged (base + variant) source view and, per fetcher kind,
    honors the CLI flags in *args* (--set-branch, --github-latest-*,
    --git-latest, --latest-version, --prefetch, --url-prefetch).  Writes
    land in the correct dict: the variant's sources/variables when
    *variant* is given, otherwise the base spec.  Returns True when
    anything was modified.
    """
    changed = False
    merged_vars, merged_srcs, target_dict = lib.merged_view(spec, variant)
    target_sources: lib.Json = target_dict.setdefault("sources", {})

    # Component selection: everything by default, otherwise the requested
    # subset; unknown names are warned about but do not abort.
    names = (
        list(merged_srcs.keys())
        if not components
        else [c for c in components if c in merged_srcs]
    )
    if components:
        missing = [c for c in components if c not in merged_srcs]
        for m in missing:
            lib.eprint(f"  [warn] component '{m}' not found in merged sources")

    for name in names:
        view_comp = merged_srcs[name]  # read-only merged view
        fetcher = view_comp.get("fetcher", "none")
        comp = target_sources.setdefault(name, {})  # write target

        if fetcher == "github":
            owner = view_comp.get("owner") or ""
            repo = view_comp.get("repo") or ""
            if not (owner and repo):
                lib.eprint(f"  [{name}] missing owner/repo, skipping")
                continue

            # --set-branch: update branch field and fetch HEAD of that branch
            if args.set_branch is not None:
                new_branch = args.set_branch or None  # empty string → clear branch
                if new_branch:
                    comp["branch"] = new_branch
                    lib.eprint(f"  [{name}] branch -> {new_branch!r}")
                else:
                    comp.pop("branch", None)
                    lib.eprint(f"  [{name}] branch cleared")
                changed = True
                # Resolve the branch tip (None branch → repo default branch).
                rev = lib.gh_head_commit(owner, repo, new_branch)
                if rev:
                    comp["rev"] = rev
                    comp.pop("tag", None)  # rev and tag are mutually exclusive
                    lib.eprint(f"  [{name}] rev -> {rev}")
                    changed = True
                    if args.prefetch:
                        sri = lib.prefetch_github(
                            owner,
                            repo,
                            rev,
                            submodules=bool(view_comp.get("submodules", False)),
                        )
                        if sri:
                            comp["hash"] = sri
                            lib.eprint(f"  [{name}] hash -> {sri}")
                            changed = True
                else:
                    lib.eprint(
                        f"  [{name}] could not resolve HEAD for branch {new_branch!r}"
                    )
                continue  # skip the normal ref-update logic for this component

            new_ref: Optional[str] = None
            ref_kind = ""

            # Resolve the requested kind of ref; the three flags are
            # mutually exclusive by elif-priority (release > tag > commit).
            if args.github_latest_release:
                tag = lib.gh_latest_release(owner, repo)
                if tag:
                    new_ref, ref_kind = tag, "tag"
            elif args.github_latest_tag:
                tag = lib.gh_latest_tag(owner, repo, tag_regex=args.tag_regex)
                if tag:
                    new_ref, ref_kind = tag, "tag"
            elif args.github_latest_commit:
                rev = lib.gh_head_commit(owner, repo)
                if rev:
                    new_ref, ref_kind = rev, "rev"

            if new_ref:
                # Record the new ref, dropping the other mutually-exclusive field.
                if ref_kind == "tag":
                    comp["tag"] = new_ref
                    comp.pop("rev", None)
                else:
                    comp["rev"] = new_ref
                    comp.pop("tag", None)
                lib.eprint(f"  [{name}] {ref_kind} -> {new_ref}")
                changed = True

            if args.prefetch and (new_ref or args.url_prefetch):
                # Use merged view with the updated ref for prefetching
                merged_vars2, merged_srcs2, _ = lib.merged_view(spec, variant)
                view2 = lib.render(merged_srcs2.get(name, view_comp), merged_vars2)
                sri = lib.prefetch_github(
                    owner,
                    repo,
                    view2.get("tag") or view2.get("rev") or new_ref or "",
                    submodules=bool(view_comp.get("submodules", False)),
                )
                if sri:
                    comp["hash"] = sri
                    lib.eprint(f"  [{name}] hash -> {sri}")
                    changed = True

        elif fetcher == "git":
            url = view_comp.get("url") or ""
            if not url:
                lib.eprint(f"  [{name}] missing url for git fetcher, skipping")
                continue

            # --set-branch: update branch field and fetch HEAD of that branch
            if args.set_branch is not None:
                new_branch = args.set_branch or None
                if new_branch:
                    comp["branch"] = new_branch
                    lib.eprint(f"  [{name}] branch -> {new_branch!r}")
                else:
                    comp.pop("branch", None)
                    lib.eprint(f"  [{name}] branch cleared")
                changed = True
                rev = lib.git_branch_commit(url, new_branch)
                if rev:
                    comp["rev"] = rev
                    lib.eprint(f"  [{name}] rev -> {rev}")
                    changed = True
                    if args.prefetch:
                        sri = lib.prefetch_git(url, rev)
                        if sri:
                            comp["hash"] = sri
                            lib.eprint(f"  [{name}] hash -> {sri}")
                            changed = True
                else:
                    lib.eprint(
                        f"  [{name}] could not resolve HEAD for branch {new_branch!r}"
                    )
                continue

            if args.git_latest:
                # Follow the tracked branch if one is recorded (None → default).
                rev = lib.git_branch_commit(url, view_comp.get("branch"))
                if rev:
                    comp["rev"] = rev
                    lib.eprint(f"  [{name}] rev -> {rev}")
                    changed = True
                    if args.prefetch:
                        sri = lib.prefetch_git(url, rev)
                        if sri:
                            comp["hash"] = sri
                            lib.eprint(f"  [{name}] hash -> {sri}")
                            changed = True

        elif fetcher == "url":
            if args.latest_version:
                # Classify the URL (github release asset / openvsx / pypi /
                # plain) to decide how "latest" can be discovered.
                url_info = lib._url_source_info(view_comp, merged_vars)
                kind = url_info.get("kind", "plain")
                version_var = url_info.get("version_var") or "version"
                new_ver: Optional[str] = None

                if kind == "github":
                    owner = url_info.get("owner", "")
                    repo = url_info.get("repo", "")
                    tags = lib.gh_release_tags(owner, repo) if owner and repo else []
                    prefix = str(merged_vars.get("releasePrefix") or "")
                    suffix = str(merged_vars.get("releaseSuffix") or "")
                    if prefix or suffix:
                        # First tag matching the configured affixes wins.
                        tag = next(
                            (
                                t
                                for t in tags
                                if t.startswith(prefix) and t.endswith(suffix)
                            ),
                            None,
                        )
                    else:
                        tag = tags[0] if tags else None
                    if tag:
                        # Proton-cachyos style: extract base+release from tag
                        mid = tag
                        if prefix and mid.startswith(prefix):
                            mid = mid[len(prefix) :]
                        if suffix and mid.endswith(suffix):
                            mid = mid[: -len(suffix)]
                        parts = mid.split("-")
                        if (
                            len(parts) >= 2
                            and "base" in merged_vars
                            and "release" in merged_vars
                        ):
                            lib.eprint(
                                f"  [{name}] latest tag: {tag} (base={parts[0]}, release={parts[-1]})"
                            )
                            vs = target_dict.setdefault("variables", {})
                            vs["base"] = parts[0]
                            vs["release"] = parts[-1]
                            changed = True
                            # NOTE(review): unlike the other branches this
                            # prefetch is not gated on args.prefetch —
                            # confirm that is intended.
                            merged_vars2, merged_srcs2, _ = lib.merged_view(
                                spec, variant
                            )
                            view2 = merged_srcs2.get(name, view_comp)
                            sri = lib.prefetch_source(view2, merged_vars2)
                            if sri:
                                comp["hash"] = sri
                                lib.eprint(f"  [{name}] hash -> {sri}")
                                changed = True
                        else:
                            # Tag is a single version token: treat as new_ver.
                            new_ver = tag
                            tag = None  # avoid fall-through

                elif kind == "openvsx":
                    publisher = url_info.get("publisher", "")
                    ext_name = url_info.get("ext_name", "")
                    new_ver = lib.openvsx_latest_version(publisher, ext_name)

                elif kind == "plain":
                    lib.eprint(
                        f"  [{name}] url (plain): cannot auto-detect version; use --set"
                    )

                if new_ver:
                    lib.eprint(f"  [{name}] latest version: {new_ver}")
                    vs = target_dict.setdefault("variables", {})
                    vs[version_var] = new_ver
                    changed = True
                    if args.prefetch:
                        # Re-render with updated variable
                        merged_vars2, merged_srcs2, _ = lib.merged_view(spec, variant)
                        view2 = merged_srcs2.get(name, view_comp)
                        sri = lib.prefetch_source(view2, merged_vars2)
                        if sri:
                            comp["hash"] = sri
                            lib.eprint(f"  [{name}] hash -> {sri}")
                            changed = True

            elif args.url_prefetch or args.prefetch:
                # Hash-only refresh for url sources (no version change).
                rendered = lib.render(view_comp, merged_vars)
                url = rendered.get("url") or rendered.get("urlTemplate") or ""
                if not url:
                    lib.eprint(f"  [{name}] no url/urlTemplate for url fetcher")
                else:
                    sri = lib.prefetch_source(view_comp, merged_vars)
                    if sri:
                        comp["hash"] = sri
                        lib.eprint(f"  [{name}] hash -> {sri}")
                        changed = True

        elif fetcher == "pypi":
            if args.latest_version:
                # Package name: explicit source field, then variables, then
                # the component name itself as last resort.
                pkg_name = view_comp.get("name") or str(merged_vars.get("name") or name)
                new_ver = lib.pypi_latest_version(pkg_name)
                if new_ver:
                    version_var = (
                        lib._url_source_info(view_comp, merged_vars).get("version_var")
                        or "version"
                    )
                    cur_ver = str(merged_vars.get(version_var) or "")
                    if new_ver == cur_ver:
                        lib.eprint(f"  [{name}] pypi: already at {new_ver}")
                    else:
                        lib.eprint(f"  [{name}] pypi: {cur_ver} -> {new_ver}")
                        vs = target_dict.setdefault("variables", {})
                        vs[version_var] = new_ver
                        changed = True
                        if args.prefetch:
                            sri = lib.pypi_hash(pkg_name, new_ver)
                            if sri:
                                comp["hash"] = sri
                                lib.eprint(f"  [{name}] hash -> {sri}")
                                changed = True
                            else:
                                lib.eprint(f"  [{name}] pypi hash prefetch failed")
                else:
                    lib.eprint(
                        f"  [{name}] pypi: could not fetch latest version for {pkg_name!r}"
                    )
            elif args.url_prefetch or args.prefetch:
                lib.eprint(
                    f"  [{name}] pypi: use --latest-version --prefetch to update hash"
                )

    return changed
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> int:
    """CLI entry point: parse arguments, apply updates, save and/or print.

    Returns a process exit status (0 on success, 1 when --file is missing).
    """
    ap = argparse.ArgumentParser(
        description="Update version.json files",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__.split("\n", 2)[2],  # show the usage block as epilog
    )
    ap.add_argument(
        "--file", required=True, metavar="PATH", help="Path to version.json"
    )
    ap.add_argument(
        "--variant", metavar="NAME", help="Variant to target (default: base)"
    )
    ap.add_argument(
        "--component",
        dest="components",
        action="append",
        metavar="NAME",
        help="Limit to specific component(s); can be repeated",
    )
    ap.add_argument(
        "--github-latest-release",
        action="store_true",
        help="Update GitHub sources to latest release tag",
    )
    ap.add_argument(
        "--github-latest-tag",
        action="store_true",
        help="Update GitHub sources to latest tag",
    )
    ap.add_argument(
        "--github-latest-commit",
        action="store_true",
        help="Update GitHub sources to HEAD commit",
    )
    ap.add_argument(
        "--tag-regex",
        metavar="REGEX",
        help="Filter tags (used with --github-latest-tag)",
    )
    ap.add_argument(
        "--set-branch",
        metavar="BRANCH",
        default=None,
        help=(
            "Set the branch field on github/git sources, resolve its HEAD commit, "
            "and (with --prefetch) recompute the hash. "
            "Pass an empty string ('') to clear the branch and switch back to tag/release tracking."
        ),
    )
    ap.add_argument(
        "--git-latest",
        action="store_true",
        help="Update fetchgit sources to latest HEAD commit",
    )
    ap.add_argument(
        "--latest-version",
        action="store_true",
        help=(
            "Fetch the latest version from upstream (PyPI, Open VSX, GitHub releases) "
            "and update the version variable. Use with --prefetch to also recompute the hash."
        ),
    )
    ap.add_argument(
        "--url-prefetch",
        action="store_true",
        help="Recompute hash for url/urlTemplate sources",
    )
    ap.add_argument(
        "--prefetch",
        action="store_true",
        help="After updating ref, also recompute hash",
    )
    ap.add_argument(
        "--set",
        dest="sets",
        action="append",
        default=[],
        metavar="KEY=VALUE",
        help="Set a field (dot-path relative to base or --variant). Can be repeated.",
    )
    ap.add_argument(
        "--dry-run", action="store_true", help="Show changes without writing"
    )
    ap.add_argument(
        "--print",
        dest="do_print",
        action="store_true",
        help="Print resulting JSON to stdout",
    )
    args = ap.parse_args()

    path = Path(args.file)
    if not path.exists():
        lib.eprint(f"File not found: {path}")
        return 1

    spec = lib.load_json(path)
    lib.eprint(f"Loaded: {path}")

    # Apply --set mutations (into the variant dict when --variant is given,
    # creating it on demand; otherwise into the base spec).
    target = spec
    if args.variant:
        target = spec.setdefault("variants", {}).setdefault(args.variant, {})
    changed = _apply_set_pairs(target, args.sets)

    # Update refs/hashes
    if update_components(spec, args.variant, args.components, args):
        changed = True

    if changed:
        if args.dry_run:
            lib.eprint("Dry run: no changes written.")
        else:
            lib.save_json(path, spec)
            lib.eprint(f"Saved: {path}")
    else:
        lib.eprint("No changes.")

    if args.do_print:
        # Local import: json is only needed for this optional output path.
        import json

        print(json.dumps(spec, indent=2, ensure_ascii=False))

    return 0
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    try:
        sys.exit(main())
    except KeyboardInterrupt:
        # Conventional exit status for SIGINT (128 + signal 2).
        sys.exit(130)
|
||||||
@@ -1,416 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Unified version.json updater (TUI-friendly core logic).
|
|
||||||
|
|
||||||
Improvements:
|
|
||||||
- Correctly merges base + variant variables and sources (component-wise deep merge)
|
|
||||||
- Updates are written back into the correct dictionary:
|
|
||||||
- Base: top-level spec["sources"][name]
|
|
||||||
- Variant: spec["variants"][variant]["sources"][name] (created if missing)
|
|
||||||
- Hash prefetch uses the merged view with rendered variables
|
|
||||||
|
|
||||||
Supports:
|
|
||||||
- Updating GitHub components to latest release tag, latest tag, or latest commit
|
|
||||||
- Updating Git (fetchgit) components to latest commit on default branch
|
|
||||||
- Recomputing SRI hash for url/urlTemplate, github tarballs, and fetchgit sources
|
|
||||||
- Setting arbitrary fields (variables.* or sources.*.*) via --set path=value
|
|
||||||
- Operating on a specific variant or the base (top-level) of a version.json
|
|
||||||
|
|
||||||
Requirements:
|
|
||||||
- nix-prefetch-url (or `nix prefetch-url`) and `nix hash to-sri` for URL hashing
|
|
||||||
- nix-prefetch-git + `nix hash to-sri` for Git fetchers
|
|
||||||
- Network access for GitHub API (optional GITHUB_TOKEN env var)
|
|
||||||
|
|
||||||
Examples:
|
|
||||||
scripts/update_versions.py --file packages/edk2/version.json --github-latest-release --prefetch
|
|
||||||
scripts/update_versions.py --file packages/edk2/version.json --component edk2 --github-latest-commit --prefetch
|
|
||||||
scripts/update_versions.py --file packages/uboot/version.json --url-prefetch
|
|
||||||
scripts/update_versions.py --file packages/proton-cachyos/version.json --variant cachyos-v4 --set variables.base=10.0
|
|
||||||
scripts/update_versions.py --file packages/linux-cachyos/version.json --component zfs --git-latest --prefetch
|
|
||||||
"""
|
|
||||||
import argparse
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import urllib.request
|
|
||||||
import urllib.error
|
|
||||||
from typing import Any, Dict, List, Optional, Tuple
|
|
||||||
|
|
||||||
Json = Dict[str, Any]
|
|
||||||
|
|
||||||
|
|
||||||
def eprintln(*args, **kwargs):
    """Like print(), but writes to stderr instead of stdout."""
    print(*args, **kwargs, file=sys.stderr)
|
|
||||||
|
|
||||||
|
|
||||||
def load_json(path: str) -> Json:
    """Read and parse the UTF-8 encoded JSON file at *path*."""
    with open(path, "r", encoding="utf-8") as handle:
        return json.load(handle)
|
|
||||||
|
|
||||||
|
|
||||||
def save_json(path: str, data: Json):
    """Write *data* to *path* as pretty-printed JSON with a trailing newline."""
    with open(path, "w", encoding="utf-8") as handle:
        json.dump(data, handle, indent=2, ensure_ascii=False)
        handle.write("\n")
|
|
||||||
|
|
||||||
|
|
||||||
def deep_get(o: Json, path: List[str], default=None):
    """Walk nested dicts in *o* along *path*; return *default* if any step
    is missing or not a dict."""
    node = o
    for key in path:
        if not isinstance(node, dict) or key not in node:
            return default
        node = node[key]
    return node
|
|
||||||
|
|
||||||
|
|
||||||
def deep_set(o: Json, path: List[str], value: Any):
    """Assign *value* at nested *path* in *o*, creating (or replacing
    non-dict) intermediate dictionaries along the way."""
    node = o
    for key in path[:-1]:
        child = node.get(key)
        if not isinstance(child, dict):
            child = {}
            node[key] = child
        node = child
    node[path[-1]] = value
|
|
||||||
|
|
||||||
|
|
||||||
def parse_set_pair(pair: str) -> Tuple[List[str], str]:
    """Split an 'a.b.c=value' CLI pair into (['a', 'b', 'c'], 'value').

    Raises ValueError when *pair* contains no '='.  Only the first '='
    separates key from value, so values may themselves contain '='.
    """
    key, sep, val = pair.partition("=")
    if not sep:
        raise ValueError(f"--set requires KEY=VALUE, got: {pair}")
    return key.strip().split("."), val
|
|
||||||
|
|
||||||
|
|
||||||
def render_templates(value: Any, variables: Dict[str, Any]) -> Any:
    """Recursively substitute ${var} placeholders with values from
    *variables* across strings, dicts, and lists.

    Unknown placeholders are left verbatim; non-string leaves pass through
    unchanged.
    """
    if isinstance(value, dict):
        return {key: render_templates(item, variables) for key, item in value.items()}
    if isinstance(value, list):
        return [render_templates(item, variables) for item in value]
    if isinstance(value, str):
        return re.sub(
            r"\$\{([^}]+)\}",
            lambda m: str(variables.get(m.group(1), m.group(0))),
            value,
        )
    return value
|
|
||||||
|
|
||||||
|
|
||||||
def http_get_json(url: str, token: Optional[str] = None) -> Any:
    """GET *url* and return its body decoded as JSON.

    Sends the GitHub-flavored Accept header; *token*, when given, is added
    as a Bearer Authorization header.  Network/HTTP errors propagate to the
    caller (urllib.error.URLError / HTTPError).
    """
    req = urllib.request.Request(url, headers={"Accept": "application/vnd.github+json"})
    if token:
        req.add_header("Authorization", f"Bearer {token}")
    with urllib.request.urlopen(req) as resp:
        return json.loads(resp.read().decode("utf-8"))
|
|
||||||
|
|
||||||
|
|
||||||
def github_latest_release_tag(owner: str, repo: str, token: Optional[str] = None) -> Optional[str]:
    """Return the tag name of the repo's latest GitHub release, or None.

    Uses the REST API; *token* (if given) raises rate limits.  Previously
    only HTTPError was caught, so plain network failures (DNS, refused
    connection — urllib.error.URLError) crashed the updater; now both are
    reported and turned into None, matching github_head_commit's tolerance.
    """
    url = f"https://api.github.com/repos/{owner}/{repo}/releases/latest"
    try:
        data = http_get_json(url, token)
        return data.get("tag_name")
    except urllib.error.URLError as e:  # HTTPError is a URLError subclass
        eprintln(f"GitHub latest release failed: {e}")
        return None
|
|
||||||
|
|
||||||
|
|
||||||
def github_latest_tag(owner: str, repo: str, token: Optional[str] = None, tag_regex: Optional[str] = None) -> Optional[str]:
    """Return the first tag listed by the GitHub API, optionally filtered.

    The first entry is assumed to be the newest — TODO confirm; the tags
    endpoint's ordering is not strictly chronological.  *tag_regex* keeps
    only matching tag names.  Previously only HTTPError was caught, so
    plain network failures (urllib.error.URLError) crashed the updater;
    now both are reported and turned into None.
    """
    url = f"https://api.github.com/repos/{owner}/{repo}/tags?per_page=100"
    try:
        data = http_get_json(url, token)
        tags = [t.get("name") for t in data if "name" in t]
        if tag_regex:
            rx = re.compile(tag_regex)
            tags = [t for t in tags if rx.search(t)]
        return tags[0] if tags else None
    except urllib.error.URLError as e:  # HTTPError is a URLError subclass
        eprintln(f"GitHub tags failed: {e}")
        return None
|
|
||||||
|
|
||||||
|
|
||||||
def github_head_commit(owner: str, repo: str, token: Optional[str] = None) -> Optional[str]:
    """Return the sha of the repo's default-branch HEAD, or None on failure.

    NOTE: *token* is accepted for signature symmetry with the API helpers
    but is unused here — the lookup goes through `git ls-remote`.
    """
    # Prefer git ls-remote to avoid API limits
    url = f"https://github.com/{owner}/{repo}.git"
    try:
        out = subprocess.check_output(["git", "ls-remote", url, "HEAD"], text=True).strip()
        if out:
            # Output line is "<sha>\tHEAD"; the first field is the commit sha.
            sha = out.split()[0]
            return sha
    except Exception as e:
        eprintln(f"git ls-remote failed for {url}: {e}")
    return None
|
|
||||||
|
|
||||||
|
|
||||||
def run_cmd_get_output(args: List[str]) -> str:
    """Echo the command on stderr, run it, and return its stripped stdout.

    Raises subprocess.CalledProcessError on a non-zero exit status.
    """
    eprintln("Running: " + " ".join(args))
    output = subprocess.check_output(args, text=True)
    return output.strip()
|
|
||||||
|
|
||||||
|
|
||||||
def nix_prefetch_url(url: str) -> Optional[str]:
    """Download *url* through nix and return its sha256 in SRI form, or None."""
    # returns SRI (sha256-...)
    base32 = None
    try:
        # Legacy CLI first: prints the base32 sha256 digest on stdout.
        base32 = run_cmd_get_output(["nix-prefetch-url", "--type", "sha256", url])
    except Exception:
        try:
            # Fall back to the modern `nix prefetch-url` subcommand.
            base32 = run_cmd_get_output(["nix", "prefetch-url", url])
        except Exception as e:
            eprintln(f"Failed to prefetch url: {url}: {e}")
            return None
    try:
        # Normalize the base32 digest to SRI ("sha256-...") form.
        sri = run_cmd_get_output(["nix", "hash", "to-sri", "--type", "sha256", base32])
        return sri
    except Exception as e:
        eprintln(f"Failed to convert base32 to SRI: {e}")
        return None
|
|
||||||
|
|
||||||
|
|
||||||
def github_tarball_url(owner: str, repo: str, ref: str) -> str:
    """Return the codeload.github.com tar.gz URL for *owner*/*repo* at *ref*.

    codeload serves stable tarball downloads, unlike the API redirects.
    """
    return "/".join(
        ["https://codeload.github.com", owner, repo, "tar.gz", ref]
    )
|
|
||||||
|
|
||||||
|
|
||||||
def nix_prefetch_github_tarball(owner: str, repo: str, ref: str) -> Optional[str]:
    """Prefetch the GitHub tarball at *ref* and return its SRI hash (or None)."""
    return nix_prefetch_url(github_tarball_url(owner, repo, ref))
|
|
||||||
|
|
||||||
|
|
||||||
def nix_prefetch_git(url: str, rev: str) -> Optional[str]:
    """Prefetch the git repo *url* at commit *rev* and return its SRI hash,
    or None on any failure."""
    # returns SRI
    try:
        out = run_cmd_get_output(["nix-prefetch-git", "--no-deepClone", "--rev", rev, url])
        try:
            # nix-prefetch-git normally emits a JSON object with the digest.
            data = json.loads(out)
            base32 = data.get("sha256") or data.get("hash")
        except Exception:
            # Some versions mix log lines into stdout; assume the digest is
            # the last non-empty line.
            base32 = out.splitlines()[-1].strip()
        if not base32:
            eprintln(f"Could not parse nix-prefetch-git output for {url}@{rev}")
            return None
        # Normalize to SRI ("sha256-...") form.
        sri = run_cmd_get_output(["nix", "hash", "to-sri", "--type", "sha256", base32])
        return sri
    except Exception as e:
        eprintln(f"nix-prefetch-git failed for {url}@{rev}: {e}")
        return None
|
|
||||||
|
|
||||||
|
|
||||||
# -------------------- Merging logic (match lib/versioning.nix) --------------------
|
|
||||||
|
|
||||||
def deep_merge(a: Dict[str, Any], b: Dict[str, Any]) -> Dict[str, Any]:
    """Return a new dict with *b* layered over *a*; nested dicts are merged
    recursively, any other overlapping value is taken from *b*."""
    merged = dict(a)
    for key, override in b.items():
        existing = merged.get(key)
        if isinstance(existing, dict) and isinstance(override, dict):
            merged[key] = deep_merge(existing, override)
        else:
            merged[key] = override
    return merged
|
|
||||||
|
|
||||||
|
|
||||||
def merge_sources(base_sources: Dict[str, Any], overrides: Dict[str, Any]) -> Dict[str, Any]:
    """Combine base sources with per-variant overrides.

    Mirrors lib/versioning.nix: when a component exists on both sides and
    both values are dicts they are deep-merged; otherwise the override wins.
    Components present on only one side are carried through unchanged.
    """
    result: Dict[str, Any] = {}
    for name in set(base_sources) | set(overrides):
        if name not in overrides:
            result[name] = base_sources[name]
        elif name not in base_sources:
            result[name] = overrides[name]
        elif isinstance(base_sources[name], dict) and isinstance(overrides[name], dict):
            result[name] = deep_merge(base_sources[name], overrides[name])
        else:
            result[name] = overrides[name]
    return result
|
|
||||||
|
|
||||||
|
|
||||||
def merged_view(spec: Json, variant: Optional[str]) -> Tuple[Dict[str, Any], Dict[str, Any], Json, List[str]]:
    """
    Compute the effective (merged) view of a version spec.

    Returns (merged_variables, merged_sources, target_dict_to_write, base_path):
    - merged_variables / merged_sources: what to display and prefetch with
    - target_dict_to_write: the dict changes should be written into
      (the spec itself for base, or variants[variant] for a variant)
    - base_path: JSON-path tokens of the write target ([] for base)

    Raises ValueError when the requested variant does not exist.
    """
    base_vars = spec.get("variables", {}) or {}
    base_sources = spec.get("sources", {}) or {}

    if not variant:
        return dict(base_vars), dict(base_sources), spec, []

    variant_dict = spec.get("variants", {}).get(variant)
    if not isinstance(variant_dict, dict):
        raise ValueError(f"Variant '{variant}' not found")

    merged_vars = {**base_vars, **(variant_dict.get("variables", {}) or {})}
    merged_srcs = merge_sources(base_sources, variant_dict.get("sources", {}) or {})
    return merged_vars, merged_srcs, variant_dict, ["variants", variant]
|
|
||||||
|
|
||||||
|
|
||||||
# -------------------- Update operations --------------------
|
|
||||||
|
|
||||||
def update_components(spec: Json,
                      variant: Optional[str],
                      components: Optional[List[str]],
                      args: argparse.Namespace) -> bool:
    """Update refs and/or hashes for components of *spec* per the CLI flags.

    Reads through the merged view (base + variant overrides) but always
    writes into the selected target: the base spec, or the variant's
    override dict. Returns True when anything was modified.
    """
    changed = False
    gh_token = os.environ.get("GITHUB_TOKEN")

    merged_vars, merged_srcs, target_dict, base_path = merged_view(spec, variant)
    src_names = list(merged_srcs.keys()) if not components else [c for c in components if c in merged_srcs]

    # Ensure target_dict has a sources dict to write into
    target_sources = target_dict.setdefault("sources", {})

    for name in src_names:
        view_comp = merged_srcs[name]
        fetcher = view_comp.get("fetcher", "none")

        # Ensure a writable component entry exists (always write to the
        # selected target: base or variant override)
        comp = target_sources.setdefault(name, {})
        if not isinstance(comp, dict):
            comp = target_sources[name] = {}

        if fetcher == "github":
            owner = view_comp.get("owner")
            repo = view_comp.get("repo")
            if not owner or not repo:
                eprintln(f"Component {name}: missing owner/repo for github fetcher")
                continue

            new_ref = None
            ref_kind = None
            if args.github_latest_release:
                tag = github_latest_release_tag(owner, repo, gh_token)
                if tag:
                    new_ref = tag
                    ref_kind = "tag"
            elif args.github_latest_tag:
                tag = github_latest_tag(owner, repo, gh_token, args.tag_regex)
                if tag:
                    new_ref = tag
                    ref_kind = "tag"
            elif args.github_latest_commit:
                rev = github_head_commit(owner, repo, gh_token)
                if rev:
                    new_ref = rev
                    ref_kind = "rev"

            if new_ref:
                # 'tag' and 'rev' are mutually exclusive in the schema:
                # writing one drops the other from the override entry.
                if ref_kind == "tag":
                    comp["tag"] = new_ref
                    comp.pop("rev", None)
                else:
                    comp["rev"] = new_ref
                    comp.pop("tag", None)
                eprintln(f"Component {name}: set {ref_kind}={new_ref}")
                changed = True

            if args.prefetch:
                ref = comp.get("tag") or comp.get("rev")
                if not ref:
                    # fallback to merged view if not in override
                    ref = view_comp.get("tag") or view_comp.get("rev")
                if ref:
                    sri = nix_prefetch_github_tarball(owner, repo, ref)
                    if sri:
                        comp["hash"] = sri
                        eprintln(f"Component {name}: updated hash={sri}")
                        changed = True

        elif fetcher == "git":
            url = view_comp.get("url")
            if not url:
                eprintln(f"Component {name}: missing url for git fetcher")
                continue
            if args.git_latest:
                # BUG FIX: removed a stray placeholder call
                # github_head_commit(owner="", repo="", token=None) whose
                # result was unused and which issued a pointless GitHub API
                # request before the ls-remote below.
                try:
                    out = subprocess.check_output(["git", "ls-remote", url, "HEAD"], text=True).strip()
                    if out:
                        new_rev = out.split()[0]
                        comp["rev"] = new_rev
                        eprintln(f"Component {name}: set rev={new_rev}")
                        changed = True
                        if args.prefetch:
                            sri = nix_prefetch_git(url, new_rev)
                            if sri:
                                comp["hash"] = sri
                                eprintln(f"Component {name}: updated hash={sri}")
                                changed = True
                except Exception as e:
                    eprintln(f"git ls-remote failed for {name}: {e}")

        elif fetcher == "url":
            if args.url_prefetch or args.prefetch:
                rendered_comp = render_templates(view_comp, merged_vars)
                url = rendered_comp.get("url") or rendered_comp.get("urlTemplate")
                if not url:
                    eprintln(f"Component {name}: missing url/urlTemplate for url fetcher")
                else:
                    sri = nix_prefetch_url(url)
                    if sri:
                        comp["hash"] = sri
                        eprintln(f"Component {name}: updated hash={sri}")
                        changed = True

        elif fetcher == "pypi":
            if args.prefetch:
                eprintln(f"Component {name} (pypi): prefetch not implemented; use nix-prefetch-pypi or set hash manually.")

        else:
            # fetcher == "none" or other: no-op unless user --set a value
            pass

    return changed
|
|
||||||
|
|
||||||
|
|
||||||
# -------------------- Main --------------------
|
|
||||||
|
|
||||||
def main():
    """CLI entry point: apply --set mutations and fetcher updates to a version.json."""
    ap = argparse.ArgumentParser(description="Update unified version.json files")
    ap.add_argument("--file", required=True, help="Path to version.json")
    ap.add_argument("--variant", help="Variant name to update (default: base/top-level)")
    ap.add_argument("--component", dest="components", action="append", help="Limit to specific component(s); can be repeated")
    ap.add_argument("--github-latest-release", action="store_true", help="Update GitHub components to latest release tag")
    ap.add_argument("--github-latest-tag", action="store_true", help="Update GitHub components to latest tag")
    ap.add_argument("--github-latest-commit", action="store_true", help="Update GitHub components to HEAD commit")
    ap.add_argument("--tag-regex", help="Regex to filter tags for --github-latest-tag")
    ap.add_argument("--git-latest", action="store_true", help="Update fetchgit components to latest commit (HEAD)")
    ap.add_argument("--url-prefetch", action="store_true", help="Recompute hash for url/urlTemplate components")
    ap.add_argument("--prefetch", action="store_true", help="After changing refs, recompute hash as needed")
    ap.add_argument("--set", dest="sets", action="append", default=[], help="Set a field: KEY=VALUE (dot path), relative to variant/base. Value is treated as string.")
    ap.add_argument("--dry-run", action="store_true", help="Do not write changes")
    ap.add_argument("--print", dest="do_print", action="store_true", help="Print result JSON to stdout")
    args = ap.parse_args()

    path = args.file
    spec = load_json(path)

    # Apply --set mutations (relative to base or selected variant)
    target = spec if not args.variant else spec.setdefault("variants", {}).setdefault(args.variant, {})
    changed = False
    for pair in args.sets:
        path_tokens, value = parse_set_pair(pair)
        deep_set(target, path_tokens, value)
        eprintln(f"Set {'.'.join((['variants', args.variant] if args.variant else []) + path_tokens)} = {value}")
        changed = True

    # Update refs/hashes based on fetcher type and flags with merged view
    changed = update_components(spec, args.variant, args.components, args) or changed

    if changed and not args.dry_run:
        save_json(path, spec)
        eprintln(f"Wrote changes to {path}")
    elif changed:
        # BUG FIX: previously this case fell through to "No changes made.",
        # which was misleading under --dry-run when changes WERE computed
        # but intentionally not written.
        eprintln(f"Dry run: changes computed but not written to {path}")
    else:
        eprintln("No changes made.")

    if args.do_print:
        print(json.dumps(spec, indent=2, ensure_ascii=False))
|
|
||||||
|
|
||||||
|
|
||||||
# Script entry point: translate Ctrl-C into the conventional SIGINT exit
# status (128 + 2 = 130) instead of dumping a traceback.
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        sys.exit(130)
|
|
||||||
File diff suppressed because it is too large
Load Diff
161
update.py
161
update.py
@@ -1,161 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
import json
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
from pathlib import Path
|
|
||||||
from urllib.request import Request, urlopen
|
|
||||||
from urllib.error import HTTPError
|
|
||||||
|
|
||||||
# REST API roots used below for release/commit lookups on the two supported forges.
GITHUB_API = "https://api.github.com"
CODEBERG_API = "https://codeberg.org/api/v1"
|
|
||||||
|
|
||||||
def run(cmd):
    """Execute *cmd*, returning its stripped stdout; raise RuntimeError on failure."""
    proc = subprocess.run(cmd, capture_output=True, text=True)
    if proc.returncode:
        joined = " ".join(cmd)
        raise RuntimeError(f"Command failed: {joined}\n{proc.stderr.strip()}")
    return proc.stdout.strip()
|
|
||||||
|
|
||||||
def http_get_json(url, token=None):
    """GET *url* and decode the JSON response.

    When *token* is given it is sent as a `token ...` Authorization header.
    """
    request = Request(url, headers={"Accept": "application/json"})
    if token:
        request.add_header("Authorization", f"token {token}")
    with urlopen(request) as response:
        return json.load(response)
|
|
||||||
|
|
||||||
def github_latest_release(owner, repo, token=None):
    """Return GitHub's 'latest release' JSON object for owner/repo."""
    return http_get_json(f"{GITHUB_API}/repos/{owner}/{repo}/releases/latest", token=token)
|
|
||||||
|
|
||||||
def github_latest_commit(owner, repo, token=None):
    """Return the SHA of the newest commit listed by the GitHub commits API."""
    commits = http_get_json(f"{GITHUB_API}/repos/{owner}/{repo}/commits?per_page=1", token=token)
    return commits[0]["sha"]
|
|
||||||
|
|
||||||
def codeberg_latest_release(owner, repo, token=None):
    """Return Codeberg's 'latest release' JSON object for owner/repo."""
    return http_get_json(f"{CODEBERG_API}/repos/{owner}/{repo}/releases/latest", token=token)
|
|
||||||
|
|
||||||
def codeberg_latest_commit(owner, repo, token=None):
    """Return the SHA of the newest commit listed by the Codeberg commits API."""
    commits = http_get_json(f"{CODEBERG_API}/repos/{owner}/{repo}/commits?limit=1", token=token)
    return commits[0]["sha"]
|
|
||||||
|
|
||||||
def nix_hash_to_sri(hash_str):
    """Convert a nix-base32 sha256 hash to SRI form via `nix hash to-sri`."""
    cmd = ["nix", "hash", "to-sri", "--type", "sha256", hash_str]
    return run(cmd)
|
|
||||||
|
|
||||||
def prefetch_git(url, rev):
    """Prefetch a git revision (including submodules) and return its SRI hash."""
    info = json.loads(run(["nix-prefetch-git", "--url", url, "--rev", rev, "--fetch-submodules"]))
    return nix_hash_to_sri(info["sha256"])
|
|
||||||
|
|
||||||
def prefetch_url(url, unpack=False):
    """Prefetch *url* with nix-prefetch-url and return its hash in SRI form.

    With unpack=True the archive is unpacked before hashing (NAR hash).
    """
    flags = ["--unpack"] if unpack else []
    return nix_hash_to_sri(run(["nix-prefetch-url", *flags, url]))
|
|
||||||
|
|
||||||
def is_archive_url(url):
    """Return True when *url* names an archive that nix-prefetch-url should unpack."""
    return url.endswith((".tar.gz", ".tar.xz", ".tar.bz2", ".zip"))
|
|
||||||
|
|
||||||
def build_repo_url(location, owner, repo):
    """Return the git clone URL for owner/repo on the given forge.

    Raises ValueError for an unsupported *location*.
    """
    hosts = {"github": "github.com", "codeberg": "codeberg.org"}
    try:
        host = hosts[location]
    except KeyError:
        raise ValueError(f"Unknown repo location: {location}") from None
    return f"https://{host}/{owner}/{repo}.git"
|
|
||||||
|
|
||||||
def build_release_tarball_url(location, owner, repo, tag):
    """Return the release-tarball download URL for *tag* on the given forge.

    Raises ValueError for an unsupported *location*.
    """
    templates = {
        "github": "https://github.com/{o}/{r}/archive/refs/tags/{t}.tar.gz",
        "codeberg": "https://codeberg.org/{o}/{r}/archive/{t}.tar.gz",
    }
    if location not in templates:
        raise ValueError(f"Unknown repo location: {location}")
    return templates[location].format(o=owner, r=repo, t=tag)
|
|
||||||
|
|
||||||
def update_entry(name, entry, gh_token=None, cb_token=None):
    """Refresh a single version entry in place.

    Direct-URL entries always get their hash recomputed; forge entries
    (github/codeberg) are bumped to the latest release tag or latest commit,
    re-prefetching the hash only when the ref actually changed.
    Returns True when the entry was modified.
    """
    location = entry.get("location")
    owner, repo = entry.get("owner"), entry.get("repo")
    url = entry.get("url")

    # Plain URL / archive source: recompute the hash unconditionally.
    if url and location in ("url", "archive"):
        entry["hash"] = prefetch_url(url, unpack=is_archive_url(url))
        return True

    if location not in ("github", "codeberg"):
        return False

    on_github = location == "github"

    # Tag-pinned entry: follow the latest release.
    if entry.get("tag"):
        release = (github_latest_release(owner, repo, token=gh_token)
                   if on_github
                   else codeberg_latest_release(owner, repo, token=cb_token))
        latest_tag = release["tag_name"]
        if latest_tag != entry["tag"]:
            entry["tag"] = latest_tag
            tarball = build_release_tarball_url(location, owner, repo, latest_tag)
            entry["hash"] = prefetch_url(tarball, unpack=True)
            return True

    # Rev-pinned entry: follow the latest commit.
    if entry.get("rev"):
        head = (github_latest_commit(owner, repo, token=gh_token)
                if on_github
                else codeberg_latest_commit(owner, repo, token=cb_token))
        if head != entry["rev"]:
            entry["rev"] = head
            entry["hash"] = prefetch_git(build_repo_url(location, owner, repo), head)
            return True

    return False
|
|
||||||
|
|
||||||
def process_file(path, gh_token=None, cb_token=None):
    """Update every entry in one version JSON file.

    Failures for individual entries are logged as warnings and skipped.
    The file is rewritten only when at least one entry changed.
    Returns True when the file was modified.
    """
    data = json.loads(path.read_text())
    changed = False
    for name, entry in data.items():
        try:
            # BUG FIX: accumulate with 'or' — previously each iteration
            # OVERWROTE 'changed', so a later unchanged entry discarded an
            # earlier update and the file was never written back.
            changed = update_entry(name, entry, gh_token=gh_token, cb_token=cb_token) or changed
        except HTTPError as e:
            print(f"[WARN] {path}: {name}: HTTP error {e.code}", file=sys.stderr)
        except Exception as e:
            print(f"[WARN] {path}: {name}: {e}", file=sys.stderr)
    if changed:
        path.write_text(json.dumps(data, indent=2, sort_keys=True) + "\n")
    return changed
|
|
||||||
|
|
||||||
def main(root):
    """Scan *root* recursively for version*.json files and update each one.

    Returns 0 on success, 1 when no version files were found.
    """
    # Tokens are optional; wire these to GITHUB_TOKEN / CODEBERG_TOKEN
    # environment variables if API rate limits become a problem.
    gh_token = None
    cb_token = None

    candidates = list(Path(root).rglob("version*.json"))
    if not candidates:
        print("No version*.json files found")
        return 1

    updated = 0
    for version_file in candidates:
        if process_file(version_file, gh_token=gh_token, cb_token=cb_token):
            print(f"Updated: {version_file}")
            updated += 1

    print(f"Done. Updated {updated} file(s).")
    return 0
|
|
||||||
|
|
||||||
if __name__ == "__main__":
    # Require exactly one argument: the root folder to scan for version*.json.
    if len(sys.argv) != 2:
        print(f"Usage: {sys.argv[0]} <root-folder>")
        sys.exit(2)
    # Propagate main()'s return value as the process exit status.
    sys.exit(main(sys.argv[1]))
|
|
||||||
Reference in New Issue
Block a user