This commit is contained in:
mjallen18
2026-03-24 14:41:22 -05:00
parent 4cc58ab381
commit 84eb2e3734
11 changed files with 799 additions and 48 deletions

View File

@@ -11,6 +11,17 @@ let
cfg = config.${namespace}.services.ai;
# Failure-notification script run by the update-qwen-model-notify-failure
# oneshot unit (wired via OnFailure on update-qwen-model below).
# Posts a high-priority message to the ntfy "builds" topic.
# NOTE(review): $NTFY_USER / $NTFY_PASSWORD are expected to come from the
# sops "ntfy.env" EnvironmentFile loaded by the calling unit — confirm the
# template defines exactly those variable names.
ntfyModelFailScript = pkgs.writeShellScript "update-qwen-model-notify-failure" ''
HOST="$(${pkgs.hostname}/bin/hostname)"
# -sf: silent, fail on HTTP errors; trailing `|| true` keeps this
# best-effort so a broken ntfy endpoint cannot fail the notify unit itself.
${pkgs.curl}/bin/curl -sf \
--user "$NTFY_USER:$NTFY_PASSWORD" \
-H "Title: Qwen model update FAILED on $HOST" \
-H "Priority: high" \
-H "Tags: rotating_light,robot_face" \
-d "The daily update-qwen-model job failed. Check: journalctl -u update-qwen-model.service" \
"https://ntfy.mjallen.dev/builds" || true
'';
aiConfig = lib.${namespace}.mkModule {
inherit config;
name = "ai";
@@ -127,11 +138,22 @@ let
''}";
User = "nix-apps";
Group = "jallen-nas";
EnvironmentFile = [ config.sops.templates."ntfy.env".path ];
};
unitConfig.OnFailure = "update-qwen-model-notify-failure.service";
# Run daily at 3 AM
startAt = "*-*-* 03:00:00";
};
# Companion oneshot unit: started only through OnFailure of
# update-qwen-model; it runs the ntfy notification script with the
# credentials file from the sops "ntfy.env" template in its environment.
systemd.services.update-qwen-model-notify-failure = {
description = "Notify ntfy on update-qwen-model failure";
# Dotted attrpaths merge into one serviceConfig attrset under the
# NixOS module system — equivalent to the nested form.
serviceConfig.Type = "oneshot";
serviceConfig.ExecStart = "${ntfyModelFailScript}";
serviceConfig.EnvironmentFile = [ config.sops.templates."ntfy.env".path ];
};
# Ensure model is available before llama-cpp starts
systemd.services.llama-cpp = {
after = [ "update-qwen-model.service" ];