# NOTE(review): removed duplicated web-scrape page metadata ("89 lines / 2.0 KiB /
# Nix / Executable File") that preceded the module — it was not valid Nix syntax.
# opencode module: enables the opencode CLI with a local llama-server provider
# and a set of MCP servers (NixOS docs, Home Assistant, code runner).
# NOTE(review): assumes a Snowfall-lib style layout (`namespace` arg, options
# nested under config.${namespace}) — confirm against the flake's lib.
{
  config,
  lib,
  pkgs,
  namespace,
  ...
}:
let
  cfg = config.${namespace}.programs.opencode;
  # Shared host/port registry from the project's extended lib.
  net = lib.${namespace}.network;
in
{
  options.${namespace}.programs.opencode = {
    enable = lib.mkEnableOption "opencode";
  };

  config = lib.mkIf cfg.enable {
    # Home Assistant long-lived access token, decrypted by sops at activation.
    sops.secrets."hass-mcp/token" = { };

    # Env file consumed by the hass-mcp wrapper below. mode 0600 because the
    # rendered file contains the plaintext token.
    sops.templates."hass-mcp.env" = {
      mode = "0600";
      content = ''
        HA_URL=http://${net.hosts.nuc.lan}:${toString net.ports.nuc.homeAssistant}
        HA_TOKEN=${config.sops.placeholder."hass-mcp/token"}
      '';
    };

    programs.opencode = {
      enable = true;
      enableMcpIntegration = true;
      settings = {
        provider = {
          # Local OpenAI-compatible endpoint served by llama.cpp on the NAS.
          nas = {
            npm = "@ai-sdk/openai-compatible";
            name = "llama-server (local)";
            options = {
              baseURL = "http://${net.hosts.nas.lan}:${toString net.ports.nas.llamaCpp}/v1";
            };
            models = {
              "gemma-4-26B-A4B-it-UD-Q8_K_XL" = {
                name = "Gemma 4 26B-A4B (local)";
                modalities = {
                  input = [
                    "image"
                    "text"
                  ];
                  output = [ "text" ];
                };
                limit = {
                  context = 32768;
                  output = 8192;
                };
              };
            };
          };
        };
      };
    };

    programs.mcp = {
      enable = true;
      servers = {
        # NixOS/nixpkgs documentation search, run straight from the flake.
        nixos = {
          command = "nix";
          args = [
            "run"
            "github:utensils/mcp-nixos"
            "--"
          ];
        };
        # Wrap hass-mcp in bash so the sops-rendered env file (HA_URL/HA_TOKEN)
        # is exported before exec; `set -a` auto-exports every sourced variable.
        hass-mcp = {
          command = "bash";
          args = [
            "-c"
            "set -a; source ${config.sops.templates."hass-mcp.env".path}; set +a; exec uvx hass-mcp"
          ];
        };
        mcp-server-code-runner = {
          # FIX: the original used `${pkgs.nodejs_24}/bin/npm`, but `npm -y <pkg>`
          # does not execute a package — `npx -y <pkg>` does. With npm this
          # server could never start.
          command = "${pkgs.nodejs_24}/bin/npx";
          args = [
            "-y"
            "@iflow-mcp/mcp-server-code-runner"
          ];
        };
      };
    };
  };
}
|