Files
nix-config/packages/homeassistant/ha-local-llm/default.nix
mjallen18 3b780d4d78 bruh
2026-01-05 23:31:06 -06:00

37 lines
1.2 KiB
Nix

{
  buildHomeAssistantComponent,
  python3Packages,
  fetchFromGitHub,
  ...
}:
buildHomeAssistantComponent rec {
  owner = "acon96";
  domain = "llama_conversation";
  # Upstream tags releases with a "v" prefix. Keep `version` bare (nixpkgs
  # convention for derivation names) and re-add the prefix where the tag is
  # actually needed, so the fetched rev is still "v0.4.5".
  version = "0.4.5";

  src = fetchFromGitHub {
    inherit owner;
    repo = "home-llm";
    rev = "v${version}";
    hash = "sha256-be4cW3f30G32MGl49YGja9/SlMHdnQ9IguuY14wgKGI=";
  };

  # Runtime Python requirements declared by the component's manifest.json.
  # These belong in `dependencies` (propagated into the Home Assistant
  # environment), not `nativeBuildInputs`, which only exist at build time.
  dependencies = with python3Packages; [
    huggingface-hub
    ollama
    webcolors
  ];

  # Kept from the original for reference: loosen the pinned ollama
  # requirement in the manifest if the nixpkgs ollama version lags upstream.
  # postPatch = ''
  #   substituteInPlace custom_components/llama_conversation/manifest.json \
  #     --replace-fail '"ollama>=0.5.1"' '"ollama"'
  # '';

  meta = {
    changelog = "https://github.com/acon96/home-llm/releases/tag/v${version}";
    # One-line description per nixpkgs conventions (no trailing period).
    description = "Local LLM conversation agent integration for Home Assistant";
    homepage = "https://github.com/acon96/home-llm";
    maintainers = [ ];
  };
}