33 lines
1.0 KiB
Nix
33 lines
1.0 KiB
Nix
{
  buildHomeAssistantComponent,
  fetchFromGitHub,
  home-assistant,
  ...
}:

# Home Assistant custom component: "Local LLM Conversation" (home-llm),
# a conversation-agent integration backed by a locally hosted LLM.
buildHomeAssistantComponent rec {
  owner = "acon96";
  domain = "llama_conversation";
  # Nixpkgs convention: version has no "v" prefix; the upstream tag carries it
  # (see `rev` below). The fetched source is unchanged, so the hash stays valid.
  version = "0.4.6";

  src = fetchFromGitHub {
    inherit owner;
    repo = "home-llm";
    rev = "v${version}";
    hash = "sha256-QmpyqNRhmnqFNiKPHm8GKuvZhbuYWDLck3eFC9MlIKQ=";
  };

  # Python packages the integration imports at run time. These belong in
  # `dependencies` (runtime), not `nativeBuildInputs` (build-time tools) —
  # buildHomeAssistantComponent checks them against the component's manifest.
  dependencies = with home-assistant.python.pkgs; [
    anthropic
    huggingface-hub
    ollama
    webcolors
  ];

  meta = {
    changelog = "https://github.com/acon96/home-llm/releases/tag/v${version}";
    # Short description per nixpkgs conventions (no leading article, no period).
    description = "Control Home Assistant with a completely local large language model acting as a personal assistant";
    homepage = "https://github.com/acon96/home-llm";
    # NOTE(review): meta.license is missing — add the upstream license
    # (lib.licenses.*) after confirming it in the home-llm repository.
    maintainers = [ ];
  };
}