# Home Assistant custom component: "home-llm" (domain: llama_conversation).
# Source pins live in ./version.json and are resolved through the flake's
# own lib helpers (selectVariant / mkAllSources).
{
  lib,
  namespace,
  pkgs,
  buildHomeAssistantComponent,
  home-assistant,
  ...
}:
let
  inherit (lib.trivial) importJSON;
  inherit (lib.${namespace}) selectVariant mkAllSources;

  # Pinned upstream revisions, maintained out-of-band (e.g. by an update script).
  versionSpec = importJSON ./version.json;
  selected = selectVariant versionSpec null null;
  sources = mkAllSources pkgs selected;

  src-meta = selected.sources.llama_conversation;
  # Prefer the release tag; fall back to the bare commit hash for untagged pins.
  version = if src-meta ? tag then src-meta.tag else src-meta.rev;
in
buildHomeAssistantComponent {
  owner = src-meta.owner;
  domain = "llama_conversation";
  inherit version;
  src = sources.llama_conversation;

  # Runtime Python dependencies of the integration (its manifest.json
  # requirements). These were previously in nativeBuildInputs, which only
  # exposes them at build time; the component imports them at runtime, so
  # they must be propagated via `dependencies`.
  dependencies = with home-assistant.python.pkgs; [
    anthropic
    huggingface-hub
    ollama
    webcolors
  ];

  meta = {
    # NOTE(review): when `version` falls back to a commit hash (untagged pin),
    # this releases/tag URL will not resolve — acceptable best-effort.
    changelog = "https://github.com/acon96/home-llm/releases/tag/${version}";
    # Short one-line summary per nixpkgs conventions; the full upstream blurb
    # lives in longDescription.
    description = "Control Home Assistant with a completely local Large Language Model acting as a personal assistant";
    longDescription = ''
      This project provides the required "glue" components to control your
      Home Assistant installation with a completely local Large Language
      Model acting as a personal assistant. The goal is to provide a drop in
      solution to be used as a "conversation agent" component by Home
      Assistant. The 2 main pieces of this solution are the Home LLM model
      and Local LLM Conversation integration.
    '';
    homepage = "https://github.com/acon96/home-llm";
    # TODO(review): set meta.license after confirming the upstream repo's license.
    maintainers = [ ];
  };
}