{ config, lib, pkgs, namespace, ... }:
let
  name = "ai";
  # Per-host options for this service group (openFirewall, configDir, ...
  # presumably declared by the mkModule helper — TODO confirm against lib).
  cfg = config.${namespace}.services.${name};

  # Local AI stack: ollama (ROCm backend), llama-cpp (currently disabled),
  # and open-webui as the front end.
  aiConfig = lib.${namespace}.mkModule {
    inherit config name;
    serviceName = "open-webui"; # todo multiple?
    description = "AI Services";
    options = { };
    moduleConfig = {
      services = {
        ollama = {
          enable = true;
          package = pkgs.ollama-rocm;
          port = 11434;
          host = "0.0.0.0";
          user = "nix-apps";
          group = "jallen-nas";
          openFirewall = cfg.openFirewall;
          # Spoof GFX version so ROCm accepts this GPU (gfx1102-class card).
          rocmOverrideGfx = "11.0.2";
          loadModels = [ "mistral:instruct" ];
          home = "${cfg.configDir}/ollama";
        };

        llama-cpp = {
          enable = false;
          port = 8127;
          host = "0.0.0.0";
          openFirewall = cfg.openFirewall;
          # model = "${cfg.configDir}/llama-cpp/models/functionary-small-v3.2-GGUF/functionary-small-v3.2.Q4_0.gguf";
          package = pkgs.llama-cpp-rocm;
          extraFlags = [
            # Fixed: was "--n_gpu-layers"; llama.cpp's flag is "--n-gpu-layers"
            # (alias -ngl) — the underscore form would fail argument parsing.
            "--n-gpu-layers"
            "500"
            "-c"
            "0"
            "--numa"
            "numactl"
            "--jinja"
          ];
        };

        open-webui = {
          enable = true;
          package = pkgs.unstable.open-webui;
          host = "0.0.0.0";
          port = 8888;
          openFirewall = cfg.openFirewall;
          # stateDir = "/media/nas/main/nix-app-data/open-webui";
          environmentFile = config.sops.secrets."jallen-nas/open-webui".path;
          environment = {
            # OIDC via authentik; client secret comes from the sops env file.
            OPENID_PROVIDER_URL = "https://authentik.mjallen.dev/application/o/chat/.well-known/openid-configuration";
            OAUTH_PROVIDER_NAME = "authentik";
            OPENID_REDIRECT_URI = "https://chat.mjallen.dev/oauth/oidc/callback";
            ENABLE_OAUTH_SIGNUP = "False";
            OAUTH_MERGE_ACCOUNTS_BY_EMAIL = "True";
            ENABLE_SIGNUP = "False";
            ENABLE_LOGIN_FORM = "False";

            # Opt out of telemetry.
            ANONYMIZED_TELEMETRY = "False";
            DO_NOT_TRACK = "True";
            SCARF_NO_ANALYTICS = "True";

            OLLAMA_API_BASE_URL = "http://127.0.0.1:11434";
            LOCAL_FILES_ONLY = "False";
            # NOTE(review): WEBUI_AUTH = "False" disables authentication
            # entirely, which appears to contradict the OIDC settings above —
            # confirm whether this is intentional (e.g. auth handled by a
            # reverse proxy) before exposing this service.
            WEBUI_AUTH = "False";
          };
        };
      };
    };
  };
in
{
  imports = [ aiConfig ];
}