From 2e62c6f3bfdbe4fedf191fd69d25cf7cd3775b66 Mon Sep 17 00:00:00 2001 From: Geir Okkenhaug Jerstad Date: Sat, 14 Jun 2025 08:24:41 +0200 Subject: [PATCH] Update Ollama configuration and add Open WebUI support - Fix ollama module by removing invalid meta section - Update grey-area ollama service configuration: - Change host binding to 0.0.0.0 for external access - Remove invalid rsyslog configuration - Enable firewall access - Add Open WebUI module with proper configuration: - Integrate with Ollama API at localhost:11434 - Disable authentication for development - Open firewall on port 8080 - Successful test build of grey-area configuration --- machines/grey-area/configuration.nix | 1 + machines/grey-area/services/ollama.nix | 14 +++---------- modules/services/ollama.nix | 6 ------ modules/services/open-webui.nix | 29 ++++++++++++++++++++++++++ 4 files changed, 33 insertions(+), 17 deletions(-) create mode 100644 modules/services/open-webui.nix diff --git a/machines/grey-area/configuration.nix b/machines/grey-area/configuration.nix index c2737e3..1cb3347 100644 --- a/machines/grey-area/configuration.nix +++ b/machines/grey-area/configuration.nix @@ -25,6 +25,7 @@ ./services/audiobook.nix ./services/forgejo.nix ./services/ollama.nix + ../../modules/services/open-webui.nix ]; # Swap zram diff --git a/machines/grey-area/services/ollama.nix b/machines/grey-area/services/ollama.nix index 6628682..581697c 100644 --- a/machines/grey-area/services/ollama.nix +++ b/machines/grey-area/services/ollama.nix @@ -19,7 +19,7 @@ enable = true; # Network configuration - localhost only for security by default - host = "127.0.0.1"; + host = "0.0.0.0"; port = 11434; # Environment variables for optimal performance @@ -74,11 +74,10 @@ schedule = "weekly"; # Weekly backup is sufficient for models }; - # Don't open firewall by default - use reverse proxy if external access needed - openFirewall = false; + openFirewall = true; # Opened deliberately to allow external access to the Ollama API # GPU 
acceleration (enable if grey-area has a compatible GPU) - enableGpuAcceleration = false; # Set to true if NVIDIA/AMD GPU available + #enableGpuAcceleration = false; # Uncomment and set to true if an NVIDIA/AMD GPU is available }; # Create backup directory with proper permissions @@ -139,13 +138,6 @@ mode = "0755"; }; - # Add logging configuration to help with debugging - services.rsyslog.extraConfig = '' - # Ollama service logs - if $programname == 'ollama' then /var/log/ollama.log - & stop - ''; - # Firewall rule comments for documentation # To enable external access later, you would: # 1. Set services.homelab-ollama.openFirewall = true; diff --git a/modules/services/ollama.nix b/modules/services/ollama.nix index c4e8a7d..d67d5ba 100644 --- a/modules/services/ollama.nix +++ b/modules/services/ollama.nix @@ -430,10 +430,4 @@ in { "d ${cfg.dataDir}/runners 0755 ${cfg.user} ${cfg.group} -" ]; }; - - meta = { - maintainers = ["Geir Okkenhaug Jerstad"]; - description = "NixOS module for Ollama local LLM service"; - doc = ./ollama.md; - }; } diff --git a/modules/services/open-webui.nix b/modules/services/open-webui.nix new file mode 100644 index 0000000..d35aa95 --- /dev/null +++ b/modules/services/open-webui.nix @@ -0,0 +1,29 @@ +# Open WebUI Service Configuration +# +# This module provides Open WebUI configuration for interacting with Ollama +# Open WebUI provides a user-friendly web interface for local LLMs +{ + config, + lib, + pkgs, + ... +}: { + # Enable the built-in NixOS open-webui service + services.open-webui = { + enable = true; + port = 8080; + host = "0.0.0.0"; + + environment = { + ANONYMIZED_TELEMETRY = "False"; + DO_NOT_TRACK = "True"; + SCARF_NO_ANALYTICS = "True"; + OLLAMA_API_BASE_URL = "http://127.0.0.1:11434"; + # Disable authentication for development only; unsafe if this host is reachable from untrusted networks + WEBUI_AUTH = "False"; + }; + }; + + # Open firewall for web interface + networking.firewall.allowedTCPPorts = [ 8080 ]; +}