Update Ollama configuration and add Open WebUI support

- Fix ollama module by removing invalid meta section
- Update grey-area ollama service configuration:
  - Change host binding to 0.0.0.0 for external access
  - Remove invalid rsyslog configuration
  - Enable firewall access
- Add Open WebUI module with proper configuration:
  - Integrate with Ollama API at localhost:11434
  - Disable authentication for development
  - Open firewall on port 8080
- Successful test build of grey-area configuration
This commit is contained in:
Geir Okkenhaug Jerstad 2025-06-14 08:24:41 +02:00
parent cf11d447f4
commit 2e62c6f3bf
4 changed files with 33 additions and 17 deletions

View file

@ -25,6 +25,7 @@
  ./services/audiobook.nix
  ./services/forgejo.nix
  ./services/ollama.nix
+ ../../modules/services/open-webui.nix
  ];
  # Swap zram

View file

@ -19,7 +19,7 @@
  enable = true;
  # Network configuration - localhost only for security by default
- host = "127.0.0.1";
+ host = "0.0.0.0";
  port = 11434;
  # Environment variables for optimal performance
@ -74,11 +74,10 @@
  schedule = "weekly"; # Weekly backup is sufficient for models
  };
- # Don't open firewall by default - use reverse proxy if external access needed
- openFirewall = false;
+ openFirewall = true; # Set to true if you want to allow external access
  # GPU acceleration (enable if grey-area has a compatible GPU)
- enableGpuAcceleration = false; # Set to true if NVIDIA/AMD GPU available
+ #enableGpuAcceleration = false; # Set to true if NVIDIA/AMD GPU available
  };
# Create backup directory with proper permissions # Create backup directory with proper permissions
@ -139,13 +138,6 @@
  mode = "0755";
  };
- # Add logging configuration to help with debugging
- services.rsyslog.extraConfig = ''
-   # Ollama service logs
-   if $programname == 'ollama' then /var/log/ollama.log
-   & stop
- '';
  # Firewall rule comments for documentation
  # To enable external access later, you would:
  # 1. Set services.homelab-ollama.openFirewall = true;

View file

@ -430,10 +430,4 @@ in {
  "d ${cfg.dataDir}/runners 0755 ${cfg.user} ${cfg.group} -"
  ];
  };
- meta = {
-   maintainers = ["Geir Okkenhaug Jerstad"];
-   description = "NixOS module for Ollama local LLM service";
-   doc = ./ollama.md;
- };
  }

View file

@ -0,0 +1,29 @@
# Open WebUI Service Configuration
#
# This module provides Open WebUI configuration for interacting with Ollama.
# Open WebUI provides a user-friendly web interface for local LLMs.
{
  config,
  lib,
  pkgs,
  ...
}: {
  # Enable the built-in NixOS open-webui service
  services.open-webui = {
    enable = true;
    port = 8080;
    # Bind on all interfaces so the UI is reachable from other hosts.
    # NOTE(review): combined with WEBUI_AUTH = "False" below, this exposes an
    # unauthenticated UI to the whole network — acceptable only on a trusted
    # LAN / development setup; confirm before deploying anywhere else.
    host = "0.0.0.0";
    environment = {
      # Opt out of telemetry and analytics reporting.
      ANONYMIZED_TELEMETRY = "False";
      DO_NOT_TRACK = "True";
      SCARF_NO_ANALYTICS = "True";
      # Point the UI at the local Ollama API (expected on the default port).
      OLLAMA_API_BASE_URL = "http://127.0.0.1:11434";
      # Disable authentication for easier development access
      WEBUI_AUTH = "False";
    };
  };

  # Open the firewall on whatever port the service is configured to use,
  # instead of hard-coding 8080 a second time — keeps the two settings from
  # drifting apart if the port is ever changed above.
  networking.firewall.allowedTCPPorts = [ config.services.open-webui.port ];
}