#+begin_src nix
{ pkgs, config, ... }:

{
  # Soft Serve: a self-hostable Git server served over SSH
  services.soft-serve.enable = true;

  services.soft-serve.settings = {
    name = "geokkjer's repos";
    log_format = "text";

    ssh = {
      listen_addr = "0.0.0.0:23231";
      public_url = "ssh://git.geokkjer.eu:23231";
      max_timeout = 30;
      idle_timeout = 120;
    };

    # stats/metrics endpoint
    stats.listen_addr = ":23233";
  };
}
#+end_src
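
Soft Serve binds its SSH server to 0.0.0.0:23231 and advertises ssh://git.geokkjer.eu:23231, so that port (and the stats port, if it is scraped from another host) has to be reachable through the host firewall. A minimal sketch of the matching rule, assuming the default NixOS firewall is enabled; drop 23233 if the stats endpoint stays local:

#+begin_src nix
{
  # Hypothetical addition: open the Soft Serve SSH port (23231) and,
  # if needed, the stats endpoint (23233) configured above.
  networking.firewall.allowedTCPPorts = [ 23231 23233 ];
}
#+end_src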

* Ollama

#+begin_src nix :tangle ollama.nix
{ config, pkgs, ... }:

{
  environment.systemPackages = with pkgs; [
    ollama
  ];

  systemd.services = {
    ollama = {
      description = "Server for local large language models";
      after = [ "network-online.target" ];
      wantedBy = [ "multi-user.target" ];

      environment = {
        # %S expands to the state directory root (/var/lib for system services)
        HOME = "%S/ollama";
        OLLAMA_HOST = "0.0.0.0";
        OLLAMA_MODELS = "%S/ollama/models";
      };

      serviceConfig = {
        Type = "simple";
        User = "ollama";
        Group = "ollama";
        Restart = "always";
        RestartSec = "3";
        WorkingDirectory = "/var/lib/ollama";
        # DynamicUser allocates the ollama user at runtime; StateDirectory
        # keeps /var/lib/ollama around between restarts.
        StateDirectory = [ "ollama" ];
        DynamicUser = true;
        ExecStart = "${pkgs.ollama}/bin/ollama serve";
      };
    };
  };

  # Ollama's HTTP API listens on port 11434 by default
  networking.firewall.allowedTCPPorts = [ 11434 ];
}
#+end_src
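
The unit above wires Ollama up by hand; recent nixpkgs releases also ship a services.ollama module that takes care of the user, state directory, and ExecStart. A minimal sketch of that alternative, assuming a nixpkgs version that includes the module (options beyond enable, such as the bind address, model path, and GPU acceleration, vary between releases and should be checked against the channel in use):

#+begin_src nix
{ config, pkgs, ... }:

{
  # Declarative alternative to the hand-written systemd unit above.
  # Assumes a nixpkgs release that provides services.ollama.
  services.ollama.enable = true;

  # Ollama's API still listens on 11434 by default.
  networking.firewall.allowedTCPPorts = [ 11434 ];
}
#+end_src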