Trying out ollama

Natsu Kagami 2025-04-08 22:32:55 +02:00
parent 97318f33f5
commit c454eee024
Signed by: nki
GPG key ID: 55A032EB38B49ADB

@@ -36,6 +36,9 @@ in
    openrazer
  ];
  config = mkMerge [
    {
      # Kernel
      boot.kernelPackages = pkgs.linuxKernel.packages.linux_xanmod_stable;
@@ -236,4 +239,35 @@ in
      virtualisation.libvirtd.enable = true;
      virtualisation.spiceUSBRedirection.enable = true;
    }
    {
      # LLM poop
      services.ollama = {
        enable = true;
        loadModels = [
          "deepseek-r1:14b"
          "gemma3:12b"
        ];
        acceleration = "rocm";
        # Override the gfx version ROCm sees (10.3.1 = gfx1031).
        rocmOverrideGfx = "10.3.1";
      };
      systemd.services.ollama = {
        # Let the ollama unit lock up to 16 GiB of memory.
        serviceConfig.LimitMEMLOCK = toString (16 * 1024 * 1024 * 1024);
      };
      services.open-webui = {
        enable = true;
        port = 5689;
        openFirewall = true;
        host = "0.0.0.0";
        environment = {
          # Disable telemetry and new-account signups.
          ANONYMIZED_TELEMETRY = "False";
          DO_NOT_TRACK = "True";
          SCARF_NO_ANALYTICS = "True";
          ENV = "prod";
          ENABLE_SIGNUP = "false";
        };
      };
      common.linux.tailscale.firewall.allowPorts = [ config.services.open-webui.port ];
    }
  ];
}
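
For context, not part of the diff itself: rocmOverrideGfx appears to be a convenience option that the NixOS ollama module passes to the service as the HSA_OVERRIDE_GFX_VERSION environment variable. Assuming that mapping, a minimal hand-written sketch with the same effect would be:

  # Hypothetical manual equivalent of rocmOverrideGfx = "10.3.1";
  # relies on the assumption that the option only sets HSA_OVERRIDE_GFX_VERSION on the unit.
  systemd.services.ollama.environment.HSA_OVERRIDE_GFX_VERSION = "10.3.1";

Such an override is typically needed because ROCm ships kernels for a limited set of gfx targets, so consumer cards sometimes have to report a nearby supported version instead of their real one.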