# NixOS module: runs Ollama (CUDA-accelerated) inside a declarative
# nixos-container with NVIDIA device nodes passed through from the host.
# Enable with `sysconfig.virtualization.ollama.enable = true;`.
{ config, lib, nixpkgs-us, ... }:
let
  cfg = config.sysconfig.virtualization.ollama;

  # Single source of truth for the NVIDIA device nodes the container needs.
  # Both the bind mounts and the device-cgroup allow list are derived from
  # this list, so they can never drift apart.
  nvidiaDevices = [
    "/dev/nvidia0"
    "/dev/nvidiactl"
    "/dev/nvidia-uvm"
    "/dev/nvidia-modeset"
    "/dev/nvidia-uvm-tools"
  ];
in
{
  options = {
    sysconfig.virtualization.ollama.enable = lib.options.mkOption {
      type = lib.types.bool;
      default = false;
    };
  };

  config = lib.mkIf cfg.enable {
    containers.ollama = {
      autoStart = true;
      privateNetwork = true;
      hostAddress = "192.168.100.10";
      localAddress = "192.168.100.24";

      # Bind-mount every NVIDIA device node read-write into the container.
      bindMounts = lib.genAttrs nvidiaDevices (dev: {
        hostPath = dev;
        isReadOnly = false;
      });

      # Permit the container's device cgroup to open the same nodes rw.
      allowedDevices = map (dev: { node = dev; modifier = "rw"; }) nvidiaDevices;

      config = {
        services.ollama = {
          enable = true;
          acceleration = "cuda";
          # ollama-cuda is unfree, so it comes from a separate nixpkgs
          # instantiation with allowUnfree enabled.
          package =
            let
              pkgs-us = import nixpkgs-us {
                system = "x86_64-linux";
                config.allowUnfree = true;
              };
            in
            pkgs-us.ollama-cuda;
          environmentVariables = {
            OLLAMA_CONTEXT_LENGTH = "24000";
          };
          # Listen on all interfaces so the host (192.168.100.10) can reach it.
          host = "0.0.0.0";
          loadModels = [ "llama3.1:8b" ];
          openFirewall = true;
          user = "ollama";
        };

        # GPU device access requires membership in these groups.
        users.users.ollama.extraGroups = [ "video" "render" ];

        # The upstream unit sandboxes devices away; relax that so the
        # bind-mounted /dev/nvidia* nodes are actually visible to the service.
        systemd.services.ollama.serviceConfig = {
          PrivateDevices = lib.mkForce false;
          DevicePolicy = lib.mkForce "auto";
        };

        system.stateVersion = "25.05";
      };
    };
  };
}