{ config, lib, inputs, ... }:

{
  options = {
    sysconfig.opts.virtualization.ollama.enable = lib.options.mkOption {
      type = lib.types.bool;
      default = false;
    };
  };

  config = lib.mkIf config.sysconfig.opts.virtualization.ollama.enable {
    containers.ollama = {
      autoStart = true;

      # Give the container its own network namespace, connected to the host
      # through a veth pair with these addresses.
      privateNetwork = true;
      hostAddress = "192.168.100.10";
      localAddress = "192.168.100.24";

      # Bind-mount the NVIDIA device nodes into the container so CUDA is usable.
      bindMounts = {
        "/dev/nvidia0" = { hostPath = "/dev/nvidia0"; isReadOnly = false; };
        "/dev/nvidiactl" = { hostPath = "/dev/nvidiactl"; isReadOnly = false; };
        "/dev/nvidia-uvm" = { hostPath = "/dev/nvidia-uvm"; isReadOnly = false; };
        "/dev/nvidia-modeset" = { hostPath = "/dev/nvidia-modeset"; isReadOnly = false; };
        "/dev/nvidia-uvm-tools" = { hostPath = "/dev/nvidia-uvm-tools"; isReadOnly = false; };
      };

      # Grant read/write access to the same device nodes in the container's
      # device cgroup.
      allowedDevices = [
        { node = "/dev/nvidia0"; modifier = "rw"; }
        { node = "/dev/nvidiactl"; modifier = "rw"; }
        { node = "/dev/nvidia-uvm"; modifier = "rw"; }
        { node = "/dev/nvidia-modeset"; modifier = "rw"; }
        { node = "/dev/nvidia-uvm-tools"; modifier = "rw"; }
      ];

      config = {
        services.ollama = {
          enable = true;
          acceleration = "cuda";

          # Take the ollama package from the nixpkgs-us flake input, with unfree
          # packages allowed so the CUDA-enabled build can be evaluated.
          package =
            let
              pkgs-us = import inputs.nixpkgs-us {
                system = "x86_64-linux";
                config.allowUnfree = true;
              };
            in
            pkgs-us.ollama;

          # Listen on all interfaces inside the container so the API is reachable
          # from the host via the container's address on port 8080.
          host = "0.0.0.0";
          port = 8080;
          openFirewall = true;
        };

        system.stateVersion = "25.05";
      };
    };
  };
}
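
# Usage sketch (assumptions: this module is imported by the host configuration
# and `inputs` is passed in, e.g. via specialArgs). Any other module can then
# enable the container with:
#
#   sysconfig.opts.virtualization.ollama.enable = true;
#
# With the settings above, the Ollama API inside the container should be
# reachable from the host at http://192.168.100.24:8080.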