ollama container?

This commit is contained in:
2025-07-29 17:56:36 -05:00
parent 2c477b5a4d
commit 4f1b74b41b
4 changed files with 96 additions and 3 deletions

View File

@@ -0,0 +1,88 @@
{ config, lib, inputs, ... }: {
  options = {
    # Toggle for running Ollama inside an isolated NixOS container with
    # NVIDIA GPU passthrough.
    sysconfig.opts.virtualization.ollama.enable = lib.options.mkOption {
      type = lib.types.bool;
      default = false;
      description = "Whether to run an Ollama NixOS container with NVIDIA GPU passthrough.";
    };
  };

  config = lib.mkIf config.sysconfig.opts.virtualization.ollama.enable {
    containers.ollama = let
      # NVIDIA device nodes the container needs for CUDA acceleration.
      nvidiaDevices = [
        "/dev/nvidia0"
        "/dev/nvidiactl"
        "/dev/nvidia-uvm"
        "/dev/nvidia-modeset"
        "/dev/nvidia-uvm-tools"
      ];
    in {
      autoStart = true;
      # Give the container its own veth pair instead of sharing the host's
      # network stack.
      privateNetwork = true;
      hostAddress = "192.168.100.10";
      localAddress = "192.168.100.24";

      # Bind each NVIDIA device node into the container read-write.
      bindMounts = lib.genAttrs nvidiaDevices (dev: {
        hostPath = dev;
        isReadOnly = false;
      });

      # NOTE(review): containers.<name>.allowedDevices is a *list* of
      # { node; modifier; } attrsets, not an attrset keyed by name — the
      # previous attrset form fails NixOS module type-checking.
      allowedDevices = map (node: {
        inherit node;
        modifier = "rw";
      }) nvidiaDevices;

      config = {
        services.ollama = {
          enable = true;
          acceleration = "cuda";
          # Pull ollama from the pinned `nixpkgs-us` flake input with unfree
          # packages allowed (required for the CUDA toolchain).
          package = let
            pkgs-us = import inputs.nixpkgs-us {
              system = "x86_64-linux";
              config.allowUnfree = true;
            };
          in pkgs-us.ollama;
          # Listen on all container interfaces so the host (and anything
          # routed to the container address) can reach the API on port 80.
          host = "0.0.0.0";
          port = 80;
          openFirewall = true;
        };
        system.stateVersion = "25.05";
      };
    };
  };
}