{ config, lib, pkgs, ... }:

{
  # Runs the Ollama LLM inference server as an OCI container, published
  # through Traefik. Only active when both this option and the general
  # sysconfig.docker.enable option are set.
  options.sysconfig.docker.ollama.enable =
    lib.mkEnableOption "the Ollama container (served behind Traefik)";

  config = lib.mkIf
    (config.sysconfig.docker.ollama.enable && config.sysconfig.docker.enable)
    {
      # Host-side ollama CLI so the API/models can be driven from the host.
      environment.systemPackages = with pkgs; [ ollama ];

      # Let the traefik and open-webui container interfaces reach the
      # Ollama API port.
      networking.firewall.interfaces = {
        "ve-traefik".allowedTCPPorts = [ 11434 ];
        "ve-openwebui".allowedTCPPorts = [ 11434 ];
      };

      virtualisation.oci-containers.containers.ollama = {
        image = "ollama/ollama:latest";
        # unstable, waiting for 26.05
        #pull = "newer";
        hostname = "ollama.esotericbytes.com";
        networks = [ "docker-main" ];
        ports = [ "11434:11434" ];
        # Persist downloaded models across container restarts.
        volumes = [ "vol_ollama:/root/.ollama" ];
        labels = {
          "traefik.http.routers.ollama.entrypoints" = "localsecure";
          "traefik.http.routers.ollama.rule" = "Host(`ollama.esotericbytes.com`)";
          "traefik.http.routers.ollama.service" = "ollama";
          "traefik.http.routers.ollama.tls.certResolver" = "cloudflare";
          # NOTE(review): Traefik's docker provider normally only honors
          # `loadbalancer.server.port` (and `scheme`); a `server.url` label is
          # a file-provider construct — confirm Traefik actually picks this up.
          "traefik.http.services.ollama.loadbalancer.server.url" =
            "http://192.168.100.10:11434";
        };
        # lib.optionals is the idiomatic form for a conditional list value;
        # it yields the same effective [] as the previous mkIf when the
        # nvidia flag is off.
        # NOTE(review): the static --ip is also dropped when nvidia is
        # disabled; if the fixed address should apply unconditionally, move
        # it out of the optionals list — confirm intent.
        extraOptions = lib.optionals config.sysconfig.docker.nvidia [
          "--device=nvidia.com/gpu=all"
          "--ip=192.168.101.4"
        ];
        environment = {
          # Default context window; hosts can override via a
          # higher-priority definition thanks to mkDefault.
          OLLAMA_CONTEXT_LENGTH = lib.mkDefault "32000";
        };
      };
    };
}