Diffstat (limited to 'nixpkgs/nixos/modules/services/misc/ollama.nix')
-rw-r--r-- nixpkgs/nixos/modules/services/misc/ollama.nix | 60 ++++++++++++++----------
1 file changed, 36 insertions(+), 24 deletions(-)
diff --git a/nixpkgs/nixos/modules/services/misc/ollama.nix b/nixpkgs/nixos/modules/services/misc/ollama.nix
index 3ac3beb4de07..7a5661510e25 100644
--- a/nixpkgs/nixos/modules/services/misc/ollama.nix
+++ b/nixpkgs/nixos/modules/services/misc/ollama.nix
@@ -13,48 +13,60 @@ in
 {
   options = {
     services.ollama = {
-      enable = lib.mkEnableOption (
-        lib.mdDoc "Server for local large language models"
-      );
+      enable = lib.mkEnableOption "ollama server for local large language models";
+      package = lib.mkPackageOption pkgs "ollama" { };
       listenAddress = lib.mkOption {
         type = types.str;
         default = "127.0.0.1:11434";
-        description = lib.mdDoc ''
-          Specifies the bind address on which the ollama server HTTP interface listens.
+        example = "0.0.0.0:11111";
+        description = ''
+          The address which the ollama server HTTP interface binds and listens to.
         '';
       };
       acceleration = lib.mkOption {
         type = types.nullOr (types.enum [ "rocm" "cuda" ]);
         default = null;
         example = "rocm";
-        description = lib.mdDoc ''
-          Specifies the interface to use for hardware acceleration.
+        description = ''
+          What interface to use for hardware acceleration.
 
           - `rocm`: supported by modern AMD GPUs
           - `cuda`: supported by modern NVIDIA GPUs
         '';
       };
-      package = lib.mkPackageOption pkgs "ollama" { };
+      environmentVariables = lib.mkOption {
+        type = types.attrsOf types.str;
+        default = { };
+        example = {
+          HOME = "/tmp";
+          OLLAMA_LLM_LIBRARY = "cpu";
+        };
+        description = ''
+          Set arbitrary environment variables for the ollama service.
+
+          Be aware that these are only seen by the ollama server (systemd service),
+          not normal invocations like `ollama run`.
+          Since `ollama run` is mostly a shell around the ollama server, this is usually sufficient.
+        '';
+      };
     };
   };
 
   config = lib.mkIf cfg.enable {
-    systemd = {
-      services.ollama = {
-        wantedBy = [ "multi-user.target" ];
-        description = "Server for local large language models";
-        after = [ "network.target" ];
-        environment = {
-          HOME = "%S/ollama";
-          OLLAMA_MODELS = "%S/ollama/models";
-          OLLAMA_HOST = cfg.listenAddress;
-        };
-        serviceConfig = {
-          ExecStart = "${lib.getExe ollamaPackage} serve";
-          WorkingDirectory = "/var/lib/ollama";
-          StateDirectory = [ "ollama" ];
-          DynamicUser = true;
-        };
+    systemd.services.ollama = {
+      description = "Server for local large language models";
+      wantedBy = [ "multi-user.target" ];
+      after = [ "network.target" ];
+      environment = cfg.environmentVariables // {
+        HOME = "%S/ollama";
+        OLLAMA_MODELS = "%S/ollama/models";
+        OLLAMA_HOST = cfg.listenAddress;
+      };
+      serviceConfig = {
+        ExecStart = "${lib.getExe ollamaPackage} serve";
+        WorkingDirectory = "%S/ollama";
+        StateDirectory = [ "ollama" ];
+        DynamicUser = true;
       };
     };
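
Example usage (a minimal sketch, not part of the commit above; the option names follow the module as changed here, but the listen address, acceleration backend, and environment variable value are placeholder choices):

    { ... }:
    {
      services.ollama = {
        enable = true;
        listenAddress = "0.0.0.0:11434";   # bind on all interfaces, port 11434
        acceleration = "rocm";             # or "cuda", or null for no GPU acceleration
        environmentVariables = {
          OLLAMA_LLM_LIBRARY = "cpu";      # hypothetical: the value used in the option's example
        };
      };
    }

Note that `cfg.environmentVariables` is merged with `//` underneath the module's own `HOME`, `OLLAMA_MODELS` and `OLLAMA_HOST` assignments, so those three variables always take the module's values and cannot be overridden through this option.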