1
0
Fork 0
mirror of https://github.com/LnL7/nix-darwin.git synced 2025-04-09 10:42:53 +00:00

Add some of the options from NixOS ollama module

Particularly: host, port, home, models and environmentVariables, to make
the ollama module compatible with the one from nixpkgs.

Except: sandbox, writablePaths, acceleration, openFirewall.
This commit is contained in:
Velnbur 2024-06-16 11:21:20 +03:00
parent 63df5bc7e1
commit abab9b7b6a
No known key found for this signature in database

View file

@ -21,21 +21,76 @@ in {
description = "This option specifies the ollama package to use.";
};
# Host address the ollama server's HTTP interface binds to.
# (Replaces the former `exec` option; the daemon now invokes
# `${cfg.package}/bin/ollama` directly.)
host = mkOption {
  type = types.str;
  default = "127.0.0.1";
  example = "0.0.0.0";
  description = ''
    The host address which the ollama server HTTP interface listens to.
  '';
};
# TCP port for the ollama server's HTTP interface; combined with
# `host` into OLLAMA_HOST for the daemon environment.
port = mkOption {
  type = types.port;
  example = 11111;
  default = 11434;
  description = ''
    Which port the ollama server listens to.
  '';
};
# Directory exported as HOME to the launchd daemon.
# NOTE(review): "%S" is a systemd specifier (state directory); launchd
# performs no such expansion — confirm this default works on darwin.
# Uses bare `mkOption` for consistency with the sibling options; the
# former reference to `writablePaths`/`sandbox` was dropped because
# those NixOS options were not ported to this module.
home = mkOption {
  type = types.str;
  default = "%S/ollama";
  example = "/home/foo";
  description = ''
    The home directory that the ollama service is started in.
  '';
};
# Directory ollama reads models from and downloads new models into
# (exported as OLLAMA_MODELS to the daemon).
# NOTE(review): "%S" is a systemd specifier, not expanded by launchd —
# verify this default on darwin.
models = mkOption {
  type = types.str;
  example = "/path/to/ollama/models";
  default = "%S/ollama/models";
  description = ''
    The directory that the ollama service will read models from and download new models to.
  '';
};
# Free-form environment for the launchd daemon only; interactive
# invocations such as `ollama run` do not see these variables.
environmentVariables = mkOption {
  type = types.attrsOf types.str;
  default = { };
  description = ''
    Set arbitrary environment variables for the ollama service.
    Be aware that these are only seen by the ollama server (launchd daemon),
    not normal invocations like `ollama run`.
    Since `ollama run` is mostly a shell around the ollama server, this is usually sufficient.
  '';
  example = {
    OLLAMA_LLM_LIBRARY = "cpu";
    HIP_VISIBLE_DEVICES = "0,1";
  };
};
};
};
# When enabled: install the ollama CLI and run `ollama serve` as a
# launchd daemon. The stale duplicate ProgramArguments definition that
# referenced the removed `cfg.exec` option has been dropped — Nix would
# reject the attribute being defined twice, and `exec` no longer exists.
config = mkIf cfg.enable {
  environment.systemPackages = [ cfg.package ];

  launchd.daemons.ollama = {
    path = [ config.environment.systemPath ];

    # User-supplied variables first, so the module-derived HOME and
    # OLLAMA_* values below always win on conflict.
    environment = cfg.environmentVariables // {
      HOME = cfg.home;
      OLLAMA_MODELS = cfg.models;
      OLLAMA_HOST = "${cfg.host}:${toString cfg.port}";
    };

    serviceConfig.ProgramArguments = [ "${cfg.package}/bin/ollama" "serve" ];
    serviceConfig.RunAtLoad = true;
  };
};