Mirror of https://github.com/alyraffauf/nixcfg.git (synced 2024-11-22 02:13:55 -05:00)
mauville: enable ollama
commit 2ad8715d2e
parent 07c6ad567c
@@ -48,6 +48,11 @@ in {
   };
   services = {
     binaryCache.enable = true;
+    ollama = {
+      enable = true;
+      gpu = "amd";
+      listenAddress = "0.0.0.0:11434";
+    };
   };
   system = {
     plymouth.enable = true;
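With gpu = "amd", the module added below translates the option to ROCm, so the host block above expands to roughly the following plain NixOS settings (an illustrative sketch, not part of the commit):

  services.ollama = {
    enable = true;
    acceleration = "rocm";             # "amd" is mapped to "rocm" by the module
    listenAddress = "0.0.0.0:11434";   # serve the Ollama API on all interfaces
  };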
@@ -4,5 +4,5 @@
   config,
   ...
 }: {
-  imports = [./binaryCache ./flatpak];
+  imports = [./binaryCache ./flatpak ./ollama];
 }
nixosModules/services/ollama/default.nix (new file, 33 lines)
@@ -0,0 +1,33 @@
+{
+  pkgs,
+  lib,
+  config,
+  ...
+}: {
+  options = {
+    alyraffauf.services.ollama.enable = lib.mkEnableOption "Enable ollama interface for LLMs.";
+    alyraffauf.services.ollama.listenAddress = lib.mkOption {
+      description = "Listen Address for Ollama.";
+      default = "127.0.0.1:11434";
+      type = lib.types.str;
+    };
+    alyraffauf.services.ollama.gpu = lib.mkOption {
+      description = "Type of GPU for enabling GPU acceleration.";
+      default = null;
+      type = lib.types.str;
+    };
+  };
+
+  config = lib.mkIf config.alyraffauf.services.ollama.enable {
+    services.ollama = {
+      enable = true;
+      acceleration =
+        if config.alyraffauf.services.ollama.gpu == "amd"
+        then "rocm"
+        else if config.alyraffauf.services.ollama.gpu == "nvidia"
+        then "cuda"
+        else null;
+      listenAddress = config.alyraffauf.services.ollama.listenAddress;
+    };
+  };
+}
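For comparison, a hypothetical host with an NVIDIA GPU could enable the same module through the options defined above (a sketch, not part of the commit; "nvidia" is mapped to "cuda"):

  alyraffauf.services.ollama = {
    enable = true;                    # activates services.ollama via the mkIf block
    gpu = "nvidia";                   # resolved to acceleration = "cuda"
    listenAddress = "0.0.0.0:11434";  # defaults to 127.0.0.1:11434 if omitted
  };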