mauville: enable ollama

This commit is contained in:
Aly Raffauf 2024-04-23 21:24:05 -04:00
parent 07c6ad567c
commit 2ad8715d2e
3 changed files with 39 additions and 1 deletion

View file

@ -48,6 +48,11 @@ in {
}; };
services = { services = {
binaryCache.enable = true; binaryCache.enable = true;
ollama = {
enable = true;
gpu = "amd";
listenAddress = "0.0.0.0:11434";
};
}; };
system = { system = {
plymouth.enable = true; plymouth.enable = true;

View file

@ -4,5 +4,5 @@
config, config,
... ...
}: { }: {
imports = [./binaryCache ./flatpak]; imports = [./binaryCache ./flatpak ./ollama];
} }

View file

@ -0,0 +1,33 @@
{
  pkgs,
  lib,
  config,
  ...
}: let
  cfg = config.alyraffauf.services.ollama;
in {
  options = {
    alyraffauf.services.ollama.enable = lib.mkEnableOption "Enable ollama interface for LLMs.";
    alyraffauf.services.ollama.listenAddress = lib.mkOption {
      description = "Listen Address for Ollama.";
      default = "127.0.0.1:11434";
      type = lib.types.str;
    };
    alyraffauf.services.ollama.gpu = lib.mkOption {
      description = "Type of GPU for enabling GPU acceleration.";
      default = null;
      # The default is null, so the type must admit null; a bare
      # lib.types.str would fail the module-system type check whenever
      # this option is read without being set.
      type = lib.types.nullOr lib.types.str;
    };
  };
  config = lib.mkIf cfg.enable {
    services.ollama = {
      enable = true;
      # Map the vendor name to nixpkgs' acceleration backend;
      # unknown or unset values fall through to null (CPU-only).
      acceleration =
        if cfg.gpu == "amd"
        then "rocm"
        else if cfg.gpu == "nvidia"
        then "cuda"
        else null;
      listenAddress = cfg.listenAddress;
    };
  };
}