# dotfiles/nixos/modules/ollama.nix
{
  flake,
  pkgs,
  ...
}:
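# Per-instance settings (service name, ports, filesystem paths) are read
# from the flake-level config. The shape assumed here is roughly:
#   instances.ollama = { name = ...; ports = { port0 = ...; port1 = ...; }; paths.path1 = ...; };
#   instances.web.localhost.address0 = <a loopback address, e.g. "127.0.0.1">;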
let
  inherit (flake.config.services.instances)
    ollama
    web
    ;
  service = ollama;
  localhost = web.localhost.address0;
in
{
  services = {
    ollama = {
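      # Optional AMD GPU (ROCm) acceleration; left commented out, so the
      # default (CPU) ollama package is used.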
      # acceleration = "rocm";
      # package = pkgs.ollama.override {
      #   config = {
      #     rocmSupport = true;
      #     cudaSupport = false;
      #   };
      # };
      enable = true;
      group = service.name;
      host = "http://${localhost}";
      # models = service.paths.path1;
      user = service.name;
    };
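    # Alternative backend: a llama.cpp server pointed at a local GGUF
    # model; kept for reference but disabled in favor of ollama.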
    # llama-cpp = {
    #   enable = true;
    #   port = 8080;
    #   host = localhost;
    #   model = "/models/qwen2.5-coder-32b-instruct-q8_0-00004-of-00005.gguf";
    # };
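    # Open WebUI frontend for ollama; telemetry/analytics disabled and
    # authentication enabled via environment flags.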
    open-webui = {
      enable = true;
      host = localhost;
      port = service.ports.port0;
      environment = {
        ENABLE_OLLAMA_API = "True";
        ANONYMIZED_TELEMETRY = "False";
        DO_NOT_TRACK = "True";
        SCARF_NO_ANALYTICS = "True";
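        # Assumes port1 matches ollama's listen port (11434 by default;
        # services.ollama.port is not set above).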
        OLLAMA_BASE_URL = "http://${localhost}:${toString service.ports.port1}";
        WEBUI_AUTH = "True";
      };
    };
  };
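  # 'Z' recursively adjusts ownership and permissions of the existing
  # models directory so the ollama service user can write to it.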
  systemd.tmpfiles.rules = [
    "Z ${service.paths.path1} 0755 ${service.name} ${service.name} -"
  ];
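  # Open the web UI (port0) and ollama API (port1) ports; both services
  # bind to ${localhost} above, so this only matters if that address is
  # reachable from outside the host.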
  networking = {
    firewall = {
      allowedTCPPorts = [
        # 8080
        service.ports.port0
        service.ports.port1
      ];
    };
  };
}