disable llama things

ant 2026-01-24 15:15:44 +01:00
parent adb56935a1
commit cd668264a5


@@ -87,13 +87,34 @@ in
   virtualisation.libvirtd.enable = true;
   programs.virt-manager.enable = true;
-  services.ollama = {
-    enable = true;
-    acceleration = "cuda";
-  };
-  services.open-webui = {
-    enable = true;
-  };
+  # services.llama-cpp = {
+  #   package = pkgs-unstable.llama-cpp-vulkan;
+  #   enable = true;
+  #   port = 8182;
+  #   model = "/home/ant/models/Qwen3-4B-Instruct-2507-UD-Q8_K_XL.gguf";
+  #   extraFlags = [
+  #     "-fa" "on"
+  #     "--jinja"
+  #     "--cache-reuse" "256"
+  #     "--reasoning-format" "auto"
+  #     "--ctx-size" "16384"
+  #     "--n-gpu-layers" "999"
+  #     "--cache-type-k" "q4_0"
+  #     "--cache-type-v" "q4_0"
+  #     "--n-cpu-moe" "25"
+  #   ];
+  # };
+  # systemd.services.llama-cpp = {
+  #   environment.XDG_CACHE_HOME = "/var/cache/llama.cpp";
+  #   serviceConfig.CacheDirectory = "llama.cpp";
+  # };
+  # services.ollama = {
+  #   enable = true;
+  #   acceleration = "cuda";
+  # };
+  # services.open-webui = {
+  #   enable = true;
+  # };
   services.immich.enable = true;
   # This value determines the NixOS release from which the default
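
Commenting whole service blocks in and out works, but a common NixOS pattern is to gate them behind a single option instead. Below is a minimal sketch under the same assumptions as the diff (a `pkgs-unstable` module argument in scope); the `my.llm.enable` option name is invented for illustration, not something this repository defines:

{ config, lib, pkgs-unstable, ... }:
{
  # Hypothetical toggle; the option path is made up for this sketch.
  options.my.llm.enable = lib.mkEnableOption "local LLM stack";

  # Everything below only takes effect when my.llm.enable = true.
  config = lib.mkIf config.my.llm.enable {
    services.llama-cpp = {
      enable = true;
      package = pkgs-unstable.llama-cpp-vulkan;
      port = 8182;
      model = "/home/ant/models/Qwen3-4B-Instruct-2507-UD-Q8_K_XL.gguf";
      # Same server flags as the commented-out block in the diff above.
      extraFlags = [
        "-fa" "on"
        "--jinja"
        "--cache-reuse" "256"
        "--reasoning-format" "auto"
        "--ctx-size" "16384"
        "--n-gpu-layers" "999"
        "--cache-type-k" "q4_0"
        "--cache-type-v" "q4_0"
        "--n-cpu-moe" "25"
      ];
    };
    # Cache directory overrides carried over from the disabled block.
    systemd.services.llama-cpp = {
      environment.XDG_CACHE_HOME = "/var/cache/llama.cpp";
      serviceConfig.CacheDirectory = "llama.cpp";
    };
    services.ollama = {
      enable = true;
      acceleration = "cuda";
    };
    services.open-webui.enable = true;
  };
}

With this in place, a commit like the one above reduces to flipping my.llm.enable = false; in one spot rather than commenting out each service.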