# nixos/henrovnix_ok/home/apps/ai.nix

{ config, pkgs, lib, flakeRoot, ... }:
let
  # Read environment variables (OLLAMA_HOST and MISTRAL_API_KEY) from a local file.
  AiRepoEnv = flakeRoot + "/assets/conf/apps/ai/ai.env";
  AiRepoConf = flakeRoot + "/assets/conf/apps/ai/ai.conf";
  # Parse KEY=VALUE lines into an attrset; lines without "=" are skipped.
  envVars = lib.listToAttrs (map
    (line:
      let parts = lib.splitString "=" line;
      in lib.nameValuePair (builtins.head parts)
        (lib.concatStringsSep "=" (builtins.tail parts)))
    (builtins.filter (line: lib.hasInfix "=" line)
      (lib.splitString "\n" (builtins.readFile (toString AiRepoEnv)))));
in
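# The ai.env file is expected to hold plain KEY=VALUE lines. An illustrative
# example (placeholder values only, never a real key):
#
#   OLLAMA_HOST=http://127.0.0.1:11434
#   MISTRAL_API_KEY=<your-mistral-api-key>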
{
  # Install the Zed editor and Ollama with Vulkan support (for CPU/AMD).
  home.packages = [
    pkgs.ollama-vulkan
    pkgs.zed-editor
  ];
  # --- Environment Variables ---
  home.sessionVariables = {
    OLLAMA_HOST = envVars.OLLAMA_HOST or "http://127.0.0.1:11434";
    MISTRAL_API_KEY = envVars.MISTRAL_API_KEY or "";
  };
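  # These variables are exported via Home Manager's session-variables mechanism,
  # so they only appear in sessions started after `home-manager switch`. A quick
  # sanity check from a fresh shell (illustrative):
  #
  #   echo "$OLLAMA_HOST"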
  # --- Ollama User Service ---
  # Home Manager user unit, declared with the Unit/Service/Install sections.
  systemd.user.services.ollama = {
    Unit = {
      Description = "Ollama service for local AI models";
      After = [ "network.target" ];
    };
    Service = {
      # `ollama serve` stays in the foreground, so run it as a simple service.
      Type = "simple";
      ExecStart = "${pkgs.ollama-vulkan}/bin/ollama serve";
      # Give the server a moment to start, then pre-pull the default models.
      ExecStartPost = [
        "${pkgs.coreutils}/bin/sleep 5"
        "${pkgs.ollama-vulkan}/bin/ollama pull codellama:70b"
        "${pkgs.ollama-vulkan}/bin/ollama pull mixtral:8x7b"
      ];
      Restart = "on-failure";
    };
    Install = {
      WantedBy = [ "default.target" ];
    };
  };
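  # To verify the unit after `home-manager switch` (standard systemd user commands):
  #
  #   systemctl --user status ollama
  #   journalctl --user -u ollama -f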
  # --- Zed Configuration ---
  home.file.".config/zed/settings.json".text = lib.mkForce (
    builtins.toJSON (
      (builtins.fromJSON (builtins.readFile (toString AiRepoConf)))
      // {
        mistral = {
          apiKey = envVars.MISTRAL_API_KEY or "";
          defaultModel = "mistral-pro";
        };
        ollama = {
          endpoint = envVars.OLLAMA_HOST or "http://127.0.0.1:11434";
          defaultModel = "codellama:70b";
        };
      }
    )
  );
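  # ai.conf must be valid JSON; the expression above merges the attrset into its
  # top level. An illustrative shape of the resulting settings.json (any other
  # keys come from ai.conf; whether Zed reads these exact keys depends on the
  # plugins in use):
  #
  #   {
  #     "mistral": { "apiKey": "<key>", "defaultModel": "mistral-pro" },
  #     "ollama": { "endpoint": "http://127.0.0.1:11434", "defaultModel": "codellama:70b" }
  #   }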
  # --- Usage Notes ---
  # 1. Pulling additional models:
  #      ollama pull <model-name>
  # 2. Switching GPU backends:
  #      - NVIDIA: replace `ollama-vulkan` with `ollama-cuda`
  #      - AMD: use `ollama-rocm` and ensure ROCm is installed
  # 3. Zed plugin setup:
  #      - Install the Ollama and Mistral plugins in Zed
  # 4. Security:
  #      - Never commit ai.env to version control
  # 5. Persistent service (keep user units running without an active login session):
  #      loginctl enable-linger $(whoami)
}
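
# A minimal sketch of wiring this module into a Home Manager flake. The file
# names and the choice of `flakeRoot = self` are assumptions about this repo's
# layout, not something this file defines:
#
#   # flake.nix (inside homeManagerConfiguration)
#   extraSpecialArgs = { flakeRoot = self; };
#
#   # home/default.nix
#   { ... }: { imports = [ ./apps/ai.nix ]; }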