# Home-manager module: local AI tooling (Ollama with Vulkan + Zed editor).
{ config, pkgs, lib, flakeRoot, ... }:

let
  # Checked-in AI configuration files, addressed relative to the flake root.
  AiRepoEnv = flakeRoot + "/assets/conf/apps/ai/ai.env";
  AiRepoConf = flakeRoot + "/assets/conf/apps/ai/ai.conf";

  # --- KEY=VALUE env-file parsing ---
  # NOTE: lib.splitString (not builtins.split) is used deliberately:
  # builtins.split interleaves matched-group *lists* into its result, which
  # would crash the per-line parsing below. Blank lines and '#' comments are
  # skipped so a trailing newline or commented entry cannot cause an
  # out-of-range elemAt.
  rawLines = lib.splitString "\n" (builtins.readFile AiRepoEnv);

  isEntry = line:
    line != "" && !(lib.hasPrefix "#" line) && lib.hasInfix "=" line;

  # Split on the FIRST '=' only, so values containing '=' (e.g. base64
  # API keys) survive intact.
  toPair = line:
    let
      pieces = lib.splitString "=" line;
      key = builtins.head pieces;
      value = lib.concatStringsSep "=" (builtins.tail pieces);
    in { name = key; inherit value; };

  # listToAttrs is the correct builder here; lib.genAttrs expects a list of
  # attribute *names* plus a name->value function, not {name; value;} pairs.
  envVars = builtins.listToAttrs (map toPair (builtins.filter isEntry rawLines));
in
{
  # Install the Zed editor and Ollama built with Vulkan support (CPU/AMD).
  # NOTE(review): assumes `pkgs.ollama-vulkan` and `pkgs.zed` exist in this
  # nixpkgs revision — confirm attribute names against the pinned input.
  home.packages = [
    pkgs.ollama-vulkan
    pkgs.zed
  ];

  # --- Environment Variables ---
  # Fall back to the default local Ollama endpoint / an empty key when the
  # env file does not define them.
  home.sessionVariables = {
    OLLAMA_HOST = envVars.OLLAMA_HOST or "http://127.0.0.1:11434";
    MISTRAL_API_KEY = envVars.MISTRAL_API_KEY or "";
  };

  # --- Ollama User Service ---
  # Home-manager's systemd.user.services schema uses raw unit sections
  # (Unit/Service/Install), not the NixOS-style description/wantedBy/
  # serviceConfig keys the previous revision used.
  systemd.user.services.ollama = {
    Unit = {
      Description = "Ollama service for local AI models";
      After = [ "network.target" ];
    };

    Install = {
      WantedBy = [ "default.target" ];
    };

    Service = {
      # `ollama serve` stays in the foreground, so "exec" (not "forking",
      # which would make systemd wait for a fork that never happens and
      # declare the start failed).
      Type = "exec";

      ExecStart = "${pkgs.ollama-vulkan}/bin/ollama serve";

      # systemd Exec* values are single command lines, not shell scripts;
      # the multi-step warm-up (wait for the server, pre-pull the models)
      # therefore lives in a generated script.
      ExecStartPost = toString (pkgs.writeShellScript "ollama-warmup" ''
        sleep 5
        ${pkgs.ollama-vulkan}/bin/ollama pull codellama:70b
        ${pkgs.ollama-vulkan}/bin/ollama pull mixtral:8x7b
      '');

      Restart = "on-failure";
    };
  };

  # --- ZED Configuration ---
  # Merge the repo's base settings (ai.conf, JSON) with the machine-local
  # AI endpoints and keys; mkForce wins over any other module writing this
  # file.
  home.file.".config/zed/settings.json".text = lib.mkForce (
    builtins.toJSON (
      (builtins.fromJSON (builtins.readFile AiRepoConf))
      // {
        mistral = {
          apiKey = envVars.MISTRAL_API_KEY or "";
          defaultModel = "mistral-pro";
        };
        ollama = {
          endpoint = envVars.OLLAMA_HOST or "http://127.0.0.1:11434";
          defaultModel = "codellama:70b";
        };
      }
    )
  );
}