Rebuilding Nix files

This commit is contained in:
2026-02-28 11:28:08 +01:00
parent e978d7a9bc
commit 14a337240f
3 changed files with 387 additions and 438 deletions
+27 -44
View File
@@ -1646,28 +1646,22 @@ It automatically pulls and prepares selected coding models (e.g., Qwen2.5-Coder
{ config, pkgs, lib, flakeRoot, ... }:
let
# Path to the environment file containing OLLAMA_HOST and MISTRAL_API_KEY
# Path to environment file
AiRepoEnv = flakeRoot + "/assets/conf/apps/ai/ai.env";
# Path to the ZED configuration template
AiRepoConf = flakeRoot + "/assets/conf/apps/ai/ai.conf";
# Robust environment file parser
envVars = lib.genAttrs (
builtins.map (
line: let
# Trim whitespace from the line
# Simple, reliable way to parse key=value pairs
parseEnv = lines:
lib.foldl' (acc: line:
let
trimmed = builtins.trim line;
# Skip comments and empty lines
isValid = trimmed != "" && (builtins.substr 0 1 trimmed) != "#";
# Split on first '=' only
parts = if isValid then builtins.split "=" trimmed else [ ];
# Get key (first part) and trim whitespace
key = if builtins.length parts > 0 then builtins.trim (builtins.elemAt parts 0) else null;
# Get value (everything after first '=') and trim whitespace
value = if builtins.length parts > 1 then builtins.trim (builtins.concatStringsSep "=" (builtins.drop 1 parts)) else "";
in { inherit key value; }
) (builtins.split "\n" (builtins.readFile (toString AiRepoEnv)))
) (entry: entry.key);
parts = builtins.split "=" trimmed;
in
if trimmed == "" || builtins.elemAt trimmed 0 == "#"
then acc
else builtins.add (builtins.elemAt parts 0) (builtins.elemAt parts 1) acc
) { } (builtins.split "\n" (builtins.readFile (toString AiRepoEnv)));
envVars = parseEnv;
in
{
# Install required packages
@@ -1676,13 +1670,13 @@ in
pkgs.zed
];
# Set environment variables for the user session
# Set environment variables
home.sessionVariables = {
OLLAMA_HOST = envVars.OLLAMA_HOST or "http://127.0.0.1:11434";
MISTRAL_API_KEY = envVars.MISTRAL_API_KEY or "";
};
# Configure Ollama as a user service
# Configure Ollama service
systemd.user.services.ollama = {
description = "Ollama service for local AI models";
wantedBy = [ "default.target" ];
@@ -1690,9 +1684,7 @@ in
serviceConfig = {
Type = "forking";
ExecStart = ''
${pkgs.ollama-vulkan}/bin/ollama serve
'';
ExecStart = "${pkgs.ollama-vulkan}/bin/ollama serve";
ExecStartPost = ''
sleep 5
${pkgs.ollama-vulkan}/bin/ollama pull codellama:70b
@@ -1702,28 +1694,19 @@ in
};
};
# Generate ZED's settings.json
# ZED configuration
home.file.".config/zed/settings.json".text = lib.mkForce (
builtins.toJSON (
(builtins.fromJSON (builtins.readFile (toString AiRepoConf)))
// {
mistral = {
apiKey = envVars.MISTRAL_API_KEY or "";
defaultModel = "mistral-pro";
};
ollama = {
endpoint = envVars.OLLAMA_HOST or "http://127.0.0.1:11434";
defaultModel = "codellama:70b";
};
}
)
builtins.toJSON {
mistral = {
apiKey = envVars.MISTRAL_API_KEY or "";
defaultModel = "mistral-pro";
};
ollama = {
endpoint = envVars.OLLAMA_HOST or "http://127.0.0.1:11434";
defaultModel = "codellama:70b";
};
}
);
# Filter out null entries from envVars
home.file.".config/ai-env-test".text = lib.mkForce (
builtins.toJSON envVars
);
# --- Usage Notes ---
# 1. Pulling Additional Models:
# To add more models later, run: