115 lines
3.4 KiB
Nix
115 lines
3.4 KiB
Nix
{ config, pkgs, lib, ... }:
# NOTE(review): the original header also destructured `hello` and `flakeRoot`,
# neither of which was used; `hello` would abort evaluation unless supplied via
# specialArgs. Both were dropped — confirm nothing injects them on purpose.

let
  # Non-empty, non-comment lines of the env file, one "KEY=VALUE" per entry.
  envLines = lib.filter
    (line: line != "" && !lib.hasPrefix "#" line)
    (lib.splitString "\n" (builtins.readFile ./assets/conf/apps/ai.env));

  # Parse "KEY=VALUE" lines into an attrset { KEY = "VALUE"; ... }.
  # The original used `builtins.splitStrings` (a typo) and `lib.genAttrs` over
  # the whole line, which keyed the set by "KEY=VALUE" so lookups like
  # `envVars.OLLAMA_HOST` could never resolve. Values may themselves contain
  # `=`: everything after the first `=` is kept verbatim.
  envVars = lib.listToAttrs (map
    (line:
      let parts = lib.splitString "=" line;
      in lib.nameValuePair
        (builtins.head parts)
        (lib.concatStringsSep "=" (builtins.tail parts)))
    envLines);
in
{
  # Install Zed and Ollama.
  home.packages = [
    pkgs.ollama-vulkan # For Vulkan (CPU/AMD). For NVIDIA: pkgs.ollama. For AMD ROCm: pkgs.ollama-rocm
    pkgs.zed # NOTE(review): the Zed editor package is `pkgs.zed-editor` in current nixpkgs — confirm which attr this channel provides.
  ];

  # Environment variables for Zed and Ollama, with defaults when the env file
  # does not define them.
  home.sessionVariables = {
    OLLAMA_HOST = envVars.OLLAMA_HOST or "http://127.0.0.1:11434";
    MISTRAL_API_KEY = envVars.MISTRAL_API_KEY or "";
  };

  # Run Ollama as a user service. Home Manager's `systemd.user.services.<name>`
  # takes raw unit sections (Unit/Service/Install), not the NixOS-style
  # description/wantedBy/serviceConfig attrs the original used.
  systemd.user.services.ollama = {
    Unit = {
      Description = "Ollama service for local AI models";
      After = [ "network.target" ];
    };

    Service = {
      # `ollama serve` stays in the foreground, so this is a simple service;
      # the original `Type = "forking"` would make systemd wait for a fork
      # that never happens.
      Type = "simple";
      ExecStart = "${pkgs.ollama-vulkan}/bin/ollama serve";
      # systemd exec lines must be single commands; express the original
      # multi-line script as one ExecStartPost entry per command. The sleep
      # gives the server time to start accepting connections.
      ExecStartPost = [
        "${pkgs.coreutils}/bin/sleep 5"
        "${pkgs.ollama-vulkan}/bin/ollama pull codellama:70b"
        "${pkgs.ollama-vulkan}/bin/ollama pull mixtral:8x7b"
      ];
      Restart = "on-failure";
    };

    Install = {
      WantedBy = [ "default.target" ];
    };
  };

  # Configure Zed from the external config fragment plus substituted secrets.
  # NOTE(review): `+` concatenates the two strings; the original used `//`,
  # which only merges attrsets and fails at eval time on strings. For the
  # result to be valid JSON, ai.conf must be a fragment that composes with the
  # trailing object — confirm its contents.
  home.file.".config/zed/settings.json".text = lib.mkForce (
    builtins.readFile ./assets/conf/apps/ai.conf
    + ''
      {
        "mistral": {
          "apiKey": "${envVars.MISTRAL_API_KEY or ""}"
        },
        "ollama": {
          "endpoint": "${envVars.OLLAMA_HOST or "http://127.0.0.1:11434"}"
        }
      }
    ''
  );
}
# --- NOTE(review): a second, independent module begins below. Two top-level
# expressions cannot live in one .nix file — move it into its own file and
# add it to the Home Manager `imports` list. ---
{ config, lib, pkgs, ... }:

let
  # Base URL of the local Ollama server, shared by every model entry below.
  ollamaApiBase = "http://localhost:11434";

  # Build one Continue model entry backed by the local Ollama server.
  mkOllamaModel = title: model: {
    inherit title model;
    provider = "ollama";
    apiBase = ollamaApiBase;
  };

  # Continue nowadays prefers config.yaml; config.json still exists but is
  # deprecated in the docs. We deliberately write config.json here because
  # that is what was asked for.
  continueConfigJson = builtins.toJSON {
    models = [
      (mkOllamaModel "Qwen2.5-Coder 7B" "qwen2.5-coder:7b")
      (mkOllamaModel "Qwen2.5-Coder 32B" "qwen2.5-coder:32b")
      (mkOllamaModel "StarCoder2 15B" "starcoder2:15b")
    ];
    # Tab-autocomplete model (tune to taste/VRAM).
    tabAutocompleteModel = mkOllamaModel "Qwen2.5-Coder 7B" "qwen2.5-coder:7b";
  };
in
{
  programs.zed-editor = {
    enable = true;

    # Zed extensions (language servers etc.). "Continue" does not (yet) exist
    # as a Zed extension. This is the official Home Manager interface for Zed
    # extensions.
    extensions = [
      "nix"
      "toml"
      "rust"
      "org-mode"
    ];

    # Zed AI: Ollama as provider. Zed can auto-discover the models you have
    # pulled with Ollama.
    userSettings = {
      language_models = {
        ollama = {
          api_url = ollamaApiBase;
          auto_discover = true;
          # Optional: set a larger context for all Ollama models
          # (Zed sends this to Ollama as `num_ctx`).
          context_window = 8192;
        };
      };
    };
  };

  # Install the Continue config.json (for Continue in VS Code / JetBrains).
  # Path: ~/.config/continue/config.json
  xdg.configFile."continue/config.json".text = continueConfigJson;
}