Removed obsolete ai.nix, added comments in code
@@ -1643,56 +1643,109 @@ It ensures the Ollama CLI is available system-wide for interacting with local models.
It automatically pulls selected coding and chat models (CodeLlama and Mixtral by default; Qwen2.5-Coder and StarCoder2 are offered as commented-out extras) when the Ollama user service starts.

#+begin_src nix :tangle home/apps/ai.nix :noweb tangle :mkdirp yes
-{ config, pkgs, lib, flakeRoot, ... }:
+{ config, pkgs, lib, ... }:

let
-  # Read environment variables from file
+  # Read environment variables (OLLAMA_HOST and MISTRAL_API_KEY) from a local file.
+  # This keeps sensitive values out of the Nix store and version control.
+  # File format: one KEY=value per line (e.g., MISTRAL_API_KEY=yourkey)
  envVars = builtins.listToAttrs (map
    (line:
      let kv = lib.splitString "=" line; in
      lib.nameValuePair (builtins.head kv) (lib.concatStringsSep "=" (builtins.tail kv)))
    (lib.filter (l: lib.hasInfix "=" l)
      (lib.splitString "\n" (builtins.readFile ./assets/conf/apps/ai.env))));
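  # For example (hypothetical value), a file line "MISTRAL_API_KEY=abc123"
  # parses to { MISTRAL_API_KEY = "abc123"; }, so the `envVars.X or default`
  # lookups below are plain attribute accesses with a fallback.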
in
{
-  # Install ZED and Ollama
+  # --- Packages ---
+  # Install the ZED editor and Ollama with Vulkan support (for CPU/AMD).
+  # For NVIDIA GPUs, replace `ollama-vulkan` with `ollama`.
+  # For AMD ROCm, use `ollama-rocm` and ensure ROCm is installed.
  home.packages = [
-    pkgs.ollama-vulkan # For Vulkan (CPU/AMD). For NVIDIA: pkgs.ollama. For AMD ROCm: pkgs.ollama-rocm
+    pkgs.ollama-vulkan
    pkgs.zed
  ];

-  # Environment variables for ZED and Ollama
+  # --- Environment Variables ---
+  # Set OLLAMA_HOST and MISTRAL_API_KEY for ZED and other user applications.
+  # Values are read from ./assets/conf/apps/ai.env.
  home.sessionVariables = {
-    OLLAMA_HOST = envVars.OLLAMA_HOST or "http://127.0.0.1:11434";
-    MISTRAL_API_KEY = envVars.MISTRAL_API_KEY or "";
+    OLLAMA_HOST = envVars.OLLAMA_HOST or "http://127.0.0.1:11434"; # Default Ollama endpoint
+    MISTRAL_API_KEY = envVars.MISTRAL_API_KEY or ""; # Mistral API key (required for cloud models)
  };
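
  # Note: sessionVariables take effect at the next login (or after sourcing
  # ~/.nix-profile/etc/profile.d/hm-session-vars.sh in an existing shell).
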
-  # Run Ollama as a user service
+  # --- Ollama User Service ---
+  # Configure Ollama to run as a user service (starts on login).
+  # This avoids root privileges and allows per-user model management.
+  # (Home Manager user units use the capitalized Unit/Service/Install sections.)
  systemd.user.services.ollama = {
    Unit = {
      Description = "Ollama service for local AI models";
-      After = [ "network.target" ];
+      After = [ "network.target" ]; # Ensure network is ready
    };

    Install = {
-      WantedBy = [ "default.target" ];
+      WantedBy = [ "default.target" ]; # Start with user session
    };

    Service = {
-      Type = "exec";
+      Type = "exec"; # `ollama serve` runs in the foreground ("forking" would hang startup)

      # Start Ollama server
      ExecStart = "${pkgs.ollama-vulkan}/bin/ollama serve";

      # Pull default models after server starts
      # (ExecStartPost takes a single command, so the steps live in a small script)
      ExecStartPost = toString (pkgs.writeShellScript "ollama-pull-models" ''
-        sleep 5
-        ${pkgs.ollama-vulkan}/bin/ollama pull codellama:70b
-        ${pkgs.ollama-vulkan}/bin/ollama pull mixtral:8x7b
+        sleep 5 # Wait for server to initialize
+
+        # Pull coding and chat models at startup
+        ${pkgs.ollama-vulkan}/bin/ollama pull codellama:70b # Best for coding tasks
+        ${pkgs.ollama-vulkan}/bin/ollama pull mixtral:8x7b # Best for chat/conversation
+
+        # Uncomment to pull additional models:
+        # ${pkgs.ollama-vulkan}/bin/ollama pull llama3:8b # General-purpose model
+        # ${pkgs.ollama-vulkan}/bin/ollama pull qwen2.5-coder:7b # Multilingual coding
+        # ${pkgs.ollama-vulkan}/bin/ollama pull starcoder2:15b # Alternative for code
      '');

-      Restart = "on-failure";
+      Restart = "on-failure"; # Restart if Ollama crashes
    };
  };

-  # Configure ZED using the external config file with substituted variables
+  # --- ZED Configuration ---
+  # Generate ZED's settings.json with substituted API keys and endpoints.
+  # Base config is read from ./assets/conf/apps/ai.conf, with variables injected.
+  # (Assumes ai.conf is a JSON fragment that the object below completes;
+  # Zed tolerates //-style comments in settings.json.)
  home.file.".config/zed/settings.json".text = lib.mkForce (
    builtins.readFile ./assets/conf/apps/ai.conf
    + ''
      {
        "mistral": {
-          "apiKey": "${envVars.MISTRAL_API_KEY or ""}"
+          "apiKey": "${envVars.MISTRAL_API_KEY or ""}", // Inject Mistral API key
+          "defaultModel": "mistral-pro" // Default Mistral model
        },
        "ollama": {
-          "endpoint": "${envVars.OLLAMA_HOST or ""}"
+          "endpoint": "${envVars.OLLAMA_HOST or ""}", // Inject Ollama endpoint
+          "defaultModel": "codellama:70b" // Default Ollama model for coding
        }
      }
    ''
  );

+  # --- Usage Notes ---
+  # 1. Pulling Additional Models:
+  #    To add more models later, run:
+  #      ollama pull <model-name>
+  #    Example: ollama pull llama3:8b
+
+  # 2. Switching GPU Backends:
+  #    - For NVIDIA: Replace all `ollama-vulkan` with `ollama` (uses CUDA)
+  #    - For AMD: Use `ollama-rocm` and ensure ROCm is installed
+
+  # 3. ZED Plugin Setup:
+  #    - Install the Ollama and Mistral plugins in ZED via the plugin marketplace
+  #    - The Ollama plugin will use the local models pulled above
+  #    - The Mistral plugin will use the MISTRAL_API_KEY for cloud access
+
+  # 4. Security:
+  #    - Never commit ./assets/conf/apps/ai.env to version control
+  #    - For extra security, encrypt ai.env using sops-nix or age
+
+  # 5. Persistent Service:
+  #    To keep Ollama running after logout, enable lingering:
+  #      loginctl enable-linger $(whoami)
}

#+end_src
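
A quick sanity check after a =home-manager switch= (plain systemd and Ollama commands, nothing specific to this module):

#+begin_example
systemctl --user status ollama   # the user service defined above should be active
ollama list                      # models pulled by ExecStartPost show up here
#+end_example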

** NCSway
@@ -2308,76 +2361,6 @@ You'll notice the color values in multiple places outside this as well.
}
#+end_src

-** ai.nix
-This Home-Manager module installs and configures the Zed editor in a user environment.
-It integrates Ollama as a local LLM provider within Zed’s AI settings for code assistance.
-It also generates a Continue configuration file pointing to the local Ollama instance for compatible editors.
-#+begin_src nix :tangle home/apps/ai.nix :noweb tangle :mkdirp yes
-{ config, lib, pkgs, ... }:
-let
-  # Continue nowadays prefers config.yaml; config.json still exists
-  # but is "deprecated" in the docs. We deliberately write config.json here because you asked for it.
-  continueConfigJson = builtins.toJSON {
-    models = [
-      {
-        title = "Qwen2.5-Coder 7B";
-        provider = "ollama";
-        model = "qwen2.5-coder:7b";
-        apiBase = "http://localhost:11434";
-      }
-      {
-        title = "Qwen2.5-Coder 32B";
-        provider = "ollama";
-        model = "qwen2.5-coder:32b";
-        apiBase = "http://localhost:11434";
-      }
-      {
-        title = "StarCoder2 15B";
-        provider = "ollama";
-        model = "starcoder2:15b";
-        apiBase = "http://localhost:11434";
-      }
-    ];
-    # Tab-autocomplete model (adjust to taste / available VRAM)
-    tabAutocompleteModel = {
-      title = "Qwen2.5-Coder 7B";
-      provider = "ollama";
-      model = "qwen2.5-coder:7b";
-      apiBase = "http://localhost:11434";
-    };
-  };
-in
-{
-  programs.zed-editor = {
-    enable = true;
-    # Zed extensions (language servers etc.). "Continue" does not (yet) exist as a Zed extension.
-    # This is the official Home Manager interface for Zed extensions.
-    extensions = [
-      "nix"
-      "toml"
-      "rust"
-      "org-mode"
-    ];
-    # Zed AI: Ollama as the provider.
-    # Zed can auto-discover the models you have pulled with Ollama.
-    userSettings = {
-      language_models = {
-        ollama = {
-          api_url = "http://localhost:11434";
-          auto_discover = true;
-          # Optional: set a larger context window for all Ollama models
-          # (Zed sends this to Ollama as `num_ctx`)
-          context_window = 8192;
-        };
-      };
-    };
-  };
-  # Write Continue's config.json (for Continue in VS Code / JetBrains)
-  # Path: ~/.config/continue/config.json
-  xdg.configFile."continue/config.json".text = continueConfigJson;
-}
-#+end_src
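
For reference, the removed module rendered Continue's =config.json= as the following (pretty-printed here; =builtins.toJSON= emits a single line):

#+begin_example
{
  "models": [
    { "title": "Qwen2.5-Coder 7B", "provider": "ollama", "model": "qwen2.5-coder:7b", "apiBase": "http://localhost:11434" },
    { "title": "Qwen2.5-Coder 32B", "provider": "ollama", "model": "qwen2.5-coder:32b", "apiBase": "http://localhost:11434" },
    { "title": "StarCoder2 15B", "provider": "ollama", "model": "starcoder2:15b", "apiBase": "http://localhost:11434" }
  ],
  "tabAutocompleteModel": { "title": "Qwen2.5-Coder 7B", "provider": "ollama", "model": "qwen2.5-coder:7b", "apiBase": "http://localhost:11434" }
}
#+end_example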

** Default-apps
This is where you can set default applications.
#+begin_src nix :tangle home/apps/defaults-apps.nix :noweb tangle :mkdirp yes