From 63944261b11e948483f6f1c08122c3238de7a4a3 Mon Sep 17 00:00:00 2001 From: Henro Veijer Date: Tue, 24 Feb 2026 09:19:39 +0100 Subject: [PATCH] Adding Ollama + models + ZED --- henrovnix_ok/README.org | 3 +- henrovnix_ok/configuration/apps/ai.nix | 25 ++++++++++ henrovnix_ok/home/apps/ai.nix | 63 +++++++++++++++++++++++++ henrovnix_ok/home/apps/theme.nix | 64 -------------------------- 4 files changed, 90 insertions(+), 65 deletions(-) create mode 100644 henrovnix_ok/configuration/apps/ai.nix create mode 100644 henrovnix_ok/home/apps/ai.nix diff --git a/henrovnix_ok/README.org b/henrovnix_ok/README.org index 5396405af..a1f5c9171 100755 --- a/henrovnix_ok/README.org +++ b/henrovnix_ok/README.org @@ -891,6 +891,7 @@ This section describes a way of installing packages, either through nixpkgs orr This module enables and configures the Ollama system service on NixOS, including optional GPU acceleration (CUDA or ROCm). It ensures the Ollama CLI is available system-wide for interacting with local models. It automatically pulls and prepares selected coding models (e.g., Qwen2.5-Coder and StarCoder2) at system activation. + #+begin_src nix :tangle configuration/apps/ai.nix :noweb tangle :mkdirp yes { config, lib, pkgs, ... }: { @@ -2029,7 +2030,7 @@ You'll notice the color values in multiple places outside this as well. This Home-Manager module installs and configures the Zed editor in a user environment. It integrates Ollama as a local LLM provider within Zed’s AI settings for code assistance. It also generates a Continue configuration file pointing to the local Ollama instance for compatible editors. -#+begin_src nix :tangle home/apps/theme.nix :noweb tangle :mkdirp yes. +#+begin_src nix :tangle home/apps/ai.nix :noweb tangle :mkdirp yes { config, lib, pkgs, ... 
}: let # Continue gebruikt tegenwoordig bij voorkeur config.yaml; config.json bestaat nog diff --git a/henrovnix_ok/configuration/apps/ai.nix b/henrovnix_ok/configuration/apps/ai.nix new file mode 100644 index 000000000..7c21e09ff --- /dev/null +++ b/henrovnix_ok/configuration/apps/ai.nix @@ -0,0 +1,25 @@ +{ config, lib, pkgs, ... }: +{ + # Ollama server als systemd service + services.ollama = { + enable = true; + # Optioneel: "cuda" (NVIDIA) of "rocm" (AMD) + # Laat weg of zet op null/false als je CPU-only wilt. + acceleration = "cuda"; + # acceleration = "rocm"; + # Laat NixOS de modellen automatisch pullen zodra de service draait + # (via `ollama pull ...`) + loadModels = [ + "qwen2.5-coder:7b" + "qwen2.5-coder:32b" + "starcoder2:15b" + # Alternatief: + # "starcoder2:7b" + # "starcoder2:latest" + ]; + }; + # Installeer de CLI tool (handig voor 'ollama run', 'ollama list', etc.) + environment.systemPackages = [ + pkgs.ollama + ]; +} diff --git a/henrovnix_ok/home/apps/ai.nix b/henrovnix_ok/home/apps/ai.nix new file mode 100644 index 000000000..6bd69c69c --- /dev/null +++ b/henrovnix_ok/home/apps/ai.nix @@ -0,0 +1,63 @@ +{ config, lib, pkgs, ... }: +let + # Continue gebruikt tegenwoordig bij voorkeur config.yaml; config.json bestaat nog + # maar is “deprecated” in de docs. We schrijven hier bewust config.json omdat jij dat vroeg. 
+ continueConfigJson = builtins.toJSON { + models = [ + { + title = "Qwen2.5-Coder 7B"; + provider = "ollama"; + model = "qwen2.5-coder:7b"; + apiBase = "http://localhost:11434"; + } + { + title = "Qwen2.5-Coder 32B"; + provider = "ollama"; + model = "qwen2.5-coder:32b"; + apiBase = "http://localhost:11434"; + } + { + title = "StarCoder2 15B"; + provider = "ollama"; + model = "starcoder2:15b"; + apiBase = "http://localhost:11434"; + } + ]; + # Tab-autocomplete model (pas aan naar smaak/VRAM) + tabAutocompleteModel = { + title = "Qwen2.5-Coder 7B"; + provider = "ollama"; + model = "qwen2.5-coder:7b"; + apiBase = "http://localhost:11434"; + }; + }; +in +{ + programs.zed-editor = { + enable = true; + # Zed-extensies (taal/LS/etc). "Continue" bestaat (nog) niet als Zed-extensie. + # Dit is de officiële HM interface voor Zed extensions. + extensions = [ + "nix" + "toml" + "rust" + ]; + # Zed AI: Ollama als provider + # Zed kan modellen auto-discoveren die jij met Ollama gepulld hebt. + userSettings = { + language_models = { + ollama = { + api_url = "http://localhost:11434"; + auto_discover = true; + + # Optioneel: zet een grotere context voor alle Ollama modellen + # (Zed stuurt dit als `num_ctx` naar Ollama) + context_window = 8192; + }; + }; + }; + }; + # Continue config.json neerzetten (voor Continue in VS Code / JetBrains) + # Pad: ~/.config/continue/config.json + xdg.configFile."continue/config.json".text = continueConfigJson; +} diff --git a/henrovnix_ok/home/apps/theme.nix b/henrovnix_ok/home/apps/theme.nix index 7c5a9def3..eb011c005 100644 --- a/henrovnix_ok/home/apps/theme.nix +++ b/henrovnix_ok/home/apps/theme.nix @@ -20,67 +20,3 @@ catppuccin.gtk.icon.enable = true; catppuccin.cursors.enable = true; } - -{ config, lib, pkgs, ... }: -let - # Continue gebruikt tegenwoordig bij voorkeur config.yaml; config.json bestaat nog - # maar is “deprecated” in de docs. We schrijven hier bewust config.json omdat jij dat vroeg. 
- continueConfigJson = builtins.toJSON { - models = [ - { - title = "Qwen2.5-Coder 7B"; - provider = "ollama"; - model = "qwen2.5-coder:7b"; - apiBase = "http://localhost:11434"; - } - { - title = "Qwen2.5-Coder 32B"; - provider = "ollama"; - model = "qwen2.5-coder:32b"; - apiBase = "http://localhost:11434"; - } - { - title = "StarCoder2 15B"; - provider = "ollama"; - model = "starcoder2:15b"; - apiBase = "http://localhost:11434"; - } - ]; - # Tab-autocomplete model (pas aan naar smaak/VRAM) - tabAutocompleteModel = { - title = "Qwen2.5-Coder 7B"; - provider = "ollama"; - model = "qwen2.5-coder:7b"; - apiBase = "http://localhost:11434"; - }; - }; -in -{ - programs.zed-editor = { - enable = true; - # Zed-extensies (taal/LS/etc). "Continue" bestaat (nog) niet als Zed-extensie. - # Dit is de officiële HM interface voor Zed extensions. - extensions = [ - "nix" - "toml" - "rust" - ]; - # Zed AI: Ollama als provider - # Zed kan modellen auto-discoveren die jij met Ollama gepulld hebt. - userSettings = { - language_models = { - ollama = { - api_url = "http://localhost:11434"; - auto_discover = true; - - # Optioneel: zet een grotere context voor alle Ollama modellen - # (Zed stuurt dit als `num_ctx` naar Ollama) - context_window = 8192; - }; - }; - }; - }; - # Continue config.json neerzetten (voor Continue in VS Code / JetBrains) - # Pad: ~/.config/continue/config.json - xdg.configFile."continue/config.json".text = continueConfigJson; -}