Adding ollama + ZED

This commit is contained in:
2026-02-24 08:58:47 +01:00
parent 2378cea1e7
commit 67cab4f26d
6 changed files with 234 additions and 9 deletions
+107
View File
@@ -289,6 +289,7 @@ The baseline package set is defined explicitly within the repository to ensure r
- nix-mode
- nixpkgs-fmt
- nushell
- ollama
- orderless
- papirus-icon-theme
- pulsar
@@ -516,6 +517,7 @@ The tree below shows the full repository layout, with the standardized internal
│   └── Catppuccin-Mocha-Standard-Blue-Dark
├── configuration
│   ├── apps
│   │   ├── ai.nix
│   │   ├── install_flatpaks.nix
│   │   └── install_packages.nix
│   ├── core
@@ -540,6 +542,7 @@ The tree below shows the full repository layout, with the standardized internal
│   └── hardware.nix
├── home
│   ├── apps
│   │   ├── ai.nix
│   │   ├── defaults-apps.nix
│   │   └── theme.nix
│   ├── default.nix
@@ -849,6 +852,7 @@ This section describes the main system configuration for the computers that I ha
{ pkgs, user, ... } :
{
imports = [
./apps/ai.nix
./apps/install_flatpaks.nix
./apps/install_packages.nix
./core/files.nix
@@ -884,6 +888,38 @@ This section describes the main system configuration for the computers that I ha
** Apps section
This section describes a way of installing packages, either through nixpkgs or flatpak. Which apps to install is decided in the files ./assets/conf/apps/packages.conf and flatpaks.conf
** ai.nix
This module enables and configures the Ollama system service on NixOS, including optional GPU acceleration (CUDA or ROCm).
It ensures the Ollama CLI is available system-wide for interacting with local models.
It automatically pulls and prepares selected coding models (e.g., Qwen2.5-Coder and StarCoder2) at system activation.
#+begin_src nix :tangle configuration/apps/ai.nix :noweb tangle :mkdirp yes
# NixOS module: run the Ollama LLM server as a systemd service and
# pre-pull a set of coding models at activation time.
{ config, lib, pkgs, ... }:
{
# Ollama server as a systemd service
services.ollama = {
enable = true;
# Optional: "cuda" (NVIDIA) or "rocm" (AMD).
# Omit, or set to null/false, for CPU-only inference.
acceleration = "cuda";
# acceleration = "rocm";
# Let NixOS pull the models automatically once the service is running
# (via `ollama pull ...`)
loadModels = [
"qwen2.5-coder:7b"
"qwen2.5-coder:32b"
"starcoder2:15b"
# Alternatives:
# "starcoder2:7b"
# "starcoder2:latest"
];
};
# Install the CLI tool (handy for 'ollama run', 'ollama list', etc.)
environment.systemPackages = [
pkgs.ollama
];
}
#+end_src
** =install_packages.nix=
#+begin_src nix :tangle configuration/apps/install_packages.nix :noweb tangle :mkdirp yes
{ config, lib, pkgs, flakeRoot, inputs, ... }:
@@ -1651,6 +1687,7 @@ This module will import all necessities.
{ pkgs, user, ... } :
{
imports = [
./apps/ai.nix
#./apps/default-apps.nix
./apps/theme.nix
./desktop/hypridle.nix
@@ -1991,6 +2028,76 @@ You'll notice the color values in multiple places outside this as well.
}
#+end_src
** ai.nix
This Home-Manager module installs and configures the Zed editor in a user environment.
It integrates Ollama as a local LLM provider within Zed's AI settings for code assistance.
It also generates a Continue configuration file pointing to the local Ollama instance for compatible editors.
#+begin_src nix :tangle home/apps/ai.nix :noweb tangle :mkdirp yes
# Home-Manager module: Zed editor with Ollama as a local LLM backend,
# plus a Continue config.json pointing at the same local Ollama instance.
{ config, lib, pkgs, ... }:
let
# Continue nowadays prefers config.yaml; config.json still exists
# but is marked "deprecated" in the docs. We deliberately write config.json here.
continueConfigJson = builtins.toJSON {
models = [
{
title = "Qwen2.5-Coder 7B";
provider = "ollama";
model = "qwen2.5-coder:7b";
apiBase = "http://localhost:11434";
}
{
title = "Qwen2.5-Coder 32B";
provider = "ollama";
model = "qwen2.5-coder:32b";
apiBase = "http://localhost:11434";
}
{
title = "StarCoder2 15B";
provider = "ollama";
model = "starcoder2:15b";
apiBase = "http://localhost:11434";
}
];
# Tab-autocomplete model (adjust to taste / available VRAM)
tabAutocompleteModel = {
title = "Qwen2.5-Coder 7B";
provider = "ollama";
model = "qwen2.5-coder:7b";
apiBase = "http://localhost:11434";
};
};
in
{
programs.zed-editor = {
enable = true;
# Zed extensions (languages/LSPs/etc.). "Continue" does not (yet) exist as a Zed extension.
# This is the official Home-Manager interface for Zed extensions.
extensions = [
"nix"
"toml"
"rust"
];
# Zed AI: Ollama as provider.
# Zed can auto-discover the models you have pulled with Ollama.
userSettings = {
language_models = {
ollama = {
api_url = "http://localhost:11434";
auto_discover = true;
# Optional: larger context for all Ollama models
# (presumably forwarded to Ollama as `num_ctx` -- TODO confirm against Zed's settings docs)
context_window = 8192;
};
};
};
};
# Write the Continue config.json (for Continue in VS Code / JetBrains).
# Path: ~/.config/continue/config.json
xdg.configFile."continue/config.json".text = continueConfigJson;
}
#+end_src
** Default-apps
This is where you can set defaults
#+begin_src nix :tangle home/apps/defaults-apps.nix :noweb tangle :mkdirp yes
+25
View File
@@ -0,0 +1,25 @@
# NixOS module: run the Ollama LLM server as a systemd service and
# pre-pull a set of coding models at activation time.
{ config, lib, pkgs, ... }:
{
# Ollama server as a systemd service
services.ollama = {
enable = true;
# Optional: "cuda" (NVIDIA) or "rocm" (AMD).
# Omit, or set to null/false, for CPU-only inference.
acceleration = "cuda";
# acceleration = "rocm";
# Let NixOS pull the models automatically once the service is running
# (via `ollama pull ...`)
loadModels = [
"qwen2.5-coder:7b"
"qwen2.5-coder:32b"
"starcoder2:15b"
# Alternatives:
# "starcoder2:7b"
# "starcoder2:latest"
];
};
# Install the CLI tool (handy for 'ollama run', 'ollama list', etc.)
environment.systemPackages = [
pkgs.ollama
];
}
+1
View File
@@ -1,6 +1,7 @@
{ pkgs, user, ... } :
{
imports = [
./apps/ai.nix
./apps/install_flatpaks.nix
./apps/install_packages.nix
./core/files.nix
+64
View File
@@ -20,3 +20,67 @@
catppuccin.gtk.icon.enable = true;
catppuccin.cursors.enable = true;
}
# NOTE(review): this module was appended to home/apps/theme.nix after the
# existing attrset above, producing two top-level expressions in one file,
# which is not valid Nix. The Org source's :tangle target for this block
# should most likely be home/apps/ai.nix -- confirm and re-tangle.
# Home-Manager module: Zed editor with Ollama as a local LLM backend,
# plus a Continue config.json pointing at the same local Ollama instance.
{ config, lib, pkgs, ... }:
let
# Continue nowadays prefers config.yaml; config.json still exists
# but is marked "deprecated" in the docs. We deliberately write config.json here.
continueConfigJson = builtins.toJSON {
models = [
{
title = "Qwen2.5-Coder 7B";
provider = "ollama";
model = "qwen2.5-coder:7b";
apiBase = "http://localhost:11434";
}
{
title = "Qwen2.5-Coder 32B";
provider = "ollama";
model = "qwen2.5-coder:32b";
apiBase = "http://localhost:11434";
}
{
title = "StarCoder2 15B";
provider = "ollama";
model = "starcoder2:15b";
apiBase = "http://localhost:11434";
}
];
# Tab-autocomplete model (adjust to taste / available VRAM)
tabAutocompleteModel = {
title = "Qwen2.5-Coder 7B";
provider = "ollama";
model = "qwen2.5-coder:7b";
apiBase = "http://localhost:11434";
};
};
in
{
programs.zed-editor = {
enable = true;
# Zed extensions (languages/LSPs/etc.). "Continue" does not (yet) exist as a Zed extension.
# This is the official Home-Manager interface for Zed extensions.
extensions = [
"nix"
"toml"
"rust"
];
# Zed AI: Ollama as provider.
# Zed can auto-discover the models you have pulled with Ollama.
userSettings = {
language_models = {
ollama = {
api_url = "http://localhost:11434";
auto_discover = true;
# Optional: larger context for all Ollama models
# (presumably forwarded to Ollama as `num_ctx` -- TODO confirm against Zed's settings docs)
context_window = 8192;
};
};
};
};
# Write the Continue config.json (for Continue in VS Code / JetBrains).
# Path: ~/.config/continue/config.json
xdg.configFile."continue/config.json".text = continueConfigJson;
}
+1
View File
@@ -1,6 +1,7 @@
{ pkgs, user, ... } :
{
imports = [
./apps/ai.nix
#./apps/default-apps.nix
./apps/theme.nix
./desktop/hypridle.nix
+36 -9
View File
@@ -1,14 +1,25 @@
find ~/Repos/nixos/henrovnix \
-type d -name ".*" -prune -o \
-type f -print0 \
| xargs -0 sed -i 's/<defaultUser>/youruser/g'
README.org
find ~/Repos/nixos/henrovnix \
-type d -name ".*" -prune -o \
-type f -print0 \
| xargs -0 sed -i 's/machine1/yourhostname/g'
emacs README.org --batch -f org-babel-tangle
nixos-generate-config
emacs --batch -l org -l ox-html README.org -f org-html-export-to-html --kill
emacs README.org --batch -f org-babel-tangle && emacs --batch -l org -l ox-html README.org -f org-html-export-to-html --kill
emacs README.org --batch -f org-babel-tangle
git add .
git commit -m "experiment: local change"
sudo nixos-rebuild test --flake .#your_hostname
emacs README.org --batch -f org-babel-tangle && emacs --batch -l org -l ox-html README.org -f org-html-export-to-html --kill
git add .
git commit -m "literate: structural update"
sudo nixos-rebuild test --flake .#your_hostname
- a NIXOS system installed with a user with sudo rights.
- an internet connection
- the folder henrovnix as you find it here
# Check if NetworkManager service is running
systemctl status NetworkManager
@@ -24,3 +35,19 @@ nmcli device wifi connect "SSID_NAME" password "YOUR_PASSWORD"
nmcli connection show --active
# Optional: show device status
nmcli device status
find ~/Repos/nixos/henrovnix \
-type d -name ".*" -prune -o \
-type f -print0 \
| xargs -0 sed -i 's/=<defaultUser>=/your_user/g'
find ~/Repos/nixos/henrovnix \
-type d -name ".*" -prune -o \
-type f -print0 \
| xargs -0 sed -i 's/machine1/your_hostname/g'
mv ./machines/machine1 ./machines/your_hostname
nixos-generate-config
sudo nixos-rebuild switch --flake .#your_hostname