Adding ollama + ZED
This commit is contained in:
@@ -289,6 +289,7 @@ The baseline package set is defined explicitly within the repository to ensure r
|
||||
- nix-mode
|
||||
- nixpkgs-fmt
|
||||
- nushell
|
||||
- ollama
|
||||
- orderless
|
||||
- papirus-icon-theme
|
||||
- pulsar
|
||||
@@ -516,6 +517,7 @@ The tree below shows the full repository layout, with the standardized internal
|
||||
│ └── Catppuccin-Mocha-Standard-Blue-Dark
|
||||
├── configuration
|
||||
│ ├── apps
|
||||
│ │ ├── ai.nix
|
||||
│ │ ├── install_flatpaks.nix
|
||||
│ │ └── install_packages.nix
|
||||
│ ├── core
|
||||
@@ -540,6 +542,7 @@ The tree below shows the full repository layout, with the standardized internal
|
||||
│ └── hardware.nix
|
||||
├── home
|
||||
│ ├── apps
|
||||
│ │ ├── ai.nix
|
||||
│ │ ├── defaults-apps.nix
|
||||
│ │ └── theme.nix
|
||||
│ ├── default.nix
|
||||
@@ -849,6 +852,7 @@ This section describes the main system configuration for the computers that I ha
|
||||
{ pkgs, user, ... } :
|
||||
{
|
||||
imports = [
|
||||
./apps/ai.nix
|
||||
./apps/install_flatpaks.nix
|
||||
./apps/install_packages.nix
|
||||
./core/files.nix
|
||||
@@ -884,6 +888,38 @@ This section describes the main system configuration for the computers that I ha
|
||||
** Apps section
|
||||
This section describes a way of installing packages, either through nixpkgs or flatpak. Which apps to install is decided in the files ./assets/conf/apps/packages.conf and ./assets/conf/apps/flatpaks.conf.
|
||||
|
||||
** ai.nix
|
||||
This module enables and configures the Ollama system service on NixOS, including optional GPU acceleration (CUDA or ROCm).
|
||||
It ensures the Ollama CLI is available system-wide for interacting with local models.
|
||||
It automatically pulls and prepares selected coding models (e.g., Qwen2.5-Coder and StarCoder2) at system activation.
|
||||
#+begin_src nix :tangle configuration/apps/ai.nix :noweb tangle :mkdirp yes
|
||||
{ config, lib, pkgs, ... }:
{
  # Run the Ollama server as a systemd service.
  services.ollama = {
    enable = true;

    # Optional: "cuda" (NVIDIA) or "rocm" (AMD).
    # Omit, or set to null/false, for a CPU-only setup.
    acceleration = "cuda";
    # acceleration = "rocm";

    # Let NixOS pull the models automatically once the service is running
    # (equivalent to `ollama pull ...`).
    loadModels = [
      "qwen2.5-coder:7b"
      "qwen2.5-coder:32b"
      "starcoder2:15b"
      # Alternatives:
      # "starcoder2:7b"
      # "starcoder2:latest"
    ];
  };

  # Install the CLI tool (handy for 'ollama run', 'ollama list', etc.).
  environment.systemPackages = [
    pkgs.ollama
  ];
}
|
||||
#+end_src
|
||||
|
||||
** =install_packages.nix=
|
||||
#+begin_src nix :tangle configuration/apps/install_packages.nix :noweb tangle :mkdirp yes
|
||||
{ config, lib, pkgs, flakeRoot, inputs, ... }:
|
||||
@@ -1651,6 +1687,7 @@ This module will import all necessities.
|
||||
{ pkgs, user, ... } :
|
||||
{
|
||||
imports = [
|
||||
./apps/ai.nix
|
||||
#./apps/default-apps.nix
|
||||
./apps/theme.nix
|
||||
./desktop/hypridle.nix
|
||||
@@ -1991,6 +2028,76 @@ You'll notice the color values in multiple places outside this as well.
|
||||
}
|
||||
#+end_src
|
||||
|
||||
** ai.nix
|
||||
This Home-Manager module installs and configures the Zed editor in a user environment.
|
||||
It integrates Ollama as a local LLM provider within Zed’s AI settings for code assistance.
|
||||
It also generates a Continue configuration file pointing to the local Ollama instance for compatible editors.
|
||||
#+begin_src nix :tangle home/apps/ai.nix :noweb tangle :mkdirp yes
|
||||
{ config, lib, pkgs, ... }:

let
  # Continue nowadays prefers config.yaml; config.json still exists but is
  # "deprecated" in the docs. We deliberately write config.json here.
  continueConfigJson = builtins.toJSON {
    models = [
      {
        title = "Qwen2.5-Coder 7B";
        provider = "ollama";
        model = "qwen2.5-coder:7b";
        apiBase = "http://localhost:11434";
      }
      {
        title = "Qwen2.5-Coder 32B";
        provider = "ollama";
        model = "qwen2.5-coder:32b";
        apiBase = "http://localhost:11434";
      }
      {
        title = "StarCoder2 15B";
        provider = "ollama";
        model = "starcoder2:15b";
        apiBase = "http://localhost:11434";
      }
    ];

    # Tab-autocomplete model (adjust to taste/available VRAM).
    tabAutocompleteModel = {
      title = "Qwen2.5-Coder 7B";
      provider = "ollama";
      model = "qwen2.5-coder:7b";
      apiBase = "http://localhost:11434";
    };
  };
in
{
  programs.zed-editor = {
    enable = true;

    # Zed extensions (languages/LSPs/etc). "Continue" does not (yet) exist
    # as a Zed extension. This is the official Home-Manager interface for
    # Zed extensions.
    extensions = [
      "nix"
      "toml"
      "rust"
    ];

    # Zed AI: Ollama as the language-model provider.
    # Zed can auto-discover the models you have pulled with Ollama.
    userSettings = {
      language_models = {
        ollama = {
          api_url = "http://localhost:11434";
          auto_discover = true;

          # Optional: set a larger context window for all Ollama models
          # (Zed passes this to Ollama as `num_ctx`).
          context_window = 8192;
        };
      };
    };
  };

  # Write the Continue config.json (for Continue in VS Code / JetBrains).
  # Path: ~/.config/continue/config.json
  xdg.configFile."continue/config.json".text = continueConfigJson;
}
|
||||
#+end_src
|
||||
|
||||
** Default-apps
|
||||
This is where you can set defaults
|
||||
#+begin_src nix :tangle home/apps/defaults-apps.nix :noweb tangle :mkdirp yes
|
||||
|
||||
Reference in New Issue
Block a user