New ollama.nix; the ai.nix approach did not work out. Added ZED editor as a separate app
This commit is contained in:
+31
-96
@@ -467,7 +467,7 @@ The tree below shows the full repository layout, with the standardized internal
|
||||
├── assets
|
||||
│ ├── conf
|
||||
│ │ ├── apps
|
||||
│ │ ├── ai.nix
|
||||
│ │ ├── ollama.nix
|
||||
│ │ │ ├── flatpaks.conf
|
||||
│ │ │ └── packages.conf
|
||||
│ │ ├── core
|
||||
@@ -549,7 +549,7 @@ The tree below shows the full repository layout, with the standardized internal
|
||||
│ └── hardware.nix
|
||||
├── home
|
||||
│ ├── apps
|
||||
│ │ ├── ai.nix
|
||||
│ │ ├── ollama.nix
|
||||
│ │ ├── defaults-apps.nix
|
||||
│ │ └── theme.nix
|
||||
│ ├── default.nix
|
||||
@@ -1605,7 +1605,7 @@ This module will import all necessities.
|
||||
{ pkgs, user, ... } :
|
||||
{
|
||||
imports = [
|
||||
./apps/ai.nix
|
||||
./apps/ollama.nix
|
||||
#./apps/default-apps.nix
|
||||
./apps/theme.nix
|
||||
./desktop/hypridle.nix
|
||||
@@ -1637,111 +1637,46 @@ This module will import all necessities.
|
||||
}
|
||||
#+end_src
|
||||
|
||||
** AI integrated with ZED
|
||||
This module enables and configures the Ollama system service on NixOS, including optional GPU acceleration (CUDA or ROCm).
|
||||
It ensures the Ollama CLI is available system-wide for interacting with local models.
|
||||
It automatically pulls and prepares selected coding models (e.g., Qwen2.5-Coder and StarCoder2) at system activation.
|
||||
|
||||
#+begin_src nix :tangle home/apps/ai.nix :noweb tangle :mkdirp yes
|
||||
{ config, pkgs, lib, flakeRoot, ... }:
|
||||
** Ollama
|
||||
This Home Manager Nix module (ollama.nix) installs the Ollama package and configures it by reading a simple key-value configuration file (ollama.conf) for settings like the server host and default model. It sets environment variables (OLLAMA_HOST and OLLAMA_DEFAULT_MODEL) for easy access in your shell or applications, with fallback defaults if the config file is missing or empty. Optionally, it also defines a user-level systemd service to automatically start the Ollama server on NixOS or systems with Home Manager’s systemd support enabled.
|
||||
|
||||
#+begin_src nix :tangle home/apps/ollama.nix :noweb tangle :mkdirp yes
|
||||
{ lib, config, pkgs, flakeRoot ? null, ... }:

# Home Manager module: local AI stack (Ollama server + ZED editor).
# Reads optional overrides from an untracked ai.env file, exports shell
# variables, runs a user-level `ollama serve` unit, and writes ZED settings.
let
  # Optional environment file with secrets/overrides (never commit it):
  #   OLLAMA_HOST=...
  #   OLLAMA_DEFAULT_MODEL=...
  #   MISTRAL_API_KEY=...
  # NOTE(review): assumes flakeRoot is passed in via extraSpecialArgs — confirm.
  aiEnvFile =
    if flakeRoot != null
    then flakeRoot + "/assets/conf/apps/ai/ai.env"
    else null;

  # Nix has no builtins.trim (or builtins.substr); strip surrounding
  # whitespace with a full-string POSIX ERE match instead.
  trim = s:
    let m = builtins.match "[[:space:]]*(.*[^[:space:]])?[[:space:]]*" s;
    in if m == null || builtins.head m == null then "" else builtins.head m;

  # Parse simple KEY=value / KEY="value" lines into an attrset.
  # Blank lines, comment lines (#...) and malformed lines are skipped.
  parseEnv = text:
    lib.foldl'
      (acc: line:
        let t = trim line; in
        if t == "" || lib.hasPrefix "#" t then
          acc
        else
          let kv = builtins.match "([^=]+)=\"?([^\"]*)\"?" t; in
          if kv == null then
            acc
          else
            # Attrset update (//) — builtins.add is numeric addition and
            # cannot insert keys into an attrset.
            acc // { ${trim (builtins.elemAt kv 0)} = builtins.elemAt kv 1; })
      { }
      (lib.splitString "\n" text);

  # Guard with pathExists so a missing env file degrades to defaults
  # instead of aborting evaluation.
  envVars =
    if aiEnvFile != null && builtins.pathExists aiEnvFile
    then parseEnv (builtins.readFile aiEnvFile)
    else { };

  ollamaHost = envVars.OLLAMA_HOST or "http://127.0.0.1:11434";
  ollamaDefaultModel = envVars.OLLAMA_DEFAULT_MODEL or "codellama:70b";

  # Single place to swap the GPU backend (see Usage Notes below).
  ollama = pkgs.ollama-vulkan;
in
{
  # Ollama (Vulkan backend) plus the ZED editor.
  home.packages = [
    ollama
    pkgs.zed-editor
  ];

  # Shell-visible configuration, with fallbacks when ai.env is absent.
  home.sessionVariables = {
    OLLAMA_HOST = ollamaHost;
    OLLAMA_DEFAULT_MODEL = ollamaDefaultModel;
    # Misspelled name kept for backward compatibility with scripts that
    # still read OLAMA_DEFAULT_MODEL.
    OLAMA_DEFAULT_MODEL = ollamaDefaultModel;
    MISTRAL_API_KEY = envVars.MISTRAL_API_KEY or "";
  };

  # User-level Ollama server (Home Manager unit syntax: Unit/Service/Install —
  # do not mix with NixOS-style description/serviceConfig/wantedBy keys).
  systemd.user.services.ollama = {
    Unit = {
      Description = "Ollama service for local AI models";
      After = [ "network.target" ];
      Wants = [ "network.target" ];
    };

    Service = {
      # `ollama serve` stays in the foreground, so "exec" is correct;
      # Type = "forking" would wait for a daemonization that never
      # happens and eventually time the unit out.
      Type = "exec";
      ExecStart = "${ollama}/bin/ollama serve";
      # Pre-fetch the default coding models once the server is up.
      # Exec lines are not shell scripts, so wrap in `sh -c`; note the
      # unit stays "activating" until these (large) pulls finish.
      ExecStartPost = "${pkgs.bash}/bin/sh -c 'sleep 5; ${ollama}/bin/ollama pull codellama:70b; ${ollama}/bin/ollama pull mixtral:8x7b'";
      Restart = "on-failure";
      # Environment belongs in [Service], not [Install].
      Environment = [ "OLLAMA_HOST=${ollamaHost}" ];
    };

    Install = {
      WantedBy = [ "default.target" ];
    };
  };

  # ZED configuration: point the assistant at the local Ollama endpoint and
  # (optionally) the Mistral cloud API.
  home.file.".config/zed/settings.json".text = lib.mkForce (
    builtins.toJSON {
      mistral = {
        apiKey = envVars.MISTRAL_API_KEY or "";
        defaultModel = "mistral-pro";
      };
      ollama = {
        endpoint = ollamaHost;
        defaultModel = ollamaDefaultModel;
      };
    }
  );

  # --- Usage Notes ---
  # 1. Pulling Additional Models:
  #    ollama pull <model-name>   (e.g. ollama pull llama3:8b)
  # 2. Switching GPU Backends (edit the `ollama` binding above):
  #    - NVIDIA: use `pkgs.ollama` (CUDA)
  #    - AMD:    use `pkgs.ollama-rocm` and ensure ROCm is installed
  # 3. ZED Plugin Setup:
  #    - Install the Ollama and Mistral plugins via the plugin marketplace
  #    - They pick up the endpoint/models written to settings.json above
  # 4. Security:
  #    - Never commit assets/conf/apps/ai/ai.env to version control
  #    - For extra security, encrypt ai.env using sops-nix or age
  # 5. Persistent Service — keep Ollama running after logout:
  #    loginctl enable-linger $(whoami)
}
|
||||
|
||||
#+end_src
|
||||
|
||||
** NCSway
|
||||
|
||||
Reference in New Issue
Block a user