New Ollama.nix; the ai.nix approach did not work out. Added the Zed editor as a separate app.
This commit is contained in:
@@ -0,0 +1,36 @@
|
||||
{ lib, config, pkgs, ... }:

let
  # Path to the config file (relative to your flake or Home Manager root).
  ollamaConfPath = ./assets/conf/apps/ai/ollama/ollama.conf;

  # Raw contents of the config file (shell-variable KEY="value" format).
  ollamaConf = builtins.readFile ollamaConfPath;

  # Extract the value of KEY="value" from the config file.
  # Returns the value string, or null when the key is absent.
  #
  # NOTE: Nix regexes are POSIX ERE and `builtins.match` anchors the
  # pattern against the WHOLE string, so Perl-style flags like (?m) are
  # not supported. Instead, we split the file into lines and match each
  # line individually. (The previous version concatenated the pattern
  # with the file contents and matched against "", which never matched.)
  extractValue = key:
    let
      confLines = lib.splitString "\n" ollamaConf;
      matches = builtins.filter (m: m != null)
        (map (line: builtins.match "${key}=\"([^\"]+)\"" line) confLines);
    in
      if matches == [ ] then null else builtins.head (builtins.head matches);

  # Config-derived settings; null when not present in ollama.conf.
  ollamaHost = extractValue "OLLAMA_HOST";
  ollamaDefaultModel = extractValue "OLLAMA_DEFAULT_MODEL";

  # Resolve host once with a sane default, reused below.
  resolvedHost =
    if ollamaHost != null then ollamaHost else "http://127.0.0.1:11434";
in
{
  # Install Ollama
  home.packages = with pkgs; [
    ollama
  ];

  # Configure Ollama environment variables.
  # `builtins.match` yields null (not "") on no match, so compare to null.
  home.sessionVariables = {
    OLLAMA_HOST = resolvedHost;
    # Fixed typo: was OLAMA_DEFAULT_MODEL (single L).
    OLLAMA_DEFAULT_MODEL =
      if ollamaDefaultModel != null then ollamaDefaultModel else "codellama:70b";
  };

  # Optional: Start Ollama service (if using NixOS)
  systemd.user.services.ollama = {
    description = "Ollama service";
    # User units cannot hook into the system-level multi-user.target;
    # default.target is the user session's equivalent.
    wantedBy = [ "default.target" ];
    serviceConfig = {
      ExecStart = "${pkgs.ollama}/bin/ollama serve";
      Restart = "on-failure";
      Environment = [
        "OLLAMA_HOST=${resolvedHost}"
      ];
    };
  };
}
|
||||
Reference in New Issue
Block a user