New ollama.nix; the previous ai.nix did not work out. Added the Zed editor as a separate app

This commit is contained in:
2026-02-28 15:17:53 +01:00
parent e94d4551ed
commit cfb3a8c43b
4 changed files with 309 additions and 322 deletions
+303 -307
View File
File diff suppressed because it is too large Load Diff
+2 -6
View File
@@ -1645,11 +1645,7 @@ This Home Manager Nix module (ai.nix) installs the Ollama package and configures
let
ollamaConfPath = flakeRoot + "/assets/conf/apps/ai/ollama/ollama.conf";
envVars = {
OLLAMA_HOST = "http://127.0.0.1:11434";
OLLAMA_DEFAULT_MODEL = "codellama:70b";
OLLAMA_MODELS_TO_PULL = "codellama:70b,llama3.1:70b";
};
envVars = builtins.fromJSON (builtins.readFile ollamaConfPath);
in
{
@@ -1657,10 +1653,10 @@ in
enable = true;
package = pkgs.ollama;
environmentVariables = envVars;
wantedBy = [ "multi-user.target" ];
};
}
#+end_src
** Powermenu
@@ -1,5 +1,5 @@
{
"OLLAMA_HOST"="http://127.0.0.1:11434",
"OLAMA_DEFAULT_MODEL"="codellama:70b",
"OLLAMA_ADDITIONAL_MODELS"="llama3.1:70b"
"OLLAMA_HOST": "http://127.0.0.1:11434",
"OLLAMA_DEFAULT_MODEL": "codellama:70b",
"OLLAMA_ADDITIONAL_MODELS": "llama3.1:70b"
}
+1 -6
View File
@@ -2,11 +2,7 @@
let
ollamaConfPath = flakeRoot + "/assets/conf/apps/ai/ollama/ollama.conf";
envVars = {
OLLAMA_HOST = "http://127.0.0.1:11434";
OLLAMA_DEFAULT_MODEL = "codellama:70b";
OLLAMA_MODELS_TO_PULL = "codellama:70b,llama3.1:70b";
};
envVars = builtins.fromJSON (builtins.readFile ollamaConfPath);
in
{
@@ -14,6 +10,5 @@ in
enable = true;
package = pkgs.ollama;
environmentVariables = envVars;
wantedBy = [ "multi-user.target" ];
};
}