mirror of
https://github.com/XNM1/linux-nixos-hyprland-config-dotfiles.git
synced 2025-09-15 09:45:58 +03:00
LLM Update
Changelog:
- Added `ollama` with `nextjs-ollama-llm-ui`.
- Added `helix-gpt` and `lsp-ai` packages.
- Added `nvtop` package with Nvidia and Intel GPU support.
This commit is contained in:
@@ -57,6 +57,7 @@
|
||||
./info-fetchers.nix
|
||||
./utils.nix
|
||||
./terminal-utils.nix
|
||||
./llm.nix
|
||||
./work.nix
|
||||
];
|
||||
};
|
||||
|
@@ -20,7 +20,8 @@
|
||||
# clinfo
|
||||
# vdpauinfo
|
||||
# libva-utils
|
||||
# nvtop
|
||||
nvtopPackages.nvidia
|
||||
nvtopPackages.intel
|
||||
wlr-randr
|
||||
gpu-viewer
|
||||
dig
|
||||
|
10
nixos/llm.nix
Normal file
10
nixos/llm.nix
Normal file
@@ -0,0 +1,10 @@
|
||||
# NixOS module for a local LLM stack:
# - ollama service, pre-loading the llama3.2 model, with CUDA acceleration
#   (assumes an Nvidia GPU — consistent with nvtopPackages.nvidia elsewhere
#   in this configuration; TODO confirm on non-Nvidia hosts)
# - nextjs-ollama-llm-ui web frontend listening on port 5000
#
# Note: `pkgs` was bound but unused in the original argument set, so only
# `...` is kept; the module interface is unchanged.
{ ... }:

{
  services.ollama = {
    enable = true;
    # Models are pulled automatically when the service starts.
    loadModels = [ "llama3.2" ];
    acceleration = "cuda";
  };

  services.nextjs-ollama-llm-ui = {
    enable = true;
    port = 5000;
  };
}
|
@@ -27,5 +27,6 @@
|
||||
slint-lsp
|
||||
terraform-ls
|
||||
ansible-language-server
|
||||
helix-gpt
|
||||
];
|
||||
}
|
||||
|
Reference in New Issue
Block a user