1
0
mirror of https://github.com/XNM1/linux-nixos-hyprland-config-dotfiles.git synced 2025-09-15 09:45:58 +03:00

LLM Update

Changelog:
- Added `ollama` with `nextjs-ollama-llm-ui`.
- Added `helix-gpt` and `lsp-ai` packages.
- Added `nvtop` package with Nvidia and Intel GPU support.
This commit is contained in:
xnm
2024-09-29 23:11:12 +03:00
parent 599713a3df
commit f6c0b339a0
5 changed files with 15 additions and 1 deletion

View File

@@ -4,3 +4,4 @@
"contracts-node 0.35.0 (registry+https://github.com/rust-lang/crates.io-index)" = ["substrate-contracts-node"]
"dylint-link 2.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = ["dylint-link"]
"ink-lsp-server 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)" = ["ink-lsp-server"]
"lsp-ai 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = ["lsp-ai"]

View File

@@ -57,6 +57,7 @@
./info-fetchers.nix
./utils.nix
./terminal-utils.nix
./llm.nix
./work.nix
];
};

View File

@@ -20,7 +20,8 @@
# clinfo
# vdpauinfo
# libva-utils
# nvtop
nvtopPackages.nvidia
nvtopPackages.intel
wlr-randr
gpu-viewer
dig

10
nixos/llm.nix Normal file
View File

@@ -0,0 +1,10 @@
{ pkgs, ... }:
{
  # Local LLM stack: Ollama backend with a Next.js web UI frontend.
  services = {
    ollama = {
      enable = true;
      # Models pulled automatically at service start.
      loadModels = [ "llama3.2" ];
      # CUDA acceleration — requires an Nvidia GPU with the proprietary driver.
      acceleration = "cuda";
    };
    nextjs-ollama-llm-ui = {
      enable = true;
      port = 5000;
    };
  };
}

View File

@@ -27,5 +27,6 @@
slint-lsp
terraform-ls
ansible-language-server
helix-gpt
];
}