diff --git a/home/.config/opencode/opencode.json b/home/.config/opencode/opencode.json
index 8e046af..b5c7a0f 100644
--- a/home/.config/opencode/opencode.json
+++ b/home/.config/opencode/opencode.json
@@ -1,6 +1,6 @@
 {
   "$schema": "https://opencode.ai/config.json",
-  "model": "moonshot/kimi-k2-0711-preview",
+  "model": "z/glm-4.5",
   "theme": "catppuccin",
   "provider": {
     "moonshot": {
@@ -14,6 +14,18 @@
           "name": "Kimi-K2"
         }
       }
-    }
+    },
+    "z": {
+      "name": "Z",
+      "npm": "@ai-sdk/openai-compatible",
+      "options": {
+        "baseURL": "https://api.z.ai/api/paas/v4"
+      },
+      "models": {
+        "glm-4.5": {
+          "name": "GLM-4.5"
+        }
+      }
+    }
   },
   "mcp": {
diff --git a/nixos/hyprland.nix b/nixos/hyprland.nix
index 0f410f7..e55b141 100644
--- a/nixos/hyprland.nix
+++ b/nixos/hyprland.nix
@@ -19,6 +19,7 @@
     hyprlock
     hypridle
     hyprpaper
+    hyprpolkitagent
 
     inputs.wezterm.packages.${pkgs.system}.default
     kitty
diff --git a/nixos/llm.nix b/nixos/llm.nix
index 0fbf407..c37b180 100644
--- a/nixos/llm.nix
+++ b/nixos/llm.nix
@@ -4,7 +4,7 @@
 
   services.ollama = {
     enable = true;
-    loadModels = [ "llama3.2:3b" "phi4-reasoning:14b" "dolphin3:8b" "smallthinker:3b" "gemma3n:e4b" "gemma3:12b" "gemma3:27b" "deepcoder:14b" "qwen3:14b" "nomic-embed-text" ];
+    loadModels = [ "llama3.2:3b" "phi4-reasoning:14b" "dolphin3:8b" "smallthinker:3b" "gemma3n:e4b" "deepcoder:14b" "qwen3:14b" "qwen3-coder:30b" "nomic-embed-text" ];
    acceleration = "cuda";
   };
 
@@ -47,5 +47,8 @@
     # smartcat
     # nextjs-ollama-llm-ui
     # open-webui
+
+    chromium
+    playwright
   ];
 }