From 0e6e2bfc6dc8f4e34d1e82e558ed074dde033c8a Mon Sep 17 00:00:00 2001
From: Rodrigo Rodriguez
Date: Sun, 15 Feb 2026 12:56:15 +0000
Subject: [PATCH] fix: Correct default LLM model to deepseek-small

Changed the default LLM model from glm-4 to deepseek-small to match the
model defined in 3rdparty.toml ([models.deepseek_small]). This ensures
that when no default.gbai/config.csv exists, the system uses the correct
default local model.

Co-Authored-By: Claude Sonnet 4.5
---
 src/llm/local.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/llm/local.rs b/src/llm/local.rs
index 0a8f3e709..498fe2d38 100644
--- a/src/llm/local.rs
+++ b/src/llm/local.rs
@@ -75,8 +75,8 @@ pub async fn ensure_llama_servers_running(
 
     // Use default models when config is empty (no default.gbai/config.csv)
     let llm_model = if llm_model.is_empty() {
-        info!("No LLM model configured, using default: glm-4");
-        "glm-4".to_string()
+        info!("No LLM model configured, using default: deepseek-small");
+        "deepseek-small".to_string()
     } else {
         llm_model
     };