fix: Correct default LLM model to deepseek-small
All checks were successful
BotServer CI / build (push) Successful in 8m57s

Changed the default LLM model from glm-4 to deepseek-small to match
the model defined in 3rdparty.toml ([models.deepseek_small]).

This ensures that when no default.gbai/config.csv exists, the system
uses the correct default local model.

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
Rodrigo Rodriguez 2026-02-15 12:56:15 +00:00
parent 337bef3bad
commit 0e6e2bfc6d

View file

@@ -75,8 +75,8 @@ pub async fn ensure_llama_servers_running(
// Use default models when config is empty (no default.gbai/config.csv)
let llm_model = if llm_model.is_empty() {
-        info!("No LLM model configured, using default: glm-4");
-        "glm-4".to_string()
+        info!("No LLM model configured, using default: deepseek-small");
+        "deepseek-small".to_string()
} else {
llm_model
};