fix: Correct default LLM model to deepseek-small
All checks were successful
BotServer CI / build (push) Successful in 8m57s
All checks were successful
BotServer CI / build (push) Successful in 8m57s
Changed the default LLM model from glm-4 to deepseek-small to match the model defined in 3rdparty.toml ([models.deepseek_small]). This ensures that when no default.gbai/config.csv exists, the system uses the correct default local model. Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
parent
337bef3bad
commit
0e6e2bfc6d
1 changed file with 2 additions and 2 deletions
|
|
@ -75,8 +75,8 @@ pub async fn ensure_llama_servers_running(
|
||||||
|
|
||||||
// Use default models when config is empty (no default.gbai/config.csv)
|
// Use default models when config is empty (no default.gbai/config.csv)
|
||||||
let llm_model = if llm_model.is_empty() {
|
let llm_model = if llm_model.is_empty() {
|
||||||
info!("No LLM model configured, using default: glm-4");
|
info!("No LLM model configured, using default: deepseek-small");
|
||||||
"glm-4".to_string()
|
"deepseek-small".to_string()
|
||||||
} else {
|
} else {
|
||||||
llm_model
|
llm_model
|
||||||
};
|
};
|
||||||
|
|
|
||||||
Loading…
Add table
Reference in a new issue