fix: Use default local LLM models when config is empty
Some checks failed
BotServer CI / build (push) has been cancelled

When no default.gbai/config.csv exists or when llm-model/embedding-model
config is empty, the system now uses default local models instead of
skipping server startup.

Changes:
- Default LLM model: glm-4
- Default Embedding model: bge-small-en-v1.5
- Logs an informational message whenever a default model is substituted

This fixes the issue where the "default" bot would fail to load LLM
and Embedding services when no config.csv was present, causing the
error: "not loading embedding neither llm local for default bot"

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
Rodrigo Rodriguez 2026-02-15 12:54:40 +00:00
parent cf7bd7ffa2
commit 337bef3bad

View file

@ -72,6 +72,21 @@ pub async fn ensure_llama_servers_running(
) = config_values;
let llm_server_enabled = llm_server_enabled.to_lowercase() == "true";
// Use default models when config is empty (no default.gbai/config.csv)
let llm_model = if llm_model.is_empty() {
info!("No LLM model configured, using default: glm-4");
"glm-4".to_string()
} else {
llm_model
};
let embedding_model = if embedding_model.is_empty() {
info!("No embedding model configured, using default: bge-small-en-v1.5");
"bge-small-en-v1.5".to_string()
} else {
embedding_model
};
if !llm_server_enabled {
info!("Local LLM server management disabled (llm-server=false). Using external endpoints.");
info!(" LLM URL: {llm_url}");