fix: Use default local LLM models when config is empty
Some checks failed
BotServer CI / build (push) Has been cancelled
When no default.gbai/config.csv exists, or when the llm-model or embedding-model config value is empty, the system now uses default local models instead of skipping server startup.

Changes:
- Default LLM model: glm-4
- Default embedding model: bge-small-en-v1.5
- Logs when a default is used

This fixes the issue where the "default" bot would fail to load the LLM and embedding services when no config.csv was present, causing the error: "not loading embedding neither llm local for default bot"

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
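For reference, this is roughly what a default.gbai/config.csv overriding these fallbacks might look like. A hedged sketch only: the name,value layout and the llm-server row are assumptions based on the option names mentioned in this commit, not a schema confirmed by it.

name,value
llm-server,true
llm-model,glm-4
embedding-model,bge-small-en-v1.5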
parent cf7bd7ffa2
commit 337bef3bad

1 changed file with 15 additions and 0 deletions
@@ -72,6 +72,21 @@ pub async fn ensure_llama_servers_running(
     ) = config_values;
 
     let llm_server_enabled = llm_server_enabled.to_lowercase() == "true";
+    // Use default models when config is empty (no default.gbai/config.csv)
+    let llm_model = if llm_model.is_empty() {
+        info!("No LLM model configured, using default: glm-4");
+        "glm-4".to_string()
+    } else {
+        llm_model
+    };
+
+    let embedding_model = if embedding_model.is_empty() {
+        info!("No embedding model configured, using default: bge-small-en-v1.5");
+        "bge-small-en-v1.5".to_string()
+    } else {
+        embedding_model
+    };
+
     if !llm_server_enabled {
         info!("Local LLM server management disabled (llm-server=false). Using external endpoints.");
         info!(" LLM URL: {llm_url}");
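The same empty-check fallback appears twice in the patch, once per model. As a sketch only, the pattern could be factored into a small helper; the name model_or_default is hypothetical, and log::info! (from the log crate) stands in for whatever info! macro the file actually imports.

// Hypothetical helper illustrating the fallback this commit applies to both
// the LLM and the embedding model: keep the configured value when present,
// otherwise substitute a default and log the substitution.
fn model_or_default(configured: String, kind: &str, default: &str) -> String {
    if configured.is_empty() {
        log::info!("No {kind} model configured, using default: {default}");
        default.to_string()
    } else {
        configured
    }
}

fn main() {
    // With no default.gbai/config.csv, both config lookups come back empty,
    // so the built-in defaults apply.
    let llm_model = model_or_default(String::new(), "LLM", "glm-4");
    let embedding_model = model_or_default(String::new(), "embedding", "bge-small-en-v1.5");
    assert_eq!(llm_model, "glm-4");
    assert_eq!(embedding_model, "bge-small-en-v1.5");
}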