botserver/src/core/config_reload.rs
Rodrigo Rodriguez 5ea171d126
Some checks failed
BotServer CI / build (push) Failing after 1m34s
Refactor: Split large files into modular subdirectories
Split 20+ files over 1000 lines into focused subdirectories for better
maintainability and code organization. All changes maintain backward
compatibility through re-export wrappers.

Major splits:
- attendance/llm_assist.rs (2074→7 modules)
- basic/keywords/face_api.rs → face_api/ (7 modules)
- basic/keywords/file_operations.rs → file_ops/ (8 modules)
- basic/keywords/hear_talk.rs → hearing/ (6 modules)
- channels/wechat.rs → wechat/ (10 modules)
- channels/youtube.rs → youtube/ (5 modules)
- contacts/mod.rs → contacts_api/ (6 modules)
- core/bootstrap/mod.rs → bootstrap/ (5 modules)
- core/shared/admin.rs → admin_*.rs (5 modules)
- designer/canvas.rs → canvas_api/ (6 modules)
- designer/mod.rs → designer_api/ (6 modules)
- docs/handlers.rs → handlers_api/ (11 modules)
- drive/mod.rs → drive_handlers.rs, drive_types.rs
- learn/mod.rs → types.rs
- main.rs → main_module/ (7 modules)
- meet/webinar.rs → webinar_api/ (8 modules)
- paper/mod.rs → (10 modules)
- security/auth.rs → auth_api/ (7 modules)
- security/passkey.rs → (4 modules)
- sources/mod.rs → sources_api/ (5 modules)
- tasks/mod.rs → task_api/ (5 modules)

Stats: 38,040 deletions, 1,315 additions across 318 files

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-02-12 21:09:30 +00:00

59 lines
2.1 KiB
Rust

// Simple config reload endpoint
use axum::{extract::State, http::StatusCode, response::Json};
use serde_json::{json, Value};
use std::sync::Arc;
use crate::core::shared::state::AppState;
use crate::core::config::ConfigManager;
/// Reload the LLM configuration for the default bot and push it into the
/// dynamic LLM provider registered on the shared application state.
///
/// On success, responds with a JSON payload echoing the reloaded values.
/// When no dynamic provider is present, responds HTTP 200 with a
/// `status: "error"` body. Database or join failures become
/// `500 INTERNAL_SERVER_ERROR`.
pub async fn reload_config(
    State(state): State<Arc<AppState>>,
) -> Result<Json<Value>, StatusCode> {
    // The pool and the bot lookup are synchronous, so resolve the default
    // bot on a blocking worker thread rather than inside the async task.
    let pool = state.conn.clone();
    let lookup = tokio::task::spawn_blocking(move || -> Result<(uuid::Uuid, String), String> {
        let mut conn = pool
            .get()
            .map_err(|e| format!("failed to get db connection: {e}"))?;
        Ok(crate::core::bot::get_default_bot(&mut *conn))
    })
    .await;

    // Collapse both the JoinError and the inner lookup error into a 500.
    let (default_bot_id, _) = lookup
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;

    // Fetch each LLM setting; the same hard-coded fallback is used both as
    // the stored default and as the value on lookup failure.
    let config_manager = ConfigManager::new(state.conn.clone());
    let fetch = |key: &str, fallback: &str| {
        config_manager
            .get_config(&default_bot_id, key, Some(fallback))
            .unwrap_or_else(|_| fallback.to_string())
    };
    let llm_url = fetch("llm-url", "http://localhost:8081");
    let llm_model = fetch("llm-model", "local");
    let llm_endpoint_path = fetch("llm-endpoint-path", "/v1/chat/completions");

    // Without a dynamic provider there is nothing to reload; report that as
    // a soft error in the body while keeping the HTTP status at 200.
    let Some(dynamic_llm) = &state.dynamic_llm_provider else {
        return Ok(Json(json!({
            "status": "error",
            "message": "Dynamic LLM provider not available"
        })));
    };

    dynamic_llm
        .update_from_config(&llm_url, Some(llm_model.clone()), Some(llm_endpoint_path.clone()))
        .await;

    Ok(Json(json!({
        "status": "success",
        "message": "LLM configuration reloaded",
        "config": {
            "llm_url": llm_url,
            "llm_model": llm_model,
            "llm_endpoint_path": llm_endpoint_path
        }
    })))
}