chore: update submodules
All checks were successful
BotServer CI / build (push) Successful in 9m56s

This commit is contained in:
Rodrigo Rodriguez (Pragmatismo) 2026-03-10 19:39:31 -03:00
parent 1053c86a73
commit e98de24fe6
3 changed files with 58 additions and 9 deletions

View file

@@ -39,8 +39,7 @@ pub struct AutomationService {
impl AutomationService {
#[must_use]
pub fn new(state: Arc<AppState>) -> Self {
    // Kick off the background episodic-memory scheduler as part of service
    // construction; it receives its own Arc clone so the service keeps `state`.
    // (The previous "temporarily disabled to debug CPU spike" commented-out
    // copy of this call was stale dead code and has been removed.)
    crate::llm::episodic_memory::start_episodic_memory_scheduler(Arc::clone(&state));
    Self { state }
}
pub async fn spawn(self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {

View file

@@ -426,7 +426,7 @@ impl BotOrchestrator {
}
.ok_or("Session not found")?;
{
if !message.content.trim().is_empty() {
let mut sm = state_clone.session_manager.blocking_lock();
sm.save_message(session.id, user_id, 1, &message.content, 1)?;
}
@@ -591,6 +591,38 @@ impl BotOrchestrator {
}
} // End of if should_execute_start_bas
// If message content is empty, we stop here after potentially running start.bas.
// This happens when the bot is activated by its name in WhatsApp, where an empty string is sent as a signal.
if message_content.trim().is_empty() {
let user_id_str = message.user_id.clone();
let session_id_str = message.session_id.clone();
#[cfg(feature = "chat")]
let suggestions = get_suggestions(self.state.cache.as_ref(), &user_id_str, &session_id_str);
#[cfg(not(feature = "chat"))]
let suggestions: Vec<crate::core::shared::models::Suggestion> = Vec::new();
let final_response = BotResponse {
bot_id: message.bot_id,
user_id: message.user_id,
session_id: message.session_id,
channel: message.channel,
content: String::new(),
message_type: MessageType::BOT_RESPONSE,
stream_token: None,
is_complete: true,
suggestions,
context_name: None,
context_length: 0,
context_max_length: 0,
};
if let Err(e) = response_tx.send(final_response).await {
warn!("Failed to send final response for empty content: {}", e);
}
return Ok(());
}
if let Some(kb_manager) = self.state.kb_manager.as_ref() {
if let Err(e) = inject_kb_context(
kb_manager.clone(),

View file

@@ -214,21 +214,39 @@ impl OpenAIClient {
history: &[(String, String)],
) -> Value {
let mut messages = Vec::new();
let mut system_parts = Vec::new();
if !system_prompt.is_empty() {
messages.push(serde_json::json!({
"role": "system",
"content": Self::sanitize_utf8(system_prompt)
}));
system_parts.push(Self::sanitize_utf8(system_prompt));
}
if !context_data.is_empty() {
system_parts.push(Self::sanitize_utf8(context_data));
}
for (role, content) in history {
if role == "episodic" || role == "compact" {
system_parts.push(format!("[Previous conversation summary]: {}", Self::sanitize_utf8(content)));
}
}
if !system_parts.is_empty() {
messages.push(serde_json::json!({
"role": "system",
"content": Self::sanitize_utf8(context_data)
"content": system_parts.join("\n\n")
}));
}
for (role, content) in history {
let normalized_role = match role.as_str() {
"user" => "user",
"assistant" => "assistant",
"system" => "system",
"episodic" | "compact" => continue, // Already added to system prompt
_ => "user", // Fallback Default
};
messages.push(serde_json::json!({
"role": role,
"role": normalized_role,
"content": Self::sanitize_utf8(content)
}));
}