diff --git a/src/automation/compact_prompt.rs b/src/automation/compact_prompt.rs
index 8472916a..6a1635c3 100644
--- a/src/automation/compact_prompt.rs
+++ b/src/automation/compact_prompt.rs
@@ -101,7 +101,7 @@ async fn compact_prompt_for_bots(
         messages_since_summary
     );
 
-    let mut compacted = String::new();
+    let mut compacted = "Please summarize the following conversation between a human and an AI assistant:\n".to_string();
 
     // Include messages from start_index onward
     let messages_to_include = history.iter().skip(start_index);
@@ -114,6 +114,7 @@ async fn compact_prompt_for_bots(
     }
     let llm_provider = state.llm_provider.clone();
     trace!("Starting summarization for session {}", session.id);
+    let mut filtered = String::new();
     let summarized = match llm_provider.generate(&compacted, &serde_json::Value::Null).await {
         Ok(summary) => {
             trace!(
@@ -128,7 +129,7 @@ async fn compact_prompt_for_bots(
                     .unwrap().as_str(),
            );
 
-            let filtered = handler.process_content(&summary);
+            filtered = handler.process_content(&summary);
            format!("SUMMARY: {}", filtered)
        }
        Err(e) => {
@@ -147,7 +148,7 @@ async fn compact_prompt_for_bots(
    );
    {
        let mut session_manager = state.session_manager.lock().await;
-        session_manager.save_message(session.id, session.user_id, 9, &summarized, 1)?;
+        session_manager.save_message(session.id, session.user_id, 9, &filtered, 1)?;
    }
 
    let _session_cleanup = guard((), |_| {
diff --git a/src/llm/mod.rs b/src/llm/mod.rs
index 6e6c03b4..abdd405b 100644
--- a/src/llm/mod.rs
+++ b/src/llm/mod.rs
@@ -81,7 +81,6 @@ impl LLMProvider for OpenAIClient {
            .json(&serde_json::json!({
                "model": "gpt-3.5-turbo",
                "messages": messages,
-                "max_tokens": 1000,
                "stream": true
            }))
            .send()
@@ -135,18 +134,20 @@ impl OpenAIClient {
                    "compact" => "system",
                    _ => continue
                };
-                {
-                    if let Some(r) = current_role.take() {
+
+                if let Some(r) = current_role.take() {
+                    if !current_content.is_empty() {
                        messages.push(serde_json::json!({
                            "role": r,
                            "content": current_content.trim()
                        }));
                    }
-                    current_role = Some(role);
-                    current_content = line[role_end + 1..].trim_start().to_string();
-                    continue;
                }
+                current_role = Some(role);
+                current_content = line[role_end + 1..].trim_start().to_string();
+                continue;
            }
+
            if let Some(_) = current_role {
                if !current_content.is_empty() {
                    current_content.push('\n');
@@ -156,10 +157,12 @@ impl OpenAIClient {
        }
 
        if let Some(role) = current_role {
-            messages.push(serde_json::json!({
-                "role": role,
-                "content": current_content.trim()
-            }));
+            if !current_content.is_empty() {
+                messages.push(serde_json::json!({
+                    "role": role,
+                    "content": current_content.trim()
+                }));
+            }
        }
        messages
    }
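
For reviewers, here is a minimal, standalone sketch (not code from this repository) of the message-building loop that the `src/llm/mod.rs` hunk restructures. The `build_messages` name, the `prompt: &str` parameter, and the continuation-line handling are assumptions reconstructed from the unchanged context lines; only the empty-content guards and the flattened block structure mirror what the patch actually changes.

```rust
/// Sketch of the role-tagged transcript parser after this patch.
/// Assumed helper name and signature; requires the serde_json crate.
fn build_messages(prompt: &str) -> Vec<serde_json::Value> {
    let mut messages: Vec<serde_json::Value> = Vec::new();
    let mut current_role: Option<&str> = None;
    let mut current_content = String::new();

    for line in prompt.lines() {
        if let Some(role_end) = line.find(':') {
            let role = match &line[..role_end] {
                "user" => "user",
                "assistant" => "assistant",
                "compact" => "system",
                // Lines with an unrecognized "<word>:" prefix are skipped, as in the patch.
                _ => continue,
            };

            // Flush the previous message, but only if it actually has content.
            if let Some(r) = current_role.take() {
                if !current_content.is_empty() {
                    messages.push(serde_json::json!({
                        "role": r,
                        "content": current_content.trim()
                    }));
                }
            }
            current_role = Some(role);
            current_content = line[role_end + 1..].trim_start().to_string();
            continue;
        }

        // Continuation line: append to the current message, if any.
        if current_role.is_some() {
            if !current_content.is_empty() {
                current_content.push('\n');
            }
            current_content.push_str(line);
        }
    }

    // Final flush with the same empty-content guard.
    if let Some(role) = current_role {
        if !current_content.is_empty() {
            messages.push(serde_json::json!({
                "role": role,
                "content": current_content.trim()
            }));
        }
    }
    messages
}

fn main() {
    let prompt = "user: Hello\nassistant:\nuser: Are you there?";
    // The empty assistant turn is dropped instead of being sent with "" content.
    println!("{}", serde_json::to_string_pretty(&build_messages(prompt)).unwrap());
}
```

With the guards in place, a role marker that carries no body (such as an empty `assistant:` turn) is skipped rather than pushed as a message with empty `content`, both on role switches and on the final flush.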