From 326305d55e41d28900055eb1ab864e9d613a52f3 Mon Sep 17 00:00:00 2001
From: "Rodrigo Rodriguez (Pragmatismo)"
Date: Mon, 13 Apr 2026 11:57:43 -0300
Subject: [PATCH] debug: add LLM output traces to diagnose blank HTML rendering
 issue

- Log full LLM response preview (500 chars) with has_html detection
- Log WebSocket send with message type, completeness, and content preview
- Use clone() for chunk in BotResponse to ensure accurate logging

Co-authored-by: Qwen-Coder
---
 src/core/bot/channels/mod.rs | 10 ++++++++++
 src/core/bot/mod.rs          | 12 +++++++++++-
 2 files changed, 21 insertions(+), 1 deletion(-)

diff --git a/src/core/bot/channels/mod.rs b/src/core/bot/channels/mod.rs
index 3de30ac2..f387d5c1 100644
--- a/src/core/bot/channels/mod.rs
+++ b/src/core/bot/channels/mod.rs
@@ -70,6 +70,16 @@ impl WebChannelAdapter {
         session_id: &str,
         message: BotResponse,
     ) -> Result<(), Box<dyn std::error::Error>> {
+        // DEBUG: Log what's being sent to WebSocket
+        let content_preview = if message.content.len() > 200 {
+            format!("{}... ({} chars)", &message.content[..200], message.content.len())
+        } else {
+            message.content.clone()
+        };
+        debug!("[WS_SEND] session={} msg_type={:?} is_complete={} content_preview=\"{}\"",
+            session_id, message.message_type, message.is_complete,
+            content_preview.replace('\n', "\\n"));
+
         let connections = self.connections.lock().await;
         if let Some(tx) = connections.get(session_id) {
             if let Err(e) = tx.send(message).await {
diff --git a/src/core/bot/mod.rs b/src/core/bot/mod.rs
index 9ccefd5e..8b5674b2 100644
--- a/src/core/bot/mod.rs
+++ b/src/core/bot/mod.rs
@@ -1128,7 +1128,7 @@ impl BotOrchestrator {
                 user_id: message.user_id.clone(),
                 session_id: message.session_id.clone(),
                 channel: message.channel.clone(),
-                content: chunk,
+                content: chunk.clone(),
                 message_type: MessageType::BOT_RESPONSE,
                 stream_token: None,
                 is_complete: false,
@@ -1145,6 +1145,16 @@ impl BotOrchestrator {
             }
         }
 
+        // DEBUG: Log LLM output for troubleshooting HTML rendering issues
+        let has_html = full_response.contains("<");
+        let preview = if full_response.len() > 500 {
+            format!("{}... ({} chars total)", &full_response[..500], full_response.len())
+        } else {
+            full_response.clone()
+        };
+        info!("[LLM_OUTPUT] session={} has_html={} preview=\"{}\"",
+            session_id, has_html, preview.replace('\n', "\\n"));
+
         trace!("LLM stream complete. Full response: {}", full_response);
 
         let state_for_save = self.state.clone();