From 47cb470c8eb7684afafd408f8e35c589f8f3218c Mon Sep 17 00:00:00 2001
From: "Rodrigo Rodriguez (Pragmatismo)"
Date: Sat, 11 Apr 2026 22:30:39 -0300
Subject: [PATCH] Fix: Handle reasoning_content from NVIDIA reasoning models
 (gpt-oss-120b)

---
 src/llm/mod.rs | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/src/llm/mod.rs b/src/llm/mod.rs
index 5a4ae7e7..53786650 100644
--- a/src/llm/mod.rs
+++ b/src/llm/mod.rs
@@ -456,13 +456,16 @@ impl LLMProvider for OpenAIClient {
         let chunk_str = String::from_utf8_lossy(&chunk);
         for line in chunk_str.lines() {
             if line.starts_with("data: ") && !line.contains("[DONE]") {
-                if let Ok(data) = serde_json::from_str::<serde_json::Value>(&line[6..]) {
-                    if let Some(content) = data["choices"][0]["delta"]["content"].as_str() {
-                        let processed = handler.process_content(content);
-                        if !processed.is_empty() {
-                            let _ = tx.send(processed).await;
-                        }
+                if let Ok(data) = serde_json::from_str::<serde_json::Value>(&line[6..]) {
+                    // Handle content (standard) or reasoning_content (NVIDIA reasoning models)
+                    let content = data["choices"][0]["delta"]["content"].as_str()
+                        .or_else(|| data["choices"][0]["delta"]["reasoning_content"].as_str());
+                    if let Some(content) = content {
+                        let processed = handler.process_content(content);
+                        if !processed.is_empty() {
+                            let _ = tx.send(processed).await;
                         }
+                    }
                 // Handle standard OpenAI tool_calls
                 if let Some(tool_calls) = data["choices"][0]["delta"]["tool_calls"].as_array() {