From 8862394c15fc814d7973828face583153ba2f7c6 Mon Sep 17 00:00:00 2001 From: Debanjum Date: Tue, 19 Aug 2025 15:01:56 -0700 Subject: [PATCH] Handle unset reasoning, response chunk from openai api while streaming --- src/khoj/processor/conversation/openai/utils.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/khoj/processor/conversation/openai/utils.py b/src/khoj/processor/conversation/openai/utils.py index d29796b3..2b5889c2 100644 --- a/src/khoj/processor/conversation/openai/utils.py +++ b/src/khoj/processor/conversation/openai/utils.py @@ -170,6 +170,7 @@ def completion_with_backoff( chunk.type == "chunk" and chunk.chunk.choices and hasattr(chunk.chunk.choices[0].delta, "reasoning_content") + and chunk.chunk.choices[0].delta.reasoning_content ): thoughts += chunk.chunk.choices[0].delta.reasoning_content elif chunk.type == "chunk" and chunk.chunk.choices and chunk.chunk.choices[0].delta.tool_calls: @@ -1075,6 +1076,10 @@ async def ain_stream_thought_processor( yield chunk continue + if chunk.choices[0].delta.content is None: + # If delta content is None, there is nothing to process; skip this chunk + continue + buf += chunk.choices[0].delta.content if mode == "detect_start":