Handle unset reasoning and response chunks from the OpenAI API while streaming

This commit is contained in:
Debanjum
2025-08-19 15:01:56 -07:00
parent 14b4d4b663
commit 8862394c15

View File

@@ -170,6 +170,7 @@ def completion_with_backoff(
chunk.type == "chunk"
and chunk.chunk.choices
and hasattr(chunk.chunk.choices[0].delta, "reasoning_content")
and chunk.chunk.choices[0].delta.reasoning_content
):
thoughts += chunk.chunk.choices[0].delta.reasoning_content
elif chunk.type == "chunk" and chunk.chunk.choices and chunk.chunk.choices[0].delta.tool_calls:
@@ -1075,6 +1076,10 @@ async def ain_stream_thought_processor(
yield chunk
continue
if chunk.choices[0].delta.content is None:
                # If delta content is None, there is nothing to process; skip this chunk
continue
buf += chunk.choices[0].delta.content
if mode == "detect_start":