Handle end-of-chunk logic in OpenAI stream processor

This commit is contained in:
sabaimran
2024-08-20 10:50:09 -07:00
parent 029775420c
commit f6ce2fd432

View File

@@ -56,6 +56,8 @@ def completion_with_backoff(
)
aggregated_response = ""
for chunk in chat:
if len(chunk.choices) == 0:
continue
delta_chunk = chunk.choices[0].delta # type: ignore
if isinstance(delta_chunk, str):
aggregated_response += delta_chunk