Gracefully close thread when there's an exception in the openai llm thread. Closes #894.

This commit is contained in:
sabaimran
2024-09-03 13:14:55 -07:00
parent 6ed68b574b
commit 17901406aa

View File

@@ -100,6 +100,7 @@ def chat_completion_with_backoff(
def llm_thread(g, messages, model_name, temperature, openai_api_key=None, api_base_url=None, model_kwargs=None):
try:
        client_key = f"{openai_api_key}--{api_base_url}"
        if client_key not in openai_clients:
            client: openai.OpenAI = openai.OpenAI(
@@ -129,5 +130,7 @@ def llm_thread(g, messages, model_name, temperature, openai_api_key=None, api_ba
                g.send(delta_chunk)
            elif delta_chunk.content:
                g.send(delta_chunk.content)
except Exception as e:
logger.error(f"Error in llm_thread: {e}")
finally:
        g.close()