Gracefully close thread when there's an exception in the anthropic llm thread. Include full stack traces.

This commit is contained in:
sabaimran
2024-09-03 13:16:34 -07:00
parent 17901406aa
commit 895f1c8e9e
2 changed files with 25 additions and 22 deletions

View File

@@ -89,6 +89,7 @@ def anthropic_chat_completion_with_backoff(
 def anthropic_llm_thread(
     g, messages, system_prompt, model_name, temperature, api_key, max_prompt_size=None, model_kwargs=None
 ):
+    try:
         if api_key not in anthropic_clients:
             client: anthropic.Anthropic = anthropic.Anthropic(api_key=api_key)
             anthropic_clients[api_key] = client
@@ -110,5 +111,7 @@ def anthropic_llm_thread(
         ) as stream:
             for text in stream.text_stream:
                 g.send(text)
+    except Exception as e:
+        logger.error(f"Error in anthropic_llm_thread: {e}", exc_info=True)
+    finally:
         g.close()

View File

@@ -131,6 +131,6 @@ def llm_thread(g, messages, model_name, temperature, openai_api_key=None, api_ba
         elif delta_chunk.content:
             g.send(delta_chunk.content)
     except Exception as e:
-        logger.error(f"Error in llm_thread: {e}")
+        logger.error(f"Error in llm_thread: {e}", exc_info=True)
     finally:
         g.close()