Save conversation to DB in the background, as an asyncio task

This commit is contained in:
Debanjum
2025-04-22 17:15:53 +05:30
parent f929ff8438
commit 87262d15bb
5 changed files with 34 additions and 27 deletions

View File

@@ -1,3 +1,4 @@
import asyncio
import logging
from datetime import datetime, timedelta
from typing import AsyncGenerator, Dict, List, Optional
@@ -193,13 +194,13 @@ async def converse_anthropic(
if conversation_commands == [ConversationCommand.Notes] and is_none_or_empty(references):
response = prompts.no_notes_found.format()
if completion_func:
await completion_func(chat_response=response)
asyncio.create_task(completion_func(chat_response=response))
yield response
return
elif conversation_commands == [ConversationCommand.Online] and is_none_or_empty(online_results):
response = prompts.no_online_results_found.format()
if completion_func:
await completion_func(chat_response=response)
asyncio.create_task(completion_func(chat_response=response))
yield response
return
@@ -251,4 +252,4 @@ async def converse_anthropic(
# Call completion_func once we finish streaming and have the full response
if completion_func:
await completion_func(chat_response=full_response)
asyncio.create_task(completion_func(chat_response=full_response))

View File

@@ -1,3 +1,4 @@
import asyncio
import logging
from datetime import datetime, timedelta
from typing import AsyncGenerator, Dict, List, Optional
@@ -218,13 +219,13 @@ async def converse_gemini(
if conversation_commands == [ConversationCommand.Notes] and is_none_or_empty(references):
response = prompts.no_notes_found.format()
if completion_func:
await completion_func(chat_response=response)
asyncio.create_task(completion_func(chat_response=response))
yield response
return
elif conversation_commands == [ConversationCommand.Online] and is_none_or_empty(online_results):
response = prompts.no_online_results_found.format()
if completion_func:
await completion_func(chat_response=response)
asyncio.create_task(completion_func(chat_response=response))
yield response
return
@@ -275,4 +276,4 @@ async def converse_gemini(
# Call completion_func once we finish streaming and have the full response
if completion_func:
await completion_func(chat_response=full_response)
asyncio.create_task(completion_func(chat_response=full_response))

View File

@@ -202,13 +202,13 @@ async def converse_offline(
if conversation_commands == [ConversationCommand.Notes] and is_none_or_empty(references):
response = prompts.no_notes_found.format()
if completion_func:
await completion_func(chat_response=response)
asyncio.create_task(completion_func(chat_response=response))
yield response
return
elif conversation_commands == [ConversationCommand.Online] and is_none_or_empty(online_results):
response = prompts.no_online_results_found.format()
if completion_func:
await completion_func(chat_response=response)
asyncio.create_task(completion_func(chat_response=response))
yield response
return
@@ -317,7 +317,7 @@ async def converse_offline(
# Call the completion function after streaming is done
if completion_func:
await completion_func(chat_response=aggregated_response_container["response"])
asyncio.create_task(completion_func(chat_response=aggregated_response_container["response"]))
def send_message_to_model_offline(

View File

@@ -1,3 +1,4 @@
import asyncio
import logging
from datetime import datetime, timedelta
from typing import AsyncGenerator, Dict, List, Optional
@@ -219,13 +220,13 @@ async def converse_openai(
if conversation_commands == [ConversationCommand.Notes] and is_none_or_empty(references):
response = prompts.no_notes_found.format()
if completion_func:
await completion_func(chat_response=response)
asyncio.create_task(completion_func(chat_response=response))
yield response
return
elif conversation_commands == [ConversationCommand.Online] and is_none_or_empty(online_results):
response = prompts.no_online_results_found.format()
if completion_func:
await completion_func(chat_response=response)
asyncio.create_task(completion_func(chat_response=response))
yield response
return
@@ -277,7 +278,7 @@ async def converse_openai(
# Call completion_func once we finish streaming and have the full response
if completion_func:
await completion_func(chat_response=full_response)
asyncio.create_task(completion_func(chat_response=full_response))
def clean_response_schema(schema: BaseModel | dict) -> dict:

View File

@@ -998,22 +998,26 @@ async def chat(
return
llm_response = construct_automation_created_message(automation, crontime, query_to_run, subject)
await save_to_conversation_log(
q,
llm_response,
user,
meta_log,
user_message_time,
intent_type="automation",
client_application=request.user.client_app,
conversation_id=conversation_id,
inferred_queries=[query_to_run],
automation_id=automation.id,
query_images=uploaded_images,
train_of_thought=train_of_thought,
raw_query_files=raw_query_files,
tracer=tracer,
# Trigger task to save conversation to DB
asyncio.create_task(
save_to_conversation_log(
q,
llm_response,
user,
meta_log,
user_message_time,
intent_type="automation",
client_application=request.user.client_app,
conversation_id=conversation_id,
inferred_queries=[query_to_run],
automation_id=automation.id,
query_images=uploaded_images,
train_of_thought=train_of_thought,
raw_query_files=raw_query_files,
tracer=tracer,
)
)
# Send LLM Response
async for result in send_llm_response(llm_response, tracer.get("usage")):
yield result
return