Save conversation to DB in the background, as an asyncio task

This commit is contained in:
Debanjum
2025-04-22 17:15:53 +05:30
parent f929ff8438
commit 87262d15bb
5 changed files with 34 additions and 27 deletions

View File

@@ -1,3 +1,4 @@
+import asyncio
 import logging
 from datetime import datetime, timedelta
 from typing import AsyncGenerator, Dict, List, Optional
@@ -193,13 +194,13 @@ async def converse_anthropic(
     if conversation_commands == [ConversationCommand.Notes] and is_none_or_empty(references):
         response = prompts.no_notes_found.format()
         if completion_func:
-            await completion_func(chat_response=response)
+            asyncio.create_task(completion_func(chat_response=response))
         yield response
         return
     elif conversation_commands == [ConversationCommand.Online] and is_none_or_empty(online_results):
         response = prompts.no_online_results_found.format()
         if completion_func:
-            await completion_func(chat_response=response)
+            asyncio.create_task(completion_func(chat_response=response))
         yield response
         return
@@ -251,4 +252,4 @@ async def converse_anthropic(
     # Call completion_func once finish streaming and we have the full response
     if completion_func:
-        await completion_func(chat_response=full_response)
+        asyncio.create_task(completion_func(chat_response=full_response))

View File

@@ -1,3 +1,4 @@
+import asyncio
 import logging
 from datetime import datetime, timedelta
 from typing import AsyncGenerator, Dict, List, Optional
@@ -218,13 +219,13 @@ async def converse_gemini(
     if conversation_commands == [ConversationCommand.Notes] and is_none_or_empty(references):
         response = prompts.no_notes_found.format()
         if completion_func:
-            await completion_func(chat_response=response)
+            asyncio.create_task(completion_func(chat_response=response))
         yield response
         return
     elif conversation_commands == [ConversationCommand.Online] and is_none_or_empty(online_results):
         response = prompts.no_online_results_found.format()
         if completion_func:
-            await completion_func(chat_response=response)
+            asyncio.create_task(completion_func(chat_response=response))
         yield response
         return
@@ -275,4 +276,4 @@ async def converse_gemini(
     # Call completion_func once finish streaming and we have the full response
     if completion_func:
-        await completion_func(chat_response=full_response)
+        asyncio.create_task(completion_func(chat_response=full_response))

View File

@@ -202,13 +202,13 @@ async def converse_offline(
     if conversation_commands == [ConversationCommand.Notes] and is_none_or_empty(references):
         response = prompts.no_notes_found.format()
         if completion_func:
-            await completion_func(chat_response=response)
+            asyncio.create_task(completion_func(chat_response=response))
         yield response
         return
     elif conversation_commands == [ConversationCommand.Online] and is_none_or_empty(online_results):
         response = prompts.no_online_results_found.format()
         if completion_func:
-            await completion_func(chat_response=response)
+            asyncio.create_task(completion_func(chat_response=response))
         yield response
         return
@@ -317,7 +317,7 @@ async def converse_offline(
     # Call the completion function after streaming is done
     if completion_func:
-        await completion_func(chat_response=aggregated_response_container["response"])
+        asyncio.create_task(completion_func(chat_response=aggregated_response_container["response"]))


 def send_message_to_model_offline(

View File

@@ -1,3 +1,4 @@
+import asyncio
 import logging
 from datetime import datetime, timedelta
 from typing import AsyncGenerator, Dict, List, Optional
@@ -219,13 +220,13 @@ async def converse_openai(
     if conversation_commands == [ConversationCommand.Notes] and is_none_or_empty(references):
         response = prompts.no_notes_found.format()
         if completion_func:
-            await completion_func(chat_response=response)
+            asyncio.create_task(completion_func(chat_response=response))
         yield response
         return
     elif conversation_commands == [ConversationCommand.Online] and is_none_or_empty(online_results):
         response = prompts.no_online_results_found.format()
         if completion_func:
-            await completion_func(chat_response=response)
+            asyncio.create_task(completion_func(chat_response=response))
         yield response
         return
@@ -277,7 +278,7 @@ async def converse_openai(
     # Call completion_func once finish streaming and we have the full response
     if completion_func:
-        await completion_func(chat_response=full_response)
+        asyncio.create_task(completion_func(chat_response=full_response))


 def clean_response_schema(schema: BaseModel | dict) -> dict:

View File

@@ -998,7 +998,9 @@ async def chat(
                 return
             llm_response = construct_automation_created_message(automation, crontime, query_to_run, subject)
-            await save_to_conversation_log(
+            # Trigger task to save conversation to DB
+            asyncio.create_task(
+                save_to_conversation_log(
                 q,
                 llm_response,
                 user,
@@ -1014,6 +1016,8 @@ async def chat(
                 raw_query_files=raw_query_files,
                 tracer=tracer,
-            )
+                )
+            )
+            # Send LLM Response
             async for result in send_llm_response(llm_response, tracer.get("usage")):
                 yield result
                 return