From 0847fb010247ef44dbfb6c7dcf81430c98cc5381 Mon Sep 17 00:00:00 2001
From: Debanjum Singh Solanky
Date: Tue, 22 Oct 2024 02:32:34 -0700
Subject: [PATCH] Pass online context from chat history to chat model for
 response

Previously only notes context from chat history was included. This
change includes online context from chat history for the model to use
for response generation.

This can reduce the need for online lookups by reusing previous online
context for faster responses. But it will increase overall response
time when past online context is not reused, because of the faster
context buildup per conversation.

Unsure if inclusion of this context is preferable. If not, both notes
and online context should be removed.
---
 src/khoj/processor/conversation/utils.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/khoj/processor/conversation/utils.py b/src/khoj/processor/conversation/utils.py
index 75f17963..3aee61c9 100644
--- a/src/khoj/processor/conversation/utils.py
+++ b/src/khoj/processor/conversation/utils.py
@@ -180,11 +180,15 @@ def generate_chatml_messages_with_context(
     # Extract Chat History for Context
     chatml_messages: List[ChatMessage] = []
     for chat in conversation_log.get("chat", []):
+        message_context = ""
         if not is_none_or_empty(chat.get("context")):
             references = "\n\n".join(
                 {f"# File: {item['file']}\n## {item['compiled']}\n" for item in chat.get("context") or []}
             )
             message_context = f"{prompts.notes_conversation.format(references=references)}\n\n"
+        if not is_none_or_empty(chat.get("onlineContext")):
+            message_context += f"{prompts.online_search_conversation.format(online_results=chat.get('onlineContext'))}"
+        if not is_none_or_empty(chat.get("context")) or not is_none_or_empty(chat.get("onlineContext")):
             reconstructed_context_message = ChatMessage(content=message_context, role="context")
             chatml_messages.insert(0, reconstructed_context_message)