From 45f461d175c64aee2b791bb1024f42fe63b6c3bd Mon Sep 17 00:00:00 2001
From: Debanjum Singh Solanky
Date: Sun, 5 Mar 2023 15:00:20 -0600
Subject: [PATCH] Keep search results passed to GPT as context in conversation
 logs

This will be useful to
1. Show source references used to arrive at answer
2. Carry out multi-turn conversations
---
 src/khoj/processor/conversation/gpt.py | 3 ++-
 src/khoj/routers/api_beta.py           | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/khoj/processor/conversation/gpt.py b/src/khoj/processor/conversation/gpt.py
index f6bc892f..b9264115 100644
--- a/src/khoj/processor/conversation/gpt.py
+++ b/src/khoj/processor/conversation/gpt.py
@@ -8,6 +8,7 @@ import openai
 
 # Internal Packages
 from khoj.utils.constants import empty_escape_sequences
+from khoj.utils.helpers import merge_dicts
 
 
 def answer(text, user_query, model, api_key=None, temperature=0.5, max_tokens=500):
@@ -192,7 +193,7 @@ def message_to_log(user_message, gpt_message, user_message_metadata={}, conversa
     current_dt = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
 
     # Create json log from Human's message
-    human_log = user_message_metadata or default_user_message_metadata
+    human_log = merge_dicts(user_message_metadata, default_user_message_metadata)
     human_log["message"] = user_message
     human_log["by"] = "you"
     human_log["created"] = current_dt
diff --git a/src/khoj/routers/api_beta.py b/src/khoj/routers/api_beta.py
index 7640020a..e46f097c 100644
--- a/src/khoj/routers/api_beta.py
+++ b/src/khoj/routers/api_beta.py
@@ -111,7 +111,7 @@ def chat(q: Optional[str] = None):
 
     # Update Conversation History
     state.processor_config.conversation.chat_session = message_to_prompt(q, chat_session, gpt_message=gpt_response)
     state.processor_config.conversation.meta_log["chat"] = message_to_log(
-        q, gpt_response, conversation_log=meta_log.get("chat", [])
+        q, gpt_response, user_message_metadata={"context": collated_result}, conversation_log=meta_log.get("chat", [])
     )
     return {"status": status, "response": gpt_response}