mirror of
https://github.com/khoaliber/khoj.git
synced 2026-03-02 21:19:12 +00:00
Keep search results passed to GPT as context in conversation logs
This will be useful to: 1. show the source references used to arrive at an answer, and 2. carry out multi-turn conversations.
This commit is contained in:
@@ -8,6 +8,7 @@ import openai
|
||||
|
||||
# Internal Packages
|
||||
from khoj.utils.constants import empty_escape_sequences
|
||||
from khoj.utils.helpers import merge_dicts
|
||||
|
||||
|
||||
def answer(text, user_query, model, api_key=None, temperature=0.5, max_tokens=500):
|
||||
@@ -192,7 +193,7 @@ def message_to_log(user_message, gpt_message, user_message_metadata={}, conversa
|
||||
current_dt = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
# Create json log from Human's message
|
||||
human_log = user_message_metadata or default_user_message_metadata
|
||||
human_log = merge_dicts(user_message_metadata, default_user_message_metadata)
|
||||
human_log["message"] = user_message
|
||||
human_log["by"] = "you"
|
||||
human_log["created"] = current_dt
|
||||
|
||||
@@ -111,7 +111,7 @@ def chat(q: Optional[str] = None):
|
||||
# Update Conversation History
|
||||
state.processor_config.conversation.chat_session = message_to_prompt(q, chat_session, gpt_message=gpt_response)
|
||||
state.processor_config.conversation.meta_log["chat"] = message_to_log(
|
||||
q, gpt_response, conversation_log=meta_log.get("chat", [])
|
||||
q, gpt_response, user_message_metadata={"context": collated_result}, conversation_log=meta_log.get("chat", [])
|
||||
)
|
||||
|
||||
return {"status": status, "response": gpt_response}
|
||||
|
||||
Reference in New Issue
Block a user