mirror of
https://github.com/khoaliber/khoj.git
synced 2026-03-09 21:29:11 +00:00
Support multi-turn conversations in chat mode
- Only use decent quality search results, if any, as context - Pass source results used by previous chat messages as context - Loosen prompt to allow looking at previous chats and notes to answer - Pass current date for context - Make GPT provide a reason when it can't answer the question. Gives the user context to tune their questions
This commit is contained in:
@@ -125,7 +125,9 @@ def converse(text, user_query, conversation_log=None, api_key=None, temperature=
|
|||||||
|
|
||||||
personality_primer = "You are a friendly, helpful personal assistant."
|
personality_primer = "You are a friendly, helpful personal assistant."
|
||||||
conversation_primer = f"""
|
conversation_primer = f"""
|
||||||
Using my notes below, answer the following question. If the answer is not contained within the notes, say "I don't know."
|
Using our chats and notes as context, answer the following question.
|
||||||
|
If the answer is not contained within the provided context, say "I don't know." and provide reason.
|
||||||
|
Current Date: {datetime.now().strftime("%Y-%m-%d")}
|
||||||
|
|
||||||
Notes:
|
Notes:
|
||||||
{text}
|
{text}
|
||||||
@@ -154,7 +156,7 @@ Question: {user_query}"""
|
|||||||
def generate_chatml_messages_with_context(user_message, system_message, conversation_log=None):
|
def generate_chatml_messages_with_context(user_message, system_message, conversation_log=None):
|
||||||
"""Generate messages for ChatGPT with context from previous conversation"""
|
"""Generate messages for ChatGPT with context from previous conversation"""
|
||||||
# Extract Chat History for Context
|
# Extract Chat History for Context
|
||||||
chat_logs = [chat["message"] for chat in conversation_log.get("chat", [])]
|
chat_logs = [f'{chat["message"]}\n\nNotes:\n{chat.get("context","")}' for chat in conversation_log.get("chat", [])]
|
||||||
last_backnforth = reciprocal_conversation_to_chatml(chat_logs[-2:])
|
last_backnforth = reciprocal_conversation_to_chatml(chat_logs[-2:])
|
||||||
rest_backnforth = reciprocal_conversation_to_chatml(chat_logs[-4:-2])
|
rest_backnforth = reciprocal_conversation_to_chatml(chat_logs[-4:-2])
|
||||||
|
|
||||||
|
|||||||
@@ -97,7 +97,7 @@ def chat(q: Optional[str] = None):
|
|||||||
return {"status": "ok", "response": []}
|
return {"status": "ok", "response": []}
|
||||||
|
|
||||||
# Collate context for GPT
|
# Collate context for GPT
|
||||||
result_list = search(q, n=2, r=True)
|
result_list = search(q, n=2, r=True, score_threshold=0)
|
||||||
collated_result = "\n\n".join([f"# {item.additional['compiled']}" for item in result_list])
|
collated_result = "\n\n".join([f"# {item.additional['compiled']}" for item in result_list])
|
||||||
logger.debug(f"Reference Context:\n{collated_result}")
|
logger.debug(f"Reference Context:\n{collated_result}")
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user