Use multiple compiled search results for more relevant context to GPT

Increase temperature to allow GPT to collect answer across multiple
notes
This commit is contained in:
Debanjum Singh Solanky
2023-03-03 13:31:41 -06:00
parent c3b624e351
commit 9d42b5d60d
2 changed files with 7 additions and 5 deletions

View File

@@ -10,7 +10,7 @@ import openai
 from khoj.utils.constants import empty_escape_sequences
-def answer(text, user_query, model, api_key=None, temperature=0.3, max_tokens=200):
+def answer(text, user_query, model, api_key=None, temperature=0.5, max_tokens=500):
     """
     Answer user query using provided text as reference with OpenAI's GPT
     """

View File

@@ -59,10 +59,12 @@ def answer_beta(q: str):
     chat_session = state.processor_config.conversation.chat_session
     meta_log = state.processor_config.conversation.meta_log
-    # Converse with OpenAI GPT
-    result_list = search(q, n=1, r=True)
-    collated_result = "\n".join([item.entry for item in result_list])
-    logger.debug(f"Reference Notes:\n{collated_result}")
+    # Collate context for GPT
+    result_list = search(q, n=2, r=True)
+    collated_result = "\n\n".join([f"# {item.additional['compiled']}" for item in result_list])
+    logger.debug(f"Reference Context:\n{collated_result}")
+
+    # Make GPT respond to user query using provided context
     try:
         gpt_response = answer(collated_result, user_query=q, model=model, api_key=api_key)
         status = "ok"