mirror of
https://github.com/khoaliber/khoj.git
synced 2026-03-02 21:19:12 +00:00
Use multiple compiled search results for more relevant context to GPT
Increase temperature to allow GPT to collect answer across multiple notes
This commit is contained in:
@@ -10,7 +10,7 @@ import openai
 from khoj.utils.constants import empty_escape_sequences


-def answer(text, user_query, model, api_key=None, temperature=0.3, max_tokens=200):
+def answer(text, user_query, model, api_key=None, temperature=0.5, max_tokens=500):
     """
     Answer user query using provided text as reference with OpenAI's GPT
     """
@@ -59,10 +59,12 @@ def answer_beta(q: str):
     chat_session = state.processor_config.conversation.chat_session
     meta_log = state.processor_config.conversation.meta_log

-    # Converse with OpenAI GPT
-    result_list = search(q, n=1, r=True)
-    collated_result = "\n".join([item.entry for item in result_list])
-    logger.debug(f"Reference Notes:\n{collated_result}")
+    # Collate context for GPT
+    result_list = search(q, n=2, r=True)
+    collated_result = "\n\n".join([f"# {item.additional['compiled']}" for item in result_list])
+    logger.debug(f"Reference Context:\n{collated_result}")

+    # Make GPT respond to user query using provided context
     try:
         gpt_response = answer(collated_result, user_query=q, model=model, api_key=api_key)
         status = "ok"
Reference in New Issue
Block a user