Update Chat API, Logs, and Interfaces to store and use references as a list

- Remove the need to split by magic string in emacs and chat interfaces
- Move the compiling of references into a context string for GPT into the GPT layer
- Update setup in tests to use new style of setting references
- Rename the first argument of converse to the more appropriate "references"
This commit is contained in:
Debanjum Singh Solanky
2023-03-24 21:55:22 +07:00
parent b08745b541
commit 508b2176b7
6 changed files with 91 additions and 86 deletions

View File

@@ -443,9 +443,8 @@ RECEIVE-DATE is the message receive date."
(let* ((message (cdr (or (assoc 'response json-response) (assoc 'message json-response))))
(sender (cdr (assoc 'by json-response)))
(receive-date (cdr (assoc 'created json-response)))
(context (or (cdr (assoc 'context json-response)) ""))
(reference-source-texts (split-string context "\n\n# " t))
(footnotes (mapcar #'khoj--generate-reference reference-source-texts))
(references (or (cdr (assoc 'context json-response)) '()))
(footnotes (mapcar #'khoj--generate-reference references))
(footnote-links (mapcar #'car footnotes))
(footnote-defs (mapcar #'cdr footnotes)))
(thread-first

View File

@@ -39,7 +39,6 @@
let references = '';
if (context) {
references = context
.split("\n\n# ")
.map((reference, index) => generateReference(reference, index))
.join("<sup>,</sup>");
}

View File

@@ -223,13 +223,14 @@ A:{ "search-type": "notes" }"""
return json.loads(story.strip(empty_escape_sequences))
def converse(text, user_query, conversation_log={}, api_key=None, temperature=0.2):
def converse(references, user_query, conversation_log={}, api_key=None, temperature=0.2):
"""
Converse with user using OpenAI's ChatGPT
"""
# Initialize Variables
model = "gpt-3.5-turbo"
openai.api_key = api_key or os.getenv("OPENAI_API_KEY")
compiled_references = "\n\n".join({f"# {item}" for item in references})
personality_primer = "You are Khoj, a friendly, smart and helpful personal assistant."
conversation_primer = f"""
@@ -237,7 +238,7 @@ Using the notes and our past conversations as context, answer the following ques
Current Date: {datetime.now().strftime("%Y-%m-%d")}
Notes:
{text}
{compiled_references}
Question: {user_query}"""

View File

@@ -215,11 +215,11 @@ def chat(q: Optional[str] = None):
result_list = []
for query in inferred_queries:
result_list.extend(search(query, n=5, r=True, score_threshold=-5.0, dedupe=False))
collated_result = "\n\n".join({f"# {item.additional['compiled']}" for item in result_list})
compiled_references = [item.additional["compiled"] for item in result_list]
try:
with timer("Generating chat response took", logger):
gpt_response = converse(collated_result, q, meta_log, api_key=api_key)
gpt_response = converse(compiled_references, q, meta_log, api_key=api_key)
status = "ok"
except Exception as e:
gpt_response = str(e)
@@ -231,8 +231,8 @@ def chat(q: Optional[str] = None):
q,
gpt_response,
user_message_metadata={"created": user_message_time},
khoj_message_metadata={"context": collated_result, "intent": {"inferred-queries": inferred_queries}},
khoj_message_metadata={"context": compiled_references, "intent": {"inferred-queries": inferred_queries}},
conversation_log=meta_log.get("chat", []),
)
return {"status": status, "response": gpt_response, "context": collated_result}
return {"status": status, "response": gpt_response, "context": compiled_references}