Add support for our first Local LLM 🤖🏠 (#330)

* Add support for gpt4all's falcon model as an additional conversation processor
- Update the UI pages to allow the user to point to the new endpoints for GPT
- Update the internal schemas to support both GPT4All models and OpenAI
- Add unit tests benchmarking some of the Falcon model's performance
* Add exc_info to include stack trace in error logs for text processors
* Pull shared functions into utils.py to be used across gpt4all and gpt
* Add migration for new processor conversation schema
* Skip GPT4All actor tests due to typing issues
* Fix Obsidian processor configuration in auto-configure flow
* Rename enable_local_llm to enable_offline_chat
This commit is contained in:
sabaimran
2023-07-26 23:27:08 +00:00
committed by GitHub
parent 23d77ee338
commit 8b2af0b5ef
34 changed files with 1258 additions and 291 deletions

View File

@@ -373,7 +373,7 @@ CONFIG is json obtained from Khoj config API."
(ignore-error json-end-of-file (json-parse-buffer :object-type 'alist :array-type 'list :null-object json-null :false-object json-false))))
(default-index-dir (khoj--get-directory-from-config default-config '(content-type org embeddings-file)))
(default-chat-dir (khoj--get-directory-from-config default-config '(processor conversation conversation-logfile)))
(chat-model (or khoj-chat-model (alist-get 'chat-model (alist-get 'conversation (alist-get 'processor default-config)))))
(chat-model (or khoj-chat-model (alist-get 'chat-model (alist-get 'openai (alist-get 'conversation (alist-get 'processor default-config))))))
(default-model (alist-get 'model (alist-get 'conversation (alist-get 'processor default-config))))
(config (or current-config default-config)))
@@ -423,15 +423,27 @@ CONFIG is json obtained from Khoj config API."
;; Configure processors
(cond
((not khoj-openai-api-key)
(setq config (delq (assoc 'processor config) config)))
(let* ((processor (assoc 'processor config))
(conversation (assoc 'conversation processor))
(openai (assoc 'openai conversation)))
(when openai
;; Unset the `openai' field in the khoj conversation processor config
(message "khoj.el: disable Khoj Chat using OpenAI as your OpenAI API key got removed from config")
(setcdr conversation (delq openai (cdr conversation)))
(setcdr processor (delq conversation (cdr processor)))
(setq config (delq processor config))
(push conversation (cdr processor))
(push processor config))))
((not current-config)
(message "khoj.el: Chat not configured yet.")
(setq config (delq (assoc 'processor config) config))
(cl-pushnew `(processor . ((conversation . ((conversation-logfile . ,(format "%s/conversation.json" default-chat-dir))
(chat-model . ,chat-model)
(model . ,default-model)
(openai-api-key . ,khoj-openai-api-key)))))
(openai . (
(chat-model . ,chat-model)
(api-key . ,khoj-openai-api-key)
))
))))
config))
((not (alist-get 'conversation (alist-get 'processor config)))
@@ -440,21 +452,19 @@ CONFIG is json obtained from Khoj config API."
(setq new-processor-type (delq (assoc 'conversation new-processor-type) new-processor-type))
(cl-pushnew `(conversation . ((conversation-logfile . ,(format "%s/conversation.json" default-chat-dir))
(chat-model . ,chat-model)
(model . ,default-model)
(openai-api-key . ,khoj-openai-api-key)))
new-processor-type)
(setq config (delq (assoc 'processor config) config))
(cl-pushnew `(processor . ,new-processor-type) config)))
;; Else if khoj is not configured with specified openai api key
((not (and (equal (alist-get 'openai-api-key (alist-get 'conversation (alist-get 'processor config))) khoj-openai-api-key)
(equal (alist-get 'chat-model (alist-get 'conversation (alist-get 'processor config))) khoj-chat-model)))
((not (and (equal (alist-get 'api-key (alist-get 'openai (alist-get 'conversation (alist-get 'processor config)))) khoj-openai-api-key)
(equal (alist-get 'chat-model (alist-get 'openai (alist-get 'conversation (alist-get 'processor config)))) khoj-chat-model)))
(message "khoj.el: Chat configuration has gone stale.")
(let* ((chat-directory (khoj--get-directory-from-config config '(processor conversation conversation-logfile)))
(new-processor-type (alist-get 'processor config)))
(setq new-processor-type (delq (assoc 'conversation new-processor-type) new-processor-type))
(cl-pushnew `(conversation . ((conversation-logfile . ,(format "%s/conversation.json" chat-directory))
(model . ,default-model)
(chat-model . ,khoj-chat-model)
(openai-api-key . ,khoj-openai-api-key)))
new-processor-type)

View File

@@ -36,7 +36,7 @@ export async function configureKhojBackend(vault: Vault, setting: KhojSetting, n
let khojDefaultMdIndexDirectory = getIndexDirectoryFromBackendConfig(defaultConfig["content-type"]["markdown"]["embeddings-file"]);
let khojDefaultPdfIndexDirectory = getIndexDirectoryFromBackendConfig(defaultConfig["content-type"]["pdf"]["embeddings-file"]);
let khojDefaultChatDirectory = getIndexDirectoryFromBackendConfig(defaultConfig["processor"]["conversation"]["conversation-logfile"]);
let khojDefaultChatModelName = defaultConfig["processor"]["conversation"]["model"];
let khojDefaultChatModelName = defaultConfig["processor"]["conversation"]["openai"]["chat-model"];
// Get current config if khoj backend configured, else get default config from khoj backend
await request(khoj_already_configured ? khojConfigUrl : `${khojConfigUrl}/default`)
@@ -142,25 +142,35 @@ export async function configureKhojBackend(vault: Vault, setting: KhojSetting, n
data["processor"] = {
"conversation": {
"conversation-logfile": `${khojDefaultChatDirectory}/conversation.json`,
"model": khojDefaultChatModelName,
"openai-api-key": setting.openaiApiKey,
}
"openai": {
"chat-model": khojDefaultChatModelName,
"api-key": setting.openaiApiKey,
}
},
}
}
// Else if khoj config has no conversation processor config
else if (!data["processor"]["conversation"]) {
data["processor"]["conversation"] = {
"conversation-logfile": `${khojDefaultChatDirectory}/conversation.json`,
"model": khojDefaultChatModelName,
"openai-api-key": setting.openaiApiKey,
else if (!data["processor"]["conversation"] || !data["processor"]["conversation"]["openai"]) {
data["processor"] = {
"conversation": {
"conversation-logfile": `${khojDefaultChatDirectory}/conversation.json`,
"openai": {
"chat-model": khojDefaultChatModelName,
"api-key": setting.openaiApiKey,
}
},
}
}
// Else if khoj is not configured with OpenAI API key from khoj plugin settings
else if (data["processor"]["conversation"]["openai-api-key"] !== setting.openaiApiKey) {
data["processor"]["conversation"] = {
"conversation-logfile": data["processor"]["conversation"]["conversation-logfile"],
"model": data["processor"]["conversation"]["model"],
"openai-api-key": setting.openaiApiKey,
else if (data["processor"]["conversation"]["openai"]["api-key"] !== setting.openaiApiKey) {
data["processor"] = {
"conversation": {
"conversation-logfile": data["processor"]["conversation"]["conversation-logfile"],
"openai": {
"chat-model": data["processor"]["conversation"]["openai"]["chat-model"],
"api-key": setting.openaiApiKey,
}
},
}
}