From a85ff941ca49538ac6090e4d891e72710737744f Mon Sep 17 00:00:00 2001
From: Debanjum Singh Solanky
Date: Wed, 4 Oct 2023 20:39:31 -0700
Subject: [PATCH] Make offline chat model user configurable

Only Llama v2 models supported by GPT4All will work, given the prompt
structure is not currently configurable
---
 src/khoj/utils/config.py    | 3 ++-
 src/khoj/utils/rawconfig.py | 1 +
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/khoj/utils/config.py b/src/khoj/utils/config.py
index f06d4c69..5accd2ad 100644
--- a/src/khoj/utils/config.py
+++ b/src/khoj/utils/config.py
@@ -84,7 +84,7 @@ class SearchModels:
 
 @dataclass
 class GPT4AllProcessorConfig:
-    chat_model: Optional[str] = "llama-2-7b-chat.ggmlv3.q4_0.bin"
+    chat_model: Optional[str] = None
     loaded_model: Union[Any, None] = None
 
 
@@ -95,6 +95,7 @@ class ConversationProcessorConfigModel:
     ):
         self.openai_model = conversation_config.openai
         self.gpt4all_model = GPT4AllProcessorConfig()
+        self.gpt4all_model.chat_model = conversation_config.offline_chat_model
         self.enable_offline_chat = conversation_config.enable_offline_chat
         self.conversation_logfile = Path(conversation_config.conversation_logfile)
         self.chat_session: List[str] = []
diff --git a/src/khoj/utils/rawconfig.py b/src/khoj/utils/rawconfig.py
index 0a916db4..30a98354 100644
--- a/src/khoj/utils/rawconfig.py
+++ b/src/khoj/utils/rawconfig.py
@@ -95,6 +95,7 @@ class ConversationProcessorConfig(ConfigBase):
     conversation_logfile: Path
     openai: Optional[OpenAIProcessorConfig]
     enable_offline_chat: Optional[bool] = False
+    offline_chat_model: Optional[str] = "llama-2-7b-chat.ggmlv3.q4_0.bin"
 
 
 class ProcessorConfig(ConfigBase):