diff --git a/src/khoj/routers/api.py b/src/khoj/routers/api.py
index 91db7c58..8dc0a37e 100644
--- a/src/khoj/routers/api.py
+++ b/src/khoj/routers/api.py
@@ -288,6 +288,7 @@ if not state.demo:
     async def set_processor_enable_offline_chat_config_data(
         request: Request,
         enable_offline_chat: bool,
+        offline_chat_model: Optional[str] = None,
         client: Optional[str] = None,
     ):
         _initialize_config()
@@ -302,6 +303,8 @@ if not state.demo:
 
         assert state.config.processor.conversation is not None
         state.config.processor.conversation.offline_chat.enable_offline_chat = enable_offline_chat
+        if offline_chat_model is not None:
+            state.config.processor.conversation.offline_chat.chat_model = offline_chat_model
         state.processor_config = configure_processor(state.config.processor, state.processor_config)
 
         update_telemetry_state(
diff --git a/src/khoj/routers/helpers.py b/src/khoj/routers/helpers.py
index 0bc66991..d8b0aa8b 100644
--- a/src/khoj/routers/helpers.py
+++ b/src/khoj/routers/helpers.py
@@ -122,7 +122,7 @@ def generate_chat_response(
                 conversation_log=meta_log,
                 completion_func=partial_completion,
                 conversation_command=conversation_command,
-                model=state.processor_config.conversation.gpt4all_model.chat_model,
+                model=state.processor_config.conversation.offline_chat.chat_model,
             )
 
         elif state.processor_config.conversation.openai_model:
diff --git a/src/khoj/utils/config.py b/src/khoj/utils/config.py
index 90e8862a..daae1982 100644
--- a/src/khoj/utils/config.py
+++ b/src/khoj/utils/config.py
@@ -84,7 +84,6 @@ class SearchModels:
 
 @dataclass
 class GPT4AllProcessorConfig:
-    chat_model: Optional[str] = None
     loaded_model: Union[Any, None] = None
 
 
@@ -95,7 +94,6 @@ class ConversationProcessorConfigModel:
     ):
         self.openai_model = conversation_config.openai
         self.gpt4all_model = GPT4AllProcessorConfig()
-        self.gpt4all_model.chat_model = conversation_config.offline_chat_model
         self.offline_chat = conversation_config.offline_chat
         self.conversation_logfile = Path(conversation_config.conversation_logfile)
         self.chat_session: List[str] = []
@@ -103,7 +101,7 @@ class ConversationProcessorConfigModel:
 
         if self.offline_chat.enable_offline_chat:
             try:
-                self.gpt4all_model.loaded_model = download_model(self.gpt4all_model.chat_model)
+                self.gpt4all_model.loaded_model = download_model(self.offline_chat.chat_model)
             except ValueError as e:
                 self.offline_chat.enable_offline_chat = False
                 self.gpt4all_model.loaded_model = None