Mirror of https://github.com/khoaliber/khoj.git, synced 2026-03-07 05:40:17 +00:00
Use the chat_model specified in the new offline_chat section of the config
- Dedupe the offline_chat_model variable: only reference the offline chat model stored under offline_chat
- Delete the previous chat_model field under GPT4AllProcessorConfig
- Set the offline chat model to use via the config/offline_chat API endpoint
This commit is contained in:
@@ -288,6 +288,7 @@ if not state.demo:
|
|||||||
async def set_processor_enable_offline_chat_config_data(
|
async def set_processor_enable_offline_chat_config_data(
|
||||||
request: Request,
|
request: Request,
|
||||||
enable_offline_chat: bool,
|
enable_offline_chat: bool,
|
||||||
|
offline_chat_model: Optional[str] = None,
|
||||||
client: Optional[str] = None,
|
client: Optional[str] = None,
|
||||||
):
|
):
|
||||||
_initialize_config()
|
_initialize_config()
|
||||||
@@ -302,6 +303,8 @@ if not state.demo:
|
|||||||
|
|
||||||
assert state.config.processor.conversation is not None
|
assert state.config.processor.conversation is not None
|
||||||
state.config.processor.conversation.offline_chat.enable_offline_chat = enable_offline_chat
|
state.config.processor.conversation.offline_chat.enable_offline_chat = enable_offline_chat
|
||||||
|
if offline_chat_model is not None:
|
||||||
|
state.config.processor.conversation.offline_chat.chat_model = offline_chat_model
|
||||||
state.processor_config = configure_processor(state.config.processor, state.processor_config)
|
state.processor_config = configure_processor(state.config.processor, state.processor_config)
|
||||||
|
|
||||||
update_telemetry_state(
|
update_telemetry_state(
|
||||||
|
|||||||
@@ -122,7 +122,7 @@ def generate_chat_response(
|
|||||||
conversation_log=meta_log,
|
conversation_log=meta_log,
|
||||||
completion_func=partial_completion,
|
completion_func=partial_completion,
|
||||||
conversation_command=conversation_command,
|
conversation_command=conversation_command,
|
||||||
model=state.processor_config.conversation.gpt4all_model.chat_model,
|
model=state.processor_config.conversation.offline_chat.chat_model,
|
||||||
)
|
)
|
||||||
|
|
||||||
elif state.processor_config.conversation.openai_model:
|
elif state.processor_config.conversation.openai_model:
|
||||||
|
|||||||
@@ -84,7 +84,6 @@ class SearchModels:
|
|||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class GPT4AllProcessorConfig:
|
class GPT4AllProcessorConfig:
|
||||||
chat_model: Optional[str] = None
|
|
||||||
loaded_model: Union[Any, None] = None
|
loaded_model: Union[Any, None] = None
|
||||||
|
|
||||||
|
|
||||||
@@ -95,7 +94,6 @@ class ConversationProcessorConfigModel:
|
|||||||
):
|
):
|
||||||
self.openai_model = conversation_config.openai
|
self.openai_model = conversation_config.openai
|
||||||
self.gpt4all_model = GPT4AllProcessorConfig()
|
self.gpt4all_model = GPT4AllProcessorConfig()
|
||||||
self.gpt4all_model.chat_model = conversation_config.offline_chat_model
|
|
||||||
self.offline_chat = conversation_config.offline_chat
|
self.offline_chat = conversation_config.offline_chat
|
||||||
self.conversation_logfile = Path(conversation_config.conversation_logfile)
|
self.conversation_logfile = Path(conversation_config.conversation_logfile)
|
||||||
self.chat_session: List[str] = []
|
self.chat_session: List[str] = []
|
||||||
@@ -103,7 +101,7 @@ class ConversationProcessorConfigModel:
|
|||||||
|
|
||||||
if self.offline_chat.enable_offline_chat:
|
if self.offline_chat.enable_offline_chat:
|
||||||
try:
|
try:
|
||||||
self.gpt4all_model.loaded_model = download_model(self.gpt4all_model.chat_model)
|
self.gpt4all_model.loaded_model = download_model(self.offline_chat.chat_model)
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
self.offline_chat.enable_offline_chat = False
|
self.offline_chat.enable_offline_chat = False
|
||||||
self.gpt4all_model.loaded_model = None
|
self.gpt4all_model.loaded_model = None
|
||||||
|
|||||||
Reference in New Issue
Block a user