mirror of
https://github.com/khoaliber/khoj.git
synced 2026-03-08 05:39:13 +00:00
Upgrade default offline chat model to llama 3.1
This commit is contained in:
@@ -378,7 +378,7 @@ def client_offline_chat(search_config: SearchConfig, default_user2: KhojUser):
     # Initialize Processor from Config
     ChatModelOptionsFactory(
-        chat_model="NousResearch/Hermes-2-Pro-Mistral-7B-GGUF",
+        chat_model="bartowski/Meta-Llama-3.1-8B-Instruct-GGUF",
         tokenizer=None,
         max_prompt_size=None,
         model_type="offline",
@@ -49,7 +49,7 @@ class ChatModelOptionsFactory(factory.django.DjangoModelFactory):
     max_prompt_size = 3500
     tokenizer = None
-    chat_model = "NousResearch/Hermes-2-Pro-Mistral-7B-GGUF"
+    chat_model = "bartowski/Meta-Llama-3.1-8B-Instruct-GGUF"
     model_type = "offline"
     openai_config = factory.LazyAttribute(
         lambda obj: OpenAIProcessorConversationConfigFactory() if os.getenv("OPENAI_API_KEY") else None
Reference in New Issue
Block a user