Upgrade default offline chat model to llama 3.1

Debanjum Singh Solanky
2024-08-16 07:58:04 -05:00
parent acdc3f9470
commit 58c8068079
12 changed files with 30 additions and 12 deletions

@@ -378,7 +378,7 @@ def client_offline_chat(search_config: SearchConfig, default_user2: KhojUser):
     # Initialize Processor from Config
     ChatModelOptionsFactory(
-        chat_model="NousResearch/Hermes-2-Pro-Mistral-7B-GGUF",
+        chat_model="bartowski/Meta-Llama-3.1-8B-Instruct-GGUF",
         tokenizer=None,
         max_prompt_size=None,
         model_type="offline",
     )

@@ -49,7 +49,7 @@ class ChatModelOptionsFactory(factory.django.DjangoModelFactory):
     max_prompt_size = 3500
     tokenizer = None
-    chat_model = "NousResearch/Hermes-2-Pro-Mistral-7B-GGUF"
+    chat_model = "bartowski/Meta-Llama-3.1-8B-Instruct-GGUF"
     model_type = "offline"
     openai_config = factory.LazyAttribute(
         lambda obj: OpenAIProcessorConversationConfigFactory() if os.getenv("OPENAI_API_KEY") else None
     )
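
For reference, the new default points at a GGUF checkpoint hosted on Hugging Face. Below is a minimal sketch of pulling and querying that checkpoint offline with llama-cpp-python; the Q4_K_M filename glob and the sample prompt are illustrative assumptions, not something this commit specifies.

# Minimal sketch: run the new default offline chat model via llama-cpp-python.
# Assumes `pip install llama-cpp-python huggingface-hub`. The quantization
# filename glob below is an illustrative assumption, not set by this commit.
from llama_cpp import Llama

llm = Llama.from_pretrained(
    repo_id="bartowski/Meta-Llama-3.1-8B-Instruct-GGUF",
    filename="*Q4_K_M.gguf",  # glob-matched against GGUF files in the repo
    n_ctx=3500,  # mirrors the factory's max_prompt_size default above
)

response = llm.create_chat_completion(
    messages=[{"role": "user", "content": "Hello, who are you?"}]
)
print(response["choices"][0]["message"]["content"])

Note that n_ctx here mirrors the factory's max_prompt_size of 3500; Llama 3.1 itself supports much longer contexts, so this is a conservative default.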