Run online, offline chat actor, director tests for any supported provider

- Previously online chat actors, director tests only worked with openai.
  This change allows running them for any supported online provider
  including Google, Anthropic and Openai.

- Enable online/offline chat actor, director in two ways:
  1. Explicitly setting KHOJ_TEST_CHAT_PROVIDER environment variable to
     google, anthropic, openai, offline
  2. Implicitly by the first API key found from openai, google or anthropic.

- Default offline chat provider to use Llama 3.1 3B for faster, lower
  compute test runs
This commit is contained in:
Debanjum
2024-11-18 14:19:09 -08:00
parent 653127bf1d
commit 2a76c69d0d
6 changed files with 64 additions and 27 deletions

View File

@@ -13,6 +13,7 @@ from khoj.configure import (
)
from khoj.database.models import (
Agent,
ChatModelOptions,
GithubConfig,
GithubRepoConfig,
KhojApiUser,
@@ -39,6 +40,8 @@ from tests.helpers import (
SubscriptionFactory,
UserConversationProcessorConfigFactory,
UserFactory,
get_chat_api_key,
get_chat_provider,
)
@@ -307,10 +310,19 @@ def chat_client_builder(search_config, user, index_content=True, require_auth=Fa
configure_content(user, all_files)
# Initialize Processor from Config
if os.getenv("OPENAI_API_KEY"):
chat_model = ChatModelOptionsFactory(chat_model="gpt-4o-mini", model_type="openai")
chat_model.openai_config = OpenAIProcessorConversationConfigFactory()
UserConversationProcessorConfigFactory(user=user, setting=chat_model)
chat_provider = get_chat_provider()
online_chat_model: ChatModelOptionsFactory = None
if chat_provider == ChatModelOptions.ModelType.OPENAI:
online_chat_model = ChatModelOptionsFactory(chat_model="gpt-4o-mini", model_type="openai")
elif chat_provider == ChatModelOptions.ModelType.GOOGLE:
online_chat_model = ChatModelOptionsFactory(chat_model="gemini-1.5-flash", model_type="google")
elif chat_provider == ChatModelOptions.ModelType.ANTHROPIC:
online_chat_model = ChatModelOptionsFactory(chat_model="claude-3-5-haiku-20241022", model_type="anthropic")
if online_chat_model:
online_chat_model.openai_config = OpenAIProcessorConversationConfigFactory(
api_key=get_chat_api_key(chat_provider)
)
UserConversationProcessorConfigFactory(user=user, setting=online_chat_model)
state.anonymous_mode = not require_auth
@@ -385,7 +397,7 @@ def client_offline_chat(search_config: SearchConfig, default_user2: KhojUser):
# Initialize Processor from Config
ChatModelOptionsFactory(
chat_model="bartowski/Meta-Llama-3.1-8B-Instruct-GGUF",
chat_model="bartowski/Meta-Llama-3.1-3B-Instruct-GGUF",
tokenizer=None,
max_prompt_size=None,
model_type="offline",