[Multi-User Part 8]: Make conversation processor settings server-wide (#529)

- Rather than having each individual user configure their conversation settings, allow the server admin to configure the OpenAI API key or offline model once, and let all the users re-use that configuration.
- To configure the settings, the admin should go to the `django/admin` page and configure the relevant chat settings. To create an admin, run `python3 src/manage.py createsuperuser` and enter the details. For simplicity, the email and username should match.
- Remove deprecated/unnecessary endpoints and views for configuring per-user chat settings
This commit is contained in:
sabaimran
2023-11-02 10:43:27 -07:00
committed by GitHub
parent 0fb81189ca
commit fe6720fa06
21 changed files with 458 additions and 509 deletions

View File

@@ -4,9 +4,10 @@ import os
from database.models import (
KhojUser,
KhojApiUser,
ConversationProcessorConfig,
ChatModelOptions,
OfflineChatProcessorConversationConfig,
OpenAIProcessorConversationConfig,
UserConversationConfig,
Conversation,
)
@@ -30,20 +31,29 @@ class ApiUserFactory(factory.django.DjangoModelFactory):
token = factory.Faker("password")
class ConversationProcessorConfigFactory(factory.django.DjangoModelFactory):
class ChatModelOptionsFactory(factory.django.DjangoModelFactory):
class Meta:
model = ConversationProcessorConfig
model = ChatModelOptions
max_prompt_size = 2000
tokenizer = None
chat_model = "llama-2-7b-chat.ggmlv3.q4_0.bin"
model_type = "offline"
class UserConversationProcessorConfigFactory(factory.django.DjangoModelFactory):
class Meta:
model = UserConversationConfig
user = factory.SubFactory(UserFactory)
setting = factory.SubFactory(ChatModelOptionsFactory)
class OfflineChatProcessorConversationConfigFactory(factory.django.DjangoModelFactory):
class Meta:
model = OfflineChatProcessorConversationConfig
enable_offline_chat = True
chat_model = "llama-2-7b-chat.ggmlv3.q4_0.bin"
enabled = True
class OpenAIProcessorConversationConfigFactory(factory.django.DjangoModelFactory):
@@ -51,7 +61,6 @@ class OpenAIProcessorConversationConfigFactory(factory.django.DjangoModelFactory
model = OpenAIProcessorConversationConfig
api_key = os.getenv("OPENAI_API_KEY")
chat_model = "gpt-3.5-turbo"
class ConversationFactory(factory.django.DjangoModelFactory):