mirror of
https://github.com/khoaliber/khoj.git
synced 2026-03-02 21:19:12 +00:00
Only auto-load available chat models from the Ollama provider for now
Allowing models from any OpenAI proxy service makes it too unwieldy, and many of them do not even support this endpoint.
This commit is contained in:
@@ -235,6 +235,10 @@ def initialization(interactive: bool = True):
|
||||
# Get OpenAI configs with custom base URLs
|
||||
custom_configs = AiModelApi.objects.exclude(api_base_url__isnull=True)
|
||||
|
||||
# Only enable for whitelisted provider names (i.e Ollama) for now
|
||||
# TODO: This is hacky. Will be replaced with more robust solution based on provider type enum
|
||||
custom_configs = custom_configs.filter(name__in=["Ollama"])
|
||||
|
||||
for config in custom_configs:
|
||||
try:
|
||||
# Create OpenAI client with custom base URL
|
||||
|
||||
Reference in New Issue
Block a user