Merge branch 'master' of github.com:khoj-ai/khoj into features/add-chat-controls

This commit is contained in:
sabaimran
2025-01-29 14:58:15 -08:00
3 changed files with 17 additions and 4 deletions

View File

@@ -267,7 +267,7 @@ export default function Agents() {
     const modelOptions: ModelOptions[] = userConfig?.chat_model_options || [];
     const selectedChatModelOption: number = userConfig?.selected_chat_model_config || 0;
-    const isSubscribed: boolean = isUserSubscribed(userConfig);
+    const isSubscribed: boolean = userConfig?.is_active || false;
     // The default model option should map to the item in the modelOptions array that has the same id as the selectedChatModelOption
     const defaultModelOption = modelOptions.find(

View File

@@ -1,7 +1,7 @@
 import logging
 import os
 from threading import Thread
-from typing import Dict
+from typing import Dict, List

 import openai
 from openai.types.chat.chat_completion import ChatCompletion
@@ -181,6 +181,19 @@ def llm_thread(
     elif model_name.startswith("o1-"):
         temperature = 1
         model_kwargs.pop("response_format", None)
+    elif model_name.startswith("deepseek-reasoner"):
+        # Two successive messages cannot be from the same role. Should merge any back-to-back messages from the same role.
+        # The first message should always be a user message (except system message).
+        updated_messages: List[dict] = []
+        for i, message in enumerate(formatted_messages):
+            if i > 0 and message["role"] == formatted_messages[i - 1]["role"]:
+                updated_messages[-1]["content"] += " " + message["content"]
+            elif i == 1 and formatted_messages[i - 1]["role"] == "system" and message["role"] == "assistant":
+                updated_messages[-1]["content"] += " " + message["content"]
+            else:
+                updated_messages.append(message)
+        formatted_messages = updated_messages

     if os.getenv("KHOJ_LLM_SEED"):
         model_kwargs["seed"] = int(os.getenv("KHOJ_LLM_SEED"))

View File

@@ -104,7 +104,7 @@ async def put_content(
                 incoming_entries_size_limit=10,
                 subscribed_incoming_entries_size_limit=75,
                 total_entries_size_limit=10,
-                subscribed_total_entries_size_limit=200,
+                subscribed_total_entries_size_limit=500,
             )
         ),
 ):
@@ -126,7 +126,7 @@ async def patch_content(
                 incoming_entries_size_limit=10,
                 subscribed_incoming_entries_size_limit=75,
                 total_entries_size_limit=10,
-                subscribed_total_entries_size_limit=200,
+                subscribed_total_entries_size_limit=500,
             )
         ),
 ):