Drop unused arguments to default tool picker, research mode

The is_automated_task check isn't required, as automations can no longer be
created via chat.

Conversation-specific file_filters are extracted directly in document
search, so they don't need to be passed down from the chat API endpoint.
This commit is contained in:
Debanjum
2025-08-27 14:35:24 -07:00
parent 9d7adbcbaa
commit be79b8a633
3 changed files with 2 additions and 9 deletions

View File

@@ -923,9 +923,7 @@ async def event_generator(
# Automated tasks are handled before to allow mixing them with other conversation commands # Automated tasks are handled before to allow mixing them with other conversation commands
cmds_to_rate_limit = [] cmds_to_rate_limit = []
is_automated_task = False
if q.startswith("/automated_task"): if q.startswith("/automated_task"):
is_automated_task = True
q = q.replace("/automated_task", "").lstrip() q = q.replace("/automated_task", "").lstrip()
cmds_to_rate_limit += [ConversationCommand.AutomatedTask] cmds_to_rate_limit += [ConversationCommand.AutomatedTask]
@@ -985,7 +983,6 @@ async def event_generator(
chosen_io = await aget_data_sources_and_output_format( chosen_io = await aget_data_sources_and_output_format(
q, q,
chat_history, chat_history,
is_automated_task,
user=user, user=user,
query_images=uploaded_images, query_images=uploaded_images,
agent=agent, agent=agent,
@@ -1017,7 +1014,6 @@ async def event_generator(
return return
defiltered_query = defilter_query(q) defiltered_query = defilter_query(q)
file_filters = conversation.file_filters if conversation and conversation.file_filters else []
if conversation_commands == [ConversationCommand.Research]: if conversation_commands == [ConversationCommand.Research]:
async for research_result in research( async for research_result in research(
@@ -1031,12 +1027,11 @@ async def event_generator(
send_status_func=partial(send_event, ChatEvent.STATUS), send_status_func=partial(send_event, ChatEvent.STATUS),
user_name=user_name, user_name=user_name,
location=location, location=location,
file_filters=file_filters,
query_files=attached_file_context, query_files=attached_file_context,
tracer=tracer,
cancellation_event=cancellation_event, cancellation_event=cancellation_event,
interrupt_queue=child_interrupt_queue, interrupt_queue=child_interrupt_queue,
abort_message=ChatEvent.END_EVENT.value, abort_message=ChatEvent.END_EVENT.value,
tracer=tracer,
): ):
if isinstance(research_result, ResearchIteration): if isinstance(research_result, ResearchIteration):
if research_result.summarizedResult: if research_result.summarizedResult:

View File

@@ -345,7 +345,6 @@ async def acheck_if_safe_prompt(system_prompt: str, user: KhojUser = None, lax:
async def aget_data_sources_and_output_format( async def aget_data_sources_and_output_format(
query: str, query: str,
chat_history: list[ChatMessageModel], chat_history: list[ChatMessageModel],
is_task: bool,
user: KhojUser, user: KhojUser,
query_images: List[str] = None, query_images: List[str] = None,
agent: Agent = None, agent: Agent = None,

View File

@@ -218,12 +218,11 @@ async def research(
send_status_func: Optional[Callable] = None, send_status_func: Optional[Callable] = None,
user_name: str = None, user_name: str = None,
location: LocationData = None, location: LocationData = None,
file_filters: List[str] = [],
tracer: dict = {},
query_files: str = None, query_files: str = None,
cancellation_event: Optional[asyncio.Event] = None, cancellation_event: Optional[asyncio.Event] = None,
interrupt_queue: Optional[asyncio.Queue] = None, interrupt_queue: Optional[asyncio.Queue] = None,
abort_message: str = ChatEvent.END_EVENT.value, abort_message: str = ChatEvent.END_EVENT.value,
tracer: dict = {},
): ):
max_document_searches = 7 max_document_searches = 7
max_online_searches = 3 max_online_searches = 3