mirror of
https://github.com/khoaliber/khoj.git
synced 2026-03-05 21:29:11 +00:00
Drop unused generated_files arg from chat context
generated_files wasn't being set (anymore?). But it was being passed around for chat context and being saved to the db. Also reduce the variables used to set the mermaid diagram description
This commit is contained in:
@@ -15,7 +15,7 @@ from khoj.processor.conversation.utils import (
|
||||
messages_to_print,
|
||||
)
|
||||
from khoj.utils.helpers import is_none_or_empty, truncate_code_context
|
||||
from khoj.utils.rawconfig import FileAttachment, LocationData
|
||||
from khoj.utils.rawconfig import LocationData
|
||||
from khoj.utils.yaml import yaml_dump
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -60,7 +60,6 @@ async def converse_anthropic(
|
||||
operator_results: Optional[List[OperatorRun]] = None,
|
||||
query_images: Optional[list[str]] = None,
|
||||
query_files: str = None,
|
||||
generated_files: List[FileAttachment] = None,
|
||||
program_execution_context: Optional[List[str]] = None,
|
||||
generated_asset_results: Dict[str, Dict] = {},
|
||||
location_data: LocationData = None,
|
||||
@@ -134,7 +133,6 @@ async def converse_anthropic(
|
||||
vision_enabled=vision_available,
|
||||
model_type=ChatModel.ModelType.ANTHROPIC,
|
||||
query_files=query_files,
|
||||
generated_files=generated_files,
|
||||
generated_asset_results=generated_asset_results,
|
||||
program_execution_context=program_execution_context,
|
||||
)
|
||||
|
||||
@@ -15,7 +15,7 @@ from khoj.processor.conversation.utils import (
|
||||
messages_to_print,
|
||||
)
|
||||
from khoj.utils.helpers import is_none_or_empty, truncate_code_context
|
||||
from khoj.utils.rawconfig import FileAttachment, LocationData
|
||||
from khoj.utils.rawconfig import LocationData
|
||||
from khoj.utils.yaml import yaml_dump
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -69,7 +69,6 @@ async def converse_gemini(
|
||||
operator_results: Optional[List[OperatorRun]] = None,
|
||||
query_images: Optional[list[str]] = None,
|
||||
query_files: str = None,
|
||||
generated_files: List[FileAttachment] = None,
|
||||
generated_asset_results: Dict[str, Dict] = {},
|
||||
program_execution_context: List[str] = None,
|
||||
location_data: LocationData = None,
|
||||
@@ -145,7 +144,6 @@ async def converse_gemini(
|
||||
vision_enabled=vision_available,
|
||||
model_type=ChatModel.ModelType.GOOGLE,
|
||||
query_files=query_files,
|
||||
generated_files=generated_files,
|
||||
generated_asset_results=generated_asset_results,
|
||||
program_execution_context=program_execution_context,
|
||||
)
|
||||
|
||||
@@ -22,7 +22,7 @@ from khoj.processor.conversation.utils import (
|
||||
messages_to_print,
|
||||
)
|
||||
from khoj.utils.helpers import ToolDefinition, is_none_or_empty, truncate_code_context
|
||||
from khoj.utils.rawconfig import FileAttachment, LocationData
|
||||
from khoj.utils.rawconfig import LocationData
|
||||
from khoj.utils.yaml import yaml_dump
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -104,7 +104,6 @@ async def converse_openai(
|
||||
operator_results: Optional[List[OperatorRun]] = None,
|
||||
query_images: Optional[list[str]] = None,
|
||||
query_files: str = None,
|
||||
generated_files: List[FileAttachment] = None,
|
||||
generated_asset_results: Dict[str, Dict] = {},
|
||||
program_execution_context: List[str] = None,
|
||||
location_data: LocationData = None,
|
||||
@@ -180,7 +179,6 @@ async def converse_openai(
|
||||
vision_enabled=vision_available,
|
||||
model_type=ChatModel.ModelType.OPENAI,
|
||||
query_files=query_files,
|
||||
generated_files=generated_files,
|
||||
generated_asset_results=generated_asset_results,
|
||||
program_execution_context=program_execution_context,
|
||||
)
|
||||
|
||||
@@ -449,7 +449,6 @@ async def save_to_conversation_log(
|
||||
query_images: List[str] = None,
|
||||
raw_query_files: List[FileAttachment] = [],
|
||||
generated_images: List[str] = [],
|
||||
raw_generated_files: List[FileAttachment] = [],
|
||||
generated_mermaidjs_diagram: str = None,
|
||||
research_results: Optional[List[ResearchIteration]] = None,
|
||||
train_of_thought: List[Any] = [],
|
||||
@@ -474,7 +473,6 @@ async def save_to_conversation_log(
|
||||
"trainOfThought": train_of_thought,
|
||||
"turnId": turn_id,
|
||||
"images": generated_images,
|
||||
"queryFiles": [file.model_dump(mode="json") for file in raw_generated_files],
|
||||
}
|
||||
|
||||
if generated_mermaidjs_diagram:
|
||||
@@ -569,7 +567,6 @@ def generate_chatml_messages_with_context(
|
||||
model_type="",
|
||||
context_message="",
|
||||
query_files: str = None,
|
||||
generated_files: List[FileAttachment] = None,
|
||||
generated_asset_results: Dict[str, Dict] = {},
|
||||
program_execution_context: List[str] = [],
|
||||
):
|
||||
@@ -694,10 +691,6 @@ def generate_chatml_messages_with_context(
|
||||
if not is_none_or_empty(context_message):
|
||||
messages.append(ChatMessage(content=context_message, role="user"))
|
||||
|
||||
if generated_files:
|
||||
message_attached_files = gather_raw_query_files({file.name: file.content for file in generated_files})
|
||||
messages.append(ChatMessage(content=message_attached_files, role="assistant"))
|
||||
|
||||
if not is_none_or_empty(generated_asset_results):
|
||||
messages.append(
|
||||
ChatMessage(
|
||||
|
||||
@@ -90,7 +90,6 @@ from khoj.utils.helpers import (
|
||||
is_operator_enabled,
|
||||
)
|
||||
from khoj.utils.rawconfig import (
|
||||
FileAttachment,
|
||||
FileFilterRequest,
|
||||
FilesFilterRequest,
|
||||
LocationData,
|
||||
@@ -732,7 +731,6 @@ async def event_generator(
|
||||
attached_file_context = gather_raw_query_files(query_files)
|
||||
|
||||
generated_images: List[str] = []
|
||||
generated_files: List[FileAttachment] = []
|
||||
generated_mermaidjs_diagram: str = None
|
||||
generated_asset_results: Dict = dict()
|
||||
program_execution_context: List[str] = []
|
||||
@@ -769,7 +767,6 @@ async def event_generator(
|
||||
train_of_thought=train_of_thought,
|
||||
raw_query_files=raw_query_files,
|
||||
generated_images=generated_images,
|
||||
raw_generated_files=generated_asset_results,
|
||||
generated_mermaidjs_diagram=generated_mermaidjs_diagram,
|
||||
user_message_time=user_message_time,
|
||||
tracer=tracer,
|
||||
@@ -816,7 +813,6 @@ async def event_generator(
|
||||
train_of_thought=train_of_thought,
|
||||
raw_query_files=raw_query_files,
|
||||
generated_images=generated_images,
|
||||
raw_generated_files=generated_asset_results,
|
||||
generated_mermaidjs_diagram=generated_mermaidjs_diagram,
|
||||
user_message_time=user_message_time,
|
||||
tracer=tracer,
|
||||
@@ -1316,8 +1312,6 @@ async def event_generator(
|
||||
yield result
|
||||
|
||||
inferred_queries = []
|
||||
diagram_description = ""
|
||||
|
||||
async for result in generate_mermaidjs_diagram(
|
||||
q=defiltered_query,
|
||||
chat_history=chat_history,
|
||||
@@ -1337,9 +1331,7 @@ async def event_generator(
|
||||
better_diagram_description_prompt, mermaidjs_diagram_description = result
|
||||
if better_diagram_description_prompt and mermaidjs_diagram_description:
|
||||
inferred_queries.append(better_diagram_description_prompt)
|
||||
diagram_description = mermaidjs_diagram_description
|
||||
|
||||
generated_mermaidjs_diagram = diagram_description
|
||||
generated_mermaidjs_diagram = mermaidjs_diagram_description
|
||||
|
||||
generated_asset_results["diagrams"] = {
|
||||
"query": better_diagram_description_prompt,
|
||||
@@ -1386,7 +1378,6 @@ async def event_generator(
|
||||
user_name,
|
||||
uploaded_images,
|
||||
attached_file_context,
|
||||
generated_files,
|
||||
program_execution_context,
|
||||
generated_asset_results,
|
||||
is_subscribed,
|
||||
@@ -1447,7 +1438,6 @@ async def event_generator(
|
||||
train_of_thought=train_of_thought,
|
||||
raw_query_files=raw_query_files,
|
||||
generated_images=generated_images,
|
||||
raw_generated_files=generated_files,
|
||||
generated_mermaidjs_diagram=generated_mermaidjs_diagram,
|
||||
tracer=tracer,
|
||||
)
|
||||
|
||||
@@ -127,7 +127,6 @@ from khoj.utils.helpers import (
|
||||
)
|
||||
from khoj.utils.rawconfig import (
|
||||
ChatRequestBody,
|
||||
FileAttachment,
|
||||
FileData,
|
||||
LocationData,
|
||||
SearchResponse,
|
||||
@@ -1593,7 +1592,6 @@ async def agenerate_chat_response(
|
||||
user_name: Optional[str] = None,
|
||||
query_images: Optional[List[str]] = None,
|
||||
query_files: str = None,
|
||||
raw_generated_files: List[FileAttachment] = [],
|
||||
program_execution_context: List[str] = [],
|
||||
generated_asset_results: Dict[str, Dict] = {},
|
||||
is_subscribed: bool = False,
|
||||
@@ -1641,7 +1639,6 @@ async def agenerate_chat_response(
|
||||
operator_results=operator_results,
|
||||
query_images=query_images,
|
||||
query_files=query_files,
|
||||
generated_files=raw_generated_files,
|
||||
generated_asset_results=generated_asset_results,
|
||||
program_execution_context=program_execution_context,
|
||||
location_data=location_data,
|
||||
@@ -1672,7 +1669,6 @@ async def agenerate_chat_response(
|
||||
operator_results=operator_results,
|
||||
query_images=query_images,
|
||||
query_files=query_files,
|
||||
generated_files=raw_generated_files,
|
||||
generated_asset_results=generated_asset_results,
|
||||
program_execution_context=program_execution_context,
|
||||
location_data=location_data,
|
||||
@@ -1702,7 +1698,6 @@ async def agenerate_chat_response(
|
||||
operator_results=operator_results,
|
||||
query_images=query_images,
|
||||
query_files=query_files,
|
||||
generated_files=raw_generated_files,
|
||||
generated_asset_results=generated_asset_results,
|
||||
program_execution_context=program_execution_context,
|
||||
location_data=location_data,
|
||||
|
||||
Reference in New Issue
Block a user