diff --git a/src/khoj/interface/web/index.html b/src/khoj/interface/web/index.html index 0f3e8f78..cb2bae49 100644 --- a/src/khoj/interface/web/index.html +++ b/src/khoj/interface/web/index.html @@ -287,7 +287,7 @@ - +
diff --git a/src/khoj/processor/conversation/gpt4all/chat_model.py b/src/khoj/processor/conversation/gpt4all/chat_model.py index d3dd9bf5..d0cf22ff 100644 --- a/src/khoj/processor/conversation/gpt4all/chat_model.py +++ b/src/khoj/processor/conversation/gpt4all/chat_model.py @@ -119,7 +119,7 @@ def converse_offline( model: str = "llama-2-7b-chat.ggmlv3.q4_K_S.bin", loaded_model: Union[GPT4All, None] = None, completion_func=None, - conversation_command=ConversationCommand.Notes, + conversation_command=ConversationCommand.Default, ) -> Union[ThreadedGenerator, Iterator[str]]: """ Converse with user using Llama diff --git a/src/khoj/processor/conversation/openai/gpt.py b/src/khoj/processor/conversation/openai/gpt.py index 9185e3c7..eef5de1e 100644 --- a/src/khoj/processor/conversation/openai/gpt.py +++ b/src/khoj/processor/conversation/openai/gpt.py @@ -109,7 +109,7 @@ def converse( api_key: Optional[str] = None, temperature: float = 0.2, completion_func=None, - conversation_command=ConversationCommand.Notes, + conversation_command=ConversationCommand.Default, ): """ Converse with user using OpenAI's ChatGPT diff --git a/src/khoj/processor/conversation/prompts.py b/src/khoj/processor/conversation/prompts.py index e5c08ff3..346db020 100644 --- a/src/khoj/processor/conversation/prompts.py +++ b/src/khoj/processor/conversation/prompts.py @@ -236,9 +236,10 @@ Q:""" # -- help_message = PromptTemplate.from_template( """ +**/notes**: Chat using the information in your knowledge base. +**/general**: Chat using just Khoj's general knowledge. This will not search against your notes. +**/default**: Chat using your knowledge base and Khoj's general knowledge for context. **/help**: Show this help message. -**/notes**: Chat using the information in your knowledge base. This is the default method. -**/general**: Chat using general knowledge with the LLM. This will not search against your notes. You are using the **{model}** model. 
**version**: {version} diff --git a/src/khoj/routers/api.py b/src/khoj/routers/api.py index 4c5541b1..4f7c6f42 100644 --- a/src/khoj/routers/api.py +++ b/src/khoj/routers/api.py @@ -705,7 +705,7 @@ async def chat( compiled_references, inferred_queries = await extract_references_and_questions( request, q, (n or 5), conversation_command ) - conversation_command = get_conversation_command(query=q, any_references=is_none_or_empty(compiled_references)) + conversation_command = get_conversation_command(query=q, any_references=not is_none_or_empty(compiled_references)) if conversation_command == ConversationCommand.Help: model_type = "offline" if state.processor_config.conversation.enable_offline_chat else "openai" formatted_help = help_message.format(model=model_type, version=state.khoj_version) @@ -755,7 +755,7 @@ async def extract_references_and_questions( request: Request, q: str, n: int, - conversation_type: ConversationCommand = ConversationCommand.Notes, + conversation_type: ConversationCommand = ConversationCommand.Default, ): # Load Conversation History meta_log = state.processor_config.conversation.meta_log diff --git a/src/khoj/routers/helpers.py b/src/khoj/routers/helpers.py index 63f82a1d..267af330 100644 --- a/src/khoj/routers/helpers.py +++ b/src/khoj/routers/helpers.py @@ -60,15 +60,15 @@ def update_telemetry_state( def get_conversation_command(query: str, any_references: bool = False) -> ConversationCommand: if query.startswith("/notes"): return ConversationCommand.Notes - elif query.startswith("/general"): - return ConversationCommand.General elif query.startswith("/help"): return ConversationCommand.Help + elif query.startswith("/general"): + return ConversationCommand.General # If no relevant notes found for the given query elif not any_references: return ConversationCommand.General else: - return ConversationCommand.Notes + return ConversationCommand.Default def generate_chat_response( @@ -76,7 +76,7 @@ def generate_chat_response( meta_log: dict, 
compiled_references: List[str] = [], inferred_queries: List[str] = [], - conversation_command: ConversationCommand = ConversationCommand.Notes, + conversation_command: ConversationCommand = ConversationCommand.Default, ) -> Union[ThreadedGenerator, Iterator[str]]: def _save_to_conversation_log( q: str, diff --git a/src/khoj/utils/helpers.py b/src/khoj/utils/helpers.py index 7d02497f..9bd139d4 100644 --- a/src/khoj/utils/helpers.py +++ b/src/khoj/utils/helpers.py @@ -214,13 +214,15 @@ def log_telemetry( class ConversationCommand(str, Enum): + Default = "default" General = "general" Notes = "notes" Help = "help" command_descriptions = { - ConversationCommand.General: "This command allows you to search talk with the LLM without including context from your knowledge base.", - ConversationCommand.Notes: "This command allows you to search talk with the LLM while including context from your knowledge base.", - ConversationCommand.Help: "This command displays a help message with all available commands and other metadata.", + ConversationCommand.General: "Only talk about information that relies on Khoj's general knowledge, not your personal knowledge base.", + ConversationCommand.Notes: "Only talk about information that is available in your knowledge base.", + ConversationCommand.Default: "The default command when no command is specified. It intelligently auto-switches between general and notes mode.", + ConversationCommand.Help: "Display a help message with all available commands and other metadata.", }