From 1eb092010c3fbb73b7ceccf4a3392b6ec1e3619f Mon Sep 17 00:00:00 2001 From: Debanjum Date: Thu, 10 Apr 2025 19:44:34 +0530 Subject: [PATCH] Fix handling unset response_schema being passed to gemini models Use of `is_none_or_empty` doesn't work well with classes, which response_schema can get set to. --- src/khoj/processor/conversation/google/gemini_chat.py | 4 ++-- src/khoj/processor/conversation/google/utils.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/khoj/processor/conversation/google/gemini_chat.py b/src/khoj/processor/conversation/google/gemini_chat.py index 619ab5b6..ba0b93f7 100644 --- a/src/khoj/processor/conversation/google/gemini_chat.py +++ b/src/khoj/processor/conversation/google/gemini_chat.py @@ -140,8 +140,8 @@ def gemini_send_message_to_model( """ model_kwargs = {} - # This caused unwanted behavior and terminates response early for gemini 1.5 series. Monitor for flakiness with 2.0 series. - if response_type == "json_object" and model in ["gemini-2.0-flash"]: + # Monitor for flakiness in 1.5+ models. This would cause unwanted behavior and terminate response early in 1.5 models. + if response_type == "json_object" and not model.startswith("gemini-1.5"): model_kwargs["response_mime_type"] = "application/json" if response_schema: model_kwargs["response_schema"] = response_schema diff --git a/src/khoj/processor/conversation/google/utils.py b/src/khoj/processor/conversation/google/utils.py index b4f613d5..449a08dd 100644 --- a/src/khoj/processor/conversation/google/utils.py +++ b/src/khoj/processor/conversation/google/utils.py @@ -89,7 +89,7 @@ def gemini_completion_with_backoff( # format model response schema response_schema = None - if model_kwargs and not is_none_or_empty(model_kwargs.get("response_schema")): + if model_kwargs and model_kwargs.get("response_schema"): response_schema = clean_response_schema(model_kwargs["response_schema"]) seed = int(os.getenv("KHOJ_LLM_SEED")) if os.getenv("KHOJ_LLM_SEED") else None