Fix using non-reasoning openai model via responses API

Pass arg to include encrypted reasoning only for reasoning openai
models. Non-reasoning openai models do not accept this arg
This commit is contained in:
Debanjum
2025-08-18 21:34:04 -07:00
parent e504141c07
commit 14b4d4b663

View File

@@ -458,6 +458,7 @@ def responses_completion_with_backoff(
temperature = 1
reasoning_effort = "medium" if deepthought else "low"
model_kwargs["reasoning"] = {"effort": reasoning_effort, "summary": "auto"}
model_kwargs["include"] = ["reasoning.encrypted_content"]
# Remove unsupported params for reasoning models
model_kwargs.pop("top_p", None)
model_kwargs.pop("stop", None)
@@ -472,7 +473,6 @@ def responses_completion_with_backoff(
temperature=temperature,
timeout=httpx.Timeout(30, read=read_timeout), # type: ignore
store=False,
include=["reasoning.encrypted_content"],
**model_kwargs,
)
if not model_response or not isinstance(model_response, OpenAIResponse) or not model_response.output: