mirror of
https://github.com/khoaliber/khoj.git
synced 2026-03-02 13:18:18 +00:00
Stream responses from OpenAI's o1 model series, as the API now supports it
Previously, o1 models did not support streaming responses via the API. Now they appear to support it.
This commit is contained in:
@@ -56,7 +56,6 @@ def completion_with_backoff(
    # Update request parameters for compatability with o1 model series
    # Refer: https://platform.openai.com/docs/guides/reasoning/beta-limitations
    if model.startswith("o1"):
        stream = False
        temperature = 1
        model_kwargs.pop("stop", None)
        model_kwargs.pop("response_format", None)
@@ -156,7 +155,6 @@ def llm_thread(
    # Update request parameters for compatability with o1 model series
    # Refer: https://platform.openai.com/docs/guides/reasoning/beta-limitations
    if model_name.startswith("o1"):
        stream = False
        temperature = 1
        model_kwargs.pop("stop", None)
        model_kwargs.pop("response_format", None)
Reference in New Issue
Block a user