Do not use the custom-configured max prompt size when sending messages to Anthropic

This commit is contained in:
sabaimran
2024-07-02 21:58:51 +05:30
parent afcfc60637
commit 6fa2dbc042

View File

@@ -99,15 +99,13 @@ def anthropic_llm_thread(
         anthropic.types.MessageParam(role=message.role, content=message.content) for message in messages
     ]
-    max_prompt_size = max_prompt_size or DEFAULT_MAX_TOKENS_ANTHROPIC
     with client.messages.stream(
         messages=formatted_messages,
         model=model_name,  # type: ignore
         temperature=temperature,
         system=system_prompt,
         timeout=20,
-        max_tokens=max_prompt_size,
+        max_tokens=DEFAULT_MAX_TOKENS_ANTHROPIC,
         **(model_kwargs or dict()),
     ) as stream:
         for text in stream.text_stream: