Add type hints to function args calling the Anthropic model API

This commit is contained in:
Debanjum
2025-05-22 14:56:09 -07:00
parent 97c5222b04
commit a76032522e

View File

@@ -43,16 +43,16 @@ MAX_REASONING_TOKENS_ANTHROPIC = 12000
) )
def anthropic_completion_with_backoff( def anthropic_completion_with_backoff(
messages: list[ChatMessage], messages: list[ChatMessage],
system_prompt, system_prompt: str,
model_name: str, model_name: str,
temperature=0.4, temperature: float = 0.4,
api_key=None, api_key: str | None = None,
api_base_url: str = None, api_base_url: str | None = None,
model_kwargs=None, model_kwargs: dict | None = None,
max_tokens=None, max_tokens: int | None = None,
response_type="text", response_type: str = "text",
deepthought=False, deepthought: bool = False,
tracer={}, tracer: dict = {},
) -> str: ) -> str:
client = anthropic_clients.get(api_key) client = anthropic_clients.get(api_key)
if not client: if not client:
@@ -122,15 +122,15 @@ def anthropic_completion_with_backoff(
) )
async def anthropic_chat_completion_with_backoff( async def anthropic_chat_completion_with_backoff(
messages: list[ChatMessage], messages: list[ChatMessage],
model_name, model_name: str | None,
temperature, temperature: float,
api_key, api_key: str | None,
api_base_url, api_base_url: str,
system_prompt: str, system_prompt: str,
max_prompt_size=None, max_prompt_size: int | None = None,
deepthought=False, deepthought: bool = False,
model_kwargs=None, model_kwargs: dict | None = None,
tracer={}, tracer: dict = {},
) -> AsyncGenerator[ResponseWithThought, None]: ) -> AsyncGenerator[ResponseWithThought, None]:
client = anthropic_async_clients.get(api_key) client = anthropic_async_clients.get(api_key)
if not client: if not client: