mirror of
https://github.com/khoaliber/khoj.git
synced 2026-03-05 21:29:11 +00:00
Resolve Pydantic deprecation warnings (#1211)
## PR Summary This PR resolves the Pydantic deprecation warnings that appear in the [CI logs](https://github.com/khoj-ai/khoj/actions/runs/16528997676/job/46749452047#step:9:142): ```python PydanticDeprecatedSince20: The `copy` method is deprecated; use `model_copy` instead. See the docstring of `BaseModel.copy` for details about how to handle `include` and `exclude`. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.11/migration/ ```
This commit is contained in:
@@ -91,7 +91,7 @@ class TestTruncateMessage:
|
||||
# Arrange
|
||||
chat_history = generate_chat_history(5)
|
||||
big_chat_message = ChatMessage(role="user", content=generate_content(100, suffix="Question?"))
|
||||
copy_big_chat_message = big_chat_message.copy()
|
||||
copy_big_chat_message = big_chat_message.model_copy()
|
||||
chat_history.insert(0, big_chat_message)
|
||||
initial_tokens = sum([utils.count_tokens(message.content, self.encoder) for message in chat_history])
|
||||
|
||||
@@ -114,7 +114,7 @@ class TestTruncateMessage:
|
||||
chat_history = generate_chat_history(5)
|
||||
chat_history[0].role = "system" # Mark the first message as system message
|
||||
big_chat_message = ChatMessage(role="user", content=generate_content(100, suffix="Question?"))
|
||||
copy_big_chat_message = big_chat_message.copy()
|
||||
copy_big_chat_message = big_chat_message.model_copy()
|
||||
|
||||
chat_history.insert(0, big_chat_message)
|
||||
initial_tokens = sum([utils.count_tokens(message.content, self.encoder) for message in chat_history])
|
||||
@@ -138,7 +138,7 @@ class TestTruncateMessage:
|
||||
def test_truncate_single_large_non_system_message(self):
|
||||
# Arrange
|
||||
big_chat_message = ChatMessage(role="user", content=generate_content(100, suffix="Question?"))
|
||||
copy_big_chat_message = big_chat_message.copy()
|
||||
copy_big_chat_message = big_chat_message.model_copy()
|
||||
chat_messages = [big_chat_message]
|
||||
initial_tokens = sum([utils.count_tokens(message.content, self.encoder) for message in chat_messages])
|
||||
|
||||
@@ -160,7 +160,7 @@ class TestTruncateMessage:
|
||||
# Arrange
|
||||
big_chat_message_content = [{"type": "text", "text": " ".join(["hi"] * (self.max_prompt_size + 1))}]
|
||||
big_chat_message = ChatMessage(role="user", content=big_chat_message_content)
|
||||
copy_big_chat_message = big_chat_message.copy()
|
||||
copy_big_chat_message = big_chat_message.model_copy()
|
||||
chat_messages = [big_chat_message]
|
||||
initial_tokens = sum([utils.count_tokens(message.content, self.encoder) for message in chat_messages])
|
||||
|
||||
|
||||
Reference in New Issue
Block a user