Truncate message logs to stay below the model's maximum supported prompt size

- Use tiktoken to count tokens for chat models
- Make the number of conversation turns added to the prompt configurable
  via an argument to the generate_chatml_messages_with_context method
This commit is contained in:
Debanjum Singh Solanky
2023-03-25 04:37:55 +07:00
parent 4725416fbd
commit 7e36f421f9
4 changed files with 30 additions and 6 deletions

View File

@@ -41,6 +41,7 @@ dependencies = [
"fastapi == 0.77.1",
"jinja2 == 3.1.2",
"openai >= 0.27.0",
"tiktoken >= 0.3.0",
"pillow == 9.3.0",
"pydantic == 1.9.1",
"pyqt6 == 6.3.1",