Add default tokenizer, max_prompt as fallback for non-default offline chat models

Pass user configured chat model as argument to use by converse_offline

The proper fix for this would allow users to configure the max_prompt
and tokenizer to use (while supplying default ones, if none provided).
For now, this is a reasonable start.
This commit is contained in:
Debanjum Singh Solanky
2023-10-13 22:26:59 -07:00
parent 56bd69d5af
commit 1ad8b150e8
3 changed files with 12 additions and 5 deletions

View File

@@ -59,8 +59,8 @@ dependencies = [
"bs4 >= 0.0.1",
"anyio == 3.7.1",
"pymupdf >= 1.23.3",
- "gpt4all == 1.0.12; platform_system == 'Linux' and platform_machine == 'x86_64'",
- "gpt4all == 1.0.12; platform_system == 'Windows' or platform_system == 'Darwin'",
+ "gpt4all >= 1.0.12; platform_system == 'Linux' and platform_machine == 'x86_64'",
+ "gpt4all >= 1.0.12; platform_system == 'Windows' or platform_system == 'Darwin'",
]
dynamic = ["version"]