Resolve issues with GPT4All and fix prompt for yesterday extract questions date filter (#483)

- The GPT4All integration had ceased working with the 0.1.7 specification. Update to use 1.0.12. At a later date, we should also use first-party support for Llama v2 via gpt4all.
- Update the system prompt for the extract_questions flow to add start and end dates to the "yesterday" date-filter example.
- Update all setup data in conftest.py to use new client-server indexing pattern
This commit is contained in:
sabaimran
2023-09-18 14:41:26 -07:00
committed by GitHub
parent 8141be97f6
commit 2dd15e9f63
4 changed files with 16 additions and 34 deletions

View File

@@ -28,9 +28,10 @@ def download_model(model_name: str):
raise e
url = model_metadata.model_name_to_url.get(model_name)
model_path = os.path.expanduser(f"~/.cache/gpt4all/")
if not url:
logger.debug(f"Model {model_name} not found in model metadata. Skipping download.")
return GPT4All(model_name)
return GPT4All(model_name=model_name, model_path=model_path)
filename = os.path.expanduser(f"~/.cache/gpt4all/{model_name}")
if os.path.exists(filename):
@@ -39,8 +40,8 @@ def download_model(model_name: str):
requests.get("https://www.google.com/", timeout=5)
except:
logger.debug("User is offline. Disabling allowed download flag")
return GPT4All(model_name, allow_download=False)
return GPT4All(model_name)
return GPT4All(model_name=model_name, model_path=model_path, allow_download=False)
return GPT4All(model_name=model_name, model_path=model_path)
# Download the model to a tmp file. Once the download is completed, move the tmp file to the actual file
tmp_filename = filename + ".tmp"

View File

@@ -209,7 +209,7 @@ A: Bob is {bob_tom_age_difference} years older than Tom. As Bob is {bob_age} yea
Q: What does yesterday's note say?
["Note from {yesterday_date} dt='{yesterday_date}'"]
["Note from {yesterday_date} dt>='{yesterday_date}' dt<'{current_date}'"]
A: Yesterday's note contains the following information: ...