From 17fa123b4ec858b846fa8ab4f0e4dbc84360c884 Mon Sep 17 00:00:00 2001
From: Debanjum Singh Solanky
Date: Mon, 26 Dec 2022 15:14:32 -0300
Subject: [PATCH] Split entries by max tokens while converting Beancount
 entries To JSONL

---
 src/processor/ledger/beancount_to_jsonl.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/src/processor/ledger/beancount_to_jsonl.py b/src/processor/ledger/beancount_to_jsonl.py
index ccad97da..9f37df70 100644
--- a/src/processor/ledger/beancount_to_jsonl.py
+++ b/src/processor/ledger/beancount_to_jsonl.py
@@ -35,6 +35,12 @@ class BeancountToJsonl(TextToJsonl):
         end = time.time()
         logger.debug(f"Parse transactions from Beancount files into dictionaries: {end - start} seconds")
 
+        # Split entries by max tokens supported by model
+        start = time.time()
+        current_entries = self.split_entries_by_max_tokens(current_entries, max_tokens=256)
+        end = time.time()
+        logger.debug(f"Split entries by max token size supported by model: {end - start} seconds")
+
         # Identify, mark and merge any new entries with previous entries
         start = time.time()
         if not previous_entries: