Improvements based on code feedback

This commit is contained in:
Debanjum
2025-02-28 14:37:54 +05:30
parent 94ca458639
commit c133d11556
4 changed files with 7 additions and 6 deletions

View File

@@ -108,7 +108,7 @@ jobs:
BATCH_SIZE: "20"
RANDOMIZE: "True"
KHOJ_URL: "http://localhost:42110"
KHOJ_CHAT_MODEL: ${{ github.event_name == 'workflow_dispatch' && inputs.chat_model || 'gemini-2.0-flash' }}
KHOJ_DEFAULT_CHAT_MODEL: ${{ github.event_name == 'workflow_dispatch' && inputs.chat_model || 'gemini-2.0-flash' }}
KHOJ_LLM_SEED: "42"
GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }}
SERPER_DEV_API_KEY: ${{ matrix.dataset != 'math500' && secrets.SERPER_DEV_API_KEY }}

View File

@@ -1051,7 +1051,7 @@ print(\"Evaluated Expression at x=1:\", evaluated_expression)
Example 3:
---
Q: Plot the world ppulation growth over the years, given this year, world population world tuples: [(2000, 6), (2001, 7), (2002, 8), (2003, 9), (2004, 10)].
Q: Plot the world population growth over the years, given these (year, world population) tuples: [(2000, 6), (2001, 7), (2002, 8), (2003, 9), (2004, 10)].
A: Absolutely! We can utilize the Pandas and Matplotlib libraries (as both are available in the sandbox) to create the world population growth plot.
```python
import pandas as pd

View File

@@ -40,6 +40,7 @@ logger = logging.getLogger(__name__)
SANDBOX_URL = os.getenv("KHOJ_TERRARIUM_URL", "http://localhost:8080")
DEFAULT_E2B_TEMPLATE = "pmt2o0ghpang8gbiys57"
class GeneratedCode(NamedTuple):
@@ -219,7 +220,7 @@ async def execute_e2b(code: str, input_files: list[dict]) -> dict[str, Any]:
sandbox = await AsyncSandbox.create(
api_key=os.getenv("E2B_API_KEY"),
template=os.getenv("E2B_TEMPLATE", "pmt2o0ghpang8gbiys57"),
template=os.getenv("E2B_TEMPLATE", DEFAULT_E2B_TEMPLATE),
timeout=120,
request_timeout=30,
)
@@ -232,7 +233,7 @@ async def execute_e2b(code: str, input_files: list[dict]) -> dict[str, Any]:
]
await asyncio.gather(*upload_tasks)
# Note stored files before execution
# Note stored files before execution to identify new files created during execution
E2bFile = NamedTuple("E2bFile", [("name", str), ("path", str)])
original_files = {E2bFile(f.name, f.path) for f in await sandbox.files.list("~")}
@@ -261,7 +262,7 @@ async def execute_e2b(code: str, input_files: list[dict]) -> dict[str, Any]:
# Collect output files from execution results
for idx, result in enumerate(execution.results):
for result_type in ["png", "jpeg", "svg", "text", "markdown", "json"]:
for result_type in {"png", "jpeg", "svg", "text", "markdown", "json"}:
if b64_data := getattr(result, result_type, None):
output_files.append({"filename": f"{idx}.{result_type}", "b64_data": b64_data})
break

View File

@@ -309,7 +309,7 @@ def initialization(interactive: bool = True):
# Update the default chat model if it doesn't match
chat_config = ConversationAdapters.get_default_chat_model()
env_default_chat_model = os.getenv("KHOJ_CHAT_MODEL")
env_default_chat_model = os.getenv("KHOJ_DEFAULT_CHAT_MODEL")
if not chat_config or not env_default_chat_model:
return
if chat_config.name != env_default_chat_model: