diff --git a/src/khoj/configure.py b/src/khoj/configure.py
index c6c9e308..e6746f31 100644
--- a/src/khoj/configure.py
+++ b/src/khoj/configure.py
@@ -3,6 +3,7 @@ import sys
 import logging
 import json
 from enum import Enum
+import requests
 
 # External Packages
 import schedule
@@ -223,3 +224,18 @@ def save_chat_session():
 
     state.processor_config.conversation.chat_session = None
     logger.info("📩 Saved current chat session to conversation logs")
+
+
+@schedule.repeat(schedule.every(1).minutes)
+def upload_telemetry():
+    if not state.config.app.should_log_telemetry or not state.telemetry:
+        logger.debug("No telemetry to upload" if not state.telemetry else "Telemetry logging disabled")
+        return
+
+    try:
+        logger.debug(f"📡 Upload usage telemetry to {constants.telemetry_server}: {state.telemetry}")
+        requests.post(constants.telemetry_server, json=state.telemetry)
+    except Exception as e:
+        logger.error(f"Error uploading telemetry: {e}")
+    else:
+        state.telemetry = []
diff --git a/src/khoj/routers/api.py b/src/khoj/routers/api.py
index d92da70f..21493557 100644
--- a/src/khoj/routers/api.py
+++ b/src/khoj/routers/api.py
@@ -168,7 +168,7 @@ def search(
     # Cache results
     state.query_cache[query_cache_key] = results
 
-    log_telemetry(telemetry_type="api", api="search", app_config=state.config.app)
+    state.telemetry += log_telemetry(telemetry_type="api", api="search", app_config=state.config.app)
 
     return results
 
@@ -193,7 +193,7 @@ def update(t: Optional[SearchType] = None, force: Optional[bool] = False):
     else:
         logger.info("📬 Processor reconfigured via API")
 
-    log_telemetry(telemetry_type="api", api="update", app_config=state.config.app)
+    state.telemetry += log_telemetry(telemetry_type="api", api="update", app_config=state.config.app)
 
     return {"status": "ok", "message": "khoj reloaded"}
 
@@ -255,6 +255,6 @@ def chat(q: Optional[str] = None):
         conversation_log=meta_log.get("chat", []),
     )
 
-    log_telemetry(telemetry_type="api", api="chat", app_config=state.config.app)
+    state.telemetry += log_telemetry(telemetry_type="api", api="chat", app_config=state.config.app)
 
     return {"status": status, "response": gpt_response, "context": compiled_references}
diff --git a/src/khoj/utils/helpers.py b/src/khoj/utils/helpers.py
index 73d25c98..e9581769 100644
--- a/src/khoj/utils/helpers.py
+++ b/src/khoj/utils/helpers.py
@@ -172,7 +172,7 @@ def log_telemetry(telemetry_type: str, api: str = None, client: str = None, app_
-    """Log basic app usage telemetry like client, os, api called"""
+    """Collect basic app usage telemetry (client, os, api called) and return it as a single-entry list"""
     # Do not log usage telemetry, if telemetry is disabled via app config
     if not app_config or not app_config.should_log_telemetry:
-        return
+        return []
 
     # Populate telemetry data to log
     request_body = {
@@ -189,9 +189,4 @@ def log_telemetry(telemetry_type: str, api: str = None, client: str = None, app_
         request_body["client"] = client
 
-    # Log telemetry data to telemetry endpoint
-    logger = logging.getLogger(__name__)
-    try:
-        logger.debug(f"Log usage telemetry to {constants.telemetry_server}: {request_body}")
-        requests.post(constants.telemetry_server, json=request_body)
-    except:
-        pass
+    # Return telemetry entry for the caller to batch; upload_telemetry posts batches periodically
+    return [request_body]
diff --git a/src/khoj/utils/state.py b/src/khoj/utils/state.py
index 7a38bfdf..9d5ed27f 100644
--- a/src/khoj/utils/state.py
+++ b/src/khoj/utils/state.py
@@ -1,6 +1,6 @@
 # Standard Packages
 import threading
-from typing import List
+from typing import List, Dict
 from packaging import version
 
 # External Packages
@@ -25,6 +25,7 @@
 cli_args: List[str] = None
 query_cache = LRU()
 search_index_lock = threading.Lock()
 SearchType = utils_config.SearchType
+telemetry: List[Dict[str, str]] = []
 if torch.cuda.is_available():
     # Use CUDA GPU