Store and retrieve task metadata from the job name field

This commit is contained in:
Debanjum Singh Solanky
2024-04-29 17:27:11 +05:30
parent ae10ff4a5f
commit d341b1efe8
2 changed files with 22 additions and 12 deletions

View File

@@ -397,15 +397,17 @@ def get_jobs(request: Request) -> Response:
tasks: list[Job] = state.scheduler.get_jobs()
# Collate all tasks assigned by user that are still active
tasks_info = [
{
"id": task.id,
"name": re.sub(r"^/task\s*", "", task.name),
"next": task.next_run_time.strftime("%Y-%m-%d %H:%M"),
}
for task in tasks
if task.id.startswith(f"job_{user.uuid}_")
]
tasks_info = []
for task in tasks:
if task.id.startswith(f"job_{user.uuid}_"):
task_metadata = json.loads(task.name)
tasks_info.append(
{
"id": task.id,
"name": re.sub(r"^/task\s*", "", task_metadata["inferred_query"]),
"next": task.next_run_time.strftime("%Y-%m-%d %H:%M"),
}
)
# Return tasks information as a JSON response
return Response(content=json.dumps(tasks_info), media_type="application/json", status_code=200)
@@ -426,7 +428,12 @@ def delete_job(request: Request, task_id: str) -> Response:
return Response(content="Invalid job", status_code=403)
# Collate info about user task to be deleted
task_info = {"id": task.id, "name": task.name, "next": task.next_run_time.strftime("%Y-%m-%d %H:%MS")}
task_metadata = json.loads(task.name)
task_info = {
"id": task.id,
"name": task_metadata["inferred_query"],
"next": task.next_run_time.strftime("%Y-%m-%d %H:%MS"),
}
# Delete job
task.remove()

View File

@@ -926,8 +926,11 @@ async def create_scheduled_task(
crontime, inferred_query, subject = await schedule_query(q, location, meta_log)
trigger = CronTrigger.from_crontab(crontime, user_timezone)
# Generate id and metadata used by task scheduler and process locks for the task runs
job_id = f"job_{user.uuid}_" + hashlib.md5(f"{inferred_query}_{crontime}".encode("utf-8")).hexdigest()
job_metadata = json.dumps(
{"inferred_query": inferred_query, "original_query": q, "subject": subject, "crontime": crontime}
)
query_id = hashlib.md5(f"{inferred_query}".encode("utf-8")).hexdigest()
job_id = f"job_{user.uuid}_{crontime}_{query_id}"
job = state.scheduler.add_job(
run_with_process_lock,
trigger=trigger,
@@ -943,7 +946,7 @@ async def create_scheduled_task(
"calling_url": calling_url,
},
id=job_id,
name=f"{inferred_query}",
name=job_metadata,
max_instances=2, # Allow second instance to kill any previous instance with stale lock
jitter=30,
)