From fc151a68fe1c3e71f475e610bb701c8a9749780a Mon Sep 17 00:00:00 2001 From: Tilo-K Date: Wed, 31 Dec 2025 01:15:05 +0100 Subject: [PATCH] feat: fix jobs --- ...aa586d25d4644a907d748cd3ade0401fc77be.json | 44 ++++++++++++++++++ src/jobs.rs | 45 ++++++++++++------- src/main.rs | 1 + 3 files changed, 74 insertions(+), 16 deletions(-) create mode 100644 .sqlx/query-2f0d0c29c0a1c4892c3a01b47a2aa586d25d4644a907d748cd3ade0401fc77be.json diff --git a/.sqlx/query-2f0d0c29c0a1c4892c3a01b47a2aa586d25d4644a907d748cd3ade0401fc77be.json b/.sqlx/query-2f0d0c29c0a1c4892c3a01b47a2aa586d25d4644a907d748cd3ade0401fc77be.json new file mode 100644 index 0000000..fd4bf91 --- /dev/null +++ b/.sqlx/query-2f0d0c29c0a1c4892c3a01b47a2aa586d25d4644a907d748cd3ade0401fc77be.json @@ -0,0 +1,44 @@ +{ + "db_name": "SQLite", + "query": "SELECT * FROM job_events ORDER BY created_at DESC LIMIT 1000", + "describe": { + "columns": [ + { + "name": "id", + "ordinal": 0, + "type_info": "Integer" + }, + { + "name": "job_name", + "ordinal": 1, + "type_info": "Text" + }, + { + "name": "event_type", + "ordinal": 2, + "type_info": "Text" + }, + { + "name": "event_data", + "ordinal": 3, + "type_info": "Text" + }, + { + "name": "created_at", + "ordinal": 4, + "type_info": "Text" + } + ], + "parameters": { + "Right": 0 + }, + "nullable": [ + false, + false, + false, + true, + true + ] + }, + "hash": "2f0d0c29c0a1c4892c3a01b47a2aa586d25d4644a907d748cd3ade0401fc77be" +} diff --git a/src/jobs.rs b/src/jobs.rs index 9fd4002..acd212d 100644 --- a/src/jobs.rs +++ b/src/jobs.rs @@ -1,7 +1,9 @@ use std::{error::Error, option, time::Duration}; +use actix_web::{Responder, get}; use quartz::{Job, Scheduler, Trigger}; -use sqlx::query; +use serde::{Deserialize, Serialize}; +use sqlx::{query, query_as}; use tokio::runtime::Handle; async fn log_job_event(job_name: &str, event: &str, event_data: Option<&str>) { @@ -34,32 +36,20 @@ use crate::{auth, db}; pub fn init_scheduler(rt: Handle) -> Scheduler { let scheduler = 
Scheduler::new(); - let trigger_every_5min = Trigger::with_identity("5min_trigger", "default_group") - .every(Duration::from_mins(5)) - .repeat(u32::max_value()); - - let trigger_every_3min = Trigger::with_identity("3min_trigger", "default_group") + let trigger_every_3min = Trigger::with_identity("3min_trigger", "3min_group") .every(Duration::from_mins(3)) .repeat(u32::max_value()); - let rt_cleanup = rt.clone(); - let cleanup_tokens_job = Job::with_identity("cleanup_tokens_job", "default_group", move || { - let rt = rt_cleanup.clone(); - rt.spawn(async move { - let _ = cleanup_old_tokens().await; - }); - }); - let rt_refresh = rt.clone(); let refresh_access_token_job = - Job::with_identity("refresh_access_token_job", "default_group", move || { + Job::with_identity("refresh_access_token_job", "refresh_group", move || { let rt = rt_refresh.clone(); rt.spawn(async move { let _ = refresh_access_token().await; + let _ = cleanup_old_tokens().await; }); }); - scheduler.schedule_job(cleanup_tokens_job, trigger_every_5min); scheduler.schedule_job(refresh_access_token_job, trigger_every_3min); scheduler @@ -95,3 +85,26 @@ pub async fn refresh_access_token() -> Result<(), anyhow::Error> { log_job_event("REFRESH_ACCESS_TOKEN", "FINISHED", None).await; Ok(()) } + +#[derive(Debug, Serialize, Deserialize)] +struct JobEvent { + id: i64, + job_name: String, + event_type: String, + event_data: Option<String>, + created_at: Option<String>, +} + +#[get("/api/job_events")] +pub async fn get_job_events() -> impl Responder { + let pool = db::get_pool().await.expect("Could not get database pool"); + let logs = query_as!( + JobEvent, + "SELECT * FROM job_events ORDER BY created_at DESC LIMIT 1000" + ) + .fetch_all(&pool) + .await + .expect("Could not fetch job events"); + + serde_json::to_string(&logs).unwrap() +} diff --git a/src/main.rs b/src/main.rs index ebee468..5466f2d 100644 --- a/src/main.rs +++ b/src/main.rs @@ -58,6 +58,7 @@ async fn main() -> std::io::Result<()> { .service(servers::get_server)
.service(servers::update_server_hostname) .service(servers::get_server_metrics) + .service(jobs::get_job_events) .service( Files::new("/", "./frontend/dist") .index_file("index.html")