parent bfec05e065
commit 22b1b17eef
1 changed file with 70 additions and 22 deletions
@@ -822,17 +822,27 @@ pub async fn list_jobs(
 
     // Original query but let's see all statuses
     let query = "
+        WITH job_durations AS (
+            SELECT
+                je.job_label,
+                be.build_request_id,
+                (MAX(be.timestamp) - MIN(be.timestamp)) / 1000000 as duration_ms
+            FROM job_events je
+            JOIN build_events be ON je.event_id = be.event_id
+            GROUP BY je.job_label, be.build_request_id
+            HAVING MAX(CASE WHEN je.status IN ('3', '4', '5', '6') THEN 1 ELSE 0 END) = 1
+        )
         SELECT
             je.job_label,
             COUNT(CASE WHEN je.status IN ('3', '6') THEN 1 END) as completed_count,
-            COUNT(CASE WHEN je.status = '4' THEN 1 END) as failed_count,
-            COUNT(*) as total_count,
-            -- For now, skip duration calculation since we need start/end times
-            NULL as avg_duration_ms,
+            COUNT(CASE WHEN je.status IN ('4', '5') THEN 1 END) as failed_count,
+            COUNT(CASE WHEN je.status IN ('3', '4', '5', '6') THEN 1 END) as total_count,
+            COALESCE(AVG(jd.duration_ms), 0) as avg_duration_ms,
             MAX(be.timestamp) as last_run,
             GROUP_CONCAT(DISTINCT je.status) as all_statuses
         FROM job_events je
         JOIN build_events be ON je.event_id = be.event_id
+        LEFT JOIN job_durations jd ON je.job_label = jd.job_label
         WHERE je.job_label != ''
         GROUP BY je.job_label
         ORDER BY last_run DESC";
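The new job_durations CTE spans each (job_label, build_request_id) group from its earliest to its latest event and divides by 1,000,000; together with the timestamp/1000000000 'unixepoch' conversions elsewhere in this diff, that implies timestamps are stored as nanoseconds and the per-run durations come out in milliseconds. Below is a minimal in-memory sketch of the same aggregation under that unit assumption; the JobEvent struct and the sample data are illustrative only, not the real schema.

use std::collections::HashMap;

// Illustrative event shape; field names mirror the queried columns.
struct JobEvent {
    job_label: String,
    build_request_id: String,
    timestamp_ns: i64, // nanoseconds, as the /1_000_000 and /1_000_000_000 divisions suggest
    status: String,
}

// Analogue of the job_durations CTE: per (job_label, build_request_id),
// take MAX(timestamp) - MIN(timestamp), keep only runs that reached a
// terminal status ('3', '4', '5', '6'), and convert ns -> ms.
fn per_run_duration_ms(events: &[JobEvent]) -> HashMap<(String, String), i64> {
    let mut spans: HashMap<(String, String), (i64, i64, bool)> = HashMap::new();
    for e in events {
        let key = (e.job_label.clone(), e.build_request_id.clone());
        let entry = spans.entry(key).or_insert((e.timestamp_ns, e.timestamp_ns, false));
        entry.0 = entry.0.min(e.timestamp_ns); // MIN(be.timestamp)
        entry.1 = entry.1.max(e.timestamp_ns); // MAX(be.timestamp)
        entry.2 |= matches!(e.status.as_str(), "3" | "4" | "5" | "6"); // HAVING terminal status
    }
    spans
        .into_iter()
        .filter(|(_, (_, _, terminal))| *terminal)
        .map(|(key, (start, end, _))| (key, (end - start) / 1_000_000)) // ns -> ms
        .collect()
}

fn main() {
    let events = vec![
        JobEvent { job_label: "//examples:hello".into(), build_request_id: "req-1".into(), timestamp_ns: 1_000_000_000, status: "1".into() },
        JobEvent { job_label: "//examples:hello".into(), build_request_id: "req-1".into(), timestamp_ns: 4_000_000_000, status: "3".into() },
    ];
    // 3 seconds between first and last event -> 3000 ms
    let key = ("//examples:hello".to_string(), "req-1".to_string());
    assert_eq!(per_run_duration_ms(&events)[&key], 3_000);
}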
@@ -855,7 +865,7 @@ pub async fn list_jobs(
             let completed_count: u32 = row[1].parse().unwrap_or(0);
             let failed_count: u32 = row[2].parse().unwrap_or(0);
             let total_count: u32 = row[3].parse().unwrap_or(0);
-            let avg_duration_ms: Option<i64> = row[4].parse().ok();
+            let avg_duration_ms: Option<i64> = row[4].parse::<f64>().ok().map(|f| f as i64);
             let last_run: Option<i64> = row[5].parse().ok();
             let all_statuses = &row[6];
 
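The Rust side switches from a plain parse() to parse::<f64>().ok().map(|f| f as i64), presumably because AVG(...) now yields a decimal string that a direct i64 parse would reject. A small self-contained check of that stdlib behavior, with a made-up cell value standing in for row[4]:

fn main() {
    let cell = "1234.5"; // invented value for illustration

    // A direct i64 parse fails on a decimal string, so .ok() yields None.
    let direct: Option<i64> = cell.parse().ok();
    assert_eq!(direct, None);

    // Parsing as f64 first and truncating, as the new code does, recovers a value.
    let via_f64: Option<i64> = cell.parse::<f64>().ok().map(|f| f as i64);
    assert_eq!(via_f64, Some(1234));

    // Integer-looking strings still work through the f64 path.
    let whole: Option<i64> = "0".parse::<f64>().ok().map(|f| f as i64);
    assert_eq!(whole, Some(0));
}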
@@ -919,21 +929,31 @@ pub async fn get_job_metrics(
 
     // Get overall job metrics
     let metrics_query = "
+        WITH job_run_durations AS (
         SELECT
-            COUNT(CASE WHEN je.status IN ('3', '6') THEN 1 END) as completed_count,
-            COUNT(*) as total_count,
-            -- Skip duration calculation for now
-            NULL as avg_duration_ms
+            be.build_request_id,
+            (MAX(be.timestamp) - MIN(be.timestamp)) / 1000000 as duration_ms
         FROM job_events je
         JOIN build_events be ON je.event_id = be.event_id
+        WHERE je.job_label = ?
+        GROUP BY be.build_request_id
+        HAVING MAX(CASE WHEN je.status IN ('3', '4', '5', '6') THEN 1 ELSE 0 END) = 1
+        )
+        SELECT
+            COUNT(CASE WHEN je.status IN ('3', '6') THEN 1 END) as completed_count,
+            COUNT(CASE WHEN je.status IN ('3', '4', '5', '6') THEN 1 END) as total_count,
+            COALESCE(AVG(jrd.duration_ms), 0) as avg_duration_ms
+        FROM job_events je
+        JOIN build_events be ON je.event_id = be.event_id
+        LEFT JOIN job_run_durations jrd ON be.build_request_id = jrd.build_request_id
         WHERE je.job_label = ?";
 
-    let (success_rate, total_runs, avg_duration_ms) = match service.event_log.execute_query(&metrics_query.replace("?", &format!("'{}'", decoded_label))).await {
+    let (success_rate, total_runs, avg_duration_ms) = match service.event_log.execute_query(&metrics_query.replace("?", &format!("'{}'", decoded_label)).replace("?", &format!("'{}'", decoded_label))).await {
         Ok(result) if !result.rows.is_empty() => {
             let row = &result.rows[0];
             let completed_count: u32 = row[0].parse().unwrap_or(0);
             let total_count: u32 = row[1].parse().unwrap_or(0);
-            let avg_duration: Option<i64> = row[2].parse().ok();
+            let avg_duration: Option<i64> = row[2].parse::<f64>().ok().map(|f| f as i64);
 
             let success_rate = if total_count > 0 {
                 completed_count as f64 / total_count as f64
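The metrics query now carries two ? placeholders, and the call site fills them by chaining two .replace() calls. str::replace substitutes every occurrence of the pattern, so the first call already fills both markers and the second is effectively a no-op. The sketch below demonstrates that stdlib behavior with a shortened stand-in query and an invented label; it does not reproduce the full metrics_query.

fn main() {
    // Shortened stand-in for the real query; both placeholders take the same label.
    let query = "WHERE je.job_label = ? AND je2.job_label = ?";
    let decoded_label = "//examples:hello"; // made-up label for illustration

    // str::replace substitutes every match, so one call fills both placeholders.
    let once = query.replace("?", &format!("'{}'", decoded_label));
    assert_eq!(
        once,
        "WHERE je.job_label = '//examples:hello' AND je2.job_label = '//examples:hello'"
    );

    // A second chained replace finds no remaining '?' and changes nothing.
    let twice = once.replace("?", &format!("'{}'", decoded_label));
    assert_eq!(once, twice);
}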
@@ -946,28 +966,43 @@ pub async fn get_job_metrics(
         _ => (0.0, 0, None),
     };
 
-    // Get recent runs
+    // Get recent runs - consolidated by build request to show final status per job run
     let recent_runs_query = "
-        SELECT DISTINCT
+        SELECT
             be.build_request_id,
             je.target_partitions,
             je.status,
-            be.timestamp,
-            (julianday('now') - julianday(be.timestamp/1000000000, 'unixepoch')) * 24 * 60 * 60 * 1000 as duration_ms
+            MIN(be.timestamp) as started_at,
+            MAX(be.timestamp) as completed_at
         FROM job_events je
         JOIN build_events be ON je.event_id = be.event_id
         WHERE je.job_label = ?
-        ORDER BY be.timestamp DESC
+        GROUP BY be.build_request_id, je.target_partitions
+        HAVING je.status = (
+            SELECT je2.status
+            FROM job_events je2
+            JOIN build_events be2 ON je2.event_id = be2.event_id
+            WHERE je2.job_label = ?
+            AND be2.build_request_id = be.build_request_id
+            ORDER BY be2.timestamp DESC
+            LIMIT 1
+        )
+        ORDER BY started_at DESC
         LIMIT 50";
 
-    let recent_runs = match service.event_log.execute_query(&recent_runs_query.replace("?", &format!("'{}'", decoded_label))).await {
+    let recent_runs = match service.event_log.execute_query(&recent_runs_query.replace("?", &format!("'{}'", decoded_label)).replace("?", &format!("'{}'", decoded_label))).await {
         Ok(result) => {
             result.rows.into_iter().map(|row| {
                 let build_request_id = row[0].clone();
                 let partitions_json: String = row[1].clone();
                 let status_code: String = row[2].clone();
                 let started_at: i64 = row[3].parse().unwrap_or(0);
-                let duration_ms: Option<i64> = row[4].parse().ok();
+                let completed_at: i64 = row[4].parse().unwrap_or(started_at);
+                let duration_ms: Option<i64> = if completed_at > started_at {
+                    Some((completed_at - started_at) / 1_000_000) // Convert nanoseconds to milliseconds
+                } else {
+                    None
+                };
 
                 let partitions: Vec<String> = serde_json::from_str::<Vec<serde_json::Value>>(&partitions_json)
                     .unwrap_or_default()
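The GROUP BY plus correlated HAVING subquery consolidates the per-event rows into one row per build request, keyed to the status of that request's most recent event. A rough in-memory equivalent of that consolidation, using a simplified (request id, timestamp in ns, status code) tuple rather than the real row type:

use std::collections::HashMap;

// For each build_request_id, keep the status carried by its most recent event,
// mirroring the ORDER BY be2.timestamp DESC LIMIT 1 subquery.
fn final_status_per_run(events: &[(String, i64, String)]) -> HashMap<String, String> {
    let mut latest: HashMap<String, (i64, String)> = HashMap::new();
    for (request_id, ts, status) in events {
        match latest.get(request_id) {
            // Keep this event only if it is newer than what we have seen so far.
            Some((seen_ts, _)) if seen_ts >= ts => {}
            _ => {
                latest.insert(request_id.clone(), (*ts, status.clone()));
            }
        }
    }
    latest
        .into_iter()
        .map(|(request_id, (_, status))| (request_id, status))
        .collect()
}

fn main() {
    let events = vec![
        ("req-1".to_string(), 1_000_000_000_i64, "1".to_string()), // earlier event
        ("req-1".to_string(), 3_000_000_000_i64, "3".to_string()), // most recent event: completed
    ];
    assert_eq!(final_status_per_run(&events)["req-1"], "3");
}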
@@ -983,6 +1018,7 @@ pub async fn get_job_metrics(
                     "3" => "completed",
                     "4" => "failed",
                     "5" => "cancelled",
+                    "6" => "skipped",
                     _ => "unknown",
                 };
 
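Status code '6' now maps to "skipped", which lines up with the aggregate queries counting '3' and '6' toward completed_count and '4' and '5' toward failed_count. If the mapping grows further, a hypothetical helper like the one below (not part of this diff) would keep the string literals in one place:

/// Hypothetical helper mirroring the status-code match in this handler.
/// Only the code-to-name pairs visible in the diff are covered; anything else is "unknown".
fn status_name(code: &str) -> &'static str {
    match code {
        "3" => "completed",
        "4" => "failed",
        "5" => "cancelled",
        "6" => "skipped",
        _ => "unknown",
    }
}

fn main() {
    assert_eq!(status_name("6"), "skipped");
    assert_eq!(status_name("7"), "unknown");
}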
@@ -1000,26 +1036,38 @@ pub async fn get_job_metrics(
 
     // Get daily stats (simplified - just recent days)
     let daily_stats_query = "
+        WITH daily_job_durations AS (
+            SELECT
+                date(be.timestamp/1000000000, 'unixepoch') as date,
+                be.build_request_id,
+                (MAX(be.timestamp) - MIN(be.timestamp)) / 1000000 as duration_ms
+            FROM job_events je
+            JOIN build_events be ON je.event_id = be.event_id
+            WHERE je.job_label = ?
+            AND be.timestamp > (strftime('%s', 'now', '-30 days') * 1000000000)
+            GROUP BY date(be.timestamp/1000000000, 'unixepoch'), be.build_request_id
+            HAVING MAX(CASE WHEN je.status IN ('3', '4', '5', '6') THEN 1 ELSE 0 END) = 1
+        )
         SELECT
             date(be.timestamp/1000000000, 'unixepoch') as date,
             COUNT(CASE WHEN je.status IN ('3', '6') THEN 1 END) as completed_count,
-            COUNT(*) as total_count,
-            -- Skip duration calculation for now
-            NULL as avg_duration_ms
+            COUNT(CASE WHEN je.status IN ('3', '4', '5', '6') THEN 1 END) as total_count,
+            COALESCE(AVG(djd.duration_ms), 0) as avg_duration_ms
         FROM job_events je
        JOIN build_events be ON je.event_id = be.event_id
+        LEFT JOIN daily_job_durations djd ON date(be.timestamp/1000000000, 'unixepoch') = djd.date
         WHERE je.job_label = ?
         AND be.timestamp > (strftime('%s', 'now', '-30 days') * 1000000000)
         GROUP BY date(be.timestamp/1000000000, 'unixepoch')
         ORDER BY date DESC";
 
-    let daily_stats = match service.event_log.execute_query(&daily_stats_query.replace("?", &format!("'{}'", decoded_label))).await {
+    let daily_stats = match service.event_log.execute_query(&daily_stats_query.replace("?", &format!("'{}'", decoded_label)).replace("?", &format!("'{}'", decoded_label))).await {
         Ok(result) => {
             result.rows.into_iter().map(|row| {
                 let date = row[0].clone();
                 let completed_count: u32 = row[1].parse().unwrap_or(0);
                 let total_count: u32 = row[2].parse().unwrap_or(0);
-                let avg_duration: Option<i64> = row[3].parse().ok();
+                let avg_duration: Option<i64> = row[3].parse::<f64>().ok().map(|f| f as i64);
 
                 let success_rate = if total_count > 0 {
                     completed_count as f64 / total_count as f64
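The daily stats follow the same pattern as the overall metrics: success_rate is completed_count over total_count, guarded against days with no terminal runs, and avg_duration goes through the same f64-then-truncate parse. A tiny worked example with invented counts; the 0.0 fallback mirrors the (0.0, 0, None) default in the overall-metrics match arm and is an assumption here, since the else arm of this branch falls outside the hunk.

// Assumed-shape helper: same guard as the handler, with a 0.0 fallback for empty days.
fn success_rate(completed_count: u32, total_count: u32) -> f64 {
    if total_count > 0 {
        completed_count as f64 / total_count as f64
    } else {
        0.0
    }
}

fn main() {
    // Invented sample: 6 of 8 terminal runs completed (statuses '3' or '6').
    assert!((success_rate(6, 8) - 0.75).abs() < f64::EPSILON);
    // A day with no terminal runs reports 0.0 instead of dividing by zero.
    assert_eq!(success_rate(0, 0), 0.0);
}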