Add job summary links to post-merge report #139481

Merged 2 commits on Apr 9, 2025
44 changes: 36 additions & 8 deletions src/ci/citool/src/analysis.rs
@@ -7,6 +7,7 @@ use build_helper::metrics::{
format_build_steps,
};

use crate::github::JobInfoResolver;
use crate::metrics;
use crate::metrics::{JobMetrics, JobName, get_test_suites};
use crate::utils::{output_details, pluralize};
@@ -185,13 +186,19 @@ fn render_table(suites: BTreeMap<String, TestSuiteRecord>) -> String {
}

/// Outputs a report of test differences between the `parent` and `current` commits.
pub fn output_test_diffs(job_metrics: &HashMap<JobName, JobMetrics>) {
pub fn output_test_diffs(
job_metrics: &HashMap<JobName, JobMetrics>,
job_info_resolver: &mut JobInfoResolver,
) {
let aggregated_test_diffs = aggregate_test_diffs(&job_metrics);
report_test_diffs(aggregated_test_diffs);
report_test_diffs(aggregated_test_diffs, job_metrics, job_info_resolver);
}

/// Prints the ten largest differences in bootstrap durations.
pub fn output_largest_duration_changes(job_metrics: &HashMap<JobName, JobMetrics>) {
pub fn output_largest_duration_changes(
job_metrics: &HashMap<JobName, JobMetrics>,
job_info_resolver: &mut JobInfoResolver,
) {
struct Entry<'a> {
job: &'a JobName,
before: Duration,
@@ -225,14 +232,14 @@ pub fn output_largest_duration_changes(job_metrics: &HashMap<JobName, JobMetrics
});
}
}
changes.sort_by(|e1, e2| e1.change.partial_cmp(&e2.change).unwrap().reverse());
changes.sort_by(|e1, e2| e1.change.abs().partial_cmp(&e2.change.abs()).unwrap().reverse());

println!("# Job duration changes");
for (index, entry) in changes.into_iter().take(10).enumerate() {
println!(
"{}. `{}`: {:.1}s -> {:.1}s ({:.1}%)",
"{}. {}: {:.1}s -> {:.1}s ({:.1}%)",
index + 1,
entry.job,
format_job_link(job_info_resolver, job_metrics, entry.job),
entry.before.as_secs_f64(),
entry.after.as_secs_f64(),
entry.change
@@ -400,7 +407,11 @@ fn generate_test_name(name: &str) -> String {
}

/// Prints test changes in Markdown format to stdout.
fn report_test_diffs(diff: AggregatedTestDiffs) {
fn report_test_diffs(
diff: AggregatedTestDiffs,
job_metrics: &HashMap<JobName, JobMetrics>,
job_info_resolver: &mut JobInfoResolver,
) {
println!("# Test differences");
if diff.diffs.is_empty() {
println!("No test diffs found");
@@ -521,9 +532,26 @@ fn report_test_diffs(diff: AggregatedTestDiffs) {
println!(
"- {}: {}",
format_job_group(group as u64),
jobs.iter().map(|j| format!("`{j}`")).collect::<Vec<_>>().join(", ")
jobs.iter()
.map(|j| format_job_link(job_info_resolver, job_metrics, j))
.collect::<Vec<_>>()
.join(", ")
);
}
},
);
}

/// Tries to get a GitHub Actions job summary URL from the resolver.
/// If it is not available, just wraps the job name in backticks.
fn format_job_link(
job_info_resolver: &mut JobInfoResolver,
job_metrics: &HashMap<JobName, JobMetrics>,
job_name: &str,
) -> String {
job_metrics
.get(job_name)
.and_then(|metrics| job_info_resolver.get_job_summary_link(job_name, &metrics.current))
.map(|summary_url| format!("[{job_name}]({summary_url})"))
.unwrap_or_else(|| format!("`{job_name}`"))
}
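
For illustration, the fallback in `format_job_link` above can be sketched in isolation: when a job summary URL is resolved, the job name renders as a Markdown link; otherwise it is wrapped in backticks as before. A minimal standalone sketch, with hypothetical job names and URLs that are not part of this PR:

```rust
// Standalone sketch of the link-or-backticks fallback used by format_job_link.
// The Option<&str> stands in for the resolver lookup; all values are hypothetical.
fn render_job(job_name: &str, summary_url: Option<&str>) -> String {
    summary_url
        .map(|url| format!("[{job_name}]({url})"))
        .unwrap_or_else(|| format!("`{job_name}`"))
}

fn main() {
    // With a resolved summary URL the job renders as a Markdown link...
    assert_eq!(
        render_job("x86_64-gnu", Some("https://github.com/rust-lang/rust/actions/runs/1#summary-2")),
        "[x86_64-gnu](https://github.com/rust-lang/rust/actions/runs/1#summary-2)"
    );
    // ...and falls back to a backticked name when no link is available.
    assert_eq!(render_job("dist-x86_64-linux", None), "`dist-x86_64-linux`");
}
```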
109 changes: 109 additions & 0 deletions src/ci/citool/src/github.rs
@@ -0,0 +1,109 @@
use std::collections::HashMap;

use anyhow::Context;
use build_helper::metrics::{CiMetadata, JsonRoot};

pub struct GitHubClient;

impl GitHubClient {
fn get_workflow_run_jobs(
&self,
repo: &str,
workflow_run_id: u64,
) -> anyhow::Result<Vec<GitHubJob>> {
let req = ureq::get(format!(
"https://api.github.com/repos/{repo}/actions/runs/{workflow_run_id}/jobs?per_page=100"
))
.header("User-Agent", "rust-lang/rust/citool")
.header("Accept", "application/vnd.github+json")
.header("X-GitHub-Api-Version", "2022-11-28")
.call()
.context("cannot get workflow job list")?;

let status = req.status();
let mut body = req.into_body();
if status.is_success() {
// This API response is actually paged, but we assume for now that there are at
// most 100 jobs per workflow.
let response = body
.read_json::<WorkflowRunJobsResponse>()
.context("cannot deserialize workflow run jobs response")?;
// The CI job names have a prefix, e.g. `auto - foo`. We remove the prefix here to
// normalize the job name.
Ok(response
.jobs
.into_iter()
.map(|mut job| {
job.name = job
.name
.split_once(" - ")
.map(|res| res.1.to_string())
.unwrap_or_else(|| job.name);
job
})
.collect())
} else {
Err(anyhow::anyhow!(
"Cannot get jobs of workflow run {workflow_run_id}: {status}\n{}",
body.read_to_string()?
))
}
}
}

#[derive(serde::Deserialize)]
struct WorkflowRunJobsResponse {
jobs: Vec<GitHubJob>,
}

#[derive(serde::Deserialize)]
struct GitHubJob {
name: String,
id: u64,
}

/// Can be used to resolve information about GitHub Actions jobs.
/// Caches results internally to avoid unnecessary GitHub API calls.
pub struct JobInfoResolver {
client: GitHubClient,
// Workflow run ID -> jobs
workflow_job_cache: HashMap<u64, Vec<GitHubJob>>,
}

impl JobInfoResolver {
pub fn new() -> Self {
Self { client: GitHubClient, workflow_job_cache: Default::default() }
}

/// Get a link to a job summary for the given job name and bootstrap execution.
pub fn get_job_summary_link(&mut self, job_name: &str, metrics: &JsonRoot) -> Option<String> {
metrics.ci_metadata.as_ref().and_then(|metadata| {
self.get_job_id(metadata, job_name).map(|job_id| {
format!(
"https://github.com/{}/actions/runs/{}#summary-{job_id}",
metadata.repository, metadata.workflow_run_id
)
})
})
}

fn get_job_id(&mut self, ci_metadata: &CiMetadata, job_name: &str) -> Option<u64> {
if let Some(job) = self
.workflow_job_cache
.get(&ci_metadata.workflow_run_id)
.and_then(|jobs| jobs.iter().find(|j| j.name == job_name))
{
return Some(job.id);
}

let jobs = self
.client
.get_workflow_run_jobs(&ci_metadata.repository, ci_metadata.workflow_run_id)
.inspect_err(|e| eprintln!("Cannot download workflow jobs: {e:?}"))
.ok()?;
let job_id = jobs.iter().find(|j| j.name == job_name).map(|j| j.id);
// Save the cache even if the job name was not found; it could be useful for further lookups
self.workflow_job_cache.insert(ci_metadata.workflow_run_id, jobs);
job_id
}
}
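
As a side note, the job-name normalization in `get_workflow_run_jobs` (stripping the `auto - ` style prefix) can be exercised on its own. A small sketch under the assumption that prefixed names always use the `" - "` separator; the job names here are hypothetical:

```rust
// Sketch of the prefix stripping performed in get_workflow_run_jobs:
// CI job names arrive as e.g. "auto - x86_64-gnu" and are normalized to "x86_64-gnu".
fn normalize_job_name(name: String) -> String {
    name.split_once(" - ").map(|(_, rest)| rest.to_string()).unwrap_or_else(|| name)
}

fn main() {
    assert_eq!(normalize_job_name("auto - x86_64-gnu".to_string()), "x86_64-gnu");
    // Names without a prefix pass through unchanged.
    assert_eq!(normalize_job_name("calculate_matrix".to_string()), "calculate_matrix");
}
```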
11 changes: 8 additions & 3 deletions src/ci/citool/src/main.rs
@@ -1,6 +1,7 @@
mod analysis;
mod cpu_usage;
mod datadog;
mod github;
mod jobs;
mod metrics;
mod utils;
@@ -18,6 +19,7 @@ use serde_yaml::Value;
use crate::analysis::{output_largest_duration_changes, output_test_diffs};
use crate::cpu_usage::load_cpu_usage;
use crate::datadog::upload_datadog_metric;
use crate::github::JobInfoResolver;
use crate::jobs::RunType;
use crate::metrics::{JobMetrics, download_auto_job_metrics, download_job_metrics, load_metrics};
use crate::utils::load_env_var;
@@ -145,6 +147,7 @@ fn postprocess_metrics(
) -> anyhow::Result<()> {
let metrics = load_metrics(&metrics_path)?;

let mut job_info_resolver = JobInfoResolver::new();
if let (Some(parent), Some(job_name)) = (parent, job_name) {
// This command is also executed on PR builds, which might not have parent metrics
// available, because some PR jobs don't run on auto builds, and PR jobs do not upload metrics
@@ -160,7 +163,7 @@ job_name,
job_name,
JobMetrics { parent: Some(parent_metrics), current: metrics },
)]);
output_test_diffs(&job_metrics);
output_test_diffs(&job_metrics, &mut job_info_resolver);
return Ok(());
}
Err(error) => {
@@ -180,8 +183,10 @@ fn post_merge_report(db: JobDatabase, current: String, parent: String) -> anyhow
let metrics = download_auto_job_metrics(&db, &parent, &current)?;

println!("\nComparing {parent} (parent) -> {current} (this PR)\n");
output_test_diffs(&metrics);
output_largest_duration_changes(&metrics);

let mut job_info_resolver = JobInfoResolver::new();
output_test_diffs(&metrics, &mut job_info_resolver);
output_largest_duration_changes(&metrics, &mut job_info_resolver);

Ok(())
}
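
Both report functions now share a single mutable `JobInfoResolver`, so the job list for a given workflow run is fetched from the GitHub API at most once and reused for every link. A standalone sketch of that cache-or-fetch pattern, with a hypothetical `fetch` closure standing in for the API call:

```rust
use std::collections::HashMap;

// Sketch of the cache-or-fetch lookup JobInfoResolver performs internally.
// `fetch` is a hypothetical stand-in for the GitHub API call.
fn job_id_for(
    cache: &mut HashMap<u64, Vec<(String, u64)>>,
    run_id: u64,
    job_name: &str,
    fetch: impl Fn(u64) -> Vec<(String, u64)>,
) -> Option<u64> {
    // Fetch the job list only if this workflow run has not been seen before.
    let jobs = cache.entry(run_id).or_insert_with(|| fetch(run_id));
    jobs.iter().find(|(name, _)| name.as_str() == job_name).map(|(_, id)| *id)
}

fn main() {
    let mut cache = HashMap::new();
    let fetch = |_run_id: u64| vec![("x86_64-gnu".to_string(), 42)];
    // The first lookup populates the cache; later lookups reuse it,
    // even when the requested job name is not present.
    assert_eq!(job_id_for(&mut cache, 1, "x86_64-gnu", &fetch), Some(42));
    assert_eq!(job_id_for(&mut cache, 1, "missing-job", &fetch), None);
}
```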