diff --git a/.github/workflows/post-merge.yml b/.github/workflows/post-merge.yml
index 31e075f45d646..de31c28cc90e0 100644
--- a/.github/workflows/post-merge.yml
+++ b/.github/workflows/post-merge.yml
@@ -35,8 +35,13 @@ jobs:
cd src/ci/citool
- echo "Post-merge analysis result" > output.log
+ printf "*This is an experimental post-merge analysis report. You can ignore it.*\n\n" > output.log
+ printf "\nPost-merge report
\n\n" >> output.log
+
cargo run --release post-merge-report ${PARENT_COMMIT} ${{ github.sha }} >> output.log
+
+ printf " \n" >> output.log
+
cat output.log
gh pr comment ${HEAD_PR} -F output.log
diff --git a/compiler/rustc_codegen_ssa/src/back/linker.rs b/compiler/rustc_codegen_ssa/src/back/linker.rs
index a8405a2aec98d..732f86b08d8e8 100644
--- a/compiler/rustc_codegen_ssa/src/back/linker.rs
+++ b/compiler/rustc_codegen_ssa/src/back/linker.rs
@@ -1784,7 +1784,10 @@ fn exported_symbols_for_non_proc_macro(tcx: TyCtxt<'_>, crate_type: CrateType) -
let mut symbols = Vec::new();
let export_threshold = symbol_export::crates_export_threshold(&[crate_type]);
for_each_exported_symbols_include_dep(tcx, crate_type, |symbol, info, cnum| {
- if info.level.is_below_threshold(export_threshold) {
+ // Do not export mangled symbols from cdylibs and don't attempt to export compiler-builtins
+ // from any cdylib. The latter doesn't work anyway as we use hidden visibility for
+ // compiler-builtins. Most linkers silently ignore it, but ld64 gives a warning.
+ if info.level.is_below_threshold(export_threshold) && !tcx.is_compiler_builtins(cnum) {
symbols.push(symbol_export::exporting_symbol_name_for_instance_in_crate(
tcx, symbol, cnum,
));
diff --git a/compiler/rustc_passes/messages.ftl b/compiler/rustc_passes/messages.ftl
index ed498d9d34429..06398dd4f725b 100644
--- a/compiler/rustc_passes/messages.ftl
+++ b/compiler/rustc_passes/messages.ftl
@@ -811,8 +811,8 @@ passes_unused_duplicate =
passes_unused_empty_lints_note =
attribute `{$name}` with an empty list has no effect
-passes_unused_linker_warnings_note =
- the `linker_warnings` lint can only be controlled at the root of a crate that needs to be linked
+passes_unused_linker_messages_note =
+ the `linker_messages` lint can only be controlled at the root of a crate that needs to be linked
passes_unused_multiple =
multiple `{$name}` attributes
diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs
index ece5a53aaa9c4..148a3e1880a45 100644
--- a/compiler/rustc_passes/src/check_attr.rs
+++ b/compiler/rustc_passes/src/check_attr.rs
@@ -2387,7 +2387,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
.iter()
.all(|kind| matches!(kind, CrateType::Rlib | CrateType::Staticlib));
if never_needs_link {
- errors::UnusedNote::LinkerWarningsBinaryCrateOnly
+ errors::UnusedNote::LinkerMessagesBinaryCrateOnly
} else {
return;
}
diff --git a/compiler/rustc_passes/src/errors.rs b/compiler/rustc_passes/src/errors.rs
index b8359c27e538f..a72f40cd843a7 100644
--- a/compiler/rustc_passes/src/errors.rs
+++ b/compiler/rustc_passes/src/errors.rs
@@ -770,8 +770,8 @@ pub(crate) enum UnusedNote {
NoLints { name: Symbol },
#[note(passes_unused_default_method_body_const_note)]
DefaultMethodBodyConst,
- #[note(passes_unused_linker_warnings_note)]
- LinkerWarningsBinaryCrateOnly,
+ #[note(passes_unused_linker_messages_note)]
+ LinkerMessagesBinaryCrateOnly,
}
#[derive(LintDiagnostic)]
diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs
index dcdb7fa9c1054..34755249b60b1 100644
--- a/compiler/rustc_session/src/config.rs
+++ b/compiler/rustc_session/src/config.rs
@@ -2709,7 +2709,12 @@ pub fn parse_crate_types_from_list(list_list: Vec<String>) -> Result<Vec<CrateType>, String> {
"cdylib" => CrateType::Cdylib,
"bin" => CrateType::Executable,
"proc-macro" => CrateType::ProcMacro,
- _ => return Err(format!("unknown crate type: `{part}`")),
+ _ => {
+ return Err(format!(
+ "unknown crate type: `{part}`, expected one of: \
+ `lib`, `rlib`, `staticlib`, `dylib`, `cdylib`, `bin`, `proc-macro`",
+ ));
+ }
};
if !crate_types.contains(&new_part) {
crate_types.push(new_part)
diff --git a/library/core/src/marker.rs b/library/core/src/marker.rs
index e234f105b0b76..a4ab4674f4a36 100644
--- a/library/core/src/marker.rs
+++ b/library/core/src/marker.rs
@@ -465,9 +465,13 @@ impl Copy for &T {}
/// Notably, this doesn't include all trivially-destructible types for semver
/// reasons.
///
-/// Bikeshed name for now.
+/// Bikeshed name for now. This trait does not do anything other than reflect the
+/// set of types that are allowed within unions for field validity.
#[unstable(feature = "bikeshed_guaranteed_no_drop", issue = "none")]
#[lang = "bikeshed_guaranteed_no_drop"]
+#[rustc_deny_explicit_impl]
+#[rustc_do_not_implement_via_object]
+#[doc(hidden)]
pub trait BikeshedGuaranteedNoDrop {}
/// Types for which it is safe to share references between threads.
diff --git a/library/std/src/sys/pal/uefi/helpers.rs b/library/std/src/sys/pal/uefi/helpers.rs
index 0a2a8f5ef67be..60c33c637d762 100644
--- a/library/std/src/sys/pal/uefi/helpers.rs
+++ b/library/std/src/sys/pal/uefi/helpers.rs
@@ -216,6 +216,60 @@ pub(crate) fn device_path_to_text(path: NonNull) -> io::R
Err(io::const_error!(io::ErrorKind::NotFound, "no device path to text protocol found"))
}
+fn device_node_to_text(path: NonNull<r_efi::protocols::device_path::Protocol>) -> io::Result<OsString> {
+ fn node_to_text(
+ protocol: NonNull<device_path_to_text::Protocol>,
+ path: NonNull<r_efi::protocols::device_path::Protocol>,
+ ) -> io::Result<OsString> {
+ let path_ptr: *mut r_efi::efi::Char16 = unsafe {
+ ((*protocol.as_ptr()).convert_device_node_to_text)(
+ path.as_ptr(),
+ // DisplayOnly
+ r_efi::efi::Boolean::FALSE,
+ // AllowShortcuts
+ r_efi::efi::Boolean::FALSE,
+ )
+ };
+
+ let path = os_string_from_raw(path_ptr)
+ .ok_or(io::const_error!(io::ErrorKind::InvalidData, "Invalid path"))?;
+
+ if let Some(boot_services) = crate::os::uefi::env::boot_services() {
+ let boot_services: NonNull<r_efi::efi::BootServices> = boot_services.cast();
+ unsafe {
+ ((*boot_services.as_ptr()).free_pool)(path_ptr.cast());
+ }
+ }
+
+ Ok(path)
+ }
+
+ static LAST_VALID_HANDLE: AtomicPtr<crate::ffi::c_void> =
+ AtomicPtr::new(crate::ptr::null_mut());
+
+ if let Some(handle) = NonNull::new(LAST_VALID_HANDLE.load(Ordering::Acquire)) {
+ if let Ok(protocol) = open_protocol::<device_path_to_text::Protocol>(
+ handle,
+ device_path_to_text::PROTOCOL_GUID,
+ ) {
+ return node_to_text(protocol, path);
+ }
+ }
+
+ let device_path_to_text_handles = locate_handles(device_path_to_text::PROTOCOL_GUID)?;
+ for handle in device_path_to_text_handles {
+ if let Ok(protocol) = open_protocol::<device_path_to_text::Protocol>(
+ handle,
+ device_path_to_text::PROTOCOL_GUID,
+ ) {
+ LAST_VALID_HANDLE.store(handle.as_ptr(), Ordering::Release);
+ return node_to_text(protocol, path);
+ }
+ }
+
+ Err(io::const_error!(io::ErrorKind::NotFound, "No device path to text protocol found"))
+}
+
/// Gets RuntimeServices.
pub(crate) fn runtime_services() -> Option<NonNull<r_efi::efi::RuntimeServices>> {
let system_table: NonNull<r_efi::efi::SystemTable> =
@@ -319,6 +373,11 @@ impl<'a> BorrowedDevicePath<'a> {
pub(crate) fn to_text(&self) -> io::Result {
device_path_to_text(self.protocol)
}
+
+ #[expect(dead_code)]
+ pub(crate) const fn iter(&'a self) -> DevicePathIterator<'a> {
+ DevicePathIterator::new(DevicePathNode::new(self.protocol))
+ }
}
impl<'a> crate::fmt::Debug for BorrowedDevicePath<'a> {
@@ -330,6 +389,126 @@ impl<'a> crate::fmt::Debug for BorrowedDevicePath<'a> {
}
}
+pub(crate) struct DevicePathIterator<'a>(Option<DevicePathNode<'a>>);
+
+impl<'a> DevicePathIterator<'a> {
+ const fn new(node: DevicePathNode<'a>) -> Self {
+ if node.is_end() { Self(None) } else { Self(Some(node)) }
+ }
+}
+
+impl<'a> Iterator for DevicePathIterator<'a> {
+ type Item = DevicePathNode<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let cur_node = self.0?;
+
+ let next_node = unsafe { cur_node.next_node() };
+ self.0 = if next_node.is_end() { None } else { Some(next_node) };
+
+ Some(cur_node)
+ }
+}
+
+#[derive(Copy, Clone)]
+pub(crate) struct DevicePathNode<'a> {
+ protocol: NonNull<r_efi::protocols::device_path::Protocol>,
+ phantom: PhantomData<&'a r_efi::protocols::device_path::Protocol>,
+}
+
+impl<'a> DevicePathNode<'a> {
+ pub(crate) const fn new(protocol: NonNull<r_efi::protocols::device_path::Protocol>) -> Self {
+ Self { protocol, phantom: PhantomData }
+ }
+
+ pub(crate) const fn length(&self) -> u16 {
+ let len = unsafe { (*self.protocol.as_ptr()).length };
+ u16::from_le_bytes(len)
+ }
+
+ pub(crate) const fn node_type(&self) -> u8 {
+ unsafe { (*self.protocol.as_ptr()).r#type }
+ }
+
+ pub(crate) const fn sub_type(&self) -> u8 {
+ unsafe { (*self.protocol.as_ptr()).sub_type }
+ }
+
+ pub(crate) fn data(&self) -> &[u8] {
+ let length: usize = self.length().into();
+
+ // Some nodes do not have any special data
+ if length > 4 {
+ let raw_ptr: *const u8 = self.protocol.as_ptr().cast();
+ let data = unsafe { raw_ptr.add(4) };
+ unsafe { crate::slice::from_raw_parts(data, length - 4) }
+ } else {
+ &[]
+ }
+ }
+
+ pub(crate) const fn is_end(&self) -> bool {
+ self.node_type() == r_efi::protocols::device_path::TYPE_END
+ && self.sub_type() == r_efi::protocols::device_path::End::SUBTYPE_ENTIRE
+ }
+
+ #[expect(dead_code)]
+ pub(crate) const fn is_end_instance(&self) -> bool {
+ self.node_type() == r_efi::protocols::device_path::TYPE_END
+ && self.sub_type() == r_efi::protocols::device_path::End::SUBTYPE_INSTANCE
+ }
+
+ pub(crate) unsafe fn next_node(&self) -> Self {
+ let node = unsafe {
+ self.protocol
+ .cast::<u8>()
+ .add(self.length().into())
+ .cast::<r_efi::protocols::device_path::Protocol>()
+ };
+ Self::new(node)
+ }
+
+ #[expect(dead_code)]
+ pub(crate) fn to_path(&'a self) -> BorrowedDevicePath<'a> {
+ BorrowedDevicePath::new(self.protocol)
+ }
+
+ pub(crate) fn to_text(&self) -> io::Result<OsString> {
+ device_node_to_text(self.protocol)
+ }
+}
+
+impl<'a> PartialEq for DevicePathNode<'a> {
+ fn eq(&self, other: &Self) -> bool {
+ let self_len = self.length();
+ let other_len = other.length();
+
+ self_len == other_len
+ && unsafe {
+ compiler_builtins::mem::memcmp(
+ self.protocol.as_ptr().cast(),
+ other.protocol.as_ptr().cast(),
+ usize::from(self_len),
+ ) == 0
+ }
+ }
+}
+
+impl<'a> crate::fmt::Debug for DevicePathNode<'a> {
+ fn fmt(&self, f: &mut crate::fmt::Formatter<'_>) -> crate::fmt::Result {
+ match self.to_text() {
+ Ok(p) => p.fmt(f),
+ Err(_) => f
+ .debug_struct("DevicePathNode")
+ .field("type", &self.node_type())
+ .field("sub_type", &self.sub_type())
+ .field("length", &self.length())
+ .field("specific_device_path_data", &self.data())
+ .finish(),
+ }
+ }
+}
+
pub(crate) struct OwnedProtocol {
guid: r_efi::efi::Guid,
handle: NonNull<crate::ffi::c_void>,
diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs
index c393eb55c6244..2354fe1ebafbe 100644
--- a/src/bootstrap/src/core/build_steps/dist.rs
+++ b/src/bootstrap/src/core/build_steps/dist.rs
@@ -2481,7 +2481,7 @@ impl Step for Gcc {
fn run(self, builder: &Builder<'_>) -> Self::Output {
let tarball = Tarball::new(builder, "gcc", &self.target.triple);
let output = builder.ensure(super::gcc::Gcc { target: self.target });
- tarball.add_file(output.libgccjit, ".", 0o644);
+ tarball.add_file(output.libgccjit, "lib", 0o644);
tarball.generate()
}
}
diff --git a/src/bootstrap/src/core/build_steps/gcc.rs b/src/bootstrap/src/core/build_steps/gcc.rs
index 5a4bc9bdbcba9..0aa2a3325313f 100644
--- a/src/bootstrap/src/core/build_steps/gcc.rs
+++ b/src/bootstrap/src/core/build_steps/gcc.rs
@@ -63,11 +63,7 @@ impl Step for Gcc {
}
build_gcc(&metadata, builder, target);
-
- let lib_alias = metadata.install_dir.join("lib/libgccjit.so.0");
- if !lib_alias.exists() {
- t!(builder.symlink_file(&libgccjit_path, lib_alias));
- }
+ create_lib_alias(builder, &libgccjit_path);
t!(metadata.stamp.write());
@@ -75,6 +71,15 @@ impl Step for Gcc {
}
}
+/// Creates a libgccjit.so.0 alias next to libgccjit.so if it does not
+/// already exist
+fn create_lib_alias(builder: &Builder<'_>, libgccjit: &PathBuf) {
+ let lib_alias = libgccjit.parent().unwrap().join("libgccjit.so.0");
+ if !lib_alias.exists() {
+ t!(builder.symlink_file(libgccjit, lib_alias));
+ }
+}
+
pub struct Meta {
stamp: BuildStamp,
out_dir: PathBuf,
@@ -109,8 +114,10 @@ fn try_download_gcc(builder: &Builder<'_>, target: TargetSelection) -> Option<PathBuf> {
}
fn build_gcc(metadata: &Meta, builder: &Builder<'_>, target: TargetSelection) {
+ if builder.build.cc_tool(target).is_like_clang()
+ || builder.build.cxx_tool(target).is_like_clang()
+ {
+ panic!(
+ "Attempting to build GCC using Clang, which is known to misbehave. Please use GCC as the host C/C++ compiler. "
+ );
+ }
+
let Meta { stamp: _, out_dir, install_dir, root } = metadata;
t!(fs::create_dir_all(out_dir));
@@ -203,18 +218,13 @@ fn build_gcc(metadata: &Meta, builder: &Builder<'_>, target: TargetSelection) {
let mut configure_cmd = command(src_dir.join("configure"));
configure_cmd
.current_dir(out_dir)
- // On CI, we compile GCC with Clang.
- // The -Wno-everything flag is needed to make GCC compile with Clang 19.
- // `-g -O2` are the default flags that are otherwise used by Make.
- // FIXME(kobzol): change the flags once we have [gcc] configuration in config.toml.
- .env("CXXFLAGS", "-Wno-everything -g -O2")
- .env("CFLAGS", "-Wno-everything -g -O2")
.arg("--enable-host-shared")
- .arg("--enable-languages=jit")
+ .arg("--enable-languages=c,jit,lto")
.arg("--enable-checking=release")
.arg("--disable-bootstrap")
.arg("--disable-multilib")
.arg(format!("--prefix={}", install_dir.display()));
+
let cc = builder.build.cc(target).display().to_string();
let cc = builder
.build
diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs
index 8dfd1bbfd60f8..91574f8bf5d99 100644
--- a/src/bootstrap/src/lib.rs
+++ b/src/bootstrap/src/lib.rs
@@ -27,6 +27,7 @@ use std::{env, fs, io, str};
use build_helper::ci::gha;
use build_helper::exit;
+use cc::Tool;
use termcolor::{ColorChoice, StandardStream, WriteColor};
use utils::build_stamp::BuildStamp;
use utils::channel::GitInfo;
@@ -1218,6 +1219,16 @@ Executed at: {executed_at}"#,
self.cc.borrow()[&target].path().into()
}
+ /// Returns the internal `cc::Tool` for the C compiler.
+ fn cc_tool(&self, target: TargetSelection) -> Tool {
+ self.cc.borrow()[&target].clone()
+ }
+
+ /// Returns the internal `cc::Tool` for the C++ compiler.
+ fn cxx_tool(&self, target: TargetSelection) -> Tool {
+ self.cxx.borrow()[&target].clone()
+ }
+
/// Returns C flags that `cc-rs` thinks should be enabled for the
/// specified target by default.
fn cc_handled_clags(&self, target: TargetSelection, c: CLang) -> Vec<String> {
diff --git a/src/ci/citool/src/merge_report.rs b/src/ci/citool/src/merge_report.rs
index 17e42d49286fe..62daa2e68530a 100644
--- a/src/ci/citool/src/merge_report.rs
+++ b/src/ci/citool/src/merge_report.rs
@@ -1,8 +1,8 @@
-use std::cmp::Reverse;
-use std::collections::HashMap;
+use std::collections::{HashMap, HashSet};
+use std::path::PathBuf;
use anyhow::Context;
-use build_helper::metrics::{JsonRoot, TestOutcome};
+use build_helper::metrics::{JsonRoot, TestOutcome, TestSuiteMetadata};
use crate::jobs::JobDatabase;
use crate::metrics::get_test_suites;
@@ -13,8 +13,10 @@ type JobName = String;
/// Computes a post merge CI analysis report between the `parent` and `current` commits.
pub fn post_merge_report(job_db: JobDatabase, parent: Sha, current: Sha) -> anyhow::Result<()> {
let jobs = download_all_metrics(&job_db, &parent, ¤t)?;
- let diffs = aggregate_test_diffs(&jobs)?;
- report_test_changes(diffs);
+ let aggregated_test_diffs = aggregate_test_diffs(&jobs)?;
+
+ println!("Comparing {parent} (base) -> {current} (this PR)\n");
+ report_test_diffs(aggregated_test_diffs);
Ok(())
}
@@ -54,7 +56,16 @@ Maybe it was newly added?"#,
Ok(jobs)
}
+/// Downloads job metrics of the given job for the given commit.
+/// Caches the result on the local disk.
fn download_job_metrics(job_name: &str, sha: &str) -> anyhow::Result<JsonRoot> {
+ let cache_path = PathBuf::from(".citool-cache").join(sha).join(job_name).join("metrics.json");
+ if let Some(cache_entry) =
+ std::fs::read_to_string(&cache_path).ok().and_then(|data| serde_json::from_str(&data).ok())
+ {
+ return Ok(cache_entry);
+ }
+
let url = get_metrics_url(job_name, sha);
let mut response = ureq::get(&url).call()?;
if !response.status().is_success() {
@@ -68,6 +79,13 @@ fn download_job_metrics(job_name: &str, sha: &str) -> anyhow::Result<JsonRoot> {
.body_mut()
.read_json()
.with_context(|| anyhow::anyhow!("cannot deserialize metrics from {url}"))?;
+
+ // Ignore errors if cache cannot be created
+ if std::fs::create_dir_all(cache_path.parent().unwrap()).is_ok() {
+ if let Ok(serialized) = serde_json::to_string(&data) {
+ let _ = std::fs::write(&cache_path, &serialized);
+ }
+ }
Ok(data)
}
@@ -76,81 +94,80 @@ fn get_metrics_url(job_name: &str, sha: &str) -> String {
format!("https://ci-artifacts.rust-lang.org/rustc-builds{suffix}/{sha}/metrics-{job_name}.json")
}
+/// Represents a difference in the outcome of tests between a base and a current commit.
+/// Maps test diffs to jobs that contained them.
+#[derive(Debug)]
+struct AggregatedTestDiffs {
+ diffs: HashMap<TestDiff, Vec<JobName>>,
+}
+
fn aggregate_test_diffs(
jobs: &HashMap<JobName, JobMetrics>,
-) -> anyhow::Result<Vec<AggregatedTestDiffs>> {
- let mut job_diffs = vec![];
+) -> anyhow::Result<AggregatedTestDiffs> {
+ let mut diffs: HashMap<TestDiff, Vec<JobName>> = HashMap::new();
// Aggregate test suites
for (name, metrics) in jobs {
if let Some(parent) = &metrics.parent {
let tests_parent = aggregate_tests(parent);
let tests_current = aggregate_tests(&metrics.current);
- let test_diffs = calculate_test_diffs(tests_parent, tests_current);
- if !test_diffs.is_empty() {
- job_diffs.push((name.clone(), test_diffs));
+ for diff in calculate_test_diffs(tests_parent, tests_current) {
+ diffs.entry(diff).or_default().push(name.to_string());
}
}
}
- // Aggregate jobs with the same diff, as often the same diff will appear in many jobs
- let job_diffs: HashMap<Vec<(Test, TestOutcomeDiff)>, Vec<JobName>> =
- job_diffs.into_iter().fold(HashMap::new(), |mut acc, (job, diffs)| {
- acc.entry(diffs).or_default().push(job);
- acc
- });
+ Ok(AggregatedTestDiffs { diffs })
+}
- Ok(job_diffs
- .into_iter()
- .map(|(test_diffs, jobs)| AggregatedTestDiffs { jobs, test_diffs })
- .collect())
+#[derive(Eq, PartialEq, Hash, Debug)]
+enum TestOutcomeDiff {
+ ChangeOutcome { before: TestOutcome, after: TestOutcome },
+ Missing { before: TestOutcome },
+ Added(TestOutcome),
}
-fn calculate_test_diffs(
- reference: TestSuiteData,
- current: TestSuiteData,
-) -> Vec<(Test, TestOutcomeDiff)> {
- let mut diffs = vec![];
+#[derive(Eq, PartialEq, Hash, Debug)]
+struct TestDiff {
+ test: Test,
+ diff: TestOutcomeDiff,
+}
+
+fn calculate_test_diffs(parent: TestSuiteData, current: TestSuiteData) -> HashSet<TestDiff> {
+ let mut diffs = HashSet::new();
for (test, outcome) in ¤t.tests {
- match reference.tests.get(test) {
+ match parent.tests.get(test) {
Some(before) => {
if before != outcome {
- diffs.push((
- test.clone(),
- TestOutcomeDiff::ChangeOutcome {
+ diffs.insert(TestDiff {
+ test: test.clone(),
+ diff: TestOutcomeDiff::ChangeOutcome {
before: before.clone(),
after: outcome.clone(),
},
- ));
+ });
}
}
- None => diffs.push((test.clone(), TestOutcomeDiff::Added(outcome.clone()))),
+ None => {
+ diffs.insert(TestDiff {
+ test: test.clone(),
+ diff: TestOutcomeDiff::Added(outcome.clone()),
+ });
+ }
}
}
- for (test, outcome) in &reference.tests {
+ for (test, outcome) in &parent.tests {
if !current.tests.contains_key(test) {
- diffs.push((test.clone(), TestOutcomeDiff::Missing { before: outcome.clone() }));
+ diffs.insert(TestDiff {
+ test: test.clone(),
+ diff: TestOutcomeDiff::Missing { before: outcome.clone() },
+ });
}
}
diffs
}
-/// Represents a difference in the outcome of tests between a base and a current commit.
-#[derive(Debug)]
-struct AggregatedTestDiffs {
- /// All jobs that had the exact same test diffs.
- jobs: Vec<JobName>,
- test_diffs: Vec<(Test, TestOutcomeDiff)>,
-}
-
-#[derive(Eq, PartialEq, Hash, Debug)]
-enum TestOutcomeDiff {
- ChangeOutcome { before: TestOutcome, after: TestOutcome },
- Missing { before: TestOutcome },
- Added(TestOutcome),
-}
-
/// Aggregates test suite executions from all bootstrap invocations in a given CI job.
#[derive(Default)]
struct TestSuiteData {
@@ -160,6 +177,7 @@ struct TestSuiteData {
#[derive(Hash, PartialEq, Eq, Debug, Clone)]
struct Test {
name: String,
+ is_doctest: bool,
}
/// Extracts all tests from the passed metrics and map them to their outcomes.
@@ -168,7 +186,10 @@ fn aggregate_tests(metrics: &JsonRoot) -> TestSuiteData {
let test_suites = get_test_suites(&metrics);
for suite in test_suites {
for test in &suite.tests {
- let test_entry = Test { name: normalize_test_name(&test.name) };
+ // Poor man's detection of doctests based on the "(line XYZ)" suffix
+ let is_doctest = matches!(suite.metadata, TestSuiteMetadata::CargoPackage { .. })
+ && test.name.contains("(line");
+ let test_entry = Test { name: normalize_test_name(&test.name), is_doctest };
tests.insert(test_entry, test.outcome.clone());
}
}
@@ -181,16 +202,13 @@ fn normalize_test_name(name: &str) -> String {
}
/// Prints test changes in Markdown format to stdout.
-fn report_test_changes(mut diffs: Vec<AggregatedTestDiffs>) {
+fn report_test_diffs(diff: AggregatedTestDiffs) {
println!("## Test differences");
- if diffs.is_empty() {
+ if diff.diffs.is_empty() {
println!("No test diffs found");
return;
}
- // Sort diffs in decreasing order by diff count
- diffs.sort_by_key(|entry| Reverse(entry.test_diffs.len()));
-
fn format_outcome(outcome: &TestOutcome) -> String {
match outcome {
TestOutcome::Passed => "pass".to_string(),
@@ -219,36 +237,79 @@ fn report_test_changes(mut diffs: Vec<AggregatedTestDiffs>) {
}
}
- let max_diff_count = 10;
- let max_job_count = 5;
- let max_test_count = 10;
-
- for diff in diffs.iter().take(max_diff_count) {
- let mut jobs = diff.jobs.clone();
- jobs.sort();
-
- let jobs = jobs.iter().take(max_job_count).map(|j| format!("`{j}`")).collect::<Vec<_>>();
+ fn format_job_group(group: u64) -> String {
+ format!("**J{group}**")
+ }
- let extra_jobs = diff.jobs.len().saturating_sub(max_job_count);
- let suffix = if extra_jobs > 0 {
- format!(" (and {extra_jobs} {})", pluralize("other", extra_jobs))
- } else {
- String::new()
+ // It would be quite noisy to repeat the jobs that contained the test changes after/next to
+ // every test diff. At the same time, grouping the test diffs by
+ // [unique set of jobs that contained them] also doesn't work well, because the test diffs
+ // would have to be duplicated several times.
+ // Instead, we create a set of unique job groups, and then print a job group after each test.
+ // We then print the job groups at the end, as a sort of index.
+ let mut grouped_diffs: Vec<(&TestDiff, u64)> = vec![];
+ let mut job_list_to_group: HashMap<&[JobName], u64> = HashMap::new();
+ let mut job_index: Vec<&[JobName]> = vec![];
+
+ let original_diff_count = diff.diffs.len();
+ let diffs = diff
+ .diffs
+ .into_iter()
+ .filter(|(diff, _)| !diff.test.is_doctest)
+ .map(|(diff, mut jobs)| {
+ jobs.sort();
+ (diff, jobs)
+ })
+ .collect::<Vec<_>>();
+ let doctest_count = original_diff_count.saturating_sub(diffs.len());
+
+ let max_diff_count = 100;
+ for (diff, jobs) in diffs.iter().take(max_diff_count) {
+ let jobs = &*jobs;
+ let job_group = match job_list_to_group.get(jobs.as_slice()) {
+ Some(id) => *id,
+ None => {
+ let id = job_index.len() as u64;
+ job_index.push(jobs);
+ job_list_to_group.insert(jobs, id);
+ id
+ }
};
- println!("- {}{suffix}", jobs.join(","));
+ grouped_diffs.push((diff, job_group));
+ }
- let extra_tests = diff.test_diffs.len().saturating_sub(max_test_count);
- for (test, outcome_diff) in diff.test_diffs.iter().take(max_test_count) {
- println!(" - {}: {}", test.name, format_diff(&outcome_diff));
- }
- if extra_tests > 0 {
- println!(" - (and {extra_tests} additional {})", pluralize("tests", extra_tests));
- }
+ // Sort diffs by job group and test name
+ grouped_diffs.sort_by(|(d1, g1), (d2, g2)| g1.cmp(&g2).then(d1.test.name.cmp(&d2.test.name)));
+
+ for (diff, job_group) in grouped_diffs {
+ println!(
+ "- `{}`: {} ({})",
+ diff.test.name,
+ format_diff(&diff.diff),
+ format_job_group(job_group)
+ );
}
let extra_diffs = diffs.len().saturating_sub(max_diff_count);
if extra_diffs > 0 {
- println!("\n(and {extra_diffs} additional {})", pluralize("diff", extra_diffs));
+ println!("\n(and {extra_diffs} additional {})", pluralize("test diff", extra_diffs));
+ }
+
+ if doctest_count > 0 {
+ println!(
+ "\nAdditionally, {doctest_count} doctest {} were found. These are ignored, as they are noisy.",
+ pluralize("diff", doctest_count)
+ );
+ }
+
+ // Now print the job group index
+ println!("\n**Job group index**\n");
+ for (group, jobs) in job_index.into_iter().enumerate() {
+ println!(
+ "- {}: {}",
+ format_job_group(group as u64),
+ jobs.iter().map(|j| format!("`{j}`")).collect::>().join(", ")
+ );
}
}
diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile
index f54ecef1e308a..ae5bf8946dd94 100644
--- a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile
+++ b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile
@@ -101,7 +101,9 @@ ENV SCRIPT python3 ../x.py build --set rust.debug=true opt-dist && \
./build/$HOSTS/stage0-tools-bin/opt-dist linux-ci -- python3 ../x.py dist \
--host $HOSTS --target $HOSTS \
--include-default-paths \
- build-manifest bootstrap gcc
+ build-manifest bootstrap && \
+ # Use GCC for building GCC, as it seems to behave badly when built with Clang
+ CC=/rustroot/bin/cc CXX=/rustroot/bin/c++ python3 ../x.py dist gcc
ENV CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER=clang
# This is the only builder which will create source tarballs
diff --git a/src/tools/opt-dist/src/tests.rs b/src/tools/opt-dist/src/tests.rs
index 8b3bd77141ba8..c8759bb6ff62a 100644
--- a/src/tools/opt-dist/src/tests.rs
+++ b/src/tools/opt-dist/src/tests.rs
@@ -1,3 +1,5 @@
+use std::path::Path;
+
use anyhow::Context;
use camino::{Utf8Path, Utf8PathBuf};
@@ -86,36 +88,57 @@ llvm-config = "{llvm_config}"
log::info!("Using following `config.toml` for running tests:\n{config_content}");
// Simulate a stage 0 compiler with the extracted optimized dist artifacts.
- std::fs::write("config.toml", config_content)?;
-
- let x_py = env.checkout_path().join("x.py");
- let mut args = vec![
- env.python_binary(),
- x_py.as_str(),
- "test",
- "--build",
- env.host_tuple(),
- "--stage",
- "0",
- "tests/assembly",
- "tests/codegen",
- "tests/codegen-units",
- "tests/incremental",
- "tests/mir-opt",
- "tests/pretty",
- "tests/run-make/glibc-symbols-x86_64-unknown-linux-gnu",
- "tests/ui",
- "tests/crashes",
- ];
- for test_path in env.skipped_tests() {
- args.extend(["--skip", test_path]);
+ with_backed_up_file(Path::new("config.toml"), &config_content, || {
+ let x_py = env.checkout_path().join("x.py");
+ let mut args = vec![
+ env.python_binary(),
+ x_py.as_str(),
+ "test",
+ "--build",
+ env.host_tuple(),
+ "--stage",
+ "0",
+ "tests/assembly",
+ "tests/codegen",
+ "tests/codegen-units",
+ "tests/incremental",
+ "tests/mir-opt",
+ "tests/pretty",
+ "tests/run-make/glibc-symbols-x86_64-unknown-linux-gnu",
+ "tests/ui",
+ "tests/crashes",
+ ];
+ for test_path in env.skipped_tests() {
+ args.extend(["--skip", test_path]);
+ }
+ cmd(&args)
+ .env("COMPILETEST_FORCE_STAGE0", "1")
+ // Also run dist-only tests
+ .env("COMPILETEST_ENABLE_DIST_TESTS", "1")
+ .run()
+ .context("Cannot execute tests")
+ })
+}
+
+/// Backup `path` (if it exists), then write `contents` into it, and then restore the original
+/// contents of the file.
+fn with_backed_up_file<F>(path: &Path, contents: &str, func: F) -> anyhow::Result<()>
+where
+ F: FnOnce() -> anyhow::Result<()>,
+{
+ let original_contents =
+ if path.is_file() { Some(std::fs::read_to_string(path)?) } else { None };
+
+ // Overwrite it with new contents
+ std::fs::write(path, contents)?;
+
+ let ret = func();
+
+ if let Some(original_contents) = original_contents {
+ std::fs::write(path, original_contents)?;
}
- cmd(&args)
- .env("COMPILETEST_FORCE_STAGE0", "1")
- // Also run dist-only tests
- .env("COMPILETEST_ENABLE_DIST_TESTS", "1")
- .run()
- .context("Cannot execute tests")
+
+ ret
}
/// Tries to find the version of the dist artifacts (either nightly, beta, or 1.XY.Z).
diff --git a/tests/ui/crate_type_flag.rs b/tests/ui/crate_type_flag.rs
new file mode 100644
index 0000000000000..4f3cfbae45d68
--- /dev/null
+++ b/tests/ui/crate_type_flag.rs
@@ -0,0 +1,4 @@
+//@ compile-flags: --crate-type dynlib
+//@ error-pattern: unknown crate type: `dynlib`, expected one of: `lib`, `rlib`, `staticlib`, `dylib`, `cdylib`, `bin`, `proc-macro`
+
+fn main() {}
diff --git a/tests/ui/crate_type_flag.stderr b/tests/ui/crate_type_flag.stderr
new file mode 100644
index 0000000000000..26a3e1fbd6829
--- /dev/null
+++ b/tests/ui/crate_type_flag.stderr
@@ -0,0 +1,2 @@
+error: unknown crate type: `dynlib`, expected one of: `lib`, `rlib`, `staticlib`, `dylib`, `cdylib`, `bin`, `proc-macro`
+
diff --git a/tests/ui/invalid-compile-flags/crate-type-flag.empty_crate_type.stderr b/tests/ui/invalid-compile-flags/crate-type-flag.empty_crate_type.stderr
index 0f8772024dfab..4ab816d30f6c9 100644
--- a/tests/ui/invalid-compile-flags/crate-type-flag.empty_crate_type.stderr
+++ b/tests/ui/invalid-compile-flags/crate-type-flag.empty_crate_type.stderr
@@ -1,2 +1,2 @@
-error: unknown crate type: ``
+error: unknown crate type: ``, expected one of: `lib`, `rlib`, `staticlib`, `dylib`, `cdylib`, `bin`, `proc-macro`
diff --git a/tests/ui/invalid-compile-flags/crate-type-flag.proc_underscore_macro.stderr b/tests/ui/invalid-compile-flags/crate-type-flag.proc_underscore_macro.stderr
index a4a9741699643..91cc66801f102 100644
--- a/tests/ui/invalid-compile-flags/crate-type-flag.proc_underscore_macro.stderr
+++ b/tests/ui/invalid-compile-flags/crate-type-flag.proc_underscore_macro.stderr
@@ -1,2 +1,2 @@
-error: unknown crate type: `proc_macro`
+error: unknown crate type: `proc_macro`, expected one of: `lib`, `rlib`, `staticlib`, `dylib`, `cdylib`, `bin`, `proc-macro`
diff --git a/tests/ui/invalid-compile-flags/crate-type-flag.rs b/tests/ui/invalid-compile-flags/crate-type-flag.rs
index 07d853b330716..0101430714974 100644
--- a/tests/ui/invalid-compile-flags/crate-type-flag.rs
+++ b/tests/ui/invalid-compile-flags/crate-type-flag.rs
@@ -45,16 +45,16 @@
// `proc-macro` is accepted, but `proc_macro` is not.
//@ revisions: proc_underscore_macro
//@[proc_underscore_macro] compile-flags: --crate-type=proc_macro
-//@[proc_underscore_macro] error-pattern: "unknown crate type: `proc_macro`"
+//@[proc_underscore_macro] error-pattern: unknown crate type: `proc_macro`
// Empty `--crate-type` not accepted.
//@ revisions: empty_crate_type
//@[empty_crate_type] compile-flags: --crate-type=
-//@[empty_crate_type] error-pattern: "unknown crate type: ``"
+//@[empty_crate_type] error-pattern: unknown crate type: ``
// Random unknown crate type. Also check that we can handle non-ASCII.
//@ revisions: unknown
//@[unknown] compile-flags: --crate-type=🤡
-//@[unknown] error-pattern: "unknown crate type: `🤡`"
+//@[unknown] error-pattern: unknown crate type: `🤡`
fn main() {}
diff --git a/tests/ui/invalid-compile-flags/crate-type-flag.unknown.stderr b/tests/ui/invalid-compile-flags/crate-type-flag.unknown.stderr
index 7fb0f09a1afbd..ec202e171f058 100644
--- a/tests/ui/invalid-compile-flags/crate-type-flag.unknown.stderr
+++ b/tests/ui/invalid-compile-flags/crate-type-flag.unknown.stderr
@@ -1,2 +1,2 @@
-error: unknown crate type: `🤡`
+error: unknown crate type: `🤡`, expected one of: `lib`, `rlib`, `staticlib`, `dylib`, `cdylib`, `bin`, `proc-macro`
diff --git a/tests/ui/lint/linker-warning.stderr b/tests/ui/lint/linker-warning.stderr
index 3a2c392fd0312..c678562ab5466 100644
--- a/tests/ui/lint/linker-warning.stderr
+++ b/tests/ui/lint/linker-warning.stderr
@@ -16,7 +16,7 @@ warning: unused attribute
LL | #![allow(linker_messages)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: remove this attribute
|
- = note: the `linker_warnings` lint can only be controlled at the root of a crate that needs to be linked
+ = note: the `linker_messages` lint can only be controlled at the root of a crate that needs to be linked
warning: 2 warnings emitted