diff --git a/crates/cargo-util-schemas/src/core/package_id_spec.rs b/crates/cargo-util-schemas/src/core/package_id_spec.rs index c1e636c5193..5cd156974c3 100644 --- a/crates/cargo-util-schemas/src/core/package_id_spec.rs +++ b/crates/cargo-util-schemas/src/core/package_id_spec.rs @@ -130,14 +130,20 @@ impl PackageIdSpec { // Leave `sparse` as part of URL, see `SourceId::new` // url = strip_url_protocol(&url); } - "path" => { + kind_str @ ("path" | "builtin") => { if url.query().is_some() { return Err(ErrorKind::UnexpectedQueryString(url).into()); } if scheme != "file" { return Err(ErrorKind::UnsupportedPathPlusScheme(scheme.into()).into()); } - kind = Some(SourceKind::Path); + //TODO: This affects Cargo's json output, and needs consideration for what we + //want for different commands. + kind = if kind_str == "path" { + Some(SourceKind::Path) + } else { + Some(SourceKind::Builtin) + }; url = strip_url_protocol(&url); } kind => return Err(ErrorKind::UnsupportedProtocol(kind.into()).into()), diff --git a/crates/cargo-util-schemas/src/core/source_kind.rs b/crates/cargo-util-schemas/src/core/source_kind.rs index 3794791114d..1c16a251371 100644 --- a/crates/cargo-util-schemas/src/core/source_kind.rs +++ b/crates/cargo-util-schemas/src/core/source_kind.rs @@ -15,6 +15,8 @@ pub enum SourceKind { LocalRegistry, /// A directory-based registry. Directory, + /// Package sources distributed with the rust toolchain + Builtin, } // The hash here is important for what folder packages get downloaded into. 
@@ -40,6 +42,7 @@ impl SourceKind { SourceKind::SparseRegistry => None, SourceKind::LocalRegistry => Some("local-registry"), SourceKind::Directory => Some("directory"), + SourceKind::Builtin => Some("builtin"), } } } @@ -71,6 +74,10 @@ impl Ord for SourceKind { (_, SourceKind::Directory) => Ordering::Greater, (SourceKind::Git(a), SourceKind::Git(b)) => a.cmp(b), + (SourceKind::Git(_), _) => Ordering::Less, + (_, SourceKind::Git(_)) => Ordering::Greater, + + (SourceKind::Builtin, SourceKind::Builtin) => Ordering::Equal, } } } diff --git a/crates/resolver-tests/src/lib.rs b/crates/resolver-tests/src/lib.rs index a78e2d3cd24..d23e22bf9bc 100644 --- a/crates/resolver-tests/src/lib.rs +++ b/crates/resolver-tests/src/lib.rs @@ -204,6 +204,7 @@ pub fn resolve_with_global_context_raw( &version_prefs, ResolveVersion::with_rust_version(None), Some(gctx), + &[], ); // The largest test in our suite takes less then 30 secs. diff --git a/src/cargo/core/compiler/standard_lib.rs b/src/cargo/core/compiler/standard_lib.rs index 45a313a921d..e6914521a29 100644 --- a/src/cargo/core/compiler/standard_lib.rs +++ b/src/cargo/core/compiler/standard_lib.rs @@ -15,7 +15,11 @@ use std::path::PathBuf; use super::BuildConfig; -fn std_crates<'a>(crates: &'a [String], default: &'static str, units: &[Unit]) -> HashSet<&'a str> { +pub fn std_crates<'a>( + crates: &'a [String], + default: &'static str, + units: &[Unit], +) -> HashSet<&'a str> { let mut crates = HashSet::from_iter(crates.iter().map(|s| s.as_str())); // This is a temporary hack until there is a more principled way to // declare dependencies in Cargo.toml. @@ -59,6 +63,7 @@ pub fn resolve_std<'gctx>( // TODO: Consider doing something to enforce --locked? Or to prevent the // lock file from being written, such as setting ephemeral. let mut std_ws = Workspace::new(&std_ws_manifest_path, gctx)?; + std_ws.set_is_std(true); // Don't require optional dependencies in this workspace, aka std's own // `[dev-dependencies]`. 
No need for us to generate a `Resolve` which has // those included because we'll never use them anyway. @@ -216,7 +221,7 @@ fn generate_roots( Ok(()) } -fn detect_sysroot_src_path(target_data: &RustcTargetData<'_>) -> CargoResult { +pub fn detect_sysroot_src_path(target_data: &RustcTargetData<'_>) -> CargoResult { if let Some(s) = target_data.gctx.get_env_os("__CARGO_TESTS_ONLY_SRC_ROOT") { return Ok(s.into()); } diff --git a/src/cargo/core/compiler/unit_dependencies.rs b/src/cargo/core/compiler/unit_dependencies.rs index 2d0ea7c3c62..1468e693afa 100644 --- a/src/cargo/core/compiler/unit_dependencies.rs +++ b/src/cargo/core/compiler/unit_dependencies.rs @@ -53,6 +53,8 @@ struct State<'a, 'gctx> { std_resolve: Option<&'a Resolve>, /// Like `usr_features` but for building standard library (`-Zbuild-std`). std_features: Option<&'a ResolvedFeatures>, + // The root units of any opaque dependencies present in the user resolve + opaque_roots: &'a HashMap>, /// `true` while generating the dependencies for the standard library. is_std: bool, /// The high-level operation requested by the user. 
@@ -93,7 +95,7 @@ pub fn build_unit_dependencies<'a, 'gctx>( resolve: &'a Resolve, features: &'a ResolvedFeatures, std_resolve: Option<&'a (Resolve, ResolvedFeatures)>, - roots: &[Unit], + roots: &[Unit], //TODO: builtins can be roots if requested on the command line scrape_units: &[Unit], std_roots: &HashMap>, intent: UserIntent, @@ -120,6 +122,7 @@ pub fn build_unit_dependencies<'a, 'gctx>( usr_features: features, std_resolve, std_features, + opaque_roots: std_roots, is_std: false, intent, target_data, @@ -130,15 +133,14 @@ pub fn build_unit_dependencies<'a, 'gctx>( }; let std_unit_deps = calc_deps_of_std(&mut state, std_roots)?; + if let Some(std_unit_deps) = std_unit_deps { + attach_std_deps(&mut state, std_unit_deps); + } deps_of_roots(roots, &mut state)?; super::links::validate_links(state.resolve(), &state.unit_dependencies)?; // Hopefully there aren't any links conflicts with the standard library? - if let Some(std_unit_deps) = std_unit_deps { - attach_std_deps(&mut state, std_roots, std_unit_deps); - } - connect_run_custom_build_deps(&mut state); // Dependencies are used in tons of places throughout the backend, many of @@ -189,38 +191,14 @@ fn calc_deps_of_std( Ok(Some(std::mem::take(&mut state.unit_dependencies))) } -/// Add the standard library units to the `unit_dependencies`. -fn attach_std_deps( - state: &mut State<'_, '_>, - std_roots: &HashMap>, - std_unit_deps: UnitGraph, -) { - // Attach the standard library as a dependency of every target unit. - let mut found = false; - for (unit, deps) in state.unit_dependencies.iter_mut() { - if !unit.kind.is_host() && !unit.mode.is_run_custom_build() { - deps.extend(std_roots[&unit.kind].iter().map(|unit| UnitDep { - unit: unit.clone(), - unit_for: UnitFor::new_normal(unit.kind), - extern_crate_name: unit.pkg.name(), - dep_name: None, - // TODO: Does this `public` make sense? 
- public: true, - noprelude: true, - nounused: true, - // Artificial dependency - manifest_deps: Unhashed(None), - })); - found = true; +/// Add the dependencies of standard library units to the `unit_dependencies`. +fn attach_std_deps(state: &mut State<'_, '_>, std_unit_deps: UnitGraph) { + for (unit, deps) in std_unit_deps.into_iter() { + if unit.pkg.package_id().name() == "sysroot" { + continue; } - } - // And also include the dependencies of the standard library itself. Don't - // include these if no units actually needed the standard library. - if found { - for (unit, deps) in std_unit_deps.into_iter() { - if let Some(other_unit) = state.unit_dependencies.insert(unit, deps) { - panic!("std unit collision with existing unit: {:?}", other_unit); - } + if let Some(other_unit) = state.unit_dependencies.insert(unit, deps) { + panic!("std unit collision with existing unit: {:?}", other_unit); } } } @@ -341,17 +319,42 @@ fn compute_deps( )?; ret.push(unit_dep); } else { - let unit_dep = new_unit_dep( - state, - unit, - dep_pkg, - dep_lib, - Some(manifest_deps), - dep_unit_for, - unit.kind.for_target(dep_lib), - mode, - IS_NO_ARTIFACT_DEP, - )?; + // if builtin, return from state.opaque_roots + let unit_dep = if dep_pkg_id.source_id().is_builtin() { + if unit_for.is_for_host() { + // Build scripts/proc_macros shouldn't use build-std + continue; + } + let unit = state + .opaque_roots + .get(&unit.kind.for_target(dep_lib)) + .expect("Std was resolved for all requested targets") + .iter() + .find(|&u| u.pkg.name() == dep_pkg_id.name()) + .expect("libstd was resolved with all possible builtin deps as roots"); + UnitDep { + unit: unit.clone(), + unit_for: UnitFor::new_normal(unit.kind), + extern_crate_name: unit.pkg.name(), + dep_name: None, + public: true, + noprelude: true, + nounused: true, + manifest_deps: Unhashed(None), + } + } else { + new_unit_dep( + state, + unit, + dep_pkg, + dep_lib, + Some(manifest_deps), + dep_unit_for, + unit.kind.for_target(dep_lib), + mode, 
+ IS_NO_ARTIFACT_DEP, + )? + }; ret.push(unit_dep); } @@ -367,6 +370,30 @@ fn compute_deps( } state.dev_dependency_edges.extend(dev_deps); + if state.gctx.cli_unstable().build_std.is_some() + && unit.mode.is_rustc_test() + && unit.target.harness() + && !unit_for.is_for_host() + { + // If test isn't found, we were probably compiling for no-std. + if let Some(test) = state.opaque_roots[&unit.kind] + .iter() + .find(|u| u.pkg.name() == "test") + { + let unitdep = UnitDep { + unit: test.clone(), + unit_for: UnitFor::new_normal(test.kind), + extern_crate_name: test.pkg.name(), + dep_name: None, + public: true, + noprelude: true, + nounused: true, + manifest_deps: Unhashed(None), + }; + ret.push(unitdep); + } + } + // If this target is a build script, then what we've collected so far is // all we need. If this isn't a build script, then it depends on the // build script if there is one. @@ -651,6 +678,10 @@ fn compute_deps_doc( // the documentation of the library being built. let mut ret = Vec::new(); for (id, deps) in state.deps(unit, unit_for) { + if id.source_id().is_builtin() { + // Build-std for cargo doc is not yet implemented + continue; + } let Some(dep_lib) = calc_artifact_deps(unit, unit_for, id, &deps, state, &mut ret)? else { continue; }; diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs index 29853420da9..204a70e3a1f 100644 --- a/src/cargo/core/dependency.rs +++ b/src/cargo/core/dependency.rs @@ -1,4 +1,5 @@ use cargo_platform::Platform; +use cargo_util_schemas::core::SourceKind; use semver::VersionReq; use serde::Serialize; use serde::ser; @@ -51,6 +52,10 @@ struct Inner { // This dependency should be used only for this platform. // `None` means *all platforms*. platform: Option, + + // Opaque dependencies should be resolved with a separate resolver run, and handled + // by unit generation. 
+ opaque: bool, } #[derive(Serialize)] @@ -162,10 +167,42 @@ impl Dependency { platform: None, explicit_name_in_toml: None, artifact: None, + opaque: false, }), } } + pub fn new_for_builtin_summary(s: Summary) -> CargoResult { + if let SourceKind::Builtin = s.source_id().kind() { + Ok(Dependency { + // Most of these fields are ignored by the resolver/registry, which will just match this + // dependency up with the given Summary + inner: Arc::new(Inner { + name: s.name(), + source_id: s.source_id(), + registry_id: None, + req: OptVersionReq::Any, + kind: DepKind::Normal, + only_match_name: true, + optional: false, + public: true, + features: Vec::new(), + default_features: true, + specified_req: false, + platform: None, + explicit_name_in_toml: None, + artifact: None, + opaque: true, + }), + }) + } else { + Err(anyhow::format_err!( + "Can't create builtin dependency for a non-builtin \"{}\"", + s.source_id() + )) + } + } + pub fn serialized( &self, unstable_flags: &CliUnstable, @@ -467,6 +504,10 @@ impl Dependency { pub(crate) fn maybe_lib(&self) -> bool { self.artifact().map(|a| a.is_lib).unwrap_or(true) } + + pub fn is_opaque(&self) -> bool { + self.inner.opaque + } } /// The presence of an artifact turns an ordinary dependency into an Artifact dependency. 
diff --git a/src/cargo/core/resolver/dep_cache.rs b/src/cargo/core/resolver/dep_cache.rs index 9acd59fd413..4f56dc17264 100644 --- a/src/cargo/core/resolver/dep_cache.rs +++ b/src/cargo/core/resolver/dep_cache.rs @@ -24,6 +24,7 @@ use crate::util::LocalPollAdapter; use crate::util::closest_msg; use crate::util::errors::CargoResult; use crate::util::interning::{INTERNED_DEFAULT, InternedString}; +use crate::util::network::PollExt; use anyhow::Context as _; use std::cell::RefCell; @@ -191,6 +192,8 @@ pub struct RegistryQueryer<'a, T: Registry> { (Option, Summary, ResolveOpts), (Rc<(HashSet, Rc>)>, bool), >, + /// The set of builtin dependencies to inject when appropriate + implicit_builtin_deps: &'a [Dependency], } impl<'a, T: Registry> RegistryQueryer<'a, T> { @@ -198,6 +201,7 @@ impl<'a, T: Registry> RegistryQueryer<'a, T> { registry: &'a T, replacements: &'a [(PackageIdSpec, Dependency)], version_prefs: &'a VersionPreferences, + implicit_builtin_deps: &'a [Dependency], ) -> Self { let inner = Rc::new(RegistryQueryerAsync::new( registry, @@ -208,6 +212,7 @@ impl<'a, T: Registry> RegistryQueryer<'a, T> { inner: inner.clone(), poller: LocalPollAdapter::new(inner), summary_cache: HashMap::new(), + implicit_builtin_deps, } } @@ -301,6 +306,19 @@ impl<'a, T: Registry> RegistryQueryer<'a, T> { }) .collect::>>()?; + if opts.inject_builtins { + for dep in self.implicit_builtin_deps { + // TODO: This kicks off multiple queries per package searched. What's the + // performance impact? + let candidates = self + .query(dep, first_version) + .expect("Builtin packages should be immediately available") + .expect("Builtin names should be valid by this point"); + + deps.push((dep.clone(), candidates, Rc::new(Default::default()))); + } + } + // Attempt to resolve dependencies with fewer candidates before trying // dependencies with more candidates. 
This way if the dependency with // only one candidate can't be resolved we don't have to do a bunch of diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index 4f437123282..86c44b13ef7 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -514,6 +514,7 @@ impl ser::Serialize for Resolve { let encodable = ids .iter() + .filter(|&p_id| !p_id.source_id().is_builtin()) .map(|&id| encodable_resolve_node(id, self, &state)) .collect::>(); @@ -607,6 +608,7 @@ fn encodable_resolve_node( None => { let mut deps = resolve .deps_not_replaced(id) + .filter(|(id, _)| !id.source_id().is_builtin()) .map(|(id, _)| encodable_package_id(id, state, resolve.version())) .collect::>(); deps.sort(); @@ -661,7 +663,7 @@ pub fn encodable_package_id( } fn encodable_source_id(id: SourceId, version: ResolveVersion) -> Option { - if id.is_path() { + if id.is_path() || id.is_builtin() { None } else { Some( diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 5539d46a9ea..52a22cbbad6 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -65,6 +65,7 @@ use std::time::{Duration, Instant}; use tracing::{debug, trace}; use crate::core::PackageIdSpec; +use crate::core::dependency::DepKind; use crate::core::{Dependency, PackageId, Registry, Summary}; use crate::util::context::GlobalContext; use crate::util::errors::CargoResult; @@ -126,6 +127,7 @@ pub fn resolve( version_prefs: &VersionPreferences, resolve_version: ResolveVersion, gctx: Option<&GlobalContext>, + implicit_builtin_deps: &[Dependency], ) -> CargoResult { let first_version = match gctx { Some(config) if config.cli_unstable().direct_minimal_versions => { @@ -133,7 +135,8 @@ pub fn resolve( } _ => None, }; - let mut registry = RegistryQueryer::new(registry, replacements, version_prefs); + let mut registry = + RegistryQueryer::new(registry, replacements, version_prefs, implicit_builtin_deps); // Global cache of the 
reasons for each time we backtrack. let mut past_conflicting_activations = conflict_cache::ConflictCache::new(); @@ -239,7 +242,7 @@ fn activate_deps_loop( while let Some((just_here_for_the_error_messages, frame)) = remaining_deps.pop_most_constrained() { - let (mut parent, (mut dep, candidates, mut features)) = frame; + let (mut parent, siblings_inject_builtins, (mut dep, candidates, mut features)) = frame; // If we spend a lot of time here (we shouldn't in most cases) then give // a bit of a visual indicator as to what we're doing. @@ -391,12 +394,18 @@ fn activate_deps_loop( }; let pid = candidate.package_id(); + // The deps frame inject_builtins field is a baseline for all siblings + // We shouldn't inject builtins for a builtin dep, nor for build-dependencies + let inject_builtins = siblings_inject_builtins + && !dep.source_id().is_builtin() + && dep.kind() != DepKind::Build; let opts = ResolveOpts { dev_deps: false, features: RequestedFeatures::DepFeatures { features: Rc::clone(&features), uses_default_features: dep.uses_default_features(), }, + inject_builtins, }; trace!( "{}[{}]>{} trying {}", @@ -687,6 +696,7 @@ fn activate( let frame = DepsFrame { parent: candidate, just_for_error_messages: false, + inject_builtins: opts.inject_builtins, remaining_siblings: RcVecIter::new(Rc::clone(deps)), }; Ok(Some((frame, now.elapsed()))) diff --git a/src/cargo/core/resolver/types.rs b/src/cargo/core/resolver/types.rs index 345b6ccbdd1..a3a8034ec69 100644 --- a/src/cargo/core/resolver/types.rs +++ b/src/cargo/core/resolver/types.rs @@ -138,7 +138,7 @@ impl ResolveBehavior { } } -/// Options for how the resolve should work. +/// Options for how a Summary should be activated during the resolve. #[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct ResolveOpts { /// Whether or not dev-dependencies should be included. @@ -148,6 +148,10 @@ pub struct ResolveOpts { pub dev_deps: bool, /// Set of features requested on the command-line. 
pub features: RequestedFeatures, + /// Whether or not to inject builtin dependencies. Host deps like proc_macros and build scripts + /// should not use build-std, so therefore build-dependencies and transitive + /// dependencies via a build-dependencies do not have builtin dependencies. + pub inject_builtins: bool, } impl ResolveOpts { @@ -156,11 +160,16 @@ impl ResolveOpts { ResolveOpts { dev_deps: true, features: RequestedFeatures::CliFeatures(CliFeatures::new_all(true)), + inject_builtins: true, } } - pub fn new(dev_deps: bool, features: RequestedFeatures) -> ResolveOpts { - ResolveOpts { dev_deps, features } + pub fn new(dev_deps: bool, features: RequestedFeatures, inject_builtins: bool) -> ResolveOpts { + ResolveOpts { + dev_deps, + features, + inject_builtins, + } } } @@ -220,6 +229,7 @@ impl PackageId { pub struct DepsFrame { pub parent: Summary, pub just_for_error_messages: bool, + pub inject_builtins: bool, pub remaining_siblings: RcVecIter, } @@ -296,7 +306,7 @@ impl RemainingDeps { self.data.insert((x, insertion_time)); self.time += 1; } - pub fn pop_most_constrained(&mut self) -> Option<(bool, (Summary, DepInfo))> { + pub fn pop_most_constrained(&mut self) -> Option<(bool, (Summary, bool, DepInfo))> { while let Some((mut deps_frame, insertion_time)) = self.data.remove_min() { let just_here_for_the_error_messages = deps_frame.just_for_error_messages; @@ -306,8 +316,12 @@ impl RemainingDeps { let sibling = deps_frame.remaining_siblings.iter().next().cloned(); if let Some(sibling) = sibling { let parent = Summary::clone(&deps_frame.parent); + let inject_builtins = deps_frame.inject_builtins; self.data.insert((deps_frame, insertion_time)); - return Some((just_here_for_the_error_messages, (parent, sibling))); + return Some(( + just_here_for_the_error_messages, + (parent, inject_builtins, sibling), + )); } } None diff --git a/src/cargo/core/source_id.rs b/src/cargo/core/source_id.rs index dade72b5a4b..fc9f1e3da3b 100644 --- a/src/cargo/core/source_id.rs +++ 
b/src/cargo/core/source_id.rs @@ -1,6 +1,7 @@ use crate::core::GitReference; use crate::core::PackageId; use crate::core::SourceKind; +use crate::sources::BuiltinSource; use crate::sources::registry::CRATES_IO_HTTP_INDEX; use crate::sources::source::Source; use crate::sources::{CRATES_IO_DOMAIN, CRATES_IO_INDEX, CRATES_IO_REGISTRY, DirectorySource}; @@ -140,6 +141,23 @@ impl SourceId { } } + /// Converts a path-based SourceId to a Builtin + pub fn as_builtin(&self) -> CargoResult { + if self.is_builtin() { + Ok(self.clone()) + } else if !self.is_path() { + Err(anyhow::format_err!( + "Attempted to convert non-path SourceId `{}` to Builtin", + self.to_string() + )) + } else { + Ok(SourceId::wrap(SourceIdInner { + kind: SourceKind::Builtin, + ..self.inner.clone() + })) + } + } + /// Parses a source URL and returns the corresponding ID. /// /// ## Example @@ -176,6 +194,10 @@ impl SourceId { let url = url.into_url()?; SourceId::new(SourceKind::Path, url, None) } + "builtin" => { + let url = url.into_url()?; + SourceId::new(SourceKind::Builtin, url, None) + } kind => Err(anyhow::format_err!("unsupported source protocol: {}", kind)), } } @@ -387,6 +409,10 @@ impl SourceId { matches!(self.inner.kind, SourceKind::Git(_)) } + pub fn is_builtin(self) -> bool { + matches!(self.inner.kind, SourceKind::Builtin) + } + /// Creates an implementation of `Source` corresponding to this ID. /// /// * `yanked_whitelist` --- Packages allowed to be used, even if they are yanked. 
@@ -409,6 +435,14 @@ impl SourceId { } Ok(Box::new(PathSource::new(&path, self, gctx))) } + SourceKind::Builtin => { + let path = self + .inner + .url + .to_file_path() + .expect("builtin sources should not be remote"); + Ok(Box::new(BuiltinSource::from_path(&path, self, gctx))) + } SourceKind::Registry | SourceKind::SparseRegistry => Ok(Box::new( RegistrySource::remote(self, yanked_whitelist, gctx)?, )), @@ -679,6 +713,7 @@ impl fmt::Display for SourceId { } SourceKind::LocalRegistry => write!(f, "registry `{}`", url_display(&self.inner.url)), SourceKind::Directory => write!(f, "dir {}", url_display(&self.inner.url)), + SourceKind::Builtin => write!(f, "builtin {}", url_display(&self.inner.url)), } } } diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs index 7d9dd83fc2e..a54b38ff6c8 100644 --- a/src/cargo/core/summary.rs +++ b/src/cargo/core/summary.rs @@ -173,6 +173,29 @@ impl Summary { }; me.map_dependencies(|dep| dep.map_source(to_replace, replace_with)) } + + // Converts the Summary into a dummy summary to represent a builtin package during the user's resolve + pub fn to_opaque_builtin_summary(self) -> CargoResult { + let pid = self.package_id(); + let builtin_sid = pid.source_id().as_builtin()?; + + Ok(Summary { + inner: Arc::new(Inner { + package_id: PackageId::with_source_id(pid, builtin_sid), + // Builtins are opaque dependencies - the real deps are inserted during unit generation + dependencies: vec![], + // Features are ignored during unit generation - a future patch implementing + // features for explicit builtin dependencies will probably want to change this. + features: Arc::new(BTreeMap::new()), + // TODO: Checksums are checked later, right? 
+ checksum: None, + links: self.links(), + // Builtins are always valid for our current toolchain + rust_version: None, + pubtime: None, + }), + }) + } } impl PartialEq for Summary { diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs index 4e3bdc5ba18..38b73256f46 100644 --- a/src/cargo/core/workspace.rs +++ b/src/cargo/core/workspace.rs @@ -41,7 +41,9 @@ use crate::lints::rules::unused_workspace_dependencies; use crate::lints::rules::unused_workspace_package_fields; use crate::ops; use crate::ops::lockfile::LOCKFILE_NAME; -use crate::sources::{CRATES_IO_INDEX, CRATES_IO_REGISTRY, PathSource, SourceConfigMap}; +use crate::sources::{ + BuiltinSource, CRATES_IO_INDEX, CRATES_IO_REGISTRY, PathSource, SourceConfigMap, +}; use crate::util::context; use crate::util::context::{FeatureUnification, Value}; use crate::util::edit_distance; @@ -148,6 +150,9 @@ pub struct Workspace<'gctx> { /// Local overlay configuration. See [`crate::sources::overlay`]. local_overlays: HashMap, + + /// Whether this is the standard library's workspace + is_std: bool, } // Separate structure for tracking loaded packages (to avoid loading anything @@ -283,6 +288,7 @@ impl<'gctx> Workspace<'gctx> { resolve_publish_time: None, custom_metadata: None, local_overlays: HashMap::new(), + is_std: false, } } @@ -322,6 +328,30 @@ impl<'gctx> Workspace<'gctx> { Ok(ws) } + /// Used for discovering standard library crates, this workspace constructor is very minimal + /// TODO: This constructor should be expanded so it can be passed to the main std resolve in + /// order to avoid parsing manifests twice + pub fn new_standard_library( + manifest_path: &Path, + gctx: &'gctx GlobalContext, + ) -> CargoResult> { + let mut ws = Workspace::new_default(manifest_path.to_path_buf(), gctx); + ws.is_ephemeral = true; + ws.is_std = true; + + if manifest_path.is_relative() { + bail!( + "manifest_path:{:?} is not an absolute path. 
Please provide an absolute path.", + manifest_path + ) + } else { + ws.root_manifest = ws.find_root(manifest_path)?; + } + + ws.find_members()?; + Ok(ws) + } + /// Reloads the workspace. /// /// This is useful if the workspace has been updated, such as with `cargo @@ -656,6 +686,19 @@ impl<'gctx> Workspace<'gctx> { }) } + /// Drops the Workspace and returns its members. Useful for the resolver, which only creates + /// an ephemeral workspace to look up the standard library workspace members + pub fn into_members(self) -> Vec { + let mut packages = self.packages.packages; + self.members + .iter() + .filter_map(move |path| match packages.remove(path) { + Some(MaybePackage::Package(p)) => Some(p), + _ => None, + }) + .collect() + } + /// Returns an iterator over default packages in this workspace pub fn default_members<'a>(&'a self) -> impl Iterator { let packages = &self.packages; @@ -701,6 +744,15 @@ impl<'gctx> Workspace<'gctx> { self.is_ephemeral } + pub fn is_std(&self) -> bool { + self.is_std + } + + pub fn set_is_std(&mut self, is_std: bool) -> &mut Workspace<'gctx> { + self.is_std = is_std; + self + } + pub fn require_optional_deps(&self) -> bool { self.require_optional_deps } @@ -1288,6 +1340,26 @@ impl<'gctx> Workspace<'gctx> { } } + /// Similar to the regular `preload` method, but preloads packages as `builtins` in order to + /// satisfy builtin dependencies later. 
+ pub fn preload_builtins(&self, registry: &mut PackageRegistry<'gctx>) { + for pkg in self.packages.packages.values() { + let mut pkg = match *pkg { + MaybePackage::Package(ref p) => p.clone(), + MaybePackage::Virtual(_) => continue, + }; + let summary = pkg + .summary() + .clone() + .to_opaque_builtin_summary() + .expect("workspace members should be path sources"); + *pkg.manifest_mut().summary_mut() = summary; + let src = + BuiltinSource::preload_with(pkg, self.gctx).expect("builtin summary can be built"); + registry.add_preloaded(Box::new(src)); + } + } + pub fn emit_warnings(&self) -> CargoResult<()> { let mut first_emitted_error = None; diff --git a/src/cargo/ops/cargo_add/mod.rs b/src/cargo/ops/cargo_add/mod.rs index 90ca258b9d7..a1d982d694a 100644 --- a/src/cargo/ops/cargo_add/mod.rs +++ b/src/cargo/ops/cargo_add/mod.rs @@ -413,7 +413,7 @@ fn resolve_dependency( } selected } else { - let mut source = crate::sources::PathSource::new(&src.path, src.source_id()?, gctx); + let source = crate::sources::PathSource::new(&src.path, src.source_id()?, gctx); let package = source.root_package()?; let mut selected = Dependency::from(package.summary()); if let Some(Source::Path(selected_src)) = &mut selected.source { diff --git a/src/cargo/ops/cargo_package/mod.rs b/src/cargo/ops/cargo_package/mod.rs index d427f1fabd1..0a36b50479e 100644 --- a/src/cargo/ops/cargo_package/mod.rs +++ b/src/cargo/ops/cargo_package/mod.rs @@ -779,6 +779,7 @@ fn build_lock( None, &[], true, + None, )?; let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?; diff --git a/src/cargo/ops/cargo_package/verify.rs b/src/cargo/ops/cargo_package/verify.rs index 3871772a092..fab224573a6 100644 --- a/src/cargo/ops/cargo_package/verify.rs +++ b/src/cargo/ops/cargo_package/verify.rs @@ -62,7 +62,7 @@ pub fn run_verify( // Manufacture an ephemeral workspace to ensure that even if the top-level // package has a workspace we can still build our new crate. 
let id = SourceId::for_path(&dst)?; - let mut src = PathSource::new(&dst, id, ws.gctx()); + let src = PathSource::new(&dst, id, ws.gctx()); let new_pkg = src.root_package()?; let pkg_fingerprint = hash_all(&dst)?; diff --git a/src/cargo/ops/cargo_update.rs b/src/cargo/ops/cargo_update.rs index 0c47dcb3fdf..ee32fb94374 100644 --- a/src/cargo/ops/cargo_update.rs +++ b/src/cargo/ops/cargo_update.rs @@ -47,6 +47,7 @@ pub fn generate_lockfile(ws: &Workspace<'_>) -> CargoResult<()> { None, &[], true, + None, )?; ops::write_pkg_lockfile(ws, &mut resolve)?; print_lockfile_changes(ws, previous_resolve, &resolve, &mut registry)?; @@ -87,6 +88,7 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes None, &[], true, + None, )? } } @@ -177,6 +179,7 @@ pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoRes Some(&keep), &[], true, + None, )?; print_lockfile_updates( @@ -1106,7 +1109,7 @@ impl PackageDiff { pub fn new(resolve: &Resolve) -> impl Iterator { let mut changes = BTreeMap::new(); let empty = Self::default(); - for dep in resolve.iter() { + for dep in resolve.iter().filter(|id| !id.source_id().is_builtin()) { changes .entry(Self::key(dep)) .or_insert_with(|| empty.clone()) @@ -1156,14 +1159,17 @@ impl PackageDiff { // Map `(package name, package source)` to `(removed versions, added versions)`. 
let mut changes = BTreeMap::new(); let empty = Self::default(); - for dep in previous_resolve.iter() { + for dep in previous_resolve + .iter() + .filter(|id| !id.source_id().is_builtin()) + { changes .entry(Self::key(dep)) .or_insert_with(|| empty.clone()) .removed .push(dep); } - for dep in resolve.iter() { + for dep in resolve.iter().filter(|id| !id.source_id().is_builtin()) { changes .entry(Self::key(dep)) .or_insert_with(|| empty.clone()) diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index 722cefe5713..0ff65b821c9 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -63,6 +63,8 @@ use crate::core::PackageIdSpecQuery; use crate::core::PackageSet; use crate::core::SourceId; use crate::core::Workspace; +use crate::core::compiler::standard_lib::detect_sysroot_src_path; +use crate::core::compiler::standard_lib::std_crates; use crate::core::compiler::{CompileKind, RustcTargetData}; use crate::core::registry::{LockedPatchDependency, PackageRegistry}; use crate::core::resolver::features::{ @@ -85,6 +87,7 @@ use cargo_util_terminal::report::Group; use cargo_util_terminal::report::Level; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; +use std::path::PathBuf; use std::rc::Rc; use tracing::{debug, trace}; @@ -133,7 +136,15 @@ version. This may also occur with an optional dependency that is not enabled."; /// `package`, which don't specify any options or features. pub fn resolve_ws<'a>(ws: &Workspace<'a>, dry_run: bool) -> CargoResult<(PackageSet<'a>, Resolve)> { let mut registry = ws.package_registry()?; - let resolve = resolve_with_registry(ws, &mut registry, dry_run)?; + //TODO: This is overkill just to get the sysroot + let builtins_root = match (ws.is_std(), &ws.gctx().cli_unstable().build_std) { + (false, Some(_)) => { + let target_data = RustcTargetData::new(ws, &[])?; + Some(&detect_sysroot_src_path(&target_data)?) 
+ } + (_, _) => None, + }; + let resolve = resolve_with_registry(ws, &mut registry, dry_run, builtins_root)?; let packages = get_resolved_packages(&resolve, registry)?; Ok((packages, resolve)) } @@ -173,6 +184,11 @@ pub fn resolve_ws_with_opts<'gctx>( .cloned() .collect(); let specs = &specs[..]; + let builtins_root = match (ws.is_std(), &ws.gctx().cli_unstable().build_std) { + (false, Some(_)) => Some(detect_sysroot_src_path(&target_data)?), + _ => None, + }; + let builtins_root = builtins_root.as_ref(); let mut registry = ws.package_registry()?; let (resolve, resolved_with_overrides) = if ws.ignore_lock() { let add_patches = true; @@ -186,13 +202,14 @@ pub fn resolve_ws_with_opts<'gctx>( None, specs, add_patches, + builtins_root, )?; ops::print_lockfile_changes(ws, None, &resolved_with_overrides, &mut registry)?; (resolve, resolved_with_overrides) } else if ws.require_optional_deps() { // First, resolve the root_package's *listed* dependencies, as well as // downloading and updating all remotes and such. - let resolve = resolve_with_registry(ws, &mut registry, dry_run)?; + let resolve = resolve_with_registry(ws, &mut registry, dry_run, builtins_root)?; // No need to add patches again, `resolve_with_registry` has done it. 
let add_patches = false; @@ -244,6 +261,7 @@ None, specs, add_patches, + builtins_root, )?; (Some(resolve), resolved_with_overrides) } else { @@ -258,6 +276,7 @@ None, specs, add_patches, + builtins_root, )?; // Skipping `print_lockfile_changes` as there are cases where this prints irrelevant // information @@ -352,6 +371,7 @@ fn resolve_with_registry<'gctx>( ws: &Workspace<'gctx>, registry: &mut PackageRegistry<'gctx>, dry_run: bool, + builtins_root: Option<&PathBuf>, ) -> CargoResult<Resolve> { let prev = ops::load_pkg_lockfile(ws)?; let mut resolve = resolve_with_previous( @@ -363,6 +383,7 @@ None, &[], true, + builtins_root, )?; let print = if !ws.is_ephemeral() && ws.require_optional_deps() { @@ -410,6 +431,7 @@ pub fn resolve_with_previous<'gctx>( keep_previous: Option<&dyn Fn(PackageId) -> bool>, specs: &[PackageIdSpec], register_patches: bool, + builtins_root: Option<&PathBuf>, ) -> CargoResult<Resolve> { // We only want one Cargo at a time resolving a crate graph since this can // involve a lot of frobbing of the global caches. @@ -427,9 +449,24 @@ registry.add_sources(Some(member.package_id().source_id()))?; } + let implicit_builtin_deps = if let Some(p) = builtins_root { + let builtin_ws = Workspace::new_standard_library(&p.join("Cargo.toml"), ws.gctx())?; + // As above, preloading packages avoids parsing manifests multiple times + builtin_ws.preload_builtins(registry); + let summaries = get_builtin_summaries(builtin_ws)?; + summaries + .into_iter() + .map(|s| { + registry.add_sources(Some(s.package_id().source_id()))?; + Dependency::new_for_builtin_summary(s) + }) + .collect::<CargoResult<Vec<_>>>()? + } else { + vec![] + }; + // Try to keep all from previous resolve if no instruction given. let keep_previous = keep_previous.unwrap_or(&|_| true); - // While registering patches, we will record preferences for particular versions // of various packages. 
let mut version_prefs = VersionPreferences::default(); @@ -493,6 +530,7 @@ summary, ResolveOpts { dev_deps, + inject_builtins: builtins_root.is_some(), features: RequestedFeatures::CliFeatures(features), }, ) @@ -509,6 +547,7 @@ &version_prefs, ResolveVersion::with_rust_version(ws.lowest_rust_version()), Some(ws.gctx()), + &implicit_builtin_deps, )?; let patches = registry.patches().values().flat_map(|v| v.iter()); @@ -527,6 +566,36 @@ Ok(resolved) } +fn get_builtin_summaries<'gctx>(ws: Workspace<'gctx>) -> CargoResult<Vec<Summary>> { + // Test is injected during unit generation + let crates = ws .gctx() .cli_unstable() .build_std .as_ref() .expect("build-std is enabled"); + + // In general, we don't know exactly what standard library packages to depend on at this point. + // For example, we may be building for multiple targets which default to different set of + // builtin crates, but the resolve is target-independent. + // We also can't tell if a Summary is actually a proc-macro (host-only) dependency which doesn't + // support build-std at all. This is handled properly by unit generation. + let dep_names: HashSet<_> = std_crates( + crates, + "std", + &[], // Units are only used to work out if test needs to be inserted, which we handle during + // unit generation + ) + .into_iter() + .collect(); + + ws.into_members() + .into_iter() + .filter(|p| dep_names.contains(p.name().as_str())) + .map(|p| p.summary().clone().to_opaque_builtin_summary()) + .collect::<CargoResult<Vec<_>>>() +} + /// Read the `paths` configuration variable to discover all path overrides that /// have been configured. 
#[tracing::instrument(skip_all)] diff --git a/src/cargo/sources/builtin.rs b/src/cargo/sources/builtin.rs new file mode 100644 index 00000000000..70fc806db93 --- /dev/null +++ b/src/cargo/sources/builtin.rs @@ -0,0 +1,126 @@ +use std::{cell::RefCell, path::Path}; + +use crate::{ + CargoResult, GlobalContext, + core::{Dependency, Package, PackageId, SourceId, Summary}, + sources::source::{MaybePackage, QueryKind, Source}, + sources::{IndexSummary, PathSource}, +}; + +/// A builtin source represents standard library packages used in build-std, which are "built into" +/// the toolchain. +/// +/// It is very similar to a path source, but because all builtin dependencies are opaque it +/// returns an opaque summary, with no dependencies, when queried. +pub struct BuiltinSource<'gctx> { + /// The unique identifier for this source + source_id: SourceId, + /// The underlying path source which discovers packages + path_source: PathSource<'gctx>, + /// The cached opaque summary + opaque_summary: RefCell<Option<Summary>>, +} + +impl<'gctx> BuiltinSource<'gctx> { + pub fn from_path(path: &Path, source_id: SourceId, gctx: &'gctx GlobalContext) -> Self { + assert!( + source_id.is_builtin(), + "source `{source_id}` is not a builtin" + ); + let path_source = PathSource::new(path, source_id, gctx); + Self { + source_id, + path_source, + opaque_summary: RefCell::new(None), + } + } + + pub fn preload_with(pkg: Package, gctx: &'gctx GlobalContext) -> CargoResult<Self> { + assert!(pkg.package_id().source_id().is_builtin()); + let summary = pkg.summary().clone(); + let source_id = summary.source_id(); + let inner = PathSource::preload_with(pkg, gctx); + Ok(Self { + source_id, + path_source: inner, + opaque_summary: RefCell::new(Some(summary)), + }) + } + + fn load(&self) -> CargoResult<()> { + let mut summary = self.opaque_summary.borrow_mut(); + if summary.is_none() { + let p = self.path_source.root_package()?; + *summary = Some(p.summary().clone().to_opaque_builtin_summary()?); + } + Ok(()) + } +} + 
+#[async_trait::async_trait(?Send)] +impl<'gctx> Source for BuiltinSource<'gctx> { + /// All builtin dependencies are opaque, so this will return a summary without any dependencies when queried + async fn query( + &self, + dep: &Dependency, + kind: QueryKind, + f: &mut dyn FnMut(IndexSummary), + ) -> CargoResult<()> { + self.load()?; + if let Some(s) = self.opaque_summary.borrow().as_ref() { + let matched = match kind { + QueryKind::Exact | QueryKind::RejectedVersions => dep.matches(s), + QueryKind::AlternativeNames => true, + QueryKind::Normalized => dep.matches(s), + }; + if matched { + f(IndexSummary::Candidate(s.clone())); + } + } + Ok(()) + } + + fn supports_checksums(&self) -> bool { + self.path_source.supports_checksums() + } + + fn requires_precise(&self) -> bool { + self.path_source.requires_precise() + } + + fn source_id(&self) -> SourceId { + self.source_id + } + + async fn download(&self, id: PackageId) -> CargoResult<MaybePackage> { + self.path_source.download(id).await + } + + async fn finish_download(&self, id: PackageId, data: Vec<u8>) -> CargoResult<Package> { + self.path_source.finish_download(id, data).await + } + + fn fingerprint(&self, pkg: &Package) -> CargoResult<String> { + self.path_source.fingerprint(pkg) + } + + fn describe(&self) -> String { + self.source_id.to_string() + } + + fn add_to_yanked_whitelist(&self, _pkgs: &[PackageId]) { + // Builtin source cannot be yanked + } + + async fn is_yanked(&self, _pkg: PackageId) -> CargoResult<bool> { + Ok(false) + } + + fn invalidate_cache(&self) { + // Builtin source has no local cache. 
+ } + + fn set_quiet(&mut self, _quiet: bool) { + // Builtin source does not display status + } +} diff --git a/src/cargo/sources/directory.rs b/src/cargo/sources/directory.rs index 4d49d581889..4aa2b9d2ed1 100644 --- a/src/cargo/sources/directory.rs +++ b/src/cargo/sources/directory.rs @@ -133,7 +133,7 @@ impl<'gctx> DirectorySource<'gctx> { continue; } - let mut src = PathSource::new(&path, self.source_id, self.gctx); + let src = PathSource::new(&path, self.source_id, self.gctx); src.load()?; let mut pkg = src.root_package()?; diff --git a/src/cargo/sources/mod.rs b/src/cargo/sources/mod.rs index 5d92c92770b..999d4cf2b58 100644 --- a/src/cargo/sources/mod.rs +++ b/src/cargo/sources/mod.rs @@ -26,6 +26,7 @@ //! //! [source replacement]: https://doc.rust-lang.org/nightly/cargo/reference/source-replacement.html +pub use self::builtin::BuiltinSource; pub use self::config::SourceConfigMap; pub use self::directory::DirectorySource; pub use self::git::GitSource; @@ -37,6 +38,7 @@ pub use self::registry::{ }; pub use self::replaced::ReplacedSource; +pub mod builtin; pub mod config; pub mod directory; pub mod git; diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index 78dbe2027ee..443f1649586 100644 --- a/src/cargo/sources/path.rs +++ b/src/cargo/sources/path.rs @@ -69,7 +69,7 @@ impl<'gctx> PathSource<'gctx> { } /// Gets the package on the root path. 
- pub fn root_package(&mut self) -> CargoResult { + pub fn root_package(&self) -> CargoResult { trace!("root_package; source={:?}", self); self.load()?; diff --git a/tests/testsuite/mock-std/library/alloc/Cargo.toml b/tests/testsuite/mock-std/library/alloc/Cargo.toml index dc965abffac..bc2cdefaa1b 100644 --- a/tests/testsuite/mock-std/library/alloc/Cargo.toml +++ b/tests/testsuite/mock-std/library/alloc/Cargo.toml @@ -6,3 +6,4 @@ edition = "2018" [dependencies] registry-dep-using-core = { version = "*", features = ['mockbuild'] } +core = { path = "../core" } diff --git a/tests/testsuite/mock-std/library/std/Cargo.toml b/tests/testsuite/mock-std/library/std/Cargo.toml index 2a31b514709..694ca7216f2 100644 --- a/tests/testsuite/mock-std/library/std/Cargo.toml +++ b/tests/testsuite/mock-std/library/std/Cargo.toml @@ -7,6 +7,7 @@ edition = "2018" [dependencies] registry-dep-using-alloc = { version = "*", features = ['mockbuild'] } dep_test = { path = "../../dep_test" } +alloc = { path = "../alloc/" } [features] feature1 = [] diff --git a/tests/testsuite/standard_lib.rs b/tests/testsuite/standard_lib.rs index acccdd4699d..77707bf9121 100644 --- a/tests/testsuite/standard_lib.rs +++ b/tests/testsuite/standard_lib.rs @@ -246,7 +246,7 @@ fn basic() { } #[cargo_test(build_std_mock)] -fn shared_std_dependency_rebuild() { +fn shared_std_dependency() { let manifest_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); let setup = setup(); let p = project() @@ -261,7 +261,11 @@ fn shared_std_dependency_rebuild() { [build-dependencies] dep_test = {{ path = \"{}/tests/testsuite/mock-std/dep_test\" }} + + [dependencies] + dep_test = {{ path = \"{}/tests/testsuite/mock-std/dep_test\" }} ", + manifest_dir.replace('\\', "/"), manifest_dir.replace('\\', "/") ) .as_str(), @@ -284,27 +288,38 @@ fn shared_std_dependency_rebuild() { ) .build(); + // One build each for the: + // - Build-std build + // - Build-time user crate dependency (with the sysroot std) + // - Runtime user crate 
dependency (with the build-std std) p.cargo("build -v") .build_std(&setup) - .target_host() .with_stderr_data(str![[r#" ... [RUNNING] `[..] rustc --crate-name dep_test [..]` ... [RUNNING] `[..] rustc --crate-name dep_test [..]` ... +[RUNNING] `[..] rustc --crate-name dep_test [..]` +... "#]]) .run(); + p.cargo("clean").run(); + + // Sanity check for `rebuild_unit_graph_shared` p.cargo("build -v") .build_std(&setup) - .with_stderr_does_not_contain(str![[r#" - ... - [RUNNING] `[..] rustc --crate-name dep_test [..]` - ... - [RUNNING] `[..] rustc --crate-name dep_test [..]` - ... - "#]]) + .target_host() + .with_stderr_data(str![[r#" +... +[RUNNING] `[..] rustc --crate-name dep_test [..]` +... +[RUNNING] `[..] rustc --crate-name dep_test [..]` +... +[RUNNING] `[..] rustc --crate-name dep_test [..]` +... +"#]]) .run(); } @@ -384,6 +399,55 @@ fn check_core() { .run(); } +#[cargo_test(build_std_mock)] +fn build_std_does_not_change_lockfile() { + let setup = setup(); + let p = project().file("src/lib.rs", "").build(); + + p.cargo("generate-lockfile").run(); + let lockfile = p.read_lockfile(); + + p.cargo("build").build_std(&setup).run(); + + let build_std_lockfile = p.read_lockfile(); + assert_eq!(lockfile, build_std_lockfile); + assert!(!build_std_lockfile.contains("name = \"core\"")); + assert!(!build_std_lockfile.contains("name = \"std\"")); + assert!(!build_std_lockfile.contains("name = \"alloc\"")); +} + +#[cargo_test(build_std_mock)] +fn builtins_do_not_show_in_status_messages() { + let setup = setup(); + let p = project() + .file("src/lib.rs", "#![no_std]") + .file( + "Cargo.toml", + r#" + [package] + name = "foo" + version = "0.1.0" + + [dependencies] + registry-dep-using-core = "1.0" + "#, + ) + .build(); + + // New lockfile + p.cargo("c") + .build_std_arg(&setup, "core") + .with_stderr_contains("[LOCKING] 1 package [..]") + .run(); + + // Updating lockfile + p.cargo("add registry-dep-using-alloc") + .build_std_arg(&setup, "core,alloc") + 
.with_stderr_contains("[ADDING] registry-dep-using-alloc [..]") + .with_stderr_contains("[LOCKING] 1 package [..]") + .run(); +} + #[cargo_test(build_std_mock)] fn build_std_with_no_arg_for_core_only_target() { let target = "aarch64-unknown-none";