diff --git a/ci/azure-test-all.yml b/ci/azure-test-all.yml index 626858431e8..ad6c2fed59d 100644 --- a/ci/azure-test-all.yml +++ b/ci/azure-test-all.yml @@ -16,6 +16,11 @@ steps: - bash: rustup component add clippy || echo "clippy not available" displayName: "Install clippy (maybe)" +# This is needed for standard library tests. +- bash: rustup component add rust-src + condition: and(succeeded(), eq(variables['TOOLCHAIN'], 'nightly')) + displayName: "Install rust-src (maybe)" + # Deny warnings on CI to avoid warnings getting into the codebase, and note the # `force-system-lib-on-osx` which is intended to fix compile issues on OSX where # compiling curl from source on OSX yields linker errors on Azure. diff --git a/src/cargo/core/compiler/build_config.rs b/src/cargo/core/compiler/build_config.rs index 3c3350cdb08..958f8c1cab5 100644 --- a/src/cargo/core/compiler/build_config.rs +++ b/src/cargo/core/compiler/build_config.rs @@ -156,7 +156,7 @@ pub enum CompileMode { /// a test. Check { test: bool }, /// Used to indicate benchmarks should be built. This is not used in - /// `Target`, because it is essentially the same as `Test` (indicating + /// `Unit`, because it is essentially the same as `Test` (indicating /// `--test` should be passed to rustc) and by using `Test` instead it /// allows some de-duping of Units to occur. Bench, @@ -221,6 +221,14 @@ impl CompileMode { } } + /// Returns `true` if this is something that passes `--test` to rustc. + pub fn is_rustc_test(self) -> bool { + match self { + CompileMode::Test | CompileMode::Bench | CompileMode::Check { test: true } => true, + _ => false, + } + } + /// Returns `true` if this is the *execution* of a `build.rs` script. pub fn is_run_custom_build(self) -> bool { self == CompileMode::RunCustomBuild diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs index 7eab9dea45f..9cb34e3390e 100644 --- a/src/cargo/core/compiler/build_context/mod.rs +++ b/src/cargo/core/compiler/build_context/mod.rs @@ -8,7 +8,7 @@ use crate::core::compiler::unit::UnitInterner; use crate::core::compiler::{BuildConfig, BuildOutput, Kind, Unit}; use crate::core::profiles::Profiles; use crate::core::{Dependency, Workspace}; -use crate::core::{PackageId, PackageSet, Resolve}; +use crate::core::{PackageId, PackageSet}; use crate::util::errors::CargoResult; use crate::util::{profile, Cfg, Config, Rustc}; @@ -26,8 +26,6 @@ pub struct BuildContext<'a, 'cfg> { pub ws: &'a Workspace<'cfg>, /// The cargo configuration. pub config: &'cfg Config, - /// The dependency graph for our build. - pub resolve: &'a Resolve, pub profiles: &'a Profiles, pub build_config: &'a BuildConfig, /// Extra compiler args for either `rustc` or `rustdoc`. 
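Aside on the new `CompileMode::is_rustc_test` helper added above: it groups the three modes that cause `--test` to be passed to rustc. A minimal sketch of the behavior implied by its match arms (illustrative only, not part of the patch; the `cargo::core::compiler::CompileMode` path assumes cargo's public library API):

```rust
use cargo::core::compiler::CompileMode;

fn main() {
    // Modes that pass `--test` to rustc:
    assert!(CompileMode::Test.is_rustc_test());
    assert!(CompileMode::Bench.is_rustc_test());
    assert!(CompileMode::Check { test: true }.is_rustc_test());
    // Modes that do not:
    assert!(!CompileMode::Build.is_rustc_test());
    assert!(!CompileMode::Check { test: false }.is_rustc_test());
}
```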
@@ -48,7 +46,6 @@ pub struct BuildContext<'a, 'cfg> { impl<'a, 'cfg> BuildContext<'a, 'cfg> { pub fn new( ws: &'a Workspace<'cfg>, - resolve: &'a Resolve, packages: &'a PackageSet<'cfg>, config: &'cfg Config, build_config: &'a BuildConfig, @@ -75,7 +72,6 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> { Ok(BuildContext { ws, - resolve, packages, config, rustc, @@ -90,16 +86,6 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> { }) } - pub fn extern_crate_name(&self, unit: &Unit<'a>, dep: &Unit<'a>) -> CargoResult { - self.resolve - .extern_crate_name(unit.pkg.package_id(), dep.pkg.package_id(), dep.target) - } - - pub fn is_public_dependency(&self, unit: &Unit<'a>, dep: &Unit<'a>) -> bool { - self.resolve - .is_public_dep(unit.pkg.package_id(), dep.pkg.package_id()) - } - /// Whether a dependency should be compiled for the host or target platform, /// specified by `Kind`. pub fn dep_platform_activated(&self, dep: &Dependency, kind: Kind) -> bool { diff --git a/src/cargo/core/compiler/compilation.rs b/src/cargo/core/compiler/compilation.rs index 47c64c49775..ec5374d04be 100644 --- a/src/cargo/core/compiler/compilation.rs +++ b/src/cargo/core/compiler/compilation.rs @@ -6,7 +6,7 @@ use std::path::PathBuf; use semver::Version; use super::BuildContext; -use crate::core::{Edition, Package, PackageId, Target}; +use crate::core::{Edition, InternedString, Package, PackageId, Target}; use crate::util::{self, join_paths, process, CargoResult, CfgExpr, Config, ProcessBuilder}; pub struct Doctest { @@ -16,7 +16,7 @@ pub struct Doctest { pub target: Target, /// Extern dependencies needed by `rustdoc`. The path is the location of /// the compiled lib. - pub deps: Vec<(String, PathBuf)>, + pub deps: Vec<(InternedString, PathBuf)>, } /// A structure returning the result of a compilation. diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/context/compilation_files.rs index f7e0ee7cfb8..743d7d821c5 100644 --- a/src/cargo/core/compiler/context/compilation_files.rs +++ b/src/cargo/core/compiler/context/compilation_files.rs @@ -522,9 +522,7 @@ fn compute_metadata<'a, 'cfg>( // Also mix in enabled features to our metadata. This'll ensure that // when changing feature sets each lib is separately cached. - bcx.resolve - .features_sorted(unit.pkg.package_id()) - .hash(&mut hasher); + unit.features.hash(&mut hasher); // Mix in the target-metadata of all the dependencies of this target. 
{ diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/context/mod.rs index f82026e5281..e61fa65d98a 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/context/mod.rs @@ -1,7 +1,6 @@ #![allow(deprecated)] -use std::collections::{HashMap, HashSet}; +use std::collections::{BTreeSet, HashMap, HashSet}; use std::ffi::OsStr; -use std::fmt::Write; use std::path::PathBuf; use std::sync::{Arc, Mutex}; @@ -10,7 +9,7 @@ use jobserver::Client; use crate::core::compiler::compilation; use crate::core::compiler::Unit; -use crate::core::{Package, PackageId, Resolve}; +use crate::core::PackageId; use crate::util::errors::{CargoResult, CargoResultExt}; use crate::util::{internal, profile, Config}; @@ -19,11 +18,9 @@ use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts}; use super::fingerprint::Fingerprint; use super::job_queue::JobQueue; use super::layout::Layout; +use super::unit_dependencies::{UnitDep, UnitGraph}; use super::{BuildContext, Compilation, CompileMode, Executor, FileFlavor, Kind}; -mod unit_dependencies; -use self::unit_dependencies::build_unit_dependencies; - mod compilation_files; use self::compilation_files::CompilationFiles; pub use self::compilation_files::{Metadata, OutputFile}; @@ -51,9 +48,6 @@ pub struct Context<'a, 'cfg> { /// Linking information for each `Unit`. /// See `build_map` for details. pub build_scripts: HashMap, Arc>, - /// Used to check the `links` field in the manifest is not duplicated and - /// is used correctly. - pub links: Links, /// Job server client to manage concurrency with other processes. pub jobserver: Client, /// "Primary" packages are the ones the user selected on the command-line @@ -61,13 +55,11 @@ pub struct Context<'a, 'cfg> { /// based on the current directory and the default workspace members. primary_packages: HashSet, /// The dependency graph of units to compile. - unit_dependencies: HashMap, Vec>>, + unit_dependencies: UnitGraph<'a>, /// An abstraction of the files and directories that will be generated by /// the compilation. This is `None` until after `unit_dependencies` has /// been computed. files: Option>, - /// Cache of packages, populated when they are downloaded. - package_cache: HashMap, /// A flag indicating whether pipelining is enabled for this compilation /// session. Pipelining largely only affects the edges of the dependency @@ -82,7 +74,11 @@ pub struct Context<'a, 'cfg> { } impl<'a, 'cfg> Context<'a, 'cfg> { - pub fn new(config: &'cfg Config, bcx: &'a BuildContext<'a, 'cfg>) -> CargoResult { + pub fn new( + config: &'cfg Config, + bcx: &'a BuildContext<'a, 'cfg>, + unit_dependencies: UnitGraph<'a>, + ) -> CargoResult { // Load up the jobserver that we'll use to manage our parallelism. This // is the same as the GNU make implementation of a jobserver, and // intentionally so! It's hoped that we can interact with GNU make and @@ -112,12 +108,10 @@ impl<'a, 'cfg> Context<'a, 'cfg> { compiled: HashSet::new(), build_scripts: HashMap::new(), build_explicit_deps: HashMap::new(), - links: Links::new(), jobserver, primary_packages: HashSet::new(), - unit_dependencies: HashMap::new(), + unit_dependencies, files: None, - package_cache: HashMap::new(), rmeta_required: HashSet::new(), pipelining, }) @@ -203,18 +197,15 @@ impl<'a, 'cfg> Context<'a, 'cfg> { // pass `--extern` for rlib deps and skip out on all other // artifacts. 
         let mut doctest_deps = Vec::new();
-        for dep in self.dep_targets(unit) {
-            if dep.target.is_lib() && dep.mode == CompileMode::Build {
-                let outputs = self.outputs(&dep)?;
+        for dep in self.unit_deps(unit) {
+            if dep.unit.target.is_lib() && dep.unit.mode == CompileMode::Build {
+                let outputs = self.outputs(&dep.unit)?;
                 let outputs = outputs.iter().filter(|output| {
                     output.path.extension() == Some(OsStr::new("rlib"))
-                        || dep.target.for_host()
+                        || dep.unit.target.for_host()
                 });
                 for output in outputs {
-                    doctest_deps.push((
-                        self.bcx.extern_crate_name(unit, &dep)?,
-                        output.path.clone(),
-                    ));
+                    doctest_deps.push((dep.extern_crate_name, output.path.clone()));
                 }
             }
         }
@@ -227,7 +218,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
             });
         }
 
-        let feats = self.bcx.resolve.features(unit.pkg.package_id());
+        let feats = &unit.features;
         if !feats.is_empty() {
             self.compilation
                 .cfgs
@@ -305,7 +296,8 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
         self.primary_packages
             .extend(units.iter().map(|u| u.pkg.package_id()));
 
-        build_unit_dependencies(self, units)?;
+        self.record_units_requiring_metadata();
+
         let files = CompilationFiles::new(
             units,
             host_layout,
@@ -357,38 +349,35 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
 
     /// For a package, return all targets which are registered as dependencies
     /// for that package.
+    /// NOTE: This is deprecated, use `unit_deps` instead.
     //
     // TODO: this ideally should be `-> &[Unit<'a>]`.
     pub fn dep_targets(&self, unit: &Unit<'a>) -> Vec<Unit<'a>> {
-        self.unit_dependencies[unit].clone()
+        self.unit_dependencies[unit]
+            .iter()
+            .map(|dep| dep.unit)
+            .collect()
     }
 
-    pub fn is_primary_package(&self, unit: &Unit<'a>) -> bool {
-        self.primary_packages.contains(&unit.pkg.package_id())
+    /// Direct dependencies for the given unit.
+    pub fn unit_deps(&self, unit: &Unit<'a>) -> &[UnitDep<'a>] {
+        &self.unit_dependencies[unit]
    }
 
-    /// Gets a package for the given package ID.
-    pub fn get_package(&self, id: PackageId) -> CargoResult<&'a Package> {
-        self.package_cache
-            .get(&id)
-            .cloned()
-            .ok_or_else(|| failure::format_err!("failed to find {}", id))
+    pub fn is_primary_package(&self, unit: &Unit<'a>) -> bool {
+        self.primary_packages.contains(&unit.pkg.package_id())
     }
 
     /// Returns the list of filenames read by cargo to generate the `BuildContext`
     /// (all `Cargo.toml`, etc.).
     pub fn build_plan_inputs(&self) -> CargoResult<Vec<PathBuf>> {
-        let mut inputs = Vec::new();
-        // Note that we're using the `package_cache`, which should have been
-        // populated by `build_unit_dependencies`, and only those packages are
-        // considered as all the inputs.
-        //
-        // (Notably, we skip dev-deps here if they aren't present.)
-        for pkg in self.package_cache.values() {
-            inputs.push(pkg.manifest_path().to_path_buf());
+        // Keep sorted for consistency.
+        let mut inputs = BTreeSet::new();
+        // Note: dev-deps are skipped if they are not present in the unit graph.
+        for unit in self.unit_dependencies.keys() {
+            inputs.insert(unit.pkg.manifest_path().to_path_buf());
         }
-        inputs.sort();
-        Ok(inputs)
+        Ok(inputs.into_iter().collect())
     }
 
     fn check_collistions(&self) -> CargoResult<()> {
@@ -488,6 +477,20 @@ impl<'a, 'cfg> Context<'a, 'cfg> {
         Ok(())
     }
 
+    /// Records the list of units which are required to emit metadata.
+    ///
+    /// Units which depend only on the metadata of others require the others to
+    /// actually produce metadata, so we'll record that here.
+ fn record_units_requiring_metadata(&mut self) { + for (key, deps) in self.unit_dependencies.iter() { + for dep in deps { + if self.only_requires_rmeta(key, &dep.unit) { + self.rmeta_required.insert(dep.unit); + } + } + } + } + /// Returns whether when `parent` depends on `dep` if it only requires the /// metadata file from `dep`. pub fn only_requires_rmeta(&self, parent: &Unit<'a>, dep: &Unit<'a>) -> bool { @@ -509,70 +512,3 @@ impl<'a, 'cfg> Context<'a, 'cfg> { self.rmeta_required.contains(unit) } } - -#[derive(Default)] -pub struct Links { - validated: HashSet, - links: HashMap, -} - -impl Links { - pub fn new() -> Links { - Links { - validated: HashSet::new(), - links: HashMap::new(), - } - } - - pub fn validate(&mut self, resolve: &Resolve, unit: &Unit<'_>) -> CargoResult<()> { - if !self.validated.insert(unit.pkg.package_id()) { - return Ok(()); - } - let lib = match unit.pkg.manifest().links() { - Some(lib) => lib, - None => return Ok(()), - }; - if let Some(&prev) = self.links.get(lib) { - let pkg = unit.pkg.package_id(); - - let describe_path = |pkgid: PackageId| -> String { - let dep_path = resolve.path_to_top(&pkgid); - let mut dep_path_desc = format!("package `{}`", dep_path[0]); - for dep in dep_path.iter().skip(1) { - write!(dep_path_desc, "\n ... which is depended on by `{}`", dep).unwrap(); - } - dep_path_desc - }; - - failure::bail!( - "multiple packages link to native library `{}`, \ - but a native library can be linked only once\n\ - \n\ - {}\nlinks to native library `{}`\n\ - \n\ - {}\nalso links to native library `{}`", - lib, - describe_path(prev), - lib, - describe_path(pkg), - lib - ) - } - if !unit - .pkg - .manifest() - .targets() - .iter() - .any(|t| t.is_custom_build()) - { - failure::bail!( - "package `{}` specifies that it links to `{}` but does not \ - have a custom build script", - unit.pkg.package_id(), - lib - ) - } - self.links.insert(lib.to_string(), unit.pkg.package_id()); - Ok(()) - } -} diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs index 32d88d8bb27..93b958c76a6 100644 --- a/src/cargo/core/compiler/custom_build.rs +++ b/src/cargo/core/compiler/custom_build.rs @@ -190,7 +190,7 @@ fn build_work<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoRes // Be sure to pass along all enabled features for this package, this is the // last piece of statically known information that we have. - for feat in bcx.resolve.features(unit.pkg.package_id()).iter() { + for feat in &unit.features { cmd.env(&format!("CARGO_FEATURE_{}", super::envify(feat)), "1"); } diff --git a/src/cargo/core/compiler/fingerprint.rs b/src/cargo/core/compiler/fingerprint.rs index fea9267baec..27392ac7cad 100644 --- a/src/cargo/core/compiler/fingerprint.rs +++ b/src/cargo/core/compiler/fingerprint.rs @@ -201,7 +201,8 @@ use serde::de; use serde::ser; use serde::{Deserialize, Serialize}; -use crate::core::Package; +use crate::core::compiler::unit_dependencies::UnitDep; +use crate::core::{InternedString, Package}; use crate::util; use crate::util::errors::{CargoResult, CargoResultExt}; use crate::util::paths; @@ -320,7 +321,7 @@ struct DepFingerprint { pkg_id: u64, /// The crate name we're using for this dependency, which if we change we'll /// need to recompile! - name: String, + name: InternedString, /// Whether or not this dependency is flagged as a public dependency or not. 
public: bool, /// Whether or not this dependency is an rmeta dependency or a "full" @@ -446,7 +447,7 @@ impl<'de> Deserialize<'de> for DepFingerprint { let (pkg_id, name, public, hash) = <(u64, String, bool, u64)>::deserialize(d)?; Ok(DepFingerprint { pkg_id, - name, + name: InternedString::new(&name), public, fingerprint: Arc::new(Fingerprint { memoized_hash: Mutex::new(Some(hash)), @@ -932,12 +933,9 @@ impl DepFingerprint { fn new<'a, 'cfg>( cx: &mut Context<'a, 'cfg>, parent: &Unit<'a>, - dep: &Unit<'a>, + dep: &UnitDep<'a>, ) -> CargoResult { - let fingerprint = calculate(cx, dep)?; - let name = cx.bcx.extern_crate_name(parent, dep)?; - let public = cx.bcx.is_public_dependency(parent, dep); - + let fingerprint = calculate(cx, &dep.unit)?; // We need to be careful about what we hash here. We have a goal of // supporting renaming a project directory and not rebuilding // everything. To do that, however, we need to make sure that the cwd @@ -948,18 +946,18 @@ impl DepFingerprint { // names (sort of for this same reason), so if the package source is a // `path` then we just hash the name, but otherwise we hash the full // id as it won't change when the directory is renamed. - let pkg_id = if dep.pkg.package_id().source_id().is_path() { - util::hash_u64(dep.pkg.package_id().name()) + let pkg_id = if dep.unit.pkg.package_id().source_id().is_path() { + util::hash_u64(dep.unit.pkg.package_id().name()) } else { - util::hash_u64(dep.pkg.package_id()) + util::hash_u64(dep.unit.pkg.package_id()) }; Ok(DepFingerprint { pkg_id, - name, - public, + name: dep.extern_crate_name, + public: dep.public, fingerprint, - only_requires_rmeta: cx.only_requires_rmeta(parent, dep), + only_requires_rmeta: cx.only_requires_rmeta(parent, &dep.unit), }) } } @@ -1039,11 +1037,13 @@ fn calculate_normal<'a, 'cfg>( // Skip fingerprints of binaries because they don't actually induce a // recompile, they're just dependencies in the sense that they need to be // built. - let mut deps = cx - .dep_targets(unit) - .iter() - .filter(|u| !u.target.is_bin()) - .map(|dep| DepFingerprint::new(cx, unit, dep)) + // + // Create Vec since mutable cx is needed in closure. + let deps = Vec::from(cx.unit_deps(unit)); + let mut deps = deps + .into_iter() + .filter(|dep| !dep.unit.target.is_bin()) + .map(|dep| DepFingerprint::new(cx, unit, &dep)) .collect::>>()?; deps.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id)); @@ -1090,10 +1090,7 @@ fn calculate_normal<'a, 'cfg>( // Note that .0 is hashed here, not .1 which is the cwd. That doesn't // actually affect the output artifact so there's no need to hash it. path: util::hash_u64(super::path_args(cx.bcx, unit).0), - features: format!( - "{:?}", - cx.bcx.resolve.features_sorted(unit.pkg.package_id()) - ), + features: format!("{:?}", unit.features), deps, local: Mutex::new(local), memoized_hash: Mutex::new(None), @@ -1135,9 +1132,10 @@ fn calculate_run_custom_build<'a, 'cfg>( // Overridden build scripts don't need to track deps. vec![] } else { - cx.dep_targets(unit) - .iter() - .map(|dep| DepFingerprint::new(cx, unit, dep)) + // Create Vec since mutable cx is needed in closure. + let deps = Vec::from(cx.unit_deps(unit)); + deps.into_iter() + .map(|dep| DepFingerprint::new(cx, unit, &dep)) .collect::>>()? 
     };
diff --git a/src/cargo/core/compiler/links.rs b/src/cargo/core/compiler/links.rs
new file mode 100644
index 00000000000..cc021e3a15d
--- /dev/null
+++ b/src/cargo/core/compiler/links.rs
@@ -0,0 +1,59 @@
+use super::unit_dependencies::UnitGraph;
+use crate::core::{PackageId, Resolve};
+use crate::util::errors::CargoResult;
+use std::collections::{HashMap, HashSet};
+use std::fmt::Write;
+
+/// Validate that the `links` field does not conflict between packages.
+pub fn validate_links(resolve: &Resolve, unit_graph: &UnitGraph<'_>) -> CargoResult<()> {
+    // NOTE: This is the *old* links validator. Links are usually validated
+    // in the resolver. However, the `links` field was only added to the
+    // index in early 2018 (see https://github.com/rust-lang/cargo/pull/4978),
+    // while `links` itself has been around since 2014, so there are still
+    // many crates in the index that don't have `links` properly set (over
+    // 600 at the time of this writing in 2019). This can probably be removed
+    // at some point in the future, though it might be worth considering
+    // fixing the index.
+    let mut validated: HashSet<PackageId> = HashSet::new();
+    let mut links: HashMap<String, PackageId> = HashMap::new();
+    let mut units: Vec<_> = unit_graph.keys().collect();
+    // Sort primarily to make testing easier.
+    units.sort_unstable();
+    for unit in units {
+        if !validated.insert(unit.pkg.package_id()) {
+            continue;
+        }
+        let lib = match unit.pkg.manifest().links() {
+            Some(lib) => lib,
+            None => continue,
+        };
+        if let Some(&prev) = links.get(lib) {
+            let pkg = unit.pkg.package_id();
+
+            let describe_path = |pkgid: PackageId| -> String {
+                let dep_path = resolve.path_to_top(&pkgid);
+                let mut dep_path_desc = format!("package `{}`", dep_path[0]);
+                for dep in dep_path.iter().skip(1) {
+                    write!(dep_path_desc, "\n    ... which is depended on by `{}`", dep).unwrap();
+                }
+                dep_path_desc
+            };
+
+            failure::bail!(
+                "multiple packages link to native library `{}`, \
+                 but a native library can be linked only once\n\
+                 \n\
+                 {}\nlinks to native library `{}`\n\
+                 \n\
+                 {}\nalso links to native library `{}`",
+                lib,
+                describe_path(prev),
+                lib,
+                describe_path(pkg),
+                lib
+            )
+        }
+        links.insert(lib.to_string(), unit.pkg.package_id());
+    }
+    Ok(())
+}
diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs
index 599b7721909..df6e00bf077 100644
--- a/src/cargo/core/compiler/mod.rs
+++ b/src/cargo/core/compiler/mod.rs
@@ -8,8 +8,11 @@ mod fingerprint;
 mod job;
 mod job_queue;
 mod layout;
+mod links;
 mod output_depinfo;
+pub mod standard_lib;
 mod unit;
+pub mod unit_dependencies;
 
 use std::env;
 use std::ffi::{OsStr, OsString};
@@ -35,6 +38,7 @@ use self::job::{Job, Work};
 use self::job_queue::{JobQueue, JobState};
 pub use self::layout::is_bad_artifact_name;
 use self::output_depinfo::output_depinfo;
+use self::unit_dependencies::UnitDep;
 pub use crate::core::compiler::unit::{Unit, UnitInterner};
 use crate::core::manifest::TargetSourcePath;
 use crate::core::profiles::{Lto, PanicStrategy, Profile};
@@ -121,7 +125,6 @@ fn compile<'a, 'cfg: 'a>(
     // we've got everything constructed.
     let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name()));
     fingerprint::prepare_init(cx, unit)?;
-    cx.links.validate(bcx.resolve, unit)?;
 
     let job = if unit.mode.is_run_custom_build() {
         custom_build::prepare(cx, unit)?
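To make the rule `validate_links` enforces concrete: the first package to claim a `links` key wins, and any later claimant is an error. A self-contained sketch of that first-conflict detection, with plain strings standing in for cargo's `PackageId` and `UnitGraph` types (names and packages here are illustrative):

```rust
use std::collections::HashMap;

/// Errors if two packages claim the same `links` key, mirroring the
/// "a native library can be linked only once" rule above.
fn check_links<'a>(units: &[(&'a str, Option<&'a str>)]) -> Result<(), String> {
    let mut links: HashMap<&str, &str> = HashMap::new();
    for &(pkg, lib) in units {
        let lib = match lib {
            Some(lib) => lib,
            None => continue,
        };
        // `insert` returns the previous claimant, if any.
        if let Some(prev) = links.insert(lib, pkg) {
            return Err(format!(
                "multiple packages link to native library `{}`: `{}` and `{}`",
                lib, prev, pkg
            ));
        }
    }
    Ok(())
}

fn main() {
    let units = [("libz-sys", Some("z")), ("foo", None), ("zlib-ng-sys", Some("z"))];
    assert!(check_links(&units).is_err());
}
```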
@@ -408,12 +411,7 @@ fn link_targets<'a, 'cfg>( let package_id = unit.pkg.package_id(); let profile = unit.profile; let unit_mode = unit.mode; - let features = bcx - .resolve - .features_sorted(package_id) - .into_iter() - .map(|s| s.to_owned()) - .collect(); + let features = unit.features.iter().map(|s| s.to_string()).collect(); let json_messages = bcx.build_config.emit_json(); let executable = cx.get_executable(unit)?; let mut target = unit.target.clone(); @@ -630,7 +628,7 @@ fn rustdoc<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult rustdoc.arg("-o").arg(doc_dir); - for feat in bcx.resolve.features_sorted(unit.pkg.package_id()) { + for feat in &unit.features { rustdoc.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } @@ -913,10 +911,7 @@ fn build_base_args<'a, 'cfg>( cmd.arg("--cfg").arg("test"); } - // We ideally want deterministic invocations of rustc to ensure that - // rustc-caching strategies like sccache are able to cache more, so sort the - // feature list here. - for feat in bcx.resolve.features_sorted(unit.pkg.package_id()) { + for feat in &unit.features { cmd.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); } @@ -993,38 +988,39 @@ fn build_deps_args<'a, 'cfg>( }); } - let dep_targets = cx.dep_targets(unit); + // Create Vec since mutable cx is needed in closure below. + let deps = Vec::from(cx.unit_deps(unit)); // If there is not one linkable target but should, rustc fails later // on if there is an `extern crate` for it. This may turn into a hard // error in the future (see PR #4797). - if !dep_targets + if !deps .iter() - .any(|u| !u.mode.is_doc() && u.target.linkable()) + .any(|dep| !dep.unit.mode.is_doc() && dep.unit.target.linkable()) { - if let Some(u) = dep_targets + if let Some(dep) = deps .iter() - .find(|u| !u.mode.is_doc() && u.target.is_lib()) + .find(|dep| !dep.unit.mode.is_doc() && dep.unit.target.is_lib()) { bcx.config.shell().warn(format!( "The package `{}` \ provides no linkable target. The compiler might raise an error while compiling \ `{}`. Consider adding 'dylib' or 'rlib' to key `crate-type` in `{}`'s \ Cargo.toml. 
This warning might turn into a hard error in the future.", - u.target.crate_name(), + dep.unit.target.crate_name(), unit.target.crate_name(), - u.target.crate_name() + dep.unit.target.crate_name() ))?; } } let mut unstable_opts = false; - for dep in dep_targets { - if dep.mode.is_run_custom_build() { - cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep)); + for dep in deps { + if dep.unit.mode.is_run_custom_build() { + cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep.unit)); } - if dep.target.linkable() && !dep.mode.is_doc() { + if dep.unit.target.linkable() && !dep.unit.mode.is_doc() { link_to(cmd, cx, unit, &dep, &mut unstable_opts)?; } } @@ -1041,13 +1037,11 @@ fn build_deps_args<'a, 'cfg>( cmd: &mut ProcessBuilder, cx: &mut Context<'a, 'cfg>, current: &Unit<'a>, - dep: &Unit<'a>, + dep: &UnitDep<'a>, need_unstable_opts: &mut bool, ) -> CargoResult<()> { - let bcx = cx.bcx; - let mut value = OsString::new(); - value.push(bcx.extern_crate_name(current, dep)?); + value.push(dep.extern_crate_name.as_str()); value.push("="); let mut pass = |file| { @@ -1060,7 +1054,7 @@ fn build_deps_args<'a, 'cfg>( .features() .require(Feature::public_dependency()) .is_ok() - && !bcx.is_public_dependency(current, dep) + && !dep.public { cmd.arg("--extern-private"); *need_unstable_opts = true; @@ -1071,13 +1065,13 @@ fn build_deps_args<'a, 'cfg>( cmd.arg(&value); }; - let outputs = cx.outputs(dep)?; + let outputs = cx.outputs(&dep.unit)?; let mut outputs = outputs.iter().filter_map(|output| match output.flavor { FileFlavor::Linkable { rmeta } => Some((output, rmeta)), _ => None, }); - if cx.only_requires_rmeta(current, dep) { + if cx.only_requires_rmeta(current, &dep.unit) { let (output, _rmeta) = outputs .find(|(_output, rmeta)| *rmeta) .expect("failed to find rlib dep for pipelined dep"); diff --git a/src/cargo/core/compiler/standard_lib.rs b/src/cargo/core/compiler/standard_lib.rs new file mode 100644 index 00000000000..9a01c41190e --- /dev/null +++ b/src/cargo/core/compiler/standard_lib.rs @@ -0,0 +1,165 @@ +//! Code for building the standard library. + +use crate::core::compiler::{BuildContext, CompileMode, Kind, Unit}; +use crate::core::profiles::UnitFor; +use crate::core::resolver::ResolveOpts; +use crate::core::{Dependency, PackageId, PackageSet, Resolve, SourceId, Workspace}; +use crate::ops::{self, Packages}; +use crate::util::errors::CargoResult; +use std::collections::{HashMap, HashSet}; +use std::path::PathBuf; + +/// Parse the `-Zbuild-std` flag. +pub fn parse_unstable_flag(value: Option<&str>) -> Vec { + // This is a temporary hack until there is a more principled way to + // declare dependencies in Cargo.toml. + let value = value.unwrap_or("std"); + let mut crates: HashSet<&str> = value.split(',').collect(); + if crates.contains("std") { + crates.insert("core"); + crates.insert("panic_unwind"); + crates.insert("compiler_builtins"); + } else if crates.contains("core") { + crates.insert("compiler_builtins"); + } + crates.into_iter().map(|s| s.to_string()).collect() +} + +/// Resolve the standard library dependencies. 
+pub fn resolve_std<'cfg>(
+    ws: &Workspace<'cfg>,
+    crates: &[String],
+) -> CargoResult<(PackageSet<'cfg>, Resolve)> {
+    let src_path = detect_sysroot_src_path(ws)?;
+    let to_patch = [
+        "rustc-std-workspace-core",
+        "rustc-std-workspace-alloc",
+        "rustc-std-workspace-std",
+    ];
+    let patches = to_patch
+        .iter()
+        .map(|&name| {
+            let source_path = SourceId::for_path(&src_path.join("src").join("tools").join(name))?;
+            let dep = Dependency::parse_no_deprecated(name, None, source_path)?;
+            Ok(dep)
+        })
+        .collect::<CargoResult<Vec<_>>>()?;
+    let crates_io_url = crate::sources::CRATES_IO_INDEX.parse().unwrap();
+    let mut patch = HashMap::new();
+    patch.insert(crates_io_url, patches);
+    let members = vec![
+        String::from("src/libstd"),
+        String::from("src/libcore"),
+        String::from("src/liballoc"),
+        String::from("src/libtest"),
+    ];
+    let ws_config = crate::core::WorkspaceConfig::Root(crate::core::WorkspaceRootConfig::new(
+        &src_path,
+        &Some(members),
+        /*default_members*/ &None,
+        /*exclude*/ &None,
+    ));
+    let virtual_manifest = crate::core::VirtualManifest::new(
+        /*replace*/ Vec::new(),
+        patch,
+        ws_config,
+        // Profiles are not used here, but we need something to pass in.
+        ws.profiles().clone(),
+        crate::core::Features::default(),
+    );
+
+    let config = ws.config();
+    // This is a delicate hack. In order for features to resolve correctly,
+    // the resolver needs to run against a specific "current" member of the
+    // workspace. Thus, in order to set the features for `std`, we need to
+    // set `libtest` to be the "current" member. `libtest` is the root, and
+    // all other standard library crates are dependencies from there. Since
+    // none of the other crates need to alter their features, this should be
+    // fine, for now. Perhaps in the future features will be decoupled from
+    // the resolver and it will be easier to control feature selection.
+    let current_manifest = src_path.join("src/libtest/Cargo.toml");
+    // TODO: Consider setting require_optional_deps false?
+    // TODO: Consider doing something to enforce --locked? Or to prevent the
+    // lock file from being written, such as setting ephemeral.
+    let std_ws = Workspace::new_virtual(src_path, current_manifest, virtual_manifest, config)?;
+    // `test` is not in the default set because it is optional, but it needs
+    // to be part of the resolve in case we do need it.
+    let mut spec_pkgs = Vec::from(crates);
+    spec_pkgs.push("test".to_string());
+    let spec = Packages::Packages(spec_pkgs);
+    let specs = spec.to_package_id_specs(&std_ws)?;
+    let features = vec!["panic-unwind".to_string(), "backtrace".to_string()];
+    // dev_deps setting shouldn't really matter here.
+    let opts = ResolveOpts::new(
+        /*dev_deps*/ false, &features, /*all_features*/ false,
+        /*uses_default_features*/ true,
+    );
+    let resolve = ops::resolve_ws_with_opts(&std_ws, opts, &specs)?;
+    Ok(resolve)
+}
+
+/// Generate a list of root `Unit`s for the standard library.
+///
+/// The given slice of crate names is the root set.
+pub fn generate_std_roots<'a>(
+    bcx: &BuildContext<'a, '_>,
+    crates: &[String],
+    std_resolve: &'a Resolve,
+) -> CargoResult<Vec<Unit<'a>>> {
+    // Generate the root Units for the standard library.
+    let std_ids = crates
+        .iter()
+        .map(|crate_name| std_resolve.query(crate_name))
+        .collect::<CargoResult<Vec<PackageId>>>()?;
+    // Convert PackageId to Package.
+    let std_pkgs = bcx.packages.get_many(std_ids)?;
+    // Generate a list of Units.
+ std_pkgs + .into_iter() + .map(|pkg| { + let lib = pkg + .targets() + .iter() + .find(|t| t.is_lib()) + .expect("std has a lib"); + let unit_for = UnitFor::new_normal(); + // I don't think we need to bother with Check here, the difference + // in time is minimal, and the difference in caching is + // significant. + let mode = CompileMode::Build; + let profile = bcx.profiles.get_profile( + pkg.package_id(), + /*is_member*/ false, + unit_for, + mode, + bcx.build_config.release, + ); + let features = std_resolve.features_sorted(pkg.package_id()); + Ok(bcx + .units + .intern(pkg, lib, profile, Kind::Target, mode, features)) + }) + .collect::>>() +} + +fn detect_sysroot_src_path(ws: &Workspace<'_>) -> CargoResult { + // NOTE: This is temporary until we figure out how to acquire the source. + // If we decide to keep the sysroot probe, then BuildConfig will need to + // be restructured so that the TargetInfo is created earlier and passed + // in, so we don't have this extra call to rustc. + let rustc = ws.config().load_global_rustc(Some(ws))?; + let output = rustc.process().arg("--print=sysroot").exec_with_output()?; + let s = String::from_utf8(output.stdout) + .map_err(|e| failure::format_err!("rustc didn't return utf8 output: {:?}", e))?; + let sysroot = PathBuf::from(s.trim()); + let src_path = sysroot.join("lib").join("rustlib").join("src").join("rust"); + let lock = src_path.join("Cargo.lock"); + if !lock.exists() { + failure::bail!( + "{:?} does not exist, unable to build with the standard \ + library, try:\n rustup component add rust-src", + lock + ); + } + Ok(src_path) +} diff --git a/src/cargo/core/compiler/unit.rs b/src/cargo/core/compiler/unit.rs index 00c9841cc14..122c077f471 100644 --- a/src/cargo/core/compiler/unit.rs +++ b/src/cargo/core/compiler/unit.rs @@ -48,6 +48,9 @@ pub struct UnitInner<'a> { pub kind: Kind, /// The "mode" this unit is being compiled for. See [`CompileMode`] for more details. pub mode: CompileMode, + /// The `cfg` features to enable for this unit. + /// This must be sorted. + pub features: Vec<&'a str>, } impl UnitInner<'_> { @@ -100,6 +103,7 @@ impl<'a> fmt::Debug for Unit<'a> { .field("profile", &self.profile) .field("kind", &self.kind) .field("mode", &self.mode) + .field("features", &self.features) .finish() } } @@ -139,6 +143,7 @@ impl<'a> UnitInterner<'a> { profile: Profile, kind: Kind, mode: CompileMode, + features: Vec<&'a str>, ) -> Unit<'a> { let inner = self.intern_inner(&UnitInner { pkg, @@ -146,6 +151,7 @@ impl<'a> UnitInterner<'a> { profile, kind, mode, + features, }); Unit { inner } } diff --git a/src/cargo/core/compiler/context/unit_dependencies.rs b/src/cargo/core/compiler/unit_dependencies.rs similarity index 59% rename from src/cargo/core/compiler/context/unit_dependencies.rs rename to src/cargo/core/compiler/unit_dependencies.rs index 5cb41e6385d..2d00b7abd7f 100644 --- a/src/cargo/core/compiler/context/unit_dependencies.rs +++ b/src/cargo/core/compiler/unit_dependencies.rs @@ -16,36 +16,140 @@ //! graph of `Unit`s, which capture these properties. 
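For reference, the sysroot probe in `detect_sysroot_src_path` above reduces to the following standalone sketch (assuming `rustc` is on `PATH`; the real code goes through cargo's `Rustc` wrapper and `failure`-based error types):

```rust
use std::path::PathBuf;
use std::process::Command;

fn sysroot_src() -> Option<PathBuf> {
    // Equivalent of `rustc --print=sysroot`.
    let out = Command::new("rustc").arg("--print=sysroot").output().ok()?;
    let sysroot = PathBuf::from(String::from_utf8(out.stdout).ok()?.trim());
    let src = sysroot.join("lib").join("rustlib").join("src").join("rust");
    // The patch probes for Cargo.lock so it can suggest
    // `rustup component add rust-src` when the component is missing.
    if src.join("Cargo.lock").exists() {
        Some(src)
    } else {
        None
    }
}

fn main() {
    match sysroot_src() {
        Some(p) => println!("std sources at {}", p.display()),
        None => println!("rust-src component not installed"),
    }
}
```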
 use crate::core::compiler::Unit;
-use crate::core::compiler::{BuildContext, CompileMode, Context, Kind};
+use crate::core::compiler::{BuildContext, CompileMode, Kind};
 use crate::core::dependency::Kind as DepKind;
 use crate::core::package::Downloads;
-use crate::core::profiles::UnitFor;
-use crate::core::{Package, PackageId, Target};
+use crate::core::profiles::{Profile, UnitFor};
+use crate::core::resolver::Resolve;
+use crate::core::{InternedString, Package, PackageId, Target};
 use crate::CargoResult;
 use log::trace;
 use std::collections::{HashMap, HashSet};
 
-struct State<'a, 'cfg, 'tmp> {
-    cx: &'tmp mut Context<'a, 'cfg>,
+/// The dependency graph of Units.
+pub type UnitGraph<'a> = HashMap<Unit<'a>, Vec<UnitDep<'a>>>;
+
+/// A unit dependency.
+#[derive(Debug, Clone, Hash, Eq, PartialEq, PartialOrd, Ord)]
+pub struct UnitDep<'a> {
+    /// The dependency unit.
+    pub unit: Unit<'a>,
+    /// The purpose of this dependency (a dependency for a test, or a build
+    /// script, etc.).
+    pub unit_for: UnitFor,
+    /// The name the parent uses to refer to this dependency.
+    pub extern_crate_name: InternedString,
+    /// Whether or not this is a public dependency.
+    pub public: bool,
+}
+
+/// Collection of state used while creating the `UnitGraph`.
+struct State<'a, 'cfg> {
+    bcx: &'a BuildContext<'a, 'cfg>,
     waiting_on_download: HashSet<PackageId>,
     downloads: Downloads<'a, 'cfg>,
+    unit_dependencies: UnitGraph<'a>,
+    package_cache: HashMap<PackageId, &'a Package>,
+    usr_resolve: &'a Resolve,
+    std_resolve: Option<&'a Resolve>,
+    /// This flag is `true` while generating the dependencies for the standard
+    /// library.
+    is_std: bool,
 }
 
 pub fn build_unit_dependencies<'a, 'cfg>(
-    cx: &mut Context<'a, 'cfg>,
+    bcx: &'a BuildContext<'a, 'cfg>,
+    resolve: &'a Resolve,
+    std_resolve: Option<&'a Resolve>,
     roots: &[Unit<'a>],
-) -> CargoResult<()> {
-    assert!(
-        cx.unit_dependencies.is_empty(),
-        "can only build unit deps once"
-    );
-
+    std_roots: &[Unit<'a>],
+) -> CargoResult<UnitGraph<'a>> {
     let mut state = State {
-        downloads: cx.bcx.packages.enable_download()?,
-        cx,
+        bcx,
+        downloads: bcx.packages.enable_download()?,
         waiting_on_download: HashSet::new(),
+        unit_dependencies: HashMap::new(),
+        package_cache: HashMap::new(),
+        usr_resolve: resolve,
+        std_resolve,
+        is_std: false,
     };
+
+    let std_unit_deps = calc_deps_of_std(&mut state, std_roots)?;
+
+    deps_of_roots(roots, &mut state)?;
+    super::links::validate_links(state.resolve(), &state.unit_dependencies)?;
+    // Hopefully there aren't any links conflicts with the standard library?
+
+    if let Some(std_unit_deps) = std_unit_deps {
+        attach_std_deps(&mut state, std_roots, std_unit_deps);
+    }
+
+    connect_run_custom_build_deps(&mut state.unit_dependencies);
+
+    // Dependencies are used in tons of places throughout the backend, many of
+    // which affect the determinism of the build itself. As a result be sure
+    // that dependency lists are always sorted to ensure we've always got a
+    // deterministic output.
+    for list in state.unit_dependencies.values_mut() {
+        list.sort();
+    }
+
+    trace!("ALL UNIT DEPENDENCIES {:#?}", state.unit_dependencies);
+
+    Ok(state.unit_dependencies)
+}
+
+/// Compute all the dependencies for the standard library.
+fn calc_deps_of_std<'a, 'cfg>(
+    mut state: &mut State<'a, 'cfg>,
+    std_roots: &[Unit<'a>],
+) -> CargoResult<Option<UnitGraph<'a>>> {
+    if std_roots.is_empty() {
+        return Ok(None);
+    }
+    // Compute dependencies for the standard library.
+ state.is_std = true; + deps_of_roots(std_roots, &mut state)?; + state.is_std = false; + Ok(Some(std::mem::replace( + &mut state.unit_dependencies, + HashMap::new(), + ))) +} + +/// Add the standard library units to the `unit_dependencies`. +fn attach_std_deps<'a, 'cfg>( + state: &mut State<'a, 'cfg>, + std_roots: &[Unit<'a>], + std_unit_deps: UnitGraph<'a>, +) { + // Attach the standard library as a dependency of every target unit. + for (unit, deps) in state.unit_dependencies.iter_mut() { + if unit.kind == Kind::Target && !unit.mode.is_run_custom_build() { + deps.extend(std_roots.iter().map(|unit| UnitDep { + unit: *unit, + unit_for: UnitFor::new_normal(), + extern_crate_name: unit.pkg.name(), + // TODO: Does this `public` make sense? + public: true, + })); + } + } + // And also include the dependencies of the standard library itself. + for (unit, deps) in std_unit_deps.into_iter() { + if let Some(other_unit) = state.unit_dependencies.insert(unit, deps) { + panic!("std unit collision with existing unit: {:?}", other_unit); + } + } +} + +/// Compute all the dependencies of the given root units. +/// The result is stored in state.unit_dependencies. +fn deps_of_roots<'a, 'cfg>(roots: &[Unit<'a>], mut state: &mut State<'a, 'cfg>) -> CargoResult<()> { + // Loop because we are downloading while building the dependency graph. + // The partially-built unit graph is discarded through each pass of the + // loop because it is incomplete because not all required Packages have + // been downloaded. loop { for unit in roots.iter() { state.get(unit.pkg.package_id())?; @@ -56,7 +160,7 @@ pub fn build_unit_dependencies<'a, 'cfg>( // cleared, and avoid building the lib thrice (once with `panic`, once // without, once for `--test`). In particular, the lib included for // Doc tests and examples are `Build` mode here. - let unit_for = if unit.mode.is_any_test() || state.cx.bcx.build_config.test() { + let unit_for = if unit.mode.is_any_test() || state.bcx.build_config.test() { UnitFor::new_test() } else if unit.target.is_custom_build() { // This normally doesn't happen, except `clean` aggressively @@ -73,32 +177,18 @@ pub fn build_unit_dependencies<'a, 'cfg>( if !state.waiting_on_download.is_empty() { state.finish_some_downloads()?; - state.cx.unit_dependencies.clear(); + state.unit_dependencies.clear(); } else { break; } } - - connect_run_custom_build_deps(&mut state); - - trace!("ALL UNIT DEPENDENCIES {:#?}", state.cx.unit_dependencies); - - record_units_requiring_metadata(state.cx); - - // Dependencies are used in tons of places throughout the backend, many of - // which affect the determinism of the build itself. As a result be sure - // that dependency lists are always sorted to ensure we've always got a - // deterministic output. - for list in state.cx.unit_dependencies.values_mut() { - list.sort(); - } - Ok(()) } -fn deps_of<'a, 'cfg, 'tmp>( +/// Compute the dependencies of a single unit. +fn deps_of<'a, 'cfg>( unit: &Unit<'a>, - state: &mut State<'a, 'cfg, 'tmp>, + state: &mut State<'a, 'cfg>, unit_for: UnitFor, ) -> CargoResult<()> { // Currently the `unit_dependencies` map does not include `unit_for`. This should @@ -107,12 +197,11 @@ fn deps_of<'a, 'cfg, 'tmp>( // `TestDependency`. `CustomBuild` should also be fine since if the // requested unit's settings are the same as `Any`, `CustomBuild` can't // affect anything else in the hierarchy. 
- if !state.cx.unit_dependencies.contains_key(unit) { + if !state.unit_dependencies.contains_key(unit) { let unit_deps = compute_deps(unit, state, unit_for)?; - let to_insert: Vec<_> = unit_deps.iter().map(|&(unit, _)| unit).collect(); - state.cx.unit_dependencies.insert(*unit, to_insert); - for (unit, unit_for) in unit_deps { - deps_of(&unit, state, unit_for)?; + state.unit_dependencies.insert(*unit, unit_deps.clone()); + for unit_dep in unit_deps { + deps_of(&unit_dep.unit, state, unit_dep.unit_for)?; } } Ok(()) @@ -122,21 +211,21 @@ fn deps_of<'a, 'cfg, 'tmp>( /// for that package. /// This returns a `Vec` of `(Unit, UnitFor)` pairs. The `UnitFor` /// is the profile type that should be used for dependencies of the unit. -fn compute_deps<'a, 'cfg, 'tmp>( +fn compute_deps<'a, 'cfg>( unit: &Unit<'a>, - state: &mut State<'a, 'cfg, 'tmp>, + state: &mut State<'a, 'cfg>, unit_for: UnitFor, -) -> CargoResult, UnitFor)>> { +) -> CargoResult>> { if unit.mode.is_run_custom_build() { - return compute_deps_custom_build(unit, state.cx.bcx); + return compute_deps_custom_build(unit, state); } else if unit.mode.is_doc() { // Note: this does not include doc test. return compute_deps_doc(unit, state); } - let bcx = state.cx.bcx; + let bcx = state.bcx; let id = unit.pkg.package_id(); - let deps = bcx.resolve.deps(id).filter(|&(_id, deps)| { + let deps = state.resolve().deps(id).filter(|&(_id, deps)| { assert!(!deps.is_empty()); deps.iter().any(|dep| { // If this target is a build command, then we only want build @@ -185,13 +274,21 @@ fn compute_deps<'a, 'cfg, 'tmp>( && lib.proc_macro() && unit.kind == Kind::Target { - let unit = new_unit(bcx, pkg, lib, dep_unit_for, Kind::Target, mode); - ret.push((unit, dep_unit_for)); - let unit = new_unit(bcx, pkg, lib, dep_unit_for, Kind::Host, mode); - ret.push((unit, dep_unit_for)); + let unit_dep = new_unit_dep(state, unit, pkg, lib, dep_unit_for, Kind::Target, mode)?; + ret.push(unit_dep); + let unit_dep = new_unit_dep(state, unit, pkg, lib, dep_unit_for, Kind::Host, mode)?; + ret.push(unit_dep); } else { - let unit = new_unit(bcx, pkg, lib, dep_unit_for, unit.kind.for_target(lib), mode); - ret.push((unit, dep_unit_for)); + let unit_dep = new_unit_dep( + state, + unit, + pkg, + lib, + dep_unit_for, + unit.kind.for_target(lib), + mode, + )?; + ret.push(unit_dep); } } @@ -201,7 +298,7 @@ fn compute_deps<'a, 'cfg, 'tmp>( if unit.target.is_custom_build() { return Ok(ret); } - ret.extend(dep_build_script(unit, bcx)); + ret.extend(dep_build_script(unit, state)?); // If this target is a binary, test, example, etc, then it depends on // the library of the same package. The call to `resolve.deps` above @@ -210,7 +307,7 @@ fn compute_deps<'a, 'cfg, 'tmp>( if unit.target.is_lib() && unit.mode != CompileMode::Doctest { return Ok(ret); } - ret.extend(maybe_lib(unit, bcx, unit_for)); + ret.extend(maybe_lib(unit, state, unit_for)?); // If any integration tests/benches are being run, make sure that // binaries are built as well. @@ -228,22 +325,21 @@ fn compute_deps<'a, 'cfg, 'tmp>( t.is_bin() && // Skip binaries with required features that have not been selected. 
t.required_features().unwrap_or(&no_required_features).iter().all(|f| { - bcx.resolve.features(id).contains(f) + unit.features.contains(&f.as_str()) }) }) .map(|t| { - ( - new_unit( - bcx, - unit.pkg, - t, - UnitFor::new_normal(), - unit.kind.for_target(t), - CompileMode::Build, - ), + new_unit_dep( + state, + unit, + unit.pkg, + t, UnitFor::new_normal(), + unit.kind.for_target(t), + CompileMode::Build, ) - }), + }) + .collect::>>>()?, ); } @@ -256,15 +352,14 @@ fn compute_deps<'a, 'cfg, 'tmp>( /// the returned set of units must all be run before `unit` is run. fn compute_deps_custom_build<'a, 'cfg>( unit: &Unit<'a>, - bcx: &BuildContext<'a, 'cfg>, -) -> CargoResult, UnitFor)>> { + state: &mut State<'a, 'cfg>, +) -> CargoResult>> { if let Some(links) = unit.pkg.manifest().links() { - if bcx.script_override(links, unit.kind).is_some() { + if state.bcx.script_override(links, unit.kind).is_some() { // Overridden build scripts don't have any dependencies. return Ok(Vec::new()); } } - // When not overridden, then the dependencies to run a build script are: // // 1. Compiling the build script itself. @@ -274,28 +369,29 @@ fn compute_deps_custom_build<'a, 'cfg>( // We don't have a great way of handling (2) here right now so this is // deferred until after the graph of all unit dependencies has been // constructed. - let unit = new_unit( - bcx, + let unit_dep = new_unit_dep( + state, + unit, unit.pkg, unit.target, + // All dependencies of this unit should use profiles for custom + // builds. UnitFor::new_build(), // Build scripts always compiled for the host. Kind::Host, CompileMode::Build, - ); - // All dependencies of this unit should use profiles for custom - // builds. - Ok(vec![(unit, UnitFor::new_build())]) + )?; + Ok(vec![unit_dep]) } /// Returns the dependencies necessary to document a package. -fn compute_deps_doc<'a, 'cfg, 'tmp>( +fn compute_deps_doc<'a, 'cfg>( unit: &Unit<'a>, - state: &mut State<'a, 'cfg, 'tmp>, -) -> CargoResult, UnitFor)>> { - let bcx = state.cx.bcx; - let deps = bcx - .resolve + state: &mut State<'a, 'cfg>, +) -> CargoResult>> { + let bcx = state.bcx; + let deps = state + .resolve() .deps(unit.pkg.package_id()) .filter(|&(_id, deps)| { deps.iter().any(|dep| match dep.kind() { @@ -321,42 +417,63 @@ fn compute_deps_doc<'a, 'cfg, 'tmp>( // However, for plugins/proc macros, deps should be built like normal. let mode = check_or_build_mode(unit.mode, lib); let dep_unit_for = UnitFor::new_normal().with_for_host(lib.for_host()); - let lib_unit = new_unit(bcx, dep, lib, dep_unit_for, unit.kind.for_target(lib), mode); - ret.push((lib_unit, dep_unit_for)); + let lib_unit_dep = new_unit_dep( + state, + unit, + dep, + lib, + dep_unit_for, + unit.kind.for_target(lib), + mode, + )?; + ret.push(lib_unit_dep); if let CompileMode::Doc { deps: true } = unit.mode { // Document this lib as well. - let doc_unit = new_unit( - bcx, + let doc_unit_dep = new_unit_dep( + state, + unit, dep, lib, dep_unit_for, unit.kind.for_target(lib), unit.mode, - ); - ret.push((doc_unit, dep_unit_for)); + )?; + ret.push(doc_unit_dep); } } // Be sure to build/run the build script for documented libraries. - ret.extend(dep_build_script(unit, bcx)); + ret.extend(dep_build_script(unit, state)?); // If we document a binary/example, we need the library available. 
if unit.target.is_bin() || unit.target.is_example() { - ret.extend(maybe_lib(unit, bcx, UnitFor::new_normal())); + ret.extend(maybe_lib(unit, state, UnitFor::new_normal())?); } Ok(ret) } fn maybe_lib<'a>( unit: &Unit<'a>, - bcx: &BuildContext<'a, '_>, + state: &mut State<'a, '_>, unit_for: UnitFor, -) -> Option<(Unit<'a>, UnitFor)> { - unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| { - let mode = check_or_build_mode(unit.mode, t); - let unit = new_unit(bcx, unit.pkg, t, unit_for, unit.kind.for_target(t), mode); - (unit, unit_for) - }) +) -> CargoResult>> { + unit.pkg + .targets() + .iter() + .find(|t| t.linkable()) + .map(|t| { + let mode = check_or_build_mode(unit.mode, t); + new_unit_dep( + state, + unit, + unit.pkg, + t, + unit_for, + unit.kind.for_target(t), + mode, + ) + }) + .transpose() } /// If a build script is scheduled to be run for the package specified by @@ -368,8 +485,8 @@ fn maybe_lib<'a>( /// build script. fn dep_build_script<'a>( unit: &Unit<'a>, - bcx: &BuildContext<'a, '_>, -) -> Option<(Unit<'a>, UnitFor)> { + state: &State<'a, '_>, +) -> CargoResult>> { unit.pkg .targets() .iter() @@ -377,16 +494,22 @@ fn dep_build_script<'a>( .map(|t| { // The profile stored in the Unit is the profile for the thing // the custom build script is running for. - let unit = bcx.units.intern( + let profile = state + .bcx + .profiles + .get_profile_run_custom_build(&unit.profile); + new_unit_dep_with_profile( + state, + unit, unit.pkg, t, - bcx.profiles.get_profile_run_custom_build(&unit.profile), + UnitFor::new_build(), unit.kind, CompileMode::RunCustomBuild, - ); - - (unit, UnitFor::new_build()) + profile, + ) }) + .transpose() } /// Choose the correct mode for dependencies. @@ -407,23 +530,56 @@ fn check_or_build_mode(mode: CompileMode, target: &Target) -> CompileMode { } } -fn new_unit<'a>( - bcx: &BuildContext<'a, '_>, +/// Create a new Unit for a dependency from `parent` to `pkg` and `target`. +fn new_unit_dep<'a>( + state: &State<'a, '_>, + parent: &Unit<'a>, pkg: &'a Package, target: &'a Target, unit_for: UnitFor, kind: Kind, mode: CompileMode, -) -> Unit<'a> { - let profile = bcx.profiles.get_profile( +) -> CargoResult> { + let profile = state.bcx.profiles.get_profile( pkg.package_id(), - bcx.ws.is_member(pkg), + state.bcx.ws.is_member(pkg), unit_for, mode, - bcx.build_config.release, + state.bcx.build_config.release, ); + new_unit_dep_with_profile(state, parent, pkg, target, unit_for, kind, mode, profile) +} - bcx.units.intern(pkg, target, profile, kind, mode) +fn new_unit_dep_with_profile<'a>( + state: &State<'a, '_>, + parent: &Unit<'a>, + pkg: &'a Package, + target: &'a Target, + unit_for: UnitFor, + kind: Kind, + mode: CompileMode, + profile: Profile, +) -> CargoResult> { + // TODO: consider making extern_crate_name return InternedString? + let extern_crate_name = InternedString::new(&state.resolve().extern_crate_name( + parent.pkg.package_id(), + pkg.package_id(), + target, + )?); + let public = state + .resolve() + .is_public_dep(parent.pkg.package_id(), pkg.package_id()); + let features = state.resolve().features_sorted(pkg.package_id()); + let unit = state + .bcx + .units + .intern(pkg, target, profile, kind, mode, features); + Ok(UnitDep { + unit, + unit_for, + extern_crate_name, + public, + }) } /// Fill in missing dependencies for units of the `RunCustomBuild` @@ -436,7 +592,7 @@ fn new_unit<'a>( /// /// Here we take the entire `deps` map and add more dependencies from execution /// of one build script to execution of another build script. 
-fn connect_run_custom_build_deps(state: &mut State<'_, '_, '_>) { +fn connect_run_custom_build_deps(unit_dependencies: &mut UnitGraph<'_>) { let mut new_deps = Vec::new(); { @@ -445,12 +601,12 @@ fn connect_run_custom_build_deps(state: &mut State<'_, '_, '_>) { // example a library might depend on a build script, so this map will // have the build script as the key and the library would be in the // value's set. - let mut reverse_deps = HashMap::new(); - for (unit, deps) in state.cx.unit_dependencies.iter() { + let mut reverse_deps_map = HashMap::new(); + for (unit, deps) in unit_dependencies.iter() { for dep in deps { - if dep.mode == CompileMode::RunCustomBuild { - reverse_deps - .entry(dep) + if dep.unit.mode == CompileMode::RunCustomBuild { + reverse_deps_map + .entry(dep.unit) .or_insert_with(HashSet::new) .insert(unit); } @@ -466,29 +622,37 @@ fn connect_run_custom_build_deps(state: &mut State<'_, '_, '_>) { // `links`, then we depend on that package's build script! Here we use // `dep_build_script` to manufacture an appropriate build script unit to // depend on. - for unit in state - .cx - .unit_dependencies + for unit in unit_dependencies .keys() .filter(|k| k.mode == CompileMode::RunCustomBuild) { - let reverse_deps = match reverse_deps.get(unit) { + // This is the lib that runs this custom build. + let reverse_deps = match reverse_deps_map.get(unit) { Some(set) => set, None => continue, }; let to_add = reverse_deps .iter() - .flat_map(|reverse_dep| state.cx.unit_dependencies[reverse_dep].iter()) + // Get all deps for lib. + .flat_map(|reverse_dep| unit_dependencies[reverse_dep].iter()) + // Only deps with `links`. .filter(|other| { - other.pkg != unit.pkg - && other.target.linkable() - && other.pkg.manifest().links().is_some() + other.unit.pkg != unit.pkg + && other.unit.target.linkable() + && other.unit.pkg.manifest().links().is_some() + }) + // Get the RunCustomBuild for other lib. + .filter_map(|other| { + unit_dependencies[&other.unit] + .iter() + .find(|other_dep| other_dep.unit.mode == CompileMode::RunCustomBuild) + .cloned() }) - .filter_map(|other| dep_build_script(other, state.cx.bcx).map(|p| p.0)) .collect::>(); if !to_add.is_empty() { + // (RunCustomBuild, set(other RunCustomBuild)) new_deps.push((*unit, to_add)); } } @@ -496,39 +660,28 @@ fn connect_run_custom_build_deps(state: &mut State<'_, '_, '_>) { // And finally, add in all the missing dependencies! for (unit, new_deps) in new_deps { - state - .cx - .unit_dependencies - .get_mut(&unit) - .unwrap() - .extend(new_deps); + unit_dependencies.get_mut(&unit).unwrap().extend(new_deps); } } -/// Records the list of units which are required to emit metadata. -/// -/// Units which depend only on the metadata of others requires the others to -/// actually produce metadata, so we'll record that here. -fn record_units_requiring_metadata(cx: &mut Context<'_, '_>) { - for (key, deps) in cx.unit_dependencies.iter() { - for dep in deps { - if cx.only_requires_rmeta(key, dep) { - cx.rmeta_required.insert(*dep); - } +impl<'a, 'cfg> State<'a, 'cfg> { + fn resolve(&self) -> &'a Resolve { + if self.is_std { + self.std_resolve.unwrap() + } else { + self.usr_resolve } } -} -impl<'a, 'cfg, 'tmp> State<'a, 'cfg, 'tmp> { fn get(&mut self, id: PackageId) -> CargoResult> { - if let Some(pkg) = self.cx.package_cache.get(&id) { + if let Some(pkg) = self.package_cache.get(&id) { return Ok(Some(pkg)); } if !self.waiting_on_download.insert(id) { return Ok(None); } if let Some(pkg) = self.downloads.start(id)? 
{ - self.cx.package_cache.insert(id, pkg); + self.package_cache.insert(id, pkg); self.waiting_on_download.remove(&id); return Ok(Some(pkg)); } @@ -548,7 +701,7 @@ impl<'a, 'cfg, 'tmp> State<'a, 'cfg, 'tmp> { loop { let pkg = self.downloads.wait()?; self.waiting_on_download.remove(&pkg.package_id()); - self.cx.package_cache.insert(pkg.package_id(), pkg); + self.package_cache.insert(pkg.package_id(), pkg); // Arbitrarily choose that 5 or more packages concurrently download // is a good enough number to "fill the network pipe". If we have diff --git a/src/cargo/core/features.rs b/src/cargo/core/features.rs index b910eb72661..f52e3097c10 100644 --- a/src/cargo/core/features.rs +++ b/src/cargo/core/features.rs @@ -334,6 +334,7 @@ pub struct CliUnstable { pub install_upgrade: bool, pub cache_messages: bool, pub binary_dep_depinfo: bool, + pub build_std: Option>, } impl CliUnstable { @@ -380,6 +381,9 @@ impl CliUnstable { "install-upgrade" => self.install_upgrade = true, "cache-messages" => self.cache_messages = true, "binary-dep-depinfo" => self.binary_dep_depinfo = true, + "build-std" => { + self.build_std = Some(crate::core::compiler::standard_lib::parse_unstable_flag(v)) + } _ => failure::bail!("unknown `-Z` flag specified: {}", k), } diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index 01a9864cec9..db6c36f5507 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -452,6 +452,18 @@ impl<'cfg> PackageSet<'cfg> { pub fn sources_mut(&self) -> RefMut<'_, SourceMap<'cfg>> { self.sources.borrow_mut() } + + /// Merge the given set into self. + pub fn add_set(&mut self, set: PackageSet<'cfg>) { + assert!(!self.downloading.get()); + assert!(!set.downloading.get()); + for (pkg_id, p_cell) in set.packages { + self.packages.entry(pkg_id).or_insert(p_cell); + } + let mut sources = self.sources.borrow_mut(); + let other_sources = set.sources.into_inner(); + sources.add_source_map(other_sources); + } } // When dynamically linked against libcurl, we want to ignore some failures diff --git a/src/cargo/core/profiles.rs b/src/cargo/core/profiles.rs index c20b5800a80..c7083b48ee7 100644 --- a/src/cargo/core/profiles.rs +++ b/src/cargo/core/profiles.rs @@ -576,7 +576,7 @@ impl fmt::Display for PanicStrategy { /// Flags used in creating `Unit`s to indicate the purpose for the target, and /// to ensure the target's dependencies have the correct settings. -#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] +#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)] pub struct UnitFor { /// A target for `build.rs` or any of its dependencies, or a proc-macro or /// any of its dependencies. This enables `build-override` profiles for diff --git a/src/cargo/core/source/mod.rs b/src/cargo/core/source/mod.rs index 0a07b8dfb6e..c06ff1f5e45 100644 --- a/src/cargo/core/source/mod.rs +++ b/src/cargo/core/source/mod.rs @@ -318,4 +318,11 @@ impl<'src> SourceMap<'src> { ) -> impl Iterator { self.map.iter_mut().map(|(a, b)| (a, &mut **b)) } + + /// Merge the given map into self. + pub fn add_source_map(&mut self, other: SourceMap<'src>) { + for (key, value) in other.map { + self.map.entry(key).or_insert(value); + } + } } diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs index d11d00bbcb9..56c25b040a6 100644 --- a/src/cargo/core/workspace.rs +++ b/src/cargo/core/workspace.rs @@ -138,17 +138,24 @@ impl<'cfg> Workspace<'cfg> { /// root and all member packages. 
diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs
index d11d00bbcb9..56c25b040a6 100644
--- a/src/cargo/core/workspace.rs
+++ b/src/cargo/core/workspace.rs
@@ -138,17 +138,24 @@ impl<'cfg> Workspace<'cfg> {
     /// root and all member packages. It will then validate the workspace
     /// before returning it, so `Ok` is only returned for valid workspaces.
     pub fn new(manifest_path: &Path, config: &'cfg Config) -> CargoResult<Workspace<'cfg>> {
-        let target_dir = config.target_dir()?;
+        let mut ws = Workspace::new_default(manifest_path.to_path_buf(), config);
+        ws.target_dir = config.target_dir()?;
+        ws.root_manifest = ws.find_root(manifest_path)?;
+        ws.find_members()?;
+        ws.validate()?;
+        Ok(ws)
+    }

-        let mut ws = Workspace {
+    fn new_default(current_manifest: PathBuf, config: &'cfg Config) -> Workspace<'cfg> {
+        Workspace {
             config,
-            current_manifest: manifest_path.to_path_buf(),
+            current_manifest,
             packages: Packages {
                 config,
                 packages: HashMap::new(),
             },
             root_manifest: None,
-            target_dir,
+            target_dir: None,
             members: Vec::new(),
             member_ids: HashSet::new(),
             default_members: Vec::new(),
@@ -156,10 +163,24 @@ impl<'cfg> Workspace<'cfg> {
             require_optional_deps: true,
             loaded_packages: RefCell::new(HashMap::new()),
             ignore_lock: false,
-        };
-        ws.root_manifest = ws.find_root(manifest_path)?;
+        }
+    }
+
+    pub fn new_virtual(
+        root_path: PathBuf,
+        current_manifest: PathBuf,
+        manifest: VirtualManifest,
+        config: &'cfg Config,
+    ) -> CargoResult<Workspace<'cfg>> {
+        let mut ws = Workspace::new_default(current_manifest, config);
+        ws.root_manifest = Some(root_path.join("Cargo.toml"));
+        ws.target_dir = config.target_dir()?;
+        ws.packages
+            .packages
+            .insert(root_path, MaybePackage::Virtual(manifest));
         ws.find_members()?;
-        ws.validate()?;
+        // TODO: validation does not work because it walks up the directory
+        // tree looking for the root which is a fake file that doesn't exist.
         Ok(ws)
     }
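The workspace constructors above (and the ephemeral one in the next hunk) now follow a "default constructor plus specialization" pattern: private `new_default` builds a fully-defaulted value, and each public constructor overrides only what it needs. A toy standalone reduction of that shape, with illustrative fields that are not cargo's:

    struct Workspace {
        current_manifest: String,
        is_ephemeral: bool,
        members: Vec<String>,
    }

    impl Workspace {
        // One place owns the defaults; specializations cannot forget a field.
        fn new_default(current_manifest: String) -> Workspace {
            Workspace {
                current_manifest,
                is_ephemeral: false,
                members: Vec::new(),
            }
        }

        fn new_ephemeral(manifest: String) -> Workspace {
            let mut ws = Workspace::new_default(manifest);
            ws.is_ephemeral = true;
            ws.members.push(ws.current_manifest.clone());
            ws
        }
    }

    fn main() {
        let ws = Workspace::new_ephemeral("pkg/Cargo.toml".to_string());
        assert!(ws.is_ephemeral);
        assert_eq!(ws.members.len(), 1);
    }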
@@ -178,37 +199,21 @@ impl<'cfg> Workspace<'cfg> {
         target_dir: Option<Filesystem>,
         require_optional_deps: bool,
     ) -> CargoResult<Workspace<'cfg>> {
-        let mut ws = Workspace {
-            config,
-            current_manifest: package.manifest_path().to_path_buf(),
-            packages: Packages {
-                config,
-                packages: HashMap::new(),
-            },
-            root_manifest: None,
-            target_dir: None,
-            members: Vec::new(),
-            member_ids: HashSet::new(),
-            default_members: Vec::new(),
-            is_ephemeral: true,
-            require_optional_deps,
-            loaded_packages: RefCell::new(HashMap::new()),
-            ignore_lock: false,
+        let mut ws = Workspace::new_default(package.manifest_path().to_path_buf(), config);
+        ws.is_ephemeral = true;
+        ws.require_optional_deps = require_optional_deps;
+        let key = ws.current_manifest.parent().unwrap();
+        let id = package.package_id();
+        let package = MaybePackage::Package(package);
+        ws.packages.packages.insert(key.to_path_buf(), package);
+        ws.target_dir = if let Some(dir) = target_dir {
+            Some(dir)
+        } else {
+            ws.config.target_dir()?
         };
-        {
-            let key = ws.current_manifest.parent().unwrap();
-            let id = package.package_id();
-            let package = MaybePackage::Package(package);
-            ws.packages.packages.insert(key.to_path_buf(), package);
-            ws.target_dir = if let Some(dir) = target_dir {
-                Some(dir)
-            } else {
-                ws.config.target_dir()?
-            };
-            ws.members.push(ws.current_manifest.clone());
-            ws.member_ids.insert(id);
-            ws.default_members.push(ws.current_manifest.clone());
-        }
+        ws.members.push(ws.current_manifest.clone());
+        ws.member_ids.insert(id);
+        ws.default_members.push(ws.current_manifest.clone());
         Ok(ws)
     }
diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs
index af15ab26525..957ceb2429d 100644
--- a/src/cargo/ops/cargo_clean.rs
+++ b/src/cargo/ops/cargo_clean.rs
@@ -2,6 +2,7 @@ use std::collections::HashMap;
 use std::fs;
 use std::path::Path;

+use crate::core::compiler::unit_dependencies;
 use crate::core::compiler::UnitInterner;
 use crate::core::compiler::{BuildConfig, BuildContext, CompileMode, Context, Kind};
 use crate::core::profiles::UnitFor;
@@ -56,7 +57,6 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
     build_config.release = opts.release;
     let bcx = BuildContext::new(
         ws,
-        &resolve,
         &packages,
         opts.config,
         &build_config,
@@ -93,14 +93,20 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> {
                         opts.release,
                     )
                 };
-                units.push(bcx.units.intern(pkg, target, profile, *kind, *mode));
+                let features = resolve.features_sorted(pkg.package_id());
+                units.push(
+                    bcx.units
+                        .intern(pkg, target, profile, *kind, *mode, features),
+                );
                 }
             }
         }
     }

-    let mut cx = Context::new(config, &bcx)?;
+    let unit_dependencies =
+        unit_dependencies::build_unit_dependencies(&bcx, &resolve, None, &units, &[])?;
+    let mut cx = Context::new(config, &bcx, unit_dependencies)?;
     cx.prepare_units(None, &units)?;

     for unit in units.iter() {
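The `features_sorted` call above threads each package's enabled features into the interned `Unit`, making features part of a unit's identity. The list has to arrive in a stable order, because an unordered feature set that hashed differently from run to run would make identical units look distinct. A minimal sketch of why the ordering matters; the `BTreeSet` here is an illustrative stand-in, not cargo's implementation:

    use std::collections::hash_map::DefaultHasher;
    use std::collections::BTreeSet;
    use std::hash::{Hash, Hasher};

    // Hash a feature list in a stable order so that the same feature set
    // always produces the same fingerprint.
    fn feature_fingerprint(features: &[&str]) -> u64 {
        let sorted: BTreeSet<&str> = features.iter().copied().collect();
        let mut hasher = DefaultHasher::new();
        sorted.hash(&mut hasher);
        hasher.finish()
    }

    fn main() {
        assert_eq!(
            feature_fingerprint(&["serde", "rayon"]),
            feature_fingerprint(&["rayon", "serde"])
        );
    }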
diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs
index dde087e0406..76e885b01bc 100644
--- a/src/cargo/ops/cargo_compile.rs
+++ b/src/cargo/ops/cargo_compile.rs
@@ -7,7 +7,8 @@
 //! rough outline is:
 //!
 //! - Resolve the dependency graph (see `ops::resolve`).
-//! - Download any packages needed (see `PackageSet`).
+//! - Download any packages needed (see `PackageSet`). Note that dependency
+//!   downloads are deferred until `build_unit_dependencies`.
 //! - Generate a list of top-level "units" of work for the targets the user
 //!   requested on the command-line. Each `Unit` corresponds to a compiler
 //!   invocation. This is done in this module (`generate_targets`).
@@ -27,6 +28,8 @@ use std::iter::FromIterator;
 use std::path::PathBuf;
 use std::sync::Arc;

+use crate::core::compiler::standard_lib;
+use crate::core::compiler::unit_dependencies::build_unit_dependencies;
 use crate::core::compiler::{BuildConfig, BuildContext, Compilation, Context};
 use crate::core::compiler::{CompileMode, Kind, Unit};
 use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner};
@@ -297,16 +300,38 @@ pub fn compile_ws<'a>(
         Kind::Host
     };

+    let profiles = ws.profiles();
+
     let specs = spec.to_package_id_specs(ws)?;
     let dev_deps = ws.require_optional_deps() || filter.need_dev_deps(build_config.mode);
     let opts = ResolveOpts::new(dev_deps, features, all_features, !no_default_features);
     let resolve = ops::resolve_ws_with_opts(ws, opts, &specs)?;
-    let (packages, resolve_with_overrides) = resolve;
+    let (mut packages, resolve_with_overrides) = resolve;
+
+    let std_resolve = if let Some(crates) = &config.cli_unstable().build_std {
+        if build_config.requested_target.is_none() {
+            // TODO: This should eventually be fixed. Unfortunately it is not
+            // easy to get the host triple in BuildConfig. Consider changing
+            // requested_target to an enum, or some other approach.
+            failure::bail!("-Zbuild-std requires --target");
+        }
+        let (std_package_set, std_resolve) = standard_lib::resolve_std(ws, crates)?;
+        packages.add_set(std_package_set);
+        Some(std_resolve)
+    } else {
+        None
+    };

+    // Find the packages in the resolver that the user wants to build (those
+    // passed in with `-p` or the defaults from the workspace), and convert
+    // Vec<PackageIdSpec> to a Vec<&PackageId>.
     let to_build_ids = specs
         .iter()
         .map(|s| s.query(resolve_with_overrides.iter()))
         .collect::<CargoResult<Vec<_>>>()?;
+    // Now get the `Package` for each `PackageId`. This may trigger a download
+    // if the user specified `-p` for a dependency that is not downloaded.
+    // Dependencies will be downloaded during build_unit_dependencies.
     let mut to_builds = packages.get_many(to_build_ids)?;

     // The ordering here affects some error messages coming out of cargo, so
@@ -315,7 +340,7 @@ pub fn compile_ws<'a>(
     to_builds.sort_by_key(|p| p.package_id());

     for pkg in to_builds.iter() {
-        pkg.manifest().print_teapot(ws.config());
+        pkg.manifest().print_teapot(config);

         if build_config.mode.is_any_test()
             && !ws.is_member(pkg)
@@ -342,13 +367,11 @@ pub fn compile_ws<'a>(
         );
     }

-    let profiles = ws.profiles();
     profiles.validate_packages(&mut config.shell(), &packages)?;

     let interner = UnitInterner::new();
     let mut bcx = BuildContext::new(
         ws,
-        &resolve_with_overrides,
         &packages,
         config,
         build_config,
@@ -366,6 +389,21 @@ pub fn compile_ws<'a>(
         &bcx,
     )?;

+    let std_roots = if let Some(crates) = &config.cli_unstable().build_std {
+        // Only build libtest if it looks like it is needed.
+        let mut crates = crates.clone();
+        if !crates.iter().any(|c| c == "test")
+            && units
+                .iter()
+                .any(|unit| unit.mode.is_rustc_test() && unit.target.harness())
+        {
+            crates.push("test".to_string());
+        }
+        standard_lib::generate_std_roots(&bcx, &crates, std_resolve.as_ref().unwrap())?
+    } else {
+        Vec::new()
+    };
+
     if let Some(args) = extra_args {
         if units.len() != 1 {
             failure::bail!(
@@ -385,9 +423,17 @@ pub fn compile_ws<'a>(
         }
     }

+    let unit_dependencies = build_unit_dependencies(
+        &bcx,
+        &resolve_with_overrides,
+        std_resolve.as_ref(),
+        &units,
+        &std_roots,
+    )?;
+
     let ret = {
         let _p = profile::start("compiling");
-        let cx = Context::new(config, &bcx)?;
+        let cx = Context::new(config, &bcx, unit_dependencies)?;
         cx.compile(&units, export_dir.clone(), exec)?
     };

@@ -580,7 +626,7 @@ fn generate_targets<'a>(
     packages: &[&'a Package],
     filter: &CompileFilter,
     default_arch_kind: Kind,
-    resolve: &Resolve,
+    resolve: &'a Resolve,
     bcx: &BuildContext<'a, '_>,
 ) -> CargoResult<Vec<Unit<'a>>> {
     // Helper for creating a `Unit` struct.
@@ -652,7 +698,9 @@ fn generate_targets<'a>(
             target_mode,
             bcx.build_config.release,
         );
-        bcx.units.intern(pkg, target, profile, kind, target_mode)
+        let features = resolve.features_sorted(pkg.package_id());
+        bcx.units
+            .intern(pkg, target, profile, kind, target_mode, features)
     };

     // Create a list of proposed targets.
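The `std_roots` block above encodes one subtle policy: `test` is appended to the `-Zbuild-std` crate list only when it was not requested explicitly and some unit actually compiles with `--test` under the default harness. A standalone reduction of that predicate; the `Unit` struct here is an illustrative stand-in, not cargo's:

    struct Unit {
        is_rustc_test: bool,
        harness: bool,
    }

    // Add "test" only when it is not already requested and some unit
    // is a harness test that will link against libtest.
    fn std_crates(mut crates: Vec<String>, units: &[Unit]) -> Vec<String> {
        if !crates.iter().any(|c| c == "test")
            && units.iter().any(|u| u.is_rustc_test && u.harness)
        {
            crates.push("test".to_string());
        }
        crates
    }

    fn main() {
        let units = [Unit { is_rustc_test: true, harness: true }];
        let crates = std_crates(vec!["std".to_string()], &units);
        assert_eq!(crates, ["std", "test"]);
    }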
diff --git a/src/cargo/ops/cargo_test.rs b/src/cargo/ops/cargo_test.rs
index 74433005a30..1079df47efa 100644
--- a/src/cargo/ops/cargo_test.rs
+++ b/src/cargo/ops/cargo_test.rs
@@ -185,7 +185,7 @@ fn run_doc_tests(
         }

         for &(ref extern_crate_name, ref lib) in deps.iter() {
-            let mut arg = OsString::from(extern_crate_name);
+            let mut arg = OsString::from(extern_crate_name.as_str());
             arg.push("=");
             arg.push(lib);
             p.arg("--extern").arg(&arg);
diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs
index dd4da3e6e7c..d66ea182c1c 100644
--- a/src/cargo/util/toml/mod.rs
+++ b/src/cargo/util/toml/mod.rs
@@ -895,6 +895,17 @@ impl TomlManifest {
             ));
         }

+        if let Some(links) = &project.links {
+            if !targets.iter().any(|t| t.is_custom_build()) {
+                bail!(
+                    "package `{}` specifies that it links to `{}` but does not \
+                     have a custom build script",
+                    pkgid,
+                    links
+                )
+            }
+        }
+
         let mut deps = Vec::new();
         let replace;
         let patch;
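Concretely, the manifest-level check above rejects, at parse time, a manifest like the following minimal reproduction (the name and version are arbitrary; the expected wording is taken from the updated test below):

    [package]
    name = "foo"
    version = "0.5.0"
    links = "a"
    # With no build.rs next to this manifest, parsing now fails with:
    # package `foo v0.5.0 (...)` specifies that it links to `a` but does
    # not have a custom build script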
diff --git a/tests/testsuite/build_script.rs b/tests/testsuite/build_script.rs
index ab3db4ab298..04db56f9054 100644
--- a/tests/testsuite/build_script.rs
+++ b/tests/testsuite/build_script.rs
@@ -333,7 +333,10 @@ fn links_no_build_cmd() {
         .with_status(101)
         .with_stderr(
             "\
-[ERROR] package `foo v0.5.0 ([CWD])` specifies that it links to `a` but does \
+[ERROR] failed to parse manifest at `[..]/foo/Cargo.toml`
+
+Caused by:
+  package `foo v0.5.0 ([CWD])` specifies that it links to `a` but does \
 not have a custom build script
 ",
         )
@@ -388,6 +391,61 @@ failed to select a version for `a-sys` which could resolve this conflict
 ").run();
 }

+#[cargo_test]
+fn links_duplicates_old_registry() {
+    // Test old links validator. See `validate_links`.
+    Package::new("bar", "0.1.0")
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            links = "a"
+            "#,
+        )
+        .file("build.rs", "fn main() {}")
+        .file("src/lib.rs", "")
+        .publish();
+
+    let p = project()
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            links = "a"
+
+            [dependencies]
+            bar = "0.1"
+            "#,
+        )
+        .file("build.rs", "fn main() {}")
+        .file("src/lib.rs", "")
+        .build();
+
+    p.cargo("build")
+        .with_status(101)
+        .with_stderr(
+            "\
+[UPDATING] `[..]` index
+[DOWNLOADING] crates ...
+[DOWNLOADED] bar v0.1.0 ([..])
+[ERROR] multiple packages link to native library `a`, \
+but a native library can be linked only once
+
+package `bar v0.1.0`
+    ... which is depended on by `foo v0.1.0 ([..]foo)`
+links to native library `a`
+
+package `foo v0.1.0 ([..]foo)`
+also links to native library `a`
+",
+        )
+        .run();
+}
+
 #[cargo_test]
 fn links_duplicates_deep_dependency() {
     // this tests that the links_duplicates are caught at resolver time
diff --git a/tests/testsuite/main.rs b/tests/testsuite/main.rs
index a3b21245996..60002361b49 100644
--- a/tests/testsuite/main.rs
+++ b/tests/testsuite/main.rs
@@ -91,6 +91,7 @@ mod rustflags;
 mod search;
 mod shell_quoting;
 mod small_fd_limits;
+mod standard_lib;
 mod test;
 mod tool_paths;
 mod update;
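The new test file below drives the flag both bare (`-Zbuild-std`) and with a list (`-Zbuild-std=core`, `-Zbuild-std=std,proc_macro`). The body of `standard_lib::parse_unstable_flag` is not part of this diff; a hypothetical sketch consistent with how the tests use it, not the actual function:

    // Hypothetical stand-in for `standard_lib::parse_unstable_flag`:
    // a comma-separated crate list, defaulting to ["std"] when the
    // flag is given without a value.
    fn parse_unstable_flag(value: Option<&str>) -> Vec<String> {
        value
            .unwrap_or("std")
            .split(',')
            .map(str::to_string)
            .collect()
    }

    fn main() {
        assert_eq!(parse_unstable_flag(None), ["std"]);
        assert_eq!(
            parse_unstable_flag(Some("std,proc_macro")),
            ["std", "proc_macro"]
        );
    }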
diff --git a/tests/testsuite/standard_lib.rs b/tests/testsuite/standard_lib.rs
new file mode 100644
index 00000000000..3c7b5823397
--- /dev/null
+++ b/tests/testsuite/standard_lib.rs
@@ -0,0 +1,272 @@
+use crate::support::{is_nightly, paths, project, rustc_host, Execs, Project};
+
+fn cargo_build_std(project: &Project, cmd: &str, crates: &str) -> Execs {
+    let unstable = if crates.is_empty() {
+        "-Zbuild-std".to_string()
+    } else {
+        format!("-Zbuild-std={}", crates)
+    };
+    let target = paths::root().join("target");
+    let mut execs = project.cargo(cmd);
+    if !cmd.contains("--target") {
+        execs.arg("--target").arg(rustc_host());
+    }
+    execs
+        .arg(unstable)
+        .arg("-Zno-index-update")
+        .env_remove("CARGO_HOME")
+        .env_remove("HOME")
+        .env("CARGO_TARGET_DIR", target.as_os_str())
+        .masquerade_as_nightly_cargo();
+    execs
+}
+
+#[cargo_test]
+fn std_lib() {
+    if !is_nightly() {
+        // -Zbuild-std is nightly
+        // -Zno-index-update is nightly
+        // We don't want these tests to run on rust-lang/rust.
+        return;
+    }
+    simple_lib_std();
+    simple_bin_std();
+    lib_nostd();
+    check_core();
+    cross_custom();
+    hashbrown();
+    libc();
+    test();
+    target_proc_macro();
+    bench();
+    doc();
+    check_std();
+    doctest();
+}
+
+fn simple_lib_std() {
+    let p = project().file("src/lib.rs", "").build();
+    cargo_build_std(&p, "build -v", "")
+        .with_stderr_contains("[RUNNING] `rustc [..]--crate-name std [..]")
+        .run();
+    // Check freshness.
+    p.change_file("src/lib.rs", " ");
+    cargo_build_std(&p, "build -v", "std")
+        .with_stderr_contains("[FRESH] std[..]")
+        .run();
+}
+
+fn simple_bin_std() {
+    let p = project().file("src/main.rs", "fn main() {}").build();
+    cargo_build_std(&p, "run -v", "std").run();
+}
+
+fn lib_nostd() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            #![no_std]
+            pub fn foo() {
+                assert_eq!(core::u8::MIN, 0);
+            }
+            "#,
+        )
+        .build();
+    cargo_build_std(&p, "build -v --lib", "core")
+        .with_stderr_does_not_contain("[..]libstd[..]")
+        .run();
+}
+
+fn check_core() {
+    let p = project()
+        .file("src/lib.rs", "#![no_std] fn unused_fn() {}")
+        .build();
+
+    cargo_build_std(&p, "check -v", "core")
+        .with_stderr_contains("[WARNING] [..]unused_fn[..]`")
+        .run();
+}
+
+fn cross_custom() {
+    let p = project()
+        .file("src/lib.rs", "#![no_std] pub fn f() {}")
+        .file(
+            "custom-target.json",
+            r#"
+            {
+                "llvm-target": "x86_64-unknown-none-gnu",
+                "data-layout": "e-m:e-i64:64-f80:128-n8:16:32:64-S128",
+                "arch": "x86_64",
+                "target-endian": "little",
+                "target-pointer-width": "64",
+                "target-c-int-width": "32",
+                "os": "none",
+                "linker-flavor": "ld.lld"
+            }
+            "#,
+        )
+        .build();
+
+    cargo_build_std(&p, "build --target custom-target.json -v", "core").run();
+}
+
+fn hashbrown() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            pub fn f() -> hashbrown::HashMap<i32, i32> {
+                hashbrown::HashMap::new()
+            }
+            "#,
+        )
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            edition = "2018"
+
+            [dependencies]
+            hashbrown = "=0.4.0"
+            "#,
+        )
+        .build();
+
+    cargo_build_std(&p, "build -v", "std").run();
+}
+
+fn libc() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            pub fn f() -> ! {
+                unsafe { libc::exit(123); }
+            }
+            "#,
+        )
+        .file(
+            "Cargo.toml",
+            r#"
+            [package]
+            name = "foo"
+            version = "0.1.0"
+            edition = "2018"
+
+            [dependencies]
+            libc = "=0.2.54"
+            "#,
+        )
+        .build();
+
+    cargo_build_std(&p, "build -v", "std").run();
+}
+
+fn test() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            #[cfg(test)]
+            mod tests {
+                #[test]
+                fn it_works() {
+                    assert_eq!(2 + 2, 4);
+                }
+            }
+            "#,
+        )
+        .build();
+
+    cargo_build_std(&p, "test -v", "std")
+        .with_stdout_contains("test tests::it_works ... ok")
+        .run();
+}
+
+fn target_proc_macro() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            extern crate proc_macro;
+            pub fn f() {
+                let _ts = proc_macro::TokenStream::new();
+            }
+            "#,
+        )
+        .build();
+
+    cargo_build_std(&p, "build -v", "std,proc_macro").run();
+}
+
+fn bench() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            #![feature(test)]
+            extern crate test;
+
+            #[bench]
+            fn b1(b: &mut test::Bencher) {
+                b.iter(|| ())
+            }
+            "#,
+        )
+        .build();
+
+    cargo_build_std(&p, "bench -v", "std").run();
+}
+
+fn doc() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            /// Doc
+            pub fn f() -> Result<(), ()> {Ok(())}
+            "#,
+        )
+        .build();
+
+    cargo_build_std(&p, "doc -v", "std").run();
+}
+
+fn check_std() {
+    let p = project()
+        .file("src/lib.rs", "pub fn f() {}")
+        .file("src/main.rs", "fn main() {}")
+        .file(
+            "tests/t1.rs",
+            r#"
+            #[test]
+            fn t1() {
+                assert_eq!(1, 2);
+            }
+            "#,
+        )
+        .build();
+
+    cargo_build_std(&p, "check -v --all-targets", "std").run();
+    cargo_build_std(&p, "check -v --all-targets --profile=test", "std").run();
+}
+
+fn doctest() {
+    let p = project()
+        .file(
+            "src/lib.rs",
+            r#"
+            /// Doc
+            /// ```
+            /// assert_eq!(1, 1);
+            /// ```
+            pub fn f() {}
+            "#,
+        )
+        .build();
+
+    cargo_build_std(&p, "test --doc -v", "std").run();
+}