diff --git a/src/bin/bench.rs b/src/bin/bench.rs
index d910784e6bb..735b2e0d4e6 100644
--- a/src/bin/bench.rs
+++ b/src/bin/bench.rs
@@ -50,20 +50,31 @@ pub fn execute(options: Options, config: &Config) -> CliResult> {
     let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
     config.shell().set_verbose(options.flag_verbose);
 
+    let mut benches = Vec::new();
+    if let Some(s) = options.flag_bench {
+        benches.push(s);
+    }
+
     let ops = ops::TestOptions {
-        name: options.flag_bench.as_ref().map(|s| &s[..]),
         no_run: options.flag_no_run,
         compile_opts: ops::CompileOptions {
-            env: "bench",
             config: config,
             jobs: options.flag_jobs,
             target: options.flag_target.as_ref().map(|s| &s[..]),
-            dev_deps: true,
             features: &options.flag_features,
             no_default_features: options.flag_no_default_features,
             spec: options.flag_package.as_ref().map(|s| &s[..]),
-            lib_only: false,
             exec_engine: None,
+            release: true,
+            mode: ops::CompileMode::Bench,
+            filter: if benches.is_empty() {
+                ops::CompileFilter::Everything
+            } else {
+                ops::CompileFilter::Only {
+                    lib: false, bins: &[], examples: &[], tests: &[],
+                    benches: &benches,
+                }
+            },
         },
     };
 
diff --git a/src/bin/build.rs b/src/bin/build.rs
index 3408f40774d..4ba7de2921a 100644
--- a/src/bin/build.rs
+++ b/src/bin/build.rs
@@ -47,28 +47,29 @@ the --release flag will use the `release` profile instead.
 ";
 
 pub fn execute(options: Options, config: &Config) -> CliResult> {
-    debug!("executing; cmd=cargo-build; args={:?}", env::args().collect::>());
+    debug!("executing; cmd=cargo-build; args={:?}",
+           env::args().collect::>());
     config.shell().set_verbose(options.flag_verbose);
 
     let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
 
-    let env = if options.flag_release {
-        "release"
-    } else {
-        "compile"
-    };
-
     let opts = CompileOptions {
-        env: env,
         config: config,
         jobs: options.flag_jobs,
         target: options.flag_target.as_ref().map(|t| &t[..]),
-        dev_deps: false,
        features: &options.flag_features,
        no_default_features: options.flag_no_default_features,
        spec: options.flag_package.as_ref().map(|s| &s[..]),
-        lib_only: options.flag_lib,
        exec_engine: None,
+        mode: ops::CompileMode::Build,
+        release: options.flag_release,
+        filter: if options.flag_lib {
+            ops::CompileFilter::Only {
+                lib: true, bins: &[], examples: &[], benches: &[], tests: &[]
+            }
+        } else {
+            ops::CompileFilter::Everything
+        },
     };
 
     ops::compile(&root, &opts).map(|_| None).map_err(|err| {
diff --git a/src/bin/cargo.rs b/src/bin/cargo.rs
index 0ad3a53c2d8..7c04323cdfe 100644
--- a/src/bin/cargo.rs
+++ b/src/bin/cargo.rs
@@ -105,7 +105,7 @@ fn execute(flags: Flags, config: &Config) -> CliResult> {
         // For the commands `cargo` and `cargo help`, re-execute ourselves as
         // `cargo -h` so we can go through the normal process of printing the
         // help message.
- "" | "help" if flags.arg_args.len() == 0 => { + "" | "help" if flags.arg_args.is_empty() => { config.shell().set_verbose(true); let args = &["cargo".to_string(), "-h".to_string()]; let r = cargo::call_main_without_stdin(execute, config, USAGE, args, diff --git a/src/bin/doc.rs b/src/bin/doc.rs index dbb1b5ab4f9..6524ff52dbc 100644 --- a/src/bin/doc.rs +++ b/src/bin/doc.rs @@ -46,19 +46,20 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); let mut doc_opts = ops::DocOptions { - all: !options.flag_no_deps, open_result: options.flag_open, compile_opts: ops::CompileOptions { - env: if options.flag_no_deps {"doc"} else {"doc-all"}, config: config, jobs: options.flag_jobs, target: None, - dev_deps: false, features: &options.flag_features, no_default_features: options.flag_no_default_features, spec: options.flag_package.as_ref().map(|s| &s[..]), - lib_only: false, exec_engine: None, + filter: ops::CompileFilter::Everything, + release: false, + mode: ops::CompileMode::Doc { + deps: !options.flag_no_deps, + }, }, }; diff --git a/src/bin/run.rs b/src/bin/run.rs index f786832a2b1..9ff8728fa3a 100644 --- a/src/bin/run.rs +++ b/src/bin/run.rs @@ -1,6 +1,5 @@ use cargo::ops; -use cargo::core::manifest::TargetKind; -use cargo::util::{CliResult, CliError, human, Config}; +use cargo::util::{CliResult, CliError, Config}; use cargo::util::important_paths::{find_root_manifest_for_cwd}; #[derive(RustcDecodable)] @@ -47,36 +46,35 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { config.shell().set_verbose(options.flag_verbose); let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); - let env = match (options.flag_release, options.flag_example.is_some()) { - (true, _) => "release", - (false, true) => "test", - (false, false) => "compile" - }; + let (mut examples, mut bins) = (Vec::new(), Vec::new()); + if let Some(s) = options.flag_bin { + bins.push(s); + } + if let Some(s) = options.flag_example { + examples.push(s); + } let compile_opts = ops::CompileOptions { - env: env, config: config, jobs: options.flag_jobs, target: options.flag_target.as_ref().map(|t| &t[..]), - dev_deps: true, features: &options.flag_features, no_default_features: options.flag_no_default_features, spec: None, - lib_only: false, exec_engine: None, - }; - - let (target_kind, name) = match (options.flag_bin, options.flag_example) { - (Some(bin), None) => (TargetKind::Bin, Some(bin)), - (None, Some(example)) => (TargetKind::Example, Some(example)), - (None, None) => (TargetKind::Bin, None), - (Some(_), Some(_)) => return Err(CliError::from_boxed( - human("specify either `--bin` or `--example`, not both"), 1)), + release: options.flag_release, + mode: ops::CompileMode::Build, + filter: if examples.is_empty() && bins.is_empty() { + ops::CompileFilter::Everything + } else { + ops::CompileFilter::Only { + lib: false, tests: &[], benches: &[], + bins: &bins, examples: &examples, + } + }, }; let err = try!(ops::run(&root, - target_kind, - name, &compile_opts, &options.arg_args).map_err(|err| { CliError::from_boxed(err, 101) diff --git a/src/bin/test.rs b/src/bin/test.rs index 6c7e18e63e9..1b09e6d2ce0 100644 --- a/src/bin/test.rs +++ b/src/bin/test.rs @@ -9,6 +9,7 @@ struct Options { flag_jobs: Option, flag_manifest_path: Option, flag_test: Option, + flag_bin: Option, flag_no_default_features: bool, flag_no_run: bool, flag_package: Option, @@ -24,7 +25,8 @@ Usage: Options: -h, --help Print this message - --test NAME Name 
of the test executable to run + --test NAME Name of the integration test to run + --bin NAME Name of the binary to run tests for --no-run Compile, but don't run tests -p SPEC, --package SPEC Package to run tests for -j N, --jobs N The number of jobs to run in parallel @@ -52,20 +54,34 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); config.shell().set_verbose(options.flag_verbose); + let (mut tests, mut bins) = (Vec::new(), Vec::new()); + if let Some(s) = options.flag_test { + tests.push(s); + } + if let Some(s) = options.flag_bin { + bins.push(s); + } + let ops = ops::TestOptions { - name: options.flag_test.as_ref().map(|s| &s[..]), no_run: options.flag_no_run, compile_opts: ops::CompileOptions { - env: "test", config: config, jobs: options.flag_jobs, target: options.flag_target.as_ref().map(|s| &s[..]), - dev_deps: true, features: &options.flag_features, no_default_features: options.flag_no_default_features, spec: options.flag_package.as_ref().map(|s| &s[..]), - lib_only: false, exec_engine: None, + release: false, + mode: ops::CompileMode::Test, + filter: if tests.is_empty() && bins.is_empty() { + ops::CompileFilter::Everything + } else { + ops::CompileFilter::Only { + lib: false, examples: &[], benches: &[], + tests: &tests, bins: &bins, + } + } }, }; diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index 2ae63dcb3f1..f572e629645 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -1,4 +1,4 @@ -use std::hash; +use std::default::Default; use std::path::{PathBuf, Path}; use semver::Version; @@ -10,7 +10,7 @@ use core::dependency::SerializedDependency; use util::{CargoResult, human}; /// Contains all the informations about a package, as loaded from a Cargo.toml. -#[derive(PartialEq,Clone, Debug)] +#[derive(Clone, Debug)] pub struct Manifest { summary: Summary, targets: Vec, @@ -21,6 +21,7 @@ pub struct Manifest { exclude: Vec, include: Vec, metadata: ManifestMetadata, + profiles: Profiles, } /// General metadata about a package which is just blindly uploaded to the @@ -104,196 +105,31 @@ impl LibKind { pub enum TargetKind { Lib(Vec), Bin, + Test, + Bench, Example, + CustomBuild, } -#[derive(RustcEncodable, RustcDecodable, Clone, PartialEq, Eq, Debug)] +#[derive(RustcEncodable, RustcDecodable, Clone, PartialEq, Eq, Debug, Hash)] pub struct Profile { - env: String, // compile, test, dev, bench, etc. - opt_level: u32, - lto: bool, - codegen_units: Option, // None = use rustc default - debug: bool, - rpath: bool, - test: bool, - doctest: bool, - doc: bool, - dest: String, - for_host: bool, - harness: bool, // whether to use the test harness (--test) - custom_build: bool, + pub opt_level: u32, + pub lto: bool, + pub codegen_units: Option, // None = use rustc default + pub debuginfo: bool, + pub ndebug: bool, + pub rpath: bool, + pub test: bool, + pub doc: bool, } -impl Profile { - fn default() -> Profile { - Profile { - env: String::new(), - opt_level: 0, - lto: false, - codegen_units: None, - debug: false, - rpath: false, - test: false, - doc: false, - dest: "debug".to_string(), - for_host: false, - doctest: false, - custom_build: false, - harness: true, - } - } - - pub fn default_dev() -> Profile { - Profile { - env: "compile".to_string(), // run in the default environment only - opt_level: 0, - debug: true, - .. Profile::default() - } - } - - pub fn default_test() -> Profile { - Profile { - env: "test".to_string(), - debug: true, - test: true, - .. 
Profile::default() - } - } - - pub fn default_example() -> Profile { - Profile { - test: false, - .. Profile::default_test() - } - } - - pub fn default_bench() -> Profile { - Profile { - env: "bench".to_string(), - test: true, - .. Profile::default_release() - } - } - - pub fn default_release() -> Profile { - Profile { - env: "release".to_string(), - opt_level: 3, - dest: "release".to_string(), - .. Profile::default() - } - } - - pub fn default_doc() -> Profile { - Profile { - env: "doc".to_string(), - doc: true, - .. Profile::default() - } - } - - pub fn codegen_units(&self) -> Option { self.codegen_units } - pub fn debug(&self) -> bool { self.debug } - pub fn env(&self) -> &str { &self.env } - pub fn is_compile(&self) -> bool { self.env == "compile" } - pub fn is_custom_build(&self) -> bool { self.custom_build } - pub fn is_doc(&self) -> bool { self.doc } - pub fn is_doctest(&self) -> bool { self.doctest } - pub fn is_for_host(&self) -> bool { self.for_host } - pub fn is_test(&self) -> bool { self.test } - pub fn lto(&self) -> bool { self.lto } - pub fn opt_level(&self) -> u32 { self.opt_level } - pub fn rpath(&self) -> bool { self.rpath } - pub fn uses_test_harness(&self) -> bool { self.harness } - pub fn dest(&self) -> &str { &self.dest } - - pub fn set_opt_level(mut self, level: u32) -> Profile { - self.opt_level = level; - self - } - - pub fn set_lto(mut self, lto: bool) -> Profile { - self.lto = lto; - self - } - - pub fn set_codegen_units(mut self, units: Option) -> Profile { - self.codegen_units = units; - self - } - - pub fn set_debug(mut self, debug: bool) -> Profile { - self.debug = debug; - self - } - - pub fn set_rpath(mut self, rpath: bool) -> Profile { - self.rpath = rpath; - self - } - - pub fn set_test(mut self, test: bool) -> Profile { - self.test = test; - self - } - - pub fn set_doctest(mut self, doctest: bool) -> Profile { - self.doctest = doctest; - self - } - - pub fn set_doc(mut self, doc: bool) -> Profile { - self.doc = doc; - self - } - - /// Sets whether the `Target` must be compiled for the host instead of the - /// target platform. - pub fn set_for_host(mut self, for_host: bool) -> Profile { - self.for_host = for_host; - self - } - - pub fn set_harness(mut self, harness: bool) -> Profile { - self.harness = harness; - self - } - - /// Sets whether the `Target` is a custom build script. - pub fn set_custom_build(mut self, custom_build: bool) -> Profile { - self.custom_build = custom_build; - self - } -} - -impl hash::Hash for Profile { - fn hash(&self, into: &mut H) { - // Be sure to match all fields explicitly, but ignore those not relevant - // to the actual hash of a profile. - let Profile { - opt_level, - lto, - codegen_units, - debug, - rpath, - for_host, - ref dest, - harness, - - // test flags are separated by file, not by profile hash, and - // env/doc also don't matter for the actual contents of the output - // file, just where the output file is located. - doc: _, - env: _, - test: _, - doctest: _, - - custom_build: _, - } = *self; - (opt_level, lto, codegen_units, debug, - rpath, for_host, dest, harness).hash(into) - } +#[derive(Default, Clone, Debug)] +pub struct Profiles { + pub release: Profile, + pub dev: Profile, + pub test: Profile, + pub bench: Profile, + pub doc: Profile, } /// Informations about a binary, a library, an example, etc. 
that is part of the @@ -303,8 +139,13 @@ pub struct Target { kind: TargetKind, name: String, src_path: PathBuf, - profile: Profile, metadata: Option, + tested: bool, + benched: bool, + doc: bool, + doctest: bool, + harness: bool, // whether to use the test harness (--test) + for_host: bool, } #[derive(RustcEncodable)] @@ -312,7 +153,6 @@ pub struct SerializedTarget { kind: Vec<&'static str>, name: String, src_path: String, - profile: Profile, metadata: Option } @@ -324,13 +164,15 @@ impl Encodable for Target { } TargetKind::Bin => vec!("bin"), TargetKind::Example => vec!["example"], + TargetKind::Test => vec!["test"], + TargetKind::CustomBuild => vec!["custom-build"], + TargetKind::Bench => vec!["bench"], }; SerializedTarget { kind: kind, name: self.name.clone(), src_path: self.src_path.display().to_string(), - profile: self.profile.clone(), metadata: self.metadata.clone() }.encode(s) } @@ -342,7 +184,8 @@ impl Manifest { exclude: Vec, include: Vec, links: Option, - metadata: ManifestMetadata) -> Manifest { + metadata: ManifestMetadata, + profiles: Profiles) -> Manifest { Manifest { summary: summary, targets: targets, @@ -353,6 +196,7 @@ impl Manifest { include: include, links: links, metadata: metadata, + profiles: profiles, } } @@ -368,6 +212,7 @@ impl Manifest { pub fn targets(&self) -> &[Target] { &self.targets } pub fn version(&self) -> &Version { self.package_id().version() } pub fn warnings(&self) -> &[String] { &self.warnings } + pub fn profiles(&self) -> &Profiles { &self.profiles } pub fn links(&self) -> Option<&str> { self.links.as_ref().map(|s| s.as_slice()) } @@ -386,85 +231,106 @@ impl Manifest { } impl Target { - pub fn file_stem(&self) -> String { - match self.metadata { - Some(ref metadata) => format!("{}{}", self.name, - metadata.extra_filename), - None => self.name.clone() + fn blank() -> Target { + Target { + kind: TargetKind::Bin, + name: String::new(), + src_path: PathBuf::new(""), + metadata: None, + doc: false, + doctest: false, + harness: true, + for_host: false, + tested: true, + benched: true, } } pub fn lib_target(name: &str, crate_targets: Vec, - src_path: &Path, profile: &Profile, + src_path: &Path, metadata: Metadata) -> Target { Target { kind: TargetKind::Lib(crate_targets), name: name.to_string(), src_path: src_path.to_path_buf(), - profile: profile.clone(), - metadata: Some(metadata) + metadata: Some(metadata), + doctest: true, + doc: true, + ..Target::blank() } } - pub fn bin_target(name: &str, src_path: &Path, profile: &Profile, + pub fn bin_target(name: &str, src_path: &Path, metadata: Option) -> Target { Target { kind: TargetKind::Bin, name: name.to_string(), src_path: src_path.to_path_buf(), - profile: profile.clone(), metadata: metadata, + doc: true, + ..Target::blank() } } /// Builds a `Target` corresponding to the `build = "build.rs"` entry. 
- pub fn custom_build_target(name: &str, src_path: &Path, profile: &Profile, + pub fn custom_build_target(name: &str, src_path: &Path, metadata: Option) -> Target { Target { - kind: TargetKind::Bin, + kind: TargetKind::CustomBuild, name: name.to_string(), src_path: src_path.to_path_buf(), - profile: profile.clone(), metadata: metadata, + for_host: true, + benched: false, + tested: false, + ..Target::blank() } } - pub fn example_target(name: &str, src_path: &Path, profile: &Profile) -> Target { + pub fn example_target(name: &str, src_path: &Path) -> Target { Target { kind: TargetKind::Example, name: name.to_string(), src_path: src_path.to_path_buf(), - profile: profile.clone(), - metadata: None, + benched: false, + ..Target::blank() } } pub fn test_target(name: &str, src_path: &Path, - profile: &Profile, metadata: Metadata) -> Target { + metadata: Metadata) -> Target { Target { - kind: TargetKind::Bin, + kind: TargetKind::Test, name: name.to_string(), src_path: src_path.to_path_buf(), - profile: profile.clone(), metadata: Some(metadata), + benched: false, + ..Target::blank() } } pub fn bench_target(name: &str, src_path: &Path, - profile: &Profile, metadata: Metadata) -> Target { + metadata: Metadata) -> Target { Target { - kind: TargetKind::Bin, + kind: TargetKind::Bench, name: name.to_string(), src_path: src_path.to_path_buf(), - profile: profile.clone(), metadata: Some(metadata), + tested: false, + ..Target::blank() } } pub fn name(&self) -> &str { &self.name } pub fn src_path(&self) -> &Path { &self.src_path } - pub fn profile(&self) -> &Profile { &self.profile } pub fn metadata(&self) -> Option<&Metadata> { self.metadata.as_ref() } + pub fn kind(&self) -> &TargetKind { &self.kind } + pub fn tested(&self) -> bool { self.tested } + pub fn harness(&self) -> bool { self.harness } + pub fn documented(&self) -> bool { self.doc } + pub fn doctested(&self) -> bool { self.doctest } + pub fn for_host(&self) -> bool { self.for_host } + pub fn benched(&self) -> bool { self.benched } pub fn is_lib(&self) -> bool { match self.kind { @@ -473,52 +339,123 @@ impl Target { } } - pub fn is_dylib(&self) -> bool { + pub fn linkable(&self) -> bool { match self.kind { - TargetKind::Lib(ref kinds) => kinds.iter().any(|&k| k == LibKind::Dylib), + TargetKind::Lib(ref kinds) => { + kinds.iter().any(|k| { + match *k { + LibKind::Lib | LibKind::Rlib | LibKind::Dylib => true, + LibKind::StaticLib => false, + } + }) + } _ => false } } - pub fn is_rlib(&self) -> bool { + pub fn is_bin(&self) -> bool { self.kind == TargetKind::Bin } + pub fn is_example(&self) -> bool { self.kind == TargetKind::Example } + pub fn is_test(&self) -> bool { self.kind == TargetKind::Test } + pub fn is_bench(&self) -> bool { self.kind == TargetKind::Bench } + pub fn is_custom_build(&self) -> bool { self.kind == TargetKind::CustomBuild } + + /// Returns the arguments suitable for `--crate-type` to pass to rustc. 
+ pub fn rustc_crate_types(&self) -> Vec<&'static str> { match self.kind { - TargetKind::Lib(ref kinds) => - kinds.iter().any(|&k| k == LibKind::Rlib || k == LibKind::Lib), - _ => false + TargetKind::Lib(ref kinds) => { + kinds.iter().map(|kind| kind.crate_type()).collect() + }, + TargetKind::CustomBuild | + TargetKind::Bench | + TargetKind::Test | + TargetKind::Example | + TargetKind::Bin => vec!("bin"), } } - pub fn is_staticlib(&self) -> bool { + pub fn can_lto(&self) -> bool { match self.kind { - TargetKind::Lib(ref kinds) => kinds.iter().any(|&k| k == LibKind::StaticLib), - _ => false + TargetKind::Lib(ref v) => *v == [LibKind::StaticLib], + _ => true, } } - /// Returns true for binary, bench, and tests. - pub fn is_bin(&self) -> bool { - match self.kind { - TargetKind::Bin => true, - _ => false + pub fn set_tested(&mut self, tested: bool) -> &mut Target { + self.tested = tested; + self + } + pub fn set_benched(&mut self, benched: bool) -> &mut Target { + self.benched = benched; + self + } + pub fn set_doctest(&mut self, doctest: bool) -> &mut Target { + self.doctest = doctest; + self + } + pub fn set_for_host(&mut self, for_host: bool) -> &mut Target { + self.for_host = for_host; + self + } + pub fn set_harness(&mut self, harness: bool) -> &mut Target { + self.harness = harness; + self + } + pub fn set_doc(&mut self, doc: bool) -> &mut Target { + self.doc = doc; + self + } +} + +impl Profile { + pub fn default_dev() -> Profile { + Profile { + debuginfo: true, + ..Profile::default() } } - /// Returns true for exampels - pub fn is_example(&self) -> bool { - match self.kind { - TargetKind::Example => true, - _ => false + pub fn default_release() -> Profile { + Profile { + opt_level: 3, + debuginfo: false, + ndebug: true, + ..Profile::default() } } - /// Returns the arguments suitable for `--crate-type` to pass to rustc. 
- pub fn rustc_crate_types(&self) -> Vec<&'static str> { - match self.kind { - TargetKind::Lib(ref kinds) => { - kinds.iter().map(|kind| kind.crate_type()).collect() - }, - TargetKind::Example | - TargetKind::Bin => vec!("bin"), + pub fn default_test() -> Profile { + Profile { + test: true, + ..Profile::default_dev() + } + } + + pub fn default_bench() -> Profile { + Profile { + test: true, + ..Profile::default_release() + } + } + + pub fn default_doc() -> Profile { + Profile { + doc: true, + ..Profile::default_dev() + } + } +} + +impl Default for Profile { + fn default() -> Profile { + Profile { + opt_level: 0, + lto: false, + codegen_units: None, + debuginfo: false, + ndebug: false, + rpath: false, + test: false, + doc: false, } } } diff --git a/src/cargo/core/mod.rs b/src/cargo/core/mod.rs index d345e3d67c5..cfd3aebdfd0 100644 --- a/src/cargo/core/mod.rs +++ b/src/cargo/core/mod.rs @@ -1,7 +1,7 @@ pub use self::dependency::Dependency; -pub use self::manifest::{Manifest, Target, TargetKind, Profile}; +pub use self::manifest::{Manifest, Target, TargetKind, Profile, LibKind, Profiles}; pub use self::package::{Package, PackageSet}; -pub use self::package_id::PackageId; +pub use self::package_id::{PackageId, Metadata}; pub use self::package_id_spec::PackageIdSpec; pub use self::registry::Registry; pub use self::resolver::Resolve; diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index 72613af2fbc..e7ed17fa6d9 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -85,7 +85,7 @@ impl Package { } pub fn has_custom_build(&self) -> bool { - self.targets().iter().any(|t| t.profile().is_custom_build()) + self.targets().iter().any(|t| t.is_custom_build()) } } diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs index b9834526a7d..06fac5cc0b1 100644 --- a/src/cargo/ops/cargo_clean.rs +++ b/src/cargo/ops/cargo_clean.rs @@ -3,11 +3,11 @@ use std::fs; use std::io::prelude::*; use std::path::Path; -use core::PackageSet; +use core::{PackageSet, Profiles, Profile}; use core::source::{Source, SourceMap}; use sources::PathSource; use util::{CargoResult, human, ChainError, Config}; -use ops::{self, Layout, Context}; +use ops::{self, Layout, Context, BuildConfig}; pub struct CleanOptions<'a, 'b: 'a> { pub spec: Option<&'a str>, @@ -50,17 +50,23 @@ pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> { // filenames and such let srcs = SourceMap::new(); let pkgs = PackageSet::new(&[]); - let cx = try!(Context::new("compile", &resolve, &srcs, &pkgs, opts.config, + let profiles = Profiles::default(); + let cx = try!(Context::new(&resolve, &srcs, &pkgs, opts.config, Layout::at(root.absolute_target_dir()), - None, &pkg, Default::default())); + None, &pkg, BuildConfig::default(), + &profiles)); // And finally, clean everything out! 
for target in pkg.targets().iter() { - let layout = Layout::new(&root, opts.target, target.profile().dest()); + // TODO: `cargo clean --release` + let layout = Layout::new(&root, opts.target, "debug"); try!(rm_rf(&layout.fingerprint(&pkg))); - for filename in try!(cx.target_filenames(target)).iter() { - try!(rm_rf(&layout.dest().join(&filename))); - try!(rm_rf(&layout.deps().join(&filename))); + let profiles = [Profile::default_dev(), Profile::default_test()]; + for profile in profiles.iter() { + for filename in try!(cx.target_filenames(target, profile)).iter() { + try!(rm_rf(&layout.dest().join(&filename))); + try!(rm_rf(&layout.deps().join(&filename))); + } } } diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index 63de500f16c..e2aec36b4b0 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -30,6 +30,7 @@ use std::sync::Arc; use core::registry::PackageRegistry; use core::{Source, SourceId, PackageSet, Package, Target, PackageId}; +use core::{Profile, TargetKind}; use core::resolver::Method; use ops::{self, BuildOutput, ExecEngine}; use sources::{PathSource}; @@ -38,19 +39,45 @@ use util::{CargoResult, internal, human, ChainError, profile}; /// Contains informations about how a package should be compiled. pub struct CompileOptions<'a, 'b: 'a> { - pub env: &'a str, pub config: &'a Config<'b>, /// Number of concurrent jobs to use. pub jobs: Option, /// The target platform to compile for (example: `i686-unknown-linux-gnu`). pub target: Option<&'a str>, - /// True if dev-dependencies must be compiled. - pub dev_deps: bool, + /// Extra features to build for the root package pub features: &'a [String], + /// Flag if the default feature should be built for the root package pub no_default_features: bool, + /// Root package to build (if None it's the current one) pub spec: Option<&'a str>, - pub lib_only: bool, + /// Filter to apply to the root package to select which targets will be + /// built. + pub filter: CompileFilter<'a>, + /// Engine which drives compilation pub exec_engine: Option>>, + /// Whether this is a release build or not + pub release: bool, + /// Mode for this compile. + pub mode: CompileMode, +} + +#[derive(Copy, PartialEq)] +pub enum CompileMode { + Test, + Build, + Bench, + Doc { deps: bool }, +} + +pub enum CompileFilter<'a> { + Everything, + Only { + lib: bool, + bins: &'a [String], + examples: &'a [String], + tests: &'a [String], + benches: &'a [String], + } } pub fn compile(manifest_path: &Path, @@ -74,9 +101,9 @@ pub fn compile(manifest_path: &Path, pub fn compile_pkg(package: &Package, options: &CompileOptions) -> CargoResult { - let CompileOptions { env, config, jobs, target, spec, - dev_deps, features, no_default_features, - lib_only, ref exec_engine } = *options; + let CompileOptions { config, jobs, target, spec, features, + no_default_features, release, mode, + ref filter, ref exec_engine } = *options; let target = target.map(|s| s.to_string()); let features = features.iter().flat_map(|s| { @@ -108,10 +135,10 @@ pub fn compile_pkg(package: &Package, options: &CompileOptions) try!(registry.add_overrides(override_ids)); - let platform = target.as_ref().map(|e| e.as_slice()).or(Some(rustc_host.as_slice())); + let platform = target.as_ref().map(|e| &e[..]).or(Some(&rustc_host[..])); let method = Method::Required{ - dev_deps: dev_deps, + dev_deps: true, // TODO: remove this option? 
features: &features, uses_default_features: !no_default_features, target_platform: platform}; @@ -135,31 +162,138 @@ pub fn compile_pkg(package: &Package, options: &CompileOptions) None => package.package_id(), }; let to_build = packages.iter().find(|p| p.package_id() == pkgid).unwrap(); - let targets = to_build.targets().iter().filter(|target| { - target.profile().is_custom_build() || match env { - // doc-all == document everything, so look for doc targets - "doc" | "doc-all" => target.profile().env() == "doc", - env => target.profile().env() == env, - } - }).filter(|target| !lib_only || target.is_lib()).collect::>(); - - if lib_only && targets.len() == 0 { - return Err(human("There is no lib to build, remove `--lib` flag".to_string())); - } + let targets = try!(generate_targets(to_build, mode, filter, release)); let ret = { let _p = profile::start("compiling"); - let lib_overrides = try!(scrape_build_config(config, jobs, target)); + let mut build_config = try!(scrape_build_config(config, jobs, target)); + build_config.exec_engine = exec_engine.clone(); + build_config.release = release; + if let CompileMode::Doc { deps } = mode { + build_config.doc_all = deps; + } - try!(ops::compile_targets(&env, &targets, to_build, + try!(ops::compile_targets(&targets, to_build, &PackageSet::new(&packages), &resolve_with_overrides, &sources, - config, lib_overrides, exec_engine.clone())) + config, + build_config, + to_build.manifest().profiles())) }; return Ok(ret); } +impl<'a> CompileFilter<'a> { + pub fn matches(&self, target: &Target) -> bool { + match *self { + CompileFilter::Everything => true, + CompileFilter::Only { lib, bins, examples, tests, benches } => { + let list = match *target.kind() { + TargetKind::Bin => bins, + TargetKind::Test => tests, + TargetKind::Bench => benches, + TargetKind::Example => examples, + TargetKind::Lib(..) => return lib, + TargetKind::CustomBuild => return false, + }; + list.iter().any(|x| *x == target.name()) + } + } + } +} + +/// Given the configuration for a build, this function will generate all +/// target/profile combinations needed to be built. +fn generate_targets<'a>(pkg: &'a Package, + mode: CompileMode, + filter: &CompileFilter, + release: bool) + -> CargoResult> { + let profiles = pkg.manifest().profiles(); + let build = if release {&profiles.release} else {&profiles.dev}; + let test = if release {&profiles.bench} else {&profiles.test}; + let profile = match mode { + CompileMode::Test => test, + CompileMode::Bench => &profiles.bench, + CompileMode::Build => build, + CompileMode::Doc { .. } => &profiles.doc, + }; + return match *filter { + CompileFilter::Everything => { + match mode { + CompileMode::Bench => { + Ok(pkg.targets().iter().filter(|t| t.benched()).map(|t| { + (t, profile) + }).collect::>()) + } + CompileMode::Test => { + let mut base = pkg.targets().iter().filter(|t| { + t.tested() + }).map(|t| { + (t, if t.is_example() {build} else {profile}) + }).collect::>(); + + // Always compile the library if we're testing everything as + // it'll be needed for doctests + if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) { + if t.doctested() { + base.push((t, build)); + } + } + Ok(base) + } + CompileMode::Build => { + Ok(pkg.targets().iter().filter(|t| { + t.is_bin() || t.is_lib() + }).map(|t| (t, profile)).collect()) + } + CompileMode::Doc { .. 
} => { + Ok(pkg.targets().iter().filter(|t| t.documented()) + .map(|t| (t, profile)).collect()) + } + } + } + CompileFilter::Only { lib, bins, examples, tests, benches } => { + let mut targets = Vec::new(); + + if lib { + if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) { + targets.push((t, profile)); + } else { + return Err(human(format!("no library targets found"))) + } + } + + { + let mut find = |names: &[String], desc, kind, profile| { + for name in names { + let target = pkg.targets().iter().find(|t| { + t.name() == *name && *t.kind() == kind + }); + let t = match target { + Some(t) => t, + None => return Err(human(format!("no {} target \ + named `{}`", + desc, name))), + }; + debug!("found {} `{}`", desc, name); + targets.push((t, profile)); + } + Ok(()) + }; + try!(find(bins, "bin", TargetKind::Bin, profile)); + try!(find(examples, "example", TargetKind::Example, build)); + try!(find(tests, "test", TargetKind::Test, test)); + try!(find(benches, "bench", TargetKind::Bench, &profiles.bench)); + } + Ok(targets) + } + }; +} + +/// Read the `paths` configuration variable to discover all path overrides that +/// have been configured. fn source_ids_from_config(config: &Config, cur_path: &Path) -> CargoResult> { @@ -185,9 +319,17 @@ fn source_ids_from_config(config: &Config, cur_path: &Path) }).map(|p| SourceId::for_path(&p)).collect() } +/// Parse all config files to learn about build configuration. Currently +/// configured options are: +/// +/// * build.jobs +/// * target.$target.ar +/// * target.$target.linker +/// * target.$target.libfoo.metadata fn scrape_build_config(config: &Config, jobs: Option, - target: Option) -> CargoResult { + target: Option) + -> CargoResult { let cfg_jobs = match try!(config.get_i64("build.jobs")) { Some((n, p)) => { match n.to_u32() { @@ -251,7 +393,8 @@ fn scrape_target_config(config: &Config, triple: &str) match try!(config.get(&key)).unwrap() { ConfigValue::String(v, path) => { if k == "rustc-flags" { - let whence = format!("in `{}` (in {:?})", key, path); + let whence = format!("in `{}` (in {})", key, + path.display()); let (paths, links) = try!( BuildOutput::parse_rustc_flags(&v, &whence) ); @@ -263,14 +406,18 @@ fn scrape_target_config(config: &Config, triple: &str) }, ConfigValue::List(a, p) => { if k == "rustc-link-lib" { - output.library_links.extend(a.into_iter().map(|(v, _)| v)); + output.library_links.extend(a.into_iter().map(|v| v.0)); } else if k == "rustc-link-search" { - output.library_paths.extend(a.into_iter().map(|(v, _)| PathBuf::new(&v))); + output.library_paths.extend(a.into_iter().map(|v| { + PathBuf::new(&v.0) + })); } else { - try!(config.expected("string", &k, ConfigValue::List(a, p))); + try!(config.expected("string", &k, + ConfigValue::List(a, p))); } }, - // technically could be a list too, but that's the exception to the rule... + // technically could be a list too, but that's the exception to + // the rule... 
cv => { try!(config.expected("string", &k, cv)); } } } diff --git a/src/cargo/ops/cargo_doc.rs b/src/cargo/ops/cargo_doc.rs index 94da98eef92..fdfe8e3f684 100644 --- a/src/cargo/ops/cargo_doc.rs +++ b/src/cargo/ops/cargo_doc.rs @@ -10,7 +10,6 @@ use sources::PathSource; use util::{CargoResult, human}; pub struct DocOptions<'a, 'b: 'a> { - pub all: bool, pub open_result: bool, pub compile_opts: ops::CompileOptions<'a, 'b>, } @@ -25,7 +24,7 @@ pub fn doc(manifest_path: &Path, let mut lib_names = HashSet::new(); let mut bin_names = HashSet::new(); if options.compile_opts.spec.is_none() { - for target in package.targets().iter().filter(|t| t.profile().is_doc()) { + for target in package.targets() { if target.is_lib() { assert!(lib_names.insert(target.name())); } else { diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index f3364737a65..6b7ef6132cb 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -179,16 +179,16 @@ fn run_verify(config: &Config, pkg: &Package, tar: &Path) // Now that we've rewritten all our path dependencies, compile it! try!(ops::compile_pkg(&new_pkg, &ops::CompileOptions { - env: "compile", config: config, jobs: None, target: None, - dev_deps: false, features: &[], no_default_features: false, spec: None, - lib_only: false, + filter: ops::CompileFilter::Everything, exec_engine: None, + release: false, + mode: ops::CompileMode::Build, })); Ok(()) diff --git a/src/cargo/ops/cargo_run.rs b/src/cargo/ops/cargo_run.rs index bc6a9d5f279..5f5a3611322 100644 --- a/src/cargo/ops/cargo_run.rs +++ b/src/cargo/ops/cargo_run.rs @@ -1,14 +1,11 @@ use std::path::Path; -use ops::{self, ExecEngine}; -use util::{CargoResult, human, process, ProcessError, ChainError}; -use core::manifest::TargetKind; +use ops::{self, ExecEngine, CompileFilter}; +use util::{CargoResult, human, process, ProcessError}; use core::source::Source; use sources::PathSource; pub fn run(manifest_path: &Path, - target_kind: TargetKind, - name: Option, options: &ops::CompileOptions, args: &[String]) -> CargoResult> { let config = options.config; @@ -16,49 +13,43 @@ pub fn run(manifest_path: &Path, config)); try!(src.update()); let root = try!(src.root_package()); - let env = options.env; + let mut bins = root.manifest().targets().iter().filter(|a| { - let matches_kind = match target_kind { - TargetKind::Bin => a.is_bin(), - TargetKind::Example => a.is_example(), - TargetKind::Lib(_) => false, - }; - let matches_name = name.as_ref().map_or(true, |n| *n == a.name()); - matches_kind && matches_name && a.profile().env() == env && - !a.profile().is_custom_build() + !a.is_lib() && !a.is_custom_build() && match options.filter { + CompileFilter::Everything => a.is_bin(), + CompileFilter::Only { .. } => options.filter.matches(a), + } }); - let bin = try!(bins.next().chain_error(|| { - match (name.as_ref(), &target_kind) { - (Some(name), &TargetKind::Bin) => { - human(format!("no bin target named `{}` to run", name)) + if bins.next().is_none() { + match options.filter { + CompileFilter::Everything => { + return Err(human("a bin target must be available for \ + `cargo run`")) + } + CompileFilter::Only { .. 
} => { + // this will be verified in cargo_compile + } + } + } + if bins.next().is_some() { + match options.filter { + CompileFilter::Everything => { + return Err(human("`cargo run` requires that a project only have \ + one executable; use the `--bin` option to \ + specify which one to run")) } - (Some(name), &TargetKind::Example) => { - human(format!("no example target named `{}` to run", name)) + CompileFilter::Only { .. } => { + return Err(human("`cargo run` can run at most one executable, \ + but multiple were specified")) } - (Some(_), &TargetKind::Lib(..)) => unreachable!(), - (None, _) => human("a bin target must be available for `cargo run`"), } - })); - match bins.next() { - Some(..) => return Err( - human("`cargo run` requires that a project only have one executable. \ - Use the `--bin` option to specify which one to run")), - None => {} } let compile = try!(ops::compile(manifest_path, options)); - let dst = manifest_path.parent().unwrap().join("target"); - let dst = match options.target { - Some(target) => dst.join(target), - None => dst, - }; - let exe = match (bin.profile().dest(), bin.is_example()) { - (s, true) => dst.join(s).join("examples").join(bin.name()), - (s, false) => dst.join(s).join(bin.name()), - }; + let exe = &compile.binaries[0]; let exe = match exe.relative_from(config.cwd()) { Some(path) => path, - None => &*exe, + None => &**exe, }; let mut process = try!(compile.target_process(exe, &root)) .into_process_builder(); diff --git a/src/cargo/ops/cargo_rustc/context.rs b/src/cargo/ops/cargo_rustc/context.rs index f6ec075b793..87e1092e3ca 100644 --- a/src/cargo/ops/cargo_rustc/context.rs +++ b/src/cargo/ops/cargo_rustc/context.rs @@ -1,5 +1,5 @@ use std::collections::hash_map::Entry::{Occupied, Vacant}; -use std::collections::hash_map::HashMap; +use std::collections::{HashSet, HashMap}; use std::str; use std::sync::Arc; use std::path::PathBuf; @@ -7,6 +7,7 @@ use std::path::PathBuf; use regex::Regex; use core::{SourceMap, Package, PackageId, PackageSet, Resolve, Target, Profile}; +use core::{TargetKind, LibKind, Profiles, Metadata}; use util::{self, CargoResult, ChainError, internal, Config, profile}; use util::human; @@ -31,9 +32,11 @@ pub struct Context<'a, 'b: 'a> { pub compilation: Compilation, pub build_state: Arc, pub exec_engine: Arc>, - pub fingerprints: HashMap<(&'a PackageId, &'a Target, Kind), Fingerprint>, + pub fingerprints: HashMap<(&'a PackageId, &'a Target, &'a Profile, Kind), + Fingerprint>, + pub compiled: HashSet<(&'a PackageId, &'a Target, &'a Profile)>, + pub build_config: BuildConfig, - env: &'a str, host: Layout, target: Option, target_triple: String, @@ -43,19 +46,19 @@ pub struct Context<'a, 'b: 'a> { target_dylib: Option<(String, String)>, target_exe: String, requirements: HashMap<(&'a PackageId, &'a str), Platform>, - build_config: BuildConfig, + profiles: &'a Profiles, } impl<'a, 'b: 'a> Context<'a, 'b> { - pub fn new(env: &'a str, - resolve: &'a Resolve, + pub fn new(resolve: &'a Resolve, sources: &'a SourceMap<'a>, deps: &'a PackageSet, config: &'a Config<'b>, host: Layout, target_layout: Option, root_pkg: &Package, - build_config: BuildConfig) -> CargoResult> { + build_config: BuildConfig, + profiles: &'a Profiles) -> CargoResult> { let target = build_config.requested_target.clone(); let target = target.as_ref().map(|s| &s[..]); let (target_dylib, target_exe) = try!(Context::filename_parts(target)); @@ -65,9 +68,11 @@ impl<'a, 'b: 'a> Context<'a, 'b> { try!(Context::filename_parts(None)) }; let target_triple = 
target.unwrap_or(config.rustc_host()).to_string(); + let engine = build_config.exec_engine.as_ref().cloned().unwrap_or({ + Arc::new(Box::new(ProcessEngine) as Box) + }); Ok(Context { target_triple: target_triple, - env: env, host: host, target: target_layout, resolve: resolve, @@ -80,10 +85,12 @@ impl<'a, 'b: 'a> Context<'a, 'b> { host_exe: host_exe, requirements: HashMap::new(), compilation: Compilation::new(root_pkg), - build_state: Arc::new(BuildState::new(build_config.clone(), deps)), + build_state: Arc::new(BuildState::new(&build_config, deps)), build_config: build_config, - exec_engine: Arc::new(Box::new(ProcessEngine) as Box), + exec_engine: engine, fingerprints: HashMap::new(), + profiles: profiles, + compiled: HashSet::new(), }) } @@ -129,7 +136,9 @@ impl<'a, 'b: 'a> Context<'a, 'b> { /// Prepare this context, ensuring that all filesystem directories are in /// place. - pub fn prepare(&mut self, pkg: &'a Package) -> CargoResult<()> { + pub fn prepare(&mut self, pkg: &'a Package, + targets: &[(&'a Target, &'a Profile)]) + -> CargoResult<()> { let _p = profile::start("preparing layout"); try!(self.host.prepare().chain_error(|| { @@ -146,9 +155,8 @@ impl<'a, 'b: 'a> Context<'a, 'b> { None => {} } - let targets = pkg.targets().iter(); - for target in targets.filter(|t| t.profile().is_compile()) { - self.build_requirements(pkg, target, Platform::Target); + for &(target, profile) in targets { + self.build_requirements(pkg, target, profile, Platform::Target); } let jobs = self.jobs(); @@ -163,8 +171,8 @@ impl<'a, 'b: 'a> Context<'a, 'b> { } fn build_requirements(&mut self, pkg: &'a Package, target: &'a Target, - req: Platform) { - let req = if target.profile().is_for_host() {Platform::Plugin} else {req}; + profile: &Profile, req: Platform) { + let req = if target.for_host() {Platform::Plugin} else {req}; match self.requirements.entry((pkg.package_id(), target.name())) { Occupied(mut entry) => match (*entry.get(), req) { (Platform::Plugin, Platform::Plugin) | @@ -177,13 +185,15 @@ impl<'a, 'b: 'a> Context<'a, 'b> { Vacant(entry) => { entry.insert(req); } }; - for &(pkg, dep) in self.dep_targets(pkg, target).iter() { - self.build_requirements(pkg, dep, req); + for &(pkg, dep, profile) in self.dep_targets(pkg, target, profile).iter() { + self.build_requirements(pkg, dep, profile, req); } - match pkg.targets().iter().find(|t| t.profile().is_custom_build()) { + match pkg.targets().iter().find(|t| t.is_custom_build()) { Some(custom_build) => { - self.build_requirements(pkg, custom_build, Platform::Plugin); + let profile = self.build_script_profile(pkg.package_id()); + self.build_requirements(pkg, custom_build, profile, + Platform::Plugin); } None => {} } @@ -191,7 +201,7 @@ impl<'a, 'b: 'a> Context<'a, 'b> { pub fn get_requirement(&self, pkg: &'a Package, target: &'a Target) -> Platform { - let default = if target.profile().is_for_host() { + let default = if target.for_host() { Platform::Plugin } else { Platform::Target @@ -215,7 +225,7 @@ impl<'a, 'b: 'a> Context<'a, 'b> { /// target. pub fn out_dir(&self, pkg: &Package, kind: Kind, target: &Target) -> PathBuf { let out_dir = self.layout(pkg, kind); - if target.profile().is_custom_build() { + if target.is_custom_build() { out_dir.build(pkg) } else if target.is_example() { out_dir.examples().to_path_buf() @@ -246,31 +256,68 @@ impl<'a, 'b: 'a> Context<'a, 'b> { &self.target_triple } - /// Return the exact filename of the target. 
- pub fn target_filenames(&self, target: &Target) -> CargoResult> { - let stem = target.file_stem(); + /// Get the metadata for a target in a specific profile + pub fn target_metadata(&self, target: &Target, profile: &Profile) + -> Option { + let metadata = target.metadata(); + if target.is_lib() && profile.test { + // Libs and their tests are built in parallel, so we need to make + // sure that their metadata is different. + metadata.map(|m| m.clone()).map(|mut m| { + m.mix(&"test"); + m + }) + } else if target.is_bin() && profile.test { + // Make sure that the name of this test executable doesn't + // conflicts with a library that has the same name and is + // being tested + let mut metadata = self.resolve.root().generate_metadata(); + metadata.mix(&format!("bin-{}", target.name())); + Some(metadata) + } else { + metadata.map(|m| m.clone()) + } + } + + /// Returns the file stem for a given target/profile combo + pub fn file_stem(&self, target: &Target, profile: &Profile) -> String { + match self.target_metadata(target, profile) { + Some(ref metadata) => format!("{}{}", target.name(), + metadata.extra_filename), + None => target.name().to_string(), + } + } + + /// Return the filenames that the given target for the given profile will + /// generate. + pub fn target_filenames(&self, target: &Target, profile: &Profile) + -> CargoResult> { + let stem = self.file_stem(target, profile); + let suffix = if target.for_host() {&self.host_exe} else {&self.target_exe}; let mut ret = Vec::new(); - if target.is_example() || target.is_bin() || - target.profile().is_test() { - ret.push(format!("{}{}", stem, - if target.profile().is_for_host() { - &self.host_exe - } else { - &self.target_exe - })); - } else { - if target.is_dylib() { - let plugin = target.profile().is_for_host(); - let kind = if plugin {Kind::Host} else {Kind::Target}; - let (prefix, suffix) = try!(self.dylib(kind)); - ret.push(format!("{}{}{}", prefix, stem, suffix)); + match *target.kind() { + TargetKind::Example | TargetKind::Bin | TargetKind::CustomBuild | + TargetKind::Bench | TargetKind::Test => { + ret.push(format!("{}{}", stem, suffix)); } - if target.is_rlib() { - ret.push(format!("lib{}.rlib", stem)); + TargetKind::Lib(..) if profile.test => { + ret.push(format!("{}{}", stem, suffix)); } - if target.is_staticlib() { - ret.push(format!("lib{}.a", stem)); + TargetKind::Lib(ref libs) => { + for lib in libs.iter() { + match *lib { + LibKind::Dylib => { + let plugin = target.for_host(); + let kind = if plugin {Kind::Host} else {Kind::Target}; + let (prefix, suffix) = try!(self.dylib(kind)); + ret.push(format!("{}{}{}", prefix, stem, suffix)); + } + LibKind::Lib | + LibKind::Rlib => ret.push(format!("lib{}.rlib", stem)), + LibKind::StaticLib => ret.push(format!("lib{}.a", stem)), + } + } } } assert!(ret.len() > 0); @@ -279,10 +326,14 @@ impl<'a, 'b: 'a> Context<'a, 'b> { /// For a package, return all targets which are registered as dependencies /// for that package. 
- pub fn dep_targets(&self, pkg: &Package, target: &Target) - -> Vec<(&'a Package, &'a Target)> { + pub fn dep_targets(&self, pkg: &Package, target: &Target, + profile: &Profile) + -> Vec<(&'a Package, &'a Target, &'a Profile)> { + if profile.doc { + return self.doc_deps(pkg, target); + } let deps = match self.resolve.deps(pkg.package_id()) { - None => return vec!(), + None => return Vec::new(), Some(deps) => deps, }; let mut ret = deps.map(|id| self.get_package(id)).filter(|dep| { @@ -293,37 +344,85 @@ impl<'a, 'b: 'a> Context<'a, 'b> { // If this target is a build command, then we only want build // dependencies, otherwise we want everything *other than* build // dependencies. - let is_correct_dep = - target.profile().is_custom_build() == pkg_dep.is_build(); + let is_correct_dep = target.is_custom_build() == pkg_dep.is_build(); // If this dependency is *not* a transitive dependency, then it // only applies to test/example targets let is_actual_dep = pkg_dep.is_transitive() || - target.profile().is_test() || - target.is_example(); + target.is_test() || + target.is_example() || + profile.test; is_correct_dep && is_actual_dep }).filter_map(|pkg| { - pkg.targets().iter().find(|&t| self.is_relevant_target(t)) - .map(|t| (pkg, t)) + pkg.targets().iter().find(|t| t.is_lib()).map(|t| { + (pkg, t, self.lib_profile(pkg.package_id())) + }) }).collect::>(); + // If a target isn't actually a build script itself, then it depends on + // the build script if there is one. + if target.is_custom_build() { return ret } + let pkg = self.get_package(pkg.package_id()); + if let Some(t) = pkg.targets().iter().find(|t| t.is_custom_build()) { + ret.push((pkg, t, self.build_script_profile(pkg.package_id()))); + } + // If this target is a binary, test, example, etc, then it depends on // the library of the same package. The call to `resolve.deps` above // didn't include `pkg` in the return values, so we need to special case // it here and see if we need to push `(pkg, pkg_lib_target)`. - if !target.profile().is_custom_build() && - (target.is_bin() || target.is_example()) { - let pkg = self.get_package(pkg.package_id()); - let target = pkg.targets().iter().filter(|t| { - t.is_lib() && t.profile().is_compile() && - (t.is_rlib() || t.is_dylib()) - }).next(); - if let Some(t) = target { - ret.push((pkg, t)); + if target.is_lib() { return ret } + if let Some(t) = pkg.targets().iter().find(|t| t.linkable()) { + ret.push((pkg, t, self.lib_profile(pkg.package_id()))); + } + + // Integration tests/benchmarks require binaries to be built + if profile.test && (target.is_test() || target.is_bench()) { + ret.extend(pkg.targets().iter().filter(|t| t.is_bin()) + .map(|t| (pkg, t, self.lib_profile(pkg.package_id())))); + } + return ret + } + + /// Returns the dependencies necessary to document a package + fn doc_deps(&self, pkg: &Package, target: &Target) + -> Vec<(&'a Package, &'a Target, &'a Profile)> { + let pkg = self.get_package(pkg.package_id()); + let deps = self.resolve.deps(pkg.package_id()).into_iter(); + let deps = deps.flat_map(|a| a).map(|id| { + self.get_package(id) + }).filter(|dep| { + pkg.dependencies().iter().find(|d| { + d.name() == dep.name() + }).unwrap().is_transitive() + }).filter_map(|dep| { + dep.targets().iter().find(|t| t.is_lib()).map(|t| (dep, t)) + }); + + // To document a library, we depend on dependencies actually being + // built. If we're documenting *all* libraries, then we also depend on + // the documentation of the library being built. 
+ let mut ret = Vec::new(); + for (dep, lib) in deps { + ret.push((dep, lib, self.lib_profile(dep.package_id()))); + if self.build_config.doc_all { + ret.push((dep, lib, &self.profiles.doc)); + } + } + + // Be sure to build/run the build script for documented libraries as + if let Some(t) = pkg.targets().iter().find(|t| t.is_custom_build()) { + ret.push((pkg, t, self.build_script_profile(pkg.package_id()))); + } + + // If we document a binary, we need the library available + if target.is_bin() { + if let Some(t) = pkg.targets().iter().find(|t| t.is_lib()) { + ret.push((pkg, t, self.lib_profile(pkg.package_id()))); } } - return ret; + return ret } /// Gets a package for the given package id. @@ -333,26 +432,6 @@ impl<'a, 'b: 'a> Context<'a, 'b> { .expect("Should have found package") } - pub fn env(&self) -> &str { - // The "doc-all" environment just means to document everything (see - // below), but we want to canonicalize that the the "doc" profile - // environment, so do that here. - if self.env == "doc-all" {"doc"} else {self.env} - } - - pub fn is_relevant_target(&self, target: &Target) -> bool { - target.is_lib() && match self.env { - "doc" | "test" => target.profile().is_compile(), - // doc-all == document everything, so look for doc targets and - // compile targets in dependencies - "doc-all" => target.profile().is_compile() || - (target.profile().env() == "doc" && - target.profile().is_doc()), - _ => target.profile().env() == self.env && - !target.profile().is_test(), - } - } - /// Get the user-specified linker for a particular host or target pub fn linker(&self, kind: Kind) -> Option<&str> { self.target_config(kind).linker.as_ref().map(|s| s.as_slice()) @@ -379,21 +458,18 @@ impl<'a, 'b: 'a> Context<'a, 'b> { self.build_config.requested_target.as_ref().map(|s| &s[..]) } - /// Calculate the actual profile to use for a target's compliation. - /// - /// This may involve overriding some options such as debug information, - /// rpath, opt level, etc. - pub fn profile(&self, target: &Target) -> Profile { - let mut profile = target.profile().clone(); - let root_package = self.get_package(self.resolve.root()); - for target in root_package.manifest().targets().iter() { - let root_profile = target.profile(); - if root_profile.env() != profile.env() { continue } - profile = profile.set_opt_level(root_profile.opt_level()) - .set_debug(root_profile.debug()) - .set_rpath(root_profile.rpath()) + pub fn lib_profile(&self, _pkg: &PackageId) -> &'a Profile { + if self.build_config.release { + &self.profiles.release + } else { + &self.profiles.dev } - profile + } + + pub fn build_script_profile(&self, _pkg: &PackageId) -> &'a Profile { + // TODO: should build scripts always be built with a dev + // profile? How is this controlled at the CLI layer? + &self.profiles.dev } } diff --git a/src/cargo/ops/cargo_rustc/custom_build.rs b/src/cargo/ops/cargo_rustc/custom_build.rs index 48bb6a2856c..82ea558583e 100644 --- a/src/cargo/ops/cargo_rustc/custom_build.rs +++ b/src/cargo/ops/cargo_rustc/custom_build.rs @@ -46,17 +46,15 @@ pub fn prepare(pkg: &Package, target: &Target, req: Platform, }; // Building the command to execute - let to_exec = try!(cx.target_filenames(target))[0].clone(); + let profile = cx.build_script_profile(pkg.package_id()); + let to_exec = try!(cx.target_filenames(target, profile))[0].clone(); let to_exec = script_output.join(&to_exec); // Start preparing the process to execute, starting out with some // environment variables. 
Note that the profile-related environment // variables are not set with this the build script's profile but rather the - // package's profile (some target which isn't a build script). - let profile_target = pkg.targets().iter().find(|t| { - cx.is_relevant_target(t) && !t.profile().is_custom_build() - }).unwrap_or(target); - let profile = cx.profile(profile_target); + // package's library profile. + let profile = cx.lib_profile(pkg.package_id()); let to_exec = to_exec.into_os_string(); let mut p = try!(super::process(CommandType::Host(to_exec), pkg, target, cx)); p.env("OUT_DIR", &build_output) @@ -66,9 +64,9 @@ pub fn prepare(pkg: &Package, target: &Target, req: Platform, Kind::Host => cx.config.rustc_host(), Kind::Target => cx.target_triple(), }) - .env("DEBUG", &profile.debug().to_string()) - .env("OPT_LEVEL", &profile.opt_level().to_string()) - .env("PROFILE", &profile.env()) + .env("DEBUG", &profile.debuginfo.to_string()) + .env("OPT_LEVEL", &profile.opt_level.to_string()) + .env("PROFILE", if cx.build_config.release {"release"} else {"debug"}) .env("HOST", &cx.config.rustc_host()); // Be sure to pass along all enabled features for this package, this is the @@ -88,10 +86,11 @@ pub fn prepare(pkg: &Package, target: &Target, req: Platform, // This information will be used at build-time later on to figure out which // sorts of variables need to be discovered at that time. let lib_deps = { - let non_build_target = pkg.targets().iter().find(|t| { - !t.profile().is_custom_build() + let not_custom = pkg.targets().iter().find(|t| { + !t.is_custom_build() }).unwrap(); - cx.dep_targets(pkg, non_build_target).iter().filter_map(|&(pkg, _)| { + cx.dep_targets(pkg, not_custom, profile).iter().filter_map(|&(pkg, t, _)| { + if !t.linkable() { return None } pkg.manifest().links().map(|links| { (links.to_string(), pkg.package_id().clone()) }) @@ -102,7 +101,8 @@ pub fn prepare(pkg: &Package, target: &Target, req: Platform, let id = pkg.package_id().clone(); let all = (id.clone(), pkg_name.clone(), build_state.clone(), build_output.clone()); - let plugin_deps = super::crawl_build_deps(cx, pkg, target, Kind::Host); + let plugin_deps = super::crawl_build_deps(cx, pkg, target, profile, + Kind::Host); try!(fs::create_dir_all(&cx.layout(pkg, Kind::Target).build(pkg))); try!(fs::create_dir_all(&cx.layout(pkg, Kind::Host).build(pkg))); @@ -206,7 +206,7 @@ pub fn prepare(pkg: &Package, target: &Target, req: Platform, } impl BuildState { - pub fn new(config: super::BuildConfig, + pub fn new(config: &super::BuildConfig, packages: &PackageSet) -> BuildState { let mut sources = HashMap::new(); for package in packages.iter() { @@ -219,15 +219,13 @@ impl BuildState { } } let mut outputs = HashMap::new(); - let i1 = config.host.overrides.into_iter().map(|p| (p, Kind::Host)); - let i2 = config.target.overrides.into_iter().map(|p| (p, Kind::Target)); + let i1 = config.host.overrides.iter().map(|p| (p, Kind::Host)); + let i2 = config.target.overrides.iter().map(|p| (p, Kind::Target)); for ((name, output), kind) in i1.chain(i2) { - match sources.get(&name) { - Some(id) => { outputs.insert((id.clone(), kind), output); } - - // If no package is using the library named `name`, then this is - // just an override that we ignore. - None => {} + // If no package is using the library named `name`, then this is + // just an override that we ignore. 
+ if let Some(id) = sources.get(name) { + outputs.insert((id.clone(), kind), output.clone()); } } BuildState { outputs: Mutex::new(outputs) } diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs index fd52f9cd0e6..dca21525c53 100644 --- a/src/cargo/ops/cargo_rustc/fingerprint.rs +++ b/src/cargo/ops/cargo_rustc/fingerprint.rs @@ -4,7 +4,7 @@ use std::io::prelude::*; use std::io::{BufReader, SeekFrom}; use std::path::{Path, PathBuf}; -use core::{Package, Target}; +use core::{Package, Target, Profile}; use util; use util::{CargoResult, Fresh, Dirty, Freshness, internal, profile, ChainError}; @@ -43,27 +43,28 @@ pub type Preparation = (Freshness, Work, Work); pub fn prepare_target<'a, 'b>(cx: &mut Context<'a, 'b>, pkg: &'a Package, target: &'a Target, + profile: &'a Profile, kind: Kind) -> CargoResult { let _p = profile::start(format!("fingerprint: {} / {}", pkg.package_id(), target.name())); let new = dir(cx, pkg, kind); - let loc = new.join(&filename(target)); + let loc = new.join(&filename(target, profile)); info!("fingerprint at: {}", loc.display()); - let fingerprint = try!(calculate(cx, pkg, target, kind)); + let fingerprint = try!(calculate(cx, pkg, target, profile, kind)); let is_fresh = try!(is_fresh(&loc, &fingerprint)); let root = cx.out_dir(pkg, kind, target); let mut missing_outputs = false; - if !target.profile().is_doc() { - for filename in try!(cx.target_filenames(target)).iter() { + if !profile.doc { + for filename in try!(cx.target_filenames(target, profile)).iter() { let dst = root.join(filename); missing_outputs |= fs::metadata(&dst).is_err(); - if target.profile().is_test() { + if profile.test { cx.compilation.tests.push((target.name().to_string(), dst)); - } else if target.is_bin() { + } else if target.is_bin() || target.is_example() { cx.compilation.binaries.push(dst); } else if target.is_lib() { let pkgid = pkg.package_id().clone(); @@ -146,9 +147,10 @@ impl Fingerprint { fn calculate<'a, 'b>(cx: &mut Context<'a, 'b>, pkg: &'a Package, target: &'a Target, + profile: &'a Profile, kind: Kind) -> CargoResult { - let key = (pkg.package_id(), target, kind); + let key = (pkg.package_id(), target, profile, kind); match cx.fingerprints.get(&key) { Some(s) => return Ok(s.clone()), None => {} @@ -164,21 +166,29 @@ fn calculate<'a, 'b>(cx: &mut Context<'a, 'b>, v }); let extra = util::short_hash(&(cx.config.rustc_version(), target, &features, - cx.profile(target))); + profile)); // Next, recursively calculate the fingerprint for all of our dependencies. - let deps = try!(cx.dep_targets(pkg, target).into_iter().map(|(p, t)| { + // + // Skip the fingerprints of build scripts as they may not always be + // available and the dirtiness propagation for modification is tracked + // elsewhere. Also skip fingerprints of binaries because they don't actually + // induce a recompile, they're just dependencies in the sense that they need + // to be built. 
+ let deps = try!(cx.dep_targets(pkg, target, profile).into_iter() + .filter(|&(_, t, _)| !t.is_custom_build() && !t.is_bin()) + .map(|(pkg, target, profile)| { let kind = match kind { Kind::Host => Kind::Host, - Kind::Target if t.profile().is_for_host() => Kind::Host, + Kind::Target if target.for_host() => Kind::Host, Kind::Target => Kind::Target, }; - calculate(cx, p, t, kind) + calculate(cx, pkg, target, profile, kind) }).collect::>>()); // And finally, calculate what our own local fingerprint is - let local = if use_dep_info(pkg, target) { - let dep_info = dep_info_loc(cx, pkg, target, kind); + let local = if use_dep_info(pkg, profile) { + let dep_info = dep_info_loc(cx, pkg, target, profile, kind); let mtime = try!(calculate_target_mtime(&dep_info)); // if the mtime listed is not fresh, then remove the `dep_info` file to @@ -204,10 +214,9 @@ fn calculate<'a, 'b>(cx: &mut Context<'a, 'b>, // git/registry source, then the mtime of files may fluctuate, but they won't // change so long as the source itself remains constant (which is the // responsibility of the source) -fn use_dep_info(pkg: &Package, target: &Target) -> bool { - let doc = target.profile().is_doc(); +fn use_dep_info(pkg: &Package, profile: &Profile) -> bool { let path = pkg.summary().source_id().is_path(); - !doc && path + !profile.doc && path } /// Prepare the necessary work for the fingerprint of a build command. @@ -293,8 +302,8 @@ pub fn dir(cx: &Context, pkg: &Package, kind: Kind) -> PathBuf { /// Returns the (old, new) location for the dep info file of a target. pub fn dep_info_loc(cx: &Context, pkg: &Package, target: &Target, - kind: Kind) -> PathBuf { - dir(cx, pkg, kind).join(&format!("dep-{}", filename(target))) + profile: &Profile, kind: Kind) -> PathBuf { + dir(cx, pkg, kind).join(&format!("dep-{}", filename(target, profile))) } fn is_fresh(loc: &Path, new_fingerprint: &Fingerprint) -> CargoResult { @@ -375,11 +384,11 @@ fn calculate_pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult source.fingerprint(pkg) } -fn filename(target: &Target) -> String { +fn filename(target: &Target, profile: &Profile) -> String { let kind = if target.is_lib() {"lib"} else {"bin"}; - let flavor = if target.profile().is_test() { + let flavor = if target.is_test() || profile.test { "test-" - } else if target.profile().is_doc() { + } else if profile.doc { "doc-" } else { "" diff --git a/src/cargo/ops/cargo_rustc/job_queue.rs b/src/cargo/ops/cargo_rustc/job_queue.rs index 6f2aab1544d..103726755e4 100644 --- a/src/cargo/ops/cargo_rustc/job_queue.rs +++ b/src/cargo/ops/cargo_rustc/job_queue.rs @@ -1,6 +1,6 @@ use std::collections::HashSet; use std::collections::hash_map::HashMap; -use std::collections::hash_map::Entry::{Occupied, Vacant}; +// use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::sync::mpsc::{channel, Sender, Receiver}; use threadpool::ThreadPool; @@ -27,7 +27,7 @@ pub struct JobQueue<'a> { packages: &'a PackageSet, active: u32, pending: HashMap<(&'a PackageId, Stage), PendingBuild>, - state: HashMap<&'a PackageId, Freshness>, + pkgids: HashSet<&'a PackageId>, printed: HashSet<&'a PackageId>, } @@ -58,6 +58,7 @@ pub enum Stage { Binaries, LibraryTests, BinaryTests, + End, } type Message = (PackageId, Stage, Freshness, CargoResult<()>); @@ -75,25 +76,17 @@ impl<'a> JobQueue<'a> { packages: packages, active: 0, pending: HashMap::new(), - state: HashMap::new(), + pkgids: HashSet::new(), printed: HashSet::new(), } } - pub fn enqueue(&mut self, pkg: &'a Package, stage: Stage, - jobs: Vec<(Job, 
Freshness)>) { - // Record the freshness state of this package as dirty if any job is - // dirty or fresh otherwise - let fresh = jobs.iter().fold(Fresh, |f1, &(_, f2)| f1.combine(f2)); - match self.state.entry(pkg.package_id()) { - Occupied(mut entry) => { *entry.get_mut() = entry.get().combine(fresh); } - Vacant(entry) => { entry.insert(fresh); } - }; - - // Add the package to the dependency graph - self.queue.enqueue(&(self.resolve, self.packages), Fresh, - (pkg.package_id(), stage), - (pkg, jobs)); + pub fn queue(&mut self, pkg: &'a Package, stage: Stage) + -> &mut Vec<(Job, Freshness)> { + self.pkgids.insert(pkg.package_id()); + &mut self.queue.queue(&(self.resolve, self.packages), Fresh, + (pkg.package_id(), stage), + (pkg, Vec::new())).1 } /// Execute all jobs necessary to build the dependency graph. @@ -123,11 +116,11 @@ impl<'a> JobQueue<'a> { // scheduling work as quickly as possibly. let (id, stage, fresh, result) = self.rx.recv().unwrap(); info!(" end: {} {:?}", id, stage); - let id = *self.state.keys().find(|&k| *k == &id).unwrap(); + let id = *self.pkgids.iter().find(|&k| *k == &id).unwrap(); self.active -= 1; match result { Ok(()) => { - let state = &mut self.pending[(id, stage)]; + let state = self.pending.get_mut(&(id, stage)).unwrap(); state.amt -= 1; state.fresh = state.fresh.combine(fresh); if state.amt == 0 { @@ -171,9 +164,11 @@ impl<'a> JobQueue<'a> { fresh: fresh, }); - let mut total_fresh = fresh.combine(self.state[pkg.package_id()]); + let mut total_fresh = fresh; let mut running = Vec::new(); + debug!("start {:?} at {:?} for {}", total_fresh, stage, pkg); for (job, job_freshness) in jobs.into_iter() { + debug!("job: {:?} ({:?})", job_freshness, total_fresh); let fresh = job_freshness.combine(fresh); total_fresh = total_fresh.combine(fresh); let my_tx = self.tx.clone(); @@ -196,32 +191,44 @@ impl<'a> JobQueue<'a> { } // Print out some nice progress information - // - // This isn't super trivial becuase we don't want to print loads and - // loads of information to the console, but we also want to produce a - // faithful representation of what's happening. This is somewhat nuanced - // as a package can start compiling *very* early on because of custom - // build commands and such. - // - // In general, we try to print "Compiling" for the first nontrivial task - // run for a package, regardless of when that is. We then don't print - // out any more information for a package after we've printed it once. - let print = !self.printed.contains(&pkg.package_id()); - if print && (stage == Stage::Libraries || - (total_fresh == Dirty && running.len() > 0)) { - self.printed.insert(pkg.package_id()); - match total_fresh { - Fresh => try!(config.shell().verbose(|c| { - c.status("Fresh", pkg) - })), - Dirty => try!(config.shell().status("Compiling", pkg)) - } - } + try!(self.note_working_on(config, pkg.package_id(), stage, total_fresh, + running.len())); for msg in running.iter() { try!(config.shell().verbose(|c| c.status("Running", msg))); } Ok(()) } + + // This isn't super trivial becuase we don't want to print loads and + // loads of information to the console, but we also want to produce a + // faithful representation of what's happening. This is somewhat nuanced + // as a package can start compiling *very* early on because of custom + // build commands and such. + // + // In general, we try to print "Compiling" for the first nontrivial task + // run for a package, regardless of when that is. 
We then don't print + // out any more information for a package after we've printed it once. + fn note_working_on(&mut self, config: &Config, pkg: &'a PackageId, + stage: Stage, fresh: Freshness, cmds_run: usize) + -> CargoResult<()> { + if self.printed.contains(&pkg) { return Ok(()) } + + match fresh { + // Any dirty stage which runs at least one command gets printed as + // being a compiled package + Dirty if cmds_run == 0 => {} + Dirty => { + self.printed.insert(pkg); + try!(config.shell().status("Compiling", pkg)); + } + Fresh if stage == Stage::End => { + self.printed.insert(pkg); + try!(config.shell().verbose(|c| c.status("Fresh", pkg))); + } + Fresh => {} + } + Ok(()) + } } impl<'a> Dependency for (&'a PackageId, Stage) { @@ -295,6 +302,13 @@ impl<'a> Dependency for (&'a PackageId, Stage) { base.extend(deps.map(|(id, _)| (id, Stage::Libraries))); base } + + // A marker stage to indicate when a package has entirely finished + // compiling, nothing is actually built as part of this stage. + Stage::End => { + vec![(id, Stage::Binaries), (id, Stage::BinaryTests), + (id, Stage::LibraryTests)] + } } } } diff --git a/src/cargo/ops/cargo_rustc/links.rs b/src/cargo/ops/cargo_rustc/links.rs index ed2b8bfd801..8f152d76c45 100644 --- a/src/cargo/ops/cargo_rustc/links.rs +++ b/src/cargo/ops/cargo_rustc/links.rs @@ -23,9 +23,7 @@ pub fn validate(deps: &PackageSet) -> CargoResult<()> { } None => {} } - if !dep.manifest().targets().iter().any(|t| { - t.profile().is_custom_build() - }) { + if !dep.manifest().targets().iter().any(|t| t.is_custom_build()) { return Err(human(format!("package `{}` specifies that it links to \ `{}` but does not have a custom build \ script", dep.package_id(), lib))) diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs index c8dce9a0128..ab670aaf81e 100644 --- a/src/cargo/ops/cargo_rustc/mod.rs +++ b/src/cargo/ops/cargo_rustc/mod.rs @@ -7,6 +7,7 @@ use std::path::{self, PathBuf}; use std::sync::Arc; use core::{SourceMap, Package, PackageId, PackageSet, Target, Resolve}; +use core::{Profile, Profiles}; use util::{self, CargoResult, human, caused_human}; use util::{Config, internal, ChainError, Fresh, profile, join_paths}; @@ -39,6 +40,9 @@ pub struct BuildConfig { pub target: TargetConfig, pub jobs: u32, pub requested_target: Option, + pub exec_engine: Option>>, + pub release: bool, + pub doc_all: bool, } #[derive(Clone, Default)] @@ -71,35 +75,16 @@ pub fn rustc_version() -> CargoResult<(String, String)> { Ok((output, triple)) } -// This is a temporary assert that ensures the consistency of the arguments -// given the current limitations of Cargo. The long term fix is to have each -// Target know the absolute path to the build location. -fn uniq_target_dest<'a>(targets: &[&'a Target]) -> &'a str { - let mut curr: Option<&str> = None; - - for t in targets.iter().filter(|t| !t.profile().is_custom_build()) { - let dest = t.profile().dest(); - - match curr { - Some(curr) => assert_eq!(curr, dest), - None => curr = Some(dest) - } - } - - curr.unwrap() -} - // Returns a mapping of the root package plus its immediate dependencies to // where the compiled libraries are all located. 
-pub fn compile_targets<'a, 'b>(env: &str, - targets: &[&'a Target], +pub fn compile_targets<'a, 'b>(targets: &[(&'a Target, &'a Profile)], pkg: &'a Package, deps: &PackageSet, resolve: &'a Resolve, sources: &'a SourceMap<'a>, config: &'a Config<'b>, build_config: BuildConfig, - exec_engine: Option>>) + profiles: &'a Profiles) -> CargoResult { if targets.is_empty() { return Ok(Compilation::new(pkg)) @@ -109,53 +94,35 @@ pub fn compile_targets<'a, 'b>(env: &str, try!(links::validate(deps)); - let dest = uniq_target_dest(targets); + let dest = if build_config.release {"release"} else {"debug"}; let root = if resolve.root() == pkg.package_id() { pkg } else { deps.iter().find(|p| p.package_id() == resolve.root()).unwrap() }; - let host_layout = Layout::new(root, None, dest); + let host_layout = Layout::new(root, None, &dest); let target_layout = build_config.requested_target.as_ref().map(|target| { - layout::Layout::new(root, Some(&target), dest) + layout::Layout::new(root, Some(&target), &dest) }); - let mut cx = try!(Context::new(env, resolve, sources, deps, config, + let mut cx = try!(Context::new(resolve, sources, deps, config, host_layout, target_layout, pkg, - build_config)); - if let Some(exec_engine) = exec_engine { - cx.exec_engine = exec_engine.clone(); - } + build_config, profiles)); let mut queue = JobQueue::new(cx.resolve, deps, cx.jobs()); - // First ensure that the destination directory exists - try!(cx.prepare(pkg)); + // Prep the context's build requirements and see the job graph for all + // packages initially. + { + let _p = profile::start("preparing build directories"); + try!(cx.prepare(pkg, targets)); + prepare_init(&mut cx, pkg, &mut queue, &mut HashSet::new()); + } // Build up a list of pending jobs, each of which represent compiling a // particular package. No actual work is executed as part of this, that's - // all done later as part of the `execute` function which will run + // all done next as part of the `execute` function which will run // everything in order with proper parallelism. - let mut compiled = HashSet::new(); - each_dep(pkg, &cx, |dep| { - compiled.insert(dep.package_id().clone()); - }); - for dep in deps.iter() { - if dep == pkg || !compiled.contains(dep.package_id()) { continue } - - // Only compile lib targets for dependencies - let targets = dep.targets().iter().filter(|target| { - target.profile().is_custom_build() || - cx.is_relevant_target(*target) - }).collect::>(); - - if targets.len() == 0 && dep.package_id() != resolve.root() { - return Err(human(format!("Package `{}` has no library targets", dep))) - } - - try!(compile(&targets, dep, &mut cx, &mut queue)); - } - try!(compile(targets, pkg, &mut cx, &mut queue)); // Now that we've figured out everything that we're going to do, do it! 
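A user-visible consequence of the hunk above is that build output is now placed under a directory named after the profile rather than a per-target "dest" string. A minimal sketch of the rule (paths are illustrative):

use std::path::PathBuf;

// `--release` selects target/release; everything else now goes to target/debug.
fn dest_dir(target_root: &str, release: bool) -> PathBuf {
    PathBuf::from(target_root).join(if release { "release" } else { "debug" })
}

fn main() {
    assert_eq!(dest_dir("target", false), PathBuf::from("target/debug"));
    assert_eq!(dest_dir("target", true), PathBuf::from("target/release"));
}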
@@ -171,7 +138,7 @@ pub fn compile_targets<'a, 'b>(env: &str, for (&(ref pkg, _), output) in cx.build_state.outputs.lock().unwrap().iter() { let any_dylib = output.library_links.iter().any(|l| { - !l.ends_with(":static") && !l.ends_with(":framework") + !l.starts_with("static=") && !l.starts_with("framework=") }); if !any_dylib { continue } for dir in output.library_paths.iter() { @@ -181,61 +148,54 @@ pub fn compile_targets<'a, 'b>(env: &str, Ok(cx.compilation) } -fn compile<'a, 'b>(targets: &[&'a Target], pkg: &'a Package, +fn compile<'a, 'b>(targets: &[(&'a Target, &'a Profile)], + pkg: &'a Package, cx: &mut Context<'a, 'b>, jobs: &mut JobQueue<'a>) -> CargoResult<()> { debug!("compile_pkg; pkg={}", pkg); - let _p = profile::start(format!("preparing: {}", pkg)); - - if targets.is_empty() { - return Ok(()) - } + let profiling_marker = profile::start(format!("preparing: {}", pkg)); - // Prepare the fingerprint directory as the first step of building a package - let (target1, target2) = fingerprint::prepare_init(cx, pkg, Kind::Target); - let mut init = vec![(Job::new(target1, target2), Fresh)]; - if cx.requested_target().is_some() { - let (plugin1, plugin2) = fingerprint::prepare_init(cx, pkg, Kind::Host); - init.push((Job::new(plugin1, plugin2), Fresh)); - } - jobs.enqueue(pkg, Stage::Start, init); - - // After the custom command has run, execute rustc for all targets of our - // package. + // For each target/profile run the compiler or rustdoc accordingly. After + // having done so we enqueue the job in the right portion of the dependency + // graph and then move on to the next. // - // Each target has its own concept of freshness to ensure incremental - // rebuilds on the *target* granularity, not the *package* granularity. - let (mut libs, mut bins, mut lib_tests, mut bin_tests) = - (Vec::new(), Vec::new(), Vec::new(), Vec::new()); - let (mut build_custom, mut run_custom) = (Vec::new(), Vec::new()); - for &target in targets.iter() { - let work = if target.profile().is_doc() { - let rustdoc = try!(rustdoc(pkg, target, cx)); + // This loop also takes care of enqueueing the work needed to actually run + // the custom build commands as well. 
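Earlier in this hunk the dylib test switched to `static=` / `framework=` prefixes; those correspond to the `cargo:rustc-link-lib=KIND=NAME` lines a build script prints. A minimal build-script sketch (the library names are made up):

// build.rs
fn main() {
    // Static libraries do not cause extra dynamic-library search paths.
    println!("cargo:rustc-link-lib=static=mystatic");
    // A dylib (the default kind) is what the loop above reacts to.
    println!("cargo:rustc-link-lib=dylib=mydylib");
    println!("cargo:rustc-link-search=native=/usr/local/lib");
}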
+ for &(target, profile) in targets { + if !cx.compiled.insert((pkg.package_id(), target, profile)) { + continue + } + + let work = if profile.doc { + let rustdoc = try!(rustdoc(pkg, target, profile, cx)); vec![(rustdoc, Kind::Target)] } else { let req = cx.get_requirement(pkg, target); - try!(rustc(pkg, target, cx, req)) + try!(rustc(pkg, target, profile, cx, req)) }; - // Figure out what stage this work will go into - let dst = match (target.is_lib(), - target.profile().is_test(), - target.profile().is_custom_build()) { - (_, _, true) => &mut build_custom, - (true, true, _) => &mut lib_tests, - (false, true, _) => &mut bin_tests, - (true, false, _) => &mut libs, - (false, false, _) if target.profile().env() == "test" => &mut bin_tests, - (false, false, _) => &mut bins, - }; for (work, kind) in work.into_iter() { let (freshness, dirty, fresh) = - try!(fingerprint::prepare_target(cx, pkg, target, kind)); + try!(fingerprint::prepare_target(cx, pkg, target, profile, kind)); let dirty = Work::new(move |desc_tx| { try!(work.call(desc_tx.clone())); dirty.call(desc_tx) }); + + // Figure out what stage this work will go into + let dst = match (target.is_lib(), + profile.test, + target.is_custom_build()) { + (_, _, true) => jobs.queue(pkg, Stage::BuildCustomBuild), + (true, true, _) => jobs.queue(pkg, Stage::LibraryTests), + (false, true, _) => jobs.queue(pkg, Stage::BinaryTests), + (true, false, _) => jobs.queue(pkg, Stage::Libraries), + (false, false, _) if !target.is_bin() => { + jobs.queue(pkg, Stage::BinaryTests) + } + (false, false, _) => jobs.queue(pkg, Stage::Binaries), + }; dst.push((Job::new(dirty, fresh), freshness)); } @@ -244,15 +204,12 @@ fn compile<'a, 'b>(targets: &[&'a Target], pkg: &'a Package, // because we may need to run the build script multiple times. If the // package is needed in both a host and target context, we need to run // it once per context. - if !target.profile().is_custom_build() { continue } + if !target.is_custom_build() { continue } let mut reqs = Vec::new(); - let requirement = targets.iter().fold(None::, |req, t| { - if !t.profile().is_custom_build() && !t.profile().is_doc() { - let r2 = cx.get_requirement(pkg, *t); - req.map(|r| r.combine(r2)).or(Some(r2)) - } else { - req - } + let requirement = pkg.targets().iter().filter(|t| !t.is_custom_build()) + .fold(None::, |req, t| { + let r2 = cx.get_requirement(pkg, t); + req.map(|r| r.combine(r2)).or(Some(r2)) }).unwrap_or(Platform::Target); match requirement { Platform::Target => reqs.push(Platform::Target), @@ -266,9 +223,12 @@ fn compile<'a, 'b>(targets: &[&'a Target], pkg: &'a Package, } } } - let before = run_custom.len(); + let before = jobs.queue(pkg, Stage::RunCustomBuild).len(); for &req in reqs.iter() { - let kind = match req { Platform::Plugin => Kind::Host, _ => Kind::Target }; + let kind = match req { + Platform::Plugin => Kind::Host, + _ => Kind::Target, + }; let key = (pkg.package_id().clone(), kind); if pkg.manifest().links().is_some() && cx.build_state.outputs.lock().unwrap().contains_key(&key) { @@ -276,31 +236,69 @@ fn compile<'a, 'b>(targets: &[&'a Target], pkg: &'a Package, } let (dirty, fresh, freshness) = try!(custom_build::prepare(pkg, target, req, cx)); + let run_custom = jobs.queue(pkg, Stage::RunCustomBuild); run_custom.push((Job::new(dirty, fresh), freshness)); } - // If no build scripts were run, no need to compile the build script! - if run_custom.len() == before { - dst.pop(); + // If we didn't actually run the custom build command, then there's no + // need to compile it. 
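The per-target stage dispatch in the loop above can be sketched on its own; Stage and the boolean flags here are simplified stand-ins for Cargo's types:

#[derive(Debug, PartialEq)]
enum Stage { BuildCustomBuild, Libraries, Binaries, LibraryTests, BinaryTests }

fn stage_for(is_lib: bool, test: bool, custom_build: bool, is_bin: bool) -> Stage {
    match (is_lib, test, custom_build) {
        (_, _, true) => Stage::BuildCustomBuild,
        (true, true, _) => Stage::LibraryTests,
        (false, true, _) => Stage::BinaryTests,
        (true, false, _) => Stage::Libraries,
        // Non-test, non-lib targets that are not binaries (e.g. examples)
        // are grouped with the binary tests.
        (false, false, _) if !is_bin => Stage::BinaryTests,
        (false, false, _) => Stage::Binaries,
    }
}

fn main() {
    assert_eq!(stage_for(true, false, false, false), Stage::Libraries);
    assert_eq!(stage_for(false, false, false, true), Stage::Binaries);
    assert_eq!(stage_for(false, false, false, false), Stage::BinaryTests);
}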
+ if jobs.queue(pkg, Stage::RunCustomBuild).len() == before { + jobs.queue(pkg, Stage::BuildCustomBuild).pop(); + } + } + drop(profiling_marker); + + // Be sure to compile all dependencies of this target as well. Don't recurse + // if we've already recursed, however. + for &(target, profile) in targets { + for &(pkg, target, p) in cx.dep_targets(pkg, target, profile).iter() { + try!(compile(&[(target, p)], pkg, cx, jobs)); } } - jobs.enqueue(pkg, Stage::BuildCustomBuild, build_custom); - jobs.enqueue(pkg, Stage::RunCustomBuild, run_custom); - jobs.enqueue(pkg, Stage::Libraries, libs); - jobs.enqueue(pkg, Stage::Binaries, bins); - jobs.enqueue(pkg, Stage::BinaryTests, bin_tests); - jobs.enqueue(pkg, Stage::LibraryTests, lib_tests); Ok(()) } -fn rustc(package: &Package, target: &Target, +fn prepare_init<'a, 'b>(cx: &mut Context<'a, 'b>, + pkg: &'a Package, + jobs: &mut JobQueue<'a>, + visited: &mut HashSet<&'a PackageId>) { + if !visited.insert(pkg.package_id()) { return } + + // Set up all dependencies + for dep in cx.resolve.deps(pkg.package_id()).into_iter().flat_map(|a| a) { + let dep = cx.get_package(dep); + prepare_init(cx, dep, jobs, visited); + } + + // Initialize blank queues for each stage + jobs.queue(pkg, Stage::BuildCustomBuild); + jobs.queue(pkg, Stage::RunCustomBuild); + jobs.queue(pkg, Stage::Libraries); + jobs.queue(pkg, Stage::Binaries); + jobs.queue(pkg, Stage::LibraryTests); + jobs.queue(pkg, Stage::BinaryTests); + jobs.queue(pkg, Stage::End); + + // Prepare the fingerprint directory as the first step of building a package + let (target1, target2) = fingerprint::prepare_init(cx, pkg, Kind::Target); + let init = jobs.queue(pkg, Stage::Start); + if cx.requested_target().is_some() { + let (plugin1, plugin2) = fingerprint::prepare_init(cx, pkg, + Kind::Host); + init.push((Job::new(plugin1, plugin2), Fresh)); + } + init.push((Job::new(target1, target2), Fresh)); +} + +fn rustc(package: &Package, target: &Target, profile: &Profile, cx: &mut Context, req: Platform) -> CargoResult >{ let crate_types = target.rustc_crate_types(); - let rustcs = try!(prepare_rustc(package, target, crate_types, cx, req)); + let rustcs = try!(prepare_rustc(package, target, profile, crate_types, + cx, req)); - let plugin_deps = crawl_build_deps(cx, package, target, Kind::Host); + let plugin_deps = crawl_build_deps(cx, package, target, profile, Kind::Host); return rustcs.into_iter().map(|(mut rustc, kind)| { let name = package.name().to_string(); @@ -312,15 +310,16 @@ fn rustc(package: &Package, target: &Target, } let exec_engine = cx.exec_engine.clone(); - let filenames = try!(cx.target_filenames(target)); + let filenames = try!(cx.target_filenames(target, profile)); let root = cx.out_dir(package, kind, target); // Prepare the native lib state (extra -L and -l flags) let build_state = cx.build_state.clone(); let current_id = package.package_id().clone(); let plugin_deps = plugin_deps.clone(); - let mut native_lib_deps = crawl_build_deps(cx, package, target, kind); - if package.has_custom_build() && !target.profile().is_custom_build() { + let mut native_lib_deps = crawl_build_deps(cx, package, target, + profile, kind); + if package.has_custom_build() && !target.is_custom_build() { native_lib_deps.insert(0, current_id.clone()); } @@ -330,9 +329,10 @@ fn rustc(package: &Package, target: &Target, t.is_lib() }); - let rustc_dep_info_loc = root.join(&target.file_stem()) + let rustc_dep_info_loc = root.join(&cx.file_stem(target, profile)) .with_extension("d"); - let dep_info_loc = 
fingerprint::dep_info_loc(cx, package, target, kind); + let dep_info_loc = fingerprint::dep_info_loc(cx, package, target, + profile, kind); let cwd = cx.config.cwd().to_path_buf(); Ok((Work::new(move |desc_tx| { @@ -394,19 +394,24 @@ fn rustc(package: &Package, target: &Target, } } -fn crawl_build_deps<'a>(cx: &'a Context, pkg: &'a Package, - target: &Target, kind: Kind) -> Vec { +fn crawl_build_deps<'a>(cx: &'a Context, + pkg: &'a Package, + target: &Target, + profile: &Profile, + kind: Kind) -> Vec { let mut deps = HashSet::new(); - visit(cx, pkg, target, kind, &mut HashSet::new(), &mut deps); + visit(cx, pkg, target, profile, kind, &mut HashSet::new(), &mut deps); let mut ret: Vec<_> = deps.into_iter().collect(); ret.sort(); return ret; - fn visit<'a>(cx: &'a Context, pkg: &'a Package, target: &Target, + fn visit<'a>(cx: &'a Context, + pkg: &'a Package, target: &Target, profile: &Profile, kind: Kind, visiting: &mut HashSet<&'a PackageId>, libs: &mut HashSet) { - for &(pkg, target) in cx.dep_targets(pkg, target).iter() { + for &(pkg, target, p) in cx.dep_targets(pkg, target, profile).iter() { + if !target.linkable() { continue } let req = cx.get_requirement(pkg, target); if !req.includes(kind) { continue } if !visiting.insert(pkg.package_id()) { continue } @@ -414,7 +419,7 @@ fn crawl_build_deps<'a>(cx: &'a Context, pkg: &'a Package, if pkg.has_custom_build() { libs.insert(pkg.package_id().clone()); } - visit(cx, pkg, target, kind, visiting, libs); + visit(cx, pkg, target, p, kind, visiting, libs); visiting.remove(&pkg.package_id()); } } @@ -431,6 +436,7 @@ fn add_plugin_deps(rustc: &mut CommandPrototype, let search_path = rustc.get_env(var).unwrap_or(OsString::new()); let mut search_path = env::split_paths(&search_path).collect::>(); for id in plugin_deps.into_iter() { + debug!("adding libs for plugin dep: {}", id); let output = &build_state[(id, Kind::Host)]; for path in output.library_paths.iter() { search_path.push(path.clone()); @@ -441,31 +447,33 @@ fn add_plugin_deps(rustc: &mut CommandPrototype, Ok(()) } -fn prepare_rustc(package: &Package, target: &Target, crate_types: Vec<&str>, +fn prepare_rustc(package: &Package, target: &Target, profile: &Profile, + crate_types: Vec<&str>, cx: &Context, req: Platform) -> CargoResult> { let mut base = try!(process(CommandType::Rustc, package, target, cx)); - build_base_args(cx, &mut base, package, target, &crate_types); + build_base_args(cx, &mut base, package, target, profile, &crate_types); - let mut target_cmd = base.clone(); - let mut plugin_cmd = base; - build_plugin_args(&mut target_cmd, cx, package, target, Kind::Target); - build_plugin_args(&mut plugin_cmd, cx, package, target, Kind::Host); - try!(build_deps_args(&mut target_cmd, target, package, cx, Kind::Target)); - try!(build_deps_args(&mut plugin_cmd, target, package, cx, Kind::Host)); + let mut targ_cmd = base.clone(); + let mut host_cmd = base; + build_plugin_args(&mut targ_cmd, cx, package, target, Kind::Target); + build_plugin_args(&mut host_cmd, cx, package, target, Kind::Host); + try!(build_deps_args(&mut targ_cmd, target, profile, package, cx, Kind::Target)); + try!(build_deps_args(&mut host_cmd, target, profile, package, cx, Kind::Host)); Ok(match req { - Platform::Target => vec![(target_cmd, Kind::Target)], - Platform::Plugin => vec![(plugin_cmd, Kind::Host)], - Platform::PluginAndTarget if cx.requested_target().is_none() => - vec![(target_cmd, Kind::Target)], - Platform::PluginAndTarget => vec![(target_cmd, Kind::Target), - (plugin_cmd, Kind::Host)], + Platform::Target 
=> vec![(targ_cmd, Kind::Target)], + Platform::Plugin => vec![(host_cmd, Kind::Host)], + Platform::PluginAndTarget if cx.requested_target().is_none() => { + vec![(targ_cmd, Kind::Target)] + } + Platform::PluginAndTarget => vec![(targ_cmd, Kind::Target), + (host_cmd, Kind::Host)], }) } -fn rustdoc(package: &Package, target: &Target, +fn rustdoc(package: &Package, target: &Target, profile: &Profile, cx: &mut Context) -> CargoResult { let kind = Kind::Target; let cx_root = cx.get_package(cx.resolve.root()).absolute_target_dir() @@ -485,7 +493,7 @@ fn rustdoc(package: &Package, target: &Target, None => {} } - try!(build_deps_args(&mut rustdoc, target, package, cx, kind)); + try!(build_deps_args(&mut rustdoc, target, profile, package, cx, kind)); if package.has_custom_build() { rustdoc.env("OUT_DIR", &cx.layout(package, kind).build_out(package)); @@ -544,8 +552,12 @@ fn build_base_args(cx: &Context, cmd: &mut CommandPrototype, pkg: &Package, target: &Target, + profile: &Profile, crate_types: &[&str]) { - let metadata = target.metadata(); + let Profile { + opt_level, lto, codegen_units, debuginfo, ndebug, rpath, test, + doc: _doc, + } = *profile; // Move to cwd so the root_path() passed below is actually correct cmd.cwd(cx.config.cwd()); @@ -559,43 +571,39 @@ fn build_base_args(cx: &Context, cmd.arg("--crate-type").arg(crate_type); } - // Despite whatever this target's profile says, we need to configure it - // based off the profile found in the root package's targets. - let profile = cx.profile(target); - - let prefer_dynamic = profile.is_for_host() || + let prefer_dynamic = target.for_host() || (crate_types.contains(&"dylib") && pkg.package_id() != cx.resolve.root()); if prefer_dynamic { cmd.arg("-C").arg("prefer-dynamic"); } - if profile.opt_level() != 0 { - cmd.arg("-C").arg(&format!("opt-level={}", profile.opt_level())); + if opt_level != 0 { + cmd.arg("-C").arg(&format!("opt-level={}", opt_level)); } // Disable LTO for host builds as prefer_dynamic and it are mutually // exclusive. - let lto = (target.is_bin() || target.is_staticlib()) && profile.lto() && - !profile.is_for_host(); - if lto { + if target.can_lto() && lto && !target.for_host() { cmd.args(&["-C", "lto"]); } else { // There are some restrictions with LTO and codegen-units, so we // only add codegen units when LTO is not used. 
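The profile-to-rustc flag mapping built up here and continued just below amounts to roughly the following; Profile is a local stand-in for Cargo's struct, not the real type:

struct Profile {
    opt_level: u32,
    lto: bool,
    codegen_units: Option<u32>,
    debuginfo: bool,
    ndebug: bool,
    rpath: bool,
}

fn profile_flags(p: &Profile, can_lto: bool, for_host: bool) -> Vec<String> {
    let mut args = Vec::new();
    if p.opt_level != 0 {
        args.push(format!("-C opt-level={}", p.opt_level));
    }
    if can_lto && p.lto && !for_host {
        // LTO and prefer-dynamic are mutually exclusive, so host builds skip it.
        args.push("-C lto".to_string());
    } else if let Some(n) = p.codegen_units {
        // codegen-units is only passed when LTO is not in use.
        args.push(format!("-C codegen-units={}", n));
    }
    if p.debuginfo {
        args.push("-g".to_string());
    }
    if p.ndebug {
        args.push("--cfg ndebug".to_string());
    }
    if p.rpath {
        args.push("-C rpath".to_string());
    }
    args
}

fn main() {
    let release = Profile { opt_level: 3, lto: false, codegen_units: None,
                            debuginfo: false, ndebug: true, rpath: false };
    println!("{:?}", profile_flags(&release, true, false));
}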
- match profile.codegen_units() { + match codegen_units { Some(n) => { cmd.arg("-C").arg(&format!("codegen-units={}", n)); } None => {}, } } - if profile.debug() { + if debuginfo { cmd.arg("-g"); - } else { + } + + if ndebug { cmd.args(&["--cfg", "ndebug"]); } - if profile.is_test() && profile.uses_test_harness() { + if test && target.harness() { cmd.arg("--test"); } @@ -608,7 +616,7 @@ fn build_base_args(cx: &Context, None => {} } - match metadata { + match cx.target_metadata(target, profile) { Some(m) => { cmd.arg("-C").arg(&format!("metadata={}", m.metadata)); cmd.arg("-C").arg(&format!("extra-filename={}", m.extra_filename)); @@ -616,7 +624,7 @@ fn build_base_args(cx: &Context, None => {} } - if profile.rpath() { + if rpath { cmd.arg("-C").arg("rpath"); } } @@ -641,8 +649,12 @@ fn build_plugin_args(cmd: &mut CommandPrototype, cx: &Context, pkg: &Package, } } -fn build_deps_args(cmd: &mut CommandPrototype, target: &Target, - package: &Package, cx: &Context, kind: Kind) +fn build_deps_args(cmd: &mut CommandPrototype, + target: &Target, + profile: &Profile, + package: &Package, + cx: &Context, + kind: Kind) -> CargoResult<()> { let layout = cx.layout(package, kind); cmd.arg("-L").arg(&{ @@ -660,24 +672,26 @@ fn build_deps_args(cmd: &mut CommandPrototype, target: &Target, cmd.env("OUT_DIR", &layout.build_out(package)); } - for &(pkg, target) in cx.dep_targets(package, target).iter() { - try!(link_to(cmd, pkg, target, cx, kind)); + for &(pkg, target, p) in cx.dep_targets(package, target, profile).iter() { + if target.linkable() { + try!(link_to(cmd, pkg, target, p, cx, kind)); + } } return Ok(()); fn link_to(cmd: &mut CommandPrototype, pkg: &Package, target: &Target, - cx: &Context, kind: Kind) -> CargoResult<()> { + profile: &Profile, cx: &Context, kind: Kind) -> CargoResult<()> { // If this target is itself a plugin *or* if it's being linked to a // plugin, then we want the plugin directory. Otherwise we want the // target directory (hence the || here). 
let layout = cx.layout(pkg, match kind { Kind::Host => Kind::Host, - Kind::Target if target.profile().is_for_host() => Kind::Host, + Kind::Target if target.for_host() => Kind::Host, Kind::Target => Kind::Target, }); - for filename in try!(cx.target_filenames(target)).iter() { + for filename in try!(cx.target_filenames(target, profile)).iter() { if filename.ends_with(".a") { continue } let mut v = OsString::new(); v.push(target.name()); @@ -707,29 +721,6 @@ pub fn process(cmd: CommandType, pkg: &Package, _target: &Target, Ok(cmd) } -fn each_dep<'a, F>(pkg: &Package, cx: &'a Context, mut f: F) - where F: FnMut(&'a Package) -{ - let mut visited = HashSet::new(); - let pkg = cx.get_package(pkg.package_id()); - visit_deps(pkg, cx, &mut visited, &mut f); - - fn visit_deps<'a, F>(pkg: &'a Package, cx: &'a Context, - visited: &mut HashSet<&'a PackageId>, f: &mut F) - where F: FnMut(&'a Package) - { - if !visited.insert(pkg.package_id()) { return } - f(pkg); - let deps = match cx.resolve.deps(pkg.package_id()) { - Some(deps) => deps, - None => return, - }; - for dep_id in deps { - visit_deps(cx.get_package(dep_id), cx, visited, f); - } - } -} - fn envify(s: &str) -> String { s.chars() .flat_map(|c| c.to_uppercase()) diff --git a/src/cargo/ops/cargo_test.rs b/src/cargo/ops/cargo_test.rs index 05469bf4ef4..2af21c2fcc8 100644 --- a/src/cargo/ops/cargo_test.rs +++ b/src/cargo/ops/cargo_test.rs @@ -3,62 +3,28 @@ use std::path::Path; use core::Source; use sources::PathSource; -use ops::{self, ExecEngine, ProcessEngine}; +use ops::{self, ExecEngine, ProcessEngine, Compilation}; use util::{CargoResult, ProcessError}; pub struct TestOptions<'a, 'b: 'a> { pub compile_opts: ops::CompileOptions<'a, 'b>, pub no_run: bool, - pub name: Option<&'a str>, } pub fn run_tests(manifest_path: &Path, options: &TestOptions, test_args: &[String]) -> CargoResult> { let config = options.compile_opts.config; - let mut source = try!(PathSource::for_path(&manifest_path.parent().unwrap(), - config)); - try!(source.update()); + let compile = match try!(build_and_run(manifest_path, options, test_args)) { + Ok(compile) => compile, + Err(e) => return Ok(Some(e)), + }; - let mut compile = try!(ops::compile(manifest_path, &options.compile_opts)); if options.no_run { return Ok(None) } - compile.tests.sort(); - - let tarname = options.name; - let tests_to_run = compile.tests.iter().filter(|&&(ref test_name, _)| { - tarname.map_or(true, |tarname| tarname == *test_name) - }); - - let cwd = config.cwd(); - for &(_, ref exe) in tests_to_run { - let to_display = match exe.relative_from(&cwd) { - Some(path) => path, - None => &**exe, - }; - let mut cmd = try!(compile.target_process(exe, &compile.package)); - cmd.args(test_args); - try!(config.shell().concise(|shell| { - shell.status("Running", to_display.display().to_string()) - })); - try!(config.shell().verbose(|shell| { - shell.status("Running", cmd.to_string()) - })); - match ExecEngine::exec(&mut ProcessEngine, cmd) { - Ok(()) => {} - Err(e) => return Ok(Some(e)) - } - } - - if options.name.is_some() { return Ok(None) } - if options.compile_opts.env == "bench" { return Ok(None) } - - let libs = compile.package.targets().iter().filter_map(|target| { - if !target.profile().is_doctest() || !target.is_lib() { - return None - } - Some((target.src_path(), target.name())) - }); + let libs = compile.package.targets().iter() + .filter(|t| t.doctested()) + .map(|t| (t.src_path(), t.name())); for (lib, name) in libs { try!(config.shell().status("Doc-tests", name)); @@ -104,5 +70,41 @@ pub fn 
run_benches(manifest_path: &Path, let mut args = args.to_vec(); args.push("--bench".to_string()); - run_tests(manifest_path, options, &args) + Ok(try!(build_and_run(manifest_path, options, &args)).err()) +} + +fn build_and_run(manifest_path: &Path, + options: &TestOptions, + test_args: &[String]) + -> CargoResult> { + let config = options.compile_opts.config; + let mut source = try!(PathSource::for_path(&manifest_path.parent().unwrap(), + config)); + try!(source.update()); + + let mut compile = try!(ops::compile(manifest_path, &options.compile_opts)); + if options.no_run { return Ok(Ok(compile)) } + compile.tests.sort(); + + let cwd = config.cwd(); + for &(_, ref exe) in &compile.tests { + let to_display = match exe.relative_from(&cwd) { + Some(path) => path, + None => &**exe, + }; + let mut cmd = try!(compile.target_process(exe, &compile.package)); + cmd.args(test_args); + try!(config.shell().concise(|shell| { + shell.status("Running", to_display.display().to_string()) + })); + try!(config.shell().verbose(|shell| { + shell.status("Running", cmd.to_string()) + })); + match ExecEngine::exec(&mut ProcessEngine, cmd) { + Ok(()) => {} + Err(e) => return Ok(Err(e)) + } + } + + Ok(Ok(compile)) } diff --git a/src/cargo/ops/mod.rs b/src/cargo/ops/mod.rs index 6b7341527e5..24106aa7a5f 100644 --- a/src/cargo/ops/mod.rs +++ b/src/cargo/ops/mod.rs @@ -1,5 +1,6 @@ pub use self::cargo_clean::{clean, CleanOptions}; pub use self::cargo_compile::{compile, compile_pkg, CompileOptions}; +pub use self::cargo_compile::{CompileFilter, CompileMode}; pub use self::cargo_read_manifest::{read_manifest,read_package,read_packages}; pub use self::cargo_rustc::{compile_targets, Compilation, Layout, Kind, rustc_version}; pub use self::cargo_rustc::{Context, LayoutProxy}; diff --git a/src/cargo/util/dependency_queue.rs b/src/cargo/util/dependency_queue.rs index 155606e4846..09bbc7ffeae 100644 --- a/src/cargo/util/dependency_queue.rs +++ b/src/cargo/util/dependency_queue.rs @@ -4,9 +4,8 @@ //! This structure is used to store the dependency graph and dynamically update //! it to figure out when a dependency should be built. -use std::collections::hash_set::HashSet; -use std::collections::hash_map::HashMap; use std::collections::hash_map::Entry::{Occupied, Vacant}; +use std::collections::{HashMap, HashSet}; use std::hash::Hash; pub use self::Freshness::{Fresh, Dirty}; @@ -73,10 +72,12 @@ impl DependencyQueue { /// /// It is assumed that any dependencies of this package will eventually also /// be added to the dependency queue. - pub fn enqueue(&mut self, cx: &K::Context, fresh: Freshness, key: K, - value: V) { - // ignore self-deps - if self.dep_map.contains_key(&key) { return } + pub fn queue(&mut self, cx: &K::Context, fresh: Freshness, + key: K, value: V) -> &mut V { + let slot = match self.dep_map.entry(key.clone()) { + Occupied(v) => return &mut v.into_mut().1, + Vacant(v) => v, + }; if fresh == Dirty { self.dirty.insert(key.clone()); @@ -91,7 +92,7 @@ impl DependencyQueue { }; assert!(rev.insert(key.clone())); } - assert!(self.dep_map.insert(key, (my_dependencies, value)).is_none()); + &mut slot.insert((my_dependencies, value)).1 } /// Dequeues a package that is ready to be built. 
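The `queue` method introduced above replaces the fire-and-forget `enqueue` with an insert-or-get slot, so callers can keep pushing work for the same key. A simplified sketch with String keys in place of (PackageId, Stage):

use std::collections::HashMap;

struct Queue {
    map: HashMap<String, Vec<String>>,
}

impl Queue {
    // Repeated calls with the same key hand back the same Vec.
    fn queue(&mut self, key: &str) -> &mut Vec<String> {
        self.map.entry(key.to_string()).or_insert_with(Vec::new)
    }
}

fn main() {
    let mut q = Queue { map: HashMap::new() };
    q.queue("foo/Libraries").push("compile lib".to_string());
    q.queue("foo/Libraries").push("compile lib test".to_string());
    assert_eq!(q.queue("foo/Libraries").len(), 2);
    assert!(q.queue("foo/Binaries").is_empty());
}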
diff --git a/src/cargo/util/toml.rs b/src/cargo/util/toml.rs index 603adcb65d1..ded986ee9f1 100644 --- a/src/cargo/util/toml.rs +++ b/src/cargo/util/toml.rs @@ -10,7 +10,7 @@ use toml; use semver; use rustc_serialize::{Decodable, Decoder}; -use core::SourceId; +use core::{SourceId, Profiles}; use core::{Summary, Manifest, Target, Dependency, PackageId, GitReference}; use core::dependency::Kind; use core::manifest::{LibKind, Profile, ManifestMetadata}; @@ -118,9 +118,7 @@ pub fn to_manifest(contents: &[u8], Some(ref toml) => add_unused_keys(&mut manifest, toml, "".to_string()), None => {} } - if manifest.targets().iter() - .filter(|t| !t.profile().is_custom_build() ) - .next().is_none() { + if !manifest.targets().iter().any(|t| !t.is_custom_build()) { return Err(human(format!("either a [lib] or [[bin]] section must \ be present"))) } @@ -449,15 +447,13 @@ impl TomlManifest { let new_build = project.build.as_ref().map(PathBuf::new); // Get targets - let profiles = self.profile.clone().unwrap_or(Default::default()); let targets = normalize(&lib, &bins, new_build, &examples, &tests, &benches, - &metadata, - &profiles); + &metadata); if targets.is_empty() { debug!("manifest has no build targets"); @@ -510,6 +506,7 @@ impl TomlManifest { repository: project.repository.clone(), keywords: project.keywords.clone().unwrap_or(Vec::new()), }; + let profiles = build_profiles(&self.profile); let mut manifest = Manifest::new(summary, targets, layout.root.join("target"), @@ -517,7 +514,8 @@ impl TomlManifest { exclude, include, project.links.clone(), - metadata); + metadata, + profiles); if used_deprecated_lib { manifest.add_warning(format!("the [[lib]] section has been \ deprecated in favor of [lib]")); @@ -648,77 +646,19 @@ fn normalize(libs: &[TomlLibTarget], examples: &[TomlExampleTarget], tests: &[TomlTestTarget], benches: &[TomlBenchTarget], - metadata: &Metadata, - profiles: &TomlProfiles) -> Vec { - #[derive(Copy)] - enum TestDep { Needed, NotNeeded } - - fn merge(profile: Profile, toml: &Option) -> Profile { - let toml = match *toml { - Some(ref toml) => toml, - None => return profile, - }; - let opt_level = toml.opt_level.unwrap_or(profile.opt_level()); - let lto = toml.lto.unwrap_or(profile.lto()); - let codegen_units = toml.codegen_units; - let debug = toml.debug.unwrap_or(profile.debug()); - let rpath = toml.rpath.unwrap_or(profile.rpath()); - profile.set_opt_level(opt_level).set_lto(lto) - .set_codegen_units(codegen_units) - .set_debug(debug).set_rpath(rpath) - } - - fn target_profiles(target: &TomlTarget, profiles: &TomlProfiles, - dep: TestDep) -> Vec { - let mut ret = vec![ - merge(Profile::default_dev(), &profiles.dev), - merge(Profile::default_release(), &profiles.release), - ]; - - match target.test { - Some(true) | None => { - ret.push(merge(Profile::default_test(), &profiles.test)); - } - Some(false) => {} - } - - let doctest = target.doctest.unwrap_or(true); - match target.doc { - Some(true) | None => { - ret.push(merge(Profile::default_doc().set_doctest(doctest), - &profiles.doc)); - } - Some(false) => {} - } - - match target.bench { - Some(true) | None => { - ret.push(merge(Profile::default_bench(), &profiles.bench)); - } - Some(false) => {} - } - - match dep { - TestDep::Needed => { - ret.push(merge(Profile::default_test().set_test(false), - &profiles.test)); - ret.push(merge(Profile::default_doc().set_doc(false), - &profiles.doc)); - ret.push(merge(Profile::default_bench().set_test(false), - &profiles.bench)); - } - _ => {} - } - - if target.plugin == Some(true) { - ret = 
ret.into_iter().map(|p| p.set_for_host(true)).collect(); - } - - ret + metadata: &Metadata) -> Vec { + fn configure(toml: &TomlTarget, target: &mut Target) { + let t2 = target.clone(); + target.set_tested(toml.test.unwrap_or(t2.tested())) + .set_doc(toml.doc.unwrap_or(t2.documented())) + .set_doctest(toml.doctest.unwrap_or(t2.doctested())) + .set_benched(toml.bench.unwrap_or(t2.benched())) + .set_harness(toml.harness.unwrap_or(t2.harness())) + .set_for_host(toml.plugin.unwrap_or(t2.for_host())); } fn lib_targets(dst: &mut Vec, libs: &[TomlLibTarget], - dep: TestDep, metadata: &Metadata, profiles: &TomlProfiles) { + metadata: &Metadata) { let l = &libs[0]; let path = l.path.clone().unwrap_or_else(|| { PathValue::Path(Path::new("src").join(&format!("{}.rs", l.name))) @@ -730,145 +670,100 @@ fn normalize(libs: &[TomlLibTarget], vec![if l.plugin == Some(true) {LibKind::Dylib} else {LibKind::Lib}] }); - for profile in target_profiles(l, profiles, dep).iter() { - let mut metadata = metadata.clone(); - // Libs and their tests are built in parallel, so we need to make - // sure that their metadata is different. - if profile.is_test() { - metadata.mix(&"test"); - } - dst.push(Target::lib_target(&l.name, crate_types.clone(), - &path.to_path(), profile, - metadata)); - } + let mut target = Target::lib_target(&l.name, crate_types.clone(), + &path.to_path(), + metadata.clone()); + configure(l, &mut target); + dst.push(target); } fn bin_targets(dst: &mut Vec, bins: &[TomlBinTarget], - dep: TestDep, metadata: &Metadata, - profiles: &TomlProfiles, default: &mut FnMut(&TomlBinTarget) -> PathBuf) { for bin in bins.iter() { let path = bin.path.clone().unwrap_or_else(|| { PathValue::Path(default(bin)) }); - - for profile in target_profiles(bin, profiles, dep).iter() { - let metadata = if profile.is_test() { - // Make sure that the name of this test executable doesn't - // conflicts with a library that has the same name and is - // being tested - let mut metadata = metadata.clone(); - metadata.mix(&format!("bin-{}", bin.name)); - Some(metadata) - } else { - None - }; - dst.push(Target::bin_target(&bin.name, - &path.to_path(), - profile, - metadata)); - } + let mut target = Target::bin_target(&bin.name, &path.to_path(), + None); + configure(bin, &mut target); + dst.push(target); } } - fn custom_build_target(dst: &mut Vec, cmd: &Path, - profiles: &TomlProfiles) { - let profiles = [ - merge(Profile::default_dev().set_for_host(true).set_custom_build(true), - &profiles.dev), - ]; - + fn custom_build_target(dst: &mut Vec, cmd: &Path) { let name = format!("build-script-{}", cmd.file_stem().and_then(|s| s.to_str()).unwrap_or("")); - for profile in profiles.iter() { - dst.push(Target::custom_build_target(&name, cmd, profile, None)); - } + dst.push(Target::custom_build_target(&name, cmd, None)); } - fn example_targets(dst: &mut Vec, examples: &[TomlExampleTarget], - profiles: &TomlProfiles, + fn example_targets(dst: &mut Vec, + examples: &[TomlExampleTarget], default: &mut FnMut(&TomlExampleTarget) -> PathBuf) { for ex in examples.iter() { let path = ex.path.clone().unwrap_or_else(|| { PathValue::Path(default(ex)) }); - let profile = merge(Profile::default_example(), &profiles.test); - let profile_release = merge(Profile::default_release(), &profiles.release); - dst.push(Target::example_target(&ex.name, - &path.to_path(), - &profile)); - dst.push(Target::example_target(&ex.name, - &path.to_path(), - &profile_release)); + let mut target = Target::example_target(&ex.name, &path.to_path()); + configure(ex, &mut target); + 
dst.push(target); } } fn test_targets(dst: &mut Vec, tests: &[TomlTestTarget], - metadata: &Metadata, profiles: &TomlProfiles, + metadata: &Metadata, default: &mut FnMut(&TomlTestTarget) -> PathBuf) { for test in tests.iter() { let path = test.path.clone().unwrap_or_else(|| { PathValue::Path(default(test)) }); - let harness = test.harness.unwrap_or(true); // make sure this metadata is different from any same-named libs. let mut metadata = metadata.clone(); metadata.mix(&format!("test-{}", test.name)); - let profile = Profile::default_test().set_harness(harness); - let profile = merge(profile, &profiles.test); - dst.push(Target::test_target(&test.name, - &path.to_path(), - &profile, - metadata)); + let mut target = Target::test_target(&test.name, &path.to_path(), + metadata); + configure(test, &mut target); + dst.push(target); } } fn bench_targets(dst: &mut Vec, benches: &[TomlBenchTarget], - metadata: &Metadata, profiles: &TomlProfiles, + metadata: &Metadata, default: &mut FnMut(&TomlBenchTarget) -> PathBuf) { for bench in benches.iter() { let path = bench.path.clone().unwrap_or_else(|| { PathValue::Path(default(bench)) }); - let harness = bench.harness.unwrap_or(true); // make sure this metadata is different from any same-named libs. let mut metadata = metadata.clone(); metadata.mix(&format!("bench-{}", bench.name)); - let profile = Profile::default_bench().set_harness(harness); - let profile = merge(profile, &profiles.bench); - dst.push(Target::bench_target(&bench.name, - &path.to_path(), - &profile, - metadata)); + let mut target = Target::bench_target(&bench.name, + &path.to_path(), + metadata); + configure(bench, &mut target); + dst.push(target); } } let mut ret = Vec::new(); - let test_dep = if examples.len() > 0 || tests.len() > 0 || benches.len() > 0 { - TestDep::Needed - } else { - TestDep::NotNeeded - }; - match (libs, bins) { ([_, ..], [_, ..]) => { - lib_targets(&mut ret, libs, TestDep::Needed, metadata, profiles); - bin_targets(&mut ret, bins, test_dep, metadata, profiles, + lib_targets(&mut ret, libs, metadata); + bin_targets(&mut ret, bins, &mut |bin| Path::new("src").join("bin") .join(&format!("{}.rs", bin.name))); }, ([_, ..], []) => { - lib_targets(&mut ret, libs, TestDep::Needed, metadata, profiles); + lib_targets(&mut ret, libs, metadata); }, ([], [_, ..]) => { - bin_targets(&mut ret, bins, test_dep, metadata, profiles, + bin_targets(&mut ret, bins, &mut |bin| Path::new("src") .join(&format!("{}.rs", bin.name))); }, @@ -876,14 +771,14 @@ fn normalize(libs: &[TomlLibTarget], } if let Some(custom_build) = custom_build { - custom_build_target(&mut ret, &custom_build, profiles); + custom_build_target(&mut ret, &custom_build); } - example_targets(&mut ret, examples, profiles, + example_targets(&mut ret, examples, &mut |ex| Path::new("examples") .join(&format!("{}.rs", ex.name))); - test_targets(&mut ret, tests, metadata, profiles, &mut |test| { + test_targets(&mut ret, tests, metadata, &mut |test| { if test.name == "test" { Path::new("src").join("test.rs") } else { @@ -891,7 +786,7 @@ fn normalize(libs: &[TomlLibTarget], } }); - bench_targets(&mut ret, benches, metadata, profiles, &mut |bench| { + bench_targets(&mut ret, benches, metadata, &mut |bench| { if bench.name == "bench" { Path::new("src").join("bench.rs") } else { @@ -901,3 +796,38 @@ fn normalize(libs: &[TomlLibTarget], ret } + +fn build_profiles(profiles: &Option) -> Profiles { + let profiles = profiles.as_ref(); + return Profiles { + release: merge(Profile::default_release(), + profiles.and_then(|p| 
p.release.as_ref())), + dev: merge(Profile::default_dev(), + profiles.and_then(|p| p.dev.as_ref())), + test: merge(Profile::default_test(), + profiles.and_then(|p| p.test.as_ref())), + bench: merge(Profile::default_bench(), + profiles.and_then(|p| p.bench.as_ref())), + doc: merge(Profile::default_doc(), + profiles.and_then(|p| p.doc.as_ref())), + }; + + fn merge(profile: Profile, toml: Option<&TomlProfile>) -> Profile { + let &TomlProfile { + opt_level, lto, codegen_units, debug, rpath + } = match toml { + Some(toml) => toml, + None => return profile, + }; + Profile { + opt_level: opt_level.unwrap_or(profile.opt_level), + lto: lto.unwrap_or(profile.lto), + codegen_units: codegen_units, + debuginfo: debug.unwrap_or(profile.debuginfo), + ndebug: !debug.unwrap_or(!profile.ndebug), + rpath: rpath.unwrap_or(profile.rpath), + test: profile.test, + doc: profile.doc, + } + } +} diff --git a/tests/test_cargo_bench.rs b/tests/test_cargo_bench.rs index 071d44d8aae..ccdf03b8e33 100644 --- a/tests/test_cargo_bench.rs +++ b/tests/test_cargo_bench.rs @@ -55,19 +55,11 @@ test!(bench_tarname { name = "foo" version = "0.0.1" authors = [] - - [[bin]] - name="bin1" - path="src/bin1.rs" - - [[bin]] - name="bin2" - path="src/bin2.rs" "#) - .file("src/bin1.rs", r#" + .file("benches/bin1.rs", r#" extern crate test; #[bench] fn run1(_ben: &mut test::Bencher) { }"#) - .file("src/bin2.rs", r#" + .file("benches/bin2.rs", r#" extern crate test; #[bench] fn run2(_ben: &mut test::Bencher) { }"#); diff --git a/tests/test_cargo_build_lib.rs b/tests/test_cargo_build_lib.rs index 97320184a0f..e26f0903321 100644 --- a/tests/test_cargo_build_lib.rs +++ b/tests/test_cargo_build_lib.rs @@ -51,7 +51,6 @@ test!(build_with_no_lib { "#); assert_that(p.cargo_process("build").arg("--lib"), - execs() - .with_status(101) - .with_stderr("There is no lib to build, remove `--lib` flag")); + execs().with_status(101) + .with_stderr("no library targets found")); }); diff --git a/tests/test_cargo_compile.rs b/tests/test_cargo_compile.rs index b3fc84c1a16..70029f0a230 100644 --- a/tests/test_cargo_compile.rs +++ b/tests/test_cargo_compile.rs @@ -5,7 +5,7 @@ use tempdir::TempDir; use support::{project, execs, main_file, basic_bin_manifest}; use support::{COMPILING, RUNNING, ProjectBuilder}; -use hamcrest::{assert_that, existing_file}; +use hamcrest::{assert_that, existing_file, is_not}; use support::paths::CargoPathExt; use cargo::util::process; @@ -600,13 +600,16 @@ test!(crate_version_env_vars { } "#); - assert_that(p.cargo_process("build"), execs().with_status(0)); + println!("build"); + assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); + println!("bin"); assert_that(process(&p.bin("foo")).unwrap(), execs().with_stdout(format!("0-5-1 @ alpha.1 in {}\n", p.root().display()).as_slice())); - assert_that(p.cargo("test"), + println!("test"); + assert_that(p.cargo("test").arg("-v"), execs().with_status(0)); }); @@ -977,7 +980,7 @@ test!(explicit_examples { fn main() { println!("{}, {}!", world::get_goodbye(), world::get_world()); } "#); - assert_that(p.cargo_process("test"), execs().with_status(0)); + assert_that(p.cargo_process("test").arg("-v"), execs().with_status(0)); assert_that(process(&p.bin("examples/hello")).unwrap(), execs().with_stdout("Hello, World!\n")); assert_that(process(&p.bin("examples/goodbye")).unwrap(), @@ -1346,9 +1349,7 @@ test!(dep_no_libs { "#) .file("bar/src/main.rs", ""); assert_that(foo.cargo_process("build"), - execs().with_status(101) - .with_stderr("\ -Package `bar v0.0.0 ([..])` has no library 
targets")); + execs().with_status(0)); }); test!(recompile_space_in_name { @@ -1545,18 +1546,18 @@ test!(example_bin_same_name { .file("src/main.rs", "fn main() {}") .file("examples/foo.rs", "fn main() {}"); - p.cargo_process("test").arg("--no-run") + p.cargo_process("test").arg("--no-run").arg("-v") .exec_with_output() .unwrap(); - assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("foo"), is_not(existing_file())); assert_that(&p.bin("examples/foo"), existing_file()); - p.cargo("test").arg("--no-run") - .exec_with_output() - .unwrap(); + p.cargo("test").arg("--no-run").arg("-v") + .exec_with_output() + .unwrap(); - assert_that(&p.bin("foo"), existing_file()); + assert_that(&p.bin("foo"), is_not(existing_file())); assert_that(&p.bin("examples/foo"), existing_file()); }); diff --git a/tests/test_cargo_compile_custom_build.rs b/tests/test_cargo_compile_custom_build.rs index 20b4557ef2e..301a10b8fab 100644 --- a/tests/test_cargo_compile_custom_build.rs +++ b/tests/test_cargo_compile_custom_build.rs @@ -91,7 +91,7 @@ test!(custom_build_env_vars { assert_eq!(opt, "0"); let opt = env::var("PROFILE").unwrap(); - assert_eq!(opt, "compile"); + assert_eq!(opt, "debug"); let debug = env::var("DEBUG").unwrap(); assert_eq!(debug, "true"); @@ -469,6 +469,7 @@ test!(testing_and_such { fn main() {} "#); + println!("build"); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); p.root().move_into_the_past().unwrap(); @@ -476,6 +477,7 @@ test!(testing_and_such { File::create(&p.root().join("src/lib.rs")).unwrap(); p.root().move_into_the_past().unwrap(); + println!("test"); assert_that(p.cargo("test").arg("-vj1"), execs().with_status(0) .with_stdout(format!("\ @@ -483,7 +485,7 @@ test!(testing_and_such { {running} `[..]build-script-build[..]` {running} `rustc [..] --crate-name foo [..]` {running} `rustc [..] --crate-name foo [..]` -{running} `[..]foo-[..]` +{running} `[..]foo-[..][..]` running 0 tests @@ -498,21 +500,22 @@ test result: ok. 
0 passed; 0 failed; 0 ignored; 0 measured

", compiling = COMPILING, running = RUNNING, doctest = DOCTEST).as_slice()));

+    println!("doc");
     assert_that(p.cargo("doc").arg("-v"),
                 execs().with_status(0)
                        .with_stdout(format!("\
 {compiling} foo v0.5.0 (file://[..])
 {running} `rustdoc [..]`
-{running} `rustc [..]`
 ", compiling = COMPILING, running = RUNNING).as_slice()));

     File::create(&p.root().join("src/main.rs")).unwrap()
          .write_all(b"fn main() {}").unwrap();
+    println!("run");
     assert_that(p.cargo("run"),
                 execs().with_status(0)
                        .with_stdout(format!("\
 {compiling} foo v0.5.0 (file://[..])
-{running} `target[..]foo`
+{running} `target[..]foo[..]`
 ", compiling = COMPILING, running = RUNNING).as_slice()));
 });
@@ -1176,7 +1179,7 @@ test!(profile_and_opt_level_set_correctly {

             fn main() {
                 assert_eq!(env::var("OPT_LEVEL").unwrap(), "3");
-                assert_eq!(env::var("PROFILE").unwrap(), "bench");
+                assert_eq!(env::var("PROFILE").unwrap(), "release");
                 assert_eq!(env::var("DEBUG").unwrap(), "false");
             }
         "#);
diff --git a/tests/test_cargo_compile_git_deps.rs b/tests/test_cargo_compile_git_deps.rs
index cae0dcaddfd..86a8bbfdfc7 100644
--- a/tests/test_cargo_compile_git_deps.rs
+++ b/tests/test_cargo_compile_git_deps.rs
@@ -965,7 +965,7 @@ test!(dep_with_changed_submodule {
         .with_stdout(format!("{} git repository `[..]`\n\
                               {} dep1 v0.5.0 ([..])\n\
                               {} foo v0.5.0 ([..])\n\
-                              {} `target[..]foo`\n\
+                              {} `target[..]foo[..]`\n\
                               project2\
                              ",
                              UPDATING,
@@ -1010,7 +1010,7 @@ test!(dep_with_changed_submodule {
     assert_that(project.cargo("run"), execs()
                 .with_stdout(format!("{compiling} dep1 v0.5.0 ([..])\n\
                                       {compiling} foo v0.5.0 ([..])\n\
-                                      {running} `target[..]foo`\n\
+                                      {running} `target[..]foo[..]`\n\
                                       project3\
                                      ",
                                      compiling = COMPILING, running = RUNNING))
diff --git a/tests/test_cargo_compile_plugins.rs b/tests/test_cargo_compile_plugins.rs
index b983e2cfbdc..798c6ca6f8c 100644
--- a/tests/test_cargo_compile_plugins.rs
+++ b/tests/test_cargo_compile_plugins.rs
@@ -49,7 +49,7 @@ test!(plugin_to_the_max {
             path = "../baz"
         "#)
         .file("src/lib.rs", r#"
-            #![feature(plugin_registrar)]
+            #![feature(plugin_registrar, rustc_private)]

             extern crate rustc;
             extern crate baz;
@@ -57,7 +57,7 @@ test!(plugin_to_the_max {
             use rustc::plugin::Registry;

             #[plugin_registrar]
-            pub fn foo(reg: &mut Registry) {
+            pub fn foo(_reg: &mut Registry) {
                 println!("{}", baz::baz());
             }
         "#);
diff --git a/tests/test_cargo_run.rs b/tests/test_cargo_run.rs
index 15d61e90637..8956c744b5b 100644
--- a/tests/test_cargo_run.rs
+++ b/tests/test_cargo_run.rs
@@ -22,7 +22,7 @@ test!(simple {
     assert_that(p.cargo_process("run"),
                 execs().with_status(0).with_stdout(format!("\
 {compiling} foo v0.0.1 ({dir})
-{running} `target{sep}debug{sep}foo`
+{running} `target{sep}debug{sep}foo[..]`
 hello
 ",
         compiling = COMPILING,
@@ -98,7 +98,7 @@ test!(too_many_bins {
     assert_that(p.cargo_process("run"),
                 execs().with_status(101)
                        .with_stderr("`cargo run` requires that a project only \
-                        have one executable. Use the `--bin` option \
+                        have one executable; use the `--bin` option \
                         to specify which one to run\n"));
 });
@@ -120,10 +120,12 @@ test!(specify_name {
             fn main() { println!("hello b.rs"); }
         "#);

-    assert_that(p.cargo_process("run").arg("--bin").arg("a"),
+    assert_that(p.cargo_process("run").arg("--bin").arg("a").arg("-v"),
                 execs().with_status(0).with_stdout(format!("\
 {compiling} foo v0.0.1 ({dir})
-{running} `target{sep}debug{sep}a`
+{running} `rustc src[..]lib.rs [..]`
+{running} `rustc src[..]a.rs [..]`
+{running} `target{sep}debug{sep}a[..]`
 hello a.rs
 ",
         compiling = COMPILING,
@@ -131,12 +133,14 @@ hello a.rs
         dir = path2url(p.root()),
         sep = SEP).as_slice()));

-    assert_that(p.cargo("run").arg("--bin").arg("b"),
+    assert_that(p.cargo("run").arg("--bin").arg("b").arg("-v"),
                 execs().with_status(0).with_stdout(format!("\
-{running} `target{sep}debug{sep}b`
+{compiling} foo v0.0.1 ([..])
+{running} `rustc src[..]b.rs [..]`
+{running} `target{sep}debug{sep}b[..]`
 hello b.rs
 ",
-        running = RUNNING,
+        running = RUNNING, compiling = COMPILING,
         sep = SEP).as_slice()));
 });
@@ -159,7 +163,7 @@ test!(run_example {
     assert_that(p.cargo_process("run").arg("--example").arg("a"),
                 execs().with_status(0).with_stdout(format!("\
 {compiling} foo v0.0.1 ({dir})
-{running} `target{sep}debug{sep}examples{sep}a`
+{running} `target{sep}debug{sep}examples{sep}a[..]`
 example
 ",
         compiling = COMPILING,
@@ -184,9 +188,10 @@ test!(either_name_or_example {
         "#);

     assert_that(p.cargo_process("run").arg("--bin").arg("a").arg("--example").arg("b"),
-                execs().with_status(1)
-                       .with_stderr("specify either `--bin` or `--example`, \
-                                     not both"));
+                execs().with_status(101)
+                       .with_stderr("`cargo run` can run at most one \
+                                     executable, but multiple were \
+                                     specified"));
 });

 test!(one_bin_multiple_examples {
@@ -211,7 +216,7 @@ test!(one_bin_multiple_examples {
     assert_that(p.cargo_process("run"),
                 execs().with_status(0).with_stdout(format!("\
 {compiling} foo v0.0.1 ({dir})
-{running} `target{sep}debug{sep}main`
+{running} `target{sep}debug{sep}main[..]`
 hello main.rs
 ",
         compiling = COMPILING,
@@ -284,7 +289,7 @@ test!(example_with_release_flag {
         -L dependency={dir}{sep}target{sep}release \
         -L dependency={dir}{sep}target{sep}release{sep}deps \
         --extern bar={dir}{sep}target{sep}release{sep}deps{sep}libbar-[..].rlib`
-{running} `target{sep}release{sep}examples{sep}a`
+{running} `target{sep}release{sep}examples{sep}a[..]`
 fast1
 fast2
 ",
@@ -313,7 +318,7 @@ fast2
         -L dependency={dir}{sep}target{sep}debug \
         -L dependency={dir}{sep}target{sep}debug{sep}deps \
         --extern bar={dir}{sep}target{sep}debug{sep}deps{sep}libbar-[..].rlib`
-{running} `target{sep}debug{sep}examples{sep}a`
+{running} `target{sep}debug{sep}examples{sep}a[..]`
 slow1
 slow2
 ",
@@ -370,7 +375,7 @@ test!(release_works {
     assert_that(p.cargo_process("run").arg("--release"),
                 execs().with_status(0).with_stdout(format!("\
 {compiling} foo v0.0.1 ({dir})
-{running} `target{sep}release{sep}foo`
+{running} `target{sep}release{sep}foo[..]`
 ",
         compiling = COMPILING,
         running = RUNNING,
diff --git a/tests/test_cargo_test.rs b/tests/test_cargo_test.rs
index bde0542dc5e..fb8a43b5594 100644
--- a/tests/test_cargo_test.rs
+++ b/tests/test_cargo_test.rs
@@ -3,7 +3,7 @@ use std::str;
 use support::{project, execs, basic_bin_manifest, basic_lib_manifest};
 use support::{COMPILING, RUNNING, DOCTEST};
 use support::paths::CargoPathExt;
-use hamcrest::{assert_that, existing_file};
+use hamcrest::{assert_that, existing_file, is_not};
 use cargo::util::process;

 fn setup() {}
@@ -91,7 +91,7 @@ test!(many_similar_names {
             #[test] fn test_test() { foo::foo() }
         "#);

-    let output = p.cargo_process("test").exec_with_output().unwrap();
+    let output = p.cargo_process("test").arg("-v").exec_with_output().unwrap();
     let output = str::from_utf8(&output.stdout).unwrap();
     assert!(output.contains("test bin_test"), "bin_test missing\n{}", output);
     assert!(output.contains("test lib_test"), "lib_test missing\n{}", output);
@@ -898,7 +898,7 @@ test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
         running = RUNNING,
         dir = prj.url());

-    assert_that(prj.cargo_process("test").arg("--test").arg("bin2"),
+    assert_that(prj.cargo_process("test").arg("--bin").arg("bin2"),
                 execs().with_status(0).with_stdout(expected_stdout.as_slice()));
 });
@@ -924,13 +924,6 @@ test test_b ... ok

 test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured

-{running} target[..]b-[..]
-
-running 1 test
-test test_b ... ok
-
-test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
-
 ",
         compiling = COMPILING,
         running = RUNNING,
@@ -999,6 +992,7 @@ test!(selective_testing {
             doctest = false
         "#)
         .file("d1/src/lib.rs", "")
+        .file("d1/src/main.rs", "extern crate d1; fn main() {}")
         .file("d2/Cargo.toml", r#"
             [package]
             name = "d2"
@@ -1009,7 +1003,8 @@ test!(selective_testing {
             name = "d2"
             doctest = false
         "#)
-        .file("d2/src/lib.rs", "");
+        .file("d2/src/lib.rs", "")
+        .file("d2/src/main.rs", "extern crate d2; fn main() {}");
     p.build();

     println!("d1");
@@ -1021,7 +1016,14 @@ test!(selective_testing {

 running 0 tests

-test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured\n
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
+
+{running} target[..]d1-[..]
+
+running 0 tests
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
+
 ",
         compiling = COMPILING, running = RUNNING,
         dir = p.url()).as_slice()));
@@ -1034,7 +1036,14 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured\n

 running 0 tests

-test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured\n
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
+
+{running} target[..]d2-[..]
+
+running 0 tests
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
+
 ",
         compiling = COMPILING, running = RUNNING,
         dir = p.url()).as_slice()));
@@ -1047,7 +1056,8 @@ test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured\n
 running 0 tests

-test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured\n
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
+
 ",
         compiling = COMPILING, running = RUNNING,
         dir = p.url()).as_slice()));
 });
@@ -1147,7 +1157,7 @@ test!(example_dev_dep {
         .file("bar/src/lib.rs", r#"
             #![feature(macro_rules)]
             // make sure this file takes awhile to compile
-            macro_rules! f0( () => (1u) );
+            macro_rules! f0( () => (1) );
             macro_rules! f1( () => ({(f0!()) + (f0!())}) );
             macro_rules! f2( () => ({(f1!()) + (f1!())}) );
             macro_rules! f3( () => ({(f2!()) + (f2!())}) );
@@ -1165,6 +1175,9 @@ test!(example_dev_dep {
         "#);
     assert_that(p.cargo_process("test"),
                 execs().with_status(0));
+    assert_that(p.cargo("run")
+                 .arg("--example").arg("e1").arg("--release").arg("-v"),
+                execs().with_status(0));
 });

 test!(selective_testing_with_docs {
@@ -1232,18 +1245,24 @@ test!(example_bin_same_name {
                 execs().with_status(0)
                        .with_stdout(format!("\
 {compiling} foo v0.0.1 ({dir})
-{running} `rustc [..]bin[..]foo.rs [..] --test [..]`
-{running} `rustc [..]bin[..]foo.rs [..]`
-{running} `rustc [..]examples[..]foo.rs [..]`
+{running} `rustc [..]`
+{running} `rustc [..]`
 ", compiling = COMPILING, running = RUNNING,
    dir = p.url()).as_slice()));

-    assert_that(&p.bin("foo"), existing_file());
+    assert_that(&p.bin("foo"), is_not(existing_file()));
     assert_that(&p.bin("examples/foo"), existing_file());

-    assert_that(p.process(&p.bin("foo")),
-                execs().with_status(0).with_stdout("bin\n"));
     assert_that(p.process(&p.bin("examples/foo")),
                 execs().with_status(0).with_stdout("example\n"));
+
+    assert_that(p.cargo_process("run"),
+                execs().with_status(0)
+                       .with_stdout(format!("\
+{compiling} foo v0.0.1 ([..])
+{running} [..]
+bin
+", compiling = COMPILING, running = RUNNING).as_slice()));
+    assert_that(&p.bin("foo"), existing_file());
 });

 test!(test_with_example_twice {
@@ -1337,11 +1356,11 @@ test!(bad_example {

     assert_that(p.cargo_process("run").arg("--example").arg("foo"),
                 execs().with_status(101).with_stderr("\
-no example target named `foo` to run
+no example target named `foo`
 "));
     assert_that(p.cargo_process("run").arg("--bin").arg("foo"),
                 execs().with_status(101).with_stderr("\
-no bin target named `foo` to run
+no bin target named `foo`
 "));
 });