diff --git a/Makefile.in b/Makefile.in index e6dff63e006..45f4dab6baa 100644 --- a/Makefile.in +++ b/Makefile.in @@ -136,7 +136,7 @@ clean: # === Documentation DOCS := index faq config guide manifest build-script pkgid-spec crates-io \ - environment-variables specifying-dependencies + environment-variables specifying-dependencies source-replacement DOC_DIR := target/doc DOC_OPTS := --markdown-no-toc \ --markdown-css stylesheets/normalize.css \ diff --git a/src/bin/git_checkout.rs b/src/bin/git_checkout.rs index e67844cce57..f7762483ea6 100644 --- a/src/bin/git_checkout.rs +++ b/src/bin/git_checkout.rs @@ -1,6 +1,6 @@ use cargo::core::source::{Source, SourceId, GitReference}; use cargo::sources::git::{GitSource}; -use cargo::util::{Config, CliResult, CliError, human, ToUrl}; +use cargo::util::{Config, CliResult, ToUrl}; #[derive(RustcDecodable)] pub struct Options { @@ -37,20 +37,14 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { options.flag_locked)); let Options { flag_url: url, flag_reference: reference, .. 
} = options; - let url = try!(url.to_url().map_err(|e| { - human(format!("The URL `{}` you passed was \ - not a valid URL: {}", url, e)) - }) - .map_err(|e| CliError::new(e, 1))); + let url = try!(url.to_url()); let reference = GitReference::Branch(reference.clone()); let source_id = SourceId::for_git(&url, reference); let mut source = GitSource::new(&source_id, config); - try!(source.update().map_err(|e| { - CliError::new(human(format!("Couldn't update {:?}: {:?}", source, e)), 1) - })); + try!(source.update()); Ok(None) } diff --git a/src/bin/install.rs b/src/bin/install.rs index 78d8f58fd59..2a58688021d 100644 --- a/src/bin/install.rs +++ b/src/bin/install.rs @@ -1,6 +1,6 @@ use cargo::ops; use cargo::core::{SourceId, GitReference}; -use cargo::util::{CliResult, Config, ToUrl, human}; +use cargo::util::{CliResult, Config, ToUrl}; #[derive(RustcDecodable)] pub struct Options { @@ -116,7 +116,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { }; let source = if let Some(url) = options.flag_git { - let url = try!(url.to_url().map_err(human)); + let url = try!(url.to_url()); let gitref = if let Some(branch) = options.flag_branch { GitReference::Branch(branch) } else if let Some(tag) = options.flag_tag { @@ -132,7 +132,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { } else if options.arg_crate == None { try!(SourceId::for_path(&config.cwd())) } else { - try!(SourceId::for_central(config)) + try!(SourceId::crates_io(config)) }; let krate = options.arg_crate.as_ref().map(|s| &s[..]); diff --git a/src/bin/login.rs b/src/bin/login.rs index 635321c578d..53de98af66e 100644 --- a/src/bin/login.rs +++ b/src/bin/login.rs @@ -43,10 +43,10 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { let token = match options.arg_token.clone() { Some(token) => token, None => { - let src = try!(SourceId::for_central(config)); - let mut src = RegistrySource::new(&src, config); + let src = try!(SourceId::crates_io(config)); + let 
mut src = RegistrySource::remote(&src, config); try!(src.update()); - let config = try!(src.config()); + let config = try!(src.config()).unwrap(); let host = options.flag_host.clone().unwrap_or(config.api); println!("please visit {}me and paste the API Token below", host); let mut line = String::new(); diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs index 08966e6e438..48f4bd456d9 100644 --- a/src/cargo/core/dependency.rs +++ b/src/cargo/core/dependency.rs @@ -249,6 +249,7 @@ impl Dependency { pub fn is_transitive(&self) -> bool { self.inner.is_transitive() } pub fn is_build(&self) -> bool { self.inner.is_build() } pub fn is_optional(&self) -> bool { self.inner.is_optional() } + /// Returns true if the default features of the dependency are requested. pub fn uses_default_features(&self) -> bool { self.inner.uses_default_features() @@ -263,6 +264,17 @@ impl Dependency { pub fn matches_id(&self, id: &PackageId) -> bool { self.inner.matches_id(id) } + + pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) + -> Dependency { + if self.source_id() != to_replace { + self + } else { + Rc::try_unwrap(self.inner).unwrap_or_else(|r| (*r).clone()) + .set_source_id(replace_with.clone()) + .into_dependency() + } + } } impl Platform { diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index a4b401df031..02e9b4527df 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -4,7 +4,8 @@ use std::path::{PathBuf, Path}; use semver::Version; use rustc_serialize::{Encoder, Encodable}; -use core::{Dependency, PackageId, PackageIdSpec, Summary, WorkspaceConfig}; +use core::{Dependency, PackageId, Summary, SourceId, PackageIdSpec}; +use core::WorkspaceConfig; use core::package_id::Metadata; pub enum EitherManifest { @@ -232,6 +233,14 @@ impl Manifest { pub fn set_summary(&mut self, summary: Summary) { self.summary = summary; } + + pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) + -> 
Manifest { + Manifest { + summary: self.summary.map_source(to_replace, replace_with), + ..self + } + } } impl VirtualManifest { diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index 112d68b0653..ffd5c93640a 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -99,6 +99,14 @@ impl Package { .filter(|&(d, _)| d < 4); matches.min_by_key(|t| t.0).map(|t| t.1) } + + pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) + -> Package { + Package { + manifest: self.manifest.map_source(to_replace, replace_with), + manifest_path: self.manifest_path, + } + } } impl fmt::Display for Package { diff --git a/src/cargo/core/package_id.rs b/src/cargo/core/package_id.rs index bd3a66d94c8..b29b8ca11d5 100644 --- a/src/cargo/core/package_id.rs +++ b/src/cargo/core/package_id.rs @@ -13,12 +13,12 @@ use util::{CargoResult, CargoError, short_hash, ToSemver}; use core::source::SourceId; /// Identifier for a specific version of a package in a specific source. 
-#[derive(Clone, Debug)] +#[derive(Clone)] pub struct PackageId { inner: Arc, } -#[derive(PartialEq, PartialOrd, Eq, Ord, Debug)] +#[derive(PartialEq, PartialOrd, Eq, Ord)] struct PackageIdInner { name: String, version: semver::Version, @@ -38,13 +38,19 @@ impl Decodable for PackageId { fn decode(d: &mut D) -> Result { let string: String = try!(Decodable::decode(d)); let regex = Regex::new(r"^([^ ]+) ([^ ]+) \(([^\)]+)\)$").unwrap(); - let captures = regex.captures(&string).expect("invalid serialized PackageId"); + let captures = try!(regex.captures(&string).ok_or_else(|| { + d.error("invalid serialized PackageId") + })); let name = captures.at(1).unwrap(); let version = captures.at(2).unwrap(); let url = captures.at(3).unwrap(); - let version = semver::Version::parse(version).ok().expect("invalid version"); - let source_id = SourceId::from_url(url); + let version = try!(semver::Version::parse(version).map_err(|_| { + d.error("invalid version") + })); + let source_id = try!(SourceId::from_url(url).map_err(|e| { + d.error(&e.to_string()) + })); Ok(PackageId { inner: Arc::new(PackageIdInner { @@ -151,6 +157,16 @@ impl PackageId { }), } } + + pub fn with_source_id(&self, source: &SourceId) -> PackageId { + PackageId { + inner: Arc::new(PackageIdInner { + name: self.inner.name.to_string(), + version: self.inner.version.clone(), + source_id: source.clone(), + }), + } + } } impl Metadata { @@ -173,16 +189,26 @@ impl fmt::Display for PackageId { } } +impl fmt::Debug for PackageId { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + f.debug_struct("PackageId") + .field("name", &self.inner.name) + .field("version", &self.inner.version.to_string()) + .field("source", &self.inner.source_id.to_string()) + .finish() + } +} + #[cfg(test)] mod tests { use super::PackageId; use core::source::SourceId; - use sources::RegistrySource; + use sources::CRATES_IO; use util::ToUrl; #[test] fn invalid_version_handled_nicely() { - let loc = 
RegistrySource::default_url().to_url().unwrap(); + let loc = CRATES_IO.to_url().unwrap(); let repo = SourceId::for_registry(&loc); assert!(PackageId::new("foo", "1.0", &repo).is_err()); diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index ccfe87cabee..90a1b76d244 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -3,6 +3,7 @@ use std::collections::{HashSet, HashMap}; use core::{Source, SourceId, SourceMap, Summary, Dependency, PackageId, Package}; use core::PackageSet; use util::{CargoResult, ChainError, Config, human, profile}; +use sources::config::SourceConfigMap; /// Source of information about a group of packages. /// @@ -10,6 +11,14 @@ use util::{CargoResult, ChainError, Config, human, profile}; pub trait Registry { /// Attempt to find the packages that match a dependency request. fn query(&mut self, name: &Dependency) -> CargoResult>; + + /// Returns whether or not this registry will return summaries with + /// checksums listed. + /// + /// By default, registries do not support checksums. + fn supports_checksums(&self) -> bool { + false + } } impl Registry for Vec { @@ -26,6 +35,12 @@ impl Registry for Vec { } } +impl<'a, T: ?Sized + Registry + 'a> Registry for Box { + fn query(&mut self, name: &Dependency) -> CargoResult> { + (**self).query(name) + } +} + /// This structure represents a registry of known packages. It internally /// contains a number of `Box` instances which are used to load a /// `Package` from. @@ -41,7 +56,6 @@ impl Registry for Vec { /// operations if necessary) and is ready to be queried for packages. pub struct PackageRegistry<'cfg> { sources: SourceMap<'cfg>, - config: &'cfg Config, // A list of sources which are considered "overrides" which take precedent // when querying for packages. 
@@ -65,6 +79,7 @@ pub struct PackageRegistry<'cfg> { source_ids: HashMap, locked: HashMap)>>>, + source_config: SourceConfigMap<'cfg>, } #[derive(PartialEq, Eq, Clone, Copy)] @@ -75,14 +90,15 @@ enum Kind { } impl<'cfg> PackageRegistry<'cfg> { - pub fn new(config: &'cfg Config) -> PackageRegistry<'cfg> { - PackageRegistry { + pub fn new(config: &'cfg Config) -> CargoResult> { + let source_config = try!(SourceConfigMap::new(config)); + Ok(PackageRegistry { sources: SourceMap::new(), source_ids: HashMap::new(), overrides: Vec::new(), - config: config, + source_config: source_config, locked: HashMap::new(), - } + }) } pub fn get(self, package_ids: &[PackageId]) -> PackageSet<'cfg> { @@ -158,8 +174,8 @@ impl<'cfg> PackageRegistry<'cfg> { fn load(&mut self, source_id: &SourceId, kind: Kind) -> CargoResult<()> { (|| { - // Save off the source - let source = source_id.load(self.config); + let source = try!(self.source_config.load(source_id)); + if kind == Kind::Override { self.overrides.push(source_id.clone()); } diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index 256e7a49c0c..8771a95328b 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -1,10 +1,12 @@ use std::collections::{HashMap, BTreeMap}; +use std::fmt; +use std::str::FromStr; use regex::Regex; use rustc_serialize::{Encodable, Encoder, Decodable, Decoder}; use core::{Package, PackageId, SourceId, Workspace}; -use util::{CargoResult, Graph, Config}; +use util::{CargoResult, Graph, Config, internal, ChainError, CargoError}; use super::Resolve; @@ -18,7 +20,7 @@ pub struct EncodableResolve { pub type Metadata = BTreeMap; impl EncodableResolve { - pub fn to_resolve(&self, ws: &Workspace) -> CargoResult { + pub fn to_resolve(self, ws: &Workspace) -> CargoResult { let path_deps = build_path_deps(ws); let default = try!(ws.current()).package_id().source_id(); @@ -90,13 +92,56 @@ impl EncodableResolve { try!(add_dependencies(id, pkg)); } } + let 
mut metadata = self.metadata.unwrap_or(BTreeMap::new()); + + // Parse out all package checksums. After we do this we can be in a few + // situations: + // + // * We parsed no checksums. In this situation we're dealing with an old + // lock file and we're gonna fill them all in. + // * We parsed some checksums, but not one for all packages listed. It + // could have been the case that some were listed, then an older Cargo + // client added more dependencies, and now we're going to fill in the + // missing ones. + // * There are too many checksums listed, indicative of an older Cargo + // client removing a package but not updating the checksums listed. + // + // In all of these situations they're part of normal usage, so we don't + // really worry about it. We just try to slurp up as many checksums as + // possible. + let mut checksums = HashMap::new(); + let prefix = "checksum "; + let mut to_remove = Vec::new(); + for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) { + to_remove.push(k.to_string()); + let k = &k[prefix.len()..]; + let id: EncodablePackageId = try!(k.parse().chain_error(|| { + internal("invalid encoding of checksum in lockfile") + })); + let id = try!(to_package_id(&id.name, + &id.version, + id.source.as_ref(), + default, + &path_deps)); + let v = if v == "" { + None + } else { + Some(v.to_string()) + }; + checksums.insert(id, v); + } + + for k in to_remove { + metadata.remove(&k); + } Ok(Resolve { graph: g, root: root, features: HashMap::new(), - metadata: self.metadata.clone(), replacements: replacements, + checksums: checksums, + metadata: metadata, }) } } @@ -168,29 +213,32 @@ pub struct EncodablePackageId { source: Option } -impl Encodable for EncodablePackageId { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - let mut out = format!("{} {}", self.name, self.version); +impl fmt::Display for EncodablePackageId { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + try!(write!(f, "{} {}", self.name, self.version)); if 
let Some(ref s) = self.source { - out.push_str(&format!(" ({})", s.to_url())); + try!(write!(f, " ({})", s.to_url())); } - out.encode(s) + Ok(()) } } -impl Decodable for EncodablePackageId { - fn decode(d: &mut D) -> Result { - let string: String = try!(Decodable::decode(d)); +impl FromStr for EncodablePackageId { + type Err = Box; + + fn from_str(s: &str) -> CargoResult { let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap(); - let captures = regex.captures(&string) - .expect("invalid serialized PackageId"); + let captures = try!(regex.captures(s).ok_or_else(|| { + internal("invalid serialized PackageId") + })); let name = captures.at(1).unwrap(); let version = captures.at(2).unwrap(); - let source = captures.at(3); - - let source_id = source.map(|s| SourceId::from_url(s)); + let source_id = match captures.at(3) { + Some(s) => Some(try!(SourceId::from_url(s))), + None => None, + }; Ok(EncodablePackageId { name: name.to_string(), @@ -200,6 +248,21 @@ impl Decodable for EncodablePackageId { } } +impl Encodable for EncodablePackageId { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + self.to_string().encode(s) + } +} + +impl Decodable for EncodablePackageId { + fn decode(d: &mut D) -> Result { + String::decode(d).and_then(|string| { + string.parse::() + .map_err(|e| d.error(&e.to_string())) + }) + } +} + pub struct WorkspaceResolve<'a, 'cfg: 'a> { pub ws: &'a Workspace<'cfg>, pub resolve: &'a Resolve, @@ -220,12 +283,25 @@ impl<'a, 'cfg> Encodable for WorkspaceResolve<'a, 'cfg> { } Some(encodable_resolve_node(id, self.resolve)) - }).collect::>(); + }).collect::>(); + + let mut metadata = self.resolve.metadata.clone(); + + for id in ids.iter().filter(|id| !id.source_id().is_path()) { + let checksum = match self.resolve.checksums[*id] { + Some(ref s) => &s[..], + None => "", + }; + let id = encodable_package_id(id); + metadata.insert(format!("checksum {}", id.to_string()), + checksum.to_string()); + } + let metadata = if metadata.len() == 0 
{None} else {Some(metadata)}; EncodableResolve { package: Some(encodable), root: encodable_resolve_node(&root, self.resolve), - metadata: self.resolve.metadata.clone(), + metadata: metadata, }.encode(s) } } diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 2ecbbd0afe4..c774481f188 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -46,7 +46,7 @@ //! over the place. use std::cmp::Ordering; -use std::collections::{HashSet, HashMap, BinaryHeap}; +use std::collections::{HashSet, HashMap, BinaryHeap, BTreeMap}; use std::fmt; use std::ops::Range; use std::rc::Rc; @@ -75,8 +75,9 @@ pub struct Resolve { graph: Graph, replacements: HashMap, features: HashMap>, + checksums: HashMap>, root: PackageId, - metadata: Option, + metadata: Metadata, } pub struct Deps<'a> { @@ -115,27 +116,93 @@ struct Candidate { } impl Resolve { - fn new(root: PackageId) -> Resolve { - let mut g = Graph::new(); - g.add(root.clone(), &[]); - Resolve { - graph: g, - root: root, - replacements: HashMap::new(), - features: HashMap::new(), - metadata: None, + pub fn merge_from(&mut self, previous: &Resolve) -> CargoResult<()> { + // Given a previous instance of resolve, it should be forbidden to ever + // have checksums which *differ*. If the same package id has differing + // checksums, then something has gone wrong such as: + // + // * Something got seriously corrupted + // * A "mirror" isn't actually a mirror as some changes were made + // * A replacement source wasn't actually a replacement, some changes + // were made + // + // In all of these cases, we want to report an error to indicate that + // something is awry. Normal execution (esp just using crates.io) should + // never run into this. + for (id, cksum) in previous.checksums.iter() { + if let Some(mine) = self.checksums.get(id) { + if mine == cksum { + continue + } + + // If the previous checksum wasn't calculated, the current + // checksum is `Some`. 
This may indicate that a source was + // erroneously replaced or was replaced with something that + // desires stronger checksum guarantees than can be afforded + // elsewhere. + if cksum.is_none() { + bail!("\ +checksum for `{}` was not previously calculated, but a checksum could now \ +be calculated + +this could be indicative of a few possible situations: + + * the source `{}` did not previously support checksums, + but was replaced with one that does + * newer Cargo implementations know how to checksum this source, but this + older implementation does not + * the lock file is corrupt +", id, id.source_id()) + + // If our checksum hasn't been calculated, then it could mean + // that future Cargo figured out how to checksum something or + // more realistically we were overridden with a source that does + // not have checksums. + } else if mine.is_none() { + bail!("\ +checksum for `{}` could not be calculated, but a checksum is listed in \ +the existing lock file + +this could be indicative of a few possible situations: + + * the source `{}` supports checksums, + but was replaced with one that doesn't + * the lock file is corrupt + +unable to verify that `{0}` is the same as when the lockfile was generated +", id, id.source_id()) + + // If the checksums aren't equal, and neither is None, then they + // must both be Some, in which case the checksum now differs. + // That's quite bad! + } else { + bail!("\ +checksum for `{}` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `{0}` is the same as when the lockfile was generated +", id); + } + } } - } - pub fn copy_metadata(&mut self, other: &Resolve) { - self.metadata = other.metadata.clone(); + // Be sure to just copy over any unknown metadata. 
+ self.metadata = previous.metadata.clone(); + Ok(()) } pub fn iter(&self) -> Nodes { self.graph.iter() } - pub fn root(&self) -> &PackageId { &self.root } + pub fn root(&self) -> &PackageId { + &self.root + } pub fn deps(&self, pkg: &PackageId) -> Deps { Deps { edges: self.graph.edges(pkg), resolve: self } @@ -194,7 +261,9 @@ impl<'a> Iterator for DepsNotReplaced<'a> { #[derive(Clone)] struct Context<'a> { activations: HashMap<(String, SourceId), Vec>>, - resolve: Resolve, + resolve_graph: Graph, + resolve_features: HashMap>, + resolve_replacements: HashMap, replacements: &'a [(PackageIdSpec, Dependency)], } @@ -204,14 +273,33 @@ pub fn resolve(root: &PackageId, replacements: &[(PackageIdSpec, Dependency)], registry: &mut Registry) -> CargoResult { let cx = Context { - resolve: Resolve::new(root.clone()), + resolve_graph: Graph::new(), + resolve_features: HashMap::new(), + resolve_replacements: HashMap::new(), activations: HashMap::new(), replacements: replacements, }; let _p = profile::start(format!("resolving: {}", root)); let cx = try!(activate_deps_loop(cx, registry, summaries)); - try!(check_cycles(&cx)); - Ok(cx.resolve) + + let mut resolve = Resolve { + graph: cx.resolve_graph, + features: cx.resolve_features, + root: root.clone(), + checksums: HashMap::new(), + metadata: BTreeMap::new(), + replacements: cx.resolve_replacements, + }; + + for summary in cx.activations.values().flat_map(|v| v.iter()) { + let cksum = summary.checksum().map(|s| s.to_string()); + resolve.checksums.insert(summary.package_id().clone(), cksum); + } + + try!(check_cycles(&resolve, &cx.activations)); + + trace!("resolved: {:?}", resolve); + Ok(resolve) } /// Attempts to activate the summary `candidate` in the context `cx`. 
@@ -227,7 +315,7 @@ fn activate(cx: &mut Context, method: &Method) -> CargoResult> { if let Some(parent) = parent { - cx.resolve.graph.link(parent.package_id().clone(), + cx.resolve_graph.link(parent.package_id().clone(), candidate.summary.package_id().clone()); } @@ -237,7 +325,7 @@ fn activate(cx: &mut Context, let candidate = match candidate.replace { Some(replace) => { - cx.resolve.replacements.insert(candidate.summary.package_id().clone(), + cx.resolve_replacements.insert(candidate.summary.package_id().clone(), replace.package_id().clone()); if cx.flag_activated(&replace, method) { return Ok(None); @@ -480,7 +568,7 @@ fn activate_deps_loop<'a>(mut cx: Context<'a>, remaining_deps.extend(try!(activate(&mut cx, registry, Some(&parent), candidate, &method))); } - trace!("resolved: {:?}", cx.resolve); + Ok(cx) } @@ -523,8 +611,8 @@ fn activation_error(cx: &Context, dep.name(), parent.name(), dep.name()); 'outer: for v in prev_active.iter() { - for node in cx.resolve.graph.iter() { - let edges = match cx.resolve.graph.edges(node) { + for node in cx.resolve_graph.iter() { + let edges = match cx.resolve_graph.edges(node) { Some(edges) => edges, None => continue, }; @@ -709,7 +797,7 @@ impl<'a> Context<'a> { let key = (id.name().to_string(), id.source_id().clone()); let prev = self.activations.entry(key).or_insert(Vec::new()); if !prev.iter().any(|c| c == summary) { - self.resolve.graph.add(id.clone(), &[]); + self.resolve_graph.add(id.clone(), &[]); prev.push(summary.clone()); return false } @@ -722,7 +810,7 @@ impl<'a> Context<'a> { }; let has_default_feature = summary.features().contains_key("default"); - match self.resolve.features(id) { + match self.resolve_features.get(id) { Some(prev) => { features.iter().all(|f| prev.contains(f)) && (!use_default || prev.contains("default") || @@ -880,7 +968,7 @@ impl<'a> Context<'a> { // Record what list of features is active for this package. 
if !used_features.is_empty() { let pkgid = candidate.package_id(); - self.resolve.features.entry(pkgid.clone()) + self.resolve_features.entry(pkgid.clone()) .or_insert(HashSet::new()) .extend(used_features); } @@ -889,13 +977,15 @@ impl<'a> Context<'a> { } } -fn check_cycles(cx: &Context) -> CargoResult<()> { +fn check_cycles(resolve: &Resolve, + activations: &HashMap<(String, SourceId), Vec>>) + -> CargoResult<()> { let mut summaries = HashMap::new(); - for summary in cx.activations.values().flat_map(|v| v) { + for summary in activations.values().flat_map(|v| v) { summaries.insert(summary.package_id(), &**summary); } - return visit(&cx.resolve, - cx.resolve.root(), + return visit(resolve, + resolve.root(), &summaries, &mut HashSet::new(), &mut HashSet::new()); diff --git a/src/cargo/core/source.rs b/src/cargo/core/source.rs index c32e7227f75..ed684e95468 100644 --- a/src/cargo/core/source.rs +++ b/src/cargo/core/source.rs @@ -4,13 +4,17 @@ use std::fmt::{self, Formatter}; use std::hash; use std::path::Path; use std::sync::Arc; -use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; +use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT}; +use std::sync::atomic::Ordering::SeqCst; +use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use url::Url; use core::{Package, PackageId, Registry}; -use sources::{PathSource, GitSource, RegistrySource}; +use ops; use sources::git; +use sources::{PathSource, GitSource, RegistrySource, CRATES_IO}; +use sources::DirectorySource; use util::{human, Config, CargoResult, ToUrl}; /// A Source finds and downloads remote packages based on names and @@ -35,6 +39,35 @@ pub trait Source: Registry { /// The `pkg` argument is the package which this fingerprint should only be /// interested in for when this source may contain multiple packages. fn fingerprint(&self, pkg: &Package) -> CargoResult; + + /// If this source supports it, verifies the source of the package + /// specified. 
+ /// + /// Note that the source may also have performed other checksum-based + /// verification during the `download` step, but this is intended to be run + /// just before a crate is compiled so it may perform more expensive checks + /// which may not be cacheable. + fn verify(&self, _pkg: &PackageId) -> CargoResult<()> { + Ok(()) + } +} + +impl<'a, T: Source + ?Sized + 'a> Source for Box { + fn update(&mut self) -> CargoResult<()> { + (**self).update() + } + + fn download(&mut self, id: &PackageId) -> CargoResult { + (**self).download(id) + } + + fn fingerprint(&self, pkg: &Package) -> CargoResult { + (**self).fingerprint(pkg) + } + + fn verify(&self, pkg: &PackageId) -> CargoResult<()> { + (**self).verify(pkg) + } } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -45,6 +78,10 @@ enum Kind { Path, /// represents the central registry Registry, + /// represents a local filesystem-based registry + LocalRegistry, + /// represents a directory-based registry + Directory, } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -91,14 +128,14 @@ impl SourceId { /// libssh2-static-sys#80e71a3021618eb05\ /// 656c58fb7c5ef5f12bc747f"); /// ``` - pub fn from_url(string: &str) -> SourceId { + pub fn from_url(string: &str) -> CargoResult { let mut parts = string.splitn(2, '+'); let kind = parts.next().unwrap(); let url = parts.next().unwrap(); match kind { "git" => { - let mut url = url.to_url().unwrap(); + let mut url = try!(url.to_url()); let mut reference = GitReference::Branch("master".to_string()); for (k, v) in url.query_pairs() { match &k[..] 
{ @@ -114,18 +151,18 @@ impl SourceId { let precise = url.fragment().map(|s| s.to_owned()); url.set_fragment(None); url.set_query(None); - SourceId::for_git(&url, reference).with_precise(precise) - } + Ok(SourceId::for_git(&url, reference).with_precise(precise)) + }, "registry" => { - let url = url.to_url().unwrap(); - SourceId::new(Kind::Registry, url) - .with_precise(Some("locked".to_string())) + let url = try!(url.to_url()); + Ok(SourceId::new(Kind::Registry, url) + .with_precise(Some("locked".to_string()))) } "path" => { - let url = url.to_url().unwrap(); - SourceId::new(Kind::Path, url) + let url = try!(url.to_url()); + Ok(SourceId::new(Kind::Path, url)) } - _ => panic!("Unsupported serialized SourceId"), + kind => Err(human(format!("unsupported source protocol: {}", kind))) } } @@ -150,12 +187,18 @@ impl SourceId { SourceIdInner { kind: Kind::Registry, ref url, .. } => { format!("registry+{}", url) } + SourceIdInner { kind: Kind::LocalRegistry, ref url, .. } => { + format!("local-registry+{}", url) + } + SourceIdInner { kind: Kind::Directory, ref url, .. } => { + format!("directory+{}", url) + } } } // Pass absolute path pub fn for_path(path: &Path) -> CargoResult { - let url = try!(path.to_url().map_err(human)); + let url = try!(path.to_url()); Ok(SourceId::new(Kind::Path, url)) } @@ -167,12 +210,36 @@ impl SourceId { SourceId::new(Kind::Registry, url.clone()) } + pub fn for_local_registry(path: &Path) -> CargoResult { + let url = try!(path.to_url()); + Ok(SourceId::new(Kind::LocalRegistry, url)) + } + + pub fn for_directory(path: &Path) -> CargoResult { + let url = try!(path.to_url()); + Ok(SourceId::new(Kind::Directory, url)) + } + /// Returns the `SourceId` corresponding to the main repository. /// /// This is the main cargo registry by default, but it can be overridden in /// a `.cargo/config`. 
- pub fn for_central(config: &Config) -> CargoResult { - Ok(SourceId::for_registry(&try!(RegistrySource::url(config)))) + pub fn crates_io(config: &Config) -> CargoResult { + let cfg = try!(ops::registry_configuration(config)); + let url = if let Some(ref index) = cfg.index { + static WARNED: AtomicBool = ATOMIC_BOOL_INIT; + if !WARNED.swap(true, SeqCst) { + try!(config.shell().warn("custom registry support via \ + the `registry.index` configuration is \ + being removed, this functionality \ + will not work in the future")); + } + &index[..] + } else { + CRATES_IO + }; + let url = try!(url.to_url()); + Ok(SourceId::for_registry(&url)) } pub fn url(&self) -> &Url { @@ -182,7 +249,7 @@ impl SourceId { self.inner.kind == Kind::Path } pub fn is_registry(&self) -> bool { - self.inner.kind == Kind::Registry + self.inner.kind == Kind::Registry || self.inner.kind == Kind::LocalRegistry } pub fn is_git(&self) -> bool { @@ -204,7 +271,21 @@ impl SourceId { }; Box::new(PathSource::new(&path, self, config)) } - Kind::Registry => Box::new(RegistrySource::new(self, config)), + Kind::Registry => Box::new(RegistrySource::remote(self, config)), + Kind::LocalRegistry => { + let path = match self.inner.url.to_file_path() { + Ok(p) => p, + Err(()) => panic!("path sources cannot be remote"), + }; + Box::new(RegistrySource::local(self, &path, config)) + } + Kind::Directory => { + let path = match self.inner.url.to_file_path() { + Ok(p) => p, + Err(()) => panic!("path sources cannot be remote"), + }; + Box::new(DirectorySource::new(&path, self, config)) + } } } @@ -233,7 +314,7 @@ impl SourceId { Kind::Registry => {} _ => return false, } - self.inner.url.to_string() == RegistrySource::default_url() + self.inner.url.to_string() == CRATES_IO } } @@ -267,8 +348,10 @@ impl Encodable for SourceId { impl Decodable for SourceId { fn decode(d: &mut D) -> Result { - let string: String = Decodable::decode(d).ok().expect("Invalid encoded SourceId"); - Ok(SourceId::from_url(&string)) + let string: 
String = try!(Decodable::decode(d)); + SourceId::from_url(&string).map_err(|e| { + d.error(&e.to_string()) + }) } } @@ -288,9 +371,13 @@ impl fmt::Display for SourceId { } Ok(()) } - SourceIdInner { kind: Kind::Registry, ref url, .. } => { + SourceIdInner { kind: Kind::Registry, ref url, .. } | + SourceIdInner { kind: Kind::LocalRegistry, ref url, .. } => { write!(f, "registry {}", url) } + SourceIdInner { kind: Kind::Directory, ref url, .. } => { + write!(f, "dir {}", url) + } } } } diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs index 01697171d2f..219e21bcadb 100644 --- a/src/cargo/core/summary.rs +++ b/src/cargo/core/summary.rs @@ -15,6 +15,7 @@ pub struct Summary { package_id: PackageId, dependencies: Vec, features: HashMap>, + checksum: Option, } impl Summary { @@ -60,6 +61,7 @@ impl Summary { package_id: pkg_id, dependencies: dependencies, features: features, + checksum: None, }) } @@ -69,18 +71,39 @@ impl Summary { pub fn source_id(&self) -> &SourceId { self.package_id.source_id() } pub fn dependencies(&self) -> &[Dependency] { &self.dependencies } pub fn features(&self) -> &HashMap> { &self.features } + pub fn checksum(&self) -> Option<&str> { + self.checksum.as_ref().map(|s| &s[..]) + } pub fn override_id(mut self, id: PackageId) -> Summary { self.package_id = id; self } + pub fn set_checksum(mut self, cksum: String) -> Summary { + self.checksum = Some(cksum); + self + } + pub fn map_dependencies(mut self, f: F) -> Summary where F: FnMut(Dependency) -> Dependency { let deps = mem::replace(&mut self.dependencies, Vec::new()); self.dependencies = deps.into_iter().map(f).collect(); self } + + pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) + -> Summary { + let me = if self.package_id().source_id() == to_replace { + let new_id = self.package_id().with_source_id(replace_with); + self.override_id(new_id) + } else { + self + }; + me.map_dependencies(|dep| { + dep.map_source(to_replace, replace_with) + }) + } } impl 
PartialEq for Summary { @@ -88,15 +111,3 @@ impl PartialEq for Summary { self.package_id == other.package_id } } - -pub trait SummaryVec { - fn names(&self) -> Vec; -} - -impl SummaryVec for Vec { - // TODO: Move to Registry - fn names(&self) -> Vec { - self.iter().map(|summary| summary.name().to_string()).collect() - } - -} diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs index 21cb053d12c..e19b37f116f 100644 --- a/src/cargo/ops/cargo_clean.rs +++ b/src/cargo/ops/cargo_clean.rs @@ -28,7 +28,7 @@ pub fn clean(ws: &Workspace, opts: &CleanOptions) -> CargoResult<()> { return rm_rf(&target_dir); } - let mut registry = PackageRegistry::new(opts.config); + let mut registry = try!(PackageRegistry::new(opts.config)); let resolve = try!(ops::resolve_ws(&mut registry, ws)); let packages = ops::get_resolved_packages(&resolve, registry); diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index 78d16365c53..3ea700f49af 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -97,7 +97,7 @@ pub fn resolve_dependencies<'a>(ws: &Workspace<'a>, no_default_features: bool) -> CargoResult<(PackageSet<'a>, Resolve)> { - let mut registry = PackageRegistry::new(ws.config()); + let mut registry = try!(PackageRegistry::new(ws.config())); if let Some(source) = source { registry.add_preloaded(try!(ws.current()).package_id().source_id(), @@ -389,6 +389,7 @@ fn add_overrides<'a>(registry: &mut PackageRegistry<'a>, None => return Ok(()) }; let current = try!(ws.current()); + let paths = paths.val.iter().map(|&(ref s, ref p)| { // The path listed next to the string is the config file in which the // key was located, so we want to pop off the `.cargo/config` component @@ -482,11 +483,11 @@ fn scrape_target_config(config: &Config, triple: &str) rerun_if_changed: Vec::new(), warnings: Vec::new(), }; - for (k, value) in try!(value.table()).0 { + for (k, value) in try!(value.table(&lib_name)).0 { let key = format!("{}.{}", 
key, k); match &k[..] { "rustc-flags" => { - let (flags, definition) = try!(value.string()); + let (flags, definition) = try!(value.string(&k)); let whence = format!("in `{}` (in {})", key, definition.display()); let (paths, links) = try!( @@ -496,22 +497,22 @@ fn scrape_target_config(config: &Config, triple: &str) output.library_links.extend(links); } "rustc-link-lib" => { - let list = try!(value.list()); + let list = try!(value.list(&k)); output.library_links.extend(list.iter() .map(|v| v.0.clone())); } "rustc-link-search" => { - let list = try!(value.list()); + let list = try!(value.list(&k)); output.library_paths.extend(list.iter().map(|v| { PathBuf::from(&v.0) })); } "rustc-cfg" => { - let list = try!(value.list()); + let list = try!(value.list(&k)); output.cfgs.extend(list.iter().map(|v| v.0.clone())); } _ => { - let val = try!(value.string()).0; + let val = try!(value.string(&k)).0; output.metadata.push((k.clone(), val.to_string())); } } diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs index 03b19ca5d19..a0144c8f472 100644 --- a/src/cargo/ops/cargo_fetch.rs +++ b/src/cargo/ops/cargo_fetch.rs @@ -5,7 +5,7 @@ use util::CargoResult; /// Executes `cargo fetch`. 
pub fn fetch<'a>(ws: &Workspace<'a>) -> CargoResult<(Resolve, PackageSet<'a>)> { - let mut registry = PackageRegistry::new(ws.config()); + let mut registry = try!(PackageRegistry::new(ws.config())); let resolve = try!(ops::resolve_ws(&mut registry, ws)); let packages = get_resolved_packages(&resolve, registry); for id in resolve.iter() { diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs index b196dec916b..45ee2b10111 100644 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -16,7 +16,7 @@ pub struct UpdateOptions<'a> { } pub fn generate_lockfile(ws: &Workspace) -> CargoResult<()> { - let mut registry = PackageRegistry::new(ws.config()); + let mut registry = try!(PackageRegistry::new(ws.config())); let resolve = try!(ops::resolve_with_previous(&mut registry, ws, Method::Everything, None, None)); @@ -35,7 +35,7 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) Some(resolve) => resolve, None => return generate_lockfile(ws), }; - let mut registry = PackageRegistry::new(opts.config); + let mut registry = try!(PackageRegistry::new(opts.config)); let mut to_avoid = HashSet::new(); if opts.to_update.is_empty() { diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index b120ace95e1..d25bf11ba18 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -13,7 +13,7 @@ use toml; use core::{SourceId, Source, Package, Dependency, PackageIdSpec}; use core::{PackageId, Workspace}; use ops::{self, CompileFilter}; -use sources::{GitSource, PathSource, RegistrySource}; +use sources::{GitSource, PathSource, SourceConfigMap}; use util::{CargoResult, ChainError, Config, human, internal}; use util::{Filesystem, FileLock}; @@ -54,6 +54,7 @@ pub fn install(root: Option<&str>, force: bool) -> CargoResult<()> { let config = opts.config; let root = try!(resolve_root(root, config)); + let map = try!(SourceConfigMap::new(config)); let 
(pkg, source) = if source_id.is_git() { try!(select_pkg(GitSource::new(source_id, config), source_id, krate, vers, &mut |git| git.read_packages())) @@ -70,7 +71,7 @@ pub fn install(root: Option<&str>, source_id, krate, vers, &mut |path| path.read_packages())) } else { - try!(select_pkg(RegistrySource::new(source_id, config), + try!(select_pkg(try!(map.load(source_id)), source_id, krate, vers, &mut |_| Err(human("must specify a crate to install from \ crates.io, or use --path or --git to \ diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index 4e6ae0a9d71..e6a89e331b0 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -247,21 +247,21 @@ fn run_verify(ws: &Workspace, tar: &File, opts: &PackageOpts) -> CargoResult<()> try!(archive.unpack(dst.parent().unwrap())); let manifest_path = dst.join("Cargo.toml"); - // When packages are uploaded to the registry, all path dependencies are - // implicitly converted to registry-based dependencies, so we rewrite those + // When packages are uploaded to a registry, all path dependencies are + // implicitly converted to registry dependencies, so we rewrite those // dependencies here. // // We also make sure to point all paths at `dst` instead of the previous // location that the package was originally read from. In locking the // `SourceId` we're telling it that the corresponding `PathSource` will be // considered updated and we won't actually read any packages. 
- let registry = try!(SourceId::for_central(config)); + let cratesio = try!(SourceId::crates_io(config)); let precise = Some("locked".to_string()); let new_src = try!(SourceId::for_path(&dst)).with_precise(precise); let new_pkgid = try!(PackageId::new(pkg.name(), pkg.version(), &new_src)); let new_summary = pkg.summary().clone().map_dependencies(|d| { if !d.source_id().is_path() { return d } - d.clone_inner().set_source_id(registry.clone()).into_dependency() + d.clone_inner().set_source_id(cratesio.clone()).into_dependency() }); let mut new_manifest = pkg.manifest().clone(); new_manifest.set_summary(new_summary.override_id(new_pkgid)); diff --git a/src/cargo/ops/cargo_rustc/fingerprint.rs b/src/cargo/ops/cargo_rustc/fingerprint.rs index e8ad1880f8a..702deb5f863 100644 --- a/src/cargo/ops/cargo_rustc/fingerprint.rs +++ b/src/cargo/ops/cargo_rustc/fingerprint.rs @@ -57,6 +57,25 @@ pub fn prepare_target<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, let compare = compare_old_fingerprint(&loc, &*fingerprint); log_compare(unit, &compare); + // If our comparison failed (e.g. we're going to trigger a rebuild of this + // crate), then we also ensure the source of the crate passes all + // verification checks before we build it. + // + // The `Source::verify` method is intended to allow sources to execute + // pre-build checks to ensure that the relevant source code is all + // up-to-date and as expected. This is currently used primarily for + // directory sources which will use this hook to perform an integrity check + // on all files in the source to ensure they haven't changed. If they have + // changed then an error is issued. 
+ if compare.is_err() { + let source_id = unit.pkg.package_id().source_id(); + let sources = cx.packages.sources(); + let source = try!(sources.get(source_id).chain_error(|| { + internal("missing package source") + })); + try!(source.verify(unit.pkg.package_id())); + } + let root = cx.out_dir(unit); let mut missing_outputs = false; if unit.profile.doc { diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs index 292b0729e11..5866242fdab 100644 --- a/src/cargo/ops/registry.rs +++ b/src/cargo/ops/registry.rs @@ -163,18 +163,19 @@ pub fn registry(config: &Config, // Parse all configuration options let RegistryConfig { token: token_config, - index: index_config, + index: _index_config, } = try!(registry_configuration(config)); let token = token.or(token_config); - let index = index.or(index_config).unwrap_or(RegistrySource::default_url()); - let index = try!(index.to_url().map_err(human)); - let sid = SourceId::for_registry(&index); + let sid = match index { + Some(index) => SourceId::for_registry(&try!(index.to_url())), + None => try!(SourceId::crates_io(config)), + }; let api_host = { - let mut src = RegistrySource::new(&sid, config); + let mut src = RegistrySource::remote(&sid, config); try!(src.update().chain_error(|| { - human(format!("failed to update registry {}", index)) + human(format!("failed to update {}", sid)) })); - (try!(src.config())).api + (try!(src.config())).unwrap().api }; let handle = try!(http_handle(config)); Ok((Registry::new_handle(api_host, token, handle), sid)) diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index 7a6abacab00..10fb4aa87c9 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -153,7 +153,7 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry, &replace, registry)); if let Some(previous) = previous { - resolved.copy_metadata(previous); + try!(resolved.merge_from(previous)); } return Ok(resolved); diff --git a/src/cargo/sources/config.rs b/src/cargo/sources/config.rs 
new file mode 100644 index 00000000000..e40b6a7d546 --- /dev/null +++ b/src/cargo/sources/config.rs @@ -0,0 +1,184 @@ +//! Implementation of configuration for various sources +//! +//! This module will parse the various `source.*` TOML configuration keys into a +//! structure usable by Cargo itself. Currently this is primarily used to map +//! sources to one another via the `replace-with` key in `.cargo/config`. + +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +use url::Url; + +use core::{Source, SourceId}; +use sources::ReplacedSource; +use util::{CargoResult, Config, ChainError, human, ToUrl}; +use util::config::ConfigValue; + +pub struct SourceConfigMap<'cfg> { + cfgs: HashMap, + id2name: HashMap, + config: &'cfg Config, +} + +/// Configuration for a particular source, found in TOML looking like: +/// +/// ```toml +/// [source.crates-io] +/// registry = 'https://github.com/rust-lang/crates.io-index' +/// replace-with = 'foo' # optional +/// ``` +struct SourceConfig { + // id this source corresponds to, inferred from the various defined keys in + // the configuration + id: SourceId, + + // Name of the source that this source should be replaced with. This field + // is a tuple of (name, path) where path is where this configuration key was + // defined (the literal `.cargo/config` file). 
+ replace_with: Option<(String, PathBuf)>, +} + +impl<'cfg> SourceConfigMap<'cfg> { + pub fn new(config: &'cfg Config) -> CargoResult> { + let mut base = try!(SourceConfigMap::empty(config)); + if let Some(table) = try!(config.get_table("source")) { + for (key, value) in table.val.iter() { + try!(base.add_config(key, value)); + } + } + Ok(base) + } + + pub fn empty(config: &'cfg Config) -> CargoResult> { + let mut base = SourceConfigMap { + cfgs: HashMap::new(), + id2name: HashMap::new(), + config: config, + }; + base.add("crates-io", SourceConfig { + id: try!(SourceId::crates_io(config)), + replace_with: None, + }); + Ok(base) + } + + pub fn load(&self, id: &SourceId) -> CargoResult> { + debug!("loading: {}", id); + let mut name = match self.id2name.get(id) { + Some(name) => name, + None => return Ok(id.load(self.config)), + }; + let mut path = Path::new("/"); + let orig_name = name; + let new_id; + loop { + let cfg = match self.cfgs.get(name) { + Some(cfg) => cfg, + None => bail!("could not find a configured source with the \ + name `{}` when attempting to lookup `{}` \ + (configuration in `{}`)", + name, orig_name, path.display()), + }; + match cfg.replace_with { + Some((ref s, ref p)) => { + name = s; + path = p; + } + None if *id == cfg.id => return Ok(id.load(self.config)), + None => { + new_id = cfg.id.with_precise(id.precise() + .map(|s| s.to_string())); + break + } + } + debug!("following pointer to {}", name); + if name == orig_name { + bail!("detected a cycle of `replace-with` sources, the source \ + `{}` is eventually replaced with itself \ + (configuration in `{}`)", name, path.display()) + } + } + let new_src = new_id.load(self.config); + let old_src = id.load(self.config); + if new_src.supports_checksums() != old_src.supports_checksums() { + let (supports, no_support) = if new_src.supports_checksums() { + (name, orig_name) + } else { + (orig_name, name) + }; + bail!("\ +cannot replace `{orig}` with `{name}`, the source `{supports}` supports \ 
+checksums, but `{no_support}` does not + +a lock file compatible with `{orig}` cannot be generated in this situation +", orig = orig_name, name = name, supports = supports, no_support = no_support); + } + Ok(Box::new(ReplacedSource::new(id, &new_id, new_src))) + } + + fn add(&mut self, name: &str, cfg: SourceConfig) { + self.id2name.insert(cfg.id.clone(), name.to_string()); + self.cfgs.insert(name.to_string(), cfg); + } + + fn add_config(&mut self, name: &str, cfg: &ConfigValue) -> CargoResult<()> { + let (table, _path) = try!(cfg.table(&format!("source.{}", name))); + let mut srcs = Vec::new(); + if let Some(val) = table.get("registry") { + let url = try!(url(val, &format!("source.{}.registry", name))); + srcs.push(SourceId::for_registry(&url)); + } + if let Some(val) = table.get("local-registry") { + let (s, path) = try!(val.string(&format!("source.{}.local-registry", + name))); + let mut path = path.to_path_buf(); + path.pop(); + path.pop(); + path.push(s); + srcs.push(try!(SourceId::for_local_registry(&path))); + } + if let Some(val) = table.get("directory") { + let (s, path) = try!(val.string(&format!("source.{}.directory", + name))); + let mut path = path.to_path_buf(); + path.pop(); + path.pop(); + path.push(s); + srcs.push(try!(SourceId::for_directory(&path))); + } + + let mut srcs = srcs.into_iter(); + let src = try!(srcs.next().chain_error(|| { + human(format!("no source URL specified for `source.{}`, need \ + either `registry` or `local-registry` defined", + name)) + })); + if srcs.next().is_some() { + return Err(human(format!("more than one source URL specified for \ + `source.{}`", name))) + } + + let mut replace_with = None; + if let Some(val) = table.get("replace-with") { + let (s, path) = try!(val.string(&format!("source.{}.replace-with", + name))); + replace_with = Some((s.to_string(), path.to_path_buf())); + } + + self.add(name, SourceConfig { + id: src, + replace_with: replace_with, + }); + + return Ok(()); + + fn url(cfg: &ConfigValue, key: 
&str) -> CargoResult { + let (url, path) = try!(cfg.string(key)); + url.to_url().chain_error(|| { + human(format!("configuration key `{}` specified an invalid \ + URL (in {})", key, path.display())) + + }) + } + } +} diff --git a/src/cargo/sources/directory.rs b/src/cargo/sources/directory.rs new file mode 100644 index 00000000000..84a9501a03b --- /dev/null +++ b/src/cargo/sources/directory.rs @@ -0,0 +1,153 @@ +use std::collections::HashMap; +use std::fmt::{self, Debug, Formatter}; +use std::fs::File; +use std::io::Read; +use std::path::{Path, PathBuf}; + +use rustc_serialize::hex::ToHex; +use rustc_serialize::json; + +use core::{Package, PackageId, Summary, SourceId, Source, Dependency, Registry}; +use sources::PathSource; +use util::{CargoResult, human, ChainError, Config, Sha256}; +use util::paths; + +pub struct DirectorySource<'cfg> { + id: SourceId, + root: PathBuf, + packages: HashMap, + config: &'cfg Config, +} + +#[derive(RustcDecodable)] +struct Checksum { + package: String, + files: HashMap, +} + +impl<'cfg> DirectorySource<'cfg> { + pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) + -> DirectorySource<'cfg> { + DirectorySource { + id: id.clone(), + root: path.to_path_buf(), + config: config, + packages: HashMap::new(), + } + } +} + +impl<'cfg> Debug for DirectorySource<'cfg> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + write!(f, "DirectorySource {{ root: {:?} }}", self.root) + } +} + +impl<'cfg> Registry for DirectorySource<'cfg> { + fn query(&mut self, dep: &Dependency) -> CargoResult> { + let packages = self.packages.values().map(|p| &p.0); + let matches = packages.filter(|pkg| dep.matches(pkg.summary())); + let summaries = matches.map(|pkg| pkg.summary().clone()); + Ok(summaries.collect()) + } + + fn supports_checksums(&self) -> bool { + true + } +} + +impl<'cfg> Source for DirectorySource<'cfg> { + fn update(&mut self) -> CargoResult<()> { + self.packages.clear(); + let entries = try!(self.root.read_dir().chain_error(|| { + 
human(format!("failed to read root of directory source: {}", + self.root.display())) + })); + + for entry in entries { + let entry = try!(entry); + let path = entry.path(); + let mut src = PathSource::new(&path, + &self.id, + self.config); + try!(src.update()); + let pkg = try!(src.root_package()); + + let cksum_file = path.join(".cargo-checksum.json"); + let cksum = try!(paths::read(&path.join(cksum_file)).chain_error(|| { + human(format!("failed to load checksum `.cargo-checksum.json` \ + of {} v{}", + pkg.package_id().name(), + pkg.package_id().version())) + + })); + let cksum: Checksum = try!(json::decode(&cksum).chain_error(|| { + human(format!("failed to decode `.cargo-checksum.json` of \ + {} v{}", + pkg.package_id().name(), + pkg.package_id().version())) + })); + + let mut manifest = pkg.manifest().clone(); + let summary = manifest.summary().clone(); + manifest.set_summary(summary.set_checksum(cksum.package.clone())); + let pkg = Package::new(manifest, pkg.manifest_path()); + self.packages.insert(pkg.package_id().clone(), (pkg, cksum)); + } + + Ok(()) + } + + fn download(&mut self, id: &PackageId) -> CargoResult { + self.packages.get(id).map(|p| &p.0).cloned().chain_error(|| { + human(format!("failed to find package with id: {}", id)) + }) + } + + fn fingerprint(&self, pkg: &Package) -> CargoResult { + Ok(pkg.package_id().version().to_string()) + } + + fn verify(&self, id: &PackageId) -> CargoResult<()> { + let (pkg, cksum) = match self.packages.get(id) { + Some(&(ref pkg, ref cksum)) => (pkg, cksum), + None => bail!("failed to find entry for `{}` in directory source", + id), + }; + + let mut buf = [0; 16 * 1024]; + for (file, cksum) in cksum.files.iter() { + let mut h = Sha256::new(); + let file = pkg.root().join(file); + + try!((|| -> CargoResult<()> { + let mut f = try!(File::open(&file)); + loop { + match try!(f.read(&mut buf)) { + 0 => return Ok(()), + n => h.update(&buf[..n]), + } + } + }).chain_error(|| { + human(format!("failed to calculate checksum 
of: {}", + file.display())) + })); + + let actual = h.finish().to_hex(); + if &*actual != cksum { + bail!("\ + the listed checksum of `{}` has changed:\n\ + expected: {}\n\ + actual: {}\n\ + \n\ + directory sources are not intended to be edited, if \ + modifications are required then it is recommended \ + that [replace] is used with a forked copy of the \ + source\ + ", file.display(), cksum, actual); + } + } + + Ok(()) + } +} diff --git a/src/cargo/sources/git/utils.rs b/src/cargo/sources/git/utils.rs index 1149bfffd02..f7dc89d7fba 100644 --- a/src/cargo/sources/git/utils.rs +++ b/src/cargo/sources/git/utils.rs @@ -257,7 +257,7 @@ impl<'a> GitCheckout<'a> { })); } - let url = try!(source.to_url().map_err(human)); + let url = try!(source.to_url()); let url = url.to_string(); let repo = try!(git2::Repository::clone(&url, into).chain_error(|| { internal(format!("failed to clone {} into {}", source.display(), @@ -278,7 +278,7 @@ impl<'a> GitCheckout<'a> { fn fetch(&self, cargo_config: &Config) -> CargoResult<()> { info!("fetch {}", self.repo.path().display()); - let url = try!(self.database.path.to_url().map_err(human)); + let url = try!(self.database.path.to_url()); let url = url.to_string(); let refspec = "refs/heads/*:refs/heads/*"; try!(fetch(&self.repo, &url, refspec, &cargo_config)); diff --git a/src/cargo/sources/mod.rs b/src/cargo/sources/mod.rs index 7db73619311..ed784e95ab4 100644 --- a/src/cargo/sources/mod.rs +++ b/src/cargo/sources/mod.rs @@ -1,7 +1,13 @@ -pub use self::path::PathSource; +pub use self::config::SourceConfigMap; +pub use self::directory::DirectorySource; pub use self::git::GitSource; -pub use self::registry::RegistrySource; +pub use self::path::PathSource; +pub use self::registry::{RegistrySource, CRATES_IO}; +pub use self::replaced::ReplacedSource; -pub mod path; +pub mod config; +pub mod directory; pub mod git; +pub mod path; pub mod registry; +pub mod replaced; diff --git a/src/cargo/sources/registry.rs b/src/cargo/sources/registry.rs 
deleted file mode 100644 index f89f6bae794..00000000000 --- a/src/cargo/sources/registry.rs +++ /dev/null @@ -1,595 +0,0 @@ -//! A `Source` for registry-based packages. -//! -//! # What's a Registry? -//! -//! Registries are central locations where packages can be uploaded to, -//! discovered, and searched for. The purpose of a registry is to have a -//! location that serves as permanent storage for versions of a crate over time. -//! -//! Compared to git sources, a registry provides many packages as well as many -//! versions simultaneously. Git sources can also have commits deleted through -//! rebasings where registries cannot have their versions deleted. -//! -//! # The Index of a Registry -//! -//! One of the major difficulties with a registry is that hosting so many -//! packages may quickly run into performance problems when dealing with -//! dependency graphs. It's infeasible for cargo to download the entire contents -//! of the registry just to resolve one package's dependencies, for example. As -//! a result, cargo needs some efficient method of querying what packages are -//! available on a registry, what versions are available, and what the -//! dependencies for each version is. -//! -//! One method of doing so would be having the registry expose an HTTP endpoint -//! which can be queried with a list of packages and a response of their -//! dependencies and versions is returned. This is somewhat inefficient however -//! as we may have to hit the endpoint many times and we may have already -//! queried for much of the data locally already (for other packages, for -//! example). This also involves inventing a transport format between the -//! registry and Cargo itself, so this route was not taken. -//! -//! Instead, Cargo communicates with registries through a git repository -//! referred to as the Index. The Index of a registry is essentially an easily -//! query-able version of the registry's database for a list of versions of a -//! 
package as well as a list of dependencies for each version. -//! -//! Using git to host this index provides a number of benefits: -//! -//! * The entire index can be stored efficiently locally on disk. This means -//! that all queries of a registry can happen locally and don't need to touch -//! the network. -//! -//! * Updates of the index are quite efficient. Using git buys incremental -//! updates, compressed transmission, etc for free. The index must be updated -//! each time we need fresh information from a registry, but this is one -//! update of a git repository that probably hasn't changed a whole lot so -//! it shouldn't be too expensive. -//! -//! Additionally, each modification to the index is just appending a line at -//! the end of a file (the exact format is described later). This means that -//! the commits for an index are quite small and easily applied/compressable. -//! -//! ## The format of the Index -//! -//! The index is a store for the list of versions for all packages known, so its -//! format on disk is optimized slightly to ensure that `ls registry` doesn't -//! produce a list of all packages ever known. The index also wants to ensure -//! that there's not a million files which may actually end up hitting -//! filesystem limits at some point. To this end, a few decisions were made -//! about the format of the registry: -//! -//! 1. Each crate will have one file corresponding to it. Each version for a -//! crate will just be a line in this file. -//! 2. There will be two tiers of directories for crate names, under which -//! crates corresponding to those tiers will be located. -//! -//! As an example, this is an example hierarchy of an index: -//! -//! ```notrust -//! . -//! ├── 3 -//! │   └── u -//! │   └── url -//! ├── bz -//! │   └── ip -//! │   └── bzip2 -//! ├── config.json -//! ├── en -//! │   └── co -//! │   └── encoding -//! └── li -//!    ├── bg -//!    │   └── libgit2 -//!    └── nk -//!    └── link-config -//! ``` -//! -//! 
The root of the index contains a `config.json` file with a few entries -//! corresponding to the registry (see `RegistryConfig` below). -//! -//! Otherwise, there are three numbered directories (1, 2, 3) for crates with -//! names 1, 2, and 3 characters in length. The 1/2 directories simply have the -//! crate files underneath them, while the 3 directory is sharded by the first -//! letter of the crate name. -//! -//! Otherwise the top-level directory contains many two-letter directory names, -//! each of which has many sub-folders with two letters. At the end of all these -//! are the actual crate files themselves. -//! -//! The purpose of this layout is to hopefully cut down on `ls` sizes as well as -//! efficient lookup based on the crate name itself. -//! -//! ## Crate files -//! -//! Each file in the index is the history of one crate over time. Each line in -//! the file corresponds to one version of a crate, stored in JSON format (see -//! the `RegistryPackage` structure below). -//! -//! As new versions are published, new lines are appended to this file. The only -//! modifications to this file that should happen over time are yanks of a -//! particular version. -//! -//! # Downloading Packages -//! -//! The purpose of the Index was to provide an efficient method to resolve the -//! dependency graph for a package. So far we only required one network -//! interaction to update the registry's repository (yay!). After resolution has -//! been performed, however we need to download the contents of packages so we -//! can read the full manifest and build the source code. -//! -//! To accomplish this, this source's `download` method will make an HTTP -//! request per-package requested to download tarballs into a local cache. These -//! tarballs will then be unpacked into a destination folder. -//! -//! Note that because versions uploaded to the registry are frozen forever that -//! the HTTP download and unpacking can all be skipped if the version has -//! 
already been downloaded and unpacked. This caching allows us to only -//! download a package when absolutely necessary. -//! -//! # Filesystem Hierarchy -//! -//! Overall, the `$HOME/.cargo` looks like this when talking about the registry: -//! -//! ```notrust -//! # A folder under which all registry metadata is hosted (similar to -//! # $HOME/.cargo/git) -//! $HOME/.cargo/registry/ -//! -//! # For each registry that cargo knows about (keyed by hostname + hash) -//! # there is a folder which is the checked out version of the index for -//! # the registry in this location. Note that this is done so cargo can -//! # support multiple registries simultaneously -//! index/ -//! registry1-/ -//! registry2-/ -//! ... -//! -//! # This folder is a cache for all downloaded tarballs from a registry. -//! # Once downloaded and verified, a tarball never changes. -//! cache/ -//! registry1-/-.crate -//! ... -//! -//! # Location in which all tarballs are unpacked. Each tarball is known to -//! # be frozen after downloading, so transitively this folder is also -//! # frozen once its unpacked (it's never unpacked again) -//! src/ -//! registry1-/-/... -//! ... -//! 
``` - -use std::collections::HashMap; -use std::fs::File; -use std::io::SeekFrom; -use std::io::prelude::*; -use std::path::{PathBuf, Path}; - -use curl::easy::Easy; -use flate2::read::GzDecoder; -use git2; -use rustc_serialize::hex::ToHex; -use rustc_serialize::json; -use tar::Archive; -use url::Url; - -use core::{Source, SourceId, PackageId, Package, Summary, Registry}; -use core::dependency::{Dependency, DependencyInner, Kind}; -use sources::{PathSource, git}; -use util::{CargoResult, Config, internal, ChainError, ToUrl, human}; -use util::{hex, Sha256, paths, Filesystem, FileLock}; -use util::network; -use ops; - -const DEFAULT: &'static str = "https://github.com/rust-lang/crates.io-index"; -const INDEX_LOCK: &'static str = ".cargo-index-lock"; - -pub struct RegistrySource<'cfg> { - source_id: SourceId, - checkout_path: Filesystem, - cache_path: Filesystem, - src_path: Filesystem, - config: &'cfg Config, - handle: Option, - hashes: HashMap<(String, String), String>, // (name, vers) => cksum - cache: HashMap>, - updated: bool, -} - -#[derive(RustcDecodable)] -pub struct RegistryConfig { - /// Download endpoint for all crates. This will be appended with - /// `///download` and then will be hit with an HTTP GET - /// request to download the tarball for a crate. - pub dl: String, - - /// API endpoint for the registry. This is what's actually hit to perform - /// operations like yanks, owner modifications, publish new crates, etc. 
- pub api: String, -} - -#[derive(RustcDecodable)] -struct RegistryPackage { - name: String, - vers: String, - deps: Vec, - features: HashMap>, - cksum: String, - yanked: Option, -} - -#[derive(RustcDecodable)] -struct RegistryDependency { - name: String, - req: String, - features: Vec, - optional: bool, - default_features: bool, - target: Option, - kind: Option, -} - -impl<'cfg> RegistrySource<'cfg> { - pub fn new(source_id: &SourceId, - config: &'cfg Config) -> RegistrySource<'cfg> { - let hash = hex::short_hash(source_id); - let ident = source_id.url().host_str().unwrap_or("").to_string(); - let part = format!("{}-{}", ident, hash); - RegistrySource { - checkout_path: config.registry_index_path().join(&part), - cache_path: config.registry_cache_path().join(&part), - src_path: config.registry_source_path().join(&part), - config: config, - source_id: source_id.clone(), - handle: None, - hashes: HashMap::new(), - cache: HashMap::new(), - updated: false, - } - } - - /// Get the configured default registry URL. - /// - /// This is the main cargo registry by default, but it can be overridden in - /// a .cargo/config - pub fn url(config: &Config) -> CargoResult { - let config = try!(ops::registry_configuration(config)); - let url = config.index.unwrap_or(DEFAULT.to_string()); - url.to_url().map_err(human) - } - - /// Get the default url for the registry - pub fn default_url() -> String { - DEFAULT.to_string() - } - - /// Decode the configuration stored within the registry. - /// - /// This requires that the index has been at least checked out. - pub fn config(&self) -> CargoResult { - let lock = try!(self.checkout_path.open_ro(Path::new(INDEX_LOCK), - self.config, - "the registry index")); - let path = lock.path().parent().unwrap(); - let contents = try!(paths::read(&path.join("config.json"))); - let config = try!(json::decode(&contents)); - Ok(config) - } - - /// Download the given package from the given url into the local cache. 
- /// - /// This will perform the HTTP request to fetch the package. This function - /// will only succeed if the HTTP download was successful and the file is - /// then ready for inspection. - /// - /// No action is taken if the package is already downloaded. - fn download_package(&mut self, pkg: &PackageId, url: &Url) - -> CargoResult { - let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); - let path = Path::new(&filename); - let mut dst = try!(self.cache_path.open_rw(path, self.config, &filename)); - let meta = try!(dst.file().metadata()); - if meta.len() > 0 { - return Ok(dst) - } - try!(self.config.shell().status("Downloading", pkg)); - - let expected_hash = try!(self.hash(pkg)); - let handle = match self.handle { - Some(ref mut handle) => handle, - None => { - self.handle = Some(try!(ops::http_handle(self.config))); - self.handle.as_mut().unwrap() - } - }; - // TODO: don't download into memory, but ensure that if we ctrl-c a - // download we should resume either from the start or the middle - // on the next time - try!(handle.get(true)); - try!(handle.url(&url.to_string())); - try!(handle.follow_location(true)); - let mut state = Sha256::new(); - let mut body = Vec::new(); - { - let mut handle = handle.transfer(); - try!(handle.write_function(|buf| { - state.update(buf); - body.extend_from_slice(buf); - Ok(buf.len()) - })); - try!(network::with_retry(self.config, || { - handle.perform() - })) - } - let code = try!(handle.response_code()); - if code != 200 && code != 0 { - bail!("failed to get 200 response from `{}`, got {}", url, code) - } - - // Verify what we just downloaded - if state.finish().to_hex() != expected_hash { - bail!("failed to verify the checksum of `{}`", pkg) - } - - try!(dst.write_all(&body)); - try!(dst.seek(SeekFrom::Start(0))); - Ok(dst) - } - - /// Return the hash listed for a specified PackageId. 
- fn hash(&mut self, pkg: &PackageId) -> CargoResult { - let key = (pkg.name().to_string(), pkg.version().to_string()); - if let Some(s) = self.hashes.get(&key) { - return Ok(s.clone()) - } - // Ok, we're missing the key, so parse the index file to load it. - try!(self.summaries(pkg.name())); - self.hashes.get(&key).chain_error(|| { - internal(format!("no hash listed for {}", pkg)) - }).map(|s| s.clone()) - } - - /// Unpacks a downloaded package into a location where it's ready to be - /// compiled. - /// - /// No action is taken if the source looks like it's already unpacked. - fn unpack_package(&self, - pkg: &PackageId, - tarball: &FileLock) - -> CargoResult { - let dst = self.src_path.join(&format!("{}-{}", pkg.name(), - pkg.version())); - try!(dst.create_dir()); - // Note that we've already got the `tarball` locked above, and that - // implies a lock on the unpacked destination as well, so this access - // via `into_path_unlocked` should be ok. - let dst = dst.into_path_unlocked(); - let ok = dst.join(".cargo-ok"); - if ok.exists() { - return Ok(dst) - } - - let gz = try!(GzDecoder::new(tarball.file())); - let mut tar = Archive::new(gz); - try!(tar.unpack(dst.parent().unwrap())); - try!(File::create(&ok)); - Ok(dst) - } - - /// Parse the on-disk metadata for the package provided - pub fn summaries(&mut self, name: &str) -> CargoResult<&Vec<(Summary, bool)>> { - if self.cache.contains_key(name) { - return Ok(self.cache.get(name).unwrap()); - } - let lock = self.checkout_path.open_ro(Path::new(INDEX_LOCK), - self.config, - "the registry index"); - let file = lock.and_then(|lock| { - let path = lock.path().parent().unwrap(); - let fs_name = name.chars().flat_map(|c| { - c.to_lowercase() - }).collect::(); - - // see module comment for why this is structured the way it is - let path = match fs_name.len() { - 1 => path.join("1").join(&fs_name), - 2 => path.join("2").join(&fs_name), - 3 => path.join("3").join(&fs_name[..1]).join(&fs_name), - _ => 
path.join(&fs_name[0..2]) - .join(&fs_name[2..4]) - .join(&fs_name), - }; - File::open(&path).map_err(human) - }); - let summaries = match file { - Ok(mut f) => { - let mut contents = String::new(); - try!(f.read_to_string(&mut contents)); - let ret: CargoResult>; - ret = contents.lines().filter(|l| l.trim().len() > 0) - .map(|l| self.parse_registry_package(l)) - .collect(); - try!(ret.chain_error(|| { - internal(format!("failed to parse registry's information \ - for: {}", name)) - })) - } - Err(..) => Vec::new(), - }; - let summaries = summaries.into_iter().filter(|summary| { - summary.0.package_id().name() == name - }).collect(); - self.cache.insert(name.to_string(), summaries); - Ok(self.cache.get(name).unwrap()) - } - - /// Parse a line from the registry's index file into a Summary for a - /// package. - /// - /// The returned boolean is whether or not the summary has been yanked. - fn parse_registry_package(&mut self, line: &str) - -> CargoResult<(Summary, bool)> { - let RegistryPackage { - name, vers, cksum, deps, features, yanked - } = try!(json::decode::(line)); - let pkgid = try!(PackageId::new(&name, &vers, &self.source_id)); - let deps: CargoResult> = deps.into_iter().map(|dep| { - self.parse_registry_dependency(dep) - }).collect(); - let deps = try!(deps); - self.hashes.insert((name, vers), cksum); - Ok((try!(Summary::new(pkgid, deps, features)), yanked.unwrap_or(false))) - } - - /// Converts an encoded dependency in the registry to a cargo dependency - fn parse_registry_dependency(&self, dep: RegistryDependency) - -> CargoResult { - let RegistryDependency { - name, req, features, optional, default_features, target, kind - } = dep; - - let dep = try!(DependencyInner::parse(&name, Some(&req), - &self.source_id)); - let kind = match kind.as_ref().map(|s| &s[..]).unwrap_or("") { - "dev" => Kind::Development, - "build" => Kind::Build, - _ => Kind::Normal, - }; - - let platform = match target { - Some(target) => Some(try!(target.parse())), - None => None, - 
}; - - // Unfortunately older versions of cargo and/or the registry ended up - // publishing lots of entries where the features array contained the - // empty feature, "", inside. This confuses the resolution process much - // later on and these features aren't actually valid, so filter them all - // out here. - let features = features.into_iter().filter(|s| !s.is_empty()).collect(); - - Ok(dep.set_optional(optional) - .set_default_features(default_features) - .set_features(features) - .set_platform(platform) - .set_kind(kind) - .into_dependency()) - } - - /// Actually perform network operations to update the registry - fn do_update(&mut self) -> CargoResult<()> { - if self.updated { - return Ok(()) - } - try!(self.checkout_path.create_dir()); - let lock = try!(self.checkout_path.open_rw(Path::new(INDEX_LOCK), - self.config, - "the registry index")); - let path = lock.path().parent().unwrap(); - - try!(self.config.shell().status("Updating", - format!("registry `{}`", self.source_id.url()))); - let repo = try!(git2::Repository::open(path).or_else(|_| { - let _ = lock.remove_siblings(); - git2::Repository::init(path) - })); - - // git fetch origin - let url = self.source_id.url().to_string(); - let refspec = "refs/heads/*:refs/remotes/origin/*"; - - try!(git::fetch(&repo, &url, refspec, &self.config).chain_error(|| { - human(format!("failed to fetch `{}`", url)) - })); - - // git reset --hard origin/master - let reference = "refs/remotes/origin/master"; - let oid = try!(repo.refname_to_id(reference)); - trace!("[{}] updating to rev {}", self.source_id, oid); - let object = try!(repo.find_object(oid, None)); - try!(repo.reset(&object, git2::ResetType::Hard, None)); - self.updated = true; - self.cache.clear(); - Ok(()) - } -} - -impl<'cfg> Registry for RegistrySource<'cfg> { - fn query(&mut self, dep: &Dependency) -> CargoResult> { - // If this is a precise dependency, then it came from a lockfile and in - // theory the registry is known to contain this version. 
If, however, we - // come back with no summaries, then our registry may need to be - // updated, so we fall back to performing a lazy update. - if dep.source_id().precise().is_some() { - let mut summaries = try!(self.summaries(dep.name())).iter().map(|s| { - s.0.clone() - }).collect::>(); - if try!(summaries.query(dep)).is_empty() { - try!(self.do_update()); - } - } - - let mut summaries = { - let summaries = try!(self.summaries(dep.name())); - summaries.iter().filter(|&&(_, yanked)| { - dep.source_id().precise().is_some() || !yanked - }).map(|s| s.0.clone()).collect::>() - }; - - // Handle `cargo update --precise` here. If specified, our own source - // will have a precise version listed of the form `=` where - // `` is the name of a crate on this source and `` is the - // version requested (agument to `--precise`). - summaries.retain(|s| { - match self.source_id.precise() { - Some(p) if p.starts_with(dep.name()) && - p[dep.name().len()..].starts_with("=") => { - let vers = &p[dep.name().len() + 1..]; - s.version().to_string() == vers - } - _ => true, - } - }); - summaries.query(dep) - } -} - -impl<'cfg> Source for RegistrySource<'cfg> { - fn update(&mut self) -> CargoResult<()> { - // If we have an imprecise version then we don't know what we're going - // to look for, so we always attempt to perform an update here. - // - // If we have a precise version, then we'll update lazily during the - // querying phase. 
Note that precise in this case is only - // `Some("locked")` as other `Some` values indicate a `cargo update - // --precise` request - if self.source_id.precise() != Some("locked") { - try!(self.do_update()); - } - Ok(()) - } - - fn download(&mut self, package: &PackageId) -> CargoResult { - let config = try!(self.config()); - let url = try!(config.dl.to_url().map_err(internal)); - let mut url = url.clone(); - url.path_segments_mut().unwrap() - .push(package.name()) - .push(&package.version().to_string()) - .push("download"); - let krate = try!(self.download_package(package, &url).chain_error(|| { - internal(format!("failed to download package `{}` from {}", - package, url)) - })); - let path = try!(self.unpack_package(package, &krate).chain_error(|| { - internal(format!("failed to unpack package `{}`", package)) - })); - - let mut src = PathSource::new(&path, &self.source_id, self.config); - try!(src.update()); - src.download(package) - } - - fn fingerprint(&self, pkg: &Package) -> CargoResult { - Ok(pkg.package_id().version().to_string()) - } -} diff --git a/src/cargo/sources/registry/index.rs b/src/cargo/sources/registry/index.rs new file mode 100644 index 00000000000..7be0a309c65 --- /dev/null +++ b/src/cargo/sources/registry/index.rs @@ -0,0 +1,196 @@ +use std::collections::HashMap; +use std::io::prelude::*; +use std::fs::File; +use std::path::Path; + +use rustc_serialize::json; + +use core::dependency::{Dependency, DependencyInner, Kind}; +use core::{SourceId, Summary, PackageId, Registry}; +use sources::registry::{RegistryPackage, RegistryDependency, INDEX_LOCK}; +use util::{CargoResult, ChainError, internal, Filesystem, Config}; + +pub struct RegistryIndex<'cfg> { + source_id: SourceId, + path: Filesystem, + cache: HashMap>, + hashes: HashMap<(String, String), String>, // (name, vers) => cksum + config: &'cfg Config, + locked: bool, +} + +impl<'cfg> RegistryIndex<'cfg> { + pub fn new(id: &SourceId, + path: &Filesystem, + config: &'cfg Config, + locked: 
bool) -> RegistryIndex<'cfg> { + RegistryIndex { + source_id: id.clone(), + path: path.clone(), + cache: HashMap::new(), + hashes: HashMap::new(), + config: config, + locked: locked, + } + } + + /// Return the hash listed for a specified PackageId. + pub fn hash(&mut self, pkg: &PackageId) -> CargoResult { + let key = (pkg.name().to_string(), pkg.version().to_string()); + if let Some(s) = self.hashes.get(&key) { + return Ok(s.clone()) + } + // Ok, we're missing the key, so parse the index file to load it. + try!(self.summaries(pkg.name())); + self.hashes.get(&key).chain_error(|| { + internal(format!("no hash listed for {}", pkg)) + }).map(|s| s.clone()) + } + + /// Parse the on-disk metadata for the package provided + /// + /// Returns a list of pairs of (summary, yanked) for the package name + /// specified. + pub fn summaries(&mut self, name: &str) -> CargoResult<&Vec<(Summary, bool)>> { + if self.cache.contains_key(name) { + return Ok(self.cache.get(name).unwrap()); + } + let summaries = try!(self.load_summaries(name)); + let summaries = summaries.into_iter().filter(|summary| { + summary.0.package_id().name() == name + }).collect(); + self.cache.insert(name.to_string(), summaries); + Ok(self.cache.get(name).unwrap()) + } + + fn load_summaries(&mut self, name: &str) -> CargoResult> { + let (path, _lock) = if self.locked { + let lock = self.path.open_ro(Path::new(INDEX_LOCK), + self.config, + "the registry index"); + match lock { + Ok(lock) => { + (lock.path().parent().unwrap().to_path_buf(), Some(lock)) + } + Err(_) => return Ok(Vec::new()), + } + } else { + (self.path.clone().into_path_unlocked(), None) + }; + + let fs_name = name.chars().flat_map(|c| { + c.to_lowercase() + }).collect::(); + + // see module comment for why this is structured the way it is + let path = match fs_name.len() { + 1 => path.join("1").join(&fs_name), + 2 => path.join("2").join(&fs_name), + 3 => path.join("3").join(&fs_name[..1]).join(&fs_name), + _ => path.join(&fs_name[0..2]) + 
.join(&fs_name[2..4]) + .join(&fs_name), + }; + match File::open(&path) { + Ok(mut f) => { + let mut contents = String::new(); + try!(f.read_to_string(&mut contents)); + let ret: CargoResult>; + ret = contents.lines().filter(|l| l.trim().len() > 0) + .map(|l| self.parse_registry_package(l)) + .collect(); + ret.chain_error(|| { + internal(format!("failed to parse registry's information \ + for: {}", name)) + }) + } + Err(..) => Ok(Vec::new()), + } + } + + /// Parse a line from the registry's index file into a Summary for a + /// package. + /// + /// The returned boolean is whether or not the summary has been yanked. + fn parse_registry_package(&mut self, line: &str) + -> CargoResult<(Summary, bool)> { + let RegistryPackage { + name, vers, cksum, deps, features, yanked + } = try!(json::decode::(line)); + let pkgid = try!(PackageId::new(&name, &vers, &self.source_id)); + let deps: CargoResult> = deps.into_iter().map(|dep| { + self.parse_registry_dependency(dep) + }).collect(); + let deps = try!(deps); + let summary = try!(Summary::new(pkgid, deps, features)); + let summary = summary.set_checksum(cksum.clone()); + self.hashes.insert((name, vers), cksum); + Ok((summary, yanked.unwrap_or(false))) + } + + /// Converts an encoded dependency in the registry to a cargo dependency + fn parse_registry_dependency(&self, dep: RegistryDependency) + -> CargoResult { + let RegistryDependency { + name, req, features, optional, default_features, target, kind + } = dep; + + let dep = try!(DependencyInner::parse(&name, Some(&req), + &self.source_id)); + let kind = match kind.as_ref().map(|s| &s[..]).unwrap_or("") { + "dev" => Kind::Development, + "build" => Kind::Build, + _ => Kind::Normal, + }; + + let platform = match target { + Some(target) => Some(try!(target.parse())), + None => None, + }; + + // Unfortunately older versions of cargo and/or the registry ended up + // publishing lots of entries where the features array contained the + // empty feature, "", inside. 
This confuses the resolution process much + // later on and these features aren't actually valid, so filter them all + // out here. + let features = features.into_iter().filter(|s| !s.is_empty()).collect(); + + Ok(dep.set_optional(optional) + .set_default_features(default_features) + .set_features(features) + .set_platform(platform) + .set_kind(kind) + .into_dependency()) + } +} + +impl<'cfg> Registry for RegistryIndex<'cfg> { + fn query(&mut self, dep: &Dependency) -> CargoResult> { + let mut summaries = { + let summaries = try!(self.summaries(dep.name())); + summaries.iter().filter(|&&(_, yanked)| { + dep.source_id().precise().is_some() || !yanked + }).map(|s| s.0.clone()).collect::>() + }; + + // Handle `cargo update --precise` here. If specified, our own source + // will have a precise version listed of the form `=` where + // `` is the name of a crate on this source and `` is the + // version requested (argument to `--precise`). + summaries.retain(|s| { + match self.source_id.precise() { + Some(p) if p.starts_with(dep.name()) && + p[dep.name().len()..].starts_with("=") => { + let vers = &p[dep.name().len() + 1..]; + s.version().to_string() == vers + } + _ => true, + } + }); + summaries.query(dep) + } + + fn supports_checksums(&self) -> bool { + true + } +} diff --git a/src/cargo/sources/registry/local.rs b/src/cargo/sources/registry/local.rs new file mode 100644 index 00000000000..46387bb6841 --- /dev/null +++ b/src/cargo/sources/registry/local.rs @@ -0,0 +1,97 @@ +use std::io::SeekFrom; +use std::io::prelude::*; +use std::path::Path; + +use rustc_serialize::hex::ToHex; + +use core::PackageId; +use sources::registry::{RegistryData, RegistryConfig}; +use util::{Config, CargoResult, ChainError, human, Sha256, Filesystem}; +use util::FileLock; + +pub struct LocalRegistry<'cfg> { + index_path: Filesystem, + root: Filesystem, + src_path: Filesystem, + config: &'cfg Config, +} + +impl<'cfg> LocalRegistry<'cfg> { + pub fn new(root: &Path, + config: &'cfg Config, + 
name: &str) -> LocalRegistry<'cfg> { + LocalRegistry { + src_path: config.registry_source_path().join(name), + index_path: Filesystem::new(root.join("index")), + root: Filesystem::new(root.to_path_buf()), + config: config, + } + } +} + +impl<'cfg> RegistryData for LocalRegistry<'cfg> { + fn index_path(&self) -> &Filesystem { + &self.index_path + } + + fn config(&self) -> CargoResult> { + // Local registries don't have configuration for remote APIs or anything + // like that + Ok(None) + } + + fn update_index(&mut self) -> CargoResult<()> { + // Nothing to update, we just use what's on disk. Verify it actually + // exists though. We don't use any locks as we're just checking whether + // these directories exist. + let root = self.root.clone().into_path_unlocked(); + if !root.is_dir() { + bail!("local registry path is not a directory: {}", + root.display()) + } + let index_path = self.index_path.clone().into_path_unlocked(); + if !index_path.is_dir() { + bail!("local registry index path is not a directory: {}", + index_path.display()) + } + Ok(()) + } + + fn download(&mut self, pkg: &PackageId, checksum: &str) + -> CargoResult { + let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version()); + let mut crate_file = try!(self.root.open_ro(&crate_file, + self.config, + "crate file")); + + // If we've already got an unpacked version of this crate, then skip the + // checksum below as it is in theory already verified. + let dst = format!("{}-{}", pkg.name(), pkg.version()); + if self.src_path.join(dst).into_path_unlocked().exists() { + return Ok(crate_file) + } + + try!(self.config.shell().status("Unpacking", pkg)); + + // We don't actually need to download anything per-se, we just need to + // verify the checksum matches the .crate file itself. 
+ let mut state = Sha256::new(); + let mut buf = [0; 64 * 1024]; + loop { + let n = try!(crate_file.read(&mut buf).chain_error(|| { + human(format!("failed to read `{}`", crate_file.path().display())) + })); + if n == 0 { + break + } + state.update(&buf[..n]); + } + if state.finish().to_hex() != checksum { + bail!("failed to verify the checksum of `{}`", pkg) + } + + try!(crate_file.seek(SeekFrom::Start(0))); + + Ok(crate_file) + } +} diff --git a/src/cargo/sources/registry/mod.rs b/src/cargo/sources/registry/mod.rs new file mode 100644 index 00000000000..3a1babafb04 --- /dev/null +++ b/src/cargo/sources/registry/mod.rs @@ -0,0 +1,368 @@ +//! A `Source` for registry-based packages. +//! +//! # What's a Registry? +//! +//! Registries are central locations where packages can be uploaded to, +//! discovered, and searched for. The purpose of a registry is to have a +//! location that serves as permanent storage for versions of a crate over time. +//! +//! Compared to git sources, a registry provides many packages as well as many +//! versions simultaneously. Git sources can also have commits deleted through +//! rebasings where registries cannot have their versions deleted. +//! +//! # The Index of a Registry +//! +//! One of the major difficulties with a registry is that hosting so many +//! packages may quickly run into performance problems when dealing with +//! dependency graphs. It's infeasible for cargo to download the entire contents +//! of the registry just to resolve one package's dependencies, for example. As +//! a result, cargo needs some efficient method of querying what packages are +//! available on a registry, what versions are available, and what the +//! dependencies for each version is. +//! +//! One method of doing so would be having the registry expose an HTTP endpoint +//! which can be queried with a list of packages and a response of their +//! dependencies and versions is returned. This is somewhat inefficient however +//! 
as we may have to hit the endpoint many times and we may have already +//! queried for much of the data locally already (for other packages, for +//! example). This also involves inventing a transport format between the +//! registry and Cargo itself, so this route was not taken. +//! +//! Instead, Cargo communicates with registries through a git repository +//! referred to as the Index. The Index of a registry is essentially an easily +//! query-able version of the registry's database for a list of versions of a +//! package as well as a list of dependencies for each version. +//! +//! Using git to host this index provides a number of benefits: +//! +//! * The entire index can be stored efficiently locally on disk. This means +//! that all queries of a registry can happen locally and don't need to touch +//! the network. +//! +//! * Updates of the index are quite efficient. Using git buys incremental +//! updates, compressed transmission, etc for free. The index must be updated +//! each time we need fresh information from a registry, but this is one +//! update of a git repository that probably hasn't changed a whole lot so +//! it shouldn't be too expensive. +//! +//! Additionally, each modification to the index is just appending a line at +//! the end of a file (the exact format is described later). This means that +//! the commits for an index are quite small and easily applied/compressible. +//! +//! ## The format of the Index +//! +//! The index is a store for the list of versions for all packages known, so its +//! format on disk is optimized slightly to ensure that `ls registry` doesn't +//! produce a list of all packages ever known. The index also wants to ensure +//! that there's not a million files which may actually end up hitting +//! filesystem limits at some point. To this end, a few decisions were made +//! about the format of the registry: +//! +//! 1. Each crate will have one file corresponding to it. Each version for a +//! 
crate will just be a line in this file. +//! 2. There will be two tiers of directories for crate names, under which +//! crates corresponding to those tiers will be located. +//! +//! As an example, this is an example hierarchy of an index: +//! +//! ```notrust +//! . +//! ├── 3 +//! │   └── u +//! │   └── url +//! ├── bz +//! │   └── ip +//! │   └── bzip2 +//! ├── config.json +//! ├── en +//! │   └── co +//! │   └── encoding +//! └── li +//!    ├── bg +//!    │   └── libgit2 +//!    └── nk +//!    └── link-config +//! ``` +//! +//! The root of the index contains a `config.json` file with a few entries +//! corresponding to the registry (see `RegistryConfig` below). +//! +//! Otherwise, there are three numbered directories (1, 2, 3) for crates with +//! names 1, 2, and 3 characters in length. The 1/2 directories simply have the +//! crate files underneath them, while the 3 directory is sharded by the first +//! letter of the crate name. +//! +//! Otherwise the top-level directory contains many two-letter directory names, +//! each of which has many sub-folders with two letters. At the end of all these +//! are the actual crate files themselves. +//! +//! The purpose of this layout is to hopefully cut down on `ls` sizes as well as +//! efficient lookup based on the crate name itself. +//! +//! ## Crate files +//! +//! Each file in the index is the history of one crate over time. Each line in +//! the file corresponds to one version of a crate, stored in JSON format (see +//! the `RegistryPackage` structure below). +//! +//! As new versions are published, new lines are appended to this file. The only +//! modifications to this file that should happen over time are yanks of a +//! particular version. +//! +//! # Downloading Packages +//! +//! The purpose of the Index was to provide an efficient method to resolve the +//! dependency graph for a package. So far we only required one network +//! interaction to update the registry's repository (yay!). 
After resolution has +//! been performed, however we need to download the contents of packages so we +//! can read the full manifest and build the source code. +//! +//! To accomplish this, this source's `download` method will make an HTTP +//! request per-package requested to download tarballs into a local cache. These +//! tarballs will then be unpacked into a destination folder. +//! +//! Note that because versions uploaded to the registry are frozen forever that +//! the HTTP download and unpacking can all be skipped if the version has +//! already been downloaded and unpacked. This caching allows us to only +//! download a package when absolutely necessary. +//! +//! # Filesystem Hierarchy +//! +//! Overall, the `$HOME/.cargo` looks like this when talking about the registry: +//! +//! ```notrust +//! # A folder under which all registry metadata is hosted (similar to +//! # $HOME/.cargo/git) +//! $HOME/.cargo/registry/ +//! +//! # For each registry that cargo knows about (keyed by hostname + hash) +//! # there is a folder which is the checked out version of the index for +//! # the registry in this location. Note that this is done so cargo can +//! # support multiple registries simultaneously +//! index/ +//! registry1-/ +//! registry2-/ +//! ... +//! +//! # This folder is a cache for all downloaded tarballs from a registry. +//! # Once downloaded and verified, a tarball never changes. +//! cache/ +//! registry1-/-.crate +//! ... +//! +//! # Location in which all tarballs are unpacked. Each tarball is known to +//! # be frozen after downloading, so transitively this folder is also +//! # frozen once its unpacked (it's never unpacked again) +//! src/ +//! registry1-/-/... +//! ... +//! 
``` + +use std::collections::HashMap; +use std::fs::File; +use std::path::{PathBuf, Path}; + +use flate2::read::GzDecoder; +use tar::Archive; + +use core::{Source, SourceId, PackageId, Package, Summary, Registry}; +use core::dependency::Dependency; +use sources::PathSource; +use util::{CargoResult, Config, internal, ChainError, FileLock, Filesystem}; +use util::hex; + +const INDEX_LOCK: &'static str = ".cargo-index-lock"; +pub static CRATES_IO: &'static str = "https://github.com/rust-lang/crates.io-index"; + +pub struct RegistrySource<'cfg> { + source_id: SourceId, + src_path: Filesystem, + config: &'cfg Config, + updated: bool, + ops: Box, + index: index::RegistryIndex<'cfg>, + index_locked: bool, +} + +#[derive(RustcDecodable)] +pub struct RegistryConfig { + /// Download endpoint for all crates. This will be appended with + /// `///download` and then will be hit with an HTTP GET + /// request to download the tarball for a crate. + pub dl: String, + + /// API endpoint for the registry. This is what's actually hit to perform + /// operations like yanks, owner modifications, publish new crates, etc. 
+ pub api: String, +} + +#[derive(RustcDecodable)] +struct RegistryPackage { + name: String, + vers: String, + deps: Vec, + features: HashMap>, + cksum: String, + yanked: Option, +} + +#[derive(RustcDecodable)] +struct RegistryDependency { + name: String, + req: String, + features: Vec, + optional: bool, + default_features: bool, + target: Option, + kind: Option, +} + +pub trait RegistryData { + fn index_path(&self) -> &Filesystem; + fn config(&self) -> CargoResult>; + fn update_index(&mut self) -> CargoResult<()>; + fn download(&mut self, + pkg: &PackageId, + checksum: &str) -> CargoResult; +} + +mod index; +mod remote; +mod local; + +fn short_name(id: &SourceId) -> String { + let hash = hex::short_hash(id); + let ident = id.url().host_str().unwrap_or("").to_string(); + format!("{}-{}", ident, hash) +} + +impl<'cfg> RegistrySource<'cfg> { + pub fn remote(source_id: &SourceId, + config: &'cfg Config) -> RegistrySource<'cfg> { + let name = short_name(source_id); + let ops = remote::RemoteRegistry::new(source_id, config, &name); + RegistrySource::new(source_id, config, &name, Box::new(ops), true) + } + + pub fn local(source_id: &SourceId, + path: &Path, + config: &'cfg Config) -> RegistrySource<'cfg> { + let name = short_name(source_id); + let ops = local::LocalRegistry::new(path, config, &name); + RegistrySource::new(source_id, config, &name, Box::new(ops), false) + } + + fn new(source_id: &SourceId, + config: &'cfg Config, + name: &str, + ops: Box, + index_locked: bool) -> RegistrySource<'cfg> { + RegistrySource { + src_path: config.registry_source_path().join(name), + config: config, + source_id: source_id.clone(), + updated: false, + index: index::RegistryIndex::new(source_id, + ops.index_path(), + config, + index_locked), + index_locked: index_locked, + ops: ops, + } + } + + /// Decode the configuration stored within the registry. + /// + /// This requires that the index has been at least checked out. 
+ pub fn config(&self) -> CargoResult> { + self.ops.config() + } + + /// Unpacks a downloaded package into a location where it's ready to be + /// compiled. + /// + /// No action is taken if the source looks like it's already unpacked. + fn unpack_package(&self, + pkg: &PackageId, + tarball: &FileLock) + -> CargoResult { + let dst = self.src_path.join(&format!("{}-{}", pkg.name(), + pkg.version())); + try!(dst.create_dir()); + // Note that we've already got the `tarball` locked above, and that + // implies a lock on the unpacked destination as well, so this access + // via `into_path_unlocked` should be ok. + let dst = dst.into_path_unlocked(); + let ok = dst.join(".cargo-ok"); + if ok.exists() { + return Ok(dst) + } + + let gz = try!(GzDecoder::new(tarball.file())); + let mut tar = Archive::new(gz); + try!(tar.unpack(dst.parent().unwrap())); + try!(File::create(&ok)); + Ok(dst) + } + + fn do_update(&mut self) -> CargoResult<()> { + try!(self.ops.update_index()); + let path = self.ops.index_path(); + self.index = index::RegistryIndex::new(&self.source_id, + path, + self.config, + self.index_locked); + Ok(()) + } +} + +impl<'cfg> Registry for RegistrySource<'cfg> { + fn query(&mut self, dep: &Dependency) -> CargoResult> { + // If this is a precise dependency, then it came from a lockfile and in + // theory the registry is known to contain this version. If, however, we + // come back with no summaries, then our registry may need to be + // updated, so we fall back to performing a lazy update. + if dep.source_id().precise().is_some() && !self.updated { + if try!(self.index.query(dep)).is_empty() { + try!(self.do_update()); + } + } + + self.index.query(dep) + } + + fn supports_checksums(&self) -> bool { + true + } +} + +impl<'cfg> Source for RegistrySource<'cfg> { + fn update(&mut self) -> CargoResult<()> { + // If we have an imprecise version then we don't know what we're going + // to look for, so we always attempt to perform an update here. 
+ // + // If we have a precise version, then we'll update lazily during the + // querying phase. Note that precise in this case is only + // `Some("locked")` as other `Some` values indicate a `cargo update + // --precise` request + if self.source_id.precise() != Some("locked") { + try!(self.do_update()); + } + Ok(()) + } + + fn download(&mut self, package: &PackageId) -> CargoResult { + let hash = try!(self.index.hash(package)); + let path = try!(self.ops.download(package, &hash)); + let path = try!(self.unpack_package(package, &path).chain_error(|| { + internal(format!("failed to unpack package `{}`", package)) + })); + let mut src = PathSource::new(&path, &self.source_id, self.config); + try!(src.update()); + src.download(package) + } + + fn fingerprint(&self, pkg: &Package) -> CargoResult { + Ok(pkg.package_id().version().to_string()) + } +} diff --git a/src/cargo/sources/registry/remote.rs b/src/cargo/sources/registry/remote.rs new file mode 100644 index 00000000000..700bd6811db --- /dev/null +++ b/src/cargo/sources/registry/remote.rs @@ -0,0 +1,153 @@ +use std::io::SeekFrom; +use std::io::prelude::*; +use std::path::Path; + +use curl::easy::Easy; +use git2; +use rustc_serialize::json; +use rustc_serialize::hex::ToHex; + +use core::{PackageId, SourceId}; +use ops; +use sources::git; +use sources::registry::{RegistryData, RegistryConfig, INDEX_LOCK}; +use util::network; +use util::paths; +use util::{FileLock, Filesystem}; +use util::{Config, CargoResult, ChainError, human, Sha256, ToUrl}; + +pub struct RemoteRegistry<'cfg> { + index_path: Filesystem, + cache_path: Filesystem, + source_id: SourceId, + config: &'cfg Config, + handle: Option, +} + +impl<'cfg> RemoteRegistry<'cfg> { + pub fn new(source_id: &SourceId, config: &'cfg Config, name: &str) + -> RemoteRegistry<'cfg> { + RemoteRegistry { + index_path: config.registry_index_path().join(name), + cache_path: config.registry_cache_path().join(name), + source_id: source_id.clone(), + config: config, + handle: 
None, + } + } +} + +impl<'cfg> RegistryData for RemoteRegistry<'cfg> { + fn index_path(&self) -> &Filesystem { + &self.index_path + } + + fn config(&self) -> CargoResult> { + let lock = try!(self.index_path.open_ro(Path::new(INDEX_LOCK), + self.config, + "the registry index")); + let path = lock.path().parent().unwrap(); + let contents = try!(paths::read(&path.join("config.json"))); + let config = try!(json::decode(&contents)); + Ok(Some(config)) + } + + fn update_index(&mut self) -> CargoResult<()> { + // Ensure that we'll actually be able to acquire an HTTP handle later on + // once we start trying to download crates. This will weed out any + // problems with `.cargo/config` configuration related to HTTP. + // + // This way if there's a problem the error gets printed before we even + // hit the index, which may not actually read this configuration. + try!(ops::http_handle(self.config)); + + // Then we actually update the index + try!(self.index_path.create_dir()); + let lock = try!(self.index_path.open_rw(Path::new(INDEX_LOCK), + self.config, + "the registry index")); + let path = lock.path().parent().unwrap(); + + try!(self.config.shell().status("Updating", + format!("registry `{}`", self.source_id.url()))); + let repo = try!(git2::Repository::open(path).or_else(|_| { + let _ = lock.remove_siblings(); + git2::Repository::init(path) + })); + + // git fetch origin + let url = self.source_id.url().to_string(); + let refspec = "refs/heads/*:refs/remotes/origin/*"; + + try!(git::fetch(&repo, &url, refspec, &self.config).chain_error(|| { + human(format!("failed to fetch `{}`", url)) + })); + + // git reset --hard origin/master + let reference = "refs/remotes/origin/master"; + let oid = try!(repo.refname_to_id(reference)); + trace!("[{}] updating to rev {}", self.source_id, oid); + let object = try!(repo.find_object(oid, None)); + try!(repo.reset(&object, git2::ResetType::Hard, None)); + Ok(()) + } + + fn download(&mut self, pkg: &PackageId, checksum: &str) + -> 
CargoResult { + let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); + let path = Path::new(&filename); + let mut dst = try!(self.cache_path.open_rw(path, self.config, &filename)); + let meta = try!(dst.file().metadata()); + if meta.len() > 0 { + return Ok(dst) + } + try!(self.config.shell().status("Downloading", pkg)); + + let config = try!(self.config()).unwrap(); + let mut url = try!(config.dl.to_url()); + url.path_segments_mut().unwrap() + .push(pkg.name()) + .push(&pkg.version().to_string()) + .push("download"); + + let handle = match self.handle { + Some(ref mut handle) => handle, + None => { + self.handle = Some(try!(ops::http_handle(self.config))); + self.handle.as_mut().unwrap() + } + }; + // TODO: don't download into memory, but ensure that if we ctrl-c a + // download we should resume either from the start or the middle + // on the next time + try!(handle.get(true)); + try!(handle.url(&url.to_string())); + try!(handle.follow_location(true)); + let mut state = Sha256::new(); + let mut body = Vec::new(); + { + let mut handle = handle.transfer(); + try!(handle.write_function(|buf| { + state.update(buf); + body.extend_from_slice(buf); + Ok(buf.len()) + })); + try!(network::with_retry(self.config, || { + handle.perform() + })) + } + let code = try!(handle.response_code()); + if code != 200 && code != 0 { + bail!("failed to get 200 response from `{}`, got {}", url, code) + } + + // Verify what we just downloaded + if state.finish().to_hex() != checksum { + bail!("failed to verify the checksum of `{}`", pkg) + } + + try!(dst.write_all(&body)); + try!(dst.seek(SeekFrom::Start(0))); + Ok(dst) + } +} diff --git a/src/cargo/sources/replaced.rs b/src/cargo/sources/replaced.rs new file mode 100644 index 00000000000..7fb95bdf6c8 --- /dev/null +++ b/src/cargo/sources/replaced.rs @@ -0,0 +1,60 @@ +use core::{Source, Registry, PackageId, Package, Dependency, Summary, SourceId}; +use util::{CargoResult, ChainError, human}; + +pub struct ReplacedSource<'cfg> { 
+ to_replace: SourceId, + replace_with: SourceId, + inner: Box, +} + +impl<'cfg> ReplacedSource<'cfg> { + pub fn new(to_replace: &SourceId, + replace_with: &SourceId, + src: Box) -> ReplacedSource<'cfg> { + ReplacedSource { + to_replace: to_replace.clone(), + replace_with: replace_with.clone(), + inner: src, + } + } +} + +impl<'cfg> Registry for ReplacedSource<'cfg> { + fn query(&mut self, dep: &Dependency) -> CargoResult> { + let dep = dep.clone().map_source(&self.to_replace, &self.replace_with); + let ret = try!(self.inner.query(&dep).chain_error(|| { + human(format!("failed to query replaced source `{}`", + self.to_replace)) + })); + Ok(ret.into_iter().map(|summary| { + summary.map_source(&self.replace_with, &self.to_replace) + }).collect()) + } +} + +impl<'cfg> Source for ReplacedSource<'cfg> { + fn update(&mut self) -> CargoResult<()> { + self.inner.update().chain_error(|| { + human(format!("failed to update replaced source `{}`", + self.to_replace)) + }) + } + + fn download(&mut self, id: &PackageId) -> CargoResult { + let id = id.with_source_id(&self.replace_with); + let pkg = try!(self.inner.download(&id).chain_error(|| { + human(format!("failed to download replaced source `{}`", + self.to_replace)) + })); + Ok(pkg.map_source(&self.replace_with, &self.to_replace)) + } + + fn fingerprint(&self, id: &Package) -> CargoResult { + self.inner.fingerprint(&id) + } + + fn verify(&self, id: &PackageId) -> CargoResult<()> { + let id = id.with_source_id(&self.replace_with); + self.inner.verify(&id) + } +} diff --git a/src/cargo/util/config.rs b/src/cargo/util/config.rs index 28d890d7604..43721f016db 100644 --- a/src/cargo/util/config.rs +++ b/src/cargo/util/config.rs @@ -274,7 +274,7 @@ impl Config { } pub fn expected(&self, ty: &str, key: &str, val: CV) -> CargoResult { - val.expected(ty).map_err(|e| { + val.expected(ty, key).map_err(|e| { human(format!("invalid configuration for key `{}`\n{}", key, e)) }) } @@ -512,38 +512,39 @@ impl ConfigValue { Ok(()) } - pub fn 
i64(&self) -> CargoResult<(i64, &Path)> { + pub fn i64(&self, key: &str) -> CargoResult<(i64, &Path)> { match *self { CV::Integer(i, ref p) => Ok((i, p)), - _ => self.expected("integer"), + _ => self.expected("integer", key), } } - pub fn string(&self) -> CargoResult<(&str, &Path)> { + pub fn string(&self, key: &str) -> CargoResult<(&str, &Path)> { match *self { CV::String(ref s, ref p) => Ok((s, p)), - _ => self.expected("string"), + _ => self.expected("string", key), } } - pub fn table(&self) -> CargoResult<(&HashMap, &Path)> { + pub fn table(&self, key: &str) + -> CargoResult<(&HashMap, &Path)> { match *self { CV::Table(ref table, ref p) => Ok((table, p)), - _ => self.expected("table"), + _ => self.expected("table", key), } } - pub fn list(&self) -> CargoResult<&[(String, PathBuf)]> { + pub fn list(&self, key: &str) -> CargoResult<&[(String, PathBuf)]> { match *self { CV::List(ref list, _) => Ok(list), - _ => self.expected("list"), + _ => self.expected("list", key), } } - pub fn boolean(&self) -> CargoResult<(bool, &Path)> { + pub fn boolean(&self, key: &str) -> CargoResult<(bool, &Path)> { match *self { CV::Boolean(b, ref p) => Ok((b, p)), - _ => self.expected("bool"), + _ => self.expected("bool", key), } } @@ -567,10 +568,10 @@ impl ConfigValue { } } - fn expected(&self, wanted: &str) -> CargoResult { - Err(internal(format!("expected a {}, but found a {} in {}", - wanted, self.desc(), - self.definition_path().display()))) + fn expected(&self, wanted: &str, key: &str) -> CargoResult { + Err(human(format!("expected a {}, but found a {} for `{}` in {}", + wanted, self.desc(), key, + self.definition_path().display()))) } fn into_toml(self) -> toml::Value { diff --git a/src/cargo/util/to_url.rs b/src/cargo/util/to_url.rs index c8685708287..c250937b3a2 100644 --- a/src/cargo/util/to_url.rs +++ b/src/cargo/util/to_url.rs @@ -1,22 +1,25 @@ -use url::Url; use std::path::Path; +use url::Url; + +use util::{human, CargoResult}; + pub trait ToUrl { - fn to_url(self) -> 
Result; + fn to_url(self) -> CargoResult; } impl<'a> ToUrl for &'a str { - fn to_url(self) -> Result { + fn to_url(self) -> CargoResult { Url::parse(self).map_err(|s| { - format!("invalid url `{}`: {}", self, s) + human(format!("invalid url `{}`: {}", self, s)) }) } } impl<'a> ToUrl for &'a Path { - fn to_url(self) -> Result { + fn to_url(self) -> CargoResult { Url::from_file_path(self).map_err(|()| { - format!("invalid path url `{}`", self.display()) + human(format!("invalid path url `{}`", self.display())) }) } } diff --git a/src/cargo/util/toml.rs b/src/cargo/util/toml.rs index 7de06ddc250..ffc089aa538 100644 --- a/src/cargo/util/toml.rs +++ b/src/cargo/util/toml.rs @@ -817,7 +817,7 @@ impl TomlDependency { .or_else(|| details.tag.clone().map(GitReference::Tag)) .or_else(|| details.rev.clone().map(GitReference::Rev)) .unwrap_or_else(|| GitReference::Branch("master".to_string())); - let loc = try!(git.to_url().map_err(human)); + let loc = try!(git.to_url()); SourceId::for_git(&loc, reference) }, (None, Some(path)) => { @@ -838,7 +838,7 @@ impl TomlDependency { cx.source_id.clone() } }, - (None, None) => try!(SourceId::for_central(cx.config)), + (None, None) => try!(SourceId::crates_io(cx.config)), }; let version = details.version.as_ref().map(|v| &v[..]); diff --git a/src/doc/faq.md b/src/doc/faq.md index 7782247ce79..c7ac8a9b1e7 100644 --- a/src/doc/faq.md +++ b/src/doc/faq.md @@ -187,5 +187,7 @@ that this flag *does not change the behavior of Cargo*, it simply asserts that Cargo shouldn't touch the network as a previous command has been run to ensure that network activity shouldn't be necessary. -Note that Cargo does not yet support vendoring in a first-class fashion, but -this is a hotly desired feature and coming soon! +For more information about vendoring, see documentation on [source +replacement][replace]. 
+ +[replace]: source-replacement.html diff --git a/src/doc/header.html b/src/doc/header.html index 84544730a53..296861c434d 100644 --- a/src/doc/header.html +++ b/src/doc/header.html @@ -38,6 +38,7 @@

CARGO

  • Configuration
  • Package ID specs
  • Environment Variables
  • +
  • Source Replacement
  • diff --git a/src/doc/source-replacement.md b/src/doc/source-replacement.md new file mode 100644 index 00000000000..866324e931d --- /dev/null +++ b/src/doc/source-replacement.md @@ -0,0 +1,128 @@ +% Replacing sources + +Cargo supports the ability to **replace one source with another** to express +strategies along the lines of mirrors or vendoering dependencies. Configuration +is currently done through the [`.cargo/config` configuration][config] mechanism, +like so: + +[config]: config.html + +```toml +# The `source` table is where all keys related to source-replacement +# are store. +[source] + +# Under the `source` table are a number of other tables whose keys are a +# name for the relevant source. For example this section defines a new +# source, called `my-awesome-source`, which comes from a directory +# located at `vendor` relative to the directory containing this `.cargo/config` +# file +[source.my-awesome-source] +directory = "vendor" + +# The crates.io default source for crates is available under the name +# "crates-io", and here we use the `replace-with` key to indicate that it's +# replaced with our source above. +[source.crates-io] +replace-with = "my-awesome-source" +``` + +With this configuration Cargo attempt to look up all crates in the directory +"vendor" rather than querying the online registry at crates.io. Using source +replacement Cargo can express: + +* Vendoring - custom sources can be defined which represent crates on the local + filesystem. These sources are subsets of the source that they're replacing and + can be checked into projects if necessary. + +* Mirroring - sources can be replaced with an equivalent version which acts as a + cache for crates.io itself. + +Cargo has a core assumption about source replacement that the source code is +exactly the same from both sources. In our above example Cargo assumes that all +of the crates coming from `my-awesome-source` are the exact same as the copies +from `crates-io`. 
Note that this also means that `my-awesome-source` is not
+allowed to have crates which are not present in the `crates-io` source.
+
+As a consequence, source replacement is not appropriate for situations such as
+patching a dependency or a private registry. Cargo supports patching
+dependencies through the usage of [the `[replace]` key][replace-section], and
+private registry support is planned for a future version of Cargo.
+
+[replace-section]: manifest.html#the-replace-section
+
+## Configuration
+
+Configuration of replacement sources is done through [`.cargo/config`][config]
+and the full set of available keys are:
+
+```toml
+# Each source has its own table where the key is the name of the source
+[source.the-source-name]
+
+# Indicate that `the-source-name` will be replaced with `another-source`,
+# defined elsewhere
+replace-with = "another-source"
+
+# Available kinds of sources that can be specified (described below)
+registry = "https://example.com/path/to/index"
+local-registry = "path/to/registry"
+directory = "path/to/vendor"
+```
+
+The `crates-io` source represents the crates.io online registry (default
+source of crates) and can be replaced with:
+
+```toml
+[source.crates-io]
+replace-with = 'another-source'
+```
+
+## Registry Sources
+
+A "registry source" is one that is the same as crates.io itself. That is, it has
+an index served in a git repository which matches the format of the
+[crates.io index](https://github.com/rust-lang/crates.io-index). That repository
+then has configuration indicating where to download crates from.
+
+Currently there is not an already-available project for setting up a mirror of
+crates.io. Stay tuned though!
+
+## Local Registry Sources
+
+A "local registry source" is intended to be a subset of another registry
+source, but available on the local filesystem (aka vendoring). 
Local registries
+are downloaded ahead of time, typically sync'd with a `Cargo.lock`, and are
+made up of a set of `*.crate` files and an index like the normal registry is.
+
+The primary way to manage and create local registry sources is through the
+[`cargo-local-registry`][cargo-local-registry] subcommand, available on
+crates.io and can be installed with `cargo install cargo-local-registry`.
+
+[cargo-local-registry]: https://crates.io/crates/cargo-local-registry
+
+Local registries are contained within one directory and contain a number of
+`*.crate` files downloaded from crates.io as well as an `index` directory with
+the same format as the crates.io-index project (populated with just entries for
+the crates that are present).
+
+## Directory Sources
+
+A "directory source" is similar to a local registry source where it contains a
+number of crates available on the local filesystem, suitable for vendoring
+dependencies. Also like local registries, directory sources can primarily be
+managed by an external subcommand, [`cargo-vendor`][cargo-vendor], which can be
+installed with `cargo install cargo-vendor`.
+
+[cargo-vendor]: https://crates.io/crates/cargo-vendor
+
+Directory sources are distinct from local registries though in that they contain
+the unpacked version of `*.crate` files, making it more suitable in some
+situations to check everything into source control. A directory source is just a
+directory containing a number of other directories which contain the source code
+for crates (the unpacked version of `*.crate` files). Currently no restriction
+is placed on the name of each directory.
+
+Each crate in a directory source also has an associated metadata file indicating
+the checksum of each file in the crate to protect against accidental
+modifications.
diff --git a/tests/bad-config.rs b/tests/bad-config.rs index 2ccece7d550..15324711fbb 100644 --- a/tests/bad-config.rs +++ b/tests/bad-config.rs @@ -74,11 +74,14 @@ fn bad3() { proxy = true "#); Package::new("foo", "1.0.0").publish(); + assert_that(foo.cargo_process("publish").arg("-v"), execs().with_status(101).with_stderr("\ -[UPDATING] registry `[..]` -[ERROR] invalid configuration for key `http.proxy` -expected a string, but found a boolean in [..]config +error: failed to update registry [..] + +Caused by: + invalid configuration for key `http.proxy` +expected a string, but found a boolean for `http.proxy` in [..]config ")); } @@ -95,7 +98,7 @@ fn bad4() { Caused by: invalid configuration for key `cargo-new.name` -expected a string, but found a boolean in [..]config +expected a string, but found a boolean for `cargo-new.name` in [..]config ")); } @@ -538,6 +541,156 @@ This will be considered an error in future versions ")); } +#[test] +fn bad_source_config1() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.foo] + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +error: no source URL specified for `source.foo`, need [..] +")); +} + +#[test] +fn bad_source_config2() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'bar' + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +error: failed to load source for a dependency on `bar` + +Caused by: + Unable to update registry https://[..] 
+ +Caused by: + could not find a configured source with the name `bar` \ + when attempting to lookup `crates-io` (configuration in [..]) +")); +} + +#[test] +fn bad_source_config3() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'crates-io' + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +error: failed to load source for a dependency on `bar` + +Caused by: + Unable to update registry https://[..] + +Caused by: + detected a cycle of `replace-with` sources, [..] +")); +} + +#[test] +fn bad_source_config4() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'bar' + + [source.bar] + registry = 'http://example.com' + replace-with = 'crates-io' + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +error: failed to load source for a dependency on `bar` + +Caused by: + Unable to update registry https://[..] 
+ +Caused by: + detected a cycle of `replace-with` sources, the source `crates-io` is \ + eventually replaced with itself (configuration in [..]) +")); +} + +#[test] +fn bad_source_config5() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'bar' + + [source.bar] + registry = 'not a url' + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +error: configuration key `source.bar.registry` specified an invalid URL (in [..]) + +Caused by: + invalid url `not a url`: [..] +")); +} + #[test] fn both_git_and_path_specified() { let foo = project("foo") @@ -561,6 +714,31 @@ This will be considered an error in future versions ")); } +#[test] +fn bad_source_config6() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = ['not', 'a', 'string'] + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +error: expected a string, but found a array for `source.crates-io.replace-with` in [..] +")); +} + #[test] fn ignored_git_revision() { let foo = project("foo") @@ -581,3 +759,30 @@ fn ignored_git_revision() { [WARNING] key `branch` is ignored for dependency (bar). 
\ This will be considered an error in future versions")); } + +#[test] +fn bad_source_config7() { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.foo] + registry = 'http://example.com' + local-registry = 'file:///another/file' + "#); + + Package::new("bar", "0.1.0").publish(); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +error: more than one source URL specified for `source.foo` +")); +} diff --git a/tests/build-script.rs b/tests/build-script.rs index 3cc250af3f1..7814a45a0dd 100644 --- a/tests/build-script.rs +++ b/tests/build-script.rs @@ -2113,7 +2113,7 @@ fn warnings_hidden_for_upstream() { .with_stderr("\ [UPDATING] registry `[..]` [DOWNLOADING] bar v0.1.0 ([..]) -[COMPILING] bar v0.1.0 ([..]) +[COMPILING] bar v0.1.0 [RUNNING] `rustc [..]` [RUNNING] `[..]` [RUNNING] `rustc [..]` @@ -2159,7 +2159,7 @@ fn warnings_printed_on_vv() { .with_stderr("\ [UPDATING] registry `[..]` [DOWNLOADING] bar v0.1.0 ([..]) -[COMPILING] bar v0.1.0 ([..]) +[COMPILING] bar v0.1.0 [RUNNING] `rustc [..]` [RUNNING] `[..]` warning: foo diff --git a/tests/cargo_alias_config.rs b/tests/cargo_alias_config.rs index c026382e4d0..b235d220f43 100644 --- a/tests/cargo_alias_config.rs +++ b/tests/cargo_alias_config.rs @@ -19,7 +19,7 @@ fn alias_incorrect_config_type() { execs().with_status(101). 
with_stderr_contains("[ERROR] invalid configuration \ for key `alias.b-cargo-test` -expected a list, but found a integer in [..]")); +expected a list, but found a integer for [..]")); } diff --git a/tests/cargotest/support/mod.rs b/tests/cargotest/support/mod.rs index f493faef6ad..96f8d2baf4c 100644 --- a/tests/cargotest/support/mod.rs +++ b/tests/cargotest/support/mod.rs @@ -652,7 +652,8 @@ fn substitute_macros(input: &str) -> String { ("[VERIFYING]", " Verifying"), ("[ARCHIVING]", " Archiving"), ("[INSTALLING]", " Installing"), - ("[REPLACING]", " Replacing") + ("[REPLACING]", " Replacing"), + ("[UNPACKING]", " Unpacking"), ]; let mut result = input.to_owned(); for &(pat, subst) in macros.iter() { diff --git a/tests/cargotest/support/registry.rs b/tests/cargotest/support/registry.rs index a783c6c0b05..12e857211cc 100644 --- a/tests/cargotest/support/registry.rs +++ b/tests/cargotest/support/registry.rs @@ -27,6 +27,7 @@ pub struct Package { files: Vec<(String, String)>, yanked: bool, features: HashMap>, + local: bool, } struct Dependency { @@ -37,7 +38,7 @@ struct Dependency { features: Vec, } -fn init() { +pub fn init() { let config = paths::home().join(".cargo/config"); t!(fs::create_dir_all(config.parent().unwrap())); if fs::metadata(&config).is_ok() { @@ -45,16 +46,23 @@ fn init() { } t!(t!(File::create(&config)).write_all(format!(r#" [registry] - index = "{reg}" token = "api-token" + + [source.crates-io] + registry = 'https://wut' + replace-with = 'dummy-registry' + + [source.dummy-registry] + registry = '{reg}' "#, reg = registry()).as_bytes())); // Init a new registry repo(®istry_path()) .file("config.json", &format!(r#" - {{"dl":"{}","api":""}} + {{"dl":"{0}","api":"{0}"}} "#, dl_url())) .build(); + fs::create_dir_all(dl_path().join("api/v1/crates")).unwrap(); } impl Package { @@ -67,9 +75,15 @@ impl Package { files: Vec::new(), yanked: false, features: HashMap::new(), + local: false, } } + pub fn local(&mut self, local: bool) -> &mut Package { + 
self.local = local; + self + } + pub fn file(&mut self, name: &str, contents: &str) -> &mut Package { self.files.push((name.to_string(), contents.to_string())); self @@ -118,7 +132,7 @@ impl Package { self } - pub fn publish(&self) { + pub fn publish(&self) -> String { self.make_archive(); // Figure out what we're going to write into the index @@ -155,7 +169,11 @@ impl Package { }; // Write file/line in the index - let dst = registry_path().join(&file); + let dst = if self.local { + registry_path().join("index").join(&file) + } else { + registry_path().join(&file) + }; let mut prev = String::new(); let _ = File::open(&dst).and_then(|mut f| f.read_to_string(&mut prev)); t!(fs::create_dir_all(dst.parent().unwrap())); @@ -163,20 +181,24 @@ impl Package { .write_all((prev + &line[..] + "\n").as_bytes())); // Add the new file to the index - let repo = t!(git2::Repository::open(®istry_path())); - let mut index = t!(repo.index()); - t!(index.add_path(Path::new(&file))); - t!(index.write()); - let id = t!(index.write_tree()); - - // Commit this change - let tree = t!(repo.find_tree(id)); - let sig = t!(repo.signature()); - let parent = t!(repo.refname_to_id("refs/heads/master")); - let parent = t!(repo.find_commit(parent)); - t!(repo.commit(Some("HEAD"), &sig, &sig, - "Another commit", &tree, - &[&parent])); + if !self.local { + let repo = t!(git2::Repository::open(®istry_path())); + let mut index = t!(repo.index()); + t!(index.add_path(Path::new(&file))); + t!(index.write()); + let id = t!(index.write_tree()); + + // Commit this change + let tree = t!(repo.find_tree(id)); + let sig = t!(repo.signature()); + let parent = t!(repo.refname_to_id("refs/heads/master")); + let parent = t!(repo.find_commit(parent)); + t!(repo.commit(Some("HEAD"), &sig, &sig, + "Another commit", &tree, + &[&parent])); + } + + return cksum } fn make_archive(&self) { @@ -226,11 +248,16 @@ impl Package { } pub fn archive_dst(&self) -> PathBuf { - 
dl_path().join(&self.name).join(&self.vers).join("download") + if self.local { + registry_path().join(format!("{}-{}.crate", self.name, + self.vers)) + } else { + dl_path().join(&self.name).join(&self.vers).join("download") + } } } -fn cksum(s: &[u8]) -> String { +pub fn cksum(s: &[u8]) -> String { let mut sha = Sha256::new(); sha.update(s); sha.finish().to_hex() diff --git a/tests/cfg.rs b/tests/cfg.rs index adbdfc3e67a..e8de0101beb 100644 --- a/tests/cfg.rs +++ b/tests/cfg.rs @@ -221,8 +221,8 @@ fn works_through_the_registry() { [UPDATING] registry [..] [DOWNLOADING] [..] [DOWNLOADING] [..] -[COMPILING] foo v0.1.0 ([..]) -[COMPILING] bar v0.1.0 ([..]) +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.1.0 [COMPILING] a v0.0.1 ([..]) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] ")); diff --git a/tests/directory.rs b/tests/directory.rs new file mode 100644 index 00000000000..9d6dcf1477f --- /dev/null +++ b/tests/directory.rs @@ -0,0 +1,335 @@ +#[macro_use] +extern crate cargotest; +extern crate hamcrest; +extern crate rustc_serialize; + +use std::collections::HashMap; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::str; + +use rustc_serialize::json; + +use cargotest::support::{project, execs, ProjectBuilder}; +use cargotest::support::paths; +use cargotest::support::registry::{Package, cksum}; +use hamcrest::assert_that; + +fn setup() { + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(t!(File::create(root.join(".cargo/config"))).write_all(br#" + [source.crates-io] + registry = 'https://wut' + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + directory = 'index' + "#)); +} + +struct VendorPackage { + p: Option, + cksum: Checksum, +} + +#[derive(RustcEncodable)] +struct Checksum { + package: String, + files: HashMap, +} + +impl VendorPackage { + fn new(name: &str) -> VendorPackage { + VendorPackage { + p: Some(project(&format!("index/{}", name))), + cksum: Checksum { + package: 
String::new(), + files: HashMap::new(), + }, + } + } + + fn file(&mut self, name: &str, contents: &str) -> &mut VendorPackage { + self.p = Some(self.p.take().unwrap().file(name, contents)); + self.cksum.files.insert(name.to_string(), cksum(contents.as_bytes())); + self + } + + fn build(&mut self) { + let p = self.p.take().unwrap(); + let json = json::encode(&self.cksum).unwrap(); + let p = p.file(".cargo-checksum.json", &json); + p.build(); + } +} + +#[test] +fn simple() { + setup(); + + VendorPackage::new("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .build(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", r#" + extern crate foo; + + pub fn bar() { + foo::foo(); + } + "#); + p.build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.1.0 ([..]bar) +[FINISHED] [..] +")); +} + +#[test] +fn not_there() { + setup(); + + project("index").build(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", r#" + extern crate foo; + + pub fn bar() { + foo::foo(); + } + "#); + p.build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: no matching package named `foo` found (required by `bar`) +location searched: [..] 
+version required: ^0.1.0 +")); +} + +#[test] +fn multiple() { + setup(); + + VendorPackage::new("foo-0.1.0") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .file(".cargo-checksum", "") + .build(); + + VendorPackage::new("foo-0.2.0") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.2.0" + authors = [] + "#) + .file("src/lib.rs", "pub fn foo() {}") + .file(".cargo-checksum", "") + .build(); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", r#" + extern crate foo; + + pub fn bar() { + foo::foo(); + } + "#); + p.build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.1.0 ([..]bar) +[FINISHED] [..] +")); +} + +#[test] +fn crates_io_then_directory() { + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", r#" + extern crate foo; + + pub fn bar() { + foo::foo(); + } + "#); + p.build(); + + let cksum = Package::new("foo", "0.1.0") + .file("src/lib.rs", "pub fn foo() -> u32 { 0 }") + .publish(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[UPDATING] registry `[..]` +[DOWNLOADING] foo v0.1.0 ([..]) +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.1.0 ([..]bar) +[FINISHED] [..] +")); + + setup(); + + let mut v = VendorPackage::new("foo"); + v.file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#); + v.file("src/lib.rs", "pub fn foo() -> u32 { 1 }"); + v.cksum.package = cksum; + v.build(); + + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.1.0 ([..]bar) +[FINISHED] [..] 
+")); +} + +#[test] +fn crates_io_then_bad_checksum() { + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + p.build(); + + Package::new("foo", "0.1.0").publish(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + setup(); + + VendorPackage::new("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: checksum for `foo v0.1.0` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `foo v0.1.0` is the same as when the lockfile was generated + +")); +} + +#[test] +fn bad_file_checksum() { + setup(); + + VendorPackage::new("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "") + .build(); + + let mut f = t!(File::create(paths::root().join("index/foo/src/lib.rs"))); + t!(f.write_all(b"fn foo() -> u32 { 0 }")); + + let p = project("bar") + .file("Cargo.toml", r#" + [package] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + p.build(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +error: the listed checksum of `[..]lib.rs` has changed: +expected: [..] +actual: [..] 
+ +directory sources are not intended to be edited, if modifications are \ +required then it is recommended that [replace] is used with a forked copy of \ +the source +")); +} diff --git a/tests/install.rs b/tests/install.rs index 8a967d1dc76..af6efcfad7f 100644 --- a/tests/install.rs +++ b/tests/install.rs @@ -26,7 +26,7 @@ fn pkg(name: &str, vers: &str) { extern crate {}; fn main() {{}} ", name)) - .publish() + .publish(); } #[test] @@ -36,8 +36,8 @@ fn simple() { assert_that(cargo_process("install").arg("foo"), execs().with_status(0).with_stderr(&format!("\ [UPDATING] registry `[..]` -[DOWNLOADING] foo v0.0.1 (registry file://[..]) -[COMPILING] foo v0.0.1 (registry file://[..]) +[DOWNLOADING] foo v0.0.1 (registry [..]) +[COMPILING] foo v0.0.1 [FINISHED] release [optimized] target(s) in [..] [INSTALLING] {home}[..]bin[..]foo[..] warning: be sure to add `[..]` to your PATH to be able to run the installed binaries @@ -61,8 +61,8 @@ fn pick_max_version() { assert_that(cargo_process("install").arg("foo"), execs().with_status(0).with_stderr(&format!("\ [UPDATING] registry `[..]` -[DOWNLOADING] foo v0.0.2 (registry file://[..]) -[COMPILING] foo v0.0.2 (registry file://[..]) +[DOWNLOADING] foo v0.0.2 (registry [..]) +[COMPILING] foo v0.0.2 [FINISHED] release [optimized] target(s) in [..] [INSTALLING] {home}[..]bin[..]foo[..] warning: be sure to add `[..]` to your PATH to be able to run the installed binaries @@ -77,7 +77,7 @@ fn missing() { assert_that(cargo_process("install").arg("bar"), execs().with_status(101).with_stderr("\ [UPDATING] registry [..] -[ERROR] could not find `bar` in `registry file://[..]` +[ERROR] could not find `bar` in `registry [..]` ")); } @@ -87,7 +87,7 @@ fn bad_version() { assert_that(cargo_process("install").arg("foo").arg("--vers=0.2.0"), execs().with_status(101).with_stderr("\ [UPDATING] registry [..] 
-[ERROR] could not find `foo` in `registry file://[..]` with version `0.2.0` +[ERROR] could not find `foo` in `registry [..]` with version `0.2.0` ")); } @@ -533,7 +533,7 @@ fn compile_failure() { assert_that(cargo_process("install").arg("--path").arg(p.root()), execs().with_status(101).with_stderr_contains("\ -[ERROR] failed to compile `foo v0.1.0 (file://[..])`, intermediate artifacts can be \ +[ERROR] failed to compile `foo v0.1.0 ([..])`, intermediate artifacts can be \ found at `[..]target` Caused by: @@ -583,9 +583,9 @@ fn list() { execs().with_status(0)); assert_that(cargo_process("install").arg("--list"), execs().with_status(0).with_stdout("\ -bar v0.2.1 (registry [..]): +bar v0.2.1: bar[..] -foo v0.0.1 (registry [..]): +foo v0.0.1: foo[..] ")); } @@ -606,7 +606,7 @@ fn uninstall_bin_does_not_exist() { execs().with_status(0)); assert_that(cargo_process("uninstall").arg("foo").arg("--bin=bar"), execs().with_status(101).with_stderr("\ -[ERROR] binary `bar[..]` not installed as part of `foo v0.0.1 ([..])` +[ERROR] binary `bar[..]` not installed as part of `foo v0.0.1` ")); } diff --git a/tests/local-registry.rs b/tests/local-registry.rs new file mode 100644 index 00000000000..b974f01c4c8 --- /dev/null +++ b/tests/local-registry.rs @@ -0,0 +1,350 @@ +#[macro_use] +extern crate cargotest; +extern crate hamcrest; + +use std::fs::{self, File}; +use std::io::prelude::*; + +use cargotest::support::paths::{self, CargoPathExt}; +use cargotest::support::registry::Package; +use cargotest::support::{project, execs}; +use hamcrest::assert_that; + +fn setup() { + let root = paths::root(); + t!(fs::create_dir(&root.join(".cargo"))); + t!(t!(File::create(root.join(".cargo/config"))).write_all(br#" + [source.crates-io] + registry = 'https://wut' + replace-with = 'my-awesome-local-registry' + + [source.my-awesome-local-registry] + local-registry = 'registry' + "#)); +} + +#[test] +fn simple() { + setup(); + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub 
fn foo() {}") + .publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.0.1" + "#) + .file("src/lib.rs", r#" + extern crate foo; + pub fn bar() { + foo::foo(); + } + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stderr(&format!("\ +[UNPACKING] foo v0.0.1 ([..]) +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.0.1 ({dir}) +[FINISHED] [..] +", + dir = p.url()))); + assert_that(p.cargo("build"), execs().with_status(0).with_stderr("\ +[FINISHED] [..] +")); + assert_that(p.cargo("test"), execs().with_status(0)); +} + +#[test] +fn multiple_versions() { + setup(); + Package::new("foo", "0.0.1").local(true).publish(); + Package::new("foo", "0.1.0") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/lib.rs", r#" + extern crate foo; + pub fn bar() { + foo::foo(); + } + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stderr(&format!("\ +[UNPACKING] foo v0.1.0 ([..]) +[COMPILING] foo v0.1.0 +[COMPILING] bar v0.0.1 ({dir}) +[FINISHED] [..] 
+", + dir = p.url()))); + + Package::new("foo", "0.2.0") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + assert_that(p.cargo("update").arg("-v"), + execs().with_status(0).with_stderr("\ +[UPDATING] foo v0.1.0 -> v0.2.0 +")); +} + +#[test] +fn multiple_names() { + setup(); + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("bar", "0.1.0") + .local(true) + .file("src/lib.rs", "pub fn bar() {}") + .publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + bar = "*" + "#) + .file("src/lib.rs", r#" + extern crate foo; + extern crate bar; + pub fn local() { + foo::foo(); + bar::bar(); + } + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stderr(&format!("\ +[UNPACKING] [..] +[UNPACKING] [..] +[COMPILING] [..] +[COMPILING] [..] +[COMPILING] local v0.0.1 ({dir}) +[FINISHED] [..] +", + dir = p.url()))); +} + +#[test] +fn interdependent() { + setup(); + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("bar", "0.1.0") + .local(true) + .dep("foo", "*") + .file("src/lib.rs", "extern crate foo; pub fn bar() {}") + .publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + bar = "*" + "#) + .file("src/lib.rs", r#" + extern crate foo; + extern crate bar; + pub fn local() { + foo::foo(); + bar::bar(); + } + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stderr(&format!("\ +[UNPACKING] [..] +[UNPACKING] [..] +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.1.0 +[COMPILING] local v0.0.1 ({dir}) +[FINISHED] [..] 
+", + dir = p.url()))); +} + +#[test] +fn path_dep_rewritten() { + setup(); + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("bar", "0.1.0") + .local(true) + .dep("foo", "*") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "foo", version = "*" } + "#) + .file("src/lib.rs", "extern crate foo; pub fn bar() {}") + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("foo/src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + bar = "*" + "#) + .file("src/lib.rs", r#" + extern crate foo; + extern crate bar; + pub fn local() { + foo::foo(); + bar::bar(); + } + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stderr(&format!("\ +[UNPACKING] [..] +[UNPACKING] [..] +[COMPILING] foo v0.0.1 +[COMPILING] bar v0.1.0 +[COMPILING] local v0.0.1 ({dir}) +[FINISHED] [..] +", + dir = p.url()))); +} + +#[test] +fn invalid_dir_bad() { + setup(); + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'https://wut' + replace-with = 'my-awesome-local-directory' + + [source.my-awesome-local-directory] + local-registry = '/path/to/nowhere' + "#); + + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr("\ +[ERROR] failed to load source for a dependency on `foo` + +Caused by: + Unable to update registry https://[..] 
+ +Caused by: + failed to update replaced source `registry https://[..]` + +Caused by: + local registry path is not a directory: [..]path[..]to[..]nowhere +")); +} + +#[test] +fn different_directory_replacing_the_registry_is_bad() { + setup(); + + // Move our test's .cargo/config to a temporary location and publish a + // registry package we're going to use first. + let config = paths::root().join(".cargo"); + let config_tmp = paths::root().join(".cargo-old"); + t!(fs::rename(&config, &config_tmp)); + + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/lib.rs", ""); + p.build(); + + // Generate a lock file against the crates.io registry + Package::new("foo", "0.0.1").publish(); + assert_that(p.cargo("build"), execs().with_status(0)); + + // Switch back to our directory source, and now that we're replacing + // crates.io make sure that this fails because we're replacing with a + // different checksum + config.rm_rf(); + t!(fs::rename(&config_tmp, &config)); + Package::new("foo", "0.0.1") + .file("src/lib.rs", "invalid") + .local(true) + .publish(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[ERROR] checksum for `foo v0.0.1` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. 
a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `foo v0.0.1` is the same as when the lockfile was generated + +")); +} diff --git a/tests/lockfile-compat.rs b/tests/lockfile-compat.rs new file mode 100644 index 00000000000..5462339fd91 --- /dev/null +++ b/tests/lockfile-compat.rs @@ -0,0 +1,278 @@ +#[macro_use] +extern crate cargotest; +extern crate hamcrest; + +use std::fs::File; +use std::io::prelude::*; + +use cargotest::support::git; +use cargotest::support::registry::Package; +use cargotest::support::{execs, project}; +use hamcrest::assert_that; + +#[test] +fn oldest_lockfile_still_works() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + p.build(); + + let lockfile = r#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +"#; + File::create(p.root().join("Cargo.lock")).unwrap() + .write_all(lockfile.as_bytes()).unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + let mut lock = String::new(); + File::open(p.root().join("Cargo.lock")).unwrap() + .read_to_string(&mut lock).unwrap(); + assert!(lock.starts_with(lockfile.trim())); +} + +#[test] +fn totally_wild_checksums_works() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + p.build(); + + File::create(p.root().join("Cargo.lock")).unwrap().write_all(br#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 
(registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum baz 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"checksum foo 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"#).unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + let mut lock = String::new(); + File::open(p.root().join("Cargo.lock")).unwrap() + .read_to_string(&mut lock).unwrap(); + assert!(lock.starts_with(r#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"#.trim())); +} + +#[test] +fn wrong_checksum_is_an_error() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + p.build(); + + t!(t!(File::create(p.root().join("Cargo.lock"))).write_all(br#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"#)); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr("\ +[UPDATING] registry `[..]` +error: checksum for `foo v0.1.0` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. 
a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `foo v0.1.0` is the same as when the lockfile was generated + +")); +} + +// If the checksum is unlisted in the lockfile (e.g. ) yet we can +// calculate it (e.g. it's a registry dep), then we should in theory just fill +// it in. +#[test] +fn unlisted_checksum_is_bad_if_we_calculate() { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + p.build(); + + t!(t!(File::create(p.root().join("Cargo.lock"))).write_all(br#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "" +"#)); + + assert_that(p.cargo("fetch"), + execs().with_status(101).with_stderr("\ +[UPDATING] registry `[..]` +error: checksum for `foo v0.1.0` was not previously calculated, but a checksum \ +could now be calculated + +this could be indicative of a few possible situations: + + * the source `[..]` did not previously support checksums, + but was replaced with one that does + * newer Cargo implementations know how to checksum this source, but this + older implementation does not + * the lock file is corrupt + +")); +} + +// If the checksum is listed in the lockfile yet we cannot calculate it (e.g. +// git dependencies as of today), then make sure we choke. 
+#[test] +fn listed_checksum_bad_if_we_cannot_compute() { + let git = git::new("foo", |p| { + p.file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "") + }).unwrap(); + + let p = project("bar") + .file("Cargo.toml", &format!(r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = {{ git = '{}' }} + "#, git.url())) + .file("src/lib.rs", ""); + p.build(); + + let lockfile = format!(r#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (git+{0})" +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "git+{0}" + +[metadata] +"checksum foo 0.1.0 (git+{0})" = "checksum" +"#, git.url()); + File::create(p.root().join("Cargo.lock")).unwrap() + .write_all(lockfile.as_bytes()).unwrap(); + + assert_that(p.cargo("fetch"), + execs().with_status(101).with_stderr("\ +[UPDATING] git repository `[..]` +error: checksum for `foo v0.1.0 ([..])` could not be calculated, but a \ +checksum is listed in the existing lock file[..] 
+ +this could be indicative of a few possible situations: + + * the source `[..]` supports checksums, + but was replaced with one that doesn't + * the lock file is corrupt + +unable to verify that `foo v0.1.0 ([..])` is the same as when the lockfile was generated + +")); +} diff --git a/tests/metadata.rs b/tests/metadata.rs index 478acf883ab..4d5df0f414f 100644 --- a/tests/metadata.rs +++ b/tests/metadata.rs @@ -75,10 +75,10 @@ fn cargo_metadata_with_deps_and_version() { { "dependencies": [], "features": {}, - "id": "baz 0.0.1 (registry+file:[..])", + "id": "baz 0.0.1 (registry+[..])", "manifest_path": "[..]Cargo.toml", "name": "baz", - "source": "registry+file:[..]", + "source": "registry+[..]", "targets": [ { "kind": [ @@ -98,16 +98,16 @@ fn cargo_metadata_with_deps_and_version() { "name": "baz", "optional": false, "req": "^0.0.1", - "source": "registry+file:[..]", + "source": "registry+[..]", "target": null, "uses_default_features": true } ], "features": {}, - "id": "bar 0.0.1 (registry+file:[..])", + "id": "bar 0.0.1 (registry+[..])", "manifest_path": "[..]Cargo.toml", "name": "bar", - "source": "registry+file:[..]", + "source": "registry+[..]", "targets": [ { "kind": [ @@ -127,7 +127,7 @@ fn cargo_metadata_with_deps_and_version() { "name": "bar", "optional": false, "req": "*", - "source": "registry+file:[..]", + "source": "registry+[..]", "target": null, "uses_default_features": true } @@ -153,19 +153,19 @@ fn cargo_metadata_with_deps_and_version() { "nodes": [ { "dependencies": [ - "bar 0.0.1 (registry+file:[..])" + "bar 0.0.1 (registry+[..])" ], "id": "foo 0.5.0 (path+file:[..]foo)" }, { "dependencies": [ - "baz 0.0.1 (registry+file:[..])" + "baz 0.0.1 (registry+[..])" ], - "id": "bar 0.0.1 (registry+file:[..])" + "id": "bar 0.0.1 (registry+[..])" }, { "dependencies": [], - "id": "baz 0.0.1 (registry+file:[..])" + "id": "baz 0.0.1 (registry+[..])" } ], "root": "foo 0.5.0 (path+file:[..]foo)" diff --git a/tests/overrides.rs b/tests/overrides.rs index 
a0341ca7d02..5eebed0bfa1 100644 --- a/tests/overrides.rs +++ b/tests/overrides.rs @@ -3,7 +3,7 @@ extern crate hamcrest; use cargotest::support::git; use cargotest::support::paths; -use cargotest::support::registry::{registry, Package}; +use cargotest::support::registry::Package; use cargotest::support::{execs, project}; use hamcrest::assert_that; @@ -145,7 +145,7 @@ fn transitive() { [UPDATING] git repository `[..]` [DOWNLOADING] bar v0.2.0 (registry [..]) [COMPILING] foo v0.1.0 (file://[..]) -[COMPILING] bar v0.2.0 (registry [..]) +[COMPILING] bar v0.2.0 [COMPILING] local v0.0.1 (file://[..]) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] ")); @@ -343,7 +343,7 @@ fn override_adds_some_deps() { [UPDATING] registry `file://[..]` [UPDATING] git repository `[..]` [DOWNLOADING] foo v0.1.1 (registry [..]) -[COMPILING] foo v0.1.1 (registry [..]) +[COMPILING] foo v0.1.1 [COMPILING] bar v0.1.0 ([..]) [COMPILING] local v0.0.1 (file://[..]) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] @@ -356,7 +356,9 @@ fn override_adds_some_deps() { execs().with_status(0).with_stderr("\ [UPDATING] git repository `file://[..]` ")); - assert_that(p.cargo("update").arg("-p").arg(&format!("{}#bar", registry())), + assert_that(p.cargo("update") + .arg("-p") + .arg("https://github.com/rust-lang/crates.io-index#bar"), execs().with_status(0).with_stderr("\ [UPDATING] registry `file://[..]` ")); @@ -532,8 +534,10 @@ fn multiple_specs() { [replace] "foo:0.1.0" = {{ git = '{0}' }} - "{1}#foo:0.1.0" = {{ git = '{0}' }} - "#, foo.url(), registry())) + + [replace."https://github.com/rust-lang/crates.io-index#foo:0.1.0"] + git = '{0}' + "#, foo.url())) .file("src/lib.rs", ""); assert_that(p.cargo_process("build"), @@ -545,7 +549,7 @@ error: overlapping replacement specifications found: * [..] * [..] 
-both specifications match: foo v0.1.0 ([..]) +both specifications match: foo v0.1.0 ")); } diff --git a/tests/publish.rs b/tests/publish.rs index ad02f97f181..38ccad219ff 100644 --- a/tests/publish.rs +++ b/tests/publish.rs @@ -25,13 +25,12 @@ fn upload() -> Url { Url::from_file_path(&*upload_path()).ok().unwrap() } fn setup() { let config = paths::root().join(".cargo/config"); - fs::create_dir_all(config.parent().unwrap()).unwrap(); - File::create(&config).unwrap().write_all(&format!(r#" + t!(fs::create_dir_all(config.parent().unwrap())); + t!(t!(File::create(&config)).write_all(&format!(r#" [registry] - index = "{reg}" token = "api-token" - "#, reg = registry()).as_bytes()).unwrap(); - fs::create_dir_all(&upload_path().join("api/v1/crates")).unwrap(); + "#).as_bytes())); + t!(fs::create_dir_all(&upload_path().join("api/v1/crates"))); repo(®istry_path()) .file("config.json", &format!(r#"{{ @@ -56,7 +55,8 @@ fn simple() { "#) .file("src/main.rs", "fn main() {}"); - assert_that(p.cargo_process("publish").arg("--no-verify"), + assert_that(p.cargo_process("publish").arg("--no-verify") + .arg("--host").arg(registry().to_string()), execs().with_status(0).with_stderr(&format!("\ [UPDATING] registry `{reg}` [WARNING] manifest has no documentation, [..] @@ -110,7 +110,8 @@ fn git_deps() { "#) .file("src/main.rs", "fn main() {}"); - assert_that(p.cargo_process("publish").arg("-v").arg("--no-verify"), + assert_that(p.cargo_process("publish").arg("-v").arg("--no-verify") + .arg("--host").arg(registry().to_string()), execs().with_status(101).with_stderr("\ [UPDATING] registry [..] [ERROR] all dependencies must come from the same source. @@ -143,7 +144,8 @@ fn path_dependency_no_version() { "#) .file("bar/src/lib.rs", ""); - assert_that(p.cargo_process("publish"), + assert_that(p.cargo_process("publish") + .arg("--host").arg(registry().to_string()), execs().with_status(101).with_stderr("\ [UPDATING] registry [..] 
[ERROR] all path dependencies must have a version specified when publishing. @@ -167,7 +169,8 @@ fn unpublishable_crate() { "#) .file("src/main.rs", "fn main() {}"); - assert_that(p.cargo_process("publish"), + assert_that(p.cargo_process("publish") + .arg("--host").arg(registry().to_string()), execs().with_status(101).with_stderr("\ [ERROR] some crates cannot be published. `foo` is marked as unpublishable @@ -195,7 +198,8 @@ fn dont_publish_dirty() { let p = project("foo"); t!(File::create(p.root().join("bar"))); - assert_that(p.cargo("publish"), + assert_that(p.cargo("publish") + .arg("--host").arg(registry().to_string()), execs().with_status(101).with_stderr("\ [UPDATING] registry `[..]` error: 1 dirty files found in the working directory: @@ -226,7 +230,8 @@ fn publish_clean() { .build(); let p = project("foo"); - assert_that(p.cargo("publish"), + assert_that(p.cargo("publish") + .arg("--host").arg(registry().to_string()), execs().with_status(0)); } @@ -251,7 +256,8 @@ fn publish_in_sub_repo() { let p = project("foo"); t!(File::create(p.root().join("baz"))); - assert_that(p.cargo("publish").cwd(p.root().join("bar")), + assert_that(p.cargo("publish").cwd(p.root().join("bar")) + .arg("--host").arg(registry().to_string()), execs().with_status(0)); } @@ -277,7 +283,8 @@ fn publish_when_ignored() { let p = project("foo"); t!(File::create(p.root().join("baz"))); - assert_that(p.cargo("publish"), + assert_that(p.cargo("publish") + .arg("--host").arg(registry().to_string()), execs().with_status(0)); } @@ -301,7 +308,8 @@ fn ignore_when_crate_ignored() { .nocommit_file("bar/src/main.rs", "fn main() {}"); let p = project("foo"); t!(File::create(p.root().join("bar/baz"))); - assert_that(p.cargo("publish").cwd(p.root().join("bar")), + assert_that(p.cargo("publish").cwd(p.root().join("bar")) + .arg("--host").arg(registry().to_string()), execs().with_status(0)); } @@ -324,7 +332,8 @@ fn new_crate_rejected() { .nocommit_file("src/main.rs", "fn main() {}"); let p = 
project("foo"); t!(File::create(p.root().join("baz"))); - assert_that(p.cargo("publish"), + assert_that(p.cargo("publish") + .arg("--host").arg(registry().to_string()), execs().with_status(101)); } @@ -343,9 +352,10 @@ fn dry_run() { "#) .file("src/main.rs", "fn main() {}"); - assert_that(p.cargo_process("publish").arg("--dry-run"), + assert_that(p.cargo_process("publish").arg("--dry-run") + .arg("--host").arg(registry().to_string()), execs().with_status(0).with_stderr(&format!("\ -[UPDATING] registry `{reg}` +[UPDATING] registry `[..]` [WARNING] manifest has no documentation, [..] [PACKAGING] foo v0.0.1 ({dir}) [VERIFYING] foo v0.0.1 ({dir}) @@ -354,8 +364,7 @@ fn dry_run() { [UPLOADING] foo v0.0.1 ({dir}) [WARNING] aborting upload due to dry run ", - dir = p.url(), - reg = registry()))); + dir = p.url()))); // Ensure the API request wasn't actually made assert!(!upload_path().join("api/v1/crates/new").exists()); diff --git a/tests/registry.rs b/tests/registry.rs index 93d61ea6db3..0ddb55999d5 100644 --- a/tests/registry.rs +++ b/tests/registry.rs @@ -32,7 +32,7 @@ fn simple() { execs().with_status(0).with_stderr(&format!("\ [UPDATING] registry `{reg}` [DOWNLOADING] bar v0.0.1 (registry file://[..]) -[COMPILING] bar v0.0.1 (registry file://[..]) +[COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs ", @@ -43,8 +43,8 @@ fn simple() { assert_that(p.cargo_process("build"), execs().with_status(0).with_stderr(&format!("\ [UPDATING] registry `{reg}` -[..] bar v0.0.1 (registry file://[..]) -[..] foo v0.0.1 ({dir}) +[COMPILING] bar v0.0.1 +[COMPILING] foo v0.0.1 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs ", dir = p.url(), @@ -73,8 +73,8 @@ fn deps() { [UPDATING] registry `{reg}` [DOWNLOADING] [..] v0.0.1 (registry file://[..]) [DOWNLOADING] [..] 
v0.0.1 (registry file://[..]) -[COMPILING] baz v0.0.1 (registry file://[..]) -[COMPILING] bar v0.0.1 (registry file://[..]) +[COMPILING] baz v0.0.1 +[COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs ", @@ -102,7 +102,7 @@ fn nonexistent() { execs().with_status(101).with_stderr("\ [UPDATING] registry [..] [ERROR] no matching package named `nonexistent` found (required by `foo`) -location searched: registry file://[..] +location searched: registry [..] version required: >= 0.0.0 ")); } @@ -127,7 +127,7 @@ fn wrong_version() { assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr_contains("\ [ERROR] no matching package named `foo` found (required by `foo`) -location searched: registry file://[..] +location searched: registry [..] version required: >= 1.0.0 versions found: 0.0.2, 0.0.1 ")); @@ -138,7 +138,7 @@ versions found: 0.0.2, 0.0.1 assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr_contains("\ [ERROR] no matching package named `foo` found (required by `foo`) -location searched: registry file://[..] +location searched: registry [..] version required: >= 1.0.0 versions found: 0.0.4, 0.0.3, 0.0.2, ... ")); @@ -160,7 +160,7 @@ fn bad_cksum() { let pkg = Package::new("bad-cksum", "0.0.1"); pkg.publish(); - File::create(&pkg.archive_dst()).unwrap(); + t!(File::create(&pkg.archive_dst())); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(101).with_stderr("\ @@ -169,7 +169,7 @@ fn bad_cksum() { [ERROR] unable to get packages from source Caused by: - failed to download package `bad-cksum v0.0.1 (registry file://[..])` from [..] 
+ failed to download replaced source `registry https://[..]` Caused by: failed to verify the checksum of `bad-cksum v0.0.1 (registry file://[..])` @@ -195,7 +195,7 @@ fn update_registry() { assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr_contains("\ [ERROR] no matching package named `notyet` found (required by `foo`) -location searched: registry file://[..] +location searched: registry [..] version required: >= 0.0.0 ")); @@ -205,7 +205,7 @@ version required: >= 0.0.0 execs().with_status(0).with_stderr(&format!("\ [UPDATING] registry `{reg}` [DOWNLOADING] notyet v0.0.1 (registry file://[..]) -[COMPILING] notyet v0.0.1 (registry file://[..]) +[COMPILING] notyet v0.0.1 [COMPILING] foo v0.0.1 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs ", @@ -247,7 +247,7 @@ fn package_with_path_deps() { Caused by: no matching package named `notyet` found (required by `foo`) -location searched: registry file://[..] +location searched: registry [..] version required: ^0.0.1 ")); @@ -259,7 +259,7 @@ version required: ^0.0.1 [VERIFYING] foo v0.0.1 ({dir}) [UPDATING] registry `[..]` [DOWNLOADING] notyet v0.0.1 (registry file://[..]) -[COMPILING] notyet v0.0.1 (registry file://[..]) +[COMPILING] notyet v0.0.1 [COMPILING] foo v0.0.1 ({dir}[..]) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs ", dir = p.url()))); @@ -286,7 +286,7 @@ fn lockfile_locks() { execs().with_status(0).with_stderr(&format!("\ [UPDATING] registry `[..]` [DOWNLOADING] bar v0.0.1 (registry file://[..]) -[COMPILING] bar v0.0.1 (registry file://[..]) +[COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs ", @@ -322,8 +322,8 @@ fn lockfile_locks_transitively() { [UPDATING] registry `[..]` [DOWNLOADING] [..] v0.0.1 (registry file://[..]) [DOWNLOADING] [..] 
v0.0.1 (registry file://[..]) -[COMPILING] baz v0.0.1 (registry file://[..]) -[COMPILING] bar v0.0.1 (registry file://[..]) +[COMPILING] baz v0.0.1 +[COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs ", @@ -362,8 +362,8 @@ fn yanks_are_not_used() { [UPDATING] registry `[..]` [DOWNLOADING] [..] v0.0.1 (registry file://[..]) [DOWNLOADING] [..] v0.0.1 (registry file://[..]) -[COMPILING] baz v0.0.1 (registry file://[..]) -[COMPILING] bar v0.0.1 (registry file://[..]) +[COMPILING] baz v0.0.1 +[COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs ", @@ -392,7 +392,7 @@ fn relying_on_a_yank_is_bad() { assert_that(p.cargo("build"), execs().with_status(101).with_stderr_contains("\ [ERROR] no matching package named `baz` found (required by `bar`) -location searched: registry file://[..] +location searched: registry [..] version required: = 0.0.2 versions found: 0.0.1 ")); @@ -418,7 +418,7 @@ fn yanks_in_lockfiles_are_ok() { assert_that(p.cargo("build"), execs().with_status(0)); - fs::remove_dir_all(®istry::registry_path().join("3")).unwrap(); + registry::registry_path().join("3").rm_rf(); Package::new("bar", "0.0.1").yanked(true).publish(); @@ -428,7 +428,7 @@ fn yanks_in_lockfiles_are_ok() { assert_that(p.cargo("update"), execs().with_status(101).with_stderr_contains("\ [ERROR] no matching package named `bar` found (required by `foo`) -location searched: registry file://[..] +location searched: registry [..] version required: * ")); } @@ -490,14 +490,14 @@ fn update_lockfile() { .arg("-p").arg("bar").arg("--precise").arg("0.0.2"), execs().with_status(0).with_stderr("\ [UPDATING] registry `[..]` -[UPDATING] bar v0.0.1 (registry file://[..]) -> v0.0.2 +[UPDATING] bar v0.0.1 -> v0.0.2 ")); println!("0.0.2 build"); assert_that(p.cargo("build"), execs().with_status(0).with_stderr(&format!("\ [DOWNLOADING] [..] 
v0.0.2 (registry file://[..]) -[COMPILING] bar v0.0.2 (registry file://[..]) +[COMPILING] bar v0.0.2 [COMPILING] foo v0.0.1 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs ", @@ -508,14 +508,14 @@ fn update_lockfile() { .arg("-p").arg("bar"), execs().with_status(0).with_stderr("\ [UPDATING] registry `[..]` -[UPDATING] bar v0.0.2 (registry file://[..]) -> v0.0.3 +[UPDATING] bar v0.0.2 -> v0.0.3 ")); println!("0.0.3 build"); assert_that(p.cargo("build"), execs().with_status(0).with_stderr(&format!("\ [DOWNLOADING] [..] v0.0.3 (registry file://[..]) -[COMPILING] bar v0.0.3 (registry file://[..]) +[COMPILING] bar v0.0.3 [COMPILING] foo v0.0.1 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs ", @@ -528,8 +528,8 @@ fn update_lockfile() { .arg("-p").arg("bar"), execs().with_status(0).with_stderr("\ [UPDATING] registry `[..]` -[UPDATING] bar v0.0.3 (registry file://[..]) -> v0.0.4 -[ADDING] spam v0.2.5 (registry file://[..]) +[UPDATING] bar v0.0.3 -> v0.0.4 +[ADDING] spam v0.2.5 ")); println!("new dependencies update"); @@ -538,8 +538,8 @@ fn update_lockfile() { .arg("-p").arg("bar"), execs().with_status(0).with_stderr("\ [UPDATING] registry `[..]` -[UPDATING] bar v0.0.4 (registry file://[..]) -> v0.0.5 -[REMOVING] spam v0.2.5 (registry file://[..]) +[UPDATING] bar v0.0.4 -> v0.0.5 +[REMOVING] spam v0.2.5 ")); } @@ -565,7 +565,7 @@ fn dev_dependency_not_used() { execs().with_status(0).with_stderr(&format!("\ [UPDATING] registry `[..]` [DOWNLOADING] [..] v0.0.1 (registry file://[..]) -[COMPILING] bar v0.0.1 (registry file://[..]) +[COMPILING] bar v0.0.1 [COMPILING] foo v0.0.1 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] 
secs ", @@ -575,7 +575,7 @@ fn dev_dependency_not_used() { #[test] fn login_with_no_cargo_dir() { let home = paths::home().join("new-home"); - fs::create_dir(&home).unwrap(); + t!(fs::create_dir(&home)); assert_that(cargo_process().arg("login").arg("foo").arg("-v"), execs().with_status(0)); } @@ -633,14 +633,14 @@ fn updating_a_dep() { execs().with_status(0).with_stderr(&format!("\ [UPDATING] registry `[..]` [DOWNLOADING] bar v0.0.1 (registry file://[..]) -[COMPILING] bar v0.0.1 (registry file://[..]) +[COMPILING] bar v0.0.1 [COMPILING] a v0.0.1 ({dir}/a) [COMPILING] foo v0.0.1 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs ", dir = p.url()))); - t!(File::create(&p.root().join("a/Cargo.toml"))).write_all(br#" + t!(t!(File::create(&p.root().join("a/Cargo.toml"))).write_all(br#" [project] name = "a" version = "0.0.1" @@ -648,7 +648,7 @@ fn updating_a_dep() { [dependencies] bar = "0.1.0" - "#).unwrap(); + "#)); Package::new("bar", "0.1.0").publish(); println!("second"); @@ -656,7 +656,7 @@ fn updating_a_dep() { execs().with_status(0).with_stderr(&format!("\ [UPDATING] registry `[..]` [DOWNLOADING] bar v0.1.0 (registry file://[..]) -[COMPILING] bar v0.1.0 (registry file://[..]) +[COMPILING] bar v0.1.0 [COMPILING] a v0.0.1 ({dir}/a) [COMPILING] foo v0.0.1 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs @@ -702,7 +702,7 @@ fn git_and_registry_dep() { [UPDATING] [..] [UPDATING] [..] [DOWNLOADING] a v0.0.1 (registry file://[..]) -[COMPILING] a v0.0.1 (registry [..]) +[COMPILING] a v0.0.1 [COMPILING] b v0.0.1 ([..]) [COMPILING] foo v0.0.1 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs @@ -717,6 +717,8 @@ fn git_and_registry_dep() { #[test] fn update_publish_then_update() { + // First generate a Cargo.lock and a clone of the registry index at the + // "head" of the current registry. 
let p = project("foo") .file("Cargo.toml", r#" [project] @@ -729,27 +731,44 @@ fn update_publish_then_update() { "#) .file("src/main.rs", "fn main() {}"); p.build(); - Package::new("a", "0.1.0").publish(); - assert_that(p.cargo("build"), execs().with_status(0)); + // Next, publish a new package and back up the copy of the registry we just + // created. Package::new("a", "0.1.1").publish(); + let registry = paths::home().join(".cargo/registry"); + let backup = paths::root().join("registry-backup"); + t!(fs::rename(®istry, &backup)); - let lock = p.root().join("Cargo.lock"); - let mut s = String::new(); - File::open(&lock).unwrap().read_to_string(&mut s).unwrap(); - File::create(&lock).unwrap() - .write_all(s.replace("0.1.0", "0.1.1").as_bytes()).unwrap(); - println!("second"); + // Generate a Cargo.lock with the newer version, and then move the old copy + // of the registry back into place. + let p2 = project("foo2") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] - fs::remove_dir_all(&p.root().join("target")).unwrap(); + [dependencies] + a = "0.1.1" + "#) + .file("src/main.rs", "fn main() {}"); + assert_that(p2.cargo_process("build"), + execs().with_status(0)); + registry.rm_rf(); + t!(fs::rename(&backup, ®istry)); + t!(fs::rename(p2.root().join("Cargo.lock"), p.root().join("Cargo.lock"))); + + // Finally, build the first project again (with our newer Cargo.lock) which + // should force an update of the old registry, download the new crate, and + // then build everything again. assert_that(p.cargo("build"), execs().with_status(0).with_stderr(&format!("\ [UPDATING] [..] [DOWNLOADING] a v0.1.1 (registry file://[..]) -[COMPILING] a v0.1.1 (registry [..]) +[COMPILING] a v0.1.1 [COMPILING] foo v0.5.0 ({dir}) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] 
secs ", @@ -809,15 +828,15 @@ fn update_transitive_dependency() { execs().with_status(0) .with_stderr("\ [UPDATING] registry `[..]` -[UPDATING] b v0.1.0 (registry [..]) -> v0.1.1 +[UPDATING] b v0.1.0 -> v0.1.1 ")); assert_that(p.cargo("build"), execs().with_status(0) .with_stderr("\ [DOWNLOADING] b v0.1.1 (registry file://[..]) -[COMPILING] b v0.1.1 (registry [..]) -[COMPILING] a v0.1.0 (registry [..]) +[COMPILING] b v0.1.1 +[COMPILING] a v0.1.0 [COMPILING] foo v0.5.0 ([..]) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] secs ")); @@ -892,15 +911,15 @@ fn update_multiple_packages() { execs().with_status(0) .with_stderr("\ [UPDATING] registry `[..]` -[UPDATING] a v0.1.0 (registry [..]) -> v0.1.1 -[UPDATING] b v0.1.0 (registry [..]) -> v0.1.1 +[UPDATING] a v0.1.0 -> v0.1.1 +[UPDATING] b v0.1.0 -> v0.1.1 ")); assert_that(p.cargo("update").arg("-pb").arg("-pc"), execs().with_status(0) .with_stderr("\ [UPDATING] registry `[..]` -[UPDATING] c v0.1.0 (registry [..]) -> v0.1.1 +[UPDATING] c v0.1.0 -> v0.1.1 ")); assert_that(p.cargo("build"), @@ -912,11 +931,11 @@ fn update_multiple_packages() { .with_stderr_contains("\ [DOWNLOADING] c v0.1.1 (registry file://[..])") .with_stderr_contains("\ -[COMPILING] a v0.1.1 (registry [..])") +[COMPILING] a v0.1.1") .with_stderr_contains("\ -[COMPILING] b v0.1.1 (registry [..])") +[COMPILING] b v0.1.1") .with_stderr_contains("\ -[COMPILING] c v0.1.1 (registry [..])") +[COMPILING] c v0.1.1") .with_stderr_contains("\ [COMPILING] foo v0.5.0 ([..])")); } @@ -1034,7 +1053,7 @@ fn only_download_relevant() { execs().with_status(0).with_stderr("\ [UPDATING] registry `[..]` [DOWNLOADING] baz v0.1.0 ([..]) -[COMPILING] baz v0.1.0 ([..]) +[COMPILING] baz v0.1.0 [COMPILING] bar v0.5.0 ([..]) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] 
secs ")); @@ -1106,16 +1125,12 @@ fn disallow_network() { assert_that(p.cargo("build").arg("--frozen"), execs().with_status(101).with_stderr("\ -[UPDATING] registry `[..]` error: failed to load source for a dependency on `foo` Caused by: Unable to update registry [..] Caused by: - failed to fetch `[..]` - -Caused by: - attempting to update a git repository, but --frozen was specified + attempting to make an HTTP request, but --frozen was specified ")); } diff --git a/tests/search.rs b/tests/search.rs index 19f04a33bdf..b2f1c8ad1bc 100644 --- a/tests/search.rs +++ b/tests/search.rs @@ -22,10 +22,6 @@ fn api() -> Url { Url::from_file_path(&*api_path()).ok().unwrap() } fn setup() { let config = paths::root().join(".cargo/config"); fs::create_dir_all(config.parent().unwrap()).unwrap(); - File::create(&config).unwrap().write_all(format!(r#" - [registry] - index = "{reg}" - "#, reg = registry()).as_bytes()).unwrap(); fs::create_dir_all(&api_path().join("api/v1")).unwrap(); repo(®istry_path()) @@ -85,7 +81,8 @@ fn simple() { .write_all(contents.as_bytes()).unwrap(); } - assert_that(cargo_process("search").arg("postgres"), + assert_that(cargo_process("search").arg("postgres") + .arg("--host").arg(registry().to_string()), execs().with_status(0) .with_stderr("\ [UPDATING] registry `[..]`") @@ -136,7 +133,8 @@ fn multiple_query_params() { .write_all(contents.as_bytes()).unwrap(); } - assert_that(cargo_process("search").arg("postgres").arg("sql"), + assert_that(cargo_process("search").arg("postgres").arg("sql") + .arg("--host").arg(registry().to_string()), execs().with_status(0) .with_stderr("\ [UPDATING] registry `[..]`") diff --git a/tests/workspaces.rs b/tests/workspaces.rs index dcec77ae3f4..8ded09b772a 100644 --- a/tests/workspaces.rs +++ b/tests/workspaces.rs @@ -502,7 +502,7 @@ fn share_dependencies() { .with_stderr("\ [UPDATING] registry `[..]` [DOWNLOADING] dep1 v0.1.3 ([..]) -[COMPILING] dep1 v0.1.3 ([..]) +[COMPILING] dep1 v0.1.3 [COMPILING] foo v0.1.0 ([..]) 
[FINISHED] debug [unoptimized + debuginfo] target(s) in [..] ")); @@ -587,7 +587,7 @@ fn lock_works_for_everyone() { execs().with_status(0) .with_stderr("\ [DOWNLOADING] dep2 v0.1.0 ([..]) -[COMPILING] dep2 v0.1.0 ([..]) +[COMPILING] dep2 v0.1.0 [COMPILING] foo v0.1.0 ([..]) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] ")); @@ -596,7 +596,7 @@ fn lock_works_for_everyone() { execs().with_status(0) .with_stderr("\ [DOWNLOADING] dep1 v0.1.0 ([..]) -[COMPILING] dep1 v0.1.0 ([..]) +[COMPILING] dep1 v0.1.0 [COMPILING] bar v0.1.0 ([..]) [FINISHED] debug [unoptimized + debuginfo] target(s) in [..] "));