From e5a11190b31957b488bc1e2f0416ebab4b4658ec Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Sun, 25 Nov 2018 12:31:27 -0500 Subject: [PATCH 1/4] SourceId is copy, clippy thinks we don't need &SourceId or SourceId.clone() --- src/bin/cargo/cli.rs | 9 +- src/bin/cargo/commands/git_checkout.rs | 2 +- src/bin/cargo/commands/install.rs | 5 +- src/bin/cargo/commands/login.rs | 7 +- src/cargo/core/dependency.rs | 63 ++++--- src/cargo/core/interning.rs | 2 +- src/cargo/core/manifest.rs | 32 ++-- src/cargo/core/package.rs | 218 +++++++++++++---------- src/cargo/core/package_id.rs | 26 +-- src/cargo/core/package_id_spec.rs | 13 +- src/cargo/core/profiles.rs | 43 +++-- src/cargo/core/registry.rs | 70 ++++---- src/cargo/core/resolver/context.rs | 8 +- src/cargo/core/resolver/encode.rs | 20 +-- src/cargo/core/source/mod.rs | 26 +-- src/cargo/core/source/source_id.rs | 77 ++++---- src/cargo/core/summary.rs | 92 +++++----- src/cargo/core/workspace.rs | 36 ++-- src/cargo/ops/cargo_generate_lockfile.rs | 8 +- src/cargo/ops/cargo_install.rs | 88 ++++----- src/cargo/ops/cargo_package.rs | 52 +++--- src/cargo/ops/cargo_read_manifest.rs | 8 +- src/cargo/ops/registry.rs | 45 +++-- src/cargo/ops/resolve.rs | 17 +- src/cargo/sources/config.rs | 12 +- src/cargo/sources/directory.rs | 16 +- src/cargo/sources/git/source.rs | 32 ++-- src/cargo/sources/path.rs | 34 ++-- src/cargo/sources/registry/index.rs | 61 ++++--- src/cargo/sources/registry/mod.rs | 63 ++++--- src/cargo/sources/registry/remote.rs | 46 +++-- src/cargo/sources/replaced.rs | 86 ++++----- src/cargo/util/toml/mod.rs | 30 ++-- tests/testsuite/search.rs | 39 ++-- tests/testsuite/support/resolver.rs | 24 ++- 35 files changed, 756 insertions(+), 654 deletions(-) diff --git a/src/bin/cargo/cli.rs b/src/bin/cargo/cli.rs index 33927c44500..ad1ddd85297 100644 --- a/src/bin/cargo/cli.rs +++ b/src/bin/cargo/cli.rs @@ -81,7 +81,7 @@ Run with 'cargo -Z [FLAG] [SUBCOMMAND]'" pub fn get_version_string(is_verbose: bool) -> String { let version = 
cargo::version(); - let mut version_string = String::from(version.to_string()); + let mut version_string = version.to_string(); version_string.push_str("\n"); if is_verbose { version_string.push_str(&format!( @@ -218,9 +218,10 @@ See 'cargo help ' for more information on a specific command.\n", opt( "verbose", "Use verbose output (-vv very verbose/build.rs output)", - ).short("v") - .multiple(true) - .global(true), + ) + .short("v") + .multiple(true) + .global(true), ) .arg( opt("quiet", "No output printed to stdout") diff --git a/src/bin/cargo/commands/git_checkout.rs b/src/bin/cargo/commands/git_checkout.rs index a9401f1059b..80b236293d7 100644 --- a/src/bin/cargo/commands/git_checkout.rs +++ b/src/bin/cargo/commands/git_checkout.rs @@ -28,7 +28,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let reference = GitReference::Branch(reference.to_string()); let source_id = SourceId::for_git(&url, reference)?; - let mut source = GitSource::new(&source_id, config)?; + let mut source = GitSource::new(source_id, config)?; source.update()?; diff --git a/src/bin/cargo/commands/install.rs b/src/bin/cargo/commands/install.rs index ba3c6699680..b6348d747d4 100644 --- a/src/bin/cargo/commands/install.rs +++ b/src/bin/cargo/commands/install.rs @@ -82,7 +82,8 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { compile_opts.build_config.release = !args.is_present("debug"); - let krates = args.values_of("crate") + let krates = args + .values_of("crate") .unwrap_or_default() .collect::>(); @@ -120,7 +121,7 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { ops::install( root, krates, - &source, + source, from_cwd, version, &compile_opts, diff --git a/src/bin/cargo/commands/login.rs b/src/bin/cargo/commands/login.rs index 39f53f342e5..0c743829624 100644 --- a/src/bin/cargo/commands/login.rs +++ b/src/bin/cargo/commands/login.rs @@ -3,9 +3,9 @@ use command_prelude::*; use std::io::{self, BufRead}; use cargo::core::{Source, 
SourceId}; +use cargo::ops; use cargo::sources::RegistrySource; use cargo::util::{CargoError, CargoResultExt}; -use cargo::ops; pub fn cli() -> App { subcommand("login") @@ -29,11 +29,12 @@ pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { return Err(format_err!( "token must be provided when \ --registry is provided." - ).into()); + ) + .into()); } None => { let src = SourceId::crates_io(config)?; - let mut src = RegistrySource::remote(&src, config); + let mut src = RegistrySource::remote(src, config); src.update()?; let config = src.config()?.unwrap(); args.value_of("host") diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs index f96c2069136..bdb04e36a7a 100644 --- a/src/cargo/core/dependency.rs +++ b/src/cargo/core/dependency.rs @@ -2,14 +2,14 @@ use std::fmt; use std::rc::Rc; use std::str::FromStr; -use semver::VersionReq; use semver::ReqParseError; +use semver::VersionReq; use serde::ser; -use core::{PackageId, SourceId, Summary}; use core::interning::InternedString; -use util::{Cfg, CfgExpr, Config}; +use core::{PackageId, SourceId, Summary}; use util::errors::{CargoError, CargoResult, CargoResultExt}; +use util::{Cfg, CfgExpr, Config}; /// Information about a dependency requested by a Cargo manifest. /// Cheap to copy. @@ -48,7 +48,7 @@ pub enum Platform { #[derive(Serialize)] struct SerializedDependency<'a> { name: &'a str, - source: &'a SourceId, + source: SourceId, req: String, kind: Kind, rename: Option<&'a str>, @@ -74,7 +74,8 @@ impl ser::Serialize for Dependency { features: self.features(), target: self.platform(), rename: self.explicit_name_in_toml().map(|s| s.as_str()), - }.serialize(s) + } + .serialize(s) } } @@ -116,7 +117,7 @@ this warning. config.shell().warn(&msg)?; Ok(requirement) - }, + } Err(e) => { let err: CargoResult = Err(e.into()); let v: VersionReq = err.chain_err(|| { @@ -126,7 +127,7 @@ this warning. 
) })?; Ok(v) - }, + } Ok(v) => Ok(v), } } @@ -140,7 +141,8 @@ impl ser::Serialize for Kind { Kind::Normal => None, Kind::Development => Some("dev"), Kind::Build => Some("build"), - }.serialize(s) + } + .serialize(s) } } @@ -149,7 +151,7 @@ impl Dependency { pub fn parse( name: &str, version: Option<&str>, - source_id: &SourceId, + source_id: SourceId, inside: &PackageId, config: &Config, ) -> CargoResult { @@ -173,7 +175,7 @@ impl Dependency { pub fn parse_no_deprecated( name: &str, version: Option<&str>, - source_id: &SourceId, + source_id: SourceId, ) -> CargoResult { let (specified_req, version_req) = match version { Some(v) => (true, parse_req_with_deprecated(name, v, None)?), @@ -190,12 +192,12 @@ impl Dependency { Ok(ret) } - pub fn new_override(name: &str, source_id: &SourceId) -> Dependency { + pub fn new_override(name: &str, source_id: SourceId) -> Dependency { assert!(!name.is_empty()); Dependency { inner: Rc::new(Inner { name: InternedString::new(name), - source_id: source_id.clone(), + source_id, registry_id: None, req: VersionReq::any(), kind: Kind::Normal, @@ -260,16 +262,16 @@ impl Dependency { self.inner.name } - pub fn source_id(&self) -> &SourceId { - &self.inner.source_id + pub fn source_id(&self) -> SourceId { + self.inner.source_id } - pub fn registry_id(&self) -> Option<&SourceId> { - self.inner.registry_id.as_ref() + pub fn registry_id(&self) -> Option { + self.inner.registry_id } - pub fn set_registry_id(&mut self, registry_id: &SourceId) -> &mut Dependency { - Rc::make_mut(&mut self.inner).registry_id = Some(registry_id.clone()); + pub fn set_registry_id(&mut self, registry_id: SourceId) -> &mut Dependency { + Rc::make_mut(&mut self.inner).registry_id = Some(registry_id); self } @@ -301,9 +303,14 @@ impl Dependency { } /// Sets the list of features requested for the package. 
- pub fn set_features(&mut self, features: impl IntoIterator>) -> &mut Dependency { - Rc::make_mut(&mut self.inner).features = - features.into_iter().map(|s| InternedString::new(s.as_ref())).collect(); + pub fn set_features( + &mut self, + features: impl IntoIterator>, + ) -> &mut Dependency { + Rc::make_mut(&mut self.inner).features = features + .into_iter() + .map(|s| InternedString::new(s.as_ref())) + .collect(); self } @@ -343,7 +350,7 @@ impl Dependency { /// Lock this dependency to depending on the specified package id pub fn lock_to(&mut self, id: &PackageId) -> &mut Dependency { - assert_eq!(self.inner.source_id, *id.source_id()); + assert_eq!(self.inner.source_id, id.source_id()); assert!(self.inner.req.matches(id.version())); trace!( "locking dep from `{}` with `{}` at {} to {}", @@ -353,7 +360,7 @@ impl Dependency { id ); self.set_version_req(VersionReq::exact(id.version())) - .set_source_id(id.source_id().clone()) + .set_source_id(id.source_id()) } /// Returns whether this is a "locked" dependency, basically whether it has @@ -405,15 +412,14 @@ impl Dependency { pub fn matches_id(&self, id: &PackageId) -> bool { self.inner.name == id.name() && (self.inner.only_match_name - || (self.inner.req.matches(id.version()) - && &self.inner.source_id == id.source_id())) + || (self.inner.req.matches(id.version()) && self.inner.source_id == id.source_id())) } - pub fn map_source(mut self, to_replace: &SourceId, replace_with: &SourceId) -> Dependency { + pub fn map_source(mut self, to_replace: SourceId, replace_with: SourceId) -> Dependency { if self.source_id() != to_replace { self } else { - self.set_source_id(replace_with.clone()); + self.set_source_id(replace_with); self } } @@ -446,7 +452,8 @@ impl FromStr for Platform { fn from_str(s: &str) -> CargoResult { if s.starts_with("cfg(") && s.ends_with(')') { let s = &s[4..s.len() - 1]; - let p = s.parse() + let p = s + .parse() .map(Platform::Cfg) .chain_err(|| format_err!("failed to parse `{}` as a cfg expression", 
s))?; Ok(p) diff --git a/src/cargo/core/interning.rs b/src/cargo/core/interning.rs index c925034041b..1c580e6ded5 100644 --- a/src/cargo/core/interning.rs +++ b/src/cargo/core/interning.rs @@ -34,7 +34,7 @@ impl Eq for InternedString {} impl InternedString { pub fn new(str: &str) -> InternedString { let mut cache = STRING_CACHE.lock().unwrap(); - let s = cache.get(str).map(|&s| s).unwrap_or_else(|| { + let s = cache.get(str).cloned().unwrap_or_else(|| { let s = leak(str.to_string()); cache.insert(s); s diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index f65778df45d..8763e4e628e 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -15,7 +15,7 @@ use core::{Dependency, PackageId, PackageIdSpec, SourceId, Summary}; use core::{Edition, Feature, Features, WorkspaceConfig}; use util::errors::*; use util::toml::TomlManifest; -use util::{Config, Filesystem, short_hash}; +use util::{short_hash, Config, Filesystem}; pub enum EitherManifest { Real(Manifest), @@ -254,11 +254,7 @@ impl fmt::Debug for TargetSourcePath { impl From for TargetSourcePath { fn from(path: PathBuf) -> Self { - assert!( - path.is_absolute(), - "`{}` is not absolute", - path.display() - ); + assert!(path.is_absolute(), "`{}` is not absolute", path.display()); TargetSourcePath::Path(path) } } @@ -290,7 +286,8 @@ impl ser::Serialize for Target { .required_features .as_ref() .map(|rf| rf.iter().map(|s| &**s).collect()), - }.serialize(s) + } + .serialize(s) } } @@ -468,7 +465,7 @@ impl Manifest { self.summary = summary; } - pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Manifest { + pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Manifest { Manifest { summary: self.summary.map_source(to_replace, replace_with), ..self @@ -490,11 +487,7 @@ impl Manifest { if self.default_run.is_some() { self.features .require(Feature::default_run()) - .chain_err(|| { - format_err!( - "the `default-run` manifest key is unstable" - 
) - })?; + .chain_err(|| format_err!("the `default-run` manifest key is unstable"))?; } Ok(()) @@ -627,11 +620,7 @@ impl Target { } /// Builds a `Target` corresponding to the `build = "build.rs"` entry. - pub fn custom_build_target( - name: &str, - src_path: PathBuf, - edition: Edition, - ) -> Target { + pub fn custom_build_target(name: &str, src_path: PathBuf, edition: Edition) -> Target { Target { kind: TargetKind::CustomBuild, name: name.to_string(), @@ -740,7 +729,9 @@ impl Target { pub fn for_host(&self) -> bool { self.for_host } - pub fn edition(&self) -> Edition { self.edition } + pub fn edition(&self) -> Edition { + self.edition + } pub fn benched(&self) -> bool { self.benched } @@ -839,7 +830,8 @@ impl Target { pub fn can_lto(&self) -> bool { match self.kind { TargetKind::Lib(ref v) => { - !v.contains(&LibKind::Rlib) && !v.contains(&LibKind::Dylib) + !v.contains(&LibKind::Rlib) + && !v.contains(&LibKind::Dylib) && !v.contains(&LibKind::Lib) } _ => true, diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index 8db4758aaca..9ba5affad63 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -1,16 +1,16 @@ -use std::cell::{Ref, RefCell, Cell}; +use std::cell::{Cell, Ref, RefCell}; use std::cmp::Ordering; use std::collections::{HashMap, HashSet}; use std::fmt; use std::hash; use std::mem; use std::path::{Path, PathBuf}; -use std::time::{Instant, Duration}; +use std::time::{Duration, Instant}; use bytesize::ByteSize; -use curl::easy::{Easy, HttpVersion}; -use curl::multi::{Multi, EasyHandle}; use curl; +use curl::easy::{Easy, HttpVersion}; +use curl::multi::{EasyHandle, Multi}; use curl_sys; use failure::ResultExt; use lazycell::LazyCell; @@ -18,14 +18,14 @@ use semver::Version; use serde::ser; use toml; +use core::interning::InternedString; +use core::source::MaybePackage; use core::{Dependency, Manifest, PackageId, SourceId, Target}; use core::{FeatureMap, SourceMap, Summary}; -use core::source::MaybePackage; -use 
core::interning::InternedString; use ops; -use util::{self, internal, lev_distance, Config, Progress, ProgressStyle}; use util::errors::{CargoResult, CargoResultExt, HttpNot200}; use util::network::Retry; +use util::{self, internal, lev_distance, Config, Progress, ProgressStyle}; /// Information about a package that is available somewhere in the file system. /// @@ -60,7 +60,7 @@ struct SerializedPackage<'a> { license: Option<&'a str>, license_file: Option<&'a str>, description: Option<&'a str>, - source: &'a SourceId, + source: SourceId, dependencies: &'a [Dependency], targets: Vec<&'a Target>, features: &'a FeatureMap, @@ -122,7 +122,8 @@ impl ser::Serialize for Package { repository, edition: &self.manifest.edition().to_string(), metabuild: self.manifest.metabuild(), - }.serialize(s) + } + .serialize(s) } } @@ -200,7 +201,7 @@ impl Package { matches.min_by_key(|t| t.0).map(|t| t.1) } - pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Package { + pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Package { Package { manifest: self.manifest.map_source(to_replace, replace_with), manifest_path: self.manifest_path, @@ -289,7 +290,7 @@ pub struct Downloads<'a, 'cfg: 'a> { /// because we want to apply timeouts to an entire batch of operations, not /// any one particular single operatino timeout: ops::HttpTimeout, // timeout configuration - updated_at: Cell, // last time we received bytes + updated_at: Cell, // last time we received bytes next_speed_check: Cell, // if threshold isn't 0 by this time, error next_speed_check_bytes_threshold: Cell, // decremented when we receive bytes } @@ -340,9 +341,11 @@ impl<'cfg> PackageSet<'cfg> { // that it's buggy, and we've empirically seen that it's buggy with HTTP // proxies. let mut multi = Multi::new(); - let multiplexing = config.get::>("http.multiplexing")? + let multiplexing = config + .get::>("http.multiplexing")? 
.unwrap_or(true); - multi.pipelining(false, multiplexing) + multi + .pipelining(false, multiplexing) .chain_err(|| "failed to enable multiplexing/pipelining in curl")?; // let's not flood crates.io with connections @@ -395,9 +398,10 @@ impl<'cfg> PackageSet<'cfg> { Ok(self.get_many(Some(id))?.remove(0)) } - pub fn get_many<'a>(&self, ids: impl IntoIterator) - -> CargoResult> - { + pub fn get_many<'a>( + &self, + ids: impl IntoIterator, + ) -> CargoResult> { let mut pkgs = Vec::new(); let mut downloads = self.enable_download()?; for id in ids { @@ -424,7 +428,9 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { pub fn start(&mut self, id: &PackageId) -> CargoResult> { // First up see if we've already cached this package, in which case // there's nothing to do. - let slot = self.set.packages + let slot = self + .set + .packages .get(id) .ok_or_else(|| internal(format!("couldn't find `{}` in package set", id)))?; if let Some(pkg) = slot.borrow() { @@ -445,7 +451,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { MaybePackage::Ready(pkg) => { debug!("{} doesn't need a download", id); assert!(slot.fill(pkg).is_ok()); - return Ok(Some(slot.borrow().unwrap())) + return Ok(Some(slot.borrow().unwrap())); } MaybePackage::Download { url, descriptor } => (url, descriptor), }; @@ -483,9 +489,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { warn!("ignoring HTTP/2 activation error: {}", e) } } else { - result.with_context(|_| { - "failed to enable HTTP2, is curl not built right?" 
- })?; + result.with_context(|_| "failed to enable HTTP2, is curl not built right?")?; } } else { handle.http_version(HttpVersion::V11)?; @@ -504,7 +508,9 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { debug!("{} - {} bytes of data", token, buf.len()); tls::with(|downloads| { if let Some(downloads) = downloads { - downloads.pending[&token].0.data + downloads.pending[&token] + .0 + .data .borrow_mut() .extend_from_slice(buf); } @@ -514,22 +520,23 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { handle.progress(true)?; handle.progress_function(move |dl_total, dl_cur, _, _| { - tls::with(|downloads| { - match downloads { - Some(d) => d.progress(token, dl_total as u64, dl_cur as u64), - None => false, - } + tls::with(|downloads| match downloads { + Some(d) => d.progress(token, dl_total as u64, dl_cur as u64), + None => false, }) })?; // If the progress bar isn't enabled then it may be awhile before the // first crate finishes downloading so we inform immediately that we're // downloading crates here. - if self.downloads_finished == 0 && - self.pending.len() == 0 && - !self.progress.borrow().as_ref().unwrap().is_enabled() + if self.downloads_finished == 0 + && self.pending.len() == 0 + && !self.progress.borrow().as_ref().unwrap().is_enabled() { - self.set.config.shell().status("Downloading", "crates ...")?; + self.set + .config + .shell() + .status("Downloading", "crates ...")?; } let dl = Download { @@ -569,7 +576,9 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { let (token, result) = self.wait_for_curl()?; debug!("{} finished with {:?}", token, result); - let (mut dl, handle) = self.pending.remove(&token) + let (mut dl, handle) = self + .pending + .remove(&token) .expect("got a token for a non-in-progress transfer"); let data = mem::replace(&mut *dl.data.borrow_mut(), Vec::new()); let mut handle = self.set.multi.remove(handle)?; @@ -581,42 +590,44 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { let ret = { let timed_out = &dl.timed_out; let url = &dl.url; - dl.retry.try(|| { - if let Err(e) = result 
{ - // If this error is "aborted by callback" then that's - // probably because our progress callback aborted due to - // a timeout. We'll find out by looking at the - // `timed_out` field, looking for a descriptive message. - // If one is found we switch the error code (to ensure - // it's flagged as spurious) and then attach our extra - // information to the error. - if !e.is_aborted_by_callback() { - return Err(e.into()) - } + dl.retry + .try(|| { + if let Err(e) = result { + // If this error is "aborted by callback" then that's + // probably because our progress callback aborted due to + // a timeout. We'll find out by looking at the + // `timed_out` field, looking for a descriptive message. + // If one is found we switch the error code (to ensure + // it's flagged as spurious) and then attach our extra + // information to the error. + if !e.is_aborted_by_callback() { + return Err(e.into()); + } - return Err(match timed_out.replace(None) { - Some(msg) => { - let code = curl_sys::CURLE_OPERATION_TIMEDOUT; - let mut err = curl::Error::new(code); - err.set_extra(msg); - err + return Err(match timed_out.replace(None) { + Some(msg) => { + let code = curl_sys::CURLE_OPERATION_TIMEDOUT; + let mut err = curl::Error::new(code); + err.set_extra(msg); + err + } + None => e, } - None => e, - }.into()) - } + .into()); + } - let code = handle.response_code()?; - if code != 200 && code != 0 { - let url = handle.effective_url()?.unwrap_or(url); - return Err(HttpNot200 { - code, - url: url.to_string(), - }.into()) - } - Ok(()) - }).chain_err(|| { - format!("failed to download from `{}`", dl.url) - })? + let code = handle.response_code()?; + if code != 200 && code != 0 { + let url = handle.effective_url()?.unwrap_or(url); + return Err(HttpNot200 { + code, + url: url.to_string(), + } + .into()); + } + Ok(()) + }) + .chain_err(|| format!("failed to download from `{}`", dl.url))? 
}; match ret { Some(()) => break (dl, data), @@ -631,7 +642,10 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { // semblance of progress of how we're downloading crates, and if the // progress bar is enabled this provides a good log of what's happening. self.progress.borrow_mut().as_mut().unwrap().clear(); - self.set.config.shell().status("Downloaded", &dl.descriptor)?; + self.set + .config + .shell() + .status("Downloaded", &dl.descriptor)?; self.downloads_finished += 1; self.downloaded_bytes += dl.total.get(); @@ -665,7 +679,8 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { // extracted tarball. let finish_dur = start.elapsed(); self.updated_at.set(self.updated_at.get() + finish_dur); - self.next_speed_check.set(self.next_speed_check.get() + finish_dur); + self.next_speed_check + .set(self.next_speed_check.get() + finish_dur); let slot = &self.set.packages[&dl.id]; assert!(slot.fill(pkg).is_ok()); @@ -678,7 +693,8 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { handle.set_token(dl.token)?; self.updated_at.set(now); self.next_speed_check.set(now + self.timeout.dur); - self.next_speed_check_bytes_threshold.set(self.timeout.low_speed_limit as u64); + self.next_speed_check_bytes_threshold + .set(self.timeout.low_speed_limit as u64); dl.timed_out.set(None); dl.current.set(0); dl.total.set(0); @@ -704,7 +720,9 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { // `wait` method on `multi`. loop { let n = tls::set(self, || { - self.set.multi.perform() + self.set + .multi + .perform() .chain_err(|| "failed to perform http requests") })?; debug!("handles remaining: {}", n); @@ -721,12 +739,13 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { }); if let Some(pair) = results.pop() { - break Ok(pair) + break Ok(pair); } assert!(self.pending.len() > 0); - let timeout = self.set.multi.get_timeout()? 
- .unwrap_or(Duration::new(5, 0)); - self.set.multi.wait(&mut [], timeout) + let timeout = self.set.multi.get_timeout()?.unwrap_or(Duration::new(5, 0)); + self.set + .multi + .wait(&mut [], timeout) .chain_err(|| "failed to wait on curl `Multi`")?; } } @@ -744,25 +763,26 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { if delta >= threshold { self.next_speed_check.set(now + self.timeout.dur); - self.next_speed_check_bytes_threshold.set( - self.timeout.low_speed_limit as u64, - ); + self.next_speed_check_bytes_threshold + .set(self.timeout.low_speed_limit as u64); } else { self.next_speed_check_bytes_threshold.set(threshold - delta); } } if !self.tick(WhyTick::DownloadUpdate).is_ok() { - return false + return false; } // If we've spent too long not actually receiving any data we time out. if now - self.updated_at.get() > self.timeout.dur { self.updated_at.set(now); - let msg = format!("failed to download any data for `{}` within {}s", - dl.id, - self.timeout.dur.as_secs()); + let msg = format!( + "failed to download any data for `{}` within {}s", + dl.id, + self.timeout.dur.as_secs() + ); dl.timed_out.set(Some(msg)); - return false + return false; } // If we reached the point in time that we need to check our speed @@ -772,13 +792,15 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { if now >= self.next_speed_check.get() { self.next_speed_check.set(now + self.timeout.dur); assert!(self.next_speed_check_bytes_threshold.get() > 0); - let msg = format!("download of `{}` failed to transfer more \ - than {} bytes in {}s", - dl.id, - self.timeout.low_speed_limit, - self.timeout.dur.as_secs()); + let msg = format!( + "download of `{}` failed to transfer more \ + than {} bytes in {}s", + dl.id, + self.timeout.low_speed_limit, + self.timeout.dur.as_secs() + ); dl.timed_out.set(Some(msg)); - return false + return false; } true @@ -790,7 +812,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { if let WhyTick::DownloadUpdate = why { if !progress.update_allowed() { - return Ok(()) + return Ok(()); } } let 
mut msg = format!("{} crates", self.pending.len()); @@ -833,20 +855,22 @@ impl<'a, 'cfg> Drop for Downloads<'a, 'cfg> { // Don't print a download summary if we're not using a progress bar, // we've already printed lots of `Downloading...` items. if !progress.is_enabled() { - return + return; } // If we didn't download anything, no need for a summary if self.downloads_finished == 0 { - return + return; } // If an error happened, let's not clutter up the output if !self.success { - return + return; } - let mut status = format!("{} crates ({}) in {}", - self.downloads_finished, - ByteSize(self.downloaded_bytes), - util::elapsed(self.start.elapsed())); + let mut status = format!( + "{} crates ({}) in {}", + self.downloads_finished, + ByteSize(self.downloaded_bytes), + util::elapsed(self.start.elapsed()) + ); if self.largest.0 > ByteSize::mb(1).0 { status.push_str(&format!( " (largest was `{}` at {})", @@ -872,9 +896,7 @@ mod tls { if ptr == 0 { f(None) } else { - unsafe { - f(Some(&*(ptr as *const Downloads))) - } + unsafe { f(Some(&*(ptr as *const Downloads))) } } } diff --git a/src/cargo/core/package_id.rs b/src/cargo/core/package_id.rs index 568bef5b6c4..0be2cd9e40d 100644 --- a/src/cargo/core/package_id.rs +++ b/src/cargo/core/package_id.rs @@ -1,7 +1,7 @@ use std::cmp::Ordering; use std::fmt::{self, Formatter}; -use std::hash::Hash; use std::hash; +use std::hash::Hash; use std::path::Path; use std::sync::Arc; @@ -9,9 +9,9 @@ use semver; use serde::de; use serde::ser; -use util::{CargoResult, ToSemver}; -use core::source::SourceId; use core::interning::InternedString; +use core::source::SourceId; +use util::{CargoResult, ToSemver}; /// Identifier for a specific version of a package in a specific source. 
#[derive(Clone)] @@ -100,13 +100,13 @@ impl Ord for PackageId { } impl PackageId { - pub fn new(name: &str, version: T, sid: &SourceId) -> CargoResult { + pub fn new(name: &str, version: T, sid: SourceId) -> CargoResult { let v = version.to_semver()?; Ok(PackageId { inner: Arc::new(PackageIdInner { name: InternedString::new(name), version: v, - source_id: sid.clone(), + source_id: sid, }), }) } @@ -117,8 +117,8 @@ impl PackageId { pub fn version(&self) -> &semver::Version { &self.inner.version } - pub fn source_id(&self) -> &SourceId { - &self.inner.source_id + pub fn source_id(&self) -> SourceId { + self.inner.source_id } pub fn with_precise(&self, precise: Option) -> PackageId { @@ -131,12 +131,12 @@ impl PackageId { } } - pub fn with_source_id(&self, source: &SourceId) -> PackageId { + pub fn with_source_id(&self, source: SourceId) -> PackageId { PackageId { inner: Arc::new(PackageIdInner { name: self.inner.name, version: self.inner.version.clone(), - source_id: source.clone(), + source_id: source, }), } } @@ -190,9 +190,9 @@ mod tests { let loc = CRATES_IO_INDEX.to_url().unwrap(); let repo = SourceId::for_registry(&loc).unwrap(); - assert!(PackageId::new("foo", "1.0", &repo).is_err()); - assert!(PackageId::new("foo", "1", &repo).is_err()); - assert!(PackageId::new("foo", "bar", &repo).is_err()); - assert!(PackageId::new("foo", "", &repo).is_err()); + assert!(PackageId::new("foo", "1.0", repo).is_err()); + assert!(PackageId::new("foo", "1", repo).is_err()); + assert!(PackageId::new("foo", "bar", repo).is_err()); + assert!(PackageId::new("foo", "", repo).is_err()); } } diff --git a/src/cargo/core/package_id_spec.rs b/src/cargo/core/package_id_spec.rs index 8acc65401b6..64312f0812a 100644 --- a/src/cargo/core/package_id_spec.rs +++ b/src/cargo/core/package_id_spec.rs @@ -6,8 +6,8 @@ use serde::{de, ser}; use url::Url; use core::PackageId; -use util::{ToSemver, ToUrl}; use util::errors::{CargoResult, CargoResultExt}; +use util::{ToSemver, ToUrl}; /// Some or all of 
the data required to identify a package: /// @@ -104,7 +104,8 @@ impl PackageIdSpec { let frag = url.fragment().map(|s| s.to_owned()); url.set_fragment(None); let (name, version) = { - let mut path = url.path_segments() + let mut path = url + .path_segments() .ok_or_else(|| format_err!("pkgid urls must have a path: {}", url))?; let path_name = path.next_back().ok_or_else(|| { format_err!( @@ -275,10 +276,10 @@ impl<'de> de::Deserialize<'de> for PackageIdSpec { #[cfg(test)] mod tests { - use core::{PackageId, SourceId}; use super::PackageIdSpec; - use url::Url; + use core::{PackageId, SourceId}; use semver::Version; + use url::Url; #[test] fn good_parsing() { @@ -367,8 +368,8 @@ mod tests { fn matching() { let url = Url::parse("http://example.com").unwrap(); let sid = SourceId::for_registry(&url).unwrap(); - let foo = PackageId::new("foo", "1.2.3", &sid).unwrap(); - let bar = PackageId::new("bar", "1.2.3", &sid).unwrap(); + let foo = PackageId::new("foo", "1.2.3", sid).unwrap(); + let bar = PackageId::new("bar", "1.2.3", sid).unwrap(); assert!(PackageIdSpec::parse("foo").unwrap().matches(&foo)); assert!(!PackageIdSpec::parse("foo").unwrap().matches(&bar)); diff --git a/src/cargo/core/profiles.rs b/src/cargo/core/profiles.rs index e635b5fdfa7..669fed83955 100644 --- a/src/cargo/core/profiles.rs +++ b/src/cargo/core/profiles.rs @@ -205,11 +205,13 @@ impl ProfileMaker { .keys() .filter_map(|key| match *key { ProfilePackageSpec::All => None, - ProfilePackageSpec::Spec(ref spec) => if spec.matches(pkg_id) { - Some(spec) - } else { - None - }, + ProfilePackageSpec::Spec(ref spec) => { + if spec.matches(pkg_id) { + Some(spec) + } else { + None + } + } }) .collect(); match matches.len() { @@ -313,11 +315,13 @@ fn merge_toml( .iter() .filter_map(|(key, spec_profile)| match *key { ProfilePackageSpec::All => None, - ProfilePackageSpec::Spec(ref s) => if s.matches(pkg_id) { - Some(spec_profile) - } else { - None - }, + ProfilePackageSpec::Spec(ref s) => { + if s.matches(pkg_id) 
{ + Some(spec_profile) + } else { + None + } + } }); if let Some(spec_profile) = matches.next() { merge_profile(profile, spec_profile); @@ -586,7 +590,7 @@ impl UnitFor { pub fn with_for_host(self, for_host: bool) -> UnitFor { UnitFor { custom_build: self.custom_build, - panic_ok: self.panic_ok && !for_host + panic_ok: self.panic_ok && !for_host, } } @@ -597,16 +601,25 @@ impl UnitFor { } /// Returns true if this unit is allowed to set the `panic` compiler flag. - pub fn is_panic_ok(&self) -> bool { + pub fn is_panic_ok(self) -> bool { self.panic_ok } /// All possible values, used by `clean`. pub fn all_values() -> &'static [UnitFor] { static ALL: [UnitFor; 3] = [ - UnitFor { custom_build: false, panic_ok: true }, - UnitFor { custom_build: true, panic_ok: false }, - UnitFor { custom_build: false, panic_ok: false }, + UnitFor { + custom_build: false, + panic_ok: true, + }, + UnitFor { + custom_build: true, + panic_ok: false, + }, + UnitFor { + custom_build: false, + panic_ok: false, + }, ]; &ALL } diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index e5c975ef5fe..cc2cb1a81ea 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -3,11 +3,11 @@ use std::collections::HashMap; use semver::VersionReq; use url::Url; -use core::{Dependency, PackageId, Source, SourceId, SourceMap, Summary}; use core::PackageSet; -use util::{profile, Config}; -use util::errors::{CargoResult, CargoResultExt}; +use core::{Dependency, PackageId, Source, SourceId, SourceMap, Summary}; use sources::config::SourceConfigMap; +use util::errors::{CargoResult, CargoResultExt}; +use util::{profile, Config}; /// Source of information about a group of packages. 
/// @@ -22,8 +22,8 @@ pub trait Registry { Ok(ret) } - fn describe_source(&self, source: &SourceId) -> String; - fn is_replaced(&self, source: &SourceId) -> bool; + fn describe_source(&self, source: SourceId) -> String; + fn is_replaced(&self, source: SourceId) -> bool; } /// This structure represents a registry of known packages. It internally @@ -102,8 +102,8 @@ impl<'cfg> PackageRegistry<'cfg> { PackageSet::new(package_ids, self.sources, self.config) } - fn ensure_loaded(&mut self, namespace: &SourceId, kind: Kind) -> CargoResult<()> { - match self.source_ids.get(namespace) { + fn ensure_loaded(&mut self, namespace: SourceId, kind: Kind) -> CargoResult<()> { + match self.source_ids.get(&namespace) { // We've previously loaded this source, and we've already locked it, // so we're not allowed to change it even if `namespace` has a // slightly different precise version listed. @@ -138,8 +138,8 @@ impl<'cfg> PackageRegistry<'cfg> { Ok(()) } - pub fn add_sources(&mut self, ids: &[SourceId]) -> CargoResult<()> { - for id in ids.iter() { + pub fn add_sources(&mut self, ids: impl IntoIterator) -> CargoResult<()> { + for id in ids { self.ensure_loaded(id, Kind::Locked)?; } Ok(()) @@ -150,13 +150,13 @@ impl<'cfg> PackageRegistry<'cfg> { } fn add_source(&mut self, source: Box, kind: Kind) { - let id = source.source_id().clone(); + let id = source.source_id(); self.sources.insert(source); - self.source_ids.insert(id.clone(), (id, kind)); + self.source_ids.insert(id, (id, kind)); } pub fn add_override(&mut self, source: Box) { - self.overrides.push(source.source_id().clone()); + self.overrides.push(source.source_id()); self.add_source(source, Kind::Override); } @@ -165,8 +165,9 @@ impl<'cfg> PackageRegistry<'cfg> { for dep in deps.iter() { trace!("\t-> {}", dep); } - let sub_map = self.locked - .entry(id.source_id().clone()) + let sub_map = self + .locked + .entry(id.source_id()) .or_insert_with(HashMap::new); let sub_vec = sub_map .entry(id.name().to_string()) @@ -200,9 
+201,14 @@ impl<'cfg> PackageRegistry<'cfg> { // Remember that each dependency listed in `[patch]` has to resolve to // precisely one package, so that's why we're just creating a flat list // of summaries which should be the same length as `deps` above. - let unlocked_summaries = deps.iter() + let unlocked_summaries = deps + .iter() .map(|dep| { - debug!("registring a patch for `{}` with `{}`", url, dep.package_name()); + debug!( + "registring a patch for `{}` with `{}`", + url, + dep.package_name() + ); // Go straight to the source for resolving `dep`. Load it as we // normally would and then ask it directly for the list of summaries @@ -216,7 +222,8 @@ impl<'cfg> PackageRegistry<'cfg> { ) })?; - let mut summaries = self.sources + let mut summaries = self + .sources .get_mut(dep.source_id()) .expect("loaded source not present") .query_vec(dep)? @@ -289,14 +296,14 @@ impl<'cfg> PackageRegistry<'cfg> { &self.patches } - fn load(&mut self, source_id: &SourceId, kind: Kind) -> CargoResult<()> { + fn load(&mut self, source_id: SourceId, kind: Kind) -> CargoResult<()> { (|| { debug!("loading source {}", source_id); let source = self.source_config.load(source_id)?; assert_eq!(source.source_id(), source_id); if kind == Kind::Override { - self.overrides.push(source_id.clone()); + self.overrides.push(source_id); } self.add_source(source, kind); @@ -304,12 +311,12 @@ impl<'cfg> PackageRegistry<'cfg> { let _p = profile::start(format!("updating: {}", source_id)); self.sources.get_mut(source_id).unwrap().update() })() - .chain_err(|| format_err!("Unable to update {}", source_id))?; + .chain_err(|| format_err!("Unable to update {}", source_id))?; Ok(()) } fn query_overrides(&mut self, dep: &Dependency) -> CargoResult> { - for s in self.overrides.iter() { + for &s in self.overrides.iter() { let src = self.sources.get_mut(s).unwrap(); let dep = Dependency::new_override(&*dep.package_name(), s); let mut results = src.query_vec(&dep)?; @@ -532,14 +539,14 @@ impl<'cfg> Registry for 
PackageRegistry<'cfg> { Ok(()) } - fn describe_source(&self, id: &SourceId) -> String { + fn describe_source(&self, id: SourceId) -> String { match self.sources.get(id) { Some(src) => src.describe(), None => id.to_string(), } } - fn is_replaced(&self, id: &SourceId) -> bool { + fn is_replaced(&self, id: SourceId) -> bool { match self.sources.get(id) { Some(src) => src.is_replaced(), None => false, @@ -549,7 +556,7 @@ impl<'cfg> Registry for PackageRegistry<'cfg> { fn lock(locked: &LockedMap, patches: &HashMap>, summary: Summary) -> Summary { let pair = locked - .get(summary.source_id()) + .get(&summary.source_id()) .and_then(|map| map.get(&*summary.name())) .and_then(|vec| vec.iter().find(|&&(ref id, _)| id == summary.package_id())); @@ -561,7 +568,12 @@ fn lock(locked: &LockedMap, patches: &HashMap>, summary: Sum None => summary, }; summary.map_dependencies(|dep| { - trace!("\t{}/{}/{}", dep.package_name(), dep.version_req(), dep.source_id()); + trace!( + "\t{}/{}/{}", + dep.package_name(), + dep.version_req(), + dep.source_id() + ); // If we've got a known set of overrides for this summary, then // one of a few cases can arise: @@ -596,7 +608,7 @@ fn lock(locked: &LockedMap, patches: &HashMap>, summary: Sum // all known locked packages to see if they match this dependency. // If anything does then we lock it to that and move on. let v = locked - .get(dep.source_id()) + .get(&dep.source_id()) .and_then(|map| map.get(&*dep.package_name())) .and_then(|vec| vec.iter().find(|&&(ref id, _)| dep.matches_id(id))); if let Some(&(ref id, _)) = v { @@ -610,16 +622,14 @@ fn lock(locked: &LockedMap, patches: &HashMap>, summary: Sum // this dependency. 
let v = patches.get(dep.source_id().url()).map(|vec| { let dep2 = dep.clone(); - let mut iter = vec.iter().filter(move |p| { - dep2.matches_ignoring_source(p) - }); + let mut iter = vec.iter().filter(move |p| dep2.matches_ignoring_source(p)); (iter.next(), iter) }); if let Some((Some(patch_id), mut remaining)) = v { assert!(remaining.next().is_none()); let patch_source = patch_id.source_id(); let patch_locked = locked - .get(patch_source) + .get(&patch_source) .and_then(|m| m.get(&*patch_id.name())) .map(|list| list.iter().any(|&(ref id, _)| id == patch_id)) .unwrap_or(false); diff --git a/src/cargo/core/resolver/context.rs b/src/cargo/core/resolver/context.rs index 4a6629f93c5..c1dd7e22e96 100644 --- a/src/cargo/core/resolver/context.rs +++ b/src/cargo/core/resolver/context.rs @@ -3,9 +3,9 @@ use std::rc::Rc; use core::interning::InternedString; use core::{Dependency, FeatureValue, PackageId, SourceId, Summary}; +use im_rc; use util::CargoResult; use util::Graph; -use im_rc; use super::errors::ActivateResult; use super::types::{ConflictReason, DepInfo, GraphNode, Method, RcList, RegistryQueryer}; @@ -55,7 +55,7 @@ impl Context { let id = summary.package_id(); let prev = self .activations - .entry((id.name(), id.source_id().clone())) + .entry((id.name(), id.source_id())) .or_insert_with(|| Rc::new(Vec::new())); if !prev.iter().any(|c| c == summary) { self.resolve_graph.push(GraphNode::Add(id.clone())); @@ -126,14 +126,14 @@ impl Context { pub fn prev_active(&self, dep: &Dependency) -> &[Summary] { self.activations - .get(&(dep.package_name(), dep.source_id().clone())) + .get(&(dep.package_name(), dep.source_id())) .map(|v| &v[..]) .unwrap_or(&[]) } pub fn is_active(&self, id: &PackageId) -> bool { self.activations - .get(&(id.name(), id.source_id().clone())) + .get(&(id.name(), id.source_id())) .map(|v| v.iter().any(|s| s.package_id() == id)) .unwrap_or(false) } diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index 
c80ce59dfee..25d55d7134f 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -50,7 +50,7 @@ impl EncodableResolve { let enc_id = EncodablePackageId { name: pkg.name.clone(), version: pkg.version.clone(), - source: pkg.source.clone(), + source: pkg.source, }; if !all_pkgs.insert(enc_id.clone()) { @@ -63,7 +63,7 @@ impl EncodableResolve { debug!("path dependency now missing {} v{}", pkg.name, pkg.version); continue; } - Some(source) => PackageId::new(&pkg.name, &pkg.version, source)?, + Some(&source) => PackageId::new(&pkg.name, &pkg.version, source)?, }; assert!(live_pkgs.insert(enc_id, (id, pkg)).is_none()) @@ -156,7 +156,7 @@ impl EncodableResolve { let mut unused_patches = Vec::new(); for pkg in self.patch.unused { let id = match pkg.source.as_ref().or_else(|| path_deps.get(&pkg.name)) { - Some(src) => PackageId::new(&pkg.name, &pkg.version, src)?, + Some(&src) => PackageId::new(&pkg.name, &pkg.version, src)?, None => continue, }; unused_patches.push(id); @@ -188,9 +188,9 @@ fn build_path_deps(ws: &Workspace) -> HashMap { for member in members.iter() { ret.insert( member.package_id().name().to_string(), - member.package_id().source_id().clone(), + member.package_id().source_id(), ); - visited.insert(member.package_id().source_id().clone()); + visited.insert(member.package_id().source_id()); } for member in members.iter() { build_pkg(member, ws, &mut ret, &mut visited); @@ -224,7 +224,7 @@ fn build_path_deps(ws: &Workspace) -> HashMap { visited: &mut HashSet, ) { let id = dep.source_id(); - if visited.contains(id) || !id.is_path() { + if visited.contains(&id) || !id.is_path() { return; } let path = match id.url().to_file_path() { @@ -235,8 +235,8 @@ fn build_path_deps(ws: &Workspace) -> HashMap { Ok(p) => p, Err(_) => return, }; - ret.insert(pkg.name().to_string(), pkg.package_id().source_id().clone()); - visited.insert(pkg.package_id().source_id().clone()); + ret.insert(pkg.name().to_string(), pkg.package_id().source_id()); + 
visited.insert(pkg.package_id().source_id()); build_pkg(&pkg, ws, ret, visited); } } @@ -412,10 +412,10 @@ pub fn encodable_package_id(id: &PackageId) -> EncodablePackageId { } } -fn encode_source(id: &SourceId) -> Option { +fn encode_source(id: SourceId) -> Option { if id.is_path() { None } else { - Some(id.clone()) + Some(id) } } diff --git a/src/cargo/core/source/mod.rs b/src/cargo/core/source/mod.rs index c03c29b3ed7..a6dc8aa0955 100644 --- a/src/cargo/core/source/mod.rs +++ b/src/cargo/core/source/mod.rs @@ -12,10 +12,10 @@ pub use self::source_id::{GitReference, SourceId}; /// versions. pub trait Source { /// Returns the `SourceId` corresponding to this source - fn source_id(&self) -> &SourceId; + fn source_id(&self) -> SourceId; /// Returns the replaced `SourceId` corresponding to this source - fn replaced_source_id(&self) -> &SourceId { + fn replaced_source_id(&self) -> SourceId { self.source_id() } @@ -92,12 +92,12 @@ pub enum MaybePackage { impl<'a, T: Source + ?Sized + 'a> Source for Box { /// Forwards to `Source::source_id` - fn source_id(&self) -> &SourceId { + fn source_id(&self) -> SourceId { (**self).source_id() } /// Forwards to `Source::replaced_source_id` - fn replaced_source_id(&self) -> &SourceId { + fn replaced_source_id(&self) -> SourceId { (**self).replaced_source_id() } @@ -155,11 +155,11 @@ impl<'a, T: Source + ?Sized + 'a> Source for Box { } impl<'a, T: Source + ?Sized + 'a> Source for &'a mut T { - fn source_id(&self) -> &SourceId { + fn source_id(&self) -> SourceId { (**self).source_id() } - fn replaced_source_id(&self) -> &SourceId { + fn replaced_source_id(&self) -> SourceId { (**self).replaced_source_id() } @@ -231,13 +231,13 @@ impl<'src> SourceMap<'src> { } /// Like `HashMap::contains_key` - pub fn contains(&self, id: &SourceId) -> bool { - self.map.contains_key(id) + pub fn contains(&self, id: SourceId) -> bool { + self.map.contains_key(&id) } /// Like `HashMap::get` - pub fn get(&self, id: &SourceId) -> Option<&(Source + 'src)> { 
- let source = self.map.get(id); + pub fn get(&self, id: SourceId) -> Option<&(Source + 'src)> { + let source = self.map.get(&id); source.map(|s| { let s: &(Source + 'src) = &**s; @@ -246,8 +246,8 @@ impl<'src> SourceMap<'src> { } /// Like `HashMap::get_mut` - pub fn get_mut(&mut self, id: &SourceId) -> Option<&mut (Source + 'src)> { - self.map.get_mut(id).map(|s| { + pub fn get_mut(&mut self, id: SourceId) -> Option<&mut (Source + 'src)> { + self.map.get_mut(&id).map(|s| { let s: &mut (Source + 'src) = &mut **s; s }) @@ -261,7 +261,7 @@ impl<'src> SourceMap<'src> { /// Like `HashMap::insert`, but derives the SourceId key from the Source pub fn insert(&mut self, source: Box) { - let id = source.source_id().clone(); + let id = source.source_id(); self.map.insert(id, source); } diff --git a/src/cargo/core/source/source_id.rs b/src/cargo/core/source/source_id.rs index 47e8ef546f1..ea29d39c863 100644 --- a/src/cargo/core/source/source_id.rs +++ b/src/cargo/core/source/source_id.rs @@ -4,18 +4,18 @@ use std::fmt::{self, Formatter}; use std::hash::{self, Hash}; use std::path::Path; use std::ptr; -use std::sync::Mutex; -use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT}; use std::sync::atomic::Ordering::SeqCst; +use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT}; +use std::sync::Mutex; -use serde::ser; use serde::de; +use serde::ser; use url::Url; use ops; use sources::git; -use sources::{GitSource, PathSource, RegistrySource, CRATES_IO_INDEX}; use sources::DirectorySource; +use sources::{GitSource, PathSource, RegistrySource, CRATES_IO_INDEX}; use util::{CargoResult, Config, ToUrl}; lazy_static! 
{ @@ -74,21 +74,19 @@ impl SourceId { /// /// The canonical url will be calculated, but the precise field will not fn new(kind: Kind, url: Url) -> CargoResult { - let source_id = SourceId::wrap( - SourceIdInner { - kind, - canonical_url: git::canonicalize_url(&url)?, - url, - precise: None, - name: None, - } - ); + let source_id = SourceId::wrap(SourceIdInner { + kind, + canonical_url: git::canonicalize_url(&url)?, + url, + precise: None, + name: None, + }); Ok(source_id) } fn wrap(inner: SourceIdInner) -> SourceId { let mut cache = SOURCE_ID_CACHE.lock().unwrap(); - let inner = cache.get(&inner).map(|&x| x).unwrap_or_else(|| { + let inner = cache.get(&inner).cloned().unwrap_or_else(|| { let inner = Box::leak(Box::new(inner)); cache.insert(inner); inner @@ -209,15 +207,13 @@ impl SourceId { pub fn alt_registry(config: &Config, key: &str) -> CargoResult { let url = config.get_registry_index(key)?; - Ok(SourceId::wrap( - SourceIdInner { - kind: Kind::Registry, - canonical_url: git::canonicalize_url(&url)?, - url, - precise: None, - name: Some(key.to_string()), - } - )) + Ok(SourceId::wrap(SourceIdInner { + kind: Kind::Registry, + canonical_url: git::canonicalize_url(&url)?, + url, + precise: None, + name: Some(key.to_string()), + })) } /// Get this source URL @@ -225,7 +221,7 @@ impl SourceId { &self.inner.url } - pub fn display_registry(&self) -> String { + pub fn display_registry(self) -> String { if self.is_default_registry() { "crates.io index".to_string() } else { @@ -234,12 +230,12 @@ impl SourceId { } /// Is this source from a filesystem path - pub fn is_path(&self) -> bool { + pub fn is_path(self) -> bool { self.inner.kind == Kind::Path } /// Is this source from a registry (either local or not) - pub fn is_registry(&self) -> bool { + pub fn is_registry(self) -> bool { match self.inner.kind { Kind::Registry | Kind::LocalRegistry => true, _ => false, @@ -247,12 +243,12 @@ impl SourceId { } /// Is this source from an alternative registry - pub fn 
is_alt_registry(&self) -> bool { + pub fn is_alt_registry(self) -> bool { self.is_registry() && self.inner.name.is_some() } /// Is this source from a git repository - pub fn is_git(&self) -> bool { + pub fn is_git(self) -> bool { match self.inner.kind { Kind::Git(_) => true, _ => false, @@ -260,7 +256,7 @@ impl SourceId { } /// Creates an implementation of `Source` corresponding to this ID. - pub fn load<'a>(&self, config: &'a Config) -> CargoResult> { + pub fn load<'a>(self, config: &'a Config) -> CargoResult> { trace!("loading SourceId; {}", self); match self.inner.kind { Kind::Git(..) => Ok(Box::new(GitSource::new(self, config)?)), @@ -290,12 +286,12 @@ impl SourceId { } /// Get the value of the precise field - pub fn precise(&self) -> Option<&str> { + pub fn precise(self) -> Option<&'static str> { self.inner.precise.as_ref().map(|s| &s[..]) } /// Get the git reference if this is a git source, otherwise None. - pub fn git_reference(&self) -> Option<&GitReference> { + pub fn git_reference(self) -> Option<&'static GitReference> { match self.inner.kind { Kind::Git(ref s) => Some(s), _ => None, @@ -303,17 +299,15 @@ impl SourceId { } /// Create a new SourceId from this source with the given `precise` - pub fn with_precise(&self, v: Option) -> SourceId { - SourceId::wrap( - SourceIdInner { - precise: v, - ..(*self.inner).clone() - } - ) + pub fn with_precise(self, v: Option) -> SourceId { + SourceId::wrap(SourceIdInner { + precise: v, + ..(*self.inner).clone() + }) } /// Whether the remote registry is the standard https://crates.io - pub fn is_default_registry(&self) -> bool { + pub fn is_default_registry(self) -> bool { match self.inner.kind { Kind::Registry => {} _ => return false, @@ -325,9 +319,10 @@ impl SourceId { /// /// For paths, remove the workspace prefix so the same source will give the /// same hash in different locations. 
- pub fn stable_hash(&self, workspace: &Path, into: &mut S) { + pub fn stable_hash(self, workspace: &Path, into: &mut S) { if self.is_path() { - if let Ok(p) = self.inner + if let Ok(p) = self + .inner .url .to_file_path() .unwrap() diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs index 727bcdb4f51..8610f500e97 100644 --- a/src/cargo/core/summary.rs +++ b/src/cargo/core/summary.rs @@ -39,7 +39,9 @@ impl Summary { links: Option>, namespaced_features: bool, ) -> CargoResult - where K: Borrow + Ord + Display { + where + K: Borrow + Ord + Display, + { for dep in dependencies.iter() { let feature = dep.name_in_toml(); if !namespaced_features && features.get(&*feature).is_some() { @@ -78,7 +80,7 @@ impl Summary { pub fn version(&self) -> &Version { self.package_id().version() } - pub fn source_id(&self) -> &SourceId { + pub fn source_id(&self) -> SourceId { self.package_id().source_id() } pub fn dependencies(&self) -> &[Dependency] { @@ -119,7 +121,7 @@ impl Summary { self } - pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) -> Summary { + pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Summary { let me = if self.package_id().source_id() == to_replace { let new_id = self.package_id().with_source_id(replace_with); self.override_id(new_id) @@ -143,7 +145,9 @@ fn build_feature_map( dependencies: &[Dependency], namespaced: bool, ) -> CargoResult -where K: Borrow + Ord + Display { +where + K: Borrow + Ord + Display, +{ use self::FeatureValue::*; let mut dep_map = HashMap::new(); for dep in dependencies.iter() { @@ -258,42 +262,46 @@ where K: Borrow + Ord + Display { // not recognized as a feature is pegged as a `Crate`. Here we handle the case // where the dependency exists but is non-optional. It branches on namespaced // just to provide the correct string for the crate dependency in the error. 
- (&Crate(ref dep), true, false) => if namespaced { - bail!( - "Feature `{}` includes `crate:{}` which is not an \ - optional dependency.\nConsider adding \ - `optional = true` to the dependency", - feature, - dep - ) - } else { - bail!( - "Feature `{}` depends on `{}` which is not an \ - optional dependency.\nConsider adding \ - `optional = true` to the dependency", - feature, - dep - ) - }, + (&Crate(ref dep), true, false) => { + if namespaced { + bail!( + "Feature `{}` includes `crate:{}` which is not an \ + optional dependency.\nConsider adding \ + `optional = true` to the dependency", + feature, + dep + ) + } else { + bail!( + "Feature `{}` depends on `{}` which is not an \ + optional dependency.\nConsider adding \ + `optional = true` to the dependency", + feature, + dep + ) + } + } // If namespaced, the value was tagged as a dependency; if not namespaced, // this could be anything not defined as a feature. This handles the case // where no such dependency is actually defined; again, the branch on // namespaced here is just to provide the correct string in the error. - (&Crate(ref dep), false, _) => if namespaced { - bail!( - "Feature `{}` includes `crate:{}` which is not a known \ - dependency", - feature, - dep - ) - } else { - bail!( - "Feature `{}` includes `{}` which is neither a dependency nor \ - another feature", - feature, - dep - ) - }, + (&Crate(ref dep), false, _) => { + if namespaced { + bail!( + "Feature `{}` includes `crate:{}` which is not a known \ + dependency", + feature, + dep + ) + } else { + bail!( + "Feature `{}` includes `{}` which is neither a dependency nor \ + another feature", + feature, + dep + ) + } + } (&Crate(_), true, true) => {} // If the value is a feature for one of the dependencies, bail out if no such // dependency is actually defined in the manifest. 
@@ -372,11 +380,13 @@ impl FeatureValue { use self::FeatureValue::*; match *self { Feature(ref f) => f.to_string(), - Crate(ref c) => if s.namespaced_features() { - format!("crate:{}", &c) - } else { - c.to_string() - }, + Crate(ref c) => { + if s.namespaced_features() { + format!("crate:{}", &c) + } else { + c.to_string() + } + } CrateFeature(ref c, ref f) => [c.as_ref(), f.as_ref()].join("/"), } } diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs index e09bdf11268..1b29eb62093 100644 --- a/src/cargo/core/workspace.rs +++ b/src/cargo/core/workspace.rs @@ -236,7 +236,8 @@ impl<'cfg> Workspace<'cfg> { } pub fn profiles(&self) -> &Profiles { - let root = self.root_manifest + let root = self + .root_manifest .as_ref() .unwrap_or(&self.current_manifest); match *self.packages.get(root) { @@ -253,8 +254,9 @@ impl<'cfg> Workspace<'cfg> { match self.root_manifest { Some(ref p) => p, None => &self.current_manifest, - }.parent() - .unwrap() + } + .parent() + .unwrap() } pub fn target_dir(&self) -> Filesystem { @@ -425,8 +427,8 @@ impl<'cfg> Workspace<'cfg> { let root_package = self.packages.load(&root_manifest_path)?; match *root_package.workspace_config() { WorkspaceConfig::Root(ref root_config) => { - members_paths = - root_config.members_paths(root_config.members.as_ref().unwrap_or(&vec![]))?; + members_paths = root_config + .members_paths(root_config.members.as_ref().unwrap_or(&vec![]))?; default_members_paths = if let Some(ref default) = root_config.default_members { Some(root_config.members_paths(default)?) } else { @@ -475,7 +477,8 @@ impl<'cfg> Workspace<'cfg> { if self.members.contains(&manifest_path) { return Ok(()); } - if is_path_dep && !manifest_path.parent().unwrap().starts_with(self.root()) + if is_path_dep + && !manifest_path.parent().unwrap().starts_with(self.root()) && self.find_root(&manifest_path)? 
!= self.root_manifest { // If `manifest_path` is a path dependency outside of the workspace, @@ -655,7 +658,8 @@ impl<'cfg> Workspace<'cfg> { } if let Some(ref root_manifest) = self.root_manifest { - for pkg in self.members() + for pkg in self + .members() .filter(|p| p.manifest_path() != root_manifest) { let manifest = pkg.manifest(); @@ -699,7 +703,7 @@ impl<'cfg> Workspace<'cfg> { return Ok(p); } let source_id = SourceId::for_path(manifest_path.parent().unwrap())?; - let (package, _nested_paths) = ops::read_package(manifest_path, &source_id, self.config)?; + let (package, _nested_paths) = ops::read_package(manifest_path, source_id, self.config)?; loaded.insert(manifest_path.to_path_buf(), package.clone()); Ok(package) } @@ -745,10 +749,7 @@ impl<'cfg> Workspace<'cfg> { for warning in warnings { if warning.is_critical { let err = format_err!("{}", warning.message); - let cx = format_err!( - "failed to parse manifest at `{}`", - path.display() - ); + let cx = format_err!("failed to parse manifest at `{}`", path.display()); return Err(err.context(cx).into()); } else { let msg = if self.root_manifest.is_none() { @@ -782,7 +783,7 @@ impl<'cfg> Packages<'cfg> { Entry::Vacant(v) => { let source_id = SourceId::for_path(key)?; let (manifest, _nested_paths) = - read_manifest(manifest_path, &source_id, self.config)?; + read_manifest(manifest_path, source_id, self.config)?; Ok(v.insert(match manifest { EitherManifest::Real(manifest) => { MaybePackage::Package(Package::new(manifest, manifest_path)) @@ -843,7 +844,8 @@ impl WorkspaceRootConfig { /// /// This method does NOT consider the `members` list. 
fn is_excluded(&self, manifest_path: &Path) -> bool { - let excluded = self.exclude + let excluded = self + .exclude .iter() .any(|ex| manifest_path.starts_with(self.root_dir.join(ex))); @@ -886,9 +888,9 @@ impl WorkspaceRootConfig { None => return Ok(Vec::new()), }; let res = glob(path).chain_err(|| format_err!("could not parse pattern `{}`", &path))?; - let res = res.map(|p| { - p.chain_err(|| format_err!("unable to match path to pattern `{}`", &path)) - }).collect::, _>>()?; + let res = res + .map(|p| p.chain_err(|| format_err!("unable to match path to pattern `{}`", &path))) + .collect::, _>>()?; Ok(res) } } diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs index cce7390aeed..f25b05e78ea 100644 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -73,13 +73,13 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) -> CargoResult<()> } else { precise.to_string() }; - dep.source_id().clone().with_precise(Some(precise)) + dep.source_id().with_precise(Some(precise)) } - None => dep.source_id().clone().with_precise(None), + None => dep.source_id().with_precise(None), }); } } - registry.add_sources(&sources)?; + registry.add_sources(sources)?; } let resolve = ops::resolve_with_previous( @@ -141,7 +141,7 @@ pub fn update_lockfile(ws: &Workspace, opts: &UpdateOptions) -> CargoResult<()> previous_resolve: &'a Resolve, resolve: &'a Resolve, ) -> Vec<(Vec<&'a PackageId>, Vec<&'a PackageId>)> { - fn key(dep: &PackageId) -> (&str, &SourceId) { + fn key(dep: &PackageId) -> (&str, SourceId) { (dep.name().as_str(), dep.source_id()) } diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index c38019af1d2..ec0ca5f387c 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -1,26 +1,26 @@ use std::collections::btree_map::Entry; use std::collections::{BTreeMap, BTreeSet}; -use std::{env, fs}; use std::io::prelude::*; use 
std::io::SeekFrom; use std::path::{Path, PathBuf}; use std::sync::Arc; +use std::{env, fs}; use semver::{Version, VersionReq}; use tempfile::Builder as TempFileBuilder; use toml; +use core::compiler::{DefaultExecutor, Executor}; +use core::package::PackageSet; +use core::source::SourceMap; use core::{Dependency, Edition, Package, PackageIdSpec, Source, SourceId}; use core::{PackageId, Workspace}; -use core::source::SourceMap; -use core::package::PackageSet; -use core::compiler::{DefaultExecutor, Executor}; use ops::{self, CompileFilter}; use sources::{GitSource, PathSource, SourceConfigMap}; -use util::{internal, Config}; -use util::{FileLock, Filesystem}; use util::errors::{CargoResult, CargoResultExt}; use util::paths; +use util::{internal, Config}; +use util::{FileLock, Filesystem}; #[derive(Deserialize, Serialize)] #[serde(untagged)] @@ -59,7 +59,7 @@ impl Drop for Transaction { pub fn install( root: Option<&str>, krates: Vec<&str>, - source_id: &SourceId, + source_id: SourceId, from_cwd: bool, vers: Option<&str>, opts: &ops::CompileOptions, @@ -154,7 +154,7 @@ fn install_one( root: &Filesystem, map: &SourceConfigMap, krate: Option<&str>, - source_id: &SourceId, + source_id: SourceId, from_cwd: bool, vers: Option<&str>, opts: &ops::CompileOptions, @@ -182,7 +182,9 @@ fn install_one( src.path().display() ) })?; - select_pkg(src, krate, vers, config, false, &mut |path| path.read_packages())? + select_pkg(src, krate, vers, config, false, &mut |path| { + path.read_packages() + })? 
} else { select_pkg( map.load(source_id)?, @@ -255,20 +257,19 @@ fn install_one( } let exec: Arc = Arc::new(DefaultExecutor); - let compile = - ops::compile_ws(&ws, Some(source), opts, &exec).chain_err(|| { - if let Some(td) = td_opt.take() { - // preserve the temporary directory, so the user can inspect it - td.into_path(); - } + let compile = ops::compile_ws(&ws, Some(source), opts, &exec).chain_err(|| { + if let Some(td) = td_opt.take() { + // preserve the temporary directory, so the user can inspect it + td.into_path(); + } - format_err!( - "failed to compile `{}`, intermediate artifacts can be \ - found at `{}`", - pkg, - ws.target_dir().display() - ) - })?; + format_err!( + "failed to compile `{}`, intermediate artifacts can be \ + found at `{}`", + pkg, + ws.target_dir().display() + ) + })?; let binaries: Vec<(&str, &Path)> = compile .binaries .iter() @@ -368,7 +369,8 @@ fn install_one( } // Remove empty metadata lines. - let pkgs = list.v1 + let pkgs = list + .v1 .iter() .filter_map(|(p, set)| { if set.is_empty() { @@ -410,7 +412,7 @@ fn install_one( Ok(()) } -fn path_source<'a>(source_id: &SourceId, config: &'a Config) -> CargoResult> { +fn path_source<'a>(source_id: SourceId, config: &'a Config) -> CargoResult> { let path = source_id .url() .to_file_path() @@ -439,7 +441,8 @@ where Some(v) => { // If the version begins with character <, >, =, ^, ~ parse it as a // version range, otherwise parse it as a specific version - let first = v.chars() + let first = v + .chars() .nth(0) .ok_or_else(|| format_err!("no version provided for the `--vers` flag"))?; @@ -495,16 +498,13 @@ where } else { vers }; - let dep = Dependency::parse_no_deprecated( - name, - vers_spec, - source.source_id(), - )?; + let dep = Dependency::parse_no_deprecated(name, vers_spec, source.source_id())?; let deps = source.query_vec(&dep)?; let pkgid = match deps.iter().map(|p| p.package_id()).max() { Some(pkgid) => pkgid, None => { - let vers_info = vers.map(|v| format!(" with version `{}`", 
v)) + let vers_info = vers + .map(|v| format!(" with version `{}`", v)) .unwrap_or_default(); bail!( "could not find `{}` in {}{}", @@ -624,7 +624,8 @@ fn find_duplicates( } }; match *filter { - CompileFilter::Default { .. } => pkg.targets() + CompileFilter::Default { .. } => pkg + .targets() .iter() .filter(|t| t.is_bin()) .filter_map(|t| check(t.name().to_string())) @@ -671,7 +672,7 @@ fn read_crate_list(file: &FileLock) -> CargoResult { }), } })() - .chain_err(|| { + .chain_err(|| { format_err!( "failed to parse crate metadata at `{}`", file.path().to_string_lossy() @@ -689,7 +690,7 @@ fn write_crate_list(file: &FileLock, listing: CrateListingV1) -> CargoResult<()> file.write_all(data.as_bytes())?; Ok(()) })() - .chain_err(|| { + .chain_err(|| { format_err!( "failed to write crate metadata at `{}`", file.path().to_string_lossy() @@ -782,17 +783,14 @@ pub fn uninstall_one( uninstall_pkgid(crate_metadata, metadata, &pkgid, bins, config) } -fn uninstall_cwd( - root: &Filesystem, - bins: &[String], - config: &Config, -) -> CargoResult<()> { +fn uninstall_cwd(root: &Filesystem, bins: &[String], config: &Config) -> CargoResult<()> { let crate_metadata = metadata(config, root)?; let metadata = read_crate_list(&crate_metadata)?; let source_id = SourceId::for_path(config.cwd())?; - let src = path_source(&source_id, config)?; - let (pkg, _source) = - select_pkg(src, None, None, config, true, &mut |path| path.read_packages())?; + let src = path_source(source_id, config)?; + let (pkg, _source) = select_pkg(src, None, None, config, true, &mut |path| { + path.read_packages() + })?; let pkgid = pkg.package_id(); uninstall_pkgid(crate_metadata, metadata, pkgid, bins, config) } @@ -821,7 +819,8 @@ fn uninstall_pkgid( } } - let bins = bins.iter() + let bins = bins + .iter() .map(|s| { if s.ends_with(env::consts::EXE_SUFFIX) { s.to_string() @@ -865,7 +864,8 @@ fn metadata(config: &Config, root: &Filesystem) -> CargoResult { fn resolve_root(flag: Option<&str>, config: &Config) -> 
CargoResult { let config_root = config.get_path("install.root")?; - Ok(flag.map(PathBuf::from) + Ok(flag + .map(PathBuf::from) .or_else(|| env::var_os("CARGO_INSTALL_ROOT").map(PathBuf::from)) .or_else(move || config_root.map(|v| v.val)) .map(Filesystem::new) diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index 37c82238ad5..b0b628102b1 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -10,13 +10,13 @@ use git2; use serde_json; use tar::{Archive, Builder, EntryType, Header}; -use core::{Package, Source, SourceId, Workspace}; use core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor}; +use core::{Package, Source, SourceId, Workspace}; +use ops; use sources::PathSource; -use util::{self, internal, Config, FileLock}; -use util::paths; use util::errors::{CargoResult, CargoResultExt}; -use ops; +use util::paths; +use util::{self, internal, Config, FileLock}; pub struct PackageOpts<'cfg> { pub config: &'cfg Config, @@ -60,7 +60,8 @@ pub fn package(ws: &Workspace, opts: &PackageOpts) -> CargoResult = src.list_files(pkg)? + let mut list: Vec<_> = src + .list_files(pkg)? 
.iter() .map(|file| util::without_prefix(file, root).unwrap().to_path_buf()) .collect(); @@ -175,7 +176,7 @@ fn check_repo_state( p: &Package, src_files: &[PathBuf], config: &Config, - allow_dirty: bool + allow_dirty: bool, ) -> CargoResult> { if let Ok(repo) = git2::Repository::discover(p.root()) { if let Some(workdir) = repo.workdir() { @@ -194,7 +195,8 @@ fn check_repo_state( config.shell().verbose(|shell| { shell.warn(format!( "No (git) Cargo.toml found at `{}` in workdir `{}`", - path.display(), workdir.display() + path.display(), + workdir.display() )) })?; } @@ -212,7 +214,7 @@ fn check_repo_state( p: &Package, src_files: &[PathBuf], repo: &git2::Repository, - allow_dirty: bool + allow_dirty: bool, ) -> CargoResult> { let workdir = repo.workdir().unwrap(); let dirty = src_files @@ -256,12 +258,15 @@ fn check_repo_state( fn check_vcs_file_collision(pkg: &Package, src_files: &[PathBuf]) -> CargoResult<()> { let root = pkg.root(); let vcs_info_path = Path::new(VCS_INFO_FILE); - let collision = src_files.iter().find(|&p| { - util::without_prefix(&p, root).unwrap() == vcs_info_path - }); + let collision = src_files + .iter() + .find(|&p| util::without_prefix(&p, root).unwrap() == vcs_info_path); if collision.is_some() { - bail!("Invalid inclusion of reserved file name \ - {} in package source", VCS_INFO_FILE); + bail!( + "Invalid inclusion of reserved file name \ + {} in package source", + VCS_INFO_FILE + ); } Ok(()) } @@ -271,7 +276,7 @@ fn tar( src_files: &[PathBuf], vcs_info: Option<&serde_json::Value>, dst: &File, - filename: &str + filename: &str, ) -> CargoResult<()> { // Prepare the encoder and its header let filename = Path::new(filename); @@ -325,7 +330,8 @@ fn tar( .chain_err(|| format!("failed to add to archive: `{}`", relative))?; let mut file = File::open(file) .chain_err(|| format!("failed to open for archiving: `{}`", file.display()))?; - let metadata = file.metadata() + let metadata = file + .metadata() .chain_err(|| format!("could not learn 
metadata for: `{}`", relative))?; header.set_metadata(&metadata); @@ -367,9 +373,9 @@ fn tar( fnd ); let mut header = Header::new_ustar(); - header.set_path(&path).chain_err(|| { - format!("failed to add to archive: `{}`", fnd) - })?; + header + .set_path(&path) + .chain_err(|| format!("failed to add to archive: `{}`", fnd))?; let json = format!("{}\n", serde_json::to_string_pretty(json)?); let mut header = Header::new_ustar(); header.set_path(&path)?; @@ -377,9 +383,8 @@ fn tar( header.set_mode(0o644); header.set_size(json.len() as u64); header.set_cksum(); - ar.append(&header, json.as_bytes()).chain_err(|| { - internal(format!("could not archive source file `{}`", fnd)) - })?; + ar.append(&header, json.as_bytes()) + .chain_err(|| internal(format!("could not archive source file `{}`", fnd)))?; } if include_lockfile(pkg) { @@ -412,7 +417,8 @@ fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult config.shell().status("Verifying", pkg)?; let f = GzDecoder::new(tar.file()); - let dst = tar.parent() + let dst = tar + .parent() .join(&format!("{}-{}", pkg.name(), pkg.version())); if dst.exists() { paths::remove_dir_all(&dst)?; @@ -426,7 +432,7 @@ fn run_verify(ws: &Workspace, tar: &FileLock, opts: &PackageOpts) -> CargoResult // Manufacture an ephemeral workspace to ensure that even if the top-level // package has a workspace we can still build our new crate. 
let id = SourceId::for_path(&dst)?; - let mut src = PathSource::new(&dst, &id, ws.config()); + let mut src = PathSource::new(&dst, id, ws.config()); let new_pkg = src.root_package()?; let pkg_fingerprint = src.last_modified_file(&new_pkg)?; let ws = Workspace::ephemeral(new_pkg, config, None, true)?; diff --git a/src/cargo/ops/cargo_read_manifest.rs b/src/cargo/ops/cargo_read_manifest.rs index d3f9d3e4e15..540552b89bd 100644 --- a/src/cargo/ops/cargo_read_manifest.rs +++ b/src/cargo/ops/cargo_read_manifest.rs @@ -4,14 +4,14 @@ use std::io; use std::path::{Path, PathBuf}; use core::{EitherManifest, Package, PackageId, SourceId}; -use util::{self, Config}; use util::errors::{CargoError, CargoResult}; use util::important_paths::find_project_manifest_exact; use util::toml::read_manifest; +use util::{self, Config}; pub fn read_package( path: &Path, - source_id: &SourceId, + source_id: SourceId, config: &Config, ) -> CargoResult<(Package, Vec)> { trace!( @@ -34,7 +34,7 @@ pub fn read_package( pub fn read_packages( path: &Path, - source_id: &SourceId, + source_id: SourceId, config: &Config, ) -> CargoResult> { let mut all_packages = HashMap::new(); @@ -129,7 +129,7 @@ fn has_manifest(path: &Path) -> bool { fn read_nested_packages( path: &Path, all_packages: &mut HashMap, - source_id: &SourceId, + source_id: SourceId, config: &Config, visited: &mut HashSet, errors: &mut Vec, diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs index 2993fb7060d..946e61fb6bd 100644 --- a/src/cargo/ops/registry.rs +++ b/src/cargo/ops/registry.rs @@ -5,9 +5,9 @@ use std::str; use std::time::Duration; use std::{cmp, env}; -use log::Level; -use curl::easy::{Easy, SslOpt, InfoType}; +use curl::easy::{Easy, InfoType, SslOpt}; use git2; +use log::Level; use registry::{NewCrate, NewCrateDependency, Registry}; use url::percent_encoding::{percent_encode, QUERY_ENCODE_SET}; @@ -68,7 +68,7 @@ pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> { opts.index.clone(), 
opts.registry.clone(), )?; - verify_dependencies(pkg, ®_id)?; + verify_dependencies(pkg, reg_id)?; // Prepare a tarball, with a non-surpressable warning if metadata // is missing since this is being put online. @@ -84,7 +84,8 @@ pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> { jobs: opts.jobs, registry: opts.registry.clone(), }, - )?.unwrap(); + )? + .unwrap(); // Upload said tarball to the specified destination opts.config @@ -95,14 +96,14 @@ pub fn publish(ws: &Workspace, opts: &PublishOpts) -> CargoResult<()> { pkg, tarball.file(), &mut registry, - ®_id, + reg_id, opts.dry_run, )?; Ok(()) } -fn verify_dependencies(pkg: &Package, registry_src: &SourceId) -> CargoResult<()> { +fn verify_dependencies(pkg: &Package, registry_src: SourceId) -> CargoResult<()> { for dep in pkg.dependencies().iter() { if dep.source_id().is_path() { if !dep.specified_req() { @@ -148,10 +149,11 @@ fn transmit( pkg: &Package, tarball: &File, registry: &mut Registry, - registry_id: &SourceId, + registry_id: SourceId, dry_run: bool, ) -> CargoResult<()> { - let deps = pkg.dependencies() + let deps = pkg + .dependencies() .iter() .map(|dep| { // If the dependency is from a different registry, then include the @@ -177,7 +179,8 @@ fn transmit( Kind::Normal => "normal", Kind::Build => "build", Kind::Development => "dev", - }.to_string(), + } + .to_string(), registry: dep_registry, explicit_name_in_toml: dep.explicit_name_in_toml().map(|s| s.to_string()), }) @@ -325,7 +328,7 @@ pub fn registry( let token = token.or(token_config); let sid = get_source_id(config, index_config.or(index), registry)?; let api_host = { - let mut src = RegistrySource::remote(&sid, config); + let mut src = RegistrySource::remote(sid, config); src.update() .chain_err(|| format!("failed to update {}", sid))?; (src.config()?).unwrap().api.unwrap() @@ -401,8 +404,7 @@ pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult< InfoType::HeaderOut => (">", Level::Debug), 
InfoType::DataIn => ("{", Level::Trace), InfoType::DataOut => ("}", Level::Trace), - InfoType::SslDataIn | - InfoType::SslDataOut => return, + InfoType::SslDataIn | InfoType::SslDataOut => return, _ => return, }; match str::from_utf8(data) { @@ -412,7 +414,12 @@ pub fn configure_http_handle(config: &Config, handle: &mut Easy) -> CargoResult< } } Err(_) => { - log!(level, "http-debug: {} ({} bytes of data)", prefix, data.len()); + log!( + level, + "http-debug: {} ({} bytes of data)", + prefix, + data.len() + ); } } })?; @@ -429,9 +436,11 @@ pub struct HttpTimeout { impl HttpTimeout { pub fn new(config: &Config) -> CargoResult { - let low_speed_limit = config.get::>("http.low-speed-limit")? + let low_speed_limit = config + .get::>("http.low-speed-limit")? .unwrap_or(10); - let seconds = config.get::>("http.timeout")? + let seconds = config + .get::>("http.timeout")? .or_else(|| env::var("HTTP_TIMEOUT").ok().and_then(|s| s.parse().ok())) .unwrap_or(30); Ok(HttpTimeout { @@ -623,8 +632,8 @@ fn get_source_id( (_, Some(i)) => SourceId::for_registry(&i.to_url()?), _ => { let map = SourceConfigMap::new(config)?; - let src = map.load(&SourceId::crates_io(config)?)?; - Ok(src.replaced_source_id().clone()) + let src = map.load(SourceId::crates_io(config)?)?; + Ok(src.replaced_source_id()) } } } @@ -650,7 +659,7 @@ pub fn search( let sid = get_source_id(config, index, reg)?; - let mut regsrc = RegistrySource::remote(&sid, config); + let mut regsrc = RegistrySource::remote(sid, config); let cfg = match regsrc.config() { Ok(c) => c, Err(_) => { diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index 275fbe752a4..e0a8aaf3d81 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -150,7 +150,7 @@ pub fn resolve_with_previous<'a, 'cfg>( // // TODO: This seems like a hokey reason to single out the registry as being // different - let mut to_avoid_sources: HashSet<&SourceId> = HashSet::new(); + let mut to_avoid_sources: HashSet = HashSet::new(); if let 
Some(to_avoid) = to_avoid { to_avoid_sources.extend( to_avoid @@ -161,10 +161,11 @@ pub fn resolve_with_previous<'a, 'cfg>( } let keep = |p: &&'a PackageId| { - !to_avoid_sources.contains(p.source_id()) && match to_avoid { - Some(set) => !set.contains(p), - None => true, - } + !to_avoid_sources.contains(&p.source_id()) + && match to_avoid { + Some(set) => !set.contains(p), + None => true, + } }; // In the case where a previous instance of resolve is available, we @@ -214,7 +215,7 @@ pub fn resolve_with_previous<'a, 'cfg>( } for member in ws.members() { - registry.add_sources(&[member.package_id().source_id().clone()])?; + registry.add_sources(Some(member.package_id().source_id()))?; } let mut summaries = Vec::new(); @@ -357,7 +358,7 @@ pub fn add_overrides<'a>( for (path, definition) in paths { let id = SourceId::for_path(&path)?; - let mut source = PathSource::new_recursive(&path, &id, ws.config()); + let mut source = PathSource::new_recursive(&path, id, ws.config()); source.update().chain_err(|| { format!( "failed to update path override `{}` \ @@ -401,7 +402,7 @@ fn register_previous_locks<'a>( resolve: &'a Resolve, keep: &Fn(&&'a PackageId) -> bool, ) { - let path_pkg = |id: &SourceId| { + let path_pkg = |id: SourceId| { if !id.is_path() { return None; } diff --git a/src/cargo/sources/config.rs b/src/cargo/sources/config.rs index ca52bc4d2b5..e70a2f8744b 100644 --- a/src/cargo/sources/config.rs +++ b/src/cargo/sources/config.rs @@ -11,9 +11,9 @@ use url::Url; use core::{GitReference, Source, SourceId}; use sources::{ReplacedSource, CRATES_IO_REGISTRY}; -use util::{Config, ToUrl}; use util::config::ConfigValue; use util::errors::{CargoResult, CargoResultExt}; +use util::{Config, ToUrl}; #[derive(Clone)] pub struct SourceConfigMap<'cfg> { @@ -72,9 +72,9 @@ impl<'cfg> SourceConfigMap<'cfg> { self.config } - pub fn load(&self, id: &SourceId) -> CargoResult> { + pub fn load(&self, id: SourceId) -> CargoResult> { debug!("loading: {}", id); - let mut name = match 
self.id2name.get(id) { + let mut name = match self.id2name.get(&id) { Some(name) => name, None => return Ok(id.load(self.config)?), }; @@ -98,7 +98,7 @@ impl<'cfg> SourceConfigMap<'cfg> { name = s; path = p; } - None if *id == cfg.id => return Ok(id.load(self.config)?), + None if id == cfg.id => return Ok(id.load(self.config)?), None => { new_id = cfg.id.with_precise(id.precise().map(|s| s.to_string())); break; @@ -143,11 +143,11 @@ restore the source replacement configuration to continue the build ); } - Ok(Box::new(ReplacedSource::new(id, &new_id, new_src))) + Ok(Box::new(ReplacedSource::new(id, new_id, new_src))) } fn add(&mut self, name: &str, cfg: SourceConfig) { - self.id2name.insert(cfg.id.clone(), name.to_string()); + self.id2name.insert(cfg.id, name.to_string()); self.cfgs.insert(name.to_string(), cfg); } diff --git a/src/cargo/sources/directory.rs b/src/cargo/sources/directory.rs index 0076b75d278..649ffaf4f19 100644 --- a/src/cargo/sources/directory.rs +++ b/src/cargo/sources/directory.rs @@ -8,12 +8,12 @@ use hex; use serde_json; -use core::{Dependency, Package, PackageId, Source, SourceId, Summary}; use core::source::MaybePackage; +use core::{Dependency, Package, PackageId, Source, SourceId, Summary}; use sources::PathSource; -use util::{Config, Sha256}; use util::errors::{CargoResult, CargoResultExt}; use util::paths; +use util::{Config, Sha256}; pub struct DirectorySource<'cfg> { source_id: SourceId, @@ -29,9 +29,9 @@ struct Checksum { } impl<'cfg> DirectorySource<'cfg> { - pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> DirectorySource<'cfg> { + pub fn new(path: &Path, id: SourceId, config: &'cfg Config) -> DirectorySource<'cfg> { DirectorySource { - source_id: id.clone(), + source_id: id, root: path.to_path_buf(), config, packages: HashMap::new(), @@ -71,8 +71,8 @@ impl<'cfg> Source for DirectorySource<'cfg> { true } - fn source_id(&self) -> &SourceId { - &self.source_id + fn source_id(&self) -> SourceId { + self.source_id } fn 
update(&mut self) -> CargoResult<()> { @@ -116,7 +116,7 @@ impl<'cfg> Source for DirectorySource<'cfg> { continue; } - let mut src = PathSource::new(&path, &self.source_id, self.config); + let mut src = PathSource::new(&path, self.source_id, self.config); src.update()?; let pkg = src.root_package()?; @@ -188,7 +188,7 @@ impl<'cfg> Source for DirectorySource<'cfg> { } } })() - .chain_err(|| format!("failed to calculate checksum of: {}", file.display()))?; + .chain_err(|| format!("failed to calculate checksum of: {}", file.display()))?; let actual = hex::encode(h.finish()); if &*actual != cksum { diff --git a/src/cargo/sources/git/source.rs b/src/cargo/sources/git/source.rs index 4a899598558..ccd5d9e82d1 100644 --- a/src/cargo/sources/git/source.rs +++ b/src/cargo/sources/git/source.rs @@ -2,14 +2,14 @@ use std::fmt::{self, Debug, Formatter}; use url::Url; -use core::source::{Source, SourceId, MaybePackage}; +use core::source::{MaybePackage, Source, SourceId}; use core::GitReference; use core::{Dependency, Package, PackageId, Summary}; -use util::Config; +use sources::git::utils::{GitRemote, GitRevision}; +use sources::PathSource; use util::errors::CargoResult; use util::hex::short_hash; -use sources::PathSource; -use sources::git::utils::{GitRemote, GitRevision}; +use util::Config; pub struct GitSource<'cfg> { remote: GitRemote, @@ -22,7 +22,7 @@ pub struct GitSource<'cfg> { } impl<'cfg> GitSource<'cfg> { - pub fn new(source_id: &SourceId, config: &'cfg Config) -> CargoResult> { + pub fn new(source_id: SourceId, config: &'cfg Config) -> CargoResult> { assert!(source_id.is_git(), "id is not git, id={}", source_id); let remote = GitRemote::new(source_id.url()); @@ -36,7 +36,7 @@ impl<'cfg> GitSource<'cfg> { let source = GitSource { remote, reference, - source_id: source_id.clone(), + source_id, path_source: None, rev: None, ident, @@ -60,7 +60,8 @@ impl<'cfg> GitSource<'cfg> { fn ident(url: &Url) -> CargoResult { let url = canonicalize_url(url)?; - let ident = 
url.path_segments() + let ident = url + .path_segments() .and_then(|mut s| s.next_back()) .unwrap_or(""); @@ -124,14 +125,16 @@ impl<'cfg> Debug for GitSource<'cfg> { impl<'cfg> Source for GitSource<'cfg> { fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> { - let src = self.path_source + let src = self + .path_source .as_mut() .expect("BUG: update() must be called before query()"); src.query(dep, f) } fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> { - let src = self.path_source + let src = self + .path_source .as_mut() .expect("BUG: update() must be called before query()"); src.fuzzy_query(dep, f) @@ -145,8 +148,8 @@ impl<'cfg> Source for GitSource<'cfg> { true } - fn source_id(&self) -> &SourceId { - &self.source_id + fn source_id(&self) -> SourceId { + self.source_id } fn update(&mut self) -> CargoResult<()> { @@ -190,7 +193,8 @@ impl<'cfg> Source for GitSource<'cfg> { // https://github.com/servo/servo/pull/14397 let short_id = db.to_short_id(&actual_rev).unwrap(); - let checkout_path = lock.parent() + let checkout_path = lock + .parent() .join("checkouts") .join(&self.ident) .join(short_id.as_str()); @@ -203,7 +207,7 @@ impl<'cfg> Source for GitSource<'cfg> { db.copy_to(actual_rev.clone(), &checkout_path, self.config)?; let source_id = self.source_id.with_precise(Some(actual_rev.to_string())); - let path_source = PathSource::new_recursive(&checkout_path, &source_id, self.config); + let path_source = PathSource::new_recursive(&checkout_path, source_id, self.config); self.path_source = Some(path_source); self.rev = Some(actual_rev); @@ -237,8 +241,8 @@ impl<'cfg> Source for GitSource<'cfg> { #[cfg(test)] mod test { - use url::Url; use super::ident; + use url::Url; use util::ToUrl; #[test] diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index 6115b626c20..170162359b2 100644 --- a/src/cargo/sources/path.rs +++ b/src/cargo/sources/path.rs @@ -5,15 +5,15 @@ use std::path::{Path, 
PathBuf}; use filetime::FileTime; use git2; use glob::Pattern; -use ignore::Match; use ignore::gitignore::GitignoreBuilder; +use ignore::Match; -use core::{Dependency, Package, PackageId, Source, SourceId, Summary}; use core::source::MaybePackage; +use core::{Dependency, Package, PackageId, Source, SourceId, Summary}; use ops; -use util::{self, internal, CargoResult}; use util::paths; use util::Config; +use util::{self, internal, CargoResult}; pub struct PathSource<'cfg> { source_id: SourceId, @@ -29,9 +29,9 @@ impl<'cfg> PathSource<'cfg> { /// /// This source will only return the package at precisely the `path` /// specified, and it will be an error if there's not a package at `path`. - pub fn new(path: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> { + pub fn new(path: &Path, source_id: SourceId, config: &'cfg Config) -> PathSource<'cfg> { PathSource { - source_id: id.clone(), + source_id, path: path.to_path_buf(), updated: false, packages: Vec::new(), @@ -48,7 +48,7 @@ impl<'cfg> PathSource<'cfg> { /// /// Note that this should be used with care and likely shouldn't be chosen /// by default! 
- pub fn new_recursive(root: &Path, id: &SourceId, config: &'cfg Config) -> PathSource<'cfg> { + pub fn new_recursive(root: &Path, id: SourceId, config: &'cfg Config) -> PathSource<'cfg> { PathSource { recursive: true, ..PathSource::new(root, id, config) @@ -78,10 +78,10 @@ impl<'cfg> PathSource<'cfg> { if self.updated { Ok(self.packages.clone()) } else if self.recursive { - ops::read_packages(&self.path, &self.source_id, self.config) + ops::read_packages(&self.path, self.source_id, self.config) } else { let path = self.path.join("Cargo.toml"); - let (pkg, _) = ops::read_package(&path, &self.source_id, self.config)?; + let (pkg, _) = ops::read_package(&path, self.source_id, self.config)?; Ok(vec![pkg]) } } @@ -127,13 +127,15 @@ impl<'cfg> PathSource<'cfg> { .map_err(|e| format_err!("could not parse glob pattern `{}`: {}", p, e)) }; - let glob_exclude = pkg.manifest() + let glob_exclude = pkg + .manifest() .exclude() .iter() .map(|p| glob_parse(p)) .collect::, _>>()?; - let glob_include = pkg.manifest() + let glob_include = pkg + .manifest() .include() .iter() .map(|p| glob_parse(p)) @@ -302,7 +304,8 @@ impl<'cfg> PathSource<'cfg> { ) -> CargoResult> { warn!("list_files_git {}", pkg.package_id()); let index = repo.index()?; - let root = repo.workdir() + let root = repo + .workdir() .ok_or_else(|| internal("Can't list files on a bare repository."))?; let pkg_path = pkg.root(); @@ -374,7 +377,8 @@ impl<'cfg> PathSource<'cfg> { if is_dir.unwrap_or_else(|| file_path.is_dir()) { warn!(" found submodule {}", file_path.display()); let rel = util::without_prefix(&file_path, root).unwrap(); - let rel = rel.to_str() + let rel = rel + .to_str() .ok_or_else(|| format_err!("invalid utf-8 filename: {}", rel.display()))?; // Git submodules are currently only named through `/` path // separators, explicitly not `\` which windows uses. Who knew? 
@@ -398,8 +402,8 @@ impl<'cfg> PathSource<'cfg> { #[cfg(unix)] fn join(path: &Path, data: &[u8]) -> CargoResult { - use std::os::unix::prelude::*; use std::ffi::OsStr; + use std::os::unix::prelude::*; Ok(path.join(::from_bytes(data))) } #[cfg(windows)] @@ -527,8 +531,8 @@ impl<'cfg> Source for PathSource<'cfg> { false } - fn source_id(&self) -> &SourceId { - &self.source_id + fn source_id(&self) -> SourceId { + self.source_id } fn update(&mut self) -> CargoResult<()> { diff --git a/src/cargo/sources/registry/index.rs b/src/cargo/sources/registry/index.rs index e9db7cfd10e..c2556d3f9aa 100644 --- a/src/cargo/sources/registry/index.rs +++ b/src/cargo/sources/registry/index.rs @@ -38,29 +38,29 @@ impl<'s> Iterator for UncanonicalizedIter<'s> { type Item = String; fn next(&mut self) -> Option { - if self.hyphen_combination_num > 0 && self.hyphen_combination_num.trailing_zeros() >= self.num_hyphen_underscore { + if self.hyphen_combination_num > 0 + && self.hyphen_combination_num.trailing_zeros() >= self.num_hyphen_underscore + { return None; } - let ret = Some(self.input - .chars() - .scan(0u16, |s, c| { - // the check against 15 here's to prevent - // shift overflow on inputs with more then 15 hyphens - if (c == '_' || c == '-') && *s <= 15 { - let switch = (self.hyphen_combination_num & (1u16 << *s)) > 0; - let out = if (c == '_') ^ switch { - '_' + let ret = Some( + self.input + .chars() + .scan(0u16, |s, c| { + // the check against 15 here's to prevent + // shift overflow on inputs with more then 15 hyphens + if (c == '_' || c == '-') && *s <= 15 { + let switch = (self.hyphen_combination_num & (1u16 << *s)) > 0; + let out = if (c == '_') ^ switch { '_' } else { '-' }; + *s += 1; + Some(out) } else { - '-' - }; - *s += 1; - Some(out) - } else { - Some(c) - } - }) - .collect()); + Some(c) + } + }) + .collect(), + ); self.hyphen_combination_num += 1; ret } @@ -78,14 +78,21 @@ fn no_hyphen() { fn two_hyphen() { assert_eq!( 
UncanonicalizedIter::new("te-_st").collect::>(), - vec!["te-_st".to_string(), "te__st".to_string(), "te--st".to_string(), "te_-st".to_string()] + vec![ + "te-_st".to_string(), + "te__st".to_string(), + "te--st".to_string(), + "te_-st".to_string() + ] ) } #[test] fn overflow_hyphen() { assert_eq!( - UncanonicalizedIter::new("te-_-_-_-_-_-_-_-_-st").take(100).count(), + UncanonicalizedIter::new("te-_-_-_-_-_-_-_-_-st") + .take(100) + .count(), 100 ) } @@ -101,13 +108,13 @@ pub struct RegistryIndex<'cfg> { impl<'cfg> RegistryIndex<'cfg> { pub fn new( - id: &SourceId, + source_id: SourceId, path: &Filesystem, config: &'cfg Config, locked: bool, ) -> RegistryIndex<'cfg> { RegistryIndex { - source_id: id.clone(), + source_id, path: path.clone(), cache: HashMap::new(), hashes: HashMap::new(), @@ -185,7 +192,7 @@ impl<'cfg> RegistryIndex<'cfg> { _ => format!("{}/{}/{}", &fs_name[0..2], &fs_name[2..4], fs_name), }; let mut ret = Vec::new(); - for path in UncanonicalizedIter::new(&raw_path).take(1024) { + for path in UncanonicalizedIter::new(&raw_path).take(1024) { let mut hit_closure = false; let err = load.load(&root, Path::new(&path), &mut |contents| { hit_closure = true; @@ -247,11 +254,11 @@ impl<'cfg> RegistryIndex<'cfg> { yanked, links, } = serde_json::from_str(line)?; - let pkgid = PackageId::new(&name, &vers, &self.source_id)?; + let pkgid = PackageId::new(&name, &vers, self.source_id)?; let name = pkgid.name(); let deps = deps .into_iter() - .map(|dep| dep.into_dep(&self.source_id)) + .map(|dep| dep.into_dep(self.source_id)) .collect::>>()?; let summary = Summary::new(pkgid, deps, &features, links, false)?; let summary = summary.set_checksum(cksum.clone()); @@ -268,7 +275,7 @@ impl<'cfg> RegistryIndex<'cfg> { load: &mut RegistryData, f: &mut FnMut(Summary), ) -> CargoResult<()> { - let source_id = self.source_id.clone(); + let source_id = self.source_id; let name = dep.package_name().as_str(); let summaries = self.summaries(name, load)?; let summaries = summaries 
diff --git a/src/cargo/sources/registry/mod.rs b/src/cargo/sources/registry/mod.rs index b061716f8f9..60c3d87850c 100644 --- a/src/cargo/sources/registry/mod.rs +++ b/src/cargo/sources/registry/mod.rs @@ -228,15 +228,17 @@ pub struct RegistryPackage<'a> { #[test] fn escaped_cher_in_json() { let _: RegistryPackage = serde_json::from_str( - r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{}}"# - ).unwrap(); + r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{}}"#, + ) + .unwrap(); let _: RegistryPackage = serde_json::from_str( r#"{"name":"a","vers":"0.0.1","deps":[],"cksum":"bae3","features":{"test":["k","q"]},"links":"a-sys"}"# ).unwrap(); // Now we add escaped cher all the places they can go // these are not valid, but it should error later than json parsing - let _: RegistryPackage = serde_json::from_str(r#"{ + let _: RegistryPackage = serde_json::from_str( + r#"{ "name":"This name has a escaped cher in it \n\t\" ", "vers":"0.0.1", "deps":[{ @@ -251,8 +253,9 @@ fn escaped_cher_in_json() { }], "cksum":"bae3", "features":{"test \n\t\" ":["k \n\t\" ","q \n\t\" "]}, - "links":" \n\t\" "}"# - ).unwrap(); + "links":" \n\t\" "}"#, + ) + .unwrap(); } #[derive(Deserialize)] @@ -282,7 +285,7 @@ struct RegistryDependency<'a> { impl<'a> RegistryDependency<'a> { /// Converts an encoded dependency in the registry to a cargo dependency - pub fn into_dep(self, default: &SourceId) -> CargoResult { + pub fn into_dep(self, default: SourceId) -> CargoResult { let RegistryDependency { name, req, @@ -298,15 +301,11 @@ impl<'a> RegistryDependency<'a> { let id = if let Some(registry) = registry { SourceId::for_registry(®istry.to_url()?)? 
} else { - default.clone() + default }; - - let mut dep = Dependency::parse_no_deprecated( - package.as_ref().unwrap_or(&name), - Some(&req), - &id, - )?; + let mut dep = + Dependency::parse_no_deprecated(package.as_ref().unwrap_or(&name), Some(&req), id)?; if package.is_some() { dep.set_explicit_name_in_toml(&name); } @@ -350,8 +349,12 @@ pub trait RegistryData { fn config(&mut self) -> CargoResult>; fn update_index(&mut self) -> CargoResult<()>; fn download(&mut self, pkg: &PackageId, checksum: &str) -> CargoResult; - fn finish_download(&mut self, pkg: &PackageId, checksum: &str, data: &[u8]) - -> CargoResult; + fn finish_download( + &mut self, + pkg: &PackageId, + checksum: &str, + data: &[u8], + ) -> CargoResult; fn is_crate_downloaded(&self, _pkg: &PackageId) -> bool { true @@ -360,34 +363,34 @@ pub trait RegistryData { pub enum MaybeLock { Ready(FileLock), - Download { url: String, descriptor: String } + Download { url: String, descriptor: String }, } mod index; mod local; mod remote; -fn short_name(id: &SourceId) -> String { - let hash = hex::short_hash(id); +fn short_name(id: SourceId) -> String { + let hash = hex::short_hash(&id); let ident = id.url().host_str().unwrap_or("").to_string(); format!("{}-{}", ident, hash) } impl<'cfg> RegistrySource<'cfg> { - pub fn remote(source_id: &SourceId, config: &'cfg Config) -> RegistrySource<'cfg> { + pub fn remote(source_id: SourceId, config: &'cfg Config) -> RegistrySource<'cfg> { let name = short_name(source_id); let ops = remote::RemoteRegistry::new(source_id, config, &name); RegistrySource::new(source_id, config, &name, Box::new(ops), true) } - pub fn local(source_id: &SourceId, path: &Path, config: &'cfg Config) -> RegistrySource<'cfg> { + pub fn local(source_id: SourceId, path: &Path, config: &'cfg Config) -> RegistrySource<'cfg> { let name = short_name(source_id); let ops = local::LocalRegistry::new(path, config, &name); RegistrySource::new(source_id, config, &name, Box::new(ops), false) } fn new( - source_id: 
&SourceId, + source_id: SourceId, config: &'cfg Config, name: &str, ops: Box, @@ -396,7 +399,7 @@ impl<'cfg> RegistrySource<'cfg> { RegistrySource { src_path: config.registry_source_path().join(name), config, - source_id: source_id.clone(), + source_id, updated: false, index: index::RegistryIndex::new(source_id, ops.index_path(), config, index_locked), index_locked, @@ -468,7 +471,7 @@ impl<'cfg> RegistrySource<'cfg> { self.ops.update_index()?; let path = self.ops.index_path(); self.index = - index::RegistryIndex::new(&self.source_id, path, self.config, self.index_locked); + index::RegistryIndex::new(self.source_id, path, self.config, self.index_locked); Ok(()) } @@ -476,7 +479,7 @@ impl<'cfg> RegistrySource<'cfg> { let path = self .unpack_package(package, &path) .chain_err(|| internal(format!("failed to unpack package `{}`", package)))?; - let mut src = PathSource::new(&path, &self.source_id, self.config); + let mut src = PathSource::new(&path, self.source_id, self.config); src.update()?; let pkg = match src.download(package)? { MaybePackage::Ready(pkg) => pkg, @@ -543,8 +546,8 @@ impl<'cfg> Source for RegistrySource<'cfg> { false } - fn source_id(&self) -> &SourceId { - &self.source_id + fn source_id(&self) -> SourceId { + self.source_id } fn update(&mut self) -> CargoResult<()> { @@ -566,18 +569,14 @@ impl<'cfg> Source for RegistrySource<'cfg> { fn download(&mut self, package: &PackageId) -> CargoResult { let hash = self.index.hash(package, &mut *self.ops)?; match self.ops.download(package, &hash)? 
{ - MaybeLock::Ready(file) => { - self.get_pkg(package, file).map(MaybePackage::Ready) - } + MaybeLock::Ready(file) => self.get_pkg(package, file).map(MaybePackage::Ready), MaybeLock::Download { url, descriptor } => { Ok(MaybePackage::Download { url, descriptor }) } } } - fn finish_download(&mut self, package: &PackageId, data: Vec) - -> CargoResult - { + fn finish_download(&mut self, package: &PackageId, data: Vec) -> CargoResult { let hash = self.index.hash(package, &mut *self.ops)?; let file = self.ops.finish_download(package, &hash, &data)?; self.get_pkg(package, file) diff --git a/src/cargo/sources/registry/remote.rs b/src/cargo/sources/registry/remote.rs index 854206d0f92..9c1bf4557a4 100644 --- a/src/cargo/sources/registry/remote.rs +++ b/src/cargo/sources/registry/remote.rs @@ -1,23 +1,25 @@ use std::cell::{Cell, Ref, RefCell}; use std::fmt::Write as FmtWrite; -use std::io::SeekFrom; use std::io::prelude::*; +use std::io::SeekFrom; use std::mem; use std::path::Path; use std::str; use git2; use hex; -use serde_json; use lazycell::LazyCell; +use serde_json; use core::{PackageId, SourceId}; use sources::git; -use sources::registry::{RegistryConfig, RegistryData, CRATE_TEMPLATE, INDEX_LOCK, VERSION_TEMPLATE}; use sources::registry::MaybeLock; -use util::{FileLock, Filesystem}; -use util::{Config, Sha256}; +use sources::registry::{ + RegistryConfig, RegistryData, CRATE_TEMPLATE, INDEX_LOCK, VERSION_TEMPLATE, +}; use util::errors::{CargoResult, CargoResultExt}; +use util::{Config, Sha256}; +use util::{FileLock, Filesystem}; pub struct RemoteRegistry<'cfg> { index_path: Filesystem, @@ -30,11 +32,11 @@ pub struct RemoteRegistry<'cfg> { } impl<'cfg> RemoteRegistry<'cfg> { - pub fn new(source_id: &SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> { + pub fn new(source_id: SourceId, config: &'cfg Config, name: &str) -> RemoteRegistry<'cfg> { RemoteRegistry { index_path: config.registry_index_path().join(name), cache_path: 
config.registry_cache_path().join(name), - source_id: source_id.clone(), + source_id, config, tree: RefCell::new(None), repo: LazyCell::new(), @@ -54,9 +56,11 @@ impl<'cfg> RemoteRegistry<'cfg> { // Ok, now we need to lock and try the whole thing over again. trace!("acquiring registry index lock"); - let lock = - self.index_path - .open_rw(Path::new(INDEX_LOCK), self.config, "the registry index")?; + let lock = self.index_path.open_rw( + Path::new(INDEX_LOCK), + self.config, + "the registry index", + )?; match git2::Repository::open(&path) { Ok(repo) => Ok(repo), Err(_) => { @@ -79,9 +83,8 @@ impl<'cfg> RemoteRegistry<'cfg> { // things that we don't want. let mut opts = git2::RepositoryInitOptions::new(); opts.external_template(false); - Ok(git2::Repository::init_opts(&path, &opts).chain_err(|| { - "failed to initialized index git repository" - })?) + Ok(git2::Repository::init_opts(&path, &opts) + .chain_err(|| "failed to initialized index git repository")?) } } }) @@ -231,15 +234,22 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> { if !url.contains(CRATE_TEMPLATE) && !url.contains(VERSION_TEMPLATE) { write!(url, "/{}/{}/download", CRATE_TEMPLATE, VERSION_TEMPLATE).unwrap(); } - let url = url.replace(CRATE_TEMPLATE, &*pkg.name()) + let url = url + .replace(CRATE_TEMPLATE, &*pkg.name()) .replace(VERSION_TEMPLATE, &pkg.version().to_string()); - Ok(MaybeLock::Download { url, descriptor: pkg.to_string() }) + Ok(MaybeLock::Download { + url, + descriptor: pkg.to_string(), + }) } - fn finish_download(&mut self, pkg: &PackageId, checksum: &str, data: &[u8]) - -> CargoResult - { + fn finish_download( + &mut self, + pkg: &PackageId, + checksum: &str, + data: &[u8], + ) -> CargoResult { // Verify what we just downloaded let mut state = Sha256::new(); state.update(data); diff --git a/src/cargo/sources/replaced.rs b/src/cargo/sources/replaced.rs index e413de2d17b..006e514de0f 100644 --- a/src/cargo/sources/replaced.rs +++ b/src/cargo/sources/replaced.rs @@ -1,5 +1,5 @@ -use 
core::{Dependency, Package, PackageId, Source, SourceId, Summary}; use core::source::MaybePackage; +use core::{Dependency, Package, PackageId, Source, SourceId, Summary}; use util::errors::{CargoResult, CargoResultExt}; pub struct ReplacedSource<'cfg> { @@ -10,43 +10,25 @@ pub struct ReplacedSource<'cfg> { impl<'cfg> ReplacedSource<'cfg> { pub fn new( - to_replace: &SourceId, - replace_with: &SourceId, + to_replace: SourceId, + replace_with: SourceId, src: Box, ) -> ReplacedSource<'cfg> { ReplacedSource { - to_replace: to_replace.clone(), - replace_with: replace_with.clone(), + to_replace, + replace_with, inner: src, } } } impl<'cfg> Source for ReplacedSource<'cfg> { - fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> { - let (replace_with, to_replace) = (&self.replace_with, &self.to_replace); - let dep = dep.clone().map_source(to_replace, replace_with); - - self.inner - .query( - &dep, - &mut |summary| f(summary.map_source(replace_with, to_replace)), - ) - .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?; - Ok(()) + fn source_id(&self) -> SourceId { + self.to_replace } - fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> { - let (replace_with, to_replace) = (&self.replace_with, &self.to_replace); - let dep = dep.clone().map_source(to_replace, replace_with); - - self.inner - .fuzzy_query( - &dep, - &mut |summary| f(summary.map_source(replace_with, to_replace)), - ) - .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?; - Ok(()) + fn replaced_source_id(&self) -> SourceId { + self.replace_with } fn supports_checksums(&self) -> bool { @@ -57,12 +39,28 @@ impl<'cfg> Source for ReplacedSource<'cfg> { self.inner.requires_precise() } - fn source_id(&self) -> &SourceId { - &self.to_replace + fn query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> { + let (replace_with, to_replace) = (self.replace_with, self.to_replace); + let 
dep = dep.clone().map_source(to_replace, replace_with); + + self.inner + .query(&dep, &mut |summary| { + f(summary.map_source(replace_with, to_replace)) + }) + .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?; + Ok(()) } - fn replaced_source_id(&self) -> &SourceId { - &self.replace_with + fn fuzzy_query(&mut self, dep: &Dependency, f: &mut FnMut(Summary)) -> CargoResult<()> { + let (replace_with, to_replace) = (self.replace_with, self.to_replace); + let dep = dep.clone().map_source(to_replace, replace_with); + + self.inner + .fuzzy_query(&dep, &mut |summary| { + f(summary.map_source(replace_with, to_replace)) + }) + .chain_err(|| format!("failed to query replaced source {}", self.to_replace))?; + Ok(()) } fn update(&mut self) -> CargoResult<()> { @@ -73,26 +71,26 @@ impl<'cfg> Source for ReplacedSource<'cfg> { } fn download(&mut self, id: &PackageId) -> CargoResult { - let id = id.with_source_id(&self.replace_with); - let pkg = self.inner + let id = id.with_source_id(self.replace_with); + let pkg = self + .inner .download(&id) .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?; Ok(match pkg { MaybePackage::Ready(pkg) => { - MaybePackage::Ready(pkg.map_source(&self.replace_with, &self.to_replace)) + MaybePackage::Ready(pkg.map_source(self.replace_with, self.to_replace)) } other @ MaybePackage::Download { .. 
} => other, }) } - fn finish_download(&mut self, id: &PackageId, data: Vec) - -> CargoResult - { - let id = id.with_source_id(&self.replace_with); - let pkg = self.inner + fn finish_download(&mut self, id: &PackageId, data: Vec) -> CargoResult { + let id = id.with_source_id(self.replace_with); + let pkg = self + .inner .finish_download(&id, data) .chain_err(|| format!("failed to download replaced source {}", self.to_replace))?; - Ok(pkg.map_source(&self.replace_with, &self.to_replace)) + Ok(pkg.map_source(self.replace_with, self.to_replace)) } fn fingerprint(&self, id: &Package) -> CargoResult { @@ -100,12 +98,16 @@ impl<'cfg> Source for ReplacedSource<'cfg> { } fn verify(&self, id: &PackageId) -> CargoResult<()> { - let id = id.with_source_id(&self.replace_with); + let id = id.with_source_id(self.replace_with); self.inner.verify(&id) } fn describe(&self) -> String { - format!("{} (which is replacing {})", self.inner.describe(), self.to_replace) + format!( + "{} (which is replacing {})", + self.inner.describe(), + self.to_replace + ) } fn is_replaced(&self) -> bool { diff --git a/src/cargo/util/toml/mod.rs b/src/cargo/util/toml/mod.rs index 514dbbbc7d7..d85219cc883 100644 --- a/src/cargo/util/toml/mod.rs +++ b/src/cargo/util/toml/mod.rs @@ -28,7 +28,7 @@ use self::targets::targets; pub fn read_manifest( path: &Path, - source_id: &SourceId, + source_id: SourceId, config: &Config, ) -> Result<(EitherManifest, Vec), ManifestError> { trace!( @@ -46,7 +46,7 @@ pub fn read_manifest( fn do_read_manifest( contents: &str, manifest_file: &Path, - source_id: &SourceId, + source_id: SourceId, config: &Config, ) -> CargoResult<(EitherManifest, Vec)> { let package_root = manifest_file.parent().unwrap(); @@ -517,7 +517,6 @@ impl<'de> de::Deserialize<'de> for StringOrVec { { let seq = de::value::SeqAccessDeserializer::new(v); Vec::deserialize(seq).map(StringOrVec) - } } @@ -661,7 +660,7 @@ pub struct TomlWorkspace { } impl TomlProject { - pub fn to_package_id(&self, source_id: 
&SourceId) -> CargoResult { + pub fn to_package_id(&self, source_id: SourceId) -> CargoResult { PackageId::new(&self.name, self.version.clone(), source_id) } } @@ -669,7 +668,7 @@ impl TomlProject { struct Context<'a, 'b> { pkgid: Option<&'a PackageId>, deps: &'a mut Vec, - source_id: &'a SourceId, + source_id: SourceId, nested_paths: &'a mut Vec, config: &'b Config, warnings: &'a mut Vec, @@ -789,7 +788,7 @@ impl TomlManifest { fn to_real_manifest( me: &Rc, - source_id: &SourceId, + source_id: SourceId, package_root: &Path, config: &Config, ) -> CargoResult<(Manifest, Vec)> { @@ -817,7 +816,11 @@ impl TomlManifest { if c == '_' || c == '-' { continue; } - bail!("Invalid character `{}` in package name: `{}`", c, package_name) + bail!( + "Invalid character `{}` in package name: `{}`", + c, + package_name + ) } let pkgid = project.to_package_id(source_id)?; @@ -1061,7 +1064,7 @@ impl TomlManifest { fn to_virtual_manifest( me: &Rc, - source_id: &SourceId, + source_id: SourceId, root: &Path, config: &Config, ) -> CargoResult<(VirtualManifest, Vec)> { @@ -1258,7 +1261,8 @@ impl TomlDependency { TomlDependency::Simple(ref version) => DetailedTomlDependency { version: Some(version.clone()), ..Default::default() - }.to_dependency(name, cx, kind), + } + .to_dependency(name, cx, kind), TomlDependency::Detailed(ref details) => details.to_dependency(name, cx, kind), } } @@ -1376,7 +1380,7 @@ impl DetailedTomlDependency { let path = util::normalize_path(&path); SourceId::for_path(&path)? 
} else { - cx.source_id.clone() + cx.source_id } } (None, None, Some(registry), None) => SourceId::alt_registry(cx.config, registry)?, @@ -1394,8 +1398,8 @@ impl DetailedTomlDependency { let version = self.version.as_ref().map(|v| &v[..]); let mut dep = match cx.pkgid { - Some(id) => Dependency::parse(pkg_name, version, &new_source_id, id, cx.config)?, - None => Dependency::parse_no_deprecated(pkg_name, version, &new_source_id)?, + Some(id) => Dependency::parse(pkg_name, version, new_source_id, id, cx.config)?, + None => Dependency::parse_no_deprecated(pkg_name, version, new_source_id)?, }; dep.set_features(self.features.iter().flat_map(|x| x)) .set_default_features( @@ -1405,7 +1409,7 @@ impl DetailedTomlDependency { ) .set_optional(self.optional.unwrap_or(false)) .set_platform(cx.platform.clone()) - .set_registry_id(®istry_id); + .set_registry_id(registry_id); if let Some(kind) = kind { dep.set_kind(kind); } diff --git a/tests/testsuite/search.rs b/tests/testsuite/search.rs index 714913acf7c..bd59b39e78c 100644 --- a/tests/testsuite/search.rs +++ b/tests/testsuite/search.rs @@ -67,7 +67,8 @@ fn setup() { .file( "config.json", &format!(r#"{{"dl":"{0}","api":"{0}"}}"#, api()), - ).build(); + ) + .build(); let base = api_path().join("api/v1/crates"); write_crates(&base); @@ -89,8 +90,10 @@ replace-with = 'dummy-registry' registry = '{reg}' "#, reg = registry_url(), - ).as_bytes(), - ).unwrap(); + ) + .as_bytes(), + ) + .unwrap(); } #[test] @@ -104,7 +107,7 @@ fn not_update() { let sid = SourceId::for_registry(®istry_url()).unwrap(); let cfg = Config::new(Shell::new(), paths::root(), paths::home().join(".cargo")); - let mut regsrc = RegistrySource::remote(&sid, &cfg); + let mut regsrc = RegistrySource::remote(sid, &cfg); regsrc.update().unwrap(); cargo_process("search postgres") @@ -142,9 +145,10 @@ fn simple() { fn simple_with_host() { setup(); - cargo_process("search postgres --host").arg(registry_url().to_string()) - .with_stderr( - "\ + cargo_process("search 
postgres --host") + .arg(registry_url().to_string()) + .with_stderr( + "\ [WARNING] The flag '--host' is no longer valid. Previous versions of Cargo accepted this flag, but it is being @@ -156,10 +160,8 @@ to update to a fixed version or contact the upstream maintainer about this warning. [UPDATING] `[CWD]/registry` index ", - ) - .with_stdout_contains( - "hoare = \"0.1.1\" # Design by contract style assertions for Rust", - ) + ) + .with_stdout_contains("hoare = \"0.1.1\" # Design by contract style assertions for Rust") .run(); } @@ -169,9 +171,12 @@ about this warning. fn simple_with_index_and_host() { setup(); - cargo_process("search postgres --index").arg(registry_url().to_string()).arg("--host").arg(registry_url().to_string()) - .with_stderr( - "\ + cargo_process("search postgres --index") + .arg(registry_url().to_string()) + .arg("--host") + .arg(registry_url().to_string()) + .with_stderr( + "\ [WARNING] The flag '--host' is no longer valid. Previous versions of Cargo accepted this flag, but it is being @@ -183,10 +188,8 @@ to update to a fixed version or contact the upstream maintainer about this warning. 
[UPDATING] `[CWD]/registry` index ", - ) - .with_stdout_contains( - "hoare = \"0.1.1\" # Design by contract style assertions for Rust", - ) + ) + .with_stdout_contains("hoare = \"0.1.1\" # Design by contract style assertions for Rust") .run(); } diff --git a/tests/testsuite/support/resolver.rs b/tests/testsuite/support/resolver.rs index fd2a8d6f0ce..a1544e30441 100644 --- a/tests/testsuite/support/resolver.rs +++ b/tests/testsuite/support/resolver.rs @@ -87,11 +87,11 @@ pub fn resolve_with_config_raw( Ok(()) } - fn describe_source(&self, _src: &SourceId) -> String { + fn describe_source(&self, _src: SourceId) -> String { String::new() } - fn is_replaced(&self, _src: &SourceId) -> bool { + fn is_replaced(&self, _src: SourceId) -> bool { false } } @@ -127,7 +127,7 @@ pub trait ToDep { impl ToDep for &'static str { fn to_dep(self) -> Dependency { - Dependency::parse_no_deprecated(self, Some("1.0.0"), ®istry_loc()).unwrap() + Dependency::parse_no_deprecated(self, Some("1.0.0"), registry_loc()).unwrap() } } @@ -149,14 +149,14 @@ impl ToPkgId for PackageId { impl<'a> ToPkgId for &'a str { fn to_pkgid(&self) -> PackageId { - PackageId::new(*self, "1.0.0", ®istry_loc()).unwrap() + PackageId::new(*self, "1.0.0", registry_loc()).unwrap() } } impl, U: AsRef> ToPkgId for (T, U) { fn to_pkgid(&self) -> PackageId { let (name, vers) = self; - PackageId::new(name.as_ref(), vers.as_ref(), ®istry_loc()).unwrap() + PackageId::new(name.as_ref(), vers.as_ref(), registry_loc()).unwrap() } } @@ -176,7 +176,7 @@ fn registry_loc() -> SourceId { static ref EXAMPLE_DOT_COM: SourceId = SourceId::for_registry(&"http://example.com".to_url().unwrap()).unwrap(); } - EXAMPLE_DOT_COM.clone() + *EXAMPLE_DOT_COM } pub fn pkg(name: T) -> Summary { @@ -201,7 +201,7 @@ pub fn pkg_dep(name: T, dep: Vec) -> Summary { } pub fn pkg_id(name: &str) -> PackageId { - PackageId::new(name, "1.0.0", ®istry_loc()).unwrap() + PackageId::new(name, "1.0.0", registry_loc()).unwrap() } fn pkg_id_loc(name: &str, loc: 
&str) -> PackageId { @@ -209,7 +209,7 @@ fn pkg_id_loc(name: &str, loc: &str) -> PackageId { let master = GitReference::Branch("master".to_string()); let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap(); - PackageId::new(name, "1.0.0", &source_id).unwrap() + PackageId::new(name, "1.0.0", source_id).unwrap() } pub fn pkg_loc(name: &str, loc: &str) -> Summary { @@ -232,7 +232,7 @@ pub fn dep(name: &str) -> Dependency { dep_req(name, "*") } pub fn dep_req(name: &str, req: &str) -> Dependency { - Dependency::parse_no_deprecated(name, Some(req), ®istry_loc()).unwrap() + Dependency::parse_no_deprecated(name, Some(req), registry_loc()).unwrap() } pub fn dep_req_kind(name: &str, req: &str, kind: Kind) -> Dependency { let mut dep = dep_req(name, req); @@ -244,7 +244,7 @@ pub fn dep_loc(name: &str, location: &str) -> Dependency { let url = location.to_url().unwrap(); let master = GitReference::Branch("master".to_string()); let source_id = SourceId::for_git(&url, master).unwrap(); - Dependency::parse_no_deprecated(name, Some("1.0.0"), &source_id).unwrap() + Dependency::parse_no_deprecated(name, Some("1.0.0"), source_id).unwrap() } pub fn dep_kind(name: &str, kind: Kind) -> Dependency { dep(name).set_kind(kind).clone() @@ -281,9 +281,7 @@ impl fmt::Debug for PrettyPrintRegistry { } else { write!(f, "pkg!((\"{}\", \"{}\") => [", s.name(), s.version())?; for d in s.dependencies() { - if d.kind() == Kind::Normal - && &d.version_req().to_string() == "*" - { + if d.kind() == Kind::Normal && &d.version_req().to_string() == "*" { write!(f, "dep(\"{}\"),", d.name_in_toml())?; } else if d.kind() == Kind::Normal { write!( From 7cd4a94db8168848b2a4d7c25fb47cf788573380 Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Sun, 25 Nov 2018 12:43:45 -0500 Subject: [PATCH 2/4] Other clippy things and fmt --- src/cargo/core/package.rs | 15 ++++--- src/cargo/core/profiles.rs | 2 +- src/cargo/core/resolver/encode.rs | 2 +- src/cargo/core/resolver/resolve.rs | 4 +- 
src/cargo/ops/cargo_install.rs | 8 ++-- src/cargo/ops/cargo_new.rs | 31 +++++++------ src/cargo/ops/lockfile.rs | 14 +++--- src/cargo/sources/registry/local.rs | 17 ++++--- src/cargo/sources/registry/mod.rs | 8 ++-- src/cargo/util/config.rs | 26 +++++------ src/cargo/util/toml/targets.rs | 70 ++++++++++++++--------------- 11 files changed, 101 insertions(+), 96 deletions(-) diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index 9ba5affad63..0041c449bef 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -694,7 +694,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { self.updated_at.set(now); self.next_speed_check.set(now + self.timeout.dur); self.next_speed_check_bytes_threshold - .set(self.timeout.low_speed_limit as u64); + .set(u64::from(self.timeout.low_speed_limit)); dl.timed_out.set(None); dl.current.set(0); dl.total.set(0); @@ -741,8 +741,12 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { if let Some(pair) = results.pop() { break Ok(pair); } - assert!(self.pending.len() > 0); - let timeout = self.set.multi.get_timeout()?.unwrap_or(Duration::new(5, 0)); + assert!(!self.pending.is_empty()); + let timeout = self + .set + .multi + .get_timeout()? 
+ .unwrap_or_else(|| Duration::new(5, 0)); self.set .multi .wait(&mut [], timeout) @@ -764,12 +768,12 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { if delta >= threshold { self.next_speed_check.set(now + self.timeout.dur); self.next_speed_check_bytes_threshold - .set(self.timeout.low_speed_limit as u64); + .set(u64::from(self.timeout.low_speed_limit)); } else { self.next_speed_check_bytes_threshold.set(threshold - delta); } } - if !self.tick(WhyTick::DownloadUpdate).is_ok() { + if self.tick(WhyTick::DownloadUpdate).is_err() { return false; } @@ -841,6 +845,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { } } +#[derive(Copy, Clone)] enum WhyTick<'a> { DownloadStarted, DownloadUpdate, diff --git a/src/cargo/core/profiles.rs b/src/cargo/core/profiles.rs index 669fed83955..beeabac6f01 100644 --- a/src/cargo/core/profiles.rs +++ b/src/cargo/core/profiles.rs @@ -596,7 +596,7 @@ impl UnitFor { /// Returns true if this unit is for a custom build script or one of its /// dependencies. - pub fn is_custom_build(&self) -> bool { + pub fn is_custom_build(self) -> bool { self.custom_build } diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index 25d55d7134f..c4e682da3d5 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -338,7 +338,7 @@ impl<'a, 'cfg> ser::Serialize for WorkspaceResolve<'a, 'cfg> { let encodable = ids .iter() - .filter_map(|&id| Some(encodable_resolve_node(id, self.resolve))) + .map(|&id| encodable_resolve_node(id, self.resolve)) .collect::>(); let mut metadata = self.resolve.metadata().clone(); diff --git a/src/cargo/core/resolver/resolve.rs b/src/cargo/core/resolver/resolve.rs index 14e37c54447..9b77065f9e5 100644 --- a/src/cargo/core/resolver/resolve.rs +++ b/src/cargo/core/resolver/resolve.rs @@ -239,9 +239,9 @@ unable to verify that `{0}` is the same as when the lockfile was generated let mut names = deps.iter().map(|d| { d.explicit_name_in_toml() .map(|s| s.as_str().replace("-", "_")) - 
.unwrap_or(crate_name.clone()) + .unwrap_or_else(|| crate_name.clone()) }); - let name = names.next().unwrap_or(crate_name.clone()); + let name = names.next().unwrap_or_else(|| crate_name.clone()); for n in names { if n == name { continue; diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index ec0ca5f387c..0b0dd026863 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -726,7 +726,7 @@ pub fn uninstall( let scheduled_error = if specs.len() == 1 { uninstall_one(&root, specs[0], bins, config)?; false - } else if specs.len() == 0 { + } else if specs.is_empty() { uninstall_cwd(&root, bins, config)?; false } else { @@ -780,7 +780,7 @@ pub fn uninstall_one( let crate_metadata = metadata(config, root)?; let metadata = read_crate_list(&crate_metadata)?; let pkgid = PackageIdSpec::query_str(spec, metadata.v1.keys())?.clone(); - uninstall_pkgid(crate_metadata, metadata, &pkgid, bins, config) + uninstall_pkgid(&crate_metadata, metadata, &pkgid, bins, config) } fn uninstall_cwd(root: &Filesystem, bins: &[String], config: &Config) -> CargoResult<()> { @@ -792,11 +792,11 @@ fn uninstall_cwd(root: &Filesystem, bins: &[String], config: &Config) -> CargoRe path.read_packages() })?; let pkgid = pkg.package_id(); - uninstall_pkgid(crate_metadata, metadata, pkgid, bins, config) + uninstall_pkgid(&crate_metadata, metadata, pkgid, bins, config) } fn uninstall_pkgid( - crate_metadata: FileLock, + crate_metadata: &FileLock, mut metadata: CrateListingV1, pkgid: &PackageId, bins: &[String], diff --git a/src/cargo/ops/cargo_new.rs b/src/cargo/ops/cargo_new.rs index 4be3af40a05..abcc0f38010 100644 --- a/src/cargo/ops/cargo_new.rs +++ b/src/cargo/ops/cargo_new.rs @@ -1,16 +1,16 @@ use std::collections::BTreeMap; use std::env; -use std::fs; use std::fmt; +use std::fs; use std::path::{Path, PathBuf}; use git2::Config as GitConfig; use git2::Repository as GitRepository; use core::{compiler, Workspace}; -use util::{internal, FossilRepo, 
GitRepo, HgRepo, PijulRepo, existing_vcs_repo}; -use util::{paths, Config}; use util::errors::{CargoResult, CargoResultExt}; +use util::{existing_vcs_repo, internal, FossilRepo, GitRepo, HgRepo, PijulRepo}; +use util::{paths, Config}; use toml; @@ -51,7 +51,8 @@ impl fmt::Display for NewProjectKind { match *self { NewProjectKind::Bin => "binary (application)", NewProjectKind::Lib => "library", - }.fmt(f) + } + .fmt(f) } } @@ -430,7 +431,8 @@ fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> { "/target\n", "**/*.rs.bk\n", if !opts.bin { "Cargo.lock\n" } else { "" }, - ].concat(); + ] + .concat(); // Mercurial glob ignores can't be rooted, so just sticking a 'syntax: glob' at the top of the // file will exclude too much. Instead, use regexp-based ignores. See 'hg help ignore' for // more. @@ -438,7 +440,8 @@ fn mk(config: &Config, opts: &MkOptions) -> CargoResult<()> { "^target/\n", "glob:*.rs.bk\n", if !opts.bin { "glob:Cargo.lock\n" } else { "" }, - ].concat(); + ] + .concat(); let vcs = opts.version_control.unwrap_or_else(|| { let in_existing_vcs = existing_vcs_repo(path.parent().unwrap_or(path), config.cwd()); @@ -553,15 +556,15 @@ edition = {} None => toml::Value::String("2018".to_string()), }, match opts.registry { - Some(registry) => { - format!("publish = {}\n", - toml::Value::Array(vec!(toml::Value::String(registry.to_string()))) - ) - } + Some(registry) => format!( + "publish = {}\n", + toml::Value::Array(vec!(toml::Value::String(registry.to_string()))) + ), None => "".to_string(), - }, + }, cargotoml_path_specifier - ).as_bytes(), + ) + .as_bytes(), )?; // Create all specified source files @@ -665,7 +668,7 @@ fn discover_author() -> CargoResult<(String, Option)> { // In some cases emails will already have <> remove them since they // are already added when needed. 
- if s.starts_with("<") && s.ends_with(">") { + if s.starts_with('<') && s.ends_with('>') { s = &s[1..s.len() - 1]; } diff --git a/src/cargo/ops/lockfile.rs b/src/cargo/ops/lockfile.rs index 5547a75059c..92ac5e1e393 100644 --- a/src/cargo/ops/lockfile.rs +++ b/src/cargo/ops/lockfile.rs @@ -20,12 +20,12 @@ pub fn load_pkg_lockfile(ws: &Workspace) -> CargoResult> { f.read_to_string(&mut s) .chain_err(|| format!("failed to read file: {}", f.path().display()))?; - let resolve = - (|| -> CargoResult> { - let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?; - let v: resolver::EncodableResolve = resolve.try_into()?; - Ok(Some(v.into_resolve(ws)?)) - })().chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?; + let resolve = (|| -> CargoResult> { + let resolve: toml::Value = cargo_toml::parse(&s, f.path(), ws.config())?; + let v: resolver::EncodableResolve = resolve.try_into()?; + Ok(Some(v.into_resolve(ws)?)) + })() + .chain_err(|| format!("failed to parse lock file at: {}", f.path().display()))?; Ok(resolve) } @@ -47,7 +47,7 @@ pub fn write_pkg_lockfile(ws: &Workspace, resolve: &Resolve) -> CargoResult<()> // This is in preparation for marking it as generated // https://github.com/rust-lang/cargo/issues/6180 if let Ok(orig) = &orig { - for line in orig.lines().take_while(|line| line.starts_with("#")) { + for line in orig.lines().take_while(|line| line.starts_with('#')) { out.push_str(line); out.push_str("\n"); } diff --git a/src/cargo/sources/registry/local.rs b/src/cargo/sources/registry/local.rs index 023e955b857..82c95052ee8 100644 --- a/src/cargo/sources/registry/local.rs +++ b/src/cargo/sources/registry/local.rs @@ -1,13 +1,13 @@ -use std::io::SeekFrom; use std::io::prelude::*; +use std::io::SeekFrom; use std::path::Path; use core::PackageId; use hex; -use sources::registry::{RegistryConfig, RegistryData, MaybeLock}; -use util::paths; -use util::{Config, Filesystem, Sha256, FileLock}; +use sources::registry::{MaybeLock, 
RegistryConfig, RegistryData}; use util::errors::{CargoResult, CargoResultExt}; +use util::paths; +use util::{Config, FileLock, Filesystem, Sha256}; pub struct LocalRegistry<'cfg> { index_path: Filesystem, @@ -104,9 +104,12 @@ impl<'cfg> RegistryData for LocalRegistry<'cfg> { Ok(MaybeLock::Ready(crate_file)) } - fn finish_download(&mut self, _pkg: &PackageId, _checksum: &str, _data: &[u8]) - -> CargoResult - { + fn finish_download( + &mut self, + _pkg: &PackageId, + _checksum: &str, + _data: &[u8], + ) -> CargoResult { panic!("this source doesn't download") } } diff --git a/src/cargo/sources/registry/mod.rs b/src/cargo/sources/registry/mod.rs index 60c3d87850c..7aa2fbf7dfd 100644 --- a/src/cargo/sources/registry/mod.rs +++ b/src/cargo/sources/registry/mod.rs @@ -475,9 +475,9 @@ impl<'cfg> RegistrySource<'cfg> { Ok(()) } - fn get_pkg(&mut self, package: &PackageId, path: FileLock) -> CargoResult { + fn get_pkg(&mut self, package: &PackageId, path: &FileLock) -> CargoResult { let path = self - .unpack_package(package, &path) + .unpack_package(package, path) .chain_err(|| internal(format!("failed to unpack package `{}`", package)))?; let mut src = PathSource::new(&path, self.source_id, self.config); src.update()?; @@ -569,7 +569,7 @@ impl<'cfg> Source for RegistrySource<'cfg> { fn download(&mut self, package: &PackageId) -> CargoResult { let hash = self.index.hash(package, &mut *self.ops)?; match self.ops.download(package, &hash)? 
{ - MaybeLock::Ready(file) => self.get_pkg(package, file).map(MaybePackage::Ready), + MaybeLock::Ready(file) => self.get_pkg(package, &file).map(MaybePackage::Ready), MaybeLock::Download { url, descriptor } => { Ok(MaybePackage::Download { url, descriptor }) } @@ -579,7 +579,7 @@ impl<'cfg> Source for RegistrySource<'cfg> { fn finish_download(&mut self, package: &PackageId, data: Vec) -> CargoResult { let hash = self.index.hash(package, &mut *self.ops)?; let file = self.ops.finish_download(package, &hash, &data)?; - self.get_pkg(package, file) + self.get_pkg(package, &file) } fn fingerprint(&self, pkg: &Package) -> CargoResult { diff --git a/src/cargo/util/config.rs b/src/cargo/util/config.rs index 2faa31012e2..8a13f72813b 100644 --- a/src/cargo/util/config.rs +++ b/src/cargo/util/config.rs @@ -176,12 +176,10 @@ impl Config { /// The default cargo registry (`alternative-registry`) pub fn default_registry(&self) -> CargoResult> { - Ok( - match self.get_string("registry.default")? { - Some(registry) => Some(registry.val), - None => None, - } - ) + Ok(match self.get_string("registry.default")? { + Some(registry) => Some(registry.val), + None => None, + }) } /// Get a reference to the shell, for e.g. writing error messages @@ -245,7 +243,7 @@ impl Config { let argv0 = env::args_os() .map(PathBuf::from) .next() - .ok_or_else(||format_err!("no argv[0]"))?; + .ok_or_else(|| format_err!("no argv[0]"))?; paths::resolve_executable(&argv0) } @@ -458,10 +456,7 @@ impl Config { } } - pub fn get_path_and_args( - &self, - key: &str, - ) -> CargoResult)>> { + pub fn get_path_and_args(&self, key: &str) -> CargoResult)>> { if let Some(mut val) = self.get_list_or_split_string(key)? 
{ if !val.val.is_empty() { return Ok(Some(Value { @@ -631,9 +626,7 @@ impl Config { self.load_values_from(&self.cwd) } - fn load_values_from(&self, path: &Path) - -> CargoResult> - { + fn load_values_from(&self, path: &Path) -> CargoResult> { let mut cfg = CV::Table(HashMap::new(), PathBuf::from(".")); let home = self.home_path.clone().into_path_unlocked(); @@ -654,7 +647,8 @@ impl Config { cfg.merge(value) .chain_err(|| format!("failed to merge configuration at `{}`", path.display()))?; Ok(()) - }).chain_err(|| "could not load Cargo configuration")?; + }) + .chain_err(|| "could not load Cargo configuration")?; self.load_credentials(&mut cfg)?; match cfg { @@ -790,7 +784,7 @@ impl Config { where F: FnMut() -> CargoResult, { - Ok(self.crates_io_source_id.try_borrow_with(f)?.clone()) + Ok(*(self.crates_io_source_id.try_borrow_with(f)?)) } pub fn creation_time(&self) -> Instant { diff --git a/src/cargo/util/toml/targets.rs b/src/cargo/util/toml/targets.rs index 81afec90926..69b0f85db77 100644 --- a/src/cargo/util/toml/targets.rs +++ b/src/cargo/util/toml/targets.rs @@ -10,16 +10,16 @@ //! It is a bit tricky because we need match explicit information from `Cargo.toml` //! with implicit info in directory layout. 
-use std::path::{Path, PathBuf}; -use std::fs::{self, DirEntry}; use std::collections::HashSet; +use std::fs::{self, DirEntry}; +use std::path::{Path, PathBuf}; -use core::{compiler, Edition, Feature, Features, Target}; -use util::errors::{CargoResult, CargoResultExt}; use super::{ LibKind, PathValue, StringOrBool, StringOrVec, TomlBenchTarget, TomlBinTarget, TomlExampleTarget, TomlLibTarget, TomlManifest, TomlTarget, TomlTestTarget, }; +use core::{compiler, Edition, Feature, Features, Target}; +use util::errors::{CargoResult, CargoResultExt}; pub fn targets( features: &Features, @@ -311,12 +311,8 @@ fn clean_bins( Err(e) => bail!("{}", e), }; - let mut target = Target::bin_target( - &bin.name(), - path, - bin.required_features.clone(), - edition, - ); + let mut target = + Target::bin_target(&bin.name(), path, bin.required_features.clone(), edition); configure(features, bin, &mut target)?; result.push(target); } @@ -413,12 +409,8 @@ fn clean_tests( let mut result = Vec::new(); for (path, toml) in targets { - let mut target = Target::test_target( - &toml.name(), - path, - toml.required_features.clone(), - edition, - ); + let mut target = + Target::test_target(&toml.name(), path, toml.required_features.clone(), edition); configure(features, &toml, &mut target)?; result.push(target); } @@ -472,12 +464,8 @@ fn clean_benches( let mut result = Vec::new(); for (path, toml) in targets { - let mut target = Target::bench_target( - &toml.name(), - path, - toml.required_features.clone(), - edition, - ); + let mut target = + Target::bench_target(&toml.name(), path, toml.required_features.clone(), edition); configure(features, &toml, &mut target)?; result.push(target); } @@ -544,7 +532,14 @@ fn clean_targets_with_legacy_path( validate_unique_names(&toml_targets, target_kind)?; let mut result = Vec::new(); for target in toml_targets { - let path = target_path(&target, inferred, target_kind, package_root, edition, legacy_path); + let path = target_path( + &target, + inferred, + 
target_kind, + package_root, + edition, + legacy_path, + ); let path = match path { Ok(path) => path, Err(e) => { @@ -634,12 +629,13 @@ fn toml_targets_and_inferred( ) -> Vec { let inferred_targets = inferred_to_toml_targets(inferred); match toml_targets { - None => + None => { if let Some(false) = autodiscover { vec![] } else { inferred_targets - }, + } + } Some(targets) => { let mut targets = targets.clone(); @@ -726,9 +722,11 @@ fn validate_has_name( target_kind: &str, ) -> CargoResult<()> { match target.name { - Some(ref name) => if name.trim().is_empty() { - bail!("{} target names cannot be empty", target_kind_human) - }, + Some(ref name) => { + if name.trim().is_empty() { + bail!("{} target names cannot be empty", target_kind_human) + } + } None => bail!( "{} target {}.name is required", target_kind_human, @@ -755,11 +753,7 @@ fn validate_unique_names(targets: &[TomlTarget], target_kind: &str) -> CargoResu Ok(()) } -fn configure( - features: &Features, - toml: &TomlTarget, - target: &mut Target, -) -> CargoResult<()> { +fn configure(features: &Features, toml: &TomlTarget, target: &mut Target) -> CargoResult<()> { let t2 = target.clone(); target .set_tested(toml.test.unwrap_or_else(|| t2.tested())) @@ -773,8 +767,14 @@ fn configure( (Some(false), _) | (_, Some(false)) => false, }); if let Some(edition) = toml.edition.clone() { - features.require(Feature::edition()).chain_err(|| "editions are unstable")?; - target.set_edition(edition.parse().chain_err(|| "failed to parse the `edition` key")?); + features + .require(Feature::edition()) + .chain_err(|| "editions are unstable")?; + target.set_edition( + edition + .parse() + .chain_err(|| "failed to parse the `edition` key")?, + ); } Ok(()) } From dad9fe6618a3a37ff752915d4dd8016d275935ea Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Sun, 25 Nov 2018 21:20:00 -0500 Subject: [PATCH 3/4] add `clippy::` --- src/bin/cargo/main.rs | 6 +++--- src/cargo/lib.rs | 37 +++++++++++++++++++------------------ 2 files changed, 22 
insertions(+), 21 deletions(-) diff --git a/src/bin/cargo/main.rs b/src/bin/cargo/main.rs index 979d8949427..4dd90777f72 100644 --- a/src/bin/cargo/main.rs +++ b/src/bin/cargo/main.rs @@ -1,5 +1,5 @@ -#![cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))] // large project -#![cfg_attr(feature = "cargo-clippy", allow(redundant_closure))] // there's a false positive +#![cfg_attr(feature = "cargo-clippy", allow(clippy::too_many_arguments))] // large project +#![cfg_attr(feature = "cargo-clippy", allow(clippy::redundant_closure))] // there's a false positive extern crate cargo; extern crate clap; @@ -13,10 +13,10 @@ extern crate serde_derive; extern crate serde_json; extern crate toml; +use std::collections::BTreeSet; use std::env; use std::fs; use std::path::{Path, PathBuf}; -use std::collections::BTreeSet; use cargo::core::shell::Shell; use cargo::util::{self, command_prelude, lev_distance, CargoResult, CliResult, Config}; diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs index 8359faceabe..fe3f7f0ef0a 100644 --- a/src/cargo/lib.rs +++ b/src/cargo/lib.rs @@ -1,18 +1,17 @@ #![cfg_attr(test, deny(warnings))] - // Clippy isn't enforced by CI, and know that @alexcrichton isn't a fan :) -#![cfg_attr(feature = "cargo-clippy", allow(boxed_local))] // bug rust-lang-nursery/rust-clippy#1123 -#![cfg_attr(feature = "cargo-clippy", allow(cyclomatic_complexity))] // large project -#![cfg_attr(feature = "cargo-clippy", allow(derive_hash_xor_eq))] // there's an intentional incoherence -#![cfg_attr(feature = "cargo-clippy", allow(explicit_into_iter_loop))] // explicit loops are clearer -#![cfg_attr(feature = "cargo-clippy", allow(explicit_iter_loop))] // explicit loops are clearer -#![cfg_attr(feature = "cargo-clippy", allow(identity_op))] // used for vertical alignment -#![cfg_attr(feature = "cargo-clippy", allow(implicit_hasher))] // large project -#![cfg_attr(feature = "cargo-clippy", allow(large_enum_variant))] // large project -#![cfg_attr(feature = "cargo-clippy", 
allow(redundant_closure_call))] // closures over try catch blocks -#![cfg_attr(feature = "cargo-clippy", allow(too_many_arguments))] // large project -#![cfg_attr(feature = "cargo-clippy", allow(type_complexity))] // there's an exceptionally complex type -#![cfg_attr(feature = "cargo-clippy", allow(wrong_self_convention))] // perhaps Rc should be special cased in Clippy? +#![cfg_attr(feature = "cargo-clippy", allow(clippy::boxed_local))] // bug rust-lang-nursery/rust-clippy#1123 +#![cfg_attr(feature = "cargo-clippy", allow(clippy::cyclomatic_complexity))] // large project +#![cfg_attr(feature = "cargo-clippy", allow(clippy::derive_hash_xor_eq))] // there's an intentional incoherence +#![cfg_attr(feature = "cargo-clippy", allow(clippy::explicit_into_iter_loop))] // explicit loops are clearer +#![cfg_attr(feature = "cargo-clippy", allow(clippy::explicit_iter_loop))] // explicit loops are clearer +#![cfg_attr(feature = "cargo-clippy", allow(clippy::identity_op))] // used for vertical alignment +#![cfg_attr(feature = "cargo-clippy", allow(clippy::implicit_hasher))] // large project +#![cfg_attr(feature = "cargo-clippy", allow(clippy::large_enum_variant))] // large project +#![cfg_attr(feature = "cargo-clippy", allow(clippy::redundant_closure_call))] // closures over try catch blocks +#![cfg_attr(feature = "cargo-clippy", allow(clippy::too_many_arguments))] // large project +#![cfg_attr(feature = "cargo-clippy", allow(clippy::type_complexity))] // there's an exceptionally complex type +#![cfg_attr(feature = "cargo-clippy", allow(clippy::wrong_self_convention))] // perhaps Rc should be special cased in Clippy? 
extern crate atty; extern crate bytesize; @@ -55,6 +54,7 @@ extern crate serde_derive; extern crate serde_ignored; #[macro_use] extern crate serde_json; +extern crate im_rc; extern crate shell_escape; extern crate tar; extern crate tempfile; @@ -62,18 +62,17 @@ extern crate termcolor; extern crate toml; extern crate unicode_width; extern crate url; -extern crate im_rc; use std::fmt; -use serde::ser; use failure::Error; +use serde::ser; -use core::Shell; use core::shell::Verbosity::Verbose; +use core::Shell; -pub use util::{CargoError, CargoResult, CliError, CliResult, Config}; pub use util::errors::Internal; +pub use util::{CargoError, CargoResult, CliError, CliResult, Config}; pub const CARGO_ENV: &str = "CARGO"; @@ -210,7 +209,9 @@ fn handle_cause(cargo_err: &Error, shell: &mut Shell) -> bool { pub fn version() -> VersionInfo { macro_rules! option_env_str { - ($name:expr) => { option_env!($name).map(|s| s.to_string()) } + ($name:expr) => { + option_env!($name).map(|s| s.to_string()) + }; } // So this is pretty horrible... 
From 92485f8c14d29dc08a196f873f5b767e07c39429 Mon Sep 17 00:00:00 2001 From: Eh2406 Date: Sun, 25 Nov 2018 21:45:43 -0500 Subject: [PATCH 4/4] Other clippy things and fmt --- src/cargo/core/compiler/build_context/mod.rs | 30 +-- .../compiler/context/unit_dependencies.rs | 137 ++++++-------- src/cargo/core/compiler/job_queue.rs | 33 ++-- src/cargo/core/package.rs | 2 +- src/cargo/core/resolver/mod.rs | 23 ++- src/cargo/core/shell.rs | 22 +-- src/cargo/ops/fix.rs | 172 ++++++++++-------- src/cargo/util/lev_distance.rs | 2 +- src/cargo/util/read2.rs | 9 +- 9 files changed, 217 insertions(+), 213 deletions(-) diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs index 358751c7f07..a6d8be47f35 100644 --- a/src/cargo/core/compiler/build_context/mod.rs +++ b/src/cargo/core/compiler/build_context/mod.rs @@ -88,7 +88,8 @@ impl<'a, 'cfg> BuildContext<'a, 'cfg> { } pub fn extern_crate_name(&self, unit: &Unit<'a>, dep: &Unit<'a>) -> CargoResult { - self.resolve.extern_crate_name(unit.pkg.package_id(), dep.pkg.package_id(), dep.target) + self.resolve + .extern_crate_name(unit.pkg.package_id(), dep.pkg.package_id(), dep.target) } /// Whether a dependency should be compiled for the host or target platform, @@ -266,10 +267,12 @@ impl TargetConfig { let list = value.list(k)?; output.cfgs.extend(list.iter().map(|v| v.0.clone())); } - "rustc-env" => for (name, val) in value.table(k)?.0 { - let val = val.string(name)?.0; - output.env.push((name.clone(), val.to_string())); - }, + "rustc-env" => { + for (name, val) in value.table(k)?.0 { + let val = val.string(name)?.0; + output.env.push((name.clone(), val.to_string())); + } + } "warning" | "rerun-if-changed" | "rerun-if-env-changed" => { bail!("`{}` is not supported in build script overrides", k); } @@ -342,7 +345,8 @@ fn env_args( // First try RUSTFLAGS from the environment if let Ok(a) = env::var(name) { - let args = a.split(' ') + let args = a + .split(' ') .map(str::trim) 
.filter(|s| !s.is_empty()) .map(str::to_string); @@ -351,7 +355,8 @@ fn env_args( let mut rustflags = Vec::new(); - let name = name.chars() + let name = name + .chars() .flat_map(|c| c.to_lowercase()) .collect::(); // Then the target.*.rustflags value... @@ -367,13 +372,10 @@ fn env_args( // ...including target.'cfg(...)'.rustflags if let Some(target_cfg) = target_cfg { if let Some(table) = config.get_table("target")? { - let cfgs = table.val.keys().filter_map(|key| { - if CfgExpr::matches_key(key, target_cfg) { - Some(key) - } else { - None - } - }); + let cfgs = table + .val + .keys() + .filter(|key| CfgExpr::matches_key(key, target_cfg)); // Note that we may have multiple matching `[target]` sections and // because we're passing flags to the compiler this can affect diff --git a/src/cargo/core/compiler/context/unit_dependencies.rs b/src/cargo/core/compiler/context/unit_dependencies.rs index afc819df21f..6c4d72976bd 100644 --- a/src/cargo/core/compiler/context/unit_dependencies.rs +++ b/src/cargo/core/compiler/context/unit_dependencies.rs @@ -18,12 +18,12 @@ use std::cell::RefCell; use std::collections::{HashMap, HashSet}; -use CargoResult; +use super::{BuildContext, CompileMode, Kind, Unit}; use core::dependency::Kind as DepKind; -use core::profiles::UnitFor; -use core::{Package, Target, PackageId}; use core::package::Downloads; -use super::{BuildContext, CompileMode, Kind, Unit}; +use core::profiles::UnitFor; +use core::{Package, PackageId, Target}; +use CargoResult; struct State<'a: 'tmp, 'cfg: 'a, 'tmp> { bcx: &'tmp BuildContext<'a, 'cfg>, @@ -74,11 +74,11 @@ pub fn build_unit_dependencies<'a, 'cfg>( deps_of(unit, &mut state, unit_for)?; } - if state.waiting_on_download.len() > 0 { + if !state.waiting_on_download.is_empty() { state.finish_some_downloads()?; state.deps.clear(); } else { - break + break; } } trace!("ALL UNIT DEPENDENCIES {:#?}", state.deps); @@ -128,46 +128,43 @@ fn compute_deps<'a, 'cfg, 'tmp>( let bcx = state.bcx; let id = 
unit.pkg.package_id(); - let deps = bcx.resolve.deps(id) - .filter(|&(_id, deps)| { - assert!(!deps.is_empty()); - deps.iter().any(|dep| { - // If this target is a build command, then we only want build - // dependencies, otherwise we want everything *other than* build - // dependencies. - if unit.target.is_custom_build() != dep.is_build() { - return false; - } + let deps = bcx.resolve.deps(id).filter(|&(_id, deps)| { + assert!(!deps.is_empty()); + deps.iter().any(|dep| { + // If this target is a build command, then we only want build + // dependencies, otherwise we want everything *other than* build + // dependencies. + if unit.target.is_custom_build() != dep.is_build() { + return false; + } - // If this dependency is *not* a transitive dependency, then it - // only applies to test/example targets - if !dep.is_transitive() && - !unit.target.is_test() && - !unit.target.is_example() && - !unit.mode.is_any_test() - { - return false; - } + // If this dependency is *not* a transitive dependency, then it + // only applies to test/example targets + if !dep.is_transitive() + && !unit.target.is_test() + && !unit.target.is_example() + && !unit.mode.is_any_test() + { + return false; + } - // If this dependency is only available for certain platforms, - // make sure we're only enabling it for that platform. - if !bcx.dep_platform_activated(dep, unit.kind) { - return false; - } + // If this dependency is only available for certain platforms, + // make sure we're only enabling it for that platform. 
+ if !bcx.dep_platform_activated(dep, unit.kind) { + return false; + } - // If the dependency is optional, then we're only activating it - // if the corresponding feature was activated - if dep.is_optional() && - !bcx.resolve.features(id).contains(&*dep.name_in_toml()) - { - return false; - } + // If the dependency is optional, then we're only activating it + // if the corresponding feature was activated + if dep.is_optional() && !bcx.resolve.features(id).contains(&*dep.name_in_toml()) { + return false; + } - // If we've gotten past all that, then this dependency is - // actually used! - true - }) - }); + // If we've gotten past all that, then this dependency is + // actually used! + true + }) + }); let mut ret = Vec::new(); for (id, _) in deps { @@ -181,14 +178,7 @@ fn compute_deps<'a, 'cfg, 'tmp>( }; let mode = check_or_build_mode(unit.mode, lib); let dep_unit_for = unit_for.with_for_host(lib.for_host()); - let unit = new_unit( - bcx, - pkg, - lib, - dep_unit_for, - unit.kind.for_target(lib), - mode, - ); + let unit = new_unit(bcx, pkg, lib, dep_unit_for, unit.kind.for_target(lib), mode); ret.push((unit, dep_unit_for)); } @@ -211,7 +201,8 @@ fn compute_deps<'a, 'cfg, 'tmp>( // If any integration tests/benches are being run, make sure that // binaries are built as well. - if !unit.mode.is_check() && unit.mode.is_any_test() + if !unit.mode.is_check() + && unit.mode.is_any_test() && (unit.target.is_test() || unit.target.is_bench()) { ret.extend( @@ -282,7 +273,8 @@ fn compute_deps_doc<'a, 'cfg, 'tmp>( state: &mut State<'a, 'cfg, 'tmp>, ) -> CargoResult, UnitFor)>> { let bcx = state.bcx; - let deps = bcx.resolve + let deps = bcx + .resolve .deps(unit.pkg.package_id()) .filter(|&(_id, deps)| { deps.iter().any(|dep| match dep.kind() { @@ -308,14 +300,7 @@ fn compute_deps_doc<'a, 'cfg, 'tmp>( // However, for plugins/proc-macros, deps should be built like normal. 
let mode = check_or_build_mode(unit.mode, lib); let dep_unit_for = UnitFor::new_normal().with_for_host(lib.for_host()); - let lib_unit = new_unit( - bcx, - dep, - lib, - dep_unit_for, - unit.kind.for_target(lib), - mode, - ); + let lib_unit = new_unit(bcx, dep, lib, dep_unit_for, unit.kind.for_target(lib), mode); ret.push((lib_unit, dep_unit_for)); if let CompileMode::Doc { deps: true } = unit.mode { // Document this lib as well. @@ -348,14 +333,7 @@ fn maybe_lib<'a>( ) -> Option<(Unit<'a>, UnitFor)> { unit.pkg.targets().iter().find(|t| t.linkable()).map(|t| { let mode = check_or_build_mode(unit.mode, t); - let unit = new_unit( - bcx, - unit.pkg, - t, - unit_for, - unit.kind.for_target(t), - mode, - ); + let unit = new_unit(bcx, unit.pkg, t, unit_for, unit.kind.for_target(t), mode); (unit, unit_for) }) } @@ -453,7 +431,8 @@ fn connect_run_custom_build_deps(state: &mut State) { for (unit, deps) in state.deps.iter() { for dep in deps { if dep.mode == CompileMode::RunCustomBuild { - reverse_deps.entry(dep) + reverse_deps + .entry(dep) .or_insert_with(HashSet::new) .insert(unit); } @@ -469,7 +448,11 @@ fn connect_run_custom_build_deps(state: &mut State) { // `links`, then we depend on that package's build script! Here we use // `dep_build_script` to manufacture an appropriate build script unit to // depend on. 
- for unit in state.deps.keys().filter(|k| k.mode == CompileMode::RunCustomBuild) { + for unit in state + .deps + .keys() + .filter(|k| k.mode == CompileMode::RunCustomBuild) + { let reverse_deps = match reverse_deps.get(unit) { Some(set) => set, None => continue, @@ -479,9 +462,9 @@ fn connect_run_custom_build_deps(state: &mut State) { .iter() .flat_map(|reverse_dep| state.deps[reverse_dep].iter()) .filter(|other| { - other.pkg != unit.pkg && - other.target.linkable() && - other.pkg.manifest().links().is_some() + other.pkg != unit.pkg + && other.target.linkable() + && other.pkg.manifest().links().is_some() }) .filter_map(|other| dep_build_script(other, state.bcx).map(|p| p.0)) .collect::>(); @@ -502,15 +485,15 @@ impl<'a, 'cfg, 'tmp> State<'a, 'cfg, 'tmp> { fn get(&mut self, id: &'a PackageId) -> CargoResult> { let mut pkgs = self.pkgs.borrow_mut(); if let Some(pkg) = pkgs.get(id) { - return Ok(Some(pkg)) + return Ok(Some(pkg)); } if !self.waiting_on_download.insert(id) { - return Ok(None) + return Ok(None); } if let Some(pkg) = self.downloads.start(id)? { pkgs.insert(id, pkg); self.waiting_on_download.remove(id); - return Ok(Some(pkg)) + return Ok(Some(pkg)); } Ok(None) } @@ -535,7 +518,7 @@ impl<'a, 'cfg, 'tmp> State<'a, 'cfg, 'tmp> { // less than this let's recompute the whole unit dependency graph // again and try to find some more packages to download. 
if self.downloads.remaining() < 5 { - break + break; } } Ok(()) diff --git a/src/cargo/core/compiler/job_queue.rs b/src/cargo/core/compiler/job_queue.rs index 806412bcda0..687f74d3ebf 100644 --- a/src/cargo/core/compiler/job_queue.rs +++ b/src/cargo/core/compiler/job_queue.rs @@ -3,9 +3,9 @@ use std::collections::HashSet; use std::fmt; use std::io; use std::mem; +use std::process::Output; use std::sync::mpsc::{channel, Receiver, Sender}; use std::sync::Arc; -use std::process::Output; use crossbeam_utils; use crossbeam_utils::thread::Scope; @@ -15,14 +15,14 @@ use core::profiles::Profile; use core::{PackageId, Target, TargetKind}; use handle_error; use util; +use util::diagnostic_server::{self, DiagnosticPrinter}; use util::{internal, profile, CargoResult, CargoResultExt, ProcessBuilder}; use util::{Config, DependencyQueue, Dirty, Fresh, Freshness}; use util::{Progress, ProgressStyle}; -use util::diagnostic_server::{self, DiagnosticPrinter}; +use super::context::OutputFile; use super::job::Job; use super::{BuildContext, BuildPlan, CompileMode, Context, Kind, Unit}; -use super::context::OutputFile; /// A management structure of the entire dependency graph to compile. 
/// @@ -105,7 +105,8 @@ impl<'a> JobState<'a> { cmd: ProcessBuilder, filenames: Arc>, ) { - let _ = self.tx + let _ = self + .tx .send(Message::BuildPlanMsg(module_name, cmd, filenames)); } @@ -115,7 +116,7 @@ impl<'a> JobState<'a> { prefix: Option, capture_output: bool, ) -> CargoResult { - let prefix = prefix.unwrap_or_else(|| String::new()); + let prefix = prefix.unwrap_or_else(String::new); cmd.exec_with_streaming( &mut |out| { let _ = self.tx.send(Message::Stdout(format!("{}{}", prefix, out))); @@ -187,23 +188,23 @@ impl<'a> JobQueue<'a> { let tx = self.tx.clone(); let tx = unsafe { mem::transmute::>, Sender>>(tx) }; let tx2 = tx.clone(); - let helper = cx.jobserver + let helper = cx + .jobserver .clone() .into_helper_thread(move |token| { drop(tx.send(Message::Token(token))); }) .chain_err(|| "failed to create helper thread for jobserver management")?; - let _diagnostic_server = cx.bcx.build_config + let _diagnostic_server = cx + .bcx + .build_config .rustfix_diagnostic_server .borrow_mut() .take() - .map(move |srv| { - srv.start(move |msg| drop(tx2.send(Message::FixDiagnostic(msg)))) - }); + .map(move |srv| srv.start(move |msg| drop(tx2.send(Message::FixDiagnostic(msg))))); - crossbeam_utils::thread::scope(|scope| { - self.drain_the_queue(cx, plan, scope, &helper) - }).expect("child threads should't panic") + crossbeam_utils::thread::scope(|scope| self.drain_the_queue(cx, plan, scope, &helper)) + .expect("child threads should't panic") } fn drain_the_queue( @@ -276,7 +277,9 @@ impl<'a> JobQueue<'a> { tokens.truncate(self.active.len() - 1); let count = total - self.queue.len(); - let active_names = self.active.iter() + let active_names = self + .active + .iter() .map(Key::name_for_progress) .collect::>(); drop(progress.tick_now(count, total, &format!(": {}", active_names.join(", ")))); @@ -299,7 +302,7 @@ impl<'a> JobQueue<'a> { Message::Stderr(err) => { let mut shell = cx.bcx.config.shell(); shell.print_ansi(err.as_bytes())?; - shell.err().write(b"\n")?; + 
shell.err().write_all(b"\n")?; } Message::FixDiagnostic(msg) => { print.print(&msg)?; diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index 0041c449bef..bb85567bdfd 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -530,7 +530,7 @@ impl<'a, 'cfg> Downloads<'a, 'cfg> { // first crate finishes downloading so we inform immediately that we're // downloading crates here. if self.downloads_finished == 0 - && self.pending.len() == 0 + && self.pending.is_empty() && !self.progress.borrow().as_ref().unwrap().is_enabled() { self.set diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 961c9bd666f..9b22202608b 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -802,20 +802,19 @@ fn find_candidate( // active in this back up we know that we're guaranteed to not actually // make any progress. As a result if we hit this condition we can // completely skip this backtrack frame and move on to the next. 
- if !backtracked { - if frame + if !backtracked + && frame .context .is_conflicting(Some(parent.package_id()), conflicting_activations) - { - trace!( - "{} = \"{}\" skip as not solving {}: {:?}", - frame.dep.package_name(), - frame.dep.version_req(), - parent.package_id(), - conflicting_activations - ); - continue; - } + { + trace!( + "{} = \"{}\" skip as not solving {}: {:?}", + frame.dep.package_name(), + frame.dep.version_req(), + parent.package_id(), + conflicting_activations + ); + continue; } return Some((candidate, has_another, frame)); diff --git a/src/cargo/core/shell.rs b/src/cargo/core/shell.rs index 023a87e6562..cda5a2dceb2 100644 --- a/src/cargo/core/shell.rs +++ b/src/cargo/core/shell.rs @@ -28,10 +28,12 @@ pub struct Shell { impl fmt::Debug for Shell { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.err { - ShellOut::Write(_) => f.debug_struct("Shell") + ShellOut::Write(_) => f + .debug_struct("Shell") .field("verbosity", &self.verbosity) .finish(), - ShellOut::Stream { color_choice, .. } => f.debug_struct("Shell") + ShellOut::Stream { color_choice, .. 
} => f + .debug_struct("Shell") .field("verbosity", &self.verbosity) .field("color_choice", &color_choice) .finish(), @@ -376,13 +378,13 @@ mod imp { mod imp { extern crate winapi; - use std::{cmp, mem, ptr}; use self::winapi::um::fileapi::*; use self::winapi::um::handleapi::*; use self::winapi::um::processenv::*; use self::winapi::um::winbase::*; use self::winapi::um::wincon::*; use self::winapi::um::winnt::*; + use std::{cmp, mem, ptr}; pub(super) use super::default_err_erase_line as err_erase_line; @@ -391,19 +393,20 @@ mod imp { let stdout = GetStdHandle(STD_ERROR_HANDLE); let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed(); if GetConsoleScreenBufferInfo(stdout, &mut csbi) != 0 { - return Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize) + return Some((csbi.srWindow.Right - csbi.srWindow.Left) as usize); } // On mintty/msys/cygwin based terminals, the above fails with // INVALID_HANDLE_VALUE. Use an alternate method which works // in that case as well. - let h = CreateFileA("CONOUT$\0".as_ptr() as *const CHAR, + let h = CreateFileA( + "CONOUT$\0".as_ptr() as *const CHAR, GENERIC_READ | GENERIC_WRITE, FILE_SHARE_READ | FILE_SHARE_WRITE, ptr::null_mut(), OPEN_EXISTING, 0, - ptr::null_mut() + ptr::null_mut(), ); if h == INVALID_HANDLE_VALUE { return None; @@ -424,15 +427,12 @@ mod imp { // GetConsoleScreenBufferInfo returns accurate information. 
return Some(cmp::min(60, width)); } - return None; + None } } } -#[cfg(any( - all(unix, not(any(target_os = "linux", target_os = "macos"))), - windows, -))] +#[cfg(any(all(unix, not(any(target_os = "linux", target_os = "macos"))), windows,))] fn default_err_erase_line(shell: &mut Shell) { if let Some(max_width) = imp::stderr_width() { let blank = " ".repeat(max_width); diff --git a/src/cargo/ops/fix.rs b/src/cargo/ops/fix.rs index 2c1a9cf445a..734ed54456c 100644 --- a/src/cargo/ops/fix.rs +++ b/src/cargo/ops/fix.rs @@ -1,4 +1,4 @@ -use std::collections::{HashMap, HashSet, BTreeSet}; +use std::collections::{BTreeSet, HashMap, HashSet}; use std::env; use std::ffi::OsString; use std::fs; @@ -14,10 +14,10 @@ use serde_json; use core::Workspace; use ops::{self, CompileOptions}; -use util::errors::CargoResult; -use util::{LockServer, LockServerClient, existing_vcs_repo}; use util::diagnostic_server::{Message, RustfixDiagnosticServer}; +use util::errors::CargoResult; use util::paths; +use util::{existing_vcs_repo, LockServer, LockServerClient}; const FIX_ENV: &str = "__CARGO_FIX_PLZ"; const BROKEN_CODE_ENV: &str = "__CARGO_FIX_BROKEN_CODE"; @@ -43,37 +43,46 @@ pub fn fix(ws: &Workspace, opts: &mut FixOptions) -> CargoResult<()> { // Spin up our lock server which our subprocesses will use to synchronize // fixes. 
let lock_server = LockServer::new()?; - opts.compile_opts.build_config.extra_rustc_env.push(( - FIX_ENV.to_string(), - lock_server.addr().to_string(), - )); + opts.compile_opts + .build_config + .extra_rustc_env + .push((FIX_ENV.to_string(), lock_server.addr().to_string())); let _started = lock_server.start()?; opts.compile_opts.build_config.force_rebuild = true; if opts.broken_code { let key = BROKEN_CODE_ENV.to_string(); - opts.compile_opts.build_config.extra_rustc_env.push((key, "1".to_string())); + opts.compile_opts + .build_config + .extra_rustc_env + .push((key, "1".to_string())); } if opts.edition { let key = EDITION_ENV.to_string(); - opts.compile_opts.build_config.extra_rustc_env.push((key, "1".to_string())); + opts.compile_opts + .build_config + .extra_rustc_env + .push((key, "1".to_string())); } else if let Some(edition) = opts.prepare_for { - opts.compile_opts.build_config.extra_rustc_env.push(( - PREPARE_FOR_ENV.to_string(), - edition.to_string(), - )); + opts.compile_opts + .build_config + .extra_rustc_env + .push((PREPARE_FOR_ENV.to_string(), edition.to_string())); } if opts.idioms { - opts.compile_opts.build_config.extra_rustc_env.push(( - IDIOMS_ENV.to_string(), - "1".to_string(), - )); + opts.compile_opts + .build_config + .extra_rustc_env + .push((IDIOMS_ENV.to_string(), "1".to_string())); } opts.compile_opts.build_config.cargo_as_rustc_wrapper = true; - *opts.compile_opts.build_config.rustfix_diagnostic_server.borrow_mut() = - Some(RustfixDiagnosticServer::new()?); + *opts + .compile_opts + .build_config + .rustfix_diagnostic_server + .borrow_mut() = Some(RustfixDiagnosticServer::new()?); ops::compile(ws, &opts.compile_opts)?; Ok(()) @@ -81,17 +90,19 @@ pub fn fix(ws: &Workspace, opts: &mut FixOptions) -> CargoResult<()> { fn check_version_control(opts: &FixOptions) -> CargoResult<()> { if opts.allow_no_vcs { - return Ok(()) + return Ok(()); } let config = opts.compile_opts.config; if !existing_vcs_repo(config.cwd(), config.cwd()) { - bail!("no 
VCS found for this package and `cargo fix` can potentially \ - perform destructive changes; if you'd like to suppress this \ - error pass `--allow-no-vcs`") + bail!( + "no VCS found for this package and `cargo fix` can potentially \ + perform destructive changes; if you'd like to suppress this \ + error pass `--allow-no-vcs`" + ) } if opts.allow_dirty && opts.allow_staged { - return Ok(()) + return Ok(()); } let mut dirty_files = Vec::new(); @@ -103,26 +114,27 @@ fn check_version_control(opts: &FixOptions) -> CargoResult<()> { if let Some(path) = status.path() { match status.status() { git2::Status::CURRENT => (), - git2::Status::INDEX_NEW | - git2::Status::INDEX_MODIFIED | - git2::Status::INDEX_DELETED | - git2::Status::INDEX_RENAMED | - git2::Status::INDEX_TYPECHANGE => + git2::Status::INDEX_NEW + | git2::Status::INDEX_MODIFIED + | git2::Status::INDEX_DELETED + | git2::Status::INDEX_RENAMED + | git2::Status::INDEX_TYPECHANGE => { if !opts.allow_staged { staged_files.push(path.to_string()) - }, - _ => + } + } + _ => { if !opts.allow_dirty { dirty_files.push(path.to_string()) - }, + } + } }; } - } } if dirty_files.is_empty() && staged_files.is_empty() { - return Ok(()) + return Ok(()); } let mut files_list = String::new(); @@ -137,13 +149,16 @@ fn check_version_control(opts: &FixOptions) -> CargoResult<()> { files_list.push_str(" (staged)\n"); } - bail!("the working directory of this package has uncommitted changes, and \ - `cargo fix` can potentially perform destructive changes; if you'd \ - like to suppress this error pass `--allow-dirty`, `--allow-staged`, \ - or commit the changes to these files:\n\ - \n\ - {}\n\ - ", files_list); + bail!( + "the working directory of this package has uncommitted changes, and \ + `cargo fix` can potentially perform destructive changes; if you'd \ + like to suppress this error pass `--allow-dirty`, `--allow-staged`, \ + or commit the changes to these files:\n\ + \n\ + {}\n\ + ", + files_list + ); } pub fn fix_maybe_exec_rustc() 
-> CargoResult { @@ -190,7 +205,8 @@ pub fn fix_maybe_exec_rustc() -> CargoResult { Message::Fixing { file: path.clone(), fixes: file.fixes_applied, - }.post()?; + } + .post()?; } } @@ -231,9 +247,12 @@ struct FixedFile { original_code: String, } -fn rustfix_crate(lock_addr: &str, rustc: &Path, filename: &Path, args: &FixArgs) - -> Result -{ +fn rustfix_crate( + lock_addr: &str, + rustc: &Path, + filename: &Path, + args: &FixArgs, +) -> Result { args.verify_not_preparing_for_enabled_edition()?; // First up we want to make sure that each crate is only checked by one @@ -293,7 +312,7 @@ fn rustfix_crate(lock_addr: &str, rustc: &Path, filename: &Path, args: &FixArgs) let mut progress_yet_to_be_made = false; for (path, file) in fixes.files.iter_mut() { if file.errors_applying_fixes.is_empty() { - continue + continue; } // If anything was successfully fixed *and* there's at least one // error, then assume the error was spurious and we'll try again on @@ -303,7 +322,7 @@ fn rustfix_crate(lock_addr: &str, rustc: &Path, filename: &Path, args: &FixArgs) } } if !progress_yet_to_be_made { - break + break; } } @@ -314,7 +333,8 @@ fn rustfix_crate(lock_addr: &str, rustc: &Path, filename: &Path, args: &FixArgs) Message::ReplaceFailed { file: path.clone(), message: error, - }.post()?; + } + .post()?; } } @@ -325,9 +345,12 @@ fn rustfix_crate(lock_addr: &str, rustc: &Path, filename: &Path, args: &FixArgs) /// /// This will fill in the `fixes` map with original code, suggestions applied, /// and any errors encountered while fixing files. 
-fn rustfix_and_fix(fixes: &mut FixedCrate, rustc: &Path, filename: &Path, args: &FixArgs) - -> Result<(), Error> -{ +fn rustfix_and_fix( + fixes: &mut FixedCrate, + rustc: &Path, + filename: &Path, + args: &FixArgs, +) -> Result<(), Error> { // If not empty, filter by these lints // // TODO: Implement a way to specify this @@ -336,7 +359,8 @@ fn rustfix_and_fix(fixes: &mut FixedCrate, rustc: &Path, filename: &Path, args: let mut cmd = Command::new(rustc); cmd.arg("--error-format=json"); args.apply(&mut cmd); - let output = cmd.output() + let output = cmd + .output() .with_context(|_| format!("failed to execute `{}`", rustc.display()))?; // If rustc didn't succeed for whatever reasons then we're very likely to be @@ -361,13 +385,12 @@ fn rustfix_and_fix(fixes: &mut FixedCrate, rustc: &Path, filename: &Path, args: // indicating fixes that we can apply. let stderr = str::from_utf8(&output.stderr).context("failed to parse rustc stderr as utf-8")?; - let suggestions = stderr.lines() + let suggestions = stderr + .lines() .filter(|x| !x.is_empty()) .inspect(|y| trace!("line: {}", y)) - // Parse each line of stderr ignoring errors as they may not all be json .filter_map(|line| serde_json::from_str::(line).ok()) - // From each diagnostic try to extract suggestions from rustc .filter_map(|diag| rustfix::collect_suggestions(&diag, &only, fix_mode)); @@ -426,13 +449,13 @@ fn rustfix_and_fix(fixes: &mut FixedCrate, rustc: &Path, filename: &Path, args: // code, so save it. If the file already exists then the original code // doesn't need to be updated as we've just read an interim state with // some fixes but perhaps not all. 
- let fixed_file = fixes.files.entry(file.clone()) - .or_insert_with(|| { - FixedFile { - errors_applying_fixes: Vec::new(), - fixes_applied: 0, - original_code: code.clone(), - } + let fixed_file = fixes + .files + .entry(file.clone()) + .or_insert_with(|| FixedFile { + errors_applying_fixes: Vec::new(), + fixes_applied: 0, + original_code: code.clone(), }); let mut fixed = CodeFix::new(&code); @@ -446,8 +469,7 @@ fn rustfix_and_fix(fixes: &mut FixedCrate, rustc: &Path, filename: &Path, args: } } let new_code = fixed.finish()?; - fs::write(&file, new_code) - .with_context(|_| format!("failed to write file `{}`", file))?; + fs::write(&file, new_code).with_context(|_| format!("failed to write file `{}`", file))?; } Ok(()) @@ -525,17 +547,15 @@ impl FixArgs { let mut ret = FixArgs::default(); for arg in env::args_os().skip(1) { let path = PathBuf::from(arg); - if path.extension().and_then(|s| s.to_str()) == Some("rs") { - if path.exists() { - ret.file = Some(path); - continue - } + if path.extension().and_then(|s| s.to_str()) == Some("rs") && path.exists() { + ret.file = Some(path); + continue; } if let Some(s) = path.to_str() { let prefix = "--edition="; if s.starts_with(prefix) { ret.enabled_edition = Some(s[prefix.len()..].to_string()); - continue + continue; } } ret.other.push(path.into()); @@ -554,12 +574,11 @@ impl FixArgs { if let Some(path) = &self.file { cmd.arg(path); } - cmd.args(&self.other) - .arg("--cap-lints=warn"); + cmd.args(&self.other).arg("--cap-lints=warn"); if let Some(edition) = &self.enabled_edition { cmd.arg("--edition").arg(edition); - if self.idioms && self.primary_package { - if edition == "2018" { cmd.arg("-Wrust-2018-idioms"); } + if self.idioms && self.primary_package && edition == "2018" { + cmd.arg("-Wrust-2018-idioms"); } } if self.primary_package { @@ -586,7 +605,7 @@ impl FixArgs { None => return Ok(()), }; if edition != enabled { - return Ok(()) + return Ok(()); } let path = match &self.file { Some(s) => s, @@ -596,7 +615,8 @@ 
impl FixArgs { Message::EditionAlreadyEnabled { file: path.display().to_string(), edition: edition.to_string(), - }.post()?; + } + .post()?; process::exit(1); } diff --git a/src/cargo/util/lev_distance.rs b/src/cargo/util/lev_distance.rs index c4a7e9856bf..034fb728788 100644 --- a/src/cargo/util/lev_distance.rs +++ b/src/cargo/util/lev_distance.rs @@ -8,7 +8,7 @@ pub fn lev_distance(me: &str, t: &str) -> usize { return me.chars().count(); } - let mut dcol = (0..t.len() + 1).collect::>(); + let mut dcol = (0..=t.len()).collect::>(); let mut t_last = 0; for (i, sc) in me.chars().enumerate() { diff --git a/src/cargo/util/read2.rs b/src/cargo/util/read2.rs index 13a50a724ba..74ec2cc114a 100644 --- a/src/cargo/util/read2.rs +++ b/src/cargo/util/read2.rs @@ -2,12 +2,12 @@ pub use self::imp::read2; #[cfg(unix)] mod imp { - use std::io::prelude::*; + use libc; use std::io; + use std::io::prelude::*; use std::mem; use std::os::unix::prelude::*; use std::process::{ChildStderr, ChildStdout}; - use libc; pub fn read2( mut out_pipe: ChildStdout, @@ -177,9 +177,6 @@ mod imp { if v.capacity() == v.len() { v.reserve(1); } - slice::from_raw_parts_mut( - v.as_mut_ptr().offset(v.len() as isize), - v.capacity() - v.len(), - ) + slice::from_raw_parts_mut(v.as_mut_ptr().add(v.len()), v.capacity() - v.len()) } }