From dedcd97e92f190ee99aa271c6cfd8978f123d731 Mon Sep 17 00:00:00 2001 From: Igor Matuszewski Date: Tue, 25 Jun 2019 23:08:10 +0200 Subject: [PATCH 1/6] Use f{32,64}::from_bits --- src/libserialize/opaque.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libserialize/opaque.rs b/src/libserialize/opaque.rs index a6a5c318079f1..75988198eb9b5 100644 --- a/src/libserialize/opaque.rs +++ b/src/libserialize/opaque.rs @@ -296,13 +296,13 @@ impl<'a> serialize::Decoder for Decoder<'a> { #[inline] fn read_f64(&mut self) -> Result { let bits = self.read_u64()?; - Ok(unsafe { ::std::mem::transmute(bits) }) + Ok(f64::from_bits(bits)) } #[inline] fn read_f32(&mut self) -> Result { let bits = self.read_u32()?; - Ok(unsafe { ::std::mem::transmute(bits) }) + Ok(f32::from_bits(bits)) } #[inline] From 12806b70506508c4ec187f0223e1e86f89167448 Mon Sep 17 00:00:00 2001 From: Igor Matuszewski Date: Tue, 25 Jun 2019 23:22:45 +0200 Subject: [PATCH 2/6] Fix clippy::redundant_field_names --- .../vec_linked_list.rs | 2 +- .../annotate_snippet_emitter_writer.rs | 2 +- src/librustc_errors/diagnostic.rs | 2 +- src/librustc_target/spec/fuchsia_base.rs | 2 +- src/libserialize/json.rs | 2 +- src/libsyntax/ast.rs | 4 ++-- src/libsyntax/ext/build.rs | 2 +- src/libsyntax/ext/expand.rs | 6 +++--- src/libsyntax/ext/tt/quoted.rs | 2 +- src/libsyntax/ext/tt/transcribe.rs | 6 +++--- src/libsyntax/feature_gate.rs | 2 +- src/libsyntax/parse/parser.rs | 18 +++++++++--------- src/libsyntax/source_map.rs | 8 ++++---- 13 files changed, 29 insertions(+), 29 deletions(-) diff --git a/src/librustc_data_structures/vec_linked_list.rs b/src/librustc_data_structures/vec_linked_list.rs index c00c707a43542..0fb8060031843 100644 --- a/src/librustc_data_structures/vec_linked_list.rs +++ b/src/librustc_data_structures/vec_linked_list.rs @@ -8,7 +8,7 @@ where Ls: Links, { VecLinkedListIterator { - links: links, + links, current: first, } } diff --git a/src/librustc_errors/annotate_snippet_emitter_writer.rs b/src/librustc_errors/annotate_snippet_emitter_writer.rs index 7ed2fddf72d23..3641d355ef19c 100644 --- a/src/librustc_errors/annotate_snippet_emitter_writer.rs +++ b/src/librustc_errors/annotate_snippet_emitter_writer.rs @@ -94,7 +94,7 @@ impl<'a> DiagnosticConverter<'a> { annotation_type: Self::annotation_type_for_level(self.level), }), footer: vec![], - slices: slices, + slices, }) } else { // FIXME(#59346): Is it ok to return None if there's no source_map? 
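The change above swaps `unsafe { ::std::mem::transmute(bits) }` for `f64::from_bits` / `f32::from_bits`, which perform the same bit-for-bit reinterpretation without an `unsafe` block. A minimal standalone sketch contrasting the two styles (illustrative only, not taken from the patch):

```rust
fn f64_via_transmute(bits: u64) -> f64 {
    // Compiles and works, but needs `unsafe` and says nothing about intent.
    unsafe { std::mem::transmute::<u64, f64>(bits) }
}

fn f64_via_from_bits(bits: u64) -> f64 {
    // Safe, explicit, and documented to be a plain bit reinterpretation.
    f64::from_bits(bits)
}

fn main() {
    let bits = 1.0_f64.to_bits();
    assert_eq!(f64_via_transmute(bits), 1.0);
    assert_eq!(f64_via_from_bits(bits), 1.0);
}
```

Since both forms are exact bit reinterpretations, the decoder's behavior is unchanged; only the `unsafe` block goes away.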
diff --git a/src/librustc_errors/diagnostic.rs b/src/librustc_errors/diagnostic.rs index fc1fd960c4ace..424d7c0038389 100644 --- a/src/librustc_errors/diagnostic.rs +++ b/src/librustc_errors/diagnostic.rs @@ -388,7 +388,7 @@ impl Diagnostic { }], msg: msg.to_owned(), style: SuggestionStyle::CompletelyHidden, - applicability: applicability, + applicability, }); self } diff --git a/src/librustc_target/spec/fuchsia_base.rs b/src/librustc_target/spec/fuchsia_base.rs index 4e4f2fa0cf34c..48749dff941ac 100644 --- a/src/librustc_target/spec/fuchsia_base.rs +++ b/src/librustc_target/spec/fuchsia_base.rs @@ -19,7 +19,7 @@ pub fn opts() -> TargetOptions { is_like_fuchsia: true, linker_is_gnu: true, has_rpath: false, - pre_link_args: pre_link_args, + pre_link_args, pre_link_objects_exe: vec![ "Scrt1.o".to_string() ], diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs index 2c3bea80e349b..826954ce2168c 100644 --- a/src/libserialize/json.rs +++ b/src/libserialize/json.rs @@ -461,7 +461,7 @@ impl<'a> Encoder<'a> { /// Creates a new JSON encoder whose output will be written to the writer /// specified. pub fn new(writer: &'a mut dyn fmt::Write) -> Encoder<'a> { - Encoder { writer: writer, is_emitting_map_key: false, } + Encoder { writer, is_emitting_map_key: false, } } } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index f2fac16db01d2..c627596bbdf20 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -1832,7 +1832,7 @@ impl Arg { lt, MutTy { ty: infer_ty, - mutbl: mutbl, + mutbl, }, ), span, @@ -2120,7 +2120,7 @@ impl PolyTraitRef { PolyTraitRef { bound_generic_params: generic_params, trait_ref: TraitRef { - path: path, + path, ref_id: DUMMY_NODE_ID, }, span, diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 9d4bf7d518d75..baf1031de1e7c 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -815,7 +815,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn pat(&self, span: Span, pat: PatKind) -> P { - P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: span }) + P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span }) } fn pat_wild(&self, span: Span) -> P { self.pat(span, PatKind::Wild) diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 945cf36af46fe..5473f55aa3370 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -231,7 +231,7 @@ pub struct MacroExpander<'a, 'b> { impl<'a, 'b> MacroExpander<'a, 'b> { pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self { - MacroExpander { cx: cx, monotonic: monotonic } + MacroExpander { cx, monotonic } } pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate { @@ -377,7 +377,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { _ => item.clone(), }; invocations.push(Invocation { - kind: InvocationKind::Derive { path: path.clone(), item: item }, + kind: InvocationKind::Derive { path: path.clone(), item }, fragment_kind: invoc.fragment_kind, expansion_data: ExpansionData { mark, @@ -944,7 +944,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { } fn collect_bang(&mut self, mac: ast::Mac, span: Span, kind: AstFragmentKind) -> AstFragment { - self.collect(kind, InvocationKind::Bang { mac: mac, ident: None, span: span }) + self.collect(kind, InvocationKind::Bang { mac, ident: None, span }) } fn collect_attr(&mut self, diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 6f5ce89bc315a..ccf9db842ab6e 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -319,7 +319,7 @@ fn parse_tree( 
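Everything in this patch is the same mechanical fix: Rust's field init shorthand lets `field: field` be written as just `field` when a local of the same name initializes the field, which is what `clippy::redundant_field_names` asks for. A small self-contained sketch (simplified; the `Encoder` here only loosely mirrors the one in `json.rs`):

```rust
struct Encoder<'a> {
    writer: &'a mut String,
    is_emitting_map_key: bool,
}

impl<'a> Encoder<'a> {
    fn new(writer: &'a mut String) -> Encoder<'a> {
        // `writer: writer` collapses to `writer`; fields without a matching
        // local variable keep the explicit `name: value` form.
        Encoder { writer, is_emitting_map_key: false }
    }
}

fn main() {
    let mut buf = String::new();
    let enc = Encoder::new(&mut buf);
    assert!(!enc.is_emitting_map_key);
    enc.writer.push_str("ok");
    assert_eq!(buf, "ok");
}
```

One hunk further down also renames a parameter from `sp` to `span` (in `TokenCursorFrame::new`) precisely so the shorthand applies there as well.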
tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited( span, Lrc::new(Delimited { - delim: delim, + delim, tts: parse( tts.into(), expect_matchers, diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index c51f4b20c31c0..ea7f8e356aa63 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -23,8 +23,8 @@ enum Frame { impl Frame { /// Construct a new frame around the delimited set of tokens. fn new(tts: Vec) -> Frame { - let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts: tts }); - Frame::Delimited { forest: forest, idx: 0, span: DelimSpan::dummy() } + let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts }); + Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() } } } @@ -248,7 +248,7 @@ pub fn transcribe( // the previous results (from outside the Delimited). quoted::TokenTree::Delimited(mut span, delimited) => { span = span.apply_mark(cx.current_expansion.mark); - stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span }); + stack.push(Frame::Delimited { forest: delimited, idx: 0, span }); result_stack.push(mem::replace(&mut result, Vec::new())); } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index c405acd8ee3f6..a6e8441a915e0 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -1665,7 +1665,7 @@ impl<'a> Context<'a> { } pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features: &Features) { - let cx = Context { features: features, parse_sess: parse_sess, plugin_attributes: &[] }; + let cx = Context { features, parse_sess, plugin_attributes: &[] }; cx.check_attribute( attr, attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name).map(|a| *a)), diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index a1440f2eba47e..fc206580e3811 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -290,10 +290,10 @@ crate enum LastToken { } impl TokenCursorFrame { - fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self { + fn new(span: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self { TokenCursorFrame { - delim: delim, - span: sp, + delim, + span, open_delim: delim == token::NoDelim, tree_cursor: tts.clone().into_trees(), close_delim: delim == token::NoDelim, @@ -1449,7 +1449,7 @@ impl<'a> Parser<'a> { let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None }; let mutbl = self.parse_mutability(); let ty = self.parse_ty_no_plus()?; - return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty: ty, mutbl: mutbl })); + return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty, mutbl })); } fn parse_ptr(&mut self) -> PResult<'a, MutTy> { @@ -1467,7 +1467,7 @@ impl<'a> Parser<'a> { Mutability::Immutable }; let t = self.parse_ty_no_plus()?; - Ok(MutTy { ty: t, mutbl: mutbl }) + Ok(MutTy { ty: t, mutbl }) } fn is_named_argument(&self) -> bool { @@ -4366,7 +4366,7 @@ impl<'a> Parser<'a> { self.report_invalid_macro_expansion_item(); } - (ident, ast::MacroDef { tokens: tokens, legacy: true }) + (ident, ast::MacroDef { tokens, legacy: true }) } _ => return Ok(None), }; @@ -6789,12 +6789,12 @@ impl<'a> Parser<'a> { let hi = self.token.span; self.expect(&token::Semi)?; Ok(ast::ForeignItem { - ident: ident, - attrs: attrs, + ident, + attrs, node: ForeignItemKind::Ty, id: ast::DUMMY_NODE_ID, span: lo.to(hi), - vis: vis + vis }) } diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs 
index c0307263387ec..ac30cbb471aea 100644 --- a/src/libsyntax/source_map.rs +++ b/src/libsyntax/source_map.rs @@ -150,7 +150,7 @@ impl SourceMap { -> SourceMap { SourceMap { files: Default::default(), - file_loader: file_loader, + file_loader, path_mapping, } } @@ -396,7 +396,7 @@ impl SourceMap { let f = (*self.files.borrow().source_files)[idx].clone(); match f.lookup_line(pos) { - Some(line) => Ok(SourceFileAndLine { sf: f, line: line }), + Some(line) => Ok(SourceFileAndLine { sf: f, line }), None => Err(f) } } @@ -511,7 +511,7 @@ impl SourceMap { start_col, end_col: hi.col }); - Ok(FileLines {file: lo.file, lines: lines}) + Ok(FileLines {file: lo.file, lines}) } /// Extracts the source surrounding the given `Span` using the `extract_source` function. The @@ -820,7 +820,7 @@ impl SourceMap { let idx = self.lookup_source_file_idx(bpos); let sf = (*self.files.borrow().source_files)[idx].clone(); let offset = bpos - sf.start_pos; - SourceFileAndBytePos {sf: sf, pos: offset} + SourceFileAndBytePos {sf, pos: offset} } /// Converts an absolute BytePos to a CharPos relative to the source_file. From 6c93b47c010f09d6f1ec8303898984944fdbd6f0 Mon Sep 17 00:00:00 2001 From: Igor Matuszewski Date: Wed, 26 Jun 2019 14:04:37 +0200 Subject: [PATCH 3/6] Fix clippy::cast_losless --- src/librustc_data_structures/fingerprint.rs | 4 ++-- src/librustc_data_structures/sip128.rs | 6 +++--- src/librustc_data_structures/stable_hasher.rs | 2 +- src/libserialize/json.rs | 12 ++++++------ src/libserialize/leb128.rs | 2 +- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/librustc_data_structures/fingerprint.rs b/src/librustc_data_structures/fingerprint.rs index c4c0db5801209..7975c62b90fb6 100644 --- a/src/librustc_data_structures/fingerprint.rs +++ b/src/librustc_data_structures/fingerprint.rs @@ -39,8 +39,8 @@ impl Fingerprint { // you want. 
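The `clippy::cast_lossless` changes that follow replace widening `as` casts with `From`-based conversions (`u128::from`, `u64::from`, `f64::from`, ...). The result is identical, but a `From` conversion is guaranteed lossless and stops compiling if a type later changes to one where the cast could truncate. A rough standalone illustration of the `Fingerprint` case (the `lo`/`hi` names are mine, not from the patch):

```rust
// Pack two u64 halves into one u128, widening each half losslessly.
fn combine(lo: u64, hi: u64) -> u128 {
    // `u128::from(hi)` is equivalent to `hi as u128` here, but it cannot
    // silently become a truncating cast if the operand type ever changes.
    (u128::from(hi) << 64) | u128::from(lo)
}

fn main() {
    assert_eq!(combine(1, 0), 1);
    assert_eq!(combine(0, 1), 1u128 << 64);
}
```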
#[inline] pub fn combine_commutative(self, other: Fingerprint) -> Fingerprint { - let a = (self.1 as u128) << 64 | self.0 as u128; - let b = (other.1 as u128) << 64 | other.0 as u128; + let a = u128::from(self.1) << 64 | u128::from(self.0); + let b = u128::from(other.1) << 64 | u128::from(other.0); let c = a.wrapping_add(b); diff --git a/src/librustc_data_structures/sip128.rs b/src/librustc_data_structures/sip128.rs index 06f157f972932..7838a90089024 100644 --- a/src/librustc_data_structures/sip128.rs +++ b/src/librustc_data_structures/sip128.rs @@ -70,15 +70,15 @@ unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 { let mut i = 0; // current byte index (from LSB) in the output u64 let mut out = 0; if i + 3 < len { - out = load_int_le!(buf, start + i, u32) as u64; + out = u64::from(load_int_le!(buf, start + i, u32)); i += 4; } if i + 1 < len { - out |= (load_int_le!(buf, start + i, u16) as u64) << (i * 8); + out |= u64::from(load_int_le!(buf, start + i, u16)) << (i * 8); i += 2 } if i < len { - out |= (*buf.get_unchecked(start + i) as u64) << (i * 8); + out |= u64::from(*buf.get_unchecked(start + i)) << (i * 8); i += 1; } debug_assert_eq!(i, len); diff --git a/src/librustc_data_structures/stable_hasher.rs b/src/librustc_data_structures/stable_hasher.rs index 0c81c27a96ee5..47dfc1d1688d0 100644 --- a/src/librustc_data_structures/stable_hasher.rs +++ b/src/librustc_data_structures/stable_hasher.rs @@ -44,7 +44,7 @@ impl StableHasher { impl StableHasherResult for u128 { fn finish(hasher: StableHasher) -> Self { let (_0, _1) = hasher.finalize(); - (_0 as u128) | ((_1 as u128) << 64) + u128::from(_0) | (u128::from(_1) << 64) } } diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs index 826954ce2168c..a7e7c09f9ae44 100644 --- a/src/libserialize/json.rs +++ b/src/libserialize/json.rs @@ -513,7 +513,7 @@ impl<'a> crate::Encoder for Encoder<'a> { emit_enquoted_if_mapkey!(self, fmt_number_or_null(v)) } fn emit_f32(&mut self, v: f32) -> EncodeResult { - self.emit_f64(v as f64) + self.emit_f64(f64::from(v)) } fn emit_char(&mut self, v: char) -> EncodeResult { @@ -763,7 +763,7 @@ impl<'a> crate::Encoder for PrettyEncoder<'a> { emit_enquoted_if_mapkey!(self, fmt_number_or_null(v)) } fn emit_f32(&mut self, v: f32) -> EncodeResult { - self.emit_f64(v as f64) + self.emit_f64(f64::from(v)) } fn emit_char(&mut self, v: char) -> EncodeResult { @@ -1698,12 +1698,12 @@ impl> Parser { if n2 < 0xDC00 || n2 > 0xDFFF { return self.error(LoneLeadingSurrogateInHexEscape) } - let c = (((n1 - 0xD800) as u32) << 10 | - (n2 - 0xDC00) as u32) + 0x1_0000; + let c = (u32::from(n1 - 0xD800) << 10 | + u32::from(n2 - 0xDC00)) + 0x1_0000; res.push(char::from_u32(c).unwrap()); } - n => match char::from_u32(n as u32) { + n => match char::from_u32(u32::from(n)) { Some(c) => res.push(c), None => return self.error(InvalidUnicodeCodePoint), }, @@ -2405,7 +2405,7 @@ impl ToJson for Json { } impl ToJson for f32 { - fn to_json(&self) -> Json { (*self as f64).to_json() } + fn to_json(&self) -> Json { f64::from(*self).to_json() } } impl ToJson for f64 { diff --git a/src/libserialize/leb128.rs b/src/libserialize/leb128.rs index 16ff59489e718..f9d80842d7558 100644 --- a/src/libserialize/leb128.rs +++ b/src/libserialize/leb128.rs @@ -123,7 +123,7 @@ pub fn read_signed_leb128(data: &[u8], start_position: usize) -> (i128, usize) { loop { byte = data[position]; position += 1; - result |= ((byte & 0x7F) as i128) << shift; + result |= i128::from(byte & 0x7F) << shift; shift += 7; if (byte & 0x80) == 0 { From 
33f58baf6eb6beea0ec5c93b1c81714b05eae378 Mon Sep 17 00:00:00 2001 From: Igor Matuszewski Date: Wed, 26 Jun 2019 14:07:08 +0200 Subject: [PATCH 4/6] Fix clippy::redundant_closure --- src/librustc_data_structures/obligation_forest/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/librustc_data_structures/obligation_forest/mod.rs b/src/librustc_data_structures/obligation_forest/mod.rs index 4490e5f86d2bd..557e5e2186f11 100644 --- a/src/librustc_data_structures/obligation_forest/mod.rs +++ b/src/librustc_data_structures/obligation_forest/mod.rs @@ -263,7 +263,7 @@ impl ObligationForest { done_cache: Default::default(), waiting_cache: Default::default(), scratch: Some(vec![]), - obligation_tree_id_generator: (0..).map(|i| ObligationTreeId(i)), + obligation_tree_id_generator: (0..).map(ObligationTreeId), error_cache: Default::default(), } } From 1af1f6277e78cb3e1d9c3aafc1e5a65c58916023 Mon Sep 17 00:00:00 2001 From: Igor Matuszewski Date: Wed, 26 Jun 2019 14:11:40 +0200 Subject: [PATCH 5/6] Fix clippy::print_with_newline --- src/librustc_errors/emitter.rs | 4 ++-- src/libsyntax/print/pp.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index fca8298409a61..a2717ab7ad8a9 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -1339,7 +1339,7 @@ impl EmitterWriter { } let mut dst = self.dst.writable(); - match write!(dst, "\n") { + match writeln!(dst) { Err(e) => panic!("failed to emit error: {}", e), _ => { match dst.flush() { @@ -1598,7 +1598,7 @@ fn emit_to_destination(rendered_buffer: &[Vec], dst.reset()?; } if !short_message && (!lvl.is_failure_note() || pos != rendered_buffer.len() - 1) { - write!(dst, "\n")?; + writeln!(dst)?; } } dst.flush()?; diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 45eb6995a7699..f5412f3e21602 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -497,7 +497,7 @@ impl<'a> Printer<'a> { pub fn print_newline(&mut self, amount: isize) -> io::Result<()> { debug!("NEWLINE {}", amount); - let ret = write!(self.out, "\n"); + let ret = writeln!(self.out); self.pending_indentation = 0; self.indent(amount); ret From ad62b4203ce3f0bd4c7c348aeabca4f49d5ce075 Mon Sep 17 00:00:00 2001 From: Igor Matuszewski Date: Wed, 26 Jun 2019 14:14:27 +0200 Subject: [PATCH 6/6] Fix clippy::precedence --- src/librustc_data_structures/sip128.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/librustc_data_structures/sip128.rs b/src/librustc_data_structures/sip128.rs index 7838a90089024..e5de359e4759e 100644 --- a/src/librustc_data_structures/sip128.rs +++ b/src/librustc_data_structures/sip128.rs @@ -237,7 +237,7 @@ impl Hasher for SipHasher128 { if self.ntail != 0 { needed = 8 - self.ntail; - self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << 8 * self.ntail; + self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << (8 * self.ntail); if length < needed { self.ntail += length; return
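The final three patches are each a one-line lint fix: passing the tuple-struct constructor straight to `map` instead of wrapping it in a closure (`clippy::redundant_closure`), emitting a newline with `writeln!` rather than `write!(dst, "\n")` (`clippy::print_with_newline`), and parenthesizing the shift amount in `8 * self.ntail` (`clippy::precedence`; `*` already binds tighter than `<<`, so only readability changes). A compact sketch of all three patterns, independent of the compiler sources:

```rust
use std::fmt::Write;

#[derive(Debug, PartialEq)]
struct ObligationTreeId(usize);

fn main() {
    // redundant_closure: the tuple-struct constructor is already a function,
    // so `.map(|i| ObligationTreeId(i))` can simply be `.map(ObligationTreeId)`.
    let ids: Vec<ObligationTreeId> = (0..3).map(ObligationTreeId).collect();
    assert_eq!(ids, vec![ObligationTreeId(0), ObligationTreeId(1), ObligationTreeId(2)]);

    // The `write!(dst, "\n")` -> `writeln!(dst)` change: same output, clearer intent.
    let mut dst = String::new();
    writeln!(dst, "{} obligations", ids.len()).unwrap();
    assert_eq!(dst, "3 obligations\n");

    // precedence: parentheses make the grouping explicit; the value is unchanged.
    let ntail: u64 = 3;
    assert_eq!(0xFFu64 << 8 * ntail, 0xFFu64 << (8 * ntail));
}
```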