
libsyntax: Fix some Clippy warnings #62131

Merged: 6 commits, Jun 28, 2019
4 changes: 2 additions & 2 deletions src/librustc_data_structures/fingerprint.rs
@@ -39,8 +39,8 @@ impl Fingerprint {
// you want.
#[inline]
pub fn combine_commutative(self, other: Fingerprint) -> Fingerprint {
let a = (self.1 as u128) << 64 | self.0 as u128;
let b = (other.1 as u128) << 64 | other.0 as u128;
let a = u128::from(self.1) << 64 | u128::from(self.0);
let b = u128::from(other.1) << 64 | u128::from(other.0);

let c = a.wrapping_add(b);

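Note: the fingerprint.rs hunk above (and several hunks below) swaps widening `as` casts for `From` conversions, which is what Clippy's `cast_lossless` lint suggests. A minimal sketch of the pattern, using made-up values rather than the PR's actual data:

```rust
// Illustrative only: `From` is implemented solely for lossless conversions,
// so it keeps compiling correctly if the source type ever changes, whereas
// `as` would silently truncate.
fn combine(lo: u64, hi: u64) -> u128 {
    // Before: (hi as u128) << 64 | lo as u128
    u128::from(hi) << 64 | u128::from(lo)
}

fn main() {
    assert_eq!(combine(1, 1), (1u128 << 64) | 1);
}
```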
2 changes: 1 addition & 1 deletion src/librustc_data_structures/obligation_forest/mod.rs
@@ -263,7 +263,7 @@ impl<O: ForestObligation> ObligationForest<O> {
done_cache: Default::default(),
waiting_cache: Default::default(),
scratch: Some(vec![]),
obligation_tree_id_generator: (0..).map(|i| ObligationTreeId(i)),
obligation_tree_id_generator: (0..).map(ObligationTreeId),
error_cache: Default::default(),
}
}
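The `(0..).map(ObligationTreeId)` change is Clippy's `redundant_closure` lint: a tuple-struct constructor is already a function item, so it can be passed to `map` directly. A small self-contained illustration (the `Id` type is made up for the example):

```rust
// A tuple struct's constructor can be used wherever an `Fn(usize) -> Id` is expected.
struct Id(usize);

fn main() {
    // Flagged form: (0..3).map(|i| Id(i))
    let ids: Vec<Id> = (0..3).map(Id).collect();
    assert_eq!(ids.len(), 3);
    assert_eq!(ids[2].0, 2);
}
```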
8 changes: 4 additions & 4 deletions src/librustc_data_structures/sip128.rs
@@ -70,15 +70,15 @@ unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 {
let mut i = 0; // current byte index (from LSB) in the output u64
let mut out = 0;
if i + 3 < len {
out = load_int_le!(buf, start + i, u32) as u64;
out = u64::from(load_int_le!(buf, start + i, u32));
i += 4;
}
if i + 1 < len {
out |= (load_int_le!(buf, start + i, u16) as u64) << (i * 8);
out |= u64::from(load_int_le!(buf, start + i, u16)) << (i * 8);
i += 2
}
if i < len {
out |= (*buf.get_unchecked(start + i) as u64) << (i * 8);
out |= u64::from(*buf.get_unchecked(start + i)) << (i * 8);
i += 1;
}
debug_assert_eq!(i, len);
@@ -237,7 +237,7 @@ impl Hasher for SipHasher128 {

if self.ntail != 0 {
needed = 8 - self.ntail;
self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << 8 * self.ntail;
self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << (8 * self.ntail);
if length < needed {
self.ntail += length;
return
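Besides the `u64::from` conversions, the last hunk here parenthesizes `8 * self.ntail` before the shift. The behavior is unchanged (`*` already binds tighter than `<<`); the parentheses satisfy Clippy's `precedence` lint, which asks for explicit grouping when shifts and arithmetic mix. A quick check of the equivalence, with placeholder values:

```rust
fn main() {
    let ntail: u64 = 3;
    // `*` binds tighter than `<<`, so both lines compute the same value;
    // the second form just makes the grouping explicit.
    let implicit = 1u64 << 8 * ntail;
    let explicit = 1u64 << (8 * ntail);
    assert_eq!(implicit, explicit);
}
```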
2 changes: 1 addition & 1 deletion src/librustc_data_structures/stable_hasher.rs
@@ -44,7 +44,7 @@ impl<W: StableHasherResult> StableHasher<W> {
impl StableHasherResult for u128 {
fn finish(hasher: StableHasher<Self>) -> Self {
let (_0, _1) = hasher.finalize();
(_0 as u128) | ((_1 as u128) << 64)
u128::from(_0) | (u128::from(_1) << 64)
}
}

2 changes: 1 addition & 1 deletion src/librustc_data_structures/vec_linked_list.rs
@@ -8,7 +8,7 @@ where
Ls: Links,
{
VecLinkedListIterator {
links: links,
links,
current: first,
}
}
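This hunk and most of the remaining ones are the same mechanical fix: Clippy's `redundant_field_names` lint, which prefers field-init shorthand when a field is initialized from a variable of the same name. A minimal illustration with made-up types:

```rust
struct Iter {
    links: u32,
    current: u32,
}

fn main() {
    let links = 1;
    let current = 2;
    // Before: Iter { links: links, current: current }
    let it = Iter { links, current };
    assert_eq!(it.links + it.current, 3);
}
```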
2 changes: 1 addition & 1 deletion src/librustc_errors/annotate_snippet_emitter_writer.rs
@@ -94,7 +94,7 @@ impl<'a> DiagnosticConverter<'a> {
annotation_type: Self::annotation_type_for_level(self.level),
}),
footer: vec![],
slices: slices,
slices,
})
} else {
// FIXME(#59346): Is it ok to return None if there's no source_map?
2 changes: 1 addition & 1 deletion src/librustc_errors/diagnostic.rs
@@ -388,7 +388,7 @@ impl Diagnostic {
}],
msg: msg.to_owned(),
style: SuggestionStyle::CompletelyHidden,
applicability: applicability,
applicability,
});
self
}
4 changes: 2 additions & 2 deletions src/librustc_errors/emitter.rs
@@ -1339,7 +1339,7 @@ impl EmitterWriter {
}

let mut dst = self.dst.writable();
match write!(dst, "\n") {
match writeln!(dst) {
Err(e) => panic!("failed to emit error: {}", e),
_ => {
match dst.flush() {
@@ -1598,7 +1598,7 @@ fn emit_to_destination(rendered_buffer: &[Vec<StyledString>],
dst.reset()?;
}
if !short_message && (!lvl.is_failure_note() || pos != rendered_buffer.len() - 1) {
write!(dst, "\n")?;
writeln!(dst)?;
}
}
dst.flush()?;
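The two `write!(dst, "\n")` to `writeln!(dst)` changes (and the one in `pp.rs` further down) address Clippy's `write_with_newline` lint; `writeln!` with no format string simply emits the newline. A small sketch against an in-memory writer:

```rust
use std::io::Write;

fn main() -> std::io::Result<()> {
    let mut out = Vec::new();
    write!(out, "\n")?; // flagged by write_with_newline
    writeln!(out)?;     // preferred: emits the same single newline
    assert_eq!(out, vec![b'\n', b'\n']);
    Ok(())
}
```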
2 changes: 1 addition & 1 deletion src/librustc_target/spec/fuchsia_base.rs
@@ -19,7 +19,7 @@ pub fn opts() -> TargetOptions {
is_like_fuchsia: true,
linker_is_gnu: true,
has_rpath: false,
pre_link_args: pre_link_args,
pre_link_args,
pre_link_objects_exe: vec![
"Scrt1.o".to_string()
],
14 changes: 7 additions & 7 deletions src/libserialize/json.rs
@@ -461,7 +461,7 @@ impl<'a> Encoder<'a> {
/// Creates a new JSON encoder whose output will be written to the writer
/// specified.
pub fn new(writer: &'a mut dyn fmt::Write) -> Encoder<'a> {
Encoder { writer: writer, is_emitting_map_key: false, }
Encoder { writer, is_emitting_map_key: false, }
}
}

@@ -513,7 +513,7 @@ impl<'a> crate::Encoder for Encoder<'a> {
emit_enquoted_if_mapkey!(self, fmt_number_or_null(v))
}
fn emit_f32(&mut self, v: f32) -> EncodeResult {
self.emit_f64(v as f64)
self.emit_f64(f64::from(v))
}

fn emit_char(&mut self, v: char) -> EncodeResult {
@@ -763,7 +763,7 @@ impl<'a> crate::Encoder for PrettyEncoder<'a> {
emit_enquoted_if_mapkey!(self, fmt_number_or_null(v))
}
fn emit_f32(&mut self, v: f32) -> EncodeResult {
self.emit_f64(v as f64)
self.emit_f64(f64::from(v))
}

fn emit_char(&mut self, v: char) -> EncodeResult {
@@ -1698,12 +1698,12 @@ impl<T: Iterator<Item=char>> Parser<T> {
if n2 < 0xDC00 || n2 > 0xDFFF {
return self.error(LoneLeadingSurrogateInHexEscape)
}
let c = (((n1 - 0xD800) as u32) << 10 |
(n2 - 0xDC00) as u32) + 0x1_0000;
let c = (u32::from(n1 - 0xD800) << 10 |
u32::from(n2 - 0xDC00)) + 0x1_0000;
res.push(char::from_u32(c).unwrap());
}

n => match char::from_u32(n as u32) {
n => match char::from_u32(u32::from(n)) {
Some(c) => res.push(c),
None => return self.error(InvalidUnicodeCodePoint),
},
@@ -2405,7 +2405,7 @@ impl ToJson for Json {
}

impl ToJson for f32 {
fn to_json(&self) -> Json { (*self as f64).to_json() }
fn to_json(&self) -> Json { f64::from(*self).to_json() }
}

impl ToJson for f64 {
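The surrogate-pair hunk in json.rs only changes the cast style; the decoding formula itself is the standard UTF-16 combination (subtract the surrogate bases, shift the high half by 10 bits, add 0x10000). A small sanity check of that formula with a known code point, not taken from the PR:

```rust
fn main() {
    // U+1F600 is encoded in UTF-16 as the surrogate pair 0xD83D, 0xDE00.
    let (n1, n2): (u16, u16) = (0xD83D, 0xDE00);
    let c = (u32::from(n1 - 0xD800) << 10 | u32::from(n2 - 0xDC00)) + 0x1_0000;
    assert_eq!(std::char::from_u32(c), Some('😀'));
}
```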
2 changes: 1 addition & 1 deletion src/libserialize/leb128.rs
@@ -123,7 +123,7 @@ pub fn read_signed_leb128(data: &[u8], start_position: usize) -> (i128, usize) {
loop {
byte = data[position];
position += 1;
result |= ((byte & 0x7F) as i128) << shift;
result |= i128::from(byte & 0x7F) << shift;
shift += 7;

if (byte & 0x80) == 0 {
4 changes: 2 additions & 2 deletions src/libserialize/opaque.rs
@@ -296,13 +296,13 @@ impl<'a> serialize::Decoder for Decoder<'a> {
#[inline]
fn read_f64(&mut self) -> Result<f64, Self::Error> {
let bits = self.read_u64()?;
Ok(unsafe { ::std::mem::transmute(bits) })
Ok(f64::from_bits(bits))
}

#[inline]
fn read_f32(&mut self) -> Result<f32, Self::Error> {
let bits = self.read_u32()?;
Ok(unsafe { ::std::mem::transmute(bits) })
Ok(f32::from_bits(bits))
}

#[inline]
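Replacing `mem::transmute` with `f64::from_bits` / `f32::from_bits` in opaque.rs drops the `unsafe` blocks entirely; the standard-library functions perform the same bit-pattern reinterpretation, which is what Clippy's `transmute_int_to_float` lint recommends. A quick illustration with a hand-picked bit pattern:

```rust
fn main() {
    // 0x4000_0000_0000_0000 is the IEEE-754 encoding of 2.0f64.
    let bits: u64 = 0x4000_0000_0000_0000;
    let x = f64::from_bits(bits); // safe, no transmute needed
    assert_eq!(x, 2.0);
    assert_eq!(x.to_bits(), bits);
}
```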
4 changes: 2 additions & 2 deletions src/libsyntax/ast.rs
@@ -1832,7 +1832,7 @@ impl Arg {
lt,
MutTy {
ty: infer_ty,
mutbl: mutbl,
mutbl,
},
),
span,
@@ -2120,7 +2120,7 @@ impl PolyTraitRef {
PolyTraitRef {
bound_generic_params: generic_params,
trait_ref: TraitRef {
path: path,
path,
ref_id: DUMMY_NODE_ID,
},
span,
2 changes: 1 addition & 1 deletion src/libsyntax/ext/build.rs
@@ -815,7 +815,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {


fn pat(&self, span: Span, pat: PatKind) -> P<ast::Pat> {
P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: span })
P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span })
}
fn pat_wild(&self, span: Span) -> P<ast::Pat> {
self.pat(span, PatKind::Wild)
6 changes: 3 additions & 3 deletions src/libsyntax/ext/expand.rs
@@ -231,7 +231,7 @@ pub struct MacroExpander<'a, 'b> {

impl<'a, 'b> MacroExpander<'a, 'b> {
pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self {
MacroExpander { cx: cx, monotonic: monotonic }
MacroExpander { cx, monotonic }
}

pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate {
@@ -377,7 +377,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
_ => item.clone(),
};
invocations.push(Invocation {
kind: InvocationKind::Derive { path: path.clone(), item: item },
kind: InvocationKind::Derive { path: path.clone(), item },
fragment_kind: invoc.fragment_kind,
expansion_data: ExpansionData {
mark,
@@ -944,7 +944,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
}

fn collect_bang(&mut self, mac: ast::Mac, span: Span, kind: AstFragmentKind) -> AstFragment {
self.collect(kind, InvocationKind::Bang { mac: mac, ident: None, span: span })
self.collect(kind, InvocationKind::Bang { mac, ident: None, span })
}

fn collect_attr(&mut self,
2 changes: 1 addition & 1 deletion src/libsyntax/ext/tt/quoted.rs
@@ -319,7 +319,7 @@ fn parse_tree(
tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
span,
Lrc::new(Delimited {
delim: delim,
delim,
tts: parse(
tts.into(),
expect_matchers,
6 changes: 3 additions & 3 deletions src/libsyntax/ext/tt/transcribe.rs
@@ -23,8 +23,8 @@ enum Frame {
impl Frame {
/// Construct a new frame around the delimited set of tokens.
fn new(tts: Vec<quoted::TokenTree>) -> Frame {
let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts: tts });
Frame::Delimited { forest: forest, idx: 0, span: DelimSpan::dummy() }
let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts });
Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
}
}

@@ -248,7 +248,7 @@ pub fn transcribe(
// the previous results (from outside the Delimited).
quoted::TokenTree::Delimited(mut span, delimited) => {
span = span.apply_mark(cx.current_expansion.mark);
stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
stack.push(Frame::Delimited { forest: delimited, idx: 0, span });
result_stack.push(mem::replace(&mut result, Vec::new()));
}

2 changes: 1 addition & 1 deletion src/libsyntax/feature_gate.rs
@@ -1665,7 +1665,7 @@ impl<'a> Context<'a> {
}

pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features: &Features) {
let cx = Context { features: features, parse_sess: parse_sess, plugin_attributes: &[] };
let cx = Context { features, parse_sess, plugin_attributes: &[] };
cx.check_attribute(
attr,
attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name).map(|a| *a)),
18 changes: 9 additions & 9 deletions src/libsyntax/parse/parser.rs
@@ -290,10 +290,10 @@ crate enum LastToken {
}

impl TokenCursorFrame {
fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
fn new(span: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
TokenCursorFrame {
delim: delim,
span: sp,
delim,
span,
open_delim: delim == token::NoDelim,
tree_cursor: tts.clone().into_trees(),
close_delim: delim == token::NoDelim,
@@ -1449,7 +1449,7 @@ impl<'a> Parser<'a> {
let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
let mutbl = self.parse_mutability();
let ty = self.parse_ty_no_plus()?;
return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty: ty, mutbl: mutbl }));
return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty, mutbl }));
}

fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
@@ -1467,7 +1467,7 @@ impl<'a> Parser<'a> {
Mutability::Immutable
};
let t = self.parse_ty_no_plus()?;
Ok(MutTy { ty: t, mutbl: mutbl })
Ok(MutTy { ty: t, mutbl })
}

fn is_named_argument(&self) -> bool {
@@ -4366,7 +4366,7 @@ impl<'a> Parser<'a> {
self.report_invalid_macro_expansion_item();
}

(ident, ast::MacroDef { tokens: tokens, legacy: true })
(ident, ast::MacroDef { tokens, legacy: true })
}
_ => return Ok(None),
};
@@ -6789,12 +6789,12 @@ impl<'a> Parser<'a> {
let hi = self.token.span;
self.expect(&token::Semi)?;
Ok(ast::ForeignItem {
ident: ident,
attrs: attrs,
ident,
attrs,
node: ForeignItemKind::Ty,
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
vis: vis
vis
})
}

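One detail in the parser.rs hunks: `TokenCursorFrame::new` renames its `sp` parameter to `span` specifically so the field-init shorthand can apply to that field too. A tiny illustration of the pattern with made-up types:

```rust
struct Frame {
    span: u32,
    delim: bool,
}

impl Frame {
    // Before: fn new(sp: u32, delim: bool) -> Self { Frame { span: sp, delim } }
    fn new(span: u32, delim: bool) -> Self {
        Frame { span, delim }
    }
}

fn main() {
    let f = Frame::new(7, true);
    assert_eq!(f.span, 7);
    assert!(f.delim);
}
```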
2 changes: 1 addition & 1 deletion src/libsyntax/print/pp.rs
@@ -497,7 +497,7 @@ impl<'a> Printer<'a> {

pub fn print_newline(&mut self, amount: isize) -> io::Result<()> {
debug!("NEWLINE {}", amount);
let ret = write!(self.out, "\n");
let ret = writeln!(self.out);
self.pending_indentation = 0;
self.indent(amount);
ret
8 changes: 4 additions & 4 deletions src/libsyntax/source_map.rs
@@ -150,7 +150,7 @@ impl SourceMap {
-> SourceMap {
SourceMap {
files: Default::default(),
file_loader: file_loader,
file_loader,
path_mapping,
}
}
@@ -396,7 +396,7 @@ impl SourceMap {
let f = (*self.files.borrow().source_files)[idx].clone();

match f.lookup_line(pos) {
Some(line) => Ok(SourceFileAndLine { sf: f, line: line }),
Some(line) => Ok(SourceFileAndLine { sf: f, line }),
None => Err(f)
}
}
@@ -511,7 +511,7 @@ impl SourceMap {
start_col,
end_col: hi.col });

Ok(FileLines {file: lo.file, lines: lines})
Ok(FileLines {file: lo.file, lines})
}

/// Extracts the source surrounding the given `Span` using the `extract_source` function. The
@@ -820,7 +820,7 @@ impl SourceMap {
let idx = self.lookup_source_file_idx(bpos);
let sf = (*self.files.borrow().source_files)[idx].clone();
let offset = bpos - sf.start_pos;
SourceFileAndBytePos {sf: sf, pos: offset}
SourceFileAndBytePos {sf, pos: offset}
}

/// Converts an absolute BytePos to a CharPos relative to the source_file.