diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index bbece1ee5e3d4..89bf10244dac2 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -34,7 +34,10 @@ pub struct TokenAndSpan {
 
 impl Default for TokenAndSpan {
     fn default() -> Self {
-        TokenAndSpan { tok: token::Whitespace, sp: syntax_pos::DUMMY_SP }
+        TokenAndSpan {
+            tok: token::Whitespace,
+            sp: syntax_pos::DUMMY_SP,
+        }
     }
 }
 
@@ -54,8 +57,9 @@ pub struct StringReader<'a> {
     /// If part of a filemap is being re-lexed, this should be set to false.
     pub save_new_lines_and_multibyte: bool,
     // cached:
-    pub peek_tok: token::Token,
-    pub peek_span: Span,
+    peek_tok: token::Token,
+    peek_span: Span,
+    peek_span_src_raw: Span,
     pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
     // cache a direct reference to the source text, so that we don't have to
     // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time.
@@ -63,13 +67,20 @@ pub struct StringReader<'a> {
     /// Stack of open delimiters and their spans. Used for error message.
     token: token::Token,
     span: Span,
+    /// The raw source span which *does not* take `override_span` into account
+    span_src_raw: Span,
     open_braces: Vec<(token::DelimToken, Span)>,
     pub override_span: Option<Span>,
 }
 
 impl<'a> StringReader<'a> {
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
-        unwrap_or!(self.override_span, Span::new(lo, hi, NO_EXPANSION))
+        self.mk_sp_and_raw(lo, hi).0
+    }
+    fn mk_sp_and_raw(&self, lo: BytePos, hi: BytePos) -> (Span, Span) {
+        let raw = Span::new(lo, hi, NO_EXPANSION);
+        let real = unwrap_or!(self.override_span, raw);
+        (real, raw)
     }
     fn mk_ident(&self, string: &str) -> Ident {
         let mut ident = Ident::from_str(string);
@@ -121,6 +132,7 @@ impl<'a> StringReader<'a> {
             sp: self.peek_span,
         };
         self.advance_token()?;
+        self.span_src_raw = self.peek_span_src_raw;
         Ok(ret_val)
     }
 
@@ -180,10 +192,12 @@ impl<'a> StringReader<'a> {
             // dummy values; not read
            peek_tok: token::Eof,
            peek_span: syntax_pos::DUMMY_SP,
+            peek_span_src_raw: syntax_pos::DUMMY_SP,
            src,
            fatal_errs: Vec::new(),
            token: token::Eof,
            span: syntax_pos::DUMMY_SP,
+            span_src_raw: syntax_pos::DUMMY_SP,
            open_braces: Vec::new(),
            override_span: None,
        }
@@ -325,17 +339,25 @@ impl<'a> StringReader<'a> {
     fn advance_token(&mut self) -> Result<(), ()> {
         match self.scan_whitespace_or_comment() {
             Some(comment) => {
+                self.peek_span_src_raw = comment.sp;
                 self.peek_span = comment.sp;
                 self.peek_tok = comment.tok;
             }
             None => {
                 if self.is_eof() {
                     self.peek_tok = token::Eof;
-                    self.peek_span = self.mk_sp(self.filemap.end_pos, self.filemap.end_pos);
+                    let (real, raw) = self.mk_sp_and_raw(
+                        self.filemap.end_pos,
+                        self.filemap.end_pos,
+                    );
+                    self.peek_span = real;
+                    self.peek_span_src_raw = raw;
                 } else {
                     let start_bytepos = self.pos;
                     self.peek_tok = self.next_token_inner()?;
-                    self.peek_span = self.mk_sp(start_bytepos, self.pos);
+                    let (real, raw) = self.mk_sp_and_raw(start_bytepos, self.pos);
+                    self.peek_span = real;
+                    self.peek_span_src_raw = raw;
                 };
             }
         }
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index a2c81e2475428..278b8c991f7b8 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -18,9 +18,7 @@ impl<'a> StringReader<'a> {
     pub fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
         let mut tts = Vec::new();
         while self.token != token::Eof {
-            let tree = self.parse_token_tree()?;
-            let is_joint = tree.span().hi() == self.span.lo() && token::is_op(&self.token);
-            tts.push(if is_joint { tree.joint() } else { tree.into() });
+            tts.push(self.parse_token_tree()?);
         }
         Ok(TokenStream::concat(tts))
     }
@@ -32,19 +30,17 @@ impl<'a> StringReader<'a> {
             if let token::CloseDelim(..) = self.token {
                 return TokenStream::concat(tts);
             }
-            let tree = match self.parse_token_tree() {
-                Ok(tree) => tree,
+            match self.parse_token_tree() {
+                Ok(tree) => tts.push(tree),
                 Err(mut e) => {
                     e.emit();
                     return TokenStream::concat(tts);
                 }
-            };
-            let is_joint = tree.span().hi() == self.span.lo() && token::is_op(&self.token);
-            tts.push(if is_joint { tree.joint() } else { tree.into() });
+            }
         }
     }
 
-    fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
+    fn parse_token_tree(&mut self) -> PResult<'a, TokenStream> {
         match self.token {
             token::Eof => {
                 let msg = "this file contains an un-closed delimiter";
@@ -115,7 +111,7 @@ impl<'a> StringReader<'a> {
                 Ok(TokenTree::Delimited(span, Delimited {
                     delim,
                     tts: tts.into(),
-                }))
+                }).into())
             },
             token::CloseDelim(_) => {
                 // An unexpected closing delimiter (i.e., there is no
@@ -127,8 +123,13 @@ impl<'a> StringReader<'a> {
             },
             _ => {
                 let tt = TokenTree::Token(self.span, self.token.clone());
+                // Note that testing for joint-ness here is done via the raw
+                // source span as the joint-ness is a property of the raw source
+                // rather than wanting to take `override_span` into account.
+                let raw = self.span_src_raw;
                 self.real_token();
-                Ok(tt)
+                let is_joint = raw.hi() == self.span_src_raw.lo() && token::is_op(&self.token);
+                Ok(if is_joint { tt.joint() } else { tt.into() })
             }
         }
     }
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 5575614a4d482..034be6a6864ce 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -581,6 +581,8 @@ impl Token {
             if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
                 return tokens
             }
+            info!("cached tokens found, but they're not \"probably equal\", \
+                   going with stringified version");
         }
         return tokens_for_real
     }
diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/not-joint.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/not-joint.rs
new file mode 100644
index 0000000000000..062a914534f13
--- /dev/null
+++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/not-joint.rs
@@ -0,0 +1,40 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+#![feature(proc_macro)]
+
+extern crate proc_macro;
+
+use proc_macro::*;
+
+#[proc_macro]
+pub fn tokens(input: TokenStream) -> TokenStream {
+    assert_nothing_joint(input);
+    TokenStream::empty()
+}
+
+#[proc_macro_attribute]
+pub fn nothing(_: TokenStream, input: TokenStream) -> TokenStream {
+    assert_nothing_joint(input);
+    TokenStream::empty()
+}
+
+fn assert_nothing_joint(s: TokenStream) {
+    for tt in s {
+        match tt {
+            TokenTree::Group(g) => assert_nothing_joint(g.stream()),
+            TokenTree::Punct(p) => assert_eq!(p.spacing(), Spacing::Alone),
+            _ => {}
+        }
+    }
+}
diff --git a/src/test/run-pass-fulldeps/proc-macro/not-joint.rs b/src/test/run-pass-fulldeps/proc-macro/not-joint.rs
new file mode 100644
index 0000000000000..34dfae9f1580a
--- /dev/null
+++ b/src/test/run-pass-fulldeps/proc-macro/not-joint.rs
@@ -0,0 +1,35 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:not-joint.rs
+
+#![feature(proc_macro)]
+
+extern crate not_joint as bar;
+use bar::{tokens, nothing};
+
+tokens![< -];
+
+#[nothing]
+a![< -];
+
+#[nothing]
+b!{< -}
+
+#[nothing]
+c!(< -);
+
+#[nothing]
+fn foo() {
+    //! dox
+    let x = 2 < - 3;
+}
+
+fn main() {}
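
---

For context on what the new tests assert: a `Punct` reports `Spacing::Joint` when the next token follows it immediately in the source with no intervening whitespace, which is how a procedural macro can tell `<-` apart from `< -`. The patch makes the lexer compute that adjacency from the raw source span (`span_src_raw`) rather than from a span that may have been replaced via `override_span`, since joint-ness is a property of the raw source text. The sketch below shows how this surfaces through the public API; it uses the proc-macro2 crate (an assumption, not part of this patch) because it mirrors `proc_macro` while running in an ordinary binary:

```rust
// Minimal sketch: inspect the Spacing reported for punctuation tokens.
// Assumes a dependency on the proc-macro2 crate.
use proc_macro2::{Spacing, TokenStream, TokenTree};

// Collect the spacing of every punctuation token in `src`.
fn spacings(src: &str) -> Vec<Spacing> {
    let stream: TokenStream = src.parse().expect("valid token stream");
    stream
        .into_iter()
        .filter_map(|tt| match tt {
            TokenTree::Punct(p) => Some(p.spacing()),
            _ => None,
        })
        .collect()
}

fn main() {
    // `<-` with no gap: the `<` is Joint with the `-` that follows it.
    assert_eq!(spacings("<-"), vec![Spacing::Joint, Spacing::Alone]);
    // `< -` with a space: both puncts are Alone, so a consumer must not
    // glue them back together into a single `<-` sequence.
    assert_eq!(spacings("< -"), vec![Spacing::Alone, Spacing::Alone]);
}
```

The new `not-joint.rs` test exercises the same invariant across a macro boundary: every `Punct` that reaches `assert_nothing_joint` from the spaced-out `< -` inputs must come through as `Spacing::Alone`.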