Rollup merge of rust-lang#76291 - matklad:spacing, r=petrochenkov
Rename IsJoint -> Spacing

Builds on rust-lang#76286 and might conflict with rust-lang#76285

r? @petrochenkov
matklad authored Sep 4, 2020
2 parents 5861900 + ccf41dd commit b1101dc
Showing 11 changed files with 86 additions and 97 deletions.
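
For orientation, here is a minimal standalone sketch of what the renamed enum encodes (toy types, not the actual `rustc_ast` definitions): `Joint` marks a token that is immediately followed by the next token, as in the `+` of `+=`, while `Alone` replaces the old `NonJoint` and means the next token is separated from it.

```rust
// Standalone sketch (not the rustc_ast code): what the renamed enum encodes.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing {
    Alone, // next token is separated (old name: NonJoint)
    Joint, // next token follows with nothing in between
}

// Toy stand-in for the real `(TokenTree, Spacing)` pair.
type TreeAndSpacing = (char, Spacing);

fn main() {
    // `+=` lexes as two tokens, with the `+` marked Joint so a later pass
    // (or a proc macro) can tell it really was the compound operator.
    let plus_eq: Vec<TreeAndSpacing> = vec![('+', Spacing::Joint), ('=', Spacing::Alone)];
    // `+ =` produces the same token kinds, but the `+` is Alone.
    let plus_then_eq: Vec<TreeAndSpacing> = vec![('+', Spacing::Alone), ('=', Spacing::Alone)];
    assert_ne!(plus_eq, plus_then_eq);
    println!("{:?}\n{:?}", plus_eq, plus_then_eq);
}
```
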
16 changes: 8 additions & 8 deletions compiler/rustc_ast/src/attr/mod.rs
@@ -8,7 +8,7 @@ use crate::ast::{Path, PathSegment};
use crate::mut_visit::visit_clobber;
use crate::ptr::P;
use crate::token::{self, CommentKind, Token};
-use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
+use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing};

use rustc_index::bit_set::GrowableBitSet;
use rustc_span::source_map::{BytePos, Spanned};
@@ -361,7 +361,7 @@ pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool {
}

impl MetaItem {
-fn token_trees_and_joints(&self) -> Vec<TreeAndJoint> {
+fn token_trees_and_spacings(&self) -> Vec<TreeAndSpacing> {
let mut idents = vec![];
let mut last_pos = BytePos(0 as u32);
for (i, segment) in self.path.segments.iter().enumerate() {
@@ -374,7 +374,7 @@ impl MetaItem {
idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)).into());
last_pos = segment.ident.span.hi();
}
-idents.extend(self.kind.token_trees_and_joints(self.span));
+idents.extend(self.kind.token_trees_and_spacings(self.span));
idents
}

@@ -447,7 +447,7 @@ impl MetaItemKind {
if i > 0 {
tts.push(TokenTree::token(token::Comma, span).into());
}
-tts.extend(item.token_trees_and_joints())
+tts.extend(item.token_trees_and_spacings())
}
MacArgs::Delimited(
DelimSpan::from_single(span),
@@ -458,7 +458,7 @@
}
}

-fn token_trees_and_joints(&self, span: Span) -> Vec<TreeAndJoint> {
+fn token_trees_and_spacings(&self, span: Span) -> Vec<TreeAndSpacing> {
match *self {
MetaItemKind::Word => vec![],
MetaItemKind::NameValue(ref lit) => {
@@ -470,7 +470,7 @@
if i > 0 {
tokens.push(TokenTree::token(token::Comma, span).into());
}
-tokens.extend(item.token_trees_and_joints())
+tokens.extend(item.token_trees_and_spacings())
}
vec![
TokenTree::Delimited(
@@ -553,9 +553,9 @@ impl NestedMetaItem {
}
}

-fn token_trees_and_joints(&self) -> Vec<TreeAndJoint> {
+fn token_trees_and_spacings(&self) -> Vec<TreeAndSpacing> {
match *self {
-NestedMetaItem::MetaItem(ref item) => item.token_trees_and_joints(),
+NestedMetaItem::MetaItem(ref item) => item.token_trees_and_spacings(),
NestedMetaItem::Literal(ref lit) => vec![lit.token_tree().into()],
}
}
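
A sketch of the pattern this file relies on (toy types, not the real `rustc_ast` ones): pushing a plain tree into a `Vec<TreeAndSpacing>` via `.into()` defaults its spacing to `Alone`, which is what `token_trees_and_spacings` leans on when it interleaves the separating commas.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing {
    Alone,
    Joint,
}

#[derive(Clone, Debug, PartialEq)]
enum TokenTree {
    Ident(String),
    Comma,
}

type TreeAndSpacing = (TokenTree, Spacing);

impl From<TokenTree> for TreeAndSpacing {
    fn from(tree: TokenTree) -> TreeAndSpacing {
        // Same default the real `From<TokenTree>` impl picks.
        (tree, Spacing::Alone)
    }
}

fn main() {
    let items = ["a", "b", "c"];
    let mut tts: Vec<TreeAndSpacing> = Vec::new();
    for (i, item) in items.iter().enumerate() {
        if i > 0 {
            tts.push(TokenTree::Comma.into());
        }
        tts.push(TokenTree::Ident(item.to_string()).into());
    }
    assert_eq!(tts.len(), 5);
    println!("{:?}", tts);
}
```
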
41 changes: 19 additions & 22 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -83,7 +83,7 @@ impl TokenTree {
}

pub fn joint(self) -> TokenStream {
-TokenStream::new(vec![(self, Joint)])
+TokenStream::new(vec![(self, Spacing::Joint)])
}

pub fn token(kind: TokenKind, span: Span) -> TokenTree {
@@ -125,22 +125,20 @@ where
/// instead of a representation of the abstract syntax tree.
/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
#[derive(Clone, Debug, Default, Encodable, Decodable)]
-pub struct TokenStream(pub Lrc<Vec<TreeAndJoint>>);
+pub struct TokenStream(pub Lrc<Vec<TreeAndSpacing>>);

-pub type TreeAndJoint = (TokenTree, IsJoint);
+pub type TreeAndSpacing = (TokenTree, Spacing);

// `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(target_arch = "x86_64")]
rustc_data_structures::static_assert_size!(TokenStream, 8);

#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable)]
-pub enum IsJoint {
+pub enum Spacing {
+Alone,
Joint,
-NonJoint,
}

-use IsJoint::*;

impl TokenStream {
/// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
/// separating the two arguments with a comma for diagnostic suggestions.
@@ -153,7 +151,7 @@ impl TokenStream {
let sp = match (&ts, &next) {
(_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
(
-(TokenTree::Token(token_left), NonJoint),
+(TokenTree::Token(token_left), Spacing::Alone),
(TokenTree::Token(token_right), _),
) if ((token_left.is_ident() && !token_left.is_reserved_ident())
|| token_left.is_lit())
@@ -162,11 +160,11 @@ impl TokenStream {
{
token_left.span
}
-((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
+((TokenTree::Delimited(sp, ..), Spacing::Alone), _) => sp.entire(),
_ => continue,
};
let sp = sp.shrink_to_hi();
-let comma = (TokenTree::token(token::Comma, sp), NonJoint);
+let comma = (TokenTree::token(token::Comma, sp), Spacing::Alone);
suggestion = Some((pos, comma, sp));
}
}
@@ -184,19 +182,19 @@

impl From<TokenTree> for TokenStream {
fn from(tree: TokenTree) -> TokenStream {
-TokenStream::new(vec![(tree, NonJoint)])
+TokenStream::new(vec![(tree, Spacing::Alone)])
}
}

-impl From<TokenTree> for TreeAndJoint {
-fn from(tree: TokenTree) -> TreeAndJoint {
-(tree, NonJoint)
+impl From<TokenTree> for TreeAndSpacing {
+fn from(tree: TokenTree) -> TreeAndSpacing {
+(tree, Spacing::Alone)
}
}

impl iter::FromIterator<TokenTree> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
-TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TreeAndJoint>>())
+TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TreeAndSpacing>>())
}
}

@@ -209,7 +207,7 @@ impl PartialEq<TokenStream> for TokenStream {
}

impl TokenStream {
-pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
+pub fn new(streams: Vec<TreeAndSpacing>) -> TokenStream {
TokenStream(Lrc::new(streams))
}

@@ -320,11 +318,11 @@ impl TokenStreamBuilder {
// If `self` is not empty and the last tree within the last stream is a
// token tree marked with `Joint`...
if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut() {
-if let Some((TokenTree::Token(last_token), Joint)) = last_stream_lrc.last() {
+if let Some((TokenTree::Token(last_token), Spacing::Joint)) = last_stream_lrc.last() {
// ...and `stream` is not empty and the first tree within it is
// a token tree...
let TokenStream(ref mut stream_lrc) = stream;
-if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {
+if let Some((TokenTree::Token(token), spacing)) = stream_lrc.first() {
// ...and the two tokens can be glued together...
if let Some(glued_tok) = last_token.glue(&token) {
// ...then do so, by overwriting the last token
@@ -337,8 +335,7 @@
// Overwrite the last token tree with the merged
// token.
let last_vec_mut = Lrc::make_mut(last_stream_lrc);
-*last_vec_mut.last_mut().unwrap() =
-(TokenTree::Token(glued_tok), *is_joint);
+*last_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing);

// Remove the first token tree from `stream`. (This
// is almost always the only tree in `stream`.)
@@ -375,7 +372,7 @@ impl Iterator for Cursor {
type Item = TokenTree;

fn next(&mut self) -> Option<TokenTree> {
-self.next_with_joint().map(|(tree, _)| tree)
+self.next_with_spacing().map(|(tree, _)| tree)
}
}

@@ -384,7 +381,7 @@ impl Cursor {
Cursor { stream, index: 0 }
}

-pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> {
+pub fn next_with_spacing(&mut self) -> Option<TreeAndSpacing> {
if self.index < self.stream.len() {
self.index += 1;
Some(self.stream.0[self.index - 1].clone())
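
A simplified sketch of the gluing step in `TokenStreamBuilder::push` above (toy token type, not the real builder): when the last token pushed so far is marked `Joint` and can be combined with the next incoming token, the two are merged into one token that keeps the incoming token's spacing.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing {
    Alone,
    Joint,
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum Tok {
    Plus,
    Eq,
    PlusEq,
}

// Toy stand-in for `Token::glue`.
fn glue(left: Tok, right: Tok) -> Option<Tok> {
    match (left, right) {
        (Tok::Plus, Tok::Eq) => Some(Tok::PlusEq),
        _ => None,
    }
}

#[derive(Default)]
struct Builder(Vec<(Tok, Spacing)>);

impl Builder {
    fn push(&mut self, tok: Tok, spacing: Spacing) {
        if let Some(&(last, Spacing::Joint)) = self.0.last() {
            if let Some(merged) = glue(last, tok) {
                // Overwrite the last entry with the glued token.
                *self.0.last_mut().unwrap() = (merged, spacing);
                return;
            }
        }
        self.0.push((tok, spacing));
    }
}

fn main() {
    let mut b = Builder::default();
    b.push(Tok::Plus, Spacing::Joint); // `+` immediately followed by...
    b.push(Tok::Eq, Spacing::Alone);   // ...`=`, so they glue into `+=`
    assert_eq!(b.0, vec![(Tok::PlusEq, Spacing::Alone)]);
    println!("{:?}", b.0);
}
```
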
4 changes: 2 additions & 2 deletions compiler/rustc_expand/src/mbe/transcribe.rs
@@ -4,7 +4,7 @@ use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};

use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::token::{self, NtTT, Token};
-use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
+use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
use rustc_ast::MacCall;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
@@ -111,7 +111,7 @@ pub(super) fn transcribe<'a>(
//
// Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
// again, and we are done transcribing.
-let mut result: Vec<TreeAndJoint> = Vec::new();
+let mut result: Vec<TreeAndSpacing> = Vec::new();
let mut result_stack = Vec::new();
let mut marker = Marker(cx.current_expansion.id, transparency);
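
For the `result` / `result_stack` scheme the comment above describes, here is a generic toy illustration (not the macro transcriber itself): entering a delimited group pushes the partially built output onto a stack; leaving the group pops it and appends the finished group.

```rust
#[derive(Debug, PartialEq)]
enum Out {
    Leaf(char),
    Group(Vec<Out>),
}

enum Step {
    Leaf(char),
    Open,
    Close,
}

fn build(steps: Vec<Step>) -> Vec<Out> {
    let mut result: Vec<Out> = Vec::new();
    let mut result_stack: Vec<Vec<Out>> = Vec::new();
    for step in steps {
        match step {
            Step::Leaf(c) => result.push(Out::Leaf(c)),
            Step::Open => {
                // Save what we have and start collecting the group's contents.
                result_stack.push(std::mem::take(&mut result));
            }
            Step::Close => {
                // Pop the saved output and append the finished group to it.
                let group = Out::Group(std::mem::take(&mut result));
                result = result_stack.pop().expect("unbalanced delimiters");
                result.push(group);
            }
        }
    }
    result
}

fn main() {
    // a ( b c ) d
    let out = build(vec![
        Step::Leaf('a'),
        Step::Open,
        Step::Leaf('b'),
        Step::Leaf('c'),
        Step::Close,
        Step::Leaf('d'),
    ]);
    assert_eq!(out.len(), 3);
    println!("{:?}", out);
}
```
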

12 changes: 6 additions & 6 deletions compiler/rustc_expand/src/proc_macro_server.rs
@@ -2,7 +2,7 @@ use crate::base::ExtCtxt;

use rustc_ast as ast;
use rustc_ast::token;
-use rustc_ast::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
+use rustc_ast::tokenstream::{self, DelimSpan, Spacing::*, TokenStream, TreeAndSpacing};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::Diagnostic;
@@ -47,15 +47,15 @@ impl ToInternal<token::DelimToken> for Delimiter {
}
}

-impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
+impl FromInternal<(TreeAndSpacing, &'_ ParseSess, &'_ mut Vec<Self>)>
for TokenTree<Group, Punct, Ident, Literal>
{
fn from_internal(
-((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>),
+((tree, spacing), sess, stack): (TreeAndSpacing, &ParseSess, &mut Vec<Self>),
) -> Self {
use rustc_ast::token::*;

-let joint = is_joint == Joint;
+let joint = spacing == Joint;
let Token { kind, span } = match tree {
tokenstream::TokenTree::Delimited(span, delim, tts) => {
let delimiter = Delimiter::from_internal(delim);
@@ -261,7 +261,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
};

let tree = tokenstream::TokenTree::token(kind, span);
-TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
+TokenStream::new(vec![(tree, if joint { Joint } else { Alone })])
}
}

@@ -444,7 +444,7 @@ impl server::TokenStreamIter for Rustc<'_> {
) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
loop {
let tree = iter.stack.pop().or_else(|| {
-let next = iter.cursor.next_with_joint()?;
+let next = iter.cursor.next_with_spacing()?;
Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
})?;
// A hack used to pass AST fragments to attribute and derive macros
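
A sketch of the conversion the proc-macro server performs above (local toy enum, not the real rustc or `proc_macro` types): the internal `Spacing` is flattened to a `joint: bool` on the way out to a proc macro and rebuilt from that bool on the way back in.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing {
    Alone,
    Joint,
}

fn to_external(spacing: Spacing) -> bool {
    // Mirrors `FromInternal`: true exactly when the token is glued to the next one.
    spacing == Spacing::Joint
}

fn to_internal(joint: bool) -> Spacing {
    // Mirrors `ToInternal`: map the bool back to the enum.
    if joint { Spacing::Joint } else { Spacing::Alone }
}

fn main() {
    for &spacing in &[Spacing::Alone, Spacing::Joint] {
        assert_eq!(to_internal(to_external(spacing)), spacing);
    }
    println!("round-trips ok");
}
```
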
57 changes: 22 additions & 35 deletions compiler/rustc_parse/src/lexer/mod.rs
@@ -1,22 +1,19 @@
use rustc_ast::ast::AttrStyle;
use rustc_ast::token::{self, CommentKind, Token, TokenKind};
-use rustc_ast::tokenstream::IsJoint;
-use rustc_data_structures::sync::Lrc;
-use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError};
-use rustc_lexer::Base;
-use rustc_lexer::{unescape, RawStrError};
+use rustc_ast::tokenstream::{Spacing, TokenStream};
+use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError, PResult};
+use rustc_lexer::unescape::{self, Mode};
+use rustc_lexer::{Base, DocStyle, RawStrError};
use rustc_session::parse::ParseSess;
use rustc_span::symbol::{sym, Symbol};
use rustc_span::{BytePos, Pos, Span};

use std::char;
use tracing::debug;

mod tokentrees;
mod unescape_error_reporting;
mod unicode_chars;

-use rustc_lexer::{unescape::Mode, DocStyle};
use unescape_error_reporting::{emit_unescape_error, push_escaped_char};

#[derive(Clone, Debug)]
@@ -28,7 +25,17 @@ pub struct UnmatchedBrace {
pub candidate_span: Option<Span>,
}

-crate struct StringReader<'a> {
+crate fn parse_token_trees<'a>(
+sess: &'a ParseSess,
+src: &'a str,
+start_pos: BytePos,
+override_span: Option<Span>,
+) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+StringReader { sess, start_pos, pos: start_pos, end_src_index: src.len(), src, override_span }
+.into_token_trees()
+}
+
+struct StringReader<'a> {
sess: &'a ParseSess,
/// Initial position, read-only.
start_pos: BytePos,
@@ -37,38 +44,18 @@ crate struct StringReader<'a> {
/// Stop reading src at this index.
end_src_index: usize,
/// Source text to tokenize.
-src: Lrc<String>,
+src: &'a str,
override_span: Option<Span>,
}

impl<'a> StringReader<'a> {
-crate fn new(
-sess: &'a ParseSess,
-source_file: Lrc<rustc_span::SourceFile>,
-override_span: Option<Span>,
-) -> Self {
-let src = source_file.src.clone().unwrap_or_else(|| {
-sess.span_diagnostic
-.bug(&format!("cannot lex `source_file` without source: {}", source_file.name));
-});
-
-StringReader {
-sess,
-start_pos: source_file.start_pos,
-pos: source_file.start_pos,
-end_src_index: src.len(),
-src,
-override_span,
-}
-}
-
fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
}

/// Returns the next token, and info about preceding whitespace, if any.
-fn next_token(&mut self) -> (IsJoint, Token) {
-let mut is_joint = IsJoint::Joint;
+fn next_token(&mut self) -> (Spacing, Token) {
+let mut spacing = Spacing::Joint;

// Skip `#!` at the start of the file
let start_src_index = self.src_index(self.pos);
@@ -77,7 +64,7 @@ impl<'a> StringReader<'a> {
if is_beginning_of_file {
if let Some(shebang_len) = rustc_lexer::strip_shebang(text) {
self.pos = self.pos + BytePos::from_usize(shebang_len);
-is_joint = IsJoint::NonJoint;
+spacing = Spacing::Alone;
}
}

@@ -88,7 +75,7 @@

if text.is_empty() {
let span = self.mk_sp(self.pos, self.pos);
-return (is_joint, Token::new(token::Eof, span));
+return (spacing, Token::new(token::Eof, span));
}

let token = rustc_lexer::first_token(text);
@@ -101,9 +88,9 @@ impl<'a> StringReader<'a> {
match self.cook_lexer_token(token.kind, start) {
Some(kind) => {
let span = self.mk_sp(start, self.pos);
-return (is_joint, Token::new(kind, span));
+return (spacing, Token::new(kind, span));
}
-None => is_joint = IsJoint::NonJoint,
+None => spacing = Spacing::Alone,
}
}
}
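
A standalone sketch of the lexer behaviour shown above (toy lexer over `char`s, not `StringReader`): each call starts by assuming the token is `Joint` with whatever preceded it, and downgrades to `Alone` whenever something had to be skipped first (here just whitespace; in the real lexer also comments and the shebang).

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing {
    Alone,
    Joint,
}

struct Lexer<'a> {
    rest: &'a str,
}

impl<'a> Lexer<'a> {
    fn next_token(&mut self) -> Option<(Spacing, char)> {
        let mut spacing = Spacing::Joint;
        loop {
            let mut chars = self.rest.chars();
            let c = chars.next()?;
            self.rest = chars.as_str();
            if c.is_whitespace() {
                // Something separated this token from the previous one.
                spacing = Spacing::Alone;
                continue;
            }
            return Some((spacing, c));
        }
    }
}

fn main() {
    let mut lexer = Lexer { rest: "+= ;" };
    assert_eq!(lexer.next_token(), Some((Spacing::Joint, '+')));
    assert_eq!(lexer.next_token(), Some((Spacing::Joint, '=')));
    assert_eq!(lexer.next_token(), Some((Spacing::Alone, ';')));
    assert_eq!(lexer.next_token(), None);
    println!("lexed with spacing info");
}
```
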
Loading
