Run rustfmt and add doc comments to libsyntax/ext/tt/quoted.rs #47603

Merged (4 commits), Jan 30, 2018
Changes from 2 commits
212 changes: 173 additions & 39 deletions src/libsyntax/ext/tt/quoted.rs
@@ -10,29 +10,34 @@

use ast;
use ext::tt::macro_parser;
use parse::{ParseSess, token};
use parse::{token, ParseSess};
use print::pprust;
use symbol::keywords;
use syntax_pos::{DUMMY_SP, Span, BytePos};
use syntax_pos::{BytePos, Span, DUMMY_SP};
use tokenstream;

use std::rc::Rc;

/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
/// thatthat the delimiter itself might be `NoDelim`.
Review comment (Contributor): Typo

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Delimited {
pub delim: token::DelimToken,
pub tts: Vec<TokenTree>,
}

impl Delimited {
/// Return the opening delimiter (possibly `NoDelim`).
pub fn open_token(&self) -> token::Token {
token::OpenDelim(self.delim)
}

/// Return the closing delimiter (possibly `NoDelim`).
pub fn close_token(&self) -> token::Token {
token::CloseDelim(self.delim)
}

/// Return a `self::TokenTree` witha a `Span` corresponding to the opening delimiter.
Review comment (jseyfried, Contributor, Jan 29, 2018): typo: witha

pub fn open_tt(&self, span: Span) -> TokenTree {
let open_span = if span == DUMMY_SP {
DUMMY_SP
@@ -42,6 +47,7 @@ impl Delimited {
TokenTree::Token(open_span, self.open_token())
}

/// Return a `self::TokenTree` witha a `Span` corresponding to the closing delimiter.
Review comment (Contributor): typo: witha

pub fn close_tt(&self, span: Span) -> TokenTree {
let close_span = if span == DUMMY_SP {
DUMMY_SP
@@ -68,12 +74,14 @@ pub struct SequenceRepetition {
/// for token sequences.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum KleeneOp {
/// Kleene star (`*`) for zero or more repetitions
ZeroOrMore,
/// Kleene star (`+`) for one or more repetitions
Review comment (Contributor): Kleene plus

OneOrMore,
}
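
For reference, the two operators correspond to the `*` and `+` repetition forms of macro_rules!. A small standalone sketch, with hypothetical macro names, showing the difference:

// `*` (ZeroOrMore): the repeated fragment may occur zero times.
macro_rules! zero_or_more {
    ($($x:expr),*) => { vec![$($x),*] };
}

// `+` (OneOrMore): the repeated fragment must occur at least once.
macro_rules! one_or_more {
    ($($x:expr),+) => { vec![$($x),+] };
}

fn main() {
    let empty: Vec<i32> = zero_or_more![]; // fine with `*`
    let some = one_or_more![1, 2, 3];      // `+` requires at least one element
    assert!(empty.is_empty());
    assert_eq!(some, vec![1, 2, 3]);
}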

/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
/// are "first-class" token trees.
/// are "first-class" token trees. Useful for parsing macros.
#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum TokenTree {
Token(Span, token::Token),
@@ -83,10 +91,15 @@ pub enum TokenTree {
/// E.g. `$var`
MetaVar(Span, ast::Ident),
/// E.g. `$var:expr`. This is only used in the left hand side of MBE macros.
MetaVarDecl(Span, ast::Ident /* name to bind */, ast::Ident /* kind of nonterminal */),
MetaVarDecl(
Span,
ast::Ident, /* name to bind */
ast::Ident, /* kind of nonterminal */
),
}
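
To connect these variants to surface syntax, an illustrative macro_rules! sketch (hypothetical macro name): in the matcher, `$e:expr` becomes a MetaVarDecl (name `e`, kind `expr`) and `$( ... ),*` a Sequence; in the body, `$e` alone is a MetaVar, and plain tokens such as `0` and `+` are Tokens.

macro_rules! sum {
    // matcher: a Sequence containing a MetaVarDecl, with `,` as the separator
    ($($e:expr),*) => {
        // body: Token `0`, then a Sequence containing Token `+` and MetaVar `$e`
        0 $(+ $e)*
    };
}

fn main() {
    assert_eq!(sum!(1, 2, 3), 6);
    assert_eq!(sum!(), 0);
}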

impl TokenTree {
/// Return the number of tokens in the tree.
pub fn len(&self) -> usize {
match *self {
TokenTree::Delimited(_, ref delimed) => match delimed.delim {
@@ -98,6 +111,8 @@ impl TokenTree {
}
}

/// Returns true if the given token tree contains no other tokens. This is vacuously true for
/// single tokens or metavar/decls, but may be false for delimited trees or sequences.
pub fn is_empty(&self) -> bool {
match *self {
TokenTree::Delimited(_, ref delimed) => match delimed.delim {
@@ -109,6 +124,7 @@
}
}

/// Get the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
pub fn get_tt(&self, index: usize) -> TokenTree {
match (self, index) {
(&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
@@ -131,21 +147,48 @@ impl TokenTree {
/// Retrieve the `TokenTree`'s span.
pub fn span(&self) -> Span {
match *self {
TokenTree::Token(sp, _) |
TokenTree::MetaVar(sp, _) |
TokenTree::MetaVarDecl(sp, _, _) |
TokenTree::Delimited(sp, _) |
TokenTree::Sequence(sp, _) => sp,
TokenTree::Token(sp, _)
| TokenTree::MetaVar(sp, _)
| TokenTree::MetaVarDecl(sp, _, _)
| TokenTree::Delimited(sp, _)
| TokenTree::Sequence(sp, _) => sp,
}
}
}

pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &ParseSess)
-> Vec<TokenTree> {
/// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
/// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
/// collection of `TokenTree` for use in parsing a macro.
///
/// # Parameters
///
/// - `input`: a token stream to read from, the contents of which we are parsing.
/// - `expect_matchers`: `parse` can be used to parse either the "patterns" or the "body" of a
/// macro. Both take roughly the same form _except_ that in a pattern, metavars are declared with
/// their "matcher" type. For example `$var:expr` or `$id:ident`. In this example, `expr` and
/// `ident` are "matchers". They are not present in the body of a macro rule -- just in the
/// pattern, so we pass a parameter to indicate whether to expect them or not.
/// - `sess`: the parsing session. Any errors will be emitted to this session.
///
/// # Returns
///
/// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
pub fn parse(
input: tokenstream::TokenStream,
expect_matchers: bool,
sess: &ParseSess,
) -> Vec<TokenTree> {
// Will contain the final collection of `self::TokenTree`
let mut result = Vec::new();

// For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
// additional trees if need be.
let mut trees = input.trees();
while let Some(tree) = trees.next() {
let tree = parse_tree(tree, &mut trees, expect_matchers, sess);

// Given the parsed tree, if there is a metavar and we are expecting matchers, actually
// parse out the matcher (i.e. in `$id:ident` this would parse the `:` and `ident`).
match tree {
TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
let span = match trees.next() {
@@ -154,78 +197,149 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars
Some(kind) => {
let span = end_sp.with_lo(start_sp.lo());
result.push(TokenTree::MetaVarDecl(span, ident, kind));
continue
continue;
}
_ => end_sp,
},
tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
tree => tree.as_ref()
.map(tokenstream::TokenTree::span)
.unwrap_or(span),
},
tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
tree => tree.as_ref()
.map(tokenstream::TokenTree::span)
.unwrap_or(start_sp),
};
sess.missing_fragment_specifiers.borrow_mut().insert(span);
result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident()));
result.push(TokenTree::MetaVarDecl(
span,
ident,
keywords::Invalid.ident(),
));
}

// Not a metavar or no matchers allowed, so just return the tree
_ => result.push(tree),
}
}
result
}
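
The `expect_matchers` distinction shows up directly in an ordinary macro definition: fragment specifiers are legal only on the pattern side. An illustrative sketch (hypothetical macro name):

macro_rules! square {
    // Pattern side: parsed with expect_matchers == true, so `$x:expr`
    // is read as a MetaVarDecl with nonterminal kind `expr`.
    ($x:expr) => {
        // Body side: parsed with expect_matchers == false, so `$x`
        // here is just a MetaVar with no kind annotation.
        $x * $x
    };
}

fn main() {
    assert_eq!(square!(3), 9);
}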

fn parse_tree<I>(tree: tokenstream::TokenTree,
trees: &mut I,
expect_matchers: bool,
sess: &ParseSess)
-> TokenTree
where I: Iterator<Item = tokenstream::TokenTree>,
/// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Specifically, this takes a
/// generic `TokenTree`, such as is used in the rest of the compiler, and returns a `TokenTree`
/// for use in parsing a macro.
///
/// Converting the given tree may involve reading more tokens.
///
/// # Parameters
///
/// - `tree`: the tree wish to convert.
Review comment (Contributor): we wish to convert

/// - `trees`: an iterator over trees. We may need to read more tokens from it in order to finish
/// converting `tree`
/// - `expect_matchers`: same as for `parse` (see above).
/// - `sess`: the parsing session. Any errors will be emitted to this session.
fn parse_tree<I>(
tree: tokenstream::TokenTree,
trees: &mut I,
expect_matchers: bool,
sess: &ParseSess,
) -> TokenTree
where
I: Iterator<Item = tokenstream::TokenTree>,
{
// Depending on what `tree` is, we could be parsing different parts of a macro
match tree {
// `tree` is a `$` token. Look at the next token in `trees`
tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
// `tree` is followed by a delimited set of token trees. This indicates the beginning
// of a repetition sequence in the macro (e.g. `$(pat)*`).
Some(tokenstream::TokenTree::Delimited(span, delimited)) => {
// Must have `(` not `{` or `[`
if delimited.delim != token::Paren {
let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
let msg = format!("expected `(`, found `{}`", tok);
sess.span_diagnostic.span_err(span, &msg);
}
// Parse the contents of the sequence itself
let sequence = parse(delimited.tts.into(), expect_matchers, sess);
// Get the Kleen operator and optional separator
Review comment (Contributor): Kleene
Reply (Member, author): Haha... it originally autocorrected to Kleenex, and my fix was incorrect 😛

let (separator, op) = parse_sep_and_kleene_op(trees, span, sess);
// Count the number of captured "names" (i.e. named metavars)
let name_captures = macro_parser::count_names(&sequence);
TokenTree::Sequence(span, Rc::new(SequenceRepetition {
tts: sequence,
separator,
op,
num_captures: name_captures,
}))
TokenTree::Sequence(
span,
Rc::new(SequenceRepetition {
tts: sequence,
separator,
op,
num_captures: name_captures,
}),
)
}

// `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
// metavariable that names the crate of the invocation.
Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
let ident = token.ident().unwrap();
let span = ident_span.with_lo(span.lo());
if ident.name == keywords::Crate.name() {
let ident = ast::Ident { name: keywords::DollarCrate.name(), ..ident };
let ident = ast::Ident {
name: keywords::DollarCrate.name(),
..ident
};
TokenTree::Token(span, token::Ident(ident))
} else {
TokenTree::MetaVar(span, ident)
}
}

// `tree` is followed by a random token. This is an error.
Some(tokenstream::TokenTree::Token(span, tok)) => {
let msg = format!("expected identifier, found `{}`", pprust::token_to_string(&tok));
let msg = format!(
"expected identifier, found `{}`",
pprust::token_to_string(&tok)
);
sess.span_diagnostic.span_err(span, &msg);
TokenTree::MetaVar(span, keywords::Invalid.ident())
}

// There are no more tokens. Just return the `$` we already have.
None => TokenTree::Token(span, token::Dollar),
},

// `tree` is an arbitrary token. Keep it.
tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
tokenstream::TokenTree::Delimited(span, delimited) => {
TokenTree::Delimited(span, Rc::new(Delimited {

// `tree` is the beginning of a delimited set of tokens (e.g. `(` or `{`). We need to
// descend into the delimited set and further parse it.
tokenstream::TokenTree::Delimited(span, delimited) => TokenTree::Delimited(
span,
Rc::new(Delimited {
delim: delimited.delim,
tts: parse(delimited.tts.into(), expect_matchers, sess),
}))
}
}),
),
}
}
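
As context for the `$crate` arm above, `$crate` is the metavariable that resolves to the macro's defining crate at the call site. A sketch of a library crate (the crate name `mylib` is assumed for illustration):

// crate `mylib` (hypothetical)
#[macro_export]
macro_rules! make_widget {
    () => {
        // When used from another crate, `$crate` resolves to `::mylib`, so this
        // path works even if the caller never imported `Widget` itself.
        $crate::Widget::new()
    };
}

pub struct Widget;

impl Widget {
    pub fn new() -> Widget {
        Widget
    }
}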

fn parse_sep_and_kleene_op<I>(input: &mut I, span: Span, sess: &ParseSess)
-> (Option<token::Token>, KleeneOp)
where I: Iterator<Item = tokenstream::TokenTree>,
/// Attempt to parse a single Kleene star, possibly with a separator.
///
/// For example, in a pattern such as `$(a),*`, `a` is the pattern to be repeated, `,` is the
/// separator, and `*` is the Kleene operator. This function is specifically concerned with parsing
/// the last two tokens of such a pattern: namely, the optional separator and the Kleene operator
/// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some
/// stream of tokens in an invokation of a macro.
Review comment (Contributor): sp: invocation

///
/// This function will take some input iterator `input` corresponding to `span` and a parsing
/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene
/// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
/// error with the appropriate span is emitted to `sess` and a dummy value is returned.
fn parse_sep_and_kleene_op<I>(
input: &mut I,
span: Span,
sess: &ParseSess,
) -> (Option<token::Token>, KleeneOp)
where
I: Iterator<Item = tokenstream::TokenTree>,
{
fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
match *token {
@@ -235,20 +349,40 @@ fn parse_sep_and_kleene_op<I>(input: &mut I, span: Span, sess: &ParseSess)
}
}

// We attempt to look at the next two token trees in `input`. I will call the first #1 and the
// second #2. If #1 and #2 don't match a valid KleeneOp with/without separator, that is an
// error, and we should emit an error on the most specific span possible.
let span = match input.next() {
// #1 is a token
Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
// #1 is a KleeneOp with no separator
Some(op) => return (None, op),

// #1 is not a KleeneOp, but may be a separator... need to look at #2
None => match input.next() {
// #2 is a token
Some(tokenstream::TokenTree::Token(span, tok2)) => match kleene_op(&tok2) {
// #2 is a KleeneOp, so #1 must be a separator
Some(op) => return (Some(tok), op),

// #2 is not a KleeneOp... error
None => span,
},
tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
}

// #2 is not a token at all... error
tree => tree.as_ref()
.map(tokenstream::TokenTree::span)
.unwrap_or(span),
},
},
tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),

// #1 is not a token at all... error
tree => tree.as_ref()
.map(tokenstream::TokenTree::span)
.unwrap_or(span),
};

// Error...
sess.span_diagnostic.span_err(span, "expected `*` or `+`");
(None, KleeneOp::ZeroOrMore)
}
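
Concretely, the common spellings map to the return value as follows: `$($x:expr),*` gives (Some(`,`), ZeroOrMore), `$($x:expr)*` gives (None, ZeroOrMore), and `$($x:expr);+` gives (Some(`;`), OneOrMore). A small runnable sketch using a separator plus Kleene star (hypothetical macro name):

macro_rules! count {
    () => { 0usize };
    // separator `,` followed by the Kleene `*` operator
    ($head:expr $(, $tail:expr)*) => { 1usize + count!($($tail),*) };
}

fn main() {
    assert_eq!(count!(), 0);
    assert_eq!(count!(1, 2, 3), 3);
}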