Auto merge of rust-lang#97114 - klensy:cursor-ref, r=petrochenkov
use CursorRef more

This allows skipping the clone of `TreeAndSpacing` (and `TokenTree`).
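
In practical terms, `TokenStream::trees` now returns a borrowing `CursorRef<'_>` that yields `&TokenTree`, while `into_trees` keeps returning the owning `Cursor`; call sites that only read tokens stay on `trees()`, and call sites that need owned trees switch to `into_trees()` (or `trees().cloned()`). A minimal caller-side sketch of the distinction — illustrative only, with made-up helper names, not code from this commit:

use rustc_ast::tokenstream::{TokenStream, TokenTree};

// Read-only pass: `trees()` hands out `&TokenTree`, so nothing is cloned.
fn has_delimited(stream: &TokenStream) -> bool {
    stream.trees().any(|tree| matches!(tree, TokenTree::Delimited(..)))
}

// Owning pass: `into_trees()` consumes the stream and yields `TokenTree` by
// value; a borrowed stream can do the same via `trees().cloned()`.
fn into_owned(stream: TokenStream) -> Vec<TokenTree> {
    stream.into_trees().collect()
}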
bors committed May 19, 2022
2 parents 67a9bcb + 05f459e commit 6913c74
Showing 11 changed files with 37 additions and 26 deletions.
2 changes: 1 addition & 1 deletion compiler/rustc_ast/src/attr/mod.rs
@@ -552,7 +552,7 @@ impl MetaItemKind {
) -> Option<MetaItemKind> {
match tokens.next() {
Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
MetaItemKind::name_value_from_tokens(&mut inner_tokens.into_trees())
}
Some(TokenTree::Token(token)) => {
Lit::from_token(&token).ok().map(MetaItemKind::NameValue)
13 changes: 11 additions & 2 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -442,8 +442,8 @@ impl TokenStream {
}
}

pub fn trees(&self) -> Cursor {
self.clone().into_trees()
pub fn trees(&self) -> CursorRef<'_> {
CursorRef::new(self)
}

pub fn into_trees(self) -> Cursor {
@@ -538,12 +538,21 @@ pub struct CursorRef<'t> {
}

impl<'t> CursorRef<'t> {
fn new(stream: &'t TokenStream) -> Self {
CursorRef { stream, index: 0 }
}

#[inline]
fn next_with_spacing(&mut self) -> Option<&'t TreeAndSpacing> {
self.stream.0.get(self.index).map(|tree| {
self.index += 1;
tree
})
}

pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
}
}

impl<'t> Iterator for CursorRef<'t> {
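The hunk above also gives `CursorRef` a constructor and a non-consuming `look_ahead`, which is what the updated `try_eat_comma` in `metavar_expr.rs` below relies on: peek first, and only advance on a match. A rough sketch of that peek-then-consume pattern — illustrative, not code from this commit:

use rustc_ast::token::{self, Token};
use rustc_ast::tokenstream::{CursorRef, TokenTree};

// Consume the next tree only if it is the token kind we are looking for.
fn try_eat(iter: &mut CursorRef<'_>, kind: &token::TokenKind) -> bool {
    if let Some(TokenTree::Token(Token { kind: k, .. })) = iter.look_ahead(0) {
        if k == kind {
            let _ = iter.next(); // advance past the peeked token
            return true;
        }
    }
    false
}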
4 changes: 2 additions & 2 deletions compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -550,9 +550,9 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
fn print_tts(&mut self, tts: &TokenStream, convert_dollar_crate: bool) {
let mut iter = tts.trees().peekable();
while let Some(tt) = iter.next() {
self.print_tt(&tt, convert_dollar_crate);
self.print_tt(tt, convert_dollar_crate);
if let Some(next) = iter.peek() {
if tt_prepend_space(next, &tt) {
if tt_prepend_space(next, tt) {
self.space();
}
}
2 changes: 1 addition & 1 deletion compiler/rustc_expand/src/config.rs
@@ -400,7 +400,7 @@ impl<'a> StripUnconfigured<'a> {

// Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
// for `attr` when we expand it to `#[attr]`
let mut orig_trees = orig_tokens.trees();
let mut orig_trees = orig_tokens.into_trees();
let TokenTree::Token(pound_token @ Token { kind: TokenKind::Pound, .. }) = orig_trees.next().unwrap() else {
panic!("Bad tokens for attribute {:?}", attr);
};
26 changes: 14 additions & 12 deletions compiler/rustc_expand/src/mbe/metavar_expr.rs
@@ -1,5 +1,5 @@
use rustc_ast::token::{self, Delimiter};
use rustc_ast::tokenstream::{Cursor, TokenStream, TokenTree};
use rustc_ast::tokenstream::{CursorRef, TokenStream, TokenTree};
use rustc_ast::{LitIntType, LitKind};
use rustc_ast_pretty::pprust;
use rustc_errors::{Applicability, PResult};
@@ -71,12 +71,14 @@ impl MetaVarExpr {
}

// Checks if there are any remaining tokens. For example, `${ignore(ident ... a b c ...)}`
fn check_trailing_token<'sess>(iter: &mut Cursor, sess: &'sess ParseSess) -> PResult<'sess, ()> {
fn check_trailing_token<'sess>(
iter: &mut CursorRef<'_>,
sess: &'sess ParseSess,
) -> PResult<'sess, ()> {
if let Some(tt) = iter.next() {
let mut diag = sess.span_diagnostic.struct_span_err(
tt.span(),
&format!("unexpected token: {}", pprust::tt_to_string(&tt)),
);
let mut diag = sess
.span_diagnostic
.struct_span_err(tt.span(), &format!("unexpected token: {}", pprust::tt_to_string(tt)));
diag.span_note(tt.span(), "meta-variable expression must not have trailing tokens");
Err(diag)
} else {
@@ -86,7 +88,7 @@ fn check_trailing_token<'sess>(iter: &mut Cursor, sess: &'sess ParseSess) -> PRe

/// Parse a meta-variable `count` expression: `count(ident[, depth])`
fn parse_count<'sess>(
iter: &mut Cursor,
iter: &mut CursorRef<'_>,
sess: &'sess ParseSess,
span: Span,
) -> PResult<'sess, MetaVarExpr> {
@@ -97,7 +99,7 @@ fn parse_count<'sess>(

/// Parses the depth used by index(depth) and length(depth).
fn parse_depth<'sess>(
iter: &mut Cursor,
iter: &mut CursorRef<'_>,
sess: &'sess ParseSess,
span: Span,
) -> PResult<'sess, usize> {
@@ -110,7 +112,7 @@ fn parse_depth<'sess>(
"meta-variable expression depth must be a literal"
));
};
if let Ok(lit_kind) = LitKind::from_lit_token(lit)
if let Ok(lit_kind) = LitKind::from_lit_token(*lit)
&& let LitKind::Int(n_u128, LitIntType::Unsuffixed) = lit_kind
&& let Ok(n_usize) = usize::try_from(n_u128)
{
@@ -124,15 +126,15 @@ fn parse_depth<'sess>(

/// Parses an generic ident
fn parse_ident<'sess>(
iter: &mut Cursor,
iter: &mut CursorRef<'_>,
sess: &'sess ParseSess,
span: Span,
) -> PResult<'sess, Ident> {
if let Some(tt) = iter.next() && let TokenTree::Token(token) = tt {
if let Some((elem, false)) = token.ident() {
return Ok(elem);
}
let token_str = pprust::token_to_string(&token);
let token_str = pprust::token_to_string(token);
let mut err = sess.span_diagnostic.struct_span_err(
span,
&format!("expected identifier, found `{}`", &token_str)
@@ -150,7 +152,7 @@ fn parse_ident<'sess>(

/// Tries to move the iterator forward returning `true` if there is a comma. If not, then the
/// iterator is not modified and the result is `false`.
fn try_eat_comma(iter: &mut Cursor) -> bool {
fn try_eat_comma(iter: &mut CursorRef<'_>) -> bool {
if let Some(TokenTree::Token(token::Token { kind: token::Comma, .. })) = iter.look_ahead(0) {
let _ = iter.next();
return true;
2 changes: 1 addition & 1 deletion compiler/rustc_expand/src/mbe/quoted.rs
@@ -48,7 +48,7 @@ pub(super) fn parse(

// For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
// additional trees if need be.
let mut trees = input.trees();
let mut trees = input.into_trees();
while let Some(tree) = trees.next() {
// Given the parsed tree, if there is a metavar and we are expecting matchers, actually
// parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
4 changes: 2 additions & 2 deletions compiler/rustc_expand/src/parse/tests.rs
@@ -61,7 +61,7 @@ fn bad_path_expr_1() {
fn string_to_tts_macro() {
create_default_session_globals_then(|| {
let tts: Vec<_> =
string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).into_trees().collect();
let tts: &[TokenTree] = &tts[..];

match tts {
@@ -293,7 +293,7 @@ fn ttdelim_span() {
.unwrap();

let tts: Vec<_> = match expr.kind {
ast::ExprKind::MacCall(ref mac) => mac.args.inner_tokens().trees().collect(),
ast::ExprKind::MacCall(ref mac) => mac.args.inner_tokens().into_trees().collect(),
_ => panic!("not a macro"),
};

2 changes: 1 addition & 1 deletion compiler/rustc_expand/src/proc_macro_server.rs
@@ -484,7 +484,7 @@ impl server::TokenStream for Rustc<'_, '_> {
tree.to_internal()
}
fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
TokenStreamIter { cursor: stream.trees(), stack: vec![] }
TokenStreamIter { cursor: stream.into_trees(), stack: vec![] }
}
}

2 changes: 1 addition & 1 deletion compiler/rustc_expand/src/tokenstream/tests.rs
@@ -35,7 +35,7 @@ fn test_concat() {
fn test_to_from_bijection() {
create_default_session_globals_then(|| {
let test_start = string_to_ts("foo::bar(baz)");
let test_end = test_start.trees().collect();
let test_end = test_start.trees().cloned().collect();
assert_eq!(test_start, test_end)
})
}
2 changes: 1 addition & 1 deletion compiler/rustc_session/src/utils.rs
@@ -108,7 +108,7 @@ impl<'a> FlattenNonterminals<'a> {
fn can_skip(stream: &TokenStream) -> bool {
stream.trees().all(|tree| match tree {
TokenTree::Token(token) => !matches!(token.kind, token::Interpolated(_)),
TokenTree::Delimited(_, _, inner) => can_skip(&inner),
TokenTree::Delimited(_, _, inner) => can_skip(inner),
})
}

4 changes: 2 additions & 2 deletions src/tools/rustfmt/src/macros.rs
@@ -226,7 +226,7 @@ fn rewrite_macro_inner(
}
// Format well-known macros which cannot be parsed as a valid AST.
if macro_name == "lazy_static!" && !has_comment {
if let success @ Some(..) = format_lazy_static(context, shape, ts.trees().collect()) {
if let success @ Some(..) = format_lazy_static(context, shape, ts.clone()) {
return success;
}
}
@@ -855,7 +855,7 @@ impl MacroArgParser {

/// Returns a collection of parsed macro def's arguments.
fn parse(mut self, tokens: TokenStream) -> Option<Vec<ParsedMacroArg>> {
let mut iter = tokens.trees();
let mut iter = tokens.into_trees();

while let Some(tok) = iter.next() {
match tok {
