Skip to content

Commit

Permalink
[has 23 ui test failures] Remove Nonterminal::NtExpr.
Browse files Browse the repository at this point in the history
  • Loading branch information
nnethercote committed May 10, 2022
1 parent c3da0d4 commit 6b6f97f
Show file tree
Hide file tree
Showing 20 changed files with 255 additions and 195 deletions.
6 changes: 3 additions & 3 deletions compiler/rustc_ast/src/ast_like.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ impl AstLike for crate::token::Nonterminal {
match self {
Nonterminal::NtItem(item) => item.attrs(),
Nonterminal::NtStmt(stmt) => stmt.attrs(),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.attrs(),
Nonterminal::NtLiteral(expr) => expr.attrs(),
Nonterminal::NtPat(_)
| Nonterminal::NtTy(_)
| Nonterminal::NtMeta(_)
Expand All @@ -60,7 +60,7 @@ impl AstLike for crate::token::Nonterminal {
match self {
Nonterminal::NtItem(item) => item.visit_attrs(f),
Nonterminal::NtStmt(stmt) => stmt.visit_attrs(f),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.visit_attrs(f),
Nonterminal::NtLiteral(expr) => expr.visit_attrs(f),
Nonterminal::NtPat(_)
| Nonterminal::NtTy(_)
| Nonterminal::NtMeta(_)
Expand All @@ -75,7 +75,7 @@ impl AstLike for crate::token::Nonterminal {
match self {
Nonterminal::NtItem(item) => item.tokens_mut(),
Nonterminal::NtStmt(stmt) => stmt.tokens_mut(),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.tokens_mut(),
Nonterminal::NtLiteral(expr) => expr.tokens_mut(),
Nonterminal::NtPat(pat) => pat.tokens_mut(),
Nonterminal::NtTy(ty) => ty.tokens_mut(),
Nonterminal::NtMeta(attr_item) => attr_item.tokens_mut(),
Expand Down
1 change: 0 additions & 1 deletion compiler/rustc_ast/src/mut_visit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -776,7 +776,6 @@ pub fn visit_nonterminal<T: MutVisitor>(nt: &mut token::Nonterminal, vis: &mut T
})
}),
token::NtPat(pat) => vis.visit_pat(pat),
token::NtExpr(expr) => vis.visit_expr(expr),
token::NtTy(ty) => vis.visit_ty(ty),
token::NtIdent(ident, _is_raw) => vis.visit_ident(ident),
token::NtLifetime(ident) => vis.visit_ident(ident),
Expand Down
27 changes: 8 additions & 19 deletions compiler/rustc_ast/src/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -379,7 +379,7 @@ impl Token {
match self.uninterpolate().kind {
Ident(name, is_raw) =>
ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
OpenDelim(..) | // tuple, array or block
OpenDelim(..) | // tuple, array, block, or macro output
Literal(..) | // literal
Not | // operator not
BinOp(Minus) | // unary minus
Expand All @@ -394,7 +394,6 @@ impl Token {
Lifetime(..) | // labeled loop
Pound => true, // expression attributes
Interpolated(ref nt) => matches!(**nt, NtLiteral(..) |
NtExpr(..) |
NtBlock(..) |
NtPath(..)),
_ => false,
Expand Down Expand Up @@ -424,8 +423,8 @@ impl Token {
/// Returns `true` if the token can appear at the start of a const param.
pub fn can_begin_const_arg(&self) -> bool {
match self.kind {
OpenDelim(Delimiter::Brace) => true,
Interpolated(ref nt) => matches!(**nt, NtExpr(..) | NtBlock(..) | NtLiteral(..)),
OpenDelim(Delimiter::Brace | Delimiter::Invisible { .. }) => true,
Interpolated(ref nt) => matches!(**nt, NtBlock(..) | NtLiteral(..)),
_ => self.can_begin_literal_maybe_minus(),
}
}
Expand Down Expand Up @@ -454,17 +453,8 @@ impl Token {
match self.uninterpolate().kind {
Literal(..) | BinOp(Minus) => true,
Ident(name, false) if name.is_bool_lit() => true,
Interpolated(ref nt) => match &**nt {
NtLiteral(_) => true,
NtExpr(e) => match &e.kind {
ast::ExprKind::Lit(_) => true,
ast::ExprKind::Unary(ast::UnOp::Neg, e) => {
matches!(&e.kind, ast::ExprKind::Lit(_))
}
_ => false,
},
_ => false,
},
OpenDelim(Delimiter::Invisible { .. }) => true,
Interpolated(ref nt) => matches!(**nt, NtLiteral(_)),
_ => false,
}
}
Expand Down Expand Up @@ -541,9 +531,10 @@ impl Token {
/// Would `maybe_whole_expr` in `parser.rs` return `Ok(..)`?
/// That is, is this a pre-parsed expression dropped into the token stream
/// (which happens while parsing the result of macro expansion)?
// njn: need to do anything with this?
pub fn is_whole_expr(&self) -> bool {
if let Interpolated(ref nt) = self.kind
&& let NtExpr(_) | NtLiteral(_) | NtPath(_) | NtBlock(_) = **nt
&& let NtLiteral(_) | NtPath(_) | NtBlock(_) = **nt
{
return true;
}
Expand Down Expand Up @@ -699,7 +690,6 @@ pub enum Nonterminal {
NtBlock(P<ast::Block>),
NtStmt(P<ast::Stmt>),
NtPat(P<ast::Pat>),
NtExpr(P<ast::Expr>),
NtTy(P<ast::Ty>),
NtIdent(Ident, /* is_raw */ bool),
NtLifetime(Ident),
Expand Down Expand Up @@ -799,7 +789,7 @@ impl Nonterminal {
NtBlock(block) => block.span,
NtStmt(stmt) => stmt.span,
NtPat(pat) => pat.span,
NtExpr(expr) | NtLiteral(expr) => expr.span,
NtLiteral(expr) => expr.span,
NtTy(ty) => ty.span,
NtIdent(ident, _) | NtLifetime(ident) => ident.span,
NtMeta(attr_item) => attr_item.span(),
Expand Down Expand Up @@ -832,7 +822,6 @@ impl fmt::Debug for Nonterminal {
NtBlock(..) => f.pad("NtBlock(..)"),
NtStmt(..) => f.pad("NtStmt(..)"),
NtPat(..) => f.pad("NtPat(..)"),
NtExpr(..) => f.pad("NtExpr(..)"),
NtTy(..) => f.pad("NtTy(..)"),
NtIdent(..) => f.pad("NtIdent(..)"),
NtLiteral(..) => f.pad("NtLiteral(..)"),
Expand Down
4 changes: 3 additions & 1 deletion compiler/rustc_ast/src/util/literal.rs
Original file line number Diff line number Diff line change
Expand Up @@ -215,14 +215,16 @@ impl Lit {
/// Converts arbitrary token into an AST literal.
///
/// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation.
// njn: need to do something here? It's hard to keep in sync with
// can_begin_literal_or_bool when a literal can span 3 tokens: `«`, `lit`, `»`
pub fn from_token(token: &Token) -> Result<Lit, LitError> {
let lit = match token.uninterpolate().kind {
token::Ident(name, false) if name.is_bool_lit() => {
token::Lit::new(token::Bool, name, None)
}
token::Literal(lit) => lit,
token::Interpolated(ref nt) => {
if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt
if let token::NtLiteral(expr) = &**nt
&& let ast::ExprKind::Lit(lit) = &expr.kind
{
return Ok(lit.clone());
Expand Down
1 change: 0 additions & 1 deletion compiler/rustc_ast_pretty/src/pprust/state.rs
Original file line number Diff line number Diff line change
Expand Up @@ -727,7 +727,6 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere

fn nonterminal_to_string(&self, nt: &Nonterminal) -> String {
match *nt {
token::NtExpr(ref e) => self.expr_to_string(e),
token::NtMeta(ref e) => self.attr_item_to_string(e),
token::NtTy(ref e) => self.ty_to_string(e),
token::NtPath(ref e) => self.path_to_string(e),
Expand Down
8 changes: 1 addition & 7 deletions compiler/rustc_builtin_macros/src/cfg_eval.rs
Original file line number Diff line number Diff line change
Expand Up @@ -173,13 +173,7 @@ impl CfgEval<'_, '_> {
}
_ => unreachable!(),
};
let nt = annotatable.into_nonterminal();

let mut orig_tokens = rustc_parse::nt_to_tokenstream(
&nt,
&self.cfg.sess.parse_sess,
CanSynthesizeMissingTokens::No,
);
let mut orig_tokens = annotatable.into_tokens(&self.cfg.sess.parse_sess);

// 'Flatten' all nonterminals (i.e. `TokenKind::Interpolated`)
// to `None`-delimited groups containing the corresponding tokens. This
Expand Down
13 changes: 8 additions & 5 deletions compiler/rustc_expand/src/base.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ use rustc_data_structures::sync::{self, Lrc};
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, MultiSpan, PResult};
use rustc_lint_defs::builtin::PROC_MACRO_BACK_COMPAT;
use rustc_lint_defs::BuiltinLintDiagnostics;
use rustc_parse::{self, nt_to_tokenstream, parser, MACRO_ARGUMENTS};
use rustc_parse::{self, expr_to_tokenstream, nt_to_tokenstream, parser, MACRO_ARGUMENTS};
use rustc_session::{parse::ParseSess, Limit, Session};
use rustc_span::def_id::{CrateNum, DefId, LocalDefId};
use rustc_span::edition::Edition;
Expand Down Expand Up @@ -119,8 +119,8 @@ impl Annotatable {
token::NtItem(P(item.and_then(ast::ForeignItem::into_item)))
}
Annotatable::Stmt(stmt) => token::NtStmt(stmt),
Annotatable::Expr(expr) => token::NtExpr(expr),
Annotatable::Arm(..)
Annotatable::Expr(..)
| Annotatable::Arm(..)
| Annotatable::ExprField(..)
| Annotatable::PatField(..)
| Annotatable::GenericParam(..)
Expand All @@ -131,8 +131,11 @@ impl Annotatable {
}
}

crate fn into_tokens(self, sess: &ParseSess) -> TokenStream {
nt_to_tokenstream(&self.into_nonterminal(), sess, CanSynthesizeMissingTokens::No)
/// Flattens this annotatable back into the token stream it was parsed from.
pub fn into_tokens(self, sess: &ParseSess) -> TokenStream {
    // Expressions get a dedicated conversion path (they are no longer
    // representable as a `Nonterminal`); every other kind goes through
    // its `Nonterminal` form.
    match self {
        Annotatable::Expr(expr) => {
            expr_to_tokenstream(&expr, sess, CanSynthesizeMissingTokens::No)
        }
        other => {
            nt_to_tokenstream(&other.into_nonterminal(), sess, CanSynthesizeMissingTokens::No)
        }
    }
}

pub fn expect_item(self) -> P<ast::Item> {
Expand Down
5 changes: 5 additions & 0 deletions compiler/rustc_expand/src/mbe/macro_parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@ crate use ParseResult::*;

use crate::mbe::{KleeneOp, TokenTree};

use rustc_ast as ast;
use rustc_ast::token::{self, DocComment, Nonterminal, NonterminalKind, Token};
use rustc_parse::parser::{NtOrTt, Parser};
use rustc_span::symbol::MacroRulesNormalizedIdent;
Expand Down Expand Up @@ -344,6 +345,9 @@ crate enum NamedMatch {
// A metavar match of type `tt`.
MatchedTokenTree(rustc_ast::tokenstream::TokenTree),

// njn: comment
MatchedExpr(ast::ptr::P<ast::Expr>), // njn: quals

// A metavar match of any type other than `tt`.
MatchedNonterminal(Lrc<Nonterminal>),
}
Expand Down Expand Up @@ -624,6 +628,7 @@ impl TtParser {
let m = match nt {
NtOrTt::Nt(nt) => MatchedNonterminal(Lrc::new(nt)),
NtOrTt::Tt(tt) => MatchedTokenTree(tt),
NtOrTt::Expr(e) => MatchedExpr(e),
};
mp.push_match(next_metavar, seq_depth, m);
mp.idx += 1;
Expand Down
33 changes: 27 additions & 6 deletions compiler/rustc_expand/src/mbe/transcribe.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
use crate::base::ExtCtxt;
use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, MatchedTokenTree, NamedMatch};
use crate::mbe::macro_parser::{NamedMatch, NamedMatch::*};
use crate::mbe::{self, MetaVarExpr};
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
use rustc_ast::tokenstream::CanSynthesizeMissingTokens;
use rustc_ast::tokenstream::{DelimSpan, Spacing, TokenStream, TokenTree, TreeAndSpacing};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{pluralize, PResult};
use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
use rustc_parse::expr_to_tokenstream;
use rustc_span::hygiene::{LocalExpnId, Transparency};
use rustc_span::symbol::{sym, Ident, MacroRulesNormalizedIdent};
use rustc_span::Span;
Expand Down Expand Up @@ -230,13 +232,31 @@ pub(super) fn transcribe<'a>(
result.push(token.into());
}
MatchedNonterminal(ref nt) => {
// njn: update comment
// Other variables are emitted into the output stream as groups with
// `Delimiter::Invisible` to maintain parsing priorities.
// `Interpolated` is currently used for such groups in rustc parser.
marker.visit_span(&mut sp);
let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
let token =
TokenTree::token(token::Interpolated(nt.clone()), sp);
result.push(token.into());
}
MatchedExpr(ref e) => {
// njn: update comment
let tts = expr_to_tokenstream(
e,
&cx.sess.parse_sess,
CanSynthesizeMissingTokens::No,
);
marker.visit_span(&mut sp);
let tt = TokenTree::Delimited(
DelimSpan::from_single(sp),
// njn: `skip: false`!
Delimiter::Invisible { skip: false },
tts,
);
result.push((tt, Spacing::Alone));
}
MatchedSeq(..) => {
// We were unable to descend far enough. This is an error.
return Err(cx.struct_span_err(
Expand Down Expand Up @@ -306,7 +326,7 @@ fn lookup_cur_matched<'a>(
let mut matched = matched;
for &(idx, _) in repeats {
match matched {
MatchedTokenTree(_) | MatchedNonterminal(_) => break,
MatchedTokenTree(_) | MatchedNonterminal(_) | MatchedExpr(_) => break,
MatchedSeq(ref ads) => matched = ads.get(idx).unwrap(),
}
}
Expand Down Expand Up @@ -396,7 +416,7 @@ fn lockstep_iter_size(
let name = MacroRulesNormalizedIdent::new(name);
match lookup_cur_matched(name, interpolations, repeats) {
Some(matched) => match matched {
MatchedTokenTree(_) | MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
MatchedTokenTree(_) | MatchedNonterminal(_) | MatchedExpr(_) => LockstepIterSize::Unconstrained,
MatchedSeq(ref ads) => LockstepIterSize::Constraint(ads.len(), name),
},
_ => LockstepIterSize::Unconstrained,
Expand Down Expand Up @@ -443,7 +463,8 @@ fn count_repetitions<'a>(
sp: &DelimSpan,
) -> PResult<'a, usize> {
match matched {
MatchedTokenTree(_) | MatchedNonterminal(_) => {
// njn: flip order of these arms, and in similar matches above
MatchedTokenTree(_) | MatchedNonterminal(_) | MatchedExpr(_) => {
if declared_lhs_depth == 0 {
return Err(cx.struct_span_err(
sp.entire(),
Expand Down
32 changes: 29 additions & 3 deletions compiler/rustc_parse/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -290,15 +290,15 @@ pub fn nt_to_tokenstream(
Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.as_ref()),
Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.as_ref()),
Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.as_ref()),
Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
Nonterminal::NtLiteral(ref expr) => {
prepend_attrs(&expr.attrs, expr.tokens.as_ref())
}
};

if let Some(tokens) = tokens {
return tokens;
tokens
} else if matches!(synthesize_tokens, CanSynthesizeMissingTokens::Yes) {
return fake_token_stream(sess, nt);
fake_token_stream(sess, nt)
} else {
panic!(
"Missing tokens for nt {:?} at {:?}: {:?}",
Expand All @@ -309,6 +309,26 @@ pub fn nt_to_tokenstream(
}
}

/// Converts an expression into a `TokenStream`.
///
/// Prefers the expression's captured tokens (with its attributes
/// prepended) when they are present. Otherwise, if `synthesize_tokens`
/// permits it, pretty-prints the expression and re-lexes the result into
/// a fresh stream via `fake_token_stream_expr`.
///
/// # Panics
///
/// Panics when the expression has no captured tokens and synthesis is
/// not permitted — that indicates the AST node lost its tokens somewhere
/// upstream.
pub fn expr_to_tokenstream(
    expr: &ast::Expr,
    sess: &ParseSess,
    synthesize_tokens: CanSynthesizeMissingTokens,
) -> TokenStream {
    if let Some(tokens) = prepend_attrs(&expr.attrs, expr.tokens.as_ref()) {
        tokens
    } else if matches!(synthesize_tokens, CanSynthesizeMissingTokens::Yes) {
        // Fall back to pretty-print + re-parse. This loses the original
        // spans, so it is only done when the caller explicitly allows it.
        fake_token_stream_expr(sess, expr)
    } else {
        panic!(
            "Missing tokens for expr {:?} at {:?}: {:?}",
            expr,
            expr.span,
            pprust::expr_to_string(expr)
        );
    }
}

fn prepend_attrs(attrs: &[Attribute], tokens: Option<&LazyTokenStream>) -> Option<TokenStream> {
let tokens = tokens?;
if attrs.is_empty() {
Expand All @@ -328,6 +348,12 @@ pub fn fake_token_stream(sess: &ParseSess, nt: &Nonterminal) -> TokenStream {
parse_stream_from_source_str(filename, source, sess, Some(nt.span()))
}

/// Builds a token stream for `expr` by pretty-printing it and lexing the
/// resulting text as macro-expansion source code, anchored at the
/// expression's span.
pub fn fake_token_stream_expr(sess: &ParseSess, expr: &ast::Expr) -> TokenStream {
    let printed = pprust::expr_to_string(expr);
    let name = FileName::macro_expansion_source_code(&printed);
    parse_stream_from_source_str(name, printed, sess, Some(expr.span))
}

pub fn fake_token_stream_for_crate(sess: &ParseSess, krate: &ast::Crate) -> TokenStream {
let source = pprust::crate_to_string_for_macros(krate);
let filename = FileName::macro_expansion_source_code(&source);
Expand Down
Loading

0 comments on commit 6b6f97f

Please sign in to comment.