diff --git a/Cargo.lock b/Cargo.lock index 3a4f028e695f4..2a875d68334ae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3809,6 +3809,7 @@ dependencies = [ "rustc_lexer", "rustc_lint_defs", "rustc_macros", + "rustc_middle", "rustc_parse", "rustc_serialize", "rustc_session", diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index 099a6096d0b5d..7fb99b6a5e00e 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -17,8 +17,9 @@ use rustc_span::symbol::{Ident, Symbol}; use rustc_span::{edition::Edition, ErrorGuaranteed, Span, DUMMY_SP}; use std::borrow::Cow; use std::fmt; +use std::hash::Hash; -#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] +#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic, Hash)] pub enum CommentKind { Line, Block, diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index fb666550e9302..8e18741db1dc1 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -106,7 +106,6 @@ where } } } - pub trait ToAttrTokenStream: sync::DynSend + sync::DynSync { fn to_attr_token_stream(&self) -> AttrTokenStream; } diff --git a/compiler/rustc_expand/Cargo.toml b/compiler/rustc_expand/Cargo.toml index ce014364b0d01..5e9092c84d074 100644 --- a/compiler/rustc_expand/Cargo.toml +++ b/compiler/rustc_expand/Cargo.toml @@ -20,6 +20,7 @@ rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_lexer = { path = "../rustc_lexer" } rustc_lint_defs = { path = "../rustc_lint_defs" } rustc_macros = { path = "../rustc_macros" } +rustc_middle = { path = "../rustc_middle" } rustc_parse = { path = "../rustc_parse" } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index 12868a666056d..6068ca23a58be 100644 --- a/compiler/rustc_expand/src/base.rs +++ 
b/compiler/rustc_expand/src/base.rs @@ -16,6 +16,7 @@ use rustc_errors::{DiagCtxt, ErrorGuaranteed, PResult}; use rustc_feature::Features; use rustc_lint_defs::builtin::PROC_MACRO_BACK_COMPAT; use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiag, RegisteredTools}; +use rustc_middle::expand::{CanRetry, TcxMacroExpander}; use rustc_parse::{parser, MACRO_ARGUMENTS}; use rustc_session::config::CollapseMacroDebuginfo; use rustc_session::{parse::ParseSess, Limit, Session}; @@ -676,6 +677,11 @@ pub enum SyntaxExtensionKind { Box, ), + TcxLegacyBang( + /// An expander with signature TokenStream -> AST. + Lrc, + ), + /// A token-based attribute macro. Attr( /// An expander with signature (TokenStream, TokenStream) -> TokenStream. @@ -749,7 +755,9 @@ impl SyntaxExtension { /// Returns which kind of macro calls this syntax extension. pub fn macro_kind(&self) -> MacroKind { match self.kind { - SyntaxExtensionKind::Bang(..) | SyntaxExtensionKind::LegacyBang(..) => MacroKind::Bang, + SyntaxExtensionKind::Bang(..) + | SyntaxExtensionKind::LegacyBang(..) + | SyntaxExtensionKind::TcxLegacyBang(..) => MacroKind::Bang, SyntaxExtensionKind::Attr(..) | SyntaxExtensionKind::LegacyAttr(..) | SyntaxExtensionKind::NonMacroAttr => MacroKind::Attr, @@ -1031,6 +1039,12 @@ pub trait ResolverExpand { /// Tools registered with `#![register_tool]` and used by tool attributes and lints. 
fn registered_tools(&self) -> &RegisteredTools; + + fn expand_legacy_bang( + &self, + invoc_id: LocalExpnId, + current_expansion: LocalExpnId, + ) -> Result<(TokenStream, usize), CanRetry>; } pub trait LintStoreExpand { diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index a049ac251e1e7..e4712c697099d 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -14,7 +14,9 @@ use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter}; use rustc_ast::tokenstream::TokenStream; use rustc_ast::visit::{self, try_visit, walk_list, AssocCtxt, Visitor, VisitorResult}; -use rustc_ast::{AssocItemKind, AstNodeWrapper, AttrArgs, AttrStyle, AttrVec, ExprKind}; +use rustc_ast::{ + AssocItemKind, AstNodeWrapper, AttrArgs, AttrStyle, AttrVec, ExprKind, DUMMY_NODE_ID, +}; use rustc_ast::{ForeignItemKind, HasAttrs, HasNodeId}; use rustc_ast::{Inline, ItemKind, MacStmtStyle, MetaItemKind, ModKind}; use rustc_ast::{NestedMetaItem, NodeId, PatKind, StmtKind, TyKind}; @@ -35,11 +37,15 @@ use rustc_span::hygiene::SyntaxContext; use rustc_span::symbol::{sym, Ident}; use rustc_span::{ErrorGuaranteed, FileName, LocalExpnId, Span}; +use crate::mbe::macro_rules::{trace_macros_note, ParserAnyMacro}; +use rustc_middle::expand::CanRetry; +use rustc_middle::ty::TyCtxt; use smallvec::SmallVec; use std::ops::Deref; use std::path::PathBuf; use std::rc::Rc; use std::{iter, mem}; +use tracing::debug; macro_rules! ast_fragments { ( @@ -387,6 +393,18 @@ pub struct MacroExpander<'a, 'b> { monotonic: bool, // cf. 
`cx.monotonic_expander()` } +pub fn expand_legacy_bang<'tcx>( + tcx: TyCtxt<'tcx>, + key: (LocalExpnId, LocalExpnId), +) -> Result<(&'tcx TokenStream, usize), CanRetry> { + let (invoc_id, current_expansion) = key; + let map = tcx.macro_map.borrow(); + let (arg, span, expander) = map.get(&invoc_id).as_ref().unwrap(); + expander + .expand(&tcx.sess, *span, arg.clone(), current_expansion) + .map(|(tts, i)| (tcx.arena.alloc(tts) as &TokenStream, i)) +} + impl<'a, 'b> MacroExpander<'a, 'b> { pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self { MacroExpander { cx, monotonic } } @@ -672,6 +690,67 @@ impl<'a, 'b> MacroExpander<'a, 'b> { Err(guar) => return ExpandResult::Ready(fragment_kind.dummy(span, guar)), } } + SyntaxExtensionKind::TcxLegacyBang(expander) => { + // Macros defined in the current crate have a real node id, + // whereas macros from an external crate have a dummy id. + if self.cx.trace_macros() { + let msg = format!( + "expanding `{}! {{ {} }}`", + expander.name(), + pprust::tts_to_string(&mac.args.tokens) + ); + trace_macros_note(&mut self.cx.expansions, span, msg); + } + + // Macros defined in the current crate have a real node id, + // whereas macros from an external crate have a dummy id. + let tok_result: Box = match self + .cx + .resolver + .expand_legacy_bang(invoc.expansion_data.id, self.cx.current_expansion.id) + { + Ok((tts, i)) => { + if self.cx.trace_macros() { + let msg = format!("to `{}`", pprust::tts_to_string(&tts)); + trace_macros_note(&mut self.cx.expansions, span, msg); + } + let is_local = expander.node_id() != DUMMY_NODE_ID; + if is_local { + self.cx.resolver.record_macro_rule_usage(expander.node_id(), i); + } + + // Let the context choose how to interpret the result. + // Weird, but useful for X-macros.
+ Box::new(ParserAnyMacro::new( + Parser::new(&self.cx.sess.psess, tts.clone(), None), + // Pass along the original expansion site and the name of the macro, + // so we can print a useful error message if the parse of the expanded + // macro leaves unparsed tokens. + span, + expander.name(), + self.cx.current_expansion.lint_node_id, + self.cx.current_expansion.is_trailing_mac, + expander.arm_span(i), + is_local, + )) + } + Err(CanRetry::No(guar)) => { + debug!("Will not retry matching as an error was emitted already"); + DummyResult::any(span, guar) + } + Err(CanRetry::Yes) => { + // Retry and emit a better error. + DummyResult::any_valid(span) + } + }; + let result = if let Some(result) = fragment_kind.make_from(tok_result) { + result + } else { + let guar = self.error_wrong_fragment_kind(fragment_kind, &mac, span); + fragment_kind.dummy(span, guar) + }; + result + } SyntaxExtensionKind::LegacyBang(expander) => { let tok_result = match expander.expand(self.cx, span, mac.args.tokens.clone()) { ExpandResult::Ready(tok_result) => tok_result, diff --git a/compiler/rustc_expand/src/lib.rs b/compiler/rustc_expand/src/lib.rs index 4222c9fe90616..be9f15ddfa437 100644 --- a/compiler/rustc_expand/src/lib.rs +++ b/compiler/rustc_expand/src/lib.rs @@ -26,6 +26,7 @@ mod placeholders; mod proc_macro_server; pub use mbe::macro_rules::compile_declarative_macro; +use rustc_middle::query::Providers; pub mod base; pub mod config; pub mod expand; @@ -34,4 +35,8 @@ pub mod module; #[allow(rustc::untranslatable_diagnostic)] pub mod proc_macro; +pub fn provide(providers: &mut Providers) { + providers.expand_legacy_bang = expand::expand_legacy_bang; +} + rustc_fluent_macro::fluent_messages! 
{ "../messages.ftl" } diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs index 442fd654b6ae7..b611c70c16220 100644 --- a/compiler/rustc_expand/src/mbe/diagnostics.rs +++ b/compiler/rustc_expand/src/mbe/diagnostics.rs @@ -18,7 +18,7 @@ use tracing::debug; use super::macro_rules::{parser_from_cx, NoopTracker}; -pub(super) fn failed_to_match_macro<'cx>( +pub(crate) fn failed_to_match_macro<'cx>( cx: &'cx mut ExtCtxt<'_>, sp: Span, def_span: Span, diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index 8f18055f83817..5b9413105f07b 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -28,9 +28,10 @@ use rustc_session::Session; use rustc_span::edition::Edition; use rustc_span::hygiene::Transparency; use rustc_span::symbol::{kw, sym, Ident, MacroRulesNormalizedIdent}; -use rustc_span::Span; +use rustc_span::{LocalExpnId, Span}; use tracing::{debug, instrument, trace, trace_span}; +use rustc_data_structures::sync::Lrc; use std::borrow::Cow; use std::collections::hash_map::Entry; use std::{mem, slice}; @@ -38,6 +39,8 @@ use std::{mem, slice}; use super::diagnostics; use super::macro_parser::{NamedMatches, NamedParseResult}; +use rustc_middle::expand::{CanRetry, TcxMacroExpander}; + pub(crate) struct ParserAnyMacro<'a> { parser: Parser<'a>, @@ -53,6 +56,18 @@ pub(crate) struct ParserAnyMacro<'a> { } impl<'a> ParserAnyMacro<'a> { + pub(crate) fn new( + parser: Parser<'a>, + site_span: Span, + macro_ident: Ident, + lint_node_id: NodeId, + is_trailing_mac: bool, + arm_span: Span, + is_local: bool, + ) -> Self { + Self { parser, site_span, macro_ident, lint_node_id, is_trailing_mac, arm_span, is_local } + } + pub(crate) fn make(mut self: Box>, kind: AstFragmentKind) -> AstFragment { let ParserAnyMacro { site_span, @@ -127,6 +142,54 @@ impl TTMacroExpander for MacroRulesMacroExpander { } } +impl TcxMacroExpander 
for MacroRulesMacroExpander { + fn expand( + &self, + sess: &Session, + sp: Span, + input: TokenStream, + expand_id: LocalExpnId, + ) -> Result<(TokenStream, usize), CanRetry> { + // Track nothing for the best performance. + let try_success_result = + try_match_macro(&sess.psess, self.name, &input, &self.lhses, &mut NoopTracker); + + match try_success_result { + Ok((i, named_matches)) => { + let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &self.rhses[i] { + mbe::TokenTree::Delimited(span, _, delimited) => (&delimited, *span), + _ => sess.dcx().span_bug(sp, "malformed macro rhs"), + }; + + // rhs has holes ( `$id` and `$(...)` that need filled) + match transcribe( + &sess.psess, + &named_matches, + rhs, + rhs_span, + self.transparency, + expand_id, + ) { + Ok(tts) => Ok((tts, i)), + Err(err) => Err(CanRetry::No(err.emit())), + } + } + Err(e) => Err(e), + } + } + + fn name(&self) -> Ident { + self.name + } + + fn arm_span(&self, rhs: usize) -> Span { + self.rhses[rhs].span() + } + + fn node_id(&self) -> NodeId { + self.node_id + } +} struct DummyExpander(ErrorGuaranteed); impl TTMacroExpander for DummyExpander { @@ -140,7 +203,11 @@ impl TTMacroExpander for DummyExpander { } } -fn trace_macros_note(cx_expansions: &mut FxIndexMap>, sp: Span, message: String) { +pub(crate) fn trace_macros_note( + cx_expansions: &mut FxIndexMap>, + sp: Span, + message: String, +) { let sp = sp.macro_backtrace().last().map_or(sp, |trace| trace.call_site); cx_expansions.entry(sp).or_default().push(message); } @@ -224,7 +291,14 @@ fn expand_macro<'cx>( let arm_span = rhses[i].span(); // rhs has holes ( `$id` and `$(...)` that need filled) - let tts = match transcribe(cx, &named_matches, rhs, rhs_span, transparency) { + let tts = match transcribe( + &cx.sess.psess, + &named_matches, + rhs, + rhs_span, + transparency, + cx.current_expansion.id, + ) { Ok(tts) => tts, Err(err) => { let guar = err.emit(); @@ -270,12 +344,6 @@ fn expand_macro<'cx>( } } -pub(super) enum CanRetry { - 
Yes, - /// We are not allowed to retry macro expansion as a fatal error has been emitted already. - No(ErrorGuaranteed), -} - /// Try expanding the macro. Returns the index of the successful arm and its named_matches if it was successful, /// and nothing if it failed. On failure, it's the callers job to use `track` accordingly to record all errors /// correctly. @@ -382,7 +450,19 @@ pub fn compile_declarative_macro( ) }; let dummy_syn_ext = |guar| (mk_syn_ext(Box::new(DummyExpander(guar))), Vec::new()); - + let mk_tcx_syn_ext = |expander| { + SyntaxExtension::new( + sess, + features, + SyntaxExtensionKind::TcxLegacyBang(expander), + def.span, + Vec::new(), + edition, + def.ident.name, + &def.attrs, + def.id != DUMMY_NODE_ID, + ) + }; let dcx = &sess.psess.dcx; let lhs_nm = Ident::new(sym::lhs, def.span); let rhs_nm = Ident::new(sym::rhs, def.span); @@ -595,7 +675,7 @@ pub fn compile_declarative_macro( }) .collect(); - let expander = Box::new(MacroRulesMacroExpander { + let expander = Lrc::new(MacroRulesMacroExpander { name: def.ident, span: def.span, node_id: def.id, @@ -603,7 +683,7 @@ pub fn compile_declarative_macro( lhses, rhses, }); - (mk_syn_ext(expander), rule_spans) + (mk_tcx_syn_ext(expander), rule_spans) } fn check_lhs_nt_follows( diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs index 3901b82eb52ec..313da026c2359 100644 --- a/compiler/rustc_expand/src/mbe/transcribe.rs +++ b/compiler/rustc_expand/src/mbe/transcribe.rs @@ -1,4 +1,3 @@ -use crate::base::ExtCtxt; use crate::errors::{ CountRepetitionMisplaced, MetaVarExprUnrecognizedVar, MetaVarsDifSeqMatchers, MustRepeatOnce, NoSyntaxVarsExprRepeat, VarStillRepeating, @@ -11,10 +10,12 @@ use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, Toke use rustc_data_structures::fx::FxHashMap; use rustc_errors::{pluralize, Diag, PResult}; use rustc_parse::parser::ParseNtResult; +use rustc_errors::DiagCtxt; use 
rustc_span::hygiene::{LocalExpnId, Transparency}; use rustc_span::symbol::{sym, Ident, MacroRulesNormalizedIdent}; use rustc_span::{with_metavar_spans, Span, SyntaxContext}; +use rustc_session::parse::ParseSess; use smallvec::{smallvec, SmallVec}; use std::mem; @@ -99,11 +100,12 @@ impl<'a> Iterator for Frame<'a> { /// /// Along the way, we do some additional error checking. pub(super) fn transcribe<'a>( - cx: &ExtCtxt<'a>, + sess: &'a ParseSess, interp: &FxHashMap, src: &mbe::Delimited, src_span: DelimSpan, transparency: Transparency, + expand_id: LocalExpnId, ) -> PResult<'a, TokenStream> { // Nothing for us to transcribe... if src.tts.is_empty() { @@ -137,7 +139,7 @@ pub(super) fn transcribe<'a>( // again, and we are done transcribing. let mut result: Vec = Vec::new(); let mut result_stack = Vec::new(); - let mut marker = Marker(cx.current_expansion.id, transparency, Default::default()); + let mut marker = Marker(expand_id, transparency, Default::default()); loop { // Look at the last frame on the stack. @@ -201,8 +203,8 @@ pub(super) fn transcribe<'a>( seq @ mbe::TokenTree::Sequence(_, seq_rep) => { match lockstep_iter_size(seq, interp, &repeats) { LockstepIterSize::Unconstrained => { - return Err(cx - .dcx() + return Err(sess + .dcx .create_err(NoSyntaxVarsExprRepeat { span: seq.span() })); } @@ -211,8 +213,8 @@ pub(super) fn transcribe<'a>( // happens when two meta-variables are used in the same repetition in a // sequence, but they come from different sequence matchers and repeat // different amounts. - return Err(cx - .dcx() + return Err(sess + .dcx .create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg })); } @@ -227,8 +229,8 @@ pub(super) fn transcribe<'a>( // FIXME: this really ought to be caught at macro definition // time... It happens when the Kleene operator in the matcher and // the body for the same meta-variable do not match. 
- return Err(cx - .dcx() + return Err(sess + .dcx .create_err(MustRepeatOnce { span: sp.entire() })); } } else { @@ -259,7 +261,7 @@ pub(super) fn transcribe<'a>( MatchedSingle(ParseNtResult::Tt(tt)) => { // `tt`s are emitted into the output stream directly as "raw tokens", // without wrapping them into groups. - maybe_use_metavar_location(cx, &stack, sp, tt, &mut marker) + maybe_use_metavar_location(sess, &stack, sp, tt, &mut marker) } MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => { marker.visit_span(&mut sp); @@ -280,7 +282,7 @@ pub(super) fn transcribe<'a>( } MatchedSeq(..) => { // We were unable to descend far enough. This is an error. - return Err(cx.dcx().create_err(VarStillRepeating { span: sp, ident })); + return Err(sess.dcx.create_err(VarStillRepeating { span: sp, ident })); } }; result.push(tt) @@ -299,7 +301,15 @@ pub(super) fn transcribe<'a>( // Replace meta-variable expressions with the result of their expansion. mbe::TokenTree::MetaVarExpr(sp, expr) => { - transcribe_metavar_expr(cx, expr, interp, &mut marker, &repeats, &mut result, sp)?; + transcribe_metavar_expr( + expr, + interp, + &mut marker, + &repeats, + &mut result, + sp, + &sess.dcx, + )?; } // If we are entering a new delimiter, we push its contents to the `stack` to be @@ -359,7 +369,7 @@ pub(super) fn transcribe<'a>( /// combine with each other and not with tokens outside of the sequence. /// - The metavariable span comes from a different crate, then we prefer the more local span. 
fn maybe_use_metavar_location( - cx: &ExtCtxt<'_>, + sess: &ParseSess, stack: &[Frame<'_>], mut metavar_span: Span, orig_tt: &TokenTree, @@ -397,7 +407,7 @@ fn maybe_use_metavar_location( && insert(mspans, dspan.entire(), metavar_span) }), }; - if no_collision || cx.source_map().is_imported(metavar_span) { + if no_collision || sess.source_map().is_imported(metavar_span) { return orig_tt.clone(); } @@ -558,23 +568,21 @@ fn lockstep_iter_size( /// * `[ $( ${count(foo, 1)} ),* ]` will return an error because `${count(foo, 1)}` is /// declared inside a single repetition and the index `1` implies two nested repetitions. fn count_repetitions<'a>( - cx: &ExtCtxt<'a>, depth_user: usize, mut matched: &NamedMatch, repeats: &[(usize, usize)], sp: &DelimSpan, + dcx: &'a DiagCtxt, ) -> PResult<'a, usize> { // Recursively count the number of matches in `matched` at given depth // (or at the top-level of `matched` if no depth is given). - fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> { - match matched { + fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> { match matched { MatchedSingle(_) => Ok(1), MatchedSeq(named_matches) => { if depth_curr == depth_max { Ok(named_matches.len()) } else { - named_matches.iter().map(|elem| count(depth_curr + 1, depth_max, elem)).sum() - } + named_matches.iter().map(|elem| count(depth_curr + 1, depth_max, elem)).sum() } } } } @@ -595,7 +603,7 @@ fn count_repetitions<'a>( .and_then(|el| el.checked_sub(repeats.len())) .unwrap_or_default(); if depth_user > depth_max { - return Err(out_of_bounds_err(cx, depth_max + 1, sp.entire(), "count")); + return Err(out_of_bounds_err(dcx, depth_max + 1, sp.entire(), "count")); } // `repeats` records all of the nested levels at which we are currently @@ -611,7 +619,7 @@ fn count_repetitions<'a>( } if let MatchedSingle(_) = matched { - return Err(cx.dcx().create_err(CountRepetitionMisplaced { span: sp.entire() })); + return 
Err(dcx.create_err(CountRepetitionMisplaced { span: sp.entire() })); } count(depth_user, depth_max, matched) @@ -619,22 +627,21 @@ fn count_repetitions<'a>( /// Returns a `NamedMatch` item declared on the LHS given an arbitrary [Ident] fn matched_from_ident<'ctx, 'interp, 'rslt>( - cx: &ExtCtxt<'ctx>, ident: Ident, interp: &'interp FxHashMap, + dcx: &'ctx DiagCtxt, ) -> PResult<'ctx, &'rslt NamedMatch> where 'interp: 'rslt, { let span = ident.span; let key = MacroRulesNormalizedIdent::new(ident); - interp.get(&key).ok_or_else(|| cx.dcx().create_err(MetaVarExprUnrecognizedVar { span, key })) + interp.get(&key).ok_or_else(|| dcx.create_err(MetaVarExprUnrecognizedVar { span, key })) } /// Used by meta-variable expressions when an user input is out of the actual declared bounds. For /// example, index(999999) in an repetition of only three elements. -fn out_of_bounds_err<'a>(cx: &ExtCtxt<'a>, max: usize, span: Span, ty: &str) -> Diag<'a> { - let msg = if max == 0 { +fn out_of_bounds_err<'a>(dcx: &'a DiagCtxt, max: usize, span: Span, ty: &str) -> Diag<'a> { let msg = if max == 0 { format!( "meta-variable expression `{ty}` with depth parameter \ must be called inside of a macro repetition" @@ -645,17 +652,17 @@ fn out_of_bounds_err<'a>(cx: &ExtCtxt<'a>, max: usize, span: Span, ty: &str) -> must be less than {max}" ) }; - cx.dcx().struct_span_err(span, msg) + dcx.struct_span_err(span, msg) } fn transcribe_metavar_expr<'a>( - cx: &ExtCtxt<'a>, expr: &MetaVarExpr, interp: &FxHashMap, marker: &mut Marker, repeats: &[(usize, usize)], result: &mut Vec, sp: &DelimSpan, + dcx: &'a DiagCtxt, ) -> PResult<'a, ()> { let mut visited_span = || { let mut span = sp.entire(); @@ -664,8 +671,8 @@ fn transcribe_metavar_expr<'a>( }; match *expr { MetaVarExpr::Count(original_ident, depth) => { - let matched = matched_from_ident(cx, original_ident, interp)?; - let count = count_repetitions(cx, depth, matched, repeats, sp)?; + let matched = matched_from_ident(original_ident, interp, dcx)?; + 
let count = count_repetitions(depth, matched, repeats, sp, dcx)?; let tt = TokenTree::token_alone( TokenKind::lit(token::Integer, sym::integer(count), None), visited_span(), @@ -674,7 +681,7 @@ fn transcribe_metavar_expr<'a>( } MetaVarExpr::Ignore(original_ident) => { // Used to ensure that `original_ident` is present in the LHS - let _ = matched_from_ident(cx, original_ident, interp)?; + let _ = matched_from_ident(original_ident, interp, dcx)?; } MetaVarExpr::Index(depth) => match repeats.iter().nth_back(depth) { Some((index, _)) => { @@ -683,7 +690,7 @@ fn transcribe_metavar_expr<'a>( visited_span(), )); } - None => return Err(out_of_bounds_err(cx, repeats.len(), sp.entire(), "index")), + None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "index")), }, MetaVarExpr::Len(depth) => match repeats.iter().nth_back(depth) { Some((_, length)) => { @@ -692,7 +699,7 @@ fn transcribe_metavar_expr<'a>( visited_span(), )); } - None => return Err(out_of_bounds_err(cx, repeats.len(), sp.entire(), "len")), + None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "len")), }, } Ok(()) diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index 76d5d7a3ac2fd..eddaa1ac34fa7 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -620,6 +620,7 @@ pub static DEFAULT_QUERY_PROVIDERS: LazyLock = LazyLock::new(|| { rustc_monomorphize::provide(providers); rustc_privacy::provide(providers); rustc_resolve::provide(providers); + rustc_expand::provide(providers); rustc_hir_analysis::provide(providers); rustc_hir_typeck::provide(providers); ty::provide(providers); diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs index 7392eb6c2bb4d..9c603aeb4150e 100644 --- a/compiler/rustc_middle/src/arena.rs +++ b/compiler/rustc_middle/src/arena.rs @@ -116,6 +116,7 @@ macro_rules! 
arena_types { [decode] specialization_graph: rustc_middle::traits::specialization_graph::Graph, [] crate_inherent_impls: rustc_middle::ty::CrateInherentImpls, [] hir_owner_nodes: rustc_hir::OwnerNodes<'tcx>, + [] expand_legacy_bang: rustc_ast::tokenstream::TokenStream, ]); ) } diff --git a/compiler/rustc_middle/src/expand.rs b/compiler/rustc_middle/src/expand.rs new file mode 100644 index 0000000000000..0e061c8234dc9 --- /dev/null +++ b/compiler/rustc_middle/src/expand.rs @@ -0,0 +1,29 @@ +use rustc_ast::tokenstream::TokenStream; +use rustc_ast::NodeId; +use rustc_macros::HashStable_Generic; +use rustc_session::Session; +use rustc_span::symbol::Ident; +use rustc_span::{ErrorGuaranteed, LocalExpnId, Span}; + +pub trait TcxMacroExpander { + fn expand( + &self, + _sess: &Session, + _span: Span, + _input: TokenStream, + _expand_id: LocalExpnId, + ) -> Result<(TokenStream, usize), CanRetry>; + + fn name(&self) -> Ident; + + fn arm_span(&self, rhs: usize) -> Span; + + fn node_id(&self) -> NodeId; +} + +#[derive(Copy, Clone, HashStable_Generic, Debug)] +pub enum CanRetry { + Yes, + /// We are not allowed to retry macro expansion as a fatal error has been emitted already. 
+ No(ErrorGuaranteed), +} diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs index 04fd4c8d0f7b9..5db33222c97ab 100644 --- a/compiler/rustc_middle/src/lib.rs +++ b/compiler/rustc_middle/src/lib.rs @@ -75,6 +75,7 @@ mod macros; #[macro_use] pub mod arena; pub mod error; +pub mod expand; pub mod hir; pub mod hooks; pub mod infer; @@ -93,6 +94,8 @@ pub mod query; #[macro_use] pub mod dep_graph; +use rustc_span::HashStableContext; + // Allows macros to refer to this crate as `::rustc_middle` extern crate self as rustc_middle; diff --git a/compiler/rustc_middle/src/query/erase.rs b/compiler/rustc_middle/src/query/erase.rs index 320d49ea64674..f78788fd9401e 100644 --- a/compiler/rustc_middle/src/query/erase.rs +++ b/compiler/rustc_middle/src/query/erase.rs @@ -3,6 +3,8 @@ use crate::query::CyclePlaceholder; use crate::traits; use crate::ty::adjustment::CoerceUnsizedInfo; use crate::ty::{self, Ty}; +use rustc_ast::tokenstream::TokenStream; +use rustc_middle::expand::CanRetry; use std::intrinsics::transmute_unchecked; use std::mem::{size_of, MaybeUninit}; @@ -169,6 +171,10 @@ impl EraseType for Result>, CyclePlaceholder> { type Result = [u8; size_of::>, CyclePlaceholder>>()]; } +impl EraseType for Result<(&'_ TokenStream, usize), CanRetry> { + type Result = [u8; size_of::>()]; +} + impl EraseType for Option<&'_ T> { type Result = [u8; size_of::>()]; } diff --git a/compiler/rustc_middle/src/query/keys.rs b/compiler/rustc_middle/src/query/keys.rs index faa137019cb92..33a410b6d2d23 100644 --- a/compiler/rustc_middle/src/query/keys.rs +++ b/compiler/rustc_middle/src/query/keys.rs @@ -11,7 +11,7 @@ use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LocalModDefId, ModDefId, LO use rustc_hir::hir_id::{HirId, OwnerId}; use rustc_query_system::query::{DefIdCache, DefaultCache, SingleCache, VecCache}; use rustc_span::symbol::{Ident, Symbol}; -use rustc_span::{Span, DUMMY_SP}; +use rustc_span::{LocalExpnId, Span, DUMMY_SP}; use rustc_target::abi; /// 
Placeholder for `CrateNum`'s "local" counterpart @@ -584,3 +584,15 @@ impl<'tcx> Key for (ValidityRequirement, ty::ParamEnvAnd<'tcx, Ty<'tcx>>) { } } } + +impl Key for (LocalExpnId, LocalExpnId) { + type Cache = DefaultCache; + + fn default_span(&self, _: TyCtxt<'_>) -> Span { + DUMMY_SP + } + + fn ty_def_id(&self) -> Option { + None + } +} diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index c2f7a227f6661..5f27ac3cdcfb5 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -52,10 +52,13 @@ use crate::ty::{ self, print::describe_as_module, CrateInherentImpls, ParamEnvAnd, Ty, TyCtxt, UnusedGenericParams, }; + +use crate::expand::CanRetry; use crate::ty::{GenericArg, GenericArgsRef}; use rustc_arena::TypedArena; use rustc_ast as ast; use rustc_ast::expand::{allocator::AllocatorKind, StrippedCfgItem}; +use rustc_ast::tokenstream::TokenStream; use rustc_attr as attr; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; @@ -82,7 +85,7 @@ use rustc_session::lint::LintExpectationId; use rustc_session::Limits; use rustc_span::def_id::LOCAL_CRATE; use rustc_span::symbol::Symbol; -use rustc_span::{Span, DUMMY_SP}; +use rustc_span::{LocalExpnId, Span, DUMMY_SP}; use rustc_target::abi; use rustc_target::spec::PanicStrategy; use std::mem; @@ -115,6 +118,12 @@ rustc_queries! { desc { "triggering a delayed bug for testing incremental" } } + query expand_legacy_bang(key: (LocalExpnId, LocalExpnId)) -> Result<(&'tcx TokenStream, usize), CanRetry> { + eval_always + no_hash + desc { "expand legacy bang" } + } + /// Collects the list of all tools registered using `#![register_tool]`. 
query registered_tools(_: ()) -> &'tcx ty::RegisteredTools { arena_cache diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index d75f250275bce..a78bcc1ed27e4 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -69,13 +69,15 @@ use rustc_session::lint::Lint; use rustc_session::{Limit, MetadataKind, Session}; use rustc_span::def_id::{DefPathHash, StableCrateId, CRATE_DEF_ID}; use rustc_span::symbol::{kw, sym, Ident, Symbol}; -use rustc_span::{Span, DUMMY_SP}; +use rustc_span::{LocalExpnId, Span, DUMMY_SP}; use rustc_target::abi::{FieldIdx, Layout, LayoutS, TargetDataLayout, VariantIdx}; use rustc_target::spec::abi; use rustc_type_ir::TyKind::*; use rustc_type_ir::WithCachedTypeInfo; use rustc_type_ir::{CollectAndApply, Interner, TypeFlags}; +use rustc_ast::tokenstream::TokenStream; +use rustc_middle::expand::TcxMacroExpander; use std::assert_matches::assert_matches; use std::borrow::Borrow; use std::cmp::Ordering; @@ -854,8 +856,14 @@ pub struct GlobalCtxt<'tcx> { /// Stores memory for globals (statics/consts). 
pub(crate) alloc_map: Lock>, current_gcx: CurrentGcx, + + pub macro_map: RwLock< + FxHashMap< + LocalExpnId, + (TokenStream, Span, Lrc), + >, + >, } impl<'tcx> GlobalCtxt<'tcx> { @@ -1084,6 +1092,7 @@ impl<'tcx> TyCtxt<'tcx> { canonical_param_env_cache: Default::default(), data_layout, alloc_map: Lock::new(interpret::AllocMap::new()), + macro_map: RwLock::new(Default::default()), current_gcx, } } diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs index f8d245f94e53a..e260a75bab094 100644 --- a/compiler/rustc_resolve/src/macros.rs +++ b/compiler/rustc_resolve/src/macros.rs @@ -9,6 +9,7 @@ use crate::{BuiltinMacroState, Determinacy, MacroData, Used}; use crate::{DeriveData, Finalize, ParentScope, ResolutionError, Resolver, ScopeSet}; use crate::{ModuleKind, ModuleOrUniformRoot, NameBinding, PathResult, Segment, ToNameBinding}; use rustc_ast::expand::StrippedCfgItem; +use rustc_ast::tokenstream::TokenStream; use rustc_ast::{self as ast, attr, Crate, Inline, ItemKind, ModKind, NodeId}; use rustc_ast_pretty::pprust; use rustc_attr::StabilityLevel; @@ -21,6 +22,7 @@ use rustc_expand::compile_declarative_macro; use rustc_expand::expand::{AstFragment, Invocation, InvocationKind, SupportsMacroExpansion}; use rustc_hir::def::{self, DefKind, Namespace, NonMacroAttrKind}; use rustc_hir::def_id::{CrateNum, DefId, LocalDefId}; +use rustc_middle::expand::CanRetry; use rustc_middle::middle::stability; use rustc_middle::ty::RegisteredTools; use rustc_middle::ty::{TyCtxt, Visibility}; @@ -300,6 +302,14 @@ impl<'a, 'tcx> ResolverExpand for Resolver<'a, 'tcx> { ), self.create_stable_hashing_context(), ); + if let SyntaxExtensionKind::TcxLegacyBang(tcx_expander) = &ext.kind { + if let InvocationKind::Bang { ref mac, span } = invoc.kind { + self.tcx + .macro_map + .borrow_mut() + .insert(invoc_id, (mac.args.tokens.clone(), span, tcx_expander.clone())); + } + } Ok(ext) } @@ -452,6 +462,16 @@ impl<'a, 'tcx> ResolverExpand for Resolver<'a, 'tcx> { fn 
registered_tools(&self) -> &RegisteredTools { self.registered_tools } + + fn expand_legacy_bang( + &self, + invoc_id: LocalExpnId, + current_expansion: LocalExpnId, + ) -> Result<(TokenStream, usize), CanRetry> { + self.tcx() + .expand_legacy_bang((invoc_id, current_expansion)) + .map(|(tts, i)| (tts.clone(), i)) + } } impl<'a, 'tcx> Resolver<'a, 'tcx> { diff --git a/compiler/rustc_span/src/hygiene.rs b/compiler/rustc_span/src/hygiene.rs index aa4bcefab9391..23e280af0d9e3 100644 --- a/compiler/rustc_span/src/hygiene.rs +++ b/compiler/rustc_span/src/hygiene.rs @@ -1550,3 +1550,9 @@ impl HashStable for ExpnId { hash.hash_stable(ctx, hasher); } } + +impl HashStable for LocalExpnId { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { + self.to_expn_id().hash_stable(ctx, hasher) + } +}