Cache declarative macro expansion on disk (for incremental comp.). Based on #128605 #128747

Draft
wants to merge 8 commits into base: master
1 change: 1 addition & 0 deletions Cargo.lock
@@ -3689,6 +3689,7 @@ dependencies = [
"rustc_lexer",
"rustc_lint_defs",
"rustc_macros",
"rustc_middle",
"rustc_parse",
"rustc_serialize",
"rustc_session",
3 changes: 2 additions & 1 deletion compiler/rustc_ast/src/token.rs
@@ -1,5 +1,6 @@
use std::borrow::Cow;
use std::fmt;
use std::hash::Hash;

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::Lrc;
@@ -21,7 +22,7 @@ use crate::ast;
use crate::ptr::P;
use crate::util::case::Case;

#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic, Hash)]
pub enum CommentKind {
Line,
Block,
11 changes: 8 additions & 3 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -105,7 +105,6 @@
}
}
}

pub trait ToAttrTokenStream: sync::DynSend + sync::DynSync {
fn to_attr_token_stream(&self) -> AttrTokenStream;
}
@@ -140,13 +139,19 @@ impl fmt::Debug for LazyAttrTokenStream {

impl<S: SpanEncoder> Encodable<S> for LazyAttrTokenStream {
fn encode(&self, _s: &mut S) {
panic!("Attempted to encode LazyAttrTokenStream");
tracing::debug!("ENCODING {self:?}");
self.to_attr_token_stream().encode(_s);
// panic!("Attempted to encode {self:?}");
}
}

impl<D: SpanDecoder> Decodable<D> for LazyAttrTokenStream {
fn decode(_d: &mut D) -> Self {
panic!("Attempted to decode LazyAttrTokenStream");
let ats = AttrTokenStream::decode(_d);
let res = LazyAttrTokenStream::new(ats);
tracing::debug!("DECODED {res:?}");
res
// panic!("Attempted to decode LazyAttrTokenStream");
}
}
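
The change above replaces the encode/decode panics with a real round trip: on encode the lazy stream is forced into an `AttrTokenStream` and written out (which an on-disk expansion cache needs), and on decode an eager `LazyAttrTokenStream` is rebuilt around the decoded stream. A minimal standalone sketch of that pattern follows; it is plain Rust with illustrative names (`Tokens`, `LazyTokens`), not rustc's actual `Encodable`/`Decodable` machinery.

use std::sync::Arc;

// Stand-in for AttrTokenStream: the materialized, serializable payload.
#[derive(Debug, Clone, PartialEq)]
struct Tokens(Vec<String>);

// Stand-in for LazyAttrTokenStream: a thunk that can produce the payload on demand.
struct LazyTokens(Arc<dyn Fn() -> Tokens>);

impl LazyTokens {
    fn new_eager(tokens: Tokens) -> Self {
        LazyTokens(Arc::new(move || tokens.clone()))
    }

    fn force(&self) -> Tokens {
        (self.0)()
    }

    // "Encode": force the lazy value and store its result instead of panicking.
    fn encode(&self) -> Vec<String> {
        self.force().0
    }

    // "Decode": rebuild an eager wrapper around the stored tokens.
    fn decode(stored: Vec<String>) -> Self {
        LazyTokens::new_eager(Tokens(stored))
    }
}

fn main() {
    let lazy = LazyTokens(Arc::new(|| Tokens(vec!["println".into(), "!".into()])));
    let on_disk = lazy.encode(); // what would be written to the cache
    let restored = LazyTokens::decode(on_disk);
    assert_eq!(lazy.force(), restored.force());
}

The point of the pattern is that the closure behind the lazy value is never serialized; only its forced result is, which is all a cached expansion needs.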

1 change: 1 addition & 0 deletions compiler/rustc_expand/Cargo.toml
@@ -20,6 +20,7 @@ rustc_fluent_macro = { path = "../rustc_fluent_macro" }
rustc_lexer = { path = "../rustc_lexer" }
rustc_lint_defs = { path = "../rustc_lint_defs" }
rustc_macros = { path = "../rustc_macros" }
rustc_middle = { path = "../rustc_middle" }
rustc_parse = { path = "../rustc_parse" }
rustc_serialize = { path = "../rustc_serialize" }
rustc_session = { path = "../rustc_session" }
15 changes: 14 additions & 1 deletion compiler/rustc_expand/src/base.rs
@@ -15,6 +15,7 @@ use rustc_data_structures::sync::{self, Lrc};
use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed, PResult};
use rustc_feature::Features;
use rustc_lint_defs::{BufferedEarlyLint, RegisteredTools};
use rustc_middle::expand::TcxMacroExpander;
use rustc_parse::parser::Parser;
use rustc_parse::MACRO_ARGUMENTS;
use rustc_session::config::CollapseMacroDebuginfo;
@@ -676,6 +677,11 @@ pub enum SyntaxExtensionKind {
Box<dyn TTMacroExpander + sync::DynSync + sync::DynSend>,
),

TcxLegacyBang(
/// An expander with signature TokenStream -> AST.
Lrc<dyn TcxMacroExpander + sync::DynSync + sync::DynSend>,
),

/// A token-based attribute macro.
Attr(
/// An expander with signature (TokenStream, TokenStream) -> TokenStream.
@@ -754,7 +760,8 @@ impl SyntaxExtension {
match self.kind {
SyntaxExtensionKind::Bang(..)
| SyntaxExtensionKind::LegacyBang(..)
| SyntaxExtensionKind::GlobDelegation(..) => MacroKind::Bang,
| SyntaxExtensionKind::GlobDelegation(..)
| SyntaxExtensionKind::TcxLegacyBang(..) => MacroKind::Bang,
SyntaxExtensionKind::Attr(..)
| SyntaxExtensionKind::LegacyAttr(..)
| SyntaxExtensionKind::NonMacroAttr => MacroKind::Attr,
@@ -1072,6 +1079,12 @@ pub trait ResolverExpand {
trait_def_id: DefId,
impl_def_id: LocalDefId,
) -> Result<Vec<(Ident, Option<Ident>)>, Indeterminate>;

fn expand_legacy_bang(
&self,
invoc_id: LocalExpnId,
current_expansion: LocalExpnId,
) -> Result<(TokenStream, usize), (Span, ErrorGuaranteed)>;
}
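
The new `ResolverExpand::expand_legacy_bang` method gives expansion a route to request a (potentially cached) `macro_rules!` expansion by invocation id; on success it returns the expanded `TokenStream` together with the index of the arm that matched, which the caller in expand.rs below uses for unused-rule tracking via `record_macro_rule_usage`. A toy sketch of that contract with stand-in types follows; how the resolver actually implements it (presumably by forwarding to the new `expand_legacy_bang` query) is not shown in this section.

use std::collections::HashMap;

// Toy stand-ins for the real rustc types.
type TokenStream = Vec<String>;
type Span = u32;
type LocalExpnId = u32;
#[derive(Debug, Clone, Copy)]
struct ErrorGuaranteed;

trait ResolverExpandLike {
    // Ok: (expanded tokens, index of the matched macro_rules! arm).
    fn expand_legacy_bang(
        &self,
        invoc_id: LocalExpnId,
        current_expansion: LocalExpnId,
    ) -> Result<(TokenStream, usize), (Span, ErrorGuaranteed)>;
}

// A table-backed toy implementation; the real one would consult the query system.
struct TableResolver {
    expansions: HashMap<LocalExpnId, (TokenStream, usize)>,
}

impl ResolverExpandLike for TableResolver {
    fn expand_legacy_bang(
        &self,
        invoc_id: LocalExpnId,
        _current_expansion: LocalExpnId,
    ) -> Result<(TokenStream, usize), (Span, ErrorGuaranteed)> {
        self.expansions.get(&invoc_id).cloned().ok_or((0, ErrorGuaranteed))
    }
}

fn main() {
    let mut expansions = HashMap::new();
    expansions.insert(1, (vec!["1".to_string(), "+".to_string(), "1".to_string()], 0));
    let resolver = TableResolver { expansions };
    let (tokens, arm) = resolver.expand_legacy_bang(1, 0).unwrap();
    println!("arm {arm} matched, expansion: {}", tokens.join(" "));
}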

pub trait LintStoreExpand {
94 changes: 93 additions & 1 deletion compiler/rustc_expand/src/expand.rs
@@ -12,13 +12,16 @@ use rustc_ast::visit::{self, try_visit, walk_list, AssocCtxt, Visitor, VisitorRe
use rustc_ast::{
AssocItemKind, AstNodeWrapper, AttrArgs, AttrStyle, AttrVec, ExprKind, ForeignItemKind,
HasAttrs, HasNodeId, Inline, ItemKind, MacStmtStyle, MetaItemKind, ModKind, NestedMetaItem,
NodeId, PatKind, StmtKind, TyKind,
NodeId, PatKind, StmtKind, TyKind, DUMMY_NODE_ID,
};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::Lrc;
use rustc_errors::PResult;
use rustc_feature::Features;
use rustc_middle::ty::TyCtxt;
use rustc_parse::parser::{
AttemptLocalParseRecovery, CommaRecoveryMode, ForceCollect, Parser, RecoverColon, RecoverComma,
};
@@ -40,6 +43,7 @@ use crate::errors::{
WrongFragmentKind,
};
use crate::mbe::diagnostics::annotate_err_with_kind;
use crate::mbe::macro_rules::{trace_macros_note, ParserAnyMacro};
use crate::module::{mod_dir_path, parse_external_mod, DirOwnership, ParsedExternalMod};
use crate::placeholders::{placeholder, PlaceholderExpander};

@@ -394,6 +398,37 @@ pub struct MacroExpander<'a, 'b> {
monotonic: bool, // cf. `cx.monotonic_expander()`
}

#[tracing::instrument(level = "debug", skip(tcx))]
pub fn expand_legacy_bang<'tcx>(
tcx: TyCtxt<'tcx>,
key: (LocalExpnId, LocalExpnId, Fingerprint),
) -> Result<(&'tcx TokenStream, usize), (Span, ErrorGuaranteed)> {
use tracing::debug;

let (invoc_id, current_expansion, arg_fingerprint) = key;

let map = tcx.macro_map.borrow();
let (arg, span, expander) = map.get(&invoc_id).as_ref().unwrap();
debug!(?arg);

// this (i.e., debug-printing `span`) somehow made the test pass??
// tracing::debug!(?span);

let arg_hash: Fingerprint = tcx.with_stable_hashing_context(|mut hcx| {
let mut hasher = StableHasher::new();
arg.flattened().hash_stable(&mut hcx, &mut hasher);
hasher.finish()
});

// sanity-check, to make sure we're not running for (maybe) old arguments
// that were loaded from the cache. this would certainly be a bug.
assert_eq!(arg_fingerprint, arg_hash);

expander
.expand(&tcx.sess, *span, arg.clone(), current_expansion)
.map(|(tts, i)| (tcx.arena.alloc(tts) as &TokenStream, i))
}
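
`expand_legacy_bang` above is keyed on `(invocation, current expansion, fingerprint of the arguments)` and re-hashes the arguments it finds in `tcx.macro_map`, asserting that they match the fingerprint in the key before expanding. A small standalone sketch of that sanity check follows; a plain `HashMap` and std's `DefaultHasher` stand in for `macro_map` and `StableHasher` (the std hasher is not stable across processes, which is exactly why rustc uses `StableHasher`, so treat this purely as an illustration).

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

type Fingerprint = u64;

// Stand-in for hashing a TokenStream with StableHasher.
fn fingerprint_of(args: &[String]) -> Fingerprint {
    let mut hasher = DefaultHasher::new();
    args.hash(&mut hasher);
    hasher.finish()
}

// Stand-in for the query provider: the key carries the expected fingerprint,
// and the provider re-hashes what it actually found before expanding it.
fn expand_cached(
    macro_map: &HashMap<u32, Vec<String>>,
    key: (u32, Fingerprint),
) -> Result<String, String> {
    let (invoc_id, expected) = key;
    let args = macro_map
        .get(&invoc_id)
        .ok_or_else(|| format!("no arguments recorded for invocation {invoc_id}"))?;

    // Sanity check: expanding arguments that don't match the key's fingerprint
    // (e.g. stale data loaded from the cache) would certainly be a bug.
    assert_eq!(expected, fingerprint_of(args));

    Ok(args.join(" "))
}

fn main() {
    let mut macro_map = HashMap::new();
    macro_map.insert(0, vec!["a".to_string(), "+".to_string(), "b".to_string()]);
    let key = (0, fingerprint_of(&macro_map[&0]));
    println!("{}", expand_cached(&macro_map, key).unwrap());
}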

impl<'a, 'b> MacroExpander<'a, 'b> {
pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self {
MacroExpander { cx, monotonic }
@@ -679,6 +714,63 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
Err(guar) => return ExpandResult::Ready(fragment_kind.dummy(span, guar)),
}
}
SyntaxExtensionKind::TcxLegacyBang(expander) => {
if self.cx.trace_macros() {
let msg = format!(
"expanding `{}! {{ {} }}`",
expander.name(),
pprust::tts_to_string(&mac.args.tokens)
);
trace_macros_note(&mut self.cx.expansions, span, msg);
}

// Macros defined in the current crate have a real node id,
// whereas macros from an external crate have a dummy id.
let tok_result: Box<dyn MacResult> = match self
.cx
.resolver
.expand_legacy_bang(invoc.expansion_data.id, self.cx.current_expansion.id)
{
Ok((tts, i)) => {
if self.cx.trace_macros() {
let msg = format!("to `{}`", pprust::tts_to_string(&tts));
trace_macros_note(&mut self.cx.expansions, span, msg);
}
let is_local = expander.node_id() != DUMMY_NODE_ID;
if is_local {
self.cx.resolver.record_macro_rule_usage(expander.node_id(), i);
}

// Let the context choose how to interpret the result.
// Weird, but useful for X-macros.
Box::new(ParserAnyMacro::new(
Parser::new(&self.cx.sess.psess, tts.clone(), None),
// Pass along the original expansion site and the name of the macro,
// so we can print a useful error message if the parse of the expanded
// macro leaves unparsed tokens.
span,
expander.name(),
self.cx.current_expansion.lint_node_id,
self.cx.current_expansion.is_trailing_mac,
expander.arm_span(i),
is_local,
))
}
Err((span, guar)) => {
self.cx.trace_macros_diag();
DummyResult::any(span, guar)
}
};
let result = if let Some(result) = fragment_kind.make_from(tok_result) {
result
} else {
let guar = self.error_wrong_fragment_kind(fragment_kind, &mac, span);
fragment_kind.dummy(span, guar)
};
result
}
SyntaxExtensionKind::LegacyBang(expander) => {
let tok_result = match expander.expand(self.cx, span, mac.args.tokens.clone()) {
ExpandResult::Ready(tok_result) => tok_result,
5 changes: 5 additions & 0 deletions compiler/rustc_expand/src/lib.rs
@@ -26,6 +26,7 @@ mod placeholders;
mod proc_macro_server;

pub use mbe::macro_rules::compile_declarative_macro;
use rustc_middle::query::Providers;
pub mod base;
pub mod config;
pub mod expand;
@@ -34,4 +35,8 @@ pub mod module;
#[allow(rustc::untranslatable_diagnostic)]
pub mod proc_macro;

pub fn provide(providers: &mut Providers) {
providers.expand_legacy_bang = expand::expand_legacy_bang;
}

rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
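
For context, `provide` follows the compiler's usual provider-registration pattern: the crate fills its function into the shared `Providers` table so that calls to the `expand_legacy_bang` query are answered by `expand::expand_legacy_bang`. A toy sketch of that pattern follows, with a hypothetical one-field table standing in for rustc's `Providers`.

// Toy stand-in for rustc's Providers table: a struct of function pointers
// that individual crates overwrite for the queries they implement.
struct Providers {
    expand_legacy_bang: fn(u32) -> String,
}

impl Default for Providers {
    fn default() -> Self {
        Providers {
            // Placeholder that a registered provider is expected to replace.
            expand_legacy_bang: |_| panic!("`expand_legacy_bang` provider not registered"),
        }
    }
}

// Analogue of rustc_expand::provide: plug this crate's implementation in.
fn provide(providers: &mut Providers) {
    providers.expand_legacy_bang = |invoc_id| format!("expansion of invocation {invoc_id}");
}

fn main() {
    let mut providers = Providers::default();
    provide(&mut providers);
    println!("{}", (providers.expand_legacy_bang)(42));
}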
51 changes: 24 additions & 27 deletions compiler/rustc_expand/src/mbe/diagnostics.rs
@@ -3,42 +3,40 @@ use std::borrow::Cow;
use rustc_ast::token::{self, Token, TokenKind};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast_pretty::pprust;
use rustc_errors::{Applicability, Diag, DiagMessage};
use rustc_errors::{Applicability, Diag, DiagCtxtHandle, DiagMessage};
use rustc_macros::Subdiagnostic;
use rustc_parse::parser::{Parser, Recovery};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::SourceMap;
use rustc_span::symbol::Ident;
use rustc_span::{ErrorGuaranteed, Span};
use tracing::debug;

use super::macro_rules::{parser_from_cx, NoopTracker};
use crate::base::{DummyResult, ExtCtxt, MacResult};
use crate::expand::{parse_ast_fragment, AstFragmentKind};
use crate::mbe::macro_parser::ParseResult::*;
use crate::mbe::macro_parser::{MatcherLoc, NamedParseResult, TtParser};
use crate::mbe::macro_rules::{try_match_macro, Tracker};

pub(super) fn failed_to_match_macro<'cx>(
cx: &'cx mut ExtCtxt<'_>,
pub(crate) fn failed_to_match_macro(
psess: &ParseSess,
sp: Span,
def_span: Span,
name: Ident,
arg: TokenStream,
lhses: &[Vec<MatcherLoc>],
) -> Box<dyn MacResult + 'cx> {
let psess = &cx.sess.psess;

) -> (Span, ErrorGuaranteed) {
// An error occurred, try the expansion again, tracking the expansion closely for better
// diagnostics.
let mut tracker = CollectTrackerAndEmitter::new(cx, sp);
let mut tracker = CollectTrackerAndEmitter::new(psess.dcx(), sp);

let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut tracker);

if try_success_result.is_ok() {
// Nonterminal parser recovery might turn failed matches into successful ones,
// but for that it must have emitted an error already
assert!(
tracker.cx.dcx().has_errors().is_some(),
tracker.dcx.has_errors().is_some(),
"Macro matching returned a success on the second try"
);
}
@@ -50,15 +48,15 @@

let Some(BestFailure { token, msg: label, remaining_matcher, .. }) = tracker.best_failure
else {
return DummyResult::any(sp, cx.dcx().span_delayed_bug(sp, "failed to match a macro"));
return (sp, psess.dcx().span_delayed_bug(sp, "failed to match a macro"));
};

let span = token.span.substitute_dummy(sp);

let mut err = cx.dcx().struct_span_err(span, parse_failure_msg(&token, None));
let mut err = psess.dcx().struct_span_err(span, parse_failure_msg(&token, None));
err.span_label(span, label);
if !def_span.is_dummy() && !cx.source_map().is_imported(def_span) {
err.span_label(cx.source_map().guess_head_span(def_span), "when calling this macro");
if !def_span.is_dummy() && !psess.source_map().is_imported(def_span) {
err.span_label(psess.source_map().guess_head_span(def_span), "when calling this macro");
}

annotate_doc_comment(&mut err, psess.source_map(), span);
@@ -76,7 +74,7 @@
err.note("captured metavariables except for `:tt`, `:ident` and `:lifetime` cannot be compared to other tokens");
err.note("see <https://doc.rust-lang.org/nightly/reference/macros-by-example.html#forwarding-a-matched-fragment> for more information");

if !def_span.is_dummy() && !cx.source_map().is_imported(def_span) {
if !def_span.is_dummy() && !psess.source_map().is_imported(def_span) {
err.help("try using `:tt` instead in the macro definition");
}
}
@@ -104,18 +102,17 @@
}
}
let guar = err.emit();
cx.trace_macros_diag();
DummyResult::any(sp, guar)
(sp, guar)
}

/// The tracker used for the slow error path that collects useful info for diagnostics.
struct CollectTrackerAndEmitter<'a, 'cx, 'matcher> {
cx: &'a mut ExtCtxt<'cx>,
struct CollectTrackerAndEmitter<'dcx, 'matcher> {
dcx: DiagCtxtHandle<'dcx>,
remaining_matcher: Option<&'matcher MatcherLoc>,
/// Which arm's failure should we report? (the one furthest along)
best_failure: Option<BestFailure>,
root_span: Span,
result: Option<Box<dyn MacResult + 'cx>>,
result: Option<(Span, ErrorGuaranteed)>,
}

struct BestFailure {
@@ -131,7 +128,7 @@ impl BestFailure {
}
}

impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx, 'matcher> {
impl<'dcx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'dcx, 'matcher> {
type Failure = (Token, u32, &'static str);

fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure {
@@ -151,7 +148,7 @@ impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx,
Success(_) => {
// Nonterminal parser recovery might turn failed matches into successful ones,
// but for that it must have emitted an error already
self.cx.dcx().span_delayed_bug(
self.dcx.span_delayed_bug(
self.root_span,
"should not collect detailed info for successful macro match",
);
@@ -177,10 +174,10 @@ impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx,
}
Error(err_sp, msg) => {
let span = err_sp.substitute_dummy(self.root_span);
let guar = self.cx.dcx().span_err(span, msg.clone());
self.result = Some(DummyResult::any(span, guar));
let guar = self.dcx.span_err(span, msg.clone());
self.result = Some((span, guar));
}
ErrorReported(guar) => self.result = Some(DummyResult::any(self.root_span, *guar)),
ErrorReported(guar) => self.result = Some((self.root_span, *guar)),
}
}

@@ -193,9 +190,9 @@ impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx,
}
}

impl<'a, 'cx> CollectTrackerAndEmitter<'a, 'cx, '_> {
fn new(cx: &'a mut ExtCtxt<'cx>, root_span: Span) -> Self {
Self { cx, remaining_matcher: None, best_failure: None, root_span, result: None }
impl<'dcx> CollectTrackerAndEmitter<'dcx, '_> {
fn new(dcx: DiagCtxtHandle<'dcx>, root_span: Span) -> Self {
Self { dcx, remaining_matcher: None, best_failure: None, root_span, result: None }
}
}
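
The net effect of the diagnostics rewrite is to shrink what the slow error path borrows: `failed_to_match_macro` no longer needs a whole `&mut ExtCtxt` (which is unavailable inside a query) and instead takes the `ParseSess`, reports through its `DiagCtxtHandle`, and returns a bare `(Span, ErrorGuaranteed)`; the caller in expand.rs wraps that pair in `DummyResult::any` and calls `trace_macros_diag` itself. A standalone sketch of that shape follows, with simplified stand-in types rather than rustc's own.

// Simplified stand-ins for Span, ErrorGuaranteed and DiagCtxtHandle.
#[derive(Clone, Copy, Debug)]
struct Span(u32);

#[derive(Clone, Copy, Debug)]
struct ErrorGuaranteed;

struct DiagCtxtHandle;

impl DiagCtxtHandle {
    fn span_err(&self, span: Span, msg: &str) -> ErrorGuaranteed {
        eprintln!("error at {span:?}: {msg}");
        ErrorGuaranteed
    }
}

// Before: fn failed_to_match_macro(cx: &mut ExtCtxt<'_>, ...) -> Box<dyn MacResult>
// After:  borrow only the diagnostics handle and return the raw (span, error) pair,
//         so the function can run where no expansion context exists.
fn failed_to_match_macro(dcx: &DiagCtxtHandle, sp: Span, name: &str) -> (Span, ErrorGuaranteed) {
    let guar = dcx.span_err(sp, &format!("no rules in macro `{name}` matched this invocation"));
    (sp, guar)
}

fn main() {
    let dcx = DiagCtxtHandle;
    let (span, _guar) = failed_to_match_macro(&dcx, Span(7), "vec");
    println!("reported failure at {span:?}");
}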
