From 2a7ae04a6872edd8a1bffa620fde53a2eb2964e1 Mon Sep 17 00:00:00 2001 From: mark Date: Wed, 11 Jul 2018 20:54:12 -0500 Subject: [PATCH 1/7] Extend ParseSess to support buffering lints --- src/librustc/lint/builtin.rs | 10 +++++++++ src/librustc/lint/mod.rs | 9 +++++++++ src/librustc_driver/driver.rs | 8 ++++++++ src/libsyntax/early_buffered_lints.rs | 29 +++++++++++++++++++++++++++ src/libsyntax/lib.rs | 2 ++ src/libsyntax/parse/mod.rs | 23 +++++++++++++++++++-- 6 files changed, 79 insertions(+), 2 deletions(-) create mode 100644 src/libsyntax/early_buffered_lints.rs diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 47c5f464131af..495b4d32e0651 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -331,6 +331,15 @@ declare_lint! { via the module system" } +/// Some lints that are buffered from `libsyntax`. See `syntax::early_buffered_lints`. +pub mod parser { + declare_lint! { + pub QUESTION_MARK_MACRO_SEP, + Warn, + "detects the use of `?` as a macro separator" + } +} + /// Does nothing as a lint pass, but registers some `Lint`s /// which are used by other parts of the compiler. 
#[derive(Copy, Clone)] @@ -389,6 +398,7 @@ impl LintPass for HardwiredLints { WHERE_CLAUSES_OBJECT_SAFETY, PROC_MACRO_DERIVE_RESOLUTION_FALLBACK, MACRO_USE_EXTERN_CRATE, + parser::QUESTION_MARK_MACRO_SEP, ) } } diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index c0f3c351d2627..a5c82aa63034f 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -38,10 +38,12 @@ use hir::def_id::{CrateNum, LOCAL_CRATE}; use hir::intravisit; use hir; use lint::builtin::BuiltinLintDiagnostics; +use lint::builtin::parser::QUESTION_MARK_MACRO_SEP; use session::{Session, DiagnosticMessageId}; use std::{hash, ptr}; use syntax::ast; use syntax::codemap::{MultiSpan, ExpnFormat}; +use syntax::early_buffered_lints::BufferedEarlyLintId; use syntax::edition::Edition; use syntax::symbol::Symbol; use syntax::visit as ast_visit; @@ -86,6 +88,13 @@ pub struct Lint { } impl Lint { + /// Returns the `rust::lint::Lint` for a `syntax::early_buffered_lints::BufferedEarlyLintId`. + pub fn from_parser_lint_id(lint_id: BufferedEarlyLintId) -> &'static Self { + match lint_id { + BufferedEarlyLintId::QuestionMarkMacroSep => QUESTION_MARK_MACRO_SEP, + } + } + /// Get the lint's name, with ASCII letters converted to lowercase. 
pub fn name_lower(&self) -> String { self.name.to_ascii_lowercase() diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index c016a131507e8..3e14ec6f8d490 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -52,6 +52,7 @@ use std::path::{Path, PathBuf}; use rustc_data_structures::sync::{self, Lrc, Lock}; use std::sync::mpsc; use syntax::{self, ast, attr, diagnostics, visit}; +use syntax::early_buffered_lints::BufferedEarlyLint; use syntax::ext::base::ExtCtxt; use syntax::fold::Folder; use syntax::parse::{self, PResult}; @@ -696,6 +697,13 @@ pub fn phase_1_parse_input<'a>( hir_stats::print_ast_stats(&krate, "PRE EXPANSION AST STATS"); } + // Add all buffered lints from the `ParseSess` to the `Session`. + let mut parse_sess_buffered = sess.parse_sess.buffered_lints.borrow_mut(); + for BufferedEarlyLint{id, span, msg, lint_id} in parse_sess_buffered.drain(..) { + let lint = lint::Lint::from_parser_lint_id(lint_id); + sess.buffer_lint(lint, id, span, &msg); + } + Ok(krate) } diff --git a/src/libsyntax/early_buffered_lints.rs b/src/libsyntax/early_buffered_lints.rs new file mode 100644 index 0000000000000..204e07625adef --- /dev/null +++ b/src/libsyntax/early_buffered_lints.rs @@ -0,0 +1,29 @@ +//! Allows the buffering of lints for later. +//! +//! Since we cannot have a dependency on `librustc`, we implement some types here that are somewhat +//! redundant. Later, these types can be converted to types for use by the rest of the compiler. + +use syntax::ast::NodeId; +use syntax_pos::MultiSpan; + +/// Since we cannot import `LintId`s from `rustc::lint`, we define some Ids here which can later be +/// passed to `rustc::lint::Lint::from_parser_lint_id` to get a `rustc::lint::Lint`. +pub enum BufferedEarlyLintId { + /// Usage of `?` as a macro separator is deprecated. + QuestionMarkMacroSep, +} + +/// Stores buffered lint info which can later be passed to `librustc`. 
+pub struct BufferedEarlyLint { + /// The span of code that we are linting on. + pub span: MultiSpan, + + /// The lint message. + pub msg: String, + + /// The `NodeId` of the AST node that generated the lint. + pub id: NodeId, + + /// A lint Id that can be passed to `rustc::lint::Lint::from_parser_lint_id`. + pub lint_id: BufferedEarlyLintId, +} diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index ffaad9bf94c18..d241ae1d44227 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -181,6 +181,8 @@ pub mod ext { } } +pub mod early_buffered_lints; + #[cfg(test)] mod test_snippet; diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 1754e5f1b9ad8..5dbf569766e5a 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -11,9 +11,10 @@ //! The main parser interface use rustc_data_structures::sync::{Lrc, Lock}; -use ast::{self, CrateConfig}; +use ast::{self, CrateConfig, NodeId}; +use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId}; use codemap::{CodeMap, FilePathMapping}; -use syntax_pos::{Span, FileMap, FileName}; +use syntax_pos::{Span, FileMap, FileName, MultiSpan}; use errors::{Handler, ColorConfig, DiagnosticBuilder}; use feature_gate::UnstableFeatures; use parse::parser::Parser; @@ -57,6 +58,7 @@ pub struct ParseSess { /// Used to determine and report recursive mod inclusions included_mod_stack: Lock>, code_map: Lrc, + pub buffered_lints: Lock>, } impl ParseSess { @@ -80,12 +82,29 @@ impl ParseSess { included_mod_stack: Lock::new(vec![]), code_map, non_modrs_mods: Lock::new(vec![]), + buffered_lints: Lock::new(vec![]), } } pub fn codemap(&self) -> &CodeMap { &self.code_map } + + pub fn buffer_lint>(&self, + lint_id: BufferedEarlyLintId, + span: S, + id: NodeId, + msg: &str, + ) { + self.buffered_lints + .borrow_mut() + .push(BufferedEarlyLint{ + span: span.into(), + id, + msg: msg.into(), + lint_id, + }); + } } #[derive(Clone)] From 8eb4941e30d2a40bc03840dd0d99beb5aaf8159d Mon Sep 17 00:00:00 
2001 From: mark Date: Fri, 15 Jun 2018 21:49:00 -0500 Subject: [PATCH 2/7] Implement 2015 vs 2018 `?` kleene op + test --- src/librustc/lint/mod.rs | 5 +- src/librustc_lint/builtin.rs | 4 +- src/libsyntax/ext/expand.rs | 18 +- src/libsyntax/ext/tt/macro_rules.rs | 462 +++++++++++------- src/libsyntax/ext/tt/quoted.rs | 263 +++++++--- src/test/parse-fail/issue-33569.rs | 2 +- .../auxiliary/procedural_mbe_matching.rs | 7 +- src/test/run-pass/macro-at-most-once-rep.rs | 49 +- ...feature-gate-macro_at_most_once_rep.stderr | 11 - src/test/ui/issue-39388.rs | 2 +- ...ost-once-rep-2015-ques-rep-feature-flag.rs | 28 ++ ...once-rep-2015-ques-rep-feature-flag.stderr | 18 + .../macro-at-most-once-rep-2015-ques-rep.rs} | 18 +- ...acro-at-most-once-rep-2015-ques-rep.stderr | 18 + .../macro-at-most-once-rep-2015-ques-sep.rs | 34 ++ ...acro-at-most-once-rep-2015-ques-sep.stderr | 12 + ...acro-at-most-once-rep-2018-feature-gate.rs | 45 ++ ...-at-most-once-rep-2018-feature-gate.stderr | 71 +++ .../ui/macros/macro-at-most-once-rep-2018.rs | 53 ++ .../macros/macro-at-most-once-rep-2018.stderr | 74 +++ .../ui/macros/macro-at-most-once-rep-ambig.rs | 53 -- .../macro-at-most-once-rep-ambig.stderr | 80 --- 22 files changed, 862 insertions(+), 465 deletions(-) delete mode 100644 src/test/ui/feature-gate-macro_at_most_once_rep.stderr create mode 100644 src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep-feature-flag.rs create mode 100644 src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep-feature-flag.stderr rename src/test/ui/{feature-gate-macro_at_most_once_rep.rs => macros/macro-at-most-once-rep-2015-ques-rep.rs} (54%) create mode 100644 src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep.stderr create mode 100644 src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.rs create mode 100644 src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.stderr create mode 100644 src/test/ui/macros/macro-at-most-once-rep-2018-feature-gate.rs create mode 100644 
src/test/ui/macros/macro-at-most-once-rep-2018-feature-gate.stderr create mode 100644 src/test/ui/macros/macro-at-most-once-rep-2018.rs create mode 100644 src/test/ui/macros/macro-at-most-once-rep-2018.stderr delete mode 100644 src/test/ui/macros/macro-at-most-once-rep-ambig.rs delete mode 100644 src/test/ui/macros/macro-at-most-once-rep-ambig.stderr diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index a5c82aa63034f..3c1b205620892 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -127,7 +127,7 @@ macro_rules! declare_lint { }; ); ($vis: vis $NAME: ident, $Level: ident, $desc: expr, - $lint_edition: expr => $edition_level: ident $(,)? + $lint_edition: expr => $edition_level: ident ) => ( $vis static $NAME: &$crate::lint::Lint = &$crate::lint::Lint { name: stringify!($NAME), @@ -142,7 +142,8 @@ macro_rules! declare_lint { /// Declare a static `LintArray` and return it as an expression. #[macro_export] macro_rules! lint_array { - ($( $lint:expr ),* $(,)?) => {{ + ($( $lint:expr ),* ,) => { lint_array!( $($lint),* ) }; + ($( $lint:expr ),*) => {{ vec![$($lint),*] }} } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index 3ffbdc7b7dc3a..e6aa7c0d16c6a 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -617,7 +617,7 @@ declare_lint! { pub ANONYMOUS_PARAMETERS, Allow, "detects anonymous parameters", - Edition::Edition2018 => Warn, + Edition::Edition2018 => Warn } /// Checks for use of anonymous parameters (RFC 1685) @@ -1706,7 +1706,7 @@ impl LintPass for SoftLints { UNIONS_WITH_DROP_FIELDS, UNREACHABLE_PUB, TYPE_ALIAS_BOUNDS, - TRIVIAL_BOUNDS, + TRIVIAL_BOUNDS ) } } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index dc461d0a15d4b..b84046d105051 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -44,8 +44,10 @@ macro_rules! 
ast_fragments { ( $($Kind:ident($AstTy:ty) { $kind_name:expr; - $(one fn $fold_ast:ident; fn $visit_ast:ident;)? - $(many fn $fold_ast_elt:ident; fn $visit_ast_elt:ident;)? + // FIXME: HACK: this should be `$(one ...)?` and `$(many ...)?` but `?` macro + // repetition was removed from 2015 edition in #51587 because of ambiguities. + $(one fn $fold_ast:ident; fn $visit_ast:ident;)* + $(many fn $fold_ast_elt:ident; fn $visit_ast_elt:ident;)* fn $make_ast:ident; })* ) => { @@ -100,11 +102,11 @@ macro_rules! ast_fragments { AstFragment::OptExpr(expr) => AstFragment::OptExpr(expr.and_then(|expr| folder.fold_opt_expr(expr))), $($(AstFragment::$Kind(ast) => - AstFragment::$Kind(folder.$fold_ast(ast)),)?)* + AstFragment::$Kind(folder.$fold_ast(ast)),)*)* $($(AstFragment::$Kind(ast) => AstFragment::$Kind(ast.into_iter() .flat_map(|ast| folder.$fold_ast_elt(ast)) - .collect()),)?)* + .collect()),)*)* } } @@ -112,10 +114,10 @@ macro_rules! ast_fragments { match *self { AstFragment::OptExpr(Some(ref expr)) => visitor.visit_expr(expr), AstFragment::OptExpr(None) => {} - $($(AstFragment::$Kind(ref ast) => visitor.$visit_ast(ast),)?)* + $($(AstFragment::$Kind(ref ast) => visitor.$visit_ast(ast),)*)* $($(AstFragment::$Kind(ref ast) => for ast_elt in &ast[..] { visitor.$visit_ast_elt(ast_elt); - })?)* + })*)* } } } @@ -126,10 +128,10 @@ macro_rules! ast_fragments { } $($(fn $fold_ast(&mut self, ast: $AstTy) -> $AstTy { self.expand_fragment(AstFragment::$Kind(ast)).$make_ast() - })?)* + })*)* $($(fn $fold_ast_elt(&mut self, ast_elt: <$AstTy as IntoIterator>::Item) -> $AstTy { self.expand_fragment(AstFragment::$Kind(SmallVector::one(ast_elt))).$make_ast() - })?)* + })*)* } impl<'a> MacResult for ::ext::tt::macro_rules::ParserAnyMacro<'a> { diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 9ebead1062ebc..05e59d6b47cc0 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -8,28 +8,33 @@ // option. 
This file may not be copied, modified, or distributed // except according to those terms. -use {ast, attr}; -use syntax_pos::{Span, DUMMY_SP}; use edition::Edition; -use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension}; -use ext::base::{NormalTT, TTMacroExpander}; -use ext::expand::{AstFragment, AstFragmentKind}; -use ext::tt::macro_parser::{Success, Error, Failure}; -use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; -use ext::tt::macro_parser::{parse, parse_failure_msg}; -use ext::tt::quoted; -use ext::tt::transcribe::transcribe; +use ext::{ + base::{DummyResult, ExtCtxt, MacResult, NormalTT, SyntaxExtension, TTMacroExpander}, + expand::{AstFragment, AstFragmentKind}, + tt::{ + macro_parser::{ + parse, parse_failure_msg, Error, Failure, MatchedNonterminal, MatchedSeq, Success, + }, + quoted, + transcribe::transcribe, + }, +}; use feature_gate::{self, emit_feature_err, Features, GateIssue}; -use parse::{Directory, ParseSess}; -use parse::parser::Parser; -use parse::token::{self, NtTT}; -use parse::token::Token::*; +use parse::{ + parser::Parser, + token::{self, NtTT, Token::*}, + Directory, ParseSess, +}; use symbol::Symbol; +use syntax_pos::{Span, DUMMY_SP}; use tokenstream::{TokenStream, TokenTree}; +use {ast, attr}; -use std::borrow::Cow; -use std::collections::HashMap; -use std::collections::hash_map::Entry; +use std::{ + borrow::Cow, + collections::{hash_map::Entry, HashMap}, +}; use rustc_data_structures::sync::Lrc; @@ -39,12 +44,16 @@ pub struct ParserAnyMacro<'a> { /// Span of the expansion site of the macro this parser is for site_span: Span, /// The ident of the macro we're parsing - macro_ident: ast::Ident + macro_ident: ast::Ident, } impl<'a> ParserAnyMacro<'a> { pub fn make(mut self: Box>, kind: AstFragmentKind) -> AstFragment { - let ParserAnyMacro { site_span, macro_ident, ref mut parser } = *self; + let ParserAnyMacro { + site_span, + macro_ident, + ref mut parser, + } = *self; let fragment = 
panictry!(parser.parse_ast_fragment(kind, true)); // We allow semicolons at the end of expressions -- e.g. the semicolon in @@ -77,17 +86,16 @@ impl TTMacroExpander for MacroRulesMacroExpander { if !self.valid { return DummyResult::any(sp); } - generic_extension(cx, - sp, - self.name, - input, - &self.lhses, - &self.rhses) + generic_extension(cx, sp, self.name, input, &self.lhses, &self.rhses) } } fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) { - let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp); + let sp = sp + .macro_backtrace() + .last() + .map(|trace| trace.call_site) + .unwrap_or(sp); let values: &mut Vec = cx.expansions.entry(sp).or_insert_with(Vec::new); values.push(message); } @@ -108,10 +116,11 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, let mut best_fail_spot = DUMMY_SP; let mut best_fail_tok = None; - for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers + for (i, lhs) in lhses.iter().enumerate() { + // try each arm's matchers let lhs_tt = match *lhs { quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..], - _ => cx.span_bug(sp, "malformed macro lhs") + _ => cx.span_bug(sp, "malformed macro lhs"), }; match TokenTree::parse(cx, lhs_tt, arg.clone()) { @@ -147,7 +156,11 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, ownership: cx.current_expansion.directory_ownership, }; let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false); - p.root_module_name = cx.current_expansion.module.mod_path.last() + p.root_module_name = cx + .current_expansion + .module + .mod_path + .last() .map(|id| id.as_str().to_string()); p.process_potential_macro_variable(); @@ -160,16 +173,14 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, // so we can print a useful error message if the parse of the expanded // macro leaves unparsed tokens. 
site_span: sp, - macro_ident: name - }) + macro_ident: name, + }); } Failure(sp, tok) => if sp.lo() >= best_fail_spot.lo() { best_fail_spot = sp; best_fail_tok = Some(tok); }, - Error(err_sp, ref msg) => { - cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]) - } + Error(err_sp, ref msg) => cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]), } } @@ -185,8 +196,12 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, // Holy self-referential! /// Converts a `macro_rules!` invocation into a syntax extension. -pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition: Edition) - -> SyntaxExtension { +pub fn compile( + sess: &ParseSess, + features: &Features, + def: &ast::Item, + edition: Edition, +) -> SyntaxExtension { let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs")); let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs")); @@ -202,33 +217,47 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition: // ...quasiquoting this would be nice. 
// These spans won't matter, anyways let argument_gram = vec![ - quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition { - tts: vec![ - quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")), - quoted::TokenTree::Token(DUMMY_SP, token::FatArrow), - quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")), - ], - separator: Some(if body.legacy { token::Semi } else { token::Comma }), - op: quoted::KleeneOp::OneOrMore, - num_captures: 2, - })), + quoted::TokenTree::Sequence( + DUMMY_SP, + Lrc::new(quoted::SequenceRepetition { + tts: vec![ + quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")), + quoted::TokenTree::Token(DUMMY_SP, token::FatArrow), + quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")), + ], + separator: Some(if body.legacy { + token::Semi + } else { + token::Comma + }), + op: quoted::KleeneOp::OneOrMore, + num_captures: 2, + }), + ), // to phase into semicolon-termination instead of semicolon-separation - quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition { - tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)], - separator: None, - op: quoted::KleeneOp::ZeroOrMore, - num_captures: 0 - })), + quoted::TokenTree::Sequence( + DUMMY_SP, + Lrc::new(quoted::SequenceRepetition { + tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)], + separator: None, + op: quoted::KleeneOp::ZeroOrMore, + num_captures: 0, + }), + ), ]; let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) { Success(m) => m, Failure(sp, tok) => { let s = parse_failure_msg(tok); - sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise(); + sess.span_diagnostic + .span_fatal(sp.substitute_dummy(def.span), &s) + .raise(); } Error(sp, s) => { - sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise(); + sess.span_diagnostic + .span_fatal(sp.substitute_dummy(def.span), &s) + .raise(); } }; @@ 
-236,35 +265,57 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition: // Extract the arguments: let lhses = match *argument_map[&lhs_nm] { - MatchedSeq(ref s, _) => { - s.iter().map(|m| { + MatchedSeq(ref s, _) => s + .iter() + .map(|m| { if let MatchedNonterminal(ref nt) = *m { if let NtTT(ref tt) = **nt { - let tt = quoted::parse(tt.clone().into(), true, sess, features, &def.attrs) - .pop().unwrap(); + let tt = quoted::parse( + tt.clone().into(), + true, + sess, + features, + &def.attrs, + edition, + ).pop() + .unwrap(); valid &= check_lhs_nt_follows(sess, features, &def.attrs, &tt); return tt; } } - sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") - }).collect::>() - } - _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") + sess.span_diagnostic + .span_bug(def.span, "wrong-structured lhs") + }) + .collect::>(), + _ => sess + .span_diagnostic + .span_bug(def.span, "wrong-structured lhs"), }; let rhses = match *argument_map[&rhs_nm] { - MatchedSeq(ref s, _) => { - s.iter().map(|m| { + MatchedSeq(ref s, _) => s + .iter() + .map(|m| { if let MatchedNonterminal(ref nt) = *m { if let NtTT(ref tt) = **nt { - return quoted::parse(tt.clone().into(), false, sess, features, &def.attrs) - .pop().unwrap(); + return quoted::parse( + tt.clone().into(), + false, + sess, + features, + &def.attrs, + edition, + ).pop() + .unwrap(); } } - sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") - }).collect::>() - } - _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs") + sess.span_diagnostic + .span_bug(def.span, "wrong-structured lhs") + }) + .collect::>(), + _ => sess + .span_diagnostic + .span_bug(def.span, "wrong-structured rhs"), }; for rhs in &rhses { @@ -293,14 +344,14 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition: } } - let unstable_feature = attr::find_stability(&sess.span_diagnostic, - &def.attrs, def.span).and_then(|stability| { - if let 
attr::StabilityLevel::Unstable { issue, .. } = stability.level { - Some((stability.feature, issue)) - } else { - None - } - }); + let unstable_feature = attr::find_stability(&sess.span_diagnostic, &def.attrs, def.span) + .and_then(|stability| { + if let attr::StabilityLevel::Unstable { issue, .. } = stability.level { + Some((stability.feature, issue)) + } else { + None + } + }); NormalTT { expander, @@ -323,10 +374,12 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition: } } -fn check_lhs_nt_follows(sess: &ParseSess, - features: &Features, - attrs: &[ast::Attribute], - lhs: "ed::TokenTree) -> bool { +fn check_lhs_nt_follows( + sess: &ParseSess, + features: &Features, + attrs: &[ast::Attribute], + lhs: "ed::TokenTree, +) -> bool { // lhs is going to be like TokenTree::Delimited(...), where the // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens. if let quoted::TokenTree::Delimited(_, ref tts) = *lhs { @@ -351,15 +404,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { return false; }, TokenTree::Sequence(span, ref seq) => { - if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| { - match *seq_tt { - TokenTree::MetaVarDecl(_, _, id) => id.name == "vis", - TokenTree::Sequence(_, ref sub_seq) => - sub_seq.op == quoted::KleeneOp::ZeroOrMore, - _ => false, + if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| match *seq_tt { + TokenTree::MetaVarDecl(_, _, id) => id.name == "vis", + TokenTree::Sequence(_, ref sub_seq) => { + sub_seq.op == quoted::KleeneOp::ZeroOrMore } + _ => false, }) { - sess.span_diagnostic.span_err(span, "repetition matches empty token tree"); + sess.span_diagnostic + .span_err(span, "repetition matches empty token tree"); return false; } if !check_lhs_no_empty_seq(sess, &seq.tts) { @@ -375,15 +428,19 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { fn check_rhs(sess: &ParseSess, rhs: "ed::TokenTree) -> bool { 
match *rhs { quoted::TokenTree::Delimited(..) => return true, - _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited") + _ => sess + .span_diagnostic + .span_err(rhs.span(), "macro rhs must be delimited"), } false } -fn check_matcher(sess: &ParseSess, - features: &Features, - attrs: &[ast::Attribute], - matcher: &[quoted::TokenTree]) -> bool { +fn check_matcher( + sess: &ParseSess, + features: &Features, + attrs: &[ast::Attribute], + matcher: &[quoted::TokenTree], +) -> bool { let first_sets = FirstSets::new(matcher); let empty_suffix = TokenSet::empty(); let err = sess.span_diagnostic.err_count(); @@ -417,7 +474,9 @@ impl FirstSets { fn new(tts: &[quoted::TokenTree]) -> FirstSets { use self::quoted::TokenTree; - let mut sets = FirstSets { first: HashMap::new() }; + let mut sets = FirstSets { + first: HashMap::new(), + }; build_recur(&mut sets, tts); return sets; @@ -456,8 +515,9 @@ impl FirstSets { // If the sequence contents can be empty, then the first // token could be the separator token itself. - if let (Some(ref sep), true) = (seq_rep.separator.clone(), - subfirst.maybe_empty) { + if let (Some(ref sep), true) = + (seq_rep.separator.clone(), subfirst.maybe_empty) + { first.add_one_maybe(TokenTree::Token(sp, sep.clone())); } @@ -465,7 +525,10 @@ impl FirstSets { if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore { // If sequence is potentially empty, then // union them (preserving first emptiness). - first.add_all(&TokenSet { maybe_empty: true, ..subfirst }); + first.add_all(&TokenSet { + maybe_empty: true, + ..subfirst + }); } else { // Otherwise, sequence guaranteed // non-empty; replace first. @@ -499,19 +562,18 @@ impl FirstSets { TokenTree::Sequence(sp, ref seq_rep) => { match self.first.get(&sp) { Some(&Some(ref subfirst)) => { - // If the sequence contents can be empty, then the first // token could be the separator token itself. 
- if let (Some(ref sep), true) = (seq_rep.separator.clone(), - subfirst.maybe_empty) { + if let (Some(ref sep), true) = + (seq_rep.separator.clone(), subfirst.maybe_empty) + { first.add_one_maybe(TokenTree::Token(sp, sep.clone())); } assert!(first.maybe_empty); first.add_all(subfirst); - if subfirst.maybe_empty || - seq_rep.op == quoted::KleeneOp::ZeroOrMore { + if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore { // continue scanning for more first // tokens, but also make sure we // restore empty-tracking state @@ -559,12 +621,20 @@ struct TokenSet { impl TokenSet { // Returns a set for the empty sequence. - fn empty() -> Self { TokenSet { tokens: Vec::new(), maybe_empty: true } } + fn empty() -> Self { + TokenSet { + tokens: Vec::new(), + maybe_empty: true, + } + } // Returns the set `{ tok }` for the single-token (and thus // non-empty) sequence [tok]. fn singleton(tok: quoted::TokenTree) -> Self { - TokenSet { tokens: vec![tok], maybe_empty: false } + TokenSet { + tokens: vec![tok], + maybe_empty: false, + } } // Changes self to be the set `{ tok }`. @@ -628,12 +698,14 @@ impl TokenSet { // // Requires that `first_sets` is pre-computed for `matcher`; // see `FirstSets::new`. -fn check_matcher_core(sess: &ParseSess, - features: &Features, - attrs: &[ast::Attribute], - first_sets: &FirstSets, - matcher: &[quoted::TokenTree], - follow: &TokenSet) -> TokenSet { +fn check_matcher_core( + sess: &ParseSess, + features: &Features, + attrs: &[ast::Attribute], + first_sets: &FirstSets, + matcher: &[quoted::TokenTree], + follow: &TokenSet, +) -> TokenSet { use self::quoted::TokenTree; let mut last = TokenSet::empty(); @@ -643,11 +715,13 @@ fn check_matcher_core(sess: &ParseSess, // then ensure T can also be followed by any element of FOLLOW. 
'each_token: for i in 0..matcher.len() { let token = &matcher[i]; - let suffix = &matcher[i+1..]; + let suffix = &matcher[i + 1..]; let build_suffix_first = || { let mut s = first_sets.first(suffix); - if s.maybe_empty { s.add_all(follow); } + if s.maybe_empty { + s.add_all(follow); + } s }; @@ -663,9 +737,12 @@ fn check_matcher_core(sess: &ParseSess, let can_be_followed_by_any; if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, attrs, token) { let msg = format!("invalid fragment specifier `{}`", bad_frag); - sess.span_diagnostic.struct_span_err(token.span(), &msg) - .help("valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, \ - `pat`, `ty`, `literal`, `path`, `meta`, `tt`, `item` and `vis`") + sess.span_diagnostic + .struct_span_err(token.span(), &msg) + .help( + "valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, \ + `pat`, `ty`, `literal`, `path`, `meta`, `tt`, `item` and `vis`", + ) .emit(); // (This eliminates false positives and duplicates // from error messages.) @@ -719,12 +796,8 @@ fn check_matcher_core(sess: &ParseSess, // At this point, `suffix_first` is built, and // `my_suffix` is some TokenSet that we can use // for checking the interior of `seq_rep`. - let next = check_matcher_core(sess, - features, - attrs, - first_sets, - &seq_rep.tts, - my_suffix); + let next = + check_matcher_core(sess, features, attrs, first_sets, &seq_rep.tts, my_suffix); if next.maybe_empty { last.add_all(&next); } else { @@ -746,16 +819,17 @@ fn check_matcher_core(sess: &ParseSess, for next_token in &suffix_first.tokens { match is_in_follow(next_token, &frag_spec.as_str()) { Err((msg, help)) => { - sess.span_diagnostic.struct_span_err(next_token.span(), &msg) - .help(help).emit(); + sess.span_diagnostic + .struct_span_err(next_token.span(), &msg) + .help(help) + .emit(); // don't bother reporting every source of // conflict for a particular element of `last`. 
continue 'each_last; } Ok(true) => {} Ok(false) => { - let may_be = if last.tokens.len() == 1 && - suffix_first.tokens.len() == 1 + let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1 { "is" } else { @@ -764,12 +838,14 @@ fn check_matcher_core(sess: &ParseSess, sess.span_diagnostic.span_err( next_token.span(), - &format!("`${name}:{frag}` {may_be} followed by `{next}`, which \ - is not allowed for `{frag}` fragments", - name=name, - frag=frag_spec, - next=quoted_tt_to_string(next_token), - may_be=may_be) + &format!( + "`${name}:{frag}` {may_be} followed by `{next}`, which \ + is not allowed for `{frag}` fragments", + name = name, + frag = frag_spec, + next = quoted_tt_to_string(next_token), + may_be = may_be + ), ); } } @@ -834,16 +910,16 @@ fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> Result { // anything can follow block, the braces provide an easy boundary to // maintain Ok(true) - }, - "stmt" | "expr" => match *tok { + } + "stmt" | "expr" => match *tok { TokenTree::Token(_, ref tok) => match *tok { FatArrow | Comma | Semi => Ok(true), - _ => Ok(false) + _ => Ok(false), }, _ => Ok(false), }, @@ -851,16 +927,23 @@ fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> Result match *tok { FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true), Ident(i, false) if i.name == "if" || i.name == "in" => Ok(true), - _ => Ok(false) + _ => Ok(false), }, _ => Ok(false), }, "path" | "ty" => match *tok { TokenTree::Token(_, ref tok) => match *tok { - OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) | - Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true), + OpenDelim(token::DelimToken::Brace) + | OpenDelim(token::DelimToken::Bracket) + | Comma + | FatArrow + | Colon + | Eq + | Gt + | Semi + | BinOp(token::Or) => Ok(true), Ident(i, false) if i.name == "as" || i.name == "where" => Ok(true), - _ => Ok(false) + _ => Ok(false), }, TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true), _ => 
Ok(false), @@ -868,43 +951,49 @@ fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> Result { // being a single token, idents and lifetimes are harmless Ok(true) - }, + } "literal" => { // literals may be of a single token, or two tokens (negative numbers) Ok(true) - }, + } "meta" | "tt" => { // being either a single token or a delimited sequence, tt is // harmless Ok(true) - }, + } "vis" => { // Explicitly disallow `priv`, on the off chance it comes back. match *tok { TokenTree::Token(_, ref tok) => match *tok { Comma => Ok(true), Ident(i, is_raw) if is_raw || i.name != "priv" => Ok(true), - ref tok => Ok(tok.can_begin_type()) + ref tok => Ok(tok.can_begin_type()), }, - TokenTree::MetaVarDecl(_, _, frag) if frag.name == "ident" - || frag.name == "ty" - || frag.name == "path" => Ok(true), - _ => Ok(false) + TokenTree::MetaVarDecl(_, _, frag) + if frag.name == "ident" || frag.name == "ty" || frag.name == "path" => + { + Ok(true) + } + _ => Ok(false), } - }, + } "" => Ok(true), // keywords::Invalid - _ => Err((format!("invalid fragment specifier `{}`", frag), - "valid fragment specifiers are `ident`, `block`, \ - `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt`, \ - `literal`, `item` and `vis`")) + _ => Err(( + format!("invalid fragment specifier `{}`", frag), + "valid fragment specifiers are `ident`, `block`, \ + `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt`, \ + `literal`, `item` and `vis`", + )), } } } -fn has_legal_fragment_specifier(sess: &ParseSess, - features: &Features, - attrs: &[ast::Attribute], - tok: "ed::TokenTree) -> Result<(), String> { +fn has_legal_fragment_specifier( + sess: &ParseSess, + features: &Features, + attrs: &[ast::Attribute], + tok: "ed::TokenTree, +) -> Result<(), String> { debug!("has_legal_fragment_specifier({:?})", tok); if let quoted::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok { let frag_name = frag_spec.as_str(); @@ -916,38 +1005,45 @@ fn has_legal_fragment_specifier(sess: &ParseSess, Ok(()) } -fn 
is_legal_fragment_specifier(sess: &ParseSess, - features: &Features, - attrs: &[ast::Attribute], - frag_name: &str, - frag_span: Span) -> bool { +fn is_legal_fragment_specifier( + sess: &ParseSess, + features: &Features, + attrs: &[ast::Attribute], + frag_name: &str, + frag_span: Span, +) -> bool { match frag_name { - "item" | "block" | "stmt" | "expr" | "pat" | "lifetime" | - "path" | "ty" | "ident" | "meta" | "tt" | "" => true, + "item" | "block" | "stmt" | "expr" | "pat" | "lifetime" | "path" | "ty" | "ident" + | "meta" | "tt" | "" => true, "literal" => { - if !features.macro_literal_matcher && - !attr::contains_name(attrs, "allow_internal_unstable") { + if !features.macro_literal_matcher + && !attr::contains_name(attrs, "allow_internal_unstable") + { let explain = feature_gate::EXPLAIN_LITERAL_MATCHER; - emit_feature_err(sess, - "macro_literal_matcher", - frag_span, - GateIssue::Language, - explain); + emit_feature_err( + sess, + "macro_literal_matcher", + frag_span, + GateIssue::Language, + explain, + ); } true - }, + } "vis" => { - if !features.macro_vis_matcher && - !attr::contains_name(attrs, "allow_internal_unstable") { + if !features.macro_vis_matcher && !attr::contains_name(attrs, "allow_internal_unstable") + { let explain = feature_gate::EXPLAIN_VIS_MATCHER; - emit_feature_err(sess, - "macro_vis_matcher", - frag_span, - GateIssue::Language, - explain); + emit_feature_err( + sess, + "macro_vis_matcher", + frag_span, + GateIssue::Language, + explain, + ); } true - }, + } _ => false, } } @@ -957,7 +1053,9 @@ fn quoted_tt_to_string(tt: "ed::TokenTree) -> String { quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok), quoted::TokenTree::MetaVar(_, name) => format!("${}", name), quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), - _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \ - in follow set checker"), + _ => panic!( + "unexpected quoted::TokenTree::{{Sequence or Delimited}} \ + in 
follow set checker" + ), } } diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index d21ffabb62e3b..1bca6dd065323 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -8,17 +8,17 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use {ast, attr}; use ext::tt::macro_parser; use feature_gate::{self, emit_feature_err, Features, GateIssue}; use parse::{token, ParseSess}; use print::pprust; use symbol::keywords; -use syntax_pos::{BytePos, Span}; +use syntax_pos::{edition::Edition, BytePos, Span, DUMMY_SP}; use tokenstream; +use {ast, attr}; -use std::iter::Peekable; use rustc_data_structures::sync::Lrc; +use std::iter::Peekable; /// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note /// that the delimiter itself might be `NoDelim`. @@ -174,6 +174,7 @@ impl TokenTree { /// - `sess`: the parsing session. Any errors will be emitted to this session. /// - `features`, `attrs`: language feature flags and attributes so that we know whether to use /// unstable features or not. +/// - `edition`: which edition are we in. /// /// # Returns /// @@ -184,6 +185,7 @@ pub fn parse( sess: &ParseSess, features: &Features, attrs: &[ast::Attribute], + edition: Edition, ) -> Vec { // Will contain the final collection of `self::TokenTree` let mut result = Vec::new(); @@ -194,7 +196,15 @@ pub fn parse( while let Some(tree) = trees.next() { // Given the parsed tree, if there is a metavar and we are expecting matchers, actually // parse out the matcher (i.e. in `$id:ident` this would parse the `:` and `ident`). 
- let tree = parse_tree(tree, &mut trees, expect_matchers, sess, features, attrs); + let tree = parse_tree( + tree, + &mut trees, + expect_matchers, + sess, + features, + attrs, + edition, + ); match tree { TokenTree::MetaVar(start_sp, ident) if expect_matchers => { let span = match trees.next() { @@ -207,11 +217,13 @@ pub fn parse( } _ => end_sp, }, - tree => tree.as_ref() + tree => tree + .as_ref() .map(tokenstream::TokenTree::span) .unwrap_or(span), }, - tree => tree.as_ref() + tree => tree + .as_ref() .map(tokenstream::TokenTree::span) .unwrap_or(start_sp), }; @@ -252,6 +264,7 @@ fn parse_tree( sess: &ParseSess, features: &Features, attrs: &[ast::Attribute], + edition: Edition, ) -> TokenTree where I: Iterator, @@ -270,9 +283,17 @@ where sess.span_diagnostic.span_err(span, &msg); } // Parse the contents of the sequence itself - let sequence = parse(delimited.tts.into(), expect_matchers, sess, features, attrs); + let sequence = parse( + delimited.tts.into(), + expect_matchers, + sess, + features, + attrs, + edition, + ); // Get the Kleene operator and optional separator - let (separator, op) = parse_sep_and_kleene_op(trees, span, sess, features, attrs); + let (separator, op) = + parse_sep_and_kleene_op(trees, span, sess, features, attrs, edition); // Count the number of captured "names" (i.e. named metavars) let name_captures = macro_parser::count_names(&sequence); TokenTree::Sequence( @@ -322,7 +343,14 @@ where span, Lrc::new(Delimited { delim: delimited.delim, - tts: parse(delimited.tts.into(), expect_matchers, sess, features, attrs), + tts: parse( + delimited.tts.into(), + expect_matchers, + sess, + features, + attrs, + edition, + ), }), ), } @@ -341,22 +369,23 @@ fn kleene_op(token: &token::Token) -> Option { /// Parse the next token tree of the input looking for a KleeneOp. 
Returns /// -/// - Ok(Ok(op)) if the next token tree is a KleeneOp +/// - Ok(Ok((op, span))) if the next token tree is a KleeneOp /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp /// - Err(span) if the next token tree is not a token fn parse_kleene_op( input: &mut I, span: Span, -) -> Result, Span> +) -> Result, Span> where I: Iterator, { match input.next() { Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) { - Some(op) => Ok(Ok(op)), + Some(op) => Ok(Ok((op, span))), None => Ok(Err((tok, span))), }, - tree => Err(tree.as_ref() + tree => Err(tree + .as_ref() .map(tokenstream::TokenTree::span) .unwrap_or(span)), } @@ -374,12 +403,34 @@ where /// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene /// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an /// error with the appropriate span is emitted to `sess` and a dummy value is returned. +/// +/// NOTE: In 2015 edition, * and + are the only Kleene operators and `?` is a separator. In 2018, +/// `?` is a Kleene op and not a separator. fn parse_sep_and_kleene_op( input: &mut Peekable, span: Span, sess: &ParseSess, features: &Features, attrs: &[ast::Attribute], + edition: Edition, +) -> (Option, KleeneOp) +where + I: Iterator, +{ + match edition { + Edition::Edition2015 => parse_sep_and_kleene_op_2015(input, span, sess, features, attrs), + Edition::Edition2018 => parse_sep_and_kleene_op_2018(input, span, sess, features, attrs), + _ => unimplemented!(), + } +} + +// `?` is a separator (with a migration warning) and never a KleeneOp. 
+fn parse_sep_and_kleene_op_2015( + input: &mut Peekable, + span: Span, + sess: &ParseSess, + _features: &Features, + _attrs: &[ast::Attribute], ) -> (Option, KleeneOp) where I: Iterator, { @@ -388,14 +439,14 @@ where let span = match parse_kleene_op(input, span) { // #1 is a `+` or `*` KleeneOp // - // `?` is ambiguous: it could be a separator or a Kleene::ZeroOrOne, so we need to look - // ahead one more token to be sure. - Ok(Ok(op)) if op != KleeneOp::ZeroOrOne => return (None, op), - - // #1 is `?` token, but it could be a Kleene::ZeroOrOne without a separator or it could - // be a `?` separator followed by any Kleene operator. We need to look ahead 1 token to - // find out which. - Ok(Ok(op)) => { + // `?` is ambiguous: it could be a separator (warning) or a Kleene::ZeroOrOne (error), so + // we need to look ahead one more token to be sure. + Ok(Ok((op, _))) if op != KleeneOp::ZeroOrOne => return (None, op), + + // #1 is `?` token, but it could be a Kleene::ZeroOrOne (error in 2015) without a separator + // or it could be a `?` separator followed by any Kleene operator. We need to look ahead 1 + // token to find out which. + Ok(Ok((op, op1_span))) => { assert_eq!(op, KleeneOp::ZeroOrOne); // Lookahead at #2. If it is a KleenOp, then #1 is a separator. @@ -406,71 +457,147 @@ where }; if is_1_sep { - // #1 is a separator and #2 should be a KleepeOp::* + // #1 is a separator and #2 should be a KleeneOp. // (N.B. We need to advance the input iterator.) match parse_kleene_op(input, span) { - // #2 is a KleeneOp (this is the only valid option) :) - Ok(Ok(op)) if op == KleeneOp::ZeroOrOne => { - if !features.macro_at_most_once_rep - && !attr::contains_name(attrs, "allow_internal_unstable") - { - let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP; - emit_feature_err( - sess, - "macro_at_most_once_rep", - span, - GateIssue::Language, - explain, - ); - } + // #2 is `?`, which is not allowed as a Kleene op in 2015 edition.
+ Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => { + sess.span_diagnostic + .struct_span_err(op2_span, "expected `*` or `+`") + .note("`?` is not a macro repetition operator") + .emit(); + + // Return a dummy + return (None, KleeneOp::ZeroOrMore); + } + + // #2 is a Kleene op, which is the only valid option + Ok(Ok((op, _))) => { + // Warn that `?` as a separator will be deprecated + sess.span_diagnostic.span_warn( + op1_span, + "using `?` as a separator is deprecated and will be \ + a hard error in an upcoming edition", + ); + return (Some(token::Question), op); } - Ok(Ok(op)) => return (Some(token::Question), op), // #2 is a random token (this is an error) :( - Ok(Err((_, span))) => span, + Ok(Err((_, _))) => op1_span, // #2 is not even a token at all :( - Err(span) => span, + Err(_) => op1_span, } } else { - if !features.macro_at_most_once_rep - && !attr::contains_name(attrs, "allow_internal_unstable") - { - let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP; - emit_feature_err( - sess, - "macro_at_most_once_rep", - span, - GateIssue::Language, - explain, - ); - } + // `?` is not allowed as a Kleene op in 2015 + sess.span_diagnostic + .struct_span_err(op1_span, "expected `*` or `+`") + .note("`?` is not a macro repetition operator") + .emit(); + + // Return a dummy + return (None, KleeneOp::ZeroOrMore); + } + } + + // #1 is a separator followed by #2, a KleeneOp + Ok(Err((tok, span))) => match parse_kleene_op(input, span) { + // #2 is a `?`, which is not allowed as a Kleene op in 2015 edition.
+ Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => { + sess.span_diagnostic + .struct_span_err(op2_span, "expected `*` or `+`") + .note("`?` is not a macro repetition operator") + .emit(); + + // Return a dummy + return (None, KleeneOp::ZeroOrMore); + } + + // #2 is a KleeneOp :D + Ok(Ok((op, _))) => return (Some(tok), op), + + // #2 is a random token :( + Ok(Err((_, span))) => span, + + // #2 is not a token at all :( + Err(span) => span, + }, + + // #1 is not a token + Err(span) => span, + }; + + sess.span_diagnostic.span_err(span, "expected `*` or `+`"); + + // Return a dummy + (None, KleeneOp::ZeroOrMore) +} + +// `?` is a Kleene op, not a separator +fn parse_sep_and_kleene_op_2018( + input: &mut Peekable, + span: Span, + sess: &ParseSess, + features: &Features, + attrs: &[ast::Attribute], +) -> (Option, KleeneOp) +where + I: Iterator, +{ + // We basically look at two token trees here, denoted as #1 and #2 below + let span = match parse_kleene_op(input, span) { + // #1 is a `?` (needs feature gate) + Ok(Ok((op, op1_span))) if op == KleeneOp::ZeroOrOne => { + if !features.macro_at_most_once_rep + && !attr::contains_name(attrs, "allow_internal_unstable") + { + let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP; + emit_feature_err( + sess, + "macro_at_most_once_rep", + op1_span, + GateIssue::Language, + explain, + ); - // #2 is a random tree and #1 is KleeneOp::ZeroOrOne + op1_span + } else { return (None, op); } } + // #1 is a `+` or `*` KleeneOp + Ok(Ok((op, _))) => return (None, op), + // #1 is a separator followed by #2, a KleeneOp Ok(Err((tok, span))) => match parse_kleene_op(input, span) { - // #2 is a KleeneOp :D - Ok(Ok(op)) if op == KleeneOp::ZeroOrOne => { + // #2 is the `?` Kleene op, which does not take a separator (error) + Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => { + // Error! 
+ if !features.macro_at_most_once_rep && !attr::contains_name(attrs, "allow_internal_unstable") { - let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP; - emit_feature_err( - sess, - "macro_at_most_once_rep", + // FIXME: when `?` as a Kleene op is stabilized, we only need the "does not + // take a macro separator" error (i.e. the `else` case). + sess.span_diagnostic + .struct_span_err(op2_span, "expected `*` or `+`") + .note("`?` is not a macro repetition operator") + .emit(); + } else { + sess.span_diagnostic.span_err( span, - GateIssue::Language, - explain, + "the `?` macro repetition operator does not take a separator", ); } - return (Some(tok), op); + + // Return a dummy + return (None, KleeneOp::ZeroOrMore); } - Ok(Ok(op)) => return (Some(tok), op), + + // #2 is a KleeneOp :D + Ok(Ok((op, _))) => return (Some(tok), op), // #2 is a random token :( Ok(Err((_, span))) => span, @@ -483,13 +610,15 @@ where Err(span) => span, }; - if !features.macro_at_most_once_rep - && !attr::contains_name(attrs, "allow_internal_unstable") - { + // If we ever get to this point, we have experienced an "unexpected token" error + + if !features.macro_at_most_once_rep && !attr::contains_name(attrs, "allow_internal_unstable") { + sess.span_diagnostic.span_err(span, "expected `*` or `+`"); + } else { sess.span_diagnostic .span_err(span, "expected one of: `*`, `+`, or `?`"); - } else { - sess.span_diagnostic.span_err(span, "expected `*` or `+`"); } + + // Return a dummy (None, KleeneOp::ZeroOrMore) } diff --git a/src/test/parse-fail/issue-33569.rs b/src/test/parse-fail/issue-33569.rs index af90d0a83c926..15d491719a6d5 100644 --- a/src/test/parse-fail/issue-33569.rs +++ b/src/test/parse-fail/issue-33569.rs @@ -13,7 +13,7 @@ macro_rules! 
foo { { $+ } => { //~ ERROR expected identifier, found `+` //~^ ERROR missing fragment specifier - $(x)(y) //~ ERROR expected one of: `*`, `+`, or `?` + $(x)(y) //~ ERROR expected `*` or `+` } } diff --git a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs index fd8f7b9e384f3..664e9eeb3e04e 100644 --- a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs +++ b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs @@ -29,11 +29,9 @@ use syntax::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; use syntax::ext::tt::macro_parser::{Success, Failure, Error}; use syntax::ext::tt::macro_parser::parse_failure_msg; use syntax::ptr::P; -use syntax_pos::Span; +use syntax_pos::{Span, edition::Edition}; use rustc_plugin::Registry; -use std::cell::RefCell; - fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree]) -> Box { @@ -42,7 +40,8 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree]) true, cx.parse_sess, &Features::new(), - &[]); + &[], + Edition::Edition2015); let map = match TokenTree::parse(cx, &mbe_matcher, args.iter().cloned().collect()) { Success(map) => map, Failure(_, tok) => { diff --git a/src/test/run-pass/macro-at-most-once-rep.rs b/src/test/run-pass/macro-at-most-once-rep.rs index b7e942f938321..dcf2222ba6dea 100644 --- a/src/test/run-pass/macro-at-most-once-rep.rs +++ b/src/test/run-pass/macro-at-most-once-rep.rs @@ -18,6 +18,8 @@ // // This test focuses on non-error cases and making sure the correct number of repetitions happen. +// compile-flags: --edition=2018 + #![feature(macro_at_most_once_rep)] macro_rules! foo { @@ -32,57 +34,10 @@ macro_rules! foo { } } } -macro_rules! baz { - ($($a:ident),? ; $num:expr) => { { // comma separator is meaningless for `?` - let mut x = 0; - - $( - x += $a; - )? - - assert_eq!(x, $num); - } } -} - -macro_rules! 
barplus { - ($($a:ident)?+ ; $num:expr) => { { - let mut x = 0; - - $( - x += $a; - )+ - - assert_eq!(x, $num); - } } -} - -macro_rules! barstar { - ($($a:ident)?* ; $num:expr) => { { - let mut x = 0; - - $( - x += $a; - )* - - assert_eq!(x, $num); - } } -} - pub fn main() { let a = 1; // accept 0 or 1 repetitions foo!( ; 0); foo!(a ; 1); - baz!( ; 0); - baz!(a ; 1); - - // Make sure using ? as a separator works as before - barplus!(a ; 1); - barplus!(a?a ; 2); - barplus!(a?a?a ; 3); - barstar!( ; 0); - barstar!(a ; 1); - barstar!(a?a ; 2); - barstar!(a?a?a ; 3); } diff --git a/src/test/ui/feature-gate-macro_at_most_once_rep.stderr b/src/test/ui/feature-gate-macro_at_most_once_rep.stderr deleted file mode 100644 index 9ca71d937f859..0000000000000 --- a/src/test/ui/feature-gate-macro_at_most_once_rep.stderr +++ /dev/null @@ -1,11 +0,0 @@ -error[E0658]: using the `?` macro Kleene operator for "at most one" repetition is unstable (see issue #48075) - --> $DIR/feature-gate-macro_at_most_once_rep.rs:14:20 - | -LL | macro_rules! m { ($(a)?) => {} } - | ^^^ - | - = help: add #![feature(macro_at_most_once_rep)] to the crate attributes to enable - -error: aborting due to previous error - -For more information about this error, try `rustc --explain E0658`. diff --git a/src/test/ui/issue-39388.rs b/src/test/ui/issue-39388.rs index 6da049374086a..15eef429eab97 100644 --- a/src/test/ui/issue-39388.rs +++ b/src/test/ui/issue-39388.rs @@ -11,7 +11,7 @@ #![allow(unused_macros)] macro_rules! 
assign { - (($($a:tt)*) = ($($b:tt))*) => { //~ ERROR expected one of: `*`, `+`, or `?` + (($($a:tt)*) = ($($b:tt))*) => { //~ ERROR expected `*` or `+` $($a)* = $($b)* } } diff --git a/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep-feature-flag.rs b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep-feature-flag.rs new file mode 100644 index 0000000000000..fd7925ea3eeeb --- /dev/null +++ b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep-feature-flag.rs @@ -0,0 +1,28 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test behavior of `?` macro _kleene op_ under the 2015 edition. Namely, it doesn't exist, even +// with the feature flag. + +// gate-test-macro_at_most_once_rep +// compile-flags: --edition=2015 + +#![feature(macro_at_most_once_rep)] + +macro_rules! bar { + ($(a)?) => {} //~ERROR expected `*` or `+` +} + +macro_rules! baz { + ($(a),?) => {} //~ERROR expected `*` or `+` +} + +fn main() {} + diff --git a/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep-feature-flag.stderr b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep-feature-flag.stderr new file mode 100644 index 0000000000000..5f687900421bf --- /dev/null +++ b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep-feature-flag.stderr @@ -0,0 +1,18 @@ +error: expected `*` or `+` + --> $DIR/macro-at-most-once-rep-2015-ques-rep-feature-flag.rs:20:10 + | +LL | ($(a)?) => {} //~ERROR expected `*` or `+` + | ^ + | + = note: `?` is not a macro repetition operator + +error: expected `*` or `+` + --> $DIR/macro-at-most-once-rep-2015-ques-rep-feature-flag.rs:24:11 + | +LL | ($(a),?) 
=> {} //~ERROR expected `*` or `+` + | ^ + | + = note: `?` is not a macro repetition operator + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/feature-gate-macro_at_most_once_rep.rs b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep.rs similarity index 54% rename from src/test/ui/feature-gate-macro_at_most_once_rep.rs rename to src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep.rs index bdce1952a9a21..90bc19739b872 100644 --- a/src/test/ui/feature-gate-macro_at_most_once_rep.rs +++ b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep.rs @@ -1,4 +1,4 @@ -// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -8,12 +8,16 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// Test that `?` macro Kleene operator can not be used when the `macro_at_most_once_rep` feature -// gate is not used. +// Test behavior of `?` macro _kleene op_ under the 2015 edition. Namely, it doesn't exist. -macro_rules! m { ($(a)?) => {} } -//~^ ERROR using the `?` macro Kleene operator for "at most one" repetition is unstable +// compile-flags: --edition=2015 -fn main() { - m!(); +macro_rules! bar { + ($(a)?) => {} //~ERROR expected `*` or `+` } + +macro_rules! baz { + ($(a),?) => {} //~ERROR expected `*` or `+` +} + +fn main() {} diff --git a/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep.stderr b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep.stderr new file mode 100644 index 0000000000000..8681b5d5be54e --- /dev/null +++ b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-rep.stderr @@ -0,0 +1,18 @@ +error: expected `*` or `+` + --> $DIR/macro-at-most-once-rep-2015-ques-rep.rs:16:10 + | +LL | ($(a)?) 
=> {} //~ERROR expected `*` or `+` + | ^ + | + = note: `?` is not a macro repetition operator + +error: expected `*` or `+` + --> $DIR/macro-at-most-once-rep-2015-ques-rep.rs:20:11 + | +LL | ($(a),?) => {} //~ERROR expected `*` or `+` + | ^ + | + = note: `?` is not a macro repetition operator + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.rs b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.rs new file mode 100644 index 0000000000000..4b3dfbdc2e8a5 --- /dev/null +++ b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.rs @@ -0,0 +1,34 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test behavior of `?` macro _separator_ under the 2015 edition. Namely, `?` can be used as a +// separator, but you get a migration warning for the edition. + +// compile-flags: --edition=2015 +// compile-pass + +macro_rules! bar { + ($(a)?*) => {} //~WARN using `?` as a separator +} + +macro_rules! 
baz { + ($(a)?+) => {} //~WARN using `?` as a separator +} + +fn main() { + bar!(); + bar!(a); + bar!(a?a); + bar!(a?a?a?a?a); + + baz!(a); + baz!(a?a); + baz!(a?a?a?a?a); +} diff --git a/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.stderr b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.stderr new file mode 100644 index 0000000000000..0ab4138864e00 --- /dev/null +++ b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.stderr @@ -0,0 +1,12 @@ +warning: using `?` as a separator is deprecated and will be a hard error in an upcoming edition + --> $DIR/macro-at-most-once-rep-2015-ques-sep.rs:18:10 + | +LL | ($(a)?*) => {} //~WARN using `?` as a separator + | ^ + +warning: using `?` as a separator is deprecated and will be a hard error in an upcoming edition + --> $DIR/macro-at-most-once-rep-2015-ques-sep.rs:22:10 + | +LL | ($(a)?+) => {} //~WARN using `?` as a separator + | ^ + diff --git a/src/test/ui/macros/macro-at-most-once-rep-2018-feature-gate.rs b/src/test/ui/macros/macro-at-most-once-rep-2018-feature-gate.rs new file mode 100644 index 0000000000000..f3107d4f1e4f6 --- /dev/null +++ b/src/test/ui/macros/macro-at-most-once-rep-2018-feature-gate.rs @@ -0,0 +1,45 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Feature gate test for macro_at_most_once_rep under 2018 edition. + +// gate-test-macro_at_most_once_rep +// compile-flags: --edition=2018 + +macro_rules! foo { + ($(a)?) => {} + //~^ERROR using the `?` macro Kleene operator for + //~|ERROR expected `*` or `+` +} + +macro_rules! baz { + ($(a),?) => {} //~ERROR expected `*` or `+` +} + +macro_rules! 
barplus { + ($(a)?+) => {} + //~^ERROR using the `?` macro Kleene operator for + //~|ERROR expected `*` or `+` +} + +macro_rules! barstar { + ($(a)?*) => {} + //~^ERROR using the `?` macro Kleene operator for + //~|ERROR expected `*` or `+` +} + +pub fn main() { + foo!(); + foo!(a); + foo!(a?); //~ ERROR no rules expected the token `?` + foo!(a?a); //~ ERROR no rules expected the token `?` + foo!(a?a?a); //~ ERROR no rules expected the token `?` +} + diff --git a/src/test/ui/macros/macro-at-most-once-rep-2018-feature-gate.stderr b/src/test/ui/macros/macro-at-most-once-rep-2018-feature-gate.stderr new file mode 100644 index 0000000000000..22f1c94fced6f --- /dev/null +++ b/src/test/ui/macros/macro-at-most-once-rep-2018-feature-gate.stderr @@ -0,0 +1,71 @@ +error[E0658]: using the `?` macro Kleene operator for "at most one" repetition is unstable (see issue #48075) + --> $DIR/macro-at-most-once-rep-2018-feature-gate.rs:17:10 + | +LL | ($(a)?) => {} + | ^ + | + = help: add #![feature(macro_at_most_once_rep)] to the crate attributes to enable + +error: expected `*` or `+` + --> $DIR/macro-at-most-once-rep-2018-feature-gate.rs:17:10 + | +LL | ($(a)?) => {} + | ^ + +error: expected `*` or `+` + --> $DIR/macro-at-most-once-rep-2018-feature-gate.rs:23:11 + | +LL | ($(a),?) 
=> {} //~ERROR expected `*` or `+` + | ^ + | + = note: `?` is not a macro repetition operator + +error[E0658]: using the `?` macro Kleene operator for "at most one" repetition is unstable (see issue #48075) + --> $DIR/macro-at-most-once-rep-2018-feature-gate.rs:27:10 + | +LL | ($(a)?+) => {} + | ^ + | + = help: add #![feature(macro_at_most_once_rep)] to the crate attributes to enable + +error: expected `*` or `+` + --> $DIR/macro-at-most-once-rep-2018-feature-gate.rs:27:10 + | +LL | ($(a)?+) => {} + | ^ + +error[E0658]: using the `?` macro Kleene operator for "at most one" repetition is unstable (see issue #48075) + --> $DIR/macro-at-most-once-rep-2018-feature-gate.rs:33:10 + | +LL | ($(a)?*) => {} + | ^ + | + = help: add #![feature(macro_at_most_once_rep)] to the crate attributes to enable + +error: expected `*` or `+` + --> $DIR/macro-at-most-once-rep-2018-feature-gate.rs:33:10 + | +LL | ($(a)?*) => {} + | ^ + +error: no rules expected the token `?` + --> $DIR/macro-at-most-once-rep-2018-feature-gate.rs:41:11 + | +LL | foo!(a?); //~ ERROR no rules expected the token `?` + | ^ + +error: no rules expected the token `?` + --> $DIR/macro-at-most-once-rep-2018-feature-gate.rs:42:11 + | +LL | foo!(a?a); //~ ERROR no rules expected the token `?` + | ^ + +error: no rules expected the token `?` + --> $DIR/macro-at-most-once-rep-2018-feature-gate.rs:43:11 + | +LL | foo!(a?a?a); //~ ERROR no rules expected the token `?` + | ^ + +error: aborting due to 10 previous errors + +For more information about this error, try `rustc --explain E0658`. diff --git a/src/test/ui/macros/macro-at-most-once-rep-2018.rs b/src/test/ui/macros/macro-at-most-once-rep-2018.rs new file mode 100644 index 0000000000000..958a7e0cdf444 --- /dev/null +++ b/src/test/ui/macros/macro-at-most-once-rep-2018.rs @@ -0,0 +1,53 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Tests that `?` is a Kleene op and not a macro separator in the 2018 edition. + +// compile-flags: --edition=2018 + +#![feature(macro_at_most_once_rep)] + +macro_rules! foo { + ($(a)?) => {} +} + +macro_rules! baz { + ($(a),?) => {} //~ERROR the `?` macro repetition operator +} + +macro_rules! barplus { + ($(a)?+) => {} // ok. matches "a+" and "+" +} + +macro_rules! barstar { + ($(a)?*) => {} // ok. matches "a*" and "*" +} + +pub fn main() { + foo!(); + foo!(a); + foo!(a?); //~ ERROR no rules expected the token `?` + foo!(a?a); //~ ERROR no rules expected the token `?` + foo!(a?a?a); //~ ERROR no rules expected the token `?` + + barplus!(); //~ERROR unexpected end of macro invocation + barplus!(a); //~ERROR unexpected end of macro invocation + barplus!(a?); //~ ERROR no rules expected the token `?` + barplus!(a?a); //~ ERROR no rules expected the token `?` + barplus!(a+); + barplus!(+); + + barstar!(); //~ERROR unexpected end of macro invocation + barstar!(a); //~ERROR unexpected end of macro invocation + barstar!(a?); //~ ERROR no rules expected the token `?` + barstar!(a?a); //~ ERROR no rules expected the token `?` + barstar!(a*); + barstar!(*); +} diff --git a/src/test/ui/macros/macro-at-most-once-rep-2018.stderr b/src/test/ui/macros/macro-at-most-once-rep-2018.stderr new file mode 100644 index 0000000000000..0a15bdb10686d --- /dev/null +++ b/src/test/ui/macros/macro-at-most-once-rep-2018.stderr @@ -0,0 +1,74 @@ +error: the `?` macro repetition operator does not take a separator + --> $DIR/macro-at-most-once-rep-2018.rs:22:10 + | +LL | ($(a),?) 
=> {} //~ERROR the `?` macro repetition operator + | ^ + +error: no rules expected the token `?` + --> $DIR/macro-at-most-once-rep-2018.rs:36:11 + | +LL | foo!(a?); //~ ERROR no rules expected the token `?` + | ^ + +error: no rules expected the token `?` + --> $DIR/macro-at-most-once-rep-2018.rs:37:11 + | +LL | foo!(a?a); //~ ERROR no rules expected the token `?` + | ^ + +error: no rules expected the token `?` + --> $DIR/macro-at-most-once-rep-2018.rs:38:11 + | +LL | foo!(a?a?a); //~ ERROR no rules expected the token `?` + | ^ + +error: unexpected end of macro invocation + --> $DIR/macro-at-most-once-rep-2018.rs:40:5 + | +LL | barplus!(); //~ERROR unexpected end of macro invocation + | ^^^^^^^^^^^ + +error: unexpected end of macro invocation + --> $DIR/macro-at-most-once-rep-2018.rs:41:14 + | +LL | barplus!(a); //~ERROR unexpected end of macro invocation + | ^ + +error: no rules expected the token `?` + --> $DIR/macro-at-most-once-rep-2018.rs:42:15 + | +LL | barplus!(a?); //~ ERROR no rules expected the token `?` + | ^ + +error: no rules expected the token `?` + --> $DIR/macro-at-most-once-rep-2018.rs:43:15 + | +LL | barplus!(a?a); //~ ERROR no rules expected the token `?` + | ^ + +error: unexpected end of macro invocation + --> $DIR/macro-at-most-once-rep-2018.rs:47:5 + | +LL | barstar!(); //~ERROR unexpected end of macro invocation + | ^^^^^^^^^^^ + +error: unexpected end of macro invocation + --> $DIR/macro-at-most-once-rep-2018.rs:48:14 + | +LL | barstar!(a); //~ERROR unexpected end of macro invocation + | ^ + +error: no rules expected the token `?` + --> $DIR/macro-at-most-once-rep-2018.rs:49:15 + | +LL | barstar!(a?); //~ ERROR no rules expected the token `?` + | ^ + +error: no rules expected the token `?` + --> $DIR/macro-at-most-once-rep-2018.rs:50:15 + | +LL | barstar!(a?a); //~ ERROR no rules expected the token `?` + | ^ + +error: aborting due to 12 previous errors + diff --git a/src/test/ui/macros/macro-at-most-once-rep-ambig.rs 
b/src/test/ui/macros/macro-at-most-once-rep-ambig.rs deleted file mode 100644 index a5660f8b41f8d..0000000000000 --- a/src/test/ui/macros/macro-at-most-once-rep-ambig.rs +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// The logic for parsing Kleene operators in macros has a special case to disambiguate `?`. -// Specifically, `$(pat)?` is the ZeroOrOne operator whereas `$(pat)?+` or `$(pat)?*` are the -// ZeroOrMore and OneOrMore operators using `?` as a separator. These tests are intended to -// exercise that logic in the macro parser. -// -// Moreover, we also throw in some tests for using a separator with `?`, which is meaningless but -// included for consistency with `+` and `*`. -// -// This test focuses on error cases. - -#![feature(macro_at_most_once_rep)] - -macro_rules! foo { - ($(a)?) => {} -} - -macro_rules! baz { - ($(a),?) => {} // comma separator is meaningless for `?` -} - -macro_rules! barplus { - ($(a)?+) => {} -} - -macro_rules! 
barstar { - ($(a)?*) => {} -} - -pub fn main() { - foo!(a?a?a); //~ ERROR no rules expected the token `?` - foo!(a?a); //~ ERROR no rules expected the token `?` - foo!(a?); //~ ERROR no rules expected the token `?` - baz!(a?a?a); //~ ERROR no rules expected the token `?` - baz!(a?a); //~ ERROR no rules expected the token `?` - baz!(a?); //~ ERROR no rules expected the token `?` - baz!(a,); //~ ERROR unexpected end of macro invocation - baz!(a?a?a,); //~ ERROR no rules expected the token `?` - baz!(a?a,); //~ ERROR no rules expected the token `?` - baz!(a?,); //~ ERROR no rules expected the token `?` - barplus!(); //~ ERROR unexpected end of macro invocation - barplus!(a?); //~ ERROR unexpected end of macro invocation - barstar!(a?); //~ ERROR unexpected end of macro invocation -} diff --git a/src/test/ui/macros/macro-at-most-once-rep-ambig.stderr b/src/test/ui/macros/macro-at-most-once-rep-ambig.stderr deleted file mode 100644 index d382082a57585..0000000000000 --- a/src/test/ui/macros/macro-at-most-once-rep-ambig.stderr +++ /dev/null @@ -1,80 +0,0 @@ -error: no rules expected the token `?` - --> $DIR/macro-at-most-once-rep-ambig.rs:40:11 - | -LL | foo!(a?a?a); //~ ERROR no rules expected the token `?` - | ^ - -error: no rules expected the token `?` - --> $DIR/macro-at-most-once-rep-ambig.rs:41:11 - | -LL | foo!(a?a); //~ ERROR no rules expected the token `?` - | ^ - -error: no rules expected the token `?` - --> $DIR/macro-at-most-once-rep-ambig.rs:42:11 - | -LL | foo!(a?); //~ ERROR no rules expected the token `?` - | ^ - -error: no rules expected the token `?` - --> $DIR/macro-at-most-once-rep-ambig.rs:43:11 - | -LL | baz!(a?a?a); //~ ERROR no rules expected the token `?` - | ^ - -error: no rules expected the token `?` - --> $DIR/macro-at-most-once-rep-ambig.rs:44:11 - | -LL | baz!(a?a); //~ ERROR no rules expected the token `?` - | ^ - -error: no rules expected the token `?` - --> $DIR/macro-at-most-once-rep-ambig.rs:45:11 - | -LL | baz!(a?); //~ ERROR no rules 
expected the token `?` - | ^ - -error: unexpected end of macro invocation - --> $DIR/macro-at-most-once-rep-ambig.rs:46:11 - | -LL | baz!(a,); //~ ERROR unexpected end of macro invocation - | ^ - -error: no rules expected the token `?` - --> $DIR/macro-at-most-once-rep-ambig.rs:47:11 - | -LL | baz!(a?a?a,); //~ ERROR no rules expected the token `?` - | ^ - -error: no rules expected the token `?` - --> $DIR/macro-at-most-once-rep-ambig.rs:48:11 - | -LL | baz!(a?a,); //~ ERROR no rules expected the token `?` - | ^ - -error: no rules expected the token `?` - --> $DIR/macro-at-most-once-rep-ambig.rs:49:11 - | -LL | baz!(a?,); //~ ERROR no rules expected the token `?` - | ^ - -error: unexpected end of macro invocation - --> $DIR/macro-at-most-once-rep-ambig.rs:50:5 - | -LL | barplus!(); //~ ERROR unexpected end of macro invocation - | ^^^^^^^^^^^ - -error: unexpected end of macro invocation - --> $DIR/macro-at-most-once-rep-ambig.rs:51:15 - | -LL | barplus!(a?); //~ ERROR unexpected end of macro invocation - | ^ - -error: unexpected end of macro invocation - --> $DIR/macro-at-most-once-rep-ambig.rs:52:15 - | -LL | barstar!(a?); //~ ERROR unexpected end of macro invocation - | ^ - -error: aborting due to 13 previous errors - From 63c2d06a0d777f78048963cc55630631505de83b Mon Sep 17 00:00:00 2001 From: mark Date: Tue, 26 Jun 2018 23:59:01 -0500 Subject: [PATCH 3/7] update nightly book --- .../src/language-features/macro-at-most-once-rep.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/doc/unstable-book/src/language-features/macro-at-most-once-rep.md b/src/doc/unstable-book/src/language-features/macro-at-most-once-rep.md index ec9d85db107d4..251fc7209122c 100644 --- a/src/doc/unstable-book/src/language-features/macro-at-most-once-rep.md +++ b/src/doc/unstable-book/src/language-features/macro-at-most-once-rep.md @@ -1,13 +1,15 @@ # `macro_at_most_once_rep` -The tracking issue for this feature is: TODO(mark-i-m) +NOTE: This feature is only 
available in the 2018 Edition. + +The tracking issue for this feature is: #48075 With this feature gate enabled, one can use `?` as a Kleene operator meaning "0 or 1 repetitions" in a macro definition. Previously only `+` and `*` were allowed. For example: -```rust +```rust,ignore #![feature(macro_at_most_once_rep)] macro_rules! foo { From 5d872727e02d9cc67813fa7f00763e355d39ae06 Mon Sep 17 00:00:00 2001 From: mark Date: Mon, 2 Jul 2018 19:44:01 -0500 Subject: [PATCH 4/7] Fix test and errors --- src/librustc/macros.rs | 16 +- src/libsyntax/ext/tt/macro_rules.rs | 447 +++++++++++----------------- src/libsyntax/ext/tt/quoted.rs | 2 +- 3 files changed, 193 insertions(+), 272 deletions(-) diff --git a/src/librustc/macros.rs b/src/librustc/macros.rs index ccd9024f4aaab..0bf1f4decc430 100644 --- a/src/librustc/macros.rs +++ b/src/librustc/macros.rs @@ -71,7 +71,9 @@ macro_rules! __impl_stable_hash_field { #[macro_export] macro_rules! impl_stable_hash_for { - (enum $enum_name:path { $( $variant:ident $( ( $($field:ident $(-> $delegate:tt)?),* ) )* ),* $(,)? }) => { + // FIXME(mark-i-m): Some of these should be `?` rather than `*`. See the git blame and change + // them back when `?` is supported again. + (enum $enum_name:path { $( $variant:ident $( ( $($field:ident $(-> $delegate:tt)*),* ) )* ),* $(,)* }) => { impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $enum_name { #[inline] fn hash_stable(&self, @@ -83,14 +85,15 @@ macro_rules! impl_stable_hash_for { match *self { $( $variant $( ( $(ref $field),* ) )* => { - $($( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*)* + $($( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );*)* } )* } } } }; - (struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => { + // FIXME(mark-i-m): same here. 
+ (struct $struct_name:path { $($field:ident $(-> $delegate:tt)*),* $(,)* }) => { impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name { #[inline] fn hash_stable(&self, @@ -100,11 +103,12 @@ macro_rules! impl_stable_hash_for { $(ref $field),* } = *self; - $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );* + $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );* } } }; - (tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => { + // FIXME(mark-i-m): same here. + (tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)*),* $(,)* }) => { impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name { #[inline] fn hash_stable(&self, @@ -114,7 +118,7 @@ macro_rules! impl_stable_hash_for { $(ref $field),* ) = *self; - $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );* + $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)*) );* } } }; diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 05e59d6b47cc0..8912be5f69d11 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -8,33 +8,28 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
+use {ast, attr}; +use syntax_pos::{Span, DUMMY_SP}; use edition::Edition; -use ext::{ - base::{DummyResult, ExtCtxt, MacResult, NormalTT, SyntaxExtension, TTMacroExpander}, - expand::{AstFragment, AstFragmentKind}, - tt::{ - macro_parser::{ - parse, parse_failure_msg, Error, Failure, MatchedNonterminal, MatchedSeq, Success, - }, - quoted, - transcribe::transcribe, - }, -}; +use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension}; +use ext::base::{NormalTT, TTMacroExpander}; +use ext::expand::{AstFragment, AstFragmentKind}; +use ext::tt::macro_parser::{Success, Error, Failure}; +use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; +use ext::tt::macro_parser::{parse, parse_failure_msg}; +use ext::tt::quoted; +use ext::tt::transcribe::transcribe; use feature_gate::{self, emit_feature_err, Features, GateIssue}; -use parse::{ - parser::Parser, - token::{self, NtTT, Token::*}, - Directory, ParseSess, -}; +use parse::{Directory, ParseSess}; +use parse::parser::Parser; +use parse::token::{self, NtTT}; +use parse::token::Token::*; use symbol::Symbol; -use syntax_pos::{Span, DUMMY_SP}; use tokenstream::{TokenStream, TokenTree}; -use {ast, attr}; -use std::{ - borrow::Cow, - collections::{hash_map::Entry, HashMap}, -}; +use std::borrow::Cow; +use std::collections::HashMap; +use std::collections::hash_map::Entry; use rustc_data_structures::sync::Lrc; @@ -44,16 +39,12 @@ pub struct ParserAnyMacro<'a> { /// Span of the expansion site of the macro this parser is for site_span: Span, /// The ident of the macro we're parsing - macro_ident: ast::Ident, + macro_ident: ast::Ident } impl<'a> ParserAnyMacro<'a> { pub fn make(mut self: Box>, kind: AstFragmentKind) -> AstFragment { - let ParserAnyMacro { - site_span, - macro_ident, - ref mut parser, - } = *self; + let ParserAnyMacro { site_span, macro_ident, ref mut parser } = *self; let fragment = panictry!(parser.parse_ast_fragment(kind, true)); // We allow semicolons at the end of expressions -- e.g. 
the semicolon in @@ -86,16 +77,17 @@ impl TTMacroExpander for MacroRulesMacroExpander { if !self.valid { return DummyResult::any(sp); } - generic_extension(cx, sp, self.name, input, &self.lhses, &self.rhses) + generic_extension(cx, + sp, + self.name, + input, + &self.lhses, + &self.rhses) } } fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) { - let sp = sp - .macro_backtrace() - .last() - .map(|trace| trace.call_site) - .unwrap_or(sp); + let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp); let values: &mut Vec = cx.expansions.entry(sp).or_insert_with(Vec::new); values.push(message); } @@ -116,11 +108,10 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, let mut best_fail_spot = DUMMY_SP; let mut best_fail_tok = None; - for (i, lhs) in lhses.iter().enumerate() { - // try each arm's matchers + for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers let lhs_tt = match *lhs { quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..], - _ => cx.span_bug(sp, "malformed macro lhs"), + _ => cx.span_bug(sp, "malformed macro lhs") }; match TokenTree::parse(cx, lhs_tt, arg.clone()) { @@ -156,11 +147,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, ownership: cx.current_expansion.directory_ownership, }; let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false); - p.root_module_name = cx - .current_expansion - .module - .mod_path - .last() + p.root_module_name = cx.current_expansion.module.mod_path.last() .map(|id| id.as_str().to_string()); p.process_potential_macro_variable(); @@ -173,14 +160,16 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, // so we can print a useful error message if the parse of the expanded // macro leaves unparsed tokens. 
site_span: sp, - macro_ident: name, - }); + macro_ident: name + }) } Failure(sp, tok) => if sp.lo() >= best_fail_spot.lo() { best_fail_spot = sp; best_fail_tok = Some(tok); }, - Error(err_sp, ref msg) => cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]), + Error(err_sp, ref msg) => { + cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]) + } } } @@ -196,12 +185,8 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, // Holy self-referential! /// Converts a `macro_rules!` invocation into a syntax extension. -pub fn compile( - sess: &ParseSess, - features: &Features, - def: &ast::Item, - edition: Edition, -) -> SyntaxExtension { +pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition: Edition) + -> SyntaxExtension { let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs")); let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs")); @@ -217,47 +202,33 @@ pub fn compile( // ...quasiquoting this would be nice. // These spans won't matter, anyways let argument_gram = vec![ - quoted::TokenTree::Sequence( - DUMMY_SP, - Lrc::new(quoted::SequenceRepetition { - tts: vec![ - quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")), - quoted::TokenTree::Token(DUMMY_SP, token::FatArrow), - quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")), - ], - separator: Some(if body.legacy { - token::Semi - } else { - token::Comma - }), - op: quoted::KleeneOp::OneOrMore, - num_captures: 2, - }), - ), + quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition { + tts: vec![ + quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")), + quoted::TokenTree::Token(DUMMY_SP, token::FatArrow), + quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")), + ], + separator: Some(if body.legacy { token::Semi } else { token::Comma }), + op: quoted::KleeneOp::OneOrMore, + num_captures: 2, + })), // to phase into semicolon-termination instead of semicolon-separation - 
quoted::TokenTree::Sequence( - DUMMY_SP, - Lrc::new(quoted::SequenceRepetition { - tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)], - separator: None, - op: quoted::KleeneOp::ZeroOrMore, - num_captures: 0, - }), - ), + quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition { + tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)], + separator: None, + op: quoted::KleeneOp::ZeroOrMore, + num_captures: 0 + })), ]; let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) { Success(m) => m, Failure(sp, tok) => { let s = parse_failure_msg(tok); - sess.span_diagnostic - .span_fatal(sp.substitute_dummy(def.span), &s) - .raise(); + sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise(); } Error(sp, s) => { - sess.span_diagnostic - .span_fatal(sp.substitute_dummy(def.span), &s) - .raise(); + sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise(); } }; @@ -265,9 +236,8 @@ pub fn compile( // Extract the arguments: let lhses = match *argument_map[&lhs_nm] { - MatchedSeq(ref s, _) => s - .iter() - .map(|m| { + MatchedSeq(ref s, _) => { + s.iter().map(|m| { if let MatchedNonterminal(ref nt) = *m { if let NtTT(ref tt) = **nt { let tt = quoted::parse( @@ -277,25 +247,22 @@ pub fn compile( features, &def.attrs, edition, - ).pop() - .unwrap(); + ) + .pop() + .unwrap(); valid &= check_lhs_nt_follows(sess, features, &def.attrs, &tt); return tt; } } - sess.span_diagnostic - .span_bug(def.span, "wrong-structured lhs") - }) - .collect::>(), - _ => sess - .span_diagnostic - .span_bug(def.span, "wrong-structured lhs"), + sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") + }).collect::>() + } + _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") }; let rhses = match *argument_map[&rhs_nm] { - MatchedSeq(ref s, _) => s - .iter() - .map(|m| { + MatchedSeq(ref s, _) => { + s.iter().map(|m| { if let MatchedNonterminal(ref nt) = *m { if let NtTT(ref tt) = **nt { return 
quoted::parse( @@ -306,16 +273,13 @@ pub fn compile( &def.attrs, edition, ).pop() - .unwrap(); + .unwrap(); } } - sess.span_diagnostic - .span_bug(def.span, "wrong-structured lhs") - }) - .collect::>(), - _ => sess - .span_diagnostic - .span_bug(def.span, "wrong-structured rhs"), + sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") + }).collect::>() + } + _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs") }; for rhs in &rhses { @@ -344,14 +308,14 @@ pub fn compile( } } - let unstable_feature = attr::find_stability(&sess.span_diagnostic, &def.attrs, def.span) - .and_then(|stability| { - if let attr::StabilityLevel::Unstable { issue, .. } = stability.level { - Some((stability.feature, issue)) - } else { - None - } - }); + let unstable_feature = attr::find_stability(&sess.span_diagnostic, + &def.attrs, def.span).and_then(|stability| { + if let attr::StabilityLevel::Unstable { issue, .. } = stability.level { + Some((stability.feature, issue)) + } else { + None + } + }); NormalTT { expander, @@ -374,12 +338,10 @@ pub fn compile( } } -fn check_lhs_nt_follows( - sess: &ParseSess, - features: &Features, - attrs: &[ast::Attribute], - lhs: "ed::TokenTree, -) -> bool { +fn check_lhs_nt_follows(sess: &ParseSess, + features: &Features, + attrs: &[ast::Attribute], + lhs: "ed::TokenTree) -> bool { // lhs is going to be like TokenTree::Delimited(...), where the // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens. 
if let quoted::TokenTree::Delimited(_, ref tts) = *lhs { @@ -404,15 +366,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { return false; }, TokenTree::Sequence(span, ref seq) => { - if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| match *seq_tt { - TokenTree::MetaVarDecl(_, _, id) => id.name == "vis", - TokenTree::Sequence(_, ref sub_seq) => { - sub_seq.op == quoted::KleeneOp::ZeroOrMore + if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| { + match *seq_tt { + TokenTree::MetaVarDecl(_, _, id) => id.name == "vis", + TokenTree::Sequence(_, ref sub_seq) => + sub_seq.op == quoted::KleeneOp::ZeroOrMore, + _ => false, } - _ => false, }) { - sess.span_diagnostic - .span_err(span, "repetition matches empty token tree"); + sess.span_diagnostic.span_err(span, "repetition matches empty token tree"); return false; } if !check_lhs_no_empty_seq(sess, &seq.tts) { @@ -428,19 +390,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { fn check_rhs(sess: &ParseSess, rhs: "ed::TokenTree) -> bool { match *rhs { quoted::TokenTree::Delimited(..) 
=> return true, - _ => sess - .span_diagnostic - .span_err(rhs.span(), "macro rhs must be delimited"), + _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited") } false } -fn check_matcher( - sess: &ParseSess, - features: &Features, - attrs: &[ast::Attribute], - matcher: &[quoted::TokenTree], -) -> bool { +fn check_matcher(sess: &ParseSess, + features: &Features, + attrs: &[ast::Attribute], + matcher: &[quoted::TokenTree]) -> bool { let first_sets = FirstSets::new(matcher); let empty_suffix = TokenSet::empty(); let err = sess.span_diagnostic.err_count(); @@ -474,9 +432,7 @@ impl FirstSets { fn new(tts: &[quoted::TokenTree]) -> FirstSets { use self::quoted::TokenTree; - let mut sets = FirstSets { - first: HashMap::new(), - }; + let mut sets = FirstSets { first: HashMap::new() }; build_recur(&mut sets, tts); return sets; @@ -515,9 +471,8 @@ impl FirstSets { // If the sequence contents can be empty, then the first // token could be the separator token itself. - if let (Some(ref sep), true) = - (seq_rep.separator.clone(), subfirst.maybe_empty) - { + if let (Some(ref sep), true) = (seq_rep.separator.clone(), + subfirst.maybe_empty) { first.add_one_maybe(TokenTree::Token(sp, sep.clone())); } @@ -525,10 +480,7 @@ impl FirstSets { if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore { // If sequence is potentially empty, then // union them (preserving first emptiness). - first.add_all(&TokenSet { - maybe_empty: true, - ..subfirst - }); + first.add_all(&TokenSet { maybe_empty: true, ..subfirst }); } else { // Otherwise, sequence guaranteed // non-empty; replace first. @@ -562,18 +514,19 @@ impl FirstSets { TokenTree::Sequence(sp, ref seq_rep) => { match self.first.get(&sp) { Some(&Some(ref subfirst)) => { + // If the sequence contents can be empty, then the first // token could be the separator token itself. 
- if let (Some(ref sep), true) = - (seq_rep.separator.clone(), subfirst.maybe_empty) - { + if let (Some(ref sep), true) = (seq_rep.separator.clone(), + subfirst.maybe_empty) { first.add_one_maybe(TokenTree::Token(sp, sep.clone())); } assert!(first.maybe_empty); first.add_all(subfirst); - if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore { + if subfirst.maybe_empty || + seq_rep.op == quoted::KleeneOp::ZeroOrMore { // continue scanning for more first // tokens, but also make sure we // restore empty-tracking state @@ -621,20 +574,12 @@ struct TokenSet { impl TokenSet { // Returns a set for the empty sequence. - fn empty() -> Self { - TokenSet { - tokens: Vec::new(), - maybe_empty: true, - } - } + fn empty() -> Self { TokenSet { tokens: Vec::new(), maybe_empty: true } } // Returns the set `{ tok }` for the single-token (and thus // non-empty) sequence [tok]. fn singleton(tok: quoted::TokenTree) -> Self { - TokenSet { - tokens: vec![tok], - maybe_empty: false, - } + TokenSet { tokens: vec![tok], maybe_empty: false } } // Changes self to be the set `{ tok }`. @@ -698,14 +643,12 @@ impl TokenSet { // // Requires that `first_sets` is pre-computed for `matcher`; // see `FirstSets::new`. -fn check_matcher_core( - sess: &ParseSess, - features: &Features, - attrs: &[ast::Attribute], - first_sets: &FirstSets, - matcher: &[quoted::TokenTree], - follow: &TokenSet, -) -> TokenSet { +fn check_matcher_core(sess: &ParseSess, + features: &Features, + attrs: &[ast::Attribute], + first_sets: &FirstSets, + matcher: &[quoted::TokenTree], + follow: &TokenSet) -> TokenSet { use self::quoted::TokenTree; let mut last = TokenSet::empty(); @@ -715,13 +658,11 @@ fn check_matcher_core( // then ensure T can also be followed by any element of FOLLOW. 
'each_token: for i in 0..matcher.len() { let token = &matcher[i]; - let suffix = &matcher[i + 1..]; + let suffix = &matcher[i+1..]; let build_suffix_first = || { let mut s = first_sets.first(suffix); - if s.maybe_empty { - s.add_all(follow); - } + if s.maybe_empty { s.add_all(follow); } s }; @@ -737,12 +678,9 @@ fn check_matcher_core( let can_be_followed_by_any; if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, attrs, token) { let msg = format!("invalid fragment specifier `{}`", bad_frag); - sess.span_diagnostic - .struct_span_err(token.span(), &msg) - .help( - "valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, \ - `pat`, `ty`, `literal`, `path`, `meta`, `tt`, `item` and `vis`", - ) + sess.span_diagnostic.struct_span_err(token.span(), &msg) + .help("valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, \ + `pat`, `ty`, `literal`, `path`, `meta`, `tt`, `item` and `vis`") .emit(); // (This eliminates false positives and duplicates // from error messages.) @@ -796,8 +734,12 @@ fn check_matcher_core( // At this point, `suffix_first` is built, and // `my_suffix` is some TokenSet that we can use // for checking the interior of `seq_rep`. - let next = - check_matcher_core(sess, features, attrs, first_sets, &seq_rep.tts, my_suffix); + let next = check_matcher_core(sess, + features, + attrs, + first_sets, + &seq_rep.tts, + my_suffix); if next.maybe_empty { last.add_all(&next); } else { @@ -819,17 +761,16 @@ fn check_matcher_core( for next_token in &suffix_first.tokens { match is_in_follow(next_token, &frag_spec.as_str()) { Err((msg, help)) => { - sess.span_diagnostic - .struct_span_err(next_token.span(), &msg) - .help(help) - .emit(); + sess.span_diagnostic.struct_span_err(next_token.span(), &msg) + .help(help).emit(); // don't bother reporting every source of // conflict for a particular element of `last`. 
continue 'each_last; } Ok(true) => {} Ok(false) => { - let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1 + let may_be = if last.tokens.len() == 1 && + suffix_first.tokens.len() == 1 { "is" } else { @@ -838,14 +779,12 @@ fn check_matcher_core( sess.span_diagnostic.span_err( next_token.span(), - &format!( - "`${name}:{frag}` {may_be} followed by `{next}`, which \ - is not allowed for `{frag}` fragments", - name = name, - frag = frag_spec, - next = quoted_tt_to_string(next_token), - may_be = may_be - ), + &format!("`${name}:{frag}` {may_be} followed by `{next}`, which \ + is not allowed for `{frag}` fragments", + name=name, + frag=frag_spec, + next=quoted_tt_to_string(next_token), + may_be=may_be) ); } } @@ -910,16 +849,16 @@ fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> Result { // anything can follow block, the braces provide an easy boundary to // maintain Ok(true) - } - "stmt" | "expr" => match *tok { + }, + "stmt" | "expr" => match *tok { TokenTree::Token(_, ref tok) => match *tok { FatArrow | Comma | Semi => Ok(true), - _ => Ok(false), + _ => Ok(false) }, _ => Ok(false), }, @@ -927,23 +866,16 @@ fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> Result match *tok { FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true), Ident(i, false) if i.name == "if" || i.name == "in" => Ok(true), - _ => Ok(false), + _ => Ok(false) }, _ => Ok(false), }, "path" | "ty" => match *tok { TokenTree::Token(_, ref tok) => match *tok { - OpenDelim(token::DelimToken::Brace) - | OpenDelim(token::DelimToken::Bracket) - | Comma - | FatArrow - | Colon - | Eq - | Gt - | Semi - | BinOp(token::Or) => Ok(true), + OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) | + Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true), Ident(i, false) if i.name == "as" || i.name == "where" => Ok(true), - _ => Ok(false), + _ => Ok(false) }, TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true), _ => Ok(false), @@ -951,49 
+883,43 @@ fn is_in_follow(tok: "ed::TokenTree, frag: &str) -> Result { // being a single token, idents and lifetimes are harmless Ok(true) - } + }, "literal" => { // literals may be of a single token, or two tokens (negative numbers) Ok(true) - } + }, "meta" | "tt" => { // being either a single token or a delimited sequence, tt is // harmless Ok(true) - } + }, "vis" => { // Explicitly disallow `priv`, on the off chance it comes back. match *tok { TokenTree::Token(_, ref tok) => match *tok { Comma => Ok(true), Ident(i, is_raw) if is_raw || i.name != "priv" => Ok(true), - ref tok => Ok(tok.can_begin_type()), + ref tok => Ok(tok.can_begin_type()) }, - TokenTree::MetaVarDecl(_, _, frag) - if frag.name == "ident" || frag.name == "ty" || frag.name == "path" => - { - Ok(true) - } - _ => Ok(false), + TokenTree::MetaVarDecl(_, _, frag) if frag.name == "ident" + || frag.name == "ty" + || frag.name == "path" => Ok(true), + _ => Ok(false) } - } + }, "" => Ok(true), // keywords::Invalid - _ => Err(( - format!("invalid fragment specifier `{}`", frag), - "valid fragment specifiers are `ident`, `block`, \ - `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt`, \ - `literal`, `item` and `vis`", - )), + _ => Err((format!("invalid fragment specifier `{}`", frag), + "valid fragment specifiers are `ident`, `block`, \ + `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt`, \ + `literal`, `item` and `vis`")) } } } -fn has_legal_fragment_specifier( - sess: &ParseSess, - features: &Features, - attrs: &[ast::Attribute], - tok: "ed::TokenTree, -) -> Result<(), String> { +fn has_legal_fragment_specifier(sess: &ParseSess, + features: &Features, + attrs: &[ast::Attribute], + tok: "ed::TokenTree) -> Result<(), String> { debug!("has_legal_fragment_specifier({:?})", tok); if let quoted::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok { let frag_name = frag_spec.as_str(); @@ -1005,45 +931,38 @@ fn has_legal_fragment_specifier( Ok(()) } -fn is_legal_fragment_specifier( - sess: &ParseSess, - features: 
&Features, - attrs: &[ast::Attribute], - frag_name: &str, - frag_span: Span, -) -> bool { +fn is_legal_fragment_specifier(sess: &ParseSess, + features: &Features, + attrs: &[ast::Attribute], + frag_name: &str, + frag_span: Span) -> bool { match frag_name { - "item" | "block" | "stmt" | "expr" | "pat" | "lifetime" | "path" | "ty" | "ident" - | "meta" | "tt" | "" => true, + "item" | "block" | "stmt" | "expr" | "pat" | "lifetime" | + "path" | "ty" | "ident" | "meta" | "tt" | "" => true, "literal" => { - if !features.macro_literal_matcher - && !attr::contains_name(attrs, "allow_internal_unstable") - { + if !features.macro_literal_matcher && + !attr::contains_name(attrs, "allow_internal_unstable") { let explain = feature_gate::EXPLAIN_LITERAL_MATCHER; - emit_feature_err( - sess, - "macro_literal_matcher", - frag_span, - GateIssue::Language, - explain, - ); + emit_feature_err(sess, + "macro_literal_matcher", + frag_span, + GateIssue::Language, + explain); } true - } + }, "vis" => { - if !features.macro_vis_matcher && !attr::contains_name(attrs, "allow_internal_unstable") - { + if !features.macro_vis_matcher && + !attr::contains_name(attrs, "allow_internal_unstable") { let explain = feature_gate::EXPLAIN_VIS_MATCHER; - emit_feature_err( - sess, - "macro_vis_matcher", - frag_span, - GateIssue::Language, - explain, - ); + emit_feature_err(sess, + "macro_vis_matcher", + frag_span, + GateIssue::Language, + explain); } true - } + }, _ => false, } } @@ -1053,9 +972,7 @@ fn quoted_tt_to_string(tt: "ed::TokenTree) -> String { quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok), quoted::TokenTree::MetaVar(_, name) => format!("${}", name), quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), - _ => panic!( - "unexpected quoted::TokenTree::{{Sequence or Delimited}} \ - in follow set checker" - ), + _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \ + in follow set checker"), } } diff --git 
a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 1bca6dd065323..e209e077bf4db 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -13,7 +13,7 @@ use feature_gate::{self, emit_feature_err, Features, GateIssue}; use parse::{token, ParseSess}; use print::pprust; use symbol::keywords; -use syntax_pos::{edition::Edition, BytePos, Span, DUMMY_SP}; +use syntax_pos::{edition::Edition, BytePos, Span}; use tokenstream; use {ast, attr}; From b206aedb1bbdd45578668afd7f5589b1b812fd22 Mon Sep 17 00:00:00 2001 From: mark Date: Thu, 12 Jul 2018 21:25:02 -0500 Subject: [PATCH 5/7] make it a migration lint --- src/librustc_lint/lib.rs | 15 +++++++++--- src/libsyntax/early_buffered_lints.rs | 10 ++++++++ src/libsyntax/ext/tt/macro_rules.rs | 2 ++ src/libsyntax/ext/tt/quoted.rs | 33 ++++++++++++++++++++++++--- 4 files changed, 54 insertions(+), 6 deletions(-) diff --git a/src/librustc_lint/lib.rs b/src/librustc_lint/lib.rs index cab51fbd98775..798c289ac2f9f 100644 --- a/src/librustc_lint/lib.rs +++ b/src/librustc_lint/lib.rs @@ -44,9 +44,13 @@ extern crate syntax_pos; use rustc::lint; use rustc::lint::{LateContext, LateLintPass, LintPass, LintArray}; -use rustc::lint::builtin::{BARE_TRAIT_OBJECTS, ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE, - ELIDED_LIFETIMES_IN_PATHS}; -use rustc::lint::builtin::MACRO_USE_EXTERN_CRATE; +use rustc::lint::builtin::{ + BARE_TRAIT_OBJECTS, + ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE, + MACRO_USE_EXTERN_CRATE, + ELIDED_LIFETIMES_IN_PATHS, + parser::QUESTION_MARK_MACRO_SEP +}; use rustc::session; use rustc::util; use rustc::hir; @@ -321,6 +325,11 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { reference: "issue #50504 ", edition: None, }, + FutureIncompatibleInfo { + id: LintId::of(QUESTION_MARK_MACRO_SEP), + reference: "issue #48075 ", + edition: Some(Edition::Edition2018), + } ]); // Register renamed and removed lints diff --git a/src/libsyntax/early_buffered_lints.rs 
b/src/libsyntax/early_buffered_lints.rs index 204e07625adef..a976af1435d23 100644 --- a/src/libsyntax/early_buffered_lints.rs +++ b/src/libsyntax/early_buffered_lints.rs @@ -1,3 +1,13 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + //! Allows the buffering of lints for later. //! //! Since we cannot have a dependency on `librustc`, we implement some types here that are somewhat diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 8912be5f69d11..c9ec2c7d1e86a 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -247,6 +247,7 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition: features, &def.attrs, edition, + def.id, ) .pop() .unwrap(); @@ -272,6 +273,7 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition: features, &def.attrs, edition, + def.id, ).pop() .unwrap(); } diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index e209e077bf4db..357fc77a3a79d 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +use ast::NodeId; +use early_buffered_lints::BufferedEarlyLintId; use ext::tt::macro_parser; use feature_gate::{self, emit_feature_err, Features, GateIssue}; use parse::{token, ParseSess}; @@ -175,6 +177,7 @@ impl TokenTree { /// - `features`, `attrs`: language feature flags and attributes so that we know whether to use /// unstable features or not. /// - `edition`: which edition are we in. +/// - `macro_node_id`: the NodeId of the macro we are parsing. 
/// /// # Returns /// @@ -186,6 +189,7 @@ pub fn parse( features: &Features, attrs: &[ast::Attribute], edition: Edition, + macro_node_id: NodeId, ) -> Vec { // Will contain the final collection of `self::TokenTree` let mut result = Vec::new(); @@ -204,6 +208,7 @@ pub fn parse( features, attrs, edition, + macro_node_id, ); match tree { TokenTree::MetaVar(start_sp, ident) if expect_matchers => { @@ -265,6 +270,7 @@ fn parse_tree( features: &Features, attrs: &[ast::Attribute], edition: Edition, + macro_node_id: NodeId, ) -> TokenTree where I: Iterator, { @@ -290,10 +296,19 @@ where features, attrs, edition, + macro_node_id, ); // Get the Kleene operator and optional separator let (separator, op) = - parse_sep_and_kleene_op(trees, span, sess, features, attrs, edition); + parse_sep_and_kleene_op( + trees, + span, + sess, + features, + attrs, + edition, + macro_node_id, + ); // Count the number of captured "names" (i.e. named metavars) let name_captures = macro_parser::count_names(&sequence); TokenTree::Sequence( @@ -350,6 +365,7 @@ where features, attrs, edition, + macro_node_id, ), }), ), @@ -413,12 +429,20 @@ fn parse_sep_and_kleene_op( features: &Features, attrs: &[ast::Attribute], edition: Edition, + macro_node_id: NodeId, ) -> (Option, KleeneOp) where I: Iterator, { match edition { - Edition::Edition2015 => parse_sep_and_kleene_op_2015(input, span, sess, features, attrs), + Edition::Edition2015 => parse_sep_and_kleene_op_2015( + input, + span, + sess, + features, + attrs, + macro_node_id, + ), Edition::Edition2018 => parse_sep_and_kleene_op_2018(input, span, sess, features, attrs), _ => unimplemented!(), } @@ -431,6 +455,7 @@ fn parse_sep_and_kleene_op_2015( sess: &ParseSess, _features: &Features, _attrs: &[ast::Attribute], + macro_node_id: NodeId, ) -> (Option, KleeneOp) where I: Iterator, { @@ -474,8 +499,10 @@ where // #2 is a Kleene op, which is the only valid option Ok(Ok((op, _))) => { // Warn that `?` as a separator will be deprecated -
sess.span_diagnostic.span_warn( + sess.buffer_lint( + BufferedEarlyLintId::QuestionMarkMacroSep, op1_span, + macro_node_id, "using `?` as a separator is deprecated and will be \ a hard error in an upcoming edition", ); From 6cb09ccf9f524590d7cc9f8c97732742446ae2b2 Mon Sep 17 00:00:00 2001 From: mark Date: Fri, 13 Jul 2018 23:40:29 -0500 Subject: [PATCH 6/7] dump lints _after_ parsing macros --- src/librustc_driver/driver.rs | 16 +++++++++------- src/libsyntax/parse/mod.rs | 6 +++--- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 3e14ec6f8d490..91392ab013d6c 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -697,13 +697,6 @@ pub fn phase_1_parse_input<'a>( hir_stats::print_ast_stats(&krate, "PRE EXPANSION AST STATS"); } - // Add all buffered lints from the `ParseSess` to the `Session`. - let mut parse_sess_buffered = sess.parse_sess.buffered_lints.borrow_mut(); - for BufferedEarlyLint{id, span, msg, lint_id} in parse_sess_buffered.drain(..) { - let lint = lint::Lint::from_parser_lint_id(lint_id); - sess.buffer_lint(lint, id, span, &msg); - } - Ok(krate) } @@ -1074,6 +1067,15 @@ where ) }); + // Add all buffered lints from the `ParseSess` to the `Session`. + sess.parse_sess.buffered_lints.with_lock(|buffered_lints| { + info!("{} parse sess buffered_lints", buffered_lints.len()); + for BufferedEarlyLint{id, span, msg, lint_id} in buffered_lints.drain(..) { + let lint = lint::Lint::from_parser_lint_id(lint_id); + sess.buffer_lint(lint, id, span, &msg); + } + }); + // Done with macro expansion! 
after_expand(&krate)?; diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 5dbf569766e5a..d029509f0c12d 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -96,14 +96,14 @@ impl ParseSess { id: NodeId, msg: &str, ) { - self.buffered_lints - .borrow_mut() - .push(BufferedEarlyLint{ + self.buffered_lints.with_lock(|buffered_lints| { + buffered_lints.push(BufferedEarlyLint{ span: span.into(), id, msg: msg.into(), lint_id, }); + }); } } From 10ee0f68a6815fafa69f58daf347f0c2a8339f32 Mon Sep 17 00:00:00 2001 From: mark Date: Fri, 13 Jul 2018 23:48:15 -0500 Subject: [PATCH 7/7] Allow by default, fix tests --- src/librustc/lint/builtin.rs | 2 +- src/libsyntax/parse/lexer/mod.rs | 1 + .../auxiliary/procedural_mbe_matching.rs | 6 ++++-- src/test/ui/issue-39388.stderr | 4 ++-- .../macro-at-most-once-rep-2015-ques-sep.rs | 4 ++++ .../macro-at-most-once-rep-2015-ques-sep.stderr | 16 ++++++++++++++-- 6 files changed, 26 insertions(+), 7 deletions(-) diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 495b4d32e0651..4184cba7db3ef 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -335,7 +335,7 @@ declare_lint! { pub mod parser { declare_lint! 
{ pub QUESTION_MARK_MACRO_SEP, - Warn, + Allow, "detects the use of `?` as a macro separator" } } diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 9748e2947eebd..4b077aa8dd4c6 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -1807,6 +1807,7 @@ mod tests { raw_identifier_spans: Lock::new(Vec::new()), registered_diagnostics: Lock::new(ErrorMap::new()), non_modrs_mods: Lock::new(vec![]), + buffered_lints: Lock::new(vec![]), } } diff --git a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs index 664e9eeb3e04e..f1777745e06c3 100644 --- a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs +++ b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs @@ -20,7 +20,7 @@ extern crate rustc_plugin; use syntax::feature_gate::Features; use syntax::parse::token::{NtExpr, NtPat}; -use syntax::ast::{Ident, Pat}; +use syntax::ast::{Ident, Pat, NodeId}; use syntax::tokenstream::{TokenTree}; use syntax::ext::base::{ExtCtxt, MacResult, MacEager}; use syntax::ext::build::AstBuilder; @@ -41,7 +41,9 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree]) cx.parse_sess, &Features::new(), &[], - Edition::Edition2015); + Edition::Edition2015, + // not used... 
+ NodeId::new(0)); let map = match TokenTree::parse(cx, &mbe_matcher, args.iter().cloned().collect()) { Success(map) => map, Failure(_, tok) => { diff --git a/src/test/ui/issue-39388.stderr b/src/test/ui/issue-39388.stderr index a38d38a51aded..dc19487f3af65 100644 --- a/src/test/ui/issue-39388.stderr +++ b/src/test/ui/issue-39388.stderr @@ -1,7 +1,7 @@ -error: expected one of: `*`, `+`, or `?` +error: expected `*` or `+` --> $DIR/issue-39388.rs:14:22 | -LL | (($($a:tt)*) = ($($b:tt))*) => { //~ ERROR expected one of: `*`, `+`, or `?` +LL | (($($a:tt)*) = ($($b:tt))*) => { //~ ERROR expected `*` or `+` | ^^^^^^^ error: aborting due to previous error diff --git a/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.rs b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.rs index 4b3dfbdc2e8a5..2e06b4bd5c240 100644 --- a/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.rs +++ b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.rs @@ -14,12 +14,16 @@ // compile-flags: --edition=2015 // compile-pass +#![warn(rust_2018_compatibility)] + macro_rules! bar { ($(a)?*) => {} //~WARN using `?` as a separator + //~^WARN this was previously accepted } macro_rules! 
baz { ($(a)?+) => {} //~WARN using `?` as a separator + //~^WARN this was previously accepted } fn main() { diff --git a/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.stderr b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.stderr index 0ab4138864e00..db1872c24134a 100644 --- a/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.stderr +++ b/src/test/ui/macros/macro-at-most-once-rep-2015-ques-sep.stderr @@ -1,12 +1,24 @@ warning: using `?` as a separator is deprecated and will be a hard error in an upcoming edition - --> $DIR/macro-at-most-once-rep-2015-ques-sep.rs:18:10 + --> $DIR/macro-at-most-once-rep-2015-ques-sep.rs:20:10 | LL | ($(a)?*) => {} //~WARN using `?` as a separator | ^ + | +note: lint level defined here + --> $DIR/macro-at-most-once-rep-2015-ques-sep.rs:17:9 + | +LL | #![warn(rust_2018_compatibility)] + | ^^^^^^^^^^^^^^^^^^^^^^^ + = note: #[warn(question_mark_macro_sep)] implied by #[warn(rust_2018_compatibility)] + = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in the 2018 edition! + = note: for more information, see issue #48075 warning: using `?` as a separator is deprecated and will be a hard error in an upcoming edition - --> $DIR/macro-at-most-once-rep-2015-ques-sep.rs:22:10 + --> $DIR/macro-at-most-once-rep-2015-ques-sep.rs:25:10 | LL | ($(a)?+) => {} //~WARN using `?` as a separator | ^ + | + = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in the 2018 edition! + = note: for more information, see issue #48075