Stop using DiagnosticBuilder::buffer in the parser.
One consequence is that errors returned by
`maybe_new_parser_from_source_str` now must be consumed, so a bunch of
places that previously ignored those errors now cancel them. (Most of
them explicitly dropped the errors before. I guess that was to indicate
"we are explicitly ignoring these", though I'm not 100% sure.)
nnethercote committed Jan 11, 2024
1 parent 4e7c6c2 commit fe6912a
Showing 6 changed files with 32 additions and 27 deletions.
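
The pattern this commit enforces at call sites is sketched below. The types are invented stand-ins (MockDiagnosticBuilder and maybe_parse are illustrative only, not rustc APIs): a diagnostic builder must be consumed, by emitting or cancelling it, before it is dropped, which is why callers that used to write `drop(errs)` now cancel each error explicitly.

// Minimal, self-contained sketch of the invariant behind this commit.
// `MockDiagnosticBuilder` is a hypothetical stand-in for rustc's
// `DiagnosticBuilder`: it must be emitted or cancelled before it is dropped.
struct MockDiagnosticBuilder {
    message: String,
    consumed: bool,
}

impl MockDiagnosticBuilder {
    fn new(message: &str) -> Self {
        MockDiagnosticBuilder { message: message.to_string(), consumed: false }
    }

    // Report the error, consuming the builder.
    fn emit(mut self) {
        println!("error: {}", self.message);
        self.consumed = true;
    }

    // Silently discard the error, consuming the builder.
    fn cancel(mut self) {
        self.consumed = true;
    }
}

impl Drop for MockDiagnosticBuilder {
    fn drop(&mut self) {
        // Mirrors the behaviour noted in the updated doc comment in
        // compiler/rustc_parse/src/lib.rs: dropping an unconsumed builder is a bug.
        assert!(self.consumed, "diagnostic dropped without being emitted or cancelled");
    }
}

// Hypothetical fallible parse entry point, standing in for
// `maybe_new_parser_from_source_str`.
fn maybe_parse(src: &str) -> Result<usize, Vec<MockDiagnosticBuilder>> {
    if src.contains('(') && !src.contains(')') {
        Err(vec![MockDiagnosticBuilder::new("unclosed delimiter")])
    } else {
        Ok(src.len())
    }
}

fn main() {
    // The new call-site pattern: errors can no longer be ignored with
    // `drop(errs)`; each one is cancelled explicitly.
    match maybe_parse("fn main(") {
        Ok(len) => println!("parsed {len} bytes"),
        Err(errs) => errs.into_iter().for_each(|err| err.cancel()),
    }
}
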
11 changes: 7 additions & 4 deletions compiler/rustc_interface/src/interface.rs
@@ -82,7 +82,7 @@ pub(crate) fn parse_cfg(dcx: &DiagCtxt, cfgs: Vec<String>) -> Cfg {
Ok(..) => {}
Err(err) => err.cancel(),
},
- Err(errs) => drop(errs),
+ Err(errs) => errs.into_iter().for_each(|err| err.cancel()),
}

// If the user tried to use a key="value" flag, but is missing the quotes, provide
@@ -129,9 +129,12 @@ pub(crate) fn parse_check_cfg(dcx: &DiagCtxt, specs: Vec<String>) -> CheckCfg {
error!("expected `cfg(name, values(\"value1\", \"value2\", ... \"valueN\"))`")
};

- let Ok(mut parser) = maybe_new_parser_from_source_str(&sess, filename, s.to_string())
- else {
- expected_error();
+ let mut parser = match maybe_new_parser_from_source_str(&sess, filename, s.to_string()) {
+ Ok(parser) => parser,
+ Err(errs) => {
+ errs.into_iter().for_each(|err| err.cancel());
+ expected_error();
+ }
};

let meta_item = match parser.parse_meta_item() {
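One detail in the parse_check_cfg hunk above: the `let Ok(...) else` form had to become a `match` because the `else` arm of a let-else never binds the `Err` payload, so there would be nothing there to cancel. A small self-contained illustration of that language-level point (fallible and the String errors are invented for the example):

// Illustration of why `let ... else` cannot be used once the errors must be
// consumed: the `else` arm has no access to the `Err` value.
fn fallible() -> Result<u32, Vec<String>> {
    Err(vec!["boom".to_string()])
}

fn with_let_else() -> u32 {
    // The error vector is never bound here: the `else` arm cannot name the
    // `Err` payload, so there is no way to cancel each error explicitly.
    let Ok(value) = fallible() else {
        return 0;
    };
    value
}

fn with_match() -> u32 {
    // The `match` form binds the errors, so each one can be handled
    // explicitly (in the real code, via `err.cancel()`).
    match fallible() {
        Ok(value) => value,
        Err(errs) => {
            for err in errs {
                eprintln!("discarding: {err}");
            }
            0
        }
    }
}

fn main() {
    assert_eq!(with_let_else(), 0);
    assert_eq!(with_match(), 0);
}
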
8 changes: 4 additions & 4 deletions compiler/rustc_parse/src/lexer/mod.rs
@@ -7,7 +7,7 @@ use rustc_ast::ast::{self, AttrStyle};
use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::util::unicode::contains_text_flow_control_chars;
- use rustc_errors::{error_code, Applicability, DiagCtxt, Diagnostic, StashKey};
+ use rustc_errors::{error_code, Applicability, DiagCtxt, DiagnosticBuilder, StashKey};
use rustc_lexer::unescape::{self, EscapeError, Mode};
use rustc_lexer::{Base, DocStyle, RawStrError};
use rustc_lexer::{Cursor, LiteralKind};
@@ -47,7 +47,7 @@ pub(crate) fn parse_token_trees<'sess, 'src>(
mut src: &'src str,
mut start_pos: BytePos,
override_span: Option<Span>,
- ) -> Result<TokenStream, Vec<Diagnostic>> {
+ ) -> Result<TokenStream, Vec<DiagnosticBuilder<'sess>>> {
// Skip `#!`, if present.
if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
src = &src[shebang_len..];
@@ -76,13 +76,13 @@ pub(crate) fn parse_token_trees<'sess, 'src>(
let mut buffer = Vec::with_capacity(1);
for unmatched in unmatched_delims {
if let Some(err) = make_unclosed_delims_error(unmatched, sess) {
- err.buffer(&mut buffer);
+ buffer.push(err);
}
}
if let Err(errs) = res {
// Add unclosing delimiter or diff marker errors
for err in errs {
- err.buffer(&mut buffer);
+ buffer.push(err);
}
}
Err(buffer)
26 changes: 13 additions & 13 deletions compiler/rustc_parse/src/lib.rs
@@ -19,7 +19,7 @@ use rustc_ast::tokenstream::TokenStream;
use rustc_ast::{AttrItem, Attribute, MetaItem};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
- use rustc_errors::{Diagnostic, PResult};
+ use rustc_errors::{DiagnosticBuilder, FatalError, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::{FileName, SourceFile, Span};

@@ -45,14 +45,13 @@ rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
/// A variant of 'panictry!' that works on a `Vec<Diagnostic>` instead of a single
/// `DiagnosticBuilder`.
macro_rules! panictry_buffer {
- ($handler:expr, $e:expr) => {{
- use rustc_errors::FatalError;
+ ($e:expr) => {{
use std::result::Result::{Err, Ok};
match $e {
Ok(e) => e,
Err(errs) => {
for e in errs {
- $handler.emit_diagnostic(e);
+ e.emit();
}
FatalError.raise()
}
@@ -100,16 +99,17 @@ pub fn parse_stream_from_source_str(

/// Creates a new parser from a source string.
pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
- panictry_buffer!(&sess.dcx, maybe_new_parser_from_source_str(sess, name, source))
+ panictry_buffer!(maybe_new_parser_from_source_str(sess, name, source))
}

/// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
- /// token stream.
+ /// token stream; these must be consumed via `emit`, `cancel`, etc., otherwise a panic will occur
+ /// when they are dropped.
pub fn maybe_new_parser_from_source_str(
sess: &ParseSess,
name: FileName,
source: String,
- ) -> Result<Parser<'_>, Vec<Diagnostic>> {
+ ) -> Result<Parser<'_>, Vec<DiagnosticBuilder<'_>>> {
maybe_source_file_to_parser(sess, sess.source_map().new_source_file(name, source))
}

@@ -125,15 +125,15 @@ pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path, sp: Option<Spa
err.emit();
});

- panictry_buffer!(&sess.dcx, maybe_source_file_to_parser(sess, source_file))
+ panictry_buffer!(maybe_source_file_to_parser(sess, source_file))
}

/// Given a session and a `source_file`, return a parser. Returns any buffered errors from lexing
/// the initial token stream.
fn maybe_source_file_to_parser(
sess: &ParseSess,
source_file: Lrc<SourceFile>,
- ) -> Result<Parser<'_>, Vec<Diagnostic>> {
+ ) -> Result<Parser<'_>, Vec<DiagnosticBuilder<'_>>> {
let end_pos = source_file.end_position();
let stream = maybe_file_to_stream(sess, source_file, None)?;
let mut parser = stream_to_parser(sess, stream, None);
@@ -152,16 +152,16 @@ pub fn source_file_to_stream(
source_file: Lrc<SourceFile>,
override_span: Option<Span>,
) -> TokenStream {
- panictry_buffer!(&sess.dcx, maybe_file_to_stream(sess, source_file, override_span))
+ panictry_buffer!(maybe_file_to_stream(sess, source_file, override_span))
}

/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
/// parsing the token stream.
- fn maybe_file_to_stream(
- sess: &ParseSess,
+ fn maybe_file_to_stream<'sess>(
+ sess: &'sess ParseSess,
source_file: Lrc<SourceFile>,
override_span: Option<Span>,
- ) -> Result<TokenStream, Vec<Diagnostic>> {
+ ) -> Result<TokenStream, Vec<DiagnosticBuilder<'sess>>> {
let src = source_file.src.as_ref().unwrap_or_else(|| {
sess.dcx.bug(format!(
"cannot lex `source_file` without source: {}",
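The signature changes in this file, returning `Vec<DiagnosticBuilder<'_>>` and adding `<'sess>` to `maybe_file_to_stream`, follow from swapping `Diagnostic` (plain data) for `DiagnosticBuilder`, which borrows the diagnostic context it will eventually report into. A rough sketch of that constraint, using invented Ctx and Builder types rather than rustc's own:

use std::cell::RefCell;

// Invented stand-ins (not rustc types) showing why a lifetime parameter now
// appears in these signatures: a builder that borrows its reporting context
// forces every function returning it to name that borrow.
struct Ctx {
    emitted: RefCell<Vec<String>>,
}

struct Builder<'ctx> {
    ctx: &'ctx Ctx,
    message: String,
}

impl<'ctx> Builder<'ctx> {
    fn emit(self) {
        self.ctx.emitted.borrow_mut().push(self.message);
    }
}

// Analogous to `maybe_file_to_stream<'sess>`: because the error type borrows
// `ctx`, the signature has to thread the `'ctx` lifetime through.
fn maybe_lex<'ctx>(ctx: &'ctx Ctx, src: &str) -> Result<Vec<char>, Vec<Builder<'ctx>>> {
    if src.is_empty() {
        Err(vec![Builder { ctx, message: "empty input".to_string() }])
    } else {
        Ok(src.chars().collect())
    }
}

fn main() {
    let ctx = Ctx { emitted: RefCell::new(Vec::new()) };
    if let Err(errs) = maybe_lex(&ctx, "") {
        for err in errs {
            err.emit();
        }
    }
    assert_eq!(ctx.emitted.borrow().len(), 1);
}
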
4 changes: 2 additions & 2 deletions src/librustdoc/clean/render_macro_matchers.rs
@@ -69,8 +69,8 @@ fn snippet_equal_to_token(tcx: TyCtxt<'_>, matcher: &TokenTree) -> Option<String
let mut parser =
match rustc_parse::maybe_new_parser_from_source_str(&sess, file_name, snippet.clone()) {
Ok(parser) => parser,
- Err(diagnostics) => {
- drop(diagnostics);
+ Err(errs) => {
+ errs.into_iter().for_each(|err| err.cancel());
return None;
}
};
8 changes: 5 additions & 3 deletions src/librustdoc/doctest.rs
@@ -589,7 +589,7 @@ pub(crate) fn make_test(
let mut parser = match maybe_new_parser_from_source_str(&sess, filename, source) {
Ok(p) => p,
Err(errs) => {
- drop(errs);
+ errs.into_iter().for_each(|err| err.cancel());
return (found_main, found_extern_crate, found_macro);
}
};
@@ -759,8 +759,10 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool {
let mut parser =
match maybe_new_parser_from_source_str(&sess, filename, source.to_owned()) {
Ok(p) => p,
- Err(_) => {
- // If there is an unclosed delimiter, an error will be returned by the tokentrees.
+ Err(errs) => {
+ errs.into_iter().for_each(|err| err.cancel());
+ // If there is an unclosed delimiter, an error will be returned by the
+ // tokentrees.
return false;
}
};
(sixth changed file; path not captured in this view)
@@ -53,7 +53,7 @@ pub fn check(
let mut parser = match maybe_new_parser_from_source_str(&sess, filename, code) {
Ok(p) => p,
Err(errs) => {
- drop(errs);
+ errs.into_iter().for_each(|err| err.cancel());
return (false, test_attr_spans);
},
};