diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs index 12d6f7cc33ddc..d819e3cb8152e 100644 --- a/compiler/rustc_ast/src/attr/mod.rs +++ b/compiler/rustc_ast/src/attr/mod.rs @@ -8,7 +8,7 @@ use crate::ast::{Path, PathSegment}; use crate::mut_visit::visit_clobber; use crate::ptr::P; use crate::token::{self, CommentKind, Token}; -use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint}; +use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing}; use rustc_index::bit_set::GrowableBitSet; use rustc_span::source_map::{BytePos, Spanned}; @@ -361,7 +361,7 @@ pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool { } impl MetaItem { - fn token_trees_and_joints(&self) -> Vec<TreeAndJoint> { + fn token_trees_and_spacings(&self) -> Vec<TreeAndSpacing> { let mut idents = vec![]; let mut last_pos = BytePos(0 as u32); for (i, segment) in self.path.segments.iter().enumerate() { @@ -374,7 +374,7 @@ impl MetaItem { idents.push(TokenTree::Token(Token::from_ast_ident(segment.ident)).into()); last_pos = segment.ident.span.hi(); } - idents.extend(self.kind.token_trees_and_joints(self.span)); + idents.extend(self.kind.token_trees_and_spacings(self.span)); idents } @@ -447,7 +447,7 @@ impl MetaItemKind { if i > 0 { tts.push(TokenTree::token(token::Comma, span).into()); } - tts.extend(item.token_trees_and_joints()) + tts.extend(item.token_trees_and_spacings()) } MacArgs::Delimited( DelimSpan::from_single(span), @@ -458,7 +458,7 @@ impl MetaItemKind { } } - fn token_trees_and_joints(&self, span: Span) -> Vec<TreeAndJoint> { + fn token_trees_and_spacings(&self, span: Span) -> Vec<TreeAndSpacing> { match *self { MetaItemKind::Word => vec![], MetaItemKind::NameValue(ref lit) => { @@ -470,7 +470,7 @@ impl MetaItemKind { if i > 0 { tokens.push(TokenTree::token(token::Comma, span).into()); } - tokens.extend(item.token_trees_and_joints()) + tokens.extend(item.token_trees_and_spacings()) } vec![ TokenTree::Delimited( @@ -553,9 +553,9 @@ impl NestedMetaItem { } } - fn token_trees_and_joints(&self) -> Vec<TreeAndJoint> { + fn token_trees_and_spacings(&self) -> Vec<TreeAndSpacing> { match *self { - NestedMetaItem::MetaItem(ref item) => item.token_trees_and_joints(), + NestedMetaItem::MetaItem(ref item) => item.token_trees_and_spacings(), NestedMetaItem::Literal(ref lit) => vec![lit.token_tree().into()], } } diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index 151acddae840e..f201f0b5c6643 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -83,7 +83,7 @@ impl TokenTree { } pub fn joint(self) -> TokenStream { - TokenStream::new(vec![(self, Joint)]) + TokenStream::new(vec![(self, Spacing::Joint)]) } pub fn token(kind: TokenKind, span: Span) -> TokenTree { @@ -125,22 +125,20 @@ where /// instead of a representation of the abstract syntax tree. /// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat. #[derive(Clone, Debug, Default, Encodable, Decodable)] -pub struct TokenStream(pub Lrc<Vec<TreeAndJoint>>); +pub struct TokenStream(pub Lrc<Vec<TreeAndSpacing>>); -pub type TreeAndJoint = (TokenTree, IsJoint); +pub type TreeAndSpacing = (TokenTree, Spacing); // `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(target_arch = "x86_64")] rustc_data_structures::static_assert_size!(TokenStream, 8); #[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable)] -pub enum IsJoint { +pub enum Spacing { + Alone, Joint, - NonJoint, } -use IsJoint::*; - impl TokenStream { /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream` /// separating the two arguments with a comma for diagnostic suggestions. @@ -153,7 +151,7 @@ impl TokenStream { let sp = match (&ts, &next) { (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue, ( - (TokenTree::Token(token_left), NonJoint), + (TokenTree::Token(token_left), Spacing::Alone), (TokenTree::Token(token_right), _), ) if ((token_left.is_ident() && !token_left.is_reserved_ident()) || token_left.is_lit()) && ((token_right.is_ident() && !token_right.is_reserved_ident()) || token_right.is_lit()) => { token_left.span } - ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(), + ((TokenTree::Delimited(sp, ..), Spacing::Alone), _) => sp.entire(), _ => continue, }; let sp = sp.shrink_to_hi(); - let comma = (TokenTree::token(token::Comma, sp), NonJoint); + let comma = (TokenTree::token(token::Comma, sp), Spacing::Alone); suggestion = Some((pos, comma, sp)); } } @@ -184,19 +182,19 @@ impl TokenStream { impl From<TokenTree> for TokenStream { fn from(tree: TokenTree) -> TokenStream { - TokenStream::new(vec![(tree, NonJoint)]) + TokenStream::new(vec![(tree, Spacing::Alone)]) } } -impl From<TokenTree> for TreeAndJoint { - fn from(tree: TokenTree) -> TreeAndJoint { - (tree, NonJoint) +impl From<TokenTree> for TreeAndSpacing { + fn from(tree: TokenTree) -> TreeAndSpacing { + (tree, Spacing::Alone) } } impl iter::FromIterator<TokenTree> for TokenStream { fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self { - TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TreeAndJoint>>()) + TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TreeAndSpacing>>()) } } @@ -209,7 +207,7 @@ impl PartialEq<TokenStream> for TokenStream { } impl TokenStream { - pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream { + pub fn new(streams: Vec<TreeAndSpacing>) -> TokenStream { TokenStream(Lrc::new(streams)) } @@ -320,11 +318,11 @@ impl TokenStreamBuilder { // If `self` is not empty and the last tree within the last stream is a // token tree marked with `Joint`... if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut() { - if let Some((TokenTree::Token(last_token), Joint)) = last_stream_lrc.last() { + if let Some((TokenTree::Token(last_token), Spacing::Joint)) = last_stream_lrc.last() { // ...and `stream` is not empty and the first tree within it is // a token tree... let TokenStream(ref mut stream_lrc) = stream; - if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() { + if let Some((TokenTree::Token(token), spacing)) = stream_lrc.first() { // ...and the two tokens can be glued together... if let Some(glued_tok) = last_token.glue(&token) { // ...then do so, by overwriting the last token @@ -337,8 +335,7 @@ impl TokenStreamBuilder { // Overwrite the last token tree with the merged // token. let last_vec_mut = Lrc::make_mut(last_stream_lrc); - *last_vec_mut.last_mut().unwrap() = - (TokenTree::Token(glued_tok), *is_joint); + *last_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing); // Remove the first token tree from `stream`. (This // is almost always the only tree in `stream`.)
@@ -375,7 +372,7 @@ impl Iterator for Cursor { type Item = TokenTree; fn next(&mut self) -> Option<TokenTree> { - self.next_with_joint().map(|(tree, _)| tree) + self.next_with_spacing().map(|(tree, _)| tree) } } @@ -384,7 +381,7 @@ impl Cursor { Cursor { stream, index: 0 } } - pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> { + pub fn next_with_spacing(&mut self) -> Option<TreeAndSpacing> { if self.index < self.stream.len() { self.index += 1; Some(self.stream.0[self.index - 1].clone()) diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs index b908a12c1fc9e..0e5c5fe4d4473 100644 --- a/compiler/rustc_expand/src/mbe/transcribe.rs +++ b/compiler/rustc_expand/src/mbe/transcribe.rs @@ -4,7 +4,7 @@ use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch}; use rustc_ast::mut_visit::{self, MutVisitor}; use rustc_ast::token::{self, NtTT, Token}; -use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint}; +use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing}; use rustc_ast::MacCall; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::Lrc; @@ -111,7 +111,7 @@ pub(super) fn transcribe<'a>( // // Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level // again, and we are done transcribing. - let mut result: Vec<TreeAndJoint> = Vec::new(); + let mut result: Vec<TreeAndSpacing> = Vec::new(); let mut result_stack = Vec::new(); let mut marker = Marker(cx.current_expansion.id, transparency); diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index 39c82f97e0a39..ec41fd7a3eebe 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -2,7 +2,7 @@ use crate::base::ExtCtxt; use rustc_ast as ast; use rustc_ast::token; -use rustc_ast::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint}; +use rustc_ast::tokenstream::{self, DelimSpan, Spacing::*, TokenStream, TreeAndSpacing}; use rustc_ast_pretty::pprust; use rustc_data_structures::sync::Lrc; use rustc_errors::Diagnostic; @@ -47,15 +47,15 @@ impl ToInternal<token::DelimToken> for Delimiter { } } -impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)> +impl FromInternal<(TreeAndSpacing, &'_ ParseSess, &'_ mut Vec<Self>)> for TokenTree<Group, Punct, Ident, Literal> { fn from_internal( - ((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>), + ((tree, spacing), sess, stack): (TreeAndSpacing, &ParseSess, &mut Vec<Self>), ) -> Self { use rustc_ast::token::*; - let joint = is_joint == Joint; + let joint = spacing == Joint; let Token { kind, span } = match tree { tokenstream::TokenTree::Delimited(span, delim, tts) => { let delimiter = Delimiter::from_internal(delim); @@ -261,7 +261,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> { }; let tree = tokenstream::TokenTree::token(kind, span); - TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })]) + TokenStream::new(vec![(tree, if joint { Joint } else { Alone })]) } } @@ -444,7 +444,7 @@ impl server::TokenStreamIter for Rustc<'_> { ) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> { loop { let tree = iter.stack.pop().or_else(|| { - let next = iter.cursor.next_with_joint()?; + let next = iter.cursor.next_with_spacing()?; Some(TokenTree::from_internal((next, self.sess, &mut iter.stack))) })?; // A hack used to pass AST fragments to attribute and derive macros diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs index 034442b798b29..32b124970cf7c 100644 --- a/compiler/rustc_parse/src/lexer/mod.rs +++
b/compiler/rustc_parse/src/lexer/mod.rs @@ -1,22 +1,19 @@ use rustc_ast::ast::AttrStyle; use rustc_ast::token::{self, CommentKind, Token, TokenKind}; -use rustc_ast::tokenstream::IsJoint; -use rustc_data_structures::sync::Lrc; -use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError}; -use rustc_lexer::Base; -use rustc_lexer::{unescape, RawStrError}; +use rustc_ast::tokenstream::{Spacing, TokenStream}; +use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError, PResult}; +use rustc_lexer::unescape::{self, Mode}; +use rustc_lexer::{Base, DocStyle, RawStrError}; use rustc_session::parse::ParseSess; use rustc_span::symbol::{sym, Symbol}; use rustc_span::{BytePos, Pos, Span}; -use std::char; use tracing::debug; mod tokentrees; mod unescape_error_reporting; mod unicode_chars; -use rustc_lexer::{unescape::Mode, DocStyle}; use unescape_error_reporting::{emit_unescape_error, push_escaped_char}; #[derive(Clone, Debug)] @@ -28,7 +25,17 @@ pub struct UnmatchedBrace { pub candidate_span: Option<Span>, } -crate struct StringReader<'a> { +crate fn parse_token_trees<'a>( + sess: &'a ParseSess, + src: &'a str, + start_pos: BytePos, + override_span: Option<Span>, +) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) { + StringReader { sess, start_pos, pos: start_pos, end_src_index: src.len(), src, override_span } + .into_token_trees() +} + +struct StringReader<'a> { sess: &'a ParseSess, /// Initial position, read-only. start_pos: BytePos, @@ -37,38 +44,18 @@ crate struct StringReader<'a> { /// Stop reading src at this index. end_src_index: usize, /// Source text to tokenize. - src: Lrc<String>, + src: &'a str, override_span: Option<Span>, } impl<'a> StringReader<'a> { - crate fn new( - sess: &'a ParseSess, - source_file: Lrc<rustc_span::SourceFile>, - override_span: Option<Span>, - ) -> Self { - let src = source_file.src.clone().unwrap_or_else(|| { - sess.span_diagnostic - .bug(&format!("cannot lex `source_file` without source: {}", source_file.name)); - }); - - StringReader { - sess, - start_pos: source_file.start_pos, - pos: source_file.start_pos, - end_src_index: src.len(), - src, - override_span, - } - } - fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span { self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi)) } /// Returns the next token, and info about preceding whitespace, if any.
- fn next_token(&mut self) -> (IsJoint, Token) { - let mut is_joint = IsJoint::Joint; + fn next_token(&mut self) -> (Spacing, Token) { + let mut spacing = Spacing::Joint; // Skip `#!` at the start of the file let start_src_index = self.src_index(self.pos); @@ -77,7 +64,7 @@ impl<'a> StringReader<'a> { if is_beginning_of_file { if let Some(shebang_len) = rustc_lexer::strip_shebang(text) { self.pos = self.pos + BytePos::from_usize(shebang_len); - is_joint = IsJoint::NonJoint; + spacing = Spacing::Alone; } } @@ -88,7 +75,7 @@ impl<'a> StringReader<'a> { if text.is_empty() { let span = self.mk_sp(self.pos, self.pos); - return (is_joint, Token::new(token::Eof, span)); + return (spacing, Token::new(token::Eof, span)); } let token = rustc_lexer::first_token(text); @@ -101,9 +88,9 @@ impl<'a> StringReader<'a> { match self.cook_lexer_token(token.kind, start) { Some(kind) => { let span = self.mk_sp(start, self.pos); - return (is_joint, Token::new(kind, span)); + return (spacing, Token::new(kind, span)); } - None => is_joint = IsJoint::NonJoint, + None => spacing = Spacing::Alone, } } } diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs index d5977ca3c7d2f..0f364bffb134e 100644 --- a/compiler/rustc_parse/src/lexer/tokentrees.rs +++ b/compiler/rustc_parse/src/lexer/tokentrees.rs @@ -3,8 +3,8 @@ use super::{StringReader, UnmatchedBrace}; use rustc_ast::token::{self, DelimToken, Token}; use rustc_ast::tokenstream::{ DelimSpan, - IsJoint::{self, *}, - TokenStream, TokenTree, TreeAndJoint, + Spacing::{self, *}, + TokenStream, TokenTree, TreeAndSpacing, }; use rustc_ast_pretty::pprust::token_to_string; use rustc_data_structures::fx::FxHashMap; @@ -12,7 +12,7 @@ use rustc_errors::PResult; use rustc_span::Span; impl<'a> StringReader<'a> { - crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) { + pub(super) fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) { let mut tt_reader = TokenTreesReader { string_reader: self, token: Token::dummy(), @@ -77,7 +77,7 @@ impl<'a> TokenTreesReader<'a> { } } - fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> { + fn parse_token_tree(&mut self) -> PResult<'a, TreeAndSpacing> { let sm = self.string_reader.sess.source_map(); match self.token.kind { @@ -262,29 +262,29 @@ impl<'a> TokenTreesReader<'a> { } _ => { let tt = TokenTree::Token(self.token.take()); - let mut is_joint = self.bump(); + let mut spacing = self.bump(); if !self.token.is_op() { - is_joint = NonJoint; + spacing = Alone; } - Ok((tt, is_joint)) + Ok((tt, spacing)) } } } - fn bump(&mut self) -> IsJoint { - let (joint_to_prev, token) = self.string_reader.next_token(); + fn bump(&mut self) -> Spacing { + let (spacing, token) = self.string_reader.next_token(); self.token = token; - joint_to_prev + spacing } } #[derive(Default)] struct TokenStreamBuilder { - buf: Vec<TreeAndJoint>, + buf: Vec<TreeAndSpacing>, } impl TokenStreamBuilder { - fn push(&mut self, (tree, joint): TreeAndJoint) { + fn push(&mut self, (tree, joint): TreeAndSpacing) { if let Some((TokenTree::Token(prev_token), Joint)) = self.buf.last() { if let TokenTree::Token(token) = &tree { if let Some(glued) = prev_token.glue(token) { diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs index 8dc0db01ecb51..40e2e34aa0589 100644 --- a/compiler/rustc_parse/src/lexer/unicode_chars.rs +++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs @@ -332,7 +332,7 @@ const ASCII_ARRAY: &[(char, &str, Option<token::TokenKind>)] = &[ ('"', "Quotation Mark", None), ]; -crate
fn check_for_substitution<'a>( +pub(super) fn check_for_substitution<'a>( reader: &StringReader<'a>, pos: BytePos, ch: char, diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index 462279b0a9e03..e7fd74f551aaa 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -8,7 +8,7 @@ use rustc_ast as ast; use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind}; -use rustc_ast::tokenstream::{self, IsJoint, TokenStream, TokenTree}; +use rustc_ast::tokenstream::{self, Spacing, TokenStream, TokenTree}; use rustc_ast_pretty::pprust; use rustc_data_structures::sync::Lrc; use rustc_errors::{Diagnostic, FatalError, Level, PResult}; @@ -200,8 +200,13 @@ pub fn maybe_file_to_stream( source_file: Lrc<SourceFile>, override_span: Option<Span>, ) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> { - let srdr = lexer::StringReader::new(sess, source_file, override_span); - let (token_trees, unmatched_braces) = srdr.into_token_trees(); + let src = source_file.src.as_ref().unwrap_or_else(|| { + sess.span_diagnostic + .bug(&format!("cannot lex `source_file` without source: {}", source_file.name)); + }); + + let (token_trees, unmatched_braces) = + lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span); match token_trees { Ok(stream) => Ok((stream, unmatched_braces)), @@ -432,7 +437,7 @@ pub fn tokenstream_probably_equal_for_proc_macro( // issue #75734 tracks resolving this. nt_to_tokenstream(nt, sess, *span).into_trees() } else { - TokenStream::new(vec![(tree, IsJoint::NonJoint)]).into_trees() + TokenStream::new(vec![(tree, Spacing::Alone)]).into_trees() } }; diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 84edfecad192f..d22d08cd14438 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -15,7 +15,7 @@ pub use path::PathStyle; use rustc_ast::ptr::P; use rustc_ast::token::{self, DelimToken, Token, TokenKind}; -use rustc_ast::tokenstream::{self, DelimSpan, TokenStream, TokenTree, TreeAndJoint}; +use rustc_ast::tokenstream::{self, DelimSpan, TokenStream, TokenTree, TreeAndSpacing}; use rustc_ast::DUMMY_NODE_ID; use rustc_ast::{self as ast, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe}; use rustc_ast::{Async, MacArgs, MacDelimiter, Mutability, StrLit, Visibility, VisibilityKind}; @@ -118,7 +118,7 @@ impl<'a> Drop for Parser<'a> { struct TokenCursor { frame: TokenCursorFrame, stack: Vec<TokenCursorFrame>, - cur_token: Option<TreeAndJoint>, + cur_token: Option<TreeAndSpacing>, collecting: Option<Collecting>, } @@ -136,7 +136,7 @@ struct TokenCursorFrame { struct Collecting { /// Holds the current tokens captured during the most /// recent call to `collect_tokens` - buf: Vec<TreeAndJoint>, + buf: Vec<TreeAndSpacing>, /// The depth of the `TokenCursor` stack at the time /// collection was started. When we encounter a `TokenTree::Delimited`, /// we want to record the `TokenTree::Delimited` itself, @@ -167,7 +167,7 @@ impl TokenCursor { let tree = if !self.frame.open_delim { self.frame.open_delim = true; TokenTree::open_tt(self.frame.span, self.frame.delim).into() - } else if let Some(tree) = self.frame.tree_cursor.next_with_joint() { + } else if let Some(tree) = self.frame.tree_cursor.next_with_spacing() { tree } else if !self.frame.close_delim { self.frame.close_delim = true; @@ -1154,7 +1154,7 @@ impl<'a> Parser<'a> { f: impl FnOnce(&mut Self) -> PResult<'a, R>, ) -> PResult<'a, (R, TokenStream)> { // Record all tokens we parse when parsing this item.
- let tokens: Vec<TreeAndJoint> = self.token_cursor.cur_token.clone().into_iter().collect(); + let tokens: Vec<TreeAndSpacing> = self.token_cursor.cur_token.clone().into_iter().collect(); debug!("collect_tokens: starting with {:?}", tokens); // We need special handling for the case where `collect_tokens` is called diff --git a/src/test/ui/ast-json/ast-json-noexpand-output.stdout b/src/test/ui/ast-json/ast-json-noexpand-output.stdout index d0942f78bb86c..031c0d0cae51c 100644 --- a/src/test/ui/ast-json/ast-json-noexpand-output.stdout +++ b/src/test/ui/ast-json/ast-json-noexpand-output.stdout @@ -1 +1 @@ -{"module":{"inner":{"lo":0,"hi":0},"items":[{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"NonJoint"]]}]}}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]} +{"module":{"inner":{"lo":0,"hi":0},"items":[{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]}}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]} diff --git a/src/test/ui/ast-json/ast-json-output.stdout b/src/test/ui/ast-json/ast-json-output.stdout index dc06fd74a4bb5..9b3b6870cbe7e 100644 --- a/src/test/ui/ast-json/ast-json-output.stdout +++ b/src/test/ui/ast-json/ast-json-output.stdout @@ -1 +1 @@
-{"module":{"inner":{"lo":0,"hi":0},"items":[{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"prelude_import","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":"Empty"}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Use","fields":[{"prefix":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"{{root}}","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"std","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"prelude","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"v1","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"kind":"Glob","span":{"lo":0,"hi":0}}]},"tokens":null},{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"macro_use","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":"Empty"}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"std","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null},{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"NonJoint"]]}]}}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]} 
+{"module":{"inner":{"lo":0,"hi":0},"items":[{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"prelude_import","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":"Empty"}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Use","fields":[{"prefix":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"{{root}}","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"std","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"prelude","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"v1","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"kind":"Glob","span":{"lo":0,"hi":0}}]},"tokens":null},{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"macro_use","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":"Empty"}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"std","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null},{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"node":"Inherited","span":{"lo":0,"hi":0}},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}]},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]}}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]}
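Note (not part of the patch): the `Alone`/`Joint` distinction this rename settles on is the same "spacing" concept the stable proc-macro API exposes on `Punct` tokens, and inside the compiler it now travels next to each tree as the `(TokenTree, Spacing)` pair aliased as `TreeAndSpacing`. The sketch below is a standalone illustration of that concept using the public `proc_macro2` crate (assumed as a dependency for this snippet; it is not code touched by this diff): two punctuation tokens built with `Spacing::Joint` are treated as glued into one operator, while `Spacing::Alone` keeps them separate.

```rust
// Illustration only: proc_macro2 mirrors the compiler-internal Alone/Joint idea
// on its `Punct` token type.
use proc_macro2::{Punct, Spacing, TokenStream, TokenTree};

fn main() {
    // `=` marked Joint is glued to the following `>`, forming the `=>` operator.
    let glued: TokenStream = vec![
        TokenTree::Punct(Punct::new('=', Spacing::Joint)),
        TokenTree::Punct(Punct::new('>', Spacing::Alone)),
    ]
    .into_iter()
    .collect();

    // The same two characters marked Alone stay two separate tokens.
    let separate: TokenStream = vec![
        TokenTree::Punct(Punct::new('=', Spacing::Alone)),
        TokenTree::Punct(Punct::new('>', Spacing::Alone)),
    ]
    .into_iter()
    .collect();

    println!("{}", glued);    // expected: "=>"
    println!("{}", separate); // expected: "= >"
}
```

This is also why the lexer above starts each token with `Spacing::Joint` and downgrades to `Spacing::Alone` when whitespace (or a non-operator token) follows: spacing records whether a token may be glued to its successor.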