Auto merge of rust-lang#78782 - petrochenkov:nodoctok, r=Aaron1011

Do not collect tokens for doc comments

A doc comment is a single token, and the AST has all the information needed to re-create it precisely.
Doc comments are also responsible for the majority of calls to `collect_tokens` (those with `num_calls == 1` and `num_calls == 0`, cc rust-lang#78736).

(I also moved token collection into `fn parse_attribute` to deduplicate code a bit.)
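
To illustrate the point, here is a minimal, self-contained sketch (using simplified stand-in types `CommentKind`, `AttrStyle`, and `DocComment`, not the real `rustc_ast` API) of how a doc comment's single token can be rebuilt from data the AST already carries. The real equivalent is the `AttrKind::DocComment` arm of the new `Attribute::tokens` method in the diff below.

```rust
// Simplified stand-ins for rustc_ast's CommentKind / AttrStyle / doc-comment data.
#[derive(Clone, Copy, Debug)]
enum CommentKind {
    Line,  // `///` or `//!`
    Block, // `/** ... */` or `/*! ... */`
}

#[derive(Clone, Copy, Debug)]
enum AttrStyle {
    Outer, // `/// ...` or `/** ... */`
    Inner, // `//! ...` or `/*! ... */`
}

#[derive(Debug)]
struct DocComment {
    kind: CommentKind,
    style: AttrStyle,
    data: String, // comment text without the `///`-style sigils
}

impl DocComment {
    // Re-create the original source token for this doc comment. This mirrors the
    // `AttrKind::DocComment` arm of `Attribute::tokens` added in the diff below,
    // which builds a `token::DocComment` token on the fly instead of reading a
    // cached `LazyTokenStream`.
    fn to_token_text(&self) -> String {
        match (self.kind, self.style) {
            (CommentKind::Line, AttrStyle::Outer) => format!("///{}", self.data),
            (CommentKind::Line, AttrStyle::Inner) => format!("//!{}", self.data),
            (CommentKind::Block, AttrStyle::Outer) => format!("/**{}*/", self.data),
            (CommentKind::Block, AttrStyle::Inner) => format!("/*!{}*/", self.data),
        }
    }
}

fn main() {
    let line = DocComment {
        kind: CommentKind::Line,
        style: AttrStyle::Outer,
        data: " Frobnicates the widget.".to_string(),
    };
    let block = DocComment {
        kind: CommentKind::Block,
        style: AttrStyle::Inner,
        data: " Crate-level docs. ".to_string(),
    };
    // The tokens are fully recoverable, so nothing needs to be collected or cached.
    assert_eq!(line.to_token_text(), "/// Frobnicates the widget.");
    assert_eq!(block.to_token_text(), "/*! Crate-level docs. */");
    println!("{}\n{}", line.to_token_text(), block.to_token_text());
}
```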

r? `@Aaron1011`
bors committed Nov 12, 2020
2 parents 77180db + 12de1e8 commit 5a6a41e
Showing 17 changed files with 140 additions and 161 deletions.
3 changes: 1 addition & 2 deletions compiler/rustc_ast/src/ast.rs
@@ -2439,13 +2439,12 @@ pub struct Attribute {
/// or the construct this attribute is contained within (inner).
pub style: AttrStyle,
pub span: Span,
pub tokens: Option<LazyTokenStream>,
}

#[derive(Clone, Encodable, Decodable, Debug)]
pub enum AttrKind {
/// A normal attribute.
Normal(AttrItem),
Normal(AttrItem, Option<LazyTokenStream>),

/// A doc comment (e.g. `/// ...`, `//! ...`, `/** ... */`, `/*! ... */`).
/// Doc attributes (e.g. `#[doc="..."]`) are represented with the `Normal`
53 changes: 32 additions & 21 deletions compiler/rustc_ast/src/attr/mod.rs
@@ -8,7 +8,7 @@ use crate::ast::{Path, PathSegment};
use crate::mut_visit::visit_clobber;
use crate::ptr::P;
use crate::token::{self, CommentKind, Token};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream, TokenTree, TreeAndSpacing};

use rustc_index::bit_set::GrowableBitSet;
use rustc_span::source_map::{BytePos, Spanned};
@@ -120,15 +120,15 @@ impl NestedMetaItem {
impl Attribute {
pub fn has_name(&self, name: Symbol) -> bool {
match self.kind {
AttrKind::Normal(ref item) => item.path == name,
AttrKind::Normal(ref item, _) => item.path == name,
AttrKind::DocComment(..) => false,
}
}

/// For a single-segment attribute, returns its name; otherwise, returns `None`.
pub fn ident(&self) -> Option<Ident> {
match self.kind {
AttrKind::Normal(ref item) => {
AttrKind::Normal(ref item, _) => {
if item.path.segments.len() == 1 {
Some(item.path.segments[0].ident)
} else {
@@ -144,14 +144,14 @@ impl Attribute {

pub fn value_str(&self) -> Option<Symbol> {
match self.kind {
AttrKind::Normal(ref item) => item.meta(self.span).and_then(|meta| meta.value_str()),
AttrKind::Normal(ref item, _) => item.meta(self.span).and_then(|meta| meta.value_str()),
AttrKind::DocComment(..) => None,
}
}

pub fn meta_item_list(&self) -> Option<Vec<NestedMetaItem>> {
match self.kind {
AttrKind::Normal(ref item) => match item.meta(self.span) {
AttrKind::Normal(ref item, _) => match item.meta(self.span) {
Some(MetaItem { kind: MetaItemKind::List(list), .. }) => Some(list),
_ => None,
},
@@ -160,7 +160,7 @@ }
}

pub fn is_word(&self) -> bool {
if let AttrKind::Normal(item) = &self.kind {
if let AttrKind::Normal(item, _) = &self.kind {
matches!(item.args, MacArgs::Empty)
} else {
false
@@ -246,15 +246,15 @@ impl AttrItem {
impl Attribute {
pub fn is_doc_comment(&self) -> bool {
match self.kind {
AttrKind::Normal(_) => false,
AttrKind::Normal(..) => false,
AttrKind::DocComment(..) => true,
}
}

pub fn doc_str(&self) -> Option<Symbol> {
match self.kind {
AttrKind::DocComment(.., data) => Some(data),
AttrKind::Normal(ref item) if item.path == sym::doc => {
AttrKind::Normal(ref item, _) if item.path == sym::doc => {
item.meta(self.span).and_then(|meta| meta.value_str())
}
_ => None,
@@ -263,25 +263,37 @@

pub fn get_normal_item(&self) -> &AttrItem {
match self.kind {
AttrKind::Normal(ref item) => item,
AttrKind::Normal(ref item, _) => item,
AttrKind::DocComment(..) => panic!("unexpected doc comment"),
}
}

pub fn unwrap_normal_item(self) -> AttrItem {
match self.kind {
AttrKind::Normal(item) => item,
AttrKind::Normal(item, _) => item,
AttrKind::DocComment(..) => panic!("unexpected doc comment"),
}
}

/// Extracts the MetaItem from inside this Attribute.
pub fn meta(&self) -> Option<MetaItem> {
match self.kind {
AttrKind::Normal(ref item) => item.meta(self.span),
AttrKind::Normal(ref item, _) => item.meta(self.span),
AttrKind::DocComment(..) => None,
}
}

pub fn tokens(&self) -> TokenStream {
match self.kind {
AttrKind::Normal(_, ref tokens) => tokens
.as_ref()
.unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self))
.create_token_stream(),
AttrKind::DocComment(comment_kind, data) => TokenStream::from(TokenTree::Token(
Token::new(token::DocComment(comment_kind, self.style, data), self.span),
)),
}
}
}

/* Constructors */
@@ -321,11 +333,16 @@ crate fn mk_attr_id() -> AttrId {
}

pub fn mk_attr(style: AttrStyle, path: Path, args: MacArgs, span: Span) -> Attribute {
mk_attr_from_item(style, AttrItem { path, args, tokens: None }, span)
mk_attr_from_item(AttrItem { path, args, tokens: None }, None, style, span)
}

pub fn mk_attr_from_item(style: AttrStyle, item: AttrItem, span: Span) -> Attribute {
Attribute { kind: AttrKind::Normal(item), id: mk_attr_id(), style, span, tokens: None }
pub fn mk_attr_from_item(
item: AttrItem,
tokens: Option<LazyTokenStream>,
style: AttrStyle,
span: Span,
) -> Attribute {
Attribute { kind: AttrKind::Normal(item, tokens), id: mk_attr_id(), style, span }
}

/// Returns an inner attribute with the given value and span.
@@ -344,13 +361,7 @@ pub fn mk_doc_comment(
data: Symbol,
span: Span,
) -> Attribute {
Attribute {
kind: AttrKind::DocComment(comment_kind, data),
id: mk_attr_id(),
style,
span,
tokens: None,
}
Attribute { kind: AttrKind::DocComment(comment_kind, data), id: mk_attr_id(), style, span }
}

pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool {
6 changes: 3 additions & 3 deletions compiler/rustc_ast/src/mut_visit.rs
@@ -586,17 +586,17 @@ pub fn noop_visit_local<T: MutVisitor>(local: &mut P<Local>, vis: &mut T) {
}

pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
let Attribute { kind, id: _, style: _, span, tokens } = attr;
let Attribute { kind, id: _, style: _, span } = attr;
match kind {
AttrKind::Normal(AttrItem { path, args, tokens }) => {
AttrKind::Normal(AttrItem { path, args, tokens }, attr_tokens) => {
vis.visit_path(path);
visit_mac_args(args, vis);
visit_lazy_tts(tokens, vis);
visit_lazy_tts(attr_tokens, vis);
}
AttrKind::DocComment(..) => {}
}
vis.visit_span(span);
visit_lazy_tts(tokens, vis);
}

pub fn noop_visit_mac<T: MutVisitor>(mac: &mut MacCall, vis: &mut T) {
2 changes: 1 addition & 1 deletion compiler/rustc_ast/src/visit.rs
@@ -881,7 +881,7 @@ pub fn walk_vis<'a, V: Visitor<'a>>(visitor: &mut V, vis: &'a Visibility) {

pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute) {
match attr.kind {
AttrKind::Normal(ref item) => walk_mac_args(visitor, &item.args),
AttrKind::Normal(ref item, ref _tokens) => walk_mac_args(visitor, &item.args),
AttrKind::DocComment(..) => {}
}
}
17 changes: 10 additions & 7 deletions compiler/rustc_ast_lowering/src/lib.rs
@@ -966,17 +966,20 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// Note that we explicitly do not walk the path. Since we don't really
// lower attributes (we use the AST version) there is nowhere to keep
// the `HirId`s. We don't actually need HIR version of attributes anyway.
// Tokens are also not needed after macro expansion and parsing.
let kind = match attr.kind {
AttrKind::Normal(ref item) => AttrKind::Normal(AttrItem {
path: item.path.clone(),
args: self.lower_mac_args(&item.args),
tokens: None,
}),
AttrKind::Normal(ref item, _) => AttrKind::Normal(
AttrItem {
path: item.path.clone(),
args: self.lower_mac_args(&item.args),
tokens: None,
},
None,
),
AttrKind::DocComment(comment_kind, data) => AttrKind::DocComment(comment_kind, data),
};

// Tokens aren't needed after macro expansion and parsing
Attribute { kind, id: attr.id, style: attr.style, span: attr.span, tokens: None }
Attribute { kind, id: attr.id, style: attr.style, span: attr.span }
}

fn lower_mac_args(&mut self, args: &MacArgs) -> MacArgs {
2 changes: 1 addition & 1 deletion compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -426,7 +426,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
}
self.maybe_print_comment(attr.span.lo());
match attr.kind {
ast::AttrKind::Normal(ref item) => {
ast::AttrKind::Normal(ref item, _) => {
match attr.style {
ast::AttrStyle::Inner => self.word("#!["),
ast::AttrStyle::Outer => self.word("#["),
12 changes: 5 additions & 7 deletions compiler/rustc_expand/src/config.rs
@@ -291,8 +291,7 @@ impl<'a> StripUnconfigured<'a> {
expanded_attrs
.into_iter()
.flat_map(|(item, span)| {
let orig_tokens =
attr.tokens.as_ref().unwrap_or_else(|| panic!("Missing tokens for {:?}", attr));
let orig_tokens = attr.tokens();

// We are taking an attribute of the form `#[cfg_attr(pred, attr)]`
// and producing an attribute of the form `#[attr]`. We
@@ -302,7 +301,7 @@ impl<'a> StripUnconfigured<'a> {

// Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
// for `attr` when we expand it to `#[attr]`
let pound_token = orig_tokens.create_token_stream().trees().next().unwrap();
let pound_token = orig_tokens.trees().next().unwrap();
if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) {
panic!("Bad tokens for attribute {:?}", attr);
}
@@ -316,13 +315,12 @@ impl<'a> StripUnconfigured<'a> {
.unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
.create_token_stream(),
);

let mut attr = attr::mk_attr_from_item(attr.style, item, span);
attr.tokens = Some(LazyTokenStream::new(TokenStream::new(vec![
let tokens = Some(LazyTokenStream::new(TokenStream::new(vec![
(pound_token, Spacing::Alone),
(bracket_group, Spacing::Alone),
])));
self.process_cfg_attr(attr)

self.process_cfg_attr(attr::mk_attr_from_item(item, tokens, attr.style, span))
})
.collect()
}
10 changes: 4 additions & 6 deletions compiler/rustc_expand/src/expand.rs
@@ -1776,15 +1776,13 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {

let meta = attr::mk_list_item(Ident::with_dummy_span(sym::doc), items);
*at = ast::Attribute {
kind: ast::AttrKind::Normal(AttrItem {
path: meta.path,
args: meta.kind.mac_args(meta.span),
tokens: None,
}),
kind: ast::AttrKind::Normal(
AttrItem { path: meta.path, args: meta.kind.mac_args(meta.span), tokens: None },
None,
),
span: at.span,
id: at.id,
style: at.style,
tokens: None,
};
} else {
noop_visit_attribute(at, self)
4 changes: 2 additions & 2 deletions compiler/rustc_middle/src/ich/impls_syntax.rs
@@ -40,8 +40,8 @@ impl<'ctx> rustc_ast::HashStableContext for StableHashingContext<'ctx> {
debug_assert!(!attr.ident().map_or(false, |ident| self.is_ignored_attr(ident.name)));
debug_assert!(!attr.is_doc_comment());

let ast::Attribute { kind, id: _, style, span, tokens } = attr;
if let ast::AttrKind::Normal(item) = kind {
let ast::Attribute { kind, id: _, style, span } = attr;
if let ast::AttrKind::Normal(item, tokens) = kind {
item.hash_stable(self, hasher);
style.hash_stable(self, hasher);
span.hash_stable(self, hasher);
7 changes: 1 addition & 6 deletions compiler/rustc_parse/src/lib.rs
@@ -616,12 +616,7 @@ fn prepend_attrs(
if attr.style == ast::AttrStyle::Inner {
return None;
}
builder.push(
attr.tokens
.as_ref()
.unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr))
.create_token_stream(),
);
builder.push(attr.tokens());
}
builder.push(tokens);
Some(builder.build())