Rollup of 8 pull requests #134456

Closed

28 commits
7eb0d84
refactor: replace &PathBuf with &Path to enhance generality
Integral-Tech Dec 17, 2024
a676872
Clarify the match ergonomics 2024 migration lint's output
dianne Dec 16, 2024
70b8527
Correctly check the edition of subpatterns in the pattern migration 2…
dianne Dec 17, 2024
77e9051
Improve the pattern migration 2024 migration lint's message
dianne Dec 17, 2024
a105cd6
Use field init shorthand where possible
joshtriplett Dec 17, 2024
28c6d0b
Add the edition guide link from the match 2024 migration lint to the …
dianne Dec 17, 2024
0bf6e82
Change the lookahead in `MacroParser::new`.
nnethercote Dec 11, 2024
3575e79
Simplify `RefTokenTreeCursor::look_ahead`.
nnethercote Dec 11, 2024
809975c
Rename `RefTokenTreeCursor`.
nnethercote Dec 11, 2024
c82d586
Remove `Peekable<TokenStreamIter>` uses.
nnethercote Dec 11, 2024
fd83954
Factor out repeated code from `eat_dollar`.
nnethercote Dec 11, 2024
39305bf
Fix `x build --stage 1 std` when using cg_cranelift as the default ba…
jyn514 Dec 17, 2024
2903356
Overhaul `TokenTreeCursor`.
nnethercote Dec 10, 2024
4977640
Fix const conditions for RPITITs
compiler-errors Dec 5, 2024
8a85bdc
Remove a comment that shouldn't have been committed.
nnethercote Dec 18, 2024
20bff63
fix(LazyCell): documentation of get[_mut] was wrong
jalil-salame Dec 18, 2024
64abe8b
Simplify `AllKeywords`.
nnethercote Dec 13, 2024
1564318
Only have one source of truth for keywords.
nnethercote Dec 13, 2024
ed5b91a
Move `gen` in the keyword list.
nnethercote Dec 13, 2024
6de550c
Improve comments on `Keywords`.
nnethercote Dec 17, 2024
322962a
Rollup merge of #133926 - compiler-errors:const-conditions, r=lcnr
jieyouxu Dec 18, 2024
de7895d
Rollup merge of #134161 - nnethercote:overhaul-token-cursors, r=spast…
jieyouxu Dec 18, 2024
240406c
Rollup merge of #134253 - nnethercote:overhaul-keywords, r=petrochenkov
jieyouxu Dec 18, 2024
7b9787f
Rollup merge of #134394 - dianne:clarify-pat-2024-migration, r=compil…
jieyouxu Dec 18, 2024
5394198
Rollup merge of #134420 - Integral-Tech:pathbuf-refactor, r=compiler-…
jieyouxu Dec 18, 2024
4126cd3
Rollup merge of #134443 - joshtriplett:use-field-init-shorthand, r=lq…
jieyouxu Dec 18, 2024
f42e098
Rollup merge of #134444 - jyn514:cranelift-std, r=bjorn3
jieyouxu Dec 18, 2024
9f17602
Rollup merge of #134452 - jalil-salame:fix-lazy-cell-docs, r=tgross35
jieyouxu Dec 18, 2024
69 changes: 30 additions & 39 deletions compiler/rustc_ast/src/attr/mod.rs
@@ -1,7 +1,6 @@
//! Functions dealing with attributes and meta items.

use std::fmt::Debug;
use std::iter;
use std::sync::atomic::{AtomicU32, Ordering};

use rustc_index::bit_set::GrowableBitSet;
@@ -16,7 +15,9 @@ use crate::ast::{
};
use crate::ptr::P;
use crate::token::{self, CommentKind, Delimiter, Token};
use crate::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, TokenStream, TokenTree};
use crate::tokenstream::{
DelimSpan, LazyAttrTokenStream, Spacing, TokenStream, TokenStreamIter, TokenTree,
};
use crate::util::comments;
use crate::util::literal::escape_string_symbol;

@@ -365,22 +366,19 @@ impl MetaItem {
}
}

fn from_tokens<'a, I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
where
I: Iterator<Item = &'a TokenTree>,
{
fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItem> {
// FIXME: Share code with `parse_path`.
let tt = tokens.next().map(|tt| TokenTree::uninterpolate(tt));
let tt = iter.next().map(|tt| TokenTree::uninterpolate(tt));
let path = match tt.as_deref() {
Some(&TokenTree::Token(
Token { kind: ref kind @ (token::Ident(..) | token::PathSep), span },
_,
)) => 'arm: {
let mut segments = if let &token::Ident(name, _) = kind {
if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
tokens.peek()
iter.peek()
{
tokens.next();
iter.next();
thin_vec![PathSegment::from_ident(Ident::new(name, span))]
} else {
break 'arm Path::from_ident(Ident::new(name, span));
@@ -390,16 +388,16 @@ impl MetaItem {
};
loop {
if let Some(&TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) =
tokens.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref()
iter.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref()
{
segments.push(PathSegment::from_ident(Ident::new(name, span)));
} else {
return None;
}
if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
tokens.peek()
iter.peek()
{
tokens.next();
iter.next();
} else {
break;
}
@@ -420,8 +418,8 @@ impl MetaItem {
}
_ => return None,
};
let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi());
let kind = MetaItemKind::from_tokens(tokens)?;
let list_closing_paren_pos = iter.peek().map(|tt| tt.span().hi());
let kind = MetaItemKind::from_tokens(iter)?;
let hi = match &kind {
MetaItemKind::NameValue(lit) => lit.span.hi(),
MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(path.span.hi()),
@@ -438,25 +436,23 @@ impl MetaItemKind {
impl MetaItemKind {
// public because it can be called in the hir
pub fn list_from_tokens(tokens: TokenStream) -> Option<ThinVec<MetaItemInner>> {
let mut tokens = tokens.trees().peekable();
let mut iter = tokens.iter();
let mut result = ThinVec::new();
while tokens.peek().is_some() {
let item = MetaItemInner::from_tokens(&mut tokens)?;
while iter.peek().is_some() {
let item = MetaItemInner::from_tokens(&mut iter)?;
result.push(item);
match tokens.next() {
match iter.next() {
None | Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) => {}
_ => return None,
}
}
Some(result)
}

fn name_value_from_tokens<'a>(
tokens: &mut impl Iterator<Item = &'a TokenTree>,
) -> Option<MetaItemKind> {
match tokens.next() {
fn name_value_from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemKind> {
match iter.next() {
Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => {
MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
MetaItemKind::name_value_from_tokens(&mut inner_tokens.iter())
}
Some(TokenTree::Token(token, _)) => {
MetaItemLit::from_token(token).map(MetaItemKind::NameValue)
@@ -465,19 +461,17 @@ impl MetaItemKind {
}
}

fn from_tokens<'a>(
tokens: &mut iter::Peekable<impl Iterator<Item = &'a TokenTree>>,
) -> Option<MetaItemKind> {
match tokens.peek() {
fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemKind> {
match iter.peek() {
Some(TokenTree::Delimited(.., Delimiter::Parenthesis, inner_tokens)) => {
let inner_tokens = inner_tokens.clone();
tokens.next();
iter.next();
MetaItemKind::list_from_tokens(inner_tokens).map(MetaItemKind::List)
}
Some(TokenTree::Delimited(..)) => None,
Some(TokenTree::Token(Token { kind: token::Eq, .. }, _)) => {
tokens.next();
MetaItemKind::name_value_from_tokens(tokens)
iter.next();
MetaItemKind::name_value_from_tokens(iter)
}
_ => Some(MetaItemKind::Word),
}
@@ -593,22 +587,19 @@ impl MetaItemInner {
self.meta_item().is_some()
}

fn from_tokens<'a, I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemInner>
where
I: Iterator<Item = &'a TokenTree>,
{
match tokens.peek() {
fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemInner> {
match iter.peek() {
Some(TokenTree::Token(token, _)) if let Some(lit) = MetaItemLit::from_token(token) => {
tokens.next();
iter.next();
return Some(MetaItemInner::Lit(lit));
}
Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => {
tokens.next();
return MetaItemInner::from_tokens(&mut inner_tokens.trees().peekable());
iter.next();
return MetaItemInner::from_tokens(&mut inner_tokens.iter());
}
_ => {}
}
MetaItem::from_tokens(tokens).map(MetaItemInner::MetaItem)
MetaItem::from_tokens(iter).map(MetaItemInner::MetaItem)
}
}

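A minimal sketch (not rustc code; plain strings stand in for token trees) may help show the peek-then-consume shape that `MetaItem::from_tokens` keeps through this migration: consume a path segment, then peek for a `::` separator and only consume it when one is present.

```rust
// Sketch of the `::`-separated path loop: consume a segment, then peek
// for a separator and consume it only when one is actually there.
fn parse_path(tokens: &[&str]) -> Option<Vec<String>> {
    let mut iter = tokens.iter().peekable();
    let mut segments = Vec::new();
    loop {
        // A segment must appear at the start and after every `::`.
        match iter.next() {
            Some(&seg) if seg != "::" => segments.push(seg.to_string()),
            _ => return None,
        }
        // Peek: a `::` means another segment follows; anything else ends the path.
        if iter.peek() == Some(&&"::") {
            iter.next();
        } else {
            break;
        }
    }
    Some(segments)
}

fn main() {
    let path = parse_path(&["std", "::", "mem", "::", "swap"]);
    assert_eq!(
        path,
        Some(vec!["std".to_string(), "mem".to_string(), "swap".to_string()])
    );
    assert_eq!(parse_path(&["::", "oops"]), None); // leading separator rejected
}
```

The diff changes only who provides `peek`: the dedicated `TokenStreamIter` supplies it directly, so these helpers no longer need to be generic over `iter::Peekable<I>`.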
8 changes: 7 additions & 1 deletion compiler/rustc_ast/src/token.rs
@@ -903,7 +903,8 @@ impl Token {
self.is_non_raw_ident_where(|id| id.name == kw)
}

/// Returns `true` if the token is a given keyword, `kw` or if `case` is `Insensitive` and this token is an identifier equal to `kw` ignoring the case.
/// Returns `true` if the token is a given keyword, `kw` or if `case` is `Insensitive` and this
/// token is an identifier equal to `kw` ignoring the case.
pub fn is_keyword_case(&self, kw: Symbol, case: Case) -> bool {
self.is_keyword(kw)
|| (case == Case::Insensitive
@@ -916,6 +917,11 @@ impl Token {
self.is_non_raw_ident_where(Ident::is_path_segment_keyword)
}

/// Don't use this unless you're doing something very loose and heuristic-y.
pub fn is_any_keyword(&self) -> bool {
self.is_non_raw_ident_where(Ident::is_any_keyword)
}

/// Returns true for reserved identifiers used internally for elided lifetimes,
/// unnamed method parameters, crate root module, error recovery etc.
pub fn is_special_ident(&self) -> bool {
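The new `is_any_keyword` follows this file's existing pattern of thin wrappers around `is_non_raw_ident_where`. A self-contained sketch of that pattern, using hypothetical stand-in types and a made-up four-entry keyword table (rustc's real list is much larger):

```rust
// Sketch of the predicate-delegation pattern used by `is_any_keyword`:
// one generic "non-raw ident matches predicate" helper, many thin wrappers.
#[derive(Clone, Copy)]
struct Ident {
    name: &'static str,
    is_raw: bool, // `r#fn` is raw; raw idents never count as keywords
}

impl Ident {
    fn is_any_keyword(self) -> bool {
        // Assumption: a tiny stand-in keyword table, not rustc's real one.
        matches!(self.name, "fn" | "let" | "match" | "gen")
    }
}

enum Token {
    Ident(Ident),
    Comma,
}

impl Token {
    // The shared helper: applies `pred` only to non-raw identifiers.
    fn is_non_raw_ident_where(&self, pred: impl FnOnce(Ident) -> bool) -> bool {
        match self {
            Token::Ident(id) if !id.is_raw => pred(*id),
            _ => false,
        }
    }

    // Thin wrapper, mirroring the one added in this diff.
    fn is_any_keyword(&self) -> bool {
        self.is_non_raw_ident_where(Ident::is_any_keyword)
    }
}

fn main() {
    let t = Token::Ident(Ident { name: "fn", is_raw: false });
    assert!(t.is_any_keyword());
    let raw = Token::Ident(Ident { name: "fn", is_raw: true });
    assert!(!raw.is_any_keyword()); // raw idents are not keywords
    assert!(!Token::Comma.is_any_keyword());
}
```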
76 changes: 22 additions & 54 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -99,7 +99,7 @@
CTX: crate::HashStableContext,
{
fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
for sub_tt in self.trees() {
for sub_tt in self.iter() {
sub_tt.hash_stable(hcx, hasher);
}
}
@@ -406,7 +406,7 @@ impl Eq for TokenStream {}

impl PartialEq<TokenStream> for TokenStream {
fn eq(&self, other: &TokenStream) -> bool {
self.trees().eq(other.trees())
self.iter().eq(other.iter())
}
}

@@ -423,24 +423,24 @@ impl TokenStream {
self.0.len()
}

pub fn trees(&self) -> RefTokenTreeCursor<'_> {
RefTokenTreeCursor::new(self)
pub fn get(&self, index: usize) -> Option<&TokenTree> {
self.0.get(index)
}

pub fn into_trees(self) -> TokenTreeCursor {
TokenTreeCursor::new(self)
pub fn iter(&self) -> TokenStreamIter<'_> {
TokenStreamIter::new(self)
}

/// Compares two `TokenStream`s, checking equality without regarding span information.
pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
let mut t1 = self.trees();
let mut t2 = other.trees();
for (t1, t2) in iter::zip(&mut t1, &mut t2) {
if !t1.eq_unspanned(t2) {
let mut iter1 = self.iter();
let mut iter2 = other.iter();
for (tt1, tt2) in iter::zip(&mut iter1, &mut iter2) {
if !tt1.eq_unspanned(tt2) {
return false;
}
}
t1.next().is_none() && t2.next().is_none()
iter1.next().is_none() && iter2.next().is_none()
}

/// Create a token stream containing a single token with alone spacing. The
@@ -509,7 +509,7 @@ impl TokenStream {
#[must_use]
pub fn flattened(&self) -> TokenStream {
fn can_skip(stream: &TokenStream) -> bool {
stream.trees().all(|tree| match tree {
stream.iter().all(|tree| match tree {
TokenTree::Token(token, _) => !matches!(
token.kind,
token::NtIdent(..) | token::NtLifetime(..) | token::Interpolated(..)
@@ -522,7 +522,7 @@ impl TokenStream {
return self.clone();
}

self.trees().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
self.iter().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
}

// If `vec` is not empty, try to glue `tt` onto its last token. The return
@@ -665,25 +665,26 @@ impl TokenStream {
}
}

/// By-reference iterator over a [`TokenStream`], that produces `&TokenTree`
/// items.
#[derive(Clone)]
pub struct RefTokenTreeCursor<'t> {
pub struct TokenStreamIter<'t> {
stream: &'t TokenStream,
index: usize,
}

impl<'t> RefTokenTreeCursor<'t> {
impl<'t> TokenStreamIter<'t> {
fn new(stream: &'t TokenStream) -> Self {
RefTokenTreeCursor { stream, index: 0 }
TokenStreamIter { stream, index: 0 }
}

pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
self.stream.0.get(self.index + n)
// Peeking could be done via `Peekable`, but most iterators need peeking,
// and this is simple and avoids the need to use `peekable` and `Peekable`
// at all the use sites.
pub fn peek(&self) -> Option<&'t TokenTree> {
self.stream.0.get(self.index)
}
}

impl<'t> Iterator for RefTokenTreeCursor<'t> {
impl<'t> Iterator for TokenStreamIter<'t> {
type Item = &'t TokenTree;

fn next(&mut self) -> Option<&'t TokenTree> {
@@ -694,39 +695,6 @@ impl<'t> Iterator for TokenStreamIter<'t> {
}
}

/// Owning by-value iterator over a [`TokenStream`], that produces `&TokenTree`
/// items.
///
/// Doesn't impl `Iterator` because Rust doesn't permit an owning iterator to
/// return `&T` from `next`; the need for an explicit lifetime in the `Item`
/// associated type gets in the way. Instead, use `next_ref` (which doesn't
/// involve associated types) for getting individual elements, or
/// `RefTokenTreeCursor` if you really want an `Iterator`, e.g. in a `for`
/// loop.
#[derive(Clone, Debug)]
pub struct TokenTreeCursor {
pub stream: TokenStream,
index: usize,
}

impl TokenTreeCursor {
fn new(stream: TokenStream) -> Self {
TokenTreeCursor { stream, index: 0 }
}

#[inline]
pub fn next_ref(&mut self) -> Option<&TokenTree> {
self.stream.0.get(self.index).map(|tree| {
self.index += 1;
tree
})
}

pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
self.stream.0.get(self.index + n)
}
}

#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)]
pub struct DelimSpan {
pub open: Span,
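The net effect of this file's changes: one by-reference iterator with built-in peeking replaces both `RefTokenTreeCursor` (whose `look_ahead` is gone) and the owning `TokenTreeCursor`. A self-contained sketch of the shape, with a generic element type standing in for `TokenTree`:

```rust
// Sketch of the `TokenStreamIter` shape: a by-reference iterator over an
// index-addressable stream, with `peek` built in so neither an owning
// cursor nor a `Peekable` wrapper is needed.
struct Stream<T>(Vec<T>);

impl<T> Stream<T> {
    // Mirrors the new `TokenStream::get`.
    fn get(&self, index: usize) -> Option<&T> {
        self.0.get(index)
    }

    // Mirrors the new `TokenStream::iter`.
    fn iter(&self) -> StreamIter<'_, T> {
        StreamIter { stream: self, index: 0 }
    }
}

#[derive(Clone)]
struct StreamIter<'t, T> {
    stream: &'t Stream<T>,
    index: usize,
}

impl<'t, T> StreamIter<'t, T> {
    // Peek without consuming; the returned reference borrows from the
    // stream (`'t`), not from the iterator itself.
    fn peek(&self) -> Option<&'t T> {
        self.stream.0.get(self.index)
    }
}

impl<'t, T> Iterator for StreamIter<'t, T> {
    type Item = &'t T;
    fn next(&mut self) -> Option<&'t T> {
        self.stream.0.get(self.index).map(|item| {
            self.index += 1;
            item
        })
    }
}

fn main() {
    let stream = Stream(vec!["a", "b", "c"]);
    let mut iter = stream.iter();
    let first = iter.peek(); // borrows from the stream, not the iterator
    assert_eq!(first, Some(&"a"));
    assert_eq!(iter.next(), Some(&"a"));
    assert_eq!(iter.count(), 2); // the usual iterator protocol still works
}
```

The `'t` return lifetime on `peek` is the detail a generic adapter cannot offer: `Peekable::peek` borrows the iterator itself, while this `peek` hands back a reference tied only to the underlying stream.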
2 changes: 1 addition & 1 deletion compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -725,7 +725,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
// E.g. we have seen cases where a proc macro can handle `a :: b` but not
// `a::b`. See #117433 for some examples.
fn print_tts(&mut self, tts: &TokenStream, convert_dollar_crate: bool) {
let mut iter = tts.trees().peekable();
let mut iter = tts.iter().peekable();
while let Some(tt) = iter.next() {
let spacing = self.print_tt(tt, convert_dollar_crate);
if let Some(next) = iter.peek() {
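`print_tts` still wraps the new iterator in `Peekable` because it needs one-token lookahead while printing: the spacing decision for the current token depends on the next one. A reduced sketch of that loop shape (the spacing rule below is a made-up stand-in for rustc's real logic):

```rust
// Sketch of the `print_tts` loop shape: look at the *next* item while
// printing the current one, to decide whether a separator is needed.
fn print_tokens(tokens: &[&str]) -> String {
    let mut out = String::new();
    let mut iter = tokens.iter().peekable();
    while let Some(tok) = iter.next() {
        out.push_str(tok);
        if let Some(next) = iter.peek() {
            // Keep `::` tight but space out other tokens: some proc macros
            // treat `a :: b` and `a::b` differently (see #117433).
            let tight = *tok == "::" || **next == "::";
            if !tight {
                out.push(' ');
            }
        }
    }
    out
}

fn main() {
    assert_eq!(print_tokens(&["a", "::", "b"]), "a::b");
    assert_eq!(print_tokens(&["let", "x"]), "let x");
}
```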
2 changes: 1 addition & 1 deletion compiler/rustc_builtin_macros/src/concat_idents.rs
@@ -18,7 +18,7 @@ pub(crate) fn expand_concat_idents<'cx>(
}

let mut res_str = String::new();
for (i, e) in tts.trees().enumerate() {
for (i, e) in tts.iter().enumerate() {
if i & 1 == 1 {
match e {
TokenTree::Token(Token { kind: token::Comma, .. }, _) => {}
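The `i & 1 == 1` test in `expand_concat_idents` is a parity check: a well-formed `concat_idents!(a, b, c)` stream alternates identifier, comma, identifier, so odd indices must hold commas. A standalone sketch with plain strings:

```rust
// Sketch of the parity-based validation: odd positions must hold the
// separator, even positions the payload.
fn concat_idents(tokens: &[&str]) -> Option<String> {
    let mut out = String::new();
    for (i, tok) in tokens.iter().enumerate() {
        if i & 1 == 1 {
            // Odd index: must be the comma between identifiers.
            if *tok != "," {
                return None;
            }
        } else {
            // Even index: must be an identifier fragment.
            if *tok == "," {
                return None;
            }
            out.push_str(tok);
        }
    }
    if out.is_empty() { None } else { Some(out) }
}

fn main() {
    assert_eq!(concat_idents(&["foo", ",", "bar"]), Some("foobar".to_string()));
    assert_eq!(concat_idents(&["foo", "bar"]), None); // missing comma
}
```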
6 changes: 3 additions & 3 deletions compiler/rustc_builtin_macros/src/trace_macros.rs
@@ -9,17 +9,17 @@ pub(crate) fn expand_trace_macros(
sp: Span,
tt: TokenStream,
) -> MacroExpanderResult<'static> {
let mut cursor = tt.trees();
let mut iter = tt.iter();
let mut err = false;
let value = match &cursor.next() {
let value = match iter.next() {
Some(TokenTree::Token(token, _)) if token.is_keyword(kw::True) => true,
Some(TokenTree::Token(token, _)) if token.is_keyword(kw::False) => false,
_ => {
err = true;
false
}
};
err |= cursor.next().is_some();
err |= iter.next().is_some();
if err {
cx.dcx().emit_err(errors::TraceMacros { span: sp });
} else {
2 changes: 1 addition & 1 deletion compiler/rustc_const_eval/src/interpret/projection.rs
@@ -325,7 +325,7 @@ where
let actual_to = if from_end {
if from.checked_add(to).is_none_or(|to| to > len) {
// This can only be reached in ConstProp and non-rustc-MIR.
throw_ub!(BoundsCheckFailed { len: len, index: from.saturating_add(to) });
throw_ub!(BoundsCheckFailed { len, index: from.saturating_add(to) });
}
len.checked_sub(to).unwrap()
} else {
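The `len: len` to `len` change is purely syntactic field init shorthand, applied here as part of the rollup-wide cleanup. A minimal illustration (the struct is a stand-in, not rustc's real diagnostic type):

```rust
// Stand-in struct for illustration only.
struct BoundsCheckFailed {
    len: u64,
    index: u64,
}

fn main() {
    let len = 10u64;
    let index = 12u64;
    // `len` uses the shorthand; `index: index` shows the longhand it replaces.
    let err = BoundsCheckFailed { len, index: index };
    assert_eq!(err.len, 10);
    assert_eq!(err.index, 12);
}
```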
4 changes: 2 additions & 2 deletions compiler/rustc_driver_impl/src/lib.rs
@@ -26,7 +26,7 @@ use std::fmt::Write as _;
use std::fs::{self, File};
use std::io::{self, IsTerminal, Read, Write};
use std::panic::{self, PanicHookInfo, catch_unwind};
use std::path::PathBuf;
use std::path::{Path, PathBuf};
use std::process::{self, Command, Stdio};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, OnceLock};
@@ -460,7 +460,7 @@ fn run_compiler(
})
}

fn dump_feature_usage_metrics(tcxt: TyCtxt<'_>, metrics_dir: &PathBuf) {
fn dump_feature_usage_metrics(tcxt: TyCtxt<'_>, metrics_dir: &Path) {
let output_filenames = tcxt.output_filenames(());
let mut metrics_file_name = std::ffi::OsString::from("unstable_feature_usage_metrics-");
let mut metrics_path = output_filenames.with_directory_and_extension(metrics_dir, "json");
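Taking `&Path` rather than `&PathBuf` is strictly more general: `&PathBuf` derefs to `&Path`, so existing callers compile unchanged, while callers that never owned a `PathBuf` stop needing to allocate one. A small demonstration (the file name echoes this diff but is otherwise made up):

```rust
use std::path::{Path, PathBuf};

// The more general signature: accepts anything that derefs to `Path`.
fn metrics_file_in(dir: &Path) -> PathBuf {
    dir.join("unstable_feature_usage_metrics.json")
}

fn main() {
    let owned: PathBuf = PathBuf::from("/tmp/metrics");
    // `&PathBuf` coerces to `&Path`, so existing call sites are unaffected...
    let a = metrics_file_in(&owned);
    // ...and callers that never had a `PathBuf` no longer need to build one.
    let b = metrics_file_in(Path::new("/tmp/metrics"));
    assert_eq!(a, b);
}
```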