From c1245e16df93ffc583234063265afce8dbd7b43e Mon Sep 17 00:00:00 2001 From: Ali Bektas Date: Thu, 1 Aug 2024 02:36:31 +0200 Subject: [PATCH 001/124] Add new assist toggle_macro_delimiter --- .../src/handlers/toggle_macro_delimiter.rs | 227 ++++++++++++++++++ .../crates/ide-assists/src/lib.rs | 2 + 2 files changed, 229 insertions(+) create mode 100644 src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs new file mode 100644 index 0000000000000..490957d04a071 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs @@ -0,0 +1,227 @@ +use ide_db::assists::{AssistId, AssistKind}; +use syntax::{ + ast::{self, make}, + ted, AstNode, T, +}; + +use crate::{AssistContext, Assists}; + +// Assist: toggle_macro_delimiter +// +// Change macro delimiters in the order of `( -> { -> [ -> (`. +// +// ``` +// macro_rules! sth (); +// sth! $0( ); +// ``` +// -> +// ``` +// macro_rules! sth! (); +// sth! { } +// ``` +pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + #[derive(Debug)] + enum MacroDelims { + LPar, + RPar, + LBra, + RBra, + LCur, + RCur, + } + + enum MakroTypes { + MacroRules(ast::MacroRules), + MacroCall(ast::MacroCall), + } + + let makro = if let Some(mc) = ctx.find_node_at_offset_with_descend::() { + MakroTypes::MacroCall(mc) + } else if let Some(mr) = ctx.find_node_at_offset_with_descend::() { + MakroTypes::MacroRules(mr) + } else { + return None; + }; + + let cursor_offset = ctx.offset(); + let token_tree = match makro { + MakroTypes::MacroRules(mr) => mr.token_tree()?.clone_for_update(), + MakroTypes::MacroCall(md) => md.token_tree()?.clone_for_update(), + }; + + let token_tree_text_range = token_tree.syntax().text_range(); + let ltoken = token_tree.left_delimiter_token()?; + let rtoken = token_tree.right_delimiter_token()?; + + if !ltoken.text_range().contains(cursor_offset) && !rtoken.text_range().contains(cursor_offset) + { + return None; + } + + let token = match ltoken.kind() { + T!['{'] => MacroDelims::LCur, + T!['('] => MacroDelims::LPar, + T!['['] => MacroDelims::LBra, + T!['}'] => MacroDelims::RBra, + T![')'] => MacroDelims::RPar, + T!['}'] => MacroDelims::RCur, + _ => return None, + }; + + acc.add( + AssistId("add_braces", AssistKind::Refactor), + match token { + MacroDelims::LPar => "Replace delimiters with braces", + MacroDelims::RPar => "Replace delimiters with braces", + MacroDelims::LBra => "Replace delimiters with parentheses", + MacroDelims::RBra => "Replace delimiters with parentheses", + MacroDelims::LCur => "Replace delimiters with brackets", + MacroDelims::RCur => "Replace delimiters with brackets", + }, + token_tree.syntax().text_range(), + |builder| { + match token { + MacroDelims::LPar | MacroDelims::RPar => { + ted::replace(ltoken, make::token(T!['{'])); + ted::replace(rtoken, make::token(T!['}'])); + } + MacroDelims::LBra | MacroDelims::RBra => { + ted::replace(ltoken, make::token(T!['('])); + ted::replace(rtoken, make::token(T![')'])); + } + MacroDelims::LCur | MacroDelims::RCur => { + ted::replace(ltoken, make::token(T!['['])); + ted::replace(rtoken, make::token(T![']'])); + } + } + builder.replace(token_tree_text_range, token_tree.syntax().text()); + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use 
super::*; + + #[test] + fn test_par() { + check_assist( + toggle_macro_delimiter, + r#" +macro_rules! sth (); +sth! $0( ); + "#, + r#" +macro_rules! sth (); +sth! { }; + "#, + ) + } + + #[test] + fn test_braclets() { + check_assist( + toggle_macro_delimiter, + r#" +macro_rules! sth (); +sth! $0{ }; + "#, + r#" +macro_rules! sth (); +sth! [ ]; + "#, + ) + } + + #[test] + fn test_brackets() { + check_assist( + toggle_macro_delimiter, + r#" +macro_rules! sth (); +sth! $0[ ]; + "#, + r#" +macro_rules! sth (); +sth! ( ); + "#, + ) + } + + #[test] + fn test_indent() { + check_assist( + toggle_macro_delimiter, + r#" +mod abc { + macro_rules! sth (); + sth! $0{ }; +} + "#, + r#" +mod abc { + macro_rules! sth (); + sth! [ ]; +} + "#, + ) + } + + #[test] + fn test_rules_par() { + check_assist( + toggle_macro_delimiter, + r#" +macro_rules! sth $0(); +sth! ( ); + "#, + r#" +macro_rules! sth {}; +sth! ( ); + "#, + ) + } + + #[test] + fn test_rules_braclets() { + check_assist( + toggle_macro_delimiter, + r#" +macro_rules! sth $0{}; +sth! ( ); + "#, + r#" +macro_rules! sth []; +sth! ( ); + "#, + ) + } + + #[test] + fn test_rules_brackets() { + check_assist( + toggle_macro_delimiter, + r#" +macro_rules! sth $0[]; +sth! ( ); + "#, + r#" +macro_rules! sth (); +sth! ( ); + "#, + ) + } + + #[test] + fn test_unrelated_par() { + check_assist_not_applicable( + toggle_macro_delimiter, + r#" +macro_rules! sth [def$0()]; +sth! ( ); + "#, + ) + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index a9399ba6b7f47..b97f0f27398b2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -213,6 +213,7 @@ mod handlers { mod term_search; mod toggle_async_sugar; mod toggle_ignore; + mod toggle_macro_delimiter; mod unmerge_match_arm; mod unmerge_use; mod unnecessary_async; @@ -343,6 +344,7 @@ mod handlers { split_import::split_import, term_search::term_search, toggle_ignore::toggle_ignore, + toggle_macro_delimiter::toggle_macro_delimiter, unmerge_match_arm::unmerge_match_arm, unmerge_use::unmerge_use, unnecessary_async::unnecessary_async, From 662f02fc9bbd996a001779d5ec1b31a495944049 Mon Sep 17 00:00:00 2001 From: Ali Bektas Date: Fri, 2 Aug 2024 02:00:26 +0200 Subject: [PATCH 002/124] Make toggle_macro_delimiters work only for macro_calls --- .../src/handlers/toggle_macro_delimiter.rs | 153 +++++++++++------- .../crates/ide-assists/src/tests/generated.rs | 21 +++ 2 files changed, 112 insertions(+), 62 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs index 490957d04a071..6fa5ff761f680 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs @@ -11,12 +11,18 @@ use crate::{AssistContext, Assists}; // Change macro delimiters in the order of `( -> { -> [ -> (`. // // ``` -// macro_rules! sth (); +// macro_rules! sth { +// () => {}; +// } +// // sth! $0( ); // ``` // -> // ``` -// macro_rules! sth! (); +// macro_rules! sth { +// () => {}; +// } +// // sth! 
{ } // ``` pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { @@ -30,26 +36,13 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>) RCur, } - enum MakroTypes { - MacroRules(ast::MacroRules), - MacroCall(ast::MacroCall), - } - - let makro = if let Some(mc) = ctx.find_node_at_offset_with_descend::() { - MakroTypes::MacroCall(mc) - } else if let Some(mr) = ctx.find_node_at_offset_with_descend::() { - MakroTypes::MacroRules(mr) - } else { - return None; - }; + let makro = ctx.find_node_at_offset_with_descend::()?.clone_for_update(); + let makro_text_range = makro.syntax().text_range(); let cursor_offset = ctx.offset(); - let token_tree = match makro { - MakroTypes::MacroRules(mr) => mr.token_tree()?.clone_for_update(), - MakroTypes::MacroCall(md) => md.token_tree()?.clone_for_update(), - }; + let semicolon = makro.semicolon_token(); + let token_tree = makro.token_tree()?; - let token_tree_text_range = token_tree.syntax().text_range(); let ltoken = token_tree.left_delimiter_token()?; let rtoken = token_tree.right_delimiter_token()?; @@ -84,6 +77,9 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>) MacroDelims::LPar | MacroDelims::RPar => { ted::replace(ltoken, make::token(T!['{'])); ted::replace(rtoken, make::token(T!['}'])); + if let Some(sc) = semicolon { + ted::remove(sc); + } } MacroDelims::LBra | MacroDelims::RBra => { ted::replace(ltoken, make::token(T!['('])); @@ -94,7 +90,7 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>) ted::replace(rtoken, make::token(T![']'])); } } - builder.replace(token_tree_text_range, token_tree.syntax().text()); + builder.replace(makro_text_range, makro.syntax().text()); }, ) } @@ -110,12 +106,18 @@ mod tests { check_assist( toggle_macro_delimiter, r#" -macro_rules! sth (); +macro_rules! sth { + () => {}; +} + sth! $0( ); "#, r#" -macro_rules! sth (); -sth! { }; +macro_rules! sth { + () => {}; +} + +sth! { } "#, ) } @@ -125,11 +127,17 @@ sth! { }; check_assist( toggle_macro_delimiter, r#" -macro_rules! sth (); +macro_rules! sth { + () => {}; +} + sth! $0{ }; "#, r#" -macro_rules! sth (); +macro_rules! sth { + () => {}; +} + sth! [ ]; "#, ) @@ -140,11 +148,17 @@ sth! [ ]; check_assist( toggle_macro_delimiter, r#" -macro_rules! sth (); +macro_rules! sth { + () => {}; +} + sth! $0[ ]; "#, r#" -macro_rules! sth (); +macro_rules! sth { + () => {}; +} + sth! ( ); "#, ) @@ -156,13 +170,19 @@ sth! ( ); toggle_macro_delimiter, r#" mod abc { - macro_rules! sth (); + macro_rules! sth { + () => {}; + } + sth! $0{ }; } "#, r#" mod abc { - macro_rules! sth (); + macro_rules! sth { + () => {}; + } + sth! [ ]; } "#, @@ -170,58 +190,67 @@ mod abc { } #[test] - fn test_rules_par() { - check_assist( + fn test_unrelated_par() { + check_assist_not_applicable( toggle_macro_delimiter, r#" -macro_rules! sth $0(); -sth! ( ); - "#, - r#" -macro_rules! sth {}; -sth! ( ); - "#, - ) - } +macro_rules! prt { + ($e:expr) => {{ + println!("{}", stringify! {$e}); + }}; +} + +prt!(($03 + 5)); - #[test] - fn test_rules_braclets() { - check_assist( - toggle_macro_delimiter, - r#" -macro_rules! sth $0{}; -sth! ( ); - "#, - r#" -macro_rules! sth []; -sth! ( ); "#, ) } #[test] - fn test_rules_brackets() { + fn test_longer_macros() { check_assist( toggle_macro_delimiter, r#" -macro_rules! sth $0[]; -sth! ( ); - "#, +macro_rules! prt { + ($e:expr) => {{ + println!("{}", stringify! {$e}); + }}; +} + +prt! $0((3 + 5)); +"#, r#" -macro_rules! sth (); -sth! 
( ); - "#, +macro_rules! prt { + ($e:expr) => {{ + println!("{}", stringify! {$e}); + }}; +} + +prt! {(3 + 5)} +"#, ) } + // FIXME @alibektas : Inner macro_call is not seen as such. So this doesn't work. #[test] - fn test_unrelated_par() { + fn test_nested_macros() { check_assist_not_applicable( toggle_macro_delimiter, r#" -macro_rules! sth [def$0()]; -sth! ( ); - "#, +macro_rules! prt { + ($e:expr) => {{ + println!("{}", stringify! {$e}); + }}; +} + +macro_rules! abc { + ($e:expr) => {{ + println!("{}", stringify! {$e}); + }}; +} + +prt! {abc!($03 + 5)}; +"#, ) } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index a2287b2977ddd..05e1aff5f9f96 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -3091,6 +3091,27 @@ fn arithmetics { ) } +#[test] +fn doctest_toggle_macro_delimiter() { + check_doc_test( + "toggle_macro_delimiter", + r#####" +macro_rules! sth { + () => {}; +} + +sth! $0( ); +"#####, + r#####" +macro_rules! sth { + () => {}; +} + +sth! { } +"#####, + ) +} + #[test] fn doctest_unmerge_match_arm() { check_doc_test( From aa6e8d3db553721eed97d5b6afc64d44eb9f8a2b Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Mon, 12 Aug 2024 02:47:31 +0300 Subject: [PATCH 003/124] Allow declaring cfg groups in rust-project.json, to help sharing common cfgs --- .../crates/project-model/src/project_json.rs | 34 +- .../crates/project-model/src/tests.rs | 6 + .../project-model/test_data/cfg-groups.json | 26 + .../output/rust_project_cfg_groups.txt | 545 ++++++++++++++++++ src/tools/rust-analyzer/docs/user/manual.adoc | 12 + 5 files changed, 619 insertions(+), 4 deletions(-) create mode 100644 src/tools/rust-analyzer/crates/project-model/test_data/cfg-groups.json create mode 100644 src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs index 7dea0c3839841..a09c7a77abceb 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs @@ -52,7 +52,7 @@ use base_db::{CrateDisplayName, CrateName}; use cfg::CfgAtom; use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; -use rustc_hash::FxHashMap; +use rustc_hash::{FxHashMap, FxHashSet}; use serde::{de, Deserialize, Serialize}; use span::Edition; @@ -122,6 +122,25 @@ impl ProjectJson { None => None, }; + let cfg = crate_data + .cfg_groups + .iter() + .flat_map(|cfg_extend| { + let cfg_group = data.cfg_groups.get(cfg_extend); + match cfg_group { + Some(cfg_group) => cfg_group.0.iter().cloned(), + None => { + tracing::error!( + "Unknown cfg group `{cfg_extend}` in crate `{}`", + crate_data.display_name.as_deref().unwrap_or(""), + ); + [].iter().cloned() + } + } + }) + .chain(crate_data.cfg.0) + .collect(); + Crate { display_name: crate_data .display_name @@ -131,7 +150,7 @@ impl ProjectJson { edition: crate_data.edition.into(), version: crate_data.version.as_ref().map(ToString::to_string), deps: crate_data.deps, - cfg: crate_data.cfg, + cfg, target: crate_data.target, env: crate_data.env, proc_macro_dylib_path: crate_data @@ -306,11 +325,17 @@ pub enum RunnableKind { pub struct ProjectJsonData { sysroot: Option, sysroot_src: Option, + #[serde(default)] + cfg_groups: FxHashMap, crates: Vec, 
#[serde(default)] runnables: Vec, } +#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq, Default)] +#[serde(transparent)] +struct CfgList(#[serde(with = "cfg_")] Vec); + #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] struct CrateData { display_name: Option, @@ -320,8 +345,9 @@ struct CrateData { version: Option, deps: Vec, #[serde(default)] - #[serde(with = "cfg_")] - cfg: Vec, + cfg_groups: FxHashSet, + #[serde(default)] + cfg: CfgList, target: Option, #[serde(default)] env: FxHashMap, diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index e3bc81e1963d9..8c0b475584945 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -233,6 +233,12 @@ fn rust_project_hello_world_project_model() { ); } +#[test] +fn rust_project_cfg_groups() { + let (crate_graph, _proc_macros) = load_rust_project("cfg-groups.json"); + check_crate_graph(crate_graph, expect_file!["../test_data/output/rust_project_cfg_groups.txt"]); +} + #[test] fn rust_project_is_proc_macro_has_proc_macro_dep() { let (crate_graph, _proc_macros) = load_rust_project("is-proc-macro-project.json"); diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/cfg-groups.json b/src/tools/rust-analyzer/crates/project-model/test_data/cfg-groups.json new file mode 100644 index 0000000000000..29f83afd9d4e5 --- /dev/null +++ b/src/tools/rust-analyzer/crates/project-model/test_data/cfg-groups.json @@ -0,0 +1,26 @@ +{ + "sysroot_src": null, + "cfg_groups": { + "group1": ["group1_cfg=\"some_config\"", "group1_other_cfg=\"other_config\""], + "group2": ["group2_cfg=\"yet_another_config\""] + }, + "crates": [ + { + "display_name": "hello_world", + "root_module": "$ROOT$src/lib.rs", + "edition": "2018", + "cfg_groups": ["group1", "group2"], + "deps": [], + "is_workspace_member": true + }, + { + "display_name": "other_crate", + "root_module": "$ROOT$src/lib.rs", + "edition": "2018", + "cfg_groups": ["group2"], + "cfg": ["group2_cfg=\"fourth_config\"", "unrelated_cfg"], + "deps": [], + "is_workspace_member": true + } + ] +} diff --git a/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt new file mode 100644 index 0000000000000..8261e5a2d907a --- /dev/null +++ b/src/tools/rust-analyzer/crates/project-model/test_data/output/rust_project_cfg_groups.txt @@ -0,0 +1,545 @@ +{ + 0: CrateData { + root_file_id: FileId( + 1, + ), + edition: Edition2021, + version: None, + display_name: Some( + CrateDisplayName { + crate_name: CrateName( + "alloc", + ), + canonical_name: "alloc", + }, + ), + cfg_options: CfgOptions( + [ + "debug_assertions", + "miri", + ], + ), + potential_cfg_options: None, + env: Env { + entries: {}, + }, + dependencies: [ + Dependency { + crate_id: Idx::(1), + name: CrateName( + "core", + ), + prelude: true, + sysroot: false, + }, + ], + origin: Lang( + Alloc, + ), + is_proc_macro: false, + }, + 1: CrateData { + root_file_id: FileId( + 2, + ), + edition: Edition2021, + version: None, + display_name: Some( + CrateDisplayName { + crate_name: CrateName( + "core", + ), + canonical_name: "core", + }, + ), + cfg_options: CfgOptions( + [ + "debug_assertions", + "miri", + ], + ), + potential_cfg_options: None, + env: Env { + entries: {}, + }, + dependencies: [], + origin: Lang( + Core, + ), + is_proc_macro: false, + }, + 2: CrateData 
{ + root_file_id: FileId( + 3, + ), + edition: Edition2021, + version: None, + display_name: Some( + CrateDisplayName { + crate_name: CrateName( + "panic_abort", + ), + canonical_name: "panic_abort", + }, + ), + cfg_options: CfgOptions( + [ + "debug_assertions", + "miri", + ], + ), + potential_cfg_options: None, + env: Env { + entries: {}, + }, + dependencies: [], + origin: Lang( + Other, + ), + is_proc_macro: false, + }, + 3: CrateData { + root_file_id: FileId( + 4, + ), + edition: Edition2021, + version: None, + display_name: Some( + CrateDisplayName { + crate_name: CrateName( + "panic_unwind", + ), + canonical_name: "panic_unwind", + }, + ), + cfg_options: CfgOptions( + [ + "debug_assertions", + "miri", + ], + ), + potential_cfg_options: None, + env: Env { + entries: {}, + }, + dependencies: [], + origin: Lang( + Other, + ), + is_proc_macro: false, + }, + 4: CrateData { + root_file_id: FileId( + 5, + ), + edition: Edition2021, + version: None, + display_name: Some( + CrateDisplayName { + crate_name: CrateName( + "proc_macro", + ), + canonical_name: "proc_macro", + }, + ), + cfg_options: CfgOptions( + [ + "debug_assertions", + "miri", + ], + ), + potential_cfg_options: None, + env: Env { + entries: {}, + }, + dependencies: [ + Dependency { + crate_id: Idx::(6), + name: CrateName( + "std", + ), + prelude: true, + sysroot: false, + }, + Dependency { + crate_id: Idx::(1), + name: CrateName( + "core", + ), + prelude: true, + sysroot: false, + }, + ], + origin: Lang( + ProcMacro, + ), + is_proc_macro: false, + }, + 5: CrateData { + root_file_id: FileId( + 6, + ), + edition: Edition2021, + version: None, + display_name: Some( + CrateDisplayName { + crate_name: CrateName( + "profiler_builtins", + ), + canonical_name: "profiler_builtins", + }, + ), + cfg_options: CfgOptions( + [ + "debug_assertions", + "miri", + ], + ), + potential_cfg_options: None, + env: Env { + entries: {}, + }, + dependencies: [], + origin: Lang( + Other, + ), + is_proc_macro: false, + }, + 6: CrateData { + root_file_id: FileId( + 7, + ), + edition: Edition2021, + version: None, + display_name: Some( + CrateDisplayName { + crate_name: CrateName( + "std", + ), + canonical_name: "std", + }, + ), + cfg_options: CfgOptions( + [ + "debug_assertions", + "miri", + ], + ), + potential_cfg_options: None, + env: Env { + entries: {}, + }, + dependencies: [ + Dependency { + crate_id: Idx::(0), + name: CrateName( + "alloc", + ), + prelude: true, + sysroot: false, + }, + Dependency { + crate_id: Idx::(3), + name: CrateName( + "panic_unwind", + ), + prelude: true, + sysroot: false, + }, + Dependency { + crate_id: Idx::(2), + name: CrateName( + "panic_abort", + ), + prelude: true, + sysroot: false, + }, + Dependency { + crate_id: Idx::(1), + name: CrateName( + "core", + ), + prelude: true, + sysroot: false, + }, + Dependency { + crate_id: Idx::(5), + name: CrateName( + "profiler_builtins", + ), + prelude: true, + sysroot: false, + }, + Dependency { + crate_id: Idx::(9), + name: CrateName( + "unwind", + ), + prelude: true, + sysroot: false, + }, + Dependency { + crate_id: Idx::(7), + name: CrateName( + "std_detect", + ), + prelude: true, + sysroot: false, + }, + Dependency { + crate_id: Idx::(8), + name: CrateName( + "test", + ), + prelude: true, + sysroot: false, + }, + ], + origin: Lang( + Std, + ), + is_proc_macro: false, + }, + 7: CrateData { + root_file_id: FileId( + 8, + ), + edition: Edition2021, + version: None, + display_name: Some( + CrateDisplayName { + crate_name: CrateName( + "std_detect", + ), + canonical_name: "std_detect", + 
}, + ), + cfg_options: CfgOptions( + [ + "debug_assertions", + "miri", + ], + ), + potential_cfg_options: None, + env: Env { + entries: {}, + }, + dependencies: [], + origin: Lang( + Other, + ), + is_proc_macro: false, + }, + 8: CrateData { + root_file_id: FileId( + 9, + ), + edition: Edition2021, + version: None, + display_name: Some( + CrateDisplayName { + crate_name: CrateName( + "test", + ), + canonical_name: "test", + }, + ), + cfg_options: CfgOptions( + [ + "debug_assertions", + "miri", + ], + ), + potential_cfg_options: None, + env: Env { + entries: {}, + }, + dependencies: [], + origin: Lang( + Test, + ), + is_proc_macro: false, + }, + 9: CrateData { + root_file_id: FileId( + 10, + ), + edition: Edition2021, + version: None, + display_name: Some( + CrateDisplayName { + crate_name: CrateName( + "unwind", + ), + canonical_name: "unwind", + }, + ), + cfg_options: CfgOptions( + [ + "debug_assertions", + "miri", + ], + ), + potential_cfg_options: None, + env: Env { + entries: {}, + }, + dependencies: [], + origin: Lang( + Other, + ), + is_proc_macro: false, + }, + 10: CrateData { + root_file_id: FileId( + 11, + ), + edition: Edition2018, + version: None, + display_name: Some( + CrateDisplayName { + crate_name: CrateName( + "hello_world", + ), + canonical_name: "hello_world", + }, + ), + cfg_options: CfgOptions( + [ + "group1_cfg=some_config", + "group1_other_cfg=other_config", + "group2_cfg=yet_another_config", + "rust_analyzer", + ], + ), + potential_cfg_options: None, + env: Env { + entries: {}, + }, + dependencies: [ + Dependency { + crate_id: Idx::(1), + name: CrateName( + "core", + ), + prelude: true, + sysroot: true, + }, + Dependency { + crate_id: Idx::(0), + name: CrateName( + "alloc", + ), + prelude: false, + sysroot: true, + }, + Dependency { + crate_id: Idx::(6), + name: CrateName( + "std", + ), + prelude: true, + sysroot: true, + }, + Dependency { + crate_id: Idx::(8), + name: CrateName( + "test", + ), + prelude: false, + sysroot: true, + }, + Dependency { + crate_id: Idx::(4), + name: CrateName( + "proc_macro", + ), + prelude: false, + sysroot: true, + }, + ], + origin: Local { + repo: None, + name: Some( + "hello_world", + ), + }, + is_proc_macro: false, + }, + 11: CrateData { + root_file_id: FileId( + 12, + ), + edition: Edition2018, + version: None, + display_name: Some( + CrateDisplayName { + crate_name: CrateName( + "other_crate", + ), + canonical_name: "other_crate", + }, + ), + cfg_options: CfgOptions( + [ + "group2_cfg=fourth_config", + "group2_cfg=yet_another_config", + "rust_analyzer", + "unrelated_cfg", + ], + ), + potential_cfg_options: None, + env: Env { + entries: {}, + }, + dependencies: [ + Dependency { + crate_id: Idx::(1), + name: CrateName( + "core", + ), + prelude: true, + sysroot: true, + }, + Dependency { + crate_id: Idx::(0), + name: CrateName( + "alloc", + ), + prelude: false, + sysroot: true, + }, + Dependency { + crate_id: Idx::(6), + name: CrateName( + "std", + ), + prelude: true, + sysroot: true, + }, + Dependency { + crate_id: Idx::(8), + name: CrateName( + "test", + ), + prelude: false, + sysroot: true, + }, + Dependency { + crate_id: Idx::(4), + name: CrateName( + "proc_macro", + ), + prelude: false, + sysroot: true, + }, + ], + origin: Local { + repo: None, + name: Some( + "other_crate", + ), + }, + is_proc_macro: false, + }, +} \ No newline at end of file diff --git a/src/tools/rust-analyzer/docs/user/manual.adoc b/src/tools/rust-analyzer/docs/user/manual.adoc index 703ec66921c4c..246ebdab2c91a 100644 --- 
a/src/tools/rust-analyzer/docs/user/manual.adoc +++ b/src/tools/rust-analyzer/docs/user/manual.adoc @@ -705,6 +705,12 @@ interface JsonProject { /// several different "sysroots" in one graph of /// crates. sysroot_src?: string; + /// List of groups of common cfg values, to allow + /// sharing them between crates. + /// + /// Maps from group name to its cfgs. Cfg follow + /// the same format as `Crate.cfg`. + cfg_groups?: { [key: string]: string[]; }; /// The set of crates comprising the current /// project. Must include all transitive /// dependencies as well as sysroot crate (libstd, @@ -754,6 +760,12 @@ interface Crate { include_dirs: string[], exclude_dirs: string[], }, + /// List of cfg groups this crate inherits. + /// + /// All cfg in these groups will be concatenated to + /// `cfg`. It is impossible to replace a value from + /// the groups. + cfg_groups?: string[]; /// The set of cfgs activated for a given crate, like /// `["unix", "feature=\"foo\"", "feature=\"bar\""]`. cfg: string[]; From 5e058db6828acf4df9f765f59522fc0dbe19ae60 Mon Sep 17 00:00:00 2001 From: Wilfred Hughes Date: Tue, 13 Aug 2024 15:20:09 -0700 Subject: [PATCH 004/124] docs: Add a doc comment for OpQueue Add an explanatory sentence and some sample code to help readers understand why this struct exists. --- .../crates/rust-analyzer/src/op_queue.rs | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs index 99f9e9829c936..eab9733872402 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs @@ -3,6 +3,26 @@ pub(crate) type Cause = String; +/// A single-item queue that allows callers to request an operation to +/// be performed later. +/// +/// ``` +/// let queue = OpQueue::default(); +/// +/// // Request work to be done. +/// queue.request_op("user pushed a button", ()); +/// +/// // In a later iteration of the server loop, we start the work. +/// if let Some((_cause, ())) = queue.should_start_op() { +/// dbg!("Some slow operation here"); +/// } +/// +/// // In an even later iteration of the server loop, we can see that the work +/// // was completed. 
+/// if !queue.op_in_progress() { +/// dbg!("Work has been done!"); +/// } +/// ``` #[derive(Debug)] pub(crate) struct OpQueue { op_requested: Option<(Cause, Args)>, From 4a14155efa3d8a276d45bb87f52e92d83935e19d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Wed, 14 Aug 2024 14:38:39 +0300 Subject: [PATCH 005/124] Be more resilient to bad language item definitions in binop inference --- .../crates/hir-ty/src/infer/expr.rs | 21 +++++++----- .../crates/hir-ty/src/tests/simple.rs | 33 +++++++++++++++++++ 2 files changed, 45 insertions(+), 9 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index f5eb37f427876..e40c6a63f661d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -793,11 +793,12 @@ impl InferenceContext<'_> { .trait_data(index_trait) .method_by_name(&Name::new_symbol_root(sym::index.clone())) { - let substs = TyBuilder::subst_for_def(self.db, index_trait, None) - .push(self_ty.clone()) - .push(index_ty.clone()) - .build(); - self.write_method_resolution(tgt_expr, func, substs); + let subst = TyBuilder::subst_for_def(self.db, index_trait, None); + if subst.remaining() != 2 { + return self.err_ty(); + } + let subst = subst.push(self_ty.clone()).push(index_ty.clone()).build(); + self.write_method_resolution(tgt_expr, func, subst); } let assoc = self.resolve_ops_index_output(); let res = self.resolve_associated_type_with_params( @@ -1295,10 +1296,12 @@ impl InferenceContext<'_> { // HACK: We can use this substitution for the function because the function itself doesn't // have its own generic parameters. - let subst = TyBuilder::subst_for_def(self.db, trait_, None) - .push(lhs_ty.clone()) - .push(rhs_ty.clone()) - .build(); + let subst = TyBuilder::subst_for_def(self.db, trait_, None); + if subst.remaining() != 2 { + return Ty::new(Interner, TyKind::Error); + } + let subst = subst.push(lhs_ty.clone()).push(rhs_ty.clone()).build(); + self.write_method_resolution(tgt_expr, func, subst.clone()); let method_ty = self.db.value_ty(func.into()).unwrap().substitute(Interner, &subst); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs index d83a34298eab0..a15670b3c85d9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs @@ -3686,3 +3686,36 @@ fn main() { "#, ); } + +#[test] +fn infer_bad_lang_item() { + check_infer( + r#" +#[lang="eq"] +pub trait Eq { + fn eq(&self, ) -> bool; + +} + +#[lang="shr"] +pub trait Shr { + fn shr(&self, rhs: &RHS) -> Result; +} + +fn test() -> bool { + 1 >> 1; + 1 == 1; +} +"#, + expect![[r#" + 39..43 'self': &'? Self + 114..118 'self': &'? Self + 120..123 'rhs': &'? 
RHS + 163..190 '{ ...= 1; }': bool + 169..170 '1': i32 + 169..175 '1 >> 1': {unknown} + 181..182 '1': i32 + 181..187 '1 == 1': {unknown} + "#]], + ); +} From 81b68b2078e480673e89496807238b97ab033722 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Wed, 14 Aug 2024 09:37:20 +0900 Subject: [PATCH 006/124] fix: Panic while canonicalizing erroneous projection type --- .../rust-analyzer/crates/hir-ty/src/infer.rs | 3 ++- .../crates/hir-ty/src/tests/regression.rs | 21 +++++++++++++++++++ .../rust-analyzer/crates/hir-ty/src/traits.rs | 14 +++++++++++-- 3 files changed, 35 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 45d423d03c021..5a7deab3a440a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -1436,7 +1436,8 @@ impl<'a> InferenceContext<'a> { let remaining = unresolved.map(|it| path.segments()[it..].len()).filter(|it| it > &0); let ty = match ty.kind(Interner) { TyKind::Alias(AliasTy::Projection(proj_ty)) => { - self.db.normalize_projection(proj_ty.clone(), self.table.trait_env.clone()) + let ty = self.table.normalize_projection_ty(proj_ty.clone()); + self.table.resolve_ty_shallow(&ty) } _ => ty, }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index 281983861207b..26e9243e7388f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -2141,3 +2141,24 @@ fn test() { }"#, ); } + +#[test] +fn issue_17866() { + check_infer( + r#" +trait T { + type A; +} + +type Foo = ::A; + +fn main() { + Foo {}; +} +"#, + expect![[r#" + 60..75 '{ Foo {}; }': () + 66..72 'Foo {}': {unknown} + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs index c46382a0ea822..00791c51491d7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs @@ -14,13 +14,13 @@ use hir_def::{ }; use hir_expand::name::Name; use intern::sym; -use stdx::panic_context; +use stdx::{never, panic_context}; use triomphe::Arc; use crate::{ db::HirDatabase, infer::unify::InferenceTable, utils::UnevaluatedConstEvaluatorFolder, AliasEq, AliasTy, Canonical, DomainGoal, Goal, Guidance, InEnvironment, Interner, ProjectionTy, - ProjectionTyExt, Solution, TraitRefExt, Ty, TyKind, WhereClause, + ProjectionTyExt, Solution, TraitRefExt, Ty, TyKind, TypeFlags, WhereClause, }; /// This controls how much 'time' we give the Chalk solver before giving up. 
@@ -90,6 +90,16 @@ pub(crate) fn normalize_projection_query( projection: ProjectionTy, env: Arc, ) -> Ty { + if projection.substitution.iter(Interner).any(|arg| { + arg.ty(Interner) + .is_some_and(|ty| ty.data(Interner).flags.intersects(TypeFlags::HAS_TY_INFER)) + }) { + never!( + "Invoking `normalize_projection_query` with a projection type containing inference var" + ); + return TyKind::Error.intern(Interner); + } + let mut table = InferenceTable::new(db, env); let ty = table.normalize_projection_ty(projection); table.resolve_completely(ty) From b0183f81f4bd9745f4259fb1350202e71931ce34 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Thu, 15 Aug 2024 01:49:12 +0900 Subject: [PATCH 007/124] fix: Panic while displaying associated function with a type annotation --- .../crates/hir-ty/src/display.rs | 15 +++++----- .../crates/ide/src/hover/tests.rs | 28 +++++++++++++++++++ 2 files changed, 35 insertions(+), 8 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index 7c481958d1a09..5a96d396ab89d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -1019,26 +1019,25 @@ impl HirDisplay for Ty { let (parent_len, self_param, type_, const_, impl_, lifetime) = generics.provenance_split(); let parameters = parameters.as_slice(Interner); + debug_assert_eq!( + parameters.len(), + parent_len + self_param as usize + type_ + const_ + impl_ + lifetime + ); // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self? if parameters.len() - impl_ > 0 { // `parameters` are in the order of fn's params (including impl traits), fn's lifetimes + let parameters = + generic_args_sans_defaults(f, Some(generic_def_id), parameters); let without_impl = self_param as usize + type_ + const_ + lifetime; // parent's params (those from enclosing impl or trait, if any). 
let (fn_params, parent_params) = parameters.split_at(without_impl + impl_); - debug_assert_eq!(parent_params.len(), parent_len); - - let parent_params = - generic_args_sans_defaults(f, Some(generic_def_id), parent_params); - let fn_params = - &generic_args_sans_defaults(f, Some(generic_def_id), fn_params) - [0..without_impl]; write!(f, "<")?; hir_fmt_generic_arguments(f, parent_params, None)?; if !parent_params.is_empty() && !fn_params.is_empty() { write!(f, ", ")?; } - hir_fmt_generic_arguments(f, fn_params, None)?; + hir_fmt_generic_arguments(f, &fn_params[0..without_impl], None)?; write!(f, ">")?; } } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 516e32ef91725..18f0aeba29ea8 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -8602,3 +8602,31 @@ fn test() { "#]], ); } + +#[test] +fn issue_17871() { + check( + r#" +trait T { + fn f(); +} + +struct S {} +impl T for S { + fn f() {} +} + +fn main() { + let x$0 = S::f::; +} +"#, + expect![[r#" + *x* + + ```rust + // size = 0, align = 1 + let x: fn f() + ``` + "#]], + ); +} From a783e30c931a64f4862ab65c104449df846b0c3c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 15 Aug 2024 14:50:57 +0200 Subject: [PATCH 008/124] internal: Properly check the edition for edition dependent syntax kinds --- .../hir-def/src/macro_expansion_tests/mod.rs | 20 ++- .../crates/hir-expand/src/name.rs | 7 +- .../crates/ide-completion/src/context.rs | 5 +- .../insert_whitespace_into_node.rs | 4 +- .../ide-db/src/syntax_helpers/node_ext.rs | 4 +- .../crates/ide/src/goto_definition.rs | 4 +- .../crates/ide/src/highlight_related.rs | 4 +- .../rust-analyzer/crates/ide/src/hover.rs | 3 +- .../crates/ide/src/hover/render.rs | 3 +- .../crates/ide/src/references.rs | 4 +- .../rust-analyzer/crates/ide/src/rename.rs | 12 +- .../ide/src/syntax_highlighting/highlight.rs | 3 +- .../ide/src/syntax_highlighting/macro_.rs | 3 +- .../crates/parser/src/lexed_str.rs | 13 +- .../crates/parser/src/shortcuts.rs | 10 +- .../parser/src/syntax_kind/generated.rs | 145 ++++++++++++++---- .../crates/syntax-bridge/src/lib.rs | 3 +- .../syntax-bridge/src/to_parser_input.rs | 10 +- .../rust-analyzer/crates/syntax/rust.ungram | 26 ++-- .../crates/syntax/src/ast/make.rs | 2 +- .../rust-analyzer/crates/syntax/src/utils.rs | 4 +- .../xtask/src/codegen/grammar.rs | 113 +++++++++++--- .../xtask/src/codegen/grammar/ast_src.rs | 74 ++++++++- 23 files changed, 347 insertions(+), 129 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index b430e2cefb35d..7f761192517c4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -25,7 +25,7 @@ use hir_expand::{ InFile, MacroFileId, MacroFileIdExt, }; use intern::Symbol; -use span::Span; +use span::{Edition, Span}; use stdx::{format_to, format_to_acc}; use syntax::{ ast::{self, edit::IndentLevel}, @@ -257,21 +257,25 @@ fn pretty_print_macro_expansion( (T![;] | T!['{'] | T!['}'], _) => "\n", (_, T!['}']) => "\n", (IDENT | LIFETIME_IDENT, IDENT | LIFETIME_IDENT) => " ", - _ if prev_kind.is_keyword() && curr_kind.is_keyword() => " ", - (IDENT, _) if curr_kind.is_keyword() => " ", - (_, IDENT) if prev_kind.is_keyword() => " ", + _ if prev_kind.is_keyword(Edition::CURRENT) + && 
curr_kind.is_keyword(Edition::CURRENT) => + { + " " + } + (IDENT, _) if curr_kind.is_keyword(Edition::CURRENT) => " ", + (_, IDENT) if prev_kind.is_keyword(Edition::CURRENT) => " ", (T![>], IDENT) => " ", - (T![>], _) if curr_kind.is_keyword() => " ", + (T![>], _) if curr_kind.is_keyword(Edition::CURRENT) => " ", (T![->], _) | (_, T![->]) => " ", (T![&&], _) | (_, T![&&]) => " ", (T![,], _) => " ", (T![:], IDENT | T!['(']) => " ", - (T![:], _) if curr_kind.is_keyword() => " ", + (T![:], _) if curr_kind.is_keyword(Edition::CURRENT) => " ", (T![fn], T!['(']) => "", - (T![']'], _) if curr_kind.is_keyword() => " ", + (T![']'], _) if curr_kind.is_keyword(Edition::CURRENT) => " ", (T![']'], T![#]) => "\n", (T![Self], T![::]) => "", - _ if prev_kind.is_keyword() => " ", + _ if prev_kind.is_keyword(Edition::CURRENT) => " ", _ => "", }; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs index d012d272d743d..2a52aaba6af99 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs @@ -104,14 +104,17 @@ impl Name { /// Resolve a name from the text of token. fn resolve(raw_text: &str) -> Name { + // FIXME: Edition match raw_text.strip_prefix("r#") { // When `raw_text` starts with "r#" but the name does not coincide with any // keyword, we never need the prefix so we strip it. - Some(text) if !is_raw_identifier(text) => Name::new_ref(text), + Some(text) if !is_raw_identifier(text, span::Edition::CURRENT) => Name::new_ref(text), // Keywords (in the current edition) *can* be used as a name in earlier editions of // Rust, e.g. "try" in Rust 2015. Even in such cases, we keep track of them in their // escaped form. - None if is_raw_identifier(raw_text) => Name::new_text(&format!("r#{}", raw_text)), + None if is_raw_identifier(raw_text, span::Edition::CURRENT) => { + Name::new_text(&format!("r#{}", raw_text)) + } _ => Name::new_text(raw_text), } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index 952d9217c71d4..5b64a98a8bba5 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -15,7 +15,7 @@ use ide_db::{ }; use syntax::{ ast::{self, AttrKind, NameOrNameRef}, - AstNode, SmolStr, + AstNode, Edition, SmolStr, SyntaxKind::{self, *}, SyntaxToken, TextRange, TextSize, T, }; @@ -468,7 +468,8 @@ impl CompletionContext<'_> { TextRange::at(self.original_token.text_range().start(), TextSize::from(1)) } IDENT | LIFETIME_IDENT | UNDERSCORE | INT_NUMBER => self.original_token.text_range(), - _ if kind.is_keyword() => self.original_token.text_range(), + // FIXME: Edition + _ if kind.is_keyword(Edition::CURRENT) => self.original_token.text_range(), _ => TextRange::empty(self.position.offset), } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs index 97b6d4a572a21..6714e411e2f7a 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs @@ -1,4 +1,5 @@ //! Utilities for formatting macro expanded nodes until we get a proper formatter. 
+use span::Edition; use syntax::{ ast::make, ted::{self, Position}, @@ -131,5 +132,6 @@ pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode { } fn is_text(k: SyntaxKind) -> bool { - k.is_keyword() || k.is_literal() || k == IDENT || k == UNDERSCORE + // FIXME: Edition + k.is_keyword(Edition::CURRENT) || k.is_literal() || k == IDENT || k == UNDERSCORE } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs index 37238cc61d3b7..d10a55120a32f 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -1,6 +1,7 @@ //! Various helper functions to work with SyntaxNodes. use itertools::Itertools; use parser::T; +use span::Edition; use syntax::{ ast::{self, HasLoopBody, MacroCall, PathSegmentKind, VisibilityKind}, AstNode, AstToken, Preorder, RustLanguage, WalkEvent, @@ -461,7 +462,8 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option None, + // FIXME: Edition + Some(tok) if tok.kind().is_keyword(Edition::CURRENT) => None, // don't include the right token tree parenthesis if it exists tok @ Some(_) if tok == r_paren => None, // only nodes that we can find are other TokenTrees, those are unexpected in this parse though diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index 8a8bc07945f1a..b1d2b34ea48d6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -17,7 +17,7 @@ use ide_db::{ }; use itertools::Itertools; -use span::FileId; +use span::{Edition, FileId}; use syntax::{ ast::{self, HasLoopBody}, match_ast, AstNode, AstToken, @@ -55,7 +55,7 @@ pub(crate) fn goto_definition( | COMMENT => 4, // index and prefix ops T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3, - kind if kind.is_keyword() => 2, + kind if kind.is_keyword(Edition::CURRENT) => 2, T!['('] | T![')'] => 2, kind if kind.is_trivia() => 0, _ => 1, diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs index 8fcd38b4e3435..946dca85d4613 100644 --- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs +++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs @@ -11,7 +11,7 @@ use ide_db::{ }, FxHashMap, FxHashSet, RootDatabase, }; -use span::EditionedFileId; +use span::{Edition, EditionedFileId}; use syntax::{ ast::{self, HasLoopBody}, match_ast, AstNode, @@ -65,7 +65,7 @@ pub(crate) fn highlight_related( let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind { T![?] 
=> 4, // prefer `?` when the cursor is sandwiched like in `await$0?` T![->] => 4, - kind if kind.is_keyword() => 3, + kind if kind.is_keyword(Edition::CURRENT) => 3, IDENT | INT_NUMBER => 2, T![|] => 1, _ => 0, diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index 500674e32b30a..75f8ac2d2b1b8 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -14,6 +14,7 @@ use ide_db::{ FileRange, FxIndexSet, RootDatabase, }; use itertools::{multizip, Itertools}; +use span::Edition; use syntax::{ast, AstNode, SyntaxKind::*, SyntaxNode, T}; use crate::{ @@ -140,7 +141,7 @@ fn hover_simple( | T![_] => 4, // index and prefix ops and closure pipe T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] | T![|] => 3, - kind if kind.is_keyword() => 2, + kind if kind.is_keyword(Edition::CURRENT) => 2, T!['('] | T![')'] => 2, kind if kind.is_trivia() => 0, _ => 1, diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs index 7091b15b8a4ca..13b7ba1e2ba86 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs @@ -20,6 +20,7 @@ use rustc_apfloat::{ ieee::{Half as f16, Quad as f128}, Float, }; +use span::Edition; use stdx::format_to; use syntax::{algo, ast, match_ast, AstNode, AstToken, Direction, SyntaxToken, T}; @@ -251,7 +252,7 @@ pub(super) fn keyword( config: &HoverConfig, token: &SyntaxToken, ) -> Option { - if !token.kind().is_keyword() || !config.documentation || !config.keywords { + if !token.kind().is_keyword(Edition::CURRENT) || !config.documentation || !config.keywords { return None; } let parent = token.parent()?; diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index 64b82b31c7417..aef162bf0acea 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -17,6 +17,7 @@ use ide_db::{ }; use itertools::Itertools; use nohash_hasher::IntMap; +use span::Edition; use syntax::{ ast::{self, HasName}, match_ast, AstNode, @@ -305,7 +306,8 @@ fn handle_control_flow_keywords( FilePosition { file_id, offset }: FilePosition, ) -> Option { let file = sema.parse_guess_edition(file_id); - let token = file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword())?; + let token = + file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword(Edition::CURRENT))?; let references = match token.kind() { T![fn] | T![return] | T![try] => highlight_related::highlight_exit_points(sema, token), diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs index 9581474ca7bd2..ed0e3d89ddc66 100644 --- a/src/tools/rust-analyzer/crates/ide/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs @@ -6,6 +6,7 @@ use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics}; use ide_db::{ + base_db::SourceDatabase, defs::{Definition, NameClass, NameRefClass}, rename::{bail, format_err, source_edit_from_references, IdentifierKind}, source_change::SourceChangeBuilder, @@ -162,11 +163,12 @@ pub(crate) fn will_rename_file( let sema = Semantics::new(db); let module = sema.file_to_module_def(file_id)?; let def = Definition::Module(module); - let mut change = if is_raw_identifier(new_name_stem) { - def.rename(&sema, &SmolStr::from_iter(["r#", new_name_stem])).ok()? 
- } else { - def.rename(&sema, new_name_stem).ok()? - }; + let mut change = + if is_raw_identifier(new_name_stem, db.crate_graph()[module.krate().into()].edition) { + def.rename(&sema, &SmolStr::from_iter(["r#", new_name_stem])).ok()? + } else { + def.rename(&sema, new_name_stem).ok()? + }; change.file_system_edits.clear(); Some(change) } diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs index 291073f87735c..70fdc0a1b3a7e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs @@ -6,6 +6,7 @@ use ide_db::{ defs::{Definition, IdentClass, NameClass, NameRefClass}, FxHashMap, RootDatabase, SymbolKind, }; +use span::Edition; use stdx::hash_once; use syntax::{ ast, match_ast, AstNode, AstToken, NodeOrToken, @@ -41,7 +42,7 @@ pub(super) fn token(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> O HlTag::None.into() } p if p.is_punct() => punctuation(sema, token, p), - k if k.is_keyword() => keyword(sema, token, k)?, + k if k.is_keyword(Edition::CURRENT) => keyword(sema, token, k)?, _ => return None, }; Some(highlight) diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs index 1099d9c23b721..460fc4fe141c7 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs @@ -1,4 +1,5 @@ //! Syntax highlighting for macro_rules!. +use span::Edition; use syntax::{SyntaxKind, SyntaxToken, TextRange, T}; use crate::{HlRange, HlTag}; @@ -117,7 +118,7 @@ fn update_macro_state(state: &mut MacroMatcherParseState, tok: &SyntaxToken) { fn is_metavariable(token: &SyntaxToken) -> Option { match token.kind() { - kind if kind == SyntaxKind::IDENT || kind.is_keyword() => { + kind if kind == SyntaxKind::IDENT || kind.is_keyword(Edition::CURRENT) => { if let Some(_dollar) = token.prev_token().filter(|t| t.kind() == T![$]) { return Some(token.text_range()); } diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs index 13fc61074d091..ff924830ae017 100644 --- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs +++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs @@ -178,19 +178,8 @@ impl<'a> Converter<'a> { rustc_lexer::TokenKind::Whitespace => WHITESPACE, rustc_lexer::TokenKind::Ident if token_text == "_" => UNDERSCORE, - rustc_lexer::TokenKind::Ident - if ["async", "await", "dyn", "try"].contains(&token_text) - && !self.edition.at_least_2018() => - { - IDENT - } - rustc_lexer::TokenKind::Ident - if token_text == "gen" && !self.edition.at_least_2024() => - { - IDENT - } rustc_lexer::TokenKind::Ident => { - SyntaxKind::from_keyword(token_text).unwrap_or(IDENT) + SyntaxKind::from_keyword(token_text, self.edition).unwrap_or(IDENT) } rustc_lexer::TokenKind::InvalidPrefix | rustc_lexer::TokenKind::InvalidIdent => { err = "Ident contains invalid characters"; diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs index 1cf81e79b03cf..7adedba7c4382 100644 --- a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs +++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs @@ -35,12 +35,10 @@ impl LexedStr<'_> { was_joint = false } else if kind == 
SyntaxKind::IDENT { let token_text = self.text(i); - let contextual_kw = if !edition.at_least_2018() && token_text == "dyn" { - SyntaxKind::DYN_KW - } else { - SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT) - }; - res.push_ident(contextual_kw); + res.push_ident( + SyntaxKind::from_contextual_keyword(token_text, edition) + .unwrap_or(SyntaxKind::IDENT), + ) } else { if was_joint { res.was_joint(); diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs index 7bddf88740136..eaacd88e3e327 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs @@ -1,6 +1,7 @@ //! Generated by `cargo codegen grammar`, do not edit by hand. #![allow(bad_style, missing_docs, unreachable_pub)] +use crate::Edition; #[doc = r" The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`."] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] #[repr(u16)] @@ -64,8 +65,6 @@ pub enum SyntaxKind { SELF_TYPE_KW, ABSTRACT_KW, AS_KW, - ASYNC_KW, - AWAIT_KW, BECOME_KW, BOX_KW, BREAK_KW, @@ -73,7 +72,6 @@ pub enum SyntaxKind { CONTINUE_KW, CRATE_KW, DO_KW, - DYN_KW, ELSE_KW, ENUM_KW, EXTERN_KW, @@ -81,7 +79,6 @@ pub enum SyntaxKind { FINAL_KW, FN_KW, FOR_KW, - GEN_KW, IF_KW, IMPL_KW, IN_KW, @@ -103,7 +100,6 @@ pub enum SyntaxKind { SUPER_KW, TRAIT_KW, TRUE_KW, - TRY_KW, TYPE_KW, TYPEOF_KW, UNSAFE_KW, @@ -114,13 +110,18 @@ pub enum SyntaxKind { WHILE_KW, YIELD_KW, ASM_KW, + ASYNC_KW, AUTO_KW, + AWAIT_KW, BUILTIN_KW, DEFAULT_KW, + DYN_KW, FORMAT_ARGS_KW, + GEN_KW, MACRO_RULES_KW, OFFSET_OF_KW, RAW_KW, + TRY_KW, UNION_KW, YEET_KW, BYTE, @@ -296,14 +297,14 @@ pub enum SyntaxKind { } use self::SyntaxKind::*; impl SyntaxKind { - pub fn is_keyword(self) -> bool { + #[doc = r" Checks whether this syntax kind is a strict keyword for the given edition."] + #[doc = r" Strict keywords are identifiers that are always considered keywords."] + pub fn is_strict_keyword(self, edition: Edition) -> bool { matches!( self, SELF_TYPE_KW | ABSTRACT_KW | AS_KW - | ASYNC_KW - | AWAIT_KW | BECOME_KW | BOX_KW | BREAK_KW @@ -311,7 +312,6 @@ impl SyntaxKind { | CONTINUE_KW | CRATE_KW | DO_KW - | DYN_KW | ELSE_KW | ENUM_KW | EXTERN_KW @@ -319,7 +319,6 @@ impl SyntaxKind { | FINAL_KW | FN_KW | FOR_KW - | GEN_KW | IF_KW | IMPL_KW | IN_KW @@ -341,7 +340,6 @@ impl SyntaxKind { | SUPER_KW | TRAIT_KW | TRUE_KW - | TRY_KW | TYPE_KW | TYPEOF_KW | UNSAFE_KW @@ -351,17 +349,103 @@ impl SyntaxKind { | WHERE_KW | WHILE_KW | YIELD_KW - | ASM_KW - | AUTO_KW - | BUILTIN_KW - | DEFAULT_KW - | FORMAT_ARGS_KW - | MACRO_RULES_KW - | OFFSET_OF_KW - | RAW_KW - | UNION_KW - | YEET_KW - ) + ) || match self { + ASYNC_KW if Edition::Edition2018 <= edition => true, + AWAIT_KW if Edition::Edition2018 <= edition => true, + DYN_KW if Edition::Edition2018 <= edition => true, + GEN_KW if Edition::Edition2024 <= edition => true, + TRY_KW if Edition::Edition2018 <= edition => true, + _ => false, + } + } + #[doc = r" Checks whether this syntax kind is a weak keyword for the given edition."] + #[doc = r" Weak keywords are identifiers that are considered keywords only in certain contexts."] + pub fn is_contextual_keyword(self, edition: Edition) -> bool { + match self { + ASM_KW => true, + AUTO_KW => true, + BUILTIN_KW => true, + DEFAULT_KW => true, + DYN_KW if edition < Edition::Edition2018 => true, + FORMAT_ARGS_KW => true, + MACRO_RULES_KW => true, + 
OFFSET_OF_KW => true, + RAW_KW => true, + UNION_KW => true, + YEET_KW => true, + _ => false, + } + } + #[doc = r" Checks whether this syntax kind is a strict or weak keyword for the given edition."] + pub fn is_keyword(self, edition: Edition) -> bool { + matches!( + self, + SELF_TYPE_KW + | ABSTRACT_KW + | AS_KW + | BECOME_KW + | BOX_KW + | BREAK_KW + | CONST_KW + | CONTINUE_KW + | CRATE_KW + | DO_KW + | ELSE_KW + | ENUM_KW + | EXTERN_KW + | FALSE_KW + | FINAL_KW + | FN_KW + | FOR_KW + | IF_KW + | IMPL_KW + | IN_KW + | LET_KW + | LOOP_KW + | MACRO_KW + | MATCH_KW + | MOD_KW + | MOVE_KW + | MUT_KW + | OVERRIDE_KW + | PRIV_KW + | PUB_KW + | REF_KW + | RETURN_KW + | SELF_KW + | STATIC_KW + | STRUCT_KW + | SUPER_KW + | TRAIT_KW + | TRUE_KW + | TYPE_KW + | TYPEOF_KW + | UNSAFE_KW + | UNSIZED_KW + | USE_KW + | VIRTUAL_KW + | WHERE_KW + | WHILE_KW + | YIELD_KW + ) || match self { + ASYNC_KW if Edition::Edition2018 <= edition => true, + AWAIT_KW if Edition::Edition2018 <= edition => true, + DYN_KW if Edition::Edition2018 <= edition => true, + GEN_KW if Edition::Edition2024 <= edition => true, + TRY_KW if Edition::Edition2018 <= edition => true, + ASM_KW => true, + AUTO_KW => true, + BUILTIN_KW => true, + DEFAULT_KW => true, + DYN_KW if edition < Edition::Edition2018 => true, + FORMAT_ARGS_KW => true, + MACRO_RULES_KW => true, + OFFSET_OF_KW => true, + RAW_KW => true, + UNION_KW => true, + YEET_KW => true, + _ => false, + } } pub fn is_punct(self) -> bool { matches!( @@ -434,13 +518,11 @@ impl SyntaxKind { | STRING ) } - pub fn from_keyword(ident: &str) -> Option { + pub fn from_keyword(ident: &str, edition: Edition) -> Option { let kw = match ident { "Self" => SELF_TYPE_KW, "abstract" => ABSTRACT_KW, "as" => AS_KW, - "async" => ASYNC_KW, - "await" => AWAIT_KW, "become" => BECOME_KW, "box" => BOX_KW, "break" => BREAK_KW, @@ -448,7 +530,6 @@ impl SyntaxKind { "continue" => CONTINUE_KW, "crate" => CRATE_KW, "do" => DO_KW, - "dyn" => DYN_KW, "else" => ELSE_KW, "enum" => ENUM_KW, "extern" => EXTERN_KW, @@ -456,7 +537,6 @@ impl SyntaxKind { "final" => FINAL_KW, "fn" => FN_KW, "for" => FOR_KW, - "gen" => GEN_KW, "if" => IF_KW, "impl" => IMPL_KW, "in" => IN_KW, @@ -478,7 +558,6 @@ impl SyntaxKind { "super" => SUPER_KW, "trait" => TRAIT_KW, "true" => TRUE_KW, - "try" => TRY_KW, "type" => TYPE_KW, "typeof" => TYPEOF_KW, "unsafe" => UNSAFE_KW, @@ -488,16 +567,22 @@ impl SyntaxKind { "where" => WHERE_KW, "while" => WHILE_KW, "yield" => YIELD_KW, + "async" if Edition::Edition2018 <= edition => ASYNC_KW, + "await" if Edition::Edition2018 <= edition => AWAIT_KW, + "dyn" if Edition::Edition2018 <= edition => DYN_KW, + "gen" if Edition::Edition2024 <= edition => GEN_KW, + "try" if Edition::Edition2018 <= edition => TRY_KW, _ => return None, }; Some(kw) } - pub fn from_contextual_keyword(ident: &str) -> Option { + pub fn from_contextual_keyword(ident: &str, edition: Edition) -> Option { let kw = match ident { "asm" => ASM_KW, "auto" => AUTO_KW, "builtin" => BUILTIN_KW, "default" => DEFAULT_KW, + "dyn" if edition < Edition::Edition2018 => DYN_KW, "format_args" => FORMAT_ARGS_KW, "macro_rules" => MACRO_RULES_KW, "offset_of" => OFFSET_OF_KW, @@ -544,4 +629,4 @@ impl SyntaxKind { } } #[macro_export] -macro_rules ! 
T { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; 
[let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } +macro_rules ! T { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] 
=> { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: 
SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs index b0afd245c52e3..5153db237275c 100644 --- a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs @@ -307,7 +307,8 @@ where tt::Ident::new(&text, conv.span_for(abs_range)).into() } UNDERSCORE => make_ident!(), - k if k.is_keyword() => make_ident!(), + // FIXME: Edition + k if k.is_keyword(Edition::CURRENT) => make_ident!(), k if k.is_literal() => { let text = token.to_text(conv); let span = conv.span_for(abs_range); diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs index 2c54899268834..14216e3093285 100644 --- a/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs @@ -64,14 +64,12 @@ pub fn to_parser_input( "_" => res.push(T![_]), i if i.starts_with('\'') => res.push(LIFETIME_IDENT), _ if ident.is_raw.yes() => res.push(IDENT), - "gen" if !edition.at_least_2024() => res.push(IDENT), - "dyn" if !edition.at_least_2018() => res.push_ident(DYN_KW), - "async" | "await" | "try" if !edition.at_least_2018() => res.push(IDENT), - text => match SyntaxKind::from_keyword(text) { + text => match SyntaxKind::from_keyword(text, edition) { Some(kind) => res.push(kind), None => { - let contextual_keyword = SyntaxKind::from_contextual_keyword(text) - .unwrap_or(SyntaxKind::IDENT); + let contextual_keyword = + SyntaxKind::from_contextual_keyword(text, edition) + .unwrap_or(SyntaxKind::IDENT); res.push_ident(contextual_keyword); } }, diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram index c23bcd6914910..069be2df3a440 100644 --- a/src/tools/rust-analyzer/crates/syntax/rust.ungram +++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram @@ -9,8 +9,6 @@ // // -- comment // Name = -- non-terminal definition // 'ident' -- keyword or punct token (terminal) -// '?ident' -- contextual keyword (terminal) -// too) // '#ident' -- generic token (terminal) // '@ident' -- literal token (terminal) // A B -- sequence @@ -152,7 +150,7 @@ Item = MacroRules = Attr* Visibility? - '?macro_rules' '!' Name + 'macro_rules' '!' 
Name TokenTree MacroDef = @@ -188,7 +186,7 @@ UseTreeList = Fn = Attr* Visibility? - '?default'? 'const'? 'async'? 'unsafe'? Abi? + 'default'? 'const'? 'async'? 'unsafe'? Abi? 'fn' Name GenericParamList? ParamList RetType? WhereClause? (body:BlockExpr | ';') @@ -220,7 +218,7 @@ RetType = TypeAlias = Attr* Visibility? - '?default'? + 'default'? 'type' Name GenericParamList? (':' TypeBoundList?)? WhereClause? ('=' Type)? ';' @@ -263,7 +261,7 @@ Variant = Union = Attr* Visibility? - '?union' Name GenericParamList? WhereClause? + 'union' Name GenericParamList? WhereClause? RecordFieldList // A Data Type. @@ -276,7 +274,7 @@ Adt = Const = Attr* Visibility? - '?default'? + 'default'? 'const' (Name | '_') ':' Type ('=' body:Expr)? ';' @@ -287,7 +285,7 @@ Static = Trait = Attr* Visibility? - 'unsafe'? '?auto'? + 'unsafe'? 'auto'? 'trait' Name GenericParamList? (':' TypeBoundList?)? WhereClause? AssocItemList @@ -306,7 +304,7 @@ AssocItem = Impl = Attr* Visibility? - '?default'? 'unsafe'? + 'default'? 'unsafe'? 'impl' GenericParamList? ('const'? '!'? trait:Type 'for')? self_ty:Type WhereClause? AssocItemList @@ -387,13 +385,13 @@ Expr = | UnderscoreExpr OffsetOfExpr = - Attr* '?builtin' '#' '?offset_of' '(' Type ',' fields:(NameRef ('.' NameRef)* ) ')' + Attr* 'builtin' '#' 'offset_of' '(' Type ',' fields:(NameRef ('.' NameRef)* ) ')' AsmExpr = - Attr* '?builtin' '#' '?asm' '(' Expr ')' + Attr* 'builtin' '#' 'asm' '(' Expr ')' FormatArgsExpr = - Attr* '?builtin' '#' '?format_args' '(' + Attr* 'builtin' '#' 'format_args' '(' template:Expr (',' args:(FormatArgsArg (',' FormatArgsArg)* ','?)? )? ')' @@ -425,7 +423,7 @@ StmtList = '}' RefExpr = - Attr* '&' (('?raw' 'const'?)| ('?raw'? 'mut') ) Expr + Attr* '&' (('raw' 'const'?)| ('raw'? 'mut') ) Expr TryExpr = Attr* Expr '?' @@ -550,7 +548,7 @@ YieldExpr = Attr* 'yield' Expr? YeetExpr = - Attr* 'do' '?yeet' Expr? + Attr* 'do' 'yeet' Expr? 
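// (Editorial annotation, not part of the grammar: with the '?ident' marker dropped from the
// legend, contextual keywords such as 'macro_rules', 'default', 'union', 'raw', 'auto' and
// 'yeet' are now written like ordinary keyword tokens in the rules above; the weak vs.
// strict vs. edition-dependent classification moves into the codegen tables
// (CONTEXTUAL_KEYWORDS / EDITION_DEPENDENT_KEYWORDS) later in this patch.)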
LetExpr = Attr* 'let' Pat '=' Expr diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index 0228d9dd7135c..645575c25ee64 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -117,7 +117,7 @@ pub fn name_ref(name_ref: &str) -> ast::NameRef { ast_from_text(&format!("fn f() {{ {raw_escape}{name_ref}; }}")) } fn raw_ident_esc(ident: &str) -> &'static str { - if is_raw_identifier(ident) { + if is_raw_identifier(ident, Edition::CURRENT) { "r#" } else { "" diff --git a/src/tools/rust-analyzer/crates/syntax/src/utils.rs b/src/tools/rust-analyzer/crates/syntax/src/utils.rs index a38f8b2b55d4e..77d49b442e0ad 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/utils.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/utils.rs @@ -2,7 +2,7 @@ use crate::SyntaxKind; -pub fn is_raw_identifier(name: &str) -> bool { - let is_keyword = SyntaxKind::from_keyword(name).is_some(); +pub fn is_raw_identifier(name: &str, edition: parser::Edition) -> bool { + let is_keyword = SyntaxKind::from_keyword(name, edition).is_some(); is_keyword && !matches!(name, "self" | "crate" | "super" | "Self") } diff --git a/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs b/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs index 0352539754b05..39e06f9642db1 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs @@ -396,24 +396,66 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String { let punctuation = grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::>(); - let x = |&name| match name { + let fmt_kw_as_variant = |&name| match name { "Self" => format_ident!("SELF_TYPE_KW"), name => format_ident!("{}_KW", to_upper_snake_case(name)), }; - let full_keywords_values = grammar.keywords; - let full_keywords = full_keywords_values.iter().map(x); + let strict_keywords = grammar.keywords; + let strict_keywords_variants = + strict_keywords.iter().map(fmt_kw_as_variant).collect::>(); + let strict_keywords_tokens = strict_keywords.iter().map(|it| format_ident!("{it}")); - let contextual_keywords_values = &grammar.contextual_keywords; - let contextual_keywords = contextual_keywords_values.iter().map(x); + let edition_dependent_keywords_variants_match_arm = grammar + .edition_dependent_keywords + .iter() + .map(|(kw, ed)| { + let kw = fmt_kw_as_variant(kw); + quote! { #kw if #ed <= edition } + }) + .collect::>(); + let edition_dependent_keywords_str_match_arm = grammar + .edition_dependent_keywords + .iter() + .map(|(kw, ed)| { + quote! { #kw if #ed <= edition } + }) + .collect::>(); + let edition_dependent_keywords_variants = grammar + .edition_dependent_keywords + .iter() + .map(|(kw, _)| fmt_kw_as_variant(kw)) + .collect::>(); + let edition_dependent_keywords_tokens = + grammar.edition_dependent_keywords.iter().map(|(it, _)| format_ident!("{it}")); + + let contextual_keywords = grammar.contextual_keywords; + let contextual_keywords_variants = + contextual_keywords.iter().map(fmt_kw_as_variant).collect::>(); + let contextual_keywords_tokens = contextual_keywords.iter().map(|it| format_ident!("{it}")); + let contextual_keywords_str_match_arm = grammar.contextual_keywords.iter().map(|kw| { + match grammar.edition_dependent_keywords.iter().find(|(ed_kw, _)| ed_kw == kw) { + Some((_, ed)) => quote! { #kw if edition < #ed }, + None => quote! 
{ #kw }, + } + }); + let contextual_keywords_variants_match_arm = grammar + .contextual_keywords + .iter() + .map(|kw_s| { + let kw = fmt_kw_as_variant(kw_s); + match grammar.edition_dependent_keywords.iter().find(|(ed_kw, _)| ed_kw == kw_s) { + Some((_, ed)) => quote! { #kw if edition < #ed }, + None => quote! { #kw }, + } + }) + .collect::>(); - let all_keywords_values = grammar - .keywords + let non_strict_keyword_variants = contextual_keywords_variants .iter() - .chain(grammar.contextual_keywords.iter()) - .copied() + .chain(edition_dependent_keywords_variants.iter()) + .sorted() + .dedup() .collect::>(); - let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw)); - let all_keywords = all_keywords_values.iter().map(x).collect::>(); let literals = grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::>(); @@ -424,6 +466,8 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String { let ast = quote! { #![allow(bad_style, missing_docs, unreachable_pub)] + use crate::Edition; + /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] #[repr(u16)] @@ -435,7 +479,8 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String { #[doc(hidden)] EOF, #(#punctuation,)* - #(#all_keywords,)* + #(#strict_keywords_variants,)* + #(#non_strict_keyword_variants,)* #(#literals,)* #(#tokens,)* #(#nodes,)* @@ -447,31 +492,55 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String { use self::SyntaxKind::*; impl SyntaxKind { - pub fn is_keyword(self) -> bool { - matches!(self, #(#all_keywords)|*) + /// Checks whether this syntax kind is a strict keyword for the given edition. + /// Strict keywords are identifiers that are always considered keywords. + pub fn is_strict_keyword(self, edition: Edition) -> bool { + matches!(self, #(#strict_keywords_variants)|*) + || match self { + #(#edition_dependent_keywords_variants_match_arm => true,)* + _ => false, + } } - pub fn is_punct(self) -> bool { + /// Checks whether this syntax kind is a weak keyword for the given edition. + /// Weak keywords are identifiers that are considered keywords only in certain contexts. + pub fn is_contextual_keyword(self, edition: Edition) -> bool { + match self { + #(#contextual_keywords_variants_match_arm => true,)* + _ => false, + } + } - matches!(self, #(#punctuation)|*) + /// Checks whether this syntax kind is a strict or weak keyword for the given edition. 
+ pub fn is_keyword(self, edition: Edition) -> bool { + matches!(self, #(#strict_keywords_variants)|*) + || match self { + #(#edition_dependent_keywords_variants_match_arm => true,)* + #(#contextual_keywords_variants_match_arm => true,)* + _ => false, + } + } + pub fn is_punct(self) -> bool { + matches!(self, #(#punctuation)|*) } pub fn is_literal(self) -> bool { matches!(self, #(#literals)|*) } - pub fn from_keyword(ident: &str) -> Option { + pub fn from_keyword(ident: &str, edition: Edition) -> Option { let kw = match ident { - #(#full_keywords_values => #full_keywords,)* + #(#strict_keywords => #strict_keywords_variants,)* + #(#edition_dependent_keywords_str_match_arm => #edition_dependent_keywords_variants,)* _ => return None, }; Some(kw) } - pub fn from_contextual_keyword(ident: &str) -> Option { + pub fn from_contextual_keyword(ident: &str, edition: Edition) -> Option { let kw = match ident { - #(#contextual_keywords_values => #contextual_keywords,)* + #(#contextual_keywords_str_match_arm => #contextual_keywords_variants,)* _ => return None, }; Some(kw) @@ -489,7 +558,9 @@ fn generate_syntax_kinds(grammar: KindsSrc) -> String { #[macro_export] macro_rules! T { #([#punctuation_values] => { $crate::SyntaxKind::#punctuation };)* - #([#all_keywords_idents] => { $crate::SyntaxKind::#all_keywords };)* + #([#strict_keywords_tokens] => { $crate::SyntaxKind::#strict_keywords_variants };)* + #([#contextual_keywords_tokens] => { $crate::SyntaxKind::#contextual_keywords_variants };)* + #([#edition_dependent_keywords_tokens] => { $crate::SyntaxKind::#edition_dependent_keywords_variants };)* [lifetime_ident] => { $crate::SyntaxKind::LIFETIME_IDENT }; [int_number] => { $crate::SyntaxKind::INT_NUMBER }; [ident] => { $crate::SyntaxKind::IDENT }; diff --git a/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs b/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs index 3444f89908b93..34151bd958765 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs @@ -1,5 +1,7 @@ //! Defines input for code generation process. +use quote::ToTokens; + use crate::codegen::grammar::to_upper_snake_case; #[derive(Copy, Clone, Debug)] @@ -10,6 +12,35 @@ pub(crate) struct KindsSrc { pub(crate) literals: &'static [&'static str], pub(crate) tokens: &'static [&'static str], pub(crate) nodes: &'static [&'static str], + pub(crate) edition_dependent_keywords: &'static [(&'static str, Edition)], +} + +#[allow(dead_code)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub(super) enum Edition { + Edition2015, + Edition2018, + Edition2021, + Edition2024, +} + +impl ToTokens for Edition { + fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { + match self { + Edition::Edition2015 => { + tokens.extend(quote::quote! { Edition::Edition2015 }); + } + Edition::Edition2018 => { + tokens.extend(quote::quote! { Edition::Edition2018 }); + } + Edition::Edition2021 => { + tokens.extend(quote::quote! { Edition::Edition2021 }); + } + Edition::Edition2024 => { + tokens.extend(quote::quote! { Edition::Edition2024 }); + } + } + } } /// The punctuations of the language. 
@@ -75,17 +106,32 @@ const EOF: &str = "EOF"; const RESERVED: &[&str] = &[ "abstract", "become", "box", "do", "final", "macro", "override", "priv", "typeof", "unsized", - "virtual", "yield", "try", + "virtual", "yield", +]; +// keywords that are keywords only in specific parse contexts +#[doc(alias = "WEAK_KEYWORDS")] +const CONTEXTUAL_KEYWORDS: &[&str] = + &["macro_rules", "union", "default", "raw", "dyn", "auto", "yeet"]; +// keywords we use for special macro expansions +const CONTEXTUAL_BUILTIN_KEYWORDS: &[&str] = &["builtin", "offset_of", "format_args", "asm"]; +// keywords that are keywords depending on the edition +const EDITION_DEPENDENT_KEYWORDS: &[(&str, Edition)] = &[ + ("try", Edition::Edition2018), + ("dyn", Edition::Edition2018), + ("async", Edition::Edition2018), + ("await", Edition::Edition2018), + ("gen", Edition::Edition2024), ]; -const CONTEXTUAL_RESERVED: &[&str] = &[]; pub(crate) fn generate_kind_src( nodes: &[AstNodeSrc], enums: &[AstEnumSrc], grammar: &ungrammar::Grammar, ) -> KindsSrc { + let mut contextual_keywords: Vec<&_> = + CONTEXTUAL_KEYWORDS.iter().chain(CONTEXTUAL_BUILTIN_KEYWORDS).copied().collect(); + let mut keywords: Vec<&_> = Vec::new(); - let mut contextual_keywords: Vec<&_> = Vec::new(); let mut tokens: Vec<&_> = TOKENS.to_vec(); let mut literals: Vec<&_> = Vec::new(); let mut used_puncts = vec![false; PUNCT.len()]; @@ -103,9 +149,7 @@ pub(crate) fn generate_kind_src( ("#", token) if !token.is_empty() => { tokens.push(String::leak(to_upper_snake_case(token))); } - ("?", kw) if !kw.is_empty() => { - contextual_keywords.push(String::leak(kw.to_owned())); - } + _ if contextual_keywords.contains(&name) => {} _ if name.chars().all(char::is_alphabetic) => { keywords.push(String::leak(name.to_owned())); } @@ -124,9 +168,14 @@ pub(crate) fn generate_kind_src( keywords.extend(RESERVED.iter().copied()); keywords.sort(); keywords.dedup(); - contextual_keywords.extend(CONTEXTUAL_RESERVED.iter().copied()); contextual_keywords.sort(); contextual_keywords.dedup(); + let mut edition_dependent_keywords: Vec<(&_, _)> = EDITION_DEPENDENT_KEYWORDS.to_vec(); + edition_dependent_keywords.sort(); + edition_dependent_keywords.dedup(); + + keywords.retain(|&it| !contextual_keywords.contains(&it)); + keywords.retain(|&it| !edition_dependent_keywords.iter().any(|&(kw, _)| kw == it)); // we leak things here for simplicity, that way we don't have to deal with lifetimes // The execution is a one shot job so thats fine @@ -142,12 +191,21 @@ pub(crate) fn generate_kind_src( nodes.sort(); let keywords = Vec::leak(keywords); let contextual_keywords = Vec::leak(contextual_keywords); + let edition_dependent_keywords = Vec::leak(edition_dependent_keywords); let literals = Vec::leak(literals); literals.sort(); let tokens = Vec::leak(tokens); tokens.sort(); - KindsSrc { punct: PUNCT, nodes, keywords, contextual_keywords, literals, tokens } + KindsSrc { + punct: PUNCT, + nodes, + keywords, + contextual_keywords, + edition_dependent_keywords, + literals, + tokens, + } } #[derive(Default, Debug)] From bf4d31d11baf0fe71e635fe3b65fe108b64afbb5 Mon Sep 17 00:00:00 2001 From: David Richey Date: Thu, 15 Aug 2024 13:49:45 -0500 Subject: [PATCH 009/124] Add scip/lsif flag to exclude vendored libaries --- src/tools/rust-analyzer/crates/ide/src/lib.rs | 4 +- .../crates/ide/src/static_index.rs | 65 ++++++++++++++++--- .../crates/rust-analyzer/src/cli/flags.rs | 9 +++ .../crates/rust-analyzer/src/cli/lsif.rs | 10 ++- .../crates/rust-analyzer/src/cli/scip.rs | 26 ++++++-- 5 files changed, 95 
insertions(+), 19 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index eff4bc3d3766d..ba0aaae19c9fd 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -104,7 +104,9 @@ pub use crate::{ rename::RenameError, runnables::{Runnable, RunnableKind, TestId}, signature_help::SignatureHelp, - static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData}, + static_index::{ + StaticIndex, StaticIndexedFile, TokenId, TokenStaticData, VendoredLibrariesConfig, + }, syntax_highlighting::{ tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag}, HighlightConfig, HlRange, diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index eaccee08e8c01..d3c0cac8206af 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -119,6 +119,11 @@ fn documentation_for_definition( def.docs(sema.db, famous_defs.as_ref()) } +pub enum VendoredLibrariesConfig<'a> { + Included { workspace_root: &'a VfsPath }, + Excluded, +} + impl StaticIndex<'_> { fn add_file(&mut self, file_id: FileId) { let current_crate = crates_for(self.db, file_id).pop().map(Into::into); @@ -230,15 +235,22 @@ impl StaticIndex<'_> { self.files.push(result); } - pub fn compute<'a>(analysis: &'a Analysis, workspace_root: &VfsPath) -> StaticIndex<'a> { + pub fn compute<'a>( + analysis: &'a Analysis, + vendored_libs_config: VendoredLibrariesConfig<'_>, + ) -> StaticIndex<'a> { let db = &*analysis.db; let work = all_modules(db).into_iter().filter(|module| { let file_id = module.definition_source_file_id(db).original_file(db); let source_root = db.file_source_root(file_id.into()); let source_root = db.source_root(source_root); - let is_vendored = source_root - .path_for_file(&file_id.into()) - .is_some_and(|module_path| module_path.starts_with(workspace_root)); + let is_vendored = match vendored_libs_config { + VendoredLibrariesConfig::Included { workspace_root } => source_root + .path_for_file(&file_id.into()) + .is_some_and(|module_path| module_path.starts_with(workspace_root)), + VendoredLibrariesConfig::Excluded => false, + }; + !source_root.is_library || is_vendored }); let mut this = StaticIndex { @@ -268,10 +280,11 @@ mod tests { use ide_db::{base_db::VfsPath, FileRange, FxHashSet}; use syntax::TextSize; - fn check_all_ranges(ra_fixture: &str) { + use super::VendoredLibrariesConfig; + + fn check_all_ranges(ra_fixture: &str, vendored_libs_config: VendoredLibrariesConfig<'_>) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); - let s = - StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned())); + let s = StaticIndex::compute(&analysis, vendored_libs_config); let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect(); for f in s.files { for (range, _) in f.tokens { @@ -288,10 +301,9 @@ mod tests { } #[track_caller] - fn check_definitions(ra_fixture: &str) { + fn check_definitions(ra_fixture: &str, vendored_libs_config: VendoredLibrariesConfig<'_>) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); - let s = - StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned())); + let s = StaticIndex::compute(&analysis, vendored_libs_config); let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect(); for (_, t) in s.tokens.iter() { if let Some(t) 
= t.definition { @@ -319,6 +331,9 @@ struct Foo; enum E { X(Foo) } //^ ^ ^^^ "#, + VendoredLibrariesConfig::Included { + workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()), + }, ); check_definitions( r#" @@ -327,6 +342,9 @@ struct Foo; enum E { X(Foo) } //^ ^ "#, + VendoredLibrariesConfig::Included { + workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()), + }, ); } @@ -349,6 +367,9 @@ pub func() { } "#, + VendoredLibrariesConfig::Included { + workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()), + }, ); } @@ -367,9 +388,30 @@ struct ExternalLibrary(i32); struct VendoredLibrary(i32); //^^^^^^^^^^^^^^^ ^^^ "#, + VendoredLibrariesConfig::Included { + workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()), + }, ); } + #[test] + fn vendored_crate_excluded() { + check_all_ranges( + r#" +//- /workspace/main.rs crate:main deps:external,vendored +struct Main(i32); + //^^^^ ^^^ + +//- /external/lib.rs new_source_root:library crate:external@0.1.0,https://a.b/foo.git library +struct ExternalLibrary(i32); + +//- /workspace/vendored/lib.rs new_source_root:library crate:vendored@0.1.0,https://a.b/bar.git library +struct VendoredLibrary(i32); +"#, + VendoredLibrariesConfig::Excluded, + ) + } + #[test] fn derives() { check_all_ranges( @@ -384,6 +426,9 @@ pub macro Copy {} struct Hello(i32); //^^^^^ ^^^ "#, + VendoredLibrariesConfig::Included { + workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()), + }, ); } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs index 2a3e74c680b92..16d90de661ad1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs @@ -138,6 +138,9 @@ xflags::xflags! { cmd lsif { required path: PathBuf + + /// Exclude code from vendored libraries from the resulting index. + optional --exclude-vendored-libraries } cmd scip { @@ -148,6 +151,9 @@ xflags::xflags! { /// A path to an json configuration file that can be used to customize cargo behavior. optional --config-path config_path: PathBuf + + /// Exclude code from vendored libraries from the resulting index. 
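        // (Editorial annotation, not part of this patch: hypothetical invocations showing the
        // new flag on both index commands; the paths and exact argument order are illustrative
        // only.)
        //
        //     rust-analyzer lsif . --exclude-vendored-libraries
        //     rust-analyzer scip . --exclude-vendored-libraries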
+ optional --exclude-vendored-libraries } } } @@ -259,6 +265,8 @@ pub struct Search { #[derive(Debug)] pub struct Lsif { pub path: PathBuf, + + pub exclude_vendored_libraries: bool, } #[derive(Debug)] @@ -267,6 +275,7 @@ pub struct Scip { pub output: Option, pub config_path: Option, + pub exclude_vendored_libraries: bool, } impl RustAnalyzer { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs index 016f0edc2dd3c..89fe712ced020 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs @@ -5,7 +5,7 @@ use std::time::Instant; use ide::{ Analysis, AnalysisHost, FileId, FileRange, MonikerKind, PackageInformation, RootDatabase, - StaticIndex, StaticIndexedFile, TokenId, TokenStaticData, + StaticIndex, StaticIndexedFile, TokenId, TokenStaticData, VendoredLibrariesConfig, }; use ide_db::{line_index::WideEncoding, LineIndexDatabase}; use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; @@ -296,7 +296,13 @@ impl flags::Lsif { let db = host.raw_database(); let analysis = host.analysis(); - let si = StaticIndex::compute(&analysis, &path.clone().into()); + let vendored_libs_config = if self.exclude_vendored_libraries { + VendoredLibrariesConfig::Excluded + } else { + VendoredLibrariesConfig::Included { workspace_root: &path.clone().into() } + }; + + let si = StaticIndex::compute(&analysis, vendored_libs_config); let mut lsif = LsifManager::new(&analysis, db, &vfs); lsif.add_vertex(lsif::Vertex::MetaData(lsif::MetaData { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index 2fc0ef8d4daac..34b20851dd6bb 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -4,7 +4,7 @@ use std::{path::PathBuf, time::Instant}; use ide::{ AnalysisHost, LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile, - SymbolInformationKind, TextRange, TokenId, + SymbolInformationKind, TextRange, TokenId, VendoredLibrariesConfig, }; use ide_db::LineIndexDatabase; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; @@ -63,7 +63,13 @@ impl flags::Scip { let db = host.raw_database(); let analysis = host.analysis(); - let si = StaticIndex::compute(&analysis, &root.clone().into()); + let vendored_libs_config = if self.exclude_vendored_libraries { + VendoredLibrariesConfig::Excluded + } else { + VendoredLibrariesConfig::Included { workspace_root: &root.clone().into() } + }; + + let si = StaticIndex::compute(&analysis, vendored_libs_config); let metadata = scip_types::Metadata { version: scip_types::ProtocolVersion::UnspecifiedProtocolVersion.into(), @@ -352,8 +358,12 @@ mod test { let (host, position) = position(ra_fixture); let analysis = host.analysis(); - let si = - StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned())); + let si = StaticIndex::compute( + &analysis, + VendoredLibrariesConfig::Included { + workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()), + }, + ); let FilePosition { file_id, offset } = position; @@ -617,8 +627,12 @@ pub mod example_mod { host.raw_database_mut().apply_change(change_fixture.change); let analysis = host.analysis(); - let si = - StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned())); + let si = StaticIndex::compute( + 
&analysis, + VendoredLibrariesConfig::Included { + workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()), + }, + ); let file = si.files.first().unwrap(); let (_, token_id) = file.tokens.first().unwrap(); From 23c8dcd3cb78cc0adbbce29eefc84195325a2598 Mon Sep 17 00:00:00 2001 From: Tyler Mandry Date: Thu, 15 Aug 2024 15:18:15 -0700 Subject: [PATCH 010/124] Allow flycheck process to exit gracefully Assuming it isn't cancelled. Closes #17902. The only place CommandHandle::join is used is when the flycheck command finishes, so this commit changes the behavior of the method itself. --- src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs index f1009eb46602f..b19a1b8d16700 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs @@ -148,7 +148,6 @@ impl CommandHandle { } pub(crate) fn join(mut self) -> io::Result<()> { - let _ = self.child.0.kill(); let exit_status = self.child.0.wait()?; let (read_at_least_one_message, error) = self.thread.join()?; if read_at_least_one_message || exit_status.success() { From 642a0f84ca2f52c6e694ee9549379b86f7c04fd0 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Fri, 16 Aug 2024 09:53:37 +0300 Subject: [PATCH 011/124] Replace once_cell with std's recently stabilized OnceCell/Lock and LazyCell/Lock This doesn't get rid of the once_cell dependency, unfortunately, since we have dependencies that use it, but it's a nice to do cleanup. And when our deps will eventually get rid of once_cell we will get rid of it for free. --- src/tools/rust-analyzer/Cargo.lock | 7 ------ .../rust-analyzer/crates/hir-def/Cargo.toml | 1 - .../crates/hir-def/src/generics.rs | 24 ++++++++++++------- .../crates/hir-def/src/item_scope.rs | 5 ++-- .../crates/hir-def/src/item_tree.rs | 12 +++++----- .../rust-analyzer/crates/hir-ty/Cargo.toml | 1 - .../diagnostics/match_check/pat_analysis.rs | 7 +++--- .../rust-analyzer/crates/hir-ty/src/infer.rs | 3 +-- .../rust-analyzer/crates/hir-ty/src/lower.rs | 3 +-- .../rust-analyzer/crates/hir-ty/src/tests.rs | 6 ++--- src/tools/rust-analyzer/crates/hir/Cargo.toml | 1 - .../crates/ide-completion/Cargo.toml | 1 - .../src/completions/attribute.rs | 5 ++-- .../rust-analyzer/crates/ide-db/Cargo.toml | 1 - .../rust-analyzer/crates/ide-db/src/search.rs | 8 +++---- .../crates/ide-diagnostics/Cargo.toml | 1 - .../crates/ide-diagnostics/src/lib.rs | 11 +++++---- .../rust-analyzer/crates/syntax/Cargo.toml | 1 - .../crates/syntax/src/ast/make.rs | 5 ++-- 19 files changed, 49 insertions(+), 54 deletions(-) diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index b6bf516af1542..693e49c70fc9a 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -495,7 +495,6 @@ dependencies = [ "hir-ty", "intern", "itertools", - "once_cell", "rustc-hash", "smallvec", "span", @@ -528,7 +527,6 @@ dependencies = [ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "limit", "mbe", - "once_cell", "ra-ap-rustc_abi", "ra-ap-rustc_parse_format", "rustc-hash", @@ -595,7 +593,6 @@ dependencies = [ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "limit", "nohash-hasher", - "once_cell", "oorandom", "project-model", "ra-ap-rustc_abi", @@ -691,7 +688,6 @@ dependencies = [ "hir", "ide-db", 
"itertools", - "once_cell", "smallvec", "stdx", "syntax", @@ -720,7 +716,6 @@ dependencies = [ "line-index 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "memchr", "nohash-hasher", - "once_cell", "parser", "profile", "rayon", @@ -746,7 +741,6 @@ dependencies = [ "hir", "ide-db", "itertools", - "once_cell", "paths", "serde_json", "stdx", @@ -1938,7 +1932,6 @@ dependencies = [ "expect-test", "indexmap", "itertools", - "once_cell", "parser", "ra-ap-rustc_lexer", "rayon", diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml index 5b9d227e3152c..c8ba5da449e99 100644 --- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml @@ -23,7 +23,6 @@ fst = { version = "0.4.7", default-features = false } indexmap.workspace = true itertools.workspace = true la-arena.workspace = true -once_cell = "1.17.0" rustc-hash.workspace = true tracing.workspace = true smallvec.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs index ebaaef66db6c8..6c34ee086aa9b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs @@ -12,7 +12,6 @@ use hir_expand::{ }; use intern::Interned; use la_arena::{Arena, RawIdx}; -use once_cell::unsync::Lazy; use stdx::impl_from; use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds}; use triomphe::Arc; @@ -394,11 +393,16 @@ impl GenericParams { // Don't create an `Expander` if not needed since this // could cause a reparse after the `ItemTree` has been created due to the spanmap. - let mut expander = Lazy::new(|| { - (module.def_map(db), Expander::new(db, loc.id.file_id(), module)) - }); + let mut expander = None; for param in func_data.params.iter() { - generic_params.fill_implicit_impl_trait_args(db, &mut expander, param); + generic_params.fill_implicit_impl_trait_args( + db, + &mut expander, + &mut || { + (module.def_map(db), Expander::new(db, loc.id.file_id(), module)) + }, + param, + ); } Interned::new(generic_params.finish()) } @@ -597,7 +601,9 @@ impl GenericParamsCollector { fn fill_implicit_impl_trait_args( &mut self, db: &dyn DefDatabase, - exp: &mut Lazy<(Arc, Expander), impl FnOnce() -> (Arc, Expander)>, + // FIXME: Change this back to `LazyCell` if https://github.com/rust-lang/libs-team/issues/429 is accepted. 
+ exp: &mut Option<(Arc, Expander)>, + exp_fill: &mut dyn FnMut() -> (Arc, Expander), type_ref: &TypeRef, ) { type_ref.walk(&mut |type_ref| { @@ -617,7 +623,7 @@ impl GenericParamsCollector { } if let TypeRef::Macro(mc) = type_ref { let macro_call = mc.to_node(db.upcast()); - let (def_map, expander) = &mut **exp; + let (def_map, expander) = exp.get_or_insert_with(&mut *exp_fill); let module = expander.module.local_id; let resolver = |path: &_| { @@ -637,8 +643,8 @@ impl GenericParamsCollector { { let ctx = expander.ctx(db); let type_ref = TypeRef::from_ast(&ctx, expanded.tree()); - self.fill_implicit_impl_trait_args(db, &mut *exp, &type_ref); - exp.1.exit(mark); + self.fill_implicit_impl_trait_args(db, &mut *exp, exp_fill, &type_ref); + exp.get_or_insert_with(&mut *exp_fill).1.exit(mark); } } }); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index df6b1f55c1d96..485f434072dcb 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -1,12 +1,13 @@ //! Describes items defined or visible (ie, imported) in a certain scope. //! This is shared between modules and blocks. +use std::sync::LazyLock; + use base_db::CrateId; use hir_expand::{attrs::AttrId, db::ExpandDatabase, name::Name, AstId, MacroCallId}; use indexmap::map::Entry; use itertools::Itertools; use la_arena::Idx; -use once_cell::sync::Lazy; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; use stdx::format_to; @@ -129,7 +130,7 @@ struct DeriveMacroInvocation { derive_call_ids: SmallVec<[Option; 1]>, } -pub(crate) static BUILTIN_SCOPE: Lazy> = Lazy::new(|| { +pub(crate) static BUILTIN_SCOPE: LazyLock> = LazyLock::new(|| { BuiltinType::all_builtin_types() .iter() .map(|(name, ty)| (name.clone(), PerNs::types((*ty).into(), Visibility::Public, None))) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs index 28eebb286ed4b..10e6de31447ec 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs @@ -40,6 +40,7 @@ use std::{ fmt::{self, Debug}, hash::{Hash, Hasher}, ops::{Index, Range}, + sync::OnceLock, }; use ast::{AstNode, StructKind}; @@ -48,7 +49,6 @@ use either::Either; use hir_expand::{attrs::RawAttrs, name::Name, ExpandTo, HirFileId, InFile}; use intern::{Interned, Symbol}; use la_arena::{Arena, Idx, RawIdx}; -use once_cell::sync::OnceCell; use rustc_hash::FxHashMap; use smallvec::SmallVec; use span::{AstIdNode, FileAstId, SyntaxContextId}; @@ -101,7 +101,7 @@ pub struct ItemTree { impl ItemTree { pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc { let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered(); - static EMPTY: OnceCell> = OnceCell::new(); + static EMPTY: OnceLock> = OnceLock::new(); let syntax = db.parse_or_expand(file_id); @@ -152,7 +152,7 @@ impl ItemTree { pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc { let _p = tracing::info_span!("block_item_tree_query", ?block).entered(); - static EMPTY: OnceCell> = OnceCell::new(); + static EMPTY: OnceLock> = OnceLock::new(); let loc = block.lookup(db); let block = loc.ast_id.to_node(db.upcast()); @@ -626,9 +626,9 @@ impl Index for ItemTree { type Output = RawVisibility; fn index(&self, index: RawVisibilityId) -> &Self::Output { static VIS_PUB: RawVisibility = 
RawVisibility::Public; - static VIS_PRIV_IMPLICIT: OnceCell = OnceCell::new(); - static VIS_PRIV_EXPLICIT: OnceCell = OnceCell::new(); - static VIS_PUB_CRATE: OnceCell = OnceCell::new(); + static VIS_PRIV_IMPLICIT: OnceLock = OnceLock::new(); + static VIS_PRIV_EXPLICIT: OnceLock = OnceLock::new(); + static VIS_PUB_CRATE: OnceLock = OnceLock::new(); match index { RawVisibilityId::PRIV_IMPLICIT => VIS_PRIV_IMPLICIT.get_or_init(|| { diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml index b079b5675bd9e..989f0955e1e7d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml @@ -29,7 +29,6 @@ chalk-ir.workspace = true chalk-recursive.workspace = true chalk-derive.workspace = true la-arena.workspace = true -once_cell = "1.17.0" triomphe.workspace = true nohash-hasher.workspace = true typed-arena = "2.0.1" diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index a12e201cf3d97..dbc69f02c4049 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -1,10 +1,10 @@ //! Interface with `rustc_pattern_analysis`. +use std::cell::LazyCell; use std::fmt; use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId}; use intern::sym; -use once_cell::unsync::Lazy; use rustc_pattern_analysis::{ constructor::{Constructor, ConstructorSet, VariantVisibility}, usefulness::{compute_match_usefulness, PlaceValidity, UsefulnessReport}, @@ -388,8 +388,9 @@ impl<'db> PatCx for MatchCheckCtx<'db> { let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap(); // Whether we must not match the fields of this variant exhaustively. - let is_non_exhaustive = Lazy::new(|| self.is_foreign_non_exhaustive(adt)); - let visibilities = Lazy::new(|| self.db.field_visibilities(variant)); + let is_non_exhaustive = + LazyCell::new(|| self.is_foreign_non_exhaustive(adt)); + let visibilities = LazyCell::new(|| self.db.field_visibilities(variant)); self.list_variant_fields(ty, variant) .map(move |(fid, ty)| { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 804bc53905ad7..bb604f599cb9b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -22,7 +22,7 @@ mod pat; mod path; pub(crate) mod unify; -use std::{convert::identity, iter, ops::Index}; +use std::{cell::OnceCell, convert::identity, iter, ops::Index}; use chalk_ir::{ cast::Cast, @@ -50,7 +50,6 @@ use hir_expand::name::Name; use indexmap::IndexSet; use intern::sym; use la_arena::{ArenaMap, Entry}; -use once_cell::unsync::OnceCell; use rustc_hash::{FxHashMap, FxHashSet}; use stdx::{always, never}; use triomphe::Arc; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index 444628ff521d6..06c975a113f61 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -6,7 +6,7 @@ //! //! This usually involves resolving names, collecting generic arguments etc. 
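// (Editorial annotation, not part of this patch: the once_cell -> std migration in this
// patch follows a simple mapping, sketched here from the paths visible in the hunks:
//
//     once_cell::sync::Lazy        ->  std::sync::LazyLock
//     once_cell::sync::OnceCell    ->  std::sync::OnceLock
//     once_cell::unsync::Lazy      ->  std::cell::LazyCell
//     once_cell::unsync::OnceCell  ->  std::cell::OnceCell
//     once_cell::race::OnceBool    ->  a LazyLock in hir-ty's test setup
//
// and the one call site that needed laziness without a cell type (hir-def generics) now
// uses an Option plus Option::get_or_insert_with instead.)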
use std::{ - cell::{Cell, RefCell, RefMut}, + cell::{Cell, OnceCell, RefCell, RefMut}, iter, ops::{self, Not as _}, }; @@ -43,7 +43,6 @@ use hir_def::{ use hir_expand::{name::Name, ExpandResult}; use intern::Interned; use la_arena::{Arena, ArenaMap}; -use once_cell::unsync::OnceCell; use rustc_hash::FxHashSet; use rustc_pattern_analysis::Captures; use smallvec::SmallVec; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs index 19619008e3d57..4e4734e554b15 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs @@ -11,6 +11,7 @@ mod simple; mod traits; use std::env; +use std::sync::LazyLock; use base_db::SourceDatabaseFileInputExt as _; use expect_test::Expect; @@ -24,7 +25,6 @@ use hir_def::{ AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId, }; use hir_expand::{db::ExpandDatabase, FileRange, InFile}; -use once_cell::race::OnceBool; use rustc_hash::FxHashMap; use stdx::format_to; use syntax::{ @@ -49,8 +49,8 @@ use crate::{ // `env UPDATE_EXPECT=1 cargo test -p hir_ty` to update the snapshots. fn setup_tracing() -> Option { - static ENABLE: OnceBool = OnceBool::new(); - if !ENABLE.get_or_init(|| env::var("CHALK_DEBUG").is_ok()) { + static ENABLE: LazyLock = LazyLock::new(|| env::var("CHALK_DEBUG").is_ok()); + if !*ENABLE { return None; } diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml index 324fa1c6a8571..26666d6feb085 100644 --- a/src/tools/rust-analyzer/crates/hir/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml @@ -20,7 +20,6 @@ itertools.workspace = true smallvec.workspace = true tracing.workspace = true triomphe.workspace = true -once_cell = "1.17.1" # local deps base-db.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml index 035b2fc0db0c4..614465b4d06bc 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml @@ -17,7 +17,6 @@ cov-mark = "2.0.0-pre.1" itertools.workspace = true tracing.workspace = true -once_cell = "1.17.0" smallvec.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs index a7a6cdebd361e..697dfe7e57db9 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs @@ -2,6 +2,8 @@ //! //! This module uses a bit of static metadata to provide completions for builtin-in attributes and lints. +use std::sync::LazyLock; + use ide_db::{ generated::lints::{ Lint, CLIPPY_LINTS, CLIPPY_LINT_GROUPS, DEFAULT_LINTS, FEATURES, RUSTDOC_LINTS, @@ -10,7 +12,6 @@ use ide_db::{ FxHashMap, SymbolKind, }; use itertools::Itertools; -use once_cell::sync::Lazy; use syntax::{ ast::{self, AttrKind}, AstNode, SyntaxKind, T, @@ -215,7 +216,7 @@ macro_rules! 
attrs { } #[rustfmt::skip] -static KIND_TO_ATTRIBUTES: Lazy> = Lazy::new(|| { +static KIND_TO_ATTRIBUTES: LazyLock> = LazyLock::new(|| { use SyntaxKind::*; [ ( diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml index 6714a99f80eb4..3bf958e796f1f 100644 --- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml @@ -19,7 +19,6 @@ tracing.workspace = true rayon.workspace = true fst = { version = "0.4.7", default-features = false } rustc-hash.workspace = true -once_cell = "1.17.0" either.workspace = true itertools.workspace = true arrayvec.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 9e01a6d44083c..d9ffb173a0ecc 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -4,6 +4,7 @@ //! get a super-set of matches. Then, we confirm each match using precise //! name resolution. +use std::cell::LazyCell; use std::mem; use base_db::{salsa::Database, SourceDatabase, SourceRootDatabase}; @@ -12,7 +13,6 @@ use hir::{ InFile, InRealFile, ModuleSource, PathResolution, Semantics, Visibility, }; use memchr::memmem::Finder; -use once_cell::unsync::Lazy; use parser::SyntaxKind; use rustc_hash::FxHashMap; use span::EditionedFileId; @@ -543,7 +543,7 @@ impl<'a> FindUsages<'a> { for (text, file_id, search_range) in scope_files(sema, &search_scope) { self.sema.db.unwind_if_cancelled(); - let tree = Lazy::new(move || sema.parse(file_id).syntax().clone()); + let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone()); // Search for occurrences of the items name for offset in match_indices(&text, finder, search_range) { @@ -589,7 +589,7 @@ impl<'a> FindUsages<'a> { for (text, file_id, search_range) in scope_files(sema, &scope) { self.sema.db.unwind_if_cancelled(); - let tree = Lazy::new(move || sema.parse(file_id).syntax().clone()); + let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone()); for offset in match_indices(&text, finder, search_range) { for name_ref in @@ -641,7 +641,7 @@ impl<'a> FindUsages<'a> { let search_range = search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); - let tree = Lazy::new(|| sema.parse(file_id).syntax().clone()); + let tree = LazyCell::new(|| sema.parse(file_id).syntax().clone()); let finder = &Finder::new("self"); for offset in match_indices(&text, finder, search_range) { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml index 9c3a279a945dd..bf54f4ab3224f 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml @@ -18,7 +18,6 @@ either.workspace = true itertools.workspace = true serde_json.workspace = true tracing.workspace = true -once_cell = "1.17.0" # local deps stdx.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index a61c5f0cd4db2..3e8ff128752cc 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -75,6 +75,8 @@ mod handlers { #[cfg(test)] mod tests; +use std::sync::LazyLock; + use hir::{diagnostics::AnyDiagnostic, InFile, Semantics}; use ide_db::{ assists::{Assist, AssistId, AssistKind, AssistResolveStrategy}, @@ -86,7 
+88,6 @@ use ide_db::{ syntax_helpers::node_ext::parse_tt_as_comma_sep_paths, EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, SnippetCap, }; -use once_cell::sync::Lazy; use stdx::never; use syntax::{ ast::{self, AstNode}, @@ -512,11 +513,11 @@ pub fn full_diagnostics( // `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros -static RUSTC_LINT_GROUPS_DICT: Lazy>> = - Lazy::new(|| build_group_dict(DEFAULT_LINT_GROUPS, &["warnings", "__RA_EVERY_LINT"], "")); +static RUSTC_LINT_GROUPS_DICT: LazyLock>> = + LazyLock::new(|| build_group_dict(DEFAULT_LINT_GROUPS, &["warnings", "__RA_EVERY_LINT"], "")); -static CLIPPY_LINT_GROUPS_DICT: Lazy>> = - Lazy::new(|| build_group_dict(CLIPPY_LINT_GROUPS, &["__RA_EVERY_LINT"], "clippy::")); +static CLIPPY_LINT_GROUPS_DICT: LazyLock>> = + LazyLock::new(|| build_group_dict(CLIPPY_LINT_GROUPS, &["__RA_EVERY_LINT"], "clippy::")); fn build_group_dict( lint_group: &'static [LintGroup], diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml index 994c21469ff45..d945fa59c9cd8 100644 --- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml +++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml @@ -18,7 +18,6 @@ either.workspace = true itertools.workspace = true rowan = "0.15.15" rustc-hash.workspace = true -once_cell = "1.17.0" indexmap.workspace = true smol_str.workspace = true triomphe.workspace = true diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index 0228d9dd7135c..0b1561663cc42 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -1152,12 +1152,13 @@ pub fn token(kind: SyntaxKind) -> SyntaxToken { } pub mod tokens { - use once_cell::sync::Lazy; + use std::sync::LazyLock; + use parser::Edition; use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken}; - pub(super) static SOURCE_FILE: Lazy> = Lazy::new(|| { + pub(super) static SOURCE_FILE: LazyLock> = LazyLock::new(|| { SourceFile::parse( "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let _ @ [] })\n;\n\nimpl A for B where: {}", Edition::CURRENT, ) From 0cbf6a7a7683d81095b3291be33e4be7936110d2 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Fri, 16 Aug 2024 10:02:36 +0300 Subject: [PATCH 012/124] Test for word boundary in `FindUsages` This speeds up short identifiers search significantly, while unlikely to have an effect on long identifiers (the analysis takes much longer than some character comparison). Tested by finding all references to `eq()` (from `PartialEq`) in the rust-analyzer repo. Total time went down from 100s to 10s (a 10x reduction!). --- .../rust-analyzer/crates/ide-db/src/search.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 9e01a6d44083c..d9af9f1871723 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -503,6 +503,20 @@ impl<'a> FindUsages<'a> { if !search_range.contains_inclusive(offset) { return None; } + // If this is not a word boundary, that means this is only part of an identifier, + // so it can't be what we're looking for. + // This speeds up short identifiers significantly. 
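// Editor's note — not part of the patch. A minimal standalone sketch of the word-boundary
// test that the hunk below adds, assuming an "identifier character" means ASCII
// alphanumeric or `_`; `is_ident_char` and `is_word_boundary` are hypothetical helpers
// for illustration only, the patch inlines the `matches!` ranges directly.
fn is_ident_char(c: char) -> bool {
    matches!(c, 'A'..='Z' | 'a'..='z' | '0'..='9' | '_')
}

/// `true` if the needle found at byte offset `idx` is not glued to identifier characters,
/// e.g. a standalone `eq`, but not the `eq` inside `prequel` or `eq2`.
fn is_word_boundary(text: &str, idx: usize, needle_len: usize) -> bool {
    !text[..idx].chars().next_back().is_some_and(is_ident_char)
        && !text[idx + needle_len..].chars().next().is_some_and(is_ident_char)
}
// Matches that fail this cheap test are discarded before the expensive name resolution
// step, which is where the reported 100s -> 10s win for `eq()` comes from.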
+ if text[..idx] + .chars() + .next_back() + .is_some_and(|ch| matches!(ch, 'A'..='Z' | 'a'..='z' | '_')) + || text[idx + finder.needle().len()..] + .chars() + .next() + .is_some_and(|ch| matches!(ch, 'A'..='Z' | 'a'..='z' | '_' | '0'..='9')) + { + return None; + } Some(offset) }) } From 4ca597a4a8b724818a6635ef64e956a6f8a641d9 Mon Sep 17 00:00:00 2001 From: dfireBird Date: Sat, 29 Jun 2024 10:36:39 +0530 Subject: [PATCH 013/124] implement basic inferring of lifetimes --- .../rust-analyzer/crates/hir-ty/src/infer.rs | 20 +++++----- .../crates/hir-ty/src/infer/closure.rs | 2 +- .../crates/hir-ty/src/infer/coerce.rs | 37 +++++++++++------- .../crates/hir-ty/src/infer/expr.rs | 39 ++++++++++++++----- .../crates/hir-ty/src/infer/mutability.rs | 4 +- .../crates/hir-ty/src/infer/pat.rs | 17 ++++---- .../crates/hir-ty/src/infer/unify.rs | 14 +++---- .../crates/hir-ty/src/method_resolution.rs | 3 +- .../crates/hir-ty/src/mir/lower.rs | 2 +- .../rust-analyzer/crates/hir-ty/src/tests.rs | 13 ++----- .../crates/hir-ty/src/tests/coercion.rs | 22 +++++------ .../hir-ty/src/tests/method_resolution.rs | 14 +++---- .../crates/hir-ty/src/tests/simple.rs | 4 +- .../rust-analyzer/crates/hir/src/semantics.rs | 3 +- 14 files changed, 109 insertions(+), 85 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 42d73d105c814..8defeee5e8f49 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -55,7 +55,7 @@ use triomphe::Arc; use crate::{ db::HirDatabase, - error_lifetime, fold_tys, + fold_tys, generics::Generics, infer::{coerce::CoerceMany, unify::InferenceTable}, lower::ImplTraitLoweringMode, @@ -327,13 +327,13 @@ pub struct Adjustment { } impl Adjustment { - pub fn borrow(m: Mutability, ty: Ty) -> Self { - let ty = TyKind::Ref(m, error_lifetime(), ty).intern(Interner); - Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty } + pub fn borrow(m: Mutability, ty: Ty, lt: Lifetime) -> Self { + let ty = TyKind::Ref(m, lt.clone(), ty).intern(Interner); + Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(lt, m)), target: ty } } } -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum Adjust { /// Go from ! to any type. NeverToAny, @@ -353,18 +353,18 @@ pub enum Adjust { #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct OverloadedDeref(pub Option); -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum AutoBorrow { /// Converts from T to &T. - Ref(Mutability), + Ref(Lifetime, Mutability), /// Converts from T to *T. 
RawPtr(Mutability), } impl AutoBorrow { - fn mutability(self) -> Mutability { - let (AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m)) = self; - m + fn mutability(&self) -> Mutability { + let (AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) = self; + *m } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index 034ed2d691b4f..edaa1091fed10 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -474,7 +474,7 @@ impl InferenceContext<'_> { fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) { if let Some((last, rest)) = adjustment.split_last() { - match last.kind { + match &last.kind { Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => { self.walk_expr_with_adjust(tgt_expr, rest) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs index 6f85a4a4247c4..7e758c0b5173b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs @@ -18,14 +18,13 @@ use triomphe::Arc; use crate::{ autoderef::{Autoderef, AutoderefKind}, db::HirDatabase, - error_lifetime, infer::{ Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast, TypeError, TypeMismatch, }, utils::ClosureSubst, - Canonical, DomainGoal, FnAbi, FnPointer, FnSig, Guidance, InEnvironment, Interner, Solution, - Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, + Canonical, DomainGoal, FnAbi, FnPointer, FnSig, Guidance, InEnvironment, Interner, Lifetime, + Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, }; use super::unify::InferenceTable; @@ -301,7 +300,7 @@ impl InferenceTable<'_> { // Examine the supertype and consider auto-borrowing. match to_ty.kind(Interner) { TyKind::Raw(mt, _) => return self.coerce_ptr(from_ty, to_ty, *mt), - TyKind::Ref(mt, _, _) => return self.coerce_ref(from_ty, to_ty, *mt), + TyKind::Ref(mt, lt, _) => return self.coerce_ref(from_ty, to_ty, *mt, lt), _ => {} } @@ -377,11 +376,17 @@ impl InferenceTable<'_> { /// Reborrows `&mut A` to `&mut B` and `&(mut) A` to `&B`. /// To match `A` with `B`, autoderef will be performed, /// calling `deref`/`deref_mut` where necessary. - fn coerce_ref(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> CoerceResult { - let from_mt = match from_ty.kind(Interner) { - &TyKind::Ref(mt, _, _) => { - coerce_mutabilities(mt, to_mt)?; - mt + fn coerce_ref( + &mut self, + from_ty: Ty, + to_ty: &Ty, + to_mt: Mutability, + to_lt: &Lifetime, + ) -> CoerceResult { + let (_from_lt, from_mt) = match from_ty.kind(Interner) { + TyKind::Ref(mt, lt, _) => { + coerce_mutabilities(*mt, to_mt)?; + (lt.clone(), *mt) // clone is probably not good? } _ => return self.unify_and(&from_ty, to_ty, identity), }; @@ -427,8 +432,8 @@ impl InferenceTable<'_> { // compare those. Note that this means we use the target // mutability [1], since it may be that we are coercing // from `&mut T` to `&U`. 
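// Editor's note — not part of the patch. At the surface level, this branch is what lets a
// call site pass a `&mut` reference where a shared one is expected; the target type, not
// the source, decides the mutability (and, with this change, the lifetime) of the reborrow:
fn print_len(v: &Vec<u32>) -> usize {
    v.len()
}

fn caller(v: &mut Vec<u32>) -> usize {
    // `&mut Vec<u32>` is reborrowed as `&Vec<u32>` to match `print_len`'s parameter type.
    print_len(v)
}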
- let lt = error_lifetime(); // FIXME: handle lifetimes correctly, see rustc - let derefd_from_ty = TyKind::Ref(to_mt, lt, referent_ty).intern(Interner); + let lt = to_lt; // FIXME: Involve rustc LUB and SUB flag checks + let derefd_from_ty = TyKind::Ref(to_mt, lt.clone(), referent_ty).intern(Interner); match autoderef.table.try_unify(&derefd_from_ty, to_ty) { Ok(result) => { found = Some(result.map(|()| derefd_from_ty)); @@ -472,8 +477,10 @@ impl InferenceTable<'_> { } let mut adjustments = auto_deref_adjust_steps(&autoderef); - adjustments - .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)), target: ty.clone() }); + adjustments.push(Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(to_lt.clone(), to_mt)), + target: ty.clone(), + }); success(adjustments, ty, goals) } @@ -621,11 +628,11 @@ impl InferenceTable<'_> { (TyKind::Ref(from_mt, _, from_inner), &TyKind::Ref(to_mt, _, _)) => { coerce_mutabilities(*from_mt, to_mt)?; - let lt = error_lifetime(); + let lt = self.new_lifetime_var(); Some(( Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() }, Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)), + kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), to_mt)), target: TyKind::Ref(to_mt, lt, from_inner.clone()).intern(Interner), }, )) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index e40c6a63f661d..89d92ea9af0b9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -635,7 +635,10 @@ impl InferenceContext<'_> { let inner_ty = self.infer_expr_inner(*expr, &expectation); match rawness { Rawness::RawPtr => TyKind::Raw(mutability, inner_ty), - Rawness::Ref => TyKind::Ref(mutability, error_lifetime(), inner_ty), + Rawness::Ref => { + let lt = self.table.new_lifetime_var(); + TyKind::Ref(mutability, lt, inner_ty) + } } .intern(Interner) } @@ -786,7 +789,11 @@ impl InferenceContext<'_> { adj.apply(&mut self.table, base_ty) }); // mutability will be fixed up in `InferenceContext::infer_mut`; - adj.push(Adjustment::borrow(Mutability::Not, self_ty.clone())); + adj.push(Adjustment::borrow( + Mutability::Not, + self_ty.clone(), + self.table.new_lifetime_var(), + )); self.write_expr_adj(*base, adj); if let Some(func) = self .db @@ -991,7 +998,7 @@ impl InferenceContext<'_> { match fn_x { FnTrait::FnOnce => (), FnTrait::FnMut => { - if let TyKind::Ref(Mutability::Mut, _, inner) = derefed_callee.kind(Interner) { + if let TyKind::Ref(Mutability::Mut, lt, inner) = derefed_callee.kind(Interner) { if adjustments .last() .map(|it| matches!(it.kind, Adjust::Borrow(_))) @@ -1000,15 +1007,27 @@ impl InferenceContext<'_> { // prefer reborrow to move adjustments .push(Adjustment { kind: Adjust::Deref(None), target: inner.clone() }); - adjustments.push(Adjustment::borrow(Mutability::Mut, inner.clone())) + adjustments.push(Adjustment::borrow( + Mutability::Mut, + inner.clone(), + lt.clone(), + )) } } else { - adjustments.push(Adjustment::borrow(Mutability::Mut, derefed_callee.clone())); + adjustments.push(Adjustment::borrow( + Mutability::Mut, + derefed_callee.clone(), + self.table.new_lifetime_var(), + )); } } FnTrait::Fn => { if !matches!(derefed_callee.kind(Interner), TyKind::Ref(Mutability::Not, _, _)) { - adjustments.push(Adjustment::borrow(Mutability::Not, derefed_callee.clone())); + adjustments.push(Adjustment::borrow( + Mutability::Not, + derefed_callee.clone(), + self.table.new_lifetime_var(), + )); } } } @@ 
-1313,11 +1332,11 @@ impl InferenceContext<'_> { Some(sig) => { let p_left = &sig.params()[0]; if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. }) { - if let &TyKind::Ref(mtbl, _, _) = p_left.kind(Interner) { + if let TyKind::Ref(mtbl, lt, _) = p_left.kind(Interner) { self.write_expr_adj( lhs, vec![Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(mtbl)), + kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), target: p_left.clone(), }], ); @@ -1325,11 +1344,11 @@ impl InferenceContext<'_> { } let p_right = &sig.params()[1]; if matches!(op, BinaryOp::CmpOp(..)) { - if let &TyKind::Ref(mtbl, _, _) = p_right.kind(Interner) { + if let TyKind::Ref(mtbl, lt, _) = p_right.kind(Interner) { self.write_expr_adj( rhs, vec![Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(mtbl)), + kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)), target: p_right.clone(), }], ); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs index 66267e08db62b..7fed5f0203bad 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs @@ -28,7 +28,7 @@ impl InferenceContext<'_> { Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => (), Adjust::Deref(Some(d)) => *d = OverloadedDeref(Some(mutability)), Adjust::Borrow(b) => match b { - AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m) => mutability = *m, + AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m) => mutability = *m, }, } } @@ -125,7 +125,7 @@ impl InferenceContext<'_> { .get_mut(&base) .and_then(|it| it.last_mut()); if let Some(Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(mutability)), + kind: Adjust::Borrow(AutoBorrow::Ref(_, mutability)), target, }) = base_adjustments { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index f3c6f13a08d07..50542b2acd459 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -12,7 +12,6 @@ use stdx::TupleExt; use crate::{ consteval::{try_const_usize, usize_const}, - error_lifetime, infer::{BindingMode, Expectation, InferenceContext, TypeMismatch}, lower::lower_to_chalk_mutability, primitive::UintTy, @@ -394,19 +393,20 @@ impl InferenceContext<'_> { expected: &Ty, default_bm: BindingMode, ) -> Ty { - let expectation = match expected.as_reference() { - Some((inner_ty, _lifetime, _exp_mut)) => inner_ty.clone(), + let (expectation_type, expectation_lt) = match expected.as_reference() { + Some((inner_ty, lifetime, _exp_mut)) => (inner_ty.clone(), lifetime.clone()), None => { let inner_ty = self.table.new_type_var(); + let inner_lt = self.table.new_lifetime_var(); let ref_ty = - TyKind::Ref(mutability, error_lifetime(), inner_ty.clone()).intern(Interner); + TyKind::Ref(mutability, inner_lt.clone(), inner_ty.clone()).intern(Interner); // Unification failure will be reported by the caller. 
self.unify(&ref_ty, expected); - inner_ty + (inner_ty, inner_lt) } }; - let subty = self.infer_pat(inner_pat, &expectation, default_bm); - TyKind::Ref(mutability, error_lifetime(), subty).intern(Interner) + let subty = self.infer_pat(inner_pat, &expectation_type, default_bm); + TyKind::Ref(mutability, expectation_lt, subty).intern(Interner) } fn infer_bind_pat( @@ -433,7 +433,8 @@ impl InferenceContext<'_> { let bound_ty = match mode { BindingMode::Ref(mutability) => { - TyKind::Ref(mutability, error_lifetime(), inner_ty.clone()).intern(Interner) + let inner_lt = self.table.new_lifetime_var(); + TyKind::Ref(mutability, inner_lt, inner_ty.clone()).intern(Interner) } BindingMode::Move => inner_ty.clone(), }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs index 3e3578b9f9b87..c0f5ddddcbe3e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs @@ -17,12 +17,12 @@ use triomphe::Arc; use super::{InferOk, InferResult, InferenceContext, TypeError}; use crate::{ - consteval::unknown_const, db::HirDatabase, error_lifetime, fold_generic_args, - fold_tys_and_consts, to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical, - Const, ConstValue, DebruijnIndex, DomainGoal, GenericArg, GenericArgData, Goal, GoalData, - Guidance, InEnvironment, InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind, ProjectionTy, - ProjectionTyExt, Scalar, Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, - TyKind, VariableKind, WhereClause, + consteval::unknown_const, db::HirDatabase, fold_generic_args, fold_tys_and_consts, + to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical, Const, ConstValue, + DebruijnIndex, DomainGoal, GenericArg, GenericArgData, Goal, GoalData, Guidance, InEnvironment, + InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind, ProjectionTy, ProjectionTyExt, Scalar, + Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind, + WhereClause, }; impl InferenceContext<'_> { @@ -105,7 +105,7 @@ impl> Canonicalized { VariableKind::Ty(TyVariableKind::Float) => ctx.new_float_var().cast(Interner), // Chalk can sometimes return new lifetime variables. We just replace them by errors // for now. 
- VariableKind::Lifetime => error_lifetime().cast(Interner), + VariableKind::Lifetime => ctx.new_lifetime_var().cast(Interner), VariableKind::Const(ty) => ctx.new_const_var(ty.clone()).cast(Interner), }), ); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index 8ba8071d36ec4..3ddfea13ab0c0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -542,7 +542,8 @@ impl ReceiverAdjustments { } } if let Some(m) = self.autoref { - let a = Adjustment::borrow(m, ty); + let lt = table.new_lifetime_var(); + let a = Adjustment::borrow(m, ty, lt); ty = a.target.clone(); adjust.push(a); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 9aa2eeebc1757..b1db9b24f738f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -337,7 +337,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into()); Ok(Some(current)) } - Adjust::Borrow(AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m)) => { + Adjust::Borrow(AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) => { let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs index eded786a642c5..efd8546216d4b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs @@ -26,6 +26,7 @@ use hir_def::{ AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId, }; use hir_expand::{db::ExpandDatabase, FileRange, InFile}; +use itertools::Itertools; use rustc_hash::FxHashMap; use stdx::format_to; use syntax::{ @@ -94,7 +95,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour let mut had_annotations = false; let mut mismatches = FxHashMap::default(); let mut types = FxHashMap::default(); - let mut adjustments = FxHashMap::<_, Vec<_>>::default(); + let mut adjustments = FxHashMap::default(); for (file_id, annotations) in db.extract_annotations() { for (range, expected) in annotations { let file_range = FileRange { file_id, range }; @@ -107,13 +108,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour } else if expected.starts_with("adjustments:") { adjustments.insert( file_range, - expected - .trim_start_matches("adjustments:") - .trim() - .split(',') - .map(|it| it.trim().to_owned()) - .filter(|it| !it.is_empty()) - .collect(), + expected.trim_start_matches("adjustments:").trim().to_owned(), ); } else { panic!("unexpected annotation: {expected}"); @@ -200,7 +195,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour adjustments .iter() .map(|Adjustment { kind, .. 
}| format!("{kind:?}")) - .collect::>() + .join(", ") ); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs index 526db2af6dccb..908bbc248c604 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/coercion.rs @@ -49,7 +49,7 @@ fn let_stmt_coerce() { //- minicore: coerce_unsized fn test() { let x: &[isize] = &[1]; - // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize) + // ^^^^ adjustments: Deref(None), Borrow(Ref('?3, Not)), Pointer(Unsize) let x: *const [isize] = &[1]; // ^^^^ adjustments: Deref(None), Borrow(RawPtr(Not)), Pointer(Unsize) } @@ -96,7 +96,7 @@ fn foo(x: &[T]) -> &[T] { x } fn test() { let x = if true { foo(&[1]) - // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize) + // ^^^^ adjustments: Deref(None), Borrow(Ref('?8, Not)), Pointer(Unsize) } else { &[1] }; @@ -148,7 +148,7 @@ fn foo(x: &[T]) -> &[T] { x } fn test(i: i32) { let x = match i { 2 => foo(&[2]), - // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize) + // ^^^^ adjustments: Deref(None), Borrow(Ref('?10, Not)), Pointer(Unsize) 1 => &[1], _ => &[3], }; @@ -267,7 +267,7 @@ fn takes_ref_str(x: &str) {} fn returns_string() -> String { loop {} } fn test() { takes_ref_str(&{ returns_string() }); - // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not)) + // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('{error}, Not)) } "#, ); @@ -849,8 +849,8 @@ impl core::cmp::PartialEq for Struct { } fn test() { Struct == Struct; - // ^^^^^^ adjustments: Borrow(Ref(Not)) - // ^^^^^^ adjustments: Borrow(Ref(Not)) + // ^^^^^^ adjustments: Borrow(Ref('{error}, Not)) + // ^^^^^^ adjustments: Borrow(Ref('{error}, Not)) }", ); } @@ -866,7 +866,7 @@ impl core::ops::AddAssign for Struct { } fn test() { Struct += Struct; - // ^^^^^^ adjustments: Borrow(Ref(Mut)) + // ^^^^^^ adjustments: Borrow(Ref('{error}, Mut)) // ^^^^^^ adjustments: }", ); @@ -880,7 +880,7 @@ fn adjust_index() { fn test() { let x = [1, 2, 3]; x[2] = 6; - // ^ adjustments: Borrow(Ref(Mut)) + // ^ adjustments: Borrow(Ref('?8, Mut)) } ", ); @@ -905,11 +905,11 @@ impl core::ops::IndexMut for StructMut { } fn test() { Struct[0]; - // ^^^^^^ adjustments: Borrow(Ref(Not)) + // ^^^^^^ adjustments: Borrow(Ref('?2, Not)) StructMut[0]; - // ^^^^^^^^^ adjustments: Borrow(Ref(Not)) + // ^^^^^^^^^ adjustments: Borrow(Ref('?5, Not)) &mut StructMut[0]; - // ^^^^^^^^^ adjustments: Borrow(Ref(Mut)) + // ^^^^^^^^^ adjustments: Borrow(Ref('?8, Mut)) }", ); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs index 14e2e7465315c..610fc9b6b4562 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs @@ -1186,11 +1186,11 @@ fn test() { 89..109 '{ ... }': bool 99..103 'true': bool 123..167 '{ ...o(); }': () - 133..134 's': &'? S + 133..134 's': &'static S 137..151 'unsafe { f() }': &'static S 146..147 'f': fn f() -> &'static S 146..149 'f()': &'static S - 157..158 's': &'? 
S + 157..158 's': &'static S 157..164 's.foo()': bool "#]], ); @@ -1847,9 +1847,9 @@ impl Foo { } fn test() { Foo.foo(); - //^^^ adjustments: Borrow(Ref(Not)) + //^^^ adjustments: Borrow(Ref('?1, Not)) (&Foo).foo(); - // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)) + // ^^^^ adjustments: Deref(None), Borrow(Ref('?3, Not)) } "#, ); @@ -1863,7 +1863,7 @@ fn receiver_adjustment_unsize_array() { fn test() { let a = [1, 2, 3]; a.len(); -} //^ adjustments: Borrow(Ref(Not)), Pointer(Unsize) +} //^ adjustments: Borrow(Ref('?7, Not)), Pointer(Unsize) "#, ); } @@ -2076,7 +2076,7 @@ impl Foo { } fn test() { Box::new(Foo).foo(); - //^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref(Not)) + //^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref('?3, Not)) } "#, ); @@ -2094,7 +2094,7 @@ impl Foo { use core::mem::ManuallyDrop; fn test() { ManuallyDrop::new(Foo).foo(); - //^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not)) + //^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('?4, Not)) } "#, ); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs index a15670b3c85d9..1c6fa62e30ceb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs @@ -1201,8 +1201,8 @@ fn infer_array() { 209..215 '[1, 2]': [i32; 2] 210..211 '1': i32 213..214 '2': i32 - 225..226 'i': [&'? str; 2] - 229..239 '["a", "b"]': [&'? str; 2] + 225..226 'i': [&'static str; 2] + 229..239 '["a", "b"]': [&'static str; 2] 230..233 '"a"': &'static str 235..238 '"b"': &'static str 250..251 'b': [[&'? str; 1]; 2] diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index a377163162c2c..d086aee42855f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -1179,7 +1179,8 @@ impl<'db> SemanticsImpl<'db> { hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => { Adjust::Borrow(AutoBorrow::RawPtr(mutability(m))) } - hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => { + hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(_, m)) => { + // FIXME: Handle lifetimes here Adjust::Borrow(AutoBorrow::Ref(mutability(m))) } hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc), From 3d6129dccd36d754876672263ace3aa4ce56d655 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Fri, 16 Aug 2024 02:36:24 +0300 Subject: [PATCH 014/124] Properly account for editions in names This PR touches a lot of parts. But the main changes are changing `hir_expand::Name` to be raw edition-dependently and only when necessary (unrelated to how the user originally wrote the identifier), and changing `is_keyword()` and `is_raw_identifier()` to be edition-aware (this was done in #17896, but the FIXMEs were fixed here). It is possible that I missed some cases, but most IDE parts should properly escape (or not escape) identifiers now. The rules of thumb are: - If we show the identifier to the user, its rawness should be determined by the edition of the edited crate. This is nice for IDE features, but really important for changes we insert to the source code. - For tests, I chose `Edition::CURRENT` (so we only have to (maybe) update tests when an edition becomes stable, to avoid churn). - For debugging tools (helper methods and logs), I used `Edition::LATEST`. 
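[Editor's note] A minimal, self-contained sketch of the rawness rule described above,
assuming a deliberately simplified keyword table — `is_keyword_in` and `display_name` are
illustrative stand-ins, not the real `is_keyword()` / `Name::display()` APIs:

```
#[derive(Clone, Copy)]
enum Edition { E2015, E2018, E2021, E2024 }

fn is_keyword_in(ident: &str, edition: Edition) -> bool {
    match ident {
        // `async`, `await` and `dyn` only became keywords in the 2018 edition,
        // `gen` is only reserved starting with the 2024 edition.
        "async" | "await" | "dyn" => !matches!(edition, Edition::E2015),
        "gen" => matches!(edition, Edition::E2024),
        "fn" | "struct" | "match" => true, // keywords in every edition
        _ => false,
    }
}

/// Render `ident` for a crate of the given edition, escaping it only when that
/// edition requires it (real keyword handling has more cases, e.g. `crate`,
/// `self` and `super` can never be raw identifiers).
fn display_name(ident: &str, edition: Edition) -> String {
    if is_keyword_in(ident, edition) {
        format!("r#{ident}")
    } else {
        ident.to_owned()
    }
}
```

Under this rule, an item named `gen` is shown as plain `gen` when the edited crate is on
edition 2021, but code inserted into a 2024-edition crate gets `r#gen` — the first rule of
thumb above — while tests pin `Edition::CURRENT` and debug output pins `Edition::LATEST`.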
--- .../rust-analyzer/crates/hir-def/src/body.rs | 14 +- .../crates/hir-def/src/body/pretty.rs | 65 +++-- .../crates/hir-def/src/body/tests.rs | 6 +- .../crates/hir-def/src/find_path.rs | 7 +- .../crates/hir-def/src/hir/format_args.rs | 2 +- .../crates/hir-def/src/hir/type_ref.rs | 13 +- .../crates/hir-def/src/import_map.rs | 16 +- .../crates/hir-def/src/item_scope.rs | 3 +- .../crates/hir-def/src/item_tree.rs | 6 +- .../crates/hir-def/src/item_tree/pretty.rs | 86 +++--- .../crates/hir-def/src/item_tree/tests.rs | 3 +- .../crates/hir-def/src/nameres.rs | 6 +- .../crates/hir-def/src/nameres/collector.rs | 10 +- .../hir-def/src/nameres/tests/macros.rs | 5 +- .../src/nameres/tests/mod_resolution.rs | 8 +- .../rust-analyzer/crates/hir-def/src/path.rs | 19 +- .../crates/hir-def/src/pretty.rs | 84 +++--- .../crates/hir-expand/src/eager.rs | 3 +- .../crates/hir-expand/src/mod_path.rs | 46 +-- .../crates/hir-expand/src/name.rs | 93 +++---- .../crates/hir-ty/src/chalk_db.rs | 18 +- .../crates/hir-ty/src/consteval/tests.rs | 10 +- .../hir-ty/src/diagnostics/decl_check.rs | 70 +++-- .../crates/hir-ty/src/diagnostics/expr.rs | 30 +- .../hir-ty/src/diagnostics/match_check.rs | 35 ++- .../crates/hir-ty/src/display.rs | 116 +++++--- .../crates/hir-ty/src/infer/closure.rs | 4 +- .../crates/hir-ty/src/layout/tests.rs | 13 +- .../rust-analyzer/crates/hir-ty/src/lib.rs | 9 +- .../rust-analyzer/crates/hir-ty/src/mir.rs | 5 +- .../crates/hir-ty/src/mir/eval.rs | 17 +- .../crates/hir-ty/src/mir/eval/shim.rs | 6 +- .../crates/hir-ty/src/mir/eval/tests.rs | 6 +- .../crates/hir-ty/src/mir/lower.rs | 62 +++-- .../hir-ty/src/mir/lower/pattern_matching.rs | 3 +- .../crates/hir-ty/src/mir/pretty.rs | 48 ++-- .../rust-analyzer/crates/hir-ty/src/tls.rs | 19 +- .../rust-analyzer/crates/hir-ty/src/traits.rs | 3 +- .../rust-analyzer/crates/hir/src/attrs.rs | 8 +- .../rust-analyzer/crates/hir/src/display.rs | 69 ++--- src/tools/rust-analyzer/crates/hir/src/lib.rs | 84 ++++-- .../rust-analyzer/crates/hir/src/symbols.rs | 16 +- .../crates/hir/src/term_search/expr.rs | 55 ++-- .../src/handlers/add_missing_impl_members.rs | 23 +- .../src/handlers/add_missing_match_arms.rs | 3 +- .../ide-assists/src/handlers/auto_import.rs | 9 +- .../ide-assists/src/handlers/bool_to_enum.rs | 6 +- .../src/handlers/convert_bool_then.rs | 2 +- .../src/handlers/convert_into_to_from.rs | 5 +- .../handlers/convert_iter_for_each_to_for.rs | 12 +- .../convert_tuple_return_type_to_struct.rs | 2 +- .../handlers/destructure_struct_binding.rs | 8 +- .../src/handlers/expand_glob_import.rs | 4 +- .../src/handlers/extract_function.rs | 69 +++-- .../extract_struct_from_enum_variant.rs | 14 +- .../handlers/fill_record_pattern_fields.rs | 3 +- .../src/handlers/fix_visibility.rs | 14 +- .../src/handlers/generate_delegate_methods.rs | 3 +- .../src/handlers/generate_delegate_trait.rs | 23 +- .../src/handlers/generate_deref.rs | 12 +- .../generate_documentation_template.rs | 9 +- .../src/handlers/generate_function.rs | 24 +- .../src/handlers/generate_getter_or_setter.rs | 2 +- .../ide-assists/src/handlers/generate_new.rs | 5 +- .../src/handlers/inline_const_as_literal.rs | 11 +- .../src/handlers/move_const_to_impl.rs | 10 +- .../src/handlers/move_from_mod_rs.rs | 2 +- .../src/handlers/move_module_to_file.rs | 2 +- .../src/handlers/move_to_mod_rs.rs | 2 +- .../src/handlers/qualify_method_call.rs | 2 + .../ide-assists/src/handlers/qualify_path.rs | 22 +- .../src/handlers/reorder_fields.rs | 13 +- .../src/handlers/reorder_impl_items.rs | 5 +- 
.../replace_derive_with_manual_impl.rs | 5 +- .../replace_qualified_name_with_use.rs | 38 +-- .../ide-assists/src/handlers/term_search.rs | 6 +- .../src/handlers/toggle_async_sugar.rs | 3 +- .../crates/ide-assists/src/utils.rs | 20 +- .../ide-assists/src/utils/suggest_name.rs | 19 +- .../crates/ide-completion/src/completions.rs | 13 +- .../src/completions/attribute.rs | 29 +- .../src/completions/attribute/cfg.rs | 13 +- .../src/completions/attribute/derive.rs | 10 +- .../src/completions/attribute/lint.rs | 3 +- .../src/completions/attribute/macro_use.rs | 3 +- .../src/completions/attribute/repr.rs | 7 +- .../ide-completion/src/completions/dot.rs | 1 + .../src/completions/env_vars.rs | 2 +- .../src/completions/extern_abi.rs | 9 +- .../src/completions/extern_crate.rs | 3 +- .../src/completions/flyimport.rs | 2 +- .../src/completions/fn_param.rs | 8 +- .../src/completions/format_string.rs | 6 +- .../src/completions/item_list/trait_impl.rs | 14 +- .../src/completions/lifetime.rs | 2 +- .../ide-completion/src/completions/mod_.rs | 9 +- .../ide-completion/src/completions/postfix.rs | 13 +- .../ide-completion/src/completions/record.rs | 2 + .../ide-completion/src/completions/snippet.rs | 3 +- .../ide-completion/src/completions/use_.rs | 6 +- .../crates/ide-completion/src/context.rs | 9 +- .../ide-completion/src/context/analysis.rs | 2 +- .../crates/ide-completion/src/item.rs | 11 +- .../crates/ide-completion/src/lib.rs | 9 +- .../crates/ide-completion/src/render.rs | 70 +++-- .../ide-completion/src/render/const_.rs | 13 +- .../ide-completion/src/render/function.rs | 44 +-- .../ide-completion/src/render/literal.rs | 12 +- .../ide-completion/src/render/macro_.rs | 9 +- .../ide-completion/src/render/pattern.rs | 35 ++- .../ide-completion/src/render/type_alias.rs | 18 +- .../src/render/union_literal.rs | 34 ++- .../ide-completion/src/render/variant.rs | 20 +- .../crates/ide-completion/src/tests.rs | 24 +- .../src/tests/raw_identifiers.rs | 84 ++++++ .../rust-analyzer/crates/ide-db/src/defs.rs | 81 +++--- .../crates/ide-db/src/famous_defs.rs | 9 +- .../crates/ide-db/src/helpers.rs | 12 +- .../ide-db/src/imports/import_assets.rs | 6 +- .../crates/ide-db/src/path_transform.rs | 18 +- .../rust-analyzer/crates/ide-db/src/rename.rs | 74 +++-- .../insert_whitespace_into_node.rs | 5 +- .../ide-db/src/syntax_helpers/node_ext.rs | 8 +- .../rust-analyzer/crates/ide-db/src/traits.rs | 21 +- .../crates/ide-db/src/ty_filter.rs | 7 +- .../ide-db/src/use_trivial_constructor.rs | 4 +- .../src/handlers/expected_function.rs | 2 +- .../src/handlers/json_is_not_rust.rs | 15 +- .../src/handlers/missing_fields.rs | 24 +- .../src/handlers/moved_out_of_ref.rs | 2 +- .../src/handlers/mutability_errors.rs | 2 +- .../src/handlers/private_assoc_item.rs | 2 +- .../src/handlers/private_field.rs | 4 +- .../handlers/trait_impl_missing_assoc_item.rs | 2 +- .../trait_impl_redundant_assoc_item.rs | 10 +- .../src/handlers/type_mismatch.rs | 12 +- .../src/handlers/typed_hole.rs | 5 +- .../src/handlers/undeclared_label.rs | 2 +- .../src/handlers/unlinked_file.rs | 11 +- .../src/handlers/unreachable_label.rs | 2 +- .../src/handlers/unresolved_field.rs | 4 +- .../src/handlers/unresolved_macro_call.rs | 2 +- .../src/handlers/unresolved_method.rs | 9 +- .../src/handlers/unused_variables.rs | 18 +- .../crates/ide-diagnostics/src/lib.rs | 49 +++- .../crates/ide-ssr/src/matching.rs | 10 +- .../crates/ide-ssr/src/replacing.rs | 8 +- .../rust-analyzer/crates/ide/src/doc_links.rs | 63 +++-- .../crates/ide/src/expand_macro.rs | 10 +- 
.../crates/ide/src/goto_definition.rs | 4 +- .../crates/ide/src/highlight_related.rs | 4 +- .../rust-analyzer/crates/ide/src/hover.rs | 63 +++-- .../crates/ide/src/hover/render.rs | 153 +++++----- .../crates/ide/src/hover/tests.rs | 72 +++++ .../crates/ide/src/inlay_hints.rs | 14 +- .../crates/ide/src/inlay_hints/adjustment.rs | 6 +- .../crates/ide/src/inlay_hints/bind_pat.rs | 4 +- .../crates/ide/src/inlay_hints/chaining.rs | 4 +- .../ide/src/inlay_hints/closing_brace.rs | 8 +- .../crates/ide/src/inlay_hints/closure_ret.rs | 4 +- .../ide/src/inlay_hints/implicit_drop.rs | 4 +- .../crates/ide/src/inlay_hints/param_name.rs | 4 +- .../rust-analyzer/crates/ide/src/moniker.rs | 45 +-- .../crates/ide/src/navigation_target.rs | 100 ++++--- .../crates/ide/src/references.rs | 5 +- .../rust-analyzer/crates/ide/src/rename.rs | 261 ++++++++++++++++-- .../rust-analyzer/crates/ide/src/runnables.rs | 44 +-- .../crates/ide/src/signature_help.rs | 103 ++++--- .../crates/ide/src/static_index.rs | 16 +- .../crates/ide/src/syntax_highlighting.rs | 4 +- .../ide/src/syntax_highlighting/highlight.rs | 8 +- .../ide/src/syntax_highlighting/macro_.rs | 3 +- .../crates/ide/src/view_item_tree.rs | 2 +- .../crates/ide/src/view_memory_layout.rs | 16 +- .../crates/parser/src/syntax_kind.rs | 9 + .../crates/rust-analyzer/src/cli.rs | 4 +- .../rust-analyzer/src/cli/analysis_stats.rs | 77 ++++-- .../crates/syntax-bridge/src/lib.rs | 5 +- .../rust-analyzer/crates/syntax/src/utils.rs | 1 + 179 files changed, 2480 insertions(+), 1245 deletions(-) create mode 100644 src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs index a988317e046ef..f5fe8f8770130 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs @@ -14,7 +14,7 @@ use hir_expand::{name::Name, ExpandError, InFile}; use la_arena::{Arena, ArenaMap, Idx, RawIdx}; use rustc_hash::FxHashMap; use smallvec::SmallVec; -use span::MacroFileId; +use span::{Edition, MacroFileId}; use syntax::{ast, AstPtr, SyntaxNodePtr}; use triomphe::Arc; @@ -201,8 +201,13 @@ impl Body { self.block_scopes.iter().map(move |&block| (block, db.block_def_map(block))) } - pub fn pretty_print(&self, db: &dyn DefDatabase, owner: DefWithBodyId) -> String { - pretty::print_body_hir(db, self, owner) + pub fn pretty_print( + &self, + db: &dyn DefDatabase, + owner: DefWithBodyId, + edition: Edition, + ) -> String { + pretty::print_body_hir(db, self, owner, edition) } pub fn pretty_print_expr( @@ -210,8 +215,9 @@ impl Body { db: &dyn DefDatabase, owner: DefWithBodyId, expr: ExprId, + edition: Edition, ) -> String { - pretty::print_expr_hir(db, self, owner, expr) + pretty::print_expr_hir(db, self, owner, expr, edition) } fn new( diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs index edaee60937d15..55740a68acd6e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs @@ -3,6 +3,7 @@ use std::fmt::{self, Write}; use itertools::Itertools; +use span::Edition; use crate::{ hir::{ @@ -15,20 +16,26 @@ use crate::{ use super::*; -pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBodyId) -> String { +pub(super) fn print_body_hir( + db: &dyn DefDatabase, + body: &Body, + owner: DefWithBodyId, + edition: Edition, +) -> String { let 
header = match owner { - DefWithBodyId::FunctionId(it) => { - it.lookup(db).id.resolved(db, |it| format!("fn {}", it.name.display(db.upcast()))) - } + DefWithBodyId::FunctionId(it) => it + .lookup(db) + .id + .resolved(db, |it| format!("fn {}", it.name.display(db.upcast(), edition))), DefWithBodyId::StaticId(it) => it .lookup(db) .id - .resolved(db, |it| format!("static {} = ", it.name.display(db.upcast()))), + .resolved(db, |it| format!("static {} = ", it.name.display(db.upcast(), edition))), DefWithBodyId::ConstId(it) => it.lookup(db).id.resolved(db, |it| { format!( "const {} = ", match &it.name { - Some(name) => name.display(db.upcast()).to_string(), + Some(name) => name.display(db.upcast(), edition).to_string(), None => "_".to_owned(), } ) @@ -39,13 +46,13 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo let enum_loc = loc.parent.lookup(db); format!( "enum {}::{}", - enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast()), - loc.id.item_tree(db)[loc.id.value].name.display(db.upcast()), + enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition), + loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition), ) } }; - let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false }; + let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false, edition }; if let DefWithBodyId::FunctionId(it) = owner { p.buf.push('('); let function_data = &db.function_data(it); @@ -86,8 +93,10 @@ pub(super) fn print_expr_hir( body: &Body, _owner: DefWithBodyId, expr: ExprId, + edition: Edition, ) -> String { - let mut p = Printer { db, body, buf: String::new(), indent_level: 0, needs_indent: false }; + let mut p = + Printer { db, body, buf: String::new(), indent_level: 0, needs_indent: false, edition }; p.print_expr(expr); p.buf } @@ -113,6 +122,7 @@ struct Printer<'a> { buf: String, indent_level: usize, needs_indent: bool, + edition: Edition, } impl Write for Printer<'_> { @@ -173,13 +183,14 @@ impl Printer<'_> { Expr::OffsetOf(offset_of) => { w!(self, "builtin#offset_of("); self.print_type_ref(&offset_of.container); + let edition = self.edition; w!( self, ", {})", offset_of .fields .iter() - .format_with(".", |field, f| f(&field.display(self.db.upcast()))) + .format_with(".", |field, f| f(&field.display(self.db.upcast(), edition))) ); } Expr::Path(path) => self.print_path(path), @@ -201,7 +212,7 @@ impl Printer<'_> { } Expr::Loop { body, label } => { if let Some(lbl) = label { - w!(self, "{}: ", self.body[*lbl].name.display(self.db.upcast())); + w!(self, "{}: ", self.body[*lbl].name.display(self.db.upcast(), self.edition)); } w!(self, "loop "); self.print_expr(*body); @@ -221,10 +232,11 @@ impl Printer<'_> { } Expr::MethodCall { receiver, method_name, args, generic_args } => { self.print_expr(*receiver); - w!(self, ".{}", method_name.display(self.db.upcast())); + w!(self, ".{}", method_name.display(self.db.upcast(), self.edition)); if let Some(args) = generic_args { w!(self, "::<"); - print_generic_args(self.db, args, self).unwrap(); + let edition = self.edition; + print_generic_args(self.db, args, self, edition).unwrap(); w!(self, ">"); } w!(self, "("); @@ -259,13 +271,13 @@ impl Printer<'_> { Expr::Continue { label } => { w!(self, "continue"); if let Some(lbl) = label { - w!(self, " {}", self.body[*lbl].name.display(self.db.upcast())); + w!(self, " {}", self.body[*lbl].name.display(self.db.upcast(), self.edition)); } } Expr::Break { expr, label } => { w!(self, "break"); if let 
Some(lbl) = label { - w!(self, " {}", self.body[*lbl].name.display(self.db.upcast())); + w!(self, " {}", self.body[*lbl].name.display(self.db.upcast(), self.edition)); } if let Some(expr) = expr { self.whitespace(); @@ -307,9 +319,10 @@ impl Printer<'_> { } w!(self, "{{"); + let edition = self.edition; self.indented(|p| { for field in &**fields { - w!(p, "{}: ", field.name.display(self.db.upcast())); + w!(p, "{}: ", field.name.display(self.db.upcast(), edition)); p.print_expr(field.expr); wln!(p, ","); } @@ -326,7 +339,7 @@ impl Printer<'_> { } Expr::Field { expr, name } => { self.print_expr(*expr); - w!(self, ".{}", name.display(self.db.upcast())); + w!(self, ".{}", name.display(self.db.upcast(), self.edition)); } Expr::Await { expr } => { self.print_expr(*expr); @@ -464,8 +477,9 @@ impl Printer<'_> { } Expr::Literal(lit) => self.print_literal(lit), Expr::Block { id: _, statements, tail, label } => { - let label = - label.map(|lbl| format!("{}: ", self.body[lbl].name.display(self.db.upcast()))); + let label = label.map(|lbl| { + format!("{}: ", self.body[lbl].name.display(self.db.upcast(), self.edition)) + }); self.print_block(label.as_deref(), statements, tail); } Expr::Unsafe { id: _, statements, tail } => { @@ -539,9 +553,10 @@ impl Printer<'_> { } w!(self, " {{"); + let edition = self.edition; self.indented(|p| { for arg in args.iter() { - w!(p, "{}: ", arg.name.display(self.db.upcast())); + w!(p, "{}: ", arg.name.display(self.db.upcast(), edition)); p.print_pat(arg.pat); wln!(p, ","); } @@ -686,11 +701,13 @@ impl Printer<'_> { } fn print_type_ref(&mut self, ty: &TypeRef) { - print_type_ref(self.db, ty, self).unwrap(); + let edition = self.edition; + print_type_ref(self.db, ty, self, edition).unwrap(); } fn print_path(&mut self, path: &Path) { - print_path(self.db, path, self).unwrap(); + let edition = self.edition; + print_path(self.db, path, self, edition).unwrap(); } fn print_binding(&mut self, id: BindingId) { @@ -701,6 +718,6 @@ impl Printer<'_> { BindingAnnotation::Ref => "ref ", BindingAnnotation::RefMut => "ref mut ", }; - w!(self, "{}{}", mode, name.display(self.db.upcast())); + w!(self, "{}{}", mode, name.display(self.db.upcast(), self.edition)); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs index 0011d3a20c2a4..38fc01d8fca5f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs @@ -219,7 +219,7 @@ fn main() { }, ); }"#]] - .assert_eq(&body.pretty_print(&db, def)) + .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) } #[test] @@ -285,7 +285,7 @@ impl SsrError { ), ); }"#]] - .assert_eq(&body.pretty_print(&db, def)) + .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) } #[test] @@ -333,5 +333,5 @@ fn f(a: i32, b: u32) -> String { ); }; }"#]] - .assert_eq(&body.pretty_print(&db, def)) + .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT)) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs index 5a3a3e9189790..f5e03e5281e21 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs @@ -651,6 +651,7 @@ mod tests { use expect_test::{expect, Expect}; use hir_expand::db::ExpandDatabase; use itertools::Itertools; + use span::Edition; use stdx::format_to; use syntax::ast::AstNode; use test_fixture::WithFixture; @@ -717,8 +718,10 @@ mod tests { 
"{:7}(imports {}): {}\n", format!("{:?}", prefix), if ignore_local_imports { '✖' } else { '✔' }, - found_path - .map_or_else(|| "".to_owned(), |it| it.display(&db).to_string()), + found_path.map_or_else( + || "".to_owned(), + |it| it.display(&db, Edition::CURRENT).to_string() + ), ); } expect.assert_eq(&res); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs index 390e7da677f64..e1c3bd25bcf0f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/format_args.rs @@ -250,7 +250,7 @@ pub(crate) fn parse( } } ArgRef::Name(name, span) => { - let name = Name::new(name, tt::IdentIsRaw::No, call_ctx); + let name = Name::new(name, call_ctx); if let Some((index, _)) = args.by_name(&name) { record_usage(name, span); // Name found in `args`, so we resolve it to its index. diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs index 8f618b2d30343..b74cd90f6933a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs @@ -10,6 +10,7 @@ use hir_expand::{ AstId, }; use intern::{sym, Interned, Symbol}; +use span::Edition; use syntax::ast::{self, HasGenericArgs, HasName, IsString}; use crate::{ @@ -419,18 +420,22 @@ impl ConstRef { param.default_val().map(|default| Self::from_const_arg(lower_ctx, Some(default))) } - pub fn display<'a>(&'a self, db: &'a dyn ExpandDatabase) -> impl fmt::Display + 'a { - struct Display<'a>(&'a dyn ExpandDatabase, &'a ConstRef); + pub fn display<'a>( + &'a self, + db: &'a dyn ExpandDatabase, + edition: Edition, + ) -> impl fmt::Display + 'a { + struct Display<'a>(&'a dyn ExpandDatabase, &'a ConstRef, Edition); impl fmt::Display for Display<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self.1 { ConstRef::Scalar(s) => s.fmt(f), - ConstRef::Path(n) => n.display(self.0).fmt(f), + ConstRef::Path(n) => n.display(self.0, self.2).fmt(f), ConstRef::Complex(_) => f.write_str("{const}"), } } } - Display(db, self) + Display(db, self, edition) } // We special case literals and single identifiers, to speed up things. diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs index 8cc022e4c60f2..9574e5d9cd37b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs @@ -8,6 +8,7 @@ use hir_expand::name::Name; use itertools::Itertools; use rustc_hash::FxHashSet; use smallvec::SmallVec; +use span::Edition; use stdx::{format_to, TupleExt}; use syntax::ToSmolStr; use triomphe::Arc; @@ -66,7 +67,12 @@ impl ImportMap { for (k, v) in self.item_to_info_map.iter() { format_to!(out, "{:?} ({:?}) -> ", k, v.1); for v in &v.0 { - format_to!(out, "{}:{:?}, ", v.name.display(db.upcast()), v.container); + format_to!( + out, + "{}:{:?}, ", + v.name.display(db.upcast(), Edition::CURRENT), + v.container + ); } format_to!(out, "\n"); } @@ -83,7 +89,7 @@ impl ImportMap { // We've only collected items, whose name cannot be tuple field so unwrapping is fine. 
.flat_map(|(&item, (info, _))| { info.iter().enumerate().map(move |(idx, info)| { - (item, info.name.display(db.upcast()).to_smolstr(), idx as u32) + (item, info.name.unescaped().display(db.upcast()).to_smolstr(), idx as u32) }) }) .collect(); @@ -461,7 +467,7 @@ fn search_maps( query.search_mode.check( &query.query, query.case_sensitive, - &info.name.display(db.upcast()).to_smolstr(), + &info.name.unescaped().display(db.upcast()).to_smolstr(), ) }); res.extend(iter.map(TupleExt::head)); @@ -577,7 +583,7 @@ mod tests { Some(format!( "{}::{}", render_path(db, &trait_info[0]), - assoc_item_name.display(db.upcast()) + assoc_item_name.display(db.upcast(), Edition::CURRENT) )) } @@ -616,7 +622,7 @@ mod tests { module = parent; } - segments.iter().rev().map(|it| it.display(db.upcast())).join("::") + segments.iter().rev().map(|it| it.display(db.upcast(), Edition::CURRENT)).join("::") } #[test] diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index df6b1f55c1d96..89b011094a465 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -9,6 +9,7 @@ use la_arena::Idx; use once_cell::sync::Lazy; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; +use span::Edition; use stdx::format_to; use syntax::ast; @@ -706,7 +707,7 @@ impl ItemScope { format_to!( buf, "{}:", - name.map_or("_".to_owned(), |name| name.display(db).to_string()) + name.map_or("_".to_owned(), |name| name.display(db, Edition::LATEST).to_string()) ); if let Some((.., i)) = def.types { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs index 28eebb286ed4b..517f40ebd1c52 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs @@ -51,7 +51,7 @@ use la_arena::{Arena, Idx, RawIdx}; use once_cell::sync::OnceCell; use rustc_hash::FxHashMap; use smallvec::SmallVec; -use span::{AstIdNode, FileAstId, SyntaxContextId}; +use span::{AstIdNode, Edition, FileAstId, SyntaxContextId}; use stdx::never; use syntax::{ast, match_ast, SyntaxKind}; use triomphe::Arc; @@ -199,8 +199,8 @@ impl ItemTree { Attrs::filter(db, krate, self.raw_attrs(of).clone()) } - pub fn pretty_print(&self, db: &dyn DefDatabase) -> String { - pretty::print_item_tree(db, self) + pub fn pretty_print(&self, db: &dyn DefDatabase, edition: Edition) -> String { + pretty::print_item_tree(db, self, edition) } fn data(&self) -> &ItemTreeData { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs index 740759e6e3948..b5a65abce8696 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs @@ -3,7 +3,7 @@ use std::fmt::{self, Write}; use la_arena::{Idx, RawIdx}; -use span::ErasedFileAstId; +use span::{Edition, ErasedFileAstId}; use crate::{ generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget}, @@ -18,8 +18,9 @@ use crate::{ visibility::RawVisibility, }; -pub(super) fn print_item_tree(db: &dyn DefDatabase, tree: &ItemTree) -> String { - let mut p = Printer { db, tree, buf: String::new(), indent_level: 0, needs_indent: true }; +pub(super) fn print_item_tree(db: &dyn DefDatabase, tree: &ItemTree, edition: Edition) -> String { + let mut p = + Printer { db, tree, buf: 
String::new(), indent_level: 0, needs_indent: true, edition }; if let Some(attrs) = tree.attrs.get(&AttrOwner::TopLevel) { p.print_attrs(attrs, true, "\n"); @@ -56,6 +57,7 @@ struct Printer<'a> { buf: String, indent_level: usize, needs_indent: bool, + edition: Edition, } impl Printer<'_> { @@ -97,7 +99,7 @@ impl Printer<'_> { self, "#{}[{}{}]{}", inner, - attr.path.display(self.db.upcast()), + attr.path.display(self.db.upcast(), self.edition), attr.input.as_ref().map(|it| it.to_string()).unwrap_or_default(), separated_by, ); @@ -113,13 +115,14 @@ impl Printer<'_> { fn print_visibility(&mut self, vis: RawVisibilityId) { match &self.tree[vis] { RawVisibility::Module(path, _expl) => { - w!(self, "pub({}) ", path.display(self.db.upcast())) + w!(self, "pub({}) ", path.display(self.db.upcast(), self.edition)) } RawVisibility::Public => w!(self, "pub "), }; } fn print_fields(&mut self, parent: FieldParent, kind: FieldsShape, fields: &[Field]) { + let edition = self.edition; match kind { FieldsShape::Record => { self.whitespace(); @@ -131,7 +134,7 @@ impl Printer<'_> { "\n", ); this.print_visibility(*visibility); - w!(this, "{}: ", name.display(self.db.upcast())); + w!(this, "{}: ", name.display(self.db.upcast(), edition)); this.print_type_ref(type_ref); wln!(this, ","); } @@ -147,7 +150,7 @@ impl Printer<'_> { "\n", ); this.print_visibility(*visibility); - w!(this, "{}: ", name.display(self.db.upcast())); + w!(this, "{}: ", name.display(self.db.upcast(), edition)); this.print_type_ref(type_ref); wln!(this, ","); } @@ -186,20 +189,20 @@ impl Printer<'_> { fn print_use_tree(&mut self, use_tree: &UseTree) { match &use_tree.kind { UseTreeKind::Single { path, alias } => { - w!(self, "{}", path.display(self.db.upcast())); + w!(self, "{}", path.display(self.db.upcast(), self.edition)); if let Some(alias) = alias { - w!(self, " as {}", alias); + w!(self, " as {}", alias.display(self.edition)); } } UseTreeKind::Glob { path } => { if let Some(path) = path { - w!(self, "{}::", path.display(self.db.upcast())); + w!(self, "{}::", path.display(self.db.upcast(), self.edition)); } w!(self, "*"); } UseTreeKind::Prefixed { prefix, list } => { if let Some(prefix) = prefix { - w!(self, "{}::", prefix.display(self.db.upcast())); + w!(self, "{}::", prefix.display(self.db.upcast(), self.edition)); } w!(self, "{{"); for (i, tree) in list.iter().enumerate() { @@ -229,9 +232,9 @@ impl Printer<'_> { let ExternCrate { name, alias, visibility, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "extern crate {}", name.display(self.db.upcast())); + w!(self, "extern crate {}", name.display(self.db.upcast(), self.edition)); if let Some(alias) = alias { - w!(self, " as {}", alias); + w!(self, " as {}", alias.display(self.edition)); } wln!(self, ";"); } @@ -278,7 +281,7 @@ impl Printer<'_> { if let Some(abi) = abi { w!(self, "extern \"{}\" ", abi); } - w!(self, "fn {}", name.display(self.db.upcast())); + w!(self, "fn {}", name.display(self.db.upcast(), self.edition)); self.print_generic_params(explicit_generic_params, it.into()); w!(self, "("); if !params.is_empty() { @@ -314,7 +317,7 @@ impl Printer<'_> { &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "struct {}", name.display(self.db.upcast())); + w!(self, "struct {}", name.display(self.db.upcast(), self.edition)); self.print_generic_params(generic_params, it.into()); self.print_fields_and_where_clause( FieldParent::Struct(it), @@ -332,7 +335,7 @@ impl Printer<'_> { let 
Union { name, visibility, fields, generic_params, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "union {}", name.display(self.db.upcast())); + w!(self, "union {}", name.display(self.db.upcast(), self.edition)); self.print_generic_params(generic_params, it.into()); self.print_fields_and_where_clause( FieldParent::Union(it), @@ -346,15 +349,16 @@ impl Printer<'_> { let Enum { name, visibility, variants, generic_params, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "enum {}", name.display(self.db.upcast())); + w!(self, "enum {}", name.display(self.db.upcast(), self.edition)); self.print_generic_params(generic_params, it.into()); self.print_where_clause_and_opening_brace(generic_params); + let edition = self.edition; self.indented(|this| { for variant in FileItemTreeId::range_iter(variants.clone()) { let Variant { name, fields, shape: kind, ast_id } = &this.tree[variant]; this.print_ast_id(ast_id.erase()); this.print_attrs_of(variant, "\n"); - w!(this, "{}", name.display(self.db.upcast())); + w!(this, "{}", name.display(self.db.upcast(), edition)); this.print_fields(FieldParent::Variant(variant), *kind, fields); wln!(this, ","); } @@ -367,7 +371,7 @@ impl Printer<'_> { self.print_visibility(*visibility); w!(self, "const "); match name { - Some(name) => w!(self, "{}", name.display(self.db.upcast())), + Some(name) => w!(self, "{}", name.display(self.db.upcast(), self.edition)), None => w!(self, "_"), } w!(self, ": "); @@ -382,7 +386,7 @@ impl Printer<'_> { if *mutable { w!(self, "mut "); } - w!(self, "{}: ", name.display(self.db.upcast())); + w!(self, "{}: ", name.display(self.db.upcast(), self.edition)); self.print_type_ref(type_ref); w!(self, " = _;"); wln!(self); @@ -398,7 +402,7 @@ impl Printer<'_> { if *is_auto { w!(self, "auto "); } - w!(self, "trait {}", name.display(self.db.upcast())); + w!(self, "trait {}", name.display(self.db.upcast(), self.edition)); self.print_generic_params(generic_params, it.into()); self.print_where_clause_and_opening_brace(generic_params); self.indented(|this| { @@ -412,7 +416,7 @@ impl Printer<'_> { let TraitAlias { name, visibility, generic_params, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "trait {}", name.display(self.db.upcast())); + w!(self, "trait {}", name.display(self.db.upcast(), self.edition)); self.print_generic_params(generic_params, it.into()); w!(self, " = "); self.print_where_clause(generic_params); @@ -457,7 +461,7 @@ impl Printer<'_> { &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "type {}", name.display(self.db.upcast())); + w!(self, "type {}", name.display(self.db.upcast(), self.edition)); self.print_generic_params(generic_params, it.into()); if !bounds.is_empty() { w!(self, ": "); @@ -475,7 +479,7 @@ impl Printer<'_> { let Mod { name, visibility, kind, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - w!(self, "mod {}", name.display(self.db.upcast())); + w!(self, "mod {}", name.display(self.db.upcast(), self.edition)); match kind { ModKind::Inline { items } => { w!(self, " {{"); @@ -500,18 +504,22 @@ impl Printer<'_> { ctxt, expand_to ); - wln!(self, "{}!(...);", path.display(self.db.upcast())); + wln!(self, "{}!(...);", path.display(self.db.upcast(), self.edition)); } ModItem::MacroRules(it) => { let MacroRules { name, ast_id } = &self.tree[it]; 
self.print_ast_id(ast_id.erase()); - wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db.upcast())); + wln!( + self, + "macro_rules! {} {{ ... }}", + name.display(self.db.upcast(), self.edition) + ); } ModItem::Macro2(it) => { let Macro2 { name, visibility, ast_id } = &self.tree[it]; self.print_ast_id(ast_id.erase()); self.print_visibility(*visibility); - wln!(self, "macro {} {{ ... }}", name.display(self.db.upcast())); + wln!(self, "macro {} {{ ... }}", name.display(self.db.upcast(), self.edition)); } } @@ -519,15 +527,18 @@ impl Printer<'_> { } fn print_type_ref(&mut self, type_ref: &TypeRef) { - print_type_ref(self.db, type_ref, self).unwrap(); + let edition = self.edition; + print_type_ref(self.db, type_ref, self, edition).unwrap(); } fn print_type_bounds(&mut self, bounds: &[Interned]) { - print_type_bounds(self.db, bounds, self).unwrap(); + let edition = self.edition; + print_type_bounds(self.db, bounds, self, edition).unwrap(); } fn print_path(&mut self, path: &Path) { - print_path(self.db, path, self).unwrap(); + let edition = self.edition; + print_path(self.db, path, self, edition).unwrap(); } fn print_generic_params(&mut self, params: &GenericParams, parent: GenericModItem) { @@ -543,7 +554,7 @@ impl Printer<'_> { } first = false; self.print_attrs_of(AttrOwner::LifetimeParamData(parent, idx), " "); - w!(self, "{}", lt.name.display(self.db.upcast())); + w!(self, "{}", lt.name.display(self.db.upcast(), self.edition)); } for (idx, x) in params.iter_type_or_consts() { if !first { @@ -553,11 +564,11 @@ impl Printer<'_> { self.print_attrs_of(AttrOwner::TypeOrConstParamData(parent, idx), " "); match x { TypeOrConstParamData::TypeParamData(ty) => match &ty.name { - Some(name) => w!(self, "{}", name.display(self.db.upcast())), + Some(name) => w!(self, "{}", name.display(self.db.upcast(), self.edition)), None => w!(self, "_anon_{}", idx.into_raw()), }, TypeOrConstParamData::ConstParamData(konst) => { - w!(self, "const {}: ", konst.name.display(self.db.upcast())); + w!(self, "const {}: ", konst.name.display(self.db.upcast(), self.edition)); self.print_type_ref(&konst.ty); } } @@ -580,6 +591,7 @@ impl Printer<'_> { } w!(self, "\nwhere"); + let edition = self.edition; self.indented(|this| { for (i, pred) in params.where_predicates().enumerate() { if i != 0 { @@ -592,8 +604,8 @@ impl Printer<'_> { wln!( this, "{}: {},", - target.name.display(self.db.upcast()), - bound.name.display(self.db.upcast()) + target.name.display(self.db.upcast(), edition), + bound.name.display(self.db.upcast(), edition) ); continue; } @@ -603,7 +615,7 @@ impl Printer<'_> { if i != 0 { w!(this, ", "); } - w!(this, "{}", lt.display(self.db.upcast())); + w!(this, "{}", lt.display(self.db.upcast(), edition)); } w!(this, "> "); (target, bound) @@ -613,7 +625,7 @@ impl Printer<'_> { match target { WherePredicateTypeTarget::TypeRef(ty) => this.print_type_ref(ty), WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() { - Some(name) => w!(this, "{}", name.display(self.db.upcast())), + Some(name) => w!(this, "{}", name.display(self.db.upcast(), edition)), None => w!(this, "_anon_{}", id.into_raw()), }, } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs index c6930401a6f58..5c07369f4b5b6 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs @@ -1,4 +1,5 @@ use expect_test::{expect, Expect}; +use span::Edition; use 
test_fixture::WithFixture; use crate::{db::DefDatabase, test_db::TestDB}; @@ -6,7 +7,7 @@ use crate::{db::DefDatabase, test_db::TestDB}; fn check(ra_fixture: &str, expect: Expect) { let (db, file_id) = TestDB::with_single_file(ra_fixture); let item_tree = db.file_item_tree(file_id.into()); - let pretty = item_tree.pretty_print(&db); + let pretty = item_tree.pretty_print(&db, Edition::CURRENT); expect.assert_eq(&pretty); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 8825e46336326..11601c683e1ea 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -328,6 +328,10 @@ impl DefMap { /// The module id of a crate or block root. pub const ROOT: LocalModuleId = LocalModuleId::from_raw(la_arena::RawIdx::from_u32(0)); + pub fn edition(&self) -> Edition { + self.data.edition + } + pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, crate_id: CrateId) -> Arc { let crate_graph = db.crate_graph(); let krate = &crate_graph[crate_id]; @@ -550,7 +554,7 @@ impl DefMap { for (name, child) in map.modules[module].children.iter().sorted_by(|a, b| Ord::cmp(&a.0, &b.0)) { - let path = format!("{path}::{}", name.display(db.upcast())); + let path = format!("{path}::{}", name.display(db.upcast(), Edition::LATEST)); buf.push('\n'); go(buf, db, map, &path, *child); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index debc5a44326ef..22eb5a174d393 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -548,7 +548,7 @@ impl DefCollector<'_> { types => { tracing::debug!( "could not resolve prelude path `{}` to module (resolved to {:?})", - path.display(self.db.upcast()), + path.display(self.db.upcast(), Edition::LATEST), types ); } @@ -768,7 +768,7 @@ impl DefCollector<'_> { } fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport { - let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db.upcast())) + let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db.upcast(), Edition::LATEST)) .entered(); tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition); match import.source { @@ -2151,7 +2151,7 @@ impl ModCollector<'_, '_> { } tracing::debug!( "non-builtin attribute {}", - attr.path.display(self.def_collector.db.upcast()) + attr.path.display(self.def_collector.db.upcast(), Edition::LATEST) ); let ast_id = AstIdWithPath::new( @@ -2286,8 +2286,8 @@ impl ModCollector<'_, '_> { stdx::always!( name == mac.name, "built-in macro {} has #[rustc_builtin_macro] which declares different name {}", - mac.name.display(self.def_collector.db.upcast()), - name.display(self.def_collector.db.upcast()) + mac.name.display(self.def_collector.db.upcast(), Edition::LATEST), + name.display(self.def_collector.db.upcast(), Edition::LATEST), ); helpers_opt = Some(helpers); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs index 390c934f6dac1..a05c4dcf9bd70 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs @@ -1,6 +1,7 @@ use expect_test::expect; use 
itertools::Itertools; +use span::Edition; use super::*; @@ -1100,7 +1101,7 @@ pub fn derive_macro_2(_item: TokenStream) -> TokenStream { assert_eq!(def_map.data.exported_derives.len(), 1); match def_map.data.exported_derives.values().next() { Some(helpers) => match &**helpers { - [attr] => assert_eq!(attr.display(&db).to_string(), "helper_attr"), + [attr] => assert_eq!(attr.display(&db, Edition::CURRENT).to_string(), "helper_attr"), _ => unreachable!(), }, _ => unreachable!(), @@ -1456,7 +1457,7 @@ fn proc_attr(a: TokenStream, b: TokenStream) -> TokenStream { a } let actual = def_map .macro_use_prelude .keys() - .map(|name| name.display(&db).to_string()) + .map(|name| name.display(&db, Edition::CURRENT).to_string()) .sorted() .join("\n"); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/mod_resolution.rs index 1327d9aa62e12..071b55c83d8de 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/mod_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/mod_resolution.rs @@ -144,14 +144,14 @@ pub struct Baz; crate::r#async Bar: t v - foo: t r#async: t - - crate::r#async::foo - Foo: t v + foo: t crate::r#async::r#async Baz: t v + + crate::r#async::foo + Foo: t v "#]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs index f90bc954a9b3d..077863c0c939c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/path.rs @@ -13,7 +13,8 @@ use crate::{ }; use hir_expand::name::Name; use intern::Interned; -use syntax::{ast, ToSmolStr}; +use span::Edition; +use syntax::ast; pub use hir_expand::mod_path::{path, ModPath, PathKind}; @@ -25,11 +26,21 @@ pub enum ImportAlias { Alias(Name), } -impl Display for ImportAlias { +impl ImportAlias { + pub fn display(&self, edition: Edition) -> impl Display + '_ { + ImportAliasDisplay { value: self, edition } + } +} + +struct ImportAliasDisplay<'a> { + value: &'a ImportAlias, + edition: Edition, +} +impl Display for ImportAliasDisplay<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { + match self.value { ImportAlias::Underscore => f.write_str("_"), - ImportAlias::Alias(name) => f.write_str(&name.display_no_db().to_smolstr()), + ImportAlias::Alias(name) => Display::fmt(&name.display_no_db(self.edition), f), } } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs index 3ee88b536fc4a..d5ef17a91fb2b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs @@ -5,6 +5,7 @@ use std::fmt::{self, Write}; use hir_expand::mod_path::PathKind; use intern::Interned; use itertools::Itertools; +use span::Edition; use crate::{ db::DefDatabase, @@ -13,46 +14,51 @@ use crate::{ type_ref::{Mutability, TraitBoundModifier, TypeBound, TypeRef}, }; -pub(crate) fn print_path(db: &dyn DefDatabase, path: &Path, buf: &mut dyn Write) -> fmt::Result { +pub(crate) fn print_path( + db: &dyn DefDatabase, + path: &Path, + buf: &mut dyn Write, + edition: Edition, +) -> fmt::Result { if let Path::LangItem(it, s) = path { write!(buf, "builtin#lang(")?; match *it { LangItemTarget::ImplDef(it) => write!(buf, "{it:?}")?, LangItemTarget::EnumId(it) => { - write!(buf, "{}", db.enum_data(it).name.display(db.upcast()))? 
+ write!(buf, "{}", db.enum_data(it).name.display(db.upcast(), edition))? } LangItemTarget::Function(it) => { - write!(buf, "{}", db.function_data(it).name.display(db.upcast()))? + write!(buf, "{}", db.function_data(it).name.display(db.upcast(), edition))? } LangItemTarget::Static(it) => { - write!(buf, "{}", db.static_data(it).name.display(db.upcast()))? + write!(buf, "{}", db.static_data(it).name.display(db.upcast(), edition))? } LangItemTarget::Struct(it) => { - write!(buf, "{}", db.struct_data(it).name.display(db.upcast()))? + write!(buf, "{}", db.struct_data(it).name.display(db.upcast(), edition))? } LangItemTarget::Union(it) => { - write!(buf, "{}", db.union_data(it).name.display(db.upcast()))? + write!(buf, "{}", db.union_data(it).name.display(db.upcast(), edition))? } LangItemTarget::TypeAlias(it) => { - write!(buf, "{}", db.type_alias_data(it).name.display(db.upcast()))? + write!(buf, "{}", db.type_alias_data(it).name.display(db.upcast(), edition))? } LangItemTarget::Trait(it) => { - write!(buf, "{}", db.trait_data(it).name.display(db.upcast()))? + write!(buf, "{}", db.trait_data(it).name.display(db.upcast(), edition))? } LangItemTarget::EnumVariant(it) => { - write!(buf, "{}", db.enum_variant_data(it).name.display(db.upcast()))? + write!(buf, "{}", db.enum_variant_data(it).name.display(db.upcast(), edition))? } } if let Some(s) = s { - write!(buf, "::{}", s.display(db.upcast()))?; + write!(buf, "::{}", s.display(db.upcast(), edition))?; } return write!(buf, ")"); } match path.type_anchor() { Some(anchor) => { write!(buf, "<")?; - print_type_ref(db, anchor, buf)?; + print_type_ref(db, anchor, buf, edition)?; write!(buf, ">::")?; } None => match path.kind() { @@ -78,10 +84,10 @@ pub(crate) fn print_path(db: &dyn DefDatabase, path: &Path, buf: &mut dyn Write) write!(buf, "::")?; } - write!(buf, "{}", segment.name.display(db.upcast()))?; + write!(buf, "{}", segment.name.display(db.upcast(), edition))?; if let Some(generics) = segment.args_and_bindings { write!(buf, "::<")?; - print_generic_args(db, generics, buf)?; + print_generic_args(db, generics, buf, edition)?; write!(buf, ">")?; } @@ -94,12 +100,13 @@ pub(crate) fn print_generic_args( db: &dyn DefDatabase, generics: &GenericArgs, buf: &mut dyn Write, + edition: Edition, ) -> fmt::Result { let mut first = true; let args = if generics.has_self_type { let (self_ty, args) = generics.args.split_first().unwrap(); write!(buf, "Self=")?; - print_generic_arg(db, self_ty, buf)?; + print_generic_arg(db, self_ty, buf, edition)?; first = false; args } else { @@ -110,21 +117,21 @@ pub(crate) fn print_generic_args( write!(buf, ", ")?; } first = false; - print_generic_arg(db, arg, buf)?; + print_generic_arg(db, arg, buf, edition)?; } for binding in generics.bindings.iter() { if !first { write!(buf, ", ")?; } first = false; - write!(buf, "{}", binding.name.display(db.upcast()))?; + write!(buf, "{}", binding.name.display(db.upcast(), edition))?; if !binding.bounds.is_empty() { write!(buf, ": ")?; - print_type_bounds(db, &binding.bounds, buf)?; + print_type_bounds(db, &binding.bounds, buf, edition)?; } if let Some(ty) = &binding.type_ref { write!(buf, " = ")?; - print_type_ref(db, ty, buf)?; + print_type_ref(db, ty, buf, edition)?; } } Ok(()) @@ -134,11 +141,12 @@ pub(crate) fn print_generic_arg( db: &dyn DefDatabase, arg: &GenericArg, buf: &mut dyn Write, + edition: Edition, ) -> fmt::Result { match arg { - GenericArg::Type(ty) => print_type_ref(db, ty, buf), - GenericArg::Const(c) => write!(buf, "{}", c.display(db.upcast())), - 
GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast())), + GenericArg::Type(ty) => print_type_ref(db, ty, buf, edition), + GenericArg::Const(c) => write!(buf, "{}", c.display(db.upcast(), edition)), + GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast(), edition)), } } @@ -146,6 +154,7 @@ pub(crate) fn print_type_ref( db: &dyn DefDatabase, type_ref: &TypeRef, buf: &mut dyn Write, + edition: Edition, ) -> fmt::Result { // FIXME: deduplicate with `HirDisplay` impl match type_ref { @@ -157,18 +166,18 @@ pub(crate) fn print_type_ref( if i != 0 { write!(buf, ", ")?; } - print_type_ref(db, field, buf)?; + print_type_ref(db, field, buf, edition)?; } write!(buf, ")")?; } - TypeRef::Path(path) => print_path(db, path, buf)?, + TypeRef::Path(path) => print_path(db, path, buf, edition)?, TypeRef::RawPtr(pointee, mtbl) => { let mtbl = match mtbl { Mutability::Shared => "*const", Mutability::Mut => "*mut", }; write!(buf, "{mtbl} ")?; - print_type_ref(db, pointee, buf)?; + print_type_ref(db, pointee, buf, edition)?; } TypeRef::Reference(pointee, lt, mtbl) => { let mtbl = match mtbl { @@ -177,19 +186,19 @@ pub(crate) fn print_type_ref( }; write!(buf, "&")?; if let Some(lt) = lt { - write!(buf, "{} ", lt.name.display(db.upcast()))?; + write!(buf, "{} ", lt.name.display(db.upcast(), edition))?; } write!(buf, "{mtbl}")?; - print_type_ref(db, pointee, buf)?; + print_type_ref(db, pointee, buf, edition)?; } TypeRef::Array(elem, len) => { write!(buf, "[")?; - print_type_ref(db, elem, buf)?; - write!(buf, "; {}]", len.display(db.upcast()))?; + print_type_ref(db, elem, buf, edition)?; + write!(buf, "; {}]", len.display(db.upcast(), edition))?; } TypeRef::Slice(elem) => { write!(buf, "[")?; - print_type_ref(db, elem, buf)?; + print_type_ref(db, elem, buf, edition)?; write!(buf, "]")?; } TypeRef::Fn(args_and_ret, varargs, is_unsafe, abi) => { @@ -208,7 +217,7 @@ pub(crate) fn print_type_ref( if i != 0 { write!(buf, ", ")?; } - print_type_ref(db, typeref, buf)?; + print_type_ref(db, typeref, buf, edition)?; } if *varargs { if !args.is_empty() { @@ -217,7 +226,7 @@ pub(crate) fn print_type_ref( write!(buf, "...")?; } write!(buf, ") -> ")?; - print_type_ref(db, return_type, buf)?; + print_type_ref(db, return_type, buf, edition)?; } TypeRef::Macro(_ast_id) => { write!(buf, "")?; @@ -225,11 +234,11 @@ pub(crate) fn print_type_ref( TypeRef::Error => write!(buf, "{{unknown}}")?, TypeRef::ImplTrait(bounds) => { write!(buf, "impl ")?; - print_type_bounds(db, bounds, buf)?; + print_type_bounds(db, bounds, buf, edition)?; } TypeRef::DynTrait(bounds) => { write!(buf, "dyn ")?; - print_type_bounds(db, bounds, buf)?; + print_type_bounds(db, bounds, buf, edition)?; } } @@ -240,6 +249,7 @@ pub(crate) fn print_type_bounds( db: &dyn DefDatabase, bounds: &[Interned], buf: &mut dyn Write, + edition: Edition, ) -> fmt::Result { for (i, bound) in bounds.iter().enumerate() { if i != 0 { @@ -252,17 +262,17 @@ pub(crate) fn print_type_bounds( TraitBoundModifier::None => (), TraitBoundModifier::Maybe => write!(buf, "?")?, } - print_path(db, path, buf)?; + print_path(db, path, buf, edition)?; } TypeBound::ForLifetime(lifetimes, path) => { write!( buf, "for<{}> ", - lifetimes.iter().map(|it| it.display(db.upcast())).format(", ") + lifetimes.iter().map(|it| it.display(db.upcast(), edition)).format(", ") )?; - print_path(db, path, buf)?; + print_path(db, path, buf, edition)?; } - TypeBound::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast()))?, + TypeBound::Lifetime(lt) => write!(buf, "{}", 
lt.name.display(db.upcast(), edition))?, TypeBound::Error => write!(buf, "{{unknown}}")?, } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs index 3528b2dde73e7..1dadfe2ba99bb 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs @@ -176,9 +176,10 @@ fn eager_macro_recur( Some(path) => match macro_resolver(&path) { Some(def) => def, None => { + let edition = db.crate_graph()[krate].edition; error = Some(ExpandError::other( span_map.span_at(call.syntax().text_range().start()), - format!("unresolved macro {}", path.display(db)), + format!("unresolved macro {}", path.display(db, edition)), )); offset += call.syntax().text_range().len(); continue; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs index 2c26fe414d91e..dcf2af3997970 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs @@ -14,7 +14,7 @@ use crate::{ use base_db::CrateId; use intern::sym; use smallvec::SmallVec; -use span::SyntaxContextId; +use span::{Edition, SyntaxContextId}; use syntax::{ast, AstNode}; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -140,8 +140,12 @@ impl ModPath { UnescapedModPath(self) } - pub fn display<'a>(&'a self, db: &'a dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a { - Display { db, path: self } + pub fn display<'a>( + &'a self, + db: &'a dyn crate::db::ExpandDatabase, + edition: Edition, + ) -> impl fmt::Display + 'a { + Display { db, path: self, edition } } } @@ -154,11 +158,12 @@ impl Extend for ModPath { struct Display<'a> { db: &'a dyn ExpandDatabase, path: &'a ModPath, + edition: Edition, } impl fmt::Display for Display<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - display_fmt_path(self.db, self.path, f, true) + display_fmt_path(self.db, self.path, f, Escape::IfNeeded(self.edition)) } } @@ -169,7 +174,7 @@ struct UnescapedDisplay<'a> { impl fmt::Display for UnescapedDisplay<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - display_fmt_path(self.db, self.path.0, f, false) + display_fmt_path(self.db, self.path.0, f, Escape::No) } } @@ -178,11 +183,17 @@ impl From for ModPath { ModPath::from_segments(PathKind::Plain, iter::once(name)) } } + +enum Escape { + No, + IfNeeded(Edition), +} + fn display_fmt_path( db: &dyn ExpandDatabase, path: &ModPath, f: &mut fmt::Formatter<'_>, - escaped: bool, + escaped: Escape, ) -> fmt::Result { let mut first_segment = true; let mut add_segment = |s| -> fmt::Result { @@ -210,10 +221,9 @@ fn display_fmt_path( f.write_str("::")?; } first_segment = false; - if escaped { - segment.display(db).fmt(f)?; - } else { - segment.unescaped().display(db).fmt(f)?; + match escaped { + Escape::IfNeeded(edition) => segment.display(db, edition).fmt(f)?, + Escape::No => segment.unescaped().display(db).fmt(f)?, } } Ok(()) @@ -322,9 +332,11 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option PathKind::SELF, tt::Leaf::Ident(tt::Ident { sym: text, .. 
}) if *text == sym::super_ => { let mut deg = 1; - while let Some(tt::Leaf::Ident(tt::Ident { sym: text, span, is_raw })) = leaves.next() { + while let Some(tt::Leaf::Ident(tt::Ident { sym: text, span, is_raw: _ })) = + leaves.next() + { if *text != sym::super_ { - segments.push(Name::new_symbol_maybe_raw(text.clone(), *is_raw, span.ctx)); + segments.push(Name::new_symbol(text.clone(), span.ctx)); break; } deg += 1; @@ -333,19 +345,13 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option PathKind::Crate, tt::Leaf::Ident(ident) => { - segments.push(Name::new_symbol_maybe_raw( - ident.sym.clone(), - ident.is_raw, - ident.span.ctx, - )); + segments.push(Name::new_symbol(ident.sym.clone(), ident.span.ctx)); PathKind::Plain } _ => return None, }; segments.extend(leaves.filter_map(|leaf| match leaf { - ::tt::Leaf::Ident(ident) => { - Some(Name::new_symbol_maybe_raw(ident.sym.clone(), ident.is_raw, ident.span.ctx)) - } + ::tt::Leaf::Ident(ident) => Some(Name::new_symbol(ident.sym.clone(), ident.span.ctx)), _ => None, })); Some(ModPath { kind, segments }) diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs index 2a52aaba6af99..54313904a7ecd 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs @@ -3,22 +3,22 @@ use std::fmt; use intern::{sym, Symbol}; -use span::SyntaxContextId; -use syntax::{ast, utils::is_raw_identifier}; +use span::{Edition, SyntaxContextId}; +use syntax::ast; +use syntax::utils::is_raw_identifier; /// `Name` is a wrapper around string, which is used in hir for both references /// and declarations. In theory, names should also carry hygiene info, but we are /// not there yet! /// -/// Note that `Name` holds and prints escaped name i.e. prefixed with "r#" when it -/// is a raw identifier. Use [`unescaped()`][Name::unescaped] when you need the -/// name without "r#". +/// Note that the rawness (`r#`) of names does not depend on whether they are written raw. +/// This is because we want to show (in completions etc.) names as raw depending on the needs +/// of the current crate, for example if it is edition 2021 complete `gen` even if the defining +/// crate is in edition 2024 and wrote `r#gen`, and the opposite holds as well. #[derive(Clone, PartialEq, Eq, Hash)] pub struct Name { symbol: Symbol, ctx: (), - // FIXME: We should probably encode rawness as a property here instead, once we have hygiene - // in here we've got 4 bytes of padding to fill anyways } impl fmt::Debug for Name { @@ -42,6 +42,7 @@ impl PartialOrd for Name { } } +// No need to strip `r#`, all comparisons are done against well-known symbols. impl PartialEq for Name { fn eq(&self, sym: &Symbol) -> bool { self.symbol == *sym @@ -55,16 +56,16 @@ impl PartialEq for Symbol { } /// Wrapper of `Name` to print the name without "r#" even when it is a raw identifier. 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct UnescapedName<'a>(&'a Name); -impl UnescapedName<'_> { - pub fn display(&self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + '_ { +impl<'a> UnescapedName<'a> { + pub fn display(self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a { _ = db; UnescapedDisplay { name: self } } #[doc(hidden)] - pub fn display_no_db(&self) -> impl fmt::Display + '_ { + pub fn display_no_db(self) -> impl fmt::Display + 'a { UnescapedDisplay { name: self } } } @@ -77,16 +78,9 @@ impl Name { Name { symbol: Symbol::intern(text), ctx: () } } - pub fn new(text: &str, raw: tt::IdentIsRaw, ctx: SyntaxContextId) -> Name { + pub fn new(text: &str, ctx: SyntaxContextId) -> Name { _ = ctx; - Name { - symbol: if raw.yes() { - Symbol::intern(&format!("{}{text}", raw.as_str())) - } else { - Symbol::intern(text) - }, - ctx: (), - } + Self::new_text(text) } pub fn new_tuple_field(idx: usize) -> Name { @@ -97,26 +91,9 @@ impl Name { Name { symbol: Symbol::intern(lt.text().as_str()), ctx: () } } - /// Shortcut to create a name from a string literal. - fn new_ref(text: &str) -> Name { - Name { symbol: Symbol::intern(text), ctx: () } - } - /// Resolve a name from the text of token. fn resolve(raw_text: &str) -> Name { - // FIXME: Edition - match raw_text.strip_prefix("r#") { - // When `raw_text` starts with "r#" but the name does not coincide with any - // keyword, we never need the prefix so we strip it. - Some(text) if !is_raw_identifier(text, span::Edition::CURRENT) => Name::new_ref(text), - // Keywords (in the current edition) *can* be used as a name in earlier editions of - // Rust, e.g. "try" in Rust 2015. Even in such cases, we keep track of them in their - // escaped form. - None if is_raw_identifier(raw_text, span::Edition::CURRENT) => { - Name::new_text(&format!("r#{}", raw_text)) - } - _ => Name::new_text(raw_text), - } + Name::new_text(raw_text.trim_start_matches("r#")) } /// A fake name for things missing in the source code. 
@@ -162,19 +139,23 @@ impl Name { UnescapedName(self) } - pub fn is_escaped(&self) -> bool { - self.symbol.as_str().starts_with("r#") + pub fn is_escaped(&self, edition: Edition) -> bool { + is_raw_identifier(self.symbol.as_str(), edition) } - pub fn display<'a>(&'a self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a { + pub fn display<'a>( + &'a self, + db: &dyn crate::db::ExpandDatabase, + edition: Edition, + ) -> impl fmt::Display + 'a { _ = db; - Display { name: self } + self.display_no_db(edition) } // FIXME: Remove this #[doc(hidden)] - pub fn display_no_db(&self) -> impl fmt::Display + '_ { - Display { name: self } + pub fn display_no_db(&self, edition: Edition) -> impl fmt::Display + '_ { + Display { name: self, needs_escaping: is_raw_identifier(self.symbol.as_str(), edition) } } pub fn symbol(&self) -> &Symbol { @@ -186,39 +167,39 @@ impl Name { Self { symbol, ctx: () } } - pub fn new_symbol_maybe_raw(sym: Symbol, raw: tt::IdentIsRaw, ctx: SyntaxContextId) -> Self { - if raw.no() { - Self { symbol: sym, ctx: () } - } else { - Name::new(sym.as_str(), raw, ctx) - } - } - // FIXME: This needs to go once we have hygiene pub const fn new_symbol_root(sym: Symbol) -> Self { Self { symbol: sym, ctx: () } } + + #[inline] + pub fn eq_ident(&self, ident: &str) -> bool { + self.as_str() == ident.trim_start_matches("r#") + } } struct Display<'a> { name: &'a Name, + needs_escaping: bool, } impl fmt::Display for Display<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.needs_escaping { + write!(f, "r#")?; + } fmt::Display::fmt(self.name.symbol.as_str(), f) } } struct UnescapedDisplay<'a> { - name: &'a UnescapedName<'a>, + name: UnescapedName<'a>, } impl fmt::Display for UnescapedDisplay<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let symbol = &self.name.0.symbol.as_str(); - let text = symbol.strip_prefix("r#").unwrap_or(symbol); - fmt::Display::fmt(&text, f) + let symbol = self.name.0.symbol.as_str(); + fmt::Display::fmt(symbol, f) } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index a151ee01e6455..a3e4da5d1afd7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -5,6 +5,7 @@ use std::{iter, ops::ControlFlow, sync::Arc}; use hir_expand::name::Name; use intern::sym; +use span::Edition; use tracing::debug; use chalk_ir::{cast::Caster, fold::shift::Shift, CanonicalVarKinds}; @@ -424,18 +425,19 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { fn trait_name(&self, trait_id: chalk_ir::TraitId) -> String { let id = from_chalk_trait_id(trait_id); - self.db.trait_data(id).name.display(self.db.upcast()).to_string() + self.db.trait_data(id).name.display(self.db.upcast(), self.edition()).to_string() } fn adt_name(&self, chalk_ir::AdtId(adt_id): AdtId) -> String { + let edition = self.edition(); match adt_id { hir_def::AdtId::StructId(id) => { - self.db.struct_data(id).name.display(self.db.upcast()).to_string() + self.db.struct_data(id).name.display(self.db.upcast(), edition).to_string() } hir_def::AdtId::EnumId(id) => { - self.db.enum_data(id).name.display(self.db.upcast()).to_string() + self.db.enum_data(id).name.display(self.db.upcast(), edition).to_string() } hir_def::AdtId::UnionId(id) => { - self.db.union_data(id).name.display(self.db.upcast()).to_string() + self.db.union_data(id).name.display(self.db.upcast(), edition).to_string() } } } @@ -445,7 +447,7 @@ impl 
chalk_solve::RustIrDatabase for ChalkContext<'_> { } fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId) -> String { let id = self.db.associated_ty_data(assoc_ty_id).name; - self.db.type_alias_data(id).name.display(self.db.upcast()).to_string() + self.db.type_alias_data(id).name.display(self.db.upcast(), self.edition()).to_string() } fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId) -> String { format!("Opaque_{}", opaque_ty_id.0) @@ -519,6 +521,10 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { } impl<'a> ChalkContext<'a> { + fn edition(&self) -> Edition { + self.db.crate_graph()[self.krate].edition + } + fn for_trait_impls( &self, trait_id: hir_def::TraitId, @@ -843,7 +849,7 @@ fn impl_def_datum( "impl {:?}: {}{} where {:?}", chalk_id, if negative { "!" } else { "" }, - trait_ref.display(db), + trait_ref.display(db, db.crate_graph()[krate].edition), where_clauses ); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs index dc3817ce3f40f..a0399a06f2369 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs @@ -1,3 +1,4 @@ +use base_db::SourceDatabase; use chalk_ir::Substitution; use hir_def::db::DefDatabase; use rustc_apfloat::{ @@ -94,9 +95,10 @@ fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) { fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String { let mut err = String::new(); let span_formatter = |file, range| format!("{file:?} {range:?}"); + let edition = db.crate_graph()[db.test_crate()].edition; match e { - ConstEvalError::MirLowerError(e) => e.pretty_print(&mut err, &db, span_formatter), - ConstEvalError::MirEvalError(e) => e.pretty_print(&mut err, &db, span_formatter), + ConstEvalError::MirLowerError(e) => e.pretty_print(&mut err, &db, span_formatter, edition), + ConstEvalError::MirEvalError(e) => e.pretty_print(&mut err, &db, span_formatter, edition), } .unwrap(); err @@ -110,7 +112,9 @@ fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result { - if db.const_data(x).name.as_ref()?.display(db).to_string() == "GOAL" { + if db.const_data(x).name.as_ref()?.display(db, file_id.edition()).to_string() + == "GOAL" + { Some(x) } else { None diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs index b093440060806..024fc32f86370 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs @@ -17,17 +17,18 @@ use std::fmt; use hir_def::{ data::adt::VariantData, db::DefDatabase, hir::Pat, src::HasSource, AdtId, AttrDefId, ConstId, - EnumId, EnumVariantId, FunctionId, ItemContainerId, Lookup, ModuleDefId, ModuleId, StaticId, - StructId, TraitId, TypeAliasId, + EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId, + StaticId, StructId, TraitId, TypeAliasId, }; use hir_expand::{ name::{AsName, Name}, - HirFileId, MacroFileIdExt, + HirFileId, HirFileIdExt, MacroFileIdExt, }; use intern::sym; use stdx::{always, never}; use syntax::{ ast::{self, HasName}, + utils::is_raw_identifier, AstNode, AstPtr, ToSmolStr, }; @@ -318,17 +319,21 @@ impl<'a> DeclValidator<'a> { /// This includes function parameters except for trait implementation associated functions. 
fn validate_func_body(&mut self, func: FunctionId) { let body = self.db.body(func.into()); + let edition = self.edition(func); let mut pats_replacements = body .pats .iter() .filter_map(|(pat_id, pat)| match pat { Pat::Bind { id, .. } => { let bind_name = &body.bindings[*id].name; + let mut suggested_text = + to_lower_snake_case(&bind_name.unescaped().display_no_db().to_smolstr())?; + if is_raw_identifier(&suggested_text, edition) { + suggested_text.insert_str(0, "r#"); + } let replacement = Replacement { current_name: bind_name.clone(), - suggested_text: to_lower_snake_case( - &bind_name.display_no_db().to_smolstr(), - )?, + suggested_text, expected_case: CaseType::LowerSnakeCase, }; Some((pat_id, replacement)) @@ -377,6 +382,11 @@ impl<'a> DeclValidator<'a> { } } + fn edition(&self, id: impl HasModule) -> span::Edition { + let krate = id.krate(self.db.upcast()); + self.db.crate_graph()[krate].edition + } + fn validate_struct(&mut self, struct_id: StructId) { // Check the structure name. let non_camel_case_allowed = @@ -405,16 +415,17 @@ impl<'a> DeclValidator<'a> { let VariantData::Record(fields) = data.variant_data.as_ref() else { return; }; + let edition = self.edition(struct_id); let mut struct_fields_replacements = fields .iter() .filter_map(|(_, field)| { - to_lower_snake_case(&field.name.display_no_db().to_smolstr()).map(|new_name| { - Replacement { + to_lower_snake_case(&field.name.display_no_db(edition).to_smolstr()).map( + |new_name| Replacement { current_name: field.name.clone(), suggested_text: new_name, expected_case: CaseType::LowerSnakeCase, - } - }) + }, + ) }) .peekable(); @@ -498,14 +509,17 @@ impl<'a> DeclValidator<'a> { self.validate_enum_variant_fields(*variant_id); } + let edition = self.edition(enum_id); let mut enum_variants_replacements = data .variants .iter() .filter_map(|(_, name)| { - to_camel_case(&name.display_no_db().to_smolstr()).map(|new_name| Replacement { - current_name: name.clone(), - suggested_text: new_name, - expected_case: CaseType::UpperCamelCase, + to_camel_case(&name.display_no_db(edition).to_smolstr()).map(|new_name| { + Replacement { + current_name: name.clone(), + suggested_text: new_name, + expected_case: CaseType::UpperCamelCase, + } }) }) .peekable(); @@ -566,16 +580,17 @@ impl<'a> DeclValidator<'a> { let VariantData::Record(fields) = variant_data.variant_data.as_ref() else { return; }; + let edition = self.edition(variant_id); let mut variant_field_replacements = fields .iter() .filter_map(|(_, field)| { - to_lower_snake_case(&field.name.display_no_db().to_smolstr()).map(|new_name| { - Replacement { + to_lower_snake_case(&field.name.display_no_db(edition).to_smolstr()).map( + |new_name| Replacement { current_name: field.name.clone(), suggested_text: new_name, expected_case: CaseType::LowerSnakeCase, - } - }) + }, + ) }) .peekable(); @@ -704,18 +719,22 @@ impl<'a> DeclValidator<'a> { ) where N: AstNode + HasName + fmt::Debug, S: HasSource, - L: Lookup = dyn DefDatabase + 'a>, + L: Lookup = dyn DefDatabase + 'a> + HasModule + Copy, { let to_expected_case_type = match expected_case { CaseType::LowerSnakeCase => to_lower_snake_case, CaseType::UpperSnakeCase => to_upper_snake_case, CaseType::UpperCamelCase => to_camel_case, }; - let Some(replacement) = - to_expected_case_type(&name.display(self.db.upcast()).to_smolstr()).map(|new_name| { - Replacement { current_name: name.clone(), suggested_text: new_name, expected_case } - }) - else { + let edition = self.edition(item_id); + let Some(replacement) = to_expected_case_type( + 
&name.display(self.db.upcast(), edition).to_smolstr(), + ) + .map(|new_name| Replacement { + current_name: name.clone(), + suggested_text: new_name, + expected_case, + }) else { return; }; @@ -748,12 +767,13 @@ impl<'a> DeclValidator<'a> { return; }; + let edition = file_id.original_file(self.db.upcast()).edition(); let diagnostic = IncorrectCase { file: file_id, ident_type, ident: AstPtr::new(&name_ast), expected_case: replacement.expected_case, - ident_text: replacement.current_name.display(self.db.upcast()).to_string(), + ident_text: replacement.current_name.display(self.db.upcast(), edition).to_string(), suggested_text: replacement.suggested_text, }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index 6e5a7cce9c976..f8b5c7d0ce2c8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -4,6 +4,7 @@ use std::fmt; +use base_db::CrateId; use chalk_solve::rust_ir::AdtKind; use either::Either; use hir_def::{ @@ -15,6 +16,7 @@ use intern::sym; use itertools::Itertools; use rustc_hash::FxHashSet; use rustc_pattern_analysis::constructor::Constructor; +use span::Edition; use syntax::{ ast::{self, UnaryOp}, AstNode, @@ -258,7 +260,13 @@ impl ExprValidator { if !witnesses.is_empty() { self.diagnostics.push(BodyValidationDiagnostic::MissingMatchArms { match_expr, - uncovered_patterns: missing_match_arms(&cx, scrut_ty, witnesses, m_arms.is_empty()), + uncovered_patterns: missing_match_arms( + &cx, + scrut_ty, + witnesses, + m_arms.is_empty(), + self.owner.krate(db.upcast()), + ), }); } } @@ -345,7 +353,13 @@ impl ExprValidator { if !witnesses.is_empty() { self.diagnostics.push(BodyValidationDiagnostic::NonExhaustiveLet { pat, - uncovered_patterns: missing_match_arms(&cx, ty, witnesses, false), + uncovered_patterns: missing_match_arms( + &cx, + ty, + witnesses, + false, + self.owner.krate(db.upcast()), + ), }); } } @@ -616,24 +630,26 @@ fn missing_match_arms<'p>( scrut_ty: &Ty, witnesses: Vec>, arms_is_empty: bool, + krate: CrateId, ) -> String { - struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>); + struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>, Edition); impl fmt::Display for DisplayWitness<'_, '_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let DisplayWitness(witness, cx) = *self; + let DisplayWitness(witness, cx, edition) = *self; let pat = cx.hoist_witness_pat(witness); - write!(f, "{}", pat.display(cx.db)) + write!(f, "{}", pat.display(cx.db, edition)) } } + let edition = cx.db.crate_graph()[krate].edition; let non_empty_enum = match scrut_ty.as_adt() { Some((AdtId::EnumId(e), _)) => !cx.db.enum_data(e).variants.is_empty(), _ => false, }; if arms_is_empty && !non_empty_enum { - format!("type `{}` is non-empty", scrut_ty.display(cx.db)) + format!("type `{}` is non-empty", scrut_ty.display(cx.db, edition)) } else { - let pat_display = |witness| DisplayWitness(witness, cx); + let pat_display = |witness| DisplayWitness(witness, cx, edition); const LIMIT: usize = 3; match &*witnesses { [witness] => format!("`{}` not covered", pat_display(witness)), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs index a0ee7c0748b19..4bc07bc9ec8fe 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs +++ 
b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs @@ -14,6 +14,7 @@ use hir_def::{ body::Body, data::adt::VariantData, hir::PatId, AdtId, EnumVariantId, LocalFieldId, VariantId, }; use hir_expand::name::Name; +use span::Edition; use stdx::{always, never}; use crate::{ @@ -151,7 +152,11 @@ impl<'a> PatCtxt<'a> { match (bm, ty.kind(Interner)) { (BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty, (BindingMode::Ref(_), _) => { - never!("`ref {}` has wrong type {:?}", name.display(self.db.upcast()), ty); + never!( + "`ref {}` has wrong type {:?}", + name.display(self.db.upcast(), Edition::LATEST), + ty + ); self.errors.push(PatternError::UnexpectedType); return Pat { ty: ty.clone(), kind: PatKind::Wild.into() }; } @@ -297,7 +302,7 @@ impl HirDisplay for Pat { PatKind::Wild => write!(f, "_"), PatKind::Never => write!(f, "!"), PatKind::Binding { name, subpattern } => { - write!(f, "{}", name.display(f.db.upcast()))?; + write!(f, "{}", name.display(f.db.upcast(), f.edition()))?; if let Some(subpattern) = subpattern { write!(f, " @ ")?; subpattern.hir_fmt(f)?; @@ -317,14 +322,22 @@ impl HirDisplay for Pat { if let Some(variant) = variant { match variant { VariantId::EnumVariantId(v) => { - write!(f, "{}", f.db.enum_variant_data(v).name.display(f.db.upcast()))?; - } - VariantId::StructId(s) => { - write!(f, "{}", f.db.struct_data(s).name.display(f.db.upcast()))? - } - VariantId::UnionId(u) => { - write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast()))? + write!( + f, + "{}", + f.db.enum_variant_data(v).name.display(f.db.upcast(), f.edition()) + )?; } + VariantId::StructId(s) => write!( + f, + "{}", + f.db.struct_data(s).name.display(f.db.upcast(), f.edition()) + )?, + VariantId::UnionId(u) => write!( + f, + "{}", + f.db.union_data(u).name.display(f.db.upcast(), f.edition()) + )?, }; let variant_data = variant.variant_data(f.db.upcast()); @@ -341,7 +354,9 @@ impl HirDisplay for Pat { write!( f, "{}: ", - rec_fields[p.field].name.display(f.db.upcast()) + rec_fields[p.field] + .name + .display(f.db.upcast(), f.edition()) )?; p.pattern.hir_fmt(f) }) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index 5a96d396ab89d..4ef053130a8be 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -33,6 +33,7 @@ use rustc_apfloat::{ Float, }; use smallvec::SmallVec; +use span::Edition; use stdx::{never, IsNoneOr}; use triomphe::Arc; @@ -131,7 +132,11 @@ pub trait HirDisplay { /// Returns a `Display`able type that is human-readable. /// Use this for showing types to the user (e.g. 
diagnostics) - fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self> + fn display<'a>( + &'a self, + db: &'a dyn HirDatabase, + edition: Edition, + ) -> HirDisplayWrapper<'a, Self> where Self: Sized, { @@ -142,7 +147,7 @@ pub trait HirDisplay { limited_size: None, omit_verbose_types: false, closure_style: ClosureStyle::ImplFn, - display_target: DisplayTarget::Diagnostics, + display_target: DisplayTarget::Diagnostics { edition }, show_container_bounds: false, } } @@ -153,6 +158,7 @@ pub trait HirDisplay { &'a self, db: &'a dyn HirDatabase, max_size: Option, + edition: Edition, ) -> HirDisplayWrapper<'a, Self> where Self: Sized, @@ -164,7 +170,7 @@ pub trait HirDisplay { limited_size: None, omit_verbose_types: true, closure_style: ClosureStyle::ImplFn, - display_target: DisplayTarget::Diagnostics, + display_target: DisplayTarget::Diagnostics { edition }, show_container_bounds: false, } } @@ -175,6 +181,7 @@ pub trait HirDisplay { &'a self, db: &'a dyn HirDatabase, limited_size: Option, + edition: Edition, ) -> HirDisplayWrapper<'a, Self> where Self: Sized, @@ -186,7 +193,7 @@ pub trait HirDisplay { limited_size, omit_verbose_types: true, closure_style: ClosureStyle::ImplFn, - display_target: DisplayTarget::Diagnostics, + display_target: DisplayTarget::Diagnostics { edition }, show_container_bounds: false, } } @@ -242,6 +249,7 @@ pub trait HirDisplay { &'a self, db: &'a dyn HirDatabase, show_container_bounds: bool, + edition: Edition, ) -> HirDisplayWrapper<'a, Self> where Self: Sized, @@ -253,13 +261,23 @@ pub trait HirDisplay { limited_size: None, omit_verbose_types: false, closure_style: ClosureStyle::ImplFn, - display_target: DisplayTarget::Diagnostics, + display_target: DisplayTarget::Diagnostics { edition }, show_container_bounds, } } } impl HirFormatter<'_> { + pub fn edition(&self) -> Edition { + match self.display_target { + DisplayTarget::Diagnostics { edition } => edition, + DisplayTarget::SourceCode { module_id, .. } => { + self.db.crate_graph()[module_id.krate()].edition + } + DisplayTarget::Test => Edition::CURRENT, + } + } + pub fn write_joined( &mut self, iter: impl IntoIterator, @@ -324,7 +342,7 @@ pub enum DisplayTarget { /// Display types for inlays, doc popups, autocompletion, etc... /// Showing `{unknown}` or not qualifying paths is fine here. /// There's no reason for this to fail. - Diagnostics, + Diagnostics { edition: Edition }, /// Display types for inserting them in source files. /// The generated code should compile, so paths need to be qualified. 
SourceCode { module_id: ModuleId, allow_opaque: bool }, @@ -460,7 +478,7 @@ impl HirDisplay for ProjectionTy { ">::{}", f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id)) .name - .display(f.db.upcast()) + .display(f.db.upcast(), f.edition()) )?; let proj_params_count = self.substitution.len(Interner) - trait_ref.substitution.len(Interner); @@ -499,7 +517,7 @@ impl HirDisplay for Const { let id = from_placeholder_idx(f.db, *idx); let generics = generics(f.db.upcast(), id.parent); let param_data = &generics[id.local_id]; - write!(f, "{}", param_data.name().unwrap().display(f.db.upcast()))?; + write!(f, "{}", param_data.name().unwrap().display(f.db.upcast(), f.edition()))?; Ok(()) } ConstValue::Concrete(c) => match &c.interned { @@ -633,7 +651,7 @@ fn render_const_scalar( TyKind::Adt(adt, _) if b.len() == 2 * size_of::() => match adt.0 { hir_def::AdtId::StructId(s) => { let data = f.db.struct_data(s); - write!(f, "&{}", data.name.display(f.db.upcast()))?; + write!(f, "&{}", data.name.display(f.db.upcast(), f.edition()))?; Ok(()) } _ => f.write_str(""), @@ -691,7 +709,7 @@ fn render_const_scalar( match adt.0 { hir_def::AdtId::StructId(s) => { let data = f.db.struct_data(s); - write!(f, "{}", data.name.display(f.db.upcast()))?; + write!(f, "{}", data.name.display(f.db.upcast(), f.edition()))?; let field_types = f.db.field_types(s.into()); render_variant_after_name( &data.variant_data, @@ -705,7 +723,7 @@ fn render_const_scalar( ) } hir_def::AdtId::UnionId(u) => { - write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast())) + write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast(), f.edition())) } hir_def::AdtId::EnumId(e) => { let Ok(target_data_layout) = f.db.target_data_layout(trait_env.krate) else { @@ -717,7 +735,7 @@ fn render_const_scalar( return f.write_str(""); }; let data = f.db.enum_variant_data(var_id); - write!(f, "{}", data.name.display(f.db.upcast()))?; + write!(f, "{}", data.name.display(f.db.upcast(), f.edition()))?; let field_types = f.db.field_types(var_id.into()); render_variant_after_name( &data.variant_data, @@ -802,11 +820,11 @@ fn render_variant_after_name( if matches!(data, VariantData::Record(_)) { write!(f, " {{")?; if let Some((id, data)) = it.next() { - write!(f, " {}: ", data.name.display(f.db.upcast()))?; + write!(f, " {}: ", data.name.display(f.db.upcast(), f.edition()))?; render_field(f, id)?; } for (id, data) in it { - write!(f, ", {}: ", data.name.display(f.db.upcast()))?; + write!(f, ", {}: ", data.name.display(f.db.upcast(), f.edition()))?; render_field(f, id)?; } write!(f, " }}")?; @@ -1000,15 +1018,23 @@ impl HirDisplay for Ty { CallableDefId::FunctionId(ff) => { write!(f, "fn ")?; f.start_location_link(def.into()); - write!(f, "{}", db.function_data(ff).name.display(f.db.upcast()))? + write!( + f, + "{}", + db.function_data(ff).name.display(f.db.upcast(), f.edition()) + )? } CallableDefId::StructId(s) => { f.start_location_link(def.into()); - write!(f, "{}", db.struct_data(s).name.display(f.db.upcast()))? + write!(f, "{}", db.struct_data(s).name.display(f.db.upcast(), f.edition()))? } CallableDefId::EnumVariantId(e) => { f.start_location_link(def.into()); - write!(f, "{}", db.enum_variant_data(e).name.display(f.db.upcast()))? + write!( + f, + "{}", + db.enum_variant_data(e).name.display(f.db.upcast(), f.edition()) + )? 
} }; f.end_location_link(); @@ -1053,13 +1079,13 @@ impl HirDisplay for Ty { TyKind::Adt(AdtId(def_id), parameters) => { f.start_location_link((*def_id).into()); match f.display_target { - DisplayTarget::Diagnostics | DisplayTarget::Test => { + DisplayTarget::Diagnostics { .. } | DisplayTarget::Test => { let name = match *def_id { hir_def::AdtId::StructId(it) => db.struct_data(it).name.clone(), hir_def::AdtId::UnionId(it) => db.union_data(it).name.clone(), hir_def::AdtId::EnumId(it) => db.enum_data(it).name.clone(), }; - write!(f, "{}", name.display(f.db.upcast()))?; + write!(f, "{}", name.display(f.db.upcast(), f.edition()))?; } DisplayTarget::SourceCode { module_id, allow_opaque: _ } => { if let Some(path) = find_path::find_path( @@ -1075,7 +1101,7 @@ impl HirDisplay for Ty { prefer_absolute: false, }, ) { - write!(f, "{}", path.display(f.db.upcast()))?; + write!(f, "{}", path.display(f.db.upcast(), f.edition()))?; } else { return Err(HirDisplayError::DisplaySourceCodeError( DisplaySourceCodeError::PathNotFound, @@ -1101,12 +1127,12 @@ impl HirDisplay for Ty { // Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types) if f.display_target.is_test() { f.start_location_link(trait_.into()); - write!(f, "{}", trait_data.name.display(f.db.upcast()))?; + write!(f, "{}", trait_data.name.display(f.db.upcast(), f.edition()))?; f.end_location_link(); write!(f, "::")?; f.start_location_link(type_alias.into()); - write!(f, "{}", type_alias_data.name.display(f.db.upcast()))?; + write!(f, "{}", type_alias_data.name.display(f.db.upcast(), f.edition()))?; f.end_location_link(); // Note that the generic args for the associated type come before those for the // trait (including the self type). @@ -1124,7 +1150,7 @@ impl HirDisplay for Ty { let alias = from_foreign_def_id(*type_alias); let type_alias = db.type_alias_data(alias); f.start_location_link(alias.into()); - write!(f, "{}", type_alias.name.display(f.db.upcast()))?; + write!(f, "{}", type_alias.name.display(f.db.upcast(), f.edition()))?; f.end_location_link(); } TyKind::OpaqueType(opaque_ty_id, parameters) => { @@ -1256,7 +1282,10 @@ impl HirDisplay for Ty { write!( f, "{}", - p.name.clone().unwrap_or_else(Name::missing).display(f.db.upcast()) + p.name + .clone() + .unwrap_or_else(Name::missing) + .display(f.db.upcast(), f.edition()) )? } TypeParamProvenance::ArgumentImplTrait => { @@ -1289,7 +1318,7 @@ impl HirDisplay for Ty { } }, TypeOrConstParamData::ConstParamData(p) => { - write!(f, "{}", p.name.display(f.db.upcast()))?; + write!(f, "{}", p.name.display(f.db.upcast(), f.edition()))?; } } } @@ -1632,7 +1661,7 @@ fn write_bounds_like_dyn_trait( // existential) here, which is the only thing that's // possible in actual Rust, and hence don't print it f.start_location_link(trait_.into()); - write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?; + write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast(), f.edition()))?; f.end_location_link(); if is_fn_trait { if let [self_, params @ ..] 
= trait_ref.substitution.as_slice(Interner) { @@ -1706,7 +1735,7 @@ fn write_bounds_like_dyn_trait( let assoc_ty_id = from_assoc_type_id(proj.associated_ty_id); let type_alias = f.db.type_alias_data(assoc_ty_id); f.start_location_link(assoc_ty_id.into()); - write!(f, "{}", type_alias.name.display(f.db.upcast()))?; + write!(f, "{}", type_alias.name.display(f.db.upcast(), f.edition()))?; f.end_location_link(); let proj_arg_count = generics(f.db.upcast(), assoc_ty_id.into()).len_self(); @@ -1770,7 +1799,7 @@ fn fmt_trait_ref( } let trait_ = tr.hir_trait_id(); f.start_location_link(trait_.into()); - write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?; + write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast(), f.edition()))?; f.end_location_link(); let substs = tr.substitution.as_slice(Interner); hir_fmt_generics(f, &substs[1..], None, substs[0].ty(Interner)) @@ -1796,7 +1825,11 @@ impl HirDisplay for WhereClause { write!(f, ">::",)?; let type_alias = from_assoc_type_id(projection_ty.associated_ty_id); f.start_location_link(type_alias.into()); - write!(f, "{}", f.db.type_alias_data(type_alias).name.display(f.db.upcast()),)?; + write!( + f, + "{}", + f.db.type_alias_data(type_alias).name.display(f.db.upcast(), f.edition()), + )?; f.end_location_link(); write!(f, " = ")?; ty.hir_fmt(f)?; @@ -1832,7 +1865,7 @@ impl HirDisplay for LifetimeData { let id = lt_from_placeholder_idx(f.db, *idx); let generics = generics(f.db.upcast(), id.parent); let param_data = &generics[id.local_id]; - write!(f, "{}", param_data.name.display(f.db.upcast()))?; + write!(f, "{}", param_data.name.display(f.db.upcast(), f.edition()))?; Ok(()) } _ if f.display_target.is_source_code() => write!(f, "'_"), @@ -1913,7 +1946,7 @@ impl HirDisplay for TypeRef { }; write!(f, "&")?; if let Some(lifetime) = lifetime { - write!(f, "{} ", lifetime.name.display(f.db.upcast()))?; + write!(f, "{} ", lifetime.name.display(f.db.upcast(), f.edition()))?; } write!(f, "{mutability}")?; inner.hir_fmt(f)?; @@ -1921,7 +1954,7 @@ impl HirDisplay for TypeRef { TypeRef::Array(inner, len) => { write!(f, "[")?; inner.hir_fmt(f)?; - write!(f, "; {}]", len.display(f.db.upcast()))?; + write!(f, "; {}]", len.display(f.db.upcast(), f.edition()))?; } TypeRef::Slice(inner) => { write!(f, "[")?; @@ -1942,7 +1975,7 @@ impl HirDisplay for TypeRef { for index in 0..function_parameters.len() { let (param_name, param_type) = &function_parameters[index]; if let Some(name) = param_name { - write!(f, "{}: ", name.display(f.db.upcast()))?; + write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?; } param_type.hir_fmt(f)?; @@ -2000,12 +2033,15 @@ impl HirDisplay for TypeBound { } path.hir_fmt(f) } - TypeBound::Lifetime(lifetime) => write!(f, "{}", lifetime.name.display(f.db.upcast())), + TypeBound::Lifetime(lifetime) => { + write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition())) + } TypeBound::ForLifetime(lifetimes, path) => { + let edition = f.edition(); write!( f, "for<{}> ", - lifetimes.iter().map(|it| it.display(f.db.upcast())).format(", ") + lifetimes.iter().map(|it| it.display(f.db.upcast(), edition)).format(", ") )?; path.hir_fmt(f) } @@ -2071,7 +2107,7 @@ impl HirDisplay for Path { if !matches!(self.kind(), PathKind::Plain) || seg_idx > 0 { write!(f, "::")?; } - write!(f, "{}", segment.name.display(f.db.upcast()))?; + write!(f, "{}", segment.name.display(f.db.upcast(), f.edition()))?; if let Some(generic_args) = segment.args_and_bindings { // We should be in type context, so format as `Foo` instead of `Foo::`. 
// Do we actually format expressions? @@ -2116,7 +2152,7 @@ impl HirDisplay for Path { } else { write!(f, ", ")?; } - write!(f, "{}", binding.name.display(f.db.upcast()))?; + write!(f, "{}", binding.name.display(f.db.upcast(), f.edition()))?; match &binding.type_ref { Some(ty) => { write!(f, " = ")?; @@ -2150,9 +2186,11 @@ impl HirDisplay for hir_def::path::GenericArg { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { match self { hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f), - hir_def::path::GenericArg::Const(c) => write!(f, "{}", c.display(f.db.upcast())), + hir_def::path::GenericArg::Const(c) => { + write!(f, "{}", c.display(f.db.upcast(), f.edition())) + } hir_def::path::GenericArg::Lifetime(lifetime) => { - write!(f, "{}", lifetime.name.display(f.db.upcast())) + write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition())) } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index 034ed2d691b4f..03072790d026c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -255,7 +255,9 @@ impl CapturedItem { pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String { let body = db.body(owner); - let mut result = body[self.place.local].name.display(db.upcast()).to_string(); + let krate = owner.krate(db.upcast()); + let edition = db.crate_graph()[krate].edition; + let mut result = body[self.place.local].name.display(db.upcast(), edition).to_string(); let mut field_need_paren = false; for proj in &self.place.projections { match proj { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs index 8cb428a610a8c..f40d508f755a6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs @@ -42,19 +42,20 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result, LayoutErro hir_def::ModuleDefId::AdtId(x) => { let name = match x { hir_def::AdtId::StructId(x) => { - db.struct_data(x).name.display_no_db().to_smolstr() + db.struct_data(x).name.display_no_db(file_id.edition()).to_smolstr() } hir_def::AdtId::UnionId(x) => { - db.union_data(x).name.display_no_db().to_smolstr() + db.union_data(x).name.display_no_db(file_id.edition()).to_smolstr() } hir_def::AdtId::EnumId(x) => { - db.enum_data(x).name.display_no_db().to_smolstr() + db.enum_data(x).name.display_no_db(file_id.edition()).to_smolstr() } }; (name == "Goal").then_some(Either::Left(x)) } hir_def::ModuleDefId::TypeAliasId(x) => { - let name = db.type_alias_data(x).name.display_no_db().to_smolstr(); + let name = + db.type_alias_data(x).name.display_no_db(file_id.edition()).to_smolstr(); (name == "Goal").then_some(Either::Right(x)) } _ => None, @@ -94,7 +95,7 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result, LayoutErro .declarations() .find_map(|x| match x { hir_def::ModuleDefId::FunctionId(x) => { - let name = db.function_data(x).name.display_no_db().to_smolstr(); + let name = db.function_data(x).name.display_no_db(file_id.edition()).to_smolstr(); (name == "main").then_some(x) } _ => None, @@ -104,7 +105,7 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result, LayoutErro let b = hir_body .bindings .iter() - .find(|x| x.1.name.display_no_db().to_smolstr() == "goal") + .find(|x| x.1.name.display_no_db(file_id.edition()).to_smolstr() == "goal") .unwrap() .0; 
let infer = db.infer(function_id.into()); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index 4c9e0a1e11836..46611ae0de926 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -66,6 +66,7 @@ use intern::{sym, Symbol}; use la_arena::{Arena, Idx}; use mir::{MirEvalError, VTableMap}; use rustc_hash::{FxHashMap, FxHashSet}; +use span::Edition; use syntax::ast::{make, ConstArg}; use traits::FnTrait; use triomphe::Arc; @@ -1025,7 +1026,11 @@ where collector.placeholders.into_iter().collect() } -pub fn known_const_to_ast(konst: &Const, db: &dyn HirDatabase) -> Option { +pub fn known_const_to_ast( + konst: &Const, + db: &dyn HirDatabase, + edition: Edition, +) -> Option { if let ConstValue::Concrete(c) = &konst.interned().value { match c.interned { ConstScalar::UnevaluatedConst(GeneralConstId::InTypeConstId(cid), _) => { @@ -1035,5 +1040,5 @@ pub fn known_const_to_ast(konst: &Const, db: &dyn HirDatabase) -> Option (), } } - Some(make::expr_const_value(konst.display(db).to_string().as_str())) + Some(make::expr_const_value(konst.display(db, edition).to_string().as_str())) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs index 06a4236e0acd2..9c727cb4ed197 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs @@ -158,7 +158,10 @@ impl ProjectionElem { subst.at(Interner, 0).assert_ty_ref(Interner).clone() } _ => { - never!("Overloaded deref on type {} is not a projection", base.display(db)); + never!( + "Overloaded deref on type {} is not a projection", + base.display(db, db.crate_graph()[krate].edition) + ); TyKind::Error.intern(Interner) } }, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index f8083e898588e..590cb3952ef32 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -23,7 +23,7 @@ use rustc_apfloat::{ Float, }; use rustc_hash::{FxHashMap, FxHashSet}; -use span::FileId; +use span::{Edition, FileId}; use stdx::never; use syntax::{SyntaxNodePtr, TextRange}; use triomphe::Arc; @@ -358,6 +358,7 @@ impl MirEvalError { f: &mut String, db: &dyn HirDatabase, span_formatter: impl Fn(FileId, TextRange) -> String, + edition: Edition, ) -> std::result::Result<(), std::fmt::Error> { writeln!(f, "Mir eval error:")?; let mut err = self; @@ -370,7 +371,7 @@ impl MirEvalError { writeln!( f, "In function {} ({:?})", - function_name.name.display(db.upcast()), + function_name.name.display(db.upcast(), edition), func )?; } @@ -415,7 +416,7 @@ impl MirEvalError { write!( f, "Layout for type `{}` is not available due {err:?}", - ty.display(db).with_closure_style(ClosureStyle::ClosureWithId) + ty.display(db, edition).with_closure_style(ClosureStyle::ClosureWithId) )?; } MirEvalError::MirLowerError(func, err) => { @@ -423,16 +424,17 @@ impl MirEvalError { writeln!( f, "MIR lowering for function `{}` ({:?}) failed due:", - function_name.name.display(db.upcast()), + function_name.name.display(db.upcast(), edition), func )?; - err.pretty_print(f, db, span_formatter)?; + err.pretty_print(f, db, span_formatter, edition)?; } MirEvalError::ConstEvalError(name, err) => { MirLowerError::ConstEvalError((**name).into(), err.clone()).pretty_print( f, db, span_formatter, + edition, )?; } MirEvalError::UndefinedBehavior(_) 
@@ -2675,10 +2677,11 @@ impl Evaluator<'_> { let db = self.db.upcast(); let loc = variant.lookup(db); let enum_loc = loc.parent.lookup(db); + let edition = self.db.crate_graph()[self.crate_id].edition; let name = format!( "{}::{}", - enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast()), - loc.id.item_tree(db)[loc.id.value].name.display(db.upcast()), + enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition), + loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition), ); Err(MirEvalError::ConstEvalError(name, Box::new(e))) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index d76f53818716a..0cdad74a4f6c1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -856,7 +856,11 @@ impl Evaluator<'_> { Ok(ty_name) => ty_name, // Fallback to human readable display in case of `Err`. Ideally we want to use `display_source_code` to // render full paths. - Err(_) => ty.display(self.db).to_string(), + Err(_) => { + let krate = locals.body.owner.krate(self.db.upcast()); + let edition = self.db.crate_graph()[krate].edition; + ty.display(self.db, edition).to_string() + } }; let len = ty_name.len(); let addr = self.heap_allocate(len, 1)?; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs index b21a401fa764d..371a5278dc9a5 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs @@ -1,5 +1,5 @@ use hir_def::db::DefDatabase; -use span::EditionedFileId; +use span::{Edition, EditionedFileId}; use syntax::{TextRange, TextSize}; use test_fixture::WithFixture; @@ -15,7 +15,7 @@ fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String), .declarations() .find_map(|x| match x { hir_def::ModuleDefId::FunctionId(x) => { - if db.function_data(x).name.display(db).to_string() == "main" { + if db.function_data(x).name.display(db, Edition::CURRENT).to_string() == "main" { Some(x) } else { None @@ -63,7 +63,7 @@ fn check_pass_and_stdio(ra_fixture: &str, expected_stdout: &str, expected_stderr let span_formatter = |file, range: TextRange| { format!("{:?} {:?}..{:?}", file, line_index(range.start()), line_index(range.end())) }; - e.pretty_print(&mut err, &db, span_formatter).unwrap(); + e.pretty_print(&mut err, &db, span_formatter, Edition::CURRENT).unwrap(); panic!("Error in interpreting: {err}"); } Ok((stdout, stderr)) => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 9aa2eeebc1757..a15549f9f9ba5 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -21,7 +21,7 @@ use hir_expand::name::Name; use la_arena::ArenaMap; use rustc_apfloat::Float; use rustc_hash::FxHashMap; -use span::FileId; +use span::{Edition, FileId}; use syntax::TextRange; use triomphe::Arc; @@ -157,13 +157,18 @@ impl MirLowerError { f: &mut String, db: &dyn HirDatabase, span_formatter: impl Fn(FileId, TextRange) -> String, + edition: Edition, ) -> std::result::Result<(), std::fmt::Error> { match self { MirLowerError::ConstEvalError(name, e) => { writeln!(f, "In evaluating constant {name}")?; match &**e { - ConstEvalError::MirLowerError(e) => e.pretty_print(f, db, span_formatter)?, - 
ConstEvalError::MirEvalError(e) => e.pretty_print(f, db, span_formatter)?, + ConstEvalError::MirLowerError(e) => { + e.pretty_print(f, db, span_formatter, edition)? + } + ConstEvalError::MirEvalError(e) => { + e.pretty_print(f, db, span_formatter, edition)? + } } } MirLowerError::MissingFunctionDefinition(owner, it) => { @@ -171,15 +176,15 @@ impl MirLowerError { writeln!( f, "Missing function definition for {}", - body.pretty_print_expr(db.upcast(), *owner, *it) + body.pretty_print_expr(db.upcast(), *owner, *it, edition) )?; } MirLowerError::TypeMismatch(e) => match e { Some(e) => writeln!( f, "Type mismatch: Expected {}, found {}", - e.expected.display(db), - e.actual.display(db), + e.expected.display(db, edition), + e.actual.display(db, edition), )?, None => writeln!(f, "Type mismatch: types mismatch with {{unknown}}",)?, }, @@ -189,11 +194,11 @@ impl MirLowerError { writeln!( f, "Generic arg not provided for {}", - param.name().unwrap_or(&Name::missing()).display(db.upcast()) + param.name().unwrap_or(&Name::missing()).display(db.upcast(), edition) )?; writeln!(f, "Provided args: [")?; for g in subst.iter(Interner) { - write!(f, " {},", g.display(db))?; + write!(f, " {},", g.display(db, edition))?; } writeln!(f, "]")?; } @@ -242,8 +247,8 @@ impl From for MirLowerError { } impl MirLowerError { - fn unresolved_path(db: &dyn HirDatabase, p: &Path) -> Self { - Self::UnresolvedName(p.display(db).to_string()) + fn unresolved_path(db: &dyn HirDatabase, p: &Path, edition: Edition) -> Self { + Self::UnresolvedName(p.display(db, edition).to_string()) } } @@ -436,7 +441,8 @@ impl<'ctx> MirLowerCtx<'ctx> { VariantId::UnionId(_) => implementation_error!("Union variant as path"), } } else { - let unresolved_name = || MirLowerError::unresolved_path(self.db, p); + let unresolved_name = + || MirLowerError::unresolved_path(self.db, p, self.edition()); let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id); resolver .resolve_path_in_value_ns_fully(self.db.upcast(), p) @@ -662,7 +668,7 @@ impl<'ctx> MirLowerCtx<'ctx> { let (func_id, generic_args) = self.infer.method_resolution(expr_id).ok_or_else(|| { MirLowerError::UnresolvedMethod( - method_name.display(self.db.upcast()).to_string(), + method_name.display(self.db.upcast(), self.edition()).to_string(), ) })?; let func = Operand::from_fn(self.db, func_id, generic_args); @@ -803,7 +809,9 @@ impl<'ctx> MirLowerCtx<'ctx> { }; let variant_id = self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path { - Some(p) => MirLowerError::UnresolvedName(p.display(self.db).to_string()), + Some(p) => MirLowerError::UnresolvedName( + p.display(self.db, self.edition()).to_string(), + ), None => MirLowerError::RecordLiteralWithoutPath, })?; let subst = match self.expr_ty_without_adjust(expr_id).kind(Interner) { @@ -1378,7 +1386,9 @@ impl<'ctx> MirLowerCtx<'ctx> { "only `char` and numeric types are allowed in range patterns" ), }; - let unresolved_name = || MirLowerError::unresolved_path(self.db, c.as_ref()); + let edition = self.edition(); + let unresolved_name = + || MirLowerError::unresolved_path(self.db, c.as_ref(), edition); let resolver = self.owner.resolver(self.db.upcast()); let pr = resolver .resolve_path_in_value_ns(self.db.upcast(), c.as_ref()) @@ -1904,19 +1914,25 @@ impl<'ctx> MirLowerCtx<'ctx> { match r { Ok(r) => Ok(r), Err(e) => { + let edition = self.edition(); let db = self.db.upcast(); let loc = variant.lookup(db); let enum_loc = loc.parent.lookup(db); let name = format!( "{}::{}", - 
enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast()), - loc.id.item_tree(db)[loc.id.value].name.display(db.upcast()), + enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition), + loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition), ); Err(MirLowerError::ConstEvalError(name.into(), Box::new(e))) } } } + fn edition(&self) -> Edition { + let krate = self.owner.krate(self.db.upcast()); + self.db.crate_graph()[krate].edition + } + fn drop_until_scope( &mut self, scope_index: usize, @@ -2121,18 +2137,24 @@ pub fn mir_body_for_closure_query( } pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result> { + let krate = def.krate(db.upcast()); + let edition = db.crate_graph()[krate].edition; let detail = match def { - DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(), - DefWithBodyId::StaticId(it) => db.static_data(it).name.display(db.upcast()).to_string(), + DefWithBodyId::FunctionId(it) => { + db.function_data(it).name.display(db.upcast(), edition).to_string() + } + DefWithBodyId::StaticId(it) => { + db.static_data(it).name.display(db.upcast(), edition).to_string() + } DefWithBodyId::ConstId(it) => db .const_data(it) .name .clone() .unwrap_or_else(Name::missing) - .display(db.upcast()) + .display(db.upcast(), edition) .to_string(), DefWithBodyId::VariantId(it) => { - db.enum_variant_data(it).name.display(db.upcast()).to_string() + db.enum_variant_data(it).name.display(db.upcast(), edition).to_string() } DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"), }; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index 34e0f30afb7fe..b1c0d1f2b3901 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -347,7 +347,8 @@ impl MirLowerCtx<'_> { // A const don't bind anything. Only needs check. 
return Ok((current, current_else)); } - let unresolved_name = || MirLowerError::unresolved_path(self.db, p); + let unresolved_name = + || MirLowerError::unresolved_path(self.db, p, self.edition()); let resolver = self.owner.resolver(self.db.upcast()); let pr = resolver .resolve_path_in_value_ns(self.db.upcast(), p) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs index 0c641d7c6c2bc..df56071aa9af6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs @@ -9,6 +9,7 @@ use either::Either; use hir_def::{body::Body, hir::BindingId}; use hir_expand::{name::Name, Lookup}; use la_arena::ArenaMap; +use span::Edition; use crate::{ db::HirDatabase, @@ -44,18 +45,21 @@ impl MirBody { ctx.for_body(|this| match ctx.body.owner { hir_def::DefWithBodyId::FunctionId(id) => { let data = db.function_data(id); - w!(this, "fn {}() ", data.name.display(db.upcast())); + w!(this, "fn {}() ", data.name.display(db.upcast(), Edition::LATEST)); } hir_def::DefWithBodyId::StaticId(id) => { let data = db.static_data(id); - w!(this, "static {}: _ = ", data.name.display(db.upcast())); + w!(this, "static {}: _ = ", data.name.display(db.upcast(), Edition::LATEST)); } hir_def::DefWithBodyId::ConstId(id) => { let data = db.const_data(id); w!( this, "const {}: _ = ", - data.name.as_ref().unwrap_or(&Name::missing()).display(db.upcast()) + data.name + .as_ref() + .unwrap_or(&Name::missing()) + .display(db.upcast(), Edition::LATEST) ); } hir_def::DefWithBodyId::VariantId(id) => { @@ -64,8 +68,12 @@ impl MirBody { w!( this, "enum {}::{} = ", - enum_loc.id.item_tree(db.upcast())[enum_loc.id.value].name.display(db.upcast()), - loc.id.item_tree(db.upcast())[loc.id.value].name.display(db.upcast()), + enum_loc.id.item_tree(db.upcast())[enum_loc.id.value] + .name + .display(db.upcast(), Edition::LATEST), + loc.id.item_tree(db.upcast())[loc.id.value] + .name + .display(db.upcast(), Edition::LATEST), ) } hir_def::DefWithBodyId::InTypeConstId(id) => { @@ -122,7 +130,7 @@ impl HirDisplay for LocalName { match self { LocalName::Unknown(l) => write!(f, "_{}", u32::from(l.into_raw())), LocalName::Binding(n, l) => { - write!(f, "{}_{}", n.display(f.db.upcast()), u32::from(l.into_raw())) + write!(f, "{}_{}", n.display(f.db.upcast(), f.edition()), u32::from(l.into_raw())) } } } @@ -200,7 +208,7 @@ impl<'a> MirPrettyCtx<'a> { wln!( self, "let {}: {};", - self.local_name(id).display(self.db), + self.local_name(id).display_test(self.db), self.hir_display(&local.ty) ); } @@ -231,10 +239,18 @@ impl<'a> MirPrettyCtx<'a> { wln!(this, ";"); } StatementKind::StorageDead(p) => { - wln!(this, "StorageDead({})", this.local_name(*p).display(self.db)); + wln!( + this, + "StorageDead({})", + this.local_name(*p).display_test(self.db) + ); } StatementKind::StorageLive(p) => { - wln!(this, "StorageLive({})", this.local_name(*p).display(self.db)); + wln!( + this, + "StorageLive({})", + this.local_name(*p).display_test(self.db) + ); } StatementKind::Deinit(p) => { w!(this, "Deinit("); @@ -297,7 +313,7 @@ impl<'a> MirPrettyCtx<'a> { fn f(this: &mut MirPrettyCtx<'_>, local: LocalId, projections: &[PlaceElem]) { let Some((last, head)) = projections.split_last() else { // no projection - w!(this, "{}", this.local_name(local).display(this.db)); + w!(this, "{}", this.local_name(local).display_test(this.db)); return; }; match last { @@ -317,13 +333,13 @@ impl<'a> MirPrettyCtx<'a> { w!( this, " as {}).{}", - 
variant_name.display(this.db.upcast()), - name.display(this.db.upcast()) + variant_name.display(this.db.upcast(), Edition::LATEST), + name.display(this.db.upcast(), Edition::LATEST) ); } hir_def::VariantId::StructId(_) | hir_def::VariantId::UnionId(_) => { f(this, local, head); - w!(this, ".{}", name.display(this.db.upcast())); + w!(this, ".{}", name.display(this.db.upcast(), Edition::LATEST)); } } } @@ -337,7 +353,7 @@ impl<'a> MirPrettyCtx<'a> { } ProjectionElem::Index(l) => { f(this, local, head); - w!(this, "[{}]", this.local_name(*l).display(this.db)); + w!(this, "[{}]", this.local_name(*l).display_test(this.db)); } it => { f(this, local, head); @@ -387,7 +403,7 @@ impl<'a> MirPrettyCtx<'a> { Rvalue::Repeat(op, len) => { w!(self, "["); self.operand(op); - w!(self, "; {}]", len.display(self.db)); + w!(self, "; {}]", len.display_test(self.db)); } Rvalue::Aggregate(AggregateKind::Adt(_, _), it) => { w!(self, "Adt("); @@ -458,6 +474,6 @@ impl<'a> MirPrettyCtx<'a> { } fn hir_display(&self, ty: &'a T) -> impl Display + 'a { - ty.display(self.db).with_closure_style(ClosureStyle::ClosureWithSubst) + ty.display_test(self.db).with_closure_style(ClosureStyle::ClosureWithSubst) } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs index db5fa3205778d..6cb59491fac82 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tls.rs @@ -2,6 +2,7 @@ use std::fmt::{self, Display}; use itertools::Itertools; +use span::Edition; use crate::{ chalk_db, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, mapping::from_chalk, @@ -24,7 +25,7 @@ impl DebugContext<'_> { AdtId::UnionId(it) => self.0.union_data(it).name.clone(), AdtId::EnumId(it) => self.0.enum_data(it).name.clone(), }; - name.display(self.0.upcast()).fmt(f)?; + name.display(self.0.upcast(), Edition::LATEST).fmt(f)?; Ok(()) } @@ -35,7 +36,7 @@ impl DebugContext<'_> { ) -> Result<(), fmt::Error> { let trait_: hir_def::TraitId = from_chalk_trait_id(id); let trait_data = self.0.trait_data(trait_); - trait_data.name.display(self.0.upcast()).fmt(f)?; + trait_data.name.display(self.0.upcast(), Edition::LATEST).fmt(f)?; Ok(()) } @@ -54,8 +55,8 @@ impl DebugContext<'_> { write!( fmt, "{}::{}", - trait_data.name.display(self.0.upcast()), - type_alias_data.name.display(self.0.upcast()) + trait_data.name.display(self.0.upcast(), Edition::LATEST), + type_alias_data.name.display(self.0.upcast(), Edition::LATEST) )?; Ok(()) } @@ -75,7 +76,7 @@ impl DebugContext<'_> { let trait_ref = projection_ty.trait_ref(self.0); let trait_params = trait_ref.substitution.as_slice(Interner); let self_ty = trait_ref.self_type_parameter(Interner); - write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0.upcast()))?; + write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0.upcast(), Edition::LATEST))?; if trait_params.len() > 1 { write!( fmt, @@ -83,7 +84,7 @@ impl DebugContext<'_> { trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))), )?; } - write!(fmt, ">::{}", type_alias_data.name.display(self.0.upcast()))?; + write!(fmt, ">::{}", type_alias_data.name.display(self.0.upcast(), Edition::LATEST))?; let proj_params_count = projection_ty.substitution.len(Interner) - trait_params.len(); let proj_params = &projection_ty.substitution.as_slice(Interner)[..proj_params_count]; @@ -110,9 +111,11 @@ impl DebugContext<'_> { CallableDefId::EnumVariantId(e) => self.0.enum_variant_data(e).name.clone(), }; match def { - 
CallableDefId::FunctionId(_) => write!(fmt, "{{fn {}}}", name.display(self.0.upcast())), + CallableDefId::FunctionId(_) => { + write!(fmt, "{{fn {}}}", name.display(self.0.upcast(), Edition::LATEST)) + } CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => { - write!(fmt, "{{ctor {}}}", name.display(self.0.upcast())) + write!(fmt, "{{ctor {}}}", name.display(self.0.upcast(), Edition::LATEST)) } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs index 00791c51491d7..51ccd4ef293f9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs @@ -14,6 +14,7 @@ use hir_def::{ }; use hir_expand::name::Name; use intern::sym; +use span::Edition; use stdx::{never, panic_context}; use triomphe::Arc; @@ -114,7 +115,7 @@ pub(crate) fn trait_solve_query( ) -> Option { let detail = match &goal.value.goal.data(Interner) { GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => { - db.trait_data(it.hir_trait_id()).name.display(db.upcast()).to_string() + db.trait_data(it.hir_trait_id()).name.display(db.upcast(), Edition::LATEST).to_string() } GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(), _ => "??".to_owned(), diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs index 02d92620e05b6..af60c233e5519 100644 --- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs @@ -328,11 +328,9 @@ fn doc_modpath_from_str(link: &str) -> Option { }; let parts = first_segment.into_iter().chain(parts).map(|segment| match segment.parse() { Ok(idx) => Name::new_tuple_field(idx), - Err(_) => Name::new( - segment.split_once('<').map_or(segment, |it| it.0), - tt::IdentIsRaw::No, - SyntaxContextId::ROOT, - ), + Err(_) => { + Name::new(segment.split_once('<').map_or(segment, |it| it.0), SyntaxContextId::ROOT) + } }); Some(ModPath::from_segments(kind, parts)) }; diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs index 12dd8b5bf4f5d..923dca646673c 100644 --- a/src/tools/rust-analyzer/crates/hir/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir/src/display.rs @@ -84,7 +84,7 @@ impl HirDisplay for Function { if let Some(abi) = &data.abi { write!(f, "extern \"{}\" ", abi.as_str())?; } - write!(f, "fn {}", data.name.display(f.db.upcast()))?; + write!(f, "fn {}", data.name.display(f.db.upcast(), f.edition()))?; write_generic_params(GenericDefId::FunctionId(self.id), f)?; @@ -107,7 +107,7 @@ impl HirDisplay for Function { first = false; } match local { - Some(name) => write!(f, "{}: ", name.display(f.db.upcast()))?, + Some(name) => write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?, None => f.write_str("_: ")?, } type_ref.hir_fmt(f)?; @@ -177,7 +177,7 @@ fn write_impl_header(impl_: &Impl, f: &mut HirFormatter<'_>) -> Result<(), HirDi if let Some(trait_) = impl_.trait_(db) { let trait_data = db.trait_data(trait_.id); - write!(f, " {} for", trait_data.name.display(db.upcast()))?; + write!(f, " {} for", trait_data.name.display(db.upcast(), f.edition()))?; } f.write_char(' ')?; @@ -196,7 +196,7 @@ impl HirDisplay for SelfParam { { f.write_char('&')?; if let Some(lifetime) = lifetime { - write!(f, "{} ", lifetime.name.display(f.db.upcast()))?; + write!(f, "{} ", lifetime.name.display(f.db.upcast(), f.edition()))?; } if let hir_def::type_ref::Mutability::Mut = 
mut_ { f.write_str("mut ")?; @@ -227,7 +227,7 @@ impl HirDisplay for Struct { // FIXME: Render repr if its set explicitly? write_visibility(module_id, self.visibility(f.db), f)?; f.write_str("struct ")?; - write!(f, "{}", self.name(f.db).display(f.db.upcast()))?; + write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?; let def_id = GenericDefId::AdtId(AdtId::StructId(self.id)); write_generic_params(def_id, f)?; @@ -266,7 +266,7 @@ impl HirDisplay for Enum { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; f.write_str("enum ")?; - write!(f, "{}", self.name(f.db).display(f.db.upcast()))?; + write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?; let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id)); write_generic_params(def_id, f)?; @@ -283,7 +283,7 @@ impl HirDisplay for Union { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; f.write_str("union ")?; - write!(f, "{}", self.name(f.db).display(f.db.upcast()))?; + write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?; let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id)); write_generic_params(def_id, f)?; @@ -343,7 +343,7 @@ fn write_variants( } else { f.write_str("{\n")?; for variant in &variants[..count] { - write!(f, " {}", variant.name(f.db).display(f.db.upcast()))?; + write!(f, " {}", variant.name(f.db).display(f.db.upcast(), f.edition()))?; match variant.kind(f.db) { StructKind::Tuple => { let fields_str = @@ -372,21 +372,21 @@ fn write_variants( impl HirDisplay for Field { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?; - write!(f, "{}: ", self.name(f.db).display(f.db.upcast()))?; + write!(f, "{}: ", self.name(f.db).display(f.db.upcast(), f.edition()))?; self.ty(f.db).hir_fmt(f) } } impl HirDisplay for TupleField { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - write!(f, "pub {}: ", self.name().display(f.db.upcast()))?; + write!(f, "pub {}: ", self.name().display(f.db.upcast(), f.edition()))?; self.ty(f.db).hir_fmt(f) } } impl HirDisplay for Variant { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - write!(f, "{}", self.name(f.db).display(f.db.upcast()))?; + write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?; let data = self.variant_data(f.db); match &*data { VariantData::Unit => {} @@ -424,9 +424,9 @@ impl HirDisplay for ExternCrateDecl { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; f.write_str("extern crate ")?; - write!(f, "{}", self.name(f.db).display(f.db.upcast()))?; + write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?; if let Some(alias) = self.alias(f.db) { - write!(f, " as {alias}",)?; + write!(f, " as {}", alias.display(f.edition()))?; } Ok(()) } @@ -478,7 +478,7 @@ impl HirDisplay for TypeParam { match param_data { TypeOrConstParamData::TypeParamData(p) => match p.provenance { TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => { - write!(f, "{}", p.name.clone().unwrap().display(f.db.upcast()))? + write!(f, "{}", p.name.clone().unwrap().display(f.db.upcast(), f.edition()))? 
} TypeParamProvenance::ArgumentImplTrait => { return write_bounds_like_dyn_trait_with_prefix( @@ -491,7 +491,7 @@ impl HirDisplay for TypeParam { } }, TypeOrConstParamData::ConstParamData(p) => { - write!(f, "{}", p.name.display(f.db.upcast()))?; + write!(f, "{}", p.name.display(f.db.upcast(), f.edition()))?; } } @@ -525,13 +525,13 @@ impl HirDisplay for TypeParam { impl HirDisplay for LifetimeParam { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - write!(f, "{}", self.name(f.db).display(f.db.upcast())) + write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition())) } } impl HirDisplay for ConstParam { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - write!(f, "const {}: ", self.name(f.db).display(f.db.upcast()))?; + write!(f, "const {}: ", self.name(f.db).display(f.db.upcast(), f.edition()))?; self.ty(f.db).hir_fmt(f) } } @@ -563,7 +563,7 @@ fn write_generic_params( }; for (_, lifetime) in params.iter_lt() { delim(f)?; - write!(f, "{}", lifetime.name.display(f.db.upcast()))?; + write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition()))?; } for (_, ty) in params.iter_type_or_consts() { if let Some(name) = &ty.name() { @@ -573,7 +573,7 @@ fn write_generic_params( continue; } delim(f)?; - write!(f, "{}", name.display(f.db.upcast()))?; + write!(f, "{}", name.display(f.db.upcast(), f.edition()))?; if let Some(default) = &ty.default { f.write_str(" = ")?; default.hir_fmt(f)?; @@ -581,12 +581,12 @@ fn write_generic_params( } TypeOrConstParamData::ConstParamData(c) => { delim(f)?; - write!(f, "const {}: ", name.display(f.db.upcast()))?; + write!(f, "const {}: ", name.display(f.db.upcast(), f.edition()))?; c.ty.hir_fmt(f)?; if let Some(default) = &c.default { f.write_str(" = ")?; - write!(f, "{}", default.display(f.db.upcast()))?; + write!(f, "{}", default.display(f.db.upcast(), f.edition()))?; } } } @@ -639,7 +639,7 @@ fn write_where_predicates( let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target { WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f), WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() { - Some(name) => write!(f, "{}", name.display(f.db.upcast())), + Some(name) => write!(f, "{}", name.display(f.db.upcast(), f.edition())), None => f.write_str("{unnamed}"), }, }; @@ -668,12 +668,13 @@ fn write_where_predicates( bound.hir_fmt(f)?; } Lifetime { target, bound } => { - let target = target.name.display(f.db.upcast()); - let bound = bound.name.display(f.db.upcast()); + let target = target.name.display(f.db.upcast(), f.edition()); + let bound = bound.name.display(f.db.upcast(), f.edition()); write!(f, "{target}: {bound}")?; } ForLifetime { lifetimes, target, bound } => { - let lifetimes = lifetimes.iter().map(|it| it.display(f.db.upcast())).join(", "); + let lifetimes = + lifetimes.iter().map(|it| it.display(f.db.upcast(), f.edition())).join(", "); write!(f, "for<{lifetimes}> ")?; write_target(target, f)?; f.write_str(": ")?; @@ -685,7 +686,9 @@ fn write_where_predicates( f.write_str(" + ")?; match nxt { TypeBound { bound, .. } | ForLifetime { bound, .. } => bound.hir_fmt(f)?, - Lifetime { bound, .. } => write!(f, "{}", bound.name.display(f.db.upcast()))?, + Lifetime { bound, .. } => { + write!(f, "{}", bound.name.display(f.db.upcast(), f.edition()))? 
+ } } } f.write_str(",")?; @@ -707,7 +710,7 @@ impl HirDisplay for Const { let data = db.const_data(self.id); f.write_str("const ")?; match &data.name { - Some(name) => write!(f, "{}: ", name.display(f.db.upcast()))?, + Some(name) => write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?, None => f.write_str("_: ")?, } data.type_ref.hir_fmt(f)?; @@ -723,7 +726,7 @@ impl HirDisplay for Static { if data.mutable { f.write_str("mut ")?; } - write!(f, "{}: ", data.name.display(f.db.upcast()))?; + write!(f, "{}: ", data.name.display(f.db.upcast(), f.edition()))?; data.type_ref.hir_fmt(f)?; Ok(()) } @@ -777,7 +780,7 @@ fn write_trait_header(trait_: &Trait, f: &mut HirFormatter<'_>) -> Result<(), Hi if data.is_auto { f.write_str("auto ")?; } - write!(f, "trait {}", data.name.display(f.db.upcast()))?; + write!(f, "trait {}", data.name.display(f.db.upcast(), f.edition()))?; write_generic_params(GenericDefId::TraitId(trait_.id), f)?; Ok(()) } @@ -786,7 +789,7 @@ impl HirDisplay for TraitAlias { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; let data = f.db.trait_alias_data(self.id); - write!(f, "trait {}", data.name.display(f.db.upcast()))?; + write!(f, "trait {}", data.name.display(f.db.upcast(), f.edition()))?; let def_id = GenericDefId::TraitAliasId(self.id); write_generic_params(def_id, f)?; f.write_str(" = ")?; @@ -802,7 +805,7 @@ impl HirDisplay for TypeAlias { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { write_visibility(self.module(f.db).id, self.visibility(f.db), f)?; let data = f.db.type_alias_data(self.id); - write!(f, "type {}", data.name.display(f.db.upcast()))?; + write!(f, "type {}", data.name.display(f.db.upcast(), f.edition()))?; let def_id = GenericDefId::TypeAliasId(self.id); write_generic_params(def_id, f)?; if !data.bounds.is_empty() { @@ -822,7 +825,7 @@ impl HirDisplay for Module { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { // FIXME: Module doesn't have visibility saved in data. 
match self.name(f.db) { - Some(name) => write!(f, "mod {}", name.display(f.db.upcast())), + Some(name) => write!(f, "mod {}", name.display(f.db.upcast(), f.edition())), None if self.is_crate_root() => match self.krate(f.db).display_name(f.db) { Some(name) => write!(f, "extern crate {name}"), None => f.write_str("extern crate {unknown}"), @@ -839,6 +842,6 @@ impl HirDisplay for Macro { hir_def::MacroId::MacroRulesId(_) => f.write_str("macro_rules!"), hir_def::MacroId::ProcMacroId(_) => f.write_str("proc_macro"), }?; - write!(f, " {}", self.name(f.db).display(f.db.upcast())) + write!(f, " {}", self.name(f.db).display(f.db.upcast(), f.edition())) } } diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 1a3becdf50e9b..57f810c7c79c7 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -340,13 +340,13 @@ impl ModuleDef { } } - pub fn canonical_path(&self, db: &dyn HirDatabase) -> Option { + pub fn canonical_path(&self, db: &dyn HirDatabase, edition: Edition) -> Option { let mut segments = vec![self.name(db)?]; for m in self.module(db)?.path_to_root(db) { segments.extend(m.name(db)) } segments.reverse(); - Some(segments.iter().map(|it| it.display(db.upcast())).join("::")) + Some(segments.iter().map(|it| it.display(db.upcast(), edition)).join("::")) } pub fn canonical_module_path( @@ -556,13 +556,14 @@ impl Module { style_lints: bool, ) { let _p = tracing::info_span!("Module::diagnostics", name = ?self.name(db)).entered(); + let edition = db.crate_graph()[self.id.krate()].edition; let def_map = self.id.def_map(db.upcast()); for diag in def_map.diagnostics() { if diag.in_module != self.id.local_id { // FIXME: This is accidentally quadratic. 
continue; } - emit_def_diagnostic(db, acc, diag); + emit_def_diagnostic(db, acc, diag, edition); } if !self.id.is_block_module() { @@ -582,7 +583,7 @@ impl Module { } ModuleDef::Trait(t) => { for diag in db.trait_data_with_diagnostics(t.id).1.iter() { - emit_def_diagnostic(db, acc, diag); + emit_def_diagnostic(db, acc, diag, edition); } for item in t.items(db) { @@ -599,19 +600,19 @@ impl Module { match adt { Adt::Struct(s) => { for diag in db.struct_data_with_diagnostics(s.id).1.iter() { - emit_def_diagnostic(db, acc, diag); + emit_def_diagnostic(db, acc, diag, edition); } } Adt::Union(u) => { for diag in db.union_data_with_diagnostics(u.id).1.iter() { - emit_def_diagnostic(db, acc, diag); + emit_def_diagnostic(db, acc, diag, edition); } } Adt::Enum(e) => { for v in e.variants(db) { acc.extend(ModuleDef::Variant(v).diagnostics(db, style_lints)); for diag in db.enum_variant_data_with_diagnostics(v.id).1.iter() { - emit_def_diagnostic(db, acc, diag); + emit_def_diagnostic(db, acc, diag, edition); } } } @@ -645,7 +646,7 @@ impl Module { let ast_id_map = db.ast_id_map(file_id); for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() { - emit_def_diagnostic(db, acc, diag); + emit_def_diagnostic(db, acc, diag, edition); } if inherent_impls.invalid_impls().contains(&impl_def.id) { @@ -869,23 +870,32 @@ fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec never!("declarative expander for non decl-macro: {:?}", e); return; }; + let krate = HasModule::krate(&m.id, db.upcast()); + let edition = db.crate_graph()[krate].edition; emit_def_diagnostic_( db, acc, &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() }, + edition, ); } } } -fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec, diag: &DefDiagnostic) { - emit_def_diagnostic_(db, acc, &diag.kind) +fn emit_def_diagnostic( + db: &dyn HirDatabase, + acc: &mut Vec, + diag: &DefDiagnostic, + edition: Edition, +) { + emit_def_diagnostic_(db, acc, &diag.kind, edition) } fn emit_def_diagnostic_( db: &dyn HirDatabase, acc: &mut Vec, diag: &DefDiagnosticKind, + edition: Edition, ) { match diag { DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => { @@ -910,7 +920,7 @@ fn emit_def_diagnostic_( MacroError { node: InFile::new(ast.file_id, item.syntax_node_ptr()), precise_location: None, - message: format!("{}: {message}", path.display(db.upcast())), + message: format!("{}: {message}", path.display(db.upcast(), edition)), error, } .into(), @@ -1764,7 +1774,7 @@ impl DefWithBody { /// A textual representation of the HIR of this def's body for debugging purposes. pub fn debug_hir(self, db: &dyn HirDatabase) -> String { let body = db.body(self.id()); - body.pretty_print(db.upcast(), self.id()) + body.pretty_print(db.upcast(), self.id(), Edition::CURRENT) } /// A textual representation of the MIR of this def's body for debugging purposes. 
@@ -2259,6 +2269,8 @@ impl Function { db: &dyn HirDatabase, span_formatter: impl Fn(FileId, TextRange) -> String, ) -> String { + let krate = HasModule::krate(&self.id, db.upcast()); + let edition = db.crate_graph()[krate].edition; let body = match db.monomorphized_mir_body( self.id.into(), Substitution::empty(Interner), @@ -2267,7 +2279,7 @@ impl Function { Ok(body) => body, Err(e) => { let mut r = String::new(); - _ = e.pretty_print(&mut r, db, &span_formatter); + _ = e.pretty_print(&mut r, db, &span_formatter, edition); return r; } }; @@ -2276,7 +2288,7 @@ impl Function { Ok(_) => "pass".to_owned(), Err(e) => { let mut r = String::new(); - _ = e.pretty_print(&mut r, db, &span_formatter); + _ = e.pretty_print(&mut r, db, &span_formatter, edition); r } }; @@ -2510,7 +2522,11 @@ impl Const { Type::from_value_def(db, self.id) } - pub fn render_eval(self, db: &dyn HirDatabase) -> Result { + pub fn render_eval( + self, + db: &dyn HirDatabase, + edition: Edition, + ) -> Result { let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?; let data = &c.data(Interner); if let TyKind::Scalar(s) = data.ty.kind(Interner) { @@ -2532,7 +2548,7 @@ impl Const { if let Ok(s) = mir::render_const_using_debug_impl(db, self.id, &c) { Ok(s) } else { - Ok(format!("{}", c.display(db))) + Ok(format!("{}", c.display(db, edition))) } } } @@ -3728,9 +3744,9 @@ impl ConstParam { Type::new(db, self.id.parent(), db.const_param_ty(self.id)) } - pub fn default(self, db: &dyn HirDatabase) -> Option { + pub fn default(self, db: &dyn HirDatabase, edition: Edition) -> Option { let arg = generic_arg_from_param(db, self.id.into())?; - known_const_to_ast(arg.constant(Interner)?, db) + known_const_to_ast(arg.constant(Interner)?, db, edition) } } @@ -4038,12 +4054,20 @@ impl Closure { TyKind::Closure(self.id, self.subst).intern(Interner) } - pub fn display_with_id(&self, db: &dyn HirDatabase) -> String { - self.clone().as_ty().display(db).with_closure_style(ClosureStyle::ClosureWithId).to_string() + pub fn display_with_id(&self, db: &dyn HirDatabase, edition: Edition) -> String { + self.clone() + .as_ty() + .display(db, edition) + .with_closure_style(ClosureStyle::ClosureWithId) + .to_string() } - pub fn display_with_impl(&self, db: &dyn HirDatabase) -> String { - self.clone().as_ty().display(db).with_closure_style(ClosureStyle::ImplFn).to_string() + pub fn display_with_impl(&self, db: &dyn HirDatabase, edition: Edition) -> String { + self.clone() + .as_ty() + .display(db, edition) + .with_closure_style(ClosureStyle::ImplFn) + .to_string() } pub fn captured_items(&self, db: &dyn HirDatabase) -> Vec { @@ -4704,18 +4728,20 @@ impl Type { pub fn type_and_const_arguments<'a>( &'a self, db: &'a dyn HirDatabase, + edition: Edition, ) -> impl Iterator + 'a { self.ty .strip_references() .as_adt() .into_iter() .flat_map(|(_, substs)| substs.iter(Interner)) - .filter_map(|arg| { + .filter_map(move |arg| { // arg can be either a `Ty` or `constant` if let Some(ty) = arg.ty(Interner) { - Some(format_smolstr!("{}", ty.display(db))) + Some(format_smolstr!("{}", ty.display(db, edition))) } else { - arg.constant(Interner).map(|const_| format_smolstr!("{}", const_.display(db))) + arg.constant(Interner) + .map(|const_| format_smolstr!("{}", const_.display(db, edition))) } }) } @@ -4724,13 +4750,17 @@ impl Type { pub fn generic_parameters<'a>( &'a self, db: &'a dyn HirDatabase, + edition: Edition, ) -> impl Iterator + 'a { // iterate the lifetime self.as_adt() - .and_then(|a| a.lifetime(db).map(|lt| 
lt.name.display_no_db().to_smolstr())) + .and_then(|a| { + // Lifetimes do not need edition-specific handling as they cannot be escaped. + a.lifetime(db).map(|lt| lt.name.display_no_db(Edition::Edition2015).to_smolstr()) + }) .into_iter() // add the type and const parameters - .chain(self.type_and_const_arguments(db)) + .chain(self.type_and_const_arguments(db, edition)) } pub fn iterate_method_candidates_with_traits( diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs index b1f5df681f2c1..cabb7e3db3d92 100644 --- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs +++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs @@ -9,6 +9,7 @@ use hir_def::{ }; use hir_expand::HirFileId; use hir_ty::{db::HirDatabase, display::HirDisplay}; +use span::Edition; use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr}; use crate::{Module, ModuleDef, Semantics}; @@ -54,6 +55,7 @@ pub struct SymbolCollector<'a> { symbols: Vec, work: Vec, current_container_name: Option, + edition: Edition, } /// Given a [`ModuleId`] and a [`HirDatabase`], use the DefMap for the module's crate to collect @@ -65,10 +67,13 @@ impl<'a> SymbolCollector<'a> { symbols: Default::default(), work: Default::default(), current_container_name: None, + edition: Edition::Edition2015, } } pub fn collect(&mut self, module: Module) { + self.edition = module.krate().edition(self.db); + // The initial work is the root module we're collecting, additional work will // be populated as we traverse the module's definitions. self.work.push(SymbolCollectorWork { module_id: module.into(), parent: None }); @@ -209,7 +214,8 @@ impl<'a> SymbolCollector<'a> { fn collect_from_impl(&mut self, impl_id: ImplId) { let impl_data = self.db.impl_data(impl_id); - let impl_name = Some(SmolStr::new(impl_data.self_ty.display(self.db).to_string())); + let impl_name = + Some(SmolStr::new(impl_data.self_ty.display(self.db, self.edition).to_string())); self.with_container_name(impl_name, |s| { for &assoc_item_id in impl_data.items.iter() { s.push_assoc_item(assoc_item_id) @@ -239,16 +245,16 @@ impl<'a> SymbolCollector<'a> { fn def_with_body_id_name(&self, body_id: DefWithBodyId) -> Option { match body_id { DefWithBodyId::FunctionId(id) => { - Some(self.db.function_data(id).name.display_no_db().to_smolstr()) + Some(self.db.function_data(id).name.display_no_db(self.edition).to_smolstr()) } DefWithBodyId::StaticId(id) => { - Some(self.db.static_data(id).name.display_no_db().to_smolstr()) + Some(self.db.static_data(id).name.display_no_db(self.edition).to_smolstr()) } DefWithBodyId::ConstId(id) => { - Some(self.db.const_data(id).name.as_ref()?.display_no_db().to_smolstr()) + Some(self.db.const_data(id).name.as_ref()?.display_no_db(self.edition).to_smolstr()) } DefWithBodyId::VariantId(id) => { - Some(self.db.enum_variant_data(id).name.display_no_db().to_smolstr()) + Some(self.db.enum_variant_data(id).name.display_no_db(self.edition).to_smolstr()) } DefWithBodyId::InTypeConstId(_) => Some("in type const".into()), } diff --git a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs index 0c8f6932c7170..6ad074e8e5c8f 100644 --- a/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs +++ b/src/tools/rust-analyzer/crates/hir/src/term_search/expr.rs @@ -7,6 +7,7 @@ use hir_ty::{ display::{DisplaySourceCodeError, HirDisplay}, }; use itertools::Itertools; +use span::Edition; use crate::{ Adt, AsAssocItem, 
AssocItemContainer, Const, ConstParam, Field, Function, Local, ModuleDef, @@ -29,9 +30,10 @@ fn mod_item_path_str( sema_scope: &SemanticsScope<'_>, def: &ModuleDef, cfg: ImportPathConfig, + edition: Edition, ) -> Result { let path = mod_item_path(sema_scope, def, cfg); - path.map(|it| it.display(sema_scope.db.upcast()).to_string()) + path.map(|it| it.display(sema_scope.db.upcast(), edition).to_string()) .ok_or(DisplaySourceCodeError::PathNotFound) } @@ -97,37 +99,38 @@ impl Expr { sema_scope: &SemanticsScope<'_>, many_formatter: &mut dyn FnMut(&Type) -> String, cfg: ImportPathConfig, + edition: Edition, ) -> Result { let db = sema_scope.db; - let mod_item_path_str = |s, def| mod_item_path_str(s, def, cfg); + let mod_item_path_str = |s, def| mod_item_path_str(s, def, cfg, edition); match self { Expr::Const(it) => match it.as_assoc_item(db).map(|it| it.container(db)) { Some(container) => { - let container_name = container_name(container, sema_scope, cfg)?; + let container_name = container_name(container, sema_scope, cfg, edition)?; let const_name = it .name(db) - .map(|c| c.display(db.upcast()).to_string()) + .map(|c| c.display(db.upcast(), edition).to_string()) .unwrap_or(String::new()); Ok(format!("{container_name}::{const_name}")) } None => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)), }, Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)), - Expr::Local(it) => Ok(it.name(db).display(db.upcast()).to_string()), - Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast()).to_string()), + Expr::Local(it) => Ok(it.name(db).display(db.upcast(), edition).to_string()), + Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast(), edition).to_string()), Expr::FamousType { value, .. } => Ok(value.to_string()), Expr::Function { func, params, .. } => { let args = params .iter() - .map(|f| f.gen_source_code(sema_scope, many_formatter, cfg)) + .map(|f| f.gen_source_code(sema_scope, many_formatter, cfg, edition)) .collect::, DisplaySourceCodeError>>()? .into_iter() .join(", "); match func.as_assoc_item(db).map(|it| it.container(db)) { Some(container) => { - let container_name = container_name(container, sema_scope, cfg)?; - let fn_name = func.name(db).display(db.upcast()).to_string(); + let container_name = container_name(container, sema_scope, cfg, edition)?; + let fn_name = func.name(db).display(db.upcast(), edition).to_string(); Ok(format!("{container_name}::{fn_name}({args})")) } None => { @@ -141,12 +144,13 @@ impl Expr { return Ok(many_formatter(&target.ty(db))); } - let func_name = func.name(db).display(db.upcast()).to_string(); + let func_name = func.name(db).display(db.upcast(), edition).to_string(); let self_param = func.self_param(db).unwrap(); - let target_str = target.gen_source_code(sema_scope, many_formatter, cfg)?; + let target_str = + target.gen_source_code(sema_scope, many_formatter, cfg, edition)?; let args = params .iter() - .map(|f| f.gen_source_code(sema_scope, many_formatter, cfg)) + .map(|f| f.gen_source_code(sema_scope, many_formatter, cfg, edition)) .collect::, DisplaySourceCodeError>>()? .into_iter() .join(", "); @@ -176,7 +180,7 @@ impl Expr { StructKind::Tuple => { let args = params .iter() - .map(|f| f.gen_source_code(sema_scope, many_formatter, cfg)) + .map(|f| f.gen_source_code(sema_scope, many_formatter, cfg, edition)) .collect::, DisplaySourceCodeError>>()? 
.into_iter() .join(", "); @@ -190,8 +194,8 @@ impl Expr { .map(|(a, f)| { let tmp = format!( "{}: {}", - f.name(db).display(db.upcast()), - a.gen_source_code(sema_scope, many_formatter, cfg)? + f.name(db).display(db.upcast(), edition), + a.gen_source_code(sema_scope, many_formatter, cfg, edition)? ); Ok(tmp) }) @@ -211,7 +215,7 @@ impl Expr { StructKind::Tuple => { let args = params .iter() - .map(|a| a.gen_source_code(sema_scope, many_formatter, cfg)) + .map(|a| a.gen_source_code(sema_scope, many_formatter, cfg, edition)) .collect::, DisplaySourceCodeError>>()? .into_iter() .join(", "); @@ -225,8 +229,8 @@ impl Expr { .map(|(a, f)| { let tmp = format!( "{}: {}", - f.name(db).display(db.upcast()), - a.gen_source_code(sema_scope, many_formatter, cfg)? + f.name(db).display(db.upcast(), edition), + a.gen_source_code(sema_scope, many_formatter, cfg, edition)? ); Ok(tmp) }) @@ -244,7 +248,7 @@ impl Expr { Expr::Tuple { params, .. } => { let args = params .iter() - .map(|a| a.gen_source_code(sema_scope, many_formatter, cfg)) + .map(|a| a.gen_source_code(sema_scope, many_formatter, cfg, edition)) .collect::, DisplaySourceCodeError>>()? .into_iter() .join(", "); @@ -256,8 +260,8 @@ impl Expr { return Ok(many_formatter(&expr.ty(db))); } - let strukt = expr.gen_source_code(sema_scope, many_formatter, cfg)?; - let field = field.name(db).display(db.upcast()).to_string(); + let strukt = expr.gen_source_code(sema_scope, many_formatter, cfg, edition)?; + let field = field.name(db).display(db.upcast(), edition).to_string(); Ok(format!("{strukt}.{field}")) } Expr::Reference(expr) => { @@ -265,7 +269,7 @@ impl Expr { return Ok(many_formatter(&expr.ty(db))); } - let inner = expr.gen_source_code(sema_scope, many_formatter, cfg)?; + let inner = expr.gen_source_code(sema_scope, many_formatter, cfg, edition)?; Ok(format!("&{inner}")) } Expr::Many(ty) => Ok(many_formatter(ty)), @@ -353,17 +357,18 @@ fn container_name( container: AssocItemContainer, sema_scope: &SemanticsScope<'_>, cfg: ImportPathConfig, + edition: Edition, ) -> Result { let container_name = match container { crate::AssocItemContainer::Trait(trait_) => { - mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_), cfg)? + mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_), cfg, edition)? } crate::AssocItemContainer::Impl(imp) => { let self_ty = imp.self_ty(sema_scope.db); // Should it be guaranteed that `mod_item_path` always exists? 
match self_ty.as_adt().and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg)) { - Some(path) => path.display(sema_scope.db.upcast()).to_string(), - None => self_ty.display(sema_scope.db).to_string(), + Some(path) => path.display(sema_scope.db.upcast(), edition).to_string(), + None => self_ty.display(sema_scope.db, edition).to_string(), } } }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs index 4cd15f1c755e1..7f8ea44fb12c4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -1,5 +1,8 @@ use hir::HasSource; -use syntax::ast::{self, make, AstNode}; +use syntax::{ + ast::{self, make, AstNode}, + Edition, +}; use crate::{ assist_context::{AssistContext, Assists}, @@ -150,14 +153,22 @@ fn add_missing_impl_members_inner( &missing_items, trait_, &new_impl_def, - target_scope, + &target_scope, ); if let Some(cap) = ctx.config.snippet_cap { let mut placeholder = None; if let DefaultMethods::No = mode { if let ast::AssocItem::Fn(func) = &first_new_item { - if try_gen_trait_body(ctx, func, trait_ref, &impl_def).is_none() { + if try_gen_trait_body( + ctx, + func, + trait_ref, + &impl_def, + target_scope.krate().edition(ctx.sema.db), + ) + .is_none() + { if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) { if m.syntax().text() == "todo!()" { @@ -182,9 +193,11 @@ fn try_gen_trait_body( func: &ast::Fn, trait_ref: hir::TraitRef, impl_def: &ast::Impl, + edition: Edition, ) -> Option<()> { - let trait_path = - make::ext::ident_path(&trait_ref.trait_().name(ctx.db()).display(ctx.db()).to_string()); + let trait_path = make::ext::ident_path( + &trait_ref.trait_().name(ctx.db()).display(ctx.db(), edition).to_string(), + ); let hir_ty = ctx.sema.resolve_type(&impl_def.self_ty()?)?; let adt = hir_ty.as_adt()?.source(ctx.db())?; gen_trait_fn_body(func, &trait_path, &adt.value, Some(trait_ref)) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs index f4569ca848ff4..b6abb06a2afc1 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -445,7 +445,8 @@ fn build_pat( ) -> Option { match var { ExtendedVariant::Variant(var) => { - let path = mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?); + let edition = module.krate().edition(db); + let path = mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?, edition); // FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. 
unit variants though Some(match var.source(db)?.value.kind() { ast::StructKind::Tuple(field_list) => { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs index db53e49d84633..d86948818b163 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs @@ -8,7 +8,7 @@ use ide_db::{ insert_use::{insert_use, insert_use_as_alias, ImportScope}, }, }; -use syntax::{ast, AstNode, NodeOrToken, SyntaxElement}; +use syntax::{ast, AstNode, Edition, NodeOrToken, SyntaxElement}; use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel}; @@ -120,13 +120,14 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< // prioritize more relevant imports proposed_imports .sort_by_key(|import| Reverse(relevance_score(ctx, import, current_module.as_ref()))); + let edition = current_module.map(|it| it.krate().edition(ctx.db())).unwrap_or(Edition::CURRENT); let group_label = group_label(import_assets.import_candidate()); for import in proposed_imports { let import_path = import.import_path; let (assist_id, import_name) = - (AssistId("auto_import", AssistKind::QuickFix), import_path.display(ctx.db())); + (AssistId("auto_import", AssistKind::QuickFix), import_path.display(ctx.db(), edition)); acc.add_group( &group_label, assist_id, @@ -138,7 +139,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)), ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)), }; - insert_use(&scope, mod_path_to_ast(&import_path), &ctx.config.insert_use); + insert_use(&scope, mod_path_to_ast(&import_path, edition), &ctx.config.insert_use); }, ); @@ -165,7 +166,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< }; insert_use_as_alias( &scope, - mod_path_to_ast(&import_path), + mod_path_to_ast(&import_path, edition), &ctx.config.insert_use, ); }, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs index 3a0754d60f857..cd5fe0f86269c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bool_to_enum.rs @@ -339,6 +339,7 @@ fn augment_references_with_imports( let cfg = ctx.config.import_path_config(); + let edition = target_module.krate().edition(ctx.db()); references .into_iter() .filter_map(|FileReference { range, name, .. }| { @@ -361,7 +362,10 @@ fn augment_references_with_imports( cfg, ) .map(|mod_path| { - make::path_concat(mod_path_to_ast(&mod_path), make::path_from_text("Bool")) + make::path_concat( + mod_path_to_ast(&mod_path, edition), + make::path_from_text("Bool"), + ) }); import_scope.zip(path) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs index 77f9c66b354ee..a5c5b08d5b0c7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs @@ -159,7 +159,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_> }; // Verify this is `bool::then` that is being called. 
let func = ctx.sema.resolve_method_call(&mcall)?; - if func.name(ctx.sema.db).display(ctx.db()).to_string() != "then" { + if !func.name(ctx.sema.db).eq_ident("then") { return None; } let assoc = func.as_assoc_item(ctx.sema.db)?; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs index 5aa94590e679b..8c59ef4314f06 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_into_to_from.rs @@ -51,7 +51,10 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) - Some(hir::PathResolution::Def(module_def)) => module_def, _ => return None, }; - mod_path_to_ast(&module.find_path(ctx.db(), src_type_def, cfg)?) + mod_path_to_ast( + &module.find_path(ctx.db(), src_type_def, cfg)?, + module.krate().edition(ctx.db()), + ) }; let dest_type = match &ast_trait { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs index e86ff0dbebc62..3c9a91741047e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs @@ -114,12 +114,16 @@ pub(crate) fn convert_for_loop_with_for_each( |builder| { let mut buf = String::new(); - if let Some((expr_behind_ref, method)) = + if let Some((expr_behind_ref, method, krate)) = is_ref_and_impls_iter_method(&ctx.sema, &iterable) { // We have either "for x in &col" and col implements a method called iter // or "for x in &mut col" and col implements a method called iter_mut - format_to!(buf, "{expr_behind_ref}.{}()", method.display(ctx.db())); + format_to!( + buf, + "{expr_behind_ref}.{}()", + method.display(ctx.db(), krate.edition(ctx.db())) + ); } else if let ast::Expr::RangeExpr(..) 
= iterable { // range expressions need to be parenthesized for the syntax to be correct format_to!(buf, "({iterable})"); @@ -144,7 +148,7 @@ pub(crate) fn convert_for_loop_with_for_each( fn is_ref_and_impls_iter_method( sema: &hir::Semantics<'_, ide_db::RootDatabase>, iterable: &ast::Expr, -) -> Option<(ast::Expr, hir::Name)> { +) -> Option<(ast::Expr, hir::Name, hir::Crate)> { let ref_expr = match iterable { ast::Expr::RefExpr(r) => r, _ => return None, @@ -172,7 +176,7 @@ fn is_ref_and_impls_iter_method( return None; } - Some((expr_behind_ref, wanted_method)) + Some((expr_behind_ref, wanted_method, krate)) } /// Whether iterable implements core::Iterator diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index 0f0b4442d8ac2..91af9b05bbb85 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -211,7 +211,7 @@ fn augment_references_with_imports( ) .map(|mod_path| { make::path_concat( - mod_path_to_ast(&mod_path), + mod_path_to_ast(&mod_path, target_module.krate().edition(ctx.db())), make::path_from_text(struct_name), ) }); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs index 095b8f958d09a..b229b750e88ba 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_struct_binding.rs @@ -7,7 +7,7 @@ use ide_db::{ FxHashMap, FxHashSet, }; use itertools::Itertools; -use syntax::{ast, ted, AstNode, SmolStr, SyntaxNode, ToSmolStr}; +use syntax::{ast, ted, AstNode, Edition, SmolStr, SyntaxNode, ToSmolStr}; use text_edit::TextRange; use crate::{ @@ -81,6 +81,7 @@ struct StructEditData { has_private_members: bool, is_nested: bool, is_ref: bool, + edition: Edition, } fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option { @@ -145,6 +146,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option AssignmentEdit { let ident_pat = builder.make_mut(data.ident_pat.clone()); - let struct_path = mod_path_to_ast(&data.struct_def_path); + let struct_path = mod_path_to_ast(&data.struct_def_path, data.edition); let is_ref = ident_pat.ref_token().is_some(); let is_mut = ident_pat.mut_token().is_some(); @@ -247,7 +249,7 @@ fn generate_field_names(ctx: &AssistContext<'_>, data: &StructEditData) -> Vec<( .visible_fields .iter() .map(|field| { - let field_name = field.name(ctx.db()).display_no_db().to_smolstr(); + let field_name = field.name(ctx.db()).display_no_db(data.edition).to_smolstr(); let new_name = new_field_name(field_name.clone(), &data.names_in_scope); (field_name, new_name) }) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs index 9beb616d99bc3..3d6d37ad93d26 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs @@ -66,7 +66,9 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> let names_to_import = find_names_to_import(ctx, 
refs_in_target, imported_defs); let expanded = make::use_tree_list(names_to_import.iter().map(|n| { - let path = make::ext::ident_path(&n.display(ctx.db()).to_string()); + let path = make::ext::ident_path( + &n.display(ctx.db(), current_module.krate().edition(ctx.db())).to_string(), + ); make::use_tree(path, None, None, false) })) .clone_for_update(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs index 0a2cb6d5ef8ad..cfcb09b9f7d9f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs @@ -23,7 +23,7 @@ use syntax::{ self, edit::IndentLevel, edit_in_place::Indent, AstNode, AstToken, HasGenericParams, HasName, }, - match_ast, ted, SyntaxElement, + match_ast, ted, Edition, SyntaxElement, SyntaxKind::{self, COMMENT}, SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T, }; @@ -84,7 +84,6 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op }; let body = extraction_target(&node, range)?; - let (container_info, contains_tail_expr) = body.analyze_container(&ctx.sema)?; let (locals_used, self_param) = body.analyze(&ctx.sema); @@ -92,6 +91,9 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let insert_after = node_to_insert_after(&body, anchor)?; let semantics_scope = ctx.sema.scope(&insert_after)?; let module = semantics_scope.module(); + let edition = semantics_scope.krate().edition(ctx.db()); + + let (container_info, contains_tail_expr) = body.analyze_container(&ctx.sema, edition)?; let ret_ty = body.return_ty(ctx)?; let control_flow = body.external_control_flow(ctx, &container_info)?; @@ -217,7 +219,11 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op ); if let Some(mod_path) = mod_path { - insert_use(&scope, mod_path_to_ast(&mod_path), &ctx.config.insert_use); + insert_use( + &scope, + mod_path_to_ast(&mod_path, edition), + &ctx.config.insert_use, + ); } } } @@ -238,7 +244,13 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef { let mut names_in_scope = vec![]; semantics_scope.process_all_names(&mut |name, _| { - names_in_scope.push(name.display(semantics_scope.db.upcast()).to_string()) + names_in_scope.push( + name.display( + semantics_scope.db.upcast(), + semantics_scope.krate().edition(semantics_scope.db), + ) + .to_string(), + ) }); let default_name = "fun_name"; @@ -366,6 +378,7 @@ struct ContainerInfo { ret_type: Option, generic_param_lists: Vec, where_clauses: Vec, + edition: Edition, } /// Control flow that is exported from extracted function @@ -489,8 +502,8 @@ impl Param { } } - fn to_arg(&self, ctx: &AssistContext<'_>) -> ast::Expr { - let var = path_expr_from_local(ctx, self.var); + fn to_arg(&self, ctx: &AssistContext<'_>, edition: Edition) -> ast::Expr { + let var = path_expr_from_local(ctx, self.var, edition); match self.kind() { ParamKind::Value | ParamKind::MutValue => var, ParamKind::SharedRef => make::expr_ref(var, false), @@ -498,8 +511,13 @@ impl Param { } } - fn to_param(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Param { - let var = self.var.name(ctx.db()).display(ctx.db()).to_string(); + fn to_param( + &self, + ctx: &AssistContext<'_>, + module: hir::Module, + edition: Edition, + ) -> 
ast::Param { + let var = self.var.name(ctx.db()).display(ctx.db(), edition).to_string(); let var_name = make::name(&var); let pat = match self.kind() { ParamKind::MutValue => make::ident_pat(false, true, var_name), @@ -520,7 +538,7 @@ impl Param { } impl TryKind { - fn of_ty(ty: hir::Type, ctx: &AssistContext<'_>) -> Option { + fn of_ty(ty: hir::Type, ctx: &AssistContext<'_>, edition: Edition) -> Option { if ty.is_unknown() { // We favour Result for `expr?` return Some(TryKind::Result { ty }); @@ -529,7 +547,7 @@ impl TryKind { let name = adt.name(ctx.db()); // FIXME: use lang items to determine if it is std type or user defined // E.g. if user happens to define type named `Option`, we would have false positive - let name = &name.display(ctx.db()).to_string(); + let name = &name.display(ctx.db(), edition).to_string(); match name.as_str() { "Option" => Some(TryKind::Option), "Result" => Some(TryKind::Result { ty }), @@ -828,6 +846,7 @@ impl FunctionBody { fn analyze_container( &self, sema: &Semantics<'_, RootDatabase>, + edition: Edition, ) -> Option<(ContainerInfo, bool)> { let mut ancestors = self.parent()?.ancestors(); let infer_expr_opt = |expr| sema.type_of_expr(&expr?).map(TypeInfo::adjusted); @@ -927,6 +946,7 @@ impl FunctionBody { ret_type: ty, generic_param_lists, where_clauses, + edition, }, contains_tail_expr, )) @@ -1015,7 +1035,7 @@ impl FunctionBody { let kind = match (try_expr, ret_expr, break_expr, continue_expr) { (Some(_), _, None, None) => { let ret_ty = container_info.ret_type.clone()?; - let kind = TryKind::of_ty(ret_ty, ctx)?; + let kind = TryKind::of_ty(ret_ty, ctx, container_info.edition)?; Some(FlowKind::Try { kind }) } @@ -1397,7 +1417,7 @@ fn fixup_call_site(builder: &mut SourceChangeBuilder, body: &FunctionBody) { fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> SyntaxNode { let ret_ty = fun.return_type(ctx); - let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx))); + let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx, fun.mods.edition))); let name = fun.name.clone(); let mut call_expr = if fun.self_param.is_some() { let self_arg = make::expr_path(make::ext::ident_path("self")); @@ -1420,13 +1440,13 @@ fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> Sy [] => None, [var] => { let name = var.local.name(ctx.db()); - let name = make::name(&name.display(ctx.db()).to_string()); + let name = make::name(&name.display(ctx.db(), fun.mods.edition).to_string()); Some(ast::Pat::IdentPat(make::ident_pat(false, var.mut_usage_outside_body, name))) } vars => { let binding_pats = vars.iter().map(|var| { let name = var.local.name(ctx.db()); - let name = make::name(&name.display(ctx.db()).to_string()); + let name = make::name(&name.display(ctx.db(), fun.mods.edition).to_string()); make::ident_pat(false, var.mut_usage_outside_body, name).into() }); Some(ast::Pat::TuplePat(make::tuple_pat(binding_pats))) @@ -1569,8 +1589,8 @@ impl FlowHandler { } } -fn path_expr_from_local(ctx: &AssistContext<'_>, var: Local) -> ast::Expr { - let name = var.name(ctx.db()).display(ctx.db()).to_string(); +fn path_expr_from_local(ctx: &AssistContext<'_>, var: Local, edition: Edition) -> ast::Expr { + let name = var.name(ctx.db()).display(ctx.db(), edition).to_string(); make::expr_path(make::ext::ident_path(&name)) } @@ -1581,7 +1601,7 @@ fn format_function( old_indent: IndentLevel, ) -> ast::Fn { let fun_name = make::name(&fun.name.text()); - let params = fun.make_param_list(ctx, module); + let params = 
fun.make_param_list(ctx, module, fun.mods.edition); let ret_ty = fun.make_ret_ty(ctx, module); let body = make_body(ctx, old_indent, fun); let (generic_params, where_clause) = make_generic_params_and_where_clause(ctx, fun); @@ -1707,9 +1727,14 @@ impl Function { type_params_in_descendant_paths.chain(type_params_in_params).collect() } - fn make_param_list(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::ParamList { + fn make_param_list( + &self, + ctx: &AssistContext<'_>, + module: hir::Module, + edition: Edition, + ) -> ast::ParamList { let self_param = self.self_param.clone(); - let params = self.params.iter().map(|param| param.to_param(ctx, module)); + let params = self.params.iter().map(|param| param.to_param(ctx, module, edition)); make::param_list(self_param, params) } @@ -1842,10 +1867,12 @@ fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) - None => match fun.outliving_locals.as_slice() { [] => {} [var] => { - tail_expr = Some(path_expr_from_local(ctx, var.local)); + tail_expr = Some(path_expr_from_local(ctx, var.local, fun.mods.edition)); } vars => { - let exprs = vars.iter().map(|var| path_expr_from_local(ctx, var.local)); + let exprs = vars + .iter() + .map(|var| path_expr_from_local(ctx, var.local, fun.mods.edition)); let expr = make::expr_tuple(exprs); tail_expr = Some(expr); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index a62fdeb617394..615b5d3f98b55 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -1,7 +1,7 @@ use std::iter; use either::Either; -use hir::{Module, ModuleDef, Name, Variant}; +use hir::{HasCrate, Module, ModuleDef, Name, Variant}; use ide_db::{ defs::Definition, helpers::mod_path_to_ast, @@ -16,7 +16,7 @@ use syntax::{ self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasAttrs, HasGenericParams, HasName, HasVisibility, }, - match_ast, ted, SyntaxElement, + match_ast, ted, Edition, SyntaxElement, SyntaxKind::*, SyntaxNode, T, }; @@ -58,6 +58,7 @@ pub(crate) fn extract_struct_from_enum_variant( "Extract struct from enum variant", target, |builder| { + let edition = enum_hir.krate(ctx.db()).edition(ctx.db()); let variant_hir_name = variant_hir.name(ctx.db()); let enum_module_def = ModuleDef::from(enum_hir); let usages = Definition::Variant(variant_hir).usages(&ctx.sema).all(); @@ -82,7 +83,7 @@ pub(crate) fn extract_struct_from_enum_variant( references, ); processed.into_iter().for_each(|(path, node, import)| { - apply_references(ctx.config.insert_use, path, node, import) + apply_references(ctx.config.insert_use, path, node, import, edition) }); } builder.edit_file(ctx.file_id()); @@ -98,7 +99,7 @@ pub(crate) fn extract_struct_from_enum_variant( references, ); processed.into_iter().for_each(|(path, node, import)| { - apply_references(ctx.config.insert_use, path, node, import) + apply_references(ctx.config.insert_use, path, node, import, edition) }); } @@ -169,7 +170,7 @@ fn existing_definition(db: &RootDatabase, variant_name: &ast::Name, variant: &Va ), _ => false, }) - .any(|(name, _)| name.display(db).to_string() == variant_name.to_string()) + .any(|(name, _)| name.eq_ident(variant_name.text().as_str())) } fn extract_generic_params( @@ -359,9 +360,10 @@ fn apply_references( segment: ast::PathSegment, 
node: SyntaxNode, import: Option<(ImportScope, hir::ModPath)>, + edition: Edition, ) { if let Some((scope, path)) = import { - insert_use(&scope, mod_path_to_ast(&path), &insert_use_cfg); + insert_use(&scope, mod_path_to_ast(&path, edition), &insert_use_cfg); } // deep clone to prevent cycle let path = make::path_from_segments(iter::once(segment.clone_subtree()), false); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fill_record_pattern_fields.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fill_record_pattern_fields.rs index 758f50d3f47bf..ee321864805e7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fill_record_pattern_fields.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fill_record_pattern_fields.rs @@ -45,8 +45,9 @@ pub(crate) fn fill_record_pattern_fields(acc: &mut Assists, ctx: &AssistContext< let new_field_list = make::record_pat_field_list(old_field_list.fields(), None).clone_for_update(); for (f, _) in missing_fields.iter() { + let edition = ctx.sema.scope(record_pat.syntax())?.krate().edition(ctx.db()); let field = make::record_pat_field_shorthand(make::name_ref( - &f.name(ctx.sema.db).display_no_db().to_smolstr(), + &f.name(ctx.sema.db).display_no_db(edition).to_smolstr(), )); new_field_list.add_field(field.clone_for_update()); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs index 9950f9c14749a..7a92d8911bf85 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs @@ -4,7 +4,7 @@ use hir::{ use ide_db::FileId; use syntax::{ ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _}, - AstNode, TextRange, ToSmolStr, + AstNode, TextRange, }; use crate::{AssistContext, AssistId, AssistKind, Assists}; @@ -48,7 +48,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) let (_, def) = module .scope(ctx.db(), None) .into_iter() - .find(|(name, _)| name.display_no_db().to_smolstr() == name_ref.text().as_str())?; + .find(|(name, _)| name.eq_ident(name_ref.text().as_str()))?; let ScopeDef::ModuleDef(def) = def else { return None; }; @@ -71,7 +71,10 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) let assist_label = match target_name { None => format!("Change visibility to {missing_visibility}"), Some(name) => { - format!("Change visibility of {} to {missing_visibility}", name.display(ctx.db())) + format!( + "Change visibility of {} to {missing_visibility}", + name.display(ctx.db(), current_module.krate().edition(ctx.db())) + ) } }; @@ -92,6 +95,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_> let (record_field_def, _, _) = ctx.sema.resolve_record_field(&record_field)?; let current_module = ctx.sema.scope(record_field.syntax())?.module(); + let current_edition = current_module.krate().edition(ctx.db()); let visibility = record_field_def.visibility(ctx.db()); if visibility.is_visible_from(ctx.db(), current_module.into()) { return None; @@ -123,8 +127,8 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_> let target_name = record_field_def.name(ctx.db()); let assist_label = format!( "Change visibility of {}.{} to {missing_visibility}", - parent_name.display(ctx.db()), - target_name.display(ctx.db()) + parent_name.display(ctx.db(), current_edition), + 
target_name.display(ctx.db(), current_edition) ); acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs index 2150003bc1437..bbb902f8a2a4d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -51,6 +51,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' let strukt = ctx.find_node_at_offset::()?; let strukt_name = strukt.name()?; let current_module = ctx.sema.scope(strukt.syntax())?.module(); + let current_edition = current_module.krate().edition(ctx.db()); let (field_name, field_ty, target) = match ctx.find_node_at_offset::() { Some(field) => { @@ -89,7 +90,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' methods.sort_by(|(a, _), (b, _)| a.cmp(b)); for (name, method) in methods { let adt = ast::Adt::Struct(strukt.clone()); - let name = name.display(ctx.db()).to_string(); + let name = name.display(ctx.db(), current_edition).to_string(); // if `find_struct_impl` returns None, that means that a function named `name` already exists. let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else { continue; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs index 5a3457e5b7a4a..933ab2058ec91 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -22,7 +22,7 @@ use syntax::{ WherePred, }, ted::{self, Position}, - AstNode, NodeOrToken, SmolStr, SyntaxKind, ToSmolStr, + AstNode, Edition, NodeOrToken, SmolStr, SyntaxKind, ToSmolStr, }; // Assist: generate_delegate_trait @@ -109,6 +109,7 @@ struct Field { ty: ast::Type, range: syntax::TextRange, impls: Vec, + edition: Edition, } impl Field { @@ -119,6 +120,7 @@ impl Field { let db = ctx.sema.db; let module = ctx.sema.file_to_module_def(ctx.file_id())?; + let edition = module.krate().edition(ctx.db()); let (name, range, ty) = match f { Either::Left(f) => { @@ -147,7 +149,7 @@ impl Field { } } - Some(Field { name, ty, range, impls }) + Some(Field { name, ty, range, impls, edition }) } } @@ -163,18 +165,18 @@ enum Delegee { } impl Delegee { - fn signature(&self, db: &dyn HirDatabase) -> String { + fn signature(&self, db: &dyn HirDatabase, edition: Edition) -> String { let mut s = String::new(); let (Delegee::Bound(it) | Delegee::Impls(it, _)) = self; for m in it.module(db).path_to_root(db).iter().rev() { if let Some(name) = m.name(db) { - s.push_str(&format!("{}::", name.display_no_db().to_smolstr())); + s.push_str(&format!("{}::", name.display_no_db(edition).to_smolstr())); } } - s.push_str(&it.name(db).display_no_db().to_smolstr()); + s.push_str(&it.name(db).display_no_db(edition).to_smolstr()); s } } @@ -212,9 +214,11 @@ impl Struct { // if self.hir_ty.impls_trait(db, trait_, &[]) { // continue; // } - let signature = delegee.signature(db); + let signature = delegee.signature(db, field.edition); - let Some(delegate) = generate_impl(ctx, self, &field.ty, &field.name, delegee) else { + let Some(delegate) = + generate_impl(ctx, self, &field.ty, 
&field.name, delegee, field.edition) + else { continue; }; @@ -240,6 +244,7 @@ fn generate_impl( field_ty: &ast::Type, field_name: &str, delegee: &Delegee, + edition: Edition, ) -> Option { let delegate: ast::Impl; let db = ctx.db(); @@ -259,7 +264,7 @@ fn generate_impl( strukt_params.clone(), strukt_params.map(|params| params.to_generic_args()), delegee.is_auto(db), - make::ty(&delegee.name(db).display_no_db().to_smolstr()), + make::ty(&delegee.name(db).display_no_db(edition).to_smolstr()), strukt_ty, bound_def.where_clause(), ast_strukt.where_clause(), @@ -350,7 +355,7 @@ fn generate_impl( let type_gen_args = strukt_params.clone().map(|params| params.to_generic_args()); let path_type = - make::ty(&trait_.name(db).display_no_db().to_smolstr()).clone_for_update(); + make::ty(&trait_.name(db).display_no_db(edition).to_smolstr()).clone_for_update(); transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type.syntax())?; // 3) Generate delegate trait impl diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs index 2ac7057fe7946..e558bb6da89bc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs @@ -4,7 +4,7 @@ use hir::{ModPath, ModuleDef}; use ide_db::{famous_defs::FamousDefs, RootDatabase}; use syntax::{ ast::{self, HasName}, - AstNode, SyntaxNode, + AstNode, Edition, SyntaxNode, }; use crate::{ @@ -77,6 +77,7 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( field_name.syntax(), deref_type_to_generate, trait_path, + module.krate().edition(ctx.db()), ) }, ) @@ -117,6 +118,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<() field_list_index, deref_type_to_generate, trait_path, + module.krate().edition(ctx.db()), ) }, ) @@ -130,6 +132,7 @@ fn generate_edit( field_name: impl Display, deref_type: DerefType, trait_path: ModPath, + edition: Edition, ) { let start_offset = strukt.syntax().text_range().end(); let impl_code = match deref_type { @@ -147,8 +150,11 @@ fn generate_edit( ), }; let strukt_adt = ast::Adt::Struct(strukt); - let deref_impl = - generate_trait_impl_text(&strukt_adt, &trait_path.display(db).to_string(), &impl_code); + let deref_impl = generate_trait_impl_text( + &strukt_adt, + &trait_path.display(db, edition).to_string(), + &impl_code, + ); edit.insert(start_offset, deref_impl); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs index 51dd4884547b5..c5c70c9f8eb6f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs @@ -5,7 +5,7 @@ use stdx::{format_to, to_lower_snake_case}; use syntax::{ algo::skip_whitespace_token, ast::{self, edit::IndentLevel, HasDocComments, HasGenericArgs, HasName}, - match_ast, AstNode, AstToken, + match_ast, AstNode, AstToken, Edition, }; use crate::assist_context::{AssistContext, Assists}; @@ -139,7 +139,8 @@ fn make_example_for_fn(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option Vec { } /// Helper function to build the path of the module in the which is the node -fn build_path(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option { +fn build_path(ast_func: 
&ast::Fn, ctx: &AssistContext<'_>, edition: Edition) -> Option { let crate_name = crate_name(ast_func, ctx)?; let leaf = self_partial_type(ast_func) .or_else(|| ast_func.name().map(|n| n.to_string())) .unwrap_or_else(|| "*".into()); let module_def: ModuleDef = ctx.sema.to_def(ast_func)?.module(ctx.db()).into(); - match module_def.canonical_path(ctx.db()) { + match module_def.canonical_path(ctx.db(), edition) { Some(path) => Some(format!("{crate_name}::{path}::{leaf}")), None => Some(format!("{crate_name}::{leaf}")), } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs index b2980d5c63010..cd29f77f0c95c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs @@ -17,7 +17,7 @@ use syntax::{ self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, BlockExpr, CallExpr, HasArgList, HasGenericParams, HasModuleItem, HasTypeBounds, }, - ted, SyntaxKind, SyntaxNode, TextRange, T, + ted, Edition, SyntaxKind, SyntaxNode, TextRange, T, }; use crate::{ @@ -175,6 +175,7 @@ fn add_func_to_accumulator( edit.edit_file(file); let target = function_builder.target.clone(); + let edition = function_builder.target_edition; let func = function_builder.render(ctx.config.snippet_cap, edit); if let Some(adt) = @@ -183,7 +184,7 @@ fn add_func_to_accumulator( { let name = make::ty_path(make::ext::ident_path(&format!( "{}", - adt.name(ctx.db()).display(ctx.db()) + adt.name(ctx.db()).display(ctx.db(), edition) ))); // FIXME: adt may have generic params. @@ -222,6 +223,7 @@ struct FunctionBuilder { should_focus_return_type: bool, visibility: Visibility, is_async: bool, + target_edition: Edition, } impl FunctionBuilder { @@ -237,6 +239,7 @@ impl FunctionBuilder { ) -> Option { let target_module = target_module.or_else(|| ctx.sema.scope(target.syntax()).map(|it| it.module()))?; + let target_edition = target_module.krate().edition(ctx.db()); let current_module = ctx.sema.scope(call.syntax())?.module(); let visibility = calculate_necessary_visibility(current_module, target_module, ctx); @@ -258,7 +261,9 @@ impl FunctionBuilder { // If generated function has the name "new" and is an associated function, we generate fn body // as a constructor and assume a "Self" return type. 
- if let Some(body) = make_fn_body_as_new_function(ctx, &fn_name.text(), adt_info) { + if let Some(body) = + make_fn_body_as_new_function(ctx, &fn_name.text(), adt_info, target_edition) + { ret_type = Some(make::ret_type(make::ty_path(make::ext::ident_path("Self")))); should_focus_return_type = false; fn_body = body; @@ -288,6 +293,7 @@ impl FunctionBuilder { should_focus_return_type, visibility, is_async, + target_edition, }) } @@ -299,6 +305,8 @@ impl FunctionBuilder { target_module: Module, target: GeneratedFunctionTarget, ) -> Option { + let target_edition = target_module.krate().edition(ctx.db()); + let current_module = ctx.sema.scope(call.syntax())?.module(); let visibility = calculate_necessary_visibility(current_module, target_module, ctx); @@ -336,6 +344,7 @@ impl FunctionBuilder { should_focus_return_type, visibility, is_async, + target_edition, }) } @@ -425,6 +434,7 @@ fn make_fn_body_as_new_function( ctx: &AssistContext<'_>, fn_name: &str, adt_info: &Option, + edition: Edition, ) -> Option { if fn_name != "new" { return None; @@ -441,7 +451,10 @@ fn make_fn_body_as_new_function( .iter() .map(|field| { make::record_expr_field( - make::name_ref(&format!("{}", field.name(ctx.db()).display(ctx.db()))), + make::name_ref(&format!( + "{}", + field.name(ctx.db()).display(ctx.db(), edition) + )), Some(placeholder_expr.clone()), ) }) @@ -1102,8 +1115,9 @@ fn fn_arg_type( if ty.is_reference() || ty.is_mutable_reference() { let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(fn_arg.syntax())?.krate()); + let target_edition = target_module.krate().edition(ctx.db()); convert_reference_type(ty.strip_references(), ctx.db(), famous_defs) - .map(|conversion| conversion.convert_type(ctx.db()).to_string()) + .map(|conversion| conversion.convert_type(ctx.db(), target_edition).to_string()) .or_else(|| ty.display_source_code(ctx.db(), target_module.into(), true).ok()) } else { ty.display_source_code(ctx.db(), target_module.into(), true).ok() diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs index 60214aaaf69ea..8e349e84a96c8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs @@ -233,7 +233,7 @@ fn generate_getter_from_info( .map(|conversion| { cov_mark::hit!(convert_reference_type); ( - conversion.convert_type(ctx.db()), + conversion.convert_type(ctx.db(), krate.edition(ctx.db())), conversion.getter(record_field_info.field_name.to_string()), ) }) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs index b985b5e66c4df..b9dede7cbdc36 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs @@ -64,10 +64,13 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option ctx.config.import_path_config(), )?; + let edition = current_module.krate().edition(ctx.db()); + let expr = use_trivial_constructor( ctx.sema.db, - ide_db::helpers::mod_path_to_ast(&type_path), + ide_db::helpers::mod_path_to_ast(&type_path, edition), &ty, + edition, )?; Some(make::record_expr_field(make::name_ref(&name.text()), Some(expr))) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_const_as_literal.rs 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_const_as_literal.rs index 111ea50fdc941..f1c2acdd3ed91 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_const_as_literal.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_const_as_literal.rs @@ -51,10 +51,13 @@ pub(crate) fn inline_const_as_literal(acc: &mut Assists, ctx: &AssistContext<'_> | ast::Expr::MatchExpr(_) | ast::Expr::MacroExpr(_) | ast::Expr::BinExpr(_) - | ast::Expr::CallExpr(_) => match konst.render_eval(ctx.sema.db) { - Ok(result) => result, - Err(_) => return None, - }, + | ast::Expr::CallExpr(_) => { + let edition = ctx.sema.scope(variable.syntax())?.krate().edition(ctx.db()); + match konst.render_eval(ctx.sema.db, edition) { + Ok(result) => result, + Err(_) => return None, + } + } _ => return None, }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs index 3057745a97b7f..743ea9476150d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs @@ -104,9 +104,13 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> }; builder.delete(range_to_delete); - let const_ref = format!("Self::{}", name.display(ctx.db())); - for range in usages.file_ranges().map(|it| it.range) { - builder.replace(range, const_ref.clone()); + let usages = usages.iter().flat_map(|(file_id, usages)| { + let edition = file_id.edition(); + usages.iter().map(move |usage| (edition, usage.range)) + }); + for (edition, range) in usages { + let const_ref = format!("Self::{}", name.display(ctx.db(), edition)); + builder.replace(range, const_ref); } // Heuristically inserting the extracted const after the consecutive existing consts diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs index 14381085a782f..8a7a06b380f51 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_from_mod_rs.rs @@ -39,7 +39,7 @@ pub(crate) fn move_from_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op } let target = source_file.syntax().text_range(); - let module_name = module.name(ctx.db())?.display(ctx.db()).to_string(); + let module_name = module.name(ctx.db())?.unescaped().display(ctx.db()).to_string(); let path = format!("../{module_name}.rs"); let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path }; acc.add( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs index e679a68f4466d..9692b70592912 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_module_to_file.rs @@ -61,7 +61,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) -> .string_value_unescape() .is_none() => { - format_to!(buf, "{}/", name.display(db)) + format_to!(buf, "{}/", name.unescaped().display(db)) } _ => (), } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs index c89d54ff039df..2925e2334b44d 
100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_to_mod_rs.rs @@ -39,7 +39,7 @@ pub(crate) fn move_to_mod_rs(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti } let target = source_file.syntax().text_range(); - let module_name = module.name(ctx.db())?.display(ctx.db()).to_string(); + let module_name = module.name(ctx.db())?.unescaped().display(ctx.db()).to_string(); let path = format!("./{module_name}/mod.rs"); let dst = AnchoredPathBuf { anchor: ctx.file_id().into(), path }; acc.add( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs index b1e98045fcfaa..14518c4d2cc4c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_method_call.rs @@ -42,6 +42,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> let resolved_call = ctx.sema.resolve_method_call(&call)?; let current_module = ctx.sema.scope(call.syntax())?.module(); + let current_edition = current_module.krate().edition(ctx.db()); let target_module_def = ModuleDef::from(resolved_call); let item_in_ns = ItemInNs::from(target_module_def); let receiver_path = current_module.find_path( @@ -61,6 +62,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> |replace_with: String| builder.replace(range, replace_with), &receiver_path, item_in_ns, + current_edition, ) }, ); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs index d8e7da15d5bf8..ac88861fe4fdc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/qualify_path.rs @@ -8,6 +8,7 @@ use ide_db::{ imports::import_assets::{ImportCandidate, LocatedImport}, }; use syntax::ast::HasGenericArgs; +use syntax::Edition; use syntax::{ ast, ast::{make, HasArgList}, @@ -93,6 +94,8 @@ pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option NodeOrToken::Token(t) => t.parent()?, }) .map(|scope| scope.module()); + let current_edition = + current_module.map(|it| it.krate().edition(ctx.db())).unwrap_or(Edition::CURRENT); // prioritize more relevant imports proposed_imports.sort_by_key(|import| { Reverse(super::auto_import::relevance_score(ctx, import, current_module.as_ref())) @@ -103,13 +106,14 @@ pub(crate) fn qualify_path(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option acc.add_group( &group_label, AssistId("qualify_path", AssistKind::QuickFix), - label(ctx.db(), candidate, &import), + label(ctx.db(), candidate, &import, current_edition), range, |builder| { qualify_candidate.qualify( |replace_with: String| builder.replace(range, replace_with), &import.import_path, import.item_to_import, + current_edition, ) }, ); @@ -130,8 +134,9 @@ impl QualifyCandidate<'_> { mut replacer: impl FnMut(String), import: &hir::ModPath, item: hir::ItemInNs, + edition: Edition, ) { - let import = mod_path_to_ast(import); + let import = mod_path_to_ast(import, edition); match self { QualifyCandidate::QualifierStart(segment, generics) => { let generics = generics.as_ref().map_or_else(String::new, ToString::to_string); @@ -203,7 +208,7 @@ fn find_trait_method( if let Some(hir::AssocItem::Function(method)) = 
trait_.items(db).into_iter().find(|item: &hir::AssocItem| { item.name(db) - .map(|name| name.display(db).to_string() == trait_method_name.to_string()) + .map(|name| name.eq_ident(trait_method_name.text().as_str())) .unwrap_or(false) }) { @@ -233,14 +238,19 @@ fn group_label(candidate: &ImportCandidate) -> GroupLabel { GroupLabel(format!("Qualify {name}")) } -fn label(db: &RootDatabase, candidate: &ImportCandidate, import: &LocatedImport) -> String { +fn label( + db: &RootDatabase, + candidate: &ImportCandidate, + import: &LocatedImport, + edition: Edition, +) -> String { let import_path = &import.import_path; match candidate { ImportCandidate::Path(candidate) if candidate.qualifier.is_none() => { - format!("Qualify as `{}`", import_path.display(db)) + format!("Qualify as `{}`", import_path.display(db, edition)) } - _ => format!("Qualify with `{}`", import_path.display(db)), + _ => format!("Qualify with `{}`", import_path.display(db, edition)), } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs index 0256256697740..df7a5112f126b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs @@ -1,7 +1,7 @@ use either::Either; use ide_db::FxHashMap; use itertools::Itertools; -use syntax::{ast, ted, AstNode}; +use syntax::{ast, ted, AstNode, SmolStr, ToSmolStr}; use crate::{AssistContext, AssistId, AssistKind, Assists}; @@ -25,8 +25,9 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti path.syntax().parent().and_then(>::cast)?; let ranks = compute_fields_ranks(&path, ctx)?; - let get_rank_of_field = - |of: Option<_>| *ranks.get(&of.unwrap_or_default()).unwrap_or(&usize::MAX); + let get_rank_of_field = |of: Option| { + *ranks.get(of.unwrap_or_default().trim_start_matches("r#")).unwrap_or(&usize::MAX) + }; let field_list = match &record { Either::Left(it) => Either::Left(it.record_expr_field_list()?), @@ -36,7 +37,7 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti Either::Left(it) => Either::Left(( it.fields() .sorted_unstable_by_key(|field| { - get_rank_of_field(field.field_name().map(|it| it.to_string())) + get_rank_of_field(field.field_name().map(|it| it.to_smolstr())) }) .collect::>(), it, @@ -44,7 +45,7 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti Either::Right(it) => Either::Right(( it.fields() .sorted_unstable_by_key(|field| { - get_rank_of_field(field.field_name().map(|it| it.to_string())) + get_rank_of_field(field.field_name().map(|it| it.to_smolstr())) }) .collect::>(), it, @@ -97,7 +98,7 @@ fn compute_fields_ranks( .fields(ctx.db()) .into_iter() .enumerate() - .map(|(idx, field)| (field.name(ctx.db()).display(ctx.db()).to_string(), idx)) + .map(|(idx, field)| (field.name(ctx.db()).unescaped().display(ctx.db()).to_string(), idx)) .collect(); Some(res) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs index cf135f83e7b6a..ada89ce7c40ac 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_impl_items.rs @@ -77,7 +77,8 @@ pub(crate) fn reorder_impl_items(acc: &mut Assists, ctx: &AssistContext<'_>) -> ast::AssocItem::MacroCall(_) => None, }; - 
name.and_then(|n| ranks.get(&n.to_string()).copied()).unwrap_or(usize::MAX) + name.and_then(|n| ranks.get(n.text().as_str().trim_start_matches("r#")).copied()) + .unwrap_or(usize::MAX) }) .collect(); @@ -114,7 +115,7 @@ fn compute_item_ranks( .iter() .flat_map(|i| i.name(ctx.db())) .enumerate() - .map(|(idx, name)| (name.display(ctx.db()).to_string(), idx)) + .map(|(idx, name)| (name.unescaped().display(ctx.db()).to_string(), idx)) .collect(), ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 5ff4af19fbf18..248f18789ce33 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -70,6 +70,7 @@ pub(crate) fn replace_derive_with_manual_impl( let current_module = ctx.sema.scope(adt.syntax())?.module(); let current_crate = current_module.krate(); + let current_edition = current_crate.edition(ctx.db()); let found_traits = items_locator::items_with_name( &ctx.sema, @@ -85,7 +86,7 @@ pub(crate) fn replace_derive_with_manual_impl( current_module .find_path(ctx.sema.db, hir::ModuleDef::Trait(trait_), ctx.config.import_path_config()) .as_ref() - .map(mod_path_to_ast) + .map(|path| mod_path_to_ast(path, current_edition)) .zip(Some(trait_)) }); @@ -214,7 +215,7 @@ fn impl_def_from_trait( let impl_def = generate_trait_impl(adt, make::ty_path(trait_path.clone())); let first_assoc_item = - add_trait_assoc_items_to_impl(sema, &trait_items, trait_, &impl_def, target_scope); + add_trait_assoc_items_to_impl(sema, &trait_items, trait_, &impl_def, &target_scope); // Generate a default `impl` function body for the derived trait. if let ast::AssocItem::Fn(ref func) = first_assoc_item { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs index d0aa835e79afd..65330b34c4ad2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs @@ -5,7 +5,7 @@ use ide_db::{ }; use syntax::{ ast::{self, make, HasGenericArgs}, - match_ast, ted, AstNode, SyntaxNode, + match_ast, ted, AstNode, Edition, SyntaxNode, }; use crate::{AssistContext, AssistId, AssistKind, Assists}; @@ -29,32 +29,32 @@ pub(crate) fn replace_qualified_name_with_use( acc: &mut Assists, ctx: &AssistContext<'_>, ) -> Option<()> { - let path: ast::Path = ctx.find_node_at_offset()?; + let original_path: ast::Path = ctx.find_node_at_offset()?; // We don't want to mess with use statements - if path.syntax().ancestors().find_map(ast::UseTree::cast).is_some() { + if original_path.syntax().ancestors().find_map(ast::UseTree::cast).is_some() { cov_mark::hit!(not_applicable_in_use); return None; } - if path.qualifier().is_none() { + if original_path.qualifier().is_none() { cov_mark::hit!(dont_import_trivial_paths); return None; } // only offer replacement for non assoc items - match ctx.sema.resolve_path(&path)? { + match ctx.sema.resolve_path(&original_path)? 
{ hir::PathResolution::Def(def) if def.as_assoc_item(ctx.sema.db).is_none() => (), _ => return None, } // then search for an import for the first path segment of what we want to replace // that way it is less likely that we import the item from a different location due re-exports - let module = match ctx.sema.resolve_path(&path.first_qualifier_or_self())? { + let module = match ctx.sema.resolve_path(&original_path.first_qualifier_or_self())? { hir::PathResolution::Def(module @ hir::ModuleDef::Module(_)) => module, _ => return None, }; let starts_with_name_ref = !matches!( - path.first_segment().and_then(|it| it.kind()), + original_path.first_segment().and_then(|it| it.kind()), Some( ast::PathSegmentKind::CrateKw | ast::PathSegmentKind::SuperKw @@ -63,7 +63,7 @@ pub(crate) fn replace_qualified_name_with_use( ); let path_to_qualifier = starts_with_name_ref .then(|| { - ctx.sema.scope(path.syntax())?.module().find_use_path( + ctx.sema.scope(original_path.syntax())?.module().find_use_path( ctx.sema.db, module, ctx.config.insert_use.prefix_kind, @@ -72,8 +72,8 @@ pub(crate) fn replace_qualified_name_with_use( }) .flatten(); - let scope = ImportScope::find_insert_use_container(path.syntax(), &ctx.sema)?; - let target = path.syntax().text_range(); + let scope = ImportScope::find_insert_use_container(original_path.syntax(), &ctx.sema)?; + let target = original_path.syntax().text_range(); acc.add( AssistId("replace_qualified_name_with_use", AssistKind::RefactorRewrite), "Replace qualified path with use", @@ -86,13 +86,19 @@ pub(crate) fn replace_qualified_name_with_use( ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)), ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)), }; - shorten_paths(scope.as_syntax_node(), &path); - let path = drop_generic_args(&path); + shorten_paths(scope.as_syntax_node(), &original_path); + let path = drop_generic_args(&original_path); + let edition = ctx + .sema + .scope(original_path.syntax()) + .map(|semantics_scope| semantics_scope.krate().edition(ctx.db())) + .unwrap_or(Edition::CURRENT); // stick the found import in front of the to be replaced path - let path = match path_to_qualifier.and_then(|it| mod_path_to_ast(&it).qualifier()) { - Some(qualifier) => make::path_concat(qualifier, path), - None => path, - }; + let path = + match path_to_qualifier.and_then(|it| mod_path_to_ast(&it, edition).qualifier()) { + Some(qualifier) => make::path_concat(qualifier, path), + None => path, + }; insert_use(&scope, path, &ctx.config.insert_use); }, ) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs index 4913cfdea9440..66671c934c4b7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/term_search.rs @@ -48,15 +48,17 @@ pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< let mut formatter = |_: &hir::Type| String::from("todo!()"); + let edition = scope.krate().edition(ctx.db()); let paths = paths .into_iter() .filter_map(|path| { - path.gen_source_code(&scope, &mut formatter, ctx.config.import_path_config()).ok() + path.gen_source_code(&scope, &mut formatter, ctx.config.import_path_config(), edition) + .ok() }) .unique(); let macro_name = macro_call.name(ctx.sema.db); - let macro_name = macro_name.display(ctx.sema.db); + let macro_name = macro_name.display(ctx.sema.db, edition); for code in paths { acc.add_group( diff 
--git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs index 98975a324dc3b..9ab36bf7757d6 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_async_sugar.rs @@ -141,7 +141,8 @@ pub(crate) fn desugar_async_into_impl_future( ModuleDef::Trait(future_trait), ctx.config.import_path_config(), )?; - let trait_path = trait_path.display(ctx.db()); + let edition = scope.krate().edition(ctx.db()); + let trait_path = trait_path.display(ctx.db(), edition); acc.add( AssistId("desugar_async_into_impl_future", AssistKind::RefactorRewrite), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index c67693ea2bb89..b8a6f3b6dbeb2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -14,7 +14,7 @@ use syntax::{ edit_in_place::{AttrsOwnerEdit, Indent, Removable}, make, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace, }, - ted, AstNode, AstToken, Direction, NodeOrToken, SourceFile, + ted, AstNode, AstToken, Direction, Edition, NodeOrToken, SourceFile, SyntaxKind::*, SyntaxNode, SyntaxToken, TextRange, TextSize, T, }; @@ -174,7 +174,7 @@ pub fn add_trait_assoc_items_to_impl( original_items: &[InFile], trait_: hir::Trait, impl_: &ast::Impl, - target_scope: hir::SemanticsScope<'_>, + target_scope: &hir::SemanticsScope<'_>, ) -> ast::AssocItem { let new_indent_level = IndentLevel::from_node(impl_.syntax()) + 1; let items = original_items.iter().map(|InFile { file_id, value: original_item }| { @@ -195,7 +195,7 @@ pub fn add_trait_assoc_items_to_impl( // FIXME: Paths in nested macros are not handled well. See // `add_missing_impl_members::paths_in_nested_macro_should_get_transformed` test. 
let transform = - PathTransform::trait_impl(&target_scope, &source_scope, trait_, impl_.clone()); + PathTransform::trait_impl(target_scope, &source_scope, trait_, impl_.clone()); transform.apply(cloned_item.syntax()); } cloned_item.remove_attrs_and_docs(); @@ -684,31 +684,31 @@ enum ReferenceConversionType { } impl ReferenceConversion { - pub(crate) fn convert_type(&self, db: &dyn HirDatabase) -> ast::Type { + pub(crate) fn convert_type(&self, db: &dyn HirDatabase, edition: Edition) -> ast::Type { let ty = match self.conversion { - ReferenceConversionType::Copy => self.ty.display(db).to_string(), + ReferenceConversionType::Copy => self.ty.display(db, edition).to_string(), ReferenceConversionType::AsRefStr => "&str".to_owned(), ReferenceConversionType::AsRefSlice => { let type_argument_name = - self.ty.type_arguments().next().unwrap().display(db).to_string(); + self.ty.type_arguments().next().unwrap().display(db, edition).to_string(); format!("&[{type_argument_name}]") } ReferenceConversionType::Dereferenced => { let type_argument_name = - self.ty.type_arguments().next().unwrap().display(db).to_string(); + self.ty.type_arguments().next().unwrap().display(db, edition).to_string(); format!("&{type_argument_name}") } ReferenceConversionType::Option => { let type_argument_name = - self.ty.type_arguments().next().unwrap().display(db).to_string(); + self.ty.type_arguments().next().unwrap().display(db, edition).to_string(); format!("Option<&{type_argument_name}>") } ReferenceConversionType::Result => { let mut type_arguments = self.ty.type_arguments(); let first_type_argument_name = - type_arguments.next().unwrap().display(db).to_string(); + type_arguments.next().unwrap().display(db, edition).to_string(); let second_type_argument_name = - type_arguments.next().unwrap().display(db).to_string(); + type_arguments.next().unwrap().display(db, edition).to_string(); format!("Result<&{first_type_argument_name}, &{second_type_argument_name}>") } }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs index fc43d243b36ef..3130ef0695577 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs @@ -6,7 +6,7 @@ use itertools::Itertools; use stdx::to_lower_snake_case; use syntax::{ ast::{self, HasName}, - match_ast, AstNode, SmolStr, + match_ast, AstNode, Edition, SmolStr, }; /// Trait names, that will be ignored when in `impl Trait` and `dyn Trait` @@ -271,24 +271,25 @@ fn var_name_from_pat(pat: &ast::Pat) -> Option { fn from_type(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option { let ty = sema.type_of_expr(expr)?.adjusted(); let ty = ty.remove_ref().unwrap_or(ty); + let edition = sema.scope(expr.syntax())?.krate().edition(sema.db); - name_of_type(&ty, sema.db) + name_of_type(&ty, sema.db, edition) } -fn name_of_type(ty: &hir::Type, db: &RootDatabase) -> Option { +fn name_of_type(ty: &hir::Type, db: &RootDatabase, edition: Edition) -> Option { let name = if let Some(adt) = ty.as_adt() { - let name = adt.name(db).display(db).to_string(); + let name = adt.name(db).display(db, edition).to_string(); if WRAPPER_TYPES.contains(&name.as_str()) { let inner_ty = ty.type_arguments().next()?; - return name_of_type(&inner_ty, db); + return name_of_type(&inner_ty, db, edition); } name } else if let Some(trait_) = ty.as_dyn_trait() { - trait_name(&trait_, db)? + trait_name(&trait_, db, edition)? 
} else if let Some(traits) = ty.as_impl_traits(db) { - let mut iter = traits.filter_map(|t| trait_name(&t, db)); + let mut iter = traits.filter_map(|t| trait_name(&t, db, edition)); let name = iter.next()?; if iter.next().is_some() { return None; @@ -300,8 +301,8 @@ fn name_of_type(ty: &hir::Type, db: &RootDatabase) -> Option { normalize(&name) } -fn trait_name(trait_: &hir::Trait, db: &RootDatabase) -> Option { - let name = trait_.name(db).display(db).to_string(); +fn trait_name(trait_: &hir::Trait, db: &RootDatabase, edition: Edition) -> Option { + let name = trait_.name(db).display(db, edition).to_string(); if USELESS_TRAITS.contains(&name.as_str()) { return None; } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs index 58e9b724df2a0..b537150608bb0 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs @@ -85,6 +85,7 @@ impl Completions { CompletionItemKind::Keyword, ctx.source_range(), SmolStr::new_static(keyword), + ctx.edition, ); item.add_to(self, ctx.db); } @@ -124,7 +125,8 @@ impl Completions { kw: &str, snippet: &str, ) { - let mut item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), kw); + let mut item = + CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), kw, ctx.edition); match ctx.config.snippet_cap { Some(cap) => { @@ -149,7 +151,8 @@ impl Completions { kw: &str, snippet: &str, ) { - let mut item = CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), kw); + let mut item = + CompletionItem::new(CompletionItemKind::Keyword, ctx.source_range(), kw, ctx.edition); match ctx.config.snippet_cap { Some(cap) => item.insert_snippet(cap, snippet), @@ -544,7 +547,8 @@ impl Completions { CompletionItem::new( SymbolKind::LifetimeParam, ctx.source_range(), - name.display_no_db().to_smolstr(), + name.display_no_db(ctx.edition).to_smolstr(), + ctx.edition, ) .add_to(self, ctx.db) } @@ -553,7 +557,8 @@ impl Completions { CompletionItem::new( SymbolKind::Label, ctx.source_range(), - name.display_no_db().to_smolstr(), + name.display_no_db(ctx.edition).to_smolstr(), + ctx.edition, ) .add_to(self, ctx.db) } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs index a7a6cdebd361e..88f4461f4ddcc 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs @@ -48,11 +48,15 @@ pub(crate) fn complete_known_attribute_input( match path.text().as_str() { "repr" => repr::complete_repr(acc, ctx, tt), - "feature" => { - lint::complete_lint(acc, ctx, colon_prefix, &parse_tt_as_comma_sep_paths(tt)?, FEATURES) - } + "feature" => lint::complete_lint( + acc, + ctx, + colon_prefix, + &parse_tt_as_comma_sep_paths(tt, ctx.edition)?, + FEATURES, + ), "allow" | "warn" | "deny" | "forbid" => { - let existing_lints = parse_tt_as_comma_sep_paths(tt)?; + let existing_lints = parse_tt_as_comma_sep_paths(tt, ctx.edition)?; let lints: Vec = CLIPPY_LINT_GROUPS .iter() @@ -66,9 +70,12 @@ pub(crate) fn complete_known_attribute_input( lint::complete_lint(acc, ctx, colon_prefix, &existing_lints, &lints); } "cfg" => cfg::complete_cfg(acc, ctx), - "macro_use" => { - macro_use::complete_macro_use(acc, ctx, extern_crate, &parse_tt_as_comma_sep_paths(tt)?) 
- } + "macro_use" => macro_use::complete_macro_use( + acc, + ctx, + extern_crate, + &parse_tt_as_comma_sep_paths(tt, ctx.edition)?, + ), _ => (), } Some(()) @@ -130,8 +137,12 @@ pub(crate) fn complete_attribute_path( }); let add_completion = |attr_completion: &AttrCompletion| { - let mut item = - CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), attr_completion.label); + let mut item = CompletionItem::new( + SymbolKind::Attribute, + ctx.source_range(), + attr_completion.label, + ctx.edition, + ); if let Some(lookup) = attr_completion.lookup { item.lookup_by(lookup); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs index 6e7d50ede066e..cda0da13b26eb 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/cfg.rs @@ -8,7 +8,8 @@ use crate::{completions::Completions, context::CompletionContext, CompletionItem pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) { let add_completion = |item: &str| { - let mut completion = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), item); + let mut completion = + CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), item, ctx.edition); completion.insert_text(format!(r#""{item}""#)); acc.add(completion.build(ctx.db)); }; @@ -41,7 +42,12 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) { name => ctx.krate.potential_cfg(ctx.db).get_cfg_values(name).cloned().for_each(|s| { let s = s.as_str(); let insert_text = format!(r#""{s}""#); - let mut item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s); + let mut item = CompletionItem::new( + SymbolKind::BuiltinAttr, + ctx.source_range(), + s, + ctx.edition, + ); item.insert_text(insert_text); acc.add(item.build(ctx.db)); @@ -49,7 +55,8 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) { }, None => ctx.krate.potential_cfg(ctx.db).get_cfg_keys().cloned().unique().for_each(|s| { let s = s.as_str(); - let item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s); + let item = + CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s, ctx.edition); acc.add(item.build(ctx.db)); }), } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs index 0127a42824882..1f8927401b2f8 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/derive.rs @@ -62,7 +62,7 @@ pub(crate) fn complete_derive_path( _ => return acc.add_macro(ctx, path_ctx, mac, name), }; - let name_ = name.display_no_db().to_smolstr(); + let name_ = name.display_no_db(ctx.edition).to_smolstr(); let find = DEFAULT_DERIVE_DEPENDENCIES .iter() .find(|derive_completion| derive_completion.label == name_); @@ -72,10 +72,9 @@ pub(crate) fn complete_derive_path( let mut components = vec![derive_completion.label]; components.extend(derive_completion.dependencies.iter().filter( |&&dependency| { - !existing_derives - .iter() - .map(|it| it.name(ctx.db)) - .any(|it| it.display_no_db().to_smolstr() == dependency) + !existing_derives.iter().map(|it| it.name(ctx.db)).any(|it| { + it.display_no_db(ctx.edition).to_smolstr() == dependency 
+ }) }, )); let lookup = components.join(", "); @@ -85,6 +84,7 @@ pub(crate) fn complete_derive_path( SymbolKind::Derive, ctx.source_range(), SmolStr::from_iter(label), + ctx.edition, ); if let Some(docs) = mac.docs(ctx.db) { item.documentation(docs); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs index f9dec53806466..d5f9cd5fc76cd 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/lint.rs @@ -54,7 +54,8 @@ pub(super) fn complete_lint( Some(qual) if !is_qualified => format!("{qual}::{name}"), _ => name.to_owned(), }; - let mut item = CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label); + let mut item = + CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label, ctx.edition); item.documentation(Documentation::new(description.to_owned())); item.add_to(acc, ctx.db) } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs index 7e3a62405a78c..deb12282c025b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/macro_use.rs @@ -28,7 +28,8 @@ pub(super) fn complete_macro_use( continue; } - let item = CompletionItem::new(SymbolKind::Macro, ctx.source_range(), mac_name); + let item = + CompletionItem::new(SymbolKind::Macro, ctx.source_range(), mac_name, ctx.edition); item.add_to(acc, ctx.db); } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs index 14f464b7753d7..12652b448925b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute/repr.rs @@ -30,7 +30,12 @@ pub(super) fn complete_repr( continue; } - let mut item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), label); + let mut item = CompletionItem::new( + SymbolKind::BuiltinAttr, + ctx.source_range(), + label, + ctx.edition, + ); if let Some(lookup) = lookup { item.lookup_by(lookup); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs index a07daf4c4e49c..d55bc3ea5d03a 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/dot.rs @@ -29,6 +29,7 @@ pub(crate) fn complete_dot( CompletionItemKind::Keyword, ctx.source_range(), SmolStr::new_static("await"), + ctx.edition, ); item.detail("expr.await"); item.add_to(acc, ctx.db); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs index 23d93d3b7467e..c9013d1d17d43 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/env_vars.rs @@ -56,7 +56,7 @@ pub(crate) fn complete_cargo_env_vars( let range = original.text_range_between_quotes()?; CARGO_DEFINED_VARS.iter().for_each(|&(var, detail)| { - let mut item = 
CompletionItem::new(CompletionItemKind::Keyword, range, var); + let mut item = CompletionItem::new(CompletionItemKind::Keyword, range, var, ctx.edition); item.detail(detail); item.add_to(acc, ctx.db); }); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs index b5d5604c7519f..1a06e0a3a0e73 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_abi.rs @@ -52,8 +52,13 @@ pub(crate) fn complete_extern_abi( let abi_str = expanded; let source_range = abi_str.text_range_between_quotes()?; for &abi in SUPPORTED_CALLING_CONVENTIONS { - CompletionItem::new(CompletionItemKind::Keyword, source_range, SmolStr::new_static(abi)) - .add_to(acc, ctx.db); + CompletionItem::new( + CompletionItemKind::Keyword, + source_range, + SmolStr::new_static(abi), + ctx.edition, + ) + .add_to(acc, ctx.db); } Some(()) } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs index 2427f4e49f292..7cb710c2d963c 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/extern_crate.rs @@ -19,7 +19,8 @@ pub(crate) fn complete_extern_crate(acc: &mut Completions, ctx: &CompletionConte let mut item = CompletionItem::new( CompletionItemKind::SymbolKind(SymbolKind::Module), ctx.source_range(), - name.display_no_db().to_smolstr(), + name.display_no_db(ctx.edition).to_smolstr(), + ctx.edition, ); item.set_documentation(module.docs(ctx.db)); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs index fdce7c547a490..2a6b310d3a217 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs @@ -411,7 +411,7 @@ fn compute_fuzzy_completion_order_key( cov_mark::hit!(certain_fuzzy_order_test); let import_name = match proposed_mod_path.segments().last() { // FIXME: nasty alloc, this is a hot path! 
- Some(name) => name.display_no_db().to_smolstr().to_ascii_lowercase(), + Some(name) => name.unescaped().display_no_db().to_smolstr().to_ascii_lowercase(), None => return usize::MAX, }; match import_name.match_indices(user_input_lowercased).next() { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs index a59246229bb2d..ee3b817ee8cfb 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/fn_param.rs @@ -32,7 +32,7 @@ pub(crate) fn complete_fn_param( let comma_wrapper = comma_wrapper(ctx); let mut add_new_item_to_acc = |label: &str| { let mk_item = |label: &str, range: TextRange| { - CompletionItem::new(CompletionItemKind::Binding, range, label) + CompletionItem::new(CompletionItemKind::Binding, range, label, ctx.edition) }; let item = match &comma_wrapper { Some((fmt, range)) => mk_item(&fmt(label), *range), @@ -50,7 +50,7 @@ pub(crate) fn complete_fn_param( ParamKind::Closure(closure) => { let stmt_list = closure.syntax().ancestors().find_map(ast::StmtList::cast)?; params_from_stmt_list_scope(ctx, stmt_list, |name, ty| { - add_new_item_to_acc(&format!("{}: {ty}", name.display(ctx.db))); + add_new_item_to_acc(&format!("{}: {ty}", name.display(ctx.db, ctx.edition))); }); } } @@ -101,8 +101,8 @@ fn fill_fn_params( if let Some(stmt_list) = function.syntax().parent().and_then(ast::StmtList::cast) { params_from_stmt_list_scope(ctx, stmt_list, |name, ty| { file_params - .entry(format!("{}: {ty}", name.display(ctx.db))) - .or_insert(name.display(ctx.db).to_string()); + .entry(format!("{}: {ty}", name.display(ctx.db, ctx.edition))) + .or_insert(name.display(ctx.db, ctx.edition).to_string()); }); } remove_duplicated(&mut file_params, param_list.params()); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs index 23affc3659208..a87c60c694ac7 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/format_string.rs @@ -35,7 +35,8 @@ pub(crate) fn format_string( CompletionItem::new( CompletionItemKind::Binding, source_range, - name.display_no_db().to_smolstr(), + name.display_no_db(ctx.edition).to_smolstr(), + ctx.edition, ) .add_to(acc, ctx.db); }); @@ -50,7 +51,8 @@ pub(crate) fn format_string( CompletionItem::new( CompletionItemKind::SymbolKind(symbol_kind), source_range, - name.display_no_db().to_smolstr(), + name.display_no_db(ctx.edition).to_smolstr(), + ctx.edition, ) .add_to(acc, ctx.db); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs index 2fd7805e60d5e..fc6e1ebf05fc6 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -184,7 +184,7 @@ fn add_function_impl( let label = format_smolstr!( "{}fn {}({})", if is_async { "async " } else { "" }, - fn_name.display(ctx.db), + fn_name.display(ctx.db, ctx.edition), if func.assoc_fn_params(ctx.db).is_empty() { "" } else { ".." 
} ); @@ -194,11 +194,11 @@ fn add_function_impl( SymbolKind::Function }); - let mut item = CompletionItem::new(completion_kind, replacement_range, label); + let mut item = CompletionItem::new(completion_kind, replacement_range, label, ctx.edition); item.lookup_by(format!( "{}fn {}", if is_async { "async " } else { "" }, - fn_name.display(ctx.db) + fn_name.display(ctx.db, ctx.edition) )) .set_documentation(func.docs(ctx.db)) .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() }); @@ -262,7 +262,8 @@ fn add_type_alias_impl( let label = format_smolstr!("type {alias_name} ="); - let mut item = CompletionItem::new(SymbolKind::TypeAlias, replacement_range, label); + let mut item = + CompletionItem::new(SymbolKind::TypeAlias, replacement_range, label, ctx.edition); item.lookup_by(format!("type {alias_name}")) .set_documentation(type_alias.docs(ctx.db)) .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() }); @@ -320,7 +321,7 @@ fn add_const_impl( const_: hir::Const, impl_def: hir::Impl, ) { - let const_name = const_.name(ctx.db).map(|n| n.display_no_db().to_smolstr()); + let const_name = const_.name(ctx.db).map(|n| n.display_no_db(ctx.edition).to_smolstr()); if let Some(const_name) = const_name { if let Some(source) = ctx.sema.source(const_) { @@ -334,7 +335,8 @@ fn add_const_impl( let label = make_const_compl_syntax(&transformed_const, source.file_id.is_macro()); let replacement = format!("{label} "); - let mut item = CompletionItem::new(SymbolKind::Const, replacement_range, label); + let mut item = + CompletionItem::new(SymbolKind::Const, replacement_range, label, ctx.edition); item.lookup_by(format_smolstr!("const {const_name}")) .set_documentation(const_.docs(ctx.db)) .set_relevance(CompletionRelevance { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs index 03fe93c563ff8..9efc52428eff7 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/lifetime.rs @@ -41,7 +41,7 @@ pub(crate) fn complete_lifetime( if matches!( res, ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) - if param_lifetime != Some(&*name.display_no_db().to_smolstr()) + if param_lifetime != Some(&*name.display_no_db(ctx.edition).to_smolstr()) ) { acc.add_lifetime(ctx, name); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs index d9a10893bf52b..05e2892fdc85c 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs @@ -53,7 +53,7 @@ pub(crate) fn complete_mod( let existing_mod_declarations = current_module .children(ctx.db) - .filter_map(|module| Some(module.name(ctx.db)?.display(ctx.db).to_string())) + .filter_map(|module| Some(module.name(ctx.db)?.display(ctx.db, ctx.edition).to_string())) .filter(|module| module != ctx.original_token.text()) .collect::>(); @@ -99,7 +99,8 @@ pub(crate) fn complete_mod( if mod_under_caret.semicolon_token().is_none() { label.push(';'); } - let item = CompletionItem::new(SymbolKind::Module, ctx.source_range(), &label); + let item = + CompletionItem::new(SymbolKind::Module, ctx.source_range(), &label, ctx.edition); item.add_to(acc, ctx.db) }); @@ -140,7 +141,9 @@ fn directory_to_look_for_submodules( 
module_chain_to_containing_module_file(module, db) .into_iter() .filter_map(|module| module.name(db)) - .try_fold(base_directory, |path, name| path.join(&name.display_no_db().to_smolstr())) + .try_fold(base_directory, |path, name| { + path.join(&name.unescaped().display_no_db().to_smolstr()) + }) } fn module_chain_to_containing_module_file( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index 977e0d80a4d46..a632f148936dc 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -72,7 +72,10 @@ pub(crate) fn complete_postfix( let mut item = postfix_snippet( "drop", "fn drop(&mut self)", - &format!("{path}($0{receiver_text})", path = path.display(ctx.db)), + &format!( + "{path}($0{receiver_text})", + path = path.display(ctx.db, ctx.edition) + ), ); item.set_documentation(drop_fn.docs(ctx.db)); item.add_to(acc, ctx.db); @@ -335,8 +338,12 @@ fn build_postfix_snippet_builder<'ctx>( ) -> impl Fn(&str, &str, &str) -> Builder + 'ctx { move |label, detail, snippet| { let edit = TextEdit::replace(delete_range, snippet.to_owned()); - let mut item = - CompletionItem::new(CompletionItemKind::Snippet, ctx.source_range(), label); + let mut item = CompletionItem::new( + CompletionItemKind::Snippet, + ctx.source_range(), + label, + ctx.edition, + ); item.detail(detail).snippet_edit(cap, edit); let postfix_match = if ctx.original_token.text() == label { cov_mark::hit!(postfix_exact_match_is_high_priority); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs index 1dcf41f8dd235..117a5e3d9359b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/record.rs @@ -73,6 +73,7 @@ pub(crate) fn complete_record_expr_fields( CompletionItemKind::Snippet, ctx.source_range(), SmolStr::new_static(".."), + ctx.edition, ); item.insert_text("."); item.add_to(acc, ctx.db); @@ -101,6 +102,7 @@ pub(crate) fn add_default_update( SymbolKind::Field, ctx.source_range(), SmolStr::new_static(completion_text), + ctx.edition, ); let completion_text = completion_text.strip_prefix(ctx.token.text()).unwrap_or(completion_text); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs index e831113350ac5..357709e0c1fde 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/snippet.rs @@ -118,7 +118,8 @@ macro_rules! 
$1 { } fn snippet(ctx: &CompletionContext<'_>, cap: SnippetCap, label: &str, snippet: &str) -> Builder { - let mut item = CompletionItem::new(CompletionItemKind::Snippet, ctx.source_range(), label); + let mut item = + CompletionItem::new(CompletionItemKind::Snippet, ctx.source_range(), label, ctx.edition); item.insert_snippet(cap, snippet); item } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs index 8e5b55360dc11..45704549e6090 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/use_.rs @@ -107,7 +107,11 @@ pub(crate) fn complete_use_path( let item = CompletionItem::new( CompletionItemKind::SymbolKind(SymbolKind::Enum), ctx.source_range(), - format_smolstr!("{}::", e.name(ctx.db).display(ctx.db)), + format_smolstr!( + "{}::", + e.name(ctx.db).display(ctx.db, ctx.edition) + ), + ctx.edition, ); acc.add(item.build(ctx.db)); } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index 5b64a98a8bba5..bcd9df941947f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -437,6 +437,7 @@ pub(crate) struct CompletionContext<'a> { pub(crate) module: hir::Module, /// Whether nightly toolchain is used. Cached since this is looked up a lot. is_nightly: bool, + pub(crate) edition: Edition, /// The expected name of what we are completing. /// This is usually the parameter name of the function argument we are completing. @@ -467,9 +468,9 @@ impl CompletionContext<'_> { cov_mark::hit!(completes_if_lifetime_without_idents); TextRange::at(self.original_token.text_range().start(), TextSize::from(1)) } - IDENT | LIFETIME_IDENT | UNDERSCORE | INT_NUMBER => self.original_token.text_range(), - // FIXME: Edition - _ if kind.is_keyword(Edition::CURRENT) => self.original_token.text_range(), + LIFETIME_IDENT | UNDERSCORE | INT_NUMBER => self.original_token.text_range(), + // We want to consider all keywords in all editions. + _ if kind.is_any_identifier() => self.original_token.text_range(), _ => TextRange::empty(self.position.offset), } } @@ -717,6 +718,7 @@ impl<'a> CompletionContext<'a> { let krate = scope.krate(); let module = scope.module(); + let edition = krate.edition(db); let toolchain = db.toolchain_channel(krate.into()); // `toolchain == None` means we're in some detached files. 
Since we have no information on @@ -743,6 +745,7 @@ impl<'a> CompletionContext<'a> { krate, module, is_nightly, + edition, expected_name, expected_type, qualifier_ctx, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index 1e972b9b4ce06..ed359394f1c49 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -1385,7 +1385,7 @@ fn pattern_context_for( }).map(|enum_| enum_.variants(sema.db)) }) }).map(|variants| variants.iter().filter_map(|variant| { - let variant_name = variant.name(sema.db).display(sema.db).to_string(); + let variant_name = variant.name(sema.db).unescaped().display(sema.db).to_string(); let variant_already_present = match_arm_list.arms().any(|arm| { arm.pat().and_then(|pat| { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs index 3657a7d969b99..a30a115da1f1b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/item.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/item.rs @@ -10,7 +10,7 @@ use ide_db::{ use itertools::Itertools; use smallvec::SmallVec; use stdx::{impl_from, never}; -use syntax::{format_smolstr, SmolStr, TextRange, TextSize}; +use syntax::{format_smolstr, Edition, SmolStr, TextRange, TextSize}; use text_edit::TextEdit; use crate::{ @@ -400,6 +400,7 @@ impl CompletionItem { kind: impl Into, source_range: TextRange, label: impl Into, + edition: Edition, ) -> Builder { let label = label.into(); Builder { @@ -419,6 +420,7 @@ impl CompletionItem { ref_match: None, imports_to_add: Default::default(), doc_aliases: vec![], + edition, } } @@ -464,6 +466,7 @@ pub(crate) struct Builder { trigger_call_info: bool, relevance: CompletionRelevance, ref_match: Option<(Mutability, TextSize)>, + edition: Edition, } impl Builder { @@ -517,7 +520,7 @@ impl Builder { label_detail.replace(format_smolstr!( "{} (use {})", label_detail.as_deref().unwrap_or_default(), - import_edit.import_path.display(db) + import_edit.import_path.display(db, self.edition) )); } else if let Some(trait_name) = self.trait_name { label_detail.replace(format_smolstr!( @@ -536,8 +539,8 @@ impl Builder { .into_iter() .filter_map(|import| { Some(( - import.import_path.display(db).to_string(), - import.import_path.segments().last()?.display(db).to_string(), + import.import_path.display(db, self.edition).to_string(), + import.import_path.segments().last()?.display(db, self.edition).to_string(), )) }) .collect(); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index 90c1728074d4d..58d1fad09504f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -245,6 +245,7 @@ pub fn resolve_completion_edits( let current_module = sema.scope(position_for_import)?.module(); let current_crate = current_module.krate(); + let current_edition = current_crate.edition(db); let new_ast = scope.clone_for_update(); let mut import_insert = TextEdit::builder(); @@ -261,9 +262,13 @@ pub fn resolve_completion_edits( .filter_map(|candidate| { current_module.find_use_path(db, candidate, config.insert_use.prefix_kind, cfg) }) - .find(|mod_path| mod_path.display(db).to_string() == full_import_path); + .find(|mod_path| mod_path.display(db, current_edition).to_string() == 
full_import_path); if let Some(import_path) = import { - insert_use::insert_use(&new_ast, mod_path_to_ast(&import_path), &config.insert_use); + insert_use::insert_use( + &new_ast, + mod_path_to_ast(&import_path, current_edition), + &config.insert_use, + ); } }); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index 02d667c52056a..ff5ec3a29f3e1 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -17,7 +17,7 @@ use ide_db::{ imports::import_assets::LocatedImport, RootDatabase, SnippetCap, SymbolKind, }; -use syntax::{ast, format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange, ToSmolStr}; +use syntax::{ast, format_smolstr, AstNode, Edition, SmolStr, SyntaxKind, TextRange, ToSmolStr}; use text_edit::TextEdit; use crate::{ @@ -133,19 +133,22 @@ pub(crate) fn render_field( let db = ctx.db(); let is_deprecated = ctx.is_deprecated(field); let name = field.name(db); - let (name, escaped_name) = - (name.unescaped().display(db).to_smolstr(), name.display_no_db().to_smolstr()); + let (name, escaped_name) = ( + name.unescaped().display(db).to_smolstr(), + name.display_no_db(ctx.completion.edition).to_smolstr(), + ); let mut item = CompletionItem::new( SymbolKind::Field, ctx.source_range(), - field_with_receiver(db, receiver.as_ref(), &name), + field_with_receiver(db, receiver.as_ref(), &name, ctx.completion.edition), + ctx.completion.edition, ); item.set_relevance(CompletionRelevance { type_match: compute_type_match(ctx.completion, ty), exact_name_match: compute_exact_name_match(ctx.completion, name.as_str()), ..CompletionRelevance::default() }); - item.detail(ty.display(db).to_string()) + item.detail(ty.display(db, ctx.completion.edition).to_string()) .set_documentation(field.docs(db)) .set_deprecated(is_deprecated) .lookup_by(name); @@ -159,7 +162,8 @@ pub(crate) fn render_field( builder.replace( ctx.source_range(), - field_with_receiver(db, receiver.as_ref(), &escaped_name).into(), + field_with_receiver(db, receiver.as_ref(), &escaped_name, ctx.completion.edition) + .into(), ); let expected_fn_type = @@ -183,7 +187,12 @@ pub(crate) fn render_field( item.text_edit(builder.finish()); } else { - item.insert_text(field_with_receiver(db, receiver.as_ref(), &escaped_name)); + item.insert_text(field_with_receiver( + db, + receiver.as_ref(), + &escaped_name, + ctx.completion.edition, + )); } if let Some(receiver) = &dot_access.receiver { if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) { @@ -200,10 +209,11 @@ fn field_with_receiver( db: &RootDatabase, receiver: Option<&hir::Name>, field_name: &str, + edition: Edition, ) -> SmolStr { receiver.map_or_else( || field_name.into(), - |receiver| format_smolstr!("{}.{field_name}", receiver.display(db)), + |receiver| format_smolstr!("{}.{field_name}", receiver.display(db, edition)), ) } @@ -216,9 +226,16 @@ pub(crate) fn render_tuple_field( let mut item = CompletionItem::new( SymbolKind::Field, ctx.source_range(), - field_with_receiver(ctx.db(), receiver.as_ref(), &field.to_string()), + field_with_receiver( + ctx.db(), + receiver.as_ref(), + &field.to_string(), + ctx.completion.edition, + ), + ctx.completion.edition, ); - item.detail(ty.display(ctx.db()).to_string()).lookup_by(field.to_string()); + item.detail(ty.display(ctx.db(), ctx.completion.edition).to_string()) + .lookup_by(field.to_string()); item.build(ctx.db()) } @@ -226,8 +243,12 @@ pub(crate) fn 
render_type_inference( ty_string: String, ctx: &CompletionContext<'_>, ) -> CompletionItem { - let mut builder = - CompletionItem::new(CompletionItemKind::InferredType, ctx.source_range(), ty_string); + let mut builder = CompletionItem::new( + CompletionItemKind::InferredType, + ctx.source_range(), + ty_string, + ctx.edition, + ); builder.set_relevance(CompletionRelevance { is_definite: true, ..Default::default() }); builder.build(ctx.db) } @@ -296,7 +317,7 @@ pub(crate) fn render_expr( let cfg = ctx.config.import_path_config(); - let label = expr.gen_source_code(&ctx.scope, &mut label_formatter, cfg).ok()?; + let label = expr.gen_source_code(&ctx.scope, &mut label_formatter, cfg, ctx.edition).ok()?; let source_range = match ctx.original_token.parent() { Some(node) => match node.ancestors().find_map(ast::Path::cast) { @@ -306,10 +327,13 @@ pub(crate) fn render_expr( None => ctx.source_range(), }; - let mut item = CompletionItem::new(CompletionItemKind::Expression, source_range, label); + let mut item = + CompletionItem::new(CompletionItemKind::Expression, source_range, label, ctx.edition); - let snippet = - format!("{}$0", expr.gen_source_code(&ctx.scope, &mut snippet_formatter, cfg).ok()?); + let snippet = format!( + "{}$0", + expr.gen_source_code(&ctx.scope, &mut snippet_formatter, cfg, ctx.edition).ok()? + ); let edit = TextEdit::replace(source_range, snippet); item.snippet_edit(ctx.config.snippet_cap?, edit); item.documentation(Documentation::new(String::from("Autogenerated expression by term search"))); @@ -396,10 +420,10 @@ fn render_resolution_path( let config = completion.config; let requires_import = import_to_add.is_some(); - let name = local_name.display_no_db().to_smolstr(); + let name = local_name.display_no_db(ctx.completion.edition).to_smolstr(); let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution); - if local_name.is_escaped() { - item.insert_text(local_name.display_no_db().to_smolstr()); + if local_name.is_escaped(completion.edition) { + item.insert_text(local_name.display_no_db(completion.edition).to_smolstr()); } // Add `<>` for generic types let type_path_no_ty_args = matches!( @@ -421,14 +445,17 @@ fn render_resolution_path( item.lookup_by(name.clone()) .label(SmolStr::from_iter([&name, "<…>"])) .trigger_call_info() - .insert_snippet(cap, format!("{}<$0>", local_name.display(db))); + .insert_snippet( + cap, + format!("{}<$0>", local_name.display(db, completion.edition)), + ); } } } let mut set_item_relevance = |ty: Type| { if !ty.is_unknown() { - item.detail(ty.display(db).to_string()); + item.detail(ty.display(db, completion.edition).to_string()); } item.set_relevance(CompletionRelevance { @@ -485,6 +512,7 @@ fn render_resolution_simple_( kind, ctx.source_range(), local_name.unescaped().display(db).to_smolstr(), + ctx.completion.edition, ); item.set_relevance(ctx.completion_relevance()) .set_documentation(scope_def_docs(db, resolution)) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs index 3bfec0de6bca4..415d87c6239b4 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/const_.rs @@ -14,11 +14,14 @@ pub(crate) fn render_const(ctx: RenderContext<'_>, const_: hir::Const) -> Option fn render(ctx: RenderContext<'_>, const_: hir::Const) -> Option { let db = ctx.db(); let name = const_.name(db)?; - let (name, escaped_name) = - 
(name.unescaped().display(db).to_smolstr(), name.display(db).to_smolstr()); - let detail = const_.display(db).to_string(); + let (name, escaped_name) = ( + name.unescaped().display(db).to_smolstr(), + name.display(db, ctx.completion.edition).to_smolstr(), + ); + let detail = const_.display(db, ctx.completion.edition).to_string(); - let mut item = CompletionItem::new(SymbolKind::Const, ctx.source_range(), name); + let mut item = + CompletionItem::new(SymbolKind::Const, ctx.source_range(), name, ctx.completion.edition); item.set_documentation(ctx.docs(const_)) .set_deprecated(ctx.is_deprecated(const_) || ctx.is_deprecated_assoc_item(const_)) .detail(detail) @@ -26,7 +29,7 @@ fn render(ctx: RenderContext<'_>, const_: hir::Const) -> Option if let Some(actm) = const_.as_assoc_item(db) { if let Some(trt) = actm.container_or_implemented_trait(db) { - item.trait_name(trt.name(db).display_no_db().to_smolstr()); + item.trait_name(trt.name(db).display_no_db(ctx.completion.edition).to_smolstr()); } } item.insert_text(escaped_name); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs index 05b2d0ae386b4..74092b53f523f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs @@ -4,7 +4,7 @@ use hir::{db::HirDatabase, AsAssocItem, HirDisplay}; use ide_db::{SnippetCap, SymbolKind}; use itertools::Itertools; use stdx::{format_to, to_lower_snake_case}; -use syntax::{format_smolstr, AstNode, SmolStr, ToSmolStr}; +use syntax::{format_smolstr, AstNode, Edition, SmolStr, ToSmolStr}; use crate::{ context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind}, @@ -62,9 +62,16 @@ fn render( receiver.unescaped().display(ctx.db()), name.unescaped().display(ctx.db()) ), - format_smolstr!("{}.{}", receiver.display(ctx.db()), name.display(ctx.db())), + format_smolstr!( + "{}.{}", + receiver.display(ctx.db(), completion.edition), + name.display(ctx.db(), completion.edition) + ), + ), + _ => ( + name.unescaped().display(db).to_smolstr(), + name.display(db, completion.edition).to_smolstr(), ), - _ => (name.unescaped().display(db).to_smolstr(), name.display(db).to_smolstr()), }; let has_self_param = func.self_param(db).is_some(); let mut item = CompletionItem::new( @@ -75,6 +82,7 @@ fn render( }), ctx.source_range(), call.clone(), + completion.edition, ); let ret_type = func.ret_type(db); @@ -141,9 +149,9 @@ fn render( } let detail = if ctx.completion.config.full_function_signatures { - detail_full(db, func) + detail_full(db, func, ctx.completion.edition) } else { - detail(db, func) + detail(db, func, ctx.completion.edition) }; item.set_documentation(ctx.docs(func)) .set_deprecated(ctx.is_deprecated(func) || ctx.is_deprecated_assoc_item(func)) @@ -161,7 +169,9 @@ fn render( None => { if let Some(actm) = assoc_item { if let Some(trt) = actm.container_or_implemented_trait(db) { - item.trait_name(trt.name(db).display_no_db().to_smolstr()); + item.trait_name( + trt.name(db).display_no_db(ctx.completion.edition).to_smolstr(), + ); } } } @@ -219,7 +229,7 @@ pub(super) fn add_call_parens<'b>( params.iter().enumerate().format_with(", ", |(index, param), f| { match param.name(ctx.db) { Some(n) => { - let smol_str = n.display_no_db().to_smolstr(); + let smol_str = n.display_no_db(ctx.edition).to_smolstr(); let text = smol_str.as_str().trim_start_matches('_'); let ref_ = ref_of_param(ctx, text, param.ty()); 
f(&format_args!("${{{}:{ref_}{text}}}", index + offset)) @@ -238,7 +248,7 @@ pub(super) fn add_call_parens<'b>( format!( "{}(${{1:{}}}{}{})$0", escaped_name, - self_param.display(ctx.db), + self_param.display(ctx.db, ctx.edition), if params.is_empty() { "" } else { ", " }, function_params_snippet ) @@ -276,7 +286,7 @@ fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type) -> &'sta "" } -fn detail(db: &dyn HirDatabase, func: hir::Function) -> String { +fn detail(db: &dyn HirDatabase, func: hir::Function, edition: Edition) -> String { let mut ret_ty = func.ret_type(db); let mut detail = String::new(); @@ -293,15 +303,15 @@ fn detail(db: &dyn HirDatabase, func: hir::Function) -> String { format_to!(detail, "unsafe "); } - format_to!(detail, "fn({})", params_display(db, func)); + format_to!(detail, "fn({})", params_display(db, func, edition)); if !ret_ty.is_unit() { - format_to!(detail, " -> {}", ret_ty.display(db)); + format_to!(detail, " -> {}", ret_ty.display(db, edition)); } detail } -fn detail_full(db: &dyn HirDatabase, func: hir::Function) -> String { - let signature = format!("{}", func.display(db)); +fn detail_full(db: &dyn HirDatabase, func: hir::Function, edition: Edition) -> String { + let signature = format!("{}", func.display(db, edition)); let mut detail = String::with_capacity(signature.len()); for segment in signature.split_whitespace() { @@ -315,16 +325,16 @@ fn detail_full(db: &dyn HirDatabase, func: hir::Function) -> String { detail } -fn params_display(db: &dyn HirDatabase, func: hir::Function) -> String { +fn params_display(db: &dyn HirDatabase, func: hir::Function, edition: Edition) -> String { if let Some(self_param) = func.self_param(db) { let assoc_fn_params = func.assoc_fn_params(db); let params = assoc_fn_params .iter() .skip(1) // skip the self param because we are manually handling that - .map(|p| p.ty().display(db)); + .map(|p| p.ty().display(db, edition)); format!( "{}{}", - self_param.display(db), + self_param.display(db, edition), params.format_with("", |display, f| { f(&", ")?; f(&display) @@ -332,7 +342,7 @@ fn params_display(db: &dyn HirDatabase, func: hir::Function) -> String { ) } else { let assoc_fn_params = func.assoc_fn_params(db); - assoc_fn_params.iter().map(|p| p.ty().display(db)).join(", ") + assoc_fn_params.iter().map(|p| p.ty().display(db, edition)).join(", ") } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs index 27435307d5086..c71356e5300fc 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs @@ -76,16 +76,16 @@ fn render( }; let (qualified_name, escaped_qualified_name) = ( qualified_name.unescaped().display(ctx.db()).to_string(), - qualified_name.display(ctx.db()).to_string(), + qualified_name.display(ctx.db(), completion.edition).to_string(), ); let snippet_cap = ctx.snippet_cap(); let mut rendered = match kind { StructKind::Tuple if should_add_parens => { - render_tuple_lit(db, snippet_cap, &fields, &escaped_qualified_name) + render_tuple_lit(db, snippet_cap, &fields, &escaped_qualified_name, completion.edition) } StructKind::Record if should_add_parens => { - render_record_lit(db, snippet_cap, &fields, &escaped_qualified_name) + render_record_lit(db, snippet_cap, &fields, &escaped_qualified_name, completion.edition) } _ => RenderedLiteral { literal: escaped_qualified_name.clone(), @@ -103,7 +103,10 @@ fn render( } let 
label = format_literal_label(&qualified_name, kind, snippet_cap); let lookup = if qualified { - format_literal_lookup(&short_qualified_name.display(ctx.db()).to_string(), kind) + format_literal_lookup( + &short_qualified_name.display(ctx.db(), completion.edition).to_string(), + kind, + ) } else { format_literal_lookup(&qualified_name, kind) }; @@ -112,6 +115,7 @@ fn render( CompletionItemKind::SymbolKind(thing.symbol_kind()), ctx.source_range(), label, + completion.edition, ); item.lookup_by(lookup); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs index de715bcbfaf9d..6490171fbb48b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/macro_.rs @@ -46,8 +46,10 @@ fn render( ctx.source_range() }; - let (name, escaped_name) = - (name.unescaped().display(ctx.db()).to_smolstr(), name.display(ctx.db()).to_smolstr()); + let (name, escaped_name) = ( + name.unescaped().display(ctx.db()).to_smolstr(), + name.display(ctx.db(), completion.edition).to_smolstr(), + ); let docs = ctx.docs(macro_); let docs_str = docs.as_ref().map(Documentation::as_str).unwrap_or_default(); let is_fn_like = macro_.is_fn_like(completion.db); @@ -59,9 +61,10 @@ fn render( SymbolKind::from(macro_.kind(completion.db)), source_range, label(&ctx, needs_bang, bra, ket, &name), + completion.edition, ); item.set_deprecated(ctx.is_deprecated(macro_)) - .detail(macro_.display(completion.db).to_string()) + .detail(macro_.display(completion.db, completion.edition).to_string()) .set_documentation(docs) .set_relevance(ctx.completion_relevance()); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs index 598b8762b6885..5675dfb5c6ff2 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/pattern.rs @@ -3,7 +3,7 @@ use hir::{db::HirDatabase, Name, StructKind}; use ide_db::{documentation::HasDocs, SnippetCap}; use itertools::Itertools; -use syntax::{SmolStr, ToSmolStr}; +use syntax::{Edition, SmolStr, ToSmolStr}; use crate::{ context::{ParamContext, ParamKind, PathCompletionCtx, PatternContext}, @@ -31,8 +31,10 @@ pub(crate) fn render_struct_pat( } let name = local_name.unwrap_or_else(|| strukt.name(ctx.db())); - let (name, escaped_name) = - (name.unescaped().display(ctx.db()).to_smolstr(), name.display(ctx.db()).to_smolstr()); + let (name, escaped_name) = ( + name.unescaped().display(ctx.db()).to_smolstr(), + name.display(ctx.db(), ctx.completion.edition).to_smolstr(), + ); let kind = strukt.kind(ctx.db()); let label = format_literal_label(name.as_str(), kind, ctx.snippet_cap()); let lookup = format_literal_lookup(name.as_str(), kind); @@ -60,13 +62,13 @@ pub(crate) fn render_variant_pat( let (name, escaped_name) = match path { Some(path) => ( path.unescaped().display(ctx.db()).to_string().into(), - path.display(ctx.db()).to_string().into(), + path.display(ctx.db(), ctx.completion.edition).to_string().into(), ), None => { let name = local_name.unwrap_or_else(|| variant.name(ctx.db())); let it = ( name.unescaped().display(ctx.db()).to_smolstr(), - name.display(ctx.db()).to_smolstr(), + name.display(ctx.db(), ctx.completion.edition).to_smolstr(), ); it } @@ -119,7 +121,12 @@ fn build_completion( relevance.type_match = super::compute_type_match(ctx.completion, &adt_ty); 
} - let mut item = CompletionItem::new(CompletionItemKind::Binding, ctx.source_range(), label); + let mut item = CompletionItem::new( + CompletionItemKind::Binding, + ctx.source_range(), + label, + ctx.completion.edition, + ); item.set_documentation(ctx.docs(def)) .set_deprecated(ctx.is_deprecated(def)) .detail(&pat) @@ -142,9 +149,14 @@ fn render_pat( ) -> Option { let mut pat = match kind { StructKind::Tuple => render_tuple_as_pat(ctx.snippet_cap(), fields, name, fields_omitted), - StructKind::Record => { - render_record_as_pat(ctx.db(), ctx.snippet_cap(), fields, name, fields_omitted) - } + StructKind::Record => render_record_as_pat( + ctx.db(), + ctx.snippet_cap(), + fields, + name, + fields_omitted, + ctx.completion.edition, + ), StructKind::Unit => name.to_owned(), }; @@ -173,6 +185,7 @@ fn render_record_as_pat( fields: &[hir::Field], name: &str, fields_omitted: bool, + edition: Edition, ) -> String { let fields = fields.iter(); match snippet_cap { @@ -180,7 +193,7 @@ fn render_record_as_pat( format!( "{name} {{ {}{} }}", fields.enumerate().format_with(", ", |(idx, field), f| { - f(&format_args!("{}${}", field.name(db).display(db.upcast()), idx + 1)) + f(&format_args!("{}${}", field.name(db).display(db.upcast(), edition), idx + 1)) }), if fields_omitted { ", .." } else { "" }, name = name @@ -189,7 +202,7 @@ fn render_record_as_pat( None => { format!( "{name} {{ {}{} }}", - fields.map(|field| field.name(db).display_no_db().to_smolstr()).format(", "), + fields.map(|field| field.name(db).display_no_db(edition).to_smolstr()).format(", "), if fields_omitted { ", .." } else { "" }, name = name ) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs index b81caf24220bf..09eb19201c5b0 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/type_alias.rs @@ -33,14 +33,22 @@ fn render( let (name, escaped_name) = if with_eq { ( SmolStr::from_iter([&name.unescaped().display(db).to_smolstr(), " = "]), - SmolStr::from_iter([&name.display_no_db().to_smolstr(), " = "]), + SmolStr::from_iter([&name.display_no_db(ctx.completion.edition).to_smolstr(), " = "]), ) } else { - (name.unescaped().display(db).to_smolstr(), name.display_no_db().to_smolstr()) + ( + name.unescaped().display(db).to_smolstr(), + name.display_no_db(ctx.completion.edition).to_smolstr(), + ) }; - let detail = type_alias.display(db).to_string(); + let detail = type_alias.display(db, ctx.completion.edition).to_string(); - let mut item = CompletionItem::new(SymbolKind::TypeAlias, ctx.source_range(), name); + let mut item = CompletionItem::new( + SymbolKind::TypeAlias, + ctx.source_range(), + name, + ctx.completion.edition, + ); item.set_documentation(ctx.docs(type_alias)) .set_deprecated(ctx.is_deprecated(type_alias) || ctx.is_deprecated_assoc_item(type_alias)) .detail(detail) @@ -48,7 +56,7 @@ fn render( if let Some(actm) = type_alias.as_assoc_item(db) { if let Some(trt) = actm.container_or_implemented_trait(db) { - item.trait_name(trt.name(db).display_no_db().to_smolstr()); + item.trait_name(trt.name(db).display_no_db(ctx.completion.edition).to_smolstr()); } } item.insert_text(escaped_name); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs index ca7593c122ee2..e053e299d903a 100644 --- 
a/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/union_literal.rs @@ -22,21 +22,29 @@ pub(crate) fn render_union_literal( let name = local_name.unwrap_or_else(|| un.name(ctx.db())); let (qualified_name, escaped_qualified_name) = match path { - Some(p) => (p.unescaped().display(ctx.db()).to_string(), p.display(ctx.db()).to_string()), - None => { - (name.unescaped().display(ctx.db()).to_string(), name.display(ctx.db()).to_string()) - } + Some(p) => ( + p.unescaped().display(ctx.db()).to_string(), + p.display(ctx.db(), ctx.completion.edition).to_string(), + ), + None => ( + name.unescaped().display(ctx.db()).to_string(), + name.display(ctx.db(), ctx.completion.edition).to_string(), + ), }; let label = format_literal_label( - &name.display_no_db().to_smolstr(), + &name.display_no_db(ctx.completion.edition).to_smolstr(), StructKind::Record, ctx.snippet_cap(), ); - let lookup = format_literal_lookup(&name.display_no_db().to_smolstr(), StructKind::Record); + let lookup = format_literal_lookup( + &name.display_no_db(ctx.completion.edition).to_smolstr(), + StructKind::Record, + ); let mut item = CompletionItem::new( CompletionItemKind::SymbolKind(SymbolKind::Union), ctx.source_range(), label, + ctx.completion.edition, ); item.lookup_by(lookup); @@ -54,7 +62,10 @@ pub(crate) fn render_union_literal( escaped_qualified_name, fields .iter() - .map(|field| field.name(ctx.db()).display_no_db().to_smolstr()) + .map(|field| field + .name(ctx.db()) + .display_no_db(ctx.completion.edition) + .to_smolstr()) .format(",") ) } else { @@ -62,7 +73,10 @@ pub(crate) fn render_union_literal( "{} {{ {} }}", escaped_qualified_name, fields.iter().format_with(", ", |field, f| { - f(&format_args!("{}: ()", field.name(ctx.db()).display(ctx.db()))) + f(&format_args!( + "{}: ()", + field.name(ctx.db()).display(ctx.db(), ctx.completion.edition) + )) }) ) }; @@ -73,8 +87,8 @@ pub(crate) fn render_union_literal( fields.iter().format_with(", ", |field, f| { f(&format_args!( "{}: {}", - field.name(ctx.db()).display(ctx.db()), - field.ty(ctx.db()).display(ctx.db()) + field.name(ctx.db()).display(ctx.db(), ctx.completion.edition), + field.ty(ctx.db()).display(ctx.db(), ctx.completion.edition) )) }), if fields_omitted { ", .." 
} else { "" } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs index bc2df9e39f3d5..d8516ea107869 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/variant.rs @@ -4,7 +4,7 @@ use crate::context::CompletionContext; use hir::{db::HirDatabase, sym, HasAttrs, HasCrate, HasVisibility, HirDisplay, StructKind}; use ide_db::SnippetCap; use itertools::Itertools; -use syntax::SmolStr; +use syntax::{Edition, SmolStr}; /// A rendered struct, union, or enum variant, split into fields for actual /// auto-completion (`literal`, using `field: ()`) and display in the @@ -21,20 +21,29 @@ pub(crate) fn render_record_lit( snippet_cap: Option, fields: &[hir::Field], path: &str, + edition: Edition, ) -> RenderedLiteral { if snippet_cap.is_none() { return RenderedLiteral { literal: path.to_owned(), detail: path.to_owned() }; } let completions = fields.iter().enumerate().format_with(", ", |(idx, field), f| { if snippet_cap.is_some() { - f(&format_args!("{}: ${{{}:()}}", field.name(db).display(db.upcast()), idx + 1)) + f(&format_args!( + "{}: ${{{}:()}}", + field.name(db).display(db.upcast(), edition), + idx + 1 + )) } else { - f(&format_args!("{}: ()", field.name(db).display(db.upcast()))) + f(&format_args!("{}: ()", field.name(db).display(db.upcast(), edition))) } }); let types = fields.iter().format_with(", ", |field, f| { - f(&format_args!("{}: {}", field.name(db).display(db.upcast()), field.ty(db).display(db))) + f(&format_args!( + "{}: {}", + field.name(db).display(db.upcast(), edition), + field.ty(db).display(db, edition) + )) }); RenderedLiteral { @@ -50,6 +59,7 @@ pub(crate) fn render_tuple_lit( snippet_cap: Option, fields: &[hir::Field], path: &str, + edition: Edition, ) -> RenderedLiteral { if snippet_cap.is_none() { return RenderedLiteral { literal: path.to_owned(), detail: path.to_owned() }; @@ -62,7 +72,7 @@ pub(crate) fn render_tuple_lit( } }); - let types = fields.iter().format_with(", ", |field, f| f(&field.ty(db).display(db))); + let types = fields.iter().format_with(", ", |field, f| f(&field.ty(db).display(db, edition))); RenderedLiteral { literal: format!("{path}({completions})"), diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs index 415f2afeebbea..04ba7e1f41b62 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs @@ -17,6 +17,7 @@ mod item_list; mod pattern; mod predicate; mod proc_macros; +mod raw_identifiers; mod record; mod special; mod type_pos; @@ -105,22 +106,35 @@ pub(crate) fn completion_list_with_trigger_character( completion_list_with_config(TEST_CONFIG, ra_fixture, true, trigger_character) } -fn completion_list_with_config( +fn completion_list_with_config_raw( config: CompletionConfig, ra_fixture: &str, include_keywords: bool, trigger_character: Option, -) -> String { +) -> Vec { // filter out all but one built-in type completion for smaller test outputs let items = get_all_items(config, ra_fixture, trigger_character); - let items = items + items .into_iter() .filter(|it| it.kind != CompletionItemKind::BuiltinType || it.label == "u32") .filter(|it| include_keywords || it.kind != CompletionItemKind::Keyword) .filter(|it| include_keywords || it.kind != CompletionItemKind::Snippet) .sorted_by_key(|it| (it.kind, 
it.label.clone(), it.detail.as_ref().map(ToOwned::to_owned))) - .collect(); - render_completion_list(items) + .collect() +} + +fn completion_list_with_config( + config: CompletionConfig, + ra_fixture: &str, + include_keywords: bool, + trigger_character: Option, +) -> String { + render_completion_list(completion_list_with_config_raw( + config, + ra_fixture, + include_keywords, + trigger_character, + )) } /// Creates analysis from a multi-file fixture, returns positions marked with $0. diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs new file mode 100644 index 0000000000000..bc630189edc91 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs @@ -0,0 +1,84 @@ +use base_db::SourceDatabase; +use expect_test::{expect, Expect}; +use itertools::Itertools; + +use crate::tests::{completion_list_with_config_raw, position, TEST_CONFIG}; + +fn check(ra_fixture: &str, expect: Expect) { + let completions = completion_list_with_config_raw(TEST_CONFIG, ra_fixture, true, None); + let (db, position) = position(ra_fixture); + let mut actual = db.file_text(position.file_id).to_string(); + completions + .into_iter() + .exactly_one() + .expect("more than one completion") + .text_edit + .apply(&mut actual); + expect.assert_eq(&actual); +} + +#[test] +fn keyword_since_edition_completes_without_raw_on_old_edition() { + check( + r#" +//- /a.rs crate:a edition:2015 +pub fn dyn() {} + +//- /b.rs crate:b edition:2015 deps:a new_source_root:local +fn foo() { + a::dyn$0 +"#, + expect![[r#" + fn foo() { + a::dyn()$0 + "#]], + ); + + check( + r#" +//- /a.rs crate:a edition:2018 +pub fn r#dyn() {} + +//- /b.rs crate:b edition:2015 deps:a new_source_root:local +fn foo() { + a::dyn$0 +"#, + expect![[r#" + fn foo() { + a::dyn()$0 + "#]], + ); +} + +#[test] +fn keyword_since_edition_completes_with_raw_on_new_edition() { + check( + r#" +//- /a.rs crate:a edition:2015 +pub fn dyn() {} + +//- /b.rs crate:b edition:2018 deps:a new_source_root:local +fn foo() { + a::dyn$0 +"#, + expect![[r#" + fn foo() { + a::r#dyn()$0 + "#]], + ); + + check( + r#" +//- /a.rs crate:a edition:2018 +pub fn r#dyn() {} + +//- /b.rs crate:b edition:2018 deps:a new_source_root:local +fn foo() { + a::dyn$0 +"#, + expect![[r#" + fn foo() { + a::r#dyn()$0 + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs index 991bef344a310..5d4b1999088a0 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs @@ -14,10 +14,11 @@ use hir::{ ModuleDef, Name, PathResolution, Semantics, Static, StaticLifetime, ToolModule, Trait, TraitAlias, TupleField, TypeAlias, Variant, VariantDef, Visibility, }; +use span::Edition; use stdx::{format_to, impl_from}; use syntax::{ ast::{self, AstNode}, - match_ast, SyntaxKind, SyntaxNode, SyntaxToken, ToSmolStr, + match_ast, SyntaxKind, SyntaxNode, SyntaxToken, }; use crate::documentation::{Documentation, HasDocs}; @@ -157,6 +158,7 @@ impl Definition { &self, db: &RootDatabase, famous_defs: Option<&FamousDefs<'_, '_>>, + edition: Edition, ) -> Option { let docs = match self { Definition::Macro(it) => it.docs(db), @@ -173,8 +175,8 @@ impl Definition { Definition::BuiltinType(it) => { famous_defs.and_then(|fd| { // std exposes prim_{} modules with docstrings on the root to document the builtins - let primitive_mod = format!("prim_{}", 
it.name().display(fd.0.db)); - let doc_owner = find_std_module(fd, &primitive_mod)?; + let primitive_mod = format!("prim_{}", it.name().display(fd.0.db, edition)); + let doc_owner = find_std_module(fd, &primitive_mod, edition)?; doc_owner.docs(fd.0.db) }) } @@ -192,13 +194,18 @@ impl Definition { let AttributeTemplate { word, list, name_value_str } = it.template(db)?; let mut docs = "Valid forms are:".to_owned(); if word { - format_to!(docs, "\n - #\\[{}]", name.display(db)); + format_to!(docs, "\n - #\\[{}]", name.display(db, edition)); } if let Some(list) = list { - format_to!(docs, "\n - #\\[{}({})]", name.display(db), list); + format_to!(docs, "\n - #\\[{}({})]", name.display(db, edition), list); } if let Some(name_value_str) = name_value_str { - format_to!(docs, "\n - #\\[{} = {}]", name.display(db), name_value_str); + format_to!( + docs, + "\n - #\\[{} = {}]", + name.display(db, edition), + name_value_str + ); } Some(Documentation::new(docs.replace('*', "\\*"))) } @@ -218,57 +225,63 @@ impl Definition { }) } - pub fn label(&self, db: &RootDatabase) -> String { + pub fn label(&self, db: &RootDatabase, edition: Edition) -> String { match *self { - Definition::Macro(it) => it.display(db).to_string(), - Definition::Field(it) => it.display(db).to_string(), - Definition::TupleField(it) => it.display(db).to_string(), - Definition::Module(it) => it.display(db).to_string(), - Definition::Function(it) => it.display(db).to_string(), - Definition::Adt(it) => it.display(db).to_string(), - Definition::Variant(it) => it.display(db).to_string(), - Definition::Const(it) => it.display(db).to_string(), - Definition::Static(it) => it.display(db).to_string(), - Definition::Trait(it) => it.display(db).to_string(), - Definition::TraitAlias(it) => it.display(db).to_string(), - Definition::TypeAlias(it) => it.display(db).to_string(), - Definition::BuiltinType(it) => it.name().display(db).to_string(), - Definition::BuiltinLifetime(it) => it.name().display(db).to_string(), + Definition::Macro(it) => it.display(db, edition).to_string(), + Definition::Field(it) => it.display(db, edition).to_string(), + Definition::TupleField(it) => it.display(db, edition).to_string(), + Definition::Module(it) => it.display(db, edition).to_string(), + Definition::Function(it) => it.display(db, edition).to_string(), + Definition::Adt(it) => it.display(db, edition).to_string(), + Definition::Variant(it) => it.display(db, edition).to_string(), + Definition::Const(it) => it.display(db, edition).to_string(), + Definition::Static(it) => it.display(db, edition).to_string(), + Definition::Trait(it) => it.display(db, edition).to_string(), + Definition::TraitAlias(it) => it.display(db, edition).to_string(), + Definition::TypeAlias(it) => it.display(db, edition).to_string(), + Definition::BuiltinType(it) => it.name().display(db, edition).to_string(), + Definition::BuiltinLifetime(it) => it.name().display(db, edition).to_string(), Definition::Local(it) => { let ty = it.ty(db); - let ty_display = ty.display_truncated(db, None); + let ty_display = ty.display_truncated(db, None, edition); let is_mut = if it.is_mut(db) { "mut " } else { "" }; if it.is_self(db) { format!("{is_mut}self: {ty_display}") } else { let name = it.name(db); let let_kw = if it.is_param(db) { "" } else { "let " }; - format!("{let_kw}{is_mut}{}: {ty_display}", name.display(db)) + format!("{let_kw}{is_mut}{}: {ty_display}", name.display(db, edition)) } } Definition::SelfType(impl_def) => { let self_ty = &impl_def.self_ty(db); match self_ty.as_adt() { - Some(it) => 
it.display(db).to_string(), - None => self_ty.display(db).to_string(), + Some(it) => it.display(db, edition).to_string(), + None => self_ty.display(db, edition).to_string(), } } - Definition::GenericParam(it) => it.display(db).to_string(), - Definition::Label(it) => it.name(db).display(db).to_string(), - Definition::ExternCrateDecl(it) => it.display(db).to_string(), - Definition::BuiltinAttr(it) => format!("#[{}]", it.name(db).display(db)), - Definition::ToolModule(it) => it.name(db).display(db).to_string(), - Definition::DeriveHelper(it) => format!("derive_helper {}", it.name(db).display(db)), + Definition::GenericParam(it) => it.display(db, edition).to_string(), + Definition::Label(it) => it.name(db).display(db, edition).to_string(), + Definition::ExternCrateDecl(it) => it.display(db, edition).to_string(), + Definition::BuiltinAttr(it) => format!("#[{}]", it.name(db).display(db, edition)), + Definition::ToolModule(it) => it.name(db).display(db, edition).to_string(), + Definition::DeriveHelper(it) => { + format!("derive_helper {}", it.name(db).display(db, edition)) + } } } } -fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option { +fn find_std_module( + famous_defs: &FamousDefs<'_, '_>, + name: &str, + edition: Edition, +) -> Option { let db = famous_defs.0.db; let std_crate = famous_defs.std()?; let std_root_module = std_crate.root_module(); std_root_module.children(db).find(|module| { - module.name(db).map_or(false, |module| module.display(db).to_string() == name) + module.name(db).map_or(false, |module| module.display(db, edition).to_string() == name) }) } @@ -670,7 +683,7 @@ impl NameRefClass { hir::AssocItem::TypeAlias(it) => Some(it), _ => None, }) - .find(|alias| alias.name(sema.db).display_no_db().to_smolstr() == name_ref.text().as_str()) + .find(|alias| alias.name(sema.db).eq_ident(name_ref.text().as_str())) { return Some(NameRefClass::Definition(Definition::TypeAlias(ty))); } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs index 1a16a567f36ba..ba6e50abf658b 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs @@ -2,7 +2,6 @@ use base_db::{CrateOrigin, LangCrateOrigin, SourceDatabase}; use hir::{Crate, Enum, Function, Macro, Module, ScopeDef, Semantics, Trait}; -use syntax::ToSmolStr; use crate::RootDatabase; @@ -199,18 +198,14 @@ impl FamousDefs<'_, '_> { for segment in path { module = module.children(db).find_map(|child| { let name = child.name(db)?; - if name.display_no_db().to_smolstr() == segment { + if name.eq_ident(segment) { Some(child) } else { None } })?; } - let def = module - .scope(db, None) - .into_iter() - .find(|(name, _def)| name.display_no_db().to_smolstr() == trait_)? 
- .1; + let def = module.scope(db, None).into_iter().find(|(name, _def)| name.eq_ident(trait_))?.1; Some(def) } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs index e6638dde5d403..63c09af3d687b 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs @@ -4,7 +4,7 @@ use std::collections::VecDeque; use base_db::SourceRootDatabase; use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics}; -use span::FileId; +use span::{Edition, FileId}; use syntax::{ ast::{self, make}, AstToken, SyntaxKind, SyntaxToken, ToSmolStr, TokenAtOffset, @@ -35,7 +35,7 @@ pub fn pick_token(mut tokens: TokenAtOffset) -> Option } /// Converts the mod path struct into its ast representation. -pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path { +pub fn mod_path_to_ast(path: &hir::ModPath, edition: Edition) -> ast::Path { let _p = tracing::info_span!("mod_path_to_ast").entered(); let mut segments = Vec::new(); @@ -50,11 +50,9 @@ pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path { hir::PathKind::Abs => is_abs = true, } - segments.extend( - path.segments().iter().map(|segment| { - make::path_segment(make::name_ref(&segment.display_no_db().to_smolstr())) - }), - ); + segments.extend(path.segments().iter().map(|segment| { + make::path_segment(make::name_ref(&segment.display_no_db(edition).to_smolstr())) + })); make::path_from_segments(segments, is_abs) } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs index 1c4c15f2557d7..07e197334706e 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs @@ -9,7 +9,7 @@ use itertools::{EitherOrBoth, Itertools}; use rustc_hash::{FxHashMap, FxHashSet}; use syntax::{ ast::{self, make, HasName}, - AstNode, SmolStr, SyntaxNode, ToSmolStr, + AstNode, SmolStr, SyntaxNode, }; use crate::{ @@ -459,7 +459,7 @@ fn find_import_for_segment( unresolved_first_segment: &str, ) -> Option { let segment_is_name = item_name(db, original_item) - .map(|name| name.display_no_db().to_smolstr() == unresolved_first_segment) + .map(|name| name.eq_ident(unresolved_first_segment)) .unwrap_or(false); Some(if segment_is_name { @@ -483,7 +483,7 @@ fn module_with_segment_name( }; while let Some(module) = current_module { if let Some(module_name) = module.name(db) { - if module_name.display_no_db().to_smolstr() == segment_name { + if module_name.eq_ident(segment_name) { return Some(module); } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs index 0afa9163e31d2..49b3ca290f07d 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs @@ -5,6 +5,7 @@ use either::Either; use hir::{AsAssocItem, HirDisplay, ImportPathConfig, ModuleDef, SemanticsScope}; use itertools::Itertools; use rustc_hash::FxHashMap; +use span::Edition; use syntax::{ ast::{self, make, AstNode, HasGenericArgs}, ted, NodeOrToken, SyntaxNode, @@ -146,6 +147,7 @@ impl<'a> PathTransform<'a> { let mut type_substs: FxHashMap = Default::default(); let mut const_substs: FxHashMap = Default::default(); let mut defaulted_params: Vec = Default::default(); + let target_edition = target_module.krate().edition(self.source_scope.db); 
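
Several of the hunks above swap a rendered-text comparison (`name.display_no_db().to_smolstr() == segment`) for `name.eq_ident(segment)`, so that a raw identifier such as `r#dyn` still matches the plain segment `dyn`. A minimal standalone sketch of that comparison, using plain `&str` in place of `hir::Name`, looks roughly like this:

```rust
// Standalone stand-in for the `eq_ident`-style comparison: equality on the
// identifier itself, ignoring the `r#` raw-identifier escape.
fn strip_raw(name: &str) -> &str {
    name.strip_prefix("r#").unwrap_or(name)
}

fn eq_ident(name: &str, other: &str) -> bool {
    strip_raw(name) == strip_raw(other)
}

fn main() {
    // `r#dyn` and `dyn` name the same item; only the escaping differs.
    assert!(eq_ident("r#dyn", "dyn"));
    assert!(eq_ident("foo", "foo"));
    assert!(!eq_ident("r#try", "r#dyn"));
}
```

The effect of the change is that matching no longer depends on how a name happens to be escaped for a particular edition.
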
self.generic_def .into_iter() .flat_map(|it| it.type_or_const_params(db)) @@ -190,7 +192,7 @@ impl<'a> PathTransform<'a> { } } (Either::Left(k), None) => { - if let Some(default) = k.default(db) { + if let Some(default) = k.default(db, target_edition) { if let Some(default) = default.expr() { const_substs.insert(k, default.syntax().clone_for_update()); defaulted_params.push(Either::Right(k)); @@ -204,7 +206,9 @@ impl<'a> PathTransform<'a> { .into_iter() .flat_map(|it| it.lifetime_params(db)) .zip(self.substs.lifetimes.clone()) - .filter_map(|(k, v)| Some((k.name(db).display(db.upcast()).to_string(), v.lifetime()?))) + .filter_map(|(k, v)| { + Some((k.name(db).display(db.upcast(), target_edition).to_string(), v.lifetime()?)) + }) .collect(); let ctx = Ctx { type_substs, @@ -213,6 +217,7 @@ impl<'a> PathTransform<'a> { target_module, source_scope: self.source_scope, same_self_type: self.target_scope.has_same_self_type(self.source_scope), + target_edition, }; ctx.transform_default_values(defaulted_params); ctx @@ -226,6 +231,7 @@ struct Ctx<'a> { target_module: hir::Module, source_scope: &'a SemanticsScope<'a>, same_self_type: bool, + target_edition: Edition, } fn preorder_rev(item: &SyntaxNode) -> impl Iterator { @@ -318,7 +324,7 @@ impl Ctx<'_> { hir::ModuleDef::Trait(trait_ref), cfg, )?; - match make::ty_path(mod_path_to_ast(&found_path)) { + match make::ty_path(mod_path_to_ast(&found_path, self.target_edition)) { ast::Type::PathType(path_ty) => Some(path_ty), _ => None, } @@ -374,7 +380,7 @@ impl Ctx<'_> { }; let found_path = self.target_module.find_path(self.source_scope.db.upcast(), def, cfg)?; - let res = mod_path_to_ast(&found_path).clone_for_update(); + let res = mod_path_to_ast(&found_path, self.target_edition).clone_for_update(); if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) { if let Some(segment) = res.segment() { let old = segment.get_or_create_generic_arg_list(); @@ -417,7 +423,9 @@ impl Ctx<'_> { cfg, )?; - if let Some(qual) = mod_path_to_ast(&found_path).qualifier() { + if let Some(qual) = + mod_path_to_ast(&found_path, self.target_edition).qualifier() + { let res = make::path_concat(qual, path_ty.path()?).clone_for_update(); ted::replace(path.syntax(), res.syntax()); return Some(()); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs index 232f242828777..262eefeec00e4 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs @@ -29,6 +29,7 @@ use span::{Edition, EditionedFileId, FileId, SyntaxContextId}; use stdx::{never, TupleExt}; use syntax::{ ast::{self, HasName}, + utils::is_raw_identifier, AstNode, SyntaxKind, TextRange, T, }; use text_edit::{TextEdit, TextEditBuilder}; @@ -72,6 +73,9 @@ impl Definition { sema: &Semantics<'_, RootDatabase>, new_name: &str, ) -> Result { + // We append `r#` if needed. + let new_name = new_name.trim_start_matches("r#"); + // self.krate() returns None if // self is a built-in attr, built-in type or tool module. // it is not allowed for these defs to be renamed. @@ -227,8 +231,7 @@ fn rename_mod( module: hir::Module, new_name: &str, ) -> Result { - if IdentifierKind::classify(module.krate().edition(sema.db), new_name)? != IdentifierKind::Ident - { + if IdentifierKind::classify(new_name)? 
!= IdentifierKind::Ident { bail!("Invalid name `{0}`: cannot rename module to {0}", new_name); } @@ -240,7 +243,6 @@ fn rename_mod( let InFile { file_id, value: def_source } = module.definition_source(sema.db); if let ModuleSource::SourceFile(..) = def_source { - let new_name = new_name.trim_start_matches("r#"); let anchor = file_id.original_file(sema.db).file_id(); let is_mod_rs = module.is_mod_rs(sema.db); @@ -289,9 +291,14 @@ fn rename_mod( .original_file_range_opt(sema.db) .map(TupleExt::head) { + let new_name = if is_raw_identifier(new_name, file_id.edition()) { + format!("r#{new_name}") + } else { + new_name.to_owned() + }; source_change.insert_source_edit( file_id.file_id(), - TextEdit::replace(file_range.range, new_name.to_owned()), + TextEdit::replace(file_range.range, new_name), ) }; } @@ -302,7 +309,10 @@ fn rename_mod( let def = Definition::Module(module); let usages = def.usages(sema).all(); let ref_edits = usages.iter().map(|(file_id, references)| { - (EditionedFileId::file_id(file_id), source_edit_from_references(references, def, new_name)) + ( + EditionedFileId::file_id(file_id), + source_edit_from_references(references, def, new_name, file_id.edition()), + ) }); source_change.extend(ref_edits); @@ -314,12 +324,7 @@ fn rename_reference( def: Definition, new_name: &str, ) -> Result { - let ident_kind = IdentifierKind::classify( - def.krate(sema.db) - .ok_or_else(|| RenameError("definition has no krate?".into()))? - .edition(sema.db), - new_name, - )?; + let ident_kind = IdentifierKind::classify(new_name)?; if matches!( def, @@ -351,7 +356,10 @@ fn rename_reference( } let mut source_change = SourceChange::default(); source_change.extend(usages.iter().map(|(file_id, references)| { - (EditionedFileId::file_id(file_id), source_edit_from_references(references, def, new_name)) + ( + EditionedFileId::file_id(file_id), + source_edit_from_references(references, def, new_name, file_id.edition()), + ) })); let mut insert_def_edit = |def| { @@ -367,7 +375,13 @@ pub fn source_edit_from_references( references: &[FileReference], def: Definition, new_name: &str, + edition: Edition, ) -> TextEdit { + let new_name = if is_raw_identifier(new_name, edition) { + format!("r#{new_name}") + } else { + new_name.to_owned() + }; let mut edit = TextEdit::builder(); // macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far let mut edited_ranges = Vec::new(); @@ -383,10 +397,10 @@ pub fn source_edit_from_references( // to make special rewrites like shorthand syntax and such, so just rename the node in // the macro input FileReferenceNode::NameRef(name_ref) if name_range == range => { - source_edit_from_name_ref(&mut edit, name_ref, new_name, def) + source_edit_from_name_ref(&mut edit, name_ref, &new_name, def) } FileReferenceNode::Name(name) if name_range == range => { - source_edit_from_name(&mut edit, name, new_name) + source_edit_from_name(&mut edit, name, &new_name) } _ => false, }; @@ -394,7 +408,7 @@ pub fn source_edit_from_references( let (range, new_name) = match name { FileReferenceNode::Lifetime(_) => ( TextRange::new(range.start() + syntax::TextSize::from(1), range.end()), - new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(), + new_name.strip_prefix('\'').unwrap_or(&new_name).to_owned(), ), _ => (range, new_name.to_owned()), }; @@ -522,6 +536,13 @@ fn source_edit_from_def( def: Definition, new_name: &str, ) -> Result<(FileId, TextEdit)> { + let new_name_edition_aware = |new_name: &str, file_id: EditionedFileId| { + if 
is_raw_identifier(new_name, file_id.edition()) { + format!("r#{new_name}") + } else { + new_name.to_owned() + } + }; let mut edit = TextEdit::builder(); if let Definition::Local(local) = def { let mut file_id = None; @@ -536,7 +557,7 @@ fn source_edit_from_def( { Some(FileRange { file_id: file_id2, range }) => { file_id = Some(file_id2); - edit.replace(range, new_name.to_owned()); + edit.replace(range, new_name_edition_aware(new_name, file_id2)); continue; } None => { @@ -550,7 +571,9 @@ fn source_edit_from_def( // special cases required for renaming fields/locals in Record patterns if let Some(pat_field) = pat.syntax().parent().and_then(ast::RecordPatField::cast) { if let Some(name_ref) = pat_field.name_ref() { - if new_name == name_ref.text() && pat.at_token().is_none() { + if new_name == name_ref.text().as_str().trim_start_matches("r#") + && pat.at_token().is_none() + { // Foo { field: ref mut local } -> Foo { ref mut field } // ^^^^^^ delete this // ^^^^^ replace this with `field` @@ -566,7 +589,10 @@ fn source_edit_from_def( // Foo { field: ref mut local @ local 2} -> Foo { field: ref mut new_name @ local2 } // Foo { field: ref mut local } -> Foo { field: ref mut new_name } // ^^^^^ replace this with `new_name` - edit.replace(name_range, new_name.to_owned()); + edit.replace( + name_range, + new_name_edition_aware(new_name, source.file_id), + ); } } else { // Foo { ref mut field } -> Foo { field: ref mut new_name } @@ -576,10 +602,10 @@ fn source_edit_from_def( pat.syntax().text_range().start(), format!("{}: ", pat_field.field_name().unwrap()), ); - edit.replace(name_range, new_name.to_owned()); + edit.replace(name_range, new_name_edition_aware(new_name, source.file_id)); } } else { - edit.replace(name_range, new_name.to_owned()); + edit.replace(name_range, new_name_edition_aware(new_name, source.file_id)); } } } @@ -599,7 +625,7 @@ fn source_edit_from_def( } _ => (range, new_name.to_owned()), }; - edit.replace(range, new_name); + edit.replace(range, new_name_edition_aware(&new_name, file_id)); Ok((file_id.file_id(), edit.finish())) } @@ -611,8 +637,9 @@ pub enum IdentifierKind { } impl IdentifierKind { - pub fn classify(edition: Edition, new_name: &str) -> Result { - match parser::LexedStr::single_token(edition, new_name) { + pub fn classify(new_name: &str) -> Result { + let new_name = new_name.trim_start_matches("r#"); + match parser::LexedStr::single_token(Edition::LATEST, new_name) { Some(res) => match res { (SyntaxKind::IDENT, _) => { if let Some(inner) = new_name.strip_prefix("r#") { @@ -626,6 +653,7 @@ impl IdentifierKind { (SyntaxKind::LIFETIME_IDENT, _) if new_name != "'static" && new_name != "'_" => { Ok(IdentifierKind::Lifetime) } + _ if is_raw_identifier(new_name, Edition::LATEST) => Ok(IdentifierKind::Ident), (_, Some(syntax_error)) => bail!("Invalid name `{}`: {}", new_name, syntax_error), (_, None) => bail!("Invalid name `{}`: not an identifier", new_name), }, diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs index 6714e411e2f7a..dd4a665e8eb4f 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/insert_whitespace_into_node.rs @@ -1,5 +1,4 @@ //! Utilities for formatting macro expanded nodes until we get a proper formatter. 
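
The rename hunks above normalize the requested name by stripping any `r#` prefix once, and then re-add the prefix at each edit site based on that file's edition (`is_raw_identifier(new_name, file_id.edition())`). A simplified, self-contained sketch of that escape-per-edition step, with a toy keyword table standing in for the real per-edition keyword data:

```rust
// The keyword table below is a tiny illustrative subset; the real
// `is_raw_identifier` consults the parser's full per-edition keyword set.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Edition {
    E2015,
    E2018,
    E2021,
}

// Toy check: does `ident` need an `r#` prefix when written in `edition`?
fn is_raw_identifier(ident: &str, edition: Edition) -> bool {
    match ident {
        "dyn" | "async" | "await" | "try" => edition >= Edition::E2018,
        _ => false,
    }
}

// Mirror of the `new_name_edition_aware` idea: the name arrives with any
// `r#` already stripped, and is re-escaped per file edition at the edit site.
fn escape_for(ident: &str, edition: Edition) -> String {
    if is_raw_identifier(ident, edition) {
        format!("r#{ident}")
    } else {
        ident.to_owned()
    }
}

fn main() {
    let new_name = "dyn"; // already normalized, no `r#` prefix
    assert_eq!(escape_for(new_name, Edition::E2015), "dyn");
    assert_eq!(escape_for(new_name, Edition::E2018), "r#dyn");
    assert_eq!(escape_for("async", Edition::E2021), "r#async");
}
```

Stripping up front and escaping late is what lets a single rename apply cleanly across crates that sit on different editions.
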
-use span::Edition; use syntax::{ ast::make, ted::{self, Position}, @@ -132,6 +131,6 @@ pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode { } fn is_text(k: SyntaxKind) -> bool { - // FIXME: Edition - k.is_keyword(Edition::CURRENT) || k.is_literal() || k == IDENT || k == UNDERSCORE + // Consider all keywords in all editions. + k.is_any_identifier() || k.is_literal() || k == UNDERSCORE } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs index d10a55120a32f..fa82902a74d5c 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -457,13 +457,15 @@ impl Iterator for TreeWithDepthIterator { } /// Parses the input token tree as comma separated plain paths. -pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option> { +pub fn parse_tt_as_comma_sep_paths( + input: ast::TokenTree, + edition: Edition, +) -> Option> { let r_paren = input.r_paren_token(); let tokens = input.syntax().children_with_tokens().skip(1).map_while(|it| match it.into_token() { // seeing a keyword means the attribute is unclosed so stop parsing here - // FIXME: Edition - Some(tok) if tok.kind().is_keyword(Edition::CURRENT) => None, + Some(tok) if tok.kind().is_keyword(edition) => None, // don't include the right token tree parenthesis if it exists tok @ Some(_) if tok == r_paren => None, // only nodes that we can find are other TokenTrees, those are unexpected in this parse though diff --git a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs index 48a585bf333bc..82aca50d03998 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs @@ -34,19 +34,20 @@ pub fn get_missing_assoc_items( // may share the same name as a function or constant. 
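
The `get_missing_assoc_items` change that continues below only threads the crate edition into each rendered name; the surrounding logic is a plain set difference between the trait's items and the names the impl already provides. Reduced to strings, the shape is roughly:

```rust
use std::collections::HashSet;

// The real code keys functions/consts and type aliases in two separate sets
// and renders each name with the crate's edition; this sketch keeps only the
// set-difference idea, over plain strings.
fn missing_items<'a>(trait_items: &[&'a str], impl_items: &[&'a str]) -> Vec<&'a str> {
    let provided: HashSet<&str> = impl_items.iter().copied().collect();
    trait_items.iter().copied().filter(|name| !provided.contains(*name)).collect()
}

fn main() {
    let trait_items = ["new", "len", "Item"];
    let impl_items = ["new"];
    // `len` and `Item` still need to be added to the impl block.
    assert_eq!(missing_items(&trait_items, &impl_items), vec!["len", "Item"]);
}
```
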
let mut impl_fns_consts = FxHashSet::default(); let mut impl_type = FxHashSet::default(); + let edition = imp.module(sema.db).krate().edition(sema.db); for item in imp.items(sema.db) { match item { hir::AssocItem::Function(it) => { - impl_fns_consts.insert(it.name(sema.db).display(sema.db).to_string()); + impl_fns_consts.insert(it.name(sema.db).display(sema.db, edition).to_string()); } hir::AssocItem::Const(it) => { if let Some(name) = it.name(sema.db) { - impl_fns_consts.insert(name.display(sema.db).to_string()); + impl_fns_consts.insert(name.display(sema.db, edition).to_string()); } } hir::AssocItem::TypeAlias(it) => { - impl_type.insert(it.name(sema.db).display(sema.db).to_string()); + impl_type.insert(it.name(sema.db).display(sema.db, edition).to_string()); } } } @@ -56,15 +57,14 @@ pub fn get_missing_assoc_items( .items(sema.db) .into_iter() .filter(|i| match i { - hir::AssocItem::Function(f) => { - !impl_fns_consts.contains(&f.name(sema.db).display(sema.db).to_string()) - } + hir::AssocItem::Function(f) => !impl_fns_consts + .contains(&f.name(sema.db).display(sema.db, edition).to_string()), hir::AssocItem::TypeAlias(t) => { - !impl_type.contains(&t.name(sema.db).display(sema.db).to_string()) + !impl_type.contains(&t.name(sema.db).display(sema.db, edition).to_string()) } hir::AssocItem::Const(c) => c .name(sema.db) - .map(|n| !impl_fns_consts.contains(&n.display(sema.db).to_string())) + .map(|n| !impl_fns_consts.contains(&n.display(sema.db, edition).to_string())) .unwrap_or_default(), }) .collect() @@ -116,6 +116,7 @@ mod tests { use expect_test::{expect, Expect}; use hir::FilePosition; use hir::Semantics; + use span::Edition; use syntax::ast::{self, AstNode}; use test_fixture::ChangeFixture; @@ -140,7 +141,7 @@ mod tests { sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap(); let trait_ = crate::traits::resolve_target_trait(&sema, &impl_block); let actual = match trait_ { - Some(trait_) => trait_.name(&db).display(&db).to_string(), + Some(trait_) => trait_.name(&db).display(&db, Edition::CURRENT).to_string(), None => String::new(), }; expect.assert_eq(&actual); @@ -155,7 +156,7 @@ mod tests { let items = crate::traits::get_missing_assoc_items(&sema, &impl_block); let actual = items .into_iter() - .map(|item| item.name(&db).unwrap().display(&db).to_string()) + .map(|item| item.name(&db).unwrap().display(&db, Edition::CURRENT).to_string()) .collect::>() .join("\n"); expect.assert_eq(&actual); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs b/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs index 5b566c5067d85..515bc418cb467 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/ty_filter.rs @@ -5,10 +5,7 @@ use std::iter; use hir::Semantics; -use syntax::{ - ast::{self, make, Pat}, - ToSmolStr, -}; +use syntax::ast::{self, make, Pat}; use crate::RootDatabase; @@ -29,7 +26,7 @@ impl TryEnum { _ => return None, }; TryEnum::ALL.iter().find_map(|&var| { - if enum_.name(sema.db).display_no_db().to_smolstr() == var.type_name() { + if enum_.name(sema.db).eq_ident(var.type_name()) { return Some(var); } None diff --git a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs index 965f432407b0e..c3f0bf3706904 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/use_trivial_constructor.rs @@ -1,6 +1,7 @@ //! 
Functionality for generating trivial constructors use hir::StructKind; +use span::Edition; use syntax::{ ast::{make, Expr, Path}, ToSmolStr, @@ -11,6 +12,7 @@ pub fn use_trivial_constructor( db: &crate::RootDatabase, path: Path, ty: &hir::Type, + edition: Edition, ) -> Option { match ty.as_adt() { Some(hir::Adt::Enum(x)) => { @@ -19,7 +21,7 @@ pub fn use_trivial_constructor( let path = make::path_qualified( path, make::path_segment(make::name_ref( - &variant.name(db).display_no_db().to_smolstr(), + &variant.name(db).display_no_db(edition).to_smolstr(), )), ); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs index 05fb1c29b3137..02299197b1258 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs @@ -12,7 +12,7 @@ pub(crate) fn expected_function( Diagnostic::new_with_syntax_node_ptr( ctx, DiagnosticCode::RustcHardError("E0618"), - format!("expected function, found {}", d.found.display(ctx.sema.db)), + format!("expected function, found {}", d.found.display(ctx.sema.db, ctx.edition)), d.call.map(|it| it.into()), ) .experimental() diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index 117088ca09c8a..ccb33fed10044 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -12,7 +12,7 @@ use itertools::Itertools; use stdx::{format_to, never}; use syntax::{ ast::{self, make}, - SyntaxKind, SyntaxNode, + Edition, SyntaxKind, SyntaxNode, }; use text_edit::TextEdit; @@ -104,6 +104,7 @@ pub(crate) fn json_in_items( file_id: EditionedFileId, node: &SyntaxNode, config: &DiagnosticsConfig, + edition: Edition, ) { (|| { if node.kind() == SyntaxKind::ERROR @@ -156,7 +157,11 @@ pub(crate) fn json_in_items( config.insert_use.prefix_kind, cfg, ) { - insert_use(&scope, mod_path_to_ast(&it), &config.insert_use); + insert_use( + &scope, + mod_path_to_ast(&it, edition), + &config.insert_use, + ); } } } @@ -168,7 +173,11 @@ pub(crate) fn json_in_items( config.insert_use.prefix_kind, cfg, ) { - insert_use(&scope, mod_path_to_ast(&it), &config.insert_use); + insert_use( + &scope, + mod_path_to_ast(&it, edition), + &config.insert_use, + ); } } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs index ea7908525ae12..86c237f7b5ec3 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -11,7 +11,7 @@ use stdx::format_to; use syntax::{ algo, ast::{self, make}, - AstNode, SyntaxNode, SyntaxNodePtr, ToSmolStr, + AstNode, Edition, SyntaxNode, SyntaxNodePtr, ToSmolStr, }; use text_edit::TextEdit; @@ -31,7 +31,7 @@ use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; pub(crate) fn missing_fields(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Diagnostic { let mut message = String::from("missing structure fields:\n"); for field in &d.missed_fields { - format_to!(message, "- {}\n", field.display(ctx.sema.db)); + format_to!(message, "- {}\n", field.display(ctx.sema.db, 
ctx.edition)); } let ptr = InFile::new( @@ -134,8 +134,9 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option, d: &hir::MissingFields) -> Option, d: &hir::MissingFields) -> Option, d: &hir::MissingFields) -> Option ast::Type { +fn make_ty( + ty: &hir::Type, + db: &dyn HirDatabase, + module: hir::Module, + edition: Edition, +) -> ast::Type { let ty_str = match ty.as_adt() { - Some(adt) => adt.name(db).display(db.upcast()).to_string(), + Some(adt) => adt.name(db).display(db.upcast(), edition).to_string(), None => { ty.display_source_code(db, module.into(), false).ok().unwrap_or_else(|| "_".to_owned()) } @@ -223,13 +229,13 @@ fn get_default_constructor( let famous_defs = FamousDefs(&ctx.sema, krate); if has_new_func { - Some(make::ext::expr_ty_new(&make_ty(ty, ctx.sema.db, module))) + Some(make::ext::expr_ty_new(&make_ty(ty, ctx.sema.db, module, ctx.edition))) } else if ty.as_adt() == famous_defs.core_option_Option()?.ty(ctx.sema.db).as_adt() { Some(make::ext::option_none()) } else if !ty.is_array() && ty.impls_trait(ctx.sema.db, famous_defs.core_default_Default()?, &[]) { - Some(make::ext::expr_ty_default(&make_ty(ty, ctx.sema.db, module))) + Some(make::ext::expr_ty_default(&make_ty(ty, ctx.sema.db, module, ctx.edition))) } else { None } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs index fa9a6577fcf35..06c6b0f3e4c38 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs @@ -8,7 +8,7 @@ pub(crate) fn moved_out_of_ref(ctx: &DiagnosticsContext<'_>, d: &hir::MovedOutOf Diagnostic::new_with_syntax_node_ptr( ctx, DiagnosticCode::RustcHardError("E0507"), - format!("cannot move `{}` out of reference", d.ty.display(ctx.sema.db)), + format!("cannot move `{}` out of reference", d.ty.display(ctx.sema.db, ctx.edition)), d.span, ) .experimental() // spans are broken, and I'm not sure how precise we can detect copy types diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs index 00352266ddbdc..e4b1f3ca95998 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -40,7 +40,7 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Option DiagnosticCode::RustcHardError("E0384"), format!( "cannot mutate immutable variable `{}`", - d.local.name(ctx.sema.db).display(ctx.sema.db) + d.local.name(ctx.sema.db).display(ctx.sema.db, ctx.edition) ), d.span, ) diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs index f6ed0d7226a79..fe32c590492d1 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs @@ -12,7 +12,7 @@ pub(crate) fn private_assoc_item( let name = d .item .name(ctx.sema.db) - .map(|name| format!("`{}` ", name.display(ctx.sema.db))) + .map(|name| format!("`{}` ", name.display(ctx.sema.db, ctx.edition))) .unwrap_or_default(); Diagnostic::new_with_syntax_node_ptr( ctx, diff --git 
a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs index e91e64c81b0b2..237a9b87871c9 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs @@ -10,8 +10,8 @@ pub(crate) fn private_field(ctx: &DiagnosticsContext<'_>, d: &hir::PrivateField) DiagnosticCode::RustcHardError("E0616"), format!( "field `{}` of `{}` is private", - d.field.name(ctx.sema.db).display(ctx.sema.db), - d.field.parent_def(ctx.sema.db).name(ctx.sema.db).display(ctx.sema.db) + d.field.name(ctx.sema.db).display(ctx.sema.db, ctx.edition), + d.field.parent_def(ctx.sema.db).name(ctx.sema.db).display(ctx.sema.db, ctx.edition) ), d.expr.map(|it| it.into()), ) diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs index 58d1b7f31d2fe..a35b67ce98781 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs @@ -17,7 +17,7 @@ pub(crate) fn trait_impl_missing_assoc_item( hir::AssocItem::Const(_) => "`const ", hir::AssocItem::TypeAlias(_) => "`type ", })?; - f(&name.display(ctx.sema.db))?; + f(&name.display(ctx.sema.db, ctx.edition))?; f(&"`") }); Diagnostic::new( diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs index 6d756484ebc0f..3de51ca4a30a9 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs @@ -18,11 +18,11 @@ pub(crate) fn trait_impl_redundant_assoc_item( ) -> Diagnostic { let db = ctx.sema.db; let name = d.assoc_item.0.clone(); - let redundant_assoc_item_name = name.display(db); + let redundant_assoc_item_name = name.display(db, ctx.edition); let assoc_item = d.assoc_item.1; let default_range = d.impl_.syntax_node_ptr().text_range(); - let trait_name = d.trait_.name(db).display_no_db().to_smolstr(); + let trait_name = d.trait_.name(db).display_no_db(ctx.edition).to_smolstr(); let (redundant_item_name, diagnostic_range, redundant_item_def) = match assoc_item { hir::AssocItem::Function(id) => { @@ -30,7 +30,7 @@ pub(crate) fn trait_impl_redundant_assoc_item( ( format!("`fn {redundant_assoc_item_name}`"), function.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range), - format!("\n {};", function.display(db)), + format!("\n {};", function.display(db, ctx.edition)), ) } hir::AssocItem::Const(id) => { @@ -38,7 +38,7 @@ pub(crate) fn trait_impl_redundant_assoc_item( ( format!("`const {redundant_assoc_item_name}`"), constant.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range), - format!("\n {};", constant.display(db)), + format!("\n {};", constant.display(db, ctx.edition)), ) } hir::AssocItem::TypeAlias(id) => { @@ -48,7 +48,7 @@ pub(crate) fn trait_impl_redundant_assoc_item( type_alias.source(db).map(|it| it.syntax().text_range()).unwrap_or(default_range), format!( "\n type {};", - type_alias.name(ctx.sema.db).display_no_db().to_smolstr() + 
type_alias.name(ctx.sema.db).display_no_db(ctx.edition).to_smolstr() ), ) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs index 6f5c68d4b5c86..5cce7c4aed565 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -40,8 +40,12 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) DiagnosticCode::RustcHardError("E0308"), format!( "expected {}, found {}", - d.expected.display(ctx.sema.db).with_closure_style(ClosureStyle::ClosureWithId), - d.actual.display(ctx.sema.db).with_closure_style(ClosureStyle::ClosureWithId), + d.expected + .display(ctx.sema.db, ctx.edition) + .with_closure_style(ClosureStyle::ClosureWithId), + d.actual + .display(ctx.sema.db, ctx.edition) + .with_closure_style(ClosureStyle::ClosureWithId), ), display_range, ) @@ -199,8 +203,8 @@ fn str_ref_to_owned( expr_ptr: &InFile>, acc: &mut Vec, ) -> Option<()> { - let expected = d.expected.display(ctx.sema.db); - let actual = d.actual.display(ctx.sema.db); + let expected = d.expected.display(ctx.sema.db, ctx.edition); + let actual = d.actual.display(ctx.sema.db, ctx.edition); // FIXME do this properly if expected.to_string() != "String" || actual.to_string() != "&str" { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs index b4a566e318810..b5c242e1e9f82 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -26,7 +26,9 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di ( format!( "invalid `_` expression, expected type `{}`", - d.expected.display(ctx.sema.db).with_closure_style(ClosureStyle::ClosureWithId), + d.expected + .display(ctx.sema.db, ctx.edition) + .with_closure_style(ClosureStyle::ClosureWithId), ), fixes(ctx, d), ) @@ -69,6 +71,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option prefer_prelude: ctx.config.prefer_prelude, prefer_absolute: ctx.config.prefer_absolute, }, + ctx.edition, ) .ok() }) diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs index 97943b7e8b347..6af36fb9e7398 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs @@ -9,7 +9,7 @@ pub(crate) fn undeclared_label( Diagnostic::new_with_syntax_node_ptr( ctx, DiagnosticCode::RustcHardError("undeclared-label"), - format!("use of undeclared label `{}`", name.display(ctx.sema.db)), + format!("use of undeclared label `{}`", name.display(ctx.sema.db, ctx.edition)), d.node.map(|it| it.into()), ) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs index a1573bab8ae9b..e0822fc5b3385 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -11,7 +11,7 @@ use ide_db::{ use paths::Utf8Component; use syntax::{ ast::{self, edit::IndentLevel, 
HasModuleItem, HasName}, - AstNode, TextRange, ToSmolStr, + AstNode, TextRange, }; use text_edit::TextEdit; @@ -112,8 +112,7 @@ fn fixes( // shouldn't occur _ => continue 'crates, }; - match current.children.iter().find(|(name, _)| name.display_no_db().to_smolstr() == seg) - { + match current.children.iter().find(|(name, _)| name.eq_ident(seg)) { Some((_, &child)) => current = &crate_def_map[child], None => continue 'crates, } @@ -162,11 +161,7 @@ fn fixes( // try finding a parent that has an inline tree from here on let mut current = module; for s in stack.iter().rev() { - match module - .children - .iter() - .find(|(name, _)| name.display_no_db().to_smolstr() == s) - { + match module.children.iter().find(|(name, _)| name.eq_ident(s)) { Some((_, child)) => { current = &crate_def_map[*child]; } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs index 3601041fc735b..bdff2417ca114 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs @@ -9,7 +9,7 @@ pub(crate) fn unreachable_label( Diagnostic::new_with_syntax_node_ptr( ctx, DiagnosticCode::RustcHardError("E0767"), - format!("use of unreachable label `{}`", name.display(ctx.sema.db)), + format!("use of unreachable label `{}`", name.display(ctx.sema.db, ctx.edition)), d.node.map(|it| it.into()), ) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs index eb8eea69f67f6..76d624c47abfd 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -36,8 +36,8 @@ pub(crate) fn unresolved_field( DiagnosticCode::RustcHardError("E0559"), format!( "no field `{}` on type `{}`{method_suffix}", - d.name.display(ctx.sema.db), - d.receiver.display(ctx.sema.db) + d.name.display(ctx.sema.db, ctx.edition), + d.receiver.display(ctx.sema.db, ctx.edition) ), adjusted_display_range(ctx, d.expr, &|expr| { Some( diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs index c8ff54cba3a74..5b596123e75fd 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs @@ -13,7 +13,7 @@ pub(crate) fn unresolved_macro_call( let bang = if d.is_bang { "!" 
} else { "" }; Diagnostic::new( DiagnosticCode::RustcHardError("unresolved-macro-call"), - format!("unresolved macro `{}{bang}`", d.path.display(ctx.sema.db)), + format!("unresolved macro `{}{bang}`", d.path.display(ctx.sema.db, ctx.edition)), display_range, ) .experimental() diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs index 387d56b890b94..c0d038a238ba8 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -30,8 +30,8 @@ pub(crate) fn unresolved_method( DiagnosticCode::RustcHardError("E0599"), format!( "no method `{}` on type `{}`{suffix}", - d.name.display(ctx.sema.db), - d.receiver.display(ctx.sema.db) + d.name.display(ctx.sema.db, ctx.edition), + d.receiver.display(ctx.sema.db, ctx.edition) ), adjusted_display_range(ctx, d.expr, &|expr| { Some( @@ -154,9 +154,10 @@ fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) - }; let mut receiver_type_adt_name = - receiver_type.as_adt()?.name(db).display_no_db().to_smolstr(); + receiver_type.as_adt()?.name(db).display_no_db(ctx.edition).to_smolstr(); - let generic_parameters: Vec = receiver_type.generic_parameters(db).collect(); + let generic_parameters: Vec = + receiver_type.generic_parameters(db, ctx.edition).collect(); // if receiver should be pass as first arg in the assoc func, // we could omit generic parameters cause compiler can deduce it automatically if !need_to_take_receiver_as_first_arg && !generic_parameters.is_empty() { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs index bf19331d9fdd5..c594754bd440a 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs @@ -5,7 +5,7 @@ use ide_db::{ source_change::SourceChange, FileRange, RootDatabase, }; -use syntax::TextRange; +use syntax::{Edition, TextRange}; use text_edit::TextEdit; use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; @@ -42,7 +42,14 @@ pub(crate) fn unused_variables( ast, ) .with_fixes(name_range.and_then(|it| { - fixes(ctx.sema.db, var_name, it.range, diagnostic_range.into(), ast.file_id.is_macro()) + fixes( + ctx.sema.db, + var_name, + it.range, + diagnostic_range.into(), + ast.file_id.is_macro(), + ctx.edition, + ) })) .experimental(), ) @@ -54,6 +61,7 @@ fn fixes( name_range: TextRange, diagnostic_range: FileRange, is_in_marco: bool, + edition: Edition, ) -> Option> { if is_in_marco { return None; @@ -63,14 +71,14 @@ fn fixes( id: AssistId("unscore_unused_variable_name", AssistKind::QuickFix), label: Label::new(format!( "Rename unused {} to _{}", - var_name.display(db), - var_name.display(db) + var_name.display(db, edition), + var_name.display(db, edition) )), group: None, target: diagnostic_range.range, source_change: Some(SourceChange::from_text_edit( diagnostic_range.file_id, - TextEdit::replace(name_range, format!("_{}", var_name.display(db))), + TextEdit::replace(name_range, format!("_{}", var_name.display(db, edition))), )), command: None, }]) diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index a61c5f0cd4db2..bf320794afc33 100644 --- 
a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -90,7 +90,7 @@ use once_cell::sync::Lazy; use stdx::never; use syntax::{ ast::{self, AstNode}, - AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, + AstPtr, Edition, SyntaxNode, SyntaxNodePtr, TextRange, }; // FIXME: Make this an enum @@ -279,6 +279,7 @@ struct DiagnosticsContext<'a> { config: &'a DiagnosticsConfig, sema: Semantics<'a, RootDatabase>, resolve: &'a AssistResolveStrategy, + edition: Edition, } impl DiagnosticsContext<'_> { @@ -359,12 +360,19 @@ pub fn semantic_diagnostics( for node in parse.syntax().descendants() { handlers::useless_braces::useless_braces(&mut res, file_id, &node); handlers::field_shorthand::field_shorthand(&mut res, file_id, &node); - handlers::json_is_not_rust::json_in_items(&sema, &mut res, file_id, &node, config); + handlers::json_is_not_rust::json_in_items( + &sema, + &mut res, + file_id, + &node, + config, + file_id.edition(), + ); } let module = sema.file_to_module_def(file_id); - let ctx = DiagnosticsContext { config, sema, resolve }; + let ctx = DiagnosticsContext { config, sema, resolve, edition: file_id.edition() }; let mut diags = Vec::new(); match module { @@ -490,6 +498,7 @@ pub fn semantic_diagnostics( &mut rustc_stack, &mut clippy_stack, &mut diagnostics_of_range, + ctx.edition, ); res.retain(|d| d.severity != Severity::Allow); @@ -544,6 +553,7 @@ fn handle_lint_attributes( rustc_stack: &mut FxHashMap>, clippy_stack: &mut FxHashMap>, diagnostics_of_range: &mut FxHashMap, &mut Diagnostic>, + edition: Edition, ) { let _g = tracing::info_span!("handle_lint_attributes").entered(); let file_id = sema.hir_file_for(root); @@ -552,9 +562,15 @@ fn handle_lint_attributes( match ev { syntax::WalkEvent::Enter(node) => { for attr in node.children().filter_map(ast::Attr::cast) { - parse_lint_attribute(attr, rustc_stack, clippy_stack, |stack, severity| { - stack.push(severity); - }); + parse_lint_attribute( + attr, + rustc_stack, + clippy_stack, + |stack, severity| { + stack.push(severity); + }, + edition, + ); } if let Some(it) = diagnostics_of_range.get_mut(&InFile { file_id, value: node.clone() }) @@ -591,6 +607,7 @@ fn handle_lint_attributes( rustc_stack, clippy_stack, diagnostics_of_range, + edition, ); for stack in [&mut *rustc_stack, &mut *clippy_stack] { stack.entry("__RA_EVERY_LINT".to_owned()).or_default().pop(); @@ -605,17 +622,24 @@ fn handle_lint_attributes( rustc_stack, clippy_stack, diagnostics_of_range, + edition, ); } } } syntax::WalkEvent::Leave(node) => { for attr in node.children().filter_map(ast::Attr::cast) { - parse_lint_attribute(attr, rustc_stack, clippy_stack, |stack, severity| { - if stack.pop() != Some(severity) { - never!("Mismatched serevity in walking lint attributes"); - } - }); + parse_lint_attribute( + attr, + rustc_stack, + clippy_stack, + |stack, severity| { + if stack.pop() != Some(severity) { + never!("Mismatched serevity in walking lint attributes"); + } + }, + edition, + ); } } } @@ -627,6 +651,7 @@ fn parse_lint_attribute( rustc_stack: &mut FxHashMap>, clippy_stack: &mut FxHashMap>, job: impl Fn(&mut Vec, Severity), + edition: Edition, ) { let Some((tag, args_tt)) = attr.as_simple_call() else { return; @@ -637,7 +662,7 @@ fn parse_lint_attribute( "forbid" | "deny" => Severity::Error, _ => return, }; - for lint in parse_tt_as_comma_sep_paths(args_tt).into_iter().flatten() { + for lint in parse_tt_as_comma_sep_paths(args_tt, edition).into_iter().flatten() { if let Some(lint) = 
lint.as_single_name_ref() { job(rustc_stack.entry(lint.to_string()).or_default(), severity); } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs index 5f6d77c064ca3..6569f0f5552f9 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs @@ -8,6 +8,7 @@ use crate::{ }; use hir::{FileRange, ImportPathConfig, Semantics}; use ide_db::FxHashMap; +use parser::Edition; use std::{cell::Cell, iter::Peekable}; use syntax::{ ast::{self, AstNode, AstToken, HasGenericArgs}, @@ -626,6 +627,11 @@ impl<'db, 'sema> Matcher<'db, 'sema> { match_error!("Failed to get receiver type for `{}`", expr.syntax().text()) })? .original; + let edition = self + .sema + .scope(expr.syntax()) + .map(|it| it.krate().edition(self.sema.db)) + .unwrap_or(Edition::CURRENT); // Temporary needed to make the borrow checker happy. let res = code_type .autoderef(self.sema.db) @@ -635,8 +641,8 @@ impl<'db, 'sema> Matcher<'db, 'sema> { .ok_or_else(|| { match_error!( "Pattern type `{}` didn't match code type `{}`", - pattern_type.display(self.sema.db), - code_type.display(self.sema.db) + pattern_type.display(self.sema.db, edition), + code_type.display(self.sema.db, edition) ) }); res diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs index b4b83f62da240..65756601f66a7 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/replacing.rs @@ -2,6 +2,7 @@ use ide_db::{FxHashMap, FxHashSet}; use itertools::Itertools; +use parser::Edition; use syntax::{ ast::{self, AstNode, AstToken}, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize, @@ -33,7 +34,7 @@ fn matches_to_edit_at_offset( for m in &matches.matches { edit_builder.replace( m.range.range.checked_sub(relative_start).unwrap(), - render_replace(db, m, file_src, rules), + render_replace(db, m, file_src, rules, m.range.file_id.edition()), ); } edit_builder.finish() @@ -54,6 +55,7 @@ struct ReplacementRenderer<'a> { // is parsed, placeholders don't get split. e.g. if a template of `$a.to_string()` results in `1 // + 2.to_string()` then the placeholder value `1 + 2` was split and needs parenthesis. placeholder_tokens_requiring_parenthesis: FxHashSet, + edition: Edition, } fn render_replace( @@ -61,6 +63,7 @@ fn render_replace( match_info: &Match, file_src: &str, rules: &[ResolvedRule], + edition: Edition, ) -> String { let rule = &rules[match_info.rule_index]; let template = rule @@ -76,6 +79,7 @@ fn render_replace( out: String::new(), placeholder_tokens_requiring_parenthesis: FxHashSet::default(), placeholder_tokens_by_range: FxHashMap::default(), + edition, }; renderer.render_node(&template.node); renderer.maybe_rerender_with_extra_parenthesis(&template.node); @@ -105,7 +109,7 @@ impl ReplacementRenderer<'_> { fn render_node(&mut self, node: &SyntaxNode) { if let Some(mod_path) = self.match_info.rendered_template_paths.get(node) { - self.out.push_str(&mod_path.display(self.db).to_string()); + self.out.push_str(&mod_path.display(self.db, self.edition).to_string()); // Emit everything except for the segment's name-ref, since we already effectively // emitted that as part of `mod_path`. 
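
In the SSR hunks above, the edition is resolved once per match (from the semantic scope, falling back to `Edition::CURRENT` when no scope is available) and then stored on the renderer, so every later `display` call agrees. A small sketch of that plumbing with placeholder types:

```rust
// Placeholder types standing in for the real database/scope; only the
// edition plumbing is the point here.
#[derive(Clone, Copy, Debug)]
enum Edition {
    E2015,
    E2021,
}

const FALLBACK: Edition = Edition::E2021;

struct Renderer {
    edition: Edition,
    out: String,
}

impl Renderer {
    // Resolve the edition once, falling back when no scope is available.
    fn new(scope_edition: Option<Edition>) -> Self {
        Renderer { edition: scope_edition.unwrap_or(FALLBACK), out: String::new() }
    }

    // Every rendering step reads the stored edition, so a single replacement
    // cannot mix output rendered for different editions.
    fn render_name(&mut self, name: &str) {
        self.out.push_str(&format!("{name}@{:?}", self.edition));
    }
}

fn main() {
    let mut with_scope = Renderer::new(Some(Edition::E2015));
    with_scope.render_name("Option");
    assert_eq!(with_scope.out, "Option@E2015");

    let mut no_scope = Renderer::new(None);
    no_scope.render_name("Option");
    assert_eq!(no_scope.out, "Option@E2021");
}
```
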
if let Some(path) = ast::Path::cast(node.clone()) { diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs index e9e5240897e17..cad0d06eeab18 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs @@ -413,7 +413,8 @@ fn rewrite_url_link(db: &RootDatabase, def: Definition, target: &str) -> Option< fn mod_path_of_def(db: &RootDatabase, def: Definition) -> Option { def.canonical_module_path(db).map(|it| { let mut path = String::new(); - it.flat_map(|it| it.name(db)).for_each(|name| format_to!(path, "{}/", name.display(db))); + it.flat_map(|it| it.name(db)) + .for_each(|name| format_to!(path, "{}/", name.unescaped().display(db))); path }) } @@ -588,9 +589,11 @@ fn filename_and_frag_for_def( let res = match def { Definition::Adt(adt) => match adt { - Adt::Struct(s) => format!("struct.{}.html", s.name(db).display(db.upcast())), - Adt::Enum(e) => format!("enum.{}.html", e.name(db).display(db.upcast())), - Adt::Union(u) => format!("union.{}.html", u.name(db).display(db.upcast())), + Adt::Struct(s) => { + format!("struct.{}.html", s.name(db).unescaped().display(db.upcast())) + } + Adt::Enum(e) => format!("enum.{}.html", e.name(db).unescaped().display(db.upcast())), + Adt::Union(u) => format!("union.{}.html", u.name(db).unescaped().display(db.upcast())), }, Definition::Module(m) => match m.name(db) { // `#[doc(keyword = "...")]` is internal used only by rust compiler @@ -599,34 +602,48 @@ fn filename_and_frag_for_def( Some(kw) => { format!("keyword.{}.html", kw) } - None => format!("{}/index.html", name.display(db.upcast())), + None => format!("{}/index.html", name.unescaped().display(db.upcast())), } } None => String::from("index.html"), }, - Definition::Trait(t) => format!("trait.{}.html", t.name(db).display(db.upcast())), - Definition::TraitAlias(t) => format!("traitalias.{}.html", t.name(db).display(db.upcast())), - Definition::TypeAlias(t) => format!("type.{}.html", t.name(db).display(db.upcast())), - Definition::BuiltinType(t) => format!("primitive.{}.html", t.name().display(db.upcast())), - Definition::Function(f) => format!("fn.{}.html", f.name(db).display(db.upcast())), + Definition::Trait(t) => { + format!("trait.{}.html", t.name(db).unescaped().display(db.upcast())) + } + Definition::TraitAlias(t) => { + format!("traitalias.{}.html", t.name(db).unescaped().display(db.upcast())) + } + Definition::TypeAlias(t) => { + format!("type.{}.html", t.name(db).unescaped().display(db.upcast())) + } + Definition::BuiltinType(t) => { + format!("primitive.{}.html", t.name().unescaped().display(db.upcast())) + } + Definition::Function(f) => { + format!("fn.{}.html", f.name(db).unescaped().display(db.upcast())) + } Definition::Variant(ev) => { format!( "enum.{}.html#variant.{}", - ev.parent_enum(db).name(db).display(db.upcast()), - ev.name(db).display(db.upcast()) + ev.parent_enum(db).name(db).unescaped().display(db.upcast()), + ev.name(db).unescaped().display(db.upcast()) ) } - Definition::Const(c) => format!("const.{}.html", c.name(db)?.display(db.upcast())), - Definition::Static(s) => format!("static.{}.html", s.name(db).display(db.upcast())), + Definition::Const(c) => { + format!("const.{}.html", c.name(db)?.unescaped().display(db.upcast())) + } + Definition::Static(s) => { + format!("static.{}.html", s.name(db).unescaped().display(db.upcast())) + } Definition::Macro(mac) => match mac.kind(db) { hir::MacroKind::Declarative | hir::MacroKind::BuiltIn | hir::MacroKind::Attr 
| hir::MacroKind::ProcMacro => { - format!("macro.{}.html", mac.name(db).display(db.upcast())) + format!("macro.{}.html", mac.name(db).unescaped().display(db.upcast())) } hir::MacroKind::Derive => { - format!("derive.{}.html", mac.name(db).display(db.upcast())) + format!("derive.{}.html", mac.name(db).unescaped().display(db.upcast())) } }, Definition::Field(field) => { @@ -639,7 +656,7 @@ fn filename_and_frag_for_def( return Some(( def, file, - Some(format!("structfield.{}", field.name(db).display(db.upcast()))), + Some(format!("structfield.{}", field.name(db).unescaped().display(db.upcast()))), )); } Definition::SelfType(impl_) => { @@ -649,7 +666,7 @@ fn filename_and_frag_for_def( return Some((adt, file, Some(String::from("impl")))); } Definition::ExternCrateDecl(it) => { - format!("{}/index.html", it.name(db).display(db.upcast())) + format!("{}/index.html", it.name(db).unescaped().display(db.upcast())) } Definition::Local(_) | Definition::GenericParam(_) @@ -679,14 +696,16 @@ fn get_assoc_item_fragment(db: &dyn HirDatabase, assoc_item: hir::AssocItem) -> // Rustdoc makes this decision based on whether a method 'has defaultness'. // Currently this is only the case for provided trait methods. if is_trait_method && !function.has_body(db) { - format!("tymethod.{}", function.name(db).display(db.upcast())) + format!("tymethod.{}", function.name(db).unescaped().display(db.upcast())) } else { - format!("method.{}", function.name(db).display(db.upcast())) + format!("method.{}", function.name(db).unescaped().display(db.upcast())) } } AssocItem::Const(constant) => { - format!("associatedconstant.{}", constant.name(db)?.display(db.upcast())) + format!("associatedconstant.{}", constant.name(db)?.unescaped().display(db.upcast())) + } + AssocItem::TypeAlias(ty) => { + format!("associatedtype.{}", ty.name(db).unescaped().display(db.upcast())) } - AssocItem::TypeAlias(ty) => format!("associatedtype.{}", ty.name(db).display(db.upcast())), }) } diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs index c8fe45c9cf0f8..9788c01544232 100644 --- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs +++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs @@ -3,6 +3,7 @@ use ide_db::{ helpers::pick_best_token, syntax_helpers::insert_whitespace_into_node::insert_ws_into, FileId, RootDatabase, }; +use span::Edition; use syntax::{ast, ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T}; use crate::FilePosition; @@ -83,7 +84,14 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< if let Some(item) = ast::Item::cast(node.clone()) { if let Some(def) = sema.resolve_attr_macro_call(&item) { break ( - def.name(db).display(db).to_string(), + def.name(db) + .display( + db, + sema.attach_first_edition(position.file_id) + .map(|it| it.edition()) + .unwrap_or(Edition::CURRENT), + ) + .to_string(), expand_macro_recur(&sema, &item)?, SyntaxKind::MACRO_ITEMS, ); diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index b1d2b34ea48d6..6a6fbcb9a6b71 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -44,6 +44,8 @@ pub(crate) fn goto_definition( ) -> Option>> { let sema = &Semantics::new(db); let file = sema.parse_guess_edition(file_id).syntax().clone(); + let edition = + sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT); 
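
The goto-definition and hover hunks replace `kind.is_keyword(Edition::CURRENT)` with a check against the file's own edition, because keyword-ness is edition-dependent: `dyn`, for example, is an ordinary identifier on edition 2015 and a keyword from 2018 on, which changes how the token under the cursor is ranked. A toy ranking sketch (the scores and keyword table are illustrative only, not the real ones):

```rust
// Toy illustration of edition-dependent token ranking: the same token can be
// classified differently depending on the file's edition.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Edition {
    E2015,
    E2018,
}

fn is_keyword(token: &str, edition: Edition) -> bool {
    match token {
        "fn" | "let" | "impl" => true,
        "dyn" | "async" | "await" => edition >= Edition::E2018,
        _ => false,
    }
}

// Higher score wins: identifiers beat keywords, keywords beat everything else.
fn score(token: &str, edition: Edition) -> u8 {
    let ident_like = token.chars().all(|c| c.is_alphanumeric() || c == '_');
    if ident_like && !is_keyword(token, edition) {
        3
    } else if is_keyword(token, edition) {
        2
    } else {
        1
    }
}

fn main() {
    // On edition 2015 `dyn` is a plain identifier; on 2018 it is a keyword,
    // so it ranks lower when picking the "best" token under the cursor.
    assert_eq!(score("dyn", Edition::E2015), 3);
    assert_eq!(score("dyn", Edition::E2018), 2);
    assert_eq!(score("(", Edition::E2018), 1);
}
```
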
let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind { IDENT | INT_NUMBER @@ -55,7 +57,7 @@ pub(crate) fn goto_definition( | COMMENT => 4, // index and prefix ops T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3, - kind if kind.is_keyword(Edition::CURRENT) => 2, + kind if kind.is_keyword(edition) => 2, T!['('] | T![')'] => 2, kind if kind.is_trivia() => 0, _ => 1, diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs index 946dca85d4613..a88261df1063d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs +++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs @@ -11,7 +11,7 @@ use ide_db::{ }, FxHashMap, FxHashSet, RootDatabase, }; -use span::{Edition, EditionedFileId}; +use span::EditionedFileId; use syntax::{ ast::{self, HasLoopBody}, match_ast, AstNode, @@ -65,7 +65,7 @@ pub(crate) fn highlight_related( let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind { T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?` T![->] => 4, - kind if kind.is_keyword(Edition::CURRENT) => 3, + kind if kind.is_keyword(file_id.edition()) => 3, IDENT | INT_NUMBER => 2, T![|] => 1, _ => 0, diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index 75f8ac2d2b1b8..6c646cf4ee1f2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -67,7 +67,11 @@ pub enum HoverAction { } impl HoverAction { - fn goto_type_from_targets(db: &RootDatabase, targets: Vec) -> Option { + fn goto_type_from_targets( + db: &RootDatabase, + targets: Vec, + edition: Edition, + ) -> Option { let targets = targets .into_iter() .filter_map(|it| { @@ -75,7 +79,8 @@ impl HoverAction { mod_path: render::path( db, it.module(db)?, - it.name(db).map(|name| name.display(db).to_string()), + it.name(db).map(|name| name.display(db, edition).to_string()), + edition, ), nav: it.try_to_nav(db)?.call_site(), }) @@ -111,10 +116,12 @@ pub(crate) fn hover( ) -> Option> { let sema = &hir::Semantics::new(db); let file = sema.parse_guess_edition(file_id).syntax().clone(); + let edition = + sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT); let mut res = if range.is_empty() { - hover_simple(sema, FilePosition { file_id, offset: range.start() }, file, config) + hover_simple(sema, FilePosition { file_id, offset: range.start() }, file, config, edition) } else { - hover_ranged(sema, frange, file, config) + hover_ranged(sema, frange, file, config, edition) }?; if let HoverDocFormat::PlainText = config.format { @@ -129,6 +136,7 @@ fn hover_simple( FilePosition { file_id, offset }: FilePosition, file: SyntaxNode, config: &HoverConfig, + edition: Edition, ) -> Option> { let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind { IDENT @@ -141,7 +149,7 @@ fn hover_simple( | T![_] => 4, // index and prefix ops and closure pipe T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] 
| T![|] => 3, - kind if kind.is_keyword(Edition::CURRENT) => 2, + kind if kind.is_keyword(edition) => 2, T!['('] | T![')'] => 2, kind if kind.is_trivia() => 0, _ => 1, @@ -150,7 +158,7 @@ fn hover_simple( if let Some(doc_comment) = token_as_doc_comment(&original_token) { cov_mark::hit!(no_highlight_on_comment_hover); return doc_comment.get_definition_with_descend_at(sema, offset, |def, node, range| { - let res = hover_for_definition(sema, file_id, def, &node, None, config); + let res = hover_for_definition(sema, file_id, def, &node, None, config, edition); Some(RangeInfo::new(range, res)) }); } @@ -165,6 +173,7 @@ fn hover_simple( &original_token.parent()?, None, config, + edition, ); return Some(RangeInfo::new(range, res)); } @@ -240,7 +249,7 @@ fn hover_simple( .flatten() .unique_by(|&(def, _, _)| def) .map(|(def, macro_arm, node)| { - hover_for_definition(sema, file_id, def, &node, macro_arm, config) + hover_for_definition(sema, file_id, def, &node, macro_arm, config, edition) }) .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| { acc.actions.extend(actions); @@ -249,9 +258,9 @@ fn hover_simple( }) }) // try keywords - .or_else(|| descended().find_map(|token| render::keyword(sema, config, token))) + .or_else(|| descended().find_map(|token| render::keyword(sema, config, token, edition))) // try _ hovers - .or_else(|| descended().find_map(|token| render::underscore(sema, config, token))) + .or_else(|| descended().find_map(|token| render::underscore(sema, config, token, edition))) // try rest pattern hover .or_else(|| { descended().find_map(|token| { @@ -266,7 +275,7 @@ fn hover_simple( let record_pat = record_pat_field_list.syntax().parent().and_then(ast::RecordPat::cast)?; - Some(render::struct_rest_pat(sema, config, &record_pat)) + Some(render::struct_rest_pat(sema, config, &record_pat, edition)) }) }) // try () call hovers @@ -283,7 +292,7 @@ fn hover_simple( _ => return None, } }; - render::type_info_of(sema, config, &Either::Left(call_expr)) + render::type_info_of(sema, config, &Either::Left(call_expr), edition) }) }) // try closure @@ -293,12 +302,12 @@ fn hover_simple( return None; } let c = token.parent().and_then(|x| x.parent()).and_then(ast::ClosureExpr::cast)?; - render::closure_expr(sema, config, c) + render::closure_expr(sema, config, c, edition) }) }) // tokens .or_else(|| { - render::literal(sema, original_token.clone()) + render::literal(sema, original_token.clone(), edition) .map(|markup| HoverResult { markup, actions: vec![] }) }); @@ -313,6 +322,7 @@ fn hover_ranged( FileRange { range, .. 
}: FileRange, file: SyntaxNode, config: &HoverConfig, + edition: Edition, ) -> Option> { // FIXME: make this work in attributes let expr_or_pat = file @@ -321,15 +331,17 @@ fn hover_ranged( .take_while(|it| ast::MacroCall::can_cast(it.kind()) || !ast::Item::can_cast(it.kind())) .find_map(Either::::cast)?; let res = match &expr_or_pat { - Either::Left(ast::Expr::TryExpr(try_expr)) => render::try_expr(sema, config, try_expr), + Either::Left(ast::Expr::TryExpr(try_expr)) => { + render::try_expr(sema, config, try_expr, edition) + } Either::Left(ast::Expr::PrefixExpr(prefix_expr)) if prefix_expr.op_kind() == Some(ast::UnaryOp::Deref) => { - render::deref_expr(sema, config, prefix_expr) + render::deref_expr(sema, config, prefix_expr, edition) } _ => None, }; - let res = res.or_else(|| render::type_info_of(sema, config, &expr_or_pat)); + let res = res.or_else(|| render::type_info_of(sema, config, &expr_or_pat, edition)); res.map(|it| { let range = match expr_or_pat { Either::Left(it) => it.syntax().text_range(), @@ -347,6 +359,7 @@ pub(crate) fn hover_for_definition( scope_node: &SyntaxNode, macro_arm: Option, config: &HoverConfig, + edition: Edition, ) -> HoverResult { let famous_defs = match &def { Definition::BuiltinType(_) => sema.scope(scope_node).map(|it| FamousDefs(sema, it.krate())), @@ -370,15 +383,22 @@ pub(crate) fn hover_for_definition( }; let notable_traits = def_ty.map(|ty| notable_traits(db, &ty)).unwrap_or_default(); - let markup = - render::definition(sema.db, def, famous_defs.as_ref(), ¬able_traits, macro_arm, config); + let markup = render::definition( + sema.db, + def, + famous_defs.as_ref(), + ¬able_traits, + macro_arm, + config, + edition, + ); HoverResult { markup: render::process_markup(sema.db, def, &markup, config), actions: [ show_fn_references_action(sema.db, def), show_implementations_action(sema.db, def), runnable_action(sema, def, file_id), - goto_type_action_for_def(sema.db, def, ¬able_traits), + goto_type_action_for_def(sema.db, def, ¬able_traits, edition), ] .into_iter() .flatten() @@ -470,6 +490,7 @@ fn goto_type_action_for_def( db: &RootDatabase, def: Definition, notable_traits: &[(hir::Trait, Vec<(Option, hir::Name)>)], + edition: Edition, ) -> Option { let mut targets: Vec = Vec::new(); let mut push_new_def = |item: hir::ModuleDef| { @@ -500,13 +521,13 @@ fn goto_type_action_for_def( Definition::GenericParam(hir::GenericParam::ConstParam(it)) => it.ty(db), Definition::Field(field) => field.ty(db), Definition::Function(function) => function.ret_type(db), - _ => return HoverAction::goto_type_from_targets(db, targets), + _ => return HoverAction::goto_type_from_targets(db, targets, edition), }; walk_and_push_ty(db, &ty, &mut push_new_def); } - HoverAction::goto_type_from_targets(db, targets) + HoverAction::goto_type_from_targets(db, targets, edition) } fn walk_and_push_ty( diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs index 13b7ba1e2ba86..3e41b42be44b1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs @@ -35,27 +35,30 @@ pub(super) fn type_info_of( sema: &Semantics<'_, RootDatabase>, _config: &HoverConfig, expr_or_pat: &Either, + edition: Edition, ) -> Option { let ty_info = match expr_or_pat { Either::Left(expr) => sema.type_of_expr(expr)?, Either::Right(pat) => sema.type_of_pat(pat)?, }; - type_info(sema, _config, ty_info) + type_info(sema, _config, ty_info, edition) } pub(super) fn closure_expr( sema: 
&Semantics<'_, RootDatabase>, config: &HoverConfig, c: ast::ClosureExpr, + edition: Edition, ) -> Option { let TypeInfo { original, .. } = sema.type_of_expr(&c.into())?; - closure_ty(sema, config, &TypeInfo { original, adjusted: None }) + closure_ty(sema, config, &TypeInfo { original, adjusted: None }, edition) } pub(super) fn try_expr( sema: &Semantics<'_, RootDatabase>, _config: &HoverConfig, try_expr: &ast::TryExpr, + edition: Edition, ) -> Option { let inner_ty = sema.type_of_expr(&try_expr.expr()?)?.original; let mut ancestors = try_expr.syntax().ancestors(); @@ -118,12 +121,12 @@ pub(super) fn try_expr( }; walk_and_push_ty(sema.db, &inner_ty, &mut push_new_def); walk_and_push_ty(sema.db, &body_ty, &mut push_new_def); - if let Some(actions) = HoverAction::goto_type_from_targets(sema.db, targets) { + if let Some(actions) = HoverAction::goto_type_from_targets(sema.db, targets, edition) { res.actions.push(actions); } - let inner_ty = inner_ty.display(sema.db).to_string(); - let body_ty = body_ty.display(sema.db).to_string(); + let inner_ty = inner_ty.display(sema.db, edition).to_string(); + let body_ty = body_ty.display(sema.db, edition).to_string(); let ty_len_max = inner_ty.len().max(body_ty.len()); let l = "Propagated as: ".len() - " Type: ".len(); @@ -147,6 +150,7 @@ pub(super) fn deref_expr( sema: &Semantics<'_, RootDatabase>, _config: &HoverConfig, deref_expr: &ast::PrefixExpr, + edition: Edition, ) -> Option { let inner_ty = sema.type_of_expr(&deref_expr.expr()?)?.original; let TypeInfo { original, adjusted } = @@ -164,9 +168,9 @@ pub(super) fn deref_expr( res.markup = if let Some(adjusted_ty) = adjusted { walk_and_push_ty(sema.db, &adjusted_ty, &mut push_new_def); - let original = original.display(sema.db).to_string(); - let adjusted = adjusted_ty.display(sema.db).to_string(); - let inner = inner_ty.display(sema.db).to_string(); + let original = original.display(sema.db, edition).to_string(); + let adjusted = adjusted_ty.display(sema.db, edition).to_string(); + let inner = inner_ty.display(sema.db, edition).to_string(); let type_len = "To type: ".len(); let coerced_len = "Coerced to: ".len(); let deref_len = "Dereferenced from: ".len(); @@ -184,8 +188,8 @@ pub(super) fn deref_expr( ) .into() } else { - let original = original.display(sema.db).to_string(); - let inner = inner_ty.display(sema.db).to_string(); + let original = original.display(sema.db, edition).to_string(); + let inner = inner_ty.display(sema.db, edition).to_string(); let type_len = "To type: ".len(); let deref_len = "Dereferenced from: ".len(); let max_len = (original.len() + type_len).max(inner.len() + deref_len); @@ -198,7 +202,7 @@ pub(super) fn deref_expr( ) .into() }; - if let Some(actions) = HoverAction::goto_type_from_targets(sema.db, targets) { + if let Some(actions) = HoverAction::goto_type_from_targets(sema.db, targets, edition) { res.actions.push(actions); } @@ -209,6 +213,7 @@ pub(super) fn underscore( sema: &Semantics<'_, RootDatabase>, config: &HoverConfig, token: &SyntaxToken, + edition: Edition, ) -> Option { if token.kind() != T![_] { return None; @@ -217,8 +222,8 @@ pub(super) fn underscore( let _it = match_ast! 
{ match parent { ast::InferType(it) => it, - ast::UnderscoreExpr(it) => return type_info_of(sema, config, &Either::Left(ast::Expr::UnderscoreExpr(it))), - ast::WildcardPat(it) => return type_info_of(sema, config, &Either::Right(ast::Pat::WildcardPat(it))), + ast::UnderscoreExpr(it) => return type_info_of(sema, config, &Either::Left(ast::Expr::UnderscoreExpr(it)),edition), + ast::WildcardPat(it) => return type_info_of(sema, config, &Either::Right(ast::Pat::WildcardPat(it)),edition), _ => return None, } }; @@ -251,16 +256,18 @@ pub(super) fn keyword( sema: &Semantics<'_, RootDatabase>, config: &HoverConfig, token: &SyntaxToken, + edition: Edition, ) -> Option { - if !token.kind().is_keyword(Edition::CURRENT) || !config.documentation || !config.keywords { + if !token.kind().is_keyword(edition) || !config.documentation || !config.keywords { return None; } let parent = token.parent()?; let famous_defs = FamousDefs(sema, sema.scope(&parent)?.krate()); - let KeywordHint { description, keyword_mod, actions } = keyword_hints(sema, token, parent); + let KeywordHint { description, keyword_mod, actions } = + keyword_hints(sema, token, parent, edition); - let doc_owner = find_std_module(&famous_defs, &keyword_mod)?; + let doc_owner = find_std_module(&famous_defs, &keyword_mod, edition)?; let docs = doc_owner.docs(sema.db)?; let markup = process_markup( sema.db, @@ -278,6 +285,7 @@ pub(super) fn struct_rest_pat( sema: &Semantics<'_, RootDatabase>, _config: &HoverConfig, pattern: &ast::RecordPat, + edition: Edition, ) -> HoverResult { let missing_fields = sema.record_pattern_missing_fields(pattern); @@ -299,7 +307,7 @@ pub(super) fn struct_rest_pat( res.markup = { let mut s = String::from(".., "); for (f, _) in &missing_fields { - s += f.display(sema.db).to_string().as_ref(); + s += f.display(sema.db, edition).to_string().as_ref(); s += ", "; } // get rid of trailing comma @@ -307,7 +315,7 @@ pub(super) fn struct_rest_pat( Markup::fenced_block(&s) }; - if let Some(actions) = HoverAction::goto_type_from_targets(sema.db, targets) { + if let Some(actions) = HoverAction::goto_type_from_targets(sema.db, targets, edition) { res.actions.push(actions); } res @@ -366,7 +374,7 @@ pub(super) fn process_markup( Markup::from(markup) } -fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option { +fn definition_owner_name(db: &RootDatabase, def: &Definition, edition: Edition) -> Option { match def { Definition::Field(f) => Some(f.parent_def(db).name(db)), Definition::Local(l) => l.parent(db).name(db), @@ -385,17 +393,22 @@ fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option } } } - .map(|name| name.display(db).to_string()) + .map(|name| name.display(db, edition).to_string()) } -pub(super) fn path(db: &RootDatabase, module: hir::Module, item_name: Option) -> String { +pub(super) fn path( + db: &RootDatabase, + module: hir::Module, + item_name: Option, + edition: Edition, +) -> String { let crate_name = db.crate_graph()[module.krate().into()].display_name.as_ref().map(|it| it.to_string()); let module_path = module .path_to_root(db) .into_iter() .rev() - .flat_map(|it| it.name(db).map(|name| name.display(db).to_string())); + .flat_map(|it| it.name(db).map(|name| name.display(db, edition).to_string())); crate_name.into_iter().chain(module_path).chain(item_name).join("::") } @@ -406,39 +419,42 @@ pub(super) fn definition( notable_traits: &[(Trait, Vec<(Option, Name)>)], macro_arm: Option, config: &HoverConfig, + edition: Edition, ) -> Markup { - let mod_path = definition_mod_path(db, &def); 
+ let mod_path = definition_mod_path(db, &def, edition); let label = match def { Definition::Trait(trait_) => { - trait_.display_limited(db, config.max_trait_assoc_items_count).to_string() + trait_.display_limited(db, config.max_trait_assoc_items_count, edition).to_string() } Definition::Adt(adt @ (Adt::Struct(_) | Adt::Union(_))) => { - adt.display_limited(db, config.max_fields_count).to_string() + adt.display_limited(db, config.max_fields_count, edition).to_string() } Definition::Variant(variant) => { - variant.display_limited(db, config.max_fields_count).to_string() + variant.display_limited(db, config.max_fields_count, edition).to_string() } Definition::Adt(adt @ Adt::Enum(_)) => { - adt.display_limited(db, config.max_enum_variants_count).to_string() + adt.display_limited(db, config.max_enum_variants_count, edition).to_string() } Definition::SelfType(impl_def) => { let self_ty = &impl_def.self_ty(db); match self_ty.as_adt() { - Some(adt) => adt.display_limited(db, config.max_fields_count).to_string(), - None => self_ty.display(db).to_string(), + Some(adt) => adt.display_limited(db, config.max_fields_count, edition).to_string(), + None => self_ty.display(db, edition).to_string(), } } Definition::Macro(it) => { - let mut label = it.display(db).to_string(); + let mut label = it.display(db, edition).to_string(); if let Some(macro_arm) = macro_arm { format_to!(label, " // matched arm #{}", macro_arm); } label } - Definition::Function(fn_) => fn_.display_with_container_bounds(db, true).to_string(), - _ => def.label(db), + Definition::Function(fn_) => { + fn_.display_with_container_bounds(db, true, edition).to_string() + } + _ => def.label(db, edition), }; - let docs = def.docs(db, famous_defs); + let docs = def.docs(db, famous_defs, edition); let value = (|| match def { Definition::Variant(it) => { if !it.parent_enum(db).is_data_carrying(db) { @@ -453,7 +469,7 @@ pub(super) fn definition( } } Definition::Const(it) => { - let body = it.render_eval(db); + let body = it.render_eval(db, edition); match body { Ok(it) => Some(it), Err(_) => { @@ -511,7 +527,7 @@ pub(super) fn definition( }; let mut desc = String::new(); - if let Some(notable_traits) = render_notable_trait_comment(db, notable_traits) { + if let Some(notable_traits) = render_notable_trait_comment(db, notable_traits, edition) { desc.push_str(¬able_traits); desc.push('\n'); } @@ -528,7 +544,11 @@ pub(super) fn definition( markup(docs.map(Into::into), desc, mod_path) } -pub(super) fn literal(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option { +pub(super) fn literal( + sema: &Semantics<'_, RootDatabase>, + token: SyntaxToken, + edition: Edition, +) -> Option { let lit = token.parent().and_then(ast::Literal::cast)?; let ty = if let Some(p) = lit.syntax().parent().and_then(ast::Pat::cast) { sema.type_of_pat(&p)? 
@@ -575,7 +595,7 @@ pub(super) fn literal(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> _ => return None } }; - let ty = ty.display(sema.db); + let ty = ty.display(sema.db, edition); let mut s = format!("```rust\n{ty}\n```\n___\n\n"); match value { @@ -594,6 +614,7 @@ pub(super) fn literal(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> fn render_notable_trait_comment( db: &RootDatabase, notable_traits: &[(Trait, Vec<(Option, Name)>)], + edition: Edition, ) -> Option { let mut desc = String::new(); let mut needs_impl_header = true; @@ -603,17 +624,17 @@ fn render_notable_trait_comment( } else { ", " }); - format_to!(desc, "{}", trait_.name(db).display(db),); + format_to!(desc, "{}", trait_.name(db).display(db, edition)); if !assoc_types.is_empty() { desc.push('<'); format_to!( desc, "{}", assoc_types.iter().format_with(", ", |(ty, name), f| { - f(&name.display(db))?; + f(&name.display(db, edition))?; f(&" = ")?; match ty { - Some(ty) => f(&ty.display(db)), + Some(ty) => f(&ty.display(db, edition)), None => f(&"?"), } }) @@ -628,8 +649,9 @@ fn type_info( sema: &Semantics<'_, RootDatabase>, config: &HoverConfig, ty: TypeInfo, + edition: Edition, ) -> Option { - if let Some(res) = closure_ty(sema, config, &ty) { + if let Some(res) = closure_ty(sema, config, &ty, edition) { return Some(res); }; let db = sema.db; @@ -655,17 +677,17 @@ fn type_info( } else { ", " }); - format_to!(desc, "{}", trait_.name(db).display(db),); + format_to!(desc, "{}", trait_.name(db).display(db, edition)); if !assoc_types.is_empty() { desc.push('<'); format_to!( desc, "{}", assoc_types.into_iter().format_with(", ", |(ty, name), f| { - f(&name.display(db))?; + f(&name.display(db, edition))?; f(&" = ")?; match ty { - Some(ty) => f(&ty.display(db)), + Some(ty) => f(&ty.display(db, edition)), None => f(&"?"), } }) @@ -679,8 +701,8 @@ fn type_info( desc }; - let original = original.display(db).to_string(); - let adjusted = adjusted_ty.display(db).to_string(); + let original = original.display(db, edition).to_string(); + let adjusted = adjusted_ty.display(db, edition).to_string(); let static_text_diff_len = "Coerced to: ".len() - "Type: ".len(); format!( "```text\nType: {:>apad$}\nCoerced to: {:>opad$}\n{notable}```\n", @@ -691,14 +713,15 @@ fn type_info( ) .into() } else { - let mut desc = match render_notable_trait_comment(db, ¬able_traits(db, &original)) { - Some(desc) => desc + "\n", - None => String::new(), - }; - format_to!(desc, "{}", original.display(db)); + let mut desc = + match render_notable_trait_comment(db, ¬able_traits(db, &original), edition) { + Some(desc) => desc + "\n", + None => String::new(), + }; + format_to!(desc, "{}", original.display(db, edition)); Markup::fenced_block(&desc) }; - if let Some(actions) = HoverAction::goto_type_from_targets(db, targets) { + if let Some(actions) = HoverAction::goto_type_from_targets(db, targets, edition) { res.actions.push(actions); } Some(res) @@ -708,6 +731,7 @@ fn closure_ty( sema: &Semantics<'_, RootDatabase>, config: &HoverConfig, TypeInfo { original, adjusted }: &TypeInfo, + edition: Edition, ) -> Option { let c = original.as_closure()?; let mut captures_rendered = c.captured_items(sema.db) @@ -740,12 +764,12 @@ fn closure_ty( walk_and_push_ty(sema.db, adjusted_ty, &mut push_new_def); format!( "\nCoerced to: {}", - adjusted_ty.display(sema.db).with_closure_style(hir::ClosureStyle::ImplFn) + adjusted_ty.display(sema.db, edition).with_closure_style(hir::ClosureStyle::ImplFn) ) } else { String::new() }; - let mut markup = 
format!("```rust\n{}", c.display_with_id(sema.db),); + let mut markup = format!("```rust\n{}", c.display_with_id(sema.db, edition)); if let Some(layout) = render_memory_layout(config.memory_layout, || original.layout(sema.db), |_| None, |_| None) @@ -758,23 +782,23 @@ fn closure_ty( format_to!( markup, "\n{}\n```{adjusted}\n\n## Captures\n{}", - c.display_with_impl(sema.db), + c.display_with_impl(sema.db, edition), captures_rendered, ); let mut res = HoverResult::default(); - if let Some(actions) = HoverAction::goto_type_from_targets(sema.db, targets) { + if let Some(actions) = HoverAction::goto_type_from_targets(sema.db, targets, edition) { res.actions.push(actions); } res.markup = markup.into(); Some(res) } -fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option { +fn definition_mod_path(db: &RootDatabase, def: &Definition, edition: Edition) -> Option { if matches!(def, Definition::GenericParam(_) | Definition::Local(_) | Definition::Label(_)) { return None; } - def.module(db).map(|module| path(db, module, definition_owner_name(db, def))) + def.module(db).map(|module| path(db, module, definition_owner_name(db, def, edition), edition)) } fn markup(docs: Option, desc: String, mod_path: Option) -> Markup { @@ -793,12 +817,16 @@ fn markup(docs: Option, desc: String, mod_path: Option) -> Marku buf.into() } -fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option { +fn find_std_module( + famous_defs: &FamousDefs<'_, '_>, + name: &str, + edition: Edition, +) -> Option { let db = famous_defs.0.db; let std_crate = famous_defs.std()?; let std_root_module = std_crate.root_module(); std_root_module.children(db).find(|module| { - module.name(db).map_or(false, |module| module.display(db).to_string() == name) + module.name(db).map_or(false, |module| module.display(db, edition).to_string() == name) }) } @@ -889,6 +917,7 @@ fn keyword_hints( sema: &Semantics<'_, RootDatabase>, token: &SyntaxToken, parent: syntax::SyntaxNode, + edition: Edition, ) -> KeywordHint { match token.kind() { T![await] | T![loop] | T![match] | T![unsafe] | T![as] | T![try] | T![if] | T![else] => { @@ -906,12 +935,12 @@ fn keyword_hints( walk_and_push_ty(sema.db, &ty.original, &mut push_new_def); let ty = ty.adjusted(); - let description = format!("{}: {}", token.text(), ty.display(sema.db)); + let description = format!("{}: {}", token.text(), ty.display(sema.db, edition)); KeywordHint { description, keyword_mod, - actions: HoverAction::goto_type_from_targets(sema.db, targets) + actions: HoverAction::goto_type_from_targets(sema.db, targets, edition) .into_iter() .collect(), } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 18f0aeba29ea8..8b60e562d7b8d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -8630,3 +8630,75 @@ fn main() { "#]], ); } + +#[test] +fn raw_keyword_different_editions() { + check( + r#" +//- /lib1.rs crate:with_edition_2015 edition:2015 +pub fn dyn() {} + +//- /lib2.rs crate:with_edition_2018 edition:2018 deps:with_edition_2015 new_source_root:local +fn foo() { + with_edition_2015::r#dyn$0(); +} + "#, + expect![[r#" + *r#dyn* + + ```rust + with_edition_2015 + ``` + + ```rust + pub fn r#dyn() + ``` + "#]], + ); + + check( + r#" +//- /lib1.rs crate:with_edition_2018 edition:2018 +pub fn r#dyn() {} + +//- /lib2.rs crate:with_edition_2015 edition:2015 deps:with_edition_2018 new_source_root:local +fn foo() { + 
with_edition_2018::dyn$0(); +} + "#, + expect![[r#" + *dyn* + + ```rust + with_edition_2018 + ``` + + ```rust + pub fn dyn() + ``` + "#]], + ); + + check( + r#" +//- /lib1.rs crate:escaping_needlessly edition:2015 +pub fn r#dyn() {} + +//- /lib2.rs crate:dependent edition:2015 deps:escaping_needlessly new_source_root:local +fn foo() { + escaping_needlessly::dyn$0(); +} + "#, + expect![[r#" + *dyn* + + ```rust + escaping_needlessly + ``` + + ```rust + pub fn dyn() + ``` + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs index 0a8d2727575d2..6a5d5e26a4f05 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs @@ -11,7 +11,7 @@ use hir::{ use ide_db::{famous_defs::FamousDefs, FileRange, RootDatabase}; use itertools::Itertools; use smallvec::{smallvec, SmallVec}; -use span::EditionedFileId; +use span::{Edition, EditionedFileId}; use stdx::never; use syntax::{ ast::{self, AstNode}, @@ -372,6 +372,7 @@ fn label_of_ty( famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, ty: &hir::Type, + edition: Edition, ) -> Option { fn rec( sema: &Semantics<'_, RootDatabase>, @@ -380,6 +381,7 @@ fn label_of_ty( ty: &hir::Type, label_builder: &mut InlayHintLabelBuilder<'_>, config: &InlayHintsConfig, + edition: Edition, ) -> Result<(), HirDisplayError> { let iter_item_type = hint_iterator(sema, famous_defs, ty); match iter_item_type { @@ -410,12 +412,12 @@ fn label_of_ty( label_builder.write_str(LABEL_ITEM)?; label_builder.end_location_link(); label_builder.write_str(LABEL_MIDDLE2)?; - rec(sema, famous_defs, max_length, &ty, label_builder, config)?; + rec(sema, famous_defs, max_length, &ty, label_builder, config, edition)?; label_builder.write_str(LABEL_END)?; Ok(()) } None => ty - .display_truncated(sema.db, max_length) + .display_truncated(sema.db, max_length, edition) .with_closure_style(config.closure_style) .write_to(label_builder), } @@ -427,7 +429,7 @@ fn label_of_ty( location: None, result: InlayHintLabel::default(), }; - let _ = rec(sema, famous_defs, config.max_length, ty, &mut label_builder, config); + let _ = rec(sema, famous_defs, config.max_length, ty, &mut label_builder, config, edition); let r = label_builder.finish(); Some(r) } @@ -569,7 +571,7 @@ fn hints( match node { ast::Expr(expr) => { chaining::hints(hints, famous_defs, config, file_id, &expr); - adjustment::hints(hints, sema, config, &expr); + adjustment::hints(hints, sema, config, file_id, &expr); match expr { ast::Expr::CallExpr(it) => param_name::hints(hints, sema, config, ast::Expr::from(it)), ast::Expr::MethodCallExpr(it) => { @@ -600,7 +602,7 @@ fn hints( // FIXME: record impl lifetimes so they aren't being reused in assoc item lifetime inlay hints ast::Item::Impl(_) => None, ast::Item::Fn(it) => { - implicit_drop::hints(hints, sema, config, &it); + implicit_drop::hints(hints, sema, config, file_id, &it); fn_lifetime_fn::hints(hints, config, it) }, // static type elisions diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs index 7932d8efbcf18..756198d0c0110 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs @@ -10,6 +10,7 @@ use hir::{ }; use ide_db::RootDatabase; +use span::EditionedFileId; use stdx::never; use syntax::{ ast::{self, make, AstNode}, @@ 
-25,6 +26,7 @@ pub(super) fn hints( acc: &mut Vec, sema: &Semantics<'_, RootDatabase>, config: &InlayHintsConfig, + file_id: EditionedFileId, expr: &ast::Expr, ) -> Option<()> { if config.adjustment_hints_hide_outside_unsafe && !sema.is_inside_unsafe(expr) { @@ -141,8 +143,8 @@ pub(super) fn hints( if postfix { format!(".{}", text.trim_end()) } else { text.to_owned() }, Some(InlayTooltip::Markdown(format!( "`{}` → `{}` ({coercion} coercion)", - source.display(sema.db), - target.display(sema.db), + source.display(sema.db, file_id.edition()), + target.display(sema.db, file_id.edition()), ))), None, ); diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs index 7310852b8ed57..d67afe14d7c93 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs @@ -22,7 +22,7 @@ pub(super) fn hints( acc: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - _file_id: EditionedFileId, + file_id: EditionedFileId, pat: &ast::IdentPat, ) -> Option<()> { if !config.type_hints { @@ -67,7 +67,7 @@ pub(super) fn hints( return None; } - let mut label = label_of_ty(famous_defs, config, &ty)?; + let mut label = label_of_ty(famous_defs, config, &ty, file_id.edition())?; if config.hide_named_constructor_hints && is_named_constructor(sema, pat, &label.to_string()).is_some() diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs index 4e15213b8bbf0..35f4d46e187c4 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs @@ -14,7 +14,7 @@ pub(super) fn hints( acc: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - _file_id: EditionedFileId, + file_id: EditionedFileId, expr: &ast::Expr, ) -> Option<()> { if !config.chaining_hints { @@ -58,7 +58,7 @@ pub(super) fn hints( } } } - let label = label_of_ty(famous_defs, config, &ty)?; + let label = label_of_ty(famous_defs, config, &ty, file_id.edition())?; acc.push(InlayHint { range: expr.syntax().text_range(), kind: InlayKind::Chaining, diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs index 8f2777f3928d1..d78fd64bdf4dc 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs @@ -36,8 +36,12 @@ pub(super) fn hints( let ty = imp.self_ty(sema.db); let trait_ = imp.trait_(sema.db); let hint_text = match trait_ { - Some(tr) => format!("impl {} for {}", tr.name(sema.db).display(sema.db), ty.display_truncated(sema.db, config.max_length)), - None => format!("impl {}", ty.display_truncated(sema.db, config.max_length)), + Some(tr) => format!( + "impl {} for {}", + tr.name(sema.db).display(sema.db, file_id.edition()), + ty.display_truncated(sema.db, config.max_length, file_id.edition(), + )), + None => format!("impl {}", ty.display_truncated(sema.db, config.max_length, file_id.edition())), }; (hint_text, None) }, diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs index f6bd7ca064fda..325c2040691b1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs 
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs @@ -14,7 +14,7 @@ pub(super) fn hints( acc: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - _file_id: EditionedFileId, + file_id: EditionedFileId, closure: ast::ClosureExpr, ) -> Option<()> { if config.closure_return_type_hints == ClosureReturnTypeHints::Never { @@ -43,7 +43,7 @@ pub(super) fn hints( return None; } - let mut label = label_of_ty(famous_defs, config, &ty)?; + let mut label = label_of_ty(famous_defs, config, &ty, file_id.edition())?; if arrow.is_none() { label.prepend_str(" -> "); diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs index 7f901db28d368..b4695a2b3519a 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs @@ -12,6 +12,7 @@ use hir::{ }; use ide_db::{FileRange, RootDatabase}; +use span::EditionedFileId; use syntax::{ ast::{self, AstNode}, match_ast, ToSmolStr, @@ -23,6 +24,7 @@ pub(super) fn hints( acc: &mut Vec, sema: &Semantics<'_, RootDatabase>, config: &InlayHintsConfig, + file_id: EditionedFileId, def: &ast::Fn, ) -> Option<()> { if !config.implicit_drop_hints { @@ -100,7 +102,7 @@ pub(super) fn hints( }) }); let binding = &hir.bindings[*binding]; - let name = binding.name.display_no_db().to_smolstr(); + let name = binding.name.display_no_db(file_id.edition()).to_smolstr(); if name.starts_with(" Some(it.name(sema.db).display_no_db().to_smolstr()), + hir::CallableKind::Function(it) => { + Some(it.name(sema.db).unescaped().display_no_db().to_smolstr()) + } _ => None, }; let fn_name = fn_name.as_deref(); diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs index 1b64bc926039e..7b5fd651e3d79 100644 --- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs +++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs @@ -249,10 +249,11 @@ pub(crate) fn def_to_moniker( let module = def.module(db)?; let krate = module.krate(); + let edition = krate.edition(db); let mut description = vec![]; description.extend(module.path_to_root(db).into_iter().filter_map(|x| { Some(MonikerDescriptor { - name: x.name(db)?.display(db).to_string(), + name: x.name(db)?.display(db, edition).to_string(), desc: def_to_kind(db, x.into()).into(), }) })); @@ -265,7 +266,7 @@ pub(crate) fn def_to_moniker( // Because different traits can have functions with the same name, // we have to include the trait name as part of the moniker for uniqueness. 
description.push(MonikerDescriptor { - name: trait_.name(db).display(db).to_string(), + name: trait_.name(db).display(db, edition).to_string(), desc: def_to_kind(db, trait_.into()).into(), }); } @@ -274,14 +275,14 @@ pub(crate) fn def_to_moniker( // we add both the struct name and the trait name to the path if let Some(adt) = impl_.self_ty(db).as_adt() { description.push(MonikerDescriptor { - name: adt.name(db).display(db).to_string(), + name: adt.name(db).display(db, edition).to_string(), desc: def_to_kind(db, adt.into()).into(), }); } if let Some(trait_) = impl_.trait_(db) { description.push(MonikerDescriptor { - name: trait_.name(db).display(db).to_string(), + name: trait_.name(db).display(db, edition).to_string(), desc: def_to_kind(db, trait_.into()).into(), }); } @@ -291,7 +292,7 @@ pub(crate) fn def_to_moniker( if let Definition::Field(it) = def { description.push(MonikerDescriptor { - name: it.parent_def(db).name(db).display(db).to_string(), + name: it.parent_def(db).name(db).display(db, edition).to_string(), desc: def_to_kind(db, it.parent_def(db).into()).into(), }); } @@ -303,7 +304,7 @@ pub(crate) fn def_to_moniker( let parent_name = parent.name(db); if let Some(name) = parent_name { description.push(MonikerDescriptor { - name: name.display(db).to_string(), + name: name.display(db, edition).to_string(), desc: def_to_kind(db, parent).into(), }); } @@ -326,53 +327,53 @@ pub(crate) fn def_to_moniker( return None; } - MonikerDescriptor { name: local.name(db).display(db).to_string(), desc } + MonikerDescriptor { name: local.name(db).display(db, edition).to_string(), desc } } Definition::Macro(m) => { - MonikerDescriptor { name: m.name(db).display(db).to_string(), desc } + MonikerDescriptor { name: m.name(db).display(db, edition).to_string(), desc } } Definition::Function(f) => { - MonikerDescriptor { name: f.name(db).display(db).to_string(), desc } + MonikerDescriptor { name: f.name(db).display(db, edition).to_string(), desc } } Definition::Variant(v) => { - MonikerDescriptor { name: v.name(db).display(db).to_string(), desc } + MonikerDescriptor { name: v.name(db).display(db, edition).to_string(), desc } } Definition::Const(c) => { - MonikerDescriptor { name: c.name(db)?.display(db).to_string(), desc } + MonikerDescriptor { name: c.name(db)?.display(db, edition).to_string(), desc } } Definition::Trait(trait_) => { - MonikerDescriptor { name: trait_.name(db).display(db).to_string(), desc } + MonikerDescriptor { name: trait_.name(db).display(db, edition).to_string(), desc } } Definition::TraitAlias(ta) => { - MonikerDescriptor { name: ta.name(db).display(db).to_string(), desc } + MonikerDescriptor { name: ta.name(db).display(db, edition).to_string(), desc } } Definition::TypeAlias(ta) => { - MonikerDescriptor { name: ta.name(db).display(db).to_string(), desc } + MonikerDescriptor { name: ta.name(db).display(db, edition).to_string(), desc } } Definition::Module(m) => { - MonikerDescriptor { name: m.name(db)?.display(db).to_string(), desc } + MonikerDescriptor { name: m.name(db)?.display(db, edition).to_string(), desc } } Definition::BuiltinType(b) => { - MonikerDescriptor { name: b.name().display(db).to_string(), desc } + MonikerDescriptor { name: b.name().display(db, edition).to_string(), desc } } Definition::SelfType(imp) => MonikerDescriptor { - name: imp.self_ty(db).as_adt()?.name(db).display(db).to_string(), + name: imp.self_ty(db).as_adt()?.name(db).display(db, edition).to_string(), desc, }, Definition::Field(it) => { - MonikerDescriptor { name: it.name(db).display(db).to_string(), 
desc } + MonikerDescriptor { name: it.name(db).display(db, edition).to_string(), desc } } Definition::TupleField(it) => { - MonikerDescriptor { name: it.name().display(db).to_string(), desc } + MonikerDescriptor { name: it.name().display(db, edition).to_string(), desc } } Definition::Adt(adt) => { - MonikerDescriptor { name: adt.name(db).display(db).to_string(), desc } + MonikerDescriptor { name: adt.name(db).display(db, edition).to_string(), desc } } Definition::Static(s) => { - MonikerDescriptor { name: s.name(db).display(db).to_string(), desc } + MonikerDescriptor { name: s.name(db).display(db, edition).to_string(), desc } } Definition::ExternCrateDecl(m) => { - MonikerDescriptor { name: m.name(db).display(db).to_string(), desc } + MonikerDescriptor { name: m.name(db).display(db, edition).to_string(), desc } } }; diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs index 066141d36f16a..9ace9fda62b97 100644 --- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs +++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs @@ -5,14 +5,15 @@ use std::fmt; use arrayvec::ArrayVec; use either::Either; use hir::{ - db::ExpandDatabase, symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource, - HirDisplay, HirFileId, InFile, LocalSource, ModuleSource, + db::ExpandDatabase, symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasCrate, + HasSource, HirDisplay, HirFileId, HirFileIdExt, InFile, LocalSource, ModuleSource, }; use ide_db::{ defs::Definition, documentation::{Documentation, HasDocs}, FileId, FileRange, RootDatabase, SymbolKind, }; +use span::Edition; use stdx::never; use syntax::{ ast::{self, HasName}, @@ -97,7 +98,9 @@ impl NavigationTarget { db: &RootDatabase, module: hir::Module, ) -> UpmappingResult { - let name = module.name(db).map(|it| it.display_no_db().to_smolstr()).unwrap_or_default(); + let edition = module.krate().edition(db); + let name = + module.name(db).map(|it| it.display_no_db(edition).to_smolstr()).unwrap_or_default(); match module.declaration_source(db) { Some(InFile { value, file_id }) => { orig_range_with_focus(db, file_id, value.syntax(), value.name()).map( @@ -110,7 +113,7 @@ impl NavigationTarget { SymbolKind::Module, ); res.docs = module.docs(db); - res.description = Some(module.display(db).to_string()); + res.description = Some(module.display(db, edition).to_string()); res }, ) @@ -175,6 +178,8 @@ impl NavigationTarget { impl TryToNav for FileSymbol { fn try_to_nav(&self, db: &RootDatabase) -> Option> { + let edition = + self.def.module(db).map(|it| it.krate().edition(db)).unwrap_or(Edition::CURRENT); Some( orig_range_with_focus_r( db, @@ -185,27 +190,26 @@ impl TryToNav for FileSymbol { .map(|(FileRange { file_id, range: full_range }, focus_range)| { NavigationTarget { file_id, - name: self - .is_alias - .then(|| self.def.name(db)) - .flatten() - .map_or_else(|| self.name.clone(), |it| it.display_no_db().to_smolstr()), + name: self.is_alias.then(|| self.def.name(db)).flatten().map_or_else( + || self.name.clone(), + |it| it.display_no_db(edition).to_smolstr(), + ), alias: self.is_alias.then(|| self.name.clone()), kind: Some(hir::ModuleDefId::from(self.def).into()), full_range, focus_range, container_name: self.container_name.clone(), description: match self.def { - hir::ModuleDef::Module(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Function(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Adt(it) => 
Some(it.display(db).to_string()), - hir::ModuleDef::Variant(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Const(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Static(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Trait(it) => Some(it.display(db).to_string()), - hir::ModuleDef::TraitAlias(it) => Some(it.display(db).to_string()), - hir::ModuleDef::TypeAlias(it) => Some(it.display(db).to_string()), - hir::ModuleDef::Macro(it) => Some(it.display(db).to_string()), + hir::ModuleDef::Module(it) => Some(it.display(db, edition).to_string()), + hir::ModuleDef::Function(it) => Some(it.display(db, edition).to_string()), + hir::ModuleDef::Adt(it) => Some(it.display(db, edition).to_string()), + hir::ModuleDef::Variant(it) => Some(it.display(db, edition).to_string()), + hir::ModuleDef::Const(it) => Some(it.display(db, edition).to_string()), + hir::ModuleDef::Static(it) => Some(it.display(db, edition).to_string()), + hir::ModuleDef::Trait(it) => Some(it.display(db, edition).to_string()), + hir::ModuleDef::TraitAlias(it) => Some(it.display(db, edition).to_string()), + hir::ModuleDef::TypeAlias(it) => Some(it.display(db, edition).to_string()), + hir::ModuleDef::Macro(it) => Some(it.display(db, edition).to_string()), hir::ModuleDef::BuiltinType(_) => None, }, docs: None, @@ -271,11 +275,13 @@ pub(crate) trait ToNavFromAst: Sized { } } -fn container_name(db: &RootDatabase, t: impl HasContainer) -> Option { +fn container_name(db: &RootDatabase, t: impl HasContainer, edition: Edition) -> Option { match t.container(db) { - hir::ItemContainer::Trait(it) => Some(it.name(db).display_no_db().to_smolstr()), + hir::ItemContainer::Trait(it) => Some(it.name(db).display_no_db(edition).to_smolstr()), // FIXME: Handle owners of blocks correctly here - hir::ItemContainer::Module(it) => it.name(db).map(|name| name.display_no_db().to_smolstr()), + hir::ItemContainer::Module(it) => { + it.name(db).map(|name| name.display_no_db(edition).to_smolstr()) + } _ => None, } } @@ -283,32 +289,32 @@ fn container_name(db: &RootDatabase, t: impl HasContainer) -> Option { impl ToNavFromAst for hir::Function { const KIND: SymbolKind = SymbolKind::Function; fn container_name(self, db: &RootDatabase) -> Option { - container_name(db, self) + container_name(db, self, self.krate(db).edition(db)) } } impl ToNavFromAst for hir::Const { const KIND: SymbolKind = SymbolKind::Const; fn container_name(self, db: &RootDatabase) -> Option { - container_name(db, self) + container_name(db, self, self.krate(db).edition(db)) } } impl ToNavFromAst for hir::Static { const KIND: SymbolKind = SymbolKind::Static; fn container_name(self, db: &RootDatabase) -> Option { - container_name(db, self) + container_name(db, self, self.krate(db).edition(db)) } } impl ToNavFromAst for hir::Struct { const KIND: SymbolKind = SymbolKind::Struct; fn container_name(self, db: &RootDatabase) -> Option { - container_name(db, self) + container_name(db, self, self.krate(db).edition(db)) } } impl ToNavFromAst for hir::Enum { const KIND: SymbolKind = SymbolKind::Enum; fn container_name(self, db: &RootDatabase) -> Option { - container_name(db, self) + container_name(db, self, self.krate(db).edition(db)) } } impl ToNavFromAst for hir::Variant { @@ -317,25 +323,25 @@ impl ToNavFromAst for hir::Variant { impl ToNavFromAst for hir::Union { const KIND: SymbolKind = SymbolKind::Union; fn container_name(self, db: &RootDatabase) -> Option { - container_name(db, self) + container_name(db, self, self.krate(db).edition(db)) } } impl ToNavFromAst for 
hir::TypeAlias { const KIND: SymbolKind = SymbolKind::TypeAlias; fn container_name(self, db: &RootDatabase) -> Option { - container_name(db, self) + container_name(db, self, self.krate(db).edition(db)) } } impl ToNavFromAst for hir::Trait { const KIND: SymbolKind = SymbolKind::Trait; fn container_name(self, db: &RootDatabase) -> Option { - container_name(db, self) + container_name(db, self, self.krate(db).edition(db)) } } impl ToNavFromAst for hir::TraitAlias { const KIND: SymbolKind = SymbolKind::TraitAlias; fn container_name(self, db: &RootDatabase) -> Option { - container_name(db, self) + container_name(db, self, self.krate(db).edition(db)) } } @@ -346,6 +352,7 @@ where { fn try_to_nav(&self, db: &RootDatabase) -> Option> { let src = self.source(db)?; + let edition = src.file_id.original_file(db).edition(); Some( NavigationTarget::from_named( db, @@ -354,7 +361,7 @@ where ) .map(|mut res| { res.docs = self.docs(db); - res.description = Some(self.display(db).to_string()); + res.description = Some(self.display(db, edition).to_string()); res.container_name = self.container_name(db); res }), @@ -365,8 +372,10 @@ where impl ToNav for hir::Module { fn to_nav(&self, db: &RootDatabase) -> UpmappingResult { let InFile { file_id, value } = self.definition_source(db); + let edition = self.krate(db).edition(db); - let name = self.name(db).map(|it| it.display_no_db().to_smolstr()).unwrap_or_default(); + let name = + self.name(db).map(|it| it.display_no_db(edition).to_smolstr()).unwrap_or_default(); let (syntax, focus) = match &value { ModuleSource::SourceFile(node) => (node.syntax(), None), ModuleSource::Module(node) => (node.syntax(), node.name()), @@ -418,6 +427,7 @@ impl TryToNav for hir::ExternCrateDecl { let focus = value .rename() .map_or_else(|| value.name_ref().map(Either::Left), |it| it.name().map(Either::Right)); + let edition = self.module(db).krate().edition(db); Some(orig_range_with_focus(db, file_id, value.syntax(), focus).map( |(FileRange { file_id, range: full_range }, focus_range)| { @@ -425,7 +435,7 @@ impl TryToNav for hir::ExternCrateDecl { file_id, self.alias_or_name(db) .unwrap_or_else(|| self.name(db)) - .display_no_db() + .display_no_db(edition) .to_smolstr(), focus_range, full_range, @@ -433,8 +443,8 @@ impl TryToNav for hir::ExternCrateDecl { ); res.docs = self.docs(db); - res.description = Some(self.display(db).to_string()); - res.container_name = container_name(db, *self); + res.description = Some(self.display(db, edition).to_string()); + res.container_name = container_name(db, *self, edition); res }, )) @@ -444,13 +454,14 @@ impl TryToNav for hir::ExternCrateDecl { impl TryToNav for hir::Field { fn try_to_nav(&self, db: &RootDatabase) -> Option> { let src = self.source(db)?; + let edition = self.parent_def(db).module(db).krate().edition(db); let field_source = match &src.value { FieldSource::Named(it) => { NavigationTarget::from_named(db, src.with_value(it), SymbolKind::Field).map( |mut res| { res.docs = self.docs(db); - res.description = Some(self.display(db).to_string()); + res.description = Some(self.display(db, edition).to_string()); res }, ) @@ -531,10 +542,11 @@ impl ToNav for LocalSource { Either::Left(bind_pat) => (bind_pat.syntax(), bind_pat.name()), Either::Right(it) => (it.syntax(), it.name()), }; + let edition = self.local.parent(db).module(db).krate().edition(db); orig_range_with_focus(db, file_id, node, name).map( |(FileRange { file_id, range: full_range }, focus_range)| { - let name = local.name(db).display_no_db().to_smolstr(); + let name = 
local.name(db).display_no_db(edition).to_smolstr(); let kind = if local.is_self(db) { SymbolKind::SelfParam } else if local.is_param(db) { @@ -567,7 +579,8 @@ impl ToNav for hir::Local { impl TryToNav for hir::Label { fn try_to_nav(&self, db: &RootDatabase) -> Option> { let InFile { file_id, value } = self.source(db)?; - let name = self.name(db).display_no_db().to_smolstr(); + // Labels can't be keywords, so no escaping needed. + let name = self.name(db).display_no_db(Edition::Edition2015).to_smolstr(); Some(orig_range_with_focus(db, file_id, value.syntax(), value.lifetime()).map( |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { @@ -588,7 +601,8 @@ impl TryToNav for hir::Label { impl TryToNav for hir::TypeParam { fn try_to_nav(&self, db: &RootDatabase) -> Option> { let InFile { file_id, value } = self.merge().source(db)?; - let name = self.name(db).display_no_db().to_smolstr(); + let edition = self.module(db).krate().edition(db); + let name = self.name(db).display_no_db(edition).to_smolstr(); let value = match value { Either::Left(ast::TypeOrConstParam::Type(x)) => Either::Left(x), @@ -630,7 +644,8 @@ impl TryToNav for hir::TypeOrConstParam { impl TryToNav for hir::LifetimeParam { fn try_to_nav(&self, db: &RootDatabase) -> Option> { let InFile { file_id, value } = self.source(db)?; - let name = self.name(db).display_no_db().to_smolstr(); + // Lifetimes cannot be keywords, so not escaping needed. + let name = self.name(db).display_no_db(Edition::Edition2015).to_smolstr(); Some(orig_range(db, file_id, value.syntax()).map( |(FileRange { file_id, range: full_range }, focus_range)| NavigationTarget { @@ -651,7 +666,8 @@ impl TryToNav for hir::LifetimeParam { impl TryToNav for hir::ConstParam { fn try_to_nav(&self, db: &RootDatabase) -> Option> { let InFile { file_id, value } = self.merge().source(db)?; - let name = self.name(db).display_no_db().to_smolstr(); + let edition = self.module(db).krate().edition(db); + let name = self.name(db).display_no_db(edition).to_smolstr(); let value = match value { Either::Left(ast::TypeOrConstParam::Const(x)) => x, diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index aef162bf0acea..3e3d5ef6e6558 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -306,8 +306,9 @@ fn handle_control_flow_keywords( FilePosition { file_id, offset }: FilePosition, ) -> Option { let file = sema.parse_guess_edition(file_id); - let token = - file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword(Edition::CURRENT))?; + let edition = + sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT); + let token = file.syntax().token_at_offset(offset).find(|t| t.kind().is_keyword(edition))?; let references = match token.kind() { T![fn] | T![return] | T![try] => highlight_related::highlight_exit_points(sema, token), diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs index ed0e3d89ddc66..42b7472c645f3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs @@ -6,19 +6,14 @@ use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics}; use ide_db::{ - base_db::SourceDatabase, defs::{Definition, NameClass, NameRefClass}, rename::{bail, format_err, source_edit_from_references, IdentifierKind}, source_change::SourceChangeBuilder, FileId, FileRange, RootDatabase, }; use 
itertools::Itertools; -use span::Edition; use stdx::{always, never}; -use syntax::{ - ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange, TextSize, - ToSmolStr, -}; +use syntax::{ast, AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize}; use text_edit::TextEdit; @@ -103,7 +98,7 @@ pub(crate) fn rename( // FIXME: This can use the `ide_db::rename_reference` (or def.rename) method once we can // properly find "direct" usages/references. .map(|(.., def)| { - match IdentifierKind::classify(Edition::CURRENT_FIXME, new_name)? { + match IdentifierKind::classify(new_name)? { IdentifierKind::Ident => (), IdentifierKind::Lifetime => { bail!("Cannot alias reference to a lifetime identifier") @@ -125,7 +120,10 @@ pub(crate) fn rename( let mut source_change = SourceChange::default(); source_change.extend(usages.references.get_mut(&file_id).iter().map(|refs| { - (position.file_id, source_edit_from_references(refs, def, new_name)) + ( + position.file_id, + source_edit_from_references(refs, def, new_name, file_id.edition()), + ) })); Ok(source_change) @@ -163,12 +161,7 @@ pub(crate) fn will_rename_file( let sema = Semantics::new(db); let module = sema.file_to_module_def(file_id)?; let def = Definition::Module(module); - let mut change = - if is_raw_identifier(new_name_stem, db.crate_graph()[module.krate().into()].edition) { - def.rename(&sema, &SmolStr::from_iter(["r#", new_name_stem])).ok()? - } else { - def.rename(&sema, new_name_stem).ok()? - }; + let mut change = def.rename(&sema, new_name_stem).ok()?; change.file_system_edits.clear(); Some(change) } @@ -272,7 +265,7 @@ fn find_definitions( // if the name differs from the definitions name it has to be an alias if def .name(sema.db) - .map_or(false, |it| it.display_no_db().to_smolstr() != name_ref.text().as_str()) + .map_or(false, |it| !it.eq_ident(name_ref.text().as_str())) { Err(format_err!("Renaming aliases is currently unsupported")) } else { @@ -379,7 +372,7 @@ fn rename_to_self( let usages = def.usages(sema).all(); let mut source_change = SourceChange::default(); source_change.extend(usages.iter().map(|(file_id, references)| { - (file_id.into(), source_edit_from_references(references, def, "self")) + (file_id.into(), source_edit_from_references(references, def, "self", file_id.edition())) })); source_change.insert_source_edit( file_id.original_file(sema.db), @@ -400,7 +393,7 @@ fn rename_self_to_param( return Ok(SourceChange::default()); } - let identifier_kind = IdentifierKind::classify(Edition::CURRENT_FIXME, new_name)?; + let identifier_kind = IdentifierKind::classify(new_name)?; let InFile { file_id, value: self_param } = sema.source(self_param).ok_or_else(|| format_err!("cannot find function source"))?; @@ -415,7 +408,7 @@ fn rename_self_to_param( let mut source_change = SourceChange::default(); source_change.insert_source_edit(file_id.original_file(sema.db), edit); source_change.extend(usages.iter().map(|(file_id, references)| { - (file_id.into(), source_edit_from_references(references, def, new_name)) + (file_id.into(), source_edit_from_references(references, def, new_name, file_id.edition())) })); Ok(source_change) } @@ -636,9 +629,9 @@ impl Foo { #[test] fn test_rename_to_invalid_identifier3() { check( - "let", + "super", r#"fn main() { let i$0 = 1; }"#, - "error: Invalid name `let`: not an identifier", + "error: Invalid name `super`: not an identifier", ); } @@ -687,11 +680,7 @@ impl Foo { #[test] fn test_rename_mod_invalid_raw_ident() { - check( - "r#self", - r#"mod foo$0 {}"#, - "error: Invalid name: 
`self` cannot be a raw identifier", - ); + check("r#self", r#"mod foo$0 {}"#, "error: Invalid name `self`: not an identifier"); } #[test] @@ -1545,6 +1534,228 @@ pub fn baz() {} ); } + #[test] + fn test_rename_each_usage_gets_appropriate_rawness() { + check_expect( + "dyn", + r#" +//- /a.rs crate:a edition:2015 +pub fn foo() {} + +//- /b.rs crate:b edition:2018 deps:a new_source_root:local +fn bar() { + a::foo$0(); +} + "#, + expect![[r#" + source_file_edits: [ + ( + FileId( + 0, + ), + [ + Indel { + insert: "dyn", + delete: 7..10, + }, + ], + ), + ( + FileId( + 1, + ), + [ + Indel { + insert: "r#dyn", + delete: 18..21, + }, + ], + ), + ] + file_system_edits: [] + "#]], + ); + + check_expect( + "dyn", + r#" +//- /a.rs crate:a edition:2018 +pub fn foo() {} + +//- /b.rs crate:b edition:2015 deps:a new_source_root:local +fn bar() { + a::foo$0(); +} + "#, + expect![[r#" + source_file_edits: [ + ( + FileId( + 0, + ), + [ + Indel { + insert: "r#dyn", + delete: 7..10, + }, + ], + ), + ( + FileId( + 1, + ), + [ + Indel { + insert: "dyn", + delete: 18..21, + }, + ], + ), + ] + file_system_edits: [] + "#]], + ); + + check_expect( + "r#dyn", + r#" +//- /a.rs crate:a edition:2018 +pub fn foo$0() {} + +//- /b.rs crate:b edition:2015 deps:a new_source_root:local +fn bar() { + a::foo(); +} + "#, + expect![[r#" + source_file_edits: [ + ( + FileId( + 0, + ), + [ + Indel { + insert: "r#dyn", + delete: 7..10, + }, + ], + ), + ( + FileId( + 1, + ), + [ + Indel { + insert: "dyn", + delete: 18..21, + }, + ], + ), + ] + file_system_edits: [] + "#]], + ); + } + + #[test] + fn rename_raw_identifier() { + check_expect( + "abc", + r#" +//- /a.rs crate:a edition:2015 +pub fn dyn() {} + +fn foo() { + dyn$0(); +} + +//- /b.rs crate:b edition:2018 deps:a new_source_root:local +fn bar() { + a::r#dyn(); +} + "#, + expect![[r#" + source_file_edits: [ + ( + FileId( + 0, + ), + [ + Indel { + insert: "abc", + delete: 7..10, + }, + Indel { + insert: "abc", + delete: 32..35, + }, + ], + ), + ( + FileId( + 1, + ), + [ + Indel { + insert: "abc", + delete: 18..23, + }, + ], + ), + ] + file_system_edits: [] + "#]], + ); + + check_expect( + "abc", + r#" +//- /a.rs crate:a edition:2018 +pub fn r#dyn() {} + +fn foo() { + r#dyn$0(); +} + +//- /b.rs crate:b edition:2015 deps:a new_source_root:local +fn bar() { + a::dyn(); +} + "#, + expect![[r#" + source_file_edits: [ + ( + FileId( + 0, + ), + [ + Indel { + insert: "abc", + delete: 7..12, + }, + Indel { + insert: "abc", + delete: 34..39, + }, + ], + ), + ( + FileId( + 1, + ), + [ + Indel { + insert: "abc", + delete: 18..21, + }, + ], + ), + ] + file_system_edits: [] + "#]], + ); + } + #[test] fn test_enum_variant_from_module_1() { cov_mark::check!(rename_non_local); diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index 5d4b8b3643943..38dc522789d71 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -3,7 +3,8 @@ use std::fmt; use ast::HasName; use cfg::{CfgAtom, CfgExpr}; use hir::{ - db::HirDatabase, sym, AsAssocItem, AttrsWithOwner, HasAttrs, HasSource, HirFileIdExt, Semantics, + db::HirDatabase, sym, AsAssocItem, AttrsWithOwner, HasAttrs, HasCrate, HasSource, HirFileIdExt, + Semantics, }; use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn}; use ide_db::{ @@ -14,7 +15,7 @@ use ide_db::{ FilePosition, FxHashMap, FxHashSet, RootDatabase, SymbolKind, }; use itertools::Itertools; -use span::TextSize; +use 
span::{Edition, TextSize}; use stdx::{always, format_to}; use syntax::{ ast::{self, AstNode}, @@ -321,6 +322,7 @@ pub(crate) fn runnable_fn( sema: &Semantics<'_, RootDatabase>, def: hir::Function, ) -> Option { + let edition = def.krate(sema.db).edition(sema.db); let under_cfg_test = has_cfg_test(def.module(sema.db).attrs(sema.db)); let kind = if !under_cfg_test && def.is_main(sema.db) { RunnableKind::Bin @@ -328,11 +330,11 @@ pub(crate) fn runnable_fn( let test_id = || { let canonical_path = { let def: hir::ModuleDef = def.into(); - def.canonical_path(sema.db) + def.canonical_path(sema.db, edition) }; canonical_path .map(TestId::Path) - .unwrap_or(TestId::Name(def.name(sema.db).display_no_db().to_smolstr())) + .unwrap_or(TestId::Name(def.name(sema.db).display_no_db(edition).to_smolstr())) }; if def.is_test(sema.db) { @@ -367,8 +369,11 @@ pub(crate) fn runnable_mod( .path_to_root(sema.db) .into_iter() .rev() - .filter_map(|it| it.name(sema.db)) - .map(|it| it.display(sema.db).to_string()) + .filter_map(|module| { + module.name(sema.db).map(|mod_name| { + mod_name.display(sema.db, module.krate().edition(sema.db)).to_string() + }) + }) .join("::"); let attrs = def.attrs(sema.db); @@ -381,6 +386,7 @@ pub(crate) fn runnable_impl( sema: &Semantics<'_, RootDatabase>, def: &hir::Impl, ) -> Option { + let edition = def.module(sema.db).krate().edition(sema.db); let attrs = def.attrs(sema.db); if !has_runnable_doc_test(&attrs) { return None; @@ -389,13 +395,13 @@ pub(crate) fn runnable_impl( let nav = def.try_to_nav(sema.db)?.call_site(); let ty = def.self_ty(sema.db); let adt_name = ty.as_adt()?.name(sema.db); - let mut ty_args = ty.generic_parameters(sema.db).peekable(); + let mut ty_args = ty.generic_parameters(sema.db, edition).peekable(); let params = if ty_args.peek().is_some() { format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))) } else { String::new() }; - let mut test_id = format!("{}{params}", adt_name.display(sema.db)); + let mut test_id = format!("{}{params}", adt_name.display(sema.db, edition)); test_id.retain(|c| c != ' '); let test_id = TestId::Path(test_id); @@ -419,8 +425,11 @@ fn runnable_mod_outline_definition( .path_to_root(sema.db) .into_iter() .rev() - .filter_map(|it| it.name(sema.db)) - .map(|it| it.display(sema.db).to_string()) + .filter_map(|module| { + module.name(sema.db).map(|mod_name| { + mod_name.display(sema.db, module.krate().edition(sema.db)).to_string() + }) + }) .join("::"); let attrs = def.attrs(sema.db); @@ -452,6 +461,7 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { Definition::SelfType(it) => it.attrs(db), _ => return None, }; + let edition = def.krate(db).map(|it| it.edition(db)).unwrap_or(Edition::CURRENT); if !has_runnable_doc_test(&attrs) { return None; } @@ -460,29 +470,29 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { let mut path = String::new(); def.canonical_module_path(db)? .flat_map(|it| it.name(db)) - .for_each(|name| format_to!(path, "{}::", name.display(db))); + .for_each(|name| format_to!(path, "{}::", name.display(db, edition))); // This probably belongs to canonical_path? 
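// (Descriptive note: the branch below appends the implementing self type, with
// its generic arguments if any, and the item name to the module path, then
// strips spaces, so a doctest on an associated item ends up with a test id
// roughly of the form `module::Foo<T>::bar` rather than just the module path.)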
if let Some(assoc_item) = def.as_assoc_item(db) { if let Some(ty) = assoc_item.implementing_ty(db) { if let Some(adt) = ty.as_adt() { let name = adt.name(db); - let mut ty_args = ty.generic_parameters(db).peekable(); - format_to!(path, "{}", name.display(db)); + let mut ty_args = ty.generic_parameters(db, edition).peekable(); + format_to!(path, "{}", name.display(db, edition)); if ty_args.peek().is_some() { format_to!(path, "<{}>", ty_args.format_with(",", |ty, cb| cb(&ty))); } - format_to!(path, "::{}", def_name.display(db)); + format_to!(path, "::{}", def_name.display(db, edition)); path.retain(|c| c != ' '); return Some(path); } } } - format_to!(path, "{}", def_name.display(db)); + format_to!(path, "{}", def_name.display(db, edition)); Some(path) })(); - let test_id = - path.map_or_else(|| TestId::Name(def_name.display_no_db().to_smolstr()), TestId::Path); + let test_id = path + .map_or_else(|| TestId::Name(def_name.display_no_db(edition).to_smolstr()), TestId::Path); let mut nav = match def { Definition::Module(def) => NavigationTarget::from_module_to_decl(db, def), diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs index b6c9e2f6366a7..9d3b8c6ebd19b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs +++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs @@ -13,6 +13,7 @@ use ide_db::{ documentation::{Documentation, HasDocs}, FilePosition, FxIndexMap, }; +use span::Edition; use stdx::format_to; use syntax::{ algo, @@ -82,6 +83,8 @@ pub(crate) fn signature_help( // this prevents us from leaving the CallExpression .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?; let token = sema.descend_into_macros_single(DescendPreference::None, token); + let edition = + sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT); for node in token.parent_ancestors() { match_ast! 
{ @@ -91,49 +94,49 @@ pub(crate) fn signature_help( if cursor_outside { continue; } - return signature_help_for_call(&sema, arg_list, token); + return signature_help_for_call(&sema, arg_list, token, edition); }, ast::GenericArgList(garg_list) => { let cursor_outside = garg_list.r_angle_token().as_ref() == Some(&token); if cursor_outside { continue; } - return signature_help_for_generics(&sema, garg_list, token); + return signature_help_for_generics(&sema, garg_list, token, edition); }, ast::RecordExpr(record) => { let cursor_outside = record.record_expr_field_list().and_then(|list| list.r_curly_token()).as_ref() == Some(&token); if cursor_outside { continue; } - return signature_help_for_record_lit(&sema, record, token); + return signature_help_for_record_lit(&sema, record, token, edition); }, ast::RecordPat(record) => { let cursor_outside = record.record_pat_field_list().and_then(|list| list.r_curly_token()).as_ref() == Some(&token); if cursor_outside { continue; } - return signature_help_for_record_pat(&sema, record, token); + return signature_help_for_record_pat(&sema, record, token, edition); }, ast::TupleStructPat(tuple_pat) => { let cursor_outside = tuple_pat.r_paren_token().as_ref() == Some(&token); if cursor_outside { continue; } - return signature_help_for_tuple_struct_pat(&sema, tuple_pat, token); + return signature_help_for_tuple_struct_pat(&sema, tuple_pat, token, edition); }, ast::TuplePat(tuple_pat) => { let cursor_outside = tuple_pat.r_paren_token().as_ref() == Some(&token); if cursor_outside { continue; } - return signature_help_for_tuple_pat(&sema, tuple_pat, token); + return signature_help_for_tuple_pat(&sema, tuple_pat, token, edition); }, ast::TupleExpr(tuple_expr) => { let cursor_outside = tuple_expr.r_paren_token().as_ref() == Some(&token); if cursor_outside { continue; } - return signature_help_for_tuple_expr(&sema, tuple_expr, token); + return signature_help_for_tuple_expr(&sema, tuple_expr, token, edition); }, _ => (), } @@ -157,6 +160,7 @@ fn signature_help_for_call( sema: &Semantics<'_, RootDatabase>, arg_list: ast::ArgList, token: SyntaxToken, + edition: Edition, ) -> Option { // Find the calling expression and its NameRef let mut nodes = arg_list.syntax().ancestors().skip(1); @@ -181,7 +185,7 @@ fn signature_help_for_call( match callable.kind() { hir::CallableKind::Function(func) => { res.doc = func.docs(db); - format_to!(res.signature, "fn {}", func.name(db).display(db)); + format_to!(res.signature, "fn {}", func.name(db).display(db, edition)); fn_params = Some(match callable.receiver_param(db) { Some(_self) => func.params_without_self(db), None => func.assoc_fn_params(db), @@ -189,15 +193,15 @@ fn signature_help_for_call( } hir::CallableKind::TupleStruct(strukt) => { res.doc = strukt.docs(db); - format_to!(res.signature, "struct {}", strukt.name(db).display(db)); + format_to!(res.signature, "struct {}", strukt.name(db).display(db, edition)); } hir::CallableKind::TupleEnumVariant(variant) => { res.doc = variant.docs(db); format_to!( res.signature, "enum {}::{}", - variant.parent_enum(db).name(db).display(db), - variant.name(db).display(db) + variant.parent_enum(db).name(db).display(db, edition), + variant.name(db).display(db, edition) ); } hir::CallableKind::Closure(closure) => { @@ -210,7 +214,7 @@ fn signature_help_for_call( Some(adt) => format_to!( res.signature, "<{} as {fn_trait}>::{}", - adt.name(db).display(db), + adt.name(db).display(db, edition), fn_trait.function_name() ), None => format_to!(res.signature, "impl {fn_trait}"), @@ -220,7 +224,7 @@ fn 
signature_help_for_call( res.signature.push('('); { if let Some((self_param, _)) = callable.receiver_param(db) { - format_to!(res.signature, "{}", self_param.display(db)) + format_to!(res.signature, "{}", self_param.display(db, edition)) } let mut buf = String::new(); for (idx, p) in callable.params().into_iter().enumerate() { @@ -240,8 +244,10 @@ fn signature_help_for_call( // This is overly conservative: we do not substitute known type vars // (see FIXME in tests::impl_trait) and falling back on any unknowns. match (p.ty().contains_unknown(), fn_params.as_deref()) { - (true, Some(fn_params)) => format_to!(buf, "{}", fn_params[idx].ty().display(db)), - _ => format_to!(buf, "{}", p.ty().display(db)), + (true, Some(fn_params)) => { + format_to!(buf, "{}", fn_params[idx].ty().display(db, edition)) + } + _ => format_to!(buf, "{}", p.ty().display(db, edition)), } res.push_call_param(&buf); } @@ -250,7 +256,7 @@ fn signature_help_for_call( let mut render = |ret_type: hir::Type| { if !ret_type.is_unit() { - format_to!(res.signature, " -> {}", ret_type.display(db)); + format_to!(res.signature, " -> {}", ret_type.display(db, edition)); } }; match callable.kind() { @@ -270,6 +276,7 @@ fn signature_help_for_generics( sema: &Semantics<'_, RootDatabase>, arg_list: ast::GenericArgList, token: SyntaxToken, + edition: Edition, ) -> Option { let (generics_def, mut active_parameter, first_arg_is_non_lifetime, variant) = generic_def_for_node(sema, &arg_list, &token)?; @@ -284,11 +291,11 @@ fn signature_help_for_generics( match generics_def { hir::GenericDef::Function(it) => { res.doc = it.docs(db); - format_to!(res.signature, "fn {}", it.name(db).display(db)); + format_to!(res.signature, "fn {}", it.name(db).display(db, edition)); } hir::GenericDef::Adt(hir::Adt::Enum(it)) => { res.doc = it.docs(db); - format_to!(res.signature, "enum {}", it.name(db).display(db)); + format_to!(res.signature, "enum {}", it.name(db).display(db, edition)); if let Some(variant) = variant { // In paths, generics of an enum can be specified *after* one of its variants. // eg. 
`None::` @@ -298,23 +305,23 @@ fn signature_help_for_generics( } hir::GenericDef::Adt(hir::Adt::Struct(it)) => { res.doc = it.docs(db); - format_to!(res.signature, "struct {}", it.name(db).display(db)); + format_to!(res.signature, "struct {}", it.name(db).display(db, edition)); } hir::GenericDef::Adt(hir::Adt::Union(it)) => { res.doc = it.docs(db); - format_to!(res.signature, "union {}", it.name(db).display(db)); + format_to!(res.signature, "union {}", it.name(db).display(db, edition)); } hir::GenericDef::Trait(it) => { res.doc = it.docs(db); - format_to!(res.signature, "trait {}", it.name(db).display(db)); + format_to!(res.signature, "trait {}", it.name(db).display(db, edition)); } hir::GenericDef::TraitAlias(it) => { res.doc = it.docs(db); - format_to!(res.signature, "trait {}", it.name(db).display(db)); + format_to!(res.signature, "trait {}", it.name(db).display(db, edition)); } hir::GenericDef::TypeAlias(it) => { res.doc = it.docs(db); - format_to!(res.signature, "type {}", it.name(db).display(db)); + format_to!(res.signature, "type {}", it.name(db).display(db, edition)); } // These don't have generic args that can be specified hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) => return None, @@ -339,11 +346,11 @@ fn signature_help_for_generics( } buf.clear(); - format_to!(buf, "{}", param.display(db)); + format_to!(buf, "{}", param.display(db, edition)); res.push_generic_param(&buf); } if let hir::GenericDef::Trait(tr) = generics_def { - add_assoc_type_bindings(db, &mut res, tr, arg_list); + add_assoc_type_bindings(db, &mut res, tr, arg_list, edition); } res.signature.push('>'); @@ -355,6 +362,7 @@ fn add_assoc_type_bindings( res: &mut SignatureHelp, tr: Trait, args: ast::GenericArgList, + edition: Edition, ) { if args.syntax().ancestors().find_map(ast::TypeBound::cast).is_none() { // Assoc type bindings are only valid in type bound position. 
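// (Descriptive note: given a bound such as `T: Iterator<$0>`, the loop in the
// next hunk offers the trait's remaining associated types as `Item = …` style
// entries, skipping any binding that is already written in the argument list.)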
@@ -378,7 +386,7 @@ fn add_assoc_type_bindings( for item in tr.items_with_supertraits(db) { if let AssocItem::TypeAlias(ty) = item { - let name = ty.name(db).display_no_db().to_smolstr(); + let name = ty.name(db).display_no_db(edition).to_smolstr(); if !present_bindings.contains(&*name) { buf.clear(); format_to!(buf, "{} = …", name); @@ -392,6 +400,7 @@ fn signature_help_for_record_lit( sema: &Semantics<'_, RootDatabase>, record: ast::RecordExpr, token: SyntaxToken, + edition: Edition, ) -> Option { signature_help_for_record_( sema, @@ -403,6 +412,7 @@ fn signature_help_for_record_lit( .filter_map(|field| sema.resolve_record_field(&field)) .map(|(field, _, ty)| (field, ty)), token, + edition, ) } @@ -410,6 +420,7 @@ fn signature_help_for_record_pat( sema: &Semantics<'_, RootDatabase>, record: ast::RecordPat, token: SyntaxToken, + edition: Edition, ) -> Option { signature_help_for_record_( sema, @@ -420,6 +431,7 @@ fn signature_help_for_record_pat( .fields() .filter_map(|field| sema.resolve_record_pat_field(&field)), token, + edition, ) } @@ -427,6 +439,7 @@ fn signature_help_for_tuple_struct_pat( sema: &Semantics<'_, RootDatabase>, pat: ast::TupleStructPat, token: SyntaxToken, + edition: Edition, ) -> Option { let path = pat.path()?; let path_res = sema.resolve_path(&path)?; @@ -445,8 +458,8 @@ fn signature_help_for_tuple_struct_pat( format_to!( res.signature, "enum {}::{} (", - en.name(db).display(db), - variant.name(db).display(db) + en.name(db).display(db, edition), + variant.name(db).display(db, edition) ); variant.fields(db) } else { @@ -459,7 +472,7 @@ fn signature_help_for_tuple_struct_pat( match adt { hir::Adt::Struct(it) => { res.doc = it.docs(db); - format_to!(res.signature, "struct {} (", it.name(db).display(db)); + format_to!(res.signature, "struct {} (", it.name(db).display(db, edition)); it.fields(db) } _ => return None, @@ -472,6 +485,7 @@ fn signature_help_for_tuple_struct_pat( token, pat.fields(), fields.into_iter().map(|it| it.ty(db)), + edition, )) } @@ -479,6 +493,7 @@ fn signature_help_for_tuple_pat( sema: &Semantics<'_, RootDatabase>, pat: ast::TuplePat, token: SyntaxToken, + edition: Edition, ) -> Option { let db = sema.db; let field_pats = pat.fields(); @@ -498,6 +513,7 @@ fn signature_help_for_tuple_pat( token, field_pats, fields.into_iter(), + edition, )) } @@ -505,6 +521,7 @@ fn signature_help_for_tuple_expr( sema: &Semantics<'_, RootDatabase>, expr: ast::TupleExpr, token: SyntaxToken, + edition: Edition, ) -> Option { let active_parameter = Some( expr.syntax() @@ -526,7 +543,7 @@ fn signature_help_for_tuple_expr( let fields = expr.original.tuple_fields(db); let mut buf = String::new(); for ty in fields { - format_to!(buf, "{}", ty.display_truncated(db, Some(20))); + format_to!(buf, "{}", ty.display_truncated(db, Some(20), edition)); res.push_call_param(&buf); buf.clear(); } @@ -540,6 +557,7 @@ fn signature_help_for_record_( path: &ast::Path, fields2: impl Iterator, token: SyntaxToken, + edition: Edition, ) -> Option { let active_parameter = field_list_children .filter_map(NodeOrToken::into_token) @@ -566,8 +584,8 @@ fn signature_help_for_record_( format_to!( res.signature, "enum {}::{} {{ ", - en.name(db).display(db), - variant.name(db).display(db) + en.name(db).display(db, edition), + variant.name(db).display(db, edition) ); } else { let adt = match path_res { @@ -580,12 +598,12 @@ fn signature_help_for_record_( hir::Adt::Struct(it) => { fields = it.fields(db); res.doc = it.docs(db); - format_to!(res.signature, "struct {} {{ ", it.name(db).display(db)); + 
format_to!(res.signature, "struct {} {{ ", it.name(db).display(db, edition)); } hir::Adt::Union(it) => { fields = it.fields(db); res.doc = it.docs(db); - format_to!(res.signature, "union {} {{ ", it.name(db).display(db)); + format_to!(res.signature, "union {} {{ ", it.name(db).display(db, edition)); } _ => return None, } @@ -596,7 +614,12 @@ fn signature_help_for_record_( let mut buf = String::new(); for (field, ty) in fields2 { let name = field.name(db); - format_to!(buf, "{}: {}", name.display(db), ty.display_truncated(db, Some(20))); + format_to!( + buf, + "{}: {}", + name.display(db, edition), + ty.display_truncated(db, Some(20), edition) + ); res.push_record_field(&buf); buf.clear(); @@ -606,7 +629,12 @@ fn signature_help_for_record_( } for (name, field) in fields { let Some(field) = field else { continue }; - format_to!(buf, "{}: {}", name.display(db), field.ty(db).display_truncated(db, Some(20))); + format_to!( + buf, + "{}: {}", + name.display(db, edition), + field.ty(db).display_truncated(db, Some(20), edition) + ); res.push_record_field(&buf); buf.clear(); } @@ -621,6 +649,7 @@ fn signature_help_for_tuple_pat_ish( token: SyntaxToken, mut field_pats: AstChildren, fields: impl ExactSizeIterator, + edition: Edition, ) -> SignatureHelp { let rest_pat = field_pats.find(|it| matches!(it, ast::Pat::RestPat(_))); let is_left_of_rest_pat = @@ -647,7 +676,7 @@ fn signature_help_for_tuple_pat_ish( let mut buf = String::new(); for ty in fields { - format_to!(buf, "{}", ty.display_truncated(db, Some(20))); + format_to!(buf, "{}", ty.display_truncated(db, Some(20), edition)); res.push_call_param(&buf); buf.clear(); } diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index eaccee08e8c01..0f8d03c8d5591 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -10,6 +10,7 @@ use ide_db::{ helpers::get_definition, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, }; +use span::Edition; use syntax::{AstNode, SyntaxKind::*, SyntaxNode, TextRange, T}; use crate::inlay_hints::InlayFieldsToResolve; @@ -116,7 +117,11 @@ fn documentation_for_definition( _ => None, }; - def.docs(sema.db, famous_defs.as_ref()) + def.docs( + sema.db, + famous_defs.as_ref(), + def.krate(sema.db).map(|it| it.edition(sema.db)).unwrap_or(Edition::CURRENT), + ) } impl StaticIndex<'_> { @@ -161,6 +166,8 @@ impl StaticIndex<'_> { // hovers let sema = hir::Semantics::new(self.db); let tokens_or_nodes = sema.parse_guess_edition(file_id).syntax().clone(); + let edition = + sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT); let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|it| match it { syntax::NodeOrToken::Node(_) => None, syntax::NodeOrToken::Token(it) => Some(it), @@ -201,17 +208,20 @@ impl StaticIndex<'_> { &node, None, &hover_config, + edition, )), definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| { FileRange { file_id: it.file_id, range: it.focus_or_full_range() } }), references: vec![], moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)), - display_name: def.name(self.db).map(|name| name.display(self.db).to_string()), + display_name: def + .name(self.db) + .map(|name| name.display(self.db, edition).to_string()), enclosing_moniker: current_crate .zip(def.enclosing_definition(self.db)) .and_then(|(cc, enclosing_def)| def_to_moniker(self.db, enclosing_def, cc)), - signature: 
Some(def.label(self.db)), + signature: Some(def.label(self.db, edition)), kind: def_to_kind(self.db, def), }); self.def_map.insert(def, it); diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index 23185920058b7..e082fcdc76589 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -501,7 +501,9 @@ fn traverse( config.syntactic_name_ref_highlighting, name_like, ), - NodeOrToken::Token(token) => highlight::token(sema, token).zip(Some(None)), + NodeOrToken::Token(token) => { + highlight::token(sema, token, file_id.edition()).zip(Some(None)) + } }; if let Some((mut highlight, binding_hash)) = element { if is_unlinked && highlight.tag == HlTag::UnresolvedReference { diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs index 70fdc0a1b3a7e..eeba9cf35c997 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/highlight.rs @@ -19,7 +19,11 @@ use crate::{ Highlight, HlMod, HlTag, }; -pub(super) fn token(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option { +pub(super) fn token( + sema: &Semantics<'_, RootDatabase>, + token: SyntaxToken, + edition: Edition, +) -> Option { if let Some(comment) = ast::Comment::cast(token.clone()) { let h = HlTag::Comment; return Some(match comment.kind().doc { @@ -42,7 +46,7 @@ pub(super) fn token(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> O HlTag::None.into() } p if p.is_punct() => punctuation(sema, token, p), - k if k.is_keyword(Edition::CURRENT) => keyword(sema, token, k)?, + k if k.is_keyword(edition) => keyword(sema, token, k)?, _ => return None, }; Some(highlight) diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs index 460fc4fe141c7..b441b4cc90eb6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/macro_.rs @@ -1,5 +1,4 @@ //! Syntax highlighting for macro_rules!. 
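// (Descriptive note: the change below replaces the explicit
// `IDENT || is_keyword(Edition::CURRENT)` check with the new
// `SyntaxKind::is_any_identifier` helper, so a `$` followed by either an
// identifier or a keyword is still treated as a metavariable.)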
-use span::Edition; use syntax::{SyntaxKind, SyntaxToken, TextRange, T}; use crate::{HlRange, HlTag}; @@ -118,7 +117,7 @@ fn update_macro_state(state: &mut MacroMatcherParseState, tok: &SyntaxToken) { fn is_metavariable(token: &SyntaxToken) -> Option { match token.kind() { - kind if kind == SyntaxKind::IDENT || kind.is_keyword(Edition::CURRENT) => { + kind if kind.is_any_identifier() => { if let Some(_dollar) = token.prev_token().filter(|t| t.kind() == T![$]) { return Some(token.text_range()); } diff --git a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs index dae79998dc4eb..a6352b99d4f52 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_item_tree.rs @@ -16,5 +16,5 @@ pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String { let file_id = sema .attach_first_edition(file_id) .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); - db.file_item_tree(file_id.into()).pretty_print(db) + db.file_item_tree(file_id.into()).pretty_print(db, file_id.edition()) } diff --git a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs index df3f2f18b4cdf..830c39e21ea53 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs @@ -6,6 +6,7 @@ use ide_db::{ helpers::{get_definition, pick_best_token}, RootDatabase, }; +use span::Edition; use syntax::{AstNode, SyntaxKind}; use crate::FilePosition; @@ -85,6 +86,10 @@ pub(crate) fn view_memory_layout( ) -> Option { let sema = Semantics::new(db); let file = sema.parse_guess_edition(position.file_id); + let edition = sema + .attach_first_edition(position.file_id) + .map(|it| it.edition()) + .unwrap_or(Edition::CURRENT); let token = pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind { SyntaxKind::IDENT => 3, @@ -111,6 +116,7 @@ pub(crate) fn view_memory_layout( ty: &Type, layout: &Layout, parent_idx: usize, + edition: Edition, ) { let mut fields = ty .fields(db) @@ -141,7 +147,7 @@ pub(crate) fn view_memory_layout( if let Ok(child_layout) = child_ty.layout(db) { nodes.push(MemoryLayoutNode { item_name: field.name(db), - typename: child_ty.display(db).to_string(), + typename: child_ty.display(db, edition).to_string(), size: child_layout.size(), alignment: child_layout.align(), offset: match *field { @@ -157,7 +163,7 @@ pub(crate) fn view_memory_layout( item_name: field.name(db) + format!("(no layout data: {:?})", child_ty.layout(db).unwrap_err()) .as_ref(), - typename: child_ty.display(db).to_string(), + typename: child_ty.display(db, edition).to_string(), size: 0, offset: 0, alignment: 0, @@ -170,7 +176,7 @@ pub(crate) fn view_memory_layout( for (i, (_, child_ty)) in fields.iter().enumerate() { if let Ok(child_layout) = child_ty.layout(db) { - read_layout(nodes, db, child_ty, &child_layout, children_start + i); + read_layout(nodes, db, child_ty, &child_layout, children_start + i, edition); } } } @@ -188,7 +194,7 @@ pub(crate) fn view_memory_layout( def => def.name(db).map(|n| n.as_str().to_owned()).unwrap_or("[ROOT]".to_owned()), }; - let typename = ty.display(db).to_string(); + let typename = ty.display(db, edition).to_string(); let mut nodes = vec![MemoryLayoutNode { item_name, @@ -200,7 +206,7 @@ pub(crate) fn view_memory_layout( children_start: -1, children_len: 0, }]; - read_layout(&mut nodes, db, &ty, &layout, 0); + 
read_layout(&mut nodes, db, &ty, &layout, 0, edition); RecursiveMemoryLayout { nodes } }) diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs index 3ca6bd4cb111c..6a8cca9ccc79d 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind.rs @@ -3,6 +3,8 @@ mod generated; +use crate::Edition; + #[allow(unreachable_pub)] pub use self::generated::SyntaxKind; @@ -26,4 +28,11 @@ impl SyntaxKind { pub fn is_trivia(self) -> bool { matches!(self, SyntaxKind::WHITESPACE | SyntaxKind::COMMENT) } + + /// Returns true if this is an identifier or a keyword. + #[inline] + pub fn is_any_identifier(self) -> bool { + // Assuming no edition removed keywords... + self == SyntaxKind::IDENT || self.is_keyword(Edition::LATEST) + } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs index 0bd6677b66237..5eb6ff664f665 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs @@ -21,7 +21,7 @@ use std::io::Read; use anyhow::Result; use hir::{Module, Name}; use hir_ty::db::HirDatabase; -use ide::AnalysisHost; +use ide::{AnalysisHost, Edition}; use itertools::Itertools; use vfs::Vfs; @@ -85,6 +85,6 @@ fn full_name_of_item(db: &dyn HirDatabase, module: Module, name: Name) -> String .rev() .filter_map(|it| it.name(db)) .chain(Some(name)) - .map(|it| it.display(db.upcast()).to_string()) + .map(|it| it.display(db.upcast(), Edition::LATEST).to_string()) .join("::") } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index 06f4ba815d0dd..44e56645ba3c3 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -17,7 +17,7 @@ use hir_def::{ }; use hir_ty::{Interner, Substitution, TyExt, TypeFlags}; use ide::{ - Analysis, AnalysisHost, AnnotationConfig, DiagnosticsConfig, InlayFieldsToResolve, + Analysis, AnalysisHost, AnnotationConfig, DiagnosticsConfig, Edition, InlayFieldsToResolve, InlayHintsConfig, LineCol, RootDatabase, }; use ide_db::{ @@ -309,7 +309,7 @@ impl flags::AnalysisStats { let mut fail = 0; for &c in consts { all += 1; - let Err(error) = c.render_eval(db) else { + let Err(error) = c.render_eval(db, Edition::LATEST) else { continue; }; if verbosity.is_spammy() { @@ -442,6 +442,7 @@ impl flags::AnalysisStats { prefer_prelude: true, prefer_absolute: false, }, + Edition::LATEST, ) .unwrap(); syntax_hit_found |= trim(&original_text) == trim(&generated); @@ -563,7 +564,7 @@ impl flags::AnalysisStats { .rev() .filter_map(|it| it.name(db)) .chain(Some(body.name(db).unwrap_or_else(Name::missing))) - .map(|it| it.display(db).to_string()) + .map(|it| it.display(db, Edition::LATEST).to_string()) .join("::"); println!("Mir body for {full_name} failed due {e:?}"); } @@ -628,12 +629,14 @@ impl flags::AnalysisStats { .filter_map(|it| it.name(db)) .rev() .chain(Some(body_id.name(db).unwrap_or_else(Name::missing))) - .map(|it| it.display(db).to_string()), + .map(|it| it.display(db, Edition::LATEST).to_string()), ) .join("::") }; if let Some(only_name) = self.only.as_deref() { - if name.display(db).to_string() != only_name && full_name() != only_name { + if name.display(db, Edition::LATEST).to_string() != only_name + && 
full_name() != only_name + { continue; } } @@ -687,7 +690,10 @@ impl flags::AnalysisStats { end.col, )); } else { - bar.println(format!("{}: Unknown type", name.display(db))); + bar.println(format!( + "{}: Unknown type", + name.display(db, Edition::LATEST) + )); } } true @@ -708,17 +714,20 @@ impl flags::AnalysisStats { start.col, end.line + 1, end.col, - ty.display(db) + ty.display(db, Edition::LATEST) )); } else { - bar.println(format!("unknown location: {}", ty.display(db))); + bar.println(format!( + "unknown location: {}", + ty.display(db, Edition::LATEST) + )); } } if unknown_or_partial && self.output == Some(OutputFormat::Csv) { println!( r#"{},type,"{}""#, location_csv_expr(db, vfs, &sm(), expr_id), - ty.display(db) + ty.display(db, Edition::LATEST) ); } if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) { @@ -733,15 +742,15 @@ impl flags::AnalysisStats { start.col, end.line + 1, end.col, - mismatch.expected.display(db), - mismatch.actual.display(db) + mismatch.expected.display(db, Edition::LATEST), + mismatch.actual.display(db, Edition::LATEST) )); } else { bar.println(format!( "{}: Expected {}, got {}", - name.display(db), - mismatch.expected.display(db), - mismatch.actual.display(db) + name.display(db, Edition::LATEST), + mismatch.expected.display(db, Edition::LATEST), + mismatch.actual.display(db, Edition::LATEST) )); } } @@ -749,8 +758,8 @@ impl flags::AnalysisStats { println!( r#"{},mismatch,"{}","{}""#, location_csv_expr(db, vfs, &sm(), expr_id), - mismatch.expected.display(db), - mismatch.actual.display(db) + mismatch.expected.display(db, Edition::LATEST), + mismatch.actual.display(db, Edition::LATEST) ); } } @@ -785,7 +794,10 @@ impl flags::AnalysisStats { end.col, )); } else { - bar.println(format!("{}: Unknown type", name.display(db))); + bar.println(format!( + "{}: Unknown type", + name.display(db, Edition::LATEST) + )); } } true @@ -806,17 +818,20 @@ impl flags::AnalysisStats { start.col, end.line + 1, end.col, - ty.display(db) + ty.display(db, Edition::LATEST) )); } else { - bar.println(format!("unknown location: {}", ty.display(db))); + bar.println(format!( + "unknown location: {}", + ty.display(db, Edition::LATEST) + )); } } if unknown_or_partial && self.output == Some(OutputFormat::Csv) { println!( r#"{},type,"{}""#, location_csv_pat(db, vfs, &sm(), pat_id), - ty.display(db) + ty.display(db, Edition::LATEST) ); } if let Some(mismatch) = inference_result.type_mismatch_for_pat(pat_id) { @@ -830,15 +845,15 @@ impl flags::AnalysisStats { start.col, end.line + 1, end.col, - mismatch.expected.display(db), - mismatch.actual.display(db) + mismatch.expected.display(db, Edition::LATEST), + mismatch.actual.display(db, Edition::LATEST) )); } else { bar.println(format!( "{}: Expected {}, got {}", - name.display(db), - mismatch.expected.display(db), - mismatch.actual.display(db) + name.display(db, Edition::LATEST), + mismatch.expected.display(db, Edition::LATEST), + mismatch.actual.display(db, Edition::LATEST) )); } } @@ -846,8 +861,8 @@ impl flags::AnalysisStats { println!( r#"{},mismatch,"{}","{}""#, location_csv_pat(db, vfs, &sm(), pat_id), - mismatch.expected.display(db), - mismatch.actual.display(db) + mismatch.expected.display(db, Edition::LATEST), + mismatch.actual.display(db, Edition::LATEST) ); } } @@ -923,12 +938,16 @@ impl flags::AnalysisStats { .filter_map(|it| it.name(db)) .rev() .chain(Some(body_id.name(db).unwrap_or_else(Name::missing))) - .map(|it| it.display(db).to_string()), + .map(|it| it.display(db, Edition::LATEST).to_string()), ) 
.join("::") }; if let Some(only_name) = self.only.as_deref() { - if body_id.name(db).unwrap_or_else(Name::missing).display(db).to_string() + if body_id + .name(db) + .unwrap_or_else(Name::missing) + .display(db, Edition::LATEST) + .to_string() != only_name && full_name() != only_name { diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs index 5153db237275c..56e43e82ed27a 100644 --- a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs @@ -301,14 +301,11 @@ where }; } let leaf: tt::Leaf<_> = match kind { - T![true] | T![false] => make_ident!(), - IDENT => { + k if k.is_any_identifier() => { let text = token.to_text(conv); tt::Ident::new(&text, conv.span_for(abs_range)).into() } UNDERSCORE => make_ident!(), - // FIXME: Edition - k if k.is_keyword(Edition::CURRENT) => make_ident!(), k if k.is_literal() => { let text = token.to_text(conv); let span = conv.span_for(abs_range); diff --git a/src/tools/rust-analyzer/crates/syntax/src/utils.rs b/src/tools/rust-analyzer/crates/syntax/src/utils.rs index 77d49b442e0ad..d1f60f0b71bcc 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/utils.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/utils.rs @@ -2,6 +2,7 @@ use crate::SyntaxKind; +#[inline] pub fn is_raw_identifier(name: &str, edition: parser::Edition) -> bool { let is_keyword = SyntaxKind::from_keyword(name, edition).is_some(); is_keyword && !matches!(name, "self" | "crate" | "super" | "Self") From 6fc487d2ccb57b7166bb706707485bd5b129fe9d Mon Sep 17 00:00:00 2001 From: David Richey Date: Fri, 16 Aug 2024 09:47:46 -0500 Subject: [PATCH 015/124] Remove rust-analyzer.workspace.discoverProjectRunner --- src/tools/rust-analyzer/editors/code/package.json | 8 -------- src/tools/rust-analyzer/editors/code/src/config.ts | 4 ---- 2 files changed, 12 deletions(-) diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index bf9c4a366d422..3d1e1efe4db7d 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -330,14 +330,6 @@ "default": false, "type": "boolean" }, - "rust-analyzer.discoverProjectRunner": { - "markdownDescription": "Sets the extension responsible for determining which extension the rust-analyzer extension uses to generate `rust-project.json` files. 
This should should only be used\n if a build system like Buck or Bazel is also in use.", - "default": null, - "type": [ - "null", - "string" - ] - }, "rust-analyzer.showUnlinkedFileNotification": { "markdownDescription": "Whether to show a notification for unlinked files asking the user to add the corresponding Cargo.toml to the linked projects setting.", "default": true, diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index dc0165df71ea9..1e3dc60809588 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -252,10 +252,6 @@ export class Config { await this.cfg.update("checkOnSave", !(value || false), target || null, overrideInLanguage); } - get discoverProjectRunner(): string | undefined { - return this.get("discoverProjectRunner"); - } - get problemMatcher(): string[] { return this.get("runnables.problemMatcher") || []; } From d3fa5e9c2b0c7660d88b4a88ccff1d7ee3621b03 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Sat, 17 Aug 2024 21:35:07 +0900 Subject: [PATCH 016/124] Pin `rowan` to `0.15.15` --- src/tools/rust-analyzer/crates/syntax/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml index d945fa59c9cd8..fcb9b0ea354f1 100644 --- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml +++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml @@ -16,7 +16,7 @@ doctest = false cov-mark = "2.0.0-pre.1" either.workspace = true itertools.workspace = true -rowan = "0.15.15" +rowan = "=0.15.15" rustc-hash.workspace = true indexmap.workspace = true smol_str.workspace = true From c4b8c657e40076505d4da332c467214fbabe99ad Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Sat, 17 Aug 2024 20:56:40 +0900 Subject: [PATCH 017/124] fix: Wrong BoundVar index when lowering impl trait parameter of parent generics --- .../rust-analyzer/crates/hir-ty/src/lower.rs | 35 +++++++++---------- .../crates/hir-ty/src/tests/regression.rs | 33 +++++++++++++++++ 2 files changed, 50 insertions(+), 18 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index 446f1853c8aec..79b096068da00 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -377,26 +377,25 @@ impl<'a> TyLoweringContext<'a> { // Count the number of `impl Trait` things that appear within our bounds. // Since t hose have been emitted as implicit type args already. 
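// (Descriptive note: instead of summing the per-kind parameter counts, the
// replacement below walks the generic parameters in order and uses the
// position of the `idx`-th `ArgumentImplTrait` type parameter as the BoundVar
// index, falling back to `TyKind::Error` if it is missing; this is what fixes
// the wrong index when parent generics are involved.)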
counter.set(idx + count_impl_traits(type_ref) as u16); - let ( - _parent_params, - self_param, - type_params, - const_params, - _impl_trait_params, - lifetime_params, - ) = self + let kind = self .generics() .expect("variable impl trait lowering must be in a generic def") - .provenance_split(); - TyKind::BoundVar(BoundVar::new( - self.in_binders, - idx as usize - + self_param as usize - + type_params - + const_params - + lifetime_params, - )) - .intern(Interner) + .iter() + .enumerate() + .filter_map(|(i, (id, data))| match (id, data) { + ( + GenericParamId::TypeParamId(_), + GenericParamDataRef::TypeParamData(data), + ) if data.provenance == TypeParamProvenance::ArgumentImplTrait => { + Some(i) + } + _ => None, + }) + .nth(idx as usize) + .map_or(TyKind::Error, |id| { + TyKind::BoundVar(BoundVar { debruijn: self.in_binders, index: id }) + }); + kind.intern(Interner) } ImplTraitLoweringState::Disallowed => { // FIXME: report error diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index 26e9243e7388f..01862aa2a37ba 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -2162,3 +2162,36 @@ fn main() { "#]], ); } + +#[test] +fn issue_17711() { + check_infer( + r#" +//- minicore: deref +use core::ops::Deref; + +struct Struct<'a, T>(&'a T); + +trait Trait {} + +impl<'a, T: Deref> Struct<'a, T> { + fn foo(&self) -> &Self { self } + + fn bar(&self) { + let _ = self.foo(); + } + +} +"#, + expect![[r#" + 137..141 'self': &'? Struct<'a, T> + 152..160 '{ self }': &'? Struct<'a, T> + 154..158 'self': &'? Struct<'a, T> + 174..178 'self': &'? Struct<'a, T> + 180..215 '{ ... }': () + 194..195 '_': &'? Struct<'?, T> + 198..202 'self': &'? Struct<'a, T> + 198..208 'self.foo()': &'? Struct<'?, T> + "#]], + ); +} From d0ce97f41da59a49104bb252eab0ff5cf9ec613d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 17 Aug 2024 11:18:50 +0200 Subject: [PATCH 018/124] feat: Make rust-analyzer work partially when missing an internet connection --- .../project-model/src/cargo_workspace.rs | 50 +++++++++++++++++-- .../crates/project-model/src/sysroot.rs | 23 +++++---- .../crates/project-model/src/workspace.rs | 7 +-- .../crates/rust-analyzer/src/reload.rs | 12 +++++ 4 files changed, 75 insertions(+), 17 deletions(-) diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index 38eeedec6217f..db9c20fdc53a5 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -33,6 +33,8 @@ pub struct CargoWorkspace { workspace_root: AbsPathBuf, target_directory: AbsPathBuf, manifest_path: ManifestPath, + // Whether this workspace was queried with `--no-deps`. 
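// (Descriptive note: when the initial `cargo metadata` invocation fails, for
// example because the registry cannot be reached, the fetch below is retried
// once with `--no-deps`; the resulting `CargoWorkspace` then reports
// `no_deps() == true`, which later skips running build scripts and surfaces a
// warning instead of failing the whole workspace load.)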
+ no_deps: bool, } impl ops::Index for CargoWorkspace { @@ -259,6 +261,18 @@ impl CargoWorkspace { sysroot: &Sysroot, locked: bool, progress: &dyn Fn(String), + ) -> anyhow::Result { + Self::fetch_metadata_(cargo_toml, current_dir, config, sysroot, locked, false, progress) + } + + fn fetch_metadata_( + cargo_toml: &ManifestPath, + current_dir: &AbsPath, + config: &CargoConfig, + sysroot: &Sysroot, + locked: bool, + no_deps: bool, + progress: &dyn Fn(String), ) -> anyhow::Result { let targets = find_list_of_build_targets(config, cargo_toml, sysroot); @@ -314,6 +328,9 @@ impl CargoWorkspace { if locked { other_options.push("--locked".to_owned()); } + if no_deps { + other_options.push("--no-deps".to_owned()); + } meta.other_options(other_options); // FIXME: Fetching metadata is a slow process, as it might require @@ -324,6 +341,22 @@ impl CargoWorkspace { (|| -> Result { let output = meta.cargo_command().output()?; if !output.status.success() { + if !no_deps { + // If we failed to fetch metadata with deps, try again without them. + // This makes r-a still work partially when offline. + if let Ok(metadata) = Self::fetch_metadata_( + cargo_toml, + current_dir, + config, + sysroot, + locked, + true, + progress, + ) { + return Ok(metadata); + } + } + return Err(cargo_metadata::Error::CargoMetadata { stderr: String::from_utf8(output.stderr)?, }); @@ -431,8 +464,8 @@ impl CargoWorkspace { pkg_data.targets.push(tgt); } } - let resolve = meta.resolve.expect("metadata executed with deps"); - for mut node in resolve.nodes { + let no_deps = meta.resolve.is_none(); + for mut node in meta.resolve.map_or_else(Vec::new, |it| it.nodes) { let &source = pkg_by_id.get(&node.id).unwrap(); node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg)); let dependencies = node @@ -451,7 +484,14 @@ impl CargoWorkspace { let target_directory = AbsPathBuf::assert(meta.target_directory); - CargoWorkspace { packages, targets, workspace_root, target_directory, manifest_path } + CargoWorkspace { + packages, + targets, + workspace_root, + target_directory, + manifest_path, + no_deps, + } } pub fn packages(&self) -> impl ExactSizeIterator + '_ { @@ -533,6 +573,10 @@ impl CargoWorkspace { fn is_unique(&self, name: &str) -> bool { self.packages.iter().filter(|(_, v)| v.name == name).count() == 1 } + + pub fn no_deps(&self) -> bool { + self.no_deps + } } fn find_list_of_build_targets( diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs index 419fac3f41f75..e11f0ee3ae3ff 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs @@ -372,18 +372,19 @@ impl Sysroot { .flatten() }; - let resolve = res.resolve.as_mut().expect("metadata executed with deps"); - resolve.nodes.retain_mut(|node| { - // Replace `rustc-std-workspace` crate with the actual one in the dependency list - node.deps.iter_mut().for_each(|dep| { - let real_pkg = patches.clone().find(|((_, fake_id), _)| *fake_id == dep.pkg); - if let Some((_, real)) = real_pkg { - dep.pkg = real; - } + if let Some(resolve) = res.resolve.as_mut() { + resolve.nodes.retain_mut(|node| { + // Replace `rustc-std-workspace` crate with the actual one in the dependency list + node.deps.iter_mut().for_each(|dep| { + let real_pkg = patches.clone().find(|((_, fake_id), _)| *fake_id == dep.pkg); + if let Some((_, real)) = real_pkg { + dep.pkg = real; + } + }); + // Remove this node if it's a fake one + !patches.clone().any(|((_, fake), _)| fake == 
node.id) }); - // Remove this node if it's a fake one - !patches.clone().any(|((_, fake), _)| fake == node.id) - }); + } // Remove the fake ones from the package list patches.map(|((idx, _), _)| idx).sorted().rev().for_each(|idx| { res.packages.remove(idx); diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 5620dfade2d22..c6be91229fd00 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -441,14 +441,15 @@ impl ProjectWorkspace { ) -> anyhow::Result { match &self.kind { ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _)), .. } - | ProjectWorkspaceKind::Cargo { cargo, .. } => { + | ProjectWorkspaceKind::Cargo { cargo, .. } + if !cargo.no_deps() => + { WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, &self.sysroot) .with_context(|| { format!("Failed to run build scripts for {}", cargo.workspace_root()) }) } - ProjectWorkspaceKind::DetachedFile { cargo: None, .. } - | ProjectWorkspaceKind::Json { .. } => Ok(WorkspaceBuildScripts::default()), + _ => Ok(WorkspaceBuildScripts::default()), } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index dee34b1b393d2..71ed2872688d7 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -165,6 +165,18 @@ impl GlobalState { self.proc_macro_clients.iter().map(Some).chain(iter::repeat_with(|| None)); for (ws, proc_macro_client) in self.workspaces.iter().zip(proc_macro_clients) { + if matches!( + &ws.kind, + ProjectWorkspaceKind::Cargo { cargo, .. } | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _)), .. 
} + if cargo.no_deps() + ) { + status.health |= lsp_ext::Health::Warning; + format_to!( + message, + "Workspace `{}` has been queried without dependencies, connecting to crates.io might have failed.\n\n", + ws.manifest_or_root() + ); + } if let Some(err) = ws.sysroot.error() { status.health |= lsp_ext::Health::Warning; format_to!( From 324cf839bdedf80183d4e0c0d6b7ddf970eb8a2d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 17 Aug 2024 19:18:56 +0200 Subject: [PATCH 019/124] Adress new clippy::large_enum_variant diagnostics --- .../rust-analyzer/crates/rust-analyzer/src/diagnostics.rs | 8 +++++--- .../crates/rust-analyzer/src/diagnostics/to_proto.rs | 8 ++++---- .../rust-analyzer/crates/rust-analyzer/src/flycheck.rs | 1 + 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs index 034c49c3d5c69..5f2871ac99226 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs @@ -75,7 +75,7 @@ impl DiagnosticCollection { flycheck_id: usize, file_id: FileId, diagnostic: lsp_types::Diagnostic, - fix: Option, + fix: Option>, ) { let diagnostics = self.check.entry(flycheck_id).or_default().entry(file_id).or_default(); for existing_diagnostic in diagnostics.iter() { @@ -84,8 +84,10 @@ impl DiagnosticCollection { } } - let check_fixes = Arc::make_mut(&mut self.check_fixes); - check_fixes.entry(flycheck_id).or_default().entry(file_id).or_default().extend(fix); + if let Some(fix) = fix { + let check_fixes = Arc::make_mut(&mut self.check_fixes); + check_fixes.entry(flycheck_id).or_default().entry(file_id).or_default().push(*fix); + } diagnostics.push(diagnostic); self.changes.insert(file_id); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs index 208a70bc02ad9..e330cfc3cfd68 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -170,7 +170,7 @@ fn resolve_path( struct SubDiagnostic { related: lsp_types::DiagnosticRelatedInformation, - suggested_fix: Option, + suggested_fix: Option>, } enum MappedRustChildDiagnostic { @@ -241,7 +241,7 @@ fn map_rust_child_diagnostic( location: location(config, workspace_root, spans[0], snap), message: message.clone(), }, - suggested_fix: Some(Fix { + suggested_fix: Some(Box::new(Fix { ranges: spans .iter() .map(|&span| location(config, workspace_root, span, snap).range) @@ -260,7 +260,7 @@ fn map_rust_child_diagnostic( data: None, command: None, }, - }), + })), }) } } @@ -269,7 +269,7 @@ fn map_rust_child_diagnostic( pub(crate) struct MappedRustDiagnostic { pub(crate) url: lsp_types::Url, pub(crate) diagnostic: lsp_types::Diagnostic, - pub(crate) fix: Option, + pub(crate) fix: Option>, } /// Converts a Rust root diagnostic to LSP form diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 8f2e7d1ca26b9..443f52c6dd373 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -218,6 +218,7 @@ struct FlycheckActor { status: FlycheckStatus, } +#[allow(clippy::large_enum_variant)] enum Event { RequestStateChange(StateChange), CheckEvent(Option), From 
74db4d309a99c632dfb7f38d90bfcb14d4e62d87 Mon Sep 17 00:00:00 2001 From: David Richey Date: Sun, 18 Aug 2024 15:09:41 -0500 Subject: [PATCH 020/124] Include generics when lowering extern type --- .../rust-analyzer/crates/hir-ty/src/lower.rs | 10 +++++----- .../crates/hir-ty/src/tests/regression.rs | 16 ++++++++++++++++ 2 files changed, 21 insertions(+), 5 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index 79b096068da00..5824c59c5a575 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -1976,13 +1976,13 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders { .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) .with_type_param_mode(ParamLoweringMode::Variable); let type_alias_data = db.type_alias_data(t); - if type_alias_data.is_extern { - Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner)) + let inner = if type_alias_data.is_extern { + TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner) } else { let type_ref = &type_alias_data.type_ref; - let inner = ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error)); - make_binders(db, &generics, inner) - } + ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error)) + }; + make_binders(db, &generics, inner) } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index 01862aa2a37ba..d6e034b01840a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -2195,3 +2195,19 @@ impl<'a, T: Deref> Struct<'a, T> { "#]], ); } + +#[test] +fn issue_17767() { + check_infer( + r#" +extern "C" { + type Foo; +} + +fn f() -> Foo {} +"#, + expect![[r#" + 47..49 '{}': Foo + "#]], + ); +} From 4abdcd7fd9767238e695eed65667756ed9bf00c5 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Mon, 19 Aug 2024 08:17:24 +0200 Subject: [PATCH 021/124] provenance_gc: fix comment --- src/tools/miri/src/provenance_gc.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/tools/miri/src/provenance_gc.rs b/src/tools/miri/src/provenance_gc.rs index 8edd80744ddc9..5c6edf4ddcedf 100644 --- a/src/tools/miri/src/provenance_gc.rs +++ b/src/tools/miri/src/provenance_gc.rs @@ -187,9 +187,9 @@ impl LiveAllocs<'_, '_> { impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { fn run_provenance_gc(&mut self) { - // We collect all tags from various parts of the interpreter, but also let this = self.eval_context_mut(); + // We collect all tags and AllocId from every part of the interpreter. let mut tags = FxHashSet::default(); let mut alloc_ids = FxHashSet::default(); this.visit_provenance(&mut |id, tag| { @@ -200,6 +200,8 @@ pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { tags.insert(tag); } }); + + // Based on this, clean up the interpreter state. 
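// (Descriptive note: "clean up" here means pruning borrow-tracker tags that no
// live pointer carries and dropping per-allocation bookkeeping, such as
// allocation spans and symbolic alignment, for allocation ids that are no
// longer reachable anywhere in the interpreter.)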
self.remove_unreachable_tags(tags); self.remove_unreachable_allocs(alloc_ids); } From 4001f5933eb0f959232afce75005fc34d94803f1 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Mon, 19 Aug 2024 08:25:48 +0200 Subject: [PATCH 022/124] make the cleanup functions private also move "is there a borrow tracker" check out of the loop --- src/tools/miri/src/provenance_gc.rs | 50 ++++++++++++++--------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/src/tools/miri/src/provenance_gc.rs b/src/tools/miri/src/provenance_gc.rs index 5c6edf4ddcedf..d4bed69c6707a 100644 --- a/src/tools/miri/src/provenance_gc.rs +++ b/src/tools/miri/src/provenance_gc.rs @@ -184,6 +184,29 @@ impl LiveAllocs<'_, '_> { } } +fn remove_unreachable_tags<'tcx>(this: &mut MiriInterpCx<'tcx>, tags: FxHashSet) { + // Avoid iterating all allocations if there's no borrow tracker anyway. + if this.machine.borrow_tracker.is_some() { + this.memory.alloc_map().iter(|it| { + for (_id, (_kind, alloc)) in it { + alloc.extra.borrow_tracker.as_ref().unwrap().remove_unreachable_tags(&tags); + } + }); + } +} + +fn remove_unreachable_allocs<'tcx>(this: &mut MiriInterpCx<'tcx>, allocs: FxHashSet) { + let allocs = LiveAllocs { ecx: this, collected: allocs }; + this.machine.allocation_spans.borrow_mut().retain(|id, _| allocs.is_live(*id)); + this.machine.symbolic_alignment.borrow_mut().retain(|id, _| allocs.is_live(*id)); + this.machine.alloc_addresses.borrow_mut().remove_unreachable_allocs(&allocs); + if let Some(borrow_tracker) = &this.machine.borrow_tracker { + borrow_tracker.borrow_mut().remove_unreachable_allocs(&allocs); + } + // Clean up core (non-Miri-specific) state. + this.remove_unreachable_allocs(&allocs.collected); +} + impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { fn run_provenance_gc(&mut self) { @@ -202,30 +225,7 @@ pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { }); // Based on this, clean up the interpreter state. - self.remove_unreachable_tags(tags); - self.remove_unreachable_allocs(alloc_ids); - } - - fn remove_unreachable_tags(&mut self, tags: FxHashSet) { - let this = self.eval_context_mut(); - this.memory.alloc_map().iter(|it| { - for (_id, (_kind, alloc)) in it { - if let Some(bt) = &alloc.extra.borrow_tracker { - bt.remove_unreachable_tags(&tags); - } - } - }); - } - - fn remove_unreachable_allocs(&mut self, allocs: FxHashSet) { - let this = self.eval_context_mut(); - let allocs = LiveAllocs { ecx: this, collected: allocs }; - this.machine.allocation_spans.borrow_mut().retain(|id, _| allocs.is_live(*id)); - this.machine.symbolic_alignment.borrow_mut().retain(|id, _| allocs.is_live(*id)); - this.machine.alloc_addresses.borrow_mut().remove_unreachable_allocs(&allocs); - if let Some(borrow_tracker) = &this.machine.borrow_tracker { - borrow_tracker.borrow_mut().remove_unreachable_allocs(&allocs); - } - this.remove_unreachable_allocs(&allocs.collected); + remove_unreachable_tags(this, tags); + remove_unreachable_allocs(this, alloc_ids); } } From 1bfb362d7f19e53ea0f76e596ea39b2872a7f4a7 Mon Sep 17 00:00:00 2001 From: Victor Song Date: Wed, 14 Aug 2024 01:06:49 -0500 Subject: [PATCH 023/124] chore(config): remove `invocationLocation` in favor of `invocationStrategy` These flags were added to help rust-analyzer integrate with repos requiring non-Cargo invocations. The consensus is that having two independent settings are no longer needed. 
This change removes `invocationLocation` in favor of `invocationStrategy` and changes the internal representation of `InvocationStrategy::Once` to hold the workspace root. --- .../project-model/src/build_dependencies.rs | 19 ++----- .../project-model/src/cargo_workspace.rs | 3 +- .../crates/project-model/src/lib.rs | 9 +--- .../crates/rust-analyzer/src/config.rs | 50 +------------------ .../crates/rust-analyzer/src/flycheck.rs | 37 +++----------- .../crates/rust-analyzer/src/reload.rs | 22 ++++---- .../docs/user/generated_config.adoc | 26 +--------- .../rust-analyzer/editors/code/package.json | 40 +-------------- 8 files changed, 33 insertions(+), 173 deletions(-) diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs index e7a4b8f39f7a5..25d1ea07aaec6 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs @@ -19,8 +19,8 @@ use serde::Deserialize; use toolchain::Tool; use crate::{ - utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation, - InvocationStrategy, ManifestPath, Package, Sysroot, TargetKind, + utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationStrategy, ManifestPath, + Package, Sysroot, TargetKind, }; /// Output of the build script and proc-macro building steps for a workspace. @@ -63,10 +63,7 @@ impl WorkspaceBuildScripts { progress: &dyn Fn(String), sysroot: &Sysroot, ) -> io::Result { - let current_dir = match &config.invocation_location { - InvocationLocation::Root(root) if config.run_build_script_command.is_some() => root, - _ => workspace.workspace_root(), - }; + let current_dir = workspace.workspace_root(); let allowed_features = workspace.workspace_features(); let cmd = Self::build_command( @@ -89,15 +86,7 @@ impl WorkspaceBuildScripts { ) -> io::Result> { assert_eq!(config.invocation_strategy, InvocationStrategy::Once); - let current_dir = match &config.invocation_location { - InvocationLocation::Root(root) => root, - InvocationLocation::Workspace => { - return Err(io::Error::new( - io::ErrorKind::Other, - "Cannot run build scripts from workspace with invocation strategy `once`", - )) - } - }; + let current_dir = workspace_root; let cmd = Self::build_command( config, &Default::default(), diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index db9c20fdc53a5..492bc9a9255c6 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -13,7 +13,7 @@ use serde_json::from_value; use span::Edition; use toolchain::Tool; -use crate::{utf8_stdout, InvocationLocation, ManifestPath, Sysroot}; +use crate::{utf8_stdout, ManifestPath, Sysroot}; use crate::{CfgOverrides, InvocationStrategy}; /// [`CargoWorkspace`] represents the logical structure of, well, a Cargo @@ -100,7 +100,6 @@ pub struct CargoConfig { /// Extra env vars to set when invoking the cargo command pub extra_env: FxHashMap, pub invocation_strategy: InvocationStrategy, - pub invocation_location: InvocationLocation, /// Optional path to use instead of `target` when building pub target_dir: Option, } diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs index 4fa70508bbd4d..b8ac55ed0d5d1 100644 --- 
a/src/tools/rust-analyzer/crates/project-model/src/lib.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs @@ -186,20 +186,13 @@ fn utf8_stdout(mut cmd: Command) -> anyhow::Result { Ok(stdout.trim().to_owned()) } -#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] +#[derive(Clone, Debug, Default, PartialEq, Eq)] pub enum InvocationStrategy { Once, #[default] PerWorkspace, } -#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub enum InvocationLocation { - Root(AbsPathBuf), - #[default] - Workspace, -} - /// A set of cfg-overrides per crate. #[derive(Default, Debug, Clone, Eq, PartialEq)] pub struct CfgOverrides { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 02f5d75136e9e..680a500eaa4f7 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -80,13 +80,6 @@ config_data! { pub(crate) cargo_autoreload: bool = true, /// Run build scripts (`build.rs`) for more precise code analysis. cargo_buildScripts_enable: bool = true, - /// Specifies the working directory for running build scripts. - /// - "workspace": run build scripts for a workspace in the workspace's root directory. - /// This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`. - /// - "root": run build scripts in the project's root directory. - /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` - /// is set. - cargo_buildScripts_invocationLocation: InvocationLocation = InvocationLocation::Workspace, /// Specifies the invocation strategy to use when running the build scripts command. /// If `per_workspace` is set, the command will be executed for each workspace. /// If `once` is set, the command will be executed once. @@ -101,8 +94,7 @@ config_data! { /// If there are multiple linked projects/workspaces, this command is invoked for /// each of them, with the working directory being the workspace root /// (i.e., the folder containing the `Cargo.toml`). This can be overwritten - /// by changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#` and - /// `#rust-analyzer.cargo.buildScripts.invocationLocation#`. + /// by changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#`. /// /// By default, a cargo invocation will be constructed for the configured /// targets and features, with the following base command line: @@ -182,14 +174,6 @@ config_data! { /// /// For example for `cargo check`: `dead_code`, `unused_imports`, `unused_variables`,... check_ignore: FxHashSet = FxHashSet::default(), - /// Specifies the working directory for running checks. - /// - "workspace": run checks for workspaces in the corresponding workspaces' root directories. - // FIXME: Ideally we would support this in some way - /// This falls back to "root" if `#rust-analyzer.check.invocationStrategy#` is set to `once`. - /// - "root": run checks in the project's root directory. - /// This config only has an effect when `#rust-analyzer.check.overrideCommand#` - /// is set. - check_invocationLocation | checkOnSave_invocationLocation: InvocationLocation = InvocationLocation::Workspace, /// Specifies the invocation strategy to use when running the check command. /// If `per_workspace` is set, the command will be executed for each workspace. /// If `once` is set, the command will be executed once. @@ -212,8 +196,7 @@ config_data! 
{ /// If there are multiple linked projects/workspaces, this command is invoked for /// each of them, with the working directory being the workspace root /// (i.e., the folder containing the `Cargo.toml`). This can be overwritten - /// by changing `#rust-analyzer.check.invocationStrategy#` and - /// `#rust-analyzer.check.invocationLocation#`. + /// by changing `#rust-analyzer.check.invocationStrategy#`. /// /// If `$saved_file` is part of the command, rust-analyzer will pass /// the absolute path of the saved file to the provided command. This is @@ -1868,12 +1851,6 @@ impl Config { InvocationStrategy::Once => project_model::InvocationStrategy::Once, InvocationStrategy::PerWorkspace => project_model::InvocationStrategy::PerWorkspace, }, - invocation_location: match self.cargo_buildScripts_invocationLocation(None) { - InvocationLocation::Root => { - project_model::InvocationLocation::Root(self.root_path.clone()) - } - InvocationLocation::Workspace => project_model::InvocationLocation::Workspace, - }, run_build_script_command: self.cargo_buildScripts_overrideCommand(None).clone(), extra_args: self.cargo_extraArgs(None).clone(), extra_env: self.cargo_extraEnv(None).clone(), @@ -1930,14 +1907,6 @@ impl Config { crate::flycheck::InvocationStrategy::PerWorkspace } }, - invocation_location: match self.check_invocationLocation(None) { - InvocationLocation::Root => { - crate::flycheck::InvocationLocation::Root(self.root_path.clone()) - } - InvocationLocation::Workspace => { - crate::flycheck::InvocationLocation::Workspace - } - }, } } Some(_) | None => FlycheckConfig::CargoCommand { @@ -2348,13 +2317,6 @@ pub(crate) enum InvocationStrategy { #[derive(Serialize, Deserialize, Debug, Clone)] struct CheckOnSaveTargets(#[serde(with = "single_or_array")] Vec); -#[derive(Serialize, Deserialize, Debug, Clone)] -#[serde(rename_all = "snake_case")] -enum InvocationLocation { - Root, - Workspace, -} - #[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "snake_case")] enum LifetimeElisionDef { @@ -3196,14 +3158,6 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "The command will be executed once." ], }, - "InvocationLocation" => set! { - "type": "string", - "enum": ["workspace", "root"], - "enumDescriptions": [ - "The command will be executed in the corresponding workspace root.", - "The command will be executed in the project root." - ], - }, "Option" => set! 
{ "anyOf": [ { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 443f52c6dd373..cf13a7386379b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -15,20 +15,13 @@ use toolchain::Tool; use crate::command::{CommandHandle, ParseFromLine}; -#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] +#[derive(Clone, Debug, Default, PartialEq, Eq)] pub(crate) enum InvocationStrategy { Once, #[default] PerWorkspace, } -#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub(crate) enum InvocationLocation { - Root(AbsPathBuf), - #[default] - Workspace, -} - #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) struct CargoOptions { pub(crate) target_triples: Vec, @@ -79,7 +72,6 @@ pub(crate) enum FlycheckConfig { args: Vec, extra_env: FxHashMap, invocation_strategy: InvocationStrategy, - invocation_location: InvocationLocation, }, } @@ -424,30 +416,17 @@ impl FlycheckActor { cmd.args(&options.extra_args); Some(cmd) } - FlycheckConfig::CustomCommand { - command, - args, - extra_env, - invocation_strategy, - invocation_location, - } => { + FlycheckConfig::CustomCommand { command, args, extra_env, invocation_strategy } => { let mut cmd = Command::new(command); cmd.envs(extra_env); - match invocation_location { - InvocationLocation::Workspace => { - match invocation_strategy { - InvocationStrategy::Once => { - cmd.current_dir(&self.root); - } - InvocationStrategy::PerWorkspace => { - // FIXME: cmd.current_dir(&affected_workspace); - cmd.current_dir(&self.root); - } - } + match invocation_strategy { + InvocationStrategy::Once => { + cmd.current_dir(&self.root); } - InvocationLocation::Root(root) => { - cmd.current_dir(root); + InvocationStrategy::PerWorkspace => { + // FIXME: cmd.current_dir(&affected_workspace); + cmd.current_dir(&self.root); } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 71ed2872688d7..510dc22603c59 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -764,18 +764,22 @@ impl GlobalState { FlycheckConfig::CargoCommand { .. } => { crate::flycheck::InvocationStrategy::PerWorkspace } - FlycheckConfig::CustomCommand { invocation_strategy, .. } => invocation_strategy, + FlycheckConfig::CustomCommand { ref invocation_strategy, .. } => { + invocation_strategy.clone() + } }; self.flycheck = match invocation_strategy { - crate::flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn( - 0, - sender, - config, - None, - self.config.root_path().clone(), - None, - )], + crate::flycheck::InvocationStrategy::Once => { + vec![FlycheckHandle::spawn( + 0, + sender, + config, + None, + self.config.root_path().clone(), + None, + )] + } crate::flycheck::InvocationStrategy::PerWorkspace => { self.workspaces .iter() diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc index 2be338dd4d1d9..647cf3db4ab6e 100644 --- a/src/tools/rust-analyzer/docs/user/generated_config.adoc +++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc @@ -45,16 +45,6 @@ Automatically refresh project info via `cargo metadata` on -- Run build scripts (`build.rs`) for more precise code analysis. 
-- -[[rust-analyzer.cargo.buildScripts.invocationLocation]]rust-analyzer.cargo.buildScripts.invocationLocation (default: `"workspace"`):: -+ --- -Specifies the working directory for running build scripts. -- "workspace": run build scripts for a workspace in the workspace's root directory. - This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`. -- "root": run build scripts in the project's root directory. -This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` -is set. --- [[rust-analyzer.cargo.buildScripts.invocationStrategy]]rust-analyzer.cargo.buildScripts.invocationStrategy (default: `"per_workspace"`):: + -- @@ -75,8 +65,7 @@ option. If there are multiple linked projects/workspaces, this command is invoked for each of them, with the working directory being the workspace root (i.e., the folder containing the `Cargo.toml`). This can be overwritten -by changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#` and -`#rust-analyzer.cargo.buildScripts.invocationLocation#`. +by changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#`. By default, a cargo invocation will be constructed for the configured targets and features, with the following base command line: @@ -209,16 +198,6 @@ List of `cargo check` (or other command specified in `check.command`) diagnostic For example for `cargo check`: `dead_code`, `unused_imports`, `unused_variables`,... -- -[[rust-analyzer.check.invocationLocation]]rust-analyzer.check.invocationLocation (default: `"workspace"`):: -+ --- -Specifies the working directory for running checks. -- "workspace": run checks for workspaces in the corresponding workspaces' root directories. - This falls back to "root" if `#rust-analyzer.check.invocationStrategy#` is set to `once`. -- "root": run checks in the project's root directory. -This config only has an effect when `#rust-analyzer.check.overrideCommand#` -is set. --- [[rust-analyzer.check.invocationStrategy]]rust-analyzer.check.invocationStrategy (default: `"per_workspace"`):: + -- @@ -250,8 +229,7 @@ Cargo, you might also want to change If there are multiple linked projects/workspaces, this command is invoked for each of them, with the working directory being the workspace root (i.e., the folder containing the `Cargo.toml`). This can be overwritten -by changing `#rust-analyzer.check.invocationStrategy#` and -`#rust-analyzer.check.invocationLocation#`. +by changing `#rust-analyzer.check.invocationStrategy#`. If `$saved_file` is part of the command, rust-analyzer will pass the absolute path of the saved file to the provided command. 
This is diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 3d1e1efe4db7d..9c2c227319f32 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -663,24 +663,6 @@ } } }, - { - "title": "cargo", - "properties": { - "rust-analyzer.cargo.buildScripts.invocationLocation": { - "markdownDescription": "Specifies the working directory for running build scripts.\n- \"workspace\": run build scripts for a workspace in the workspace's root directory.\n This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`.\n- \"root\": run build scripts in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", - "default": "workspace", - "type": "string", - "enum": [ - "workspace", - "root" - ], - "enumDescriptions": [ - "The command will be executed in the corresponding workspace root.", - "The command will be executed in the project root." - ] - } - } - }, { "title": "cargo", "properties": { @@ -703,7 +685,7 @@ "title": "cargo", "properties": { "rust-analyzer.cargo.buildScripts.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#` and\n`#rust-analyzer.cargo.buildScripts.invocationLocation#`.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets --keep-going\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). 
This can be overwritten\nby changing `#rust-analyzer.cargo.buildScripts.invocationStrategy#`.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets --keep-going\n```\n.", "default": null, "type": [ "null", @@ -965,24 +947,6 @@ } } }, - { - "title": "check", - "properties": { - "rust-analyzer.check.invocationLocation": { - "markdownDescription": "Specifies the working directory for running checks.\n- \"workspace\": run checks for workspaces in the corresponding workspaces' root directories.\n This falls back to \"root\" if `#rust-analyzer.check.invocationStrategy#` is set to `once`.\n- \"root\": run checks in the project's root directory.\nThis config only has an effect when `#rust-analyzer.check.overrideCommand#`\nis set.", - "default": "workspace", - "type": "string", - "enum": [ - "workspace", - "root" - ], - "enumDescriptions": [ - "The command will be executed in the corresponding workspace root.", - "The command will be executed in the project root." - ] - } - } - }, { "title": "check", "properties": { @@ -1018,7 +982,7 @@ "title": "check", "properties": { "rust-analyzer.check.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#` and\n`#rust-analyzer.check.invocationLocation#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the future.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. 
This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the future.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", "default": null, "type": [ "null", From 036affcdee79fb1941b28a422544cc12d23e96fa Mon Sep 17 00:00:00 2001 From: roife Date: Mon, 19 Aug 2024 17:27:54 +0800 Subject: [PATCH 024/124] fix: keep comments in convert_while_to_loop --- .../src/handlers/convert_while_to_loop.rs | 97 +++++++++++++++++-- 1 file changed, 89 insertions(+), 8 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs index c34b684112aaf..434daa279cac6 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_while_to_loop.rs @@ -1,5 +1,6 @@ -use std::iter::once; +use std::iter; +use either::Either; use ide_db::syntax_helpers::node_ext::is_pattern_cond; use syntax::{ ast::{ @@ -52,18 +53,30 @@ pub(crate) fn convert_while_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>) |edit| { let while_indent_level = IndentLevel::from_node(while_expr.syntax()); - let break_block = - make::block_expr(once(make::expr_stmt(make::expr_break(None, None)).into()), None) - .indent(while_indent_level); + let break_block = make::block_expr( + iter::once(make::expr_stmt(make::expr_break(None, None)).into()), + None, + ) + .indent(while_indent_level); let block_expr = if is_pattern_cond(while_cond.clone()) { let if_expr = make::expr_if(while_cond, while_body, Some(break_block.into())); - let stmts = once(make::expr_stmt(if_expr).into()); + let stmts = iter::once(make::expr_stmt(if_expr).into()); make::block_expr(stmts, None) } else { let if_cond = invert_boolean_expression(while_cond); - let if_expr = make::expr_if(if_cond, break_block, None); - let stmts = once(make::expr_stmt(if_expr).into()).chain(while_body.statements()); - make::block_expr(stmts, while_body.tail_expr()) + let if_expr = make::expr_if(if_cond, break_block, None).syntax().clone().into(); + let elements = while_body.stmt_list().map_or_else( + || Either::Left(iter::empty()), + |stmts| { + Either::Right(stmts.syntax().children_with_tokens().filter(|node_or_tok| { + // Filter out the trailing expr + !node_or_tok + .as_node() + .is_some_and(|node| ast::Expr::can_cast(node.kind())) + })) + }, + ); + make::hacky_block_expr(iter::once(if_expr).chain(elements), while_body.tail_expr()) }; let replacement = make::expr_loop(block_expr.indent(while_indent_level)); @@ -182,6 +195,74 @@ fn main() { bar(); } } +"#, + ); + } + + #[test] + fn preserve_comments() { + check_assist( + convert_while_to_loop, + r#" +fn main() { + let mut i = 0; + + $0while i < 5 { + // comment 1 + dbg!(i); + // comment 2 + i += 1; + // comment 3 + } +} +"#, + r#" +fn main() { + let mut i = 0; + + loop { + if i >= 5 { + break; + } + // comment 1 + dbg!(i); + // comment 2 + i += 1; + // comment 3 + } +} +"#, + ); + + check_assist( + convert_while_to_loop, + r#" +fn main() { + let v = vec![1, 2, 3]; + let iter = v.iter(); + + $0while let Some(i) = iter.next() { + // comment 1 + dbg!(i); + // comment 2 + } +} +"#, + r#" +fn main() { + let v = vec![1, 2, 3]; + let iter = v.iter(); + + loop { + if let Some(i) = iter.next() { + // comment 1 + dbg!(i); + // comment 2 + } else { + break; + } + } +} "#, ); } From 
b2062ea49f955b3a3b744b2e226aad599eb4a43b Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 19 Aug 2024 14:21:11 +0200 Subject: [PATCH 025/124] Improve documentation for `InvocationStrategy` --- .../project-model/src/build_dependencies.rs | 7 +++---- .../crates/project-model/src/workspace.rs | 19 +++++++++++-------- .../crates/rust-analyzer/src/config.rs | 10 ++++++---- .../docs/user/generated_config.adoc | 6 ++++-- .../rust-analyzer/editors/code/package.json | 10 +++++----- 5 files changed, 29 insertions(+), 23 deletions(-) diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs index 25d1ea07aaec6..dc71b13eeec69 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs @@ -82,17 +82,16 @@ impl WorkspaceBuildScripts { config: &CargoConfig, workspaces: &[&CargoWorkspace], progress: &dyn Fn(String), - workspace_root: &AbsPathBuf, + working_directory: &AbsPathBuf, ) -> io::Result> { assert_eq!(config.invocation_strategy, InvocationStrategy::Once); - let current_dir = workspace_root; let cmd = Self::build_command( config, &Default::default(), // This is not gonna be used anyways, so just construct a dummy here - &ManifestPath::try_from(workspace_root.clone()).unwrap(), - current_dir, + &ManifestPath::try_from(working_directory.clone()).unwrap(), + working_directory, &Sysroot::empty(), )?; // NB: Cargo.toml could have been modified between `cargo metadata` and diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index c6be91229fd00..33ba7f9688d6b 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -459,7 +459,7 @@ impl ProjectWorkspace { workspaces: &[ProjectWorkspace], config: &CargoConfig, progress: &dyn Fn(String), - workspace_root: &AbsPathBuf, + working_directory: &AbsPathBuf, ) -> Vec> { if matches!(config.invocation_strategy, InvocationStrategy::PerWorkspace) || config.run_build_script_command.is_none() @@ -474,13 +474,16 @@ impl ProjectWorkspace { _ => None, }) .collect(); - let outputs = - &mut match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress, workspace_root) - { - Ok(it) => Ok(it.into_iter()), - // io::Error is not Clone? - Err(e) => Err(sync::Arc::new(e)), - }; + let outputs = &mut match WorkspaceBuildScripts::run_once( + config, + &cargo_ws, + progress, + working_directory, + ) { + Ok(it) => Ok(it.into_iter()), + // io::Error is not Clone? + Err(e) => Err(sync::Arc::new(e)), + }; workspaces .iter() diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 680a500eaa4f7..d7f24e6ce1adc 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -81,8 +81,10 @@ config_data! { /// Run build scripts (`build.rs`) for more precise code analysis. cargo_buildScripts_enable: bool = true, /// Specifies the invocation strategy to use when running the build scripts command. - /// If `per_workspace` is set, the command will be executed for each workspace. - /// If `once` is set, the command will be executed once. 
+ /// If `per_workspace` is set, the command will be executed for each Rust workspace with the + /// workspace as the working directory. + /// If `once` is set, the command will be executed once with the opened project as the + /// working directory. /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` /// is set. cargo_buildScripts_invocationStrategy: InvocationStrategy = InvocationStrategy::PerWorkspace, @@ -3154,8 +3156,8 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "type": "string", "enum": ["per_workspace", "once"], "enumDescriptions": [ - "The command will be executed for each workspace.", - "The command will be executed once." + "The command will be executed for each Rust workspace with the workspace as the working directory.", + "The command will be executed once with the opened project as the working directory." ], }, "Option" => set! { diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc index 647cf3db4ab6e..e4a8c6493a883 100644 --- a/src/tools/rust-analyzer/docs/user/generated_config.adoc +++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc @@ -49,8 +49,10 @@ Run build scripts (`build.rs`) for more precise code analysis. + -- Specifies the invocation strategy to use when running the build scripts command. -If `per_workspace` is set, the command will be executed for each workspace. -If `once` is set, the command will be executed once. +If `per_workspace` is set, the command will be executed for each Rust workspace with the +workspace as the working directory. +If `once` is set, the command will be executed once with the opened project as the +working directory. This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` is set. -- diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 9c2c227319f32..98e8bbf02aa55 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -667,7 +667,7 @@ "title": "cargo", "properties": { "rust-analyzer.cargo.buildScripts.invocationStrategy": { - "markdownDescription": "Specifies the invocation strategy to use when running the build scripts command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", + "markdownDescription": "Specifies the invocation strategy to use when running the build scripts command.\nIf `per_workspace` is set, the command will be executed for each Rust workspace with the\nworkspace as the working directory.\nIf `once` is set, the command will be executed once with the opened project as the\nworking directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", "default": "per_workspace", "type": "string", "enum": [ @@ -675,8 +675,8 @@ "once" ], "enumDescriptions": [ - "The command will be executed for each workspace.", - "The command will be executed once." + "The command will be executed for each Rust workspace with the workspace as the working directory.", + "The command will be executed once with the opened project as the working directory." 
] } } @@ -959,8 +959,8 @@ "once" ], "enumDescriptions": [ - "The command will be executed for each workspace.", - "The command will be executed once." + "The command will be executed for each Rust workspace with the workspace as the working directory.", + "The command will be executed once with the opened project as the working directory." ] } } From bd6ea758cc9221dbe8cd922495b8efbbce54932a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 19 Aug 2024 14:50:42 +0200 Subject: [PATCH 026/124] Remove the ability to configure the user config path --- .../crates/rust-analyzer/src/bin/main.rs | 2 +- .../crates/rust-analyzer/src/cli/scip.rs | 1 - .../crates/rust-analyzer/src/config.rs | 72 +++++++++---------- .../rust-analyzer/src/diagnostics/to_proto.rs | 1 - .../crates/rust-analyzer/src/global_state.rs | 2 +- .../crates/rust-analyzer/src/reload.rs | 9 ++- .../rust-analyzer/tests/slow-tests/ratoml.rs | 12 ++-- .../rust-analyzer/tests/slow-tests/support.rs | 34 +++++---- .../rust-analyzer/crates/vfs/src/vfs_path.rs | 14 ++++ 9 files changed, 81 insertions(+), 66 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs index 42953d3b833c2..21b481c1fa29e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs @@ -227,7 +227,7 @@ fn run_server() -> anyhow::Result<()> { .filter(|workspaces| !workspaces.is_empty()) .unwrap_or_else(|| vec![root_path.clone()]); let mut config = - Config::new(root_path, capabilities, workspace_roots, visual_studio_code_version, None); + Config::new(root_path, capabilities, workspace_roots, visual_studio_code_version); if let Some(json) = initialization_options { let mut change = ConfigChange::default(); change.change_client_config(json); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index 34b20851dd6bb..a8a02712fe291 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -37,7 +37,6 @@ impl flags::Scip { lsp_types::ClientCapabilities::default(), vec![], None, - None, ); if let Some(p) = self.config_path { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index d7f24e6ce1adc..02a513f44b258 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -3,10 +3,13 @@ //! Of particular interest is the `feature_flags` hash map: while other fields //! configure the server itself, feature flags are passed into analysis, and //! tweak things like automatic insertion of `()` in completions. -use std::{fmt, iter, ops::Not, sync::OnceLock}; +use std::{ + env, fmt, iter, + ops::Not, + sync::{LazyLock, OnceLock}, +}; use cfg::{CfgAtom, CfgDiff}; -use dirs::config_dir; use hir::Symbol; use ide::{ AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode, @@ -735,7 +738,6 @@ pub enum RatomlFileKind { } #[derive(Debug, Clone)] -// FIXME @alibektas : Seems like a clippy warning of this sort should tell that combining different ConfigInputs into one enum was not a good idea. 
#[allow(clippy::large_enum_variant)] enum RatomlFile { Workspace(GlobalLocalConfigInput), @@ -757,16 +759,6 @@ pub struct Config { /// by receiving a `lsp_types::notification::DidChangeConfiguration`. client_config: (FullConfigInput, ConfigErrors), - /// Path to the root configuration file. This can be seen as a generic way to define what would be `$XDG_CONFIG_HOME/rust-analyzer/rust-analyzer.toml` in Linux. - /// If not specified by init of a `Config` object this value defaults to : - /// - /// |Platform | Value | Example | - /// | ------- | ------------------------------------- | ---------------------------------------- | - /// | Linux | `$XDG_CONFIG_HOME` or `$HOME`/.config | /home/alice/.config | - /// | macOS | `$HOME`/Library/Application Support | /Users/Alice/Library/Application Support | - /// | Windows | `{FOLDERID_RoamingAppData}` | C:\Users\Alice\AppData\Roaming | - user_config_path: VfsPath, - /// Config node whose values apply to **every** Rust project. user_config: Option<(GlobalLocalConfigInput, ConfigErrors)>, @@ -794,8 +786,27 @@ impl std::ops::Deref for Config { } impl Config { - pub fn user_config_path(&self) -> &VfsPath { - &self.user_config_path + /// Path to the root configuration file. This can be seen as a generic way to define what would be `$XDG_CONFIG_HOME/rust-analyzer/rust-analyzer.toml` in Linux. + /// This path is equal to: + /// + /// |Platform | Value | Example | + /// | ------- | ------------------------------------- | ---------------------------------------- | + /// | Linux | `$XDG_CONFIG_HOME` or `$HOME`/.config | /home/alice/.config | + /// | macOS | `$HOME`/Library/Application Support | /Users/Alice/Library/Application Support | + /// | Windows | `{FOLDERID_RoamingAppData}` | C:\Users\Alice\AppData\Roaming | + pub fn user_config_path() -> &'static AbsPath { + static USER_CONFIG_PATH: LazyLock = LazyLock::new(|| { + let user_config_path = if let Some(path) = env::var_os("__TEST_RA_USER_CONFIG_DIR") { + std::path::PathBuf::from(path) + } else { + dirs::config_dir() + .expect("A config dir is expected to existed on all platforms ra supports.") + .join("rust-analyzer") + } + .join("rust-analyzer.toml"); + AbsPathBuf::assert_utf8(user_config_path) + }); + &USER_CONFIG_PATH } pub fn same_source_root_parent_map( @@ -1315,24 +1326,8 @@ impl Config { caps: lsp_types::ClientCapabilities, workspace_roots: Vec, visual_studio_code_version: Option, - user_config_path: Option, ) -> Self { static DEFAULT_CONFIG_DATA: OnceLock<&'static DefaultConfigData> = OnceLock::new(); - let user_config_path = if let Some(user_config_path) = user_config_path { - user_config_path.join("rust-analyzer").join("rust-analyzer.toml") - } else { - let p = config_dir() - .expect("A config dir is expected to existed on all platforms ra supports.") - .join("rust-analyzer") - .join("rust-analyzer.toml"); - Utf8PathBuf::from_path_buf(p).expect("Config dir expected to be abs.") - }; - - // A user config cannot be a virtual path as rust-analyzer cannot support watching changes in virtual paths. - // See `GlobalState::process_changes` to get more info. - // FIXME @alibektas : Temporary solution. I don't think this is right as at some point we may allow users to specify - // custom USER_CONFIG_PATHs which may also be relative. 
- let user_config_path = VfsPath::from(AbsPathBuf::assert(user_config_path)); Config { caps: ClientCapabilities::new(caps), @@ -1345,7 +1340,6 @@ impl Config { default_config: DEFAULT_CONFIG_DATA.get_or_init(|| Box::leak(Box::default())), source_root_parent_map: Arc::new(FxHashMap::default()), user_config: None, - user_config_path, detached_files: Default::default(), validation_errors: Default::default(), ratoml_file: Default::default(), @@ -3417,7 +3411,7 @@ mod tests { #[test] fn proc_macro_srv_null() { let mut config = - Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None); let mut change = ConfigChange::default(); change.change_client_config(serde_json::json!({ @@ -3432,7 +3426,7 @@ mod tests { #[test] fn proc_macro_srv_abs() { let mut config = - Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None); let mut change = ConfigChange::default(); change.change_client_config(serde_json::json!({ "procMacro" : { @@ -3446,7 +3440,7 @@ mod tests { #[test] fn proc_macro_srv_rel() { let mut config = - Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None); let mut change = ConfigChange::default(); @@ -3466,7 +3460,7 @@ mod tests { #[test] fn cargo_target_dir_unset() { let mut config = - Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None); let mut change = ConfigChange::default(); @@ -3484,7 +3478,7 @@ mod tests { #[test] fn cargo_target_dir_subdir() { let mut config = - Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None); let mut change = ConfigChange::default(); change.change_client_config(serde_json::json!({ @@ -3502,7 +3496,7 @@ mod tests { #[test] fn cargo_target_dir_relative_dir() { let mut config = - Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None); let mut change = ConfigChange::default(); change.change_client_config(serde_json::json!({ @@ -3523,7 +3517,7 @@ mod tests { #[test] fn toml_unknown_key() { let config = - Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None); let mut change = ConfigChange::default(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs index e330cfc3cfd68..c3ab7f3ae7156 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -548,7 +548,6 @@ mod tests { ClientCapabilities::default(), Vec::new(), None, - None, ), ); let snap = state.snapshot(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 7a7ec1d77e09b..1e21249c12554 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ 
b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -380,7 +380,7 @@ impl GlobalState { || !self.config.same_source_root_parent_map(&self.local_roots_parent_map) { let config_change = { - let user_config_path = self.config.user_config_path(); + let user_config_path = Config::user_config_path(); let mut change = ConfigChange::default(); let db = self.analysis_host.raw_database(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 510dc22603c59..5b58b332df254 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -550,9 +550,12 @@ impl GlobalState { }; watchers.extend( - iter::once(self.config.user_config_path().as_path()) - .chain(self.workspaces.iter().map(|ws| ws.manifest().map(ManifestPath::as_ref))) - .flatten() + iter::once(Config::user_config_path()) + .chain( + self.workspaces + .iter() + .filter_map(|ws| ws.manifest().map(ManifestPath::as_ref)), + ) .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern: lsp_types::GlobPattern::String(glob_pattern.to_string()), kind: None, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs index c06ba9eee1477..478840f104e28 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs @@ -8,6 +8,7 @@ use lsp_types::{ }; use paths::Utf8PathBuf; +use rust_analyzer::config::Config; use rust_analyzer::lsp::ext::{InternalTestingFetchConfig, InternalTestingFetchConfigParams}; use serde_json::json; use test_utils::skip_slow_tests; @@ -24,7 +25,6 @@ struct RatomlTest { urls: Vec, server: Server, tmp_path: Utf8PathBuf, - user_config_dir: Utf8PathBuf, } impl RatomlTest { @@ -41,11 +41,7 @@ impl RatomlTest { let full_fixture = fixtures.join("\n"); - let user_cnf_dir = TestDir::new(); - let user_config_dir = user_cnf_dir.path().to_owned(); - - let mut project = - Project::with_fixture(&full_fixture).tmp_dir(tmp_dir).user_config_dir(user_cnf_dir); + let mut project = Project::with_fixture(&full_fixture).tmp_dir(tmp_dir); for root in roots { project = project.root(root); @@ -57,7 +53,7 @@ impl RatomlTest { let server = project.server().wait_until_workspace_is_loaded(); - let mut case = Self { urls: vec![], server, tmp_path, user_config_dir }; + let mut case = Self { urls: vec![], server, tmp_path }; let urls = fixtures.iter().map(|fixture| case.fixture_path(fixture)).collect::>(); case.urls = urls; case @@ -81,7 +77,7 @@ impl RatomlTest { let mut spl = spl.into_iter(); if let Some(first) = spl.next() { if first == "$$CONFIG_DIR$$" { - path = self.user_config_dir.clone(); + path = Config::user_config_path().to_path_buf().into(); } else { path = path.join(first); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs index 081ee5fa3e4e7..5bd27133c2c46 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs @@ -8,6 +8,7 @@ use std::{ use crossbeam_channel::{after, select, Receiver}; use lsp_server::{Connection, Message, Notification, Request}; use lsp_types::{notification::Exit, request::Shutdown, TextDocumentIdentifier, Url}; +use parking_lot::{Mutex, 
MutexGuard}; use paths::{Utf8Path, Utf8PathBuf}; use rust_analyzer::{ config::{Config, ConfigChange, ConfigErrors}, @@ -27,7 +28,6 @@ pub(crate) struct Project<'a> { roots: Vec, config: serde_json::Value, root_dir_contains_symlink: bool, - user_config_path: Option, } impl Project<'_> { @@ -51,15 +51,9 @@ impl Project<'_> { } }), root_dir_contains_symlink: false, - user_config_path: None, } } - pub(crate) fn user_config_dir(mut self, config_path_dir: TestDir) -> Self { - self.user_config_path = Some(config_path_dir.path().to_owned()); - self - } - pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Self { self.tmp_dir = Some(tmp_dir); self @@ -91,6 +85,7 @@ impl Project<'_> { } pub(crate) fn server(self) -> Server { + static CONFIG_DIR_LOCK: Mutex<()> = Mutex::new(()); let tmp_dir = self.tmp_dir.unwrap_or_else(|| { if self.root_dir_contains_symlink { TestDir::new_symlink() @@ -122,9 +117,13 @@ impl Project<'_> { assert!(mini_core.is_none()); assert!(toolchain.is_none()); + let mut config_dir_guard = None; for entry in fixture { if let Some(pth) = entry.path.strip_prefix("/$$CONFIG_DIR$$") { - let path = self.user_config_path.clone().unwrap().join(&pth['/'.len_utf8()..]); + if config_dir_guard.is_none() { + config_dir_guard = Some(CONFIG_DIR_LOCK.lock()); + } + let path = Config::user_config_path().join(&pth['/'.len_utf8()..]); fs::create_dir_all(path.parent().unwrap()).unwrap(); fs::write(path.as_path(), entry.text.as_bytes()).unwrap(); } else { @@ -201,7 +200,6 @@ impl Project<'_> { }, roots, None, - self.user_config_path, ); let mut change = ConfigChange::default(); @@ -213,7 +211,7 @@ impl Project<'_> { config.rediscover_workspaces(); - Server::new(tmp_dir.keep(), config) + Server::new(config_dir_guard, tmp_dir.keep(), config) } } @@ -228,10 +226,15 @@ pub(crate) struct Server { client: Connection, /// XXX: remove the tempdir last dir: TestDir, + _config_dir_guard: Option>, } impl Server { - fn new(dir: TestDir, config: Config) -> Server { + fn new( + config_dir_guard: Option>, + dir: TestDir, + config: Config, + ) -> Server { let (connection, client) = Connection::memory(); let _thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker) @@ -239,7 +242,14 @@ impl Server { .spawn(move || main_loop(config, connection).unwrap()) .expect("failed to spawn a thread"); - Server { req_id: Cell::new(1), dir, messages: Default::default(), client, _thread } + Server { + req_id: Cell::new(1), + dir, + messages: Default::default(), + client, + _thread, + _config_dir_guard: config_dir_guard, + } } pub(crate) fn doc_id(&self, rel_path: &str) -> TextDocumentIdentifier { diff --git a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs index 92a49e07936d6..3c8e37413f68f 100644 --- a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs +++ b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs @@ -313,6 +313,20 @@ impl fmt::Debug for VfsPathRepr { } } +impl PartialEq for VfsPath { + fn eq(&self, other: &AbsPath) -> bool { + match &self.0 { + VfsPathRepr::PathBuf(lhs) => lhs == other, + VfsPathRepr::VirtualPath(_) => false, + } + } +} +impl PartialEq for AbsPath { + fn eq(&self, other: &VfsPath) -> bool { + other == self + } +} + /// `/`-separated virtual path. /// /// This is used to describe files that do not reside on the file system. 
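
For context, the accessor introduced above follows the standard `LazyLock` idiom for a process-wide, compute-once value with a test-only override. A stripped-down sketch of the same idiom (illustrative names, plain `PathBuf` instead of the crate's `AbsPathBuf`; assumes the `dirs` crate already used above):

```rust
use std::{
    env,
    path::{Path, PathBuf},
    sync::LazyLock,
};

// Compute the user config path once per process; tests can redirect it via the
// same env var the patch reads (`__TEST_RA_USER_CONFIG_DIR`).
static USER_CONFIG_PATH: LazyLock<PathBuf> = LazyLock::new(|| {
    let dir = match env::var_os("__TEST_RA_USER_CONFIG_DIR") {
        Some(dir) => PathBuf::from(dir),
        None => dirs::config_dir()
            .expect("a config dir is expected to exist on supported platforms")
            .join("rust-analyzer"),
    };
    dir.join("rust-analyzer.toml")
});

fn user_config_path() -> &'static Path {
    USER_CONFIG_PATH.as_path()
}
```

The real implementation additionally asserts UTF-8 via `AbsPathBuf::assert_utf8`, and the next commit in the series makes the lookup fallible (`Option<&'static AbsPath>`) instead of panicking when no config directory exists.
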
From 879fa66c881f1d1e0632cb3bfc6d559c042e2e72 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 19 Aug 2024 16:00:06 +0200 Subject: [PATCH 027/124] Allow user config to not exist --- .../rust-analyzer/crates/rust-analyzer/src/config.rs | 12 +++++------- .../crates/rust-analyzer/src/global_state.rs | 2 +- .../rust-analyzer/crates/rust-analyzer/src/reload.rs | 7 ++----- .../crates/rust-analyzer/tests/slow-tests/ratoml.rs | 2 +- .../crates/rust-analyzer/tests/slow-tests/support.rs | 2 +- 5 files changed, 10 insertions(+), 15 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 02a513f44b258..36907c468b02f 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -794,19 +794,17 @@ impl Config { /// | Linux | `$XDG_CONFIG_HOME` or `$HOME`/.config | /home/alice/.config | /// | macOS | `$HOME`/Library/Application Support | /Users/Alice/Library/Application Support | /// | Windows | `{FOLDERID_RoamingAppData}` | C:\Users\Alice\AppData\Roaming | - pub fn user_config_path() -> &'static AbsPath { - static USER_CONFIG_PATH: LazyLock = LazyLock::new(|| { + pub fn user_config_path() -> Option<&'static AbsPath> { + static USER_CONFIG_PATH: LazyLock> = LazyLock::new(|| { let user_config_path = if let Some(path) = env::var_os("__TEST_RA_USER_CONFIG_DIR") { std::path::PathBuf::from(path) } else { - dirs::config_dir() - .expect("A config dir is expected to existed on all platforms ra supports.") - .join("rust-analyzer") + dirs::config_dir()?.join("rust-analyzer") } .join("rust-analyzer.toml"); - AbsPathBuf::assert_utf8(user_config_path) + Some(AbsPathBuf::assert_utf8(user_config_path)) }); - &USER_CONFIG_PATH + USER_CONFIG_PATH.as_deref() } pub fn same_source_root_parent_map( diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 1e21249c12554..8fcdfa5829aae 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -399,7 +399,7 @@ impl GlobalState { .collect_vec(); for (file_id, (_change_kind, vfs_path)) in modified_ratoml_files { - if vfs_path == *user_config_path { + if vfs_path.as_path() == user_config_path { change.change_user_config(Some(db.file_text(file_id))); continue; } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 5b58b332df254..2b9bcf12a48f5 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -551,11 +551,8 @@ impl GlobalState { watchers.extend( iter::once(Config::user_config_path()) - .chain( - self.workspaces - .iter() - .filter_map(|ws| ws.manifest().map(ManifestPath::as_ref)), - ) + .chain(self.workspaces.iter().map(|ws| ws.manifest().map(ManifestPath::as_ref))) + .flatten() .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern: lsp_types::GlobPattern::String(glob_pattern.to_string()), kind: None, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs index 478840f104e28..f68b94fa66a46 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs +++ 
b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs @@ -77,7 +77,7 @@ impl RatomlTest { let mut spl = spl.into_iter(); if let Some(first) = spl.next() { if first == "$$CONFIG_DIR$$" { - path = Config::user_config_path().to_path_buf().into(); + path = Config::user_config_path().unwrap().to_path_buf().into(); } else { path = path.join(first); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs index 5bd27133c2c46..06ce984681833 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs @@ -123,7 +123,7 @@ impl Project<'_> { if config_dir_guard.is_none() { config_dir_guard = Some(CONFIG_DIR_LOCK.lock()); } - let path = Config::user_config_path().join(&pth['/'.len_utf8()..]); + let path = Config::user_config_path().unwrap().join(&pth['/'.len_utf8()..]); fs::create_dir_all(path.parent().unwrap()).unwrap(); fs::write(path.as_path(), entry.text.as_bytes()).unwrap(); } else { From ea12d79b3aaf025425b6a3043f38dc7454eda252 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Sun, 18 Aug 2024 22:48:47 +0900 Subject: [PATCH 028/124] fix: Panic when a TAIT exists in a RPIT --- .../rust-analyzer/crates/hir-ty/src/infer.rs | 79 ++++++++++++++++--- .../crates/hir-ty/src/tests/regression.rs | 17 ++++ 2 files changed, 85 insertions(+), 11 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 8defeee5e8f49..8b4e98c5500e8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -604,6 +604,11 @@ fn find_continuable( } } +enum ImplTraitReplacingMode { + ReturnPosition(FxHashSet), + TypeAlias, +} + impl<'a> InferenceContext<'a> { fn new( db: &'a dyn HirDatabase, @@ -825,13 +830,19 @@ impl<'a> InferenceContext<'a> { self.write_binding_ty(self_param, ty); } } - let mut params_and_ret_tys = Vec::new(); + let mut tait_candidates = FxHashSet::default(); for (ty, pat) in param_tys.zip(&*self.body.params) { let ty = self.insert_type_vars(ty); let ty = self.normalize_associated_types_in(ty); self.infer_top_pat(*pat, &ty); - params_and_ret_tys.push(ty); + if ty + .data(Interner) + .flags + .intersects(TypeFlags::HAS_TY_OPAQUE.union(TypeFlags::HAS_TY_INFER)) + { + tait_candidates.insert(ty); + } } let return_ty = &*data.ret_type; @@ -844,7 +855,12 @@ impl<'a> InferenceContext<'a> { let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) { // RPIT opaque types use substitution of their parent function. let fn_placeholders = TyBuilder::placeholder_subst(self.db, func); - let result = self.insert_inference_vars_for_impl_trait(return_ty, fn_placeholders); + let mut mode = ImplTraitReplacingMode::ReturnPosition(FxHashSet::default()); + let result = + self.insert_inference_vars_for_impl_trait(return_ty, fn_placeholders, &mut mode); + if let ImplTraitReplacingMode::ReturnPosition(taits) = mode { + tait_candidates.extend(taits); + } let rpits = rpits.skip_binders(); for (id, _) in rpits.impl_traits.iter() { if let Entry::Vacant(e) = self.result.type_of_rpit.entry(id) { @@ -863,11 +879,23 @@ impl<'a> InferenceContext<'a> { // Functions might be defining usage sites of TAITs. // To define an TAITs, that TAIT must appear in the function's signatures. // So, it suffices to check for params and return types. 
- params_and_ret_tys.push(self.return_ty.clone()); - self.make_tait_coercion_table(params_and_ret_tys.iter()); + if self + .return_ty + .data(Interner) + .flags + .intersects(TypeFlags::HAS_TY_OPAQUE.union(TypeFlags::HAS_TY_INFER)) + { + tait_candidates.insert(self.return_ty.clone()); + } + self.make_tait_coercion_table(tait_candidates.iter()); } - fn insert_inference_vars_for_impl_trait(&mut self, t: T, placeholders: Substitution) -> T + fn insert_inference_vars_for_impl_trait( + &mut self, + t: T, + placeholders: Substitution, + mode: &mut ImplTraitReplacingMode, + ) -> T where T: crate::HasInterner + crate::TypeFoldable, { @@ -880,10 +908,31 @@ impl<'a> InferenceContext<'a> { }; let (impl_traits, idx) = match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) { + // We don't replace opaque types from other kind with inference vars + // because `insert_inference_vars_for_impl_traits` for each kinds + // and unreplaced opaque types of other kind are resolved while + // inferencing because of `tait_coercion_table`. + // Moreover, calling `insert_inference_vars_for_impl_traits` with same + // `placeholders` for other kind may cause trouble because + // the substs for the bounds of each impl traits do not match ImplTraitId::ReturnTypeImplTrait(def, idx) => { + if matches!(mode, ImplTraitReplacingMode::TypeAlias) { + // RPITs don't have `tait_coercion_table`, so use inserted inference + // vars for them. + if let Some(ty) = self.result.type_of_rpit.get(idx) { + return ty.clone(); + } + return ty; + } (self.db.return_type_impl_traits(def), idx) } ImplTraitId::TypeAliasImplTrait(def, idx) => { + if let ImplTraitReplacingMode::ReturnPosition(taits) = mode { + // Gather TAITs while replacing RPITs because TAITs inside RPITs + // may not visited while replacing TAITs + taits.insert(ty.clone()); + return ty; + } (self.db.type_alias_impl_traits(def), idx) } _ => unreachable!(), @@ -892,16 +941,20 @@ impl<'a> InferenceContext<'a> { return ty; }; let bounds = (*impl_traits) - .map_ref(|rpits| rpits.impl_traits[idx].bounds.map_ref(|it| it.iter())); + .map_ref(|its| its.impl_traits[idx].bounds.map_ref(|it| it.iter())); let var = self.table.new_type_var(); let var_subst = Substitution::from1(Interner, var.clone()); for bound in bounds { - let predicate = bound.map(|it| it.cloned()).substitute(Interner, &placeholders); + let predicate = bound.map(|it| it.cloned()); + let predicate = predicate.substitute(Interner, &placeholders); let (var_predicate, binders) = predicate.substitute(Interner, &var_subst).into_value_and_skipped_binders(); always!(binders.is_empty(Interner)); // quantified where clauses not yet handled - let var_predicate = self - .insert_inference_vars_for_impl_trait(var_predicate, placeholders.clone()); + let var_predicate = self.insert_inference_vars_for_impl_trait( + var_predicate, + placeholders.clone(), + mode, + ); self.push_obligation(var_predicate.cast(Interner)); } self.result.type_of_rpit.insert(idx, var.clone()); @@ -1038,7 +1091,11 @@ impl<'a> InferenceContext<'a> { self.db.lookup_intern_impl_trait_id(id.into()) { let subst = TyBuilder::placeholder_subst(self.db, alias_id); - let ty = self.insert_inference_vars_for_impl_trait(ty, subst); + let ty = self.insert_inference_vars_for_impl_trait( + ty, + subst, + &mut ImplTraitReplacingMode::TypeAlias, + ); Some((id, ty)) } else { None diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index d6e034b01840a..17fbe4db31795 100644 --- 
a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -2211,3 +2211,20 @@ fn f() -> Foo {} "#]], ); } + +#[test] +fn issue_17921() { + check_infer( + r#" +//- minicore: future +trait Foo {} +type Bar = impl Foo; + +async fn f() -> Bar {} +"#, + expect![[r#" + 64..66 '{}': () + 64..66 '{}': impl Future + "#]], + ); +} From f25cb8073c2a10648fff0a267e3c41817a81b1a0 Mon Sep 17 00:00:00 2001 From: Wilfred Hughes Date: Tue, 13 Aug 2024 16:26:37 -0700 Subject: [PATCH 029/124] ServerStatusParams should consider 'prime caches' in quiescent status Priming caches is a performance win, but it takes a lock on the salsa database and prevents rust-analyzer from responding to e.g. go-to-def requests. This causes confusion for users, who see the spinner next to rust-analyzer in the VS Code footer stop, so they start attempting to navigate their code. Instead, set the `quiescent` status in LSP to false during cache priming, so the VS Code spinner persists until we can respond to any LSP request. --- .../crates/rust-analyzer/src/reload.rs | 23 +++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 510dc22603c59..884a8e83472ce 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -61,6 +61,10 @@ pub(crate) enum ProcMacroProgress { } impl GlobalState { + /// Is the server quiescent? + /// + /// This indicates that we've fully loaded the projects and + /// are ready to do semantic work. pub(crate) fn is_quiescent(&self) -> bool { self.vfs_done && self.last_reported_status.is_some() @@ -71,6 +75,15 @@ impl GlobalState { && self.vfs_progress_config_version >= self.vfs_config_version } + /// Is the server ready to respond to analysis dependent LSP requests? + /// + /// Unlike `is_quiescent`, this returns false when we're indexing + /// the project, because we're holding the salsa lock and cannot + /// respond to LSP requests that depend on salsa data. 
+ fn is_fully_ready(&self) -> bool { + self.is_quiescent() && !self.prime_caches_queue.op_in_progress() + } + pub(crate) fn update_configuration(&mut self, config: Config) { let _p = tracing::info_span!("GlobalState::update_configuration").entered(); let old_config = mem::replace(&mut self.config, Arc::new(config)); @@ -102,13 +115,15 @@ impl GlobalState { } pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams { - let quiescent = self.is_quiescent(); - let mut status = - lsp_ext::ServerStatusParams { health: lsp_ext::Health::Ok, quiescent, message: None }; + let mut status = lsp_ext::ServerStatusParams { + health: lsp_ext::Health::Ok, + quiescent: self.is_fully_ready(), + message: None, + }; let mut message = String::new(); if !self.config.cargo_autoreload(None) - && quiescent + && self.is_quiescent() && self.fetch_workspaces_queue.op_requested() && self.config.discover_workspace_config().is_none() { From a7d15a8c3b5e9bd651d2a2606e3bd346e2b3b2d1 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 20 Aug 2024 09:53:37 +0200 Subject: [PATCH 030/124] fix: Fix panics for semantic highlighting at startup --- .../rust-analyzer/src/handlers/dispatch.rs | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs index a105ec638203b..f03de8ce0f038 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs @@ -139,16 +139,26 @@ impl RequestDispatcher<'_> { self.on_with_thread_intent::(ThreadIntent::Worker, f) } - /// Dispatches a latency-sensitive request onto the thread pool. + /// Dispatches a latency-sensitive request onto the thread pool. When the VFS is marked not + /// ready this will return a default constructed [`R::Result`]. pub(crate) fn on_latency_sensitive( &mut self, f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result, ) -> &mut Self where - R: lsp_types::request::Request + 'static, - R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug, - R::Result: Serialize, + R: lsp_types::request::Request< + Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug, + Result: Serialize + Default, + > + 'static, { + if !self.global_state.vfs_done { + if let Some(lsp_server::Request { id, .. }) = + self.req.take_if(|it| it.method == R::METHOD) + { + self.global_state.respond(lsp_server::Response::new_ok(id, R::Result::default())); + } + return self; + } self.on_with_thread_intent::(ThreadIntent::LatencySensitive, f) } From a42c732c53933a29b4e4718650a398b15f025691 Mon Sep 17 00:00:00 2001 From: Ali Bektas Date: Sat, 17 Aug 2024 00:57:01 +0200 Subject: [PATCH 031/124] Define workspace level configs. --- .../crates/rust-analyzer/src/config.rs | 222 +++++++++++------- .../rust-analyzer/src/handlers/request.rs | 6 +- .../rust-analyzer/src/lsp/capabilities.rs | 2 +- .../crates/rust-analyzer/src/reload.rs | 2 +- 4 files changed, 142 insertions(+), 90 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index d7f24e6ce1adc..ab27a65de5afe 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -436,6 +436,13 @@ config_data! { } } +config_data! 
{ + workspace: struct WorkspaceDefaultConfigData <- WorkspaceConfigInput -> { + + + } +} + config_data! { /// Configs that only make sense when they are set by a client. As such they can only be defined /// by setting them using client's settings (e.g `settings.json` on VS Code). @@ -738,7 +745,7 @@ pub enum RatomlFileKind { // FIXME @alibektas : Seems like a clippy warning of this sort should tell that combining different ConfigInputs into one enum was not a good idea. #[allow(clippy::large_enum_variant)] enum RatomlFile { - Workspace(GlobalLocalConfigInput), + Workspace(WorkspaceLocalConfigInput), Crate(LocalConfigInput), } @@ -962,7 +969,7 @@ impl Config { source_root_id, ( RatomlFile::Workspace( - GlobalLocalConfigInput::from_toml( + WorkspaceLocalConfigInput::from_toml( table, &mut toml_errors, ), @@ -1000,7 +1007,7 @@ impl Config { config.source_root_parent_map = source_root_map; } - if config.check_command(None).is_empty() { + if config.check_command().is_empty() { config.validation_errors.0.push(Arc::new(ConfigErrorInner::Json { config_key: "/check/command".to_owned(), error: serde_json::Error::custom("expected a non-empty string"), @@ -1444,11 +1451,11 @@ impl Config { pub fn diagnostics(&self, source_root: Option) -> DiagnosticsConfig { DiagnosticsConfig { - enabled: *self.diagnostics_enable(source_root), + enabled: *self.diagnostics_enable(), proc_attr_macros_enabled: self.expand_proc_attr_macros(), proc_macros_enabled: *self.procMacro_enable(), - disable_experimental: !self.diagnostics_experimental_enable(source_root), - disabled: self.diagnostics_disabled(source_root).clone(), + disable_experimental: !self.diagnostics_experimental_enable(), + disabled: self.diagnostics_disabled().clone(), expr_fill_default: match self.assist_expressionFillDefault(source_root) { ExprFillDefaultDef::Todo => ExprFillDefaultMode::Todo, ExprFillDefaultDef::Default => ExprFillDefaultMode::Default, @@ -1458,7 +1465,7 @@ impl Config { prefer_no_std: self.imports_preferNoStd(source_root).to_owned(), prefer_prelude: self.imports_preferPrelude(source_root).to_owned(), prefer_absolute: self.imports_prefixExternPrelude(source_root).to_owned(), - style_lints: self.diagnostics_styleLints_enable(source_root).to_owned(), + style_lints: self.diagnostics_styleLints_enable().to_owned(), term_search_fuel: self.assist_termSearch_fuel(source_root).to_owned() as u64, term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(), } @@ -1651,11 +1658,11 @@ impl Config { } pub fn has_linked_projects(&self) -> bool { - !self.linkedProjects(None).is_empty() + !self.linkedProjects().is_empty() } pub fn linked_manifests(&self) -> impl Iterator + '_ { - self.linkedProjects(None).iter().filter_map(|it| match it { + self.linkedProjects().iter().filter_map(|it| match it { ManifestOrProjectJson::Manifest(p) => Some(&**p), // despite having a buildfile, using this variant as a manifest // will fail. @@ -1665,20 +1672,20 @@ impl Config { } pub fn has_linked_project_jsons(&self) -> bool { - self.linkedProjects(None) + self.linkedProjects() .iter() .any(|it| matches!(it, ManifestOrProjectJson::ProjectJson { .. 
})) } pub fn discover_workspace_config(&self) -> Option<&DiscoverWorkspaceConfig> { - self.workspace_discoverConfig(None).as_ref() + self.workspace_discoverConfig().as_ref() } pub fn linked_or_discovered_projects(&self) -> Vec { - match self.linkedProjects(None).as_slice() { + match self.linkedProjects().as_slice() { [] => { let exclude_dirs: Vec<_> = - self.files_excludeDirs(None).iter().map(|p| self.root_path.join(p)).collect(); + self.files_excludeDirs().iter().map(|p| self.root_path.join(p)).collect(); self.discovered_projects .iter() .filter(|project| { @@ -1713,48 +1720,48 @@ impl Config { } pub fn prefill_caches(&self) -> bool { - self.cachePriming_enable(None).to_owned() + self.cachePriming_enable().to_owned() } pub fn publish_diagnostics(&self) -> bool { - self.diagnostics_enable(None).to_owned() + self.diagnostics_enable().to_owned() } pub fn diagnostics_map(&self) -> DiagnosticsMapConfig { DiagnosticsMapConfig { - remap_prefix: self.diagnostics_remapPrefix(None).clone(), - warnings_as_info: self.diagnostics_warningsAsInfo(None).clone(), - warnings_as_hint: self.diagnostics_warningsAsHint(None).clone(), - check_ignore: self.check_ignore(None).clone(), + remap_prefix: self.diagnostics_remapPrefix().clone(), + warnings_as_info: self.diagnostics_warningsAsInfo().clone(), + warnings_as_hint: self.diagnostics_warningsAsHint().clone(), + check_ignore: self.check_ignore().clone(), } } pub fn extra_args(&self) -> &Vec { - self.cargo_extraArgs(None) + self.cargo_extraArgs() } pub fn extra_env(&self) -> &FxHashMap { - self.cargo_extraEnv(None) + self.cargo_extraEnv() } pub fn check_extra_args(&self) -> Vec { let mut extra_args = self.extra_args().clone(); - extra_args.extend_from_slice(self.check_extraArgs(None)); + extra_args.extend_from_slice(self.check_extraArgs()); extra_args } pub fn check_extra_env(&self) -> FxHashMap { - let mut extra_env = self.cargo_extraEnv(None).clone(); - extra_env.extend(self.check_extraEnv(None).clone()); + let mut extra_env = self.cargo_extraEnv().clone(); + extra_env.extend(self.check_extraEnv().clone()); extra_env } pub fn lru_parse_query_capacity(&self) -> Option { - self.lru_capacity(None).to_owned() + self.lru_capacity().to_owned() } pub fn lru_query_capacities_config(&self) -> Option<&FxHashMap, u16>> { - self.lru_query_capacities(None).is_empty().not().then(|| self.lru_query_capacities(None)) + self.lru_query_capacities().is_empty().not().then(|| self.lru_query_capacities()) } pub fn proc_macro_srv(&self) -> Option { @@ -1763,7 +1770,7 @@ impl Config { } pub fn ignored_proc_macros(&self) -> &FxHashMap, Box<[Box]>> { - self.procMacro_ignored(None) + self.procMacro_ignored() } pub fn expand_proc_macros(&self) -> bool { @@ -1778,11 +1785,7 @@ impl Config { } _ => FilesWatcher::Server, }, - exclude: self - .files_excludeDirs(None) - .iter() - .map(|it| self.root_path.join(it)) - .collect(), + exclude: self.files_excludeDirs().iter().map(|it| self.root_path.join(it)).collect(), } } @@ -1793,22 +1796,22 @@ impl Config { } pub fn cargo_autoreload_config(&self) -> bool { - self.cargo_autoreload(None).to_owned() + self.cargo_autoreload().to_owned() } pub fn run_build_scripts(&self) -> bool { - self.cargo_buildScripts_enable(None).to_owned() || self.procMacro_enable().to_owned() + self.cargo_buildScripts_enable().to_owned() || self.procMacro_enable().to_owned() } pub fn cargo(&self) -> CargoConfig { - let rustc_source = self.rustc_source(None).as_ref().map(|rustc_src| { + let rustc_source = self.rustc_source().as_ref().map(|rustc_src| { if rustc_src == 
"discover" { RustLibSource::Discover } else { RustLibSource::Path(self.root_path.join(rustc_src)) } }); - let sysroot = self.cargo_sysroot(None).as_ref().map(|sysroot| { + let sysroot = self.cargo_sysroot().as_ref().map(|sysroot| { if sysroot == "discover" { RustLibSource::Discover } else { @@ -1816,24 +1819,24 @@ impl Config { } }); let sysroot_src = - self.cargo_sysrootSrc(None).as_ref().map(|sysroot| self.root_path.join(sysroot)); + self.cargo_sysrootSrc().as_ref().map(|sysroot| self.root_path.join(sysroot)); CargoConfig { - all_targets: *self.cargo_allTargets(None), - features: match &self.cargo_features(None) { + all_targets: *self.cargo_allTargets(), + features: match &self.cargo_features() { CargoFeaturesDef::All => CargoFeatures::All, CargoFeaturesDef::Selected(features) => CargoFeatures::Selected { features: features.clone(), - no_default_features: self.cargo_noDefaultFeatures(None).to_owned(), + no_default_features: self.cargo_noDefaultFeatures().to_owned(), }, }, - target: self.cargo_target(None).clone(), + target: self.cargo_target().clone(), sysroot, sysroot_src, rustc_source, cfg_overrides: project_model::CfgOverrides { global: CfgDiff::new( - self.cargo_cfgs(None) + self.cargo_cfgs() .iter() .map(|(key, val)| match val { Some(val) => CfgAtom::KeyValue { @@ -1848,43 +1851,43 @@ impl Config { .unwrap(), selective: Default::default(), }, - wrap_rustc_in_build_scripts: *self.cargo_buildScripts_useRustcWrapper(None), - invocation_strategy: match self.cargo_buildScripts_invocationStrategy(None) { + wrap_rustc_in_build_scripts: *self.cargo_buildScripts_useRustcWrapper(), + invocation_strategy: match self.cargo_buildScripts_invocationStrategy() { InvocationStrategy::Once => project_model::InvocationStrategy::Once, InvocationStrategy::PerWorkspace => project_model::InvocationStrategy::PerWorkspace, }, - run_build_script_command: self.cargo_buildScripts_overrideCommand(None).clone(), - extra_args: self.cargo_extraArgs(None).clone(), - extra_env: self.cargo_extraEnv(None).clone(), + run_build_script_command: self.cargo_buildScripts_overrideCommand().clone(), + extra_args: self.cargo_extraArgs().clone(), + extra_env: self.cargo_extraEnv().clone(), target_dir: self.target_dir_from_config(), } } - pub fn rustfmt(&self, source_root_id: Option) -> RustfmtConfig { - match &self.rustfmt_overrideCommand(source_root_id) { + pub fn rustfmt(&self) -> RustfmtConfig { + match &self.rustfmt_overrideCommand() { Some(args) if !args.is_empty() => { let mut args = args.clone(); let command = args.remove(0); RustfmtConfig::CustomCommand { command, args } } Some(_) | None => RustfmtConfig::Rustfmt { - extra_args: self.rustfmt_extraArgs(source_root_id).clone(), - enable_range_formatting: *self.rustfmt_rangeFormatting_enable(source_root_id), + extra_args: self.rustfmt_extraArgs().clone(), + enable_range_formatting: *self.rustfmt_rangeFormatting_enable(), }, } } pub fn flycheck_workspace(&self) -> bool { - *self.check_workspace(None) + *self.check_workspace() } pub(crate) fn cargo_test_options(&self) -> CargoOptions { CargoOptions { - target_triples: self.cargo_target(None).clone().into_iter().collect(), + target_triples: self.cargo_target().clone().into_iter().collect(), all_targets: false, - no_default_features: *self.cargo_noDefaultFeatures(None), - all_features: matches!(self.cargo_features(None), CargoFeaturesDef::All), - features: match self.cargo_features(None).clone() { + no_default_features: *self.cargo_noDefaultFeatures(), + all_features: matches!(self.cargo_features(), CargoFeaturesDef::All), + 
features: match self.cargo_features().clone() { CargoFeaturesDef::All => vec![], CargoFeaturesDef::Selected(it) => it, }, @@ -1895,7 +1898,7 @@ impl Config { } pub(crate) fn flycheck(&self) -> FlycheckConfig { - match &self.check_overrideCommand(None) { + match &self.check_overrideCommand() { Some(args) if !args.is_empty() => { let mut args = args.clone(); let command = args.remove(0); @@ -1903,7 +1906,7 @@ impl Config { command, args, extra_env: self.check_extra_env(), - invocation_strategy: match self.check_invocationStrategy(None) { + invocation_strategy: match self.check_invocationStrategy() { InvocationStrategy::Once => crate::flycheck::InvocationStrategy::Once, InvocationStrategy::PerWorkspace => { crate::flycheck::InvocationStrategy::PerWorkspace @@ -1912,30 +1915,28 @@ impl Config { } } Some(_) | None => FlycheckConfig::CargoCommand { - command: self.check_command(None).clone(), + command: self.check_command().clone(), options: CargoOptions { target_triples: self - .check_targets(None) + .check_targets() .clone() .and_then(|targets| match &targets.0[..] { [] => None, targets => Some(targets.into()), }) - .unwrap_or_else(|| self.cargo_target(None).clone().into_iter().collect()), - all_targets: self - .check_allTargets(None) - .unwrap_or(*self.cargo_allTargets(None)), + .unwrap_or_else(|| self.cargo_target().clone().into_iter().collect()), + all_targets: self.check_allTargets().unwrap_or(*self.cargo_allTargets()), no_default_features: self - .check_noDefaultFeatures(None) - .unwrap_or(*self.cargo_noDefaultFeatures(None)), + .check_noDefaultFeatures() + .unwrap_or(*self.cargo_noDefaultFeatures()), all_features: matches!( - self.check_features(None).as_ref().unwrap_or(self.cargo_features(None)), + self.check_features().as_ref().unwrap_or(self.cargo_features()), CargoFeaturesDef::All ), features: match self - .check_features(None) + .check_features() .clone() - .unwrap_or_else(|| self.cargo_features(None).clone()) + .unwrap_or_else(|| self.cargo_features().clone()) { CargoFeaturesDef::All => vec![], CargoFeaturesDef::Selected(it) => it, @@ -1950,7 +1951,7 @@ impl Config { } fn target_dir_from_config(&self) -> Option { - self.cargo_targetDir(None).as_ref().and_then(|target_dir| match target_dir { + self.cargo_targetDir().as_ref().and_then(|target_dir| match target_dir { TargetDirectory::UseSubdirectory(true) => { Some(Utf8PathBuf::from("target/rust-analyzer")) } @@ -1961,18 +1962,18 @@ impl Config { } pub fn check_on_save(&self) -> bool { - *self.checkOnSave(None) + *self.checkOnSave() } pub fn script_rebuild_on_save(&self) -> bool { - *self.cargo_buildScripts_rebuildOnSave(None) + *self.cargo_buildScripts_rebuildOnSave() } pub fn runnables(&self) -> RunnablesConfig { RunnablesConfig { - override_cargo: self.runnables_command(None).clone(), - cargo_extra_args: self.runnables_extraArgs(None).clone(), - extra_test_binary_args: self.runnables_extraTestBinaryArgs(None).clone(), + override_cargo: self.runnables_command().clone(), + cargo_extra_args: self.runnables_extraArgs().clone(), + extra_test_binary_args: self.runnables_extraTestBinaryArgs().clone(), } } @@ -2043,7 +2044,7 @@ impl Config { } pub fn prime_caches_num_threads(&self) -> usize { - match self.cachePriming_numThreads(None) { + match self.cachePriming_numThreads() { NumThreads::Concrete(0) | NumThreads::Physical => num_cpus::get_physical(), &NumThreads::Concrete(n) => n, NumThreads::Logical => num_cpus::get(), @@ -2542,7 +2543,7 @@ macro_rules! 
_impl_for_config_data { )* } }; - (global, $( + (workspace, $( $(#[doc=$doc:literal])* $vis:vis $field:ident : $ty:ty = $default:expr, )* @@ -2551,18 +2552,47 @@ macro_rules! _impl_for_config_data { $( $($doc)* #[allow(non_snake_case)] - $vis fn $field(&self, source_root : Option) -> &$ty { + $vis fn $field(&self, source_root: Option) -> &$ty { let mut source_root = source_root.as_ref(); while let Some(sr) = source_root { - if let Some((RatomlFile::Workspace(config), _)) = self.ratoml_file.get(&sr) { - if let Some(v) = config.global.$field.as_ref() { - return &v; + if let Some((file, _)) = self.ratoml_file.get(&sr) { + match file { + RatomlFile::Workspace(config) => { + if let Some(v) = config.workspace.$field.as_ref() { + return &v; + } + }, } } - source_root = self.source_root_parent_map.get(&sr); } + if let Some(v) = self.client_config.0.local.$field.as_ref() { + return &v; + } + + if let Some((user_config, _)) = self.user_config.as_ref() { + if let Some(v) = user_config.local.$field.as_ref() { + return &v; + } + } + + &self.default_config.workspace.$field + } + )* + } + }; + (global, $( + $(#[doc=$doc:literal])* + $vis:vis $field:ident : $ty:ty = $default:expr, + )* + ) => { + impl Config { + $( + $($doc)* + #[allow(non_snake_case)] + // TODO Remove source_root + $vis fn $field(&self) -> &$ty { if let Some(v) = self.client_config.0.global.$field.as_ref() { return &v; } @@ -2772,6 +2802,28 @@ impl GlobalLocalConfigInput { } } +/// All of the config levels, all fields `Option`, to describe fields that are actually set by +/// some rust-analyzer.toml file or JSON blob. An empty rust-analyzer.toml corresponds to +/// all fields being None. +#[derive(Debug, Clone, Default)] +#[allow(dead_code)] +struct WorkspaceLocalConfigInput { + workspace: WorkspaceConfigInput, + local: LocalConfigInput, +} + +impl WorkspaceLocalConfigInput { + #[allow(dead_code)] + const FIELDS: &'static [&'static [&'static str]] = + &[WorkspaceConfigInput::FIELDS, LocalConfigInput::FIELDS]; + fn from_toml(toml: toml::Table, error_sink: &mut Vec<(String, toml::de::Error)>) -> Self { + Self { + workspace: WorkspaceConfigInput::from_toml(&toml, error_sink), + local: LocalConfigInput::from_toml(&toml, error_sink), + } + } +} + fn get_field_json( json: &mut serde_json::Value, error_sink: &mut Vec<(String, serde_json::Error)>, @@ -3475,7 +3527,7 @@ mod tests { })); (config, _, _) = config.apply_change(change); - assert_eq!(config.cargo_targetDir(None), &None); + assert_eq!(config.cargo_targetDir(), &None); assert!( matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. } if options.target_dir.is_none()) ); @@ -3493,7 +3545,7 @@ mod tests { (config, _, _) = config.apply_change(change); - assert_eq!(config.cargo_targetDir(None), &Some(TargetDirectory::UseSubdirectory(true))); + assert_eq!(config.cargo_targetDir(), &Some(TargetDirectory::UseSubdirectory(true))); assert!( matches!(config.flycheck(), FlycheckConfig::CargoCommand { options, .. 
} if options.target_dir == Some(Utf8PathBuf::from("target/rust-analyzer"))) ); @@ -3512,7 +3564,7 @@ mod tests { (config, _, _) = config.apply_change(change); assert_eq!( - config.cargo_targetDir(None), + config.cargo_targetDir(), &Some(TargetDirectory::Directory(Utf8PathBuf::from("other_folder"))) ); assert!( diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index 34325ac7a93aa..1475d03ca6767 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -2113,9 +2113,8 @@ fn run_rustfmt( let edition = editions.iter().copied().max(); let line_index = snap.file_line_index(file_id)?; - let sr = snap.analysis.source_root_id(file_id)?; - let mut command = match snap.config.rustfmt(Some(sr)) { + let mut command = match snap.config.rustfmt() { RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => { // FIXME: Set RUSTUP_TOOLCHAIN let mut cmd = process::Command::new(toolchain::Tool::Rustfmt.path()); @@ -2303,8 +2302,9 @@ pub(crate) fn internal_testing_fetch_config( .transpose()?; serde_json::to_value(match &*params.config { "local" => state.config.assist(source_root).assist_emit_must_use, + // TODO Most probably will fail because it was not renamed to workspace "global" => matches!( - state.config.rustfmt(source_root), + state.config.rustfmt(), RustfmtConfig::Rustfmt { enable_range_formatting: true, .. } ), _ => return Err(anyhow::anyhow!("Unknown test config key: {}", params.config)), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs index 9610808c27e18..212294b5d326e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs @@ -67,7 +67,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { code_action_provider: Some(config.caps().code_action_capabilities()), code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(true) }), document_formatting_provider: Some(OneOf::Left(true)), - document_range_formatting_provider: match config.rustfmt(None) { + document_range_formatting_provider: match config.rustfmt() { RustfmtConfig::Rustfmt { enable_range_formatting: true, .. 
} => Some(OneOf::Left(true)), _ => Some(OneOf::Left(false)), }, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 884a8e83472ce..4a3e8ae050562 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -122,7 +122,7 @@ impl GlobalState { }; let mut message = String::new(); - if !self.config.cargo_autoreload(None) + if !self.config.cargo_autoreload() && self.is_quiescent() && self.fetch_workspaces_queue.op_requested() && self.config.discover_workspace_config().is_none() From 8e315988290eb8a937d56487f603c9a2a6338bfe Mon Sep 17 00:00:00 2001 From: Ali Bektas Date: Sat, 17 Aug 2024 01:48:23 +0200 Subject: [PATCH 032/124] Next up : generating configs for workspace level configs --- .../crates/rust-analyzer/src/config.rs | 70 +++++++++---------- .../rust-analyzer/src/handlers/request.rs | 8 +-- .../rust-analyzer/src/lsp/capabilities.rs | 2 +- .../rust-analyzer/tests/slow-tests/ratoml.rs | 18 +++-- .../docs/user/generated_config.adoc | 21 ------ .../rust-analyzer/editors/code/package.json | 39 ----------- 6 files changed, 48 insertions(+), 110 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index ab27a65de5afe..437f7c33ced17 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -294,18 +294,6 @@ config_data! { /// This option does not take effect until rust-analyzer is restarted. rustc_source: Option = None, - /// Additional arguments to `rustfmt`. - rustfmt_extraArgs: Vec = vec![], - /// Advanced option, fully override the command rust-analyzer uses for - /// formatting. This should be the equivalent of `rustfmt` here, and - /// not that of `cargo fmt`. The file contents will be passed on the - /// standard input and the formatted result will be read from the - /// standard output. - rustfmt_overrideCommand: Option> = None, - /// Enables the use of rustfmt's unstable range formatting command for the - /// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only - /// available on a nightly build. - rustfmt_rangeFormatting_enable: bool = false, /// Enables automatic discovery of projects using [`DiscoverWorkspaceConfig::command`]. /// @@ -439,6 +427,18 @@ config_data! { config_data! { workspace: struct WorkspaceDefaultConfigData <- WorkspaceConfigInput -> { + /// Additional arguments to `rustfmt`. + rustfmt_extraArgs: Vec = vec![], + /// Advanced option, fully override the command rust-analyzer uses for + /// formatting. This should be the equivalent of `rustfmt` here, and + /// not that of `cargo fmt`. The file contents will be passed on the + /// standard input and the formatted result will be read from the + /// standard output. + rustfmt_overrideCommand: Option> = None, + /// Enables the use of rustfmt's unstable range formatting command for the + /// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only + /// available on a nightly build. + rustfmt_rangeFormatting_enable: bool = false, } } @@ -775,7 +775,7 @@ pub struct Config { user_config_path: VfsPath, /// Config node whose values apply to **every** Rust project. 
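For readers following the generated getters, the precedence for a workspace-level setting is: the nearest `rust-analyzer.toml` along the source-root chain, then the client (LSP) configuration, then the user-level config node mentioned above, then the built-in default. A self-contained sketch of that lookup chain; the function and parameter names are simplified stand-ins, not the real `Config` API:

```rust
// Sketch of the lookup order only; the real code is generated per field by the
// `_impl_for_config_data` macro and works on `Config`'s own input types.
fn resolve<'a, T>(
    ratoml_chain: &'a [Option<T>], // innermost source root first
    client: &'a Option<T>,
    user: &'a Option<T>,
    default: &'a T,
) -> &'a T {
    ratoml_chain
        .iter()
        .find_map(|it| it.as_ref())
        .or(client.as_ref())
        .or(user.as_ref())
        .unwrap_or(default)
}

fn main() {
    let default = 4u32;
    // A rust-analyzer.toml two levels up sets 2; it wins over user config and default.
    assert_eq!(*resolve(&[None, Some(2)], &None, &Some(8), &default), 2);
    // Nothing set anywhere: fall back to the built-in default.
    assert_eq!(*resolve(&[None, None], &None, &None, &default), 4);
}
```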
- user_config: Option<(GlobalLocalConfigInput, ConfigErrors)>, + user_config: Option<(GlobalWorkspaceLocalConfigInput, ConfigErrors)>, ratoml_file: FxHashMap, @@ -825,13 +825,13 @@ impl Config { if let Ok(table) = toml::from_str(&change) { let mut toml_errors = vec![]; validate_toml_table( - GlobalLocalConfigInput::FIELDS, + GlobalWorkspaceLocalConfigInput::FIELDS, &table, &mut String::new(), &mut toml_errors, ); config.user_config = Some(( - GlobalLocalConfigInput::from_toml(table, &mut toml_errors), + GlobalWorkspaceLocalConfigInput::from_toml(table, &mut toml_errors), ConfigErrors( toml_errors .into_iter() @@ -960,7 +960,7 @@ impl Config { match toml::from_str(&text) { Ok(table) => { validate_toml_table( - GlobalLocalConfigInput::FIELDS, + WorkspaceLocalConfigInput::FIELDS, &table, &mut String::new(), &mut toml_errors, @@ -1863,16 +1863,16 @@ impl Config { } } - pub fn rustfmt(&self) -> RustfmtConfig { - match &self.rustfmt_overrideCommand() { + pub fn rustfmt(&self, source_root_id: Option) -> RustfmtConfig { + match &self.rustfmt_overrideCommand(source_root_id) { Some(args) if !args.is_empty() => { let mut args = args.clone(); let command = args.remove(0); RustfmtConfig::CustomCommand { command, args } } Some(_) | None => RustfmtConfig::Rustfmt { - extra_args: self.rustfmt_extraArgs().clone(), - enable_range_formatting: *self.rustfmt_rangeFormatting_enable(), + extra_args: self.rustfmt_extraArgs(source_root_id).clone(), + enable_range_formatting: *self.rustfmt_rangeFormatting_enable(source_root_id), }, } } @@ -2555,24 +2555,20 @@ macro_rules! _impl_for_config_data { $vis fn $field(&self, source_root: Option) -> &$ty { let mut source_root = source_root.as_ref(); while let Some(sr) = source_root { - if let Some((file, _)) = self.ratoml_file.get(&sr) { - match file { - RatomlFile::Workspace(config) => { - if let Some(v) = config.workspace.$field.as_ref() { - return &v; - } - }, + if let Some((RatomlFile::Workspace(config), _)) = self.ratoml_file.get(&sr) { + if let Some(v) = config.workspace.$field.as_ref() { + return &v; } } source_root = self.source_root_parent_map.get(&sr); } - if let Some(v) = self.client_config.0.local.$field.as_ref() { + if let Some(v) = self.client_config.0.workspace.$field.as_ref() { return &v; } if let Some((user_config, _)) = self.user_config.as_ref() { - if let Some(v) = user_config.local.$field.as_ref() { + if let Some(v) = user_config.workspace.$field.as_ref() { return &v; } } @@ -2591,7 +2587,6 @@ macro_rules! _impl_for_config_data { $( $($doc)* #[allow(non_snake_case)] - // TODO Remove source_root $vis fn $field(&self) -> &$ty { if let Some(v) = self.client_config.0.global.$field.as_ref() { return &v; @@ -2730,6 +2725,7 @@ use _config_data as config_data; #[derive(Default, Debug, Clone)] struct DefaultConfigData { global: GlobalDefaultConfigData, + workspace: WorkspaceDefaultConfigData, local: LocalDefaultConfigData, client: ClientDefaultConfigData, } @@ -2740,6 +2736,7 @@ struct DefaultConfigData { #[derive(Debug, Clone, Default)] struct FullConfigInput { global: GlobalConfigInput, + workspace: WorkspaceConfigInput, local: LocalConfigInput, client: ClientConfigInput, } @@ -2753,6 +2750,7 @@ impl FullConfigInput { global: GlobalConfigInput::from_json(&mut json, error_sink), local: LocalConfigInput::from_json(&mut json, error_sink), client: ClientConfigInput::from_json(&mut json, error_sink), + workspace: WorkspaceConfigInput::from_json(&mut json, error_sink), } } @@ -2783,26 +2781,28 @@ impl FullConfigInput { /// some rust-analyzer.toml file or JSON blob. 
An empty rust-analyzer.toml corresponds to /// all fields being None. #[derive(Debug, Clone, Default)] -struct GlobalLocalConfigInput { +struct GlobalWorkspaceLocalConfigInput { global: GlobalConfigInput, local: LocalConfigInput, + workspace: WorkspaceConfigInput, } -impl GlobalLocalConfigInput { +impl GlobalWorkspaceLocalConfigInput { const FIELDS: &'static [&'static [&'static str]] = &[GlobalConfigInput::FIELDS, LocalConfigInput::FIELDS]; fn from_toml( toml: toml::Table, error_sink: &mut Vec<(String, toml::de::Error)>, - ) -> GlobalLocalConfigInput { - GlobalLocalConfigInput { + ) -> GlobalWorkspaceLocalConfigInput { + GlobalWorkspaceLocalConfigInput { global: GlobalConfigInput::from_toml(&toml, error_sink), local: LocalConfigInput::from_toml(&toml, error_sink), + workspace: WorkspaceConfigInput::from_toml(&toml, error_sink), } } } -/// All of the config levels, all fields `Option`, to describe fields that are actually set by +/// Workspace and local config levels, all fields `Option`, to describe fields that are actually set by /// some rust-analyzer.toml file or JSON blob. An empty rust-analyzer.toml corresponds to /// all fields being None. #[derive(Debug, Clone, Default)] diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index 1475d03ca6767..d78bd3b44d31e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -2113,8 +2113,9 @@ fn run_rustfmt( let edition = editions.iter().copied().max(); let line_index = snap.file_line_index(file_id)?; + let source_root_id = snap.analysis.source_root_id(file_id).ok(); - let mut command = match snap.config.rustfmt() { + let mut command = match snap.config.rustfmt(source_root_id) { RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => { // FIXME: Set RUSTUP_TOOLCHAIN let mut cmd = process::Command::new(toolchain::Tool::Rustfmt.path()); @@ -2302,9 +2303,8 @@ pub(crate) fn internal_testing_fetch_config( .transpose()?; serde_json::to_value(match &*params.config { "local" => state.config.assist(source_root).assist_emit_must_use, - // TODO Most probably will fail because it was not renamed to workspace - "global" => matches!( - state.config.rustfmt(), + "workspace" => matches!( + state.config.rustfmt(source_root), RustfmtConfig::Rustfmt { enable_range_formatting: true, .. } ), _ => return Err(anyhow::anyhow!("Unknown test config key: {}", params.config)), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs index 212294b5d326e..9610808c27e18 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs @@ -67,7 +67,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { code_action_provider: Some(config.caps().code_action_capabilities()), code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(true) }), document_formatting_provider: Some(OneOf::Left(true)), - document_range_formatting_provider: match config.rustfmt() { + document_range_formatting_provider: match config.rustfmt(None) { RustfmtConfig::Rustfmt { enable_range_formatting: true, .. 
} => Some(OneOf::Left(true)), _ => Some(OneOf::Left(false)), }, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs index c06ba9eee1477..8cb6409a84854 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/ratoml.rs @@ -17,7 +17,7 @@ enum QueryType { /// A query whose config key is a part of the global configs, so that /// testing for changes to this config means testing if global changes /// take affect. - Global, + Workspace, } struct RatomlTest { @@ -165,7 +165,7 @@ impl RatomlTest { fn query(&self, query: QueryType, source_file_idx: usize) -> bool { let config = match query { QueryType::Local => "local".to_owned(), - QueryType::Global => "global".to_owned(), + QueryType::Workspace => "workspace".to_owned(), }; let res = self.server.send_request::( InternalTestingFetchConfigParams { @@ -823,10 +823,8 @@ fn ratoml_multiple_ratoml_in_single_source_root() { // assert!(!server.query(QueryType::AssistEmitMustUse, 5)); // } -/// Having a ratoml file at the root of a project enables -/// configuring global level configurations as well. #[test] -fn ratoml_in_root_is_global() { +fn ratoml_in_root_is_workspace() { if skip_slow_tests() { return; } @@ -854,7 +852,7 @@ fn main() { None, ); - assert!(server.query(QueryType::Global, 2)); + assert!(server.query(QueryType::Workspace, 2)); } #[test] @@ -886,9 +884,9 @@ fn main() { None, ); - assert!(server.query(QueryType::Global, 2)); + assert!(server.query(QueryType::Workspace, 2)); server.edit(1, "rustfmt.rangeFormatting.enable = false".to_owned()); - assert!(!server.query(QueryType::Global, 2)); + assert!(!server.query(QueryType::Workspace, 2)); } #[test] @@ -920,7 +918,7 @@ fn main() { None, ); - assert!(server.query(QueryType::Global, 2)); + assert!(server.query(QueryType::Workspace, 2)); server.delete(1); - assert!(!server.query(QueryType::Global, 2)); + assert!(!server.query(QueryType::Workspace, 2)); } diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc index e4a8c6493a883..1ea679d2a52b6 100644 --- a/src/tools/rust-analyzer/docs/user/generated_config.adoc +++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc @@ -878,27 +878,6 @@ crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it. This option does not take effect until rust-analyzer is restarted. -- -[[rust-analyzer.rustfmt.extraArgs]]rust-analyzer.rustfmt.extraArgs (default: `[]`):: -+ --- -Additional arguments to `rustfmt`. --- -[[rust-analyzer.rustfmt.overrideCommand]]rust-analyzer.rustfmt.overrideCommand (default: `null`):: -+ --- -Advanced option, fully override the command rust-analyzer uses for -formatting. This should be the equivalent of `rustfmt` here, and -not that of `cargo fmt`. The file contents will be passed on the -standard input and the formatted result will be read from the -standard output. --- -[[rust-analyzer.rustfmt.rangeFormatting.enable]]rust-analyzer.rustfmt.rangeFormatting.enable (default: `false`):: -+ --- -Enables the use of rustfmt's unstable range formatting command for the -`textDocument/rangeFormatting` request. The rustfmt option is unstable and only -available on a nightly build. 
--- [[rust-analyzer.semanticHighlighting.doc.comment.inject.enable]]rust-analyzer.semanticHighlighting.doc.comment.inject.enable (default: `true`):: + -- diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 98e8bbf02aa55..15e08222ad8e0 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -2358,45 +2358,6 @@ } } }, - { - "title": "rustfmt", - "properties": { - "rust-analyzer.rustfmt.extraArgs": { - "markdownDescription": "Additional arguments to `rustfmt`.", - "default": [], - "type": "array", - "items": { - "type": "string" - } - } - } - }, - { - "title": "rustfmt", - "properties": { - "rust-analyzer.rustfmt.overrideCommand": { - "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for\nformatting. This should be the equivalent of `rustfmt` here, and\nnot that of `cargo fmt`. The file contents will be passed on the\nstandard input and the formatted result will be read from the\nstandard output.", - "default": null, - "type": [ - "null", - "array" - ], - "items": { - "type": "string" - } - } - } - }, - { - "title": "rustfmt", - "properties": { - "rust-analyzer.rustfmt.rangeFormatting.enable": { - "markdownDescription": "Enables the use of rustfmt's unstable range formatting command for the\n`textDocument/rangeFormatting` request. The rustfmt option is unstable and only\navailable on a nightly build.", - "default": false, - "type": "boolean" - } - } - }, { "title": "semanticHighlighting", "properties": { From 1b2eaa59b293fe9ef373c4b9642cde292db56061 Mon Sep 17 00:00:00 2001 From: Ali Bektas Date: Sat, 17 Aug 2024 02:32:38 +0200 Subject: [PATCH 033/124] Old configs are back --- .../crates/rust-analyzer/src/config.rs | 1 + .../docs/user/generated_config.adoc | 21 ++++++++++ .../rust-analyzer/editors/code/package.json | 39 +++++++++++++++++++ 3 files changed, 61 insertions(+) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 437f7c33ced17..fd2979be27050 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -2759,6 +2759,7 @@ impl FullConfigInput { GlobalConfigInput::schema_fields(&mut fields); LocalConfigInput::schema_fields(&mut fields); ClientConfigInput::schema_fields(&mut fields); + WorkspaceConfigInput::schema_fields(&mut fields); fields.sort_by_key(|&(x, ..)| x); fields .iter() diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc index 1ea679d2a52b6..e4a8c6493a883 100644 --- a/src/tools/rust-analyzer/docs/user/generated_config.adoc +++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc @@ -878,6 +878,27 @@ crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it. This option does not take effect until rust-analyzer is restarted. -- +[[rust-analyzer.rustfmt.extraArgs]]rust-analyzer.rustfmt.extraArgs (default: `[]`):: ++ +-- +Additional arguments to `rustfmt`. +-- +[[rust-analyzer.rustfmt.overrideCommand]]rust-analyzer.rustfmt.overrideCommand (default: `null`):: ++ +-- +Advanced option, fully override the command rust-analyzer uses for +formatting. This should be the equivalent of `rustfmt` here, and +not that of `cargo fmt`. The file contents will be passed on the +standard input and the formatted result will be read from the +standard output. 
+-- +[[rust-analyzer.rustfmt.rangeFormatting.enable]]rust-analyzer.rustfmt.rangeFormatting.enable (default: `false`):: ++ +-- +Enables the use of rustfmt's unstable range formatting command for the +`textDocument/rangeFormatting` request. The rustfmt option is unstable and only +available on a nightly build. +-- [[rust-analyzer.semanticHighlighting.doc.comment.inject.enable]]rust-analyzer.semanticHighlighting.doc.comment.inject.enable (default: `true`):: + -- diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index 15e08222ad8e0..98e8bbf02aa55 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -2358,6 +2358,45 @@ } } }, + { + "title": "rustfmt", + "properties": { + "rust-analyzer.rustfmt.extraArgs": { + "markdownDescription": "Additional arguments to `rustfmt`.", + "default": [], + "type": "array", + "items": { + "type": "string" + } + } + } + }, + { + "title": "rustfmt", + "properties": { + "rust-analyzer.rustfmt.overrideCommand": { + "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for\nformatting. This should be the equivalent of `rustfmt` here, and\nnot that of `cargo fmt`. The file contents will be passed on the\nstandard input and the formatted result will be read from the\nstandard output.", + "default": null, + "type": [ + "null", + "array" + ], + "items": { + "type": "string" + } + } + } + }, + { + "title": "rustfmt", + "properties": { + "rust-analyzer.rustfmt.rangeFormatting.enable": { + "markdownDescription": "Enables the use of rustfmt's unstable range formatting command for the\n`textDocument/rangeFormatting` request. The rustfmt option is unstable and only\navailable on a nightly build.", + "default": false, + "type": "boolean" + } + } + }, { "title": "semanticHighlighting", "properties": { From 94f206a13936cd1897554c91c377b37915eb338e Mon Sep 17 00:00:00 2001 From: Ali Bektas Date: Tue, 20 Aug 2024 18:13:22 +0200 Subject: [PATCH 034/124] Run flycheck only on crate if target is binary. --- .../crates/rust-analyzer/src/flycheck.rs | 30 +++++++---- .../src/handlers/notification.rs | 51 +++++++++++++------ .../crates/rust-analyzer/src/main_loop.rs | 4 +- 3 files changed, 58 insertions(+), 27 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index cf13a7386379b..0e859f327510a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -118,14 +118,14 @@ impl FlycheckHandle { } /// Schedule a re-start of the cargo check worker to do a workspace wide check. - pub(crate) fn restart_workspace(&self, saved_file: Option) { - self.sender.send(StateChange::Restart { package: None, saved_file }).unwrap(); + pub(crate) fn restart_workspace(&self, saved_file: Option, target: Option) { + self.sender.send(StateChange::Restart { package: None, saved_file, target }).unwrap(); } /// Schedule a re-start of the cargo check worker to do a package wide check. 
- pub(crate) fn restart_for_package(&self, package: String) { + pub(crate) fn restart_for_package(&self, package: String, target: Option) { self.sender - .send(StateChange::Restart { package: Some(package), saved_file: None }) + .send(StateChange::Restart { package: Some(package), saved_file: None, target }) .unwrap(); } @@ -183,7 +183,7 @@ pub(crate) enum Progress { } enum StateChange { - Restart { package: Option, saved_file: Option }, + Restart { package: Option, saved_file: Option, target: Option }, Cancel, } @@ -271,7 +271,7 @@ impl FlycheckActor { tracing::debug!(flycheck_id = self.id, "flycheck cancelled"); self.cancel_check_process(); } - Event::RequestStateChange(StateChange::Restart { package, saved_file }) => { + Event::RequestStateChange(StateChange::Restart { package, saved_file, target }) => { // Cancel the previously spawned process self.cancel_check_process(); while let Ok(restart) = inbox.recv_timeout(Duration::from_millis(50)) { @@ -281,11 +281,14 @@ impl FlycheckActor { } } - let command = - match self.check_command(package.as_deref(), saved_file.as_deref()) { - Some(c) => c, - None => continue, - }; + let command = match self.check_command( + package.as_deref(), + saved_file.as_deref(), + target.as_deref(), + ) { + Some(c) => c, + None => continue, + }; let formatted_command = format!("{command:?}"); tracing::debug!(?command, "will restart flycheck"); @@ -381,6 +384,7 @@ impl FlycheckActor { &self, package: Option<&str>, saved_file: Option<&AbsPath>, + bin_target: Option<&str>, ) -> Option { match &self.config { FlycheckConfig::CargoCommand { command, options, ansi_color_output } => { @@ -396,6 +400,10 @@ impl FlycheckActor { None => cmd.arg("--workspace"), }; + if let Some(tgt) = bin_target { + cmd.arg("--bin").arg(tgt); + } + cmd.arg(if *ansi_color_output { "--message-format=json-diagnostic-rendered-ansi" } else { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index de5d1f231368b..fb7caf652d06a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -20,6 +20,7 @@ use crate::{ lsp_ext::{self, RunFlycheckParams}, mem_docs::DocumentData, reload, + target_spec::TargetSpec, }; pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> anyhow::Result<()> { @@ -185,7 +186,7 @@ pub(crate) fn handle_did_save_text_document( } else if state.config.check_on_save() { // No specific flycheck was triggered, so let's trigger all of them. for flycheck in state.flycheck.iter() { - flycheck.restart_workspace(None); + flycheck.restart_workspace(None, None); } } Ok(()) @@ -287,16 +288,33 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { let world = state.snapshot(); let mut updated = false; let task = move || -> std::result::Result<(), ide::Cancelled> { - // Trigger flychecks for all workspaces that depend on the saved file - // Crates containing or depending on the saved file - let crate_ids: Vec<_> = world - .analysis - .crates_for(file_id)? - .into_iter() - .flat_map(|id| world.analysis.transitive_rev_deps(id)) - .flatten() - .unique() - .collect(); + // Is the target binary? If so we let flycheck run only for the workspace that contains the crate. 
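The comment above describes the narrowing this commit is after: when the saved file belongs to a binary target, `cargo check` can be restricted to that one binary instead of every workspace that depends on the crate. A self-contained sketch of how the extra `--bin` argument composes with the existing package/workspace choice; the helper below is illustrative argument building, not the real `FlycheckActor` code:

```rust
// Builds the cargo argument list the way the patch describes: `-p <package>` or
// `--workspace`, plus `--bin <target>` when the saved file is a binary target.
fn check_args(package: Option<&str>, bin_target: Option<&str>) -> Vec<String> {
    let mut args = vec!["check".to_owned()];
    match package {
        Some(p) => {
            args.push("-p".to_owned());
            args.push(p.to_owned());
        }
        None => args.push("--workspace".to_owned()),
    }
    if let Some(bin) = bin_target {
        args.push("--bin".to_owned());
        args.push(bin.to_owned());
    }
    args
}

fn main() {
    assert_eq!(
        check_args(Some("my-crate"), Some("my-bin")),
        ["check", "-p", "my-crate", "--bin", "my-bin"]
    );
    assert_eq!(check_args(None, None), ["check", "--workspace"]);
}
```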
+ let target_is_bin = TargetSpec::for_file(&world, file_id)?.and_then(|x| { + if x.target_kind() == project_model::TargetKind::Bin { + return match x { + TargetSpec::Cargo(c) => Some(c.target), + TargetSpec::ProjectJson(p) => Some(p.label), + }; + } + + None + }); + + let crate_ids = if target_is_bin.is_some() { + // Trigger flychecks for the only workspace which the binary crate belongs to + world.analysis.crates_for(file_id)?.into_iter().unique().collect::>() + } else { + // Trigger flychecks for all workspaces that depend on the saved file + // Crates containing or depending on the saved file + world + .analysis + .crates_for(file_id)? + .into_iter() + .flat_map(|id| world.analysis.transitive_rev_deps(id)) + .flatten() + .unique() + .collect::>() + }; let crate_root_paths: Vec<_> = crate_ids .iter() @@ -347,8 +365,11 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { if id == flycheck.id() { updated = true; match package.filter(|_| !world.config.flycheck_workspace()) { - Some(package) => flycheck.restart_for_package(package), - None => flycheck.restart_workspace(saved_file.clone()), + Some(package) => { + flycheck.restart_for_package(package, target_is_bin.clone()) + } + None => flycheck + .restart_workspace(saved_file.clone(), target_is_bin.clone()), } continue; } @@ -357,7 +378,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { // No specific flycheck was triggered, so let's trigger all of them. if !updated { for flycheck in world.flycheck.iter() { - flycheck.restart_workspace(saved_file.clone()); + flycheck.restart_workspace(saved_file.clone(), None); } } Ok(()) @@ -399,7 +420,7 @@ pub(crate) fn handle_run_flycheck( } // No specific flycheck was triggered, so let's trigger all of them. for flycheck in state.flycheck.iter() { - flycheck.restart_workspace(None); + flycheck.restart_workspace(None, None); } Ok(()) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 1d4ee71e5c1a0..79a209f8bfe23 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -406,7 +406,9 @@ impl GlobalState { if became_quiescent { if self.config.check_on_save() { // Project has loaded properly, kick off initial flycheck - self.flycheck.iter().for_each(|flycheck| flycheck.restart_workspace(None)); + self.flycheck + .iter() + .for_each(|flycheck| flycheck.restart_workspace(None, None)); } if self.config.prefill_caches() { self.prime_caches_queue.request_op("became quiescent".to_owned(), ()); From 3f89eebd20955edf89477f72e99fbf9ab0efc936 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 21 Aug 2024 12:22:15 +0200 Subject: [PATCH 035/124] internal: Implement `module_path` macro --- .../rust-analyzer/crates/base-db/src/input.rs | 1 + .../rust-analyzer/crates/hir-def/src/data.rs | 3 +- .../crates/hir-def/src/expander.rs | 9 ++- .../rust-analyzer/crates/hir-def/src/lib.rs | 44 +++++++------ .../hir-def/src/macro_expansion_tests/mod.rs | 15 +++-- .../crates/hir-def/src/nameres.rs | 28 +++++++- .../crates/hir-def/src/nameres/collector.rs | 27 ++++++-- .../crates/hir-expand/src/builtin.rs | 2 +- .../crates/hir-expand/src/builtin/fn_macro.rs | 30 +++++---- .../crates/hir-expand/src/builtin/quote.rs | 4 +- .../crates/hir-expand/src/eager.rs | 45 +++++++++++++ .../rust-analyzer/crates/hir/src/semantics.rs | 25 +++++-- .../crates/hir/src/source_analyzer.rs | 21 ++++-- 
.../crates/ide/src/goto_type_definition.rs | 8 ++- .../crates/ide/src/hover/tests.rs | 65 +++++++++++++++++++ .../rust-analyzer/crates/ide/src/status.rs | 1 + .../crates/intern/src/symbol/symbols.rs | 1 + .../crates/rust-analyzer/src/cli/lsif.rs | 8 +-- .../crates/rust-analyzer/src/cli/scip.rs | 2 - .../crates/test-utils/src/minicore.rs | 11 ++++ 20 files changed, 279 insertions(+), 71 deletions(-) diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index 3616fa9fd8684..b67f4c7a0cc4b 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -272,6 +272,7 @@ impl ReleaseChannel { } } +#[non_exhaustive] #[derive(Debug, Clone, PartialEq, Eq)] pub struct CrateData { pub root_file_id: FileId, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs index d17ebd7ff920b..c62f2f8733832 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs @@ -748,8 +748,9 @@ impl<'a> AssocItemCollector<'a> { &AstIdWithPath::new(file_id, ast_id, Clone::clone(path)), ctxt, expand_to, - self.expander.krate(), + self.expander.module, resolver, + |module| module.def_map(self.db).path_for_module(self.db, module), ) { Ok(Some(call_id)) => { let res = diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs index 6d8b4445f75bc..57bd54ed17c57 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs @@ -69,9 +69,12 @@ impl Expander { let result = self.within_limit(db, |this| { let macro_call = this.in_file(¯o_call); - match macro_call.as_call_id_with_errors(db.upcast(), this.module.krate(), |path| { - resolver(path).map(|it| db.macro_def(it)) - }) { + match macro_call.as_call_id( + db.upcast(), + this.module, + |path| resolver(path).map(|it| db.macro_def(it)), + |module| this.module.def_map(db).path_for_module(db, module), + ) { Ok(call_id) => call_id, Err(resolve_err) => { unresolved_macro_err = Some(resolve_err); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index 4ced30c81dced..40ead05b9050a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -77,7 +77,7 @@ use base_db::{ use hir_expand::{ builtin::{BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, - eager::expand_eager_macro_input, + eager::{expand_eager_macro_input, expand_module_path_as_eager}, impl_intern_lookup, name::Name, proc_macro::{CustomProcMacroExpander, ProcMacroKind}, @@ -1400,26 +1400,19 @@ pub trait AsMacroCall { fn as_call_id( &self, db: &dyn ExpandDatabase, - krate: CrateId, - resolver: impl Fn(&path::ModPath) -> Option + Copy, - ) -> Option { - self.as_call_id_with_errors(db, krate, resolver).ok()?.value - } - - fn as_call_id_with_errors( - &self, - db: &dyn ExpandDatabase, - krate: CrateId, + module: ModuleId, resolver: impl Fn(&path::ModPath) -> Option + Copy, + mod_path: impl FnOnce(ModuleId) -> String, ) -> Result>, UnresolvedMacro>; } impl AsMacroCall for InFile<&ast::MacroCall> { - fn as_call_id_with_errors( + fn as_call_id( &self, db: &dyn ExpandDatabase, - krate: CrateId, + module: ModuleId, resolver: impl Fn(&path::ModPath) -> Option + Copy, + mod_path: impl 
FnOnce(ModuleId) -> String, ) -> Result>, UnresolvedMacro> { let expands_to = hir_expand::ExpandTo::from_call_site(self.value); let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value)); @@ -1446,9 +1439,10 @@ impl AsMacroCall for InFile<&ast::MacroCall> { &path, call_site.ctx, expands_to, - krate, + module, resolver, resolver, + mod_path, ) } } @@ -1475,8 +1469,9 @@ fn macro_call_as_call_id( call: &AstIdWithPath, call_site: SyntaxContextId, expand_to: ExpandTo, - krate: CrateId, + module: ModuleId, resolver: impl Fn(&path::ModPath) -> Option + Copy, + mod_path: impl FnOnce(ModuleId) -> String, ) -> Result, UnresolvedMacro> { macro_call_as_call_id_with_eager( db, @@ -1484,9 +1479,10 @@ fn macro_call_as_call_id( &call.path, call_site, expand_to, - krate, + module, resolver, resolver, + mod_path, ) .map(|res| res.value) } @@ -1497,16 +1493,26 @@ fn macro_call_as_call_id_with_eager( path: &path::ModPath, call_site: SyntaxContextId, expand_to: ExpandTo, - krate: CrateId, + module: ModuleId, resolver: impl FnOnce(&path::ModPath) -> Option, eager_resolver: impl Fn(&path::ModPath) -> Option, + mod_path: impl FnOnce(ModuleId) -> String, ) -> Result>, UnresolvedMacro> { let def = resolver(path).ok_or_else(|| UnresolvedMacro { path: path.clone() })?; let res = match def.kind { + MacroDefKind::BuiltInEager(_, EagerExpander::ModulePath) => expand_module_path_as_eager( + db, + module.krate, + mod_path(module), + &ast_id.to_node(db), + ast_id, + def, + call_site, + ), MacroDefKind::BuiltInEager(..) => expand_eager_macro_input( db, - krate, + module.krate, &ast_id.to_node(db), ast_id, def, @@ -1516,7 +1522,7 @@ fn macro_call_as_call_id_with_eager( _ if def.is_fn_like() => ExpandResult { value: Some(def.make_call( db, - krate, + module.krate, MacroCallKind::FnLike { ast_id, expand_to, eager: None }, call_site, )), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index 7f761192517c4..0d232deb2ced1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -95,11 +95,16 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) { let macro_call = InFile::new(source.file_id, ¯o_call); let res = macro_call - .as_call_id_with_errors(&db, krate, |path| { - resolver - .resolve_path_as_macro(&db, path, Some(MacroSubNs::Bang)) - .map(|(it, _)| db.macro_def(it)) - }) + .as_call_id( + &db, + resolver.module(), + |path| { + resolver + .resolve_path_as_macro(&db, path, Some(MacroSubNs::Bang)) + .map(|(it, _)| db.macro_def(it)) + }, + |module| def_map.path_for_module(&db, module), + ) .unwrap(); let macro_call_id = res.value.unwrap(); let macro_file = MacroFileId { macro_call_id }; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 11601c683e1ea..cd8db00c8b2d1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -63,7 +63,7 @@ use base_db::CrateId; use hir_expand::{ name::Name, proc_macro::ProcMacroKind, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, }; -use intern::Symbol; +use intern::{sym, Symbol}; use itertools::Itertools; use la_arena::Arena; use rustc_hash::{FxHashMap, FxHashSet}; @@ -139,6 +139,7 @@ 
pub struct DefMap { /// Data that belongs to a crate which is shared between a crate's def map and all its block def maps. #[derive(Clone, Debug, PartialEq, Eq)] struct DefMapCrateData { + crate_name: Option, /// The extern prelude which contains all root modules of external crates that are in scope. extern_prelude: FxIndexMap)>, @@ -164,6 +165,7 @@ struct DefMapCrateData { impl DefMapCrateData { fn new(edition: Edition) -> Self { Self { + crate_name: None, extern_prelude: FxIndexMap::default(), exported_derives: FxHashMap::default(), fn_proc_macro_mapping: FxHashMap::default(), @@ -186,6 +188,7 @@ impl DefMapCrateData { registered_attrs, registered_tools, unstable_features, + crate_name: _, rustc_coherence_is_core: _, no_core: _, no_std: _, @@ -443,6 +446,29 @@ impl DefMap { self.modules.iter() } + pub fn path_for_module(&self, db: &dyn DefDatabase, mut module: ModuleId) -> String { + debug_assert!(module.krate == self.krate && module.block == self.block.map(|b| b.block)); + let mut parts = vec![]; + if let Some(name) = module.name(db) { + parts.push(name.symbol().clone()); + } + while let Some(parent) = module.def_map(db).containing_module(module.local_id) { + module = parent; + if let Some(name) = module.name(db) { + parts.push(name.symbol().clone()); + } + if parts.len() > 10 { + break; + } + } + // FIXME: crate_name should be non-optional but we don't pass the name through yet + parts.push(match &self.data.crate_name { + Some(name) => name.clone(), + None => sym::crate_.clone(), + }); + parts.into_iter().rev().format("::").to_string() + } + pub fn derive_helpers_in_scope( &self, id: AstId, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 22eb5a174d393..30df6c206dfb4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -247,18 +247,23 @@ impl DefCollector<'_> { let _p = tracing::info_span!("seed_with_top_level").entered(); let crate_graph = self.db.crate_graph(); + let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap(); + crate_data.crate_name = crate_graph[self.def_map.krate] + .display_name + .as_ref() + .map(|it| it.crate_name().symbol().clone()); + let file_id = crate_graph[self.def_map.krate].root_file_id(); let item_tree = self.db.file_item_tree(file_id.into()); let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate); - let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap(); - let mut process = true; + let mut crate_cged_out = false; // Process other crate-level attributes. 
for attr in &*attrs { if let Some(cfg) = attr.cfg() { if self.cfg_options.check(&cfg) == Some(false) { - process = false; + crate_cged_out = true; break; } } @@ -272,6 +277,11 @@ impl DefCollector<'_> { } } } + () if *attr_name == sym::crate_name.clone() => { + if let Some(name) = attr.string_value().cloned() { + crate_data.crate_name = Some(name); + } + } () if *attr_name == sym::crate_type.clone() => { if attr.string_value() == Some(&sym::proc_dash_macro) { self.is_proc_macro = true; @@ -337,7 +347,7 @@ impl DefCollector<'_> { self.inject_prelude(); - if !process { + if crate_cged_out { return; } @@ -1207,8 +1217,9 @@ impl DefCollector<'_> { ast_id, *call_site, *expand_to, - self.def_map.krate, + self.def_map.module_id(directive.module_id), resolver_def_id, + |module| self.def_map.path_for_module(self.db, module), ); if let Ok(Some(call_id)) = call_id { self.def_map.modules[directive.module_id] @@ -1486,7 +1497,7 @@ impl DefCollector<'_> { ast_id, *call_site, *expand_to, - self.def_map.krate, + self.def_map.module_id(directive.module_id), |path| { let resolved_res = self.def_map.resolve_path_fp_with_macro( self.db, @@ -1498,6 +1509,7 @@ impl DefCollector<'_> { ); resolved_res.resolved_def.take_macros().map(|it| self.db.macro_def(it)) }, + |module| self.def_map.path_for_module(self.db, module), ); if let Err(UnresolvedMacro { path }) = macro_call_as_call_id { self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call( @@ -2351,7 +2363,7 @@ impl ModCollector<'_, '_> { &ast_id.path, ctxt, expand_to, - self.def_collector.def_map.krate, + self.def_collector.def_map.module_id(self.module_id), |path| { path.as_ident().and_then(|name| { let def_map = &self.def_collector.def_map; @@ -2381,6 +2393,7 @@ impl ModCollector<'_, '_> { ); resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it)) }, + |module| self.def_collector.def_map.path_for_module(self.def_collector.db, module), ) { // FIXME: if there were errors, this might've been in the eager expansion from an // unresolved macro, so we need to push this into late macro resolution. see fixme above diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs index 252430e4e954a..7b9cb4e793c91 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs @@ -1,6 +1,6 @@ //! Builtin macros and attributes #[macro_use] -mod quote; +pub(crate) mod quote; mod attr_macro; mod derive_macro; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs index 795d9b14df961..e4d09ddb3abcc 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs @@ -116,7 +116,6 @@ register_builtin! { (column, Column) => line_expand, (file, File) => file_expand, (line, Line) => line_expand, - (module_path, ModulePath) => module_path_expand, (assert, Assert) => assert_expand, (stringify, Stringify) => stringify_expand, (llvm_asm, LlvmAsm) => asm_expand, @@ -142,7 +141,10 @@ register_builtin! 
{ (include_bytes, IncludeBytes) => include_bytes_expand, (include_str, IncludeStr) => include_str_expand, (env, Env) => env_expand, - (option_env, OptionEnv) => option_env_expand + (option_env, OptionEnv) => option_env_expand, + // This isn't really eager, we have no inputs, but we abuse the fact how eager macros are + // handled in r-a to be able to thread the module path through. + (module_path, ModulePath) => module_path_expand } fn mk_pound(span: Span) -> tt::Subtree { @@ -157,18 +159,6 @@ fn mk_pound(span: Span) -> tt::Subtree { ) } -fn module_path_expand( - _db: &dyn ExpandDatabase, - _id: MacroCallId, - _tt: &tt::Subtree, - span: Span, -) -> ExpandResult { - // Just return a dummy result. - ExpandResult::ok(quote! {span => - "module::path" - }) -} - fn line_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, @@ -904,6 +894,18 @@ fn option_env_expand( ExpandResult::ok(expanded) } +fn module_path_expand( + _db: &dyn ExpandDatabase, + _id: MacroCallId, + tt: &tt::Subtree, + span: Span, +) -> ExpandResult { + // Note: The actual implementation of this is in crates\hir-expand\src\eager.rs + ExpandResult::ok(quote! {span => + #tt + }) +} + fn quote_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs index 5c33f817f9e44..0c895056c2255 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs @@ -128,7 +128,7 @@ macro_rules! quote_impl__ { } }; } -pub(super) use quote_impl__ as __quote; +pub(crate) use quote_impl__ as __quote; /// FIXME: /// It probably should implement in proc-macro @@ -137,7 +137,7 @@ macro_rules! quote_impl { $crate::builtin::quote::IntoTt::to_subtree($crate::builtin::quote::__quote!($span $($tt)*), $span) } } -pub(super) use quote_impl as quote; +pub(crate) use quote_impl as quote; pub(crate) trait IntoTt { fn to_subtree(self, span: Span) -> crate::tt::Subtree; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs index 1dadfe2ba99bb..534d5663860d6 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs @@ -32,6 +32,51 @@ use crate::{ MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, }; +pub fn expand_module_path_as_eager( + db: &dyn ExpandDatabase, + krate: CrateId, + mod_path: String, + macro_call: &ast::MacroCall, + ast_id: AstId, + def: MacroDefId, + call_site: SyntaxContextId, +) -> ExpandResult> { + let expand_to = ExpandTo::from_call_site(macro_call); + + // Note: + // When `lazy_expand` is called, its *parent* file must already exist. + // Here we store an eager macro id for the argument expanded subtree + // for that purpose. + let arg_id = MacroCallLoc { + def, + krate, + kind: MacroCallKind::FnLike { ast_id, expand_to: ExpandTo::Expr, eager: None }, + ctxt: call_site, + } + .intern(db); + #[allow(deprecated)] // builtin eager macros are never derives + let (_, _, span) = db.macro_arg(arg_id); + let subtree = crate::builtin::quote::quote! 
{span => #mod_path}; + + let loc = MacroCallLoc { + def, + krate, + kind: MacroCallKind::FnLike { + ast_id, + expand_to, + eager: Some(Arc::new(EagerCallInfo { + arg: Arc::new(subtree), + arg_id, + error: None, + span, + })), + }, + ctxt: call_site, + }; + + ExpandResult { value: Some(loc.intern(db)), err: None } +} + pub fn expand_eager_macro_input( db: &dyn ExpandDatabase, krate: CrateId, diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index d086aee42855f..d9f3b9e9433dd 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -364,7 +364,6 @@ impl<'db> SemanticsImpl<'db> { _, BuiltinFnLikeExpander::Column | BuiltinFnLikeExpander::File - | BuiltinFnLikeExpander::ModulePath | BuiltinFnLikeExpander::Asm | BuiltinFnLikeExpander::LlvmAsm | BuiltinFnLikeExpander::GlobalAsm @@ -482,10 +481,26 @@ impl<'db> SemanticsImpl<'db> { let SourceAnalyzer { file_id, resolver, .. } = self.analyze_no_infer(actual_macro_call.syntax())?; let macro_call = InFile::new(file_id, actual_macro_call); - let krate = resolver.krate(); - let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { - resolver.resolve_path_as_macro_def(self.db.upcast(), path, Some(MacroSubNs::Bang)) - })?; + let macro_call_id = macro_call + .as_call_id( + self.db.upcast(), + resolver.module(), + |path| { + resolver.resolve_path_as_macro_def( + self.db.upcast(), + path, + Some(MacroSubNs::Bang), + ) + }, + |module| { + resolver + .module() + .def_map(self.db.upcast()) + .path_for_module(self.db.upcast(), module) + }, + ) + .ok()? + .value?; hir_expand::db::expand_speculative( self.db.upcast(), macro_call_id, diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index be0116862b9cf..fe1d5b5764d35 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -839,12 +839,25 @@ impl SourceAnalyzer { db: &dyn HirDatabase, macro_call: InFile<&ast::MacroCall>, ) -> Option { - let krate = self.resolver.krate(); // FIXME: This causes us to parse, generally this is the wrong approach for resolving a // macro call to a macro call id! - let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { - self.resolver.resolve_path_as_macro_def(db.upcast(), path, Some(MacroSubNs::Bang)) - })?; + let macro_call_id = macro_call + .as_call_id( + db.upcast(), + self.resolver.module(), + |path| { + self.resolver.resolve_path_as_macro_def( + db.upcast(), + path, + Some(MacroSubNs::Bang), + ) + }, + |module| { + self.resolver.module().def_map(db.upcast()).path_for_module(db.upcast(), module) + }, + ) + .ok()? + .value?; // why the 64? 
Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64) } diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs index f75b8fb7d0224..dbe851ec47454 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs @@ -24,9 +24,11 @@ pub(crate) fn goto_type_definition( let file: ast::SourceFile = sema.parse_guess_edition(file_id); let token: SyntaxToken = pick_best_token(file.syntax().token_at_offset(offset), |kind| match kind { - IDENT | INT_NUMBER | T![self] => 2, - kind if kind.is_trivia() => 0, - _ => 1, + IDENT | INT_NUMBER | T![self] => 3, + // operators + T!['('] | T!['['] | T!['{'] | T![')'] | T![']'] | T!['}'] | T![!] | T![?] => 2, + kind if !kind.is_trivia() => 1, + _ => 0, })?; let mut res = Vec::new(); diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 8b60e562d7b8d..fc59307806f61 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -8702,3 +8702,68 @@ fn foo() { "#]], ); } + +#[test] +fn module_path_macro() { + check( + r##" +//- minicore: module_path + +const C$0: &'static str = module_path!(); +"##, + expect![[r#" + *C* + + ```rust + test + ``` + + ```rust + const C: &'static str = "test" + ``` + "#]], + ); + check( + r##" +//- minicore: module_path + +mod foo { + const C$0: &'static str = module_path!(); +} +"##, + expect![[r#" + *C* + + ```rust + test::foo + ``` + + ```rust + const C: &'static str = "test::foo" + ``` + "#]], + ); + check( + r##" +//- minicore: module_path +mod baz { + const _: () = { + mod bar { + const C$0: &'static str = module_path!(); + } + } +} +"##, + expect![[r#" + *C* + + ```rust + test::bar + ``` + + ```rust + const C: &'static str = "test::baz::bar" + ``` + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs index 67d6932da962d..ce21e3cad5baf 100644 --- a/src/tools/rust-analyzer/crates/ide/src/status.rs +++ b/src/tools/rust-analyzer/crates/ide/src/status.rs @@ -69,6 +69,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { dependencies, origin, is_proc_macro, + .. } = &crate_graph[crate_id]; format_to!( buf, diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs index 2feca32ff8686..5d05ef3b79be9 100644 --- a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs +++ b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs @@ -177,6 +177,7 @@ define_symbols! 
{ coroutine_state, coroutine, count, + crate_name, crate_type, CStr, debug_assertions, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs index 89fe712ced020..f2d21208d51f6 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs @@ -22,7 +22,7 @@ use crate::{ }; struct LsifManager<'a> { - count: i32, + id_counter: i32, token_map: FxHashMap, range_map: FxHashMap, file_map: FxHashMap, @@ -44,7 +44,7 @@ impl From for lsp_types::NumberOrString { impl LsifManager<'_> { fn new<'a>(analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs) -> LsifManager<'a> { LsifManager { - count: 0, + id_counter: 0, token_map: FxHashMap::default(), range_map: FxHashMap::default(), file_map: FxHashMap::default(), @@ -56,9 +56,9 @@ impl LsifManager<'_> { } fn add(&mut self, data: lsif::Element) -> Id { - let id = Id(self.count); + let id = Id(self.id_counter); self.emit(&serde_json::to_string(&lsif::Entry { id: id.into(), data }).unwrap()); - self.count += 1; + self.id_counter += 1; id } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index 34b20851dd6bb..91d6c9f5e2436 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -49,7 +49,6 @@ impl flags::Scip { let error_sink; (config, error_sink, _) = config.apply_change(change); - // FIXME @alibektas : What happens to errors without logging? error!(?error_sink, "Config Error(s)"); } let cargo_config = config.cargo(); @@ -116,7 +115,6 @@ impl flags::Scip { tokens.into_iter().for_each(|(text_range, id)| { let token = si.tokens.get(id).unwrap(); - let range = text_range_to_scip_range(&line_index, text_range); let symbol = tokens_to_symbol .entry(id) diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs index 2d615c34a3520..2e01d19908832 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs @@ -41,6 +41,7 @@ //! iterator: option //! iterators: iterator, fn //! manually_drop: drop +//! module_path: //! non_null: //! non_zero: //! option: panic @@ -1434,6 +1435,16 @@ mod panicking { #[macro_use] mod macros { + // region:module_path + #[macro_export] + #[rustc_builtin_macro] + macro_rules! 
module_path { + ($($arg:tt)*) => { + /* compiler built-in */ + }; + } + // endregion:module_path + // region:panic #[macro_export] #[rustc_builtin_macro(core_panic)] From db2e8e1ed0e0aa4f4c9e3f5712c4c9351a7bf2e2 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Thu, 22 Aug 2024 00:32:44 +0900 Subject: [PATCH 036/124] fix: Wrong `Sized` predicate for `generic_predicates_for_param` --- src/tools/rust-analyzer/crates/hir-ty/src/lower.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index 5824c59c5a575..58c34cbd9e164 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -1551,6 +1551,10 @@ pub(crate) fn generic_predicates_for_param_query( } }; if invalid_target { + // If this is filtered out without lowering, `?Sized` is not gathered into `ctx.unsized_types` + if let TypeBound::Path(_, TraitBoundModifier::Maybe) = &**bound { + ctx.lower_where_predicate(pred, &def, true).for_each(drop); + } return false; } From f84553641c7c20d5c62cddc2ddd11255280eb505 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Aug 2024 12:33:32 +0200 Subject: [PATCH 037/124] Remove DescendPreference::SameText --- src/tools/rust-analyzer/Cargo.toml | 2 + .../crates/hir-expand/src/lib.rs | 7 + .../rust-analyzer/crates/hir/src/semantics.rs | 353 +++++++++++------- .../extract_expressions_from_format_string.rs | 14 +- .../src/syntax_helpers/format_string_exprs.rs | 1 + .../rust-analyzer/crates/ide/src/doc_links.rs | 2 +- .../crates/ide/src/extend_selection.rs | 10 +- .../crates/ide/src/goto_implementation.rs | 93 ++--- .../rust-analyzer/crates/ide/src/hover.rs | 33 +- .../crates/ide/src/references.rs | 4 +- .../crates/ide/src/signature_help.rs | 7 +- .../crates/ide/src/syntax_highlighting.rs | 3 +- .../test_data/highlight_macros.html | 4 +- 13 files changed, 304 insertions(+), 229 deletions(-) diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index 56e80e11e774f..d8f9b2b7f6afb 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -184,6 +184,8 @@ style = { level = "warn", priority = -1 } suspicious = { level = "warn", priority = -1 } ## allow following lints +# subjective +single_match = "allow" # () makes a fine error in most cases result_unit_err = "allow" # We don't expose public APIs that matter like this diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index 2bea9026265a6..741c2b2ebfe23 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -279,6 +279,7 @@ pub enum MacroCallKind { } pub trait HirFileIdExt { + fn edition(self, db: &dyn ExpandDatabase) -> Edition; /// Returns the original file of this macro call hierarchy. 
fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId; @@ -293,6 +294,12 @@ pub trait HirFileIdExt { } impl HirFileIdExt for HirFileId { + fn edition(self, db: &dyn ExpandDatabase) -> Edition { + match self.repr() { + HirFileIdRepr::FileId(file_id) => file_id.edition(), + HirFileIdRepr::MacroFile(m) => m.macro_call_id.lookup(db).def.edition, + } + } fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId { let mut file_id = self; loop { diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index d086aee42855f..a8151e70a108e 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -4,6 +4,7 @@ mod source_to_def; use std::{ cell::RefCell, + convert::Infallible, fmt, iter, mem, ops::{self, ControlFlow, Not}, }; @@ -48,12 +49,46 @@ use crate::{ Variant, VariantDef, }; +const CONTINUE_NO_BREAKS: ControlFlow = ControlFlow::Continue(()); + pub enum DescendPreference { - SameText, SameKind, None, } +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum MacroInputKind { + Root, + Bang, + AttrInput, + AttrTarget, + Derive, + DeriveHelper, + // Include, +} +impl MacroInputKind { + pub fn is_tt(self) -> bool { + matches!( + self, + MacroInputKind::AttrInput + | MacroInputKind::Bang + | MacroInputKind::DeriveHelper + | MacroInputKind::Derive + ) + } +} + +impl ops::BitOr for MacroInputKind { + type Output = Self; + + fn bitor(self, rhs: Self) -> Self::Output { + match self { + Self::Root => rhs, + _ => self, + } + } +} + #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum PathResolution { /// An item @@ -545,51 +580,50 @@ impl<'db> SemanticsImpl<'db> { ) } + /// Retrieves all the formatting parts of the format_args! template string. pub fn as_format_args_parts( &self, string: &ast::String, ) -> Option)>> { - if let Some(quote) = string.open_quote_text_range() { - return self - .descend_into_macros(DescendPreference::SameText, string.syntax().clone()) - .into_iter() - .find_map(|token| { - let string = ast::String::cast(token)?; - let literal = - string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; - let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; - let source_analyzer = self.analyze_no_infer(format_args.syntax())?; - let format_args = self.wrap_node_infile(format_args); - let res = source_analyzer - .as_format_args_parts(self.db, format_args.as_ref())? - .map(|(range, res)| (range + quote.end(), res)) - .collect(); - Some(res) - }); - } - None + let quote = string.open_quote_text_range()?; + self.descend_into_macros_ng_b(string.syntax().clone(), |kind, token| { + (|| { + let token = token.value; + let string = ast::String::cast(token)?; + let literal = + string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; + let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; + let source_analyzer = self.analyze_no_infer(format_args.syntax())?; + let format_args = self.wrap_node_infile(format_args); + let res = source_analyzer + .as_format_args_parts(self.db, format_args.as_ref())? + .map(|(range, res)| (range + quote.end(), res)) + .collect(); + Some(res) + })() + .map_or(ControlFlow::Continue(()), ControlFlow::Break) + }) } + /// Retrieves the formatting part of the format_args! template string at the given offset. 
pub fn check_for_format_args_template( &self, original_token: SyntaxToken, offset: TextSize, ) -> Option<(TextRange, Option)> { - if let Some(original_string) = ast::String::cast(original_token.clone()) { - if let Some(quote) = original_string.open_quote_text_range() { - return self - .descend_into_macros(DescendPreference::SameText, original_token) - .into_iter() - .find_map(|token| { - self.resolve_offset_in_format_args( - ast::String::cast(token)?, - offset.checked_sub(quote.end())?, - ) - }) - .map(|(range, res)| (range + quote.end(), res)); - } - } - None + let original_string = ast::String::cast(original_token.clone())?; + let quote = original_string.open_quote_text_range()?; + self.descend_into_macros_ng_b(original_token.clone(), |kind, token| { + (|| { + let token = token.value; + self.resolve_offset_in_format_args( + ast::String::cast(token)?, + offset.checked_sub(quote.end())?, + ) + .map(|(range, res)| (range + quote.end(), res)) + })() + .map_or(ControlFlow::Continue(()), ControlFlow::Break) + }) } fn resolve_offset_in_format_args( @@ -622,7 +656,7 @@ impl<'db> SemanticsImpl<'db> { if first == last { // node is just the token, so descend the token - self.descend_into_macros_impl(first, &mut |InFile { value, .. }| { + self.descend_into_macros_impl(first, &mut |_kind, InFile { value, .. }| { if let Some(node) = value .parent_ancestors() .take_while(|it| it.text_range() == value.text_range()) @@ -630,20 +664,20 @@ impl<'db> SemanticsImpl<'db> { { res.push(node) } - ControlFlow::Continue(()) + CONTINUE_NO_BREAKS }); } else { // Descend first and last token, then zip them to look for the node they belong to let mut scratch: SmallVec<[_; 1]> = smallvec![]; - self.descend_into_macros_impl(first, &mut |token| { + self.descend_into_macros_impl(first, &mut |_kind, token| { scratch.push(token); - ControlFlow::Continue(()) + CONTINUE_NO_BREAKS }); let mut scratch = scratch.into_iter(); self.descend_into_macros_impl( last, - &mut |InFile { value: last, file_id: last_fid }| { + &mut |_kind, InFile { value: last, file_id: last_fid }| { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if first_fid == last_fid { if let Some(p) = first.parent() { @@ -659,7 +693,7 @@ impl<'db> SemanticsImpl<'db> { } } } - ControlFlow::Continue(()) + CONTINUE_NO_BREAKS }, ); } @@ -673,8 +707,8 @@ impl<'db> SemanticsImpl<'db> { mode: DescendPreference, token: SyntaxToken, ) -> SmallVec<[SyntaxToken; 1]> { - enum Dp<'t> { - SameText(&'t str), + enum Dp { + // SameText(&'t str), SameKind(SyntaxKind), None, } @@ -686,104 +720,123 @@ impl<'db> SemanticsImpl<'db> { None => token.kind(), }; let mode = match mode { - DescendPreference::SameText => Dp::SameText(token.text()), + // DescendPreference::SameText => Dp::SameText(token.text()), DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)), DescendPreference::None => Dp::None, }; let mut res = smallvec![]; - self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| { - let is_a_match = match mode { - Dp::SameText(text) => value.text() == text, - Dp::SameKind(preferred_kind) => { - let kind = fetch_kind(&value); - kind == preferred_kind + self.descend_into_macros_impl::( + token.clone(), + &mut |kind, InFile { value, .. 
}| { + let is_a_match = match mode { + // Dp::SameText(text) => value.text() == text, + Dp::SameKind(preferred_kind) => { + let kind = fetch_kind(&value); + kind == preferred_kind // special case for derive macros || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF) + } + Dp::None => true, + }; + if is_a_match { + res.push(value); } - Dp::None => true, - }; - if is_a_match { - res.push(value); - } - ControlFlow::Continue(()) - }); + ControlFlow::Continue(()) + }, + ); if res.is_empty() { res.push(token); } res } - pub fn descend_into_macros_single( + pub fn descend_into_macros_ng( &self, - mode: DescendPreference, token: SyntaxToken, - ) -> SyntaxToken { - enum Dp<'t> { - SameText(&'t str), - SameKind(SyntaxKind), - None, + mut cb: impl FnMut(MacroInputKind, InFile), + ) { + self.descend_into_macros_impl(token.clone(), &mut |kind, t| { + cb(kind, t); + CONTINUE_NO_BREAKS + }); + } + + pub fn descend_into_macros_ng_b( + &self, + token: SyntaxToken, + mut cb: impl FnMut(MacroInputKind, InFile) -> ControlFlow, + ) -> Option { + self.descend_into_macros_impl(token.clone(), &mut |kind, t| cb(kind, t)) + } + + /// Descends the token into expansions, returning the tokens that matches the input + /// token's [`SyntaxKind`] and text. + pub fn descend_into_macros_exact(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { + let mut r = smallvec![]; + let text = token.text(); + let kind = token.kind(); + + self.descend_into_macros_ng(token.clone(), |m_kind, InFile { value, file_id }| { + let mapped_kind = value.kind(); + let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier(); + let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); + if matches { + r.push(value); + } + }); + if r.is_empty() { + r.push(token); } - let fetch_kind = |token: &SyntaxToken| match token.parent() { - Some(node) => match node.kind() { - kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind, - _ => token.kind(), - }, - None => token.kind(), - }; - let mode = match mode { - DescendPreference::SameText => Dp::SameText(token.text()), - DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)), - DescendPreference::None => Dp::None, - }; - let mut res = token.clone(); - self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| { - let is_a_match = match mode { - Dp::SameText(text) => value.text() == text, - Dp::SameKind(preferred_kind) => { - let kind = fetch_kind(&value); - kind == preferred_kind - // special case for derive macros - || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF) - } - Dp::None => true, - }; - res = value; - if is_a_match { - ControlFlow::Break(()) + r + } + + /// Descends the token into expansions, returning the first token that matches the input + /// token's [`SyntaxKind`] and text. 
+ pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken { + let text = token.text(); + let kind = token.kind(); + + self.descend_into_macros_ng_b(token.clone(), |m_kind, InFile { value, file_id }| { + let mapped_kind = value.kind(); + let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier(); + let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); + if matches { + ControlFlow::Break(value) } else { ControlFlow::Continue(()) } - }); - res + }) + .unwrap_or(token) } - fn descend_into_macros_impl( + fn descend_into_macros_impl( &self, token: SyntaxToken, - f: &mut dyn FnMut(InFile) -> ControlFlow<()>, - ) { + f: &mut dyn FnMut(MacroInputKind, InFile) -> ControlFlow, + ) -> Option { let _p = tracing::info_span!("descend_into_macros_impl").entered(); let (sa, span, file_id) = - match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) { - Some(sa) => match sa.file_id.file_id() { - Some(file_id) => ( - sa, - self.db.real_span_map(file_id).span_for_range(token.text_range()), - file_id.into(), - ), - None => { - stdx::never!(); - return; - } - }, - None => return, - }; + token.parent().and_then(|parent| self.analyze_no_infer(&parent)).and_then(|sa| { + let file_id = sa.file_id.file_id()?; + Some(( + sa, + self.db.real_span_map(file_id).span_for_range(token.text_range()), + HirFileId::from(file_id), + )) + })?; let mut m_cache = self.macro_call_cache.borrow_mut(); let def_map = sa.resolver.def_map(); - let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])]; - let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| { + // A stack of tokens to process, along with the file they came from + // These are tracked to know which macro calls we still have to look into + // the tokens themselves aren't that interesting as the span that is being used to map + // things down never changes. 
+ let mut stack: Vec<(_, _, SmallVec<[_; 2]>)> = + vec![(file_id, MacroInputKind::Root, smallvec![token])]; + + // Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack + let process_expansion_for_token = |stack: &mut Vec<_>, macro_file, remap_kind| { let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| { Some( ctx.cache @@ -805,11 +858,17 @@ impl<'db> SemanticsImpl<'db> { // we have found a mapping for the token if the vec is non-empty let res = mapped_tokens.is_empty().not().then_some(()); // requeue the tokens we got from mapping our current token down - stack.push((HirFileId::from(file_id), mapped_tokens)); + stack.push((HirFileId::from(file_id), remap_kind, mapped_tokens)); res }; - while let Some((file_id, mut tokens)) = stack.pop() { + // Filters out all tokens that contain the given range (usually the macro call), any such + // token is redundant as the corresponding macro call has already been processed + let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| { + tokens.retain(|t: &mut SyntaxToken| !range.contains_range(t.text_range())) + }; + + while let Some((expansion, remap_kind, ref mut tokens)) = stack.pop() { while let Some(token) = tokens.pop() { let was_not_remapped = (|| { // First expand into attribute invocations @@ -817,7 +876,7 @@ impl<'db> SemanticsImpl<'db> { token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| { // Don't force populate the dyn cache for items that don't have an attribute anyways item.attrs().next()?; - Some((ctx.item_to_macro_call(InFile::new(file_id, &item))?, item)) + Some((ctx.item_to_macro_call(InFile::new(expansion, &item))?, item)) }) }); if let Some((call_id, item)) = containing_attribute_macro_call { @@ -849,10 +908,12 @@ impl<'db> SemanticsImpl<'db> { }) .unwrap_or_else(|| text_range.start()); let text_range = TextRange::new(start, text_range.end()); - // remove any other token in this macro input, all their mappings are the - // same as this one - tokens.retain(|t| !text_range.contains_range(t.text_range())); - return process_expansion_for_token(&mut stack, file_id); + filter_duplicates(tokens, text_range); + return process_expansion_for_token( + &mut stack, + file_id, + remap_kind | MacroInputKind::AttrTarget, + ); } // Then check for token trees, that means we are either in a function-like macro or @@ -862,6 +923,7 @@ impl<'db> SemanticsImpl<'db> { .map_while(Either::::cast) .last()?; match tt { + // function-like macro call Either::Left(tt) => { if tt.left_delimiter_token().map_or(false, |it| it == token) { return None; @@ -870,7 +932,7 @@ impl<'db> SemanticsImpl<'db> { return None; } let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?; - let mcall = InFile::new(file_id, macro_call); + let mcall = InFile::new(expansion, macro_call); let file_id = match m_cache.get(&mcall) { Some(&it) => it, None => { @@ -888,17 +950,25 @@ impl<'db> SemanticsImpl<'db> { } }; let text_range = tt.syntax().text_range(); - // remove any other token in this macro input, all their mappings are the - // same as this one - tokens.retain(|t| !text_range.contains_range(t.text_range())); - - process_expansion_for_token(&mut stack, file_id).or(file_id + filter_duplicates(tokens, text_range); + + process_expansion_for_token( + &mut stack, + file_id, + remap_kind | MacroInputKind::Bang, + ) + .or(file_id .eager_arg(self.db.upcast()) .and_then(|arg| { // also descend into eager expansions - process_expansion_for_token(&mut stack, arg.as_macro_file()) + 
process_expansion_for_token( + &mut stack, + arg.as_macro_file(), + remap_kind | MacroInputKind::Bang, + ) })) } + // derive or derive helper Either::Right(meta) => { // attribute we failed expansion for earlier, this might be a derive invocation // or derive helper attribute @@ -910,8 +980,8 @@ impl<'db> SemanticsImpl<'db> { // so try downmapping the token into the pseudo derive expansion // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works ctx.attr_to_derive_macro_call( - InFile::new(file_id, &adt), - InFile::new(file_id, attr.clone()), + InFile::new(expansion, &adt), + InFile::new(expansion, attr.clone()), ) .map(|(_, call_id, _)| call_id) }); @@ -927,7 +997,9 @@ impl<'db> SemanticsImpl<'db> { !text_range.contains_range(t.text_range()) }); return process_expansion_for_token( - &mut stack, file_id, + &mut stack, + file_id, + remap_kind | MacroInputKind::Derive, ); } None => Some(adt), @@ -945,31 +1017,33 @@ impl<'db> SemanticsImpl<'db> { ) } }?; - if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) { + if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(expansion, &adt))) { return None; } let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name(); - // Not an attribute, nor a derive, so it's either a builtin or a derive helper + // Not an attribute, nor a derive, so it's either an intert attribute or a derive helper // Try to resolve to a derive helper and downmap - let id = self.db.ast_id_map(file_id).ast_id(&adt); + let id = self.db.ast_id_map(expansion).ast_id(&adt); let helpers = - def_map.derive_helpers_in_scope(InFile::new(file_id, id))?; + def_map.derive_helpers_in_scope(InFile::new(expansion, id))?; if !helpers.is_empty() { let text_range = attr.syntax().text_range(); - // remove any other token in this macro input, all their mappings are the - // same as this - tokens.retain(|t| !text_range.contains_range(t.text_range())); + filter_duplicates(tokens, text_range); } let mut res = None; for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) { + // as there may be multiple derives registering the same helper + // name, we gotta make sure to call this for all of them! + // FIXME: We need to call `f` for all of them as well though! 
res = res.or(process_expansion_for_token( &mut stack, derive.as_macro_file(), + remap_kind | MacroInputKind::DeriveHelper, )); } res @@ -978,11 +1052,14 @@ impl<'db> SemanticsImpl<'db> { })() .is_none(); - if was_not_remapped && f(InFile::new(file_id, token)).is_break() { - break; + if was_not_remapped { + if let ControlFlow::Break(b) = f(remap_kind, InFile::new(expansion, token)) { + return Some(b); + } } } } + None } // Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 9180d8dfcbb21..e4d347ef16bd6 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -1,11 +1,7 @@ use crate::{utils, AssistContext, Assists}; -use hir::DescendPreference; use ide_db::{ assists::{AssistId, AssistKind}, - syntax_helpers::{ - format_string::is_format_string, - format_string_exprs::{parse_format_exprs, Arg}, - }, + syntax_helpers::format_string_exprs::{parse_format_exprs, Arg}, }; use itertools::Itertools; use syntax::{ @@ -40,13 +36,7 @@ pub(crate) fn extract_expressions_from_format_string( let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?; let tt_delimiter = tt.left_delimiter_token()?.kind(); - let expanded_t = ast::String::cast( - ctx.sema - .descend_into_macros_single(DescendPreference::SameKind, fmt_string.syntax().clone()), - )?; - if !is_format_string(&expanded_t) { - return None; - } + let _ = ctx.sema.as_format_args_parts(&fmt_string)?; let (new_fmt, extracted_args) = parse_format_exprs(fmt_string.text()).ok()?; if extracted_args.is_empty() { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs index 8ab5a6ede3bd5..c104aa571894d 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/format_string_exprs.rs @@ -31,6 +31,7 @@ pub fn with_placeholders(args: Vec) -> Vec { .collect() } +// FIXME Remove this, we have this information in the HIR now /// Parser for a format-like string. It is more allowing in terms of string contents, /// as we expect variable placeholders to be filled with expressions. /// diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs index cad0d06eeab18..1304b057021b4 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs @@ -144,7 +144,7 @@ pub(crate) fn external_docs( kind if kind.is_trivia() => 0, _ => 1, })?; - let token = sema.descend_into_macros_single(DescendPreference::None, token); + let token = sema.descend_into_macros_single_exact(token); let node = token.parent()?; let definition = match_ast! 
{ diff --git a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs index 5f6aaeaabb60e..3d49082f2858d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs +++ b/src/tools/rust-analyzer/crates/ide/src/extend_selection.rs @@ -1,6 +1,6 @@ use std::iter::successors; -use hir::{DescendPreference, Semantics}; +use hir::Semantics; use ide_db::RootDatabase; use syntax::{ algo::{self, skip_trivia_token}, @@ -140,10 +140,8 @@ fn extend_tokens_from_range( // compute original mapped token range let extended = { - let fst_expanded = - sema.descend_into_macros_single(DescendPreference::None, first_token.clone()); - let lst_expanded = - sema.descend_into_macros_single(DescendPreference::None, last_token.clone()); + let fst_expanded = sema.descend_into_macros_single_exact(first_token.clone()); + let lst_expanded = sema.descend_into_macros_single_exact(last_token.clone()); let mut lca = algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?; lca = shallowest_node(&lca); @@ -157,7 +155,7 @@ fn extend_tokens_from_range( let validate = || { let extended = &extended; move |token: &SyntaxToken| -> bool { - let expanded = sema.descend_into_macros_single(DescendPreference::None, token.clone()); + let expanded = sema.descend_into_macros_single_exact(token.clone()); let parent = match expanded.parent() { Some(it) => it, None => return false, diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs index 2eff7796d548b..e36c8ee2f3f73 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_implementation.rs @@ -1,4 +1,4 @@ -use hir::{AsAssocItem, DescendPreference, Impl, Semantics}; +use hir::{AsAssocItem, Impl, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, @@ -10,7 +10,7 @@ use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav}; // Feature: Go to Implementation // -// Navigates to the impl blocks of types. +// Navigates to the impl items of types. // // |=== // | Editor | Shortcut @@ -32,48 +32,55 @@ pub(crate) fn goto_implementation( _ => 0, })?; let range = original_token.text_range(); - let navs = - sema.descend_into_macros_single(DescendPreference::SameText, original_token) - .parent() - .and_then(ast::NameLike::cast) - .and_then(|node| match &node { - ast::NameLike::Name(name) => { - NameClass::classify(&sema, name).and_then(|class| match class { - NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it), - NameClass::PatFieldShorthand { .. } => None, - }) - } - ast::NameLike::NameRef(name_ref) => NameRefClass::classify(&sema, name_ref) - .and_then(|class| match class { - NameRefClass::Definition(def) => Some(def), - NameRefClass::FieldShorthand { .. } - | NameRefClass::ExternCrateShorthand { .. 
} => None, - }), - ast::NameLike::Lifetime(_) => None, - }) - .and_then(|def| { - let navs = match def { - Definition::Trait(trait_) => impls_for_trait(&sema, trait_), - Definition::Adt(adt) => impls_for_ty(&sema, adt.ty(sema.db)), - Definition::TypeAlias(alias) => impls_for_ty(&sema, alias.ty(sema.db)), - Definition::BuiltinType(builtin) => impls_for_ty(&sema, builtin.ty(sema.db)), - Definition::Function(f) => { - let assoc = f.as_assoc_item(sema.db)?; - let name = assoc.name(sema.db)?; - let trait_ = assoc.container_or_implemented_trait(sema.db)?; - impls_for_trait_item(&sema, trait_, name) + let navs = sema + .descend_into_macros_exact(original_token) + .iter() + .filter_map(|token| { + token + .parent() + .and_then(ast::NameLike::cast) + .and_then(|node| match &node { + ast::NameLike::Name(name) => { + NameClass::classify(&sema, name).and_then(|class| match class { + NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it), + NameClass::PatFieldShorthand { .. } => None, + }) } - Definition::Const(c) => { - let assoc = c.as_assoc_item(sema.db)?; - let name = assoc.name(sema.db)?; - let trait_ = assoc.container_or_implemented_trait(sema.db)?; - impls_for_trait_item(&sema, trait_, name) - } - _ => return None, - }; - Some(navs) - }) - .unwrap_or_default(); + ast::NameLike::NameRef(name_ref) => NameRefClass::classify(&sema, name_ref) + .and_then(|class| match class { + NameRefClass::Definition(def) => Some(def), + NameRefClass::FieldShorthand { .. } + | NameRefClass::ExternCrateShorthand { .. } => None, + }), + ast::NameLike::Lifetime(_) => None, + }) + .and_then(|def| { + let navs = match def { + Definition::Trait(trait_) => impls_for_trait(&sema, trait_), + Definition::Adt(adt) => impls_for_ty(&sema, adt.ty(sema.db)), + Definition::TypeAlias(alias) => impls_for_ty(&sema, alias.ty(sema.db)), + Definition::BuiltinType(builtin) => { + impls_for_ty(&sema, builtin.ty(sema.db)) + } + Definition::Function(f) => { + let assoc = f.as_assoc_item(sema.db)?; + let name = assoc.name(sema.db)?; + let trait_ = assoc.container_or_implemented_trait(sema.db)?; + impls_for_trait_item(&sema, trait_, name) + } + Definition::Const(c) => { + let assoc = c.as_assoc_item(sema.db)?; + let name = assoc.name(sema.db)?; + let trait_ = assoc.container_or_implemented_trait(sema.db)?; + impls_for_trait_item(&sema, trait_, name) + } + _ => return None, + }; + Some(navs) + }) + }) + .flatten() + .collect(); Some(RangeInfo { range, info: navs }) } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index 6c646cf4ee1f2..b04f5c8411e63 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -6,7 +6,7 @@ mod tests; use std::{iter, ops::Not}; use either::Either; -use hir::{db::DefDatabase, DescendPreference, HasCrate, HasSource, LangItem, Semantics}; +use hir::{db::DefDatabase, HasCrate, HasSource, LangItem, Semantics}; use ide_db::{ defs::{Definition, IdentClass, NameRefClass, OperatorClass}, famous_defs::FamousDefs, @@ -178,29 +178,24 @@ fn hover_simple( return Some(RangeInfo::new(range, res)); } - let in_attr = original_token - .parent_ancestors() - .filter_map(ast::Item::cast) - .any(|item| sema.is_attr_macro_call(&item)) - && !matches!( - original_token.parent().and_then(ast::TokenTree::cast), - Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())) - ); - // prefer descending the same token kind in attribute expansions, in normal macros text // equivalency is more 
important - let descended = sema.descend_into_macros( - if in_attr { DescendPreference::SameKind } else { DescendPreference::SameText }, - original_token.clone(), - ); + let mut descended = vec![]; + sema.descend_into_macros_ng(original_token.clone(), |_, token| { + descended.push(token.value); + }); let descended = || descended.iter(); - let result = descended() + // FIXME: WE should not try these step by step, instead to accommodate for macros we should run + // all of these in "parallel" and rank their results + let result = None // try lint hover - .find_map(|token| { - // FIXME: Definition should include known lints and the like instead of having this special case here - let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; - render::try_for_lint(&attr, token) + .or_else(|| { + descended().find_map(|token| { + // FIXME: Definition should include known lints and the like instead of having this special case here + let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; + render::try_for_lint(&attr, token) + }) }) // try definitions .or_else(|| { diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index 3e3d5ef6e6558..55afcb59bae78 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -9,7 +9,7 @@ //! at the index that the match starts at and its tree parent is //! resolved to the search element definition, we get a reference. -use hir::{DescendPreference, PathResolution, Semantics}; +use hir::{PathResolution, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, search::{ReferenceCategory, SearchScope, UsageSearchResult}, @@ -149,7 +149,7 @@ pub(crate) fn find_defs<'a>( } Some( - sema.descend_into_macros(DescendPreference::SameText, token) + sema.descend_into_macros_exact(token) .into_iter() .filter_map(|it| ast::NameLike::cast(it.parent()?)) .filter_map(move |name_like| { diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs index 9d3b8c6ebd19b..516f64959cefa 100644 --- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs +++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs @@ -4,10 +4,7 @@ use std::collections::BTreeSet; use either::Either; -use hir::{ - AssocItem, DescendPreference, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, - Trait, -}; +use hir::{AssocItem, GenericParam, HirDisplay, ModuleDef, PathResolution, Semantics, Trait}; use ide_db::{ active_parameter::{callable_for_node, generic_def_for_node}, documentation::{Documentation, HasDocs}, @@ -82,7 +79,7 @@ pub(crate) fn signature_help( // if the cursor is sandwiched between two space tokens and the call is unclosed // this prevents us from leaving the CallExpression .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?; - let token = sema.descend_into_macros_single(DescendPreference::None, token); + let token = sema.descend_into_macros_single_exact(token); let edition = sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT); diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index e082fcdc76589..94df794ccbbd9 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -410,7 +410,8 @@ fn traverse( }) .unwrap() } else { - 
sema.descend_into_macros_single(DescendPreference::SameKind, token) + // FIXME: We should probably rank the tokens and find the most suitable? + sema.descend_into_macros_single_exact(token) }; match token.parent().and_then(ast::NameLike::cast) { // Remap the token into the wrapping single token nodes diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html index 17411fefbd978..d6b7012a1f996 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -58,7 +58,7 @@ } def_fn! { - fn bar() -> u32 { + fn bar() -> u32 { 100 } } @@ -100,7 +100,7 @@ }; } -include!(concat!("foo/", "foo.rs")); +include!(concat!("foo/", "foo.rs")); struct S<T>(T); fn main() { From d893dcc43fec3bb12ca0ca6b77d541a355296b1c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Aug 2024 12:39:53 +0200 Subject: [PATCH 038/124] Drop MacroInputKind --- .../rust-analyzer/crates/hir/src/semantics.rs | 96 +++++-------------- .../rust-analyzer/crates/ide/src/hover.rs | 2 +- 2 files changed, 24 insertions(+), 74 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index a8151e70a108e..6e9fc3996ccdb 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -56,39 +56,6 @@ pub enum DescendPreference { None, } -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum MacroInputKind { - Root, - Bang, - AttrInput, - AttrTarget, - Derive, - DeriveHelper, - // Include, -} -impl MacroInputKind { - pub fn is_tt(self) -> bool { - matches!( - self, - MacroInputKind::AttrInput - | MacroInputKind::Bang - | MacroInputKind::DeriveHelper - | MacroInputKind::Derive - ) - } -} - -impl ops::BitOr for MacroInputKind { - type Output = Self; - - fn bitor(self, rhs: Self) -> Self::Output { - match self { - Self::Root => rhs, - _ => self, - } - } -} - #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum PathResolution { /// An item @@ -586,7 +553,7 @@ impl<'db> SemanticsImpl<'db> { string: &ast::String, ) -> Option)>> { let quote = string.open_quote_text_range()?; - self.descend_into_macros_ng_b(string.syntax().clone(), |kind, token| { + self.descend_into_macros_ng_b(string.syntax().clone(), |token| { (|| { let token = token.value; let string = ast::String::cast(token)?; @@ -613,7 +580,7 @@ impl<'db> SemanticsImpl<'db> { ) -> Option<(TextRange, Option)> { let original_string = ast::String::cast(original_token.clone())?; let quote = original_string.open_quote_text_range()?; - self.descend_into_macros_ng_b(original_token.clone(), |kind, token| { + self.descend_into_macros_ng_b(original_token.clone(), |token| { (|| { let token = token.value; self.resolve_offset_in_format_args( @@ -656,7 +623,7 @@ impl<'db> SemanticsImpl<'db> { if first == last { // node is just the token, so descend the token - self.descend_into_macros_impl(first, &mut |_kind, InFile { value, .. }| { + self.descend_into_macros_impl(first, &mut |InFile { value, .. 
}| { if let Some(node) = value .parent_ancestors() .take_while(|it| it.text_range() == value.text_range()) @@ -669,7 +636,7 @@ impl<'db> SemanticsImpl<'db> { } else { // Descend first and last token, then zip them to look for the node they belong to let mut scratch: SmallVec<[_; 1]> = smallvec![]; - self.descend_into_macros_impl(first, &mut |_kind, token| { + self.descend_into_macros_impl(first, &mut |token| { scratch.push(token); CONTINUE_NO_BREAKS }); @@ -677,7 +644,7 @@ impl<'db> SemanticsImpl<'db> { let mut scratch = scratch.into_iter(); self.descend_into_macros_impl( last, - &mut |_kind, InFile { value: last, file_id: last_fid }| { + &mut |InFile { value: last, file_id: last_fid }| { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if first_fid == last_fid { if let Some(p) = first.parent() { @@ -727,7 +694,7 @@ impl<'db> SemanticsImpl<'db> { let mut res = smallvec![]; self.descend_into_macros_impl::( token.clone(), - &mut |kind, InFile { value, .. }| { + &mut |InFile { value, .. }| { let is_a_match = match mode { // Dp::SameText(text) => value.text() == text, Dp::SameKind(preferred_kind) => { @@ -753,10 +720,10 @@ impl<'db> SemanticsImpl<'db> { pub fn descend_into_macros_ng( &self, token: SyntaxToken, - mut cb: impl FnMut(MacroInputKind, InFile), + mut cb: impl FnMut(InFile), ) { - self.descend_into_macros_impl(token.clone(), &mut |kind, t| { - cb(kind, t); + self.descend_into_macros_impl(token.clone(), &mut |t| { + cb(t); CONTINUE_NO_BREAKS }); } @@ -764,9 +731,9 @@ impl<'db> SemanticsImpl<'db> { pub fn descend_into_macros_ng_b( &self, token: SyntaxToken, - mut cb: impl FnMut(MacroInputKind, InFile) -> ControlFlow, + mut cb: impl FnMut(InFile) -> ControlFlow, ) -> Option { - self.descend_into_macros_impl(token.clone(), &mut |kind, t| cb(kind, t)) + self.descend_into_macros_impl(token.clone(), &mut |t| cb(t)) } /// Descends the token into expansions, returning the tokens that matches the input @@ -776,7 +743,7 @@ impl<'db> SemanticsImpl<'db> { let text = token.text(); let kind = token.kind(); - self.descend_into_macros_ng(token.clone(), |m_kind, InFile { value, file_id }| { + self.descend_into_macros_ng(token.clone(), |InFile { value, file_id: _ }| { let mapped_kind = value.kind(); let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier(); let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); @@ -796,7 +763,7 @@ impl<'db> SemanticsImpl<'db> { let text = token.text(); let kind = token.kind(); - self.descend_into_macros_ng_b(token.clone(), |m_kind, InFile { value, file_id }| { + self.descend_into_macros_ng_b(token.clone(), |InFile { value, file_id: _ }| { let mapped_kind = value.kind(); let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier(); let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); @@ -812,7 +779,7 @@ impl<'db> SemanticsImpl<'db> { fn descend_into_macros_impl( &self, token: SyntaxToken, - f: &mut dyn FnMut(MacroInputKind, InFile) -> ControlFlow, + f: &mut dyn FnMut(InFile) -> ControlFlow, ) -> Option { let _p = tracing::info_span!("descend_into_macros_impl").entered(); let (sa, span, file_id) = @@ -832,11 +799,10 @@ impl<'db> SemanticsImpl<'db> { // These are tracked to know which macro calls we still have to look into // the tokens themselves aren't that interesting as the span that is being used to map // things down never changes. 
- let mut stack: Vec<(_, _, SmallVec<[_; 2]>)> = - vec![(file_id, MacroInputKind::Root, smallvec![token])]; + let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])]; // Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack - let process_expansion_for_token = |stack: &mut Vec<_>, macro_file, remap_kind| { + let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| { let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| { Some( ctx.cache @@ -858,7 +824,7 @@ impl<'db> SemanticsImpl<'db> { // we have found a mapping for the token if the vec is non-empty let res = mapped_tokens.is_empty().not().then_some(()); // requeue the tokens we got from mapping our current token down - stack.push((HirFileId::from(file_id), remap_kind, mapped_tokens)); + stack.push((HirFileId::from(file_id), mapped_tokens)); res }; @@ -868,7 +834,7 @@ impl<'db> SemanticsImpl<'db> { tokens.retain(|t: &mut SyntaxToken| !range.contains_range(t.text_range())) }; - while let Some((expansion, remap_kind, ref mut tokens)) = stack.pop() { + while let Some((expansion, ref mut tokens)) = stack.pop() { while let Some(token) = tokens.pop() { let was_not_remapped = (|| { // First expand into attribute invocations @@ -909,11 +875,7 @@ impl<'db> SemanticsImpl<'db> { .unwrap_or_else(|| text_range.start()); let text_range = TextRange::new(start, text_range.end()); filter_duplicates(tokens, text_range); - return process_expansion_for_token( - &mut stack, - file_id, - remap_kind | MacroInputKind::AttrTarget, - ); + return process_expansion_for_token(&mut stack, file_id); } // Then check for token trees, that means we are either in a function-like macro or @@ -952,20 +914,11 @@ impl<'db> SemanticsImpl<'db> { let text_range = tt.syntax().text_range(); filter_duplicates(tokens, text_range); - process_expansion_for_token( - &mut stack, - file_id, - remap_kind | MacroInputKind::Bang, - ) - .or(file_id + process_expansion_for_token(&mut stack, file_id).or(file_id .eager_arg(self.db.upcast()) .and_then(|arg| { // also descend into eager expansions - process_expansion_for_token( - &mut stack, - arg.as_macro_file(), - remap_kind | MacroInputKind::Bang, - ) + process_expansion_for_token(&mut stack, arg.as_macro_file()) })) } // derive or derive helper @@ -997,9 +950,7 @@ impl<'db> SemanticsImpl<'db> { !text_range.contains_range(t.text_range()) }); return process_expansion_for_token( - &mut stack, - file_id, - remap_kind | MacroInputKind::Derive, + &mut stack, file_id, ); } None => Some(adt), @@ -1043,7 +994,6 @@ impl<'db> SemanticsImpl<'db> { res = res.or(process_expansion_for_token( &mut stack, derive.as_macro_file(), - remap_kind | MacroInputKind::DeriveHelper, )); } res @@ -1053,7 +1003,7 @@ impl<'db> SemanticsImpl<'db> { .is_none(); if was_not_remapped { - if let ControlFlow::Break(b) = f(remap_kind, InFile::new(expansion, token)) { + if let ControlFlow::Break(b) = f(InFile::new(expansion, token)) { return Some(b); } } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index b04f5c8411e63..5443b505756a5 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -181,7 +181,7 @@ fn hover_simple( // prefer descending the same token kind in attribute expansions, in normal macros text // equivalency is more important let mut descended = vec![]; - sema.descend_into_macros_ng(original_token.clone(), |_, token| { + 
sema.descend_into_macros_ng(original_token.clone(), |token| { descended.push(token.value); }); let descended = || descended.iter(); From aeb9c7b279860c8e6c61fadb0de30b509e3a59de Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Thu, 22 Aug 2024 09:16:00 -0400 Subject: [PATCH 039/124] Implement floating point casts in const eval --- .../crates/hir-ty/src/consteval/tests.rs | 11 +++ .../crates/hir-ty/src/mir/eval.rs | 94 ++++++++++++++++++- 2 files changed, 102 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs index a0399a06f2369..86228250c2005 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs @@ -247,6 +247,17 @@ fn casts() { check_number(r#"const GOAL: i32 = -12i8 as i32"#, -12); } +#[test] +fn floating_point_casts() { + check_number(r#"const GOAL: usize = 12i32 as f32 as usize"#, 12); + check_number(r#"const GOAL: i8 = -12i32 as f64 as i8"#, -12); + check_number(r#"const GOAL: i32 = (-1ui8 as f32 + 2u64 as f32) as i32"#, 1); + check_number(r#"const GOAL: i8 = (0./0.) as i8"#, 0); + check_number(r#"const GOAL: i8 = (1./0.) as i8"#, 127); + check_number(r#"const GOAL: i8 = (-1./0.) as i8"#, -128); + check_number(r#"const GOAL: i64 = 1e18f64 as f32 as i64"#, 999999984306749440); +} + #[test] fn raw_pointer_equality() { check_number( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index 590cb3952ef32..1bb0c1886857a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -1518,9 +1518,97 @@ impl Evaluator<'_> { self.size_of_sized(target_ty, locals, "destination of int to int cast")?; Owned(current[0..dest_size].to_vec()) } - CastKind::FloatToInt => not_supported!("float to int cast"), - CastKind::FloatToFloat => not_supported!("float to float cast"), - CastKind::IntToFloat => not_supported!("float to int cast"), + CastKind::FloatToInt => { + let ty = self.operand_ty(operand, locals)?; + let TyKind::Scalar(chalk_ir::Scalar::Float(ty)) = ty.kind(Interner) else { + not_supported!("invalid float to int cast"); + }; + let value = self.eval_operand(operand, locals)?.get(self)?; + let value = match ty { + chalk_ir::FloatTy::F32 => { + let value = value.try_into().unwrap(); + f32::from_le_bytes(value) as f64 + } + chalk_ir::FloatTy::F64 => { + let value = value.try_into().unwrap(); + f64::from_le_bytes(value) + } + chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => { + not_supported!("unstable floating point type f16 and f128"); + } + }; + let is_signed = matches!( + target_ty.kind(Interner), + TyKind::Scalar(chalk_ir::Scalar::Int(_)) + ); + let dest_size = + self.size_of_sized(target_ty, locals, "destination of float to int cast")?; + let dest_bits = dest_size * 8; + let (max, min) = if dest_bits == 128 { + (i128::MAX, i128::MIN) + } else if is_signed { + let max = 1i128 << (dest_bits - 1); + (max - 1, -max) + } else { + (1i128 << dest_bits, 0) + }; + let value = (value as i128).min(max).max(min); + let result = value.to_le_bytes(); + Owned(result[0..dest_size].to_vec()) + } + CastKind::FloatToFloat => { + let ty = self.operand_ty(operand, locals)?; + let TyKind::Scalar(chalk_ir::Scalar::Float(ty)) = ty.kind(Interner) else { + not_supported!("invalid float to int cast"); + }; + let value = self.eval_operand(operand, locals)?.get(self)?; + let value = 
match ty { + chalk_ir::FloatTy::F32 => { + let value = value.try_into().unwrap(); + f32::from_le_bytes(value) as f64 + } + chalk_ir::FloatTy::F64 => { + let value = value.try_into().unwrap(); + f64::from_le_bytes(value) + } + chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => { + not_supported!("unstable floating point type f16 and f128"); + } + }; + let TyKind::Scalar(chalk_ir::Scalar::Float(target_ty)) = + target_ty.kind(Interner) + else { + not_supported!("invalid float to float cast"); + }; + match target_ty { + chalk_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()), + chalk_ir::FloatTy::F64 => Owned((value as f64).to_le_bytes().to_vec()), + chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => { + not_supported!("unstable floating point type f16 and f128"); + } + } + } + CastKind::IntToFloat => { + let current_ty = self.operand_ty(operand, locals)?; + let is_signed = matches!( + current_ty.kind(Interner), + TyKind::Scalar(chalk_ir::Scalar::Int(_)) + ); + let value = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed); + let value = i128::from_le_bytes(value); + let TyKind::Scalar(chalk_ir::Scalar::Float(target_ty)) = + target_ty.kind(Interner) + else { + not_supported!("invalid int to float cast"); + }; + match target_ty { + chalk_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()), + chalk_ir::FloatTy::F64 => Owned((value as f64).to_le_bytes().to_vec()), + chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => { + not_supported!("unstable floating point type f16 and f128"); + } + } + } CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"), }, }) From 1179cbb258483dbccec6b0a221a8c9be88999de7 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Aug 2024 15:57:19 +0200 Subject: [PATCH 040/124] Remove DescendPreference::SameKind --- .../rust-analyzer/crates/hir/src/semantics.rs | 21 +----- .../rust-analyzer/crates/ide/src/hover.rs | 2 +- .../crates/ide/src/syntax_highlighting.rs | 65 ++++++++++++++----- .../test_data/highlight_macros.html | 2 +- 4 files changed, 53 insertions(+), 37 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index 6e9fc3996ccdb..d96f8777e0f1d 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -52,7 +52,6 @@ use crate::{ const CONTINUE_NO_BREAKS: ControlFlow = ControlFlow::Continue(()); pub enum DescendPreference { - SameKind, None, } @@ -675,20 +674,9 @@ impl<'db> SemanticsImpl<'db> { token: SyntaxToken, ) -> SmallVec<[SyntaxToken; 1]> { enum Dp { - // SameText(&'t str), - SameKind(SyntaxKind), None, } - let fetch_kind = |token: &SyntaxToken| match token.parent() { - Some(node) => match node.kind() { - kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind, - _ => token.kind(), - }, - None => token.kind(), - }; let mode = match mode { - // DescendPreference::SameText => Dp::SameText(token.text()), - DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)), DescendPreference::None => Dp::None, }; let mut res = smallvec![]; @@ -696,13 +684,6 @@ impl<'db> SemanticsImpl<'db> { token.clone(), &mut |InFile { value, .. 
}| { let is_a_match = match mode { - // Dp::SameText(text) => value.text() == text, - Dp::SameKind(preferred_kind) => { - let kind = fetch_kind(&value); - kind == preferred_kind - // special case for derive macros - || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF) - } Dp::None => true, }; if is_a_match { @@ -733,7 +714,7 @@ impl<'db> SemanticsImpl<'db> { token: SyntaxToken, mut cb: impl FnMut(InFile) -> ControlFlow, ) -> Option { - self.descend_into_macros_impl(token.clone(), &mut |t| cb(t)) + self.descend_into_macros_impl(token.clone(), &mut cb) } /// Descends the token into expansions, returning the tokens that matches the input diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index 5443b505756a5..77c56b3b3ac77 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -186,7 +186,7 @@ fn hover_simple( }); let descended = || descended.iter(); - // FIXME: WE should not try these step by step, instead to accommodate for macros we should run + // TODO: WE should not try these step by step, instead to accommodate for macros we should run // all of these in "parallel" and rank their results let result = None // try lint hover diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index 94df794ccbbd9..a4dc91b2fcc10 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -13,7 +13,9 @@ mod html; #[cfg(test)] mod tests; -use hir::{DescendPreference, Name, Semantics}; +use std::ops::ControlFlow; + +use hir::{Name, Semantics}; use ide_db::{FxHashMap, RootDatabase, SymbolKind}; use span::EditionedFileId; use syntax::{ @@ -399,20 +401,53 @@ fn traverse( // Attempt to descend tokens into macro-calls. let res = match element { NodeOrToken::Token(token) if token.kind() != COMMENT => { - let token = if token.kind() == STRING { - // for strings, try to prefer a string that has not been lost in a token - // tree - // FIXME: This should be done for everything, but check perf first - sema.descend_into_macros(DescendPreference::SameKind, token) - .into_iter() - .max_by_key(|it| { - it.parent().map_or(false, |it| it.kind() != TOKEN_TREE) - }) - .unwrap() - } else { - // FIXME: We should probably rank the tokens and find the most suitable? - sema.descend_into_macros_single_exact(token) - }; + let kind = token.kind(); + let text = token.text(); + let ident_kind = kind.is_any_identifier(); + + let mut t = None; + let mut r = 0; + // FIXME: Add an extra API that takes the file id of this. That is a simple way + // to prevent us constantly walking up the tree to fetch the file + sema.descend_into_macros_ng_b(token.clone(), |tok| { + let tok = tok.value; + let tok_kind = tok.kind(); + + let exact_same_kind = tok_kind == kind; + let both_idents = + exact_same_kind || (tok_kind.is_any_identifier() && ident_kind); + let same_text = tok.text() == text; + // anything that mapped into a token tree has likely no semantic information + let no_tt_parent = tok.parent().map_or(false, |it| it.kind() != TOKEN_TREE); + let my_rank = (both_idents as usize) + | ((exact_same_kind as usize) << 1) + | ((same_text as usize) << 2) + | ((no_tt_parent as usize) << 3); + + if my_rank > 0b1110 { + // a rank of 0b1110 means that we have found a maximally interesting + // token so stop early. 
+ t = Some(tok); + return ControlFlow::Break(()); + } + + // r = r.max(my_rank); + // t = Some(t.take_if(|_| r < my_rank).unwrap_or(tok)); + match &mut t { + Some(prev) if r < my_rank => { + *prev = tok; + r = my_rank; + } + Some(_) => (), + None => { + r = my_rank; + t = Some(tok) + } + } + ControlFlow::Continue(()) + }); + + let token = t.unwrap_or(token); match token.parent().and_then(ast::NameLike::cast) { // Remap the token into the wrapping single token nodes Some(parent) => match (token.kind(), parent.syntax().kind()) { diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html index d6b7012a1f996..196552020ab66 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -58,7 +58,7 @@ } def_fn! { - fn bar() -> u32 { + fn bar() -> u32 { 100 } } From 8e82e44677ac08ddcb40e188034177ec745120db Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Aug 2024 16:18:01 +0200 Subject: [PATCH 041/124] Fully remove old macro descension API --- src/tools/rust-analyzer/crates/hir/src/lib.rs | 3 +- .../rust-analyzer/crates/hir/src/semantics.rs | 54 ++++++------------ .../src/handlers/extract_function.rs | 6 +- .../crates/ide-db/src/helpers.rs | 5 +- .../rust-analyzer/crates/ide-db/src/search.rs | 8 +-- .../crates/ide/src/call_hierarchy.rs | 4 +- .../rust-analyzer/crates/ide/src/doc_links.rs | 7 +-- .../crates/ide/src/expand_macro.rs | 55 ++++++++----------- .../crates/ide/src/goto_declaration.rs | 4 +- .../crates/ide/src/goto_definition.rs | 11 ++-- .../crates/ide/src/goto_type_definition.rs | 4 +- .../crates/ide/src/highlight_related.rs | 4 +- .../rust-analyzer/crates/ide/src/moniker.rs | 4 +- 13 files changed, 66 insertions(+), 103 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 57f810c7c79c7..6e26af55af827 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -93,8 +93,7 @@ pub use crate::{ diagnostics::*, has_source::HasSource, semantics::{ - DescendPreference, PathResolution, Semantics, SemanticsImpl, SemanticsScope, TypeInfo, - VisibleTraits, + PathResolution, Semantics, SemanticsImpl, SemanticsScope, TypeInfo, VisibleTraits, }, }; pub use hir_ty::method_resolution::TyFingerprint; diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index d96f8777e0f1d..f9346af6255b6 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -51,10 +51,6 @@ use crate::{ const CONTINUE_NO_BREAKS: ControlFlow = ControlFlow::Continue(()); -pub enum DescendPreference { - None, -} - #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum PathResolution { /// An item @@ -183,6 +179,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*, /// descend it and find again + // FIXME: Rethink this API pub fn find_node_at_offset_with_descend( &self, node: &SyntaxNode, @@ -191,8 +188,9 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast) } - /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*, + /// Find an 
AstNode by offset inside SyntaxNode, if it is inside an attribte macro call, /// descend it and find again + // FIXME: Rethink this API pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>( &'slf self, node: &SyntaxNode, @@ -666,38 +664,6 @@ impl<'db> SemanticsImpl<'db> { res } - /// Descend the token into its macro call if it is part of one, returning the tokens in the - /// expansion that it is associated with. - pub fn descend_into_macros( - &self, - mode: DescendPreference, - token: SyntaxToken, - ) -> SmallVec<[SyntaxToken; 1]> { - enum Dp { - None, - } - let mode = match mode { - DescendPreference::None => Dp::None, - }; - let mut res = smallvec![]; - self.descend_into_macros_impl::( - token.clone(), - &mut |InFile { value, .. }| { - let is_a_match = match mode { - Dp::None => true, - }; - if is_a_match { - res.push(value); - } - ControlFlow::Continue(()) - }, - ); - if res.is_empty() { - res.push(token); - } - res - } - pub fn descend_into_macros_ng( &self, token: SyntaxToken, @@ -709,6 +675,18 @@ impl<'db> SemanticsImpl<'db> { }); } + pub fn descend_into_macros_ng_v(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { + let mut res = smallvec![]; + self.descend_into_macros_impl(token.clone(), &mut |t| { + res.push(t.value); + CONTINUE_NO_BREAKS + }); + if res.is_empty() { + res.push(token); + } + res + } + pub fn descend_into_macros_ng_b( &self, token: SyntaxToken, @@ -1003,7 +981,7 @@ impl<'db> SemanticsImpl<'db> { offset: TextSize, ) -> impl Iterator + '_> + '_ { node.token_at_offset(offset) - .map(move |token| self.descend_into_macros(DescendPreference::None, token)) + .map(move |token| self.descend_into_macros_exact(token)) .map(|descendants| { descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it)) }) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs index cfcb09b9f7d9f..fd379cb88daaf 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs @@ -3,8 +3,8 @@ use std::{iter, ops::RangeInclusive}; use ast::make; use either::Either; use hir::{ - DescendPreference, HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, - PathResolution, Semantics, TypeInfo, TypeParam, + HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, PathResolution, Semantics, + TypeInfo, TypeParam, }; use ide_db::{ defs::{Definition, NameRefClass}, @@ -834,7 +834,7 @@ impl FunctionBody { .descendants_with_tokens() .filter_map(SyntaxElement::into_token) .filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self])) - .flat_map(|t| sema.descend_into_macros(DescendPreference::None, t)) + .flat_map(|t| sema.descend_into_macros_exact(t)) .for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast))); } } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs index 63c09af3d687b..84fa58d743bbc 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs @@ -3,7 +3,7 @@ use std::collections::VecDeque; use base_db::SourceRootDatabase; -use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics}; +use hir::{Crate, ItemInNs, ModuleDef, Name, Semantics}; use span::{Edition, FileId}; use syntax::{ ast::{self, make}, @@ -112,11 +112,12 @@ pub fn is_editable_crate(krate: Crate, db: 
&RootDatabase) -> bool { !db.source_root(source_root_id).is_library } +// FIXME: This is a weird function pub fn get_definition( sema: &Semantics<'_, RootDatabase>, token: SyntaxToken, ) -> Option { - for token in sema.descend_into_macros(DescendPreference::None, token) { + for token in sema.descend_into_macros_exact(token) { let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); if let Some(&[x]) = def.as_deref() { return Some(x); diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 7319b7ac02da0..197c327ee4edf 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -9,8 +9,8 @@ use std::mem; use base_db::{salsa::Database, SourceDatabase, SourceRootDatabase}; use hir::{ - sym, AsAssocItem, DefWithBody, DescendPreference, FileRange, HasAttrs, HasSource, HirFileIdExt, - InFile, InRealFile, ModuleSource, PathResolution, Semantics, Visibility, + sym, AsAssocItem, DefWithBody, FileRange, HasAttrs, HasSource, HirFileIdExt, InFile, + InRealFile, ModuleSource, PathResolution, Semantics, Visibility, }; use memchr::memmem::Finder; use parser::SyntaxKind; @@ -549,9 +549,7 @@ impl<'a> FindUsages<'a> { // every textual hit. That function is notoriously // expensive even for things that do not get down mapped // into macros. - sema.descend_into_macros(DescendPreference::None, token) - .into_iter() - .filter_map(|it| it.parent()) + sema.descend_into_macros_exact(token).into_iter().filter_map(|it| it.parent()) }) }; diff --git a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs index 87093104852f3..155259a138040 100644 --- a/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs +++ b/src/tools/rust-analyzer/crates/ide/src/call_hierarchy.rs @@ -2,7 +2,7 @@ use std::iter; -use hir::{DescendPreference, Semantics}; +use hir::Semantics; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, helpers::pick_best_token, @@ -86,7 +86,7 @@ pub(crate) fn outgoing_calls( })?; let mut calls = CallLocations::default(); - sema.descend_into_macros(DescendPreference::None, token) + sema.descend_into_macros_exact(token) .into_iter() .filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast)) .filter_map(|item| match item { diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs index 1304b057021b4..1afc7e3c61830 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs @@ -10,10 +10,7 @@ use pulldown_cmark_to_cmark::{cmark_resume_with_options, Options as CMarkOptions use stdx::format_to; use url::Url; -use hir::{ - db::HirDatabase, sym, Adt, AsAssocItem, AssocItem, AssocItemContainer, DescendPreference, - HasAttrs, -}; +use hir::{db::HirDatabase, sym, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs}; use ide_db::{ base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, SourceDatabase}, defs::{Definition, NameClass, NameRefClass}, @@ -289,7 +286,7 @@ impl DocCommentToken { let original_start = doc_token.text_range().start(); let relative_comment_offset = offset - original_start - prefix_len; - sema.descend_into_macros(DescendPreference::None, doc_token).into_iter().find_map(|t| { + sema.descend_into_macros_ng_v(doc_token).into_iter().find_map(|t| { let (node, descended_prefix_len) = match_ast! 
{ match t { ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?), diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs index 9788c01544232..a939ed214ad85 100644 --- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs +++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs @@ -1,4 +1,4 @@ -use hir::{DescendPreference, InFile, MacroFileIdExt, Semantics}; +use hir::{InFile, MacroFileIdExt, Semantics}; use ide_db::{ helpers::pick_best_token, syntax_helpers::insert_whitespace_into_node::insert_ws_into, FileId, RootDatabase, @@ -41,37 +41,30 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< // struct Bar; // ``` - let derive = sema - .descend_into_macros(DescendPreference::None, tok.clone()) - .into_iter() - .find_map(|descended| { - let macro_file = sema.hir_file_for(&descended.parent()?).macro_file()?; - if !macro_file.is_derive_attr_pseudo_expansion(db) { - return None; - } + let derive = sema.descend_into_macros_exact(tok.clone()).into_iter().find_map(|descended| { + let macro_file = sema.hir_file_for(&descended.parent()?).macro_file()?; + if !macro_file.is_derive_attr_pseudo_expansion(db) { + return None; + } - let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string(); - // up map out of the #[derive] expansion - let InFile { file_id, value: tokens } = - hir::InMacroFile::new(macro_file, descended).upmap_once(db); - let token = sema.parse_or_expand(file_id).covering_element(tokens[0]).into_token()?; - let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; - let expansions = sema.expand_derive_macro(&attr)?; - let idx = attr - .token_tree()? - .token_trees_and_tokens() - .filter_map(NodeOrToken::into_token) - .take_while(|it| it != &token) - .filter(|it| it.kind() == T![,]) - .count(); - let expansion = format( - db, - SyntaxKind::MACRO_ITEMS, - position.file_id, - expansions.get(idx).cloned()?, - ); - Some(ExpandedMacro { name, expansion }) - }); + let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string(); + // up map out of the #[derive] expansion + let InFile { file_id, value: tokens } = + hir::InMacroFile::new(macro_file, descended).upmap_once(db); + let token = sema.parse_or_expand(file_id).covering_element(tokens[0]).into_token()?; + let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; + let expansions = sema.expand_derive_macro(&attr)?; + let idx = attr + .token_tree()? 
+ .token_trees_and_tokens() + .filter_map(NodeOrToken::into_token) + .take_while(|it| it != &token) + .filter(|it| it.kind() == T![,]) + .count(); + let expansion = + format(db, SyntaxKind::MACRO_ITEMS, position.file_id, expansions.get(idx).cloned()?); + Some(ExpandedMacro { name, expansion }) + }); if derive.is_some() { return derive; diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs index 6076de54ebaf8..dd71bcf050eed 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs @@ -1,4 +1,4 @@ -use hir::{AsAssocItem, DescendPreference, Semantics}; +use hir::{AsAssocItem, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, RootDatabase, @@ -29,7 +29,7 @@ pub(crate) fn goto_declaration( .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?; let range = original_token.text_range(); let info: Vec = sema - .descend_into_macros(DescendPreference::None, original_token) + .descend_into_macros_ng_v(original_token) .iter() .filter_map(|token| { let parent = token.parent()?; diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index 6a6fbcb9a6b71..9fe1488e36826 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -5,10 +5,7 @@ use crate::{ navigation_target::{self, ToNav}, FilePosition, NavigationTarget, RangeInfo, TryToNav, UpmappingResult, }; -use hir::{ - AsAssocItem, AssocItem, DescendPreference, FileRange, InFile, MacroFileIdExt, ModuleDef, - Semantics, -}; +use hir::{AsAssocItem, AssocItem, FileRange, InFile, MacroFileIdExt, ModuleDef, Semantics}; use ide_db::{ base_db::{AnchoredPath, FileLoader, SourceDatabase}, defs::{Definition, IdentClass}, @@ -86,7 +83,7 @@ pub(crate) fn goto_definition( } let navs = sema - .descend_into_macros(DescendPreference::None, original_token.clone()) + .descend_into_macros_ng_v(original_token.clone()) .into_iter() .filter_map(|token| { let parent = token.parent()?; @@ -251,7 +248,7 @@ pub(crate) fn find_fn_or_blocks( None }; - sema.descend_into_macros(DescendPreference::None, token.clone()) + sema.descend_into_macros_ng_v(token.clone()) .into_iter() .filter_map(find_ancestors) .collect_vec() @@ -369,7 +366,7 @@ pub(crate) fn find_loops( None }; - sema.descend_into_macros(DescendPreference::None, token.clone()) + sema.descend_into_macros_ng_v(token.clone()) .into_iter() .filter_map(find_ancestors) .collect_vec() diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs index f75b8fb7d0224..da2be78732059 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs @@ -1,4 +1,4 @@ -use hir::{DescendPreference, GenericParam}; +use hir::GenericParam; use ide_db::{base_db::Upcast, defs::Definition, helpers::pick_best_token, RootDatabase}; use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, T}; @@ -69,7 +69,7 @@ pub(crate) fn goto_type_definition( } let range = token.text_range(); - sema.descend_into_macros(DescendPreference::None, token) + sema.descend_into_macros_ng_v(token) .into_iter() .filter_map(|token| { let ty = sema diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs 
b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs index a88261df1063d..5348e855be4b0 100644 --- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs +++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs @@ -1,6 +1,6 @@ use std::iter; -use hir::{db, DescendPreference, FilePosition, FileRange, HirFileId, InFile, Semantics}; +use hir::{db, FilePosition, FileRange, HirFileId, InFile, Semantics}; use ide_db::{ defs::{Definition, IdentClass}, helpers::pick_best_token, @@ -542,7 +542,7 @@ fn cover_range(r0: Option, r1: Option) -> Option, token: SyntaxToken) -> FxHashSet { - sema.descend_into_macros(DescendPreference::None, token) + sema.descend_into_macros_exact(token) .into_iter() .filter_map(|token| IdentClass::classify_token(sema, &token)) .flat_map(IdentClass::definitions_no_ops) diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs index 7b5fd651e3d79..4be1b570981f6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs +++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs @@ -3,7 +3,7 @@ use core::fmt; -use hir::{Adt, AsAssocItem, AssocItemContainer, Crate, DescendPreference, MacroKind, Semantics}; +use hir::{Adt, AsAssocItem, AssocItemContainer, Crate, MacroKind, Semantics}; use ide_db::{ base_db::{CrateOrigin, LangCrateOrigin}, defs::{Definition, IdentClass}, @@ -154,7 +154,7 @@ pub(crate) fn moniker( }); } let navs = sema - .descend_into_macros(DescendPreference::None, original_token.clone()) + .descend_into_macros_exact(original_token.clone()) .into_iter() .filter_map(|token| { IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| { From e698ca1246d546532dfc966a228c54918989c1b2 Mon Sep 17 00:00:00 2001 From: sun-jacobi Date: Thu, 22 Aug 2024 16:20:18 +0200 Subject: [PATCH 042/124] fix a misleading comment in TB tests --- src/tools/miri/tests/pass/tree_borrows/tree-borrows.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/miri/tests/pass/tree_borrows/tree-borrows.rs b/src/tools/miri/tests/pass/tree_borrows/tree-borrows.rs index adad18c1af368..c741e4de6d5b1 100644 --- a/src/tools/miri/tests/pass/tree_borrows/tree-borrows.rs +++ b/src/tools/miri/tests/pass/tree_borrows/tree-borrows.rs @@ -321,7 +321,7 @@ fn not_unpin_not_protected() { pub struct NotUnpin(#[allow(dead_code)] i32, PhantomPinned); fn inner(x: &mut NotUnpin, f: fn(&mut NotUnpin)) { - // `f` may mutate, but it may not deallocate! + // `f` is allowed to deallocate `x`. 
f(x) } From 4d614444b950967db2f1d9b18fc144117df4e318 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Aug 2024 16:18:56 +0200 Subject: [PATCH 043/124] Rename macro descension functions --- .../rust-analyzer/crates/hir/src/semantics.rs | 16 ++++++++-------- .../rust-analyzer/crates/ide/src/doc_links.rs | 2 +- .../crates/ide/src/goto_declaration.rs | 2 +- .../crates/ide/src/goto_definition.rs | 9 +++------ .../crates/ide/src/goto_type_definition.rs | 2 +- src/tools/rust-analyzer/crates/ide/src/hover.rs | 2 +- .../crates/ide/src/syntax_highlighting.rs | 2 +- 7 files changed, 16 insertions(+), 19 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index f9346af6255b6..ec7a89c64320a 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -188,7 +188,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast) } - /// Find an AstNode by offset inside SyntaxNode, if it is inside an attribte macro call, + /// Find an AstNode by offset inside SyntaxNode, if it is inside an attribute macro call, /// descend it and find again // FIXME: Rethink this API pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>( @@ -550,7 +550,7 @@ impl<'db> SemanticsImpl<'db> { string: &ast::String, ) -> Option)>> { let quote = string.open_quote_text_range()?; - self.descend_into_macros_ng_b(string.syntax().clone(), |token| { + self.descend_into_macros_breakable(string.syntax().clone(), |token| { (|| { let token = token.value; let string = ast::String::cast(token)?; @@ -577,7 +577,7 @@ impl<'db> SemanticsImpl<'db> { ) -> Option<(TextRange, Option)> { let original_string = ast::String::cast(original_token.clone())?; let quote = original_string.open_quote_text_range()?; - self.descend_into_macros_ng_b(original_token.clone(), |token| { + self.descend_into_macros_breakable(original_token.clone(), |token| { (|| { let token = token.value; self.resolve_offset_in_format_args( @@ -664,7 +664,7 @@ impl<'db> SemanticsImpl<'db> { res } - pub fn descend_into_macros_ng( + pub fn descend_into_macros_cb( &self, token: SyntaxToken, mut cb: impl FnMut(InFile), @@ -675,7 +675,7 @@ impl<'db> SemanticsImpl<'db> { }); } - pub fn descend_into_macros_ng_v(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { + pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { let mut res = smallvec![]; self.descend_into_macros_impl(token.clone(), &mut |t| { res.push(t.value); @@ -687,7 +687,7 @@ impl<'db> SemanticsImpl<'db> { res } - pub fn descend_into_macros_ng_b( + pub fn descend_into_macros_breakable( &self, token: SyntaxToken, mut cb: impl FnMut(InFile) -> ControlFlow, @@ -702,7 +702,7 @@ impl<'db> SemanticsImpl<'db> { let text = token.text(); let kind = token.kind(); - self.descend_into_macros_ng(token.clone(), |InFile { value, file_id: _ }| { + self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }| { let mapped_kind = value.kind(); let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier(); let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); @@ -722,7 +722,7 @@ impl<'db> SemanticsImpl<'db> { let text = token.text(); let kind = token.kind(); - self.descend_into_macros_ng_b(token.clone(), |InFile { value, file_id: _ }| { + self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| { let 
mapped_kind = value.kind(); let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier(); let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs index 1afc7e3c61830..925ae620231d2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs @@ -286,7 +286,7 @@ impl DocCommentToken { let original_start = doc_token.text_range().start(); let relative_comment_offset = offset - original_start - prefix_len; - sema.descend_into_macros_ng_v(doc_token).into_iter().find_map(|t| { + sema.descend_into_macros(doc_token).into_iter().find_map(|t| { let (node, descended_prefix_len) = match_ast! { match t { ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?), diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs index dd71bcf050eed..6ae9dde84be5e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs @@ -29,7 +29,7 @@ pub(crate) fn goto_declaration( .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?; let range = original_token.text_range(); let info: Vec = sema - .descend_into_macros_ng_v(original_token) + .descend_into_macros(original_token) .iter() .filter_map(|token| { let parent = token.parent()?; diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index 9fe1488e36826..5769f2cabc750 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -83,7 +83,7 @@ pub(crate) fn goto_definition( } let navs = sema - .descend_into_macros_ng_v(original_token.clone()) + .descend_into_macros(original_token.clone()) .into_iter() .filter_map(|token| { let parent = token.parent()?; @@ -248,10 +248,7 @@ pub(crate) fn find_fn_or_blocks( None }; - sema.descend_into_macros_ng_v(token.clone()) - .into_iter() - .filter_map(find_ancestors) - .collect_vec() + sema.descend_into_macros(token.clone()).into_iter().filter_map(find_ancestors).collect_vec() } fn nav_for_exit_points( @@ -366,7 +363,7 @@ pub(crate) fn find_loops( None }; - sema.descend_into_macros_ng_v(token.clone()) + sema.descend_into_macros(token.clone()) .into_iter() .filter_map(find_ancestors) .collect_vec() diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs index da2be78732059..ca04b7bb5a9b3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs @@ -69,7 +69,7 @@ pub(crate) fn goto_type_definition( } let range = token.text_range(); - sema.descend_into_macros_ng_v(token) + sema.descend_into_macros(token) .into_iter() .filter_map(|token| { let ty = sema diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index 77c56b3b3ac77..d76d9afc18586 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -181,7 +181,7 @@ fn hover_simple( // prefer descending the same token kind in attribute expansions, in normal macros text // equivalency is more important let mut 
descended = vec![]; - sema.descend_into_macros_ng(original_token.clone(), |token| { + sema.descend_into_macros_cb(original_token.clone(), |token| { descended.push(token.value); }); let descended = || descended.iter(); diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index a4dc91b2fcc10..333f97c274397 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -409,7 +409,7 @@ fn traverse( let mut r = 0; // FIXME: Add an extra API that takes the file id of this. That is a simple way // to prevent us constantly walking up the tree to fetch the file - sema.descend_into_macros_ng_b(token.clone(), |tok| { + sema.descend_into_macros_breakable(token.clone(), |tok| { let tok = tok.value; let tok_kind = tok.kind(); From d79999aaa00679bfc581d30858d703b5b959137c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Aug 2024 16:45:37 +0200 Subject: [PATCH 044/124] Thread file id through descension API for semantic highlighting --- .../crates/hir-expand/src/files.rs | 9 ++ .../rust-analyzer/crates/hir/src/semantics.rs | 107 +++++++++++------- .../crates/ide/src/syntax_highlighting.rs | 76 +++++++------ 3 files changed, 117 insertions(+), 75 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs index 20f484f672afb..d41f69812ee61 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs @@ -461,3 +461,12 @@ impl InFile { Some(InRealFile::new(file_id, value)) } } + +impl InFile { + pub fn into_real_file(self) -> Result, InFile> { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }), + HirFileIdRepr::MacroFile(_) => Err(self), + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index ec7a89c64320a..3d6c985043089 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -550,7 +550,9 @@ impl<'db> SemanticsImpl<'db> { string: &ast::String, ) -> Option)>> { let quote = string.open_quote_text_range()?; - self.descend_into_macros_breakable(string.syntax().clone(), |token| { + + let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?; + self.descend_into_macros_breakable(token, |token| { (|| { let token = token.value; let string = ast::String::cast(token)?; @@ -576,8 +578,9 @@ impl<'db> SemanticsImpl<'db> { offset: TextSize, ) -> Option<(TextRange, Option)> { let original_string = ast::String::cast(original_token.clone())?; + let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?; let quote = original_string.open_quote_text_range()?; - self.descend_into_macros_breakable(original_token.clone(), |token| { + self.descend_into_macros_breakable(original_token, |token| { (|| { let token = token.value; self.resolve_offset_in_format_args( @@ -617,30 +620,37 @@ impl<'db> SemanticsImpl<'db> { Some(it) => it, None => return res, }; + let file = self.find_file(node.syntax()); + let Some(file_id) = file.file_id.file_id() else { + return res; + }; if first == last { // node is just the token, so descend the token - self.descend_into_macros_impl(first, &mut |InFile { value, .. 
}| { - if let Some(node) = value - .parent_ancestors() - .take_while(|it| it.text_range() == value.text_range()) - .find_map(N::cast) - { - res.push(node) - } - CONTINUE_NO_BREAKS - }); + self.descend_into_macros_impl( + InRealFile::new(file_id, first), + &mut |InFile { value, .. }| { + if let Some(node) = value + .parent_ancestors() + .take_while(|it| it.text_range() == value.text_range()) + .find_map(N::cast) + { + res.push(node) + } + CONTINUE_NO_BREAKS + }, + ); } else { // Descend first and last token, then zip them to look for the node they belong to let mut scratch: SmallVec<[_; 1]> = smallvec![]; - self.descend_into_macros_impl(first, &mut |token| { + self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token| { scratch.push(token); CONTINUE_NO_BREAKS }); let mut scratch = scratch.into_iter(); self.descend_into_macros_impl( - last, + InRealFile::new(file_id, last), &mut |InFile { value: last, file_id: last_fid }| { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if first_fid == last_fid { @@ -669,18 +679,22 @@ impl<'db> SemanticsImpl<'db> { token: SyntaxToken, mut cb: impl FnMut(InFile), ) { - self.descend_into_macros_impl(token.clone(), &mut |t| { - cb(t); - CONTINUE_NO_BREAKS - }); + if let Ok(token) = self.wrap_token_infile(token).into_real_file() { + self.descend_into_macros_impl(token, &mut |t| { + cb(t); + CONTINUE_NO_BREAKS + }); + } } pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { let mut res = smallvec![]; - self.descend_into_macros_impl(token.clone(), &mut |t| { - res.push(t.value); - CONTINUE_NO_BREAKS - }); + if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() { + self.descend_into_macros_impl(token, &mut |t| { + res.push(t.value); + CONTINUE_NO_BREAKS + }); + } if res.is_empty() { res.push(token); } @@ -689,7 +703,7 @@ impl<'db> SemanticsImpl<'db> { pub fn descend_into_macros_breakable( &self, - token: SyntaxToken, + token: InRealFile, mut cb: impl FnMut(InFile) -> ControlFlow, ) -> Option { self.descend_into_macros_impl(token.clone(), &mut cb) @@ -721,28 +735,36 @@ impl<'db> SemanticsImpl<'db> { pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken { let text = token.text(); let kind = token.kind(); - - self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| { - let mapped_kind = value.kind(); - let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier(); - let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); - if matches { - ControlFlow::Break(value) - } else { - ControlFlow::Continue(()) - } - }) + if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() { + self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| { + let mapped_kind = value.kind(); + let any_ident_match = + || kind.is_any_identifier() && value.kind().is_any_identifier(); + let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); + if matches { + ControlFlow::Break(value) + } else { + ControlFlow::Continue(()) + } + }) + } else { + None + } .unwrap_or(token) } fn descend_into_macros_impl( &self, - token: SyntaxToken, + InRealFile { value: token, file_id }: InRealFile, f: &mut dyn FnMut(InFile) -> ControlFlow, ) -> Option { let _p = tracing::info_span!("descend_into_macros_impl").entered(); - let (sa, span, file_id) = - token.parent().and_then(|parent| self.analyze_no_infer(&parent)).and_then(|sa| { + let (sa, span, file_id) = 
token + .parent() + .and_then(|parent| { + self.analyze_impl(InRealFile::new(file_id, &parent).into(), None, false) + }) + .and_then(|sa| { let file_id = sa.file_id.file_id()?; Some(( sa, @@ -1400,11 +1422,13 @@ impl<'db> SemanticsImpl<'db> { /// Returns none if the file of the node is not part of a crate. fn analyze(&self, node: &SyntaxNode) -> Option { + let node = self.find_file(node); self.analyze_impl(node, None, true) } /// Returns none if the file of the node is not part of a crate. fn analyze_no_infer(&self, node: &SyntaxNode) -> Option { + let node = self.find_file(node); self.analyze_impl(node, None, false) } @@ -1413,17 +1437,17 @@ impl<'db> SemanticsImpl<'db> { node: &SyntaxNode, offset: TextSize, ) -> Option { + let node = self.find_file(node); self.analyze_impl(node, Some(offset), false) } fn analyze_impl( &self, - node: &SyntaxNode, + node: InFile<&SyntaxNode>, offset: Option, infer_body: bool, ) -> Option { let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered(); - let node = self.find_file(node); let container = self.with_ctx(|ctx| ctx.find_container(node))?; @@ -1468,6 +1492,11 @@ impl<'db> SemanticsImpl<'db> { InFile::new(file_id, node) } + fn wrap_token_infile(&self, token: SyntaxToken) -> InFile { + let InFile { file_id, .. } = self.find_file(&token.parent().unwrap()); + InFile::new(file_id, token) + } + /// Wraps the node in a [`InFile`] with the file id it belongs to. fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> { let root_node = find_root(node); diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index 333f97c274397..bfab5ceb12914 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -15,7 +15,7 @@ mod tests; use std::ops::ControlFlow; -use hir::{Name, Semantics}; +use hir::{InRealFile, Name, Semantics}; use ide_db::{FxHashMap, RootDatabase, SymbolKind}; use span::EditionedFileId; use syntax::{ @@ -409,43 +409,47 @@ fn traverse( let mut r = 0; // FIXME: Add an extra API that takes the file id of this. That is a simple way // to prevent us constantly walking up the tree to fetch the file - sema.descend_into_macros_breakable(token.clone(), |tok| { - let tok = tok.value; - let tok_kind = tok.kind(); - - let exact_same_kind = tok_kind == kind; - let both_idents = - exact_same_kind || (tok_kind.is_any_identifier() && ident_kind); - let same_text = tok.text() == text; - // anything that mapped into a token tree has likely no semantic information - let no_tt_parent = tok.parent().map_or(false, |it| it.kind() != TOKEN_TREE); - let my_rank = (both_idents as usize) - | ((exact_same_kind as usize) << 1) - | ((same_text as usize) << 2) - | ((no_tt_parent as usize) << 3); - - if my_rank > 0b1110 { - // a rank of 0b1110 means that we have found a maximally interesting - // token so stop early. 
- t = Some(tok); - return ControlFlow::Break(()); - } - - // r = r.max(my_rank); - // t = Some(t.take_if(|_| r < my_rank).unwrap_or(tok)); - match &mut t { - Some(prev) if r < my_rank => { - *prev = tok; - r = my_rank; + sema.descend_into_macros_breakable( + InRealFile::new(file_id, token.clone()), + |tok| { + let tok = tok.value; + let tok_kind = tok.kind(); + + let exact_same_kind = tok_kind == kind; + let both_idents = + exact_same_kind || (tok_kind.is_any_identifier() && ident_kind); + let same_text = tok.text() == text; + // anything that mapped into a token tree has likely no semantic information + let no_tt_parent = + tok.parent().map_or(false, |it| it.kind() != TOKEN_TREE); + let my_rank = (both_idents as usize) + | ((exact_same_kind as usize) << 1) + | ((same_text as usize) << 2) + | ((no_tt_parent as usize) << 3); + + if my_rank > 0b1110 { + // a rank of 0b1110 means that we have found a maximally interesting + // token so stop early. + t = Some(tok); + return ControlFlow::Break(()); } - Some(_) => (), - None => { - r = my_rank; - t = Some(tok) + + // r = r.max(my_rank); + // t = Some(t.take_if(|_| r < my_rank).unwrap_or(tok)); + match &mut t { + Some(prev) if r < my_rank => { + *prev = tok; + r = my_rank; + } + Some(_) => (), + None => { + r = my_rank; + t = Some(tok) + } } - } - ControlFlow::Continue(()) - }); + ControlFlow::Continue(()) + }, + ); let token = t.unwrap_or(token); match token.parent().and_then(ast::NameLike::cast) { From b0e7ef4031cc6215cfb9045512d44ebba3ae82fd Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Aug 2024 17:01:51 +0200 Subject: [PATCH 045/124] Sort hover results by relevance --- .../rust-analyzer/crates/ide/src/hover.rs | 22 +++++++++++++++---- .../crates/ide/src/syntax_highlighting.rs | 2 -- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index d76d9afc18586..edf14a6f4bf06 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -180,11 +180,25 @@ fn hover_simple( // prefer descending the same token kind in attribute expansions, in normal macros text // equivalency is more important - let mut descended = vec![]; - sema.descend_into_macros_cb(original_token.clone(), |token| { - descended.push(token.value); + let mut descended = sema.descend_into_macros(original_token.clone()); + + let kind = original_token.kind(); + let text = original_token.text(); + let ident_kind = kind.is_any_identifier(); + + descended.sort_by_key(|tok| { + let tok_kind = tok.kind(); + + let exact_same_kind = tok_kind == kind; + let both_idents = exact_same_kind || (tok_kind.is_any_identifier() && ident_kind); + let same_text = tok.text() == text; + // anything that mapped into a token tree has likely no semantic information + let no_tt_parent = tok.parent().map_or(false, |it| it.kind() != TOKEN_TREE); + (both_idents as usize) + | ((exact_same_kind as usize) << 1) + | ((same_text as usize) << 2) + | ((no_tt_parent as usize) << 3) }); - let descended = || descended.iter(); // TODO: WE should not try these step by step, instead to accommodate for macros we should run // all of these in "parallel" and rank their results diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index bfab5ceb12914..927fdaa178ca2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ 
b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -407,8 +407,6 @@ fn traverse( let mut t = None; let mut r = 0; - // FIXME: Add an extra API that takes the file id of this. That is a simple way - // to prevent us constantly walking up the tree to fetch the file sema.descend_into_macros_breakable( InRealFile::new(file_id, token.clone()), |tok| { From 3a423fc6b74dda43d771c9e77439c3220870671a Mon Sep 17 00:00:00 2001 From: Johannes Hostert Date: Thu, 22 Aug 2024 16:15:57 +0200 Subject: [PATCH 046/124] Avoid extra copy by using `retain_mut` and moving the deletion into the closure --- .../src/borrow_tracker/tree_borrows/tree.rs | 45 ++++++++++--------- 1 file changed, 24 insertions(+), 21 deletions(-) diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs index 56643c6cbe811..d51e1109ed4e2 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs @@ -754,29 +754,32 @@ impl Tree { // list of remaining children back in. let mut children_of_node = mem::take(&mut self.nodes.get_mut(*tag).unwrap().children); - // Remove all useless children, and save them for later. - // The closure needs `&self` and the loop below needs `&mut self`, so we need to `collect` - // in to a temporary list. - let to_remove: Vec<_> = - children_of_node.drain_filter(|x| self.is_useless(*x, live)).collect(); + // Remove all useless children. + children_of_node.retain_mut(|idx| { + if self.is_useless(*idx, live) { + // Note: In the rest of this comment, "this node" refers to `idx`. + // This node has no more children (if there were any, they have already been removed). + // It is also unreachable as determined by the GC, so we can remove it everywhere. + // Due to the API of UniMap we must make sure to call + // `UniValMap::remove` for the key of this node on *all* maps that used it + // (which are `self.nodes` and every range of `self.rperms`) + // before we can safely apply `UniKeyMap::remove` to truly remove + // this tag from the `tag_mapping`. + let node = self.nodes.remove(*idx).unwrap(); + for (_perms_range, perms) in self.rperms.iter_mut_all() { + perms.remove(*idx); + } + self.tag_mapping.remove(&node.tag); + // now delete it + false + } else { + // do nothing, but retain + true + } + }); // Put back the now-filtered vector. self.nodes.get_mut(*tag).unwrap().children = children_of_node; - // Now, all that is left is unregistering the children saved in `to_remove`. - for idx in to_remove { - // Note: In the rest of this comment, "this node" refers to `idx`. - // This node has no more children (if there were any, they have already been removed). - // It is also unreachable as determined by the GC, so we can remove it everywhere. - // Due to the API of UniMap we must make sure to call - // `UniValMap::remove` for the key of this node on *all* maps that used it - // (which are `self.nodes` and every range of `self.rperms`) - // before we can safely apply `UniKeyMap::remove` to truly remove - // this tag from the `tag_mapping`. - let node = self.nodes.remove(idx).unwrap(); - for (_perms_range, perms) in self.rperms.iter_mut_all() { - perms.remove(idx); - } - self.tag_mapping.remove(&node.tag); - } + // We are done, the parent can continue. 
stack.pop(); continue; From f9db48f8c93207bcca411345a5785fa37d847ad4 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Aug 2024 17:29:32 +0200 Subject: [PATCH 047/124] Consider interleaving hover kinds --- .../rust-analyzer/crates/ide/src/hover.rs | 177 ++++++++++-------- .../rust-analyzer/crates/ide/src/markup.rs | 2 +- 2 files changed, 95 insertions(+), 84 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index edf14a6f4bf06..dde98d40dc0e1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -58,7 +58,7 @@ pub enum HoverDocFormat { PlainText, } -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub enum HoverAction { Runnable(Runnable), Implementation(FilePosition), @@ -97,7 +97,7 @@ pub struct HoverGotoTypeData { } /// Contains the results when hovering over an item -#[derive(Debug, Default)] +#[derive(Clone, Debug, Default, Hash, PartialEq, Eq)] pub struct HoverResult { pub markup: Markup, pub actions: Vec, @@ -200,21 +200,24 @@ fn hover_simple( | ((no_tt_parent as usize) << 3) }); - // TODO: WE should not try these step by step, instead to accommodate for macros we should run - // all of these in "parallel" and rank their results - let result = None - // try lint hover - .or_else(|| { - descended().find_map(|token| { - // FIXME: Definition should include known lints and the like instead of having this special case here - let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; - render::try_for_lint(&attr, token) - }) - }) - // try definitions - .or_else(|| { - descended() - .filter_map(|token| { + let mut res = vec![]; + // let mut merge_result = |next: HoverResult| { + // res.markup = Markup::from(format!("{}\n---\n{}", res.markup, next.markup)); + // res.actions.extend(next.actions); + // }; + for token in descended { + let lint_hover = (|| { + // FIXME: Definition should include known lints and the like instead of having this special case here + let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; + render::try_for_lint(&attr, &token) + })(); + if let Some(lint_hover) = lint_hover { + res.push(lint_hover); + continue; + } + let definitions = (|| { + Some( + 'a: { let node = token.parent()?; // special case macro calls, we wanna render the invoked arm index @@ -229,11 +232,11 @@ fn hover_simple( .and_then(ast::MacroCall::cast) { if let Some(macro_) = sema.resolve_macro_call(¯o_call) { - return Some(vec![( + break 'a vec![( Definition::Macro(macro_), sema.resolve_macro_call_arm(¯o_call), node, - )]); + )]; } } } @@ -242,88 +245,96 @@ fn hover_simple( match IdentClass::classify_node(sema, &node)? { // It's better for us to fall back to the keyword hover here, // rendering poll is very confusing - IdentClass::Operator(OperatorClass::Await(_)) => None, + IdentClass::Operator(OperatorClass::Await(_)) => return None, IdentClass::NameRefClass(NameRefClass::ExternCrateShorthand { decl, .. 
- }) => Some(vec![(Definition::ExternCrateDecl(decl), None, node)]), + }) => { + vec![(Definition::ExternCrateDecl(decl), None, node)] + } - class => Some( + class => { multizip((class.definitions(), iter::repeat(None), iter::repeat(node))) - .collect::>(), - ), + .collect::>() + } } - }) - .flatten() + } + .into_iter() .unique_by(|&(def, _, _)| def) .map(|(def, macro_arm, node)| { hover_for_definition(sema, file_id, def, &node, macro_arm, config, edition) }) - .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| { - acc.actions.extend(actions); - acc.markup = Markup::from(format!("{}\n---\n{markup}", acc.markup)); - acc - }) - }) - // try keywords - .or_else(|| descended().find_map(|token| render::keyword(sema, config, token, edition))) - // try _ hovers - .or_else(|| descended().find_map(|token| render::underscore(sema, config, token, edition))) - // try rest pattern hover - .or_else(|| { - descended().find_map(|token| { - if token.kind() != DOT2 { - return None; - } + .collect::>(), + ) + })(); + if let Some(definitions) = definitions { + res.extend(definitions); + continue; + } + let keywords = || render::keyword(sema, config, &token, edition); + let underscore = || render::underscore(sema, config, &token, edition); + let rest_pat = || { + if token.kind() != DOT2 { + return None; + } - let rest_pat = token.parent().and_then(ast::RestPat::cast)?; - let record_pat_field_list = - rest_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast)?; + let rest_pat = token.parent().and_then(ast::RestPat::cast)?; + let record_pat_field_list = + rest_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast)?; - let record_pat = - record_pat_field_list.syntax().parent().and_then(ast::RecordPat::cast)?; + let record_pat = + record_pat_field_list.syntax().parent().and_then(ast::RecordPat::cast)?; - Some(render::struct_rest_pat(sema, config, &record_pat, edition)) - }) - }) - // try () call hovers - .or_else(|| { - descended().find_map(|token| { - if token.kind() != T!['('] && token.kind() != T![')'] { - return None; - } - let arg_list = token.parent().and_then(ast::ArgList::cast)?.syntax().parent()?; - let call_expr = syntax::match_ast! { - match arg_list { - ast::CallExpr(expr) => expr.into(), - ast::MethodCallExpr(expr) => expr.into(), - _ => return None, - } - }; - render::type_info_of(sema, config, &Either::Left(call_expr), edition) - }) - }) - // try closure - .or_else(|| { - descended().find_map(|token| { - if token.kind() != T![|] { - return None; + Some(render::struct_rest_pat(sema, config, &record_pat, edition)) + }; + let call = || { + if token.kind() != T!['('] && token.kind() != T![')'] { + return None; + } + let arg_list = token.parent().and_then(ast::ArgList::cast)?.syntax().parent()?; + let call_expr = syntax::match_ast! 
{ + match arg_list { + ast::CallExpr(expr) => expr.into(), + ast::MethodCallExpr(expr) => expr.into(), + _ => return None, } - let c = token.parent().and_then(|x| x.parent()).and_then(ast::ClosureExpr::cast)?; - render::closure_expr(sema, config, c, edition) - }) - }) - // tokens - .or_else(|| { + }; + render::type_info_of(sema, config, &Either::Left(call_expr), edition) + }; + let closure = || { + if token.kind() != T![|] { + return None; + } + let c = token.parent().and_then(|x| x.parent()).and_then(ast::ClosureExpr::cast)?; + render::closure_expr(sema, config, c, edition) + }; + let literal = || { render::literal(sema, original_token.clone(), edition) .map(|markup| HoverResult { markup, actions: vec![] }) - }); + }; + if let Some(result) = keywords() + .or_else(underscore) + .or_else(rest_pat) + .or_else(call) + .or_else(closure) + .or_else(literal) + { + res.push(result) + } + } - result.map(|mut res: HoverResult| { - res.actions = dedupe_or_merge_hover_actions(res.actions); - RangeInfo::new(original_token.text_range(), res) - }) + res.into_iter() + .unique() + .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| { + acc.actions.extend(actions); + acc.markup = Markup::from(format!("{}\n---\n{markup}", acc.markup)); + acc + }) + .map(|mut res: HoverResult| { + res.actions = dedupe_or_merge_hover_actions(res.actions); + RangeInfo::new(original_token.text_range(), res) + }) } fn hover_ranged( diff --git a/src/tools/rust-analyzer/crates/ide/src/markup.rs b/src/tools/rust-analyzer/crates/ide/src/markup.rs index 4a4e29fa33b8b..750d12542605c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/markup.rs +++ b/src/tools/rust-analyzer/crates/ide/src/markup.rs @@ -5,7 +5,7 @@ //! what is used by LSP, so let's keep it simple. use std::fmt; -#[derive(Default, Debug)] +#[derive(Clone, Default, Debug, Hash, PartialEq, Eq)] pub struct Markup { text: String, } From 71c7ceaf529d4cd241ab247e62c87a859728bb99 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Aug 2024 18:08:36 +0200 Subject: [PATCH 048/124] Fix sorting order for tokens in hover --- .../rust-analyzer/crates/ide/src/hover.rs | 28 ++++++++++--------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs index dde98d40dc0e1..124db2985bf00 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs @@ -119,7 +119,7 @@ pub(crate) fn hover( let edition = sema.attach_first_edition(file_id).map(|it| it.edition()).unwrap_or(Edition::CURRENT); let mut res = if range.is_empty() { - hover_simple(sema, FilePosition { file_id, offset: range.start() }, file, config, edition) + hover_offset(sema, FilePosition { file_id, offset: range.start() }, file, config, edition) } else { hover_ranged(sema, frange, file, config, edition) }?; @@ -131,7 +131,7 @@ pub(crate) fn hover( } #[allow(clippy::field_reassign_with_default)] -fn hover_simple( +fn hover_offset( sema: &Semantics<'_, RootDatabase>, FilePosition { file_id, offset }: FilePosition, file: SyntaxNode, @@ -186,7 +186,7 @@ fn hover_simple( let text = original_token.text(); let ident_kind = kind.is_any_identifier(); - descended.sort_by_key(|tok| { + descended.sort_by_cached_key(|tok| { let tok_kind = tok.kind(); let exact_same_kind = tok_kind == kind; @@ -194,18 +194,15 @@ fn hover_simple( let same_text = tok.text() == text; // anything that mapped into a token tree has likely no semantic information let no_tt_parent = 
tok.parent().map_or(false, |it| it.kind() != TOKEN_TREE); - (both_idents as usize) + !((both_idents as usize) | ((exact_same_kind as usize) << 1) | ((same_text as usize) << 2) - | ((no_tt_parent as usize) << 3) + | ((no_tt_parent as usize) << 3)) }); let mut res = vec![]; - // let mut merge_result = |next: HoverResult| { - // res.markup = Markup::from(format!("{}\n---\n{}", res.markup, next.markup)); - // res.actions.extend(next.actions); - // }; for token in descended { + let is_same_kind = token.kind() == kind; let lint_hover = (|| { // FIXME: Definition should include known lints and the like instead of having this special case here let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; @@ -273,9 +270,14 @@ fn hover_simple( continue; } let keywords = || render::keyword(sema, config, &token, edition); - let underscore = || render::underscore(sema, config, &token, edition); + let underscore = || { + if !is_same_kind { + return None; + } + render::underscore(sema, config, &token, edition) + }; let rest_pat = || { - if token.kind() != DOT2 { + if !is_same_kind || token.kind() != DOT2 { return None; } @@ -289,7 +291,7 @@ fn hover_simple( Some(render::struct_rest_pat(sema, config, &record_pat, edition)) }; let call = || { - if token.kind() != T!['('] && token.kind() != T![')'] { + if !is_same_kind || token.kind() != T!['('] && token.kind() != T![')'] { return None; } let arg_list = token.parent().and_then(ast::ArgList::cast)?.syntax().parent()?; @@ -303,7 +305,7 @@ fn hover_simple( render::type_info_of(sema, config, &Either::Left(call_expr), edition) }; let closure = || { - if token.kind() != T![|] { + if !is_same_kind || token.kind() != T![|] { return None; } let c = token.parent().and_then(|x| x.parent()).and_then(ast::ClosureExpr::cast)?; From 0a277110b3ca68b3a48f432b97f697f5cd50ce0c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Aug 2024 18:46:23 +0200 Subject: [PATCH 049/124] Improve proc-macro panic message and workspace loading failure diagnostic --- src/tools/rust-analyzer/Cargo.toml | 3 ++- src/tools/rust-analyzer/crates/hir-expand/src/lib.rs | 2 +- src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index d8f9b2b7f6afb..aa7bd2dc5fe8b 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -19,9 +19,10 @@ rowan.opt-level = 3 rustc-hash.opt-level = 3 smol_str.opt-level = 3 text-size.opt-level = 3 +serde.opt-level = 3 +salsa.opt-level = 3 # This speeds up `cargo xtask dist`. 
miniz_oxide.opt-level = 3 -salsa.opt-level = 3 [profile.release] incremental = true diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index 741c2b2ebfe23..19c3c9c43f104 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -192,7 +192,7 @@ impl ExpandErrorKind { ("overflow expanding the original macro".to_owned(), true) } ExpandErrorKind::Other(e) => ((**e).to_owned(), true), - ExpandErrorKind::ProcMacroPanic(e) => ((**e).to_owned(), true), + ExpandErrorKind::ProcMacroPanic(e) => (format!("proc-macro panicked: {e}"), true), } } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 4a2a400f44cd6..5d0c6b65992c7 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -188,7 +188,7 @@ impl GlobalState { status.health |= lsp_ext::Health::Warning; format_to!( message, - "Workspace `{}` has been queried without dependencies, connecting to crates.io might have failed.\n\n", + "Failed to read Cargo metadata for `{}`, the `Cargo.toml` might be invalid or you have no internet connection.\n\n", ws.manifest_or_root() ); } From f65d60551bc01c84133429652de1ed5855e36aad Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Fri, 16 Aug 2024 10:50:17 +0300 Subject: [PATCH 050/124] When descending into macros in search, first check if there is a need to - i.e. if we are inside a macro call This avoids the need to analyze the file when we are not inside a macro call. This is especially important for the optimization in the next commit(s), as there the common case will be to descent into macros but then not analyze. --- .../rust-analyzer/crates/hir/src/semantics.rs | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index 3d6c985043089..763f53031e4c5 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -23,9 +23,11 @@ use hir_expand::{ builtin::{BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, files::InRealFile, + inert_attr_macro::find_builtin_attr_idx, name::AsName, FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt, }; +use intern::Symbol; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; @@ -674,6 +676,35 @@ impl<'db> SemanticsImpl<'db> { res } + fn is_inside_macro_call(token: &SyntaxToken) -> bool { + token.parent_ancestors().any(|ancestor| { + if ast::MacroCall::can_cast(ancestor.kind()) { + return true; + } + // Check if it is an item (only items can have macro attributes) that has a non-builtin attribute. 
+            let Some(item) = ast::Item::cast(ancestor) else { return false };
+            item.attrs().any(|attr| {
+                let Some(meta) = attr.meta() else { return false };
+                let Some(path) = meta.path() else { return false };
+                let Some(attr_name) = path.as_single_name_ref() else { return true };
+                let attr_name = attr_name.text();
+                let attr_name = attr_name.as_str();
+                attr_name == "derive" || find_builtin_attr_idx(&Symbol::intern(attr_name)).is_none()
+            })
+        })
+    }
+
+    pub fn descend_into_macros_exact_if_in_macro(
+        &self,
+        token: SyntaxToken,
+    ) -> SmallVec<[SyntaxToken; 1]> {
+        if Self::is_inside_macro_call(&token) {
+            self.descend_into_macros_exact(token)
+        } else {
+            smallvec![token]
+        }
+    }
+
     pub fn descend_into_macros_cb(
         &self,
         token: SyntaxToken,

From a57def2b395082a36814d40f81ba8cb00bc1e4a5 Mon Sep 17 00:00:00 2001
From: Chayim Refael Friedman
Date: Mon, 19 Aug 2024 01:39:31 +0300
Subject: [PATCH 051/124] Speed up search for short associated functions,
 especially very common identifiers such as `new`

The search is used by IDE features such as rename and find all references.
The search is slow because we need to verify each candidate, and that
requires analyzing it; the key to speeding it up is to avoid the analysis
where possible. I did that with a bunch of tricks that exploit knowledge
about the language and its possibilities.

The first key insight is that associated functions may only be referenced
in the form `ContainerName::func_name` (parentheses are not necessary!),
since Rust doesn't include a way to `use Container::func_name`, and even if
it does in the future, most usages are likely to stay in that form.
Searching for `::` will help only a bit, but searching for `Container` can
help considerably, since it is very rare that there will be two identical
instances of both a container and a method of it.

However, things are not as simple as they sound. In Rust a container can be
aliased in multiple ways, and even aliased from different files/modules. If
we try to resolve the alias, we lose any gain from the textual search
(although very common method names such as `new` will still benefit, most
will suffer, because there are more instances of a container name than of
its associated item). This is where the key trick enters the picture. The
key insight is that there is still a textual property: a container name
cannot be aliased unless its name, or the name of an alias of it, is
mentioned in the alias declaration. This becomes a fixpoint algorithm: we
expand our list of aliases as we collect more and more (possible) aliases,
until we eventually reach a fixpoint. A fixpoint is not guaranteed (and we
do have guards for the rare cases where it does not happen), but it is
almost always reached: most types have very few aliases, if any.

We do use some semantic information while analyzing aliases. It's a
balance: too much semantic analysis and the search becomes slow, but too
little and we bring many incorrect aliases into our list and risk that it
expands and expands and never reaches a fixpoint. In the end, based on
benchmarks, it seems worthwhile to do a lot to avoid adding an alias (but
not too much), while it is also worth doing a lot to avoid the need to
semantically analyze `func_name` matches (but again, not too much).

After we have collected our list of aliases, we filter matches based on
this list. Only if a match can be real do we perform semantic analysis on
it.
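To make the aliasing rules concrete, here is a minimal sketch (illustration
only, not part of the diff below; all item names are invented for the
example) of the kinds of declarations the alias-collection pass has to
chase before a textual `func_name` hit can be trusted:

```
struct Foo;

impl Foo {
    fn new() -> Foo { Foo }
}

// Each of these introduces another name that the fast path must treat
// as `Foo` when it scans for `<something>::new`.
use crate::Foo as Bar; // `use` rename: `Bar::new()` must still be found
type Baz = Foo;        // type alias: `Baz::new()` must still be found
type Chained = Baz;    // an alias of an alias, hence the fixpoint loop

impl Foo {
    fn helper() -> Foo {
        Self::new() // `Self` inside an impl of `Foo` counts as well
    }
}

fn main() {
    let _ = (Bar::new(), Baz::new(), Chained::new(), Foo::helper());
}
```

`Chained` can only be discovered after `Baz` has been added to the list,
which is why the collection is iterative rather than a single pass.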
The results are promising: searching for all references on `new()` in
`base-db` in the rust-analyzer repository, which previously took around 60
seconds, now takes roughly two and a half seconds, while searching for
`Vec::new()`, almost an upper bound on how heavily a symbol can be used,
used to take 7-9 minutes(!) and now completes in 100-120 seconds, with less
than half of the results being non-verified (i.e. false positives).

This is the less strictly correct (but faster) variant of this patch; it
can miss some (rare) cases (there is a test for that -
`goto_ref_on_short_associated_function_complicated_type_magic_can_confuse_our_logic()`).
There is another branch that has no false negatives but is slower to search
(`Vec::new()` never reaches a fixpoint in alias collection there). I
believe it is possible to create a strategy that has the best of both
worlds, but it would involve significant complexity and I didn't bother,
especially considering that in the vast majority of searches the other
branch will be more than enough. But all in all, I decided to bring this
branch (of course, if the maintainers agree), since our search is already
not 100% accurate (it misses macros), and I believe there is value in the
additional perf.
---
 .../rust-analyzer/crates/ide-db/src/search.rs | 507 +++++++++++++++---
 .../crates/ide/src/references.rs              | 230 ++++++++
 2 files changed, 657 insertions(+), 80 deletions(-)

diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index 197c327ee4edf..494ff82490b29 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -4,19 +4,24 @@
 //! get a super-set of matches. Then, we confirm each match using precise
 //! name resolution.
-use std::cell::LazyCell; use std::mem; +use std::{cell::LazyCell, cmp::Reverse}; use base_db::{salsa::Database, SourceDatabase, SourceRootDatabase}; use hir::{ - sym, AsAssocItem, DefWithBody, FileRange, HasAttrs, HasSource, HirFileIdExt, InFile, - InRealFile, ModuleSource, PathResolution, Semantics, Visibility, + sym, Adt, AsAssocItem, DefWithBody, FileRange, FileRangeWrapper, HasAttrs, HasContainer, + HasSource, HirFileIdExt, InFile, InFileWrapper, InRealFile, ItemContainer, ModuleSource, + PathResolution, Semantics, Visibility, }; use memchr::memmem::Finder; use parser::SyntaxKind; -use rustc_hash::FxHashMap; +use rustc_hash::{FxHashMap, FxHashSet}; use span::EditionedFileId; -use syntax::{ast, match_ast, AstNode, AstToken, SyntaxElement, TextRange, TextSize, ToSmolStr}; +use syntax::{ + ast::{self, HasName}, + match_ast, AstNode, AstToken, SmolStr, SyntaxElement, SyntaxNode, TextRange, TextSize, + ToSmolStr, +}; use triomphe::Arc; use crate::{ @@ -442,6 +447,398 @@ impl<'a> FindUsages<'a> { res } + fn scope_files<'b>( + db: &'b RootDatabase, + scope: &'b SearchScope, + ) -> impl Iterator, EditionedFileId, TextRange)> + 'b { + scope.entries.iter().map(|(&file_id, &search_range)| { + let text = db.file_text(file_id.file_id()); + let search_range = + search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); + + (text, file_id, search_range) + }) + } + + fn match_indices<'b>( + text: &'b str, + finder: &'b Finder<'b>, + search_range: TextRange, + ) -> impl Iterator + 'b { + finder.find_iter(text.as_bytes()).filter_map(move |idx| { + let offset: TextSize = idx.try_into().unwrap(); + if !search_range.contains_inclusive(offset) { + return None; + } + // If this is not a word boundary, that means this is only part of an identifier, + // so it can't be what we're looking for. + // This speeds up short identifiers significantly. + if text[..idx] + .chars() + .next_back() + .is_some_and(|ch| matches!(ch, 'A'..='Z' | 'a'..='z' | '_')) + || text[idx + finder.needle().len()..] + .chars() + .next() + .is_some_and(|ch| matches!(ch, 'A'..='Z' | 'a'..='z' | '_' | '0'..='9')) + { + return None; + } + Some(offset) + }) + } + + fn find_nodes<'b>( + sema: &'b Semantics<'_, RootDatabase>, + name: &str, + node: &syntax::SyntaxNode, + offset: TextSize, + ) -> impl Iterator + 'b { + node.token_at_offset(offset) + .find(|it| { + // `name` is stripped of raw ident prefix. See the comment on name retrieval below. + it.text().trim_start_matches("r#") == name + }) + .into_iter() + .flat_map(move |token| { + sema.descend_into_macros_exact_if_in_macro(token) + .into_iter() + .filter_map(|it| it.parent()) + }) + } + + /// Performs a special fast search for associated functions. This is mainly intended + /// to speed up `new()` which can take a long time. + /// + /// The trick is instead of searching for `func_name` search for `TypeThatContainsContainerName::func_name`. + /// We cannot search exactly that (not even in tokens), because `ContainerName` may be aliased. + /// Instead, we perform a textual search for `ContainerName`. Then, we look for all cases where + /// `ContainerName` may be aliased (that includes `use ContainerName as Xyz` and + /// `type Xyz = ContainerName`). We collect a list of all possible aliases of `ContainerName`. + /// The list can have false positives (because there may be multiple types named `ContainerName`), + /// but it cannot have false negatives. Then, we look for `TypeThatContainsContainerNameOrAnyAlias::func_name`. 
+ /// Those that will be found are of high chance to be actual hits (of course, we will need to verify + /// that). + /// + /// Returns true if completed the search. + // FIXME: Extend this to other cases, such as associated types/consts/enum variants (note those can be `use`d). + fn short_associated_function_fast_search( + &self, + sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, + search_scope: &SearchScope, + name: &str, + ) -> bool { + let _p = tracing::info_span!("short_associated_function_fast_search").entered(); + + let container = (|| { + let Definition::Function(function) = self.def else { + return None; + }; + if function.has_self_param(self.sema.db) { + return None; + } + match function.container(self.sema.db) { + // Only freestanding `impl`s qualify; methods from trait + // can be called from within subtraits and bounds. + ItemContainer::Impl(impl_) => { + let has_trait = impl_.trait_(self.sema.db).is_some(); + if has_trait { + return None; + } + let adt = impl_.self_ty(self.sema.db).as_adt()?; + Some(adt) + } + _ => None, + } + })(); + let Some(container) = container else { + return false; + }; + + fn has_any_name(node: &SyntaxNode, mut predicate: impl FnMut(&str) -> bool) -> bool { + node.descendants().any(|node| { + match_ast! { + match node { + ast::Name(it) => predicate(it.text().trim_start_matches("r#")), + ast::NameRef(it) => predicate(it.text().trim_start_matches("r#")), + _ => false + } + } + }) + } + + // This is a fixpoint algorithm with O(number of aliases), but most types have no or few aliases, + // so this should stay fast. + // + /// Returns `(aliases, ranges_where_Self_can_refer_to_our_type)`. + fn collect_possible_aliases( + sema: &Semantics<'_, RootDatabase>, + container: Adt, + ) -> Option<(FxHashSet, Vec>)> { + fn insert_type_alias( + db: &RootDatabase, + to_process: &mut Vec<(SmolStr, SearchScope)>, + alias_name: &str, + def: Definition, + ) { + let alias = alias_name.trim_start_matches("r#").to_smolstr(); + tracing::debug!("found alias: {alias}"); + to_process.push((alias, def.search_scope(db))); + } + + let _p = tracing::info_span!("collect_possible_aliases").entered(); + + let db = sema.db; + let container_name = container.name(db).unescaped().display(db).to_smolstr(); + let search_scope = Definition::from(container).search_scope(db); + let mut seen = FxHashSet::default(); + let mut completed = FxHashSet::default(); + let mut to_process = vec![(container_name, search_scope)]; + let mut is_possibly_self = Vec::new(); + let mut total_files_searched = 0; + + while let Some((current_to_process, current_to_process_search_scope)) = to_process.pop() + { + let is_alias = |alias: &ast::TypeAlias| { + let def = sema.to_def(alias)?; + let ty = def.ty(db); + let is_alias = ty.as_adt()? 
== container; + is_alias.then_some(def) + }; + + let finder = Finder::new(current_to_process.as_bytes()); + for (file_text, file_id, search_range) in + FindUsages::scope_files(db, ¤t_to_process_search_scope) + { + let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone()); + + for offset in FindUsages::match_indices(&file_text, &finder, search_range) { + let usages = + FindUsages::find_nodes(sema, ¤t_to_process, &tree, offset) + .filter(|it| { + matches!(it.kind(), SyntaxKind::NAME | SyntaxKind::NAME_REF) + }); + for usage in usages { + if let Some(alias) = usage.parent().and_then(|it| { + let path = ast::PathSegment::cast(it)?.parent_path(); + let use_tree = ast::UseTree::cast(path.syntax().parent()?)?; + use_tree.rename()?.name() + }) { + if seen.insert(InFileWrapper::new( + file_id, + alias.syntax().text_range(), + )) { + tracing::debug!("found alias: {alias}"); + // FIXME: `use`s have no easy way to determine their search scope, but they are rare. + to_process.push(( + alias.text().to_smolstr(), + current_to_process_search_scope.clone(), + )); + } + } else if let Some(alias) = + usage.ancestors().find_map(ast::TypeAlias::cast) + { + if let Some(name) = alias.name() { + if seen.insert(InFileWrapper::new( + file_id, + name.syntax().text_range(), + )) { + if let Some(def) = is_alias(&alias) { + insert_type_alias( + sema.db, + &mut to_process, + name.text().as_str(), + def.into(), + ); + } + } + } + } + + // We need to account for `Self`. It can only refer to our type inside an impl. + let impl_ = 'impl_: { + for ancestor in usage.ancestors() { + if let Some(parent) = ancestor.parent() { + if let Some(parent) = ast::Impl::cast(parent) { + // Only if the GENERIC_PARAM_LIST is directly under impl, otherwise it may be in the self ty. + if matches!( + ancestor.kind(), + SyntaxKind::ASSOC_ITEM_LIST + | SyntaxKind::WHERE_CLAUSE + | SyntaxKind::GENERIC_PARAM_LIST + ) { + break; + } + if parent + .trait_() + .is_some_and(|trait_| *trait_.syntax() == ancestor) + { + break; + } + + // Otherwise, found an impl where its self ty may be our type. + break 'impl_ Some(parent); + } + } + } + None + }; + (|| { + let impl_ = impl_?; + is_possibly_self.push(sema.original_range(impl_.syntax())); + let assoc_items = impl_.assoc_item_list()?; + let type_aliases = assoc_items + .syntax() + .descendants() + .filter_map(ast::TypeAlias::cast); + for type_alias in type_aliases { + let Some(ty) = type_alias.ty() else { continue }; + let Some(name) = type_alias.name() else { continue }; + let contains_self = ty + .syntax() + .descendants_with_tokens() + .any(|node| node.kind() == SyntaxKind::SELF_TYPE_KW); + if !contains_self { + continue; + } + if seen.insert(InFileWrapper::new( + file_id, + name.syntax().text_range(), + )) { + if let Some(def) = is_alias(&type_alias) { + insert_type_alias( + sema.db, + &mut to_process, + name.text().as_str(), + def.into(), + ); + } + } + } + Some(()) + })(); + } + } + } + + completed.insert(current_to_process); + + total_files_searched += current_to_process_search_scope.entries.len(); + // FIXME: Maybe this needs to be relative to the project size, or at least to the initial search scope? + if total_files_searched > 20_000 && completed.len() > 100 { + // This case is extremely unlikely (even searching for `Vec::new()` on rust-analyzer does not enter + // here - it searches less than 10,000 files, and it does so in five seconds), but if we get here, + // we at a risk of entering an almost-infinite loop of growing the aliases list. 
So just stop and + // let normal search handle this case. + tracing::info!(aliases_count = %completed.len(), "too much aliases; leaving fast path"); + return None; + } + } + + // Impls can contain each other, so we need to deduplicate their ranges. + is_possibly_self.sort_unstable_by_key(|position| { + (position.file_id, position.range.start(), Reverse(position.range.end())) + }); + is_possibly_self.dedup_by(|pos2, pos1| { + pos1.file_id == pos2.file_id + && pos1.range.start() <= pos2.range.start() + && pos1.range.end() >= pos2.range.end() + }); + + tracing::info!(aliases_count = %completed.len(), "aliases search completed"); + + Some((completed, is_possibly_self)) + } + + fn search( + this: &FindUsages<'_>, + finder: &Finder<'_>, + name: &str, + files: impl Iterator, EditionedFileId, TextRange)>, + mut container_predicate: impl FnMut( + &SyntaxNode, + InFileWrapper, + ) -> bool, + sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, + ) { + for (file_text, file_id, search_range) in files { + let tree = LazyCell::new(move || this.sema.parse(file_id).syntax().clone()); + + for offset in FindUsages::match_indices(&file_text, finder, search_range) { + let usages = FindUsages::find_nodes(this.sema, name, &tree, offset) + .filter_map(ast::NameRef::cast); + for usage in usages { + let found_usage = usage + .syntax() + .parent() + .and_then(ast::PathSegment::cast) + .map(|path_segment| { + container_predicate( + path_segment.parent_path().syntax(), + InFileWrapper::new(file_id, usage.syntax().text_range()), + ) + }) + .unwrap_or(false); + if found_usage { + this.found_name_ref(&usage, sink); + } + } + } + } + } + + let Some((container_possible_aliases, is_possibly_self)) = + collect_possible_aliases(self.sema, container) + else { + return false; + }; + + // FIXME: If Rust ever gains the ability to `use Struct::method` we'll also need to account for free + // functions. + let finder = Finder::new(name.as_bytes()); + // The search for `Self` may return duplicate results with `ContainerName`, so deduplicate them. + let mut self_positions = FxHashSet::default(); + tracing::info_span!("Self_search").in_scope(|| { + search( + self, + &finder, + name, + is_possibly_self.into_iter().map(|position| { + ( + self.sema.db.file_text(position.file_id.file_id()), + position.file_id, + position.range, + ) + }), + |path, name_position| { + let has_self = path + .descendants_with_tokens() + .any(|node| node.kind() == SyntaxKind::SELF_TYPE_KW); + if has_self { + self_positions.insert(name_position); + } + has_self + }, + sink, + ) + }); + tracing::info_span!("aliases_search").in_scope(|| { + search( + self, + &finder, + name, + FindUsages::scope_files(self.sema.db, search_scope), + |path, name_position| { + has_any_name(path, |name| container_possible_aliases.contains(name)) + && !self_positions.contains(&name_position) + }, + sink, + ) + }); + + true + } + pub fn search(&self, sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool) { let _p = tracing::info_span!("FindUsages:search").entered(); let sema = self.sema; @@ -488,77 +885,23 @@ impl<'a> FindUsages<'a> { Some(s) => s.as_str(), None => return, }; - let finder = &Finder::new(name); - let include_self_kw_refs = - self.include_self_kw_refs.as_ref().map(|ty| (ty, Finder::new("Self"))); - - // for<'a> |text: &'a str, name: &'a str, search_range: TextRange| -> impl Iterator + 'a { ... 
} - fn match_indices<'a>( - text: &'a str, - finder: &'a Finder<'a>, - search_range: TextRange, - ) -> impl Iterator + 'a { - finder.find_iter(text.as_bytes()).filter_map(move |idx| { - let offset: TextSize = idx.try_into().unwrap(); - if !search_range.contains_inclusive(offset) { - return None; - } - // If this is not a word boundary, that means this is only part of an identifier, - // so it can't be what we're looking for. - // This speeds up short identifiers significantly. - if text[..idx] - .chars() - .next_back() - .is_some_and(|ch| matches!(ch, 'A'..='Z' | 'a'..='z' | '_')) - || text[idx + finder.needle().len()..] - .chars() - .next() - .is_some_and(|ch| matches!(ch, 'A'..='Z' | 'a'..='z' | '_' | '0'..='9')) - { - return None; - } - Some(offset) - }) - } - - // for<'a> |scope: &'a SearchScope| -> impl Iterator, EditionedFileId, TextRange)> + 'a { ... } - fn scope_files<'a>( - sema: &'a Semantics<'_, RootDatabase>, - scope: &'a SearchScope, - ) -> impl Iterator, EditionedFileId, TextRange)> + 'a { - scope.entries.iter().map(|(&file_id, &search_range)| { - let text = sema.db.file_text(file_id.file_id()); - let search_range = - search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text))); - (text, file_id, search_range) - }) + // FIXME: This should probably depend on the number of the results (specifically, the number of false results). + if name.len() <= 7 && self.short_associated_function_fast_search(sink, &search_scope, name) + { + return; } - let find_nodes = move |name: &str, node: &syntax::SyntaxNode, offset: TextSize| { - node.token_at_offset(offset) - .find(|it| { - // `name` is stripped of raw ident prefix. See the comment on name retrieval above. - it.text().trim_start_matches("r#") == name - }) - .into_iter() - .flat_map(move |token| { - // FIXME: There should be optimization potential here - // Currently we try to descend everything we find which - // means we call `Semantics::descend_into_macros` on - // every textual hit. That function is notoriously - // expensive even for things that do not get down mapped - // into macros. 
- sema.descend_into_macros_exact(token).into_iter().filter_map(|it| it.parent()) - }) - }; + let finder = &Finder::new(name); + let include_self_kw_refs = + self.include_self_kw_refs.as_ref().map(|ty| (ty, Finder::new("Self"))); - for (text, file_id, search_range) in scope_files(sema, &search_scope) { + for (text, file_id, search_range) in Self::scope_files(sema.db, &search_scope) { self.sema.db.unwind_if_cancelled(); let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone()); // Search for occurrences of the items name - for offset in match_indices(&text, finder, search_range) { + for offset in Self::match_indices(&text, finder, search_range) { tree.token_at_offset(offset).for_each(|token| { let Some(str_token) = ast::String::cast(token.clone()) else { return }; if let Some((range, nameres)) = @@ -568,7 +911,9 @@ impl<'a> FindUsages<'a> { } }); - for name in find_nodes(name, &tree, offset).filter_map(ast::NameLike::cast) { + for name in + Self::find_nodes(sema, name, &tree, offset).filter_map(ast::NameLike::cast) + { if match name { ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink), ast::NameLike::Name(name) => self.found_name(&name, sink), @@ -580,8 +925,9 @@ impl<'a> FindUsages<'a> { } // Search for occurrences of the `Self` referring to our type if let Some((self_ty, finder)) = &include_self_kw_refs { - for offset in match_indices(&text, finder, search_range) { - for name_ref in find_nodes("Self", &tree, offset).filter_map(ast::NameRef::cast) + for offset in Self::match_indices(&text, finder, search_range) { + for name_ref in + Self::find_nodes(sema, "Self", &tree, offset).filter_map(ast::NameRef::cast) { if self.found_self_ty_name_ref(self_ty, &name_ref, sink) { return; @@ -599,13 +945,13 @@ impl<'a> FindUsages<'a> { let is_crate_root = module.is_crate_root().then(|| Finder::new("crate")); let finder = &Finder::new("super"); - for (text, file_id, search_range) in scope_files(sema, &scope) { + for (text, file_id, search_range) in Self::scope_files(sema.db, &scope) { self.sema.db.unwind_if_cancelled(); let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone()); - for offset in match_indices(&text, finder, search_range) { - for name_ref in - find_nodes("super", &tree, offset).filter_map(ast::NameRef::cast) + for offset in Self::match_indices(&text, finder, search_range) { + for name_ref in Self::find_nodes(sema, "super", &tree, offset) + .filter_map(ast::NameRef::cast) { if self.found_name_ref(&name_ref, sink) { return; @@ -613,9 +959,9 @@ impl<'a> FindUsages<'a> { } } if let Some(finder) = &is_crate_root { - for offset in match_indices(&text, finder, search_range) { - for name_ref in - find_nodes("crate", &tree, offset).filter_map(ast::NameRef::cast) + for offset in Self::match_indices(&text, finder, search_range) { + for name_ref in Self::find_nodes(sema, "crate", &tree, offset) + .filter_map(ast::NameRef::cast) { if self.found_name_ref(&name_ref, sink) { return; @@ -656,8 +1002,9 @@ impl<'a> FindUsages<'a> { let tree = LazyCell::new(|| sema.parse(file_id).syntax().clone()); let finder = &Finder::new("self"); - for offset in match_indices(&text, finder, search_range) { - for name_ref in find_nodes("self", &tree, offset).filter_map(ast::NameRef::cast) + for offset in Self::match_indices(&text, finder, search_range) { + for name_ref in + Self::find_nodes(sema, "self", &tree, offset).filter_map(ast::NameRef::cast) { if self.found_self_module_name_ref(&name_ref, sink) { return; diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs 
b/src/tools/rust-analyzer/crates/ide/src/references.rs index 55afcb59bae78..43c04b20dd7cb 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -2510,4 +2510,234 @@ fn main() { "#]], ) } + + // The following are tests for short_associated_function_fast_search() in crates/ide-db/src/search.rs, because find all references + // use `FindUsages` and I found it easy to test it here. + + #[test] + fn goto_ref_on_short_associated_function() { + check( + r#" +struct Foo; +impl Foo { + fn new$0() {} +} + +fn bar() { + Foo::new(); +} +fn baz() { + Foo::new; +} + "#, + expect![[r#" + new Function FileId(0) 27..38 30..33 + + FileId(0) 62..65 + FileId(0) 91..94 + "#]], + ); + } + + #[test] + fn goto_ref_on_short_associated_function_with_aliases() { + check( + r#" +//- /lib.rs +mod a; +mod b; + +struct Foo; +impl Foo { + fn new$0() {} +} + +fn bar() { + b::c::Baz::new(); +} + +//- /a.rs +use crate::Foo as Bar; + +fn baz() { Bar::new(); } +fn quux() { ::Assoc::new(); } + +//- /b.rs +pub(crate) mod c; + +pub(crate) struct Other; +pub(crate) trait Trait { + type Assoc; +} +impl Trait for Other { + type Assoc = super::Foo; +} + +//- /b/c.rs +type Itself = T; +pub(in super::super) type Baz = Itself; + "#, + expect![[r#" + new Function FileId(0) 42..53 45..48 + + FileId(0) 83..86 + FileId(1) 40..43 + FileId(1) 106..109 + "#]], + ); + } + + #[test] + fn goto_ref_on_short_associated_function_self_works() { + check( + r#" +//- /lib.rs +mod module; + +struct Foo; +impl Foo { + fn new$0() {} + fn bar() { Self::new(); } +} +trait Trait { + type Assoc; + fn baz(); +} +impl Trait for Foo { + type Assoc = Self; + fn baz() { Self::new(); } +} + +//- /module.rs +impl super::Foo { + fn quux() { Self::new(); } +} +fn foo() { ::Assoc::new(); } + "#, + expect![[r#" + new Function FileId(0) 40..51 43..46 + + FileId(0) 73..76 + FileId(0) 195..198 + FileId(1) 40..43 + FileId(1) 99..102 + "#]], + ); + } + + #[test] + fn goto_ref_on_short_associated_function_overlapping_self_ranges() { + check( + r#" +struct Foo; +impl Foo { + fn new$0() {} + fn bar() { + Self::new(); + impl Foo { + fn baz() { Self::new(); } + } + } +} + "#, + expect![[r#" + new Function FileId(0) 27..38 30..33 + + FileId(0) 68..71 + FileId(0) 123..126 + "#]], + ); + } + + #[test] + fn goto_ref_on_short_associated_function_no_direct_self_but_path_contains_self() { + check( + r#" +struct Foo; +impl Foo { + fn new$0() {} +} +trait Trait { + type Assoc; +} +impl Trait for (A, B) { + type Assoc = B; +} +impl Foo { + fn bar() { + <((), Foo) as Trait>::Assoc::new(); + <((), Self) as Trait>::Assoc::new(); + } +} + "#, + expect![[r#" + new Function FileId(0) 27..38 30..33 + + FileId(0) 188..191 + FileId(0) 233..236 + "#]], + ); + } + + // Checks that we can circumvent our fast path logic using complicated type level functions. + // This mainly exists as a documentation. I don't believe it is fixable. + // Usages search is not 100% accurate anyway; we miss macros. 
+ #[test] + fn goto_ref_on_short_associated_function_complicated_type_magic_can_confuse_our_logic() { + check( + r#" +struct Foo; +impl Foo { + fn new$0() {} +} + +struct ChoiceA; +struct ChoiceB; +trait Choice { + type Choose; +} +impl Choice for ChoiceA { + type Choose = A; +} +impl Choice for ChoiceB { + type Choose = B; +} +type Choose = ::Choose; + +fn bar() { + Choose::<(), ChoiceB>::new(); +} + "#, + expect![[r#" + new Function FileId(0) 27..38 30..33 + + (no references) + "#]], + ); + } + + #[test] + fn goto_ref_on_short_associated_function_same_path_mention_alias_and_self() { + check( + r#" +struct Foo; +impl Foo { + fn new$0() {} +} + +type IgnoreFirst = B; + +impl Foo { + fn bar() { + >::new(); + } +} + "#, + expect![[r#" + new Function FileId(0) 27..38 30..33 + + FileId(0) 131..134 + "#]], + ); + } } From a44152af15c6510ab35735d623dd9e4a37fde7aa Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 22 Aug 2024 14:19:56 +0300 Subject: [PATCH 052/124] Add cov_marks to test #17927 --- src/tools/rust-analyzer/crates/ide-db/src/search.rs | 9 +++++++++ src/tools/rust-analyzer/crates/ide/src/references.rs | 10 ++++++++++ 2 files changed, 19 insertions(+) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 494ff82490b29..c807a5940d14f 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -629,6 +629,7 @@ impl<'a> FindUsages<'a> { alias.syntax().text_range(), )) { tracing::debug!("found alias: {alias}"); + cov_mark::hit!(container_use_rename); // FIXME: `use`s have no easy way to determine their search scope, but they are rare. to_process.push(( alias.text().to_smolstr(), @@ -644,12 +645,15 @@ impl<'a> FindUsages<'a> { name.syntax().text_range(), )) { if let Some(def) = is_alias(&alias) { + cov_mark::hit!(container_type_alias); insert_type_alias( sema.db, &mut to_process, name.text().as_str(), def.into(), ); + } else { + cov_mark::hit!(same_name_different_def_type_alias); } } } @@ -706,12 +710,15 @@ impl<'a> FindUsages<'a> { name.syntax().text_range(), )) { if let Some(def) = is_alias(&type_alias) { + cov_mark::hit!(self_type_alias); insert_type_alias( sema.db, &mut to_process, name.text().as_str(), def.into(), ); + } else { + cov_mark::hit!(same_name_different_def_type_alias); } } } @@ -793,6 +800,8 @@ impl<'a> FindUsages<'a> { return false; }; + cov_mark::hit!(short_associated_function_fast_search); + // FIXME: If Rust ever gains the ability to `use Struct::method` we'll also need to account for free // functions. 
let finder = Finder::new(name.as_bytes()); diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index 43c04b20dd7cb..64d717f88ddd9 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -2516,6 +2516,7 @@ fn main() { #[test] fn goto_ref_on_short_associated_function() { + cov_mark::check!(short_associated_function_fast_search); check( r#" struct Foo; @@ -2541,6 +2542,9 @@ fn baz() { #[test] fn goto_ref_on_short_associated_function_with_aliases() { + cov_mark::check!(short_associated_function_fast_search); + cov_mark::check!(container_use_rename); + cov_mark::check!(container_type_alias); check( r#" //- /lib.rs @@ -2589,6 +2593,8 @@ pub(in super::super) type Baz = Itself; #[test] fn goto_ref_on_short_associated_function_self_works() { + cov_mark::check!(short_associated_function_fast_search); + cov_mark::check!(self_type_alias); check( r#" //- /lib.rs @@ -2651,6 +2657,7 @@ impl Foo { #[test] fn goto_ref_on_short_associated_function_no_direct_self_but_path_contains_self() { + cov_mark::check!(short_associated_function_fast_search); check( r#" struct Foo; @@ -2684,6 +2691,8 @@ impl Foo { // Usages search is not 100% accurate anyway; we miss macros. #[test] fn goto_ref_on_short_associated_function_complicated_type_magic_can_confuse_our_logic() { + cov_mark::check!(short_associated_function_fast_search); + cov_mark::check!(same_name_different_def_type_alias); check( r#" struct Foo; @@ -2718,6 +2727,7 @@ fn bar() { #[test] fn goto_ref_on_short_associated_function_same_path_mention_alias_and_self() { + cov_mark::check!(short_associated_function_fast_search); check( r#" struct Foo; From 0251cfa33bf10665055b9a2741aac6c60cdb65ad Mon Sep 17 00:00:00 2001 From: Ali Bektas Date: Thu, 22 Aug 2024 23:59:01 +0200 Subject: [PATCH 053/124] Apply changes --- .../crates/rust-analyzer/src/flycheck.rs | 39 +++++++++++------ .../src/handlers/notification.rs | 43 +++++++++++-------- .../crates/rust-analyzer/src/main_loop.rs | 4 +- 3 files changed, 51 insertions(+), 35 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 0e859f327510a..9fb43286579f8 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -34,6 +34,14 @@ pub(crate) struct CargoOptions { pub(crate) target_dir: Option, } +#[derive(Clone)] +pub(crate) enum Target { + Bin(String), + Example(String), + Benchmark(String), + Test(String), +} + impl CargoOptions { pub(crate) fn apply_on_command(&self, cmd: &mut Command) { for target in &self.target_triples { @@ -118,12 +126,12 @@ impl FlycheckHandle { } /// Schedule a re-start of the cargo check worker to do a workspace wide check. - pub(crate) fn restart_workspace(&self, saved_file: Option, target: Option) { - self.sender.send(StateChange::Restart { package: None, saved_file, target }).unwrap(); + pub(crate) fn restart_workspace(&self, saved_file: Option) { + self.sender.send(StateChange::Restart { package: None, saved_file, target: None }).unwrap(); } /// Schedule a re-start of the cargo check worker to do a package wide check. 
- pub(crate) fn restart_for_package(&self, package: String, target: Option) { + pub(crate) fn restart_for_package(&self, package: String, target: Option) { self.sender .send(StateChange::Restart { package: Some(package), saved_file: None, target }) .unwrap(); @@ -183,7 +191,7 @@ pub(crate) enum Progress { } enum StateChange { - Restart { package: Option, saved_file: Option, target: Option }, + Restart { package: Option, saved_file: Option, target: Option }, Cancel, } @@ -281,14 +289,12 @@ impl FlycheckActor { } } - let command = match self.check_command( - package.as_deref(), - saved_file.as_deref(), - target.as_deref(), - ) { - Some(c) => c, - None => continue, + let Some(command) = + self.check_command(package.as_deref(), saved_file.as_deref(), target) + else { + continue; }; + let formatted_command = format!("{command:?}"); tracing::debug!(?command, "will restart flycheck"); @@ -384,7 +390,7 @@ impl FlycheckActor { &self, package: Option<&str>, saved_file: Option<&AbsPath>, - bin_target: Option<&str>, + target: Option, ) -> Option { match &self.config { FlycheckConfig::CargoCommand { command, options, ansi_color_output } => { @@ -400,8 +406,13 @@ impl FlycheckActor { None => cmd.arg("--workspace"), }; - if let Some(tgt) = bin_target { - cmd.arg("--bin").arg(tgt); + if let Some(tgt) = target { + match tgt { + Target::Bin(tgt) => cmd.arg("--bin").arg(tgt), + Target::Example(tgt) => cmd.arg("--example").arg(tgt), + Target::Test(tgt) => cmd.arg("--test").arg(tgt), + Target::Benchmark(tgt) => cmd.arg("--bench").arg(tgt), + }; } cmd.arg(if *ansi_color_output { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index fb7caf652d06a..f99319271ba4a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -15,6 +15,7 @@ use vfs::{AbsPathBuf, ChangeKind, VfsPath}; use crate::{ config::{Config, ConfigChange}, + flycheck::Target, global_state::{FetchWorkspaceRequest, GlobalState}, lsp::{from_proto, utils::apply_document_changes}, lsp_ext::{self, RunFlycheckParams}, @@ -186,7 +187,7 @@ pub(crate) fn handle_did_save_text_document( } else if state.config.check_on_save() { // No specific flycheck was triggered, so let's trigger all of them. for flycheck in state.flycheck.iter() { - flycheck.restart_workspace(None, None); + flycheck.restart_workspace(None); } } Ok(()) @@ -289,18 +290,25 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { let mut updated = false; let task = move || -> std::result::Result<(), ide::Cancelled> { // Is the target binary? If so we let flycheck run only for the workspace that contains the crate. 
- let target_is_bin = TargetSpec::for_file(&world, file_id)?.and_then(|x| { - if x.target_kind() == project_model::TargetKind::Bin { - return match x { - TargetSpec::Cargo(c) => Some(c.target), - TargetSpec::ProjectJson(p) => Some(p.label), - }; - } + let target = TargetSpec::for_file(&world, file_id)?.and_then(|x| { + let tgt_kind = x.target_kind(); + let tgt_name = match x { + TargetSpec::Cargo(c) => c.target, + TargetSpec::ProjectJson(p) => p.label, + }; + + let tgt = match tgt_kind { + project_model::TargetKind::Bin => Target::Bin(tgt_name), + project_model::TargetKind::Example => Target::Example(tgt_name), + project_model::TargetKind::Test => Target::Test(tgt_name), + project_model::TargetKind::Bench => Target::Benchmark(tgt_name), + _ => return None, + }; - None + Some(tgt) }); - let crate_ids = if target_is_bin.is_some() { + let crate_ids = if target.is_some() { // Trigger flychecks for the only workspace which the binary crate belongs to world.analysis.crates_for(file_id)?.into_iter().unique().collect::>() } else { @@ -364,12 +372,11 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { for (id, package) in workspace_ids.clone() { if id == flycheck.id() { updated = true; - match package.filter(|_| !world.config.flycheck_workspace()) { - Some(package) => { - flycheck.restart_for_package(package, target_is_bin.clone()) - } - None => flycheck - .restart_workspace(saved_file.clone(), target_is_bin.clone()), + match package + .filter(|_| !world.config.flycheck_workspace() || target.is_some()) + { + Some(package) => flycheck.restart_for_package(package, target.clone()), + None => flycheck.restart_workspace(saved_file.clone()), } continue; } @@ -378,7 +385,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { // No specific flycheck was triggered, so let's trigger all of them. if !updated { for flycheck in world.flycheck.iter() { - flycheck.restart_workspace(saved_file.clone(), None); + flycheck.restart_workspace(saved_file.clone()); } } Ok(()) @@ -420,7 +427,7 @@ pub(crate) fn handle_run_flycheck( } // No specific flycheck was triggered, so let's trigger all of them. 
for flycheck in state.flycheck.iter() { - flycheck.restart_workspace(None, None); + flycheck.restart_workspace(None); } Ok(()) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 79a209f8bfe23..1d4ee71e5c1a0 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -406,9 +406,7 @@ impl GlobalState { if became_quiescent { if self.config.check_on_save() { // Project has loaded properly, kick off initial flycheck - self.flycheck - .iter() - .for_each(|flycheck| flycheck.restart_workspace(None, None)); + self.flycheck.iter().for_each(|flycheck| flycheck.restart_workspace(None)); } if self.config.prefill_caches() { self.prime_caches_queue.request_op("became quiescent".to_owned(), ()); From 916c5598903702f321c746f18213e0ccacd5a0e2 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 23 Aug 2024 11:05:25 +0200 Subject: [PATCH 054/124] Remove incorrect FIXME comment --- src/tools/rust-analyzer/crates/hir-def/src/nameres.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index cd8db00c8b2d1..db45f6bed6679 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -461,7 +461,6 @@ impl DefMap { break; } } - // FIXME: crate_name should be non-optional but we don't pass the name through yet parts.push(match &self.data.crate_name { Some(name) => name.clone(), None => sym::crate_.clone(), From 5f7489a3d7dcf48a06ca70aea3c1941aa973c271 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 23 Aug 2024 11:45:47 +0200 Subject: [PATCH 055/124] internal: Don't requery crates_for for flycheck when crates are known --- .../crates/rust-analyzer/src/global_state.rs | 1 + .../src/handlers/notification.rs | 46 ++++++++++--------- .../crates/rust-analyzer/src/target_spec.rs | 1 + 3 files changed, 26 insertions(+), 22 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 8fcdfa5829aae..afdce6cdf1a17 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -696,6 +696,7 @@ impl GlobalStateSnapshot { }; return Some(TargetSpec::ProjectJson(ProjectJsonTargetSpec { + crate_id, label: build.label, target_kind: build.target_kind, shell_runnables: project.runnables().to_owned(), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index f99319271ba4a..b08b5b1d725b1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -10,6 +10,7 @@ use lsp_types::{ DidOpenTextDocumentParams, DidSaveTextDocumentParams, WorkDoneProgressCancelParams, }; use paths::Utf8PathBuf; +use stdx::TupleExt; use triomphe::Arc; use vfs::{AbsPathBuf, ChangeKind, VfsPath}; @@ -290,11 +291,11 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { let mut updated = false; let task = move || -> std::result::Result<(), ide::Cancelled> { // Is the target binary? If so we let flycheck run only for the workspace that contains the crate. 
- let target = TargetSpec::for_file(&world, file_id)?.and_then(|x| { - let tgt_kind = x.target_kind(); - let tgt_name = match x { - TargetSpec::Cargo(c) => c.target, - TargetSpec::ProjectJson(p) => p.label, + let target = TargetSpec::for_file(&world, file_id)?.and_then(|it| { + let tgt_kind = it.target_kind(); + let (tgt_name, crate_id) = match it { + TargetSpec::Cargo(c) => (c.target, c.crate_id), + TargetSpec::ProjectJson(p) => (p.label, p.crate_id), }; let tgt = match tgt_kind { @@ -305,25 +306,25 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { _ => return None, }; - Some(tgt) + Some((tgt, crate_id)) }); - let crate_ids = if target.is_some() { - // Trigger flychecks for the only workspace which the binary crate belongs to - world.analysis.crates_for(file_id)?.into_iter().unique().collect::>() - } else { - // Trigger flychecks for all workspaces that depend on the saved file - // Crates containing or depending on the saved file - world - .analysis - .crates_for(file_id)? - .into_iter() - .flat_map(|id| world.analysis.transitive_rev_deps(id)) - .flatten() - .unique() - .collect::>() + let crate_ids = match target { + // Trigger flychecks for the only crate which the target belongs to + Some((_, krate)) => vec![krate], + None => { + // Trigger flychecks for all workspaces that depend on the saved file + // Crates containing or depending on the saved file + world + .analysis + .crates_for(file_id)? + .into_iter() + .flat_map(|id| world.analysis.transitive_rev_deps(id)) + .flatten() + .unique() + .collect::>() + } }; - let crate_root_paths: Vec<_> = crate_ids .iter() .filter_map(|&crate_id| { @@ -375,7 +376,8 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { match package .filter(|_| !world.config.flycheck_workspace() || target.is_some()) { - Some(package) => flycheck.restart_for_package(package, target.clone()), + Some(package) => flycheck + .restart_for_package(package, target.clone().map(TupleExt::head)), None => flycheck.restart_workspace(saved_file.clone()), } continue; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs index 965fd415e9967..954e13cbf2727 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs @@ -62,6 +62,7 @@ pub(crate) struct CargoTargetSpec { #[derive(Clone, Debug)] pub(crate) struct ProjectJsonTargetSpec { + pub(crate) crate_id: CrateId, pub(crate) label: String, pub(crate) target_kind: TargetKind, pub(crate) shell_runnables: Vec, From eb896a580a09d2ebd695ae879a35ba7fc7e6ade3 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Sat, 24 Aug 2024 01:28:48 +0900 Subject: [PATCH 056/124] fix: Wrong `Self: Sized` predicate for trait assoc items --- .../rust-analyzer/crates/hir-ty/src/lower.rs | 40 +++++++++++++++---- 1 file changed, 32 insertions(+), 8 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index 58c34cbd9e164..370d9ba99cb0c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -1743,15 +1743,39 @@ fn implicitly_sized_clauses<'a, 'subst: 'a>( substitution: &'subst Substitution, resolver: &Resolver, ) -> Option + Captures<'a> + Captures<'subst>> { - let is_trait_def = matches!(def, GenericDefId::TraitId(..)); - let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..]; 
let sized_trait = db .lang_item(resolver.krate(), LangItem::Sized) - .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id)); + .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id))?; - sized_trait.map(move |sized_trait| { - generic_args - .iter() + let get_trait_self_idx = |container: ItemContainerId| { + if matches!(container, ItemContainerId::TraitId(_)) { + let generics = generics(db.upcast(), def); + Some(generics.len_self()) + } else { + None + } + }; + let trait_self_idx = match def { + GenericDefId::TraitId(_) => Some(0), + GenericDefId::FunctionId(it) => get_trait_self_idx(it.lookup(db.upcast()).container), + GenericDefId::ConstId(it) => get_trait_self_idx(it.lookup(db.upcast()).container), + GenericDefId::TypeAliasId(it) => get_trait_self_idx(it.lookup(db.upcast()).container), + _ => None, + }; + + Some( + substitution + .iter(Interner) + .enumerate() + .filter_map( + move |(idx, generic_arg)| { + if Some(idx) == trait_self_idx { + None + } else { + Some(generic_arg) + } + }, + ) .filter_map(|generic_arg| generic_arg.ty(Interner)) .filter(move |&self_ty| !explicitly_unsized_tys.contains(self_ty)) .map(move |self_ty| { @@ -1759,8 +1783,8 @@ fn implicitly_sized_clauses<'a, 'subst: 'a>( trait_id: sized_trait, substitution: Substitution::from1(Interner, self_ty.clone()), }) - }) - }) + }), + ) } #[derive(Debug, Clone, PartialEq, Eq, Hash)] From bdbc057becb15199813051e623b786f57a1d7466 Mon Sep 17 00:00:00 2001 From: Wilfred Hughes Date: Fri, 23 Aug 2024 15:10:01 -0700 Subject: [PATCH 057/124] Include buildfile path in watcher list --- .../crates/rust-analyzer/src/reload.rs | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 5d0c6b65992c7..8a28de10e8204 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -564,6 +564,23 @@ impl GlobalState { .collect() }; + // Also explicitly watch any build files configured in JSON project files. 
+ for ws in self.workspaces.iter() { + if let ProjectWorkspaceKind::Json(project_json) = &ws.kind { + for (_, krate) in project_json.crates() { + let Some(build) = &krate.build else { + continue; + }; + watchers.push(lsp_types::FileSystemWatcher { + glob_pattern: lsp_types::GlobPattern::String( + build.build_file.to_string(), + ), + kind: None, + }); + } + } + } + watchers.extend( iter::once(Config::user_config_path()) .chain(self.workspaces.iter().map(|ws| ws.manifest().map(ManifestPath::as_ref))) From 7f968ba98d853f29042c8e917d2d8047043551f8 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sat, 24 Aug 2024 09:58:16 +0200 Subject: [PATCH 058/124] fix calling pipe, pipe2, socketpair with a pointer-to-array --- src/tools/miri/src/shims/unix/unnamed_socket.rs | 2 +- src/tools/miri/tests/pass-dep/libc/libc-pipe.rs | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/src/tools/miri/src/shims/unix/unnamed_socket.rs b/src/tools/miri/src/shims/unix/unnamed_socket.rs index 745f27398d0a8..127aef29244c5 100644 --- a/src/tools/miri/src/shims/unix/unnamed_socket.rs +++ b/src/tools/miri/src/shims/unix/unnamed_socket.rs @@ -339,7 +339,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { ) -> InterpResult<'tcx, Scalar> { let this = self.eval_context_mut(); - let pipefd = this.deref_pointer(pipefd)?; + let pipefd = this.deref_pointer_as(pipefd, this.machine.layouts.i32)?; let flags = match flags { Some(flags) => this.read_scalar(flags)?.to_i32()?, None => 0, diff --git a/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs b/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs index 5dff612bd8930..90dbd888392bf 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs @@ -6,6 +6,7 @@ fn main() { test_pipe(); test_pipe_threaded(); test_race(); + test_pipe_array(); } fn test_pipe() { @@ -97,3 +98,13 @@ fn test_race() { thread::yield_now(); thread1.join().unwrap(); } + +fn test_pipe_array() { + // Declare `pipe` to take an array rather than a `*mut i32`. + extern "C" { + fn pipe(pipefd: &mut [i32; 2]) -> i32; + } + + let mut fds: [i32; 2] = [0; 2]; + assert_eq!(unsafe { pipe(&mut fds) }, 0); +} From 14f22c6e66b75aab02fc2a50ce18f1af638050ec Mon Sep 17 00:00:00 2001 From: tiif Date: Fri, 23 Aug 2024 21:23:09 +0800 Subject: [PATCH 059/124] epoll: Add EINVAL case --- src/tools/miri/src/shims/unix/linux/epoll.rs | 7 +++++++ src/tools/miri/tests/pass-dep/libc/libc-epoll.rs | 14 ++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/src/tools/miri/src/shims/unix/linux/epoll.rs b/src/tools/miri/src/shims/unix/linux/epoll.rs index fb1e0afdf9ed7..ea430fec59340 100644 --- a/src/tools/miri/src/shims/unix/linux/epoll.rs +++ b/src/tools/miri/src/shims/unix/linux/epoll.rs @@ -251,6 +251,13 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { throw_unsup_format!("epoll_ctl: encountered unknown unsupported operation {:#x}", op); } + // Throw EINVAL if epfd and fd have the same value. + if epfd_value == fd { + let einval = this.eval_libc("EINVAL"); + this.set_last_error(einval)?; + return Ok(Scalar::from_i32(-1)); + } + // Check if epfd is a valid epoll file descriptor. 
let Some(epfd) = this.machine.fds.get(epfd_value) else { return Ok(Scalar::from_i32(this.fd_not_found()?)); diff --git a/src/tools/miri/tests/pass-dep/libc/libc-epoll.rs b/src/tools/miri/tests/pass-dep/libc/libc-epoll.rs index 052ce73de237f..7b8acbd120061 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-epoll.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-epoll.rs @@ -21,6 +21,7 @@ fn main() { test_socketpair_epollerr(); test_epoll_lost_events(); test_ready_list_fetching_logic(); + test_epoll_ctl_epfd_equal_fd(); } // Using `as` cast since `EPOLLET` wraps around @@ -630,3 +631,16 @@ fn test_ready_list_fetching_logic() { let expected_value1 = fd1 as u64; check_epoll_wait::<1>(epfd, &[(expected_event1, expected_value1)]); } + +// In epoll_ctl, if the value of epfd equals to fd, EINVAL should be returned. +fn test_epoll_ctl_epfd_equal_fd() { + // Create an epoll instance. + let epfd = unsafe { libc::epoll_create1(0) }; + assert_ne!(epfd, -1); + + let array_ptr = std::ptr::without_provenance_mut::(0x100); + let res = unsafe { libc::epoll_ctl(epfd, libc::EPOLL_CTL_ADD, epfd, array_ptr) }; + let e = std::io::Error::last_os_error(); + assert_eq!(e.raw_os_error(), Some(libc::EINVAL)); + assert_eq!(res, -1); +} From 23f308b5db6ad3f0c9fe8ad451090be17e66d9f6 Mon Sep 17 00:00:00 2001 From: tiif Date: Tue, 20 Aug 2024 16:48:05 +0800 Subject: [PATCH 060/124] Handle edge case for epoll_ctl --- src/tools/miri/src/shims/unix/linux/epoll.rs | 58 +++++++++++-------- .../libc/libc_epoll_unsupported_fd.rs | 18 ++++++ .../libc/libc_epoll_unsupported_fd.stderr | 14 +++++ .../miri/tests/pass-dep/libc/libc-epoll.rs | 39 +++++++++++++ 4 files changed, 105 insertions(+), 24 deletions(-) create mode 100644 src/tools/miri/tests/fail-dep/libc/libc_epoll_unsupported_fd.rs create mode 100644 src/tools/miri/tests/fail-dep/libc/libc_epoll_unsupported_fd.stderr diff --git a/src/tools/miri/src/shims/unix/linux/epoll.rs b/src/tools/miri/src/shims/unix/linux/epoll.rs index ea430fec59340..53f8b06ca6ad1 100644 --- a/src/tools/miri/src/shims/unix/linux/epoll.rs +++ b/src/tools/miri/src/shims/unix/linux/epoll.rs @@ -269,10 +269,10 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let mut interest_list = epoll_file_description.interest_list.borrow_mut(); let ready_list = &epoll_file_description.ready_list; - let Some(file_descriptor) = this.machine.fds.get(fd) else { + let Some(fd_ref) = this.machine.fds.get(fd) else { return Ok(Scalar::from_i32(this.fd_not_found()?)); }; - let id = file_descriptor.get_id(); + let id = fd_ref.get_id(); if op == epoll_ctl_add || op == epoll_ctl_mod { // Read event bitmask and data from epoll_event passed by caller. @@ -332,7 +332,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } } - let id = file_descriptor.get_id(); // Create an epoll_interest. let interest = Rc::new(RefCell::new(EpollEventInterest { file_descriptor: fd, @@ -344,7 +343,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { if op == epoll_ctl_add { // Insert an epoll_interest to global epoll_interest list. this.machine.epoll_interests.insert_epoll_interest(id, Rc::downgrade(&interest)); - interest_list.insert(epoll_key, interest); + interest_list.insert(epoll_key, Rc::clone(&interest)); } else { // Directly modify the epoll_interest so the global epoll_event_interest table // will be updated too. 
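
The comment just above depends on the ownership split used for interests: the epoll instance's interest list keeps the strong `Rc`, while the global table keeps only a `Weak`, so an in-place `EPOLL_CTL_MOD` is observed through both handles and the entry dies together with its epoll instance. A minimal stand-alone sketch of that pattern, with an invented `Interest` type rather than Miri's real one:

```rust
use std::cell::RefCell;
use std::rc::{Rc, Weak};

struct Interest {
    events: u32,
}

fn main() {
    // The interest list owns the strong handle ...
    let strong: Rc<RefCell<Interest>> = Rc::new(RefCell::new(Interest { events: 0 }));
    // ... while the global registry only keeps a weak one.
    let global: Weak<RefCell<Interest>> = Rc::downgrade(&strong);

    // "EPOLL_CTL_MOD": modify through the strong handle ...
    strong.borrow_mut().events = 0b11;
    // ... and the weak handle in the global table observes the update.
    assert_eq!(global.upgrade().unwrap().borrow().events, 0b11);

    // Dropping the strong handle (closing the epoll instance) invalidates the weak one.
    drop(strong);
    assert!(global.upgrade().is_none());
}
```

This is also why a later comment in the series can rely on the weak upgrade failing only once the owning epoll instance is gone.
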
@@ -353,9 +352,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { epoll_interest.data = data; } - // Readiness will be updated immediately when the epoll_event_interest is added or modified. - this.check_and_update_readiness(&file_descriptor)?; - + // Notification will be returned for current epfd if there is event in the file + // descriptor we registered. + check_and_update_one_event_interest(&fd_ref, interest, id, this)?; return Ok(Scalar::from_i32(0)); } else if op == epoll_ctl_del { let epoll_key = (id, fd); @@ -489,25 +488,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let id = fd_ref.get_id(); // Get a list of EpollEventInterest that is associated to a specific file description. if let Some(epoll_interests) = this.machine.epoll_interests.get_epoll_interest(id) { - let epoll_ready_events = fd_ref.get_epoll_ready_events()?; - // Get the bitmask of ready events. - let ready_events = epoll_ready_events.get_event_bitmask(this); - for weak_epoll_interest in epoll_interests { if let Some(epoll_interest) = weak_epoll_interest.upgrade() { - // This checks if any of the events specified in epoll_event_interest.events - // match those in ready_events. - let epoll_event_interest = epoll_interest.borrow(); - let flags = epoll_event_interest.events & ready_events; - // If there is any event that we are interested in being specified as ready, - // insert an epoll_return to the ready list. - if flags != 0 { - let epoll_key = (id, epoll_event_interest.file_descriptor); - let ready_list = &mut epoll_event_interest.ready_list.borrow_mut(); - let event_instance = - EpollEventInstance::new(flags, epoll_event_interest.data); - ready_list.insert(epoll_key, event_instance); - } + check_and_update_one_event_interest(fd_ref, epoll_interest, id, this)?; } } } @@ -532,3 +515,30 @@ fn ready_list_next( } return None; } + +/// This helper function checks whether an epoll notification should be triggered for a specific +/// epoll_interest and, if necessary, triggers the notification. Unlike check_and_update_readiness, +/// this function sends a notification to only one epoll instance. +fn check_and_update_one_event_interest<'tcx>( + fd_ref: &FileDescriptionRef, + interest: Rc>, + id: FdId, + ecx: &MiriInterpCx<'tcx>, +) -> InterpResult<'tcx> { + // Get the bitmask of ready events for a file description. + let ready_events_bitmask = fd_ref.get_epoll_ready_events()?.get_event_bitmask(ecx); + let epoll_event_interest = interest.borrow(); + // This checks if any of the events specified in epoll_event_interest.events + // match those in ready_events. + let flags = epoll_event_interest.events & ready_events_bitmask; + // If there is any event that we are interested in being specified as ready, + // insert an epoll_return to the ready list. + if flags != 0 { + let epoll_key = (id, epoll_event_interest.file_descriptor); + let ready_list = &mut epoll_event_interest.ready_list.borrow_mut(); + let event_instance = EpollEventInstance::new(flags, epoll_event_interest.data); + // Triggers the notification by inserting it to the ready list. + ready_list.insert(epoll_key, event_instance); + } + Ok(()) +} diff --git a/src/tools/miri/tests/fail-dep/libc/libc_epoll_unsupported_fd.rs b/src/tools/miri/tests/fail-dep/libc/libc_epoll_unsupported_fd.rs new file mode 100644 index 0000000000000..fb2426f7b66d8 --- /dev/null +++ b/src/tools/miri/tests/fail-dep/libc/libc_epoll_unsupported_fd.rs @@ -0,0 +1,18 @@ +//@only-target-linux + +// This is a test for registering unsupported fd with epoll. 
+// Register epoll fd with epoll is allowed in real system, but we do not support this. +fn main() { + // Create two epoll instance. + let epfd0 = unsafe { libc::epoll_create1(0) }; + assert_ne!(epfd0, -1); + let epfd1 = unsafe { libc::epoll_create1(0) }; + assert_ne!(epfd1, -1); + + // Register epoll with epoll. + let mut ev = + libc::epoll_event { events: (libc::EPOLLIN | libc::EPOLLET) as _, u64: epfd1 as u64 }; + let res = unsafe { libc::epoll_ctl(epfd0, libc::EPOLL_CTL_ADD, epfd1, &mut ev) }; + //~^ERROR: epoll does not support this file description + assert_eq!(res, 0); +} diff --git a/src/tools/miri/tests/fail-dep/libc/libc_epoll_unsupported_fd.stderr b/src/tools/miri/tests/fail-dep/libc/libc_epoll_unsupported_fd.stderr new file mode 100644 index 0000000000000..6f9b988d4ee2d --- /dev/null +++ b/src/tools/miri/tests/fail-dep/libc/libc_epoll_unsupported_fd.stderr @@ -0,0 +1,14 @@ +error: unsupported operation: epoll: epoll does not support this file description + --> $DIR/libc_epoll_unsupported_fd.rs:LL:CC + | +LL | let res = unsafe { libc::epoll_ctl(epfd0, libc::EPOLL_CTL_ADD, epfd1, &mut ev) }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ epoll: epoll does not support this file description + | + = help: this is likely not a bug in the program; it indicates that the program performed an operation that Miri does not support + = note: BACKTRACE: + = note: inside `main` at $DIR/libc_epoll_unsupported_fd.rs:LL:CC + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to 1 previous error + diff --git a/src/tools/miri/tests/pass-dep/libc/libc-epoll.rs b/src/tools/miri/tests/pass-dep/libc/libc-epoll.rs index 7b8acbd120061..647b5e60649e8 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-epoll.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-epoll.rs @@ -22,6 +22,7 @@ fn main() { test_epoll_lost_events(); test_ready_list_fetching_logic(); test_epoll_ctl_epfd_equal_fd(); + test_epoll_ctl_notification(); } // Using `as` cast since `EPOLLET` wraps around @@ -644,3 +645,41 @@ fn test_epoll_ctl_epfd_equal_fd() { assert_eq!(e.raw_os_error(), Some(libc::EINVAL)); assert_eq!(res, -1); } + +// We previously used check_and_update_readiness the moment a file description is registered in an +// epoll instance. But this has an unfortunate side effect of returning notification to another +// epfd that shouldn't receive notification. +fn test_epoll_ctl_notification() { + // Create an epoll instance. + let epfd0 = unsafe { libc::epoll_create1(0) }; + assert_ne!(epfd0, -1); + + // Create a socketpair instance. + let mut fds = [-1, -1]; + let res = unsafe { libc::socketpair(libc::AF_UNIX, libc::SOCK_STREAM, 0, fds.as_mut_ptr()) }; + assert_eq!(res, 0); + + // Register one side of the socketpair with epoll. + let mut ev = libc::epoll_event { events: EPOLL_IN_OUT_ET, u64: fds[0] as u64 }; + let res = unsafe { libc::epoll_ctl(epfd0, libc::EPOLL_CTL_ADD, fds[0], &mut ev) }; + assert_eq!(res, 0); + + // epoll_wait to clear notification for epfd0. + let expected_event = u32::try_from(libc::EPOLLOUT).unwrap(); + let expected_value = fds[0] as u64; + check_epoll_wait::<1>(epfd0, &[(expected_event, expected_value)]); + + // Create another epoll instance. + let epfd1 = unsafe { libc::epoll_create1(0) }; + assert_ne!(epfd1, -1); + + // Register the same file description for epfd1. 
+ let mut ev = libc::epoll_event { events: EPOLL_IN_OUT_ET, u64: fds[0] as u64 }; + let res = unsafe { libc::epoll_ctl(epfd1, libc::EPOLL_CTL_ADD, fds[0], &mut ev) }; + assert_eq!(res, 0); + check_epoll_wait::<1>(epfd1, &[(expected_event, expected_value)]); + + // Previously this epoll_wait will receive a notification, but we shouldn't return notification + // for this epfd, because there is no I/O event between the two epoll_wait. + check_epoll_wait::<1>(epfd0, &[]); +} From f71cdbbc467ee0f13703dabe025fdd1f49f287f2 Mon Sep 17 00:00:00 2001 From: tiif Date: Mon, 19 Aug 2024 15:13:12 +0800 Subject: [PATCH 061/124] Support blocking for epoll --- src/tools/miri/src/concurrency/thread.rs | 2 + src/tools/miri/src/shims/unix/fd.rs | 8 + src/tools/miri/src/shims/unix/linux/epoll.rs | 151 +++++++++++++++--- .../src/shims/unix/linux/foreign_items.rs | 3 +- .../miri/tests/fail-dep/tokio/sleep.stderr | 15 -- .../pass-dep/libc/libc-epoll-blocking.rs | 98 ++++++++++++ ...ibc-epoll.rs => libc-epoll-no-blocking.rs} | 0 .../{fail-dep => pass-dep}/tokio/sleep.rs | 2 - 8 files changed, 239 insertions(+), 40 deletions(-) delete mode 100644 src/tools/miri/tests/fail-dep/tokio/sleep.stderr create mode 100644 src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs rename src/tools/miri/tests/pass-dep/libc/{libc-epoll.rs => libc-epoll-no-blocking.rs} (100%) rename src/tools/miri/tests/{fail-dep => pass-dep}/tokio/sleep.rs (80%) diff --git a/src/tools/miri/src/concurrency/thread.rs b/src/tools/miri/src/concurrency/thread.rs index f72591f0c4bdf..1b119ae71924e 100644 --- a/src/tools/miri/src/concurrency/thread.rs +++ b/src/tools/miri/src/concurrency/thread.rs @@ -172,6 +172,8 @@ pub enum BlockReason { Futex { addr: u64 }, /// Blocked on an InitOnce. InitOnce(InitOnceId), + /// Blocked on epoll + Epoll, } /// The state of a thread. diff --git a/src/tools/miri/src/shims/unix/fd.rs b/src/tools/miri/src/shims/unix/fd.rs index e3b9835e360a6..3ca5f6bb2dff5 100644 --- a/src/tools/miri/src/shims/unix/fd.rs +++ b/src/tools/miri/src/shims/unix/fd.rs @@ -278,6 +278,14 @@ impl WeakFileDescriptionRef { } } +impl VisitProvenance for WeakFileDescriptionRef { + fn visit_provenance(&self, _visit: &mut VisitWith<'_>) { + // A weak reference can never be the only reference to some pointer or place. + // Since the actual file description is tracked by strong ref somewhere, + // it is ok to make this a NOP operation. + } +} + /// A unique id for file descriptions. While we could use the address, considering that /// is definitely unique, the address would expose interpreter internal state when used /// for sorting things. So instead we generate a unique id per file description that stays diff --git a/src/tools/miri/src/shims/unix/linux/epoll.rs b/src/tools/miri/src/shims/unix/linux/epoll.rs index 53f8b06ca6ad1..a0baa781dea3a 100644 --- a/src/tools/miri/src/shims/unix/linux/epoll.rs +++ b/src/tools/miri/src/shims/unix/linux/epoll.rs @@ -2,8 +2,9 @@ use std::cell::RefCell; use std::collections::BTreeMap; use std::io; use std::rc::{Rc, Weak}; +use std::time::Duration; -use crate::shims::unix::fd::{FdId, FileDescriptionRef}; +use crate::shims::unix::fd::{FdId, FileDescriptionRef, WeakFileDescriptionRef}; use crate::shims::unix::*; use crate::*; @@ -19,6 +20,8 @@ struct Epoll { // This is an Rc because EpollInterest need to hold a reference to update // it. ready_list: Rc>>, + /// A list of thread ids blocked on this epoll instance. + thread_id: RefCell>, } /// EpollEventInstance contains information that will be returned by epoll_wait. 
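
The hunks that follow make `epoll_wait` actually block, so they need to model the kernel's timeout contract: `0` returns immediately, `-1` waits until an event arrives, and a positive value waits at most that many milliseconds. A stand-alone libc-level sketch of that contract, independent of the patch and of its tests (it assumes the `libc` crate; the eventfd and the 10 ms delay are arbitrary choices, not anything the patch prescribes):

```rust
use std::thread;
use std::time::Duration;

fn main() {
    let epfd = unsafe { libc::epoll_create1(0) };
    assert_ne!(epfd, -1);
    let evfd = unsafe { libc::eventfd(0, 0) };
    assert_ne!(evfd, -1);

    // Watch the eventfd for readability.
    let mut ev = libc::epoll_event { events: libc::EPOLLIN as u32, u64: evfd as u64 };
    assert_eq!(unsafe { libc::epoll_ctl(epfd, libc::EPOLL_CTL_ADD, evfd, &mut ev) }, 0);

    let mut out = [libc::epoll_event { events: 0, u64: 0 }; 1];

    // timeout = 0: never blocks; nothing is ready yet, so zero events come back.
    assert_eq!(unsafe { libc::epoll_wait(epfd, out.as_mut_ptr(), 1, 0) }, 0);

    // Make the eventfd readable from another thread after a short delay.
    let writer = thread::spawn(move || {
        thread::sleep(Duration::from_millis(10));
        let one: u64 = 1;
        let n = unsafe { libc::write(evfd, &one as *const u64 as *const libc::c_void, 8) };
        assert_eq!(n, 8);
    });

    // timeout = -1: blocks until the write above arrives; a positive timeout
    // would instead give up after that many milliseconds.
    assert_eq!(unsafe { libc::epoll_wait(epfd, out.as_mut_ptr(), 1, -1) }, 1);
    writer.join().unwrap();
}
```
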
@@ -58,6 +61,8 @@ pub struct EpollEventInterest { data: u64, /// Ready list of the epoll instance under which this EpollEventInterest is registered. ready_list: Rc>>, + /// The file descriptor value that this EpollEventInterest is registered under. + epfd: i32, } /// EpollReadyEvents reflects the readiness of a file description. @@ -338,6 +343,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { events, data, ready_list: Rc::clone(ready_list), + epfd: epfd_value, })); if op == epoll_ctl_add { @@ -395,7 +401,10 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { /// The `timeout` argument specifies the number of milliseconds that /// `epoll_wait()` will block. Time is measured against the - /// CLOCK_MONOTONIC clock. + /// CLOCK_MONOTONIC clock. If the timeout is zero, the function will not block, + /// while if the timeout is -1, the function will block + /// until at least one event has been retrieved (or an error + /// occurred). /// A call to `epoll_wait()` will block until either: /// • a file descriptor delivers an event; @@ -421,35 +430,107 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { events_op: &OpTy<'tcx>, maxevents: &OpTy<'tcx>, timeout: &OpTy<'tcx>, - ) -> InterpResult<'tcx, Scalar> { + dest: MPlaceTy<'tcx>, + ) -> InterpResult<'tcx> { let this = self.eval_context_mut(); - let epfd = this.read_scalar(epfd)?.to_i32()?; + let epfd_value = this.read_scalar(epfd)?.to_i32()?; let events = this.read_immediate(events_op)?; let maxevents = this.read_scalar(maxevents)?.to_i32()?; let timeout = this.read_scalar(timeout)?.to_i32()?; - if epfd <= 0 || maxevents <= 0 { + if epfd_value <= 0 || maxevents <= 0 { let einval = this.eval_libc("EINVAL"); this.set_last_error(einval)?; - return Ok(Scalar::from_i32(-1)); + this.write_int(-1, &dest)?; + return Ok(()); } // This needs to come after the maxevents value check, or else maxevents.try_into().unwrap() // will fail. - let events = this.deref_pointer_as( + let event = this.deref_pointer_as( &events, this.libc_array_ty_layout("epoll_event", maxevents.try_into().unwrap()), )?; - // FIXME: Implement blocking support - if timeout != 0 { - throw_unsup_format!("epoll_wait: timeout value can only be 0"); + let Some(epfd) = this.machine.fds.get(epfd_value) else { + let result_value: i32 = this.fd_not_found()?; + this.write_int(result_value, &dest)?; + return Ok(()); + }; + // Create a weak ref of epfd and pass it to callback so we will make sure that epfd + // is not close after the thread unblocks. + let weak_epfd = epfd.downgrade(); + + // We just need to know if the ready list is empty and borrow the thread_ids out. + // The whole logic is wrapped inside a block so we don't need to manually drop epfd later. + let ready_list_empty; + let mut thread_ids; + { + let epoll_file_description = epfd + .downcast::() + .ok_or_else(|| err_unsup_format!("non-epoll FD passed to `epoll_wait`"))?; + let binding = epoll_file_description.get_ready_list(); + ready_list_empty = binding.borrow_mut().is_empty(); + thread_ids = epoll_file_description.thread_id.borrow_mut(); + } + if timeout == 0 || !ready_list_empty { + // If the ready list is not empty, or the timeout is 0, we can return immediately. + this.blocking_epoll_callback(epfd_value, weak_epfd, &dest, &event)?; + } else { + // Blocking + let timeout = match timeout { + 0.. 
=> { + let duration = Duration::from_millis(timeout.try_into().unwrap()); + Some((TimeoutClock::Monotonic, TimeoutAnchor::Relative, duration)) + } + -1 => None, + ..-1 => { + throw_unsup_format!( + "epoll_wait: Only timeout values greater than -1 are supported." + ); + } + }; + thread_ids.push(this.active_thread()); + this.block_thread( + BlockReason::Epoll, + timeout, + callback!( + @capture<'tcx> { + epfd_value: i32, + weak_epfd: WeakFileDescriptionRef, + dest: MPlaceTy<'tcx>, + event: MPlaceTy<'tcx>, + } + @unblock = |this| { + this.blocking_epoll_callback(epfd_value, weak_epfd, &dest, &event)?; + Ok(()) + } + @timeout = |this| { + // No notification after blocking timeout. + this.write_int(0, &dest)?; + Ok(()) + } + ), + ); } + Ok(()) + } - let Some(epfd) = this.machine.fds.get(epfd) else { - return Ok(Scalar::from_i32(this.fd_not_found()?)); + /// Callback function after epoll_wait unblocks + fn blocking_epoll_callback( + &mut self, + epfd_value: i32, + weak_epfd: WeakFileDescriptionRef, + dest: &MPlaceTy<'tcx>, + event: &MPlaceTy<'tcx>, + ) -> InterpResult<'tcx> { + let this = self.eval_context_mut(); + + let Some(epfd) = weak_epfd.upgrade() else { + throw_unsup_format!("epoll FD {epfd_value} is closed while blocking.") }; + let epoll_file_description = epfd .downcast::() .ok_or_else(|| err_unsup_format!("non-epoll FD passed to `epoll_wait`"))?; @@ -457,7 +538,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let ready_list = epoll_file_description.get_ready_list(); let mut ready_list = ready_list.borrow_mut(); let mut num_of_events: i32 = 0; - let mut array_iter = this.project_array_fields(&events)?; + let mut array_iter = this.project_array_fields(event)?; while let Some(des) = array_iter.next(this)? { if let Some(epoll_event_instance) = ready_list_next(this, &mut ready_list) { @@ -473,7 +554,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { break; } } - Ok(Scalar::from_i32(num_of_events)) + this.write_int(num_of_events, dest)?; + Ok(()) } /// For a specific file description, get its ready events and update the corresponding ready @@ -483,17 +565,42 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { /// /// This *will* report an event if anyone is subscribed to it, without any further filtering, so /// do not call this function when an FD didn't have anything happen to it! - fn check_and_update_readiness(&self, fd_ref: &FileDescriptionRef) -> InterpResult<'tcx, ()> { - let this = self.eval_context_ref(); + fn check_and_update_readiness( + &mut self, + fd_ref: &FileDescriptionRef, + ) -> InterpResult<'tcx, ()> { + let this = self.eval_context_mut(); let id = fd_ref.get_id(); + let mut waiter = Vec::new(); // Get a list of EpollEventInterest that is associated to a specific file description. if let Some(epoll_interests) = this.machine.epoll_interests.get_epoll_interest(id) { for weak_epoll_interest in epoll_interests { if let Some(epoll_interest) = weak_epoll_interest.upgrade() { - check_and_update_one_event_interest(fd_ref, epoll_interest, id, this)?; + let is_updated = check_and_update_one_event_interest(fd_ref, epoll_interest, id, this)?; + if is_updated { + // Edge-triggered notification only notify one thread even if there are + // multiple threads block on the same epfd. + let epfd = this.machine.fds.get(epoll_event_interest.epfd).unwrap(); + // FIXME: We can randomly pick a thread to unblock. 
+ + // This unwrap can never fail because if the current epoll instance were + // closed and its epfd value reused, the upgrade of weak_epoll_interest + // above would fail. This guarantee holds because only the epoll instance + // holds a strong ref to epoll_interest. + if let Some(thread_id) = + epfd.downcast::().unwrap().thread_id.borrow_mut().pop() + { + waiter.push(thread_id); + }; + } } } } + waiter.sort(); + waiter.dedup(); + for thread_id in waiter { + this.unblock_thread(thread_id, BlockReason::Epoll)?; + } Ok(()) } } @@ -517,14 +624,15 @@ fn ready_list_next( } /// This helper function checks whether an epoll notification should be triggered for a specific -/// epoll_interest and, if necessary, triggers the notification. Unlike check_and_update_readiness, -/// this function sends a notification to only one epoll instance. +/// epoll_interest and, if necessary, triggers the notification, and returns whether the +/// event interest was updated. Unlike check_and_update_readiness, this function sends a +/// notification to only one epoll instance. fn check_and_update_one_event_interest<'tcx>( fd_ref: &FileDescriptionRef, interest: Rc>, id: FdId, ecx: &MiriInterpCx<'tcx>, -) -> InterpResult<'tcx> { +) -> InterpResult<'tcx, bool> { // Get the bitmask of ready events for a file description. let ready_events_bitmask = fd_ref.get_epoll_ready_events()?.get_event_bitmask(ecx); let epoll_event_interest = interest.borrow(); @@ -539,6 +647,7 @@ fn check_and_update_one_event_interest<'tcx>( let event_instance = EpollEventInstance::new(flags, epoll_event_interest.data); // Triggers the notification by inserting it to the ready list. ready_list.insert(epoll_key, event_instance); + return Ok(true); } - Ok(()) + return Ok(false); } diff --git a/src/tools/miri/src/shims/unix/linux/foreign_items.rs b/src/tools/miri/src/shims/unix/linux/foreign_items.rs index 581f0db42e1dc..d21f0e8f3e662 100644 --- a/src/tools/miri/src/shims/unix/linux/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/linux/foreign_items.rs @@ -62,8 +62,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "epoll_wait" => { let [epfd, events, maxevents, timeout] = this.check_shim(abi, Abi::C { unwind: false }, link_name, args)?; - let result = this.epoll_wait(epfd, events, maxevents, timeout)?; - this.write_scalar(result, dest)?; + this.epoll_wait(epfd, events, maxevents, timeout, dest.clone())?; } "eventfd" => { let [val, flag] = diff --git a/src/tools/miri/tests/fail-dep/tokio/sleep.stderr b/src/tools/miri/tests/fail-dep/tokio/sleep.stderr deleted file mode 100644 index d5bf00fc17503..0000000000000 --- a/src/tools/miri/tests/fail-dep/tokio/sleep.stderr +++ /dev/null @@ -1,15 +0,0 @@ -error: unsupported operation: epoll_wait: timeout value can only be 0 - --> CARGO_REGISTRY/.../epoll.rs:LL:CC - | -LL | / syscall!(epoll_wait( -LL | | self.ep.as_raw_fd(), -LL | | events.as_mut_ptr(), -LL | | events.capacity() as i32, -LL | | timeout, -LL | | )) - | |__________^ epoll_wait: timeout value can only be 0 - | - = help: this is likely not a bug in the program; it indicates that the program performed an operation that Miri does not support - -error: aborting due to 1 previous error - diff --git a/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs b/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs new file mode 100644 index 0000000000000..e1b4d3d85be66 --- /dev/null +++ b/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs @@ -0,0 +1,98 @@ +//@only-target-linux + +use std::convert::TryInto; +use 
std::thread; +use std::thread::spawn; + +// This is a set of testcases for blocking epoll. + +fn main() { + test_epoll_block_without_notification(); + test_epoll_block_then_unblock(); +} + +// Using `as` cast since `EPOLLET` wraps around +const EPOLL_IN_OUT_ET: u32 = (libc::EPOLLIN | libc::EPOLLOUT | libc::EPOLLET) as _; + +#[track_caller] +fn check_epoll_wait( + epfd: i32, + expected_notifications: &[(u32, u64)], + timeout: i32, +) { + let epoll_event = libc::epoll_event { events: 0, u64: 0 }; + let mut array: [libc::epoll_event; N] = [epoll_event; N]; + let maxsize = N; + let array_ptr = array.as_mut_ptr(); + let res = unsafe { libc::epoll_wait(epfd, array_ptr, maxsize.try_into().unwrap(), timeout) }; + if res < 0 { + panic!("epoll_wait failed: {}", std::io::Error::last_os_error()); + } + assert_eq!( + res, + expected_notifications.len().try_into().unwrap(), + "got wrong number of notifications" + ); + let slice = unsafe { std::slice::from_raw_parts(array_ptr, res.try_into().unwrap()) }; + for (return_event, expected_event) in slice.iter().zip(expected_notifications.iter()) { + let event = return_event.events; + let data = return_event.u64; + assert_eq!(event, expected_event.0, "got wrong events"); + assert_eq!(data, expected_event.1, "got wrong data"); + } +} +fn test_epoll_block_without_notification() { + // Create an epoll instance. + let epfd = unsafe { libc::epoll_create1(0) }; + assert_ne!(epfd, -1); + + // Create an eventfd instances. + let flags = libc::EFD_NONBLOCK | libc::EFD_CLOEXEC; + let fd = unsafe { libc::eventfd(0, flags) }; + + // Register eventfd with epoll. + let mut ev = libc::epoll_event { events: EPOLL_IN_OUT_ET, u64: fd as u64 }; + let res = unsafe { libc::epoll_ctl(epfd, libc::EPOLL_CTL_ADD, fd, &mut ev) }; + assert_eq!(res, 0); + + // epoll_wait to clear notification. + let expected_event = u32::try_from(libc::EPOLLOUT).unwrap(); + let expected_value = fd as u64; + check_epoll_wait::<1>(epfd, &[(expected_event, expected_value)], 0); + + // epoll_wait before triggering notification so it will block then unblock. + check_epoll_wait::<1>(epfd, &[], 5); +} + +fn test_epoll_block_then_unblock() { + // Create an epoll instance. + let epfd = unsafe { libc::epoll_create1(0) }; + assert_ne!(epfd, -1); + + // Create a socketpair instance. + let mut fds = [-1, -1]; + let res = unsafe { libc::socketpair(libc::AF_UNIX, libc::SOCK_STREAM, 0, fds.as_mut_ptr()) }; + assert_eq!(res, 0); + + // Register one side of the socketpair with epoll. + let mut ev = libc::epoll_event { events: EPOLL_IN_OUT_ET, u64: fds[0] as u64 }; + let res = unsafe { libc::epoll_ctl(epfd, libc::EPOLL_CTL_ADD, fds[0], &mut ev) }; + assert_eq!(res, 0); + + // epoll_wait to clear notification. + let expected_event = u32::try_from(libc::EPOLLOUT).unwrap(); + let expected_value = fds[0] as u64; + check_epoll_wait::<1>(epfd, &[(expected_event, expected_value)], 0); + + // epoll_wait before triggering notification so it will block then get unblocked before timeout. 
+ let expected_event = u32::try_from(libc::EPOLLIN | libc::EPOLLOUT).unwrap(); + let expected_value = fds[0] as u64; + let thread1 = spawn(move || { + thread::yield_now(); + let data = "abcde".as_bytes().as_ptr(); + let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + assert_eq!(res, 5); + }); + check_epoll_wait::<1>(epfd, &[(expected_event, expected_value)], 10); + thread1.join().unwrap(); +} diff --git a/src/tools/miri/tests/pass-dep/libc/libc-epoll.rs b/src/tools/miri/tests/pass-dep/libc/libc-epoll-no-blocking.rs similarity index 100% rename from src/tools/miri/tests/pass-dep/libc/libc-epoll.rs rename to src/tools/miri/tests/pass-dep/libc/libc-epoll-no-blocking.rs diff --git a/src/tools/miri/tests/fail-dep/tokio/sleep.rs b/src/tools/miri/tests/pass-dep/tokio/sleep.rs similarity index 80% rename from src/tools/miri/tests/fail-dep/tokio/sleep.rs rename to src/tools/miri/tests/pass-dep/tokio/sleep.rs index 0fa5080d48460..00cc68eba3eac 100644 --- a/src/tools/miri/tests/fail-dep/tokio/sleep.rs +++ b/src/tools/miri/tests/pass-dep/tokio/sleep.rs @@ -1,7 +1,5 @@ //@compile-flags: -Zmiri-permissive-provenance -Zmiri-backtrace=full //@only-target-x86_64-unknown-linux: support for tokio only on linux and x86 -//@error-in-other-file: timeout value can only be 0 -//@normalize-stderr-test: " += note:.*\n" -> "" use tokio::time::{sleep, Duration, Instant}; From e8175a42f78c2036845e8252061623eed9171820 Mon Sep 17 00:00:00 2001 From: byt <55319043+tiif@users.noreply.github.com> Date: Sat, 24 Aug 2024 23:16:05 +0800 Subject: [PATCH 062/124] Improve comment Co-authored-by: Ralf Jung --- src/tools/miri/src/shims/unix/linux/epoll.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/miri/src/shims/unix/linux/epoll.rs b/src/tools/miri/src/shims/unix/linux/epoll.rs index a0baa781dea3a..471ff5af11b75 100644 --- a/src/tools/miri/src/shims/unix/linux/epoll.rs +++ b/src/tools/miri/src/shims/unix/linux/epoll.rs @@ -528,7 +528,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let this = self.eval_context_mut(); let Some(epfd) = weak_epfd.upgrade() else { - throw_unsup_format!("epoll FD {epfd_value} is closed while blocking.") + throw_unsup_format!("epoll FD {epfd_value} got closed while blocking.") }; let epoll_file_description = epfd From 41ab4ecc0338958ea28463e8116b348c08cf62be Mon Sep 17 00:00:00 2001 From: tiif Date: Sat, 24 Aug 2024 23:28:23 +0800 Subject: [PATCH 063/124] Pass dest place reference to epoll_wait --- src/tools/miri/src/shims/unix/linux/epoll.rs | 9 +++++---- src/tools/miri/src/shims/unix/linux/foreign_items.rs | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/tools/miri/src/shims/unix/linux/epoll.rs b/src/tools/miri/src/shims/unix/linux/epoll.rs index 471ff5af11b75..fb2eba75b878b 100644 --- a/src/tools/miri/src/shims/unix/linux/epoll.rs +++ b/src/tools/miri/src/shims/unix/linux/epoll.rs @@ -430,7 +430,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { events_op: &OpTy<'tcx>, maxevents: &OpTy<'tcx>, timeout: &OpTy<'tcx>, - dest: MPlaceTy<'tcx>, + dest: &MPlaceTy<'tcx>, ) -> InterpResult<'tcx> { let this = self.eval_context_mut(); @@ -442,7 +442,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { if epfd_value <= 0 || maxevents <= 0 { let einval = this.eval_libc("EINVAL"); this.set_last_error(einval)?; - this.write_int(-1, &dest)?; + this.write_int(-1, dest)?; return Ok(()); } @@ -455,7 +455,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let Some(epfd) = 
this.machine.fds.get(epfd_value) else { let result_value: i32 = this.fd_not_found()?; - this.write_int(result_value, &dest)?; + this.write_int(result_value, dest)?; return Ok(()); }; // Create a weak ref of epfd and pass it to callback so we will make sure that epfd @@ -476,7 +476,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } if timeout == 0 || !ready_list_empty { // If the ready list is not empty, or the timeout is 0, we can return immediately. - this.blocking_epoll_callback(epfd_value, weak_epfd, &dest, &event)?; + this.blocking_epoll_callback(epfd_value, weak_epfd, dest, &event)?; } else { // Blocking let timeout = match timeout { @@ -492,6 +492,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } }; thread_ids.push(this.active_thread()); + let dest = dest.clone(); this.block_thread( BlockReason::Epoll, timeout, diff --git a/src/tools/miri/src/shims/unix/linux/foreign_items.rs b/src/tools/miri/src/shims/unix/linux/foreign_items.rs index d21f0e8f3e662..d64f13f63d91f 100644 --- a/src/tools/miri/src/shims/unix/linux/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/linux/foreign_items.rs @@ -62,7 +62,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "epoll_wait" => { let [epfd, events, maxevents, timeout] = this.check_shim(abi, Abi::C { unwind: false }, link_name, args)?; - this.epoll_wait(epfd, events, maxevents, timeout, dest.clone())?; + this.epoll_wait(epfd, events, maxevents, timeout, dest)?; } "eventfd" => { let [val, flag] = From 36235b9e0d23b911d2ebcf2b87c93cd78e4b0920 Mon Sep 17 00:00:00 2001 From: tiif Date: Sat, 24 Aug 2024 23:30:57 +0800 Subject: [PATCH 064/124] Rename event to events --- src/tools/miri/src/shims/unix/linux/epoll.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/tools/miri/src/shims/unix/linux/epoll.rs b/src/tools/miri/src/shims/unix/linux/epoll.rs index fb2eba75b878b..4efdf9518658a 100644 --- a/src/tools/miri/src/shims/unix/linux/epoll.rs +++ b/src/tools/miri/src/shims/unix/linux/epoll.rs @@ -524,7 +524,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { epfd_value: i32, weak_epfd: WeakFileDescriptionRef, dest: &MPlaceTy<'tcx>, - event: &MPlaceTy<'tcx>, + events: &MPlaceTy<'tcx>, ) -> InterpResult<'tcx> { let this = self.eval_context_mut(); @@ -539,7 +539,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let ready_list = epoll_file_description.get_ready_list(); let mut ready_list = ready_list.borrow_mut(); let mut num_of_events: i32 = 0; - let mut array_iter = this.project_array_fields(event)?; + let mut array_iter = this.project_array_fields(events)?; while let Some(des) = array_iter.next(this)? { if let Some(epoll_event_instance) = ready_list_next(this, &mut ready_list) { From cd67e47872d0f23d5388fe1ec9717e3b442ec1ee Mon Sep 17 00:00:00 2001 From: tiif Date: Sat, 24 Aug 2024 23:36:18 +0800 Subject: [PATCH 065/124] Fix error in the timeout value error message --- src/tools/miri/src/shims/unix/linux/epoll.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/miri/src/shims/unix/linux/epoll.rs b/src/tools/miri/src/shims/unix/linux/epoll.rs index 4efdf9518658a..ade1ca73378de 100644 --- a/src/tools/miri/src/shims/unix/linux/epoll.rs +++ b/src/tools/miri/src/shims/unix/linux/epoll.rs @@ -487,7 +487,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { -1 => None, ..-1 => { throw_unsup_format!( - "epoll_wait: Only timeout values greater than -1 are supported." 
+ "epoll_wait: Only timeout values greater than or equal to -1 are supported." ); } }; From 8f178e48125cbadc66f965f55c6521c2262b6bf0 Mon Sep 17 00:00:00 2001 From: tiif Date: Sat, 24 Aug 2024 23:45:46 +0800 Subject: [PATCH 066/124] Change timeout value --- src/tools/miri/tests/pass-dep/tokio/sleep.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/tools/miri/tests/pass-dep/tokio/sleep.rs b/src/tools/miri/tests/pass-dep/tokio/sleep.rs index 00cc68eba3eac..e6b02c02d0308 100644 --- a/src/tools/miri/tests/pass-dep/tokio/sleep.rs +++ b/src/tools/miri/tests/pass-dep/tokio/sleep.rs @@ -6,7 +6,7 @@ use tokio::time::{sleep, Duration, Instant}; #[tokio::main] async fn main() { let start = Instant::now(); - sleep(Duration::from_secs(1)).await; + sleep(Duration::from_millis(100)).await; let time_elapsed = &start.elapsed().as_millis(); - assert!((1000..1100).contains(time_elapsed), "{}", time_elapsed); + assert!((100..1000).contains(time_elapsed), "{}", time_elapsed); } From 7aff7110136e51674f31adff0c39f0afc622f8be Mon Sep 17 00:00:00 2001 From: tiif Date: Sun, 25 Aug 2024 01:05:20 +0800 Subject: [PATCH 067/124] Check if the thread is blocked before waking them up --- src/tools/miri/src/concurrency/thread.rs | 10 ++++++++++ src/tools/miri/src/shims/unix/linux/epoll.rs | 4 +++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/src/tools/miri/src/concurrency/thread.rs b/src/tools/miri/src/concurrency/thread.rs index 1b119ae71924e..246e480a6f8da 100644 --- a/src/tools/miri/src/concurrency/thread.rs +++ b/src/tools/miri/src/concurrency/thread.rs @@ -578,6 +578,10 @@ impl<'tcx> ThreadManager<'tcx> { self.threads[thread_id].state.is_terminated() } + fn has_blocked_on_epoll(&self, thread_id: ThreadId) -> bool { + self.threads[thread_id].state.is_blocked_on(BlockReason::Epoll) + } + /// Have all threads terminated? 
fn have_all_terminated(&self) -> bool { self.threads.iter().all(|thread| thread.state.is_terminated()) @@ -1137,6 +1141,12 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.machine.threads.enable_thread(thread_id); } + #[inline] + fn has_blocked_on_epoll(&self, thread_id: ThreadId) -> bool { + let this = self.eval_context_ref(); + this.machine.threads.has_blocked_on_epoll(thread_id) + } + #[inline] fn active_thread_stack<'a>(&'a self) -> &'a [Frame<'tcx, Provenance, FrameExtra<'tcx>>] { let this = self.eval_context_ref(); diff --git a/src/tools/miri/src/shims/unix/linux/epoll.rs b/src/tools/miri/src/shims/unix/linux/epoll.rs index ade1ca73378de..57731d746a9be 100644 --- a/src/tools/miri/src/shims/unix/linux/epoll.rs +++ b/src/tools/miri/src/shims/unix/linux/epoll.rs @@ -600,7 +600,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { waiter.sort(); waiter.dedup(); for thread_id in waiter { - this.unblock_thread(thread_id, BlockReason::Epoll)?; + if this.has_blocked_on_epoll(thread_id) { + this.unblock_thread(thread_id, BlockReason::Epoll)?; + } } Ok(()) } From a4d7564219460e952f50120b19afe0b1a72b8d58 Mon Sep 17 00:00:00 2001 From: tiif Date: Sun, 25 Aug 2024 01:20:01 +0800 Subject: [PATCH 068/124] Fix error introduced by rebase --- src/tools/miri/src/shims/unix/linux/epoll.rs | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/tools/miri/src/shims/unix/linux/epoll.rs b/src/tools/miri/src/shims/unix/linux/epoll.rs index 57731d746a9be..53d89b3917fb4 100644 --- a/src/tools/miri/src/shims/unix/linux/epoll.rs +++ b/src/tools/miri/src/shims/unix/linux/epoll.rs @@ -577,17 +577,22 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { if let Some(epoll_interests) = this.machine.epoll_interests.get_epoll_interest(id) { for weak_epoll_interest in epoll_interests { if let Some(epoll_interest) = weak_epoll_interest.upgrade() { - let is_updated = check_and_update_one_event_interest(fd_ref, epoll_interest, id, this)?; + let is_updated = check_and_update_one_event_interest( + fd_ref, + epoll_interest.clone(), + id, + this, + )?; if is_updated { // Edge-triggered notification only notify one thread even if there are // multiple threads block on the same epfd. - let epfd = this.machine.fds.get(epoll_event_interest.epfd).unwrap(); - // FIXME: We can randomly pick a thread to unblock. + let epfd = this.machine.fds.get(epoll_interest.borrow().epfd).unwrap(); // This unwrap can never fail because if the current epoll instance were // closed and its epfd value reused, the upgrade of weak_epoll_interest // above would fail. This guarantee holds because only the epoll instance // holds a strong ref to epoll_interest. + // FIXME: We can randomly pick a thread to unblock. if let Some(thread_id) = epfd.downcast::().unwrap().thread_id.borrow_mut().pop() { @@ -628,7 +633,7 @@ fn ready_list_next( /// This helper function checks whether an epoll notification should be triggered for a specific /// epoll_interest and, if necessary, triggers the notification, and returns whether the -/// event interest was updated. Unlike check_and_update_readiness, this function sends a +/// notification was added/updated. Unlike check_and_update_readiness, this function sends a /// notification to only one epoll instance. 
fn check_and_update_one_event_interest<'tcx>( fd_ref: &FileDescriptionRef, From 7e8ca571d0b6b1c551b7fe8af499ae7a7d787a6e Mon Sep 17 00:00:00 2001 From: tiif Date: Sun, 25 Aug 2024 02:05:21 +0800 Subject: [PATCH 069/124] Remove thread_id from epoll::thread_ids after timeout --- src/tools/miri/src/concurrency/thread.rs | 10 ---------- src/tools/miri/src/shims/unix/linux/epoll.rs | 12 +++++++++--- 2 files changed, 9 insertions(+), 13 deletions(-) diff --git a/src/tools/miri/src/concurrency/thread.rs b/src/tools/miri/src/concurrency/thread.rs index 246e480a6f8da..1b119ae71924e 100644 --- a/src/tools/miri/src/concurrency/thread.rs +++ b/src/tools/miri/src/concurrency/thread.rs @@ -578,10 +578,6 @@ impl<'tcx> ThreadManager<'tcx> { self.threads[thread_id].state.is_terminated() } - fn has_blocked_on_epoll(&self, thread_id: ThreadId) -> bool { - self.threads[thread_id].state.is_blocked_on(BlockReason::Epoll) - } - /// Have all threads terminated? fn have_all_terminated(&self) -> bool { self.threads.iter().all(|thread| thread.state.is_terminated()) @@ -1141,12 +1137,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.machine.threads.enable_thread(thread_id); } - #[inline] - fn has_blocked_on_epoll(&self, thread_id: ThreadId) -> bool { - let this = self.eval_context_ref(); - this.machine.threads.has_blocked_on_epoll(thread_id) - } - #[inline] fn active_thread_stack<'a>(&'a self) -> &'a [Frame<'tcx, Provenance, FrameExtra<'tcx>>] { let this = self.eval_context_ref(); diff --git a/src/tools/miri/src/shims/unix/linux/epoll.rs b/src/tools/miri/src/shims/unix/linux/epoll.rs index 53d89b3917fb4..7228665e4b3de 100644 --- a/src/tools/miri/src/shims/unix/linux/epoll.rs +++ b/src/tools/miri/src/shims/unix/linux/epoll.rs @@ -509,6 +509,14 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } @timeout = |this| { // No notification after blocking timeout. + let Some(epfd) = weak_epfd.upgrade() else { + throw_unsup_format!("epoll FD {epfd_value} got closed while blocking.") + }; + // Remove the current active thread_id from the blocked thread_id list. + epfd.downcast::() + .ok_or_else(|| err_unsup_format!("non-epoll FD passed to `epoll_wait`"))? + .thread_id.borrow_mut() + .retain(|&id| id != this.active_thread()); this.write_int(0, &dest)?; Ok(()) } @@ -605,9 +613,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { waiter.sort(); waiter.dedup(); for thread_id in waiter { - if this.has_blocked_on_epoll(thread_id) { - this.unblock_thread(thread_id, BlockReason::Epoll)?; - } + this.unblock_thread(thread_id, BlockReason::Epoll)?; } Ok(()) } From 25ca8554311fc307d00de5ceb1dd81a2de79285d Mon Sep 17 00:00:00 2001 From: tiif Date: Sun, 25 Aug 2024 02:19:03 +0800 Subject: [PATCH 070/124] Make blocking_epoll_callback a private function --- src/tools/miri/src/shims/unix/linux/epoll.rs | 84 ++++++++++---------- 1 file changed, 41 insertions(+), 43 deletions(-) diff --git a/src/tools/miri/src/shims/unix/linux/epoll.rs b/src/tools/miri/src/shims/unix/linux/epoll.rs index 7228665e4b3de..ee86cf5f26d7f 100644 --- a/src/tools/miri/src/shims/unix/linux/epoll.rs +++ b/src/tools/miri/src/shims/unix/linux/epoll.rs @@ -476,7 +476,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } if timeout == 0 || !ready_list_empty { // If the ready list is not empty, or the timeout is 0, we can return immediately. 
- this.blocking_epoll_callback(epfd_value, weak_epfd, dest, &event)?; + blocking_epoll_callback(epfd_value, weak_epfd, dest, &event, this)?; } else { // Blocking let timeout = match timeout { @@ -504,7 +504,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { event: MPlaceTy<'tcx>, } @unblock = |this| { - this.blocking_epoll_callback(epfd_value, weak_epfd, &dest, &event)?; + blocking_epoll_callback(epfd_value, weak_epfd, &dest, &event, this)?; Ok(()) } @timeout = |this| { @@ -526,47 +526,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { Ok(()) } - /// Callback function after epoll_wait unblocks - fn blocking_epoll_callback( - &mut self, - epfd_value: i32, - weak_epfd: WeakFileDescriptionRef, - dest: &MPlaceTy<'tcx>, - events: &MPlaceTy<'tcx>, - ) -> InterpResult<'tcx> { - let this = self.eval_context_mut(); - - let Some(epfd) = weak_epfd.upgrade() else { - throw_unsup_format!("epoll FD {epfd_value} got closed while blocking.") - }; - - let epoll_file_description = epfd - .downcast::() - .ok_or_else(|| err_unsup_format!("non-epoll FD passed to `epoll_wait`"))?; - - let ready_list = epoll_file_description.get_ready_list(); - let mut ready_list = ready_list.borrow_mut(); - let mut num_of_events: i32 = 0; - let mut array_iter = this.project_array_fields(events)?; - - while let Some(des) = array_iter.next(this)? { - if let Some(epoll_event_instance) = ready_list_next(this, &mut ready_list) { - this.write_int_fields_named( - &[ - ("events", epoll_event_instance.events.into()), - ("u64", epoll_event_instance.data.into()), - ], - &des.1, - )?; - num_of_events = num_of_events.strict_add(1); - } else { - break; - } - } - this.write_int(num_of_events, dest)?; - Ok(()) - } - /// For a specific file description, get its ready events and update the corresponding ready /// list. This function should be called whenever an event causes more bytes or an EOF to become /// newly readable from an FD, and whenever more bytes can be written to an FD or no more future @@ -665,3 +624,42 @@ fn check_and_update_one_event_interest<'tcx>( } return Ok(false); } + +/// Callback function after epoll_wait unblocks +fn blocking_epoll_callback<'tcx>( + epfd_value: i32, + weak_epfd: WeakFileDescriptionRef, + dest: &MPlaceTy<'tcx>, + events: &MPlaceTy<'tcx>, + ecx: &mut MiriInterpCx<'tcx>, +) -> InterpResult<'tcx> { + let Some(epfd) = weak_epfd.upgrade() else { + throw_unsup_format!("epoll FD {epfd_value} got closed while blocking.") + }; + + let epoll_file_description = epfd + .downcast::() + .ok_or_else(|| err_unsup_format!("non-epoll FD passed to `epoll_wait`"))?; + + let ready_list = epoll_file_description.get_ready_list(); + let mut ready_list = ready_list.borrow_mut(); + let mut num_of_events: i32 = 0; + let mut array_iter = ecx.project_array_fields(events)?; + + while let Some(des) = array_iter.next(ecx)? 
{ + if let Some(epoll_event_instance) = ready_list_next(ecx, &mut ready_list) { + ecx.write_int_fields_named( + &[ + ("events", epoll_event_instance.events.into()), + ("u64", epoll_event_instance.data.into()), + ], + &des.1, + )?; + num_of_events = num_of_events.strict_add(1); + } else { + break; + } + } + ecx.write_int(num_of_events, dest)?; + Ok(()) +} From 7d5be0629b4161dda9dde8dfdeda5e9dd50649fd Mon Sep 17 00:00:00 2001 From: tiif Date: Sun, 25 Aug 2024 03:14:42 +0800 Subject: [PATCH 071/124] Add 0 preemption rate flag for blocking test and fix comment --- src/tools/miri/src/concurrency/thread.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/miri/src/concurrency/thread.rs b/src/tools/miri/src/concurrency/thread.rs index 1b119ae71924e..af457848de668 100644 --- a/src/tools/miri/src/concurrency/thread.rs +++ b/src/tools/miri/src/concurrency/thread.rs @@ -172,7 +172,7 @@ pub enum BlockReason { Futex { addr: u64 }, /// Blocked on an InitOnce. InitOnce(InitOnceId), - /// Blocked on epoll + /// Blocked on epoll. Epoll, } From 09993ce6d5a45384adc8cab709c5517fe60b9161 Mon Sep 17 00:00:00 2001 From: tiif Date: Sun, 25 Aug 2024 03:15:52 +0800 Subject: [PATCH 072/124] Add 0 preemption rate flag --- src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs b/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs index e1b4d3d85be66..1a1d058b98365 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs @@ -1,4 +1,6 @@ //@only-target-linux +// test_epoll_block_then_unblock depends on a deterministic schedule. +//@compile-flags: -Zmiri-preemption-rate=0 use std::convert::TryInto; use std::thread; From 12faedd9b096824b5ba738ef7631da39986b3f5e Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Sat, 24 Aug 2024 22:35:49 +0300 Subject: [PATCH 073/124] Fix few bugs in closure capture computation, and add tests Also create a test infrastructure for capture computation. --- .../rust-analyzer/crates/hir-ty/src/mir.rs | 8 +- .../rust-analyzer/crates/hir-ty/src/tests.rs | 1 + .../hir-ty/src/tests/closure_captures.rs | 284 ++++++++++++++++++ src/tools/rust-analyzer/crates/hir/src/lib.rs | 1 + 4 files changed, 291 insertions(+), 3 deletions(-) create mode 100644 src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs index 9c727cb4ed197..be094b13889be 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs @@ -636,6 +636,7 @@ pub enum TerminatorKind { }, } +// Order of variants in this enum matter: they are used to compare borrow kinds. #[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)] pub enum BorrowKind { /// Data must be immutable and is aliasable. @@ -666,15 +667,16 @@ pub enum BorrowKind { Mut { kind: MutBorrowKind }, } +// Order of variants in this enum matter: they are used to compare borrow kinds. #[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)] pub enum MutBorrowKind { + /// Data must be immutable but not aliasable. This kind of borrow cannot currently + /// be expressed by the user and is used only in implicit closure bindings. + ClosureCapture, Default, /// This borrow arose from method-call auto-ref /// (i.e., adjustment::Adjust::Borrow). 
TwoPhasedBorrow, - /// Data must be immutable but not aliasable. This kind of borrow cannot currently - /// be expressed by the user and is used only in implicit closure bindings. - ClosureCapture, } impl BorrowKind { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs index efd8546216d4b..bcf9d5ceff0e1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs @@ -1,3 +1,4 @@ +mod closure_captures; mod coercion; mod diagnostics; mod display_source_code; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs new file mode 100644 index 0000000000000..782331a1363f5 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs @@ -0,0 +1,284 @@ +use base_db::salsa::InternKey; +use expect_test::{expect, Expect}; +use hir_def::db::DefDatabase; +use itertools::Itertools; +use test_fixture::WithFixture; + +use crate::db::{HirDatabase, InternedClosureId}; +use crate::display::HirDisplay; +use crate::test_db::TestDB; + +use super::visit_module; + +fn check_closure_captures(ra_fixture: &str, expect: Expect) { + let (db, file_id) = TestDB::with_single_file(ra_fixture); + let module = db.module_for_file(file_id); + let def_map = module.def_map(&db); + + let mut defs = Vec::new(); + visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it)); + + let mut captures_info = Vec::new(); + for def in defs { + let infer = db.infer(def); + let db = &db; + captures_info.extend(infer.closure_info.iter().flat_map(|(closure_id, (captures, _))| { + let closure = db.lookup_intern_closure(InternedClosureId::from_intern_id(closure_id.0)); + let (_, source_map) = db.body_with_source_map(closure.0); + let closure_text_range = source_map + .expr_syntax(closure.1) + .expect("failed to map closure to SyntaxNode") + .value + .text_range(); + captures.iter().flat_map(move |capture| { + // FIXME: Deduplicate this with hir::Local::sources(). + let (body, source_map) = db.body_with_source_map(closure.0); + let local_text_ranges = match body.self_param.zip(source_map.self_param_syntax()) { + Some((param, source)) if param == capture.local() => { + vec![source.file_syntax(db).text_range()] + } + _ => source_map + .patterns_for_binding(capture.local()) + .iter() + .map(|&definition| { + let src = source_map.pat_syntax(definition).unwrap(); + src.file_syntax(db).text_range() + }) + .collect(), + }; + let place = capture.display_place(closure.0, db); + let capture_ty = capture.ty.skip_binders().display_test(db).to_string(); + local_text_ranges.into_iter().map(move |local_text_range| { + ( + closure_text_range, + local_text_range, + place.clone(), + capture_ty.clone(), + capture.kind(), + ) + }) + }) + })); + } + captures_info.sort_unstable_by_key(|(closure_text_range, local_text_range, ..)| { + (closure_text_range.start(), local_text_range.start()) + }); + + let rendered = captures_info + .iter() + .map(|(closure_text_range, local_text_range, place, capture_ty, capture_kind)| { + format!( + "{closure_text_range:?};{local_text_range:?} {capture_kind:?} {place} {capture_ty}" + ) + }) + .join("\n"); + + expect.assert_eq(&rendered); +} + +#[test] +fn deref_in_let() { + check_closure_captures( + r#" +//- minicore:copy +fn main() { + let a = &mut true; + let closure = || { let b = *a; }; +} +"#, + expect!["53..71;0..75 ByRef(Shared) *a &'? 
bool"], + ); +} + +#[test] +fn deref_then_ref_pattern() { + check_closure_captures( + r#" +//- minicore:copy +fn main() { + let a = &mut true; + let closure = || { let &mut ref b = a; }; +} +"#, + expect!["53..79;0..83 ByRef(Shared) *a &'? bool"], + ); + check_closure_captures( + r#" +//- minicore:copy +fn main() { + let a = &mut true; + let closure = || { let &mut ref mut b = a; }; +} +"#, + expect!["53..83;0..87 ByRef(Mut { kind: Default }) *a &'? mut bool"], + ); +} + +#[test] +fn unique_borrow() { + check_closure_captures( + r#" +//- minicore:copy +fn main() { + let a = &mut true; + let closure = || { *a = false; }; +} +"#, + expect!["53..71;0..75 ByRef(Mut { kind: Default }) *a &'? mut bool"], + ); +} + +#[test] +fn deref_ref_mut() { + check_closure_captures( + r#" +//- minicore:copy +fn main() { + let a = &mut true; + let closure = || { let ref mut b = *a; }; +} +"#, + expect!["53..79;0..83 ByRef(Mut { kind: Default }) *a &'? mut bool"], + ); +} + +#[test] +fn let_else_not_consuming() { + check_closure_captures( + r#" +//- minicore:copy +fn main() { + let a = &mut true; + let closure = || { let _ = *a else { return; }; }; +} +"#, + expect!["53..88;0..92 ByRef(Shared) *a &'? bool"], + ); +} + +#[test] +fn consume() { + check_closure_captures( + r#" +//- minicore:copy +struct NonCopy; +fn main() { + let a = NonCopy; + let closure = || { let b = a; }; +} +"#, + expect!["67..84;0..88 ByValue a NonCopy"], + ); +} + +#[test] +fn ref_to_upvar() { + check_closure_captures( + r#" +//- minicore:copy +struct NonCopy; +fn main() { + let mut a = NonCopy; + let closure = || { let b = &a; }; + let closure = || { let c = &mut a; }; +} +"#, + expect![[r#" + 71..89;0..135 ByRef(Shared) a &'? NonCopy + 109..131;0..135 ByRef(Mut { kind: Default }) a &'? mut NonCopy"#]], + ); +} + +#[test] +fn field() { + check_closure_captures( + r#" +//- minicore:copy +struct Foo { a: i32, b: i32 } +fn main() { + let a = Foo { a: 0, b: 0 }; + let closure = || { let b = a.a; }; +} +"#, + expect!["92..111;0..115 ByRef(Shared) a.a &'? i32"], + ); +} + +#[test] +fn fields_different_mode() { + check_closure_captures( + r#" +//- minicore:copy +struct NonCopy; +struct Foo { a: i32, b: i32, c: NonCopy, d: bool } +fn main() { + let mut a = Foo { a: 0, b: 0 }; + let closure = || { + let b = &a.a; + let c = &mut a.b; + let d = a.c; + }; +} +"#, + expect![[r#" + 133..212;0..216 ByRef(Shared) a.a &'? i32 + 133..212;0..216 ByRef(Mut { kind: Default }) a.b &'? mut i32 + 133..212;0..216 ByValue a.c NonCopy"#]], + ); +} + +#[test] +fn autoref() { + check_closure_captures( + r#" +//- minicore:copy +struct Foo; +impl Foo { + fn imm(&self) {} + fn mut_(&mut self) {} +} +fn main() { + let mut a = Foo; + let closure = || a.imm(); + let closure = || a.mut_(); +} +"#, + expect![[r#" + 123..133;0..168 ByRef(Shared) a &'? Foo + 153..164;0..168 ByRef(Mut { kind: Default }) a &'? mut Foo"#]], + ); +} + +#[test] +fn captures_priority() { + check_closure_captures( + r#" +//- minicore:copy +struct NonCopy; +fn main() { + let mut a = &mut true; + // Max ByRef(Mut { kind: Default }) + let closure = || { + *a = false; + let b = &mut a; + }; + // Max ByRef(Mut { kind: ClosureCapture }) + let closure = || { + let b = *a; + let c = &mut *a; + }; + // Max ByValue + let mut a = NonCopy; + let closure = || { + let b = a; + let c = &mut a; + let d = &a; + }; +} +"#, + expect![[r#" + 113..167;0..430 ByRef(Mut { kind: Default }) a &'? mut &'? mut bool + 234..289;0..430 ByRef(Mut { kind: Default }) *a &'? 
mut bool + 353..426;0..430 ByValue a NonCopy"#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 6e26af55af827..1e8127161d674 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -4133,6 +4133,7 @@ impl ClosureCapture { } } +#[derive(Clone, Copy, PartialEq, Eq)] pub enum CaptureKind { SharedRef, UniqueSharedRef, From 77ab5686c6b4de14cef63c3987eb12dd3d29e2a5 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Sat, 24 Aug 2024 22:46:45 +0300 Subject: [PATCH 074/124] Preserve all spans for closure captures, not just one This is important for the "convert closure to fn" assist, as it needs to find and modify the places the captures are used. --- .../rust-analyzer/crates/hir-ty/src/infer.rs | 8 + .../crates/hir-ty/src/infer/closure.rs | 385 +++++++++++------- .../crates/hir-ty/src/mir/lower.rs | 11 +- .../hir-ty/src/tests/closure_captures.rs | 231 +++++++++-- 4 files changed, 456 insertions(+), 179 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 8b4e98c5500e8..062ea278151bb 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -59,6 +59,7 @@ use crate::{ generics::Generics, infer::{coerce::CoerceMany, unify::InferenceTable}, lower::ImplTraitLoweringMode, + mir::MirSpan, to_assoc_type_id, traits::FnTrait, utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder}, @@ -553,6 +554,12 @@ pub(crate) struct InferenceContext<'a> { // fields related to closure capture current_captures: Vec, + /// A stack that has an entry for each projection in the current capture. + /// + /// For example, in `a.b.c`, we capture the spans of `a`, `a.b`, and `a.b.c`. + /// We do that because sometimes we truncate projections (when a closure captures + /// both `a.b` and `a.b.c`), and we want to provide accurate spans in this case. + current_capture_span_stack: Vec, current_closure: Option, /// Stores the list of closure ids that need to be analyzed before this closure. See the /// comment on `InferenceContext::sort_closures` @@ -634,6 +641,7 @@ impl<'a> InferenceContext<'a> { breakables: Vec::new(), deferred_cast_checks: Vec::new(), current_captures: Vec::new(), + current_capture_span_stack: Vec::new(), current_closure: None, deferred_closures: FxHashMap::default(), closure_dependencies: FxHashMap::default(), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index 910e19dc580de..68e1a6ee821c7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -18,7 +18,7 @@ use hir_def::{ use hir_expand::name::Name; use intern::sym; use rustc_hash::FxHashMap; -use smallvec::SmallVec; +use smallvec::{smallvec, SmallVec}; use stdx::never; use crate::{ @@ -236,7 +236,13 @@ pub enum CaptureKind { pub struct CapturedItem { pub(crate) place: HirPlace, pub(crate) kind: CaptureKind, - pub(crate) span: MirSpan, + /// The inner vec is the stacks; the outer vec is for each capture reference. + /// + /// Even though we always report only the last span (i.e. 
the most inclusive span), + /// we need to keep them all, since when a closure occurs inside a closure, we + /// copy all captures of the inner closure to the outer closure, and then we may + /// truncate them, and we want the correct span to be reported. + span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>, pub(crate) ty: Binders, } @@ -253,6 +259,10 @@ impl CapturedItem { self.kind } + pub fn spans(&self) -> SmallVec<[MirSpan; 3]> { + self.span_stacks.iter().map(|stack| *stack.last().expect("empty span stack")).collect() + } + pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String { let body = db.body(owner); let krate = owner.krate(db.upcast()); @@ -314,7 +324,8 @@ impl CapturedItem { pub(crate) struct CapturedItemWithoutTy { pub(crate) place: HirPlace, pub(crate) kind: CaptureKind, - pub(crate) span: MirSpan, + /// The inner vec is the stacks; the outer vec is for each capture reference. + pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>, } impl CapturedItemWithoutTy { @@ -333,7 +344,7 @@ impl CapturedItemWithoutTy { return CapturedItem { place: self.place, kind: self.kind, - span: self.span, + span_stacks: self.span_stacks, ty: replace_placeholder_with_binder(ctx, ty), }; @@ -393,22 +404,26 @@ impl InferenceContext<'_> { let r = self.place_of_expr_without_adjust(tgt_expr)?; let default = vec![]; let adjustments = self.result.expr_adjustments.get(&tgt_expr).unwrap_or(&default); - apply_adjusts_to_place(r, adjustments) + apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments) } + /// Changes `current_capture_span_stack` to contain the stack of spans for this expr. fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option { + self.current_capture_span_stack.clear(); match &self.body[tgt_expr] { Expr::Path(p) => { let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr); if let Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(b), _)) = resolver.resolve_path_in_value_ns(self.db.upcast(), p) { + self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr)); return Some(HirPlace { local: b, projections: vec![] }); } } Expr::Field { expr, name: _ } => { let mut place = self.place_of_expr(*expr)?; let field = self.result.field_resolution(tgt_expr)?; + self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr)); place.projections.push(ProjectionElem::Field(field)); return Some(place); } @@ -418,6 +433,7 @@ impl InferenceContext<'_> { TyKind::Ref(..) | TyKind::Raw(..) ) { let mut place = self.place_of_expr(*expr)?; + self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr)); place.projections.push(ProjectionElem::Deref); return Some(place); } @@ -427,29 +443,65 @@ impl InferenceContext<'_> { None } - fn push_capture(&mut self, capture: CapturedItemWithoutTy) { - self.current_captures.push(capture); + fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) { + self.current_captures.push(CapturedItemWithoutTy { + place, + kind, + span_stacks: smallvec![self.current_capture_span_stack.iter().copied().collect()], + }); } - fn ref_expr(&mut self, expr: ExprId) { - if let Some(place) = self.place_of_expr(expr) { - self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared), expr.into()); + fn is_ref_span(&self, span: MirSpan) -> bool { + match span { + MirSpan::ExprId(expr) => matches!(self.body[expr], Expr::Ref { .. 
}), + MirSpan::BindingId(_) => true, + MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false, + } + } + + fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) { + // The first span is the identifier, and it must always remain. + truncate_to += 1; + for span_stack in &mut capture.span_stacks { + let mut remained = truncate_to; + let mut actual_truncate_to = 0; + for &span in &*span_stack { + actual_truncate_to += 1; + if !self.is_ref_span(span) { + remained -= 1; + if remained == 0 { + break; + } + } + } + if actual_truncate_to < span_stack.len() + && self.is_ref_span(span_stack[actual_truncate_to]) + { + // Include the ref operator if there is one, we will fix it later (in `strip_captures_ref_span()`) if it's incorrect. + actual_truncate_to += 1; + } + span_stack.truncate(actual_truncate_to); + } + } + + fn ref_expr(&mut self, expr: ExprId, place: Option) { + if let Some(place) = place { + self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared)); } self.walk_expr(expr); } - fn add_capture(&mut self, place: HirPlace, kind: CaptureKind, span: MirSpan) { + fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) { if self.is_upvar(&place) { - self.push_capture(CapturedItemWithoutTy { place, kind, span }); + self.push_capture(place, kind); } } - fn mutate_expr(&mut self, expr: ExprId) { - if let Some(place) = self.place_of_expr(expr) { + fn mutate_expr(&mut self, expr: ExprId, place: Option) { + if let Some(place) = place { self.add_capture( place, CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }), - expr.into(), ); } self.walk_expr(expr); @@ -457,12 +509,12 @@ impl InferenceContext<'_> { fn consume_expr(&mut self, expr: ExprId) { if let Some(place) = self.place_of_expr(expr) { - self.consume_place(place, expr.into()); + self.consume_place(place); } self.walk_expr(expr); } - fn consume_place(&mut self, place: HirPlace, span: MirSpan) { + fn consume_place(&mut self, place: HirPlace) { if self.is_upvar(&place) { let ty = place.ty(self); let kind = if self.is_ty_copy(ty) { @@ -470,7 +522,7 @@ impl InferenceContext<'_> { } else { CaptureKind::ByValue }; - self.push_capture(CapturedItemWithoutTy { place, kind, span }); + self.push_capture(place, kind); } } @@ -501,8 +553,10 @@ impl InferenceContext<'_> { Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared), }; if let Some(place) = self.place_of_expr_without_adjust(tgt_expr) { - if let Some(place) = apply_adjusts_to_place(place, rest) { - self.add_capture(place, capture_kind, tgt_expr.into()); + if let Some(place) = + apply_adjusts_to_place(&mut self.current_capture_span_stack, place, rest) + { + self.add_capture(place, capture_kind); } } self.walk_expr_with_adjust(tgt_expr, rest); @@ -584,11 +638,7 @@ impl InferenceContext<'_> { self.walk_pat(&mut capture_mode, arm.pat); } if let Some(c) = capture_mode { - self.push_capture(CapturedItemWithoutTy { - place: discr_place, - kind: c, - span: (*expr).into(), - }) + self.push_capture(discr_place, c); } } } @@ -632,10 +682,11 @@ impl InferenceContext<'_> { } false }; + let place = self.place_of_expr(*expr); if mutability { - self.mutate_expr(*expr); + self.mutate_expr(*expr, place); } else { - self.ref_expr(*expr); + self.ref_expr(*expr, place); } } else { self.select_from_expr(*expr); @@ -650,16 +701,22 @@ impl InferenceContext<'_> { | Expr::Cast { expr, type_ref: _ } => { self.consume_expr(*expr); } - Expr::Ref { expr, rawness: _, mutability } => match mutability { - hir_def::type_ref::Mutability::Shared => 
self.ref_expr(*expr), - hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr), - }, + Expr::Ref { expr, rawness: _, mutability } => { + // We need to do this before we push the span so the order will be correct. + let place = self.place_of_expr(*expr); + self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr)); + match mutability { + hir_def::type_ref::Mutability::Shared => self.ref_expr(*expr, place), + hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr, place), + } + } Expr::BinaryOp { lhs, rhs, op } => { let Some(op) = op else { return; }; if matches!(op, BinaryOp::Assignment { .. }) { - self.mutate_expr(*lhs); + let place = self.place_of_expr(*lhs); + self.mutate_expr(*lhs, place); self.consume_expr(*rhs); return; } @@ -690,7 +747,11 @@ impl InferenceContext<'_> { ); let mut cc = mem::take(&mut self.current_captures); cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| { - CapturedItemWithoutTy { place: it.place.clone(), kind: it.kind, span: it.span } + CapturedItemWithoutTy { + place: it.place.clone(), + kind: it.kind, + span_stacks: it.span_stacks.clone(), + } })); self.current_captures = cc; } @@ -812,10 +873,13 @@ impl InferenceContext<'_> { } fn restrict_precision_for_unsafe(&mut self) { - for capture in &mut self.current_captures { + // FIXME: Borrow checker problems without this. + let mut current_captures = std::mem::take(&mut self.current_captures); + for capture in &mut current_captures { let mut ty = self.table.resolve_completely(self.result[capture.place.local].clone()); if ty.as_raw_ptr().is_some() || ty.is_union() { capture.kind = CaptureKind::ByRef(BorrowKind::Shared); + self.truncate_capture_spans(capture, 0); capture.place.projections.truncate(0); continue; } @@ -830,29 +894,35 @@ impl InferenceContext<'_> { ); if ty.as_raw_ptr().is_some() || ty.is_union() { capture.kind = CaptureKind::ByRef(BorrowKind::Shared); + self.truncate_capture_spans(capture, i + 1); capture.place.projections.truncate(i + 1); break; } } } + self.current_captures = current_captures; } fn adjust_for_move_closure(&mut self) { - for capture in &mut self.current_captures { + // FIXME: Borrow checker won't allow without this. 
+ let mut current_captures = std::mem::take(&mut self.current_captures); + for capture in &mut current_captures { if let Some(first_deref) = capture.place.projections.iter().position(|proj| *proj == ProjectionElem::Deref) { + self.truncate_capture_spans(capture, first_deref); capture.place.projections.truncate(first_deref); } capture.kind = CaptureKind::ByValue; } + self.current_captures = current_captures; } fn minimize_captures(&mut self) { - self.current_captures.sort_by_key(|it| it.place.projections.len()); + self.current_captures.sort_unstable_by_key(|it| it.place.projections.len()); let mut hash_map = FxHashMap::::default(); let result = mem::take(&mut self.current_captures); - for item in result { + for mut item in result { let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] }; let mut it = item.place.projections.iter(); let prev_index = loop { @@ -860,12 +930,17 @@ impl InferenceContext<'_> { break Some(*k); } match it.next() { - Some(it) => lookup_place.projections.push(it.clone()), + Some(it) => { + lookup_place.projections.push(it.clone()); + } None => break None, } }; match prev_index { Some(p) => { + let prev_projections_len = self.current_captures[p].place.projections.len(); + self.truncate_capture_spans(&mut item, prev_projections_len); + self.current_captures[p].span_stacks.extend(item.span_stacks); let len = self.current_captures[p].place.projections.len(); let kind_after_truncate = item.place.capture_kind_of_truncated_place(item.kind, len); @@ -880,113 +955,128 @@ impl InferenceContext<'_> { } } - fn consume_with_pat(&mut self, mut place: HirPlace, pat: PatId) { - let cnt = self.result.pat_adjustments.get(&pat).map(|it| it.len()).unwrap_or_default(); - place.projections = place - .projections - .iter() - .cloned() - .chain((0..cnt).map(|_| ProjectionElem::Deref)) - .collect::>(); - match &self.body[pat] { - Pat::Missing | Pat::Wild => (), - Pat::Tuple { args, ellipsis } => { - let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); - let field_count = match self.result[pat].kind(Interner) { - TyKind::Tuple(_, s) => s.len(Interner), - _ => return, - }; - let fields = 0..field_count; - let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev())); - for (arg, i) in it { - let mut p = place.clone(); - p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId { - tuple: TupleId(!0), // dummy this, as its unused anyways - index: i as u32, - }))); - self.consume_with_pat(p, *arg); - } - } - Pat::Or(pats) => { - for pat in pats.iter() { - self.consume_with_pat(place.clone(), *pat); + fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) { + let adjustments_count = + self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default(); + place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref)); + self.current_capture_span_stack + .extend((0..adjustments_count).map(|_| MirSpan::PatId(tgt_pat))); + 'reset_span_stack: { + match &self.body[tgt_pat] { + Pat::Missing | Pat::Wild => (), + Pat::Tuple { args, ellipsis } => { + let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); + let field_count = match self.result[tgt_pat].kind(Interner) { + TyKind::Tuple(_, s) => s.len(Interner), + _ => break 'reset_span_stack, + }; + let fields = 0..field_count; + let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev())); + for (&arg, i) in it { + let mut p = place.clone(); + self.current_capture_span_stack.push(MirSpan::PatId(arg)); 
+ p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId { + tuple: TupleId(!0), // dummy this, as its unused anyways + index: i as u32, + }))); + self.consume_with_pat(p, arg); + self.current_capture_span_stack.pop(); + } } - } - Pat::Record { args, .. } => { - let Some(variant) = self.result.variant_resolution_for_pat(pat) else { - return; - }; - match variant { - VariantId::EnumVariantId(_) | VariantId::UnionId(_) => { - self.consume_place(place, pat.into()) + Pat::Or(pats) => { + for pat in pats.iter() { + self.consume_with_pat(place.clone(), *pat); } - VariantId::StructId(s) => { - let vd = &*self.db.struct_data(s).variant_data; - for field_pat in args.iter() { - let arg = field_pat.pat; - let Some(local_id) = vd.field(&field_pat.name) else { - continue; - }; - let mut p = place.clone(); - p.projections.push(ProjectionElem::Field(Either::Left(FieldId { - parent: variant, - local_id, - }))); - self.consume_with_pat(p, arg); + } + Pat::Record { args, .. } => { + let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else { + break 'reset_span_stack; + }; + match variant { + VariantId::EnumVariantId(_) | VariantId::UnionId(_) => { + self.consume_place(place) + } + VariantId::StructId(s) => { + let vd = &*self.db.struct_data(s).variant_data; + for field_pat in args.iter() { + let arg = field_pat.pat; + let Some(local_id) = vd.field(&field_pat.name) else { + continue; + }; + let mut p = place.clone(); + self.current_capture_span_stack.push(MirSpan::PatId(arg)); + p.projections.push(ProjectionElem::Field(Either::Left(FieldId { + parent: variant, + local_id, + }))); + self.consume_with_pat(p, arg); + self.current_capture_span_stack.pop(); + } } } } - } - Pat::Range { .. } - | Pat::Slice { .. } - | Pat::ConstBlock(_) - | Pat::Path(_) - | Pat::Lit(_) => self.consume_place(place, pat.into()), - Pat::Bind { id: _, subpat: _ } => { - let mode = self.result.binding_modes[pat]; - let capture_kind = match mode { - BindingMode::Move => { - self.consume_place(place, pat.into()); - return; - } - BindingMode::Ref(Mutability::Not) => BorrowKind::Shared, - BindingMode::Ref(Mutability::Mut) => { - BorrowKind::Mut { kind: MutBorrowKind::Default } - } - }; - self.add_capture(place, CaptureKind::ByRef(capture_kind), pat.into()); - } - Pat::TupleStruct { path: _, args, ellipsis } => { - let Some(variant) = self.result.variant_resolution_for_pat(pat) else { - return; - }; - match variant { - VariantId::EnumVariantId(_) | VariantId::UnionId(_) => { - self.consume_place(place, pat.into()) - } - VariantId::StructId(s) => { - let vd = &*self.db.struct_data(s).variant_data; - let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); - let fields = vd.fields().iter(); - let it = - al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev())); - for (arg, (i, _)) in it { - let mut p = place.clone(); - p.projections.push(ProjectionElem::Field(Either::Left(FieldId { - parent: variant, - local_id: i, - }))); - self.consume_with_pat(p, *arg); + Pat::Range { .. } + | Pat::Slice { .. 
} + | Pat::ConstBlock(_) + | Pat::Path(_) + | Pat::Lit(_) => self.consume_place(place), + &Pat::Bind { id, subpat: _ } => { + let mode = self.result.binding_modes[tgt_pat]; + let capture_kind = match mode { + BindingMode::Move => { + self.consume_place(place); + break 'reset_span_stack; + } + BindingMode::Ref(Mutability::Not) => BorrowKind::Shared, + BindingMode::Ref(Mutability::Mut) => { + BorrowKind::Mut { kind: MutBorrowKind::Default } + } + }; + self.current_capture_span_stack.push(MirSpan::BindingId(id)); + self.add_capture(place, CaptureKind::ByRef(capture_kind)); + self.current_capture_span_stack.pop(); + } + Pat::TupleStruct { path: _, args, ellipsis } => { + let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else { + break 'reset_span_stack; + }; + match variant { + VariantId::EnumVariantId(_) | VariantId::UnionId(_) => { + self.consume_place(place) + } + VariantId::StructId(s) => { + let vd = &*self.db.struct_data(s).variant_data; + let (al, ar) = + args.split_at(ellipsis.map_or(args.len(), |it| it as usize)); + let fields = vd.fields().iter(); + let it = al + .iter() + .zip(fields.clone()) + .chain(ar.iter().rev().zip(fields.rev())); + for (&arg, (i, _)) in it { + let mut p = place.clone(); + self.current_capture_span_stack.push(MirSpan::PatId(arg)); + p.projections.push(ProjectionElem::Field(Either::Left(FieldId { + parent: variant, + local_id: i, + }))); + self.consume_with_pat(p, arg); + self.current_capture_span_stack.pop(); + } } } } + Pat::Ref { pat, mutability: _ } => { + self.current_capture_span_stack.push(MirSpan::PatId(tgt_pat)); + place.projections.push(ProjectionElem::Deref); + self.consume_with_pat(place, *pat); + self.current_capture_span_stack.pop(); + } + Pat::Box { .. } => (), // not supported } - Pat::Ref { pat, mutability: _ } => { - place.projections.push(ProjectionElem::Deref); - self.consume_with_pat(place, *pat) - } - Pat::Box { .. } => (), // not supported } + self.current_capture_span_stack + .truncate(self.current_capture_span_stack.len() - adjustments_count); } fn consume_exprs(&mut self, exprs: impl Iterator) { @@ -1044,12 +1134,28 @@ impl InferenceContext<'_> { CaptureBy::Ref => (), } self.minimize_captures(); + self.strip_captures_ref_span(); let result = mem::take(&mut self.current_captures); let captures = result.into_iter().map(|it| it.with_ty(self)).collect::>(); self.result.closure_info.insert(closure, (captures, closure_kind)); closure_kind } + fn strip_captures_ref_span(&mut self) { + // FIXME: Borrow checker won't allow without this. + let mut captures = std::mem::take(&mut self.current_captures); + for capture in &mut captures { + if matches!(capture.kind, CaptureKind::ByValue) { + for span_stack in &mut capture.span_stacks { + if self.is_ref_span(span_stack[span_stack.len() - 1]) { + span_stack.truncate(span_stack.len() - 1); + } + } + } + } + self.current_captures = captures; + } + pub(crate) fn infer_closures(&mut self) { let deferred_closures = self.sort_closures(); for (closure, exprs) in deferred_closures.into_iter().rev() { @@ -1110,10 +1216,17 @@ impl InferenceContext<'_> { } } -fn apply_adjusts_to_place(mut r: HirPlace, adjustments: &[Adjustment]) -> Option { +/// Call this only when the last span in the stack isn't a split. 
+fn apply_adjusts_to_place( + current_capture_span_stack: &mut Vec, + mut r: HirPlace, + adjustments: &[Adjustment], +) -> Option { + let span = *current_capture_span_stack.last().expect("empty capture span stack"); for adj in adjustments { match &adj.kind { Adjust::Deref(None) => { + current_capture_span_stack.push(span); r.projections.push(ProjectionElem::Deref); } _ => return None, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 3a0d0c933f953..9e235504519e2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -1180,8 +1180,15 @@ impl<'ctx> MirLowerCtx<'ctx> { let placeholder_subst = self.placeholder_subst(); let tmp_ty = capture.ty.clone().substitute(Interner, &placeholder_subst); - let tmp: Place = self.temp(tmp_ty, current, capture.span)?.into(); - self.push_assignment(current, tmp, Rvalue::Ref(*bk, p), capture.span); + // FIXME: Handle more than one span. + let capture_spans = capture.spans(); + let tmp: Place = self.temp(tmp_ty, current, capture_spans[0])?.into(); + self.push_assignment( + current, + tmp, + Rvalue::Ref(*bk, p), + capture_spans[0], + ); operands.push(Operand::Move(tmp)); } CaptureKind::ByValue => operands.push(Operand::Move(p)), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs index 782331a1363f5..22cef3505bf04 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs @@ -1,11 +1,15 @@ use base_db::salsa::InternKey; use expect_test::{expect, Expect}; use hir_def::db::DefDatabase; +use hir_expand::files::InFileWrapper; use itertools::Itertools; +use span::{HirFileId, TextRange}; +use syntax::{AstNode, AstPtr}; use test_fixture::WithFixture; use crate::db::{HirDatabase, InternedClosureId}; use crate::display::HirDisplay; +use crate::mir::MirSpan; use crate::test_db::TestDB; use super::visit_module; @@ -30,45 +34,69 @@ fn check_closure_captures(ra_fixture: &str, expect: Expect) { .expect("failed to map closure to SyntaxNode") .value .text_range(); - captures.iter().flat_map(move |capture| { + captures.iter().map(move |capture| { + fn text_range( + db: &TestDB, + syntax: InFileWrapper>, + ) -> TextRange { + let root = syntax.file_syntax(db); + syntax.value.to_node(&root).syntax().text_range() + } + // FIXME: Deduplicate this with hir::Local::sources(). 
let (body, source_map) = db.body_with_source_map(closure.0); - let local_text_ranges = match body.self_param.zip(source_map.self_param_syntax()) { + let local_text_range = match body.self_param.zip(source_map.self_param_syntax()) { Some((param, source)) if param == capture.local() => { - vec![source.file_syntax(db).text_range()] + format!("{:?}", text_range(db, source)) } _ => source_map .patterns_for_binding(capture.local()) .iter() .map(|&definition| { - let src = source_map.pat_syntax(definition).unwrap(); - src.file_syntax(db).text_range() + text_range(db, source_map.pat_syntax(definition).unwrap()) }) - .collect(), + .map(|it| format!("{it:?}")) + .join(", "), }; let place = capture.display_place(closure.0, db); let capture_ty = capture.ty.skip_binders().display_test(db).to_string(); - local_text_ranges.into_iter().map(move |local_text_range| { - ( - closure_text_range, - local_text_range, - place.clone(), - capture_ty.clone(), - capture.kind(), - ) - }) + let spans = capture + .spans() + .iter() + .flat_map(|span| match *span { + MirSpan::ExprId(expr) => { + vec![text_range(db, source_map.expr_syntax(expr).unwrap())] + } + MirSpan::PatId(pat) => { + vec![text_range(db, source_map.pat_syntax(pat).unwrap())] + } + MirSpan::BindingId(binding) => source_map + .patterns_for_binding(binding) + .iter() + .map(|pat| text_range(db, source_map.pat_syntax(*pat).unwrap())) + .collect(), + MirSpan::SelfParam => { + vec![text_range(db, source_map.self_param_syntax().unwrap())] + } + MirSpan::Unknown => Vec::new(), + }) + .sorted_by_key(|it| it.start()) + .map(|it| format!("{it:?}")) + .join(","); + + (closure_text_range, local_text_range, spans, place, capture_ty, capture.kind()) }) })); } captures_info.sort_unstable_by_key(|(closure_text_range, local_text_range, ..)| { - (closure_text_range.start(), local_text_range.start()) + (closure_text_range.start(), local_text_range.clone()) }); let rendered = captures_info .iter() - .map(|(closure_text_range, local_text_range, place, capture_ty, capture_kind)| { + .map(|(closure_text_range, local_text_range, spans, place, capture_ty, capture_kind)| { format!( - "{closure_text_range:?};{local_text_range:?} {capture_kind:?} {place} {capture_ty}" + "{closure_text_range:?};{local_text_range};{spans} {capture_kind:?} {place} {capture_ty}" ) }) .join("\n"); @@ -86,7 +114,7 @@ fn main() { let closure = || { let b = *a; }; } "#, - expect!["53..71;0..75 ByRef(Shared) *a &'? bool"], + expect!["53..71;20..21;66..68 ByRef(Shared) *a &'? bool"], ); } @@ -100,7 +128,7 @@ fn main() { let closure = || { let &mut ref b = a; }; } "#, - expect!["53..79;0..83 ByRef(Shared) *a &'? bool"], + expect!["53..79;20..21;67..72 ByRef(Shared) *a &'? bool"], ); check_closure_captures( r#" @@ -110,7 +138,7 @@ fn main() { let closure = || { let &mut ref mut b = a; }; } "#, - expect!["53..83;0..87 ByRef(Mut { kind: Default }) *a &'? mut bool"], + expect!["53..83;20..21;67..76 ByRef(Mut { kind: Default }) *a &'? mut bool"], ); } @@ -124,7 +152,7 @@ fn main() { let closure = || { *a = false; }; } "#, - expect!["53..71;0..75 ByRef(Mut { kind: Default }) *a &'? mut bool"], + expect!["53..71;20..21;58..60 ByRef(Mut { kind: Default }) *a &'? mut bool"], ); } @@ -138,7 +166,7 @@ fn main() { let closure = || { let ref mut b = *a; }; } "#, - expect!["53..79;0..83 ByRef(Mut { kind: Default }) *a &'? mut bool"], + expect!["53..79;20..21;62..71 ByRef(Mut { kind: Default }) *a &'? 
mut bool"], ); } @@ -152,7 +180,7 @@ fn main() { let closure = || { let _ = *a else { return; }; }; } "#, - expect!["53..88;0..92 ByRef(Shared) *a &'? bool"], + expect!["53..88;20..21;66..68 ByRef(Shared) *a &'? bool"], ); } @@ -167,7 +195,7 @@ fn main() { let closure = || { let b = a; }; } "#, - expect!["67..84;0..88 ByValue a NonCopy"], + expect!["67..84;36..37;80..81 ByValue a NonCopy"], ); } @@ -184,8 +212,8 @@ fn main() { } "#, expect![[r#" - 71..89;0..135 ByRef(Shared) a &'? NonCopy - 109..131;0..135 ByRef(Mut { kind: Default }) a &'? mut NonCopy"#]], + 71..89;36..41;84..86 ByRef(Shared) a &'? NonCopy + 109..131;36..41;122..128 ByRef(Mut { kind: Default }) a &'? mut NonCopy"#]], ); } @@ -200,7 +228,7 @@ fn main() { let closure = || { let b = a.a; }; } "#, - expect!["92..111;0..115 ByRef(Shared) a.a &'? i32"], + expect!["92..111;50..51;105..108 ByRef(Shared) a.a &'? i32"], ); } @@ -221,9 +249,9 @@ fn main() { } "#, expect![[r#" - 133..212;0..216 ByRef(Shared) a.a &'? i32 - 133..212;0..216 ByRef(Mut { kind: Default }) a.b &'? mut i32 - 133..212;0..216 ByValue a.c NonCopy"#]], + 133..212;87..92;154..158 ByRef(Shared) a.a &'? i32 + 133..212;87..92;176..184 ByRef(Mut { kind: Default }) a.b &'? mut i32 + 133..212;87..92;202..205 ByValue a.c NonCopy"#]], ); } @@ -244,8 +272,8 @@ fn main() { } "#, expect![[r#" - 123..133;0..168 ByRef(Shared) a &'? Foo - 153..164;0..168 ByRef(Mut { kind: Default }) a &'? mut Foo"#]], + 123..133;92..97;126..127 ByRef(Shared) a &'? Foo + 153..164;92..97;156..157 ByRef(Mut { kind: Default }) a &'? mut Foo"#]], ); } @@ -262,11 +290,6 @@ fn main() { *a = false; let b = &mut a; }; - // Max ByRef(Mut { kind: ClosureCapture }) - let closure = || { - let b = *a; - let c = &mut *a; - }; // Max ByValue let mut a = NonCopy; let closure = || { @@ -277,8 +300,134 @@ fn main() { } "#, expect![[r#" - 113..167;0..430 ByRef(Mut { kind: Default }) a &'? mut &'? mut bool - 234..289;0..430 ByRef(Mut { kind: Default }) *a &'? mut bool - 353..426;0..430 ByValue a NonCopy"#]], + 113..167;36..41;127..128,154..160 ByRef(Mut { kind: Default }) a &'? mut &'? mut bool + 231..304;196..201;252..253,276..277,296..297 ByValue a NonCopy"#]], + ); +} + +#[test] +fn let_underscore() { + check_closure_captures( + r#" +//- minicore:copy +fn main() { + let mut a = true; + let closure = || { let _ = a; }; +} +"#, + expect![""], + ); +} + +#[test] +fn match_wildcard() { + check_closure_captures( + r#" +//- minicore:copy +struct NonCopy; +fn main() { + let mut a = NonCopy; + let closure = || match a { + _ => {} + }; + let closure = || match a { + ref b => {} + }; + let closure = || match a { + ref mut b => {} + }; +} +"#, + expect![[r#" + 125..163;36..41;134..135 ByRef(Shared) a &'? NonCopy + 183..225;36..41;192..193 ByRef(Mut { kind: Default }) a &'? mut NonCopy"#]], + ); +} + +#[test] +fn multiple_bindings() { + check_closure_captures( + r#" +//- minicore:copy +fn main() { + let mut a = false; + let mut closure = || { let (b | b) = a; }; +} +"#, + expect!["57..80;20..25;76..77,76..77 ByRef(Shared) a &'? bool"], + ); +} + +#[test] +fn multiple_usages() { + check_closure_captures( + r#" +//- minicore:copy +fn main() { + let mut a = false; + let mut closure = || { + let b = &a; + let c = &a; + let d = &mut a; + a = true; + }; +} +"#, + expect!["57..149;20..25;78..80,98..100,118..124,134..135 ByRef(Mut { kind: Default }) a &'? 
mut bool"], + ); +} + +#[test] +fn ref_then_deref() { + check_closure_captures( + r#" +//- minicore:copy +fn main() { + let mut a = false; + let mut closure = || { let b = *&mut a; }; +} +"#, + expect!["57..80;20..25;71..77 ByRef(Mut { kind: Default }) a &'? mut bool"], + ); +} + +#[test] +fn ref_of_ref() { + check_closure_captures( + r#" +//- minicore:copy +fn main() { + let mut a = &false; + let closure = || { let b = &a; }; + let closure = || { let b = &mut a; }; + let a = &mut false; + let closure = || { let b = &a; }; + let closure = || { let b = &mut a; }; +} +"#, + expect![[r#" + 54..72;20..25;67..69 ByRef(Shared) a &'? &'? bool + 92..114;20..25;105..111 ByRef(Mut { kind: Default }) a &'? mut &'? bool + 158..176;124..125;171..173 ByRef(Shared) a &'? &'? mut bool + 196..218;124..125;209..215 ByRef(Mut { kind: Default }) a &'? mut &'? mut bool"#]], + ); +} + +#[test] +fn multiple_capture_usages() { + check_closure_captures( + r#" +//- minicore:copy +struct A { a: i32, b: bool } +fn main() { + let mut a = A { a: 123, b: false }; + let closure = |$0| { + let b = a.b; + a = A { a: 456, b: true }; + }; + closure(); +} +"#, + expect!["99..165;49..54;120..121,133..134 ByRef(Mut { kind: Default }) a &'? mut A"], ); } From cf243e521106e9f6fb74574da1b15459e80864e1 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 22 Aug 2024 14:27:35 +0300 Subject: [PATCH 075/124] Add `gen` modifier to functions We don't yet lower or maybe even parse them, but blocks already have `gen`, so why not. --- .../src/handlers/extract_function.rs | 1 + .../src/handlers/generate_delegate_methods.rs | 2 ++ .../src/handlers/generate_delegate_trait.rs | 1 + .../src/handlers/generate_function.rs | 1 + .../src/handlers/generate_getter_or_setter.rs | 28 +++++++++++++++++-- .../ide-assists/src/handlers/generate_new.rs | 1 + .../rust-analyzer/crates/syntax/rust.ungram | 2 +- .../crates/syntax/src/ast/generated/nodes.rs | 2 ++ .../crates/syntax/src/ast/make.rs | 4 ++- 9 files changed, 38 insertions(+), 4 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs index fd379cb88daaf..fd0b11f6ddfdb 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs @@ -1617,6 +1617,7 @@ fn format_function( fun.control_flow.is_async, fun.mods.is_const, fun.control_flow.is_unsafe, + false, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs index bbb902f8a2a4d..081e36b4ff61c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -122,6 +122,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' let is_async = method_source.async_token().is_some(); let is_const = method_source.const_token().is_some(); let is_unsafe = method_source.unsafe_token().is_some(); + let is_gen = method_source.gen_token().is_some(); let fn_name = make::name(&name); @@ -154,6 +155,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' is_async, is_const, is_unsafe, + is_gen, ) .clone_for_update(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs index 933ab2058ec91..bf4ce5c907e80 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -740,6 +740,7 @@ fn func_assoc_item( item.async_token().is_some(), item.const_token().is_some(), item.unsafe_token().is_some(), + item.gen_token().is_some(), ) .clone_for_update(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs index cd29f77f0c95c..76a647807cb49 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs @@ -365,6 +365,7 @@ impl FunctionBuilder { self.is_async, false, // FIXME : const and unsafe are not handled yet. false, + false, ) .clone_for_update(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs index 8e349e84a96c8..c879a4a3d959e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs @@ -261,7 +261,19 @@ fn generate_getter_from_info( let ret_type = Some(make::ret_type(ty)); let body = make::block_expr([], Some(body)); - make::fn_(strukt.visibility(), fn_name, None, None, params, body, ret_type, false, false, false) + make::fn_( + strukt.visibility(), + fn_name, + None, + None, + params, + body, + ret_type, + false, + false, + false, + false, + ) } fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldInfo) -> ast::Fn { @@ -285,7 +297,19 @@ fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldI let body = make::block_expr([assign_stmt.into()], None); // Make the setter fn - make::fn_(strukt.visibility(), fn_name, None, None, params, body, None, false, false, false) + make::fn_( + strukt.visibility(), + fn_name, + None, + None, + params, + body, + None, + false, + false, + false, + false, + ) } fn extract_and_parse( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs index b9dede7cbdc36..70d14d6b95d85 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs @@ -115,6 +115,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option false, false, false, + false, ) .clone_for_update(); fn_.indent(1.into()); diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram index 069be2df3a440..7e2c88ff00d69 100644 --- a/src/tools/rust-analyzer/crates/syntax/rust.ungram +++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram @@ -186,7 +186,7 @@ UseTreeList = Fn = Attr* Visibility? - 'default'? 'const'? 'async'? 'unsafe'? Abi? + 'default'? 'const'? 'async'? 'gen'? 'unsafe'? Abi? 'fn' Name GenericParamList? ParamList RetType? WhereClause? 
(body:BlockExpr | ';') diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs index 01886d119d675..25e49d3d65399 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs @@ -484,6 +484,8 @@ impl Fn { #[inline] pub fn fn_token(&self) -> Option { support::token(&self.syntax, T![fn]) } #[inline] + pub fn gen_token(&self) -> Option { support::token(&self.syntax, T![gen]) } + #[inline] pub fn unsafe_token(&self) -> Option { support::token(&self.syntax, T![unsafe]) } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index 00ce5c37cce35..abf1a1f38207f 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -1035,6 +1035,7 @@ pub fn fn_( is_async: bool, is_const: bool, is_unsafe: bool, + is_gen: bool, ) -> ast::Fn { let type_params = match type_params { Some(type_params) => format!("{type_params}"), @@ -1056,9 +1057,10 @@ pub fn fn_( let async_literal = if is_async { "async " } else { "" }; let const_literal = if is_const { "const " } else { "" }; let unsafe_literal = if is_unsafe { "unsafe " } else { "" }; + let gen_literal = if is_gen { "gen " } else { "" }; ast_from_text(&format!( - "{visibility}{async_literal}{const_literal}{unsafe_literal}fn {fn_name}{type_params}{params} {ret_type}{where_clause}{body}", + "{visibility}{const_literal}{async_literal}{gen_literal}{unsafe_literal}fn {fn_name}{type_params}{params} {ret_type}{where_clause}{body}", )) } pub fn struct_( From 1e0df176675826d8b0bc31d40d35ca284906d614 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 22 Aug 2024 14:30:54 +0300 Subject: [PATCH 076/124] Handle associated types that are lang items Previously we were ignoring them. 
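
As an illustration, here is a hedged sketch of the shape that now gets
collected: an associated type carrying a `#[lang = ...]` attribute inside a
trait. The attribute names are borrowed from minicore's `Future`, which gains
exactly this form later in the series; the snippet is only a sketch, not code
added by this commit.

    #[lang = "future_trait"]
    pub trait Future {
        // Previously only `AssocItemId::FunctionId` members were registered;
        // a lang-item associated type like this one was silently skipped.
        #[lang = "future_output"]
        type Output;
    }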
--- .../crates/hir-def/src/lang_item.rs | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index a09fd658aeb8c..f7c7f72139863 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -117,11 +117,19 @@ impl LangItems { match def { ModuleDefId::TraitId(trait_) => { lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait); - db.trait_data(trait_).items.iter().for_each(|&(_, assoc_id)| { - if let AssocItemId::FunctionId(f) = assoc_id { - lang_items.collect_lang_item(db, f, LangItemTarget::Function); - } - }); + db.trait_data(trait_).items.iter().for_each( + |&(_, assoc_id)| match assoc_id { + AssocItemId::FunctionId(f) => { + lang_items.collect_lang_item(db, f, LangItemTarget::Function); + } + AssocItemId::TypeAliasId(alias) => lang_items.collect_lang_item( + db, + alias, + LangItemTarget::TypeAlias, + ), + AssocItemId::ConstId(_) => {} + }, + ); } ModuleDefId::AdtId(AdtId::EnumId(e)) => { lang_items.collect_lang_item(db, e, LangItemTarget::EnumId); From 2c6a521bab941d77713617daa3440c7feb8255ad Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 22 Aug 2024 14:33:35 +0300 Subject: [PATCH 077/124] Provide `Future::Output` and `Iterator` lang items --- src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs | 2 ++ src/tools/rust-analyzer/crates/ide/src/hover/tests.rs | 6 +++--- src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs | 2 ++ src/tools/rust-analyzer/crates/test-utils/src/minicore.rs | 2 ++ 4 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index f7c7f72139863..cdcd7208f373e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -461,6 +461,7 @@ language_item_table! { Context, sym::Context, context, Target::Struct, GenericRequirement::None; FuturePoll, sym::poll, future_poll_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None; + FutureOutput, sym::future_output, future_output, Target::TypeAlias, GenericRequirement::None; Option, sym::Option, option_type, Target::Enum, GenericRequirement::None; OptionSome, sym::Some, option_some_variant, Target::Variant, GenericRequirement::None; @@ -475,6 +476,7 @@ language_item_table! 
{ IntoFutureIntoFuture, sym::into_future, into_future_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None; IntoIterIntoIter, sym::into_iter, into_iter_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None; IteratorNext, sym::next, next_fn, Target::Method(MethodKind::Trait { body: false}), GenericRequirement::None; + Iterator, sym::iterator, iterator, Target::Trait, GenericRequirement::None; PinNewUnchecked, sym::new_unchecked, new_unchecked_fn, Target::Method(MethodKind::Inherent), GenericRequirement::None; diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 8b60e562d7b8d..9585bdbe4c545 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -8465,7 +8465,7 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 7800..8008, + full_range: 7800..8042, focus_range: 7865..7871, name: "Future", kind: Trait, @@ -8479,8 +8479,8 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 8638..9104, - focus_range: 8682..8690, + full_range: 8672..9171, + focus_range: 8749..8757, name: "Iterator", kind: Trait, container_name: "iterator", diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs index 2feca32ff8686..7eb8e4a5e2e52 100644 --- a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs +++ b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs @@ -239,6 +239,7 @@ define_symbols! { fundamental, future_trait, future, + future_output, Future, ge, get_context, @@ -273,6 +274,7 @@ define_symbols! { iter_mut, iter, Iterator, + iterator, keyword, lang, le, diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs index 2d615c34a3520..7dbc498ead196 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs @@ -1195,6 +1195,7 @@ pub mod future { #[doc(notable_trait)] #[lang = "future_trait"] pub trait Future { + #[lang = "future_output"] type Output; #[lang = "poll"] fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll; @@ -1292,6 +1293,7 @@ pub mod iter { mod traits { mod iterator { #[doc(notable_trait)] + #[lang = "iterator"] pub trait Iterator { type Item; #[lang = "next"] From 737a969aa56d686ecdc9a5d689a3602ec2f7d164 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 22 Aug 2024 14:35:39 +0300 Subject: [PATCH 078/124] Add helper methods to retrieve `Future::Output` and `Iterator::Item` --- .../crates/hir-def/src/lang_item.rs | 7 +++++++ src/tools/rust-analyzer/crates/hir/src/lib.rs | 18 +++++++++++++++++- 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index cdcd7208f373e..166c965d14c67 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -74,6 +74,13 @@ impl LangItemTarget { _ => None, } } + + pub fn as_type_alias(self) -> Option { + match self { + LangItemTarget::TypeAlias(id) => Some(id), + _ => None, + } + } } #[derive(Default, Debug, Clone, PartialEq, Eq)] diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 1e8127161d674..2ff5f0d538af0 100644 --- 
a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -78,7 +78,7 @@ use hir_ty::{ use itertools::Itertools; use nameres::diagnostics::DefDiagnosticKind; use rustc_hash::FxHashSet; -use span::{Edition, EditionedFileId, FileId, MacroCallId}; +use span::{Edition, EditionedFileId, FileId, MacroCallId, SyntaxContextId}; use stdx::{impl_from, never}; use syntax::{ ast::{self, HasAttrs as _, HasGenericParams, HasName}, @@ -4379,6 +4379,22 @@ impl Type { method_resolution::implements_trait(&canonical_ty, db, &self.env, trait_) } + /// This does **not** resolve `IntoFuture`, only `Future`. + pub fn future_output(self, db: &dyn HirDatabase) -> Option { + let future_output = + db.lang_item(self.env.krate, LangItem::FutureOutput)?.as_type_alias()?; + self.normalize_trait_assoc_type(db, &[], future_output.into()) + } + + /// This does **not** resolve `IntoIterator`, only `Iterator`. + pub fn iterator_item(self, db: &dyn HirDatabase) -> Option { + let iterator_trait = db.lang_item(self.env.krate, LangItem::Iterator)?.as_trait()?; + let iterator_item = db + .trait_data(iterator_trait) + .associated_type_by_name(&Name::new_symbol(sym::Item.clone(), SyntaxContextId::ROOT))?; + self.normalize_trait_assoc_type(db, &[], iterator_item.into()) + } + /// Checks that particular type `ty` implements `std::ops::FnOnce`. /// /// This function can be used to check if a particular type is callable, since FnOnce is a From 73393377938ffafa3d913bb11e9046338fa0d1a9 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 22 Aug 2024 14:39:59 +0300 Subject: [PATCH 079/124] Modify `hacks::parse_expr_from_str()` to take an edition too This will be needed as we parse unknown identifiers and want to insert them into source code. 
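
A minimal sketch of a call site after this change; `parse_snippet` is just an
illustrative wrapper (not an API added here), and the real callers in the
hunks below simply pass `Edition::CURRENT`:

    use syntax::{ast, hacks, Edition};

    // The caller now chooses the edition instead of the helper hard-coding it.
    fn parse_snippet(text: &str) -> Option<ast::Expr> {
        hacks::parse_expr_from_str(text, Edition::CURRENT)
    }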
--- .../crates/ide-assists/src/handlers/remove_dbg.rs | 4 ++-- .../crates/ide-completion/src/completions/attribute.rs | 6 ++++-- .../crates/ide-db/src/syntax_helpers/node_ext.rs | 10 ++++++---- src/tools/rust-analyzer/crates/syntax/src/hacks.rs | 4 ++-- 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs index cffa3f55c9092..0e9c463e02459 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs @@ -1,7 +1,7 @@ use itertools::Itertools; use syntax::{ ast::{self, make, AstNode, AstToken}, - match_ast, ted, NodeOrToken, SyntaxElement, TextRange, TextSize, T, + match_ast, ted, Edition, NodeOrToken, SyntaxElement, TextRange, TextSize, T, }; use crate::{AssistContext, AssistId, AssistKind, Assists}; @@ -77,7 +77,7 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt let input_expressions = input_expressions .into_iter() .filter_map(|(is_sep, group)| (!is_sep).then_some(group)) - .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join(""))) + .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join(""), Edition::CURRENT)) .collect::>>()?; let parent = macro_expr.syntax().parent()?; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs index 75bd327608e83..9821fb4a2fa95 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/attribute.rs @@ -14,7 +14,7 @@ use ide_db::{ use itertools::Itertools; use syntax::{ ast::{self, AttrKind}, - AstNode, SyntaxKind, T, + AstNode, Edition, SyntaxKind, T, }; use crate::{ @@ -373,7 +373,9 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option> { input_expressions .into_iter() .filter_map(|(is_sep, group)| (!is_sep).then_some(group)) - .filter_map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join(""))) + .filter_map(|mut tokens| { + syntax::hacks::parse_expr_from_str(&tokens.join(""), Edition::CURRENT) + }) .collect::>(), ) } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs index fa82902a74d5c..91e0b4495f5f1 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -477,10 +477,12 @@ pub fn parse_tt_as_comma_sep_paths( .into_iter() .filter_map(|(is_sep, group)| (!is_sep).then_some(group)) .filter_map(|mut tokens| { - syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr { - ast::Expr::PathExpr(it) => it.path(), - _ => None, - }) + syntax::hacks::parse_expr_from_str(&tokens.join(""), Edition::CURRENT).and_then( + |expr| match expr { + ast::Expr::PathExpr(it) => it.path(), + _ => None, + }, + ) }) .collect(); Some(paths) diff --git a/src/tools/rust-analyzer/crates/syntax/src/hacks.rs b/src/tools/rust-analyzer/crates/syntax/src/hacks.rs index 36615d11d85f3..9e63448ce9630 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/hacks.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/hacks.rs @@ -6,9 +6,9 @@ use parser::Edition; use crate::{ast, AstNode}; -pub fn parse_expr_from_str(s: &str) -> Option { +pub fn 
parse_expr_from_str(s: &str, edition: Edition) -> Option { let s = s.trim(); - let file = ast::SourceFile::parse(&format!("const _: () = {s};"), Edition::CURRENT); + let file = ast::SourceFile::parse(&format!("const _: () = {s};"), edition); let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?; if expr.syntax().text() != s { return None; From 634052268fec5ad208e8e1b461904c12fadb8dc4 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 22 Aug 2024 14:40:55 +0300 Subject: [PATCH 080/124] Provide `impl From for ast::GenericParam` --- .../rust-analyzer/crates/syntax/src/ast/node_ext.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs index 5447906206c7f..693bfe330bd9c 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs @@ -17,7 +17,7 @@ use crate::{ ted, NodeOrToken, SmolStr, SyntaxElement, SyntaxToken, TokenText, T, }; -use super::{RangeItem, RangeOp}; +use super::{GenericParam, RangeItem, RangeOp}; impl ast::Lifetime { pub fn text(&self) -> TokenText<'_> { @@ -822,6 +822,15 @@ pub enum TypeOrConstParam { Const(ast::ConstParam), } +impl From for GenericParam { + fn from(value: TypeOrConstParam) -> Self { + match value { + TypeOrConstParam::Type(it) => GenericParam::TypeParam(it), + TypeOrConstParam::Const(it) => GenericParam::ConstParam(it), + } + } +} + impl TypeOrConstParam { pub fn name(&self) -> Option { match self { From becfc5aeb9a5aec1ffb62b9540316efcb94b32ae Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 22 Aug 2024 14:41:54 +0300 Subject: [PATCH 081/124] Impl PartialEq and Eq for `IndentLevel` We can impl PartialOrd and Ord too, but I didn't need that. --- src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs index 5bc6b780e47fa..de40d638be393 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs @@ -8,7 +8,7 @@ use crate::{ ted, AstToken, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken, }; -#[derive(Debug, Clone, Copy)] +#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct IndentLevel(pub u8); impl From for IndentLevel { From 7bd3ca102d372c67dcad90322eaf83631f9c4b41 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Sun, 25 Aug 2024 04:35:58 +0300 Subject: [PATCH 082/124] Don't enable the search fast path for short associated functions when a search scope is set In most places where we set a search scope it is a single file, and so the fast path will actually harm performance, since it has to search for aliases in the whole project. The only exception that qualifies for the fast path is SSR (there is an exception that don't qualify for the fast path as it search for `use` items). It sets the search scope to avoid dependencies. We could make it use the fast path, but I didn't bother. 
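
The change itself is just an early return, but the intent, sketched in
isolation (illustrative only, not new API), is:

    // A caller-provided scope is usually a single file, while the fast path
    // still has to scan the whole project for aliases, so it only pays off
    // when no custom scope has been set.
    fn fast_path_applies(custom_scope_set: bool) -> bool {
        !custom_scope_set
    }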
--- src/tools/rust-analyzer/crates/ide-db/src/search.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index c807a5940d14f..12ce5a403fe87 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -528,6 +528,10 @@ impl<'a> FindUsages<'a> { search_scope: &SearchScope, name: &str, ) -> bool { + if self.scope.is_some() { + return false; + } + let _p = tracing::info_span!("short_associated_function_fast_search").entered(); let container = (|| { From d9d8d9477f4de812190183ddc7085452102c3f40 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 25 Aug 2024 09:28:47 +0200 Subject: [PATCH 083/124] fix: Fix metadata retrying eating original errors --- .../project-model/src/cargo_workspace.rs | 46 +++++++++---------- .../crates/project-model/src/sysroot.rs | 2 +- .../crates/project-model/src/tests.rs | 3 ++ .../crates/project-model/src/workspace.rs | 35 ++++++++------ .../crates/rust-analyzer/src/global_state.rs | 2 +- .../src/handlers/notification.rs | 2 +- .../rust-analyzer/src/handlers/request.rs | 4 +- .../crates/rust-analyzer/src/reload.rs | 17 +++---- .../crates/rust-analyzer/tests/crate_graph.rs | 1 + 9 files changed, 60 insertions(+), 52 deletions(-) diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index 492bc9a9255c6..e7fd939ef9e54 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -33,8 +33,6 @@ pub struct CargoWorkspace { workspace_root: AbsPathBuf, target_directory: AbsPathBuf, manifest_path: ManifestPath, - // Whether this workspace was queried with `--no-deps`. - no_deps: bool, } impl ops::Index for CargoWorkspace { @@ -253,6 +251,9 @@ struct PackageMetadata { } impl CargoWorkspace { + /// Fetches the metadata for the given `cargo_toml` manifest. + /// A successful result may contain another metadata error if the initial fetching failed but + /// the `--no-deps` retry succeeded. pub fn fetch_metadata( cargo_toml: &ManifestPath, current_dir: &AbsPath, @@ -260,7 +261,7 @@ impl CargoWorkspace { sysroot: &Sysroot, locked: bool, progress: &dyn Fn(String), - ) -> anyhow::Result { + ) -> anyhow::Result<(cargo_metadata::Metadata, Option)> { Self::fetch_metadata_(cargo_toml, current_dir, config, sysroot, locked, false, progress) } @@ -272,7 +273,7 @@ impl CargoWorkspace { locked: bool, no_deps: bool, progress: &dyn Fn(String), - ) -> anyhow::Result { + ) -> anyhow::Result<(cargo_metadata::Metadata, Option)> { let targets = find_list_of_build_targets(config, cargo_toml, sysroot); let cargo = sysroot.tool(Tool::Cargo); @@ -337,13 +338,17 @@ impl CargoWorkspace { // unclear whether cargo itself supports it. progress("metadata".to_owned()); - (|| -> Result { + (|| -> anyhow::Result<(_, _)> { let output = meta.cargo_command().output()?; if !output.status.success() { + let error = cargo_metadata::Error::CargoMetadata { + stderr: String::from_utf8(output.stderr)?, + } + .into(); if !no_deps { // If we failed to fetch metadata with deps, try again without them. // This makes r-a still work partially when offline. 
- if let Ok(metadata) = Self::fetch_metadata_( + if let Ok((metadata, _)) = Self::fetch_metadata_( cargo_toml, current_dir, config, @@ -352,20 +357,23 @@ impl CargoWorkspace { true, progress, ) { - return Ok(metadata); + return Ok((metadata, Some(error))); } } - - return Err(cargo_metadata::Error::CargoMetadata { - stderr: String::from_utf8(output.stderr)?, - }); + return Err(error); } let stdout = from_utf8(&output.stdout)? .lines() .find(|line| line.starts_with('{')) .ok_or(cargo_metadata::Error::NoJson)?; - cargo_metadata::MetadataCommand::parse(stdout) + Ok((cargo_metadata::MetadataCommand::parse(stdout)?, None)) })() + .map(|(metadata, error)| { + ( + metadata, + error.map(|e| e.context(format!("Failed to run `{:?}`", meta.cargo_command()))), + ) + }) .with_context(|| format!("Failed to run `{:?}`", meta.cargo_command())) } @@ -463,7 +471,6 @@ impl CargoWorkspace { pkg_data.targets.push(tgt); } } - let no_deps = meta.resolve.is_none(); for mut node in meta.resolve.map_or_else(Vec::new, |it| it.nodes) { let &source = pkg_by_id.get(&node.id).unwrap(); node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg)); @@ -483,14 +490,7 @@ impl CargoWorkspace { let target_directory = AbsPathBuf::assert(meta.target_directory); - CargoWorkspace { - packages, - targets, - workspace_root, - target_directory, - manifest_path, - no_deps, - } + CargoWorkspace { packages, targets, workspace_root, target_directory, manifest_path } } pub fn packages(&self) -> impl ExactSizeIterator + '_ { @@ -572,10 +572,6 @@ impl CargoWorkspace { fn is_unique(&self, name: &str) -> bool { self.packages.iter().filter(|(_, v)| v.name == name).count() == 1 } - - pub fn no_deps(&self) -> bool { - self.no_deps - } } fn find_list_of_build_targets( diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs index e11f0ee3ae3ff..19f4c35b5addd 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs @@ -325,7 +325,7 @@ impl Sysroot { "nightly".to_owned(), ); - let mut res = match CargoWorkspace::fetch_metadata( + let (mut res, _) = match CargoWorkspace::fetch_metadata( &library_manifest, sysroot_src_dir, &cargo_config, diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index e3bc81e1963d9..11bdc18eefb33 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -34,6 +34,7 @@ fn load_cargo_with_overrides( build_scripts: WorkspaceBuildScripts::default(), rustc: Err(None), cargo_config_extra_env: Default::default(), + error: None, }, cfg_overrides, sysroot: Sysroot::empty(), @@ -58,6 +59,7 @@ fn load_cargo_with_fake_sysroot( build_scripts: WorkspaceBuildScripts::default(), rustc: Err(None), cargo_config_extra_env: Default::default(), + error: None, }, sysroot: get_fake_sysroot(), rustc_cfg: Vec::new(), @@ -294,6 +296,7 @@ fn smoke_test_real_sysroot_cargo() { build_scripts: WorkspaceBuildScripts::default(), rustc: Err(None), cargo_config_extra_env: Default::default(), + error: None, }, sysroot, rustc_cfg: Vec::new(), diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 33ba7f9688d6b..9811abdce3164 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -69,6 
+69,8 @@ pub enum ProjectWorkspaceKind { Cargo { /// The workspace as returned by `cargo metadata`. cargo: CargoWorkspace, + /// Additional `cargo metadata` error. (only populated if retried fetching via `--no-deps` succeeded). + error: Option>, /// The build script results for the workspace. build_scripts: WorkspaceBuildScripts, /// The rustc workspace loaded for this workspace. An `Err(None)` means loading has been @@ -93,7 +95,7 @@ pub enum ProjectWorkspaceKind { /// The file in question. file: ManifestPath, /// Is this file a cargo script file? - cargo: Option<(CargoWorkspace, WorkspaceBuildScripts)>, + cargo: Option<(CargoWorkspace, WorkspaceBuildScripts, Option>)>, /// Environment variables set in the `.cargo/config` file. cargo_config_extra_env: FxHashMap, }, @@ -106,6 +108,7 @@ impl fmt::Debug for ProjectWorkspace { match kind { ProjectWorkspaceKind::Cargo { cargo, + error: _, build_scripts: _, rustc, cargo_config_extra_env, @@ -256,7 +259,7 @@ impl ProjectWorkspace { false, progress, ) { - Ok(meta) => { + Ok((meta, _error)) => { let workspace = CargoWorkspace::new(meta, cargo_toml.clone()); let buildscripts = WorkspaceBuildScripts::rustc_crates( &workspace, @@ -301,7 +304,7 @@ impl ProjectWorkspace { tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace"); } - let meta = CargoWorkspace::fetch_metadata( + let (meta, error) = CargoWorkspace::fetch_metadata( cargo_toml, cargo_toml.parent(), config, @@ -324,6 +327,7 @@ impl ProjectWorkspace { build_scripts: WorkspaceBuildScripts::default(), rustc, cargo_config_extra_env, + error: error.map(Arc::new), }, sysroot, rustc_cfg, @@ -404,10 +408,11 @@ impl ProjectWorkspace { let cargo_script = CargoWorkspace::fetch_metadata(detached_file, dir, config, &sysroot, false, &|_| ()) .ok() - .map(|ws| { + .map(|(ws, error)| { ( CargoWorkspace::new(ws, detached_file.clone()), WorkspaceBuildScripts::default(), + error.map(Arc::new), ) }); @@ -440,10 +445,8 @@ impl ProjectWorkspace { progress: &dyn Fn(String), ) -> anyhow::Result { match &self.kind { - ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _)), .. } - | ProjectWorkspaceKind::Cargo { cargo, .. } - if !cargo.no_deps() => - { + ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _, None)), .. } + | ProjectWorkspaceKind::Cargo { cargo, error: None, .. } => { WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, &self.sysroot) .with_context(|| { format!("Failed to run build scripts for {}", cargo.workspace_root()) @@ -502,7 +505,7 @@ impl ProjectWorkspace { pub fn set_build_scripts(&mut self, bs: WorkspaceBuildScripts) { match &mut self.kind { ProjectWorkspaceKind::Cargo { build_scripts, .. } - | ProjectWorkspaceKind::DetachedFile { cargo: Some((_, build_scripts)), .. } => { + | ProjectWorkspaceKind::DetachedFile { cargo: Some((_, build_scripts, _)), .. } => { *build_scripts = bs } _ => assert_eq!(bs, WorkspaceBuildScripts::default()), @@ -593,6 +596,7 @@ impl ProjectWorkspace { rustc, build_scripts, cargo_config_extra_env: _, + error: _, } => { cargo .packages() @@ -648,7 +652,7 @@ impl ProjectWorkspace { include: vec![file.to_path_buf()], exclude: Vec::new(), }) - .chain(cargo_script.iter().flat_map(|(cargo, build_scripts)| { + .chain(cargo_script.iter().flat_map(|(cargo, build_scripts, _)| { cargo.packages().map(|pkg| { let is_local = cargo[pkg].is_local; let pkg_root = cargo[pkg].manifest.parent().to_path_buf(); @@ -703,7 +707,7 @@ impl ProjectWorkspace { } ProjectWorkspaceKind::DetachedFile { cargo: cargo_script, .. 
} => { sysroot_package_len - + cargo_script.as_ref().map_or(1, |(cargo, _)| cargo.packages().len()) + + cargo_script.as_ref().map_or(1, |(cargo, _, _)| cargo.packages().len()) } } } @@ -733,6 +737,7 @@ impl ProjectWorkspace { rustc, build_scripts, cargo_config_extra_env: _, + error: _, } => ( cargo_to_crate_graph( load, @@ -746,7 +751,7 @@ impl ProjectWorkspace { sysroot, ), ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. } => ( - if let Some((cargo, build_scripts)) = cargo_script { + if let Some((cargo, build_scripts, _)) = cargo_script { cargo_to_crate_graph( &mut |path| load(path), None, @@ -795,12 +800,14 @@ impl ProjectWorkspace { rustc, cargo_config_extra_env, build_scripts: _, + error: _, }, ProjectWorkspaceKind::Cargo { cargo: o_cargo, rustc: o_rustc, cargo_config_extra_env: o_cargo_config_extra_env, build_scripts: _, + error: _, }, ) => { cargo == o_cargo @@ -813,12 +820,12 @@ impl ProjectWorkspace { ( ProjectWorkspaceKind::DetachedFile { file, - cargo: Some((cargo_script, _)), + cargo: Some((cargo_script, _, _)), cargo_config_extra_env, }, ProjectWorkspaceKind::DetachedFile { file: o_file, - cargo: Some((o_cargo_script, _)), + cargo: Some((o_cargo_script, _, _)), cargo_config_extra_env: o_cargo_config_extra_env, }, ) => { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 8fcdfa5829aae..700ae749dc753 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -667,7 +667,7 @@ impl GlobalStateSnapshot { for workspace in self.workspaces.iter() { match &workspace.kind { ProjectWorkspaceKind::Cargo { cargo, .. } - | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _)), .. } => { + | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _, _)), .. } => { let Some(target_idx) = cargo.target_by_root(path) else { continue; }; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index f99319271ba4a..87b024c311e7f 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -343,7 +343,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { let package = match &ws.kind { project_model::ProjectWorkspaceKind::Cargo { cargo, .. } | project_model::ProjectWorkspaceKind::DetachedFile { - cargo: Some((cargo, _)), + cargo: Some((cargo, _, _)), .. } => cargo.packages().find_map(|pkg| { let has_target_with_root = cargo[pkg] diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index d78bd3b44d31e..1ad5ff0c8cdc1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -799,7 +799,7 @@ pub(crate) fn handle_parent_module( .iter() .filter_map(|ws| match &ws.kind { ProjectWorkspaceKind::Cargo { cargo, .. } - | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _)), .. } => { + | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _, _)), .. 
} => { cargo.parent_manifests(&manifest_path) } _ => None, @@ -1839,7 +1839,7 @@ pub(crate) fn handle_open_docs( let ws_and_sysroot = snap.workspaces.iter().find_map(|ws| match &ws.kind { ProjectWorkspaceKind::Cargo { cargo, .. } - | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _)), .. } => { + | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _, _)), .. } => { Some((cargo, &ws.sysroot)) } ProjectWorkspaceKind::Json { .. } => None, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 5d0c6b65992c7..2a91d614e58c1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -180,16 +180,17 @@ impl GlobalState { self.proc_macro_clients.iter().map(Some).chain(iter::repeat_with(|| None)); for (ws, proc_macro_client) in self.workspaces.iter().zip(proc_macro_clients) { - if matches!( - &ws.kind, - ProjectWorkspaceKind::Cargo { cargo, .. } | ProjectWorkspaceKind::DetachedFile { cargo: Some((cargo, _)), .. } - if cargo.no_deps() - ) { + if let ProjectWorkspaceKind::Cargo { error: Some(error), .. } + | ProjectWorkspaceKind::DetachedFile { + cargo: Some((_, _, Some(error))), .. + } = &ws.kind + { status.health |= lsp_ext::Health::Warning; format_to!( message, - "Failed to read Cargo metadata for `{}`, the `Cargo.toml` might be invalid or you have no internet connection.\n\n", - ws.manifest_or_root() + "Failed to read Cargo metadata with dependencies for `{}`: {:#}\n\n", + ws.manifest_or_root(), + error ); } if let Some(err) = ws.sysroot.error() { @@ -805,7 +806,7 @@ impl GlobalState { match &ws.kind { ProjectWorkspaceKind::Cargo { cargo, .. } | ProjectWorkspaceKind::DetachedFile { - cargo: Some((cargo, _)), + cargo: Some((cargo, _, _)), .. 
} => (cargo.workspace_root(), Some(cargo.manifest_path())), ProjectWorkspaceKind::Json(project) => { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs index b8a82fd6a72ee..5e4d26ce2d870 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs @@ -20,6 +20,7 @@ fn load_cargo_with_fake_sysroot(file: &str) -> ProjectWorkspace { build_scripts: WorkspaceBuildScripts::default(), rustc: Err(None), cargo_config_extra_env: Default::default(), + error: None, }, sysroot: get_fake_sysroot(), rustc_cfg: Vec::new(), From 606401f03c0e980a01183a705d84fd4ea66e9fdc Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 25 Aug 2024 10:47:30 +0200 Subject: [PATCH 084/124] fix: Fix trait method completions not acknowledging Deref impls --- .../rust-analyzer/crates/hir-def/src/lib.rs | 2 +- .../crates/hir-ty/src/method_resolution.rs | 2 +- .../ide-completion/src/tests/flyimport.rs | 36 +++++++++ .../ide-db/src/imports/import_assets.rs | 81 ++++++++++++------- .../src/handlers/unused_variables.rs | 2 +- 5 files changed, 90 insertions(+), 33 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index 4ced30c81dced..0213bd904b68c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -241,7 +241,7 @@ pub type StaticLoc = AssocItemLoc; impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static); impl_loc!(StaticLoc, id: Static, container: ItemContainerId); -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct TraitId(salsa::InternId); pub type TraitLoc = ItemLoc; impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index 3ddfea13ab0c0..d08f9b7ff06fa 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -35,7 +35,7 @@ use crate::{ }; /// This is used as a key for indexing impls. 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub enum TyFingerprint { // These are lang item impls: Str, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs index 158dbaf1b1dd1..8350fdcc4c07c 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs @@ -1633,3 +1633,39 @@ fn main() { "#]], ); } + +#[test] +fn trait_impl_on_slice_method_on_deref_slice_type() { + check( + r#" +//- minicore: deref, sized +struct SliceDeref; +impl core::ops::Deref for SliceDeref { + type Target = [()]; + + fn deref(&self) -> &Self::Target { + &[] + } +} +fn main() { + SliceDeref.choose$0(); +} +mod module { + pub(super) trait SliceRandom { + type Item; + + fn choose(&self); + } + + impl SliceRandom for [T] { + type Item = T; + + fn choose(&self) {} + } +} +"#, + expect![[r#" + me choose (use module::SliceRandom) fn(&self) + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs index 07e197334706e..82a182806a4cb 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs @@ -531,40 +531,61 @@ fn trait_applicable_items( }) .collect(); - trait_candidates.retain(|&candidate_trait_id| { - // we care about the following cases: - // 1. Trait's definition crate - // 2. Definition crates for all trait's generic arguments - // a. This is recursive for fundamental types: `Into> for ()`` is OK, but - // `Into> for ()`` is *not*. - // 3. Receiver type definition crate - // a. This is recursive for fundamental types - let defining_crate_for_trait = Trait::from(candidate_trait_id).krate(db); - let Some(receiver) = trait_candidate.receiver_ty.fingerprint_for_trait_impl() else { - return false; - }; - - // in order to handle implied bounds through an associated type, keep any - // method receiver that matches `TyFingerprint::Unnameable`. this receiver - // won't be in `TraitImpls` anyways, as `TraitImpls` only contains actual - // implementations. - if matches!(receiver, TyFingerprint::Unnameable) { - return true; + let autoderef_method_receiver = { + let mut deref_chain = trait_candidate.receiver_ty.autoderef(db).collect::>(); + // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!) + if let Some((ty, _len)) = deref_chain.last().and_then(|ty| ty.as_array(db)) { + let slice = Type::new_slice(ty); + deref_chain.push(slice); } + deref_chain + .into_iter() + .filter_map(|ty| Some((ty.krate(db).into(), ty.fingerprint_for_trait_impl()?))) + .sorted() + .unique() + .collect::>() + }; - let definitions_exist_in_trait_crate = db - .trait_impls_in_crate(defining_crate_for_trait.into()) - .has_impls_for_trait_and_self_ty(candidate_trait_id, receiver); + // can be empty if the entire deref chain is has no valid trait impl fingerprints + if autoderef_method_receiver.is_empty() { + return Default::default(); + } - // this is a closure for laziness: if `definitions_exist_in_trait_crate` is true, - // we can avoid a second db lookup. 
- let definitions_exist_in_receiver_crate = || { - db.trait_impls_in_crate(trait_candidate.receiver_ty.krate(db).into()) - .has_impls_for_trait_and_self_ty(candidate_trait_id, receiver) - }; + // in order to handle implied bounds through an associated type, keep all traits if any + // type in the deref chain matches `TyFingerprint::Unnameable`. This fingerprint + // won't be in `TraitImpls` anyways, as `TraitImpls` only contains actual implementations. + if !autoderef_method_receiver + .iter() + .any(|(_, fingerprint)| matches!(fingerprint, TyFingerprint::Unnameable)) + { + trait_candidates.retain(|&candidate_trait_id| { + // we care about the following cases: + // 1. Trait's definition crate + // 2. Definition crates for all trait's generic arguments + // a. This is recursive for fundamental types: `Into> for ()`` is OK, but + // `Into> for ()`` is *not*. + // 3. Receiver type definition crate + // a. This is recursive for fundamental types + let defining_crate_for_trait = Trait::from(candidate_trait_id).krate(db); + + let trait_impls_in_crate = db.trait_impls_in_crate(defining_crate_for_trait.into()); + let definitions_exist_in_trait_crate = + autoderef_method_receiver.iter().any(|&(_, fingerprint)| { + trait_impls_in_crate + .has_impls_for_trait_and_self_ty(candidate_trait_id, fingerprint) + }); + // this is a closure for laziness: if `definitions_exist_in_trait_crate` is true, + // we can avoid a second db lookup. + let definitions_exist_in_receiver_crate = || { + autoderef_method_receiver.iter().any(|&(krate, fingerprint)| { + db.trait_impls_in_crate(krate) + .has_impls_for_trait_and_self_ty(candidate_trait_id, fingerprint) + }) + }; - definitions_exist_in_trait_crate || definitions_exist_in_receiver_crate() - }); + definitions_exist_in_trait_crate || definitions_exist_in_receiver_crate() + }); + } let mut located_imports = FxIndexSet::default(); let mut trait_import_paths = FxHashMap::default(); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs index c594754bd440a..84007b16aa67c 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unused_variables.rs @@ -23,7 +23,7 @@ pub(crate) fn unused_variables( return None; } let diagnostic_range = ctx.sema.diagnostics_display_range(ast); - // The range for the Actual Name. We don't want to replace the entire declarition. Using the diagnostic range causes issues within in Array Destructuring. + // The range for the Actual Name. We don't want to replace the entire declaration. Using the diagnostic range causes issues within in Array Destructuring. 
let name_range = d .local .primary_source(ctx.sema.db) From 0ed13eb48a6ea9a2757541bdf3c2cd1f906e1f69 Mon Sep 17 00:00:00 2001 From: tiif Date: Sun, 25 Aug 2024 17:15:19 +0800 Subject: [PATCH 085/124] Add test for triggering notification after timeout --- .../pass-dep/libc/libc-epoll-blocking.rs | 41 ++++++++++++++++++- 1 file changed, 40 insertions(+), 1 deletion(-) diff --git a/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs b/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs index 1a1d058b98365..2a5d3dff07f91 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs @@ -11,6 +11,7 @@ use std::thread::spawn; fn main() { test_epoll_block_without_notification(); test_epoll_block_then_unblock(); + test_notification_after_timeout(); } // Using `as` cast since `EPOLLET` wraps around @@ -43,6 +44,8 @@ fn check_epoll_wait( assert_eq!(data, expected_event.1, "got wrong data"); } } + +// This test allows epoll_wait to block, then unblock without notification. fn test_epoll_block_without_notification() { // Create an epoll instance. let epfd = unsafe { libc::epoll_create1(0) }; @@ -62,10 +65,11 @@ fn test_epoll_block_without_notification() { let expected_value = fd as u64; check_epoll_wait::<1>(epfd, &[(expected_event, expected_value)], 0); - // epoll_wait before triggering notification so it will block then unblock. + // This epoll wait blocks, and timeout without notification. check_epoll_wait::<1>(epfd, &[], 5); } +// This test triggers notification and unblocks the epoll_wait before timeout. fn test_epoll_block_then_unblock() { // Create an epoll instance. let epfd = unsafe { libc::epoll_create1(0) }; @@ -98,3 +102,38 @@ fn test_epoll_block_then_unblock() { check_epoll_wait::<1>(epfd, &[(expected_event, expected_value)], 10); thread1.join().unwrap(); } + +// This test triggers a notification after epoll_wait times out. +fn test_notification_after_timeout() { + // Create an epoll instance. + let epfd = unsafe { libc::epoll_create1(0) }; + assert_ne!(epfd, -1); + + // Create a socketpair instance. + let mut fds = [-1, -1]; + let res = unsafe { libc::socketpair(libc::AF_UNIX, libc::SOCK_STREAM, 0, fds.as_mut_ptr()) }; + assert_eq!(res, 0); + + // Register one side of the socketpair with epoll. + let mut ev = libc::epoll_event { events: EPOLL_IN_OUT_ET, u64: fds[0] as u64 }; + let res = unsafe { libc::epoll_ctl(epfd, libc::EPOLL_CTL_ADD, fds[0], &mut ev) }; + assert_eq!(res, 0); + + // epoll_wait to clear notification. + let expected_event = u32::try_from(libc::EPOLLOUT).unwrap(); + let expected_value = fds[0] as u64; + check_epoll_wait::<1>(epfd, &[(expected_event, expected_value)], 0); + + // epoll_wait timeouts without notification. + check_epoll_wait::<1>(epfd, &[], 10); + + // Trigger epoll notification after timeout. + let data = "abcde".as_bytes().as_ptr(); + let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + assert_eq!(res, 5); + + // Check the result of the notification. 
+ let expected_event = u32::try_from(libc::EPOLLIN | libc::EPOLLOUT).unwrap(); + let expected_value = fds[0] as u64; + check_epoll_wait::<1>(epfd, &[(expected_event, expected_value)], 10); +} From 2703ea1623e498caa6882de083ef786b7ace4b95 Mon Sep 17 00:00:00 2001 From: duncan Date: Sun, 25 Aug 2024 11:04:52 +0100 Subject: [PATCH 086/124] fix: add extra_test_bin_args to test explorer test runner trim whitespace --- src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs | 2 ++ src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs | 1 + .../rust-analyzer/crates/rust-analyzer/src/test_runner.rs | 5 +++++ 3 files changed, 8 insertions(+) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index f74e3d6db15bd..2889af844b14e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -1884,6 +1884,7 @@ impl Config { CargoFeaturesDef::Selected(it) => it, }, extra_args: self.extra_args().clone(), + extra_test_bin_args: self.runnables_extraTestBinaryArgs().clone(), extra_env: self.extra_env().clone(), target_dir: self.target_dir_from_config(), } @@ -1934,6 +1935,7 @@ impl Config { CargoFeaturesDef::Selected(it) => it, }, extra_args: self.check_extra_args(), + extra_test_bin_args: self.runnables_extraTestBinaryArgs().clone(), extra_env: self.check_extra_env(), target_dir: self.target_dir_from_config(), }, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 9fb43286579f8..b035d779a7d59 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -30,6 +30,7 @@ pub(crate) struct CargoOptions { pub(crate) all_features: bool, pub(crate) features: Vec, pub(crate) extra_args: Vec, + pub(crate) extra_test_bin_args: Vec, pub(crate) extra_env: FxHashMap, pub(crate) target_dir: Option, } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs index 293cff474337d..5e43a3c60d86e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs @@ -102,6 +102,11 @@ impl CargoTestHandle { } cmd.args(["-Z", "unstable-options"]); cmd.arg("--format=json"); + + for extra_arg in options.extra_test_bin_args { + cmd.arg(extra_arg); + } + Ok(Self { _handle: CommandHandle::spawn(cmd, sender)? 
}) } } From 98e23d37065d34a5d30f5465164e70a01998e9d7 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 25 Aug 2024 13:11:13 +0200 Subject: [PATCH 087/124] internal: Don't allocate autoderef steps when not needed --- .../rust-analyzer/.git-blame-ignore-revs | 7 +- .../crates/hir-ty/src/autoderef.rs | 73 ++++++++++++++----- .../crates/hir-ty/src/method_resolution.rs | 6 +- 3 files changed, 60 insertions(+), 26 deletions(-) diff --git a/src/tools/rust-analyzer/.git-blame-ignore-revs b/src/tools/rust-analyzer/.git-blame-ignore-revs index d5951a9420916..2ccdc8c042563 100644 --- a/src/tools/rust-analyzer/.git-blame-ignore-revs +++ b/src/tools/rust-analyzer/.git-blame-ignore-revs @@ -7,9 +7,10 @@ # prettier format f247090558c9ba3c551566eae5882b7ca865225f -# subtree syncs -932d85b52946d917deab2c23ead552f7f713b828 +# pre-josh subtree syncs 3e358a6827d83e8d6473913a5e304734aadfed04 +932d85b52946d917deab2c23ead552f7f713b828 9d2cb42a413e51deb50b36794a2e1605381878fc -f532576ac53ddcc666bc8d59e0b6437065e2f599 +b2f6fd4f961fc7e4fbfdb80cae2e6065f8436f15 c48062fe2ab9a2d913d1985a6b0aec4bf936bfc1 +f532576ac53ddcc666bc8d59e0b6437065e2f599 diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs index ecfc1ff99e9ea..7a3846df40eef 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs @@ -3,6 +3,8 @@ //! reference to a type with the field `bar`. This is an approximation of the //! logic in rustc (which lives in rustc_hir_analysis/check/autoderef.rs). +use std::mem; + use chalk_ir::cast::Cast; use hir_def::lang_item::LangItem; use hir_expand::name::Name; @@ -37,7 +39,7 @@ pub fn autoderef( ) -> impl Iterator { let mut table = InferenceTable::new(db, env); let ty = table.instantiate_canonical(ty); - let mut autoderef = Autoderef::new(&mut table, ty, false); + let mut autoderef = Autoderef::new_no_tracking(&mut table, ty, false); let mut v = Vec::new(); while let Some((ty, _steps)) = autoderef.next() { // `ty` may contain unresolved inference variables. 
Since there's no chance they would be @@ -58,41 +60,76 @@ pub fn autoderef( v.into_iter() } +trait TrackAutoderefSteps { + fn len(&self) -> usize; + fn push(&mut self, kind: AutoderefKind, ty: &Ty); +} + +impl TrackAutoderefSteps for usize { + fn len(&self) -> usize { + *self + } + fn push(&mut self, _: AutoderefKind, _: &Ty) { + *self += 1; + } +} +impl TrackAutoderefSteps for Vec<(AutoderefKind, Ty)> { + fn len(&self) -> usize { + self.len() + } + fn push(&mut self, kind: AutoderefKind, ty: &Ty) { + self.push((kind, ty.clone())); + } +} + #[derive(Debug)] -pub(crate) struct Autoderef<'a, 'db> { - pub(crate) table: &'a mut InferenceTable<'db>, +pub(crate) struct Autoderef<'table, 'db, T = Vec<(AutoderefKind, Ty)>> { + pub(crate) table: &'table mut InferenceTable<'db>, ty: Ty, at_start: bool, - steps: Vec<(AutoderefKind, Ty)>, + steps: T, explicit: bool, } -impl<'a, 'db> Autoderef<'a, 'db> { - pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty, explicit: bool) -> Self { +impl<'table, 'db> Autoderef<'table, 'db> { + pub(crate) fn new(table: &'table mut InferenceTable<'db>, ty: Ty, explicit: bool) -> Self { let ty = table.resolve_ty_shallow(&ty); Autoderef { table, ty, at_start: true, steps: Vec::new(), explicit } } - pub(crate) fn step_count(&self) -> usize { - self.steps.len() - } - pub(crate) fn steps(&self) -> &[(AutoderefKind, Ty)] { &self.steps } +} + +impl<'table, 'db> Autoderef<'table, 'db, usize> { + pub(crate) fn new_no_tracking( + table: &'table mut InferenceTable<'db>, + ty: Ty, + explicit: bool, + ) -> Self { + let ty = table.resolve_ty_shallow(&ty); + Autoderef { table, ty, at_start: true, steps: 0, explicit } + } +} + +#[allow(private_bounds)] +impl<'table, 'db, T: TrackAutoderefSteps> Autoderef<'table, 'db, T> { + pub(crate) fn step_count(&self) -> usize { + self.steps.len() + } pub(crate) fn final_ty(&self) -> Ty { self.ty.clone() } } -impl Iterator for Autoderef<'_, '_> { +impl Iterator for Autoderef<'_, '_, T> { type Item = (Ty, usize); #[tracing::instrument(skip_all)] fn next(&mut self) -> Option { - if self.at_start { - self.at_start = false; + if mem::take(&mut self.at_start) { return Some((self.ty.clone(), 0)); } @@ -102,7 +139,7 @@ impl Iterator for Autoderef<'_, '_> { let (kind, new_ty) = autoderef_step(self.table, self.ty.clone(), self.explicit)?; - self.steps.push((kind, self.ty.clone())); + self.steps.push(kind, &self.ty); self.ty = new_ty; Some((self.ty.clone(), self.step_count())) @@ -129,12 +166,8 @@ pub(crate) fn builtin_deref<'ty>( match ty.kind(Interner) { TyKind::Ref(.., ty) => Some(ty), TyKind::Raw(.., ty) if explicit => Some(ty), - &TyKind::Adt(chalk_ir::AdtId(adt), ref substs) => { - if crate::lang_items::is_box(db, adt) { - substs.at(Interner, 0).ty(Interner) - } else { - None - } + &TyKind::Adt(chalk_ir::AdtId(adt), ref substs) if crate::lang_items::is_box(db, adt) => { + substs.at(Interner, 0).ty(Interner) } _ => None, } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index d08f9b7ff06fa..5a72b97653dbf 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -1067,7 +1067,7 @@ fn iterate_method_candidates_by_receiver( // be found in any of the derefs of receiver_ty, so we have to go through // that, including raw derefs. 
table.run_in_snapshot(|table| { - let mut autoderef = autoderef::Autoderef::new(table, receiver_ty.clone(), true); + let mut autoderef = autoderef::Autoderef::new_no_tracking(table, receiver_ty.clone(), true); while let Some((self_ty, _)) = autoderef.next() { iterate_inherent_methods( &self_ty, @@ -1082,7 +1082,7 @@ fn iterate_method_candidates_by_receiver( ControlFlow::Continue(()) })?; table.run_in_snapshot(|table| { - let mut autoderef = autoderef::Autoderef::new(table, receiver_ty.clone(), true); + let mut autoderef = autoderef::Autoderef::new_no_tracking(table, receiver_ty.clone(), true); while let Some((self_ty, _)) = autoderef.next() { if matches!(self_ty.kind(Interner), TyKind::InferenceVar(_, TyVariableKind::General)) { // don't try to resolve methods on unknown types @@ -1657,7 +1657,7 @@ fn autoderef_method_receiver( ty: Ty, ) -> Vec<(Canonical, ReceiverAdjustments)> { let mut deref_chain: Vec<_> = Vec::new(); - let mut autoderef = autoderef::Autoderef::new(table, ty, false); + let mut autoderef = autoderef::Autoderef::new_no_tracking(table, ty, false); while let Some((ty, derefs)) = autoderef.next() { deref_chain.push(( autoderef.table.canonicalize(ty), From 326a1c669ddcc1e202c94bca0a1a188303e98c9b Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Mon, 26 Aug 2024 01:45:52 +0300 Subject: [PATCH 088/124] Fix Return Type Syntax to include `..` (i.e. `method(..)` and not `method()`) as specified in the RFC --- .../crates/parser/src/grammar/generic_args.rs | 13 +++-- .../parser/src/grammar/generic_params.rs | 3 +- .../crates/parser/src/grammar/paths.rs | 21 ++++++-- .../parser/src/syntax_kind/generated.rs | 1 + .../parser/test_data/generated/runner.rs | 14 ++++-- .../ok/associated_return_type_bounds.rs | 1 - .../return_type_syntax_assoc_type_bound.rast | 49 ++++++++++++++++++ .../ok/return_type_syntax_assoc_type_bound.rs | 1 + .../inline/ok/return_type_syntax_in_path.rast | 50 +++++++++++++++++++ .../inline/ok/return_type_syntax_in_path.rs | 4 ++ .../rust-analyzer/crates/syntax/rust.ungram | 6 ++- .../crates/syntax/src/ast/generated/nodes.rs | 36 +++++++++++++ 12 files changed, 183 insertions(+), 16 deletions(-) delete mode 100644 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/associated_return_type_bounds.rs create mode 100644 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_assoc_type_bound.rast create mode 100644 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_assoc_type_bound.rs create mode 100644 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_in_path.rast create mode 100644 src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_in_path.rs diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs index 249be2a333559..c62c8a9d3f929 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs @@ -102,13 +102,18 @@ fn generic_arg(p: &mut Parser<'_>) -> bool { IDENT if p.nth_at(1, T!['(']) => { let m = p.start(); name_ref(p); - params::param_list_fn_trait(p); - if p.at(T![:]) && !p.at(T![::]) { - // test associated_return_type_bounds - // fn foo>() {} + if p.nth_at(1, T![..]) { + let rtn = p.start(); + p.bump(T!['(']); + p.bump(T![..]); + p.expect(T![')']); + rtn.complete(p, RETURN_TYPE_SYNTAX); + // test return_type_syntax_assoc_type_bound + 
// fn foo>() {} generic_params::bounds(p); m.complete(p, ASSOC_TYPE_ARG); } else { + params::param_list_fn_trait(p); // test bare_dyn_types_with_paren_as_generic_args // type A = S; // type A = S; diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs index cf80a535ac55f..e0fa753fa70a4 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs @@ -119,8 +119,7 @@ fn lifetime_bounds(p: &mut Parser<'_>) { // test type_param_bounds // struct S; pub(super) fn bounds(p: &mut Parser<'_>) { - assert!(p.at(T![:])); - p.bump(T![:]); + p.expect(T![:]); bounds_without_colon(p); } diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs index 01b8f9e918714..09db921803f9b 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/paths.rs @@ -140,11 +140,24 @@ fn opt_path_type_args(p: &mut Parser<'_>, mode: Mode) { if p.at(T![::]) && p.nth_at(2, T!['(']) { p.bump(T![::]); } - // test path_fn_trait_args - // type F = Box ()>; if p.at(T!['(']) { - params::param_list_fn_trait(p); - opt_ret_type(p); + if p.nth_at(1, T![..]) { + // test return_type_syntax_in_path + // fn foo() + // where + // T::method(..): Send, + // {} + let rtn = p.start(); + p.bump(T!['(']); + p.bump(T![..]); + p.expect(T![')']); + rtn.complete(p, RETURN_TYPE_SYNTAX); + } else { + // test path_fn_trait_args + // type F = Box ()>; + params::param_list_fn_trait(p); + opt_ret_type(p); + } } else { generic_args::opt_generic_arg_list(p, false); } diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs index eaacd88e3e327..00f212487ae68 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs @@ -253,6 +253,7 @@ pub enum SyntaxKind { RENAME, REST_PAT, RETURN_EXPR, + RETURN_TYPE_SYNTAX, RET_TYPE, SELF_PARAM, SLICE_PAT, diff --git a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs index 1907f03b44d11..9ce5a2ae748fd 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs @@ -37,10 +37,6 @@ mod ok { #[test] fn assoc_type_eq() { run_and_expect_no_errors("test_data/parser/inline/ok/assoc_type_eq.rs"); } #[test] - fn associated_return_type_bounds() { - run_and_expect_no_errors("test_data/parser/inline/ok/associated_return_type_bounds.rs"); - } - #[test] fn associated_type_bounds() { run_and_expect_no_errors("test_data/parser/inline/ok/associated_type_bounds.rs"); } @@ -519,6 +515,16 @@ mod ok { #[test] fn return_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/return_expr.rs"); } #[test] + fn return_type_syntax_assoc_type_bound() { + run_and_expect_no_errors( + "test_data/parser/inline/ok/return_type_syntax_assoc_type_bound.rs", + ); + } + #[test] + fn return_type_syntax_in_path() { + run_and_expect_no_errors("test_data/parser/inline/ok/return_type_syntax_in_path.rs"); + } + #[test] fn self_param() { run_and_expect_no_errors("test_data/parser/inline/ok/self_param.rs"); } #[test] fn self_param_outer_attr() { diff --git 
a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/associated_return_type_bounds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/associated_return_type_bounds.rs deleted file mode 100644 index 42029ac592702..0000000000000 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/associated_return_type_bounds.rs +++ /dev/null @@ -1 +0,0 @@ -fn foo>() {} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_assoc_type_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_assoc_type_bound.rast new file mode 100644 index 0000000000000..30e0e73bbd6c0 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_assoc_type_bound.rast @@ -0,0 +1,49 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + GENERIC_PARAM_LIST + L_ANGLE "<" + TYPE_PARAM + NAME + IDENT "T" + COLON ":" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + GENERIC_ARG_LIST + L_ANGLE "<" + ASSOC_TYPE_ARG + NAME_REF + IDENT "method" + RETURN_TYPE_SYNTAX + L_PAREN "(" + DOT2 ".." + R_PAREN ")" + COLON ":" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Send" + R_ANGLE ">" + R_ANGLE ">" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_assoc_type_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_assoc_type_bound.rs new file mode 100644 index 0000000000000..8a4cf4c3a07fa --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_assoc_type_bound.rs @@ -0,0 +1 @@ +fn foo>() {} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_in_path.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_in_path.rast new file mode 100644 index 0000000000000..501dccd79db21 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_in_path.rast @@ -0,0 +1,50 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + GENERIC_PARAM_LIST + L_ANGLE "<" + TYPE_PARAM + NAME + IDENT "T" + R_ANGLE ">" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE "\n" + WHERE_CLAUSE + WHERE_KW "where" + WHITESPACE "\n " + WHERE_PRED + PATH_TYPE + PATH + PATH + PATH_SEGMENT + NAME_REF + IDENT "T" + COLON2 "::" + PATH_SEGMENT + NAME_REF + IDENT "method" + RETURN_TYPE_SYNTAX + L_PAREN "(" + DOT2 ".." 
+ R_PAREN ")" + COLON ":" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Send" + COMMA "," + WHITESPACE "\n" + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_in_path.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_in_path.rs new file mode 100644 index 0000000000000..a9b63fb01c859 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/return_type_syntax_in_path.rs @@ -0,0 +1,4 @@ +fn foo() +where + T::method(..): Send, +{} diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram index 069be2df3a440..ba7d8bacfb2a3 100644 --- a/src/tools/rust-analyzer/crates/syntax/rust.ungram +++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram @@ -38,8 +38,12 @@ PathSegment = '::'? NameRef | NameRef GenericArgList? | NameRef ParamList RetType? +| NameRef ReturnTypeSyntax | '<' Type ('as' PathType)? '>' +ReturnTypeSyntax = + '(' '..' ')' + //*************************// // Generics // @@ -59,7 +63,7 @@ TypeArg = AssocTypeArg = NameRef - (GenericArgList | ParamList RetType?)? + (GenericArgList | ParamList RetType? | ReturnTypeSyntax)? (':' TypeBoundList | ('=' Type | ConstArg)) LifetimeArg = diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs index 01886d119d675..bd8092d7f092e 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs @@ -114,6 +114,8 @@ impl AssocTypeArg { #[inline] pub fn ret_type(&self) -> Option { support::child(&self.syntax) } #[inline] + pub fn return_type_syntax(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option { support::child(&self.syntax) } #[inline] pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } @@ -1221,6 +1223,8 @@ impl PathSegment { #[inline] pub fn ret_type(&self) -> Option { support::child(&self.syntax) } #[inline] + pub fn return_type_syntax(&self) -> Option { support::child(&self.syntax) } + #[inline] pub fn ty(&self) -> Option { support::child(&self.syntax) } #[inline] pub fn coloncolon_token(&self) -> Option { support::token(&self.syntax, T![::]) } @@ -1485,6 +1489,19 @@ impl ReturnExpr { pub fn return_token(&self) -> Option { support::token(&self.syntax, T![return]) } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ReturnTypeSyntax { + pub(crate) syntax: SyntaxNode, +} +impl ReturnTypeSyntax { + #[inline] + pub fn l_paren_token(&self) -> Option { support::token(&self.syntax, T!['(']) } + #[inline] + pub fn r_paren_token(&self) -> Option { support::token(&self.syntax, T![')']) } + #[inline] + pub fn dotdot_token(&self) -> Option { support::token(&self.syntax, T![..]) } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct SelfParam { pub(crate) syntax: SyntaxNode, @@ -3697,6 +3714,20 @@ impl AstNode for ReturnExpr { #[inline] fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl AstNode for ReturnTypeSyntax { + #[inline] + fn can_cast(kind: SyntaxKind) -> bool { kind == RETURN_TYPE_SYNTAX } + #[inline] + fn cast(syntax: SyntaxNode) -> Option { + if Self::can_cast(syntax.kind()) { + Some(Self { syntax }) + } else { + None + } + } + #[inline] + fn syntax(&self) -> &SyntaxNode { &self.syntax } +} impl AstNode 
for SelfParam { #[inline] fn can_cast(kind: SyntaxKind) -> bool { kind == SELF_PARAM } @@ -6609,6 +6640,11 @@ impl std::fmt::Display for ReturnExpr { std::fmt::Display::fmt(self.syntax(), f) } } +impl std::fmt::Display for ReturnTypeSyntax { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(self.syntax(), f) + } +} impl std::fmt::Display for SelfParam { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) From e01bc0424cc0d6caa3b9108eaf103c733739976c Mon Sep 17 00:00:00 2001 From: The Miri Cronjob Bot Date: Mon, 26 Aug 2024 05:00:48 +0000 Subject: [PATCH 089/124] Preparing for merge from rustc --- src/tools/miri/rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index 1eca86baeaa21..6124cc327ad09 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -fdf61d499c8a8421ecf98e7924bb87caf43a9938 +3f121b9461cce02a703a0e7e450568849dfaa074 From 9fb611cc46661ad6bfe589222e627e54a24f4a79 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Mon, 26 Aug 2024 10:00:52 +0200 Subject: [PATCH 090/124] silence an overreaching clippy lint --- src/tools/miri/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/tools/miri/src/lib.rs b/src/tools/miri/src/lib.rs index f796e845a2399..ece393caf9a1e 100644 --- a/src/tools/miri/src/lib.rs +++ b/src/tools/miri/src/lib.rs @@ -35,6 +35,7 @@ clippy::box_default, clippy::needless_question_mark, clippy::needless_lifetimes, + clippy::too_long_first_doc_paragraph, rustc::diagnostic_outside_of_impl, // We are not implementing queries here so it's fine rustc::potential_query_instability, From 9dad25a7cb6a3f439317f2e34297f8ddf70d4fb2 Mon Sep 17 00:00:00 2001 From: Aleksei Trifonov Date: Mon, 26 Aug 2024 03:03:26 +0300 Subject: [PATCH 091/124] Show and render error lifetime args as `'_` --- .../crates/hir-ty/src/display.rs | 20 +++++++------------ .../src/handlers/extract_function.rs | 2 +- .../ide-completion/src/tests/predicate.rs | 4 ++-- .../ide-completion/src/tests/type_pos.rs | 8 ++++---- .../crates/ide/src/inlay_hints/bind_pat.rs | 2 +- 5 files changed, 15 insertions(+), 21 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index 4ef053130a8be..151cc3dc491f6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -1523,18 +1523,6 @@ fn hir_fmt_generic_arguments( None => (parameters, &[][..]), }; for generic_arg in lifetimes.iter().chain(ty_or_const) { - // FIXME: Remove this - // most of our lifetimes will be errors as we lack elision and inference - // so don't render them for now - if !cfg!(test) - && matches!( - generic_arg.lifetime(Interner), - Some(l) if ***l.interned() == LifetimeData::Error - ) - { - continue; - } - if !mem::take(&mut first) { write!(f, ", ")?; } @@ -1872,7 +1860,13 @@ impl HirDisplay for LifetimeData { LifetimeData::BoundVar(idx) => idx.hir_fmt(f), LifetimeData::InferenceVar(_) => write!(f, "_"), LifetimeData::Static => write!(f, "'static"), - LifetimeData::Error => write!(f, "'?"), + LifetimeData::Error => { + if cfg!(test) { + write!(f, "'?") + } else { + write!(f, "'_") + } + } LifetimeData::Erased => write!(f, "'"), LifetimeData::Phantom(void, _) => match *void {}, } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs index fd379cb88daaf..b6781c8e1290d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs @@ -5649,7 +5649,7 @@ fn func(i: Struct<'_, T>) { fun_name(i); } -fn $0fun_name(i: Struct) { +fn $0fun_name(i: Struct<'_, T>) { foo(i); } "#, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs index 62eb642b3bc33..46a3e97d3e92d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/predicate.rs @@ -19,7 +19,7 @@ struct Foo<'lt, T, const C: usize> where $0 {} en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo<…> Foo<{unknown}, _> + st Foo<…> Foo<'_, {unknown}, _> st Record Record st Tuple Tuple st Unit Unit @@ -92,7 +92,7 @@ struct Foo<'lt, T, const C: usize> where for<'a> $0 {} en Enum Enum ma makro!(…) macro_rules! makro md module - st Foo<…> Foo<{unknown}, _> + st Foo<…> Foo<'_, {unknown}, _> st Record Record st Tuple Tuple st Unit Unit diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs index ff38c16108791..db4ac9381cedb 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/type_pos.rs @@ -20,8 +20,8 @@ struct Foo<'lt, T, const C: usize> { en Enum Enum ma makro!(…) macro_rules! makro md module - sp Self Foo<{unknown}, _> - st Foo<…> Foo<{unknown}, _> + sp Self Foo<'_, {unknown}, _> + st Foo<…> Foo<'_, {unknown}, _> st Record Record st Tuple Tuple st Unit Unit @@ -45,8 +45,8 @@ struct Foo<'lt, T, const C: usize>(f$0); en Enum Enum ma makro!(…) macro_rules! 
makro md module - sp Self Foo<{unknown}, _> - st Foo<…> Foo<{unknown}, _> + sp Self Foo<'_, {unknown}, _> + st Foo<…> Foo<'_, {unknown}, _> st Record Record st Tuple Tuple st Unit Unit diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs index d67afe14d7c93..82b0a6ffcf135 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs @@ -342,7 +342,7 @@ fn f<'a>() { let x = S::<'static>; //^ S<'static> let y = S::<'_>; - //^ S + //^ S<'_> let z = S::<'a>; //^ S<'a> From 9d4fdc015767d2d14760e5ade31376362c586358 Mon Sep 17 00:00:00 2001 From: Aleksei Trifonov Date: Mon, 26 Aug 2024 03:06:49 +0300 Subject: [PATCH 092/124] Show lifetime args if there are only error ones --- .../rust-analyzer/crates/hir-ty/src/display.rs | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index 151cc3dc491f6..4a5eaaa72aed0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -1438,17 +1438,7 @@ fn hir_fmt_generics( let parameters_to_write = generic_args_sans_defaults(f, generic_def, parameters); - // FIXME: Remote this - // most of our lifetimes will be errors as we lack elision and inference - // so don't render them for now - let only_err_lifetimes = !cfg!(test) - && parameters_to_write.iter().all(|arg| { - matches!( - arg.data(Interner), - chalk_ir::GenericArgData::Lifetime(it) if *it.data(Interner) == LifetimeData::Error - ) - }); - if !parameters_to_write.is_empty() && !only_err_lifetimes { + if !parameters_to_write.is_empty() { write!(f, "<")?; hir_fmt_generic_arguments(f, parameters_to_write, self_)?; write!(f, ">")?; @@ -1881,7 +1871,7 @@ impl HirDisplay for DomainGoal { wc.hir_fmt(f)?; write!(f, ")")?; } - _ => write!(f, "?")?, + _ => write!(f, "_")?, } Ok(()) } From 65e9f8ba7f53fa0697b5d1456aefef6f5560335d Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Mon, 26 Aug 2024 19:01:02 +0300 Subject: [PATCH 093/124] Fix "Unwrap block" assist with block modifiers The assist just assumes the `{` will be the first character, which led to strange outputs such as `nsafe {`. --- .../ide-assists/src/handlers/unwrap_block.rs | 58 ++++++++++++++++++- 1 file changed, 55 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs index de801279a0e1f..f3e7f5f416769 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs @@ -128,10 +128,12 @@ fn update_expr_string_without_newline(expr_string: String) -> String { } fn update_expr_string_with_pat(expr_str: String, whitespace_pat: &[char]) -> String { - // Remove leading whitespace, index [1..] to remove the leading '{', + // Remove leading whitespace, index to remove the leading '{', // then continue to remove leading whitespace. - let expr_str = - expr_str.trim_start_matches(whitespace_pat)[1..].trim_start_matches(whitespace_pat); + // We cannot assume the `{` is the first character because there are block modifiers + // (`unsafe`, `async` etc.). 
+ let after_open_brace_index = expr_str.find('{').map_or(0, |it| it + 1); + let expr_str = expr_str[after_open_brace_index..].trim_start_matches(whitespace_pat); // Remove trailing whitespace, index [..expr_str.len() - 1] to remove the trailing '}', // then continue to remove trailing whitespace. @@ -824,6 +826,56 @@ fn main() { let a = 1; let x = foo; } +"#, + ); + } + + #[test] + fn unwrap_block_with_modifiers() { + // https://github.com/rust-lang/rust-analyzer/issues/17964 + check_assist( + unwrap_block, + r#" +fn main() { + unsafe $0{ + bar; + } +} +"#, + r#" +fn main() { + bar; +} +"#, + ); + check_assist( + unwrap_block, + r#" +fn main() { + async move $0{ + bar; + } +} +"#, + r#" +fn main() { + bar; +} +"#, + ); + check_assist( + unwrap_block, + r#" +fn main() { + try $0{ + bar; + } +} +"#, + r#" +fn main() { + bar; +} "#, ); } From 25e5ac48af02e56f5a25795b15913423638234b9 Mon Sep 17 00:00:00 2001 From: Johannes Hostert Date: Mon, 26 Aug 2024 22:46:14 +0200 Subject: [PATCH 094/124] Disable tree traversal optimization that is wrong due to lazy nodes. See #3846 for more information. --- src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs index d51e1109ed4e2..5fbb7b927768d 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs @@ -150,7 +150,10 @@ impl LocationState { // the propagation can be skipped next time. // It is a performance loss not to call this function when a foreign access occurs. // It is unsound not to call this function when a child access occurs. - fn skip_if_known_noop( + // FIXME: This optimization is wrong, and is currently disabled (by ignoring the + // result returned here). Since we presumably want an optimization like this, + // we should add it back. See #3864 for more information. + fn update_last_foreign_access( &mut self, access_kind: AccessKind, rel_pos: AccessRelatedness, @@ -613,10 +616,7 @@ impl<'tcx> Tree { let old_state = perm.or_insert(LocationState::new_uninit(node.default_initial_perm)); - match old_state.skip_if_known_noop(access_kind, rel_pos) { - ContinueTraversal::SkipChildren => return Ok(ContinueTraversal::SkipChildren), - _ => {} - } + old_state.update_last_foreign_access(access_kind, rel_pos); let protected = global.borrow().protected_tags.contains_key(&node.tag); let transition = old_state.perform_access(access_kind, rel_pos, protected)?; From 0763a3afc8f7d71cfe3b62c76fd40912f6a6c9ed Mon Sep 17 00:00:00 2001 From: Jubilee Young Date: Mon, 26 Aug 2024 20:11:58 -0700 Subject: [PATCH 095/124] Bump backtrace to rust-lang/backtrace@fc37b22 It should be 0.3.74~ish. This should help with backtraces on Android, QNX NTO 7.0, and Windows. 
--- library/backtrace | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/backtrace b/library/backtrace index 72265bea21089..fc37b22dc8a38 160000 --- a/library/backtrace +++ b/library/backtrace @@ -1 +1 @@ -Subproject commit 72265bea210891ae47bbe6d4f17b493ef0606619 +Subproject commit fc37b22dc8a384d93f6b7b4817266eec6433875e From 65d25fe17fcb64776e72b7d54c7376cf7208be94 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 27 Aug 2024 08:19:09 +0200 Subject: [PATCH 096/124] Revert "feat: Implement `module_path` macro" --- .../rust-analyzer/crates/base-db/src/input.rs | 1 - .../rust-analyzer/crates/hir-def/src/data.rs | 3 +- .../crates/hir-def/src/expander.rs | 9 +-- .../rust-analyzer/crates/hir-def/src/lib.rs | 44 ++++++------- .../hir-def/src/macro_expansion_tests/mod.rs | 15 ++--- .../crates/hir-def/src/nameres.rs | 27 +------- .../crates/hir-def/src/nameres/collector.rs | 27 ++------ .../crates/hir-expand/src/builtin.rs | 2 +- .../crates/hir-expand/src/builtin/fn_macro.rs | 30 ++++----- .../crates/hir-expand/src/builtin/quote.rs | 4 +- .../crates/hir-expand/src/eager.rs | 45 ------------- .../rust-analyzer/crates/hir/src/semantics.rs | 25 ++----- .../crates/hir/src/source_analyzer.rs | 21 ++---- .../crates/ide/src/goto_type_definition.rs | 8 +-- .../crates/ide/src/hover/tests.rs | 65 ------------------- .../rust-analyzer/crates/ide/src/status.rs | 1 - .../crates/intern/src/symbol/symbols.rs | 1 - .../crates/rust-analyzer/src/cli/lsif.rs | 8 +-- .../crates/rust-analyzer/src/cli/scip.rs | 2 + .../crates/test-utils/src/minicore.rs | 11 ---- 20 files changed, 71 insertions(+), 278 deletions(-) diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index b67f4c7a0cc4b..3616fa9fd8684 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -272,7 +272,6 @@ impl ReleaseChannel { } } -#[non_exhaustive] #[derive(Debug, Clone, PartialEq, Eq)] pub struct CrateData { pub root_file_id: FileId, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs index c62f2f8733832..d17ebd7ff920b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs @@ -748,9 +748,8 @@ impl<'a> AssocItemCollector<'a> { &AstIdWithPath::new(file_id, ast_id, Clone::clone(path)), ctxt, expand_to, - self.expander.module, + self.expander.krate(), resolver, - |module| module.def_map(self.db).path_for_module(self.db, module), ) { Ok(Some(call_id)) => { let res = diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs index 57bd54ed17c57..6d8b4445f75bc 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs @@ -69,12 +69,9 @@ impl Expander { let result = self.within_limit(db, |this| { let macro_call = this.in_file(¯o_call); - match macro_call.as_call_id( - db.upcast(), - this.module, - |path| resolver(path).map(|it| db.macro_def(it)), - |module| this.module.def_map(db).path_for_module(db, module), - ) { + match macro_call.as_call_id_with_errors(db.upcast(), this.module.krate(), |path| { + resolver(path).map(|it| db.macro_def(it)) + }) { Ok(call_id) => call_id, Err(resolve_err) => { unresolved_macro_err = Some(resolve_err); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs 
b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index eb235f9b9f6a3..0213bd904b68c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -77,7 +77,7 @@ use base_db::{ use hir_expand::{ builtin::{BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, - eager::{expand_eager_macro_input, expand_module_path_as_eager}, + eager::expand_eager_macro_input, impl_intern_lookup, name::Name, proc_macro::{CustomProcMacroExpander, ProcMacroKind}, @@ -1400,19 +1400,26 @@ pub trait AsMacroCall { fn as_call_id( &self, db: &dyn ExpandDatabase, - module: ModuleId, + krate: CrateId, + resolver: impl Fn(&path::ModPath) -> Option + Copy, + ) -> Option { + self.as_call_id_with_errors(db, krate, resolver).ok()?.value + } + + fn as_call_id_with_errors( + &self, + db: &dyn ExpandDatabase, + krate: CrateId, resolver: impl Fn(&path::ModPath) -> Option + Copy, - mod_path: impl FnOnce(ModuleId) -> String, ) -> Result>, UnresolvedMacro>; } impl AsMacroCall for InFile<&ast::MacroCall> { - fn as_call_id( + fn as_call_id_with_errors( &self, db: &dyn ExpandDatabase, - module: ModuleId, + krate: CrateId, resolver: impl Fn(&path::ModPath) -> Option + Copy, - mod_path: impl FnOnce(ModuleId) -> String, ) -> Result>, UnresolvedMacro> { let expands_to = hir_expand::ExpandTo::from_call_site(self.value); let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value)); @@ -1439,10 +1446,9 @@ impl AsMacroCall for InFile<&ast::MacroCall> { &path, call_site.ctx, expands_to, - module, + krate, resolver, resolver, - mod_path, ) } } @@ -1469,9 +1475,8 @@ fn macro_call_as_call_id( call: &AstIdWithPath, call_site: SyntaxContextId, expand_to: ExpandTo, - module: ModuleId, + krate: CrateId, resolver: impl Fn(&path::ModPath) -> Option + Copy, - mod_path: impl FnOnce(ModuleId) -> String, ) -> Result, UnresolvedMacro> { macro_call_as_call_id_with_eager( db, @@ -1479,10 +1484,9 @@ fn macro_call_as_call_id( &call.path, call_site, expand_to, - module, + krate, resolver, resolver, - mod_path, ) .map(|res| res.value) } @@ -1493,26 +1497,16 @@ fn macro_call_as_call_id_with_eager( path: &path::ModPath, call_site: SyntaxContextId, expand_to: ExpandTo, - module: ModuleId, + krate: CrateId, resolver: impl FnOnce(&path::ModPath) -> Option, eager_resolver: impl Fn(&path::ModPath) -> Option, - mod_path: impl FnOnce(ModuleId) -> String, ) -> Result>, UnresolvedMacro> { let def = resolver(path).ok_or_else(|| UnresolvedMacro { path: path.clone() })?; let res = match def.kind { - MacroDefKind::BuiltInEager(_, EagerExpander::ModulePath) => expand_module_path_as_eager( - db, - module.krate, - mod_path(module), - &ast_id.to_node(db), - ast_id, - def, - call_site, - ), MacroDefKind::BuiltInEager(..) 
=> expand_eager_macro_input( db, - module.krate, + krate, &ast_id.to_node(db), ast_id, def, @@ -1522,7 +1516,7 @@ fn macro_call_as_call_id_with_eager( _ if def.is_fn_like() => ExpandResult { value: Some(def.make_call( db, - module.krate, + krate, MacroCallKind::FnLike { ast_id, expand_to, eager: None }, call_site, )), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index 0d232deb2ced1..7f761192517c4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -95,16 +95,11 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) { let macro_call = InFile::new(source.file_id, ¯o_call); let res = macro_call - .as_call_id( - &db, - resolver.module(), - |path| { - resolver - .resolve_path_as_macro(&db, path, Some(MacroSubNs::Bang)) - .map(|(it, _)| db.macro_def(it)) - }, - |module| def_map.path_for_module(&db, module), - ) + .as_call_id_with_errors(&db, krate, |path| { + resolver + .resolve_path_as_macro(&db, path, Some(MacroSubNs::Bang)) + .map(|(it, _)| db.macro_def(it)) + }) .unwrap(); let macro_call_id = res.value.unwrap(); let macro_file = MacroFileId { macro_call_id }; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index db45f6bed6679..11601c683e1ea 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -63,7 +63,7 @@ use base_db::CrateId; use hir_expand::{ name::Name, proc_macro::ProcMacroKind, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, }; -use intern::{sym, Symbol}; +use intern::Symbol; use itertools::Itertools; use la_arena::Arena; use rustc_hash::{FxHashMap, FxHashSet}; @@ -139,7 +139,6 @@ pub struct DefMap { /// Data that belongs to a crate which is shared between a crate's def map and all its block def maps. #[derive(Clone, Debug, PartialEq, Eq)] struct DefMapCrateData { - crate_name: Option, /// The extern prelude which contains all root modules of external crates that are in scope. 
extern_prelude: FxIndexMap)>, @@ -165,7 +164,6 @@ struct DefMapCrateData { impl DefMapCrateData { fn new(edition: Edition) -> Self { Self { - crate_name: None, extern_prelude: FxIndexMap::default(), exported_derives: FxHashMap::default(), fn_proc_macro_mapping: FxHashMap::default(), @@ -188,7 +186,6 @@ impl DefMapCrateData { registered_attrs, registered_tools, unstable_features, - crate_name: _, rustc_coherence_is_core: _, no_core: _, no_std: _, @@ -446,28 +443,6 @@ impl DefMap { self.modules.iter() } - pub fn path_for_module(&self, db: &dyn DefDatabase, mut module: ModuleId) -> String { - debug_assert!(module.krate == self.krate && module.block == self.block.map(|b| b.block)); - let mut parts = vec![]; - if let Some(name) = module.name(db) { - parts.push(name.symbol().clone()); - } - while let Some(parent) = module.def_map(db).containing_module(module.local_id) { - module = parent; - if let Some(name) = module.name(db) { - parts.push(name.symbol().clone()); - } - if parts.len() > 10 { - break; - } - } - parts.push(match &self.data.crate_name { - Some(name) => name.clone(), - None => sym::crate_.clone(), - }); - parts.into_iter().rev().format("::").to_string() - } - pub fn derive_helpers_in_scope( &self, id: AstId, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 30df6c206dfb4..22eb5a174d393 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -247,23 +247,18 @@ impl DefCollector<'_> { let _p = tracing::info_span!("seed_with_top_level").entered(); let crate_graph = self.db.crate_graph(); - let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap(); - crate_data.crate_name = crate_graph[self.def_map.krate] - .display_name - .as_ref() - .map(|it| it.crate_name().symbol().clone()); - let file_id = crate_graph[self.def_map.krate].root_file_id(); let item_tree = self.db.file_item_tree(file_id.into()); let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate); + let crate_data = Arc::get_mut(&mut self.def_map.data).unwrap(); - let mut crate_cged_out = false; + let mut process = true; // Process other crate-level attributes. 
for attr in &*attrs { if let Some(cfg) = attr.cfg() { if self.cfg_options.check(&cfg) == Some(false) { - crate_cged_out = true; + process = false; break; } } @@ -277,11 +272,6 @@ impl DefCollector<'_> { } } } - () if *attr_name == sym::crate_name.clone() => { - if let Some(name) = attr.string_value().cloned() { - crate_data.crate_name = Some(name); - } - } () if *attr_name == sym::crate_type.clone() => { if attr.string_value() == Some(&sym::proc_dash_macro) { self.is_proc_macro = true; @@ -347,7 +337,7 @@ impl DefCollector<'_> { self.inject_prelude(); - if crate_cged_out { + if !process { return; } @@ -1217,9 +1207,8 @@ impl DefCollector<'_> { ast_id, *call_site, *expand_to, - self.def_map.module_id(directive.module_id), + self.def_map.krate, resolver_def_id, - |module| self.def_map.path_for_module(self.db, module), ); if let Ok(Some(call_id)) = call_id { self.def_map.modules[directive.module_id] @@ -1497,7 +1486,7 @@ impl DefCollector<'_> { ast_id, *call_site, *expand_to, - self.def_map.module_id(directive.module_id), + self.def_map.krate, |path| { let resolved_res = self.def_map.resolve_path_fp_with_macro( self.db, @@ -1509,7 +1498,6 @@ impl DefCollector<'_> { ); resolved_res.resolved_def.take_macros().map(|it| self.db.macro_def(it)) }, - |module| self.def_map.path_for_module(self.db, module), ); if let Err(UnresolvedMacro { path }) = macro_call_as_call_id { self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call( @@ -2363,7 +2351,7 @@ impl ModCollector<'_, '_> { &ast_id.path, ctxt, expand_to, - self.def_collector.def_map.module_id(self.module_id), + self.def_collector.def_map.krate, |path| { path.as_ident().and_then(|name| { let def_map = &self.def_collector.def_map; @@ -2393,7 +2381,6 @@ impl ModCollector<'_, '_> { ); resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it)) }, - |module| self.def_collector.def_map.path_for_module(self.def_collector.db, module), ) { // FIXME: if there were errors, this might've been in the eager expansion from an // unresolved macro, so we need to push this into late macro resolution. see fixme above diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs index 7b9cb4e793c91..252430e4e954a 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin.rs @@ -1,6 +1,6 @@ //! Builtin macros and attributes #[macro_use] -pub(crate) mod quote; +mod quote; mod attr_macro; mod derive_macro; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs index e4d09ddb3abcc..795d9b14df961 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs @@ -116,6 +116,7 @@ register_builtin! { (column, Column) => line_expand, (file, File) => file_expand, (line, Line) => line_expand, + (module_path, ModulePath) => module_path_expand, (assert, Assert) => assert_expand, (stringify, Stringify) => stringify_expand, (llvm_asm, LlvmAsm) => asm_expand, @@ -141,10 +142,7 @@ register_builtin! { (include_bytes, IncludeBytes) => include_bytes_expand, (include_str, IncludeStr) => include_str_expand, (env, Env) => env_expand, - (option_env, OptionEnv) => option_env_expand, - // This isn't really eager, we have no inputs, but we abuse the fact how eager macros are - // handled in r-a to be able to thread the module path through. 
- (module_path, ModulePath) => module_path_expand + (option_env, OptionEnv) => option_env_expand } fn mk_pound(span: Span) -> tt::Subtree { @@ -159,6 +157,18 @@ fn mk_pound(span: Span) -> tt::Subtree { ) } +fn module_path_expand( + _db: &dyn ExpandDatabase, + _id: MacroCallId, + _tt: &tt::Subtree, + span: Span, +) -> ExpandResult { + // Just return a dummy result. + ExpandResult::ok(quote! {span => + "module::path" + }) +} + fn line_expand( _db: &dyn ExpandDatabase, _id: MacroCallId, @@ -894,18 +904,6 @@ fn option_env_expand( ExpandResult::ok(expanded) } -fn module_path_expand( - _db: &dyn ExpandDatabase, - _id: MacroCallId, - tt: &tt::Subtree, - span: Span, -) -> ExpandResult { - // Note: The actual implementation of this is in crates\hir-expand\src\eager.rs - ExpandResult::ok(quote! {span => - #tt - }) -} - fn quote_expand( _db: &dyn ExpandDatabase, _arg_id: MacroCallId, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs index 0c895056c2255..5c33f817f9e44 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs @@ -128,7 +128,7 @@ macro_rules! quote_impl__ { } }; } -pub(crate) use quote_impl__ as __quote; +pub(super) use quote_impl__ as __quote; /// FIXME: /// It probably should implement in proc-macro @@ -137,7 +137,7 @@ macro_rules! quote_impl { $crate::builtin::quote::IntoTt::to_subtree($crate::builtin::quote::__quote!($span $($tt)*), $span) } } -pub(crate) use quote_impl as quote; +pub(super) use quote_impl as quote; pub(crate) trait IntoTt { fn to_subtree(self, span: Span) -> crate::tt::Subtree; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs index 534d5663860d6..1dadfe2ba99bb 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs @@ -32,51 +32,6 @@ use crate::{ MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, }; -pub fn expand_module_path_as_eager( - db: &dyn ExpandDatabase, - krate: CrateId, - mod_path: String, - macro_call: &ast::MacroCall, - ast_id: AstId, - def: MacroDefId, - call_site: SyntaxContextId, -) -> ExpandResult> { - let expand_to = ExpandTo::from_call_site(macro_call); - - // Note: - // When `lazy_expand` is called, its *parent* file must already exist. - // Here we store an eager macro id for the argument expanded subtree - // for that purpose. - let arg_id = MacroCallLoc { - def, - krate, - kind: MacroCallKind::FnLike { ast_id, expand_to: ExpandTo::Expr, eager: None }, - ctxt: call_site, - } - .intern(db); - #[allow(deprecated)] // builtin eager macros are never derives - let (_, _, span) = db.macro_arg(arg_id); - let subtree = crate::builtin::quote::quote! 
{span => #mod_path}; - - let loc = MacroCallLoc { - def, - krate, - kind: MacroCallKind::FnLike { - ast_id, - expand_to, - eager: Some(Arc::new(EagerCallInfo { - arg: Arc::new(subtree), - arg_id, - error: None, - span, - })), - }, - ctxt: call_site, - }; - - ExpandResult { value: Some(loc.intern(db)), err: None } -} - pub fn expand_eager_macro_input( db: &dyn ExpandDatabase, krate: CrateId, diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index 1d7ab5f6b7f65..763f53031e4c5 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -365,6 +365,7 @@ impl<'db> SemanticsImpl<'db> { _, BuiltinFnLikeExpander::Column | BuiltinFnLikeExpander::File + | BuiltinFnLikeExpander::ModulePath | BuiltinFnLikeExpander::Asm | BuiltinFnLikeExpander::LlvmAsm | BuiltinFnLikeExpander::GlobalAsm @@ -482,26 +483,10 @@ impl<'db> SemanticsImpl<'db> { let SourceAnalyzer { file_id, resolver, .. } = self.analyze_no_infer(actual_macro_call.syntax())?; let macro_call = InFile::new(file_id, actual_macro_call); - let macro_call_id = macro_call - .as_call_id( - self.db.upcast(), - resolver.module(), - |path| { - resolver.resolve_path_as_macro_def( - self.db.upcast(), - path, - Some(MacroSubNs::Bang), - ) - }, - |module| { - resolver - .module() - .def_map(self.db.upcast()) - .path_for_module(self.db.upcast(), module) - }, - ) - .ok()? - .value?; + let krate = resolver.krate(); + let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { + resolver.resolve_path_as_macro_def(self.db.upcast(), path, Some(MacroSubNs::Bang)) + })?; hir_expand::db::expand_speculative( self.db.upcast(), macro_call_id, diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index fe1d5b5764d35..be0116862b9cf 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -839,25 +839,12 @@ impl SourceAnalyzer { db: &dyn HirDatabase, macro_call: InFile<&ast::MacroCall>, ) -> Option { + let krate = self.resolver.krate(); // FIXME: This causes us to parse, generally this is the wrong approach for resolving a // macro call to a macro call id! - let macro_call_id = macro_call - .as_call_id( - db.upcast(), - self.resolver.module(), - |path| { - self.resolver.resolve_path_as_macro_def( - db.upcast(), - path, - Some(MacroSubNs::Bang), - ) - }, - |module| { - self.resolver.module().def_map(db.upcast()).path_for_module(db.upcast(), module) - }, - ) - .ok()? - .value?; + let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| { + self.resolver.resolve_path_as_macro_def(db.upcast(), path, Some(MacroSubNs::Bang)) + })?; // why the 64? 
Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64) } diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs index 9b073427d7956..ca04b7bb5a9b3 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs @@ -24,11 +24,9 @@ pub(crate) fn goto_type_definition( let file: ast::SourceFile = sema.parse_guess_edition(file_id); let token: SyntaxToken = pick_best_token(file.syntax().token_at_offset(offset), |kind| match kind { - IDENT | INT_NUMBER | T![self] => 3, - // operators - T!['('] | T!['['] | T!['{'] | T![')'] | T![']'] | T!['}'] | T![!] | T![?] => 2, - kind if !kind.is_trivia() => 1, - _ => 0, + IDENT | INT_NUMBER | T![self] => 2, + kind if kind.is_trivia() => 0, + _ => 1, })?; let mut res = Vec::new(); diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 08709d3cc2eb3..9585bdbe4c545 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -8702,68 +8702,3 @@ fn foo() { "#]], ); } - -#[test] -fn module_path_macro() { - check( - r##" -//- minicore: module_path - -const C$0: &'static str = module_path!(); -"##, - expect![[r#" - *C* - - ```rust - test - ``` - - ```rust - const C: &'static str = "test" - ``` - "#]], - ); - check( - r##" -//- minicore: module_path - -mod foo { - const C$0: &'static str = module_path!(); -} -"##, - expect![[r#" - *C* - - ```rust - test::foo - ``` - - ```rust - const C: &'static str = "test::foo" - ``` - "#]], - ); - check( - r##" -//- minicore: module_path -mod baz { - const _: () = { - mod bar { - const C$0: &'static str = module_path!(); - } - } -} -"##, - expect![[r#" - *C* - - ```rust - test::bar - ``` - - ```rust - const C: &'static str = "test::baz::bar" - ``` - "#]], - ); -} diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs index ce21e3cad5baf..67d6932da962d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/status.rs +++ b/src/tools/rust-analyzer/crates/ide/src/status.rs @@ -69,7 +69,6 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { dependencies, origin, is_proc_macro, - .. } = &crate_graph[crate_id]; format_to!( buf, diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs index 867d9ea902cfa..7eb8e4a5e2e52 100644 --- a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs +++ b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs @@ -177,7 +177,6 @@ define_symbols! 
{ coroutine_state, coroutine, count, - crate_name, crate_type, CStr, debug_assertions, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs index f2d21208d51f6..89fe712ced020 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs @@ -22,7 +22,7 @@ use crate::{ }; struct LsifManager<'a> { - id_counter: i32, + count: i32, token_map: FxHashMap, range_map: FxHashMap, file_map: FxHashMap, @@ -44,7 +44,7 @@ impl From for lsp_types::NumberOrString { impl LsifManager<'_> { fn new<'a>(analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs) -> LsifManager<'a> { LsifManager { - id_counter: 0, + count: 0, token_map: FxHashMap::default(), range_map: FxHashMap::default(), file_map: FxHashMap::default(), @@ -56,9 +56,9 @@ impl LsifManager<'_> { } fn add(&mut self, data: lsif::Element) -> Id { - let id = Id(self.id_counter); + let id = Id(self.count); self.emit(&serde_json::to_string(&lsif::Entry { id: id.into(), data }).unwrap()); - self.id_counter += 1; + self.count += 1; id } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index f8d2608a31cae..a8a02712fe291 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -48,6 +48,7 @@ impl flags::Scip { let error_sink; (config, error_sink, _) = config.apply_change(change); + // FIXME @alibektas : What happens to errors without logging? error!(?error_sink, "Config Error(s)"); } let cargo_config = config.cargo(); @@ -114,6 +115,7 @@ impl flags::Scip { tokens.into_iter().for_each(|(text_range, id)| { let token = si.tokens.get(id).unwrap(); + let range = text_range_to_scip_range(&line_index, text_range); let symbol = tokens_to_symbol .entry(id) diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs index 754042cf883de..7dbc498ead196 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs @@ -41,7 +41,6 @@ //! iterator: option //! iterators: iterator, fn //! manually_drop: drop -//! module_path: //! non_null: //! non_zero: //! option: panic @@ -1437,16 +1436,6 @@ mod panicking { #[macro_use] mod macros { - // region:module_path - #[macro_export] - #[rustc_builtin_macro] - macro_rules! module_path { - ($($arg:tt)*) => { - /* compiler built-in */ - }; - } - // endregion:module_path - // region:panic #[macro_export] #[rustc_builtin_macro(core_panic)] From 6f8fb3c15d46c36e6d8103d5107a15b967ba5e17 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 27 Aug 2024 13:38:29 +0200 Subject: [PATCH 097/124] tree_borrows test: ensure we can actually read the variable --- src/tools/miri/tests/pass/tree_borrows/sb_fails.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/tools/miri/tests/pass/tree_borrows/sb_fails.rs b/src/tools/miri/tests/pass/tree_borrows/sb_fails.rs index 1bae30bde60c4..7da20e76229db 100644 --- a/src/tools/miri/tests/pass/tree_borrows/sb_fails.rs +++ b/src/tools/miri/tests/pass/tree_borrows/sb_fails.rs @@ -67,10 +67,11 @@ mod static_memory_modification { #[allow(mutable_transmutes)] pub fn main() { - let _x = unsafe { + let x = unsafe { std::mem::transmute::<&usize, &mut usize>(&X) // In SB this mutable reborrow fails. 
// But in TB we are allowed to transmute as long as we don't write. }; + assert_eq!(*&*x, 5); } } From df4580b5c109b43dadd315c01c0ff190ed69c259 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 27 Aug 2024 12:30:04 +0200 Subject: [PATCH 098/124] Expand proc-macros in workspace root, not package root --- .../rust-analyzer/crates/hir-ty/src/display.rs | 16 +++++++++------- .../crates/proc-macro-api/src/lib.rs | 3 ++- .../crates/project-model/src/cargo_workspace.rs | 4 ++++ .../crates/project-model/src/env.rs | 14 ++++++++++++-- .../crates/project-model/src/tests.rs | 2 +- .../crates/project-model/src/workspace.rs | 5 ++++- .../crates/rust-analyzer/tests/crate_graph.rs | 4 ++-- 7 files changed, 34 insertions(+), 14 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index 4ef053130a8be..2c1c6d108b932 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -34,7 +34,7 @@ use rustc_apfloat::{ }; use smallvec::SmallVec; use span::Edition; -use stdx::{never, IsNoneOr}; +use stdx::never; use triomphe::Arc; use crate::{ @@ -1489,12 +1489,14 @@ fn generic_args_sans_defaults<'ga>( } // otherwise, if the arg is equal to the param default, hide it (unless the // default is an error which can happen for the trait Self type) - #[allow(unstable_name_collisions)] - default_parameters.get(i).is_none_or(|default_parameter| { - // !is_err(default_parameter.skip_binders()) - // && - arg != &default_parameter.clone().substitute(Interner, ¶meters) - }) + match default_parameters.get(i) { + None => true, + Some(default_parameter) => { + // !is_err(default_parameter.skip_binders()) + // && + arg != &default_parameter.clone().substitute(Interner, ¶meters) + } + } }; let mut default_from = 0; for (i, parameter) in parameters.iter().enumerate() { diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs index 54c1475b8b1c8..d50a3cdbf72da 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs @@ -154,7 +154,8 @@ impl ProcMacro { mixed_site: Span, ) -> Result, PanicMessage>, ServerError> { let version = self.process.version(); - let current_dir = env.get("CARGO_MANIFEST_DIR"); + let current_dir = + env.get("CARGO_RUSTC_CURRENT_DIR").or_else(|| env.get("CARGO_MANIFEST_DIR")); let mut span_data_table = SpanDataIndexMap::default(); let def_site = span_data_table.insert_full(def_site).0; diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index 492bc9a9255c6..2f8612faf30ac 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -243,6 +243,10 @@ impl TargetKind { pub fn is_executable(self) -> bool { matches!(self, TargetKind::Bin | TargetKind::Example) } + + pub fn is_proc_macro(self) -> bool { + matches!(self, TargetKind::Lib { is_proc_macro: true }) + } } // Deserialize helper for the cargo metadata diff --git a/src/tools/rust-analyzer/crates/project-model/src/env.rs b/src/tools/rust-analyzer/crates/project-model/src/env.rs index ac7246acc592f..ec0f94b8f013e 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/env.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/env.rs @@ -3,7 +3,7 @@ use base_db::Env; 
use rustc_hash::FxHashMap; use toolchain::Tool; -use crate::{utf8_stdout, ManifestPath, PackageData, Sysroot, TargetKind}; +use crate::{utf8_stdout, CargoWorkspace, ManifestPath, PackageData, Sysroot, TargetKind}; /// Recreates the compile-time environment variables that Cargo sets. /// @@ -50,13 +50,23 @@ pub(crate) fn inject_cargo_env(env: &mut Env) { env.set("CARGO", Tool::Cargo.path().to_string()); } -pub(crate) fn inject_rustc_tool_env(env: &mut Env, cargo_name: &str, kind: TargetKind) { +pub(crate) fn inject_rustc_tool_env( + env: &mut Env, + cargo: &CargoWorkspace, + cargo_name: &str, + kind: TargetKind, +) { _ = kind; // FIXME // if kind.is_executable() { // env.set("CARGO_BIN_NAME", cargo_name); // } env.set("CARGO_CRATE_NAME", cargo_name.replace('-', "_")); + // NOTE: Technically we should set this for all crates, but that will worsen the deduplication + // logic so for now just keeping it proc-macros ought to be fine. + if kind.is_proc_macro() { + env.set("CARGO_RUSTC_CURRENT_DIR", cargo.manifest_path().to_string()); + } } pub(crate) fn cargo_config_env( diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index e3bc81e1963d9..568554aa12e68 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -270,7 +270,7 @@ fn crate_graph_dedup() { assert_eq!(regex_crate_graph.iter().count(), 60); crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |(_, a), (_, b)| a == b); - assert_eq!(crate_graph.iter().count(), 118); + assert_eq!(crate_graph.iter().count(), 119); } #[test] diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 33ba7f9688d6b..fa2c7f1c5af17 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -1020,6 +1020,7 @@ fn cargo_to_crate_graph( let crate_id = add_target_crate_root( crate_graph, proc_macros, + cargo, pkg_data, build_data, cfg_options.clone(), @@ -1239,6 +1240,7 @@ fn handle_rustc_crates( let crate_id = add_target_crate_root( crate_graph, proc_macros, + rustc_workspace, &rustc_workspace[pkg], build_scripts.get_output(pkg), cfg_options.clone(), @@ -1298,6 +1300,7 @@ fn handle_rustc_crates( fn add_target_crate_root( crate_graph: &mut CrateGraph, proc_macros: &mut ProcMacroPaths, + cargo: &CargoWorkspace, pkg: &PackageData, build_data: Option<&BuildScriptOutput>, cfg_options: CfgOptions, @@ -1331,7 +1334,7 @@ fn add_target_crate_root( let mut env = Env::default(); inject_cargo_package_env(&mut env, pkg); inject_cargo_env(&mut env); - inject_rustc_tool_env(&mut env, cargo_name, kind); + inject_rustc_tool_env(&mut env, cargo, cargo_name, kind); if let Some(envs) = build_data.map(|it| &it.envs) { for (k, v) in envs { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs index b8a82fd6a72ee..eee40cd280b02 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs @@ -93,7 +93,7 @@ fn test_deduplicate_origin_dev() { } } - assert!(crates_named_p2.len() == 1); + assert_eq!(crates_named_p2.len(), 1); let p2 = crates_named_p2[0]; assert!(p2.origin.is_local()); } @@ -119,7 +119,7 @@ fn test_deduplicate_origin_dev_rev() { } } - 
assert!(crates_named_p2.len() == 1); + assert_eq!(crates_named_p2.len(), 1); let p2 = crates_named_p2[0]; assert!(p2.origin.is_local()); } From b592256bf0359c90dc24ee58aa06369b3730dfcc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 27 Aug 2024 14:52:34 +0300 Subject: [PATCH 099/124] Drop Apache license appendices --- src/tools/rust-analyzer/LICENSE-APACHE | 25 ------------------- .../rust-analyzer/crates/salsa/LICENSE-APACHE | 25 ------------------- 2 files changed, 50 deletions(-) diff --git a/src/tools/rust-analyzer/LICENSE-APACHE b/src/tools/rust-analyzer/LICENSE-APACHE index 16fe87b06e802..1b5ec8b78e237 100644 --- a/src/tools/rust-analyzer/LICENSE-APACHE +++ b/src/tools/rust-analyzer/LICENSE-APACHE @@ -174,28 +174,3 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/src/tools/rust-analyzer/crates/salsa/LICENSE-APACHE b/src/tools/rust-analyzer/crates/salsa/LICENSE-APACHE index 16fe87b06e802..1b5ec8b78e237 100644 --- a/src/tools/rust-analyzer/crates/salsa/LICENSE-APACHE +++ b/src/tools/rust-analyzer/crates/salsa/LICENSE-APACHE @@ -174,28 +174,3 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-See the License for the specific language governing permissions and -limitations under the License. From 664640f57811864c615d5c91c238af967899417e Mon Sep 17 00:00:00 2001 From: Johannes Hostert Date: Tue, 27 Aug 2024 14:12:05 +0200 Subject: [PATCH 100/124] Add testcase for #3846 --- .../repeated_foreign_read_lazy_conflicted.rs | 23 ++++++++++++++ ...peated_foreign_read_lazy_conflicted.stderr | 31 +++++++++++++++++++ 2 files changed, 54 insertions(+) create mode 100644 src/tools/miri/tests/fail/tree_borrows/repeated_foreign_read_lazy_conflicted.rs create mode 100644 src/tools/miri/tests/fail/tree_borrows/repeated_foreign_read_lazy_conflicted.stderr diff --git a/src/tools/miri/tests/fail/tree_borrows/repeated_foreign_read_lazy_conflicted.rs b/src/tools/miri/tests/fail/tree_borrows/repeated_foreign_read_lazy_conflicted.rs new file mode 100644 index 0000000000000..36b47a33b181e --- /dev/null +++ b/src/tools/miri/tests/fail/tree_borrows/repeated_foreign_read_lazy_conflicted.rs @@ -0,0 +1,23 @@ +//@compile-flags: -Zmiri-tree-borrows + +use std::ptr::addr_of_mut; + +fn do_something(_: u8) {} + +unsafe fn access_after_sub_1(x: &mut u8, orig_ptr: *mut u8) { + // causes a second access, which should make the lazy part of `x` be `Reserved {conflicted: true}` + do_something(*orig_ptr); + // read from the conflicted pointer + *(x as *mut u8).byte_sub(1) = 42; //~ ERROR: /write access through .* is forbidden/ +} + +pub fn main() { + unsafe { + let mut alloc = [0u8, 0u8]; + let orig_ptr = addr_of_mut!(alloc) as *mut u8; + let foo = &mut *orig_ptr; + // cause a foreign read access to foo + do_something(alloc[0]); + access_after_sub_1(&mut *(foo as *mut u8).byte_add(1), orig_ptr); + } +} diff --git a/src/tools/miri/tests/fail/tree_borrows/repeated_foreign_read_lazy_conflicted.stderr b/src/tools/miri/tests/fail/tree_borrows/repeated_foreign_read_lazy_conflicted.stderr new file mode 100644 index 0000000000000..963e8e5eca9f6 --- /dev/null +++ b/src/tools/miri/tests/fail/tree_borrows/repeated_foreign_read_lazy_conflicted.stderr @@ -0,0 +1,31 @@ +error: Undefined Behavior: write access through at ALLOC[0x0] is forbidden + --> $DIR/repeated_foreign_read_lazy_conflicted.rs:LL:CC + | +LL | *(x as *mut u8).byte_sub(1) = 42; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ write access through at ALLOC[0x0] is forbidden + | + = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental + = help: the accessed tag has state Reserved (conflicted) which forbids this child write access +help: the accessed tag was created here, in the initial state Reserved + --> $DIR/repeated_foreign_read_lazy_conflicted.rs:LL:CC + | +LL | unsafe fn access_after_sub_1(x: &mut u8, orig_ptr: *mut u8) { + | ^ +help: the accessed tag later transitioned to Reserved (conflicted) due to a foreign read access at offsets [0x0..0x1] + --> $DIR/repeated_foreign_read_lazy_conflicted.rs:LL:CC + | +LL | do_something(*orig_ptr); + | ^^^^^^^^^ + = help: this transition corresponds to a temporary loss of write permissions until function exit + = note: BACKTRACE (of the first span): + = note: inside `access_after_sub_1` at $DIR/repeated_foreign_read_lazy_conflicted.rs:LL:CC +note: inside `main` + --> $DIR/repeated_foreign_read_lazy_conflicted.rs:LL:CC + | +LL | access_after_sub_1(&mut *(foo as *mut u8).byte_add(1), orig_ptr); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a 
verbose backtrace + +error: aborting due to 1 previous error + From c9a3b022818fb61b7af1e2bcdb6b707ea9f6df6f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 27 Aug 2024 14:23:23 +0200 Subject: [PATCH 101/124] Fix tests --- .../src/handlers/toggle_macro_delimiter.rs | 42 +++++++++---------- .../crates/ide-assists/src/tests/generated.rs | 4 +- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs index 6fa5ff761f680..eedb2ea3b9ada 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/toggle_macro_delimiter.rs @@ -15,7 +15,7 @@ use crate::{AssistContext, Assists}; // () => {}; // } // -// sth! $0( ); +// sth!$0( ); // ``` // -> // ``` @@ -23,7 +23,7 @@ use crate::{AssistContext, Assists}; // () => {}; // } // -// sth! { } +// sth!{ } // ``` pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { #[derive(Debug)] @@ -36,7 +36,7 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>) RCur, } - let makro = ctx.find_node_at_offset_with_descend::()?.clone_for_update(); + let makro = ctx.find_node_at_offset::()?.clone_for_update(); let makro_text_range = makro.syntax().text_range(); let cursor_offset = ctx.offset(); @@ -62,7 +62,7 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>) }; acc.add( - AssistId("add_braces", AssistKind::Refactor), + AssistId("toggle_macro_delimiter", AssistKind::Refactor), match token { MacroDelims::LPar => "Replace delimiters with braces", MacroDelims::RPar => "Replace delimiters with braces", @@ -110,20 +110,20 @@ macro_rules! sth { () => {}; } -sth! $0( ); +sth!$0( ); "#, r#" macro_rules! sth { () => {}; } -sth! { } +sth!{ } "#, ) } #[test] - fn test_braclets() { + fn test_braces() { check_assist( toggle_macro_delimiter, r#" @@ -131,14 +131,14 @@ macro_rules! sth { () => {}; } -sth! $0{ }; +sth!$0{ }; "#, r#" macro_rules! sth { () => {}; } -sth! [ ]; +sth![ ]; "#, ) } @@ -152,14 +152,14 @@ macro_rules! sth { () => {}; } -sth! $0[ ]; +sth!$0[ ]; "#, r#" macro_rules! sth { () => {}; } -sth! ( ); +sth!( ); "#, ) } @@ -174,7 +174,7 @@ mod abc { () => {}; } - sth! $0{ }; + sth!$0{ }; } "#, r#" @@ -183,7 +183,7 @@ mod abc { () => {}; } - sth! [ ]; + sth![ ]; } "#, ) @@ -196,7 +196,7 @@ mod abc { r#" macro_rules! prt { ($e:expr) => {{ - println!("{}", stringify! {$e}); + println!("{}", stringify!{$e}); }}; } @@ -213,20 +213,20 @@ prt!(($03 + 5)); r#" macro_rules! prt { ($e:expr) => {{ - println!("{}", stringify! {$e}); + println!("{}", stringify!{$e}); }}; } -prt! $0((3 + 5)); +prt!$0((3 + 5)); "#, r#" macro_rules! prt { ($e:expr) => {{ - println!("{}", stringify! {$e}); + println!("{}", stringify!{$e}); }}; } -prt! {(3 + 5)} +prt!{(3 + 5)} "#, ) } @@ -239,17 +239,17 @@ prt! {(3 + 5)} r#" macro_rules! prt { ($e:expr) => {{ - println!("{}", stringify! {$e}); + println!("{}", stringify!{$e}); }}; } macro_rules! abc { ($e:expr) => {{ - println!("{}", stringify! {$e}); + println!("{}", stringify!{$e}); }}; } -prt! 
{abc!($03 + 5)}; +prt!{abc!($03 + 5)}; "#, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index 05e1aff5f9f96..595ce1affb029 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -3100,14 +3100,14 @@ macro_rules! sth { () => {}; } -sth! $0( ); +sth!$0( ); "#####, r#####" macro_rules! sth { () => {}; } -sth! { } +sth!{ } "#####, ) } From 2765444c15617b234a38a736fd7d7c1c0e1024df Mon Sep 17 00:00:00 2001 From: Johannes Hostert Date: Sun, 25 Aug 2024 17:33:01 +0200 Subject: [PATCH 102/124] Make TB tree traversal bottom-up In preparation for #3837, the tree traversal needs to be made bottom-up, because the current top-down tree traversal, coupled with that PR's changes to the garbage collector, can introduce non-deterministic error messages if the GC removes a parent tag of the accessed tag that would have triggered the error first. This is a breaking change for the diagnostics emitted by TB. The implemented semantics stay the same. --- .../src/borrow_tracker/tree_borrows/tree.rs | 295 +++++++++++------- .../src/borrow_tracker/tree_borrows/unimap.rs | 4 + .../alias_through_mutation.tree.stderr | 14 +- .../box_exclusive_violation1.tree.stderr | 12 +- .../buggy_as_mut_slice.tree.stderr | 12 +- .../buggy_split_at_mut.tree.stderr | 12 +- .../both_borrows/illegal_write5.tree.stderr | 12 +- .../both_borrows/load_invalid_shr.tree.stderr | 12 +- .../mut_exclusive_violation1.tree.stderr | 12 +- .../mut_exclusive_violation2.tree.stderr | 12 +- .../pass_invalid_shr_option.tree.stderr | 12 +- .../pass_invalid_shr_tuple.tree.stderr | 12 +- .../return_invalid_shr_option.tree.stderr | 12 +- .../return_invalid_shr_tuple.tree.stderr | 12 +- .../shr_frozen_violation1.tree.stderr | 10 +- .../tree_borrows/alternate-read-write.stderr | 14 +- .../children-can-alias.uniq.stderr | 12 +- .../fail/tree_borrows/error-range.stderr | 12 +- .../fail/tree_borrows/strongly-protected.rs | 7 +- .../tree_borrows/strongly-protected.stderr | 7 +- 20 files changed, 237 insertions(+), 270 deletions(-) diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs index 5fbb7b927768d..6aba61527d467 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs @@ -149,12 +149,11 @@ impl LocationState { // is possible, it also updates the internal state to keep track of whether // the propagation can be skipped next time. // It is a performance loss not to call this function when a foreign access occurs. - // It is unsound not to call this function when a child access occurs. // FIXME: This optimization is wrong, and is currently disabled (by ignoring the // result returned here). Since we presumably want an optimization like this, // we should add it back. See #3864 for more information. - fn update_last_foreign_access( - &mut self, + fn skip_if_known_noop( + &self, access_kind: AccessKind, rel_pos: AccessRelatedness, ) -> ContinueTraversal { @@ -177,22 +176,37 @@ impl LocationState { // No need to update `self.latest_foreign_access`, // the type of the current streak among nonempty read-only // or nonempty with at least one write has not changed. 
- ContinueTraversal::SkipChildren + ContinueTraversal::SkipSelfAndChildren } else { // Otherwise propagate this time, and also record the // access that just occurred so that we can skip the propagation // next time. - self.latest_foreign_access = Some(access_kind); ContinueTraversal::Recurse } } else { // A child access occurred, this breaks the streak of foreign // accesses in a row and the sequence since the previous child access // is now empty. - self.latest_foreign_access = None; ContinueTraversal::Recurse } } + + /// Records a new access, so that future access can potentially be skipped + /// by `skip_if_known_noop`. + /// The invariants for this function are closely coupled to the function above: + /// It MUST be called on child accesses, and on foreign accesses MUST be called + /// when `skip_if_know_noop` returns `Recurse`, and MUST NOT be called otherwise. + /// FIXME: This optimization is wrong, and is currently disabled (by ignoring the + /// result returned here). Since we presumably want an optimization like this, + /// we should add it back. See #3864 for more information. + #[allow(unused)] + fn record_new_access(&mut self, access_kind: AccessKind, rel_pos: AccessRelatedness) { + if rel_pos.is_foreign() { + self.latest_foreign_access = Some(access_kind); + } else { + self.latest_foreign_access = None; + } + } } impl fmt::Display for LocationState { @@ -285,13 +299,16 @@ struct TreeVisitor<'tree> { /// Whether to continue exploring the children recursively or not. enum ContinueTraversal { Recurse, - SkipChildren, + SkipSelfAndChildren, } /// Stack of nodes left to explore in a tree traversal. -struct TreeVisitorStack { +struct TreeVisitorStack { /// Identifier of the original access. initial: UniIndex, + /// Function describing whether to continue at a tag. + /// This is only invoked for foreign accesses. + f_continue: NodeContinue, /// Function to apply to each tag. f_propagate: NodeApp, /// Handler to add the required context to diagnostics. @@ -299,152 +316,178 @@ struct TreeVisitorStack { /// Mutable state of the visit: the tags left to handle. /// Every tag pushed should eventually be handled, /// and the precise order is relevant for diagnostics. - stack: Vec<(UniIndex, AccessRelatedness)>, + /// Since the traversal is bottom-up, we need to remember + /// whether we're here initially (false) or after visiting all + /// children (true). The bool indicates this. + stack: Vec<(UniIndex, AccessRelatedness, bool)>, } -impl TreeVisitorStack +impl + TreeVisitorStack where - NodeApp: Fn(NodeAppArgs<'_>) -> Result, + NodeContinue: Fn(&NodeAppArgs<'_>) -> ContinueTraversal, + NodeApp: Fn(NodeAppArgs<'_>) -> Result<(), InnErr>, ErrHandler: Fn(ErrHandlerArgs<'_, InnErr>) -> OutErr, { - /// Apply the function to the current `tag`, and push its children - /// to the stack of future tags to visit. - fn exec_and_visit( + fn should_continue_at( + &self, + this: &mut TreeVisitor<'_>, + idx: UniIndex, + rel_pos: AccessRelatedness, + ) -> ContinueTraversal { + let node = this.nodes.get_mut(idx).unwrap(); + let args = NodeAppArgs { node, perm: this.perms.entry(idx), rel_pos }; + (self.f_continue)(&args) + } + + fn propagate_at( &mut self, this: &mut TreeVisitor<'_>, idx: UniIndex, - exclude: Option, rel_pos: AccessRelatedness, ) -> Result<(), OutErr> { - // 1. 
apply the propagation function let node = this.nodes.get_mut(idx).unwrap(); - let recurse = - (self.f_propagate)(NodeAppArgs { node, perm: this.perms.entry(idx), rel_pos }) - .map_err(|error_kind| { - (self.err_builder)(ErrHandlerArgs { - error_kind, - conflicting_info: &this.nodes.get(idx).unwrap().debug_info, - accessed_info: &this.nodes.get(self.initial).unwrap().debug_info, - }) - })?; - let node = this.nodes.get(idx).unwrap(); - // 2. add the children to the stack for future traversal - if matches!(recurse, ContinueTraversal::Recurse) { - let general_child_rel = rel_pos.for_child(); - for &child in node.children.iter() { - // Some child might be excluded from here and handled separately, - // e.g. the initially accessed tag. - if Some(child) != exclude { - // We should still ensure that if we don't skip the initially accessed - // it will receive the proper `AccessRelatedness`. - let this_child_rel = if child == self.initial { - AccessRelatedness::This - } else { - general_child_rel - }; - self.stack.push((child, this_child_rel)); + (self.f_propagate)(NodeAppArgs { node, perm: this.perms.entry(idx), rel_pos }).map_err( + |error_kind| { + (self.err_builder)(ErrHandlerArgs { + error_kind, + conflicting_info: &this.nodes.get(idx).unwrap().debug_info, + accessed_info: &this.nodes.get(self.initial).unwrap().debug_info, + }) + }, + ) + } + + fn go_upwards_from_accessed( + &mut self, + this: &mut TreeVisitor<'_>, + accessed_node: UniIndex, + push_children_of_accessed: bool, + ) -> Result<(), OutErr> { + assert!(self.stack.is_empty()); + // First, handle accessed node. A bunch of things need to + // be handled differently here compared to the further parents + // of `accesssed_node`. + { + self.propagate_at(this, accessed_node, AccessRelatedness::This)?; + if push_children_of_accessed { + let accessed_node = this.nodes.get(accessed_node).unwrap(); + for &child in accessed_node.children.iter().rev() { + self.stack.push((child, AccessRelatedness::AncestorAccess, false)); } } } + // Then, handle the accessed node's parent. Here, we need to + // make sure we only mark the "cousin" subtrees for later visitation, + // not the subtree that contains the accessed node. + let mut last_node = accessed_node; + while let Some(current) = this.nodes.get(last_node).unwrap().parent { + self.propagate_at(this, current, AccessRelatedness::StrictChildAccess)?; + let node = this.nodes.get(current).unwrap(); + for &child in node.children.iter().rev() { + if last_node == child { + continue; + } + self.stack.push((child, AccessRelatedness::DistantAccess, false)); + } + last_node = current; + } + self.stack.reverse(); Ok(()) } - fn new(initial: UniIndex, f_propagate: NodeApp, err_builder: ErrHandler) -> Self { - Self { initial, f_propagate, err_builder, stack: Vec::new() } - } - - /// Finish the exploration by applying `exec_and_visit` until - /// the stack is empty. 
- fn finish(&mut self, visitor: &mut TreeVisitor<'_>) -> Result<(), OutErr> { - while let Some((idx, rel_pos)) = self.stack.pop() { - self.exec_and_visit(visitor, idx, /* no children to exclude */ None, rel_pos)?; + fn finish_foreign_accesses(&mut self, this: &mut TreeVisitor<'_>) -> Result<(), OutErr> { + while let Some((idx, rel_pos, is_final)) = self.stack.last_mut() { + let idx = *idx; + let rel_pos = *rel_pos; + if *is_final { + self.stack.pop(); + self.propagate_at(this, idx, rel_pos)?; + } else { + *is_final = true; + let handle_children = self.should_continue_at(this, idx, rel_pos); + match handle_children { + ContinueTraversal::Recurse => { + // add all children, and also leave the node itself + // on the stack so that it can be visited later. + let node = this.nodes.get(idx).unwrap(); + for &child in node.children.iter() { + self.stack.push((child, rel_pos, false)); + } + } + ContinueTraversal::SkipSelfAndChildren => { + // skip self + self.stack.pop(); + continue; + } + } + } } Ok(()) } - /// Push all ancestors to the exploration stack in order of nearest ancestor - /// towards the top. - fn push_and_visit_strict_ancestors( - &mut self, - visitor: &mut TreeVisitor<'_>, - ) -> Result<(), OutErr> { - let mut path_ascend = Vec::new(); - // First climb to the root while recording the path - let mut curr = self.initial; - while let Some(ancestor) = visitor.nodes.get(curr).unwrap().parent { - path_ascend.push((ancestor, curr)); - curr = ancestor; - } - // Then descend: - // - execute f_propagate on each node - // - record children in visit - while let Some((ancestor, next_in_path)) = path_ascend.pop() { - // Explore ancestors in descending order. - // `next_in_path` is excluded from the recursion because it - // will be the `ancestor` of the next iteration. - // It also needs a different `AccessRelatedness` than the other - // children of `ancestor`. - self.exec_and_visit( - visitor, - ancestor, - Some(next_in_path), - AccessRelatedness::StrictChildAccess, - )?; - } - Ok(()) + fn new( + initial: UniIndex, + f_continue: NodeContinue, + f_propagate: NodeApp, + err_builder: ErrHandler, + ) -> Self { + Self { initial, f_continue, f_propagate, err_builder, stack: Vec::new() } } } impl<'tree> TreeVisitor<'tree> { - // Applies `f_propagate` to every vertex of the tree top-down in the following order: first - // all ancestors of `start`, then `start` itself, then children of `start`, then the rest. + // Applies `f_propagate` to every vertex of the tree bottom-up in the following order: first + // all ancestors of `start` (starting with `start` itself), then children of `start`, then the rest, + // always going bottom-up. // This ensures that errors are triggered in the following order - // - first invalid accesses with insufficient permissions, closest to the root first, - // - then protector violations, closest to `start` first. + // - first invalid accesses with insufficient permissions, closest to the accessed node first, + // - then protector violations, bottom-up, starting with the children of the accessed node, and then + // going upwards and outwards. // // `f_propagate` should follow the following format: for a given `Node` it updates its // `Permission` depending on the position relative to `start` (given by an // `AccessRelatedness`). - // It outputs whether the tree traversal for this subree should continue or not. - fn traverse_parents_this_children_others( + // `f_continue` is called before on foreign nodes, and describes whether to continue + // with the subtree at that node. 
+ fn traverse_this_parents_children_other( mut self, start: BorTag, - f_propagate: impl Fn(NodeAppArgs<'_>) -> Result, + f_continue: impl Fn(&NodeAppArgs<'_>) -> ContinueTraversal, + f_propagate: impl Fn(NodeAppArgs<'_>) -> Result<(), InnErr>, err_builder: impl Fn(ErrHandlerArgs<'_, InnErr>) -> OutErr, ) -> Result<(), OutErr> { let start_idx = self.tag_mapping.get(&start).unwrap(); - let mut stack = TreeVisitorStack::new(start_idx, f_propagate, err_builder); - stack.push_and_visit_strict_ancestors(&mut self)?; - // All (potentially zero) ancestors have been explored, - // it's time to explore the `start` tag. - stack.exec_and_visit( - &mut self, - start_idx, - /* no children to exclude */ None, - AccessRelatedness::This, - )?; - // Then finish with a normal DFS. - stack.finish(&mut self) + let mut stack = TreeVisitorStack::new(start_idx, f_continue, f_propagate, err_builder); + // Visits the accessed node itself, and all its parents, i.e. all nodes + // undergoing a child access. Also pushes the children and the other + // cousin nodes (i.e. all nodes undergoing a foreign access) to the stack + // to be processed later. + stack.go_upwards_from_accessed(&mut self, start_idx, true)?; + // Now visit all the foreign nodes we remembered earlier. + // For this we go bottom-up, but also allow f_continue to skip entire + // subtrees from being visited if it would be a NOP. + stack.finish_foreign_accesses(&mut self) } - // Applies `f_propagate` to every non-child vertex of the tree (ancestors first). - // - // `f_propagate` should follow the following format: for a given `Node` it updates its - // `Permission` depending on the position relative to `start` (given by an - // `AccessRelatedness`). - // It outputs whether the tree traversal for this subree should continue or not. + // Like `traverse_this_parents_children_other`, but skips the children of `start`. fn traverse_nonchildren( mut self, start: BorTag, - f_propagate: impl Fn(NodeAppArgs<'_>) -> Result, + f_continue: impl Fn(&NodeAppArgs<'_>) -> ContinueTraversal, + f_propagate: impl Fn(NodeAppArgs<'_>) -> Result<(), InnErr>, err_builder: impl Fn(ErrHandlerArgs<'_, InnErr>) -> OutErr, ) -> Result<(), OutErr> { let start_idx = self.tag_mapping.get(&start).unwrap(); - let mut stack = TreeVisitorStack::new(start_idx, f_propagate, err_builder); - stack.push_and_visit_strict_ancestors(&mut self)?; - // We *don't* visit the `start` tag, and we don't push its children. - // Only finish the DFS with the cousins. - stack.finish(&mut self) + let mut stack = TreeVisitorStack::new(start_idx, f_continue, f_propagate, err_builder); + // Visits the accessed node itself, and all its parents, i.e. all nodes + // undergoing a child access. Also pushes the other cousin nodes to the + // stack, but not the children of the accessed node. + stack.go_upwards_from_accessed(&mut self, start_idx, false)?; + // Now visit all the foreign nodes we remembered earlier. + // For this we go bottom-up, but also allow f_continue to skip entire + // subtrees from being visited if it would be a NOP. 
+ stack.finish_foreign_accesses(&mut self) } } @@ -541,16 +584,18 @@ impl<'tcx> Tree { )?; for (perms_range, perms) in self.rperms.iter_mut(access_range.start, access_range.size) { TreeVisitor { nodes: &mut self.nodes, tag_mapping: &self.tag_mapping, perms } - .traverse_parents_this_children_others( + .traverse_this_parents_children_other( tag, - |args: NodeAppArgs<'_>| -> Result { + // visit all children, skipping none + |_| ContinueTraversal::Recurse, + |args: NodeAppArgs<'_>| -> Result<(), TransitionError> { let NodeAppArgs { node, .. } = args; if global.borrow().protected_tags.get(&node.tag) == Some(&ProtectorKind::StrongProtector) { Err(TransitionError::ProtectedDealloc) } else { - Ok(ContinueTraversal::Recurse) + Ok(()) } }, |args: ErrHandlerArgs<'_, TransitionError>| -> InterpError<'tcx> { @@ -607,16 +652,28 @@ impl<'tcx> Tree { // // `perms_range` is only for diagnostics (it is the range of // the `RangeMap` on which we are currently working). + let node_skipper = |access_kind: AccessKind, args: &NodeAppArgs<'_>| -> ContinueTraversal { + let NodeAppArgs { node, perm, rel_pos } = args; + + let old_state = perm + .get() + .copied() + .unwrap_or_else(|| LocationState::new_uninit(node.default_initial_perm)); + // FIXME: See #3684 + let _would_skip_if_not_for_fixme = old_state.skip_if_known_noop(access_kind, *rel_pos); + ContinueTraversal::Recurse + }; let node_app = |perms_range: Range, access_kind: AccessKind, access_cause: diagnostics::AccessCause, args: NodeAppArgs<'_>| - -> Result { + -> Result<(), TransitionError> { let NodeAppArgs { node, mut perm, rel_pos } = args; let old_state = perm.or_insert(LocationState::new_uninit(node.default_initial_perm)); - old_state.update_last_foreign_access(access_kind, rel_pos); + // FIXME: See #3684 + // old_state.record_new_access(access_kind, rel_pos); let protected = global.borrow().protected_tags.contains_key(&node.tag); let transition = old_state.perform_access(access_kind, rel_pos, protected)?; @@ -631,7 +688,7 @@ impl<'tcx> Tree { span, }); } - Ok(ContinueTraversal::Recurse) + Ok(()) }; // Error handler in case `node_app` goes wrong. 
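The new traversal is easier to follow with the bookkeeping stripped away. The sketch below is a simplified rendering of the order implemented by `go_upwards_from_accessed` and `finish_foreign_accesses`, using a plain `Vec`-based tree and assumed names rather than miri's `UniIndex`/`TreeVisitorStack` plumbing: the accessed node and its ancestors are handled immediately, every foreign subtree is remembered on a stack, and the remembered subtrees are then visited bottom-up, with `f_continue` allowed to skip a subtree whose visit would be a no-op.

struct Node {
    parent: Option<usize>,
    children: Vec<usize>,
}

enum ContinueTraversal {
    Recurse,
    SkipSelfAndChildren,
}

fn traverse(
    nodes: &[Node],
    accessed: usize,
    f_continue: impl Fn(usize) -> ContinueTraversal,
    mut f_propagate: impl FnMut(usize),
) {
    // Phase 1: the accessed node and its ancestors (the "child access" part),
    // while remembering the foreign subtrees for phase 2.
    let mut stack: Vec<(usize, bool)> = Vec::new();
    f_propagate(accessed);
    for &child in nodes[accessed].children.iter().rev() {
        stack.push((child, false));
    }
    let mut last = accessed;
    while let Some(parent) = nodes[last].parent {
        f_propagate(parent);
        for &child in nodes[parent].children.iter().rev() {
            if child != last {
                stack.push((child, false));
            }
        }
        last = parent;
    }
    stack.reverse();
    // Phase 2: foreign subtrees, visited bottom-up. A node's children are
    // pushed first and the node itself is revisited once they are done
    // (the bool marks whether its children have already been pushed).
    while let Some((idx, children_pushed)) = stack.last_mut() {
        let idx = *idx;
        if *children_pushed {
            stack.pop();
            f_propagate(idx);
        } else {
            *children_pushed = true;
            match f_continue(idx) {
                ContinueTraversal::Recurse => {
                    for &child in nodes[idx].children.iter() {
                        stack.push((child, false));
                    }
                }
                ContinueTraversal::SkipSelfAndChildren => {
                    stack.pop();
                }
            }
        }
    }
}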
@@ -658,8 +715,9 @@ impl<'tcx> Tree { for (perms_range, perms) in self.rperms.iter_mut(access_range.start, access_range.size) { TreeVisitor { nodes: &mut self.nodes, tag_mapping: &self.tag_mapping, perms } - .traverse_parents_this_children_others( + .traverse_this_parents_children_other( tag, + |args| node_skipper(access_kind, args), |args| node_app(perms_range.clone(), access_kind, access_cause, args), |args| err_handler(perms_range.clone(), access_cause, args), )?; @@ -686,6 +744,7 @@ impl<'tcx> Tree { TreeVisitor { nodes: &mut self.nodes, tag_mapping: &self.tag_mapping, perms } .traverse_nonchildren( tag, + |args| node_skipper(access_kind, args), |args| node_app(perms_range.clone(), access_kind, access_cause, args), |args| err_handler(perms_range.clone(), access_cause, args), )?; diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/unimap.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/unimap.rs index 92bae6203b3c0..cbc25724cb685 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/unimap.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/unimap.rs @@ -221,6 +221,10 @@ impl<'a, V> UniEntry<'a, V> { } self.inner.as_mut().unwrap() } + + pub fn get(&self) -> Option<&V> { + self.inner.as_ref() + } } mod tests { diff --git a/src/tools/miri/tests/fail/both_borrows/alias_through_mutation.tree.stderr b/src/tools/miri/tests/fail/both_borrows/alias_through_mutation.tree.stderr index f26e8444a9aaf..0bd196eab1b83 100644 --- a/src/tools/miri/tests/fail/both_borrows/alias_through_mutation.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/alias_through_mutation.tree.stderr @@ -5,24 +5,18 @@ LL | let _val = *target_alias; | ^^^^^^^^^^^^^ read access through at ALLOC[0x0] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this child read access -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this child read access +help: the accessed tag was created here, in the initial state Frozen --> $DIR/alias_through_mutation.rs:LL:CC | LL | *x = &mut *(target as *mut _); | ^^^^^^^^^^^^^^^^^^^^^^^^ -help: the conflicting tag was created here, in the initial state Reserved - --> $DIR/alias_through_mutation.rs:LL:CC - | -LL | retarget(&mut target_alias, target); - | ^^^^^^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] --> $DIR/alias_through_mutation.rs:LL:CC | LL | *target = 13; | ^^^^^^^^^^^^ - = help: this transition corresponds to a loss of read and write permissions + = help: this transition corresponds to a loss of read permissions = note: BACKTRACE (of the first span): = note: inside `main` at $DIR/alias_through_mutation.rs:LL:CC diff --git a/src/tools/miri/tests/fail/both_borrows/box_exclusive_violation1.tree.stderr b/src/tools/miri/tests/fail/both_borrows/box_exclusive_violation1.tree.stderr index ee5ef0c5ea8b2..27d987feb574d 100644 --- a/src/tools/miri/tests/fail/both_borrows/box_exclusive_violation1.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/box_exclusive_violation1.tree.stderr @@ -5,19 +5,13 @@ LL | *LEAK = 7; | ^^^^^^^^^ write access through at ALLOC[0x0] is forbidden | = help: this indicates a potential 
bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this child write access -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this child write access +help: the accessed tag was created here, in the initial state Frozen --> $DIR/box_exclusive_violation1.rs:LL:CC | LL | fn unknown_code_1(x: &i32) { | ^ -help: the conflicting tag was created here, in the initial state Frozen - --> $DIR/box_exclusive_violation1.rs:LL:CC - | -LL | unknown_code_1(&*our); - | ^^^^^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] --> $DIR/box_exclusive_violation1.rs:LL:CC | LL | *our = 5; diff --git a/src/tools/miri/tests/fail/both_borrows/buggy_as_mut_slice.tree.stderr b/src/tools/miri/tests/fail/both_borrows/buggy_as_mut_slice.tree.stderr index 5593db899710d..16e87c4d4ebc2 100644 --- a/src/tools/miri/tests/fail/both_borrows/buggy_as_mut_slice.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/buggy_as_mut_slice.tree.stderr @@ -5,19 +5,13 @@ LL | v2[1] = 7; | ^^^^^^^^^ write access through at ALLOC[0x4] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this child write access -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this child write access +help: the accessed tag was created here, in the initial state Reserved --> $DIR/buggy_as_mut_slice.rs:LL:CC | LL | let v2 = safe::as_mut_slice(&v); | ^^^^^^^^^^^^^^^^^^^^^^ -help: the conflicting tag was created here, in the initial state Reserved - --> $DIR/buggy_as_mut_slice.rs:LL:CC - | -LL | unsafe { from_raw_parts_mut(self_.as_ptr() as *mut T, self_.len()) } - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x4..0x8] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x4..0x8] --> $DIR/buggy_as_mut_slice.rs:LL:CC | LL | v1[1] = 5; diff --git a/src/tools/miri/tests/fail/both_borrows/buggy_split_at_mut.tree.stderr b/src/tools/miri/tests/fail/both_borrows/buggy_split_at_mut.tree.stderr index 7d3725d611a2c..aaf7c2256a78a 100644 --- a/src/tools/miri/tests/fail/both_borrows/buggy_split_at_mut.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/buggy_split_at_mut.tree.stderr @@ -5,19 +5,13 @@ LL | b[1] = 6; | ^^^^^^^^ write access through at ALLOC[0x4] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this child write access -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this child write access +help: the accessed tag was created here, in the initial state Reserved --> $DIR/buggy_split_at_mut.rs:LL:CC | LL | let (a, b) = 
safe::split_at_mut(&mut array, 0); | ^ -help: the conflicting tag was created here, in the initial state Reserved - --> $DIR/buggy_split_at_mut.rs:LL:CC - | -LL | from_raw_parts_mut(ptr.offset(mid as isize), len - mid), - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x4..0x8] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x4..0x8] --> $DIR/buggy_split_at_mut.rs:LL:CC | LL | a[1] = 5; diff --git a/src/tools/miri/tests/fail/both_borrows/illegal_write5.tree.stderr b/src/tools/miri/tests/fail/both_borrows/illegal_write5.tree.stderr index 1f3d6e387d437..dec2bb6880f87 100644 --- a/src/tools/miri/tests/fail/both_borrows/illegal_write5.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/illegal_write5.tree.stderr @@ -5,19 +5,13 @@ LL | let _val = *xref; | ^^^^^ read access through at ALLOC[0x0] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this child read access -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this child read access +help: the accessed tag was created here, in the initial state Reserved --> $DIR/illegal_write5.rs:LL:CC | LL | let xref = unsafe { &mut *xraw }; | ^^^^^^^^^^ -help: the conflicting tag was created here, in the initial state Reserved - --> $DIR/illegal_write5.rs:LL:CC - | -LL | let xref = unsafe { &mut *xraw }; - | ^^^^^^^^^^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] --> $DIR/illegal_write5.rs:LL:CC | LL | unsafe { *xraw = 15 }; diff --git a/src/tools/miri/tests/fail/both_borrows/load_invalid_shr.tree.stderr b/src/tools/miri/tests/fail/both_borrows/load_invalid_shr.tree.stderr index 292dc8150f17f..0fcfc20e43696 100644 --- a/src/tools/miri/tests/fail/both_borrows/load_invalid_shr.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/load_invalid_shr.tree.stderr @@ -5,19 +5,13 @@ LL | let _val = *xref_in_mem; | ^^^^^^^^^^^^ reborrow through at ALLOC[0x0] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this reborrow (acting as a child read access) -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this reborrow (acting as a child read access) +help: the accessed tag was created here, in the initial state Frozen --> $DIR/load_invalid_shr.rs:LL:CC | LL | let xref_in_mem = Box::new(xref); | ^^^^^^^^^^^^^^ -help: the conflicting tag was created here, in the initial state Frozen - --> $DIR/load_invalid_shr.rs:LL:CC - | -LL | let xref = unsafe { &*xraw }; - | ^^^^^^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] --> $DIR/load_invalid_shr.rs:LL:CC | LL | unsafe { *xraw = 42 }; // unfreeze diff 
--git a/src/tools/miri/tests/fail/both_borrows/mut_exclusive_violation1.tree.stderr b/src/tools/miri/tests/fail/both_borrows/mut_exclusive_violation1.tree.stderr index a5e62e9ea3381..5b4ee4a891302 100644 --- a/src/tools/miri/tests/fail/both_borrows/mut_exclusive_violation1.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/mut_exclusive_violation1.tree.stderr @@ -5,19 +5,13 @@ LL | *LEAK = 7; | ^^^^^^^^^ write access through at ALLOC[0x0] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this child write access -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this child write access +help: the accessed tag was created here, in the initial state Frozen --> $DIR/mut_exclusive_violation1.rs:LL:CC | LL | fn unknown_code_1(x: &i32) { | ^ -help: the conflicting tag was created here, in the initial state Frozen - --> $DIR/mut_exclusive_violation1.rs:LL:CC - | -LL | unknown_code_1(&*our); - | ^^^^^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] --> $DIR/mut_exclusive_violation1.rs:LL:CC | LL | *our = 5; diff --git a/src/tools/miri/tests/fail/both_borrows/mut_exclusive_violation2.tree.stderr b/src/tools/miri/tests/fail/both_borrows/mut_exclusive_violation2.tree.stderr index 6b6eecbe41362..d3bc54b7ef3c9 100644 --- a/src/tools/miri/tests/fail/both_borrows/mut_exclusive_violation2.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/mut_exclusive_violation2.tree.stderr @@ -5,19 +5,13 @@ LL | *raw1 = 3; | ^^^^^^^^^ write access through at ALLOC[0x0] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this child write access -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this child write access +help: the accessed tag was created here, in the initial state Reserved --> $DIR/mut_exclusive_violation2.rs:LL:CC | LL | let raw1 = ptr1.as_mut(); | ^^^^^^^^^^^^^ -help: the conflicting tag was created here, in the initial state Reserved - --> $DIR/mut_exclusive_violation2.rs:LL:CC - | -LL | let raw1 = ptr1.as_mut(); - | ^^^^^^^^^^^^^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] --> $DIR/mut_exclusive_violation2.rs:LL:CC | LL | *raw2 = 2; diff --git a/src/tools/miri/tests/fail/both_borrows/pass_invalid_shr_option.tree.stderr b/src/tools/miri/tests/fail/both_borrows/pass_invalid_shr_option.tree.stderr index f2eaf767a47af..f687b3f867fc3 100644 --- a/src/tools/miri/tests/fail/both_borrows/pass_invalid_shr_option.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/pass_invalid_shr_option.tree.stderr @@ -5,19 +5,13 @@ LL | foo(some_xref); | ^^^^^^^^^ reborrow through at ALLOC[0x0] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the 
Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this reborrow (acting as a child read access) -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this reborrow (acting as a child read access) +help: the accessed tag was created here, in the initial state Frozen --> $DIR/pass_invalid_shr_option.rs:LL:CC | LL | let some_xref = unsafe { Some(&*xraw) }; | ^^^^^^^^^^^^ -help: the conflicting tag was created here, in the initial state Frozen - --> $DIR/pass_invalid_shr_option.rs:LL:CC - | -LL | let some_xref = unsafe { Some(&*xraw) }; - | ^^^^^^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] --> $DIR/pass_invalid_shr_option.rs:LL:CC | LL | unsafe { *xraw = 42 }; // unfreeze diff --git a/src/tools/miri/tests/fail/both_borrows/pass_invalid_shr_tuple.tree.stderr b/src/tools/miri/tests/fail/both_borrows/pass_invalid_shr_tuple.tree.stderr index 9cfd4239c85f8..845838f93d57d 100644 --- a/src/tools/miri/tests/fail/both_borrows/pass_invalid_shr_tuple.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/pass_invalid_shr_tuple.tree.stderr @@ -5,19 +5,13 @@ LL | foo(pair_xref); | ^^^^^^^^^ reborrow through at ALLOC[0x0] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this reborrow (acting as a child read access) -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this reborrow (acting as a child read access) +help: the accessed tag was created here, in the initial state Frozen --> $DIR/pass_invalid_shr_tuple.rs:LL:CC | LL | let pair_xref = unsafe { (&*xraw0, &*xraw1) }; | ^^^^^^^^^^^^^^^^^^ -help: the conflicting tag was created here, in the initial state Frozen - --> $DIR/pass_invalid_shr_tuple.rs:LL:CC - | -LL | let pair_xref = unsafe { (&*xraw0, &*xraw1) }; - | ^^^^^^^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x4] --> $DIR/pass_invalid_shr_tuple.rs:LL:CC | LL | unsafe { *xraw0 = 42 }; // unfreeze diff --git a/src/tools/miri/tests/fail/both_borrows/return_invalid_shr_option.tree.stderr b/src/tools/miri/tests/fail/both_borrows/return_invalid_shr_option.tree.stderr index 1169200b57c1f..e770372925130 100644 --- a/src/tools/miri/tests/fail/both_borrows/return_invalid_shr_option.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/return_invalid_shr_option.tree.stderr @@ -5,19 +5,13 @@ LL | ret | ^^^ reborrow through at ALLOC[0x4] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this reborrow (acting as a child read access) -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this reborrow (acting as a child read access) 
+help: the accessed tag was created here, in the initial state Frozen --> $DIR/return_invalid_shr_option.rs:LL:CC | LL | let ret = Some(unsafe { &(*xraw).1 }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -help: the conflicting tag was created here, in the initial state Frozen - --> $DIR/return_invalid_shr_option.rs:LL:CC - | -LL | let ret = Some(unsafe { &(*xraw).1 }); - | ^^^^^^^^^^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x8] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x8] --> $DIR/return_invalid_shr_option.rs:LL:CC | LL | unsafe { *xraw = (42, 23) }; // unfreeze diff --git a/src/tools/miri/tests/fail/both_borrows/return_invalid_shr_tuple.tree.stderr b/src/tools/miri/tests/fail/both_borrows/return_invalid_shr_tuple.tree.stderr index af410534140fe..a5c6be3f13aa8 100644 --- a/src/tools/miri/tests/fail/both_borrows/return_invalid_shr_tuple.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/return_invalid_shr_tuple.tree.stderr @@ -5,19 +5,13 @@ LL | ret | ^^^ reborrow through at ALLOC[0x4] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this reborrow (acting as a child read access) -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this reborrow (acting as a child read access) +help: the accessed tag was created here, in the initial state Frozen --> $DIR/return_invalid_shr_tuple.rs:LL:CC | LL | let ret = (unsafe { &(*xraw).1 },); | ^^^^^^^^^^^^^^^^^^^^^^^^ -help: the conflicting tag was created here, in the initial state Frozen - --> $DIR/return_invalid_shr_tuple.rs:LL:CC - | -LL | let ret = (unsafe { &(*xraw).1 },); - | ^^^^^^^^^^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x8] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x8] --> $DIR/return_invalid_shr_tuple.rs:LL:CC | LL | unsafe { *xraw = (42, 23) }; // unfreeze diff --git a/src/tools/miri/tests/fail/both_borrows/shr_frozen_violation1.tree.stderr b/src/tools/miri/tests/fail/both_borrows/shr_frozen_violation1.tree.stderr index 97cd64c03be8c..45b3bceb7287e 100644 --- a/src/tools/miri/tests/fail/both_borrows/shr_frozen_violation1.tree.stderr +++ b/src/tools/miri/tests/fail/both_borrows/shr_frozen_violation1.tree.stderr @@ -5,18 +5,12 @@ LL | *(x as *const i32 as *mut i32) = 7; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ write access through at ALLOC[0x0] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Frozen which forbids this child write access -help: the accessed tag was created here + = help: the accessed tag has state Frozen which forbids this child write access +help: the accessed tag was created here, in the initial state Frozen --> $DIR/shr_frozen_violation1.rs:LL:CC | LL | fn unknown_code(x: &i32) { | ^ -help: the conflicting tag was created here, in the initial state Frozen - --> $DIR/shr_frozen_violation1.rs:LL:CC - | -LL | unknown_code(&*x); - | ^^^ = note: BACKTRACE (of the first span): = note: inside 
`unknown_code` at $DIR/shr_frozen_violation1.rs:LL:CC note: inside `foo` diff --git a/src/tools/miri/tests/fail/tree_borrows/alternate-read-write.stderr b/src/tools/miri/tests/fail/tree_borrows/alternate-read-write.stderr index 98371cbe7a262..bd969d089c37a 100644 --- a/src/tools/miri/tests/fail/tree_borrows/alternate-read-write.stderr +++ b/src/tools/miri/tests/fail/tree_borrows/alternate-read-write.stderr @@ -5,25 +5,19 @@ LL | *y += 1; // Failure | ^^^^^^^ write access through at ALLOC[0x0] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Frozen which forbids this child write access -help: the accessed tag was created here + = help: the accessed tag has state Frozen which forbids this child write access +help: the accessed tag was created here, in the initial state Reserved --> $DIR/alternate-read-write.rs:LL:CC | LL | let y = unsafe { &mut *(x as *mut u8) }; | ^^^^^^^^^^^^^^^^^^^^ -help: the conflicting tag was created here, in the initial state Reserved - --> $DIR/alternate-read-write.rs:LL:CC - | -LL | let y = unsafe { &mut *(x as *mut u8) }; - | ^^^^^^^^^^^^^^^^^^^^ -help: the conflicting tag later transitioned to Active due to a child write access at offsets [0x0..0x1] +help: the accessed tag later transitioned to Active due to a child write access at offsets [0x0..0x1] --> $DIR/alternate-read-write.rs:LL:CC | LL | *y += 1; // Success | ^^^^^^^ = help: this transition corresponds to the first write to a 2-phase borrowed mutable reference -help: the conflicting tag later transitioned to Frozen due to a foreign read access at offsets [0x0..0x1] +help: the accessed tag later transitioned to Frozen due to a foreign read access at offsets [0x0..0x1] --> $DIR/alternate-read-write.rs:LL:CC | LL | let _val = *x; diff --git a/src/tools/miri/tests/fail/tree_borrows/children-can-alias.uniq.stderr b/src/tools/miri/tests/fail/tree_borrows/children-can-alias.uniq.stderr index cc1fea30f53e4..cdfa8a74238a4 100644 --- a/src/tools/miri/tests/fail/tree_borrows/children-can-alias.uniq.stderr +++ b/src/tools/miri/tests/fail/tree_borrows/children-can-alias.uniq.stderr @@ -5,19 +5,13 @@ LL | child2.write(2); | ^^^^^^^^^^^^^^^ write access through at ALLOC[0x0] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this child write access -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this child write access +help: the accessed tag was created here, in the initial state Reserved --> $DIR/children-can-alias.rs:LL:CC | LL | let child2 = x.as_ptr(); | ^^^^^^^^^^ -help: the conflicting tag was created here, in the initial state Reserved - --> $DIR/children-can-alias.rs:LL:CC - | -LL | let child2 = x.as_ptr(); - | ^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x1] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x0..0x1] --> $DIR/children-can-alias.rs:LL:CC | LL | child1.write(1); diff --git a/src/tools/miri/tests/fail/tree_borrows/error-range.stderr 
b/src/tools/miri/tests/fail/tree_borrows/error-range.stderr index 37f0b9b62b037..090ae4507bd75 100644 --- a/src/tools/miri/tests/fail/tree_borrows/error-range.stderr +++ b/src/tools/miri/tests/fail/tree_borrows/error-range.stderr @@ -5,19 +5,13 @@ LL | rmut[5] += 1; | ^^^^^^^^^^^^ read access through at ALLOC[0x5] is forbidden | = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental - = help: the accessed tag is a child of the conflicting tag - = help: the conflicting tag has state Disabled which forbids this child read access -help: the accessed tag was created here + = help: the accessed tag has state Disabled which forbids this child read access +help: the accessed tag was created here, in the initial state Reserved --> $DIR/error-range.rs:LL:CC | LL | let rmut = &mut *addr_of_mut!(data[0..6]); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -help: the conflicting tag was created here, in the initial state Reserved - --> $DIR/error-range.rs:LL:CC - | -LL | let rmut = &mut *addr_of_mut!(data[0..6]); - | ^^^^ -help: the conflicting tag later transitioned to Disabled due to a foreign write access at offsets [0x5..0x6] +help: the accessed tag later transitioned to Disabled due to a foreign write access at offsets [0x5..0x6] --> $DIR/error-range.rs:LL:CC | LL | data[5] = 1; diff --git a/src/tools/miri/tests/fail/tree_borrows/strongly-protected.rs b/src/tools/miri/tests/fail/tree_borrows/strongly-protected.rs index 484c7c3bbff78..037031d0345c4 100644 --- a/src/tools/miri/tests/fail/tree_borrows/strongly-protected.rs +++ b/src/tools/miri/tests/fail/tree_borrows/strongly-protected.rs @@ -1,14 +1,15 @@ //@compile-flags: -Zmiri-tree-borrows //@error-in-other-file: /deallocation through .* is forbidden/ -fn inner(x: &mut i32, f: fn(&mut i32)) { +fn inner(x: &mut i32, f: fn(*mut i32)) { // `f` may mutate, but it may not deallocate! 
+ // `f` takes a raw pointer so that the only protector + // is that on `x` f(x) } fn main() { - inner(Box::leak(Box::new(0)), |x| { - let raw = x as *mut _; + inner(Box::leak(Box::new(0)), |raw| { drop(unsafe { Box::from_raw(raw) }); }); } diff --git a/src/tools/miri/tests/fail/tree_borrows/strongly-protected.stderr b/src/tools/miri/tests/fail/tree_borrows/strongly-protected.stderr index f0afcc7b3ff1e..9da43055af240 100644 --- a/src/tools/miri/tests/fail/tree_borrows/strongly-protected.stderr +++ b/src/tools/miri/tests/fail/tree_borrows/strongly-protected.stderr @@ -15,7 +15,7 @@ LL | drop(unsafe { Box::from_raw(raw) }); help: the strongly protected tag was created here, in the initial state Reserved --> $DIR/strongly-protected.rs:LL:CC | -LL | fn inner(x: &mut i32, f: fn(&mut i32)) { +LL | fn inner(x: &mut i32, f: fn(*mut i32)) { | ^ = note: BACKTRACE (of the first span): = note: inside `std::alloc::dealloc` at RUSTLIB/alloc/src/alloc.rs:LL:CC @@ -28,7 +28,7 @@ note: inside closure | LL | drop(unsafe { Box::from_raw(raw) }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - = note: inside `<{closure@$DIR/strongly-protected.rs:LL:CC} as std::ops::FnOnce<(&mut i32,)>>::call_once - shim` at RUSTLIB/core/src/ops/function.rs:LL:CC + = note: inside `<{closure@$DIR/strongly-protected.rs:LL:CC} as std::ops::FnOnce<(*mut i32,)>>::call_once - shim` at RUSTLIB/core/src/ops/function.rs:LL:CC note: inside `inner` --> $DIR/strongly-protected.rs:LL:CC | @@ -37,8 +37,7 @@ LL | f(x) note: inside `main` --> $DIR/strongly-protected.rs:LL:CC | -LL | / inner(Box::leak(Box::new(0)), |x| { -LL | | let raw = x as *mut _; +LL | / inner(Box::leak(Box::new(0)), |raw| { LL | | drop(unsafe { Box::from_raw(raw) }); LL | | }); | |______^ From 34e50f56d6572337651175b36bbfc947088beb7a Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 22 Aug 2024 10:39:20 +0300 Subject: [PATCH 103/124] Create an assist to convert closure to freestanding fn The assist converts all captures to parameters. --- .../crates/hir-ty/src/infer/closure.rs | 113 +- .../rust-analyzer/crates/hir-ty/src/mir.rs | 17 +- src/tools/rust-analyzer/crates/hir/src/lib.rs | 87 ++ .../src/handlers/convert_closure_to_fn.rs | 1271 +++++++++++++++++ .../crates/ide-assists/src/lib.rs | 2 + .../crates/ide-assists/src/tests/generated.rs | 34 + 6 files changed, 1511 insertions(+), 13 deletions(-) create mode 100644 src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index 68e1a6ee821c7..36327d1d49c0a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -19,7 +19,8 @@ use hir_expand::name::Name; use intern::sym; use rustc_hash::FxHashMap; use smallvec::{smallvec, SmallVec}; -use stdx::never; +use stdx::{format_to, never}; +use syntax::utils::is_raw_identifier; use crate::{ db::{HirDatabase, InternedClosure}, @@ -251,6 +252,11 @@ impl CapturedItem { self.place.local } + /// Returns whether this place has any field (aka. non-deref) projections. 
+ pub fn has_field_projections(&self) -> bool { + self.place.projections.iter().any(|it| !matches!(it, ProjectionElem::Deref)) + } + pub fn ty(&self, subst: &Substitution) -> Ty { self.ty.clone().substitute(Interner, utils::ClosureSubst(subst).parent_subst()) } @@ -263,6 +269,97 @@ impl CapturedItem { self.span_stacks.iter().map(|stack| *stack.last().expect("empty span stack")).collect() } + /// Converts the place to a name that can be inserted into source code. + pub fn place_to_name(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String { + let body = db.body(owner); + let mut result = body[self.place.local].name.unescaped().display(db.upcast()).to_string(); + for proj in &self.place.projections { + match proj { + ProjectionElem::Deref => {} + ProjectionElem::Field(Either::Left(f)) => { + match &*f.parent.variant_data(db.upcast()) { + VariantData::Record(fields) => { + result.push('_'); + result.push_str(fields[f.local_id].name.as_str()) + } + VariantData::Tuple(fields) => { + let index = fields.iter().position(|it| it.0 == f.local_id); + if let Some(index) = index { + format_to!(result, "_{index}"); + } + } + VariantData::Unit => {} + } + } + ProjectionElem::Field(Either::Right(f)) => format_to!(result, "_{}", f.index), + &ProjectionElem::ClosureField(field) => format_to!(result, "_{field}"), + ProjectionElem::Index(_) + | ProjectionElem::ConstantIndex { .. } + | ProjectionElem::Subslice { .. } + | ProjectionElem::OpaqueCast(_) => { + never!("Not happen in closure capture"); + continue; + } + } + } + if is_raw_identifier(&result, db.crate_graph()[owner.module(db.upcast()).krate()].edition) { + result.insert_str(0, "r#"); + } + result + } + + pub fn display_place_source_code(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String { + let body = db.body(owner); + let krate = owner.krate(db.upcast()); + let edition = db.crate_graph()[krate].edition; + let mut result = body[self.place.local].name.display(db.upcast(), edition).to_string(); + for proj in &self.place.projections { + match proj { + // In source code autoderef kicks in. + ProjectionElem::Deref => {} + ProjectionElem::Field(Either::Left(f)) => { + let variant_data = f.parent.variant_data(db.upcast()); + match &*variant_data { + VariantData::Record(fields) => format_to!( + result, + ".{}", + fields[f.local_id].name.display(db.upcast(), edition) + ), + VariantData::Tuple(fields) => format_to!( + result, + ".{}", + fields.iter().position(|it| it.0 == f.local_id).unwrap_or_default() + ), + VariantData::Unit => {} + } + } + ProjectionElem::Field(Either::Right(f)) => { + let field = f.index; + format_to!(result, ".{field}"); + } + &ProjectionElem::ClosureField(field) => { + format_to!(result, ".{field}"); + } + ProjectionElem::Index(_) + | ProjectionElem::ConstantIndex { .. } + | ProjectionElem::Subslice { .. } + | ProjectionElem::OpaqueCast(_) => { + never!("Not happen in closure capture"); + continue; + } + } + } + let final_derefs_count = self + .place + .projections + .iter() + .rev() + .take_while(|proj| matches!(proj, ProjectionElem::Deref)) + .count(); + result.insert_str(0, &"*".repeat(final_derefs_count)); + result + } + pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String { let body = db.body(owner); let krate = owner.krate(db.upcast()); @@ -451,14 +548,6 @@ impl InferenceContext<'_> { }); } - fn is_ref_span(&self, span: MirSpan) -> bool { - match span { - MirSpan::ExprId(expr) => matches!(self.body[expr], Expr::Ref { .. 
}), - MirSpan::BindingId(_) => true, - MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false, - } - } - fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) { // The first span is the identifier, and it must always remain. truncate_to += 1; @@ -467,7 +556,7 @@ impl InferenceContext<'_> { let mut actual_truncate_to = 0; for &span in &*span_stack { actual_truncate_to += 1; - if !self.is_ref_span(span) { + if !span.is_ref_span(self.body) { remained -= 1; if remained == 0 { break; @@ -475,7 +564,7 @@ impl InferenceContext<'_> { } } if actual_truncate_to < span_stack.len() - && self.is_ref_span(span_stack[actual_truncate_to]) + && span_stack[actual_truncate_to].is_ref_span(self.body) { // Include the ref operator if there is one, we will fix it later (in `strip_captures_ref_span()`) if it's incorrect. actual_truncate_to += 1; @@ -1147,7 +1236,7 @@ impl InferenceContext<'_> { for capture in &mut captures { if matches!(capture.kind, CaptureKind::ByValue) { for span_stack in &mut capture.span_stacks { - if self.is_ref_span(span_stack[span_stack.len() - 1]) { + if span_stack[span_stack.len() - 1].is_ref_span(self.body) { span_stack.truncate(span_stack.len() - 1); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs index be094b13889be..22d4da0e755ae 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs @@ -16,7 +16,8 @@ use base_db::CrateId; use chalk_ir::Mutability; use either::Either; use hir_def::{ - hir::{BindingId, Expr, ExprId, Ordering, PatId}, + body::Body, + hir::{BindingAnnotation, BindingId, Expr, ExprId, Ordering, PatId}, DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId, }; use la_arena::{Arena, ArenaMap, Idx, RawIdx}; @@ -1174,6 +1175,20 @@ pub enum MirSpan { Unknown, } +impl MirSpan { + pub fn is_ref_span(&self, body: &Body) -> bool { + match *self { + MirSpan::ExprId(expr) => matches!(body[expr], Expr::Ref { .. }), + // FIXME: Figure out if this is correct wrt. match ergonomics. + MirSpan::BindingId(binding) => matches!( + body.bindings[binding].mode, + BindingAnnotation::Ref | BindingAnnotation::RefMut + ), + MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false, + } + } +} + impl_from!(ExprId, PatId for MirSpan); impl From<&ExprId> for MirSpan { diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 2ff5f0d538af0..58340c74fea9d 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -78,6 +78,7 @@ use hir_ty::{ use itertools::Itertools; use nameres::diagnostics::DefDiagnosticKind; use rustc_hash::FxHashSet; +use smallvec::SmallVec; use span::{Edition, EditionedFileId, FileId, MacroCallId, SyntaxContextId}; use stdx::{impl_from, never}; use syntax::{ @@ -4113,6 +4114,15 @@ impl ClosureCapture { Local { parent: self.owner, binding_id: self.capture.local() } } + /// Returns whether this place has any field (aka. non-deref) projections. + pub fn has_field_projections(&self) -> bool { + self.capture.has_field_projections() + } + + pub fn usages(&self) -> CaptureUsages { + CaptureUsages { parent: self.owner, spans: self.capture.spans() } + } + pub fn kind(&self) -> CaptureKind { match self.capture.kind() { hir_ty::CaptureKind::ByRef( @@ -4128,6 +4138,15 @@ impl ClosureCapture { } } + /// Converts the place to a name that can be inserted into source code. 
+ pub fn place_to_name(&self, db: &dyn HirDatabase) -> String { + self.capture.place_to_name(self.owner, db) + } + + pub fn display_place_source_code(&self, db: &dyn HirDatabase) -> String { + self.capture.display_place_source_code(self.owner, db) + } + pub fn display_place(&self, db: &dyn HirDatabase) -> String { self.capture.display_place(self.owner, db) } @@ -4141,6 +4160,74 @@ pub enum CaptureKind { Move, } +#[derive(Debug, Clone)] +pub struct CaptureUsages { + parent: DefWithBodyId, + spans: SmallVec<[mir::MirSpan; 3]>, +} + +impl CaptureUsages { + pub fn sources(&self, db: &dyn HirDatabase) -> Vec { + let (body, source_map) = db.body_with_source_map(self.parent); + let mut result = Vec::with_capacity(self.spans.len()); + for &span in self.spans.iter() { + let is_ref = span.is_ref_span(&body); + match span { + mir::MirSpan::ExprId(expr) => { + if let Ok(expr) = source_map.expr_syntax(expr) { + result.push(CaptureUsageSource { + is_ref, + source: expr.map(AstPtr::wrap_left), + }) + } + } + mir::MirSpan::PatId(pat) => { + if let Ok(pat) = source_map.pat_syntax(pat) { + result.push(CaptureUsageSource { + is_ref, + source: pat.map(AstPtr::wrap_right), + }); + } + } + mir::MirSpan::BindingId(binding) => result.extend( + source_map + .patterns_for_binding(binding) + .iter() + .filter_map(|&pat| source_map.pat_syntax(pat).ok()) + .map(|pat| CaptureUsageSource { + is_ref, + source: pat.map(AstPtr::wrap_right), + }), + ), + mir::MirSpan::SelfParam | mir::MirSpan::Unknown => { + unreachable!("invalid capture usage span") + } + } + } + result + } +} + +#[derive(Debug)] +pub struct CaptureUsageSource { + is_ref: bool, + source: InFile>>, +} + +impl CaptureUsageSource { + pub fn source(&self) -> AstPtr> { + self.source.value + } + + pub fn file_id(&self) -> HirFileId { + self.source.file_id + } + + pub fn is_ref(&self) -> bool { + self.is_ref + } +} + #[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct Type { env: Arc, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs new file mode 100644 index 0000000000000..79f303b37a442 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs @@ -0,0 +1,1271 @@ +use either::Either; +use hir::{CaptureKind, ClosureCapture, FileRangeWrapper, HirDisplay}; +use ide_db::{ + assists::{AssistId, AssistKind}, + base_db::SourceDatabase, + defs::Definition, + search::FileReferenceNode, + source_change::SourceChangeBuilder, + FxHashSet, +}; +use stdx::format_to; +use syntax::{ + algo::{skip_trivia_token, skip_whitespace_token}, + ast::{ + self, + edit::{AstNodeEdit, IndentLevel}, + make, HasArgList, HasGenericParams, HasName, + }, + hacks::parse_expr_from_str, + ted, AstNode, Direction, SyntaxKind, SyntaxNode, TextSize, ToSmolStr, T, +}; + +use crate::assist_context::{AssistContext, Assists}; + +// Assist: convert_closure_to_fn +// +// This converts a closure to a freestanding function, changing all captures to parameters. 
+// +// ``` +// # //- minicore: copy +// # struct String; +// # impl String { +// # fn new() -> Self {} +// # fn push_str(&mut self, s: &str) {} +// # } +// fn main() { +// let mut s = String::new(); +// let closure = |$0a| s.push_str(a); +// closure("abc"); +// } +// ``` +// -> +// ``` +// # struct String; +// # impl String { +// # fn new() -> Self {} +// # fn push_str(&mut self, s: &str) {} +// # } +// fn main() { +// let mut s = String::new(); +// fn closure(a: &str, s: &mut String) { +// s.push_str(a) +// } +// closure("abc", &mut s); +// } +// ``` +pub(crate) fn convert_closure_to_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let closure = ctx.find_node_at_offset::()?; + if ctx.find_node_at_offset::() != Some(ast::Expr::ClosureExpr(closure.clone())) { + // Not inside the parameter list. + return None; + } + let closure_name = closure.syntax().parent().and_then(|parent| { + let closure_decl = ast::LetStmt::cast(parent)?; + match closure_decl.pat()? { + ast::Pat::IdentPat(pat) => Some((closure_decl, pat.clone(), pat.name()?)), + _ => None, + } + }); + let module = ctx.sema.scope(closure.syntax())?.module(); + let closure_ty = ctx.sema.type_of_expr(&closure.clone().into())?; + let callable = closure_ty.original.as_callable(ctx.db())?; + let closure_ty = closure_ty.original.as_closure()?; + + let mut ret_ty = callable.return_type(); + let mut closure_mentioned_generic_params = ret_ty.generic_params(ctx.db()); + + let mut params = callable + .params() + .into_iter() + .map(|param| { + let node = ctx.sema.source(param.clone())?.value.right()?; + let param_ty = param.ty(); + closure_mentioned_generic_params.extend(param_ty.generic_params(ctx.db())); + match node.ty() { + Some(_) => Some(node), + None => { + let ty = param_ty + .display_source_code(ctx.db(), module.into(), true) + .unwrap_or_else(|_| "_".to_owned()); + Some(make::param(node.pat()?, make::ty(&ty))) + } + } + }) + .collect::>>()?; + + let mut body = closure.body()?.clone_for_update(); + let mut is_gen = false; + let mut is_async = closure.async_token().is_some(); + if is_async { + ret_ty = ret_ty.future_output(ctx.db())?; + } + // We defer the wrapping of the body in the block, because `make::block()` will generate a new node, + // but we need to locate `AstPtr`s inside the body. 
+ let mut wrap_body_in_block = true; + if let ast::Expr::BlockExpr(block) = &body { + if let Some(async_token) = block.async_token() { + if !is_async { + is_async = true; + ret_ty = ret_ty.future_output(ctx.db())?; + let token_idx = async_token.index(); + let whitespace_tokens_after_count = async_token + .siblings_with_tokens(Direction::Next) + .skip(1) + .take_while(|token| token.kind() == SyntaxKind::WHITESPACE) + .count(); + body.syntax().splice_children( + token_idx..token_idx + whitespace_tokens_after_count + 1, + Vec::new(), + ); + } + } + if let Some(gen_token) = block.gen_token() { + is_gen = true; + ret_ty = ret_ty.iterator_item(ctx.db())?; + let token_idx = gen_token.index(); + let whitespace_tokens_after_count = gen_token + .siblings_with_tokens(Direction::Next) + .skip(1) + .take_while(|token| token.kind() == SyntaxKind::WHITESPACE) + .count(); + body.syntax().splice_children( + token_idx..token_idx + whitespace_tokens_after_count + 1, + Vec::new(), + ); + } + + if block.try_token().is_none() + && block.unsafe_token().is_none() + && block.label().is_none() + && block.const_token().is_none() + && block.async_token().is_none() + { + wrap_body_in_block = false; + } + }; + + acc.add( + AssistId("convert_closure_to_fn", AssistKind::RefactorRewrite), + "Convert closure to fn", + closure.param_list()?.syntax().text_range(), + |builder| { + let closure_name_or_default = closure_name + .as_ref() + .map(|(_, _, it)| it.clone()) + .unwrap_or_else(|| make::name("fun_name")); + let captures = closure_ty.captured_items(ctx.db()); + let capture_tys = closure_ty.capture_types(ctx.db()); + + let mut captures_as_args = Vec::with_capacity(captures.len()); + + let body_root = body.syntax().ancestors().last().unwrap(); + // We need to defer this work because otherwise the text range of elements is being messed up, and + // replacements for the next captures won't work. + let mut capture_usages_replacement_map = Vec::with_capacity(captures.len()); + + for (capture, capture_ty) in std::iter::zip(&captures, &capture_tys) { + // FIXME: Allow configuring the replacement of `self`. + let capture_name = + if capture.local().is_self(ctx.db()) && !capture.has_field_projections() { + make::name("this") + } else { + make::name(&capture.place_to_name(ctx.db())) + }; + + closure_mentioned_generic_params.extend(capture_ty.generic_params(ctx.db())); + + let capture_ty = capture_ty + .display_source_code(ctx.db(), module.into(), true) + .unwrap_or_else(|_| "_".to_owned()); + params.push(make::param( + ast::Pat::IdentPat(make::ident_pat(false, false, capture_name.clone_subtree())), + make::ty(&capture_ty), + )); + + for capture_usage in capture.usages().sources(ctx.db()) { + if capture_usage.file_id() != ctx.file_id() { + // This is from a macro, don't change it. 
+ continue; + } + + let capture_usage_source = capture_usage.source(); + let capture_usage_source = capture_usage_source.to_node(&body_root); + let expr = match capture_usage_source { + Either::Left(expr) => expr, + Either::Right(pat) => { + let Some(expr) = expr_of_pat(pat) else { continue }; + expr + } + }; + let replacement = wrap_capture_in_deref_if_needed( + &expr, + &capture_name, + capture.kind(), + capture_usage.is_ref(), + ) + .clone_for_update(); + capture_usages_replacement_map.push((expr, replacement)); + } + + captures_as_args.push(capture_as_arg(ctx, capture)); + } + + let (closure_type_params, closure_where_clause) = + compute_closure_type_params(ctx, closure_mentioned_generic_params, &closure); + + for (old, new) in capture_usages_replacement_map { + if old == body { + body = new; + } else { + ted::replace(old.syntax(), new.syntax()); + } + } + + let body = if wrap_body_in_block { + make::block_expr([], Some(body)) + } else { + ast::BlockExpr::cast(body.syntax().clone()).unwrap() + }; + + let params = make::param_list(None, params); + let ret_ty = if ret_ty.is_unit() { + None + } else { + let ret_ty = ret_ty + .display_source_code(ctx.db(), module.into(), true) + .unwrap_or_else(|_| "_".to_owned()); + Some(make::ret_type(make::ty(&ret_ty))) + }; + let mut fn_ = make::fn_( + None, + closure_name_or_default.clone(), + closure_type_params, + closure_where_clause, + params, + body, + ret_ty, + is_async, + false, + false, + is_gen, + ); + fn_ = fn_.dedent(IndentLevel::from_token(&fn_.syntax().last_token().unwrap())); + + builder.edit_file(ctx.file_id()); + match &closure_name { + Some((closure_decl, _, _)) => { + fn_ = fn_.indent(closure_decl.indent_level()); + builder.replace(closure_decl.syntax().text_range(), fn_.to_string()); + } + None => { + let Some(top_stmt) = + closure.syntax().ancestors().skip(1).find_map(|ancestor| { + ast::Stmt::cast(ancestor.clone()).map(Either::Left).or_else(|| { + ast::ClosureExpr::cast(ancestor.clone()) + .map(Either::Left) + .or_else(|| ast::BlockExpr::cast(ancestor).map(Either::Right)) + .map(Either::Right) + }) + }) + else { + return; + }; + builder.replace( + closure.syntax().text_range(), + closure_name_or_default.to_string(), + ); + match top_stmt { + Either::Left(stmt) => { + let indent = stmt.indent_level(); + fn_ = fn_.indent(indent); + let range = stmt + .syntax() + .first_token() + .and_then(|token| { + skip_whitespace_token(token.prev_token()?, Direction::Prev) + }) + .map(|it| it.text_range().end()) + .unwrap_or_else(|| stmt.syntax().text_range().start()); + builder.insert(range, format!("\n{indent}{fn_}")); + } + Either::Right(Either::Left(closure_inside_closure)) => { + let Some(closure_body) = closure_inside_closure.body() else { return }; + // FIXME: Maybe we can indent this properly, adding newlines and all, but this is hard. 
+ builder.insert( + closure_body.syntax().text_range().start(), + format!("{{ {fn_} "), + ); + builder + .insert(closure_body.syntax().text_range().end(), " }".to_owned()); + } + Either::Right(Either::Right(block_expr)) => { + let Some(tail_expr) = block_expr.tail_expr() else { return }; + let Some(insert_in) = + tail_expr.syntax().first_token().and_then(|token| { + skip_whitespace_token(token.prev_token()?, Direction::Prev) + }) + else { + return; + }; + let indent = tail_expr.indent_level(); + fn_ = fn_.indent(indent); + builder + .insert(insert_in.text_range().end(), format!("\n{indent}{fn_}")); + } + } + } + } + + handle_calls( + builder, + ctx, + closure_name.as_ref().map(|(_, it, _)| it), + &captures_as_args, + &closure, + ); + + // FIXME: Place the cursor at `fun_name`, like rename does. + }, + )?; + Some(()) +} + +fn compute_closure_type_params( + ctx: &AssistContext<'_>, + mentioned_generic_params: FxHashSet, + closure: &ast::ClosureExpr, +) -> (Option, Option) { + if mentioned_generic_params.is_empty() { + return (None, None); + } + + let mut mentioned_names = mentioned_generic_params + .iter() + .filter_map(|param| match param { + hir::GenericParam::TypeParam(param) => { + Some(param.name(ctx.db()).unescaped().display(ctx.db()).to_smolstr()) + } + hir::GenericParam::ConstParam(param) => { + Some(param.name(ctx.db()).unescaped().display(ctx.db()).to_smolstr()) + } + hir::GenericParam::LifetimeParam(_) => None, + }) + .collect::>(); + + let Some((container_params, container_where, container)) = + closure.syntax().ancestors().find_map(ast::AnyHasGenericParams::cast).and_then( + |container| { + Some((container.generic_param_list()?, container.where_clause(), container)) + }, + ) + else { + return (None, None); + }; + let containing_impl = if ast::AssocItem::can_cast(container.syntax().kind()) { + container + .syntax() + .ancestors() + .find_map(ast::Impl::cast) + .and_then(|impl_| Some((impl_.generic_param_list()?, impl_.where_clause()))) + } else { + None + }; + + let all_params = container_params + .type_or_const_params() + .chain(containing_impl.iter().flat_map(|(param_list, _)| param_list.type_or_const_params())) + .filter_map(|param| Some(param.name()?.text().to_smolstr())) + .collect::>(); + + // A fixpoint algorithm to detect (very roughly) if we need to include a generic parameter + // by checking if it is mentioned by another parameter we need to include. + let mut reached_fixpoint = false; + let mut container_where_bounds_indices = Vec::new(); + let mut impl_where_bounds_indices = Vec::new(); + while !reached_fixpoint { + reached_fixpoint = true; + + let mut insert_name = |syntax: &SyntaxNode| { + let has_name = syntax + .descendants() + .filter_map(ast::NameOrNameRef::cast) + .any(|name| mentioned_names.contains(&*name.text())); + let mut has_new_params = false; + if has_name { + syntax + .descendants() + .filter_map(ast::NameOrNameRef::cast) + .filter(|name| all_params.contains(name.text().trim_start_matches("r#"))) + .for_each(|name| { + if mentioned_names.insert(name.text().trim_start_matches("r#").to_smolstr()) + { + // We do this here so we don't do it if there are only matches that are not in `all_params`. 
+ has_new_params = true; + reached_fixpoint = false; + } + }); + } + has_new_params + }; + + for param in container_params.type_or_const_params() { + insert_name(param.syntax()); + } + for (pred_index, pred) in container_where.iter().flat_map(|it| it.predicates()).enumerate() + { + if insert_name(pred.syntax()) { + container_where_bounds_indices.push(pred_index); + } + } + if let Some((impl_params, impl_where)) = &containing_impl { + for param in impl_params.type_or_const_params() { + insert_name(param.syntax()); + } + for (pred_index, pred) in impl_where.iter().flat_map(|it| it.predicates()).enumerate() { + if insert_name(pred.syntax()) { + impl_where_bounds_indices.push(pred_index); + } + } + } + } + + // Order matters here (for beauty). First the outer impl parameters, then the direct container's. + let include_params = containing_impl + .iter() + .flat_map(|(impl_params, _)| { + impl_params.type_or_const_params().filter(|param| { + param.name().is_some_and(|name| { + mentioned_names.contains(name.text().trim_start_matches("r#")) + }) + }) + }) + .chain(container_params.type_or_const_params().filter(|param| { + param + .name() + .is_some_and(|name| mentioned_names.contains(name.text().trim_start_matches("r#"))) + })) + .map(ast::TypeOrConstParam::into); + let include_where_bounds = containing_impl + .as_ref() + .and_then(|(_, it)| it.as_ref()) + .into_iter() + .flat_map(|where_| { + impl_where_bounds_indices.iter().filter_map(|&index| where_.predicates().nth(index)) + }) + .chain(container_where.iter().flat_map(|where_| { + container_where_bounds_indices + .iter() + .filter_map(|&index| where_.predicates().nth(index)) + })) + .collect::>(); + let where_clause = + (!include_where_bounds.is_empty()).then(|| make::where_clause(include_where_bounds)); + + // FIXME: Consider generic parameters that do not appear in params/return type/captures but + // written explicitly inside the closure. 
+ (Some(make::generic_param_list(include_params)), where_clause) +} + +fn wrap_capture_in_deref_if_needed( + expr: &ast::Expr, + capture_name: &ast::Name, + capture_kind: CaptureKind, + is_ref: bool, +) -> ast::Expr { + fn peel_parens(mut expr: ast::Expr) -> ast::Expr { + loop { + if ast::ParenExpr::can_cast(expr.syntax().kind()) { + let Some(parent) = expr.syntax().parent().and_then(ast::Expr::cast) else { break }; + expr = parent; + } else { + break; + } + } + expr + } + + let capture_name = make::expr_path(make::path_from_text(&capture_name.text())); + if capture_kind == CaptureKind::Move || is_ref { + return capture_name; + } + let expr_parent = expr.syntax().parent().and_then(ast::Expr::cast); + let expr_parent_peeled_parens = expr_parent.map(peel_parens); + let does_autoderef = match expr_parent_peeled_parens { + Some( + ast::Expr::AwaitExpr(_) + | ast::Expr::CallExpr(_) + | ast::Expr::FieldExpr(_) + | ast::Expr::FormatArgsExpr(_) + | ast::Expr::MethodCallExpr(_), + ) => true, + Some(ast::Expr::IndexExpr(parent_expr)) if parent_expr.base().as_ref() == Some(expr) => { + true + } + _ => false, + }; + if does_autoderef { + return capture_name; + } + make::expr_prefix(T![*], capture_name) +} + +fn capture_as_arg(ctx: &AssistContext<'_>, capture: &ClosureCapture) -> ast::Expr { + let place = + parse_expr_from_str(&capture.display_place_source_code(ctx.db()), ctx.file_id().edition()) + .expect("`display_place_source_code()` produced an invalid expr"); + let needs_mut = match capture.kind() { + CaptureKind::SharedRef => false, + CaptureKind::MutableRef | CaptureKind::UniqueSharedRef => true, + CaptureKind::Move => return place, + }; + if let ast::Expr::PrefixExpr(expr) = &place { + if expr.op_kind() == Some(ast::UnaryOp::Deref) { + return expr.expr().expect("`display_place_source_code()` produced an invalid expr"); + } + } + make::expr_ref(place, needs_mut) +} + +fn handle_calls( + builder: &mut SourceChangeBuilder, + ctx: &AssistContext<'_>, + closure_name: Option<&ast::IdentPat>, + captures_as_args: &[ast::Expr], + closure: &ast::ClosureExpr, +) { + if captures_as_args.is_empty() { + return; + } + + match closure_name { + Some(closure_name) => { + let Some(closure_def) = ctx.sema.to_def(closure_name) else { return }; + let closure_usages = Definition::from(closure_def).usages(&ctx.sema).all(); + for (_, usages) in closure_usages { + for usage in usages { + let name = match usage.name { + FileReferenceNode::Name(name) => name.syntax().clone(), + FileReferenceNode::NameRef(name_ref) => name_ref.syntax().clone(), + FileReferenceNode::FormatStringEntry(..) => continue, + FileReferenceNode::Lifetime(_) => { + unreachable!("impossible usage") + } + }; + let Some(expr) = name.parent().and_then(|it| { + ast::Expr::cast( + ast::PathSegment::cast(it)?.parent_path().syntax().parent()?, + ) + }) else { + continue; + }; + handle_call(builder, ctx, expr, captures_as_args); + } + } + } + None => { + handle_call(builder, ctx, ast::Expr::ClosureExpr(closure.clone()), captures_as_args); + } + } +} + +fn handle_call( + builder: &mut SourceChangeBuilder, + ctx: &AssistContext<'_>, + closure_ref: ast::Expr, + captures_as_args: &[ast::Expr], +) -> Option<()> { + let call = + ast::CallExpr::cast(peel_blocks_and_refs_and_parens(closure_ref).syntax().parent()?)?; + let args = call.arg_list()?; + // The really last token is `)`; we need one before that. 
+ let has_trailing_comma = args.syntax().last_token()?.prev_token().is_some_and(|token| { + skip_trivia_token(token, Direction::Prev).is_some_and(|token| token.kind() == T![,]) + }); + let has_existing_args = args.args().next().is_some(); + + let FileRangeWrapper { file_id, range } = ctx.sema.original_range_opt(args.syntax())?; + let first_arg_indent = args.args().next().map(|it| it.indent_level()); + let arg_list_indent = args.indent_level(); + let insert_newlines = + first_arg_indent.is_some_and(|first_arg_indent| first_arg_indent != arg_list_indent); + let indent = + if insert_newlines { first_arg_indent.unwrap().to_string() } else { String::new() }; + // FIXME: This text manipulation seems risky. + let text = ctx.db().file_text(file_id.file_id()); + let mut text = text[..u32::from(range.end()).try_into().unwrap()].trim_end(); + if !text.ends_with(')') { + return None; + } + text = text[..text.len() - 1].trim_end(); + let offset = TextSize::new(text.len().try_into().unwrap()); + + let mut to_insert = String::new(); + if has_existing_args && !has_trailing_comma { + to_insert.push(','); + } + if insert_newlines { + to_insert.push('\n'); + } + let (last_arg, rest_args) = + captures_as_args.split_last().expect("already checked has captures"); + if !insert_newlines && has_existing_args { + to_insert.push(' '); + } + if let Some((first_arg, rest_args)) = rest_args.split_first() { + format_to!(to_insert, "{indent}{first_arg},",); + if insert_newlines { + to_insert.push('\n'); + } + for new_arg in rest_args { + if !insert_newlines { + to_insert.push(' '); + } + format_to!(to_insert, "{indent}{new_arg},",); + if insert_newlines { + to_insert.push('\n'); + } + } + if !insert_newlines { + to_insert.push(' '); + } + } + format_to!(to_insert, "{indent}{last_arg}"); + if has_trailing_comma { + to_insert.push(','); + } + + builder.edit_file(file_id); + builder.insert(offset, to_insert); + + Some(()) +} + +fn peel_blocks_and_refs_and_parens(mut expr: ast::Expr) -> ast::Expr { + loop { + let Some(parent) = expr.syntax().parent() else { break }; + if matches!(parent.kind(), SyntaxKind::PAREN_EXPR | SyntaxKind::REF_EXPR) { + expr = ast::Expr::cast(parent).unwrap(); + continue; + } + if let Some(stmt_list) = ast::StmtList::cast(parent) { + if let Some(block) = stmt_list.syntax().parent().and_then(ast::BlockExpr::cast) { + expr = ast::Expr::BlockExpr(block); + continue; + } + } + break; + } + expr +} + +// FIXME: +// Somehow handle the case of `let Struct { field, .. } = capture`. +// Replacing `capture` with `capture_field` won't work. 
+fn expr_of_pat(pat: ast::Pat) -> Option { + 'find_expr: { + for ancestor in pat.syntax().ancestors() { + if let Some(let_stmt) = ast::LetStmt::cast(ancestor.clone()) { + break 'find_expr let_stmt.initializer(); + } + if ast::MatchArm::can_cast(ancestor.kind()) { + if let Some(match_) = + ancestor.parent().and_then(|it| it.parent()).and_then(ast::MatchExpr::cast) + { + break 'find_expr match_.expr(); + } + } + if ast::ExprStmt::can_cast(ancestor.kind()) { + break; + } + } + None + } +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn handles_unique_captures() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore:copy +fn main() { + let s = &mut true; + let closure = |$0| { *s = false; }; + closure(); +} +"#, + r#" +fn main() { + let s = &mut true; + fn closure(s: &mut bool) { *s = false; } + closure(s); +} +"#, + ); + } + + #[test] + fn multiple_capture_usages() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore:copy +struct A { a: i32, b: bool } +fn main() { + let mut a = A { a: 123, b: false }; + let closure = |$0| { + let b = a.b; + a = A { a: 456, b: true }; + }; + closure(); +} +"#, + r#" +struct A { a: i32, b: bool } +fn main() { + let mut a = A { a: 123, b: false }; + fn closure(a: &mut A) { + let b = a.b; + *a = A { a: 456, b: true }; + } + closure(&mut a); +} +"#, + ); + } + + #[test] + fn changes_names_of_place() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore:copy +struct A { b: &'static B, c: i32 } +struct B(bool, i32); +struct C; +impl C { + fn foo(&self) { + let a = A { b: &B(false, 0), c: 123 }; + let closure = |$0| { + let b = a.b.1; + let c = &*self; + }; + closure(); + } +} +"#, + r#" +struct A { b: &'static B, c: i32 } +struct B(bool, i32); +struct C; +impl C { + fn foo(&self) { + let a = A { b: &B(false, 0), c: 123 }; + fn closure(this: &C, a_b_1: &i32) { + let b = *a_b_1; + let c = this; + } + closure(self, &a.b.1); + } +} +"#, + ); + } + + #[test] + fn self_with_fields_does_not_change_to_this() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore:copy +struct A { b: &'static B, c: i32 } +struct B(bool, i32); +impl A { + fn foo(&self) { + let closure = |$0| { + let b = self.b.1; + }; + closure(); + } +} +"#, + r#" +struct A { b: &'static B, c: i32 } +struct B(bool, i32); +impl A { + fn foo(&self) { + fn closure(self_b_1: &i32) { + let b = *self_b_1; + } + closure(&self.b.1); + } +} +"#, + ); + } + + #[test] + fn replaces_async_closure_with_async_fn() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy, future +fn foo(&self) { + let closure = async |$0| 1; + closure(); +} +"#, + r#" +fn foo(&self) { + async fn closure() -> i32 { + 1 + } + closure(); +} +"#, + ); + } + + #[test] + fn replaces_async_block_with_async_fn() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy, future +fn foo() { + let closure = |$0| async { 1 }; + closure(); +} +"#, + r#" +fn foo() { + async fn closure() -> i32 { 1 } + closure(); +} +"#, + ); + } + + #[test] + #[ignore = "FIXME: we do not do type inference for gen blocks yet"] + fn replaces_gen_block_with_gen_fn() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy, iterator +//- /lib.rs edition:2024 +fn foo() { + let closure = |$0| gen { + yield 1; + }; + closure(); +} +"#, + r#" +fn foo() { + gen fn closure() -> i32 { + yield 1; + } + closure(); +} +"#, + ); + } + + #[test] + fn leaves_block_in_place() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore: 
copy +fn foo() { + let closure = |$0| {}; + closure(); +} +"#, + r#" +fn foo() { + fn closure() {} + closure(); +} +"#, + ); + } + + #[test] + fn wraps_in_block_if_needed() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy +fn foo() { + let a = 1; + let closure = |$0| a; + closure(); +} +"#, + r#" +fn foo() { + let a = 1; + fn closure(a: &i32) -> i32 { + *a + } + closure(&a); +} +"#, + ); + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy +fn foo() { + let closure = |$0| 'label: {}; + closure(); +} +"#, + r#" +fn foo() { + fn closure() { + 'label: {} + } + closure(); +} +"#, + ); + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy +fn foo() { + let closure = |$0| { + const { () } + }; + closure(); +} +"#, + r#" +fn foo() { + fn closure() { + const { () } + } + closure(); +} +"#, + ); + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy +fn foo() { + let closure = |$0| unsafe { }; + closure(); +} +"#, + r#" +fn foo() { + fn closure() { + unsafe { } + } + closure(); +} +"#, + ); + } + + #[test] + fn closure_in_closure() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy +fn foo() { + let a = 1; + || |$0| { let b = &a; }; +} +"#, + r#" +fn foo() { + let a = 1; + || { fn fun_name(a: &i32) { let b = a; } fun_name }; +} +"#, + ); + } + + #[test] + fn closure_in_block() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy +fn foo() { + { + let a = 1; + |$0| { let b = &a; } + }; +} +"#, + r#" +fn foo() { + { + let a = 1; + fn fun_name(a: &i32) { let b = a; } + fun_name + }; +} +"#, + ); + } + + #[test] + fn finds_pat_for_expr() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy +struct A { b: B } +struct B(bool, i32); +fn foo() { + let mut a = A { b: B(true, 0) }; + let closure = |$0| { + let A { b: B(_, ref mut c) } = a; + }; + closure(); +} +"#, + r#" +struct A { b: B } +struct B(bool, i32); +fn foo() { + let mut a = A { b: B(true, 0) }; + fn closure(a_b_1: &mut i32) { + let A { b: B(_, ref mut c) } = a_b_1; + } + closure(&mut a.b.1); +} +"#, + ); + } + + #[test] + fn with_existing_params() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy +fn foo() { + let (mut a, b) = (0.1, "abc"); + let closure = |$0p1: i32, p2: &mut bool| { + a = 1.2; + let c = b; + }; + closure(0, &mut false); +} +"#, + r#" +fn foo() { + let (mut a, b) = (0.1, "abc"); + fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) { + *a = 1.2; + let c = *b; + } + closure(0, &mut false, &mut a, &b); +} +"#, + ); + } + + #[test] + fn with_existing_params_newlines() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy +fn foo() { + let (mut a, b) = (0.1, "abc"); + let closure = |$0p1: i32, p2| { + let _: &mut bool = p2; + a = 1.2; + let c = b; + }; + closure( + 0, + &mut false + ); +} +"#, + r#" +fn foo() { + let (mut a, b) = (0.1, "abc"); + fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) { + let _: &mut bool = p2; + *a = 1.2; + let c = *b; + } + closure( + 0, + &mut false, + &mut a, + &b + ); +} +"#, + ); + } + + #[test] + fn with_existing_params_trailing_comma() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy +fn foo() { + let (mut a, b) = (0.1, "abc"); + let closure = |$0p1: i32, p2| { + let _: &mut bool = p2; + a = 1.2; + let c = b; + }; + closure( + 0, + &mut false, + ); +} +"#, + r#" +fn foo() { + let (mut a, b) = (0.1, "abc"); + fn closure(p1: i32, p2: &mut bool, a: &mut f64, b: &&str) { + let _: &mut bool = p2; + *a = 1.2; + let c = *b; + } + 
closure( + 0, + &mut false, + &mut a, + &b, + ); +} +"#, + ); + } + + #[test] + fn closure_using_generic_params() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy +struct Foo(A, B); +impl, const C: usize> Foo { + fn foo(a: A, b: D) + where + E: From, + { + let closure = |$0c: F| { + let a = B::from(a); + let b = E::from(b); + }; + } +} +"#, + r#" +struct Foo(A, B); +impl, const C: usize> Foo { + fn foo(a: A, b: D) + where + E: From, + { + fn closure, D, E, F>(c: F, a: A, b: D) where E: From { + let a = B::from(a); + let b = E::from(b); + } + } +} +"#, + ); + } + + #[test] + fn closure_in_stmt() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore: copy +fn bar(_: impl FnOnce() -> i32) {} +fn foo() { + let a = 123; + bar(|$0| a); +} +"#, + r#" +fn bar(_: impl FnOnce() -> i32) {} +fn foo() { + let a = 123; + fn fun_name(a: &i32) -> i32 { + *a + } + bar(fun_name); +} +"#, + ); + } + + #[test] + fn unique_and_imm() { + check_assist( + convert_closure_to_fn, + r#" +//- minicore:copy +fn main() { + let a = &mut true; + let closure = |$0| { + let b = &a; + *a = false; + }; + closure(); +} +"#, + r#" +fn main() { + let a = &mut true; + fn closure(a: &mut &mut bool) { + let b = a; + **a = false; + } + closure(&mut a); +} +"#, + ); + } + + #[test] + fn only_applicable_in_param_list() { + check_assist_not_applicable( + convert_closure_to_fn, + r#" +//- minicore:copy +fn main() { + let closure = || { $0 }; +} +"#, + ); + check_assist_not_applicable( + convert_closure_to_fn, + r#" +//- minicore:copy +fn main() { + let $0closure = || { }; +} +"#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index a9399ba6b7f47..d60ffc128550d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -116,6 +116,7 @@ mod handlers { mod bool_to_enum; mod change_visibility; mod convert_bool_then; + mod convert_closure_to_fn; mod convert_comment_block; mod convert_comment_from_or_to_doc; mod convert_from_to_tryfrom; @@ -245,6 +246,7 @@ mod handlers { toggle_async_sugar::sugar_impl_future_into_async, convert_comment_block::convert_comment_block, convert_comment_from_or_to_doc::convert_comment_from_or_to_doc, + convert_closure_to_fn::convert_closure_to_fn, convert_from_to_tryfrom::convert_from_to_tryfrom, convert_integer_literal::convert_integer_literal, convert_into_to_from::convert_into_to_from, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index a2287b2977ddd..e8ec278b3eb81 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -382,6 +382,40 @@ fn main() { ) } +#[test] +fn doctest_convert_closure_to_fn() { + check_doc_test( + "convert_closure_to_fn", + r#####" +//- minicore: copy +struct String; +impl String { + fn new() -> Self {} + fn push_str(&mut self, s: &str) {} +} +fn main() { + let mut s = String::new(); + let closure = |$0a| s.push_str(a); + closure("abc"); +} +"#####, + r#####" +struct String; +impl String { + fn new() -> Self {} + fn push_str(&mut self, s: &str) {} +} +fn main() { + let mut s = String::new(); + fn closure(a: &str, s: &mut String) { + s.push_str(a) + } + closure("abc", &mut s); +} +"#####, + ) +} + #[test] fn doctest_convert_for_loop_with_for_each() { check_doc_test( From 5be5cec23cc2e0ac284caad9d65e4fbcb81157d3 
Mon Sep 17 00:00:00 2001 From: Johannes Hostert Date: Tue, 27 Aug 2024 15:58:38 +0200 Subject: [PATCH 104/124] Add explanation to TB's "piecewise bottom-up" traversal --- .../src/borrow_tracker/tree_borrows/tree.rs | 155 +++++++++++++----- 1 file changed, 111 insertions(+), 44 deletions(-) diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs index 6aba61527d467..728a664680da4 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs @@ -302,7 +302,20 @@ enum ContinueTraversal { SkipSelfAndChildren, } +#[derive(Clone, Copy)] +pub enum ChildrenVisitMode { + VisitChildrenOfAccessed, + SkipChildrenOfAccessed, +} + +enum RecursionState { + BeforeChildren, + AfterChildren, +} + /// Stack of nodes left to explore in a tree traversal. +/// See the docs of `traverse_this_parents_children_other` for details on the +/// traversal order. struct TreeVisitorStack { /// Identifier of the original access. initial: UniIndex, @@ -316,10 +329,12 @@ struct TreeVisitorStack { /// Mutable state of the visit: the tags left to handle. /// Every tag pushed should eventually be handled, /// and the precise order is relevant for diagnostics. - /// Since the traversal is bottom-up, we need to remember - /// whether we're here initially (false) or after visiting all - /// children (true). The bool indicates this. - stack: Vec<(UniIndex, AccessRelatedness, bool)>, + /// Since the traversal is piecewise bottom-up, we need to + /// remember whether we're here initially, or after visiting all children. + /// The last element indicates this. + /// This is just an artifact of how you hand-roll recursion, + /// it does not have a deeper meaning otherwise. + stack: Vec<(UniIndex, AccessRelatedness, RecursionState)>, } impl @@ -362,64 +377,85 @@ where &mut self, this: &mut TreeVisitor<'_>, accessed_node: UniIndex, - push_children_of_accessed: bool, + visit_children: ChildrenVisitMode, ) -> Result<(), OutErr> { + // We want to visit the accessed node's children first. + // However, we will below walk up our parents and push their children (our cousins) + // onto the stack. To ensure correct iteration order, this method thus finishes + // by reversing the stack. This only works if the stack is empty initially. assert!(self.stack.is_empty()); // First, handle accessed node. A bunch of things need to // be handled differently here compared to the further parents // of `accesssed_node`. { self.propagate_at(this, accessed_node, AccessRelatedness::This)?; - if push_children_of_accessed { + if matches!(visit_children, ChildrenVisitMode::VisitChildrenOfAccessed) { let accessed_node = this.nodes.get(accessed_node).unwrap(); + // We `rev()` here because we reverse the entire stack later. for &child in accessed_node.children.iter().rev() { - self.stack.push((child, AccessRelatedness::AncestorAccess, false)); + self.stack.push(( + child, + AccessRelatedness::AncestorAccess, + RecursionState::BeforeChildren, + )); } } } - // Then, handle the accessed node's parent. Here, we need to + // Then, handle the accessed node's parents. Here, we need to // make sure we only mark the "cousin" subtrees for later visitation, // not the subtree that contains the accessed node. 
let mut last_node = accessed_node; while let Some(current) = this.nodes.get(last_node).unwrap().parent { self.propagate_at(this, current, AccessRelatedness::StrictChildAccess)?; let node = this.nodes.get(current).unwrap(); + // We `rev()` here because we reverse the entire stack later. for &child in node.children.iter().rev() { if last_node == child { continue; } - self.stack.push((child, AccessRelatedness::DistantAccess, false)); + self.stack.push(( + child, + AccessRelatedness::DistantAccess, + RecursionState::BeforeChildren, + )); } last_node = current; } + // Reverse the stack, as discussed above. self.stack.reverse(); Ok(()) } fn finish_foreign_accesses(&mut self, this: &mut TreeVisitor<'_>) -> Result<(), OutErr> { - while let Some((idx, rel_pos, is_final)) = self.stack.last_mut() { + while let Some((idx, rel_pos, step)) = self.stack.last_mut() { let idx = *idx; let rel_pos = *rel_pos; - if *is_final { - self.stack.pop(); - self.propagate_at(this, idx, rel_pos)?; - } else { - *is_final = true; - let handle_children = self.should_continue_at(this, idx, rel_pos); - match handle_children { - ContinueTraversal::Recurse => { - // add all children, and also leave the node itself - // on the stack so that it can be visited later. - let node = this.nodes.get(idx).unwrap(); - for &child in node.children.iter() { - self.stack.push((child, rel_pos, false)); + match *step { + // How to do bottom-up traversal, 101: Before you handle a node, you handle all children. + // For this, you must first find the children, which is what this code here does. + RecursionState::BeforeChildren => { + // Next time we come back will be when all the children are handled. + *step = RecursionState::AfterChildren; + // Now push the children, except if we are told to skip this subtree. + let handle_children = self.should_continue_at(this, idx, rel_pos); + match handle_children { + ContinueTraversal::Recurse => { + let node = this.nodes.get(idx).unwrap(); + for &child in node.children.iter() { + self.stack.push((child, rel_pos, RecursionState::BeforeChildren)); + } + } + ContinueTraversal::SkipSelfAndChildren => { + // skip self + self.stack.pop(); + continue; } } - ContinueTraversal::SkipSelfAndChildren => { - // skip self - self.stack.pop(); - continue; - } + } + // All the children are handled, let's actually visit this node + RecursionState::AfterChildren => { + self.stack.pop(); + self.propagate_at(this, idx, rel_pos)?; } } } @@ -437,19 +473,42 @@ where } impl<'tree> TreeVisitor<'tree> { - // Applies `f_propagate` to every vertex of the tree bottom-up in the following order: first - // all ancestors of `start` (starting with `start` itself), then children of `start`, then the rest, - // always going bottom-up. - // This ensures that errors are triggered in the following order - // - first invalid accesses with insufficient permissions, closest to the accessed node first, - // - then protector violations, bottom-up, starting with the children of the accessed node, and then - // going upwards and outwards. - // - // `f_propagate` should follow the following format: for a given `Node` it updates its - // `Permission` depending on the position relative to `start` (given by an - // `AccessRelatedness`). - // `f_continue` is called before on foreign nodes, and describes whether to continue - // with the subtree at that node. 
+ /// Applies `f_propagate` to every vertex of the tree in a piecewise bottom-up way: First, visit + /// all ancestors of `start` (starting with `start` itself), then children of `start`, then the rest, + /// going bottom-up in each of these two "pieces" / sections. + /// This ensures that errors are triggered in the following order + /// - first invalid accesses with insufficient permissions, closest to the accessed node first, + /// - then protector violations, bottom-up, starting with the children of the accessed node, and then + /// going upwards and outwards. + /// + /// The following graphic visualizes it, with numbers indicating visitation order and `start` being + /// the node that is visited first ("1"): + /// + /// ```text + /// 3 + /// /| + /// / | + /// 9 2 + /// | |\ + /// | | \ + /// 8 1 7 + /// / \ + /// 4 6 + /// | + /// 5 + /// ``` + /// + /// `f_propagate` should follow the following format: for a given `Node` it updates its + /// `Permission` depending on the position relative to `start` (given by an + /// `AccessRelatedness`). + /// `f_continue` is called earlier on foreign nodes, and describes whether to even start + /// visiting the subtree at that node. If it e.g. returns `SkipSelfAndChildren` on node 6 + /// above, then nodes 5 _and_ 6 would not be visited by `f_propagate`. It is not used for + /// notes having a child access (nodes 1, 2, 3). + /// + /// Finally, remember that the iteration order is not relevant for UB, it only affects + /// diagnostics. It also affects tree traversal optimizations built on top of this, so + /// those need to be reviewed carefully as well whenever this changes. fn traverse_this_parents_children_other( mut self, start: BorTag, @@ -463,14 +522,18 @@ impl<'tree> TreeVisitor<'tree> { // undergoing a child access. Also pushes the children and the other // cousin nodes (i.e. all nodes undergoing a foreign access) to the stack // to be processed later. - stack.go_upwards_from_accessed(&mut self, start_idx, true)?; + stack.go_upwards_from_accessed( + &mut self, + start_idx, + ChildrenVisitMode::VisitChildrenOfAccessed, + )?; // Now visit all the foreign nodes we remembered earlier. // For this we go bottom-up, but also allow f_continue to skip entire // subtrees from being visited if it would be a NOP. stack.finish_foreign_accesses(&mut self) } - // Like `traverse_this_parents_children_other`, but skips the children of `start`. + /// Like `traverse_this_parents_children_other`, but skips the children of `start`. fn traverse_nonchildren( mut self, start: BorTag, @@ -483,7 +546,11 @@ impl<'tree> TreeVisitor<'tree> { // Visits the accessed node itself, and all its parents, i.e. all nodes // undergoing a child access. Also pushes the other cousin nodes to the // stack, but not the children of the accessed node. - stack.go_upwards_from_accessed(&mut self, start_idx, false)?; + stack.go_upwards_from_accessed( + &mut self, + start_idx, + ChildrenVisitMode::SkipChildrenOfAccessed, + )?; // Now visit all the foreign nodes we remembered earlier. // For this we go bottom-up, but also allow f_continue to skip entire // subtrees from being visited if it would be a NOP. From e26779e784cf36e0dc48add24ca961255f4d8a76 Mon Sep 17 00:00:00 2001 From: Johannes Hostert Date: Fri, 23 Aug 2024 22:22:20 +0200 Subject: [PATCH 105/124] Make Tree Borrows Provenance GC compact the tree Follow-up on #3833 and #3835. In these PRs, the TB GC was fixed to no longer cause a stack overflow. One test that motivated it was the test `fill::horizontal_line` in `tiny_skia`. 
But not causing stack overflows was not a large improvents, since it did not fix the fundamental issue: The tree was too large. The test now ran, but it required gigabytes of memory and hours of time, whereas it finishes within seconds in Stacked Borrows. The problem in that test was that it used [`slice::chunked`](https://doc.rust-lang.org/std/primitive.slice.html#method.chunks) to iterate a slice in chunks. That iterator is written to reborrow at each call to `next`, which creates a linear tree with a bunch of intermediary nodes, which also fragments the `RangeMap` for that allocation. The solution is to now compact the tree, so that these interior nodes are removed. Care is taken to not remove nodes that are protected, or that otherwise restrict their children. --- .../src/borrow_tracker/tree_borrows/perms.rs | 39 +++++++- .../src/borrow_tracker/tree_borrows/tree.rs | 94 ++++++++++++++++--- .../borrow_tracker/tree_borrows/tree/tests.rs | 65 +++++++++++++ .../tests/pass-dep/concurrency/linux-futex.rs | 4 +- 4 files changed, 185 insertions(+), 17 deletions(-) diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs index 5461edb51d3af..85c5ba627dfd8 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs @@ -130,7 +130,7 @@ mod transition { Active => if protected { // We wrote, someone else reads -- that's bad. - // (If this is initialized, this move-to-protected will mean insta-UB.) + // (Since Active is always initialized, this move-to-protected will mean insta-UB.) Disabled } else { // We don't want to disable here to allow read-read reordering: it is crucial @@ -267,6 +267,43 @@ impl Permission { transition::perform_access(kind, rel_pos, old_state, protected) .map(|new_state| PermTransition { from: old_state, to: new_state }) } + + /// During a provenance GC, we want to compact the tree. + /// For this, we want to merge nodes upwards if they have a singleton parent. + /// But we need to be careful: If the parent is Frozen, and the child is Reserved, + /// we can not do such a merge. In general, such a merge is possible if the parent + /// allows similar accesses, and in particular if the parent never causes UB on its + /// own. This is enforced by a test, namely `tree_compacting_is_sound`. See that + /// test for more information. + /// This method is only sound if the parent is not protected. We never attempt to + /// remove protected parents. + pub fn can_be_replaced_by_child(self, child: Self) -> bool { + match (self.inner, child.inner) { + // ReservedIM can be replaced by anything, as it allows all + // transitions. + (ReservedIM, _) => true, + // Reserved (as parent, where conflictedness does not matter) + // can be replaced by all but ReservedIM, + // since ReservedIM alone would survive foreign writes + (ReservedFrz { .. }, ReservedIM) => false, + (ReservedFrz { .. }, _) => true, + // Active can not be replaced by something surviving + // foreign reads and then remaining writable + (Active, ReservedIM) => false, + (Active, ReservedFrz { .. 
}) => false, + (Active, Active) => true, + // Active can be replaced by Frozen, since it is not protected + (Active, Frozen) => true, + (Active, Disabled) => true, + // Frozen can only be replaced by Disabled + (Frozen, Frozen) => true, + (Frozen, Disabled) => true, + (Frozen, _) => false, + // Disabled can not be replaced by anything else + (Disabled, Disabled) => true, + (Disabled, _) => false, + } + } } impl PermTransition { diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs index 728a664680da4..b9521deeb0f98 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs @@ -128,6 +128,22 @@ impl LocationState { Ok(transition) } + /// Like `perform_access`, but ignores the diagnostics, and also is pure. + /// As such, it returns `Some(x)` if the transition succeeded, or `None` + /// if there was an error. + #[allow(unused)] + fn perform_access_no_fluff( + mut self, + access_kind: AccessKind, + rel_pos: AccessRelatedness, + protected: bool, + ) -> Option { + match self.perform_access(access_kind, rel_pos, protected) { + Ok(_) => Some(self), + Err(_) => None, + } + } + // Helper to optimize the tree traversal. // The optimization here consists of observing thanks to the tests // `foreign_read_is_noop_after_foreign_write` and `all_transitions_idempotent`, @@ -840,6 +856,57 @@ impl Tree { node.children.is_empty() && !live.contains(&node.tag) } + /// Checks whether a node can be replaced by its only child. + /// If so, returns the index of said only child. + /// If not, returns none. + fn can_be_replaced_by_single_child( + &self, + idx: UniIndex, + live: &FxHashSet, + ) -> Option { + let node = self.nodes.get(idx).unwrap(); + if node.children.len() != 1 || live.contains(&node.tag) || node.parent.is_none() { + return None; + } + // Since protected nodes are never GC'd (see `borrow_tracker::GlobalStateInner::visit_provenance`), + // we know that `node` is not protected because otherwise `live` would + // have contained `node.tag`. + let child_idx = node.children[0]; + let child = self.nodes.get(child_idx).unwrap(); + for (_, data) in self.rperms.iter_all() { + let parent_perm = + data.get(idx).map(|x| x.permission).unwrap_or_else(|| node.default_initial_perm); + let child_perm = data + .get(child_idx) + .map(|x| x.permission) + .unwrap_or_else(|| child.default_initial_perm); + if !parent_perm.can_be_replaced_by_child(child_perm) { + return None; + } + } + + Some(child_idx) + } + + /// Properly removes a node. + /// The node to be removed should not otherwise be usable. It also + /// should have no children, but this is not checked, so that nodes + /// whose children were rotated somewhere else can be deleted without + /// having to first modify them to clear that array. + /// otherwise (i.e. the GC should have marked it as removable). + fn remove_useless_node(&mut self, this: UniIndex) { + // Due to the API of UniMap we must make sure to call + // `UniValMap::remove` for the key of this node on *all* maps that used it + // (which are `self.nodes` and every range of `self.rperms`) + // before we can safely apply `UniKeyMap::remove` to truly remove + // this tag from the `tag_mapping`. + let node = self.nodes.remove(this).unwrap(); + for (_perms_range, perms) in self.rperms.iter_mut_all() { + perms.remove(this); + } + self.tag_mapping.remove(&node.tag); + } + /// Traverses the entire tree looking for useless tags. 
/// Removes from the tree all useless child nodes of root. /// It will not delete the root itself. @@ -883,23 +950,20 @@ impl Tree { // Remove all useless children. children_of_node.retain_mut(|idx| { if self.is_useless(*idx, live) { - // Note: In the rest of this comment, "this node" refers to `idx`. - // This node has no more children (if there were any, they have already been removed). - // It is also unreachable as determined by the GC, so we can remove it everywhere. - // Due to the API of UniMap we must make sure to call - // `UniValMap::remove` for the key of this node on *all* maps that used it - // (which are `self.nodes` and every range of `self.rperms`) - // before we can safely apply `UniKeyMap::remove` to truly remove - // this tag from the `tag_mapping`. - let node = self.nodes.remove(*idx).unwrap(); - for (_perms_range, perms) in self.rperms.iter_mut_all() { - perms.remove(*idx); - } - self.tag_mapping.remove(&node.tag); - // now delete it + // delete it everywhere else + self.remove_useless_node(*idx); + // and delete it from children_of_node false } else { - // do nothing, but retain + if let Some(nextchild) = self.can_be_replaced_by_single_child(*idx, live) { + // delete the in-between child + self.remove_useless_node(*idx); + // set the new child's parent + self.nodes.get_mut(nextchild).unwrap().parent = Some(*tag); + // save the new child in children_of_node + *idx = nextchild; + } + // retain it true } }); diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/tree/tests.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/tree/tests.rs index 654419c099d3c..f64f7bf8e8c1e 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/tree/tests.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/tree/tests.rs @@ -64,6 +64,71 @@ fn all_read_accesses_commute() { } } +fn as_foreign_or_child(related: AccessRelatedness) -> &'static str { + if related.is_foreign() { "foreign" } else { "child" } +} + +fn as_protected(b: bool) -> &'static str { + if b { " (protected)" } else { "" } +} + +fn as_lazy_or_init(b: bool) -> &'static str { + if b { "initialized" } else { "lazy" } +} + +/// Test that tree compacting (as performed by the GC) is sound. +/// Specifically, the GC will replace a parent by a child if the parent is not +/// protected, and if `can_be_replaced_by_child(parent, child)` is true. +/// To check that this is sound, the function must be a simulation, i.e. +/// if both are accessed, the results must still be in simulation, and also +/// if an access is UB, it must also be UB if done only at the child. 
+#[test] +fn tree_compacting_is_sound() { + // The parent is unprotected + let parent_protected = false; + for ([parent, child], child_protected) in <([LocationState; 2], bool)>::exhaustive() { + if child_protected { + precondition!(child.compatible_with_protector()) + } + precondition!(parent.permission().can_be_replaced_by_child(child.permission())); + for (kind, rel) in <(AccessKind, AccessRelatedness)>::exhaustive() { + let new_parent = parent.perform_access_no_fluff(kind, rel, parent_protected); + let new_child = child.perform_access_no_fluff(kind, rel, child_protected); + match (new_parent, new_child) { + (Some(np), Some(nc)) => { + assert!( + np.permission().can_be_replaced_by_child(nc.permission()), + "`can_be_replaced_by_child` is not a simulation: on a {} {} to a {} parent and a {} {}{} child, the parent becomes {}, the child becomes {}, and these are not in simulation!", + as_foreign_or_child(rel), + kind, + parent.permission(), + as_lazy_or_init(child.is_initialized()), + child.permission(), + as_protected(child_protected), + np.permission(), + nc.permission() + ) + } + (_, None) => { + // the child produced UB, this is fine no matter what the parent does + } + (None, Some(nc)) => { + panic!( + "`can_be_replaced_by_child` does not have the UB property: on a {} {} to a(n) {} parent and a(n) {} {}{} child, only the parent causes UB, while the child becomes {}, and it is not allowed for only the parent to cause UB!", + as_foreign_or_child(rel), + kind, + parent.permission(), + as_lazy_or_init(child.is_initialized()), + child.permission(), + as_protected(child_protected), + nc.permission() + ) + } + } + } + } +} + #[test] #[rustfmt::skip] // Ensure that of 2 accesses happen, one foreign and one a child, and we are protected, that we diff --git a/src/tools/miri/tests/pass-dep/concurrency/linux-futex.rs b/src/tools/miri/tests/pass-dep/concurrency/linux-futex.rs index d21f953672dee..399d6df73ff12 100644 --- a/src/tools/miri/tests/pass-dep/concurrency/linux-futex.rs +++ b/src/tools/miri/tests/pass-dep/concurrency/linux-futex.rs @@ -158,7 +158,9 @@ fn wait_wake() { ); } - assert!((200..1000).contains(&start.elapsed().as_millis())); + // When running this in stress-gc mode, things can take quite long. + // So the timeout is 3000 ms. 
+ assert!((200..3000).contains(&start.elapsed().as_millis())); t.join().unwrap(); } From ae3c270480940edfae6f008f5f3fd2f9a68efe7c Mon Sep 17 00:00:00 2001 From: The Miri Cronjob Bot Date: Wed, 28 Aug 2024 05:05:02 +0000 Subject: [PATCH 106/124] Preparing for merge from rustc --- src/tools/miri/rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index 6124cc327ad09..2c9a861e66e48 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -3f121b9461cce02a703a0e7e450568849dfaa074 +d9a2cc4daee38c63b2f69710ed61d40acc32b709 From abcfc17dc761d1d64859aaad247271500bfafdda Mon Sep 17 00:00:00 2001 From: tiif Date: Tue, 27 Aug 2024 17:15:41 +0800 Subject: [PATCH 107/124] Add test for tokio file io and mpsc --- src/tools/miri/test_dependencies/Cargo.lock | 7 ++++ src/tools/miri/test_dependencies/Cargo.toml | 2 +- .../miri/tests/pass-dep/tokio/file-io.rs | 41 +++++++++++++++++++ .../miri/tests/pass-dep/tokio/mpsc-await.rs | 20 +++++++++ .../tests/pass-dep/tokio/mpsc-await.stdout | 2 + src/tools/miri/tests/pass-dep/tokio/sleep.rs | 3 +- .../miri/tests/pass-dep/tokio/tokio_mvp.rs | 6 --- 7 files changed, 72 insertions(+), 9 deletions(-) create mode 100644 src/tools/miri/tests/pass-dep/tokio/file-io.rs create mode 100644 src/tools/miri/tests/pass-dep/tokio/mpsc-await.rs create mode 100644 src/tools/miri/tests/pass-dep/tokio/mpsc-await.stdout delete mode 100644 src/tools/miri/tests/pass-dep/tokio/tokio_mvp.rs diff --git a/src/tools/miri/test_dependencies/Cargo.lock b/src/tools/miri/test_dependencies/Cargo.lock index bbead8782233c..39d412817289d 100644 --- a/src/tools/miri/test_dependencies/Cargo.lock +++ b/src/tools/miri/test_dependencies/Cargo.lock @@ -44,6 +44,12 @@ version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +[[package]] +name = "bytes" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50" + [[package]] name = "cc" version = "1.1.7" @@ -304,6 +310,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daa4fb1bc778bd6f04cbfc4bb2d06a7396a8f299dc33ea1900cedaa316f467b1" dependencies = [ "backtrace", + "bytes", "libc", "mio", "pin-project-lite", diff --git a/src/tools/miri/test_dependencies/Cargo.toml b/src/tools/miri/test_dependencies/Cargo.toml index ce11a8abb0ea9..c24422df26cf2 100644 --- a/src/tools/miri/test_dependencies/Cargo.toml +++ b/src/tools/miri/test_dependencies/Cargo.toml @@ -20,7 +20,7 @@ tempfile = "3" page_size = "0.6" # Avoid pulling in all of tokio's dependencies. # However, without `net` and `signal`, tokio uses fewer relevant system APIs. 
-tokio = { version = "1.24", features = ["macros", "rt-multi-thread", "time", "net", "fs", "sync", "signal"] } +tokio = { version = "1.24", features = ["macros", "rt-multi-thread", "time", "net", "fs", "sync", "signal", "io-util"] } [target.'cfg(windows)'.dependencies] windows-sys = { version = "0.52", features = [ "Win32_Foundation", "Win32_System_Threading" ] } diff --git a/src/tools/miri/tests/pass-dep/tokio/file-io.rs b/src/tools/miri/tests/pass-dep/tokio/file-io.rs new file mode 100644 index 0000000000000..d14af299cd467 --- /dev/null +++ b/src/tools/miri/tests/pass-dep/tokio/file-io.rs @@ -0,0 +1,41 @@ +//@compile-flags: -Zmiri-disable-isolation +//@only-target-linux: We only support tokio on Linux + +use std::fs::remove_file; +use tokio::fs::{File, OpenOptions}; +use tokio::io::{self, AsyncReadExt, AsyncWriteExt}; + +#[path = "../../utils/mod.rs"] +mod utils; + +#[tokio::main] +async fn main() { + test_create_and_write().await.unwrap(); + test_create_and_read().await.unwrap(); +} + +async fn test_create_and_write() -> io::Result<()> { + let path = utils::prepare("foo.txt"); + let mut file = File::create(&path).await?; + + // Write 10 bytes to the file. + file.write(b"some bytes").await?; + assert_eq!(file.metadata().await.unwrap().len(), 10); + + remove_file(&path).unwrap(); + Ok(()) +} + +async fn test_create_and_read() -> io::Result<()> { + let bytes = b"more bytes"; + let path = utils::prepare_with_content("foo.txt", bytes); + let mut file = OpenOptions::new().read(true).open(&path).await.unwrap(); + let mut buffer = [0u8; 10]; + + // Read the whole file. + file.read(&mut buffer[..]).await?; + assert_eq!(&buffer, b"more bytes"); + + remove_file(&path).unwrap(); + Ok(()) +} diff --git a/src/tools/miri/tests/pass-dep/tokio/mpsc-await.rs b/src/tools/miri/tests/pass-dep/tokio/mpsc-await.rs new file mode 100644 index 0000000000000..7dea07c6e7da9 --- /dev/null +++ b/src/tools/miri/tests/pass-dep/tokio/mpsc-await.rs @@ -0,0 +1,20 @@ +//@only-target-linux: We only support tokio on Linux +use tokio::sync::mpsc; + +#[tokio::main] +async fn main() { + let (tx, mut rx) = mpsc::channel(32); + let tx2 = tx.clone(); + + tokio::spawn(async move { + tx.send("sending from handle").await.unwrap(); + }); + + tokio::spawn(async move { + tx2.send("sending from handle").await.unwrap(); + }); + + while let Some(message) = rx.recv().await { + println!("GOT = {}", message); + } +} diff --git a/src/tools/miri/tests/pass-dep/tokio/mpsc-await.stdout b/src/tools/miri/tests/pass-dep/tokio/mpsc-await.stdout new file mode 100644 index 0000000000000..52dc6483186f3 --- /dev/null +++ b/src/tools/miri/tests/pass-dep/tokio/mpsc-await.stdout @@ -0,0 +1,2 @@ +GOT = sending from handle +GOT = sending from handle diff --git a/src/tools/miri/tests/pass-dep/tokio/sleep.rs b/src/tools/miri/tests/pass-dep/tokio/sleep.rs index e6b02c02d0308..5e63037c8a652 100644 --- a/src/tools/miri/tests/pass-dep/tokio/sleep.rs +++ b/src/tools/miri/tests/pass-dep/tokio/sleep.rs @@ -1,5 +1,4 @@ -//@compile-flags: -Zmiri-permissive-provenance -Zmiri-backtrace=full -//@only-target-x86_64-unknown-linux: support for tokio only on linux and x86 +//@only-target-linux: We only support tokio on Linux use tokio::time::{sleep, Duration, Instant}; diff --git a/src/tools/miri/tests/pass-dep/tokio/tokio_mvp.rs b/src/tools/miri/tests/pass-dep/tokio/tokio_mvp.rs deleted file mode 100644 index 769a7a7d3849d..0000000000000 --- a/src/tools/miri/tests/pass-dep/tokio/tokio_mvp.rs +++ /dev/null @@ -1,6 +0,0 @@ -// Need to disable preemption to stay on the 
supported MVP codepath in mio. -//@compile-flags: -Zmiri-permissive-provenance -Zmiri-preemption-rate=0 -//@only-target-x86_64-unknown-linux: support for tokio exists only on linux and x86 - -#[tokio::main] -async fn main() {} From 51055f71414da15963d547729fb11bf3e952da18 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Wed, 28 Aug 2024 13:00:26 +0300 Subject: [PATCH 108/124] Fix cwd used for proc macro expansion --- src/tools/rust-analyzer/crates/project-model/src/env.rs | 2 +- src/tools/rust-analyzer/crates/project-model/src/tests.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/tools/rust-analyzer/crates/project-model/src/env.rs b/src/tools/rust-analyzer/crates/project-model/src/env.rs index ec0f94b8f013e..ff9d2035f60a7 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/env.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/env.rs @@ -65,7 +65,7 @@ pub(crate) fn inject_rustc_tool_env( // NOTE: Technically we should set this for all crates, but that will worsen the deduplication // logic so for now just keeping it proc-macros ought to be fine. if kind.is_proc_macro() { - env.set("CARGO_RUSTC_CURRENT_DIR", cargo.manifest_path().to_string()); + env.set("CARGO_RUSTC_CURRENT_DIR", cargo.manifest_path().parent().to_string()); } } diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index 920ee7bd4f11d..f540bb94c1963 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -278,7 +278,7 @@ fn crate_graph_dedup() { assert_eq!(regex_crate_graph.iter().count(), 60); crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |(_, a), (_, b)| a == b); - assert_eq!(crate_graph.iter().count(), 119); + assert_eq!(crate_graph.iter().count(), 118); } #[test] From e34f35edd8bad1e6139ea1558689f4ef3ef1ab3b Mon Sep 17 00:00:00 2001 From: Johannes Hostert Date: Tue, 27 Aug 2024 21:09:33 +0200 Subject: [PATCH 109/124] Add benchmark for TB slowdown --- .../bench-cargo-miri/slice-chunked/Cargo.lock | 7 +++++ .../bench-cargo-miri/slice-chunked/Cargo.toml | 8 ++++++ .../slice-chunked/src/main.rs | 26 +++++++++++++++++++ 3 files changed, 41 insertions(+) create mode 100644 src/tools/miri/bench-cargo-miri/slice-chunked/Cargo.lock create mode 100644 src/tools/miri/bench-cargo-miri/slice-chunked/Cargo.toml create mode 100644 src/tools/miri/bench-cargo-miri/slice-chunked/src/main.rs diff --git a/src/tools/miri/bench-cargo-miri/slice-chunked/Cargo.lock b/src/tools/miri/bench-cargo-miri/slice-chunked/Cargo.lock new file mode 100644 index 0000000000000..0f9e5db52d634 --- /dev/null +++ b/src/tools/miri/bench-cargo-miri/slice-chunked/Cargo.lock @@ -0,0 +1,7 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "slice-chunked" +version = "0.1.0" diff --git a/src/tools/miri/bench-cargo-miri/slice-chunked/Cargo.toml b/src/tools/miri/bench-cargo-miri/slice-chunked/Cargo.toml new file mode 100644 index 0000000000000..8fed6ad2f3159 --- /dev/null +++ b/src/tools/miri/bench-cargo-miri/slice-chunked/Cargo.toml @@ -0,0 +1,8 @@ +[package] +name = "slice-chunked" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] diff --git a/src/tools/miri/bench-cargo-miri/slice-chunked/src/main.rs b/src/tools/miri/bench-cargo-miri/slice-chunked/src/main.rs new file mode 100644 index 0000000000000..b498c8bec7582 --- /dev/null +++ b/src/tools/miri/bench-cargo-miri/slice-chunked/src/main.rs @@ -0,0 +1,26 @@ +//! This is a small example using slice::chunks, which creates a very large Tree Borrows tree. +//! Thanks to ##3837, the GC now compacts the tree, so this test can be run in a reasonable time again. +//! The actual code is adapted from tiny_skia, see https://github.com/RazrFalcon/tiny-skia/blob/master/src/pixmap.rs#L121 +//! To make this benchmark demonstrate the effectiveness, run with MIRIFLAGS="-Zmiri-tree-borrows -Zmiri-provenance-gc=100" + +const N: usize = 1000; + +fn input_vec() -> Vec { + vec![0; N] +} + +fn main() { + let data_len = 2 * N; + let mut rgba_data = Vec::with_capacity(data_len); + let img_data = input_vec(); + for slice in img_data.chunks(2) { + let gray = slice[0]; + let alpha = slice[1]; + rgba_data.push(gray); + rgba_data.push(gray); + rgba_data.push(gray); + rgba_data.push(alpha); + } + + assert_eq!(rgba_data.len(), data_len); +} From 84134c61bc4e616bf144bb8498810df2bf7a48e4 Mon Sep 17 00:00:00 2001 From: Johannes Hostert Date: Wed, 28 Aug 2024 13:55:30 +0200 Subject: [PATCH 110/124] address nits --- src/tools/miri/src/borrow_tracker/mod.rs | 6 +++++ .../src/borrow_tracker/tree_borrows/perms.rs | 9 ++++--- .../src/borrow_tracker/tree_borrows/tree.rs | 26 +++++++++++-------- 3 files changed, 26 insertions(+), 15 deletions(-) diff --git a/src/tools/miri/src/borrow_tracker/mod.rs b/src/tools/miri/src/borrow_tracker/mod.rs index 7a3d76a9beb35..9e205cd0064aa 100644 --- a/src/tools/miri/src/borrow_tracker/mod.rs +++ b/src/tools/miri/src/borrow_tracker/mod.rs @@ -71,6 +71,12 @@ pub struct FrameState { impl VisitProvenance for FrameState { fn visit_provenance(&self, visit: &mut VisitWith<'_>) { + // Visit all protected tags. At least in Tree Borrows, + // protected tags can not be GC'd because they still have + // an access coming when the protector ends. Additionally, + // the tree compacting mechanism of TB's GC relies on the fact + // that all protected tags are marked as live for correctness, + // so we _have_ to visit them here. for (id, tag) in &self.protected_tags { visit(Some(*id), Some(*tag)); } diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs index 85c5ba627dfd8..c29bd719b15df 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs @@ -288,18 +288,19 @@ impl Permission { (ReservedFrz { .. }, ReservedIM) => false, (ReservedFrz { .. }, _) => true, // Active can not be replaced by something surviving - // foreign reads and then remaining writable + // foreign reads and then remaining writable. (Active, ReservedIM) => false, (Active, ReservedFrz { .. 
}) => false, + // Replacing a state by itself is always okay, even if the child state is protected. (Active, Active) => true, - // Active can be replaced by Frozen, since it is not protected + // Active can be replaced by Frozen, since it is not protected. (Active, Frozen) => true, (Active, Disabled) => true, - // Frozen can only be replaced by Disabled + // Frozen can only be replaced by Disabled (and itself). (Frozen, Frozen) => true, (Frozen, Disabled) => true, (Frozen, _) => false, - // Disabled can not be replaced by anything else + // Disabled can not be replaced by anything else. (Disabled, Disabled) => true, (Disabled, _) => false, } diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs index b9521deeb0f98..53e722259fdfb 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs @@ -128,10 +128,10 @@ impl LocationState { Ok(transition) } - /// Like `perform_access`, but ignores the diagnostics, and also is pure. - /// As such, it returns `Some(x)` if the transition succeeded, or `None` - /// if there was an error. - #[allow(unused)] + /// Like `perform_access`, but ignores the concrete error cause and also uses state-passing + /// rather than a mutable reference. As such, it returns `Some(x)` if the transition succeeded, + /// or `None` if there was an error. + #[cfg(test)] fn perform_access_no_fluff( mut self, access_kind: AccessKind, @@ -865,14 +865,18 @@ impl Tree { live: &FxHashSet, ) -> Option { let node = self.nodes.get(idx).unwrap(); + + // We never want to replace the root node, as it is also kept in `root_ptr_tags`. if node.children.len() != 1 || live.contains(&node.tag) || node.parent.is_none() { return None; } - // Since protected nodes are never GC'd (see `borrow_tracker::GlobalStateInner::visit_provenance`), + // Since protected nodes are never GC'd (see `borrow_tracker::FrameExtra::visit_provenance`), // we know that `node` is not protected because otherwise `live` would // have contained `node.tag`. let child_idx = node.children[0]; let child = self.nodes.get(child_idx).unwrap(); + // Check that for that one child, `can_be_replaced_by_child` holds for the permission + // on all locations. for (_, data) in self.rperms.iter_all() { let parent_perm = data.get(idx).map(|x| x.permission).unwrap_or_else(|| node.default_initial_perm); @@ -893,7 +897,6 @@ impl Tree { /// should have no children, but this is not checked, so that nodes /// whose children were rotated somewhere else can be deleted without /// having to first modify them to clear that array. - /// otherwise (i.e. the GC should have marked it as removable). fn remove_useless_node(&mut self, this: UniIndex) { // Due to the API of UniMap we must make sure to call // `UniValMap::remove` for the key of this node on *all* maps that used it @@ -950,17 +953,18 @@ impl Tree { // Remove all useless children. children_of_node.retain_mut(|idx| { if self.is_useless(*idx, live) { - // delete it everywhere else + // Delete `idx` node everywhere else. self.remove_useless_node(*idx); - // and delete it from children_of_node + // And delete it from children_of_node. false } else { if let Some(nextchild) = self.can_be_replaced_by_single_child(*idx, live) { - // delete the in-between child + // `nextchild` is our grandchild, and will become our direct child. + // Delete the in-between node, `idx`. self.remove_useless_node(*idx); - // set the new child's parent + // Set the new child's parent. 
self.nodes.get_mut(nextchild).unwrap().parent = Some(*tag); - // save the new child in children_of_node + // Save the new child in children_of_node. *idx = nextchild; } // retain it From 4880eee7810bb080ffe4cfa43e79150c31e7cd09 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Wed, 28 Aug 2024 23:20:46 +0300 Subject: [PATCH 111/124] Fix name resolution of shadowed builtin macro --- .../crates/hir-def/src/nameres/collector.rs | 6 +++++- .../crates/ide/src/goto_definition.rs | 19 +++++++++++++++++++ .../crates/test-utils/src/minicore.rs | 9 +++++++++ 3 files changed, 33 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 22eb5a174d393..96db3db8f0d8a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -1606,7 +1606,11 @@ impl ModCollector<'_, '_> { // Prelude module is always considered to be `#[macro_use]`. if let Some((prelude_module, _use)) = self.def_collector.def_map.prelude { - if prelude_module.krate != krate && is_crate_root { + // Don't insert macros from the prelude into blocks, as they can be shadowed by other macros. + if prelude_module.krate != krate + && is_crate_root + && self.def_collector.def_map.block.is_none() + { cov_mark::hit!(prelude_is_macro_use); self.def_collector.import_macros_from_extern_crate( prelude_module.krate, diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index 5769f2cabc750..971cd3ef585c4 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -2731,4 +2731,23 @@ fn main() { "#, ) } + + #[test] + fn shadow_builtin_macro() { + check( + r#" +//- minicore: column +//- /a.rs crate:a +#[macro_export] +macro_rules! column { () => {} } + // ^^^^^^ + +//- /b.rs crate:b deps:a +use a::column; +fn foo() { + $0column!(); +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs index 7dbc498ead196..3be4469beef0a 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs @@ -63,6 +63,7 @@ //! unsize: sized //! todo: panic //! unimplemented: panic +//! column: #![rustc_coherence_is_core] @@ -1617,6 +1618,14 @@ pub mod error { } // endregion:error +// region:column +#[rustc_builtin_macro] +#[macro_export] +macro_rules! column { + () => {}; +} +// endregion:column + pub mod prelude { pub mod v1 { pub use crate::{ From 2a7ec0b0ad0b9d7aea147694c23b5f52b6dc6fa9 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Wed, 28 Aug 2024 23:55:31 +0300 Subject: [PATCH 112/124] Consider all expressions that autoderef in "Extract variable", not just method and field accesses. 
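To illustrate the idea behind this change with a standalone sketch (this example is not part of the diff below; the `Grid` and `S` types are invented for illustration): when the extracted expression is the base of an index expression, extracting it by value would move it out of its owner, while extracting it behind a reference keeps the original expression compiling because indexing auto-derefs through the reference.

```
use std::ops::Index;

struct Grid(Vec<i32>);

impl Index<usize> for Grid {
    type Output = i32;
    fn index(&self, i: usize) -> &i32 {
        &self.0[i]
    }
}

struct S {
    sub: Grid,
}

fn main() {
    let s = S { sub: Grid(vec![1, 2, 3]) };
    // `let sub = s.sub;` would move the field out of `s`.
    // Taking a reference keeps `s` usable, and `sub[0]` still works
    // because the index operation auto-derefs through `&Grid`.
    let sub = &s.sub;
    assert_eq!(sub[0], 1);
    assert_eq!(s.sub.0.len(), 3);
}
```

The same reasoning applies to call, await and method-call parents: the extracted receiver may need to stay behind a reference for the surrounding expression to keep its meaning.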
--- .../src/handlers/extract_variable.rs | 51 +++++++++++++++++-- 1 file changed, 48 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index 0ef71a386614f..33cd5252813f1 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -58,9 +58,15 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op } let parent = to_extract.syntax().parent().and_then(ast::Expr::cast); - let needs_adjust = parent - .as_ref() - .map_or(false, |it| matches!(it, ast::Expr::FieldExpr(_) | ast::Expr::MethodCallExpr(_))); + // Any expression that autoderefs may need adjustment. + let needs_adjust = parent.as_ref().map_or(false, |it| match it { + ast::Expr::FieldExpr(_) + | ast::Expr::MethodCallExpr(_) + | ast::Expr::CallExpr(_) + | ast::Expr::AwaitExpr(_) => true, + ast::Expr::IndexExpr(index) if index.base().as_ref() == Some(&to_extract) => true, + _ => false, + }); let anchor = Anchor::from(&to_extract)?; let target = to_extract.syntax().text_range(); @@ -1220,6 +1226,45 @@ fn foo(s: &S) { ); } + #[test] + fn test_extract_var_index_deref() { + check_assist( + extract_variable, + r#" +//- minicore: index +struct X; + +impl std::ops::Index for X { + type Output = i32; + fn index(&self) -> &Self::Output { 0 } +} + +struct S { + sub: X +} + +fn foo(s: &S) { + $0s.sub$0[0]; +}"#, + r#" +struct X; + +impl std::ops::Index for X { + type Output = i32; + fn index(&self) -> &Self::Output { 0 } +} + +struct S { + sub: X +} + +fn foo(s: &S) { + let $0sub = &s.sub; + sub[0]; +}"#, + ); + } + #[test] fn test_extract_var_reference_parameter_deep_nesting() { check_assist( From fe5f91ed8e54eeac1cc81930982d01483d745a65 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 29 Aug 2024 00:10:26 +0300 Subject: [PATCH 113/124] Don't add reference when it isn't needed for the "Extract variable" assist I.e. don't generate `let var_name = &foo()`. Anything that creates a new value don't need a reference. That excludes mostly field accesses and indexing. I had a thought that we can also not generate a reference for fields and indexing as long as the type is `Copy`, but sometimes people impl `Copy` even when they don't want to copy the values (e.g. a large type), so I didn't do that. 
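For illustration, a sketch of the resulting behavior for calls, mirroring the `generates_no_ref_on_calls` test added in this patch (the `S`, `bar`, and `foo` names come from that test): extracting `bar()` out of `bar().do_work()` binds the fresh value directly, and `mut` is only added because `do_work` takes `&mut self`:

```
struct S;
impl S {
    fn do_work(&mut self) {}
}
fn bar() -> S { S }

fn foo() {
    // Result of "Extract variable" on `bar()` in `bar().do_work()`:
    // the call produces a new value, so no `&mut` is inserted in front of the initializer.
    let mut bar = bar();
    bar.do_work();
}
```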
--- .../src/handlers/extract_variable.rs | 43 ++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index 33cd5252813f1..e6d1662e68761 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -67,6 +67,15 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op ast::Expr::IndexExpr(index) if index.base().as_ref() == Some(&to_extract) => true, _ => false, }); + let needs_ref = needs_adjust + && matches!( + to_extract, + ast::Expr::FieldExpr(_) + | ast::Expr::IndexExpr(_) + | ast::Expr::MacroExpr(_) + | ast::Expr::ParenExpr(_) + | ast::Expr::PathExpr(_) + ); let anchor = Anchor::from(&to_extract)?; let target = to_extract.syntax().text_range(); @@ -93,10 +102,16 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => { make::ident_pat(false, true, make::name(&var_name)) } + _ if needs_adjust + && !needs_ref + && ty.as_ref().is_some_and(|ty| ty.is_mutable_reference()) => + { + make::ident_pat(false, true, make::name(&var_name)) + } _ => make::ident_pat(false, false, make::name(&var_name)), }; - let to_extract = match ty.as_ref().filter(|_| needs_adjust) { + let to_extract = match ty.as_ref().filter(|_| needs_ref) { Some(receiver_type) if receiver_type.is_mutable_reference() => { make::expr_ref(to_extract, true) } @@ -1503,6 +1518,32 @@ fn foo() { fn foo() { let mut $0var_name = 0; let v = &mut var_name; +}"#, + ); + } + + #[test] + fn generates_no_ref_on_calls() { + check_assist( + extract_variable, + r#" +struct S; +impl S { + fn do_work(&mut self) {} +} +fn bar() -> S { S } +fn foo() { + $0bar()$0.do_work(); +}"#, + r#" +struct S; +impl S { + fn do_work(&mut self) {} +} +fn bar() -> S { S } +fn foo() { + let mut $0bar = bar(); + bar.do_work(); }"#, ); } From 5aefe78301cc5cb2146575cc99cb32fc44dbdc59 Mon Sep 17 00:00:00 2001 From: Wilfred Hughes Date: Wed, 28 Aug 2024 14:15:51 -0700 Subject: [PATCH 114/124] internal: Avoid newlines in fetch workspace errors Most logs lines don't have newlines, ensure fetch workspace errors follow this pattern. 
Before: 2024-08-28T21:11:58.431856Z ERROR FetchWorkspaceError: rust-analyzer failed to discover workspace After: 2024-08-28T21:11:58.431856Z ERROR FetchWorkspaceError: rust-analyzer failed to discover workspace --- src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 1d4ee71e5c1a0..616d6b49351c1 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -689,7 +689,7 @@ impl GlobalState { self.fetch_workspaces_queue .op_completed(Some((workspaces, force_reload_crate_graph))); if let Err(e) = self.fetch_workspace_error() { - error!("FetchWorkspaceError:\n{e}"); + error!("FetchWorkspaceError: {e}"); } self.wants_to_switch = Some("fetched workspace".to_owned()); (Progress::End, None) @@ -729,7 +729,7 @@ impl GlobalState { BuildDataProgress::End(build_data_result) => { self.fetch_build_data_queue.op_completed(build_data_result); if let Err(e) = self.fetch_build_data_error() { - error!("FetchBuildDataError:\n{e}"); + error!("FetchBuildDataError: {e}"); } if self.wants_to_switch.is_none() { From 3ff3d39b96584fc263facc002c723e7c8d8c5a85 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 29 Aug 2024 00:35:45 +0300 Subject: [PATCH 115/124] Also handle deref expressions in "Extract variable" And BTW, remove the parentheses of the extracted expression if there are. --- .../src/handlers/extract_variable.rs | 72 +++++++++++++++---- .../crates/ide-assists/src/tests/generated.rs | 2 +- 2 files changed, 59 insertions(+), 15 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index e6d1662e68761..5ae75bb1ff8e0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -20,7 +20,7 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists}; // -> // ``` // fn main() { -// let $0var_name = (1 + 2); +// let $0var_name = 1 + 2; // var_name * 4; // } // ``` @@ -59,7 +59,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let parent = to_extract.syntax().parent().and_then(ast::Expr::cast); // Any expression that autoderefs may need adjustment. 
- let needs_adjust = parent.as_ref().map_or(false, |it| match it { + let mut needs_adjust = parent.as_ref().map_or(false, |it| match it { ast::Expr::FieldExpr(_) | ast::Expr::MethodCallExpr(_) | ast::Expr::CallExpr(_) @@ -67,15 +67,21 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op ast::Expr::IndexExpr(index) if index.base().as_ref() == Some(&to_extract) => true, _ => false, }); + let mut to_extract_no_ref = peel_parens(to_extract.clone()); let needs_ref = needs_adjust - && matches!( - to_extract, + && match &to_extract_no_ref { ast::Expr::FieldExpr(_) - | ast::Expr::IndexExpr(_) - | ast::Expr::MacroExpr(_) - | ast::Expr::ParenExpr(_) - | ast::Expr::PathExpr(_) - ); + | ast::Expr::IndexExpr(_) + | ast::Expr::MacroExpr(_) + | ast::Expr::ParenExpr(_) + | ast::Expr::PathExpr(_) => true, + ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(ast::UnaryOp::Deref) => { + to_extract_no_ref = prefix.expr()?; + needs_adjust = false; + false + } + _ => false, + }; let anchor = Anchor::from(&to_extract)?; let target = to_extract.syntax().text_range(); @@ -111,19 +117,19 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op _ => make::ident_pat(false, false, make::name(&var_name)), }; - let to_extract = match ty.as_ref().filter(|_| needs_ref) { + let to_extract_no_ref = match ty.as_ref().filter(|_| needs_ref) { Some(receiver_type) if receiver_type.is_mutable_reference() => { - make::expr_ref(to_extract, true) + make::expr_ref(to_extract_no_ref, true) } Some(receiver_type) if receiver_type.is_reference() => { - make::expr_ref(to_extract, false) + make::expr_ref(to_extract_no_ref, false) } - _ => to_extract, + _ => to_extract_no_ref, }; let expr_replace = edit.make_syntax_mut(expr_replace); let let_stmt = - make::let_stmt(ident_pat.into(), None, Some(to_extract)).clone_for_update(); + make::let_stmt(ident_pat.into(), None, Some(to_extract_no_ref)).clone_for_update(); let name_expr = make::expr_path(make::ext::ident_path(&var_name)).clone_for_update(); match anchor { @@ -223,6 +229,14 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op ) } +fn peel_parens(mut expr: ast::Expr) -> ast::Expr { + while let ast::Expr::ParenExpr(parens) = &expr { + let Some(expr_inside) = parens.expr() else { break }; + expr = expr_inside; + } + expr +} + /// Check whether the node is a valid expression which can be extracted to a variable. /// In general that's true for any expression, but in some cases that would produce invalid code. 
fn valid_target_expr(node: SyntaxNode) -> Option { @@ -1547,4 +1561,34 @@ fn foo() { }"#, ); } + + #[test] + fn generates_no_ref_for_deref() { + check_assist( + extract_variable, + r#" +struct S; +impl S { + fn do_work(&mut self) {} +} +fn bar() -> S { S } +fn foo() { + let v = &mut &mut bar(); + $0(**v)$0.do_work(); +} +"#, + r#" +struct S; +impl S { + fn do_work(&mut self) {} +} +fn bar() -> S { S } +fn foo() { + let v = &mut &mut bar(); + let $0s = *v; + s.do_work(); +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index 595ce1affb029..0a3242c738419 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -994,7 +994,7 @@ fn main() { "#####, r#####" fn main() { - let $0var_name = (1 + 2); + let $0var_name = 1 + 2; var_name * 4; } "#####, From b3fcd8e6b256823a9c2ddfd330601064ffaa4558 Mon Sep 17 00:00:00 2001 From: Chayim Refael Friedman Date: Thu, 29 Aug 2024 01:14:25 +0300 Subject: [PATCH 116/124] Consider field attributes when converting from tuple to named struct and the opposite --- .../convert_named_struct_to_tuple_struct.rs | 31 ++++++++++++++++--- .../convert_tuple_struct_to_named_struct.rs | 29 +++++++++++++---- 2 files changed, 49 insertions(+), 11 deletions(-) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs index 370559792763f..8d4ff84084bd3 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs @@ -2,8 +2,8 @@ use either::Either; use ide_db::{defs::Definition, search::FileReference}; use itertools::Itertools; use syntax::{ - ast::{self, AstNode, HasGenericParams, HasVisibility}, - match_ast, SyntaxKind, + ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility}, + match_ast, ted, SyntaxKind, }; use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists}; @@ -87,9 +87,14 @@ fn edit_struct_def( ) { // Note that we don't need to consider macro files in this function because this is // currently not triggered for struct definitions inside macro calls. 
- let tuple_fields = record_fields - .fields() - .filter_map(|f| Some(ast::make::tuple_field(f.visibility(), f.ty()?))); + let tuple_fields = record_fields.fields().filter_map(|f| { + let field = ast::make::tuple_field(f.visibility(), f.ty()?).clone_for_update(); + ted::insert_all( + ted::Position::first_child_of(field.syntax()), + f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(), + ); + Some(field) + }); let tuple_fields = ast::make::tuple_field_list(tuple_fields); let record_fields_text_range = record_fields.syntax().text_range(); @@ -975,6 +980,22 @@ impl HasAssoc for Struct { let Self::Assoc { value } = a; } } +"#, + ); + } + + #[test] + fn fields_with_attrs() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +pub struct $0Foo { + #[my_custom_attr] + value: u32, +} +"#, + r#" +pub struct Foo(#[my_custom_attr] u32); "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index c72bd411d644e..f01b4ea0fd44f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -1,8 +1,8 @@ use either::Either; use ide_db::defs::{Definition, NameRefClass}; use syntax::{ - ast::{self, AstNode, HasGenericParams, HasVisibility}, - match_ast, SyntaxKind, SyntaxNode, + ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility}, + match_ast, ted, SyntaxKind, SyntaxNode, }; use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists}; @@ -83,10 +83,14 @@ fn edit_struct_def( tuple_fields: ast::TupleFieldList, names: Vec, ) { - let record_fields = tuple_fields - .fields() - .zip(names) - .filter_map(|(f, name)| Some(ast::make::record_field(f.visibility(), name, f.ty()?))); + let record_fields = tuple_fields.fields().zip(names).filter_map(|(f, name)| { + let field = ast::make::record_field(f.visibility(), name, f.ty()?).clone_for_update(); + ted::insert_all( + ted::Position::first_child_of(field.syntax()), + f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(), + ); + Some(field) + }); let record_fields = ast::make::record_field_list(record_fields); let tuple_fields_text_range = tuple_fields.syntax().text_range(); @@ -904,6 +908,19 @@ where T: Foo, { pub field1: T } +"#, + ); + } + + #[test] + fn fields_with_attrs() { + check_assist( + convert_tuple_struct_to_named_struct, + r#" +pub struct $0Foo(#[my_custom_attr] u32); +"#, + r#" +pub struct Foo { #[my_custom_attr] field1: u32 } "#, ); } From c2c1bd0c137cf50e578d8749142c21eaf0946f3d Mon Sep 17 00:00:00 2001 From: David Richey Date: Wed, 28 Aug 2024 14:49:53 -0500 Subject: [PATCH 117/124] Fix incorrect symbol definitions in SCIP output --- src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index a8a02712fe291..ceb8534fdf553 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -142,7 +142,9 @@ impl flags::Scip { let mut symbol_roles = Default::default(); if let Some(def) = token.definition { - if def.range == text_range { + // if the the 
range of the def and the range of the token are the same, this must be the definition. + // they also must be in the same file. See https://github.com/rust-lang/rust-analyzer/pull/17988 + if def.file_id == file_id && def.range == text_range { symbol_roles |= scip_types::SymbolRole::Definition as i32; } From b0c3324838a7f5666147536f59ec52e9d9ce33b9 Mon Sep 17 00:00:00 2001 From: The Miri Cronjob Bot Date: Thu, 29 Aug 2024 04:59:24 +0000 Subject: [PATCH 118/124] Preparing for merge from rustc --- src/tools/miri/rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index 2c9a861e66e48..b0910b8116e54 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -d9a2cc4daee38c63b2f69710ed61d40acc32b709 +acb4e8b6251f1d8da36f08e7a70fa23fc581839e From b5be3ab38b50ca0dc32e022feca9993c64e583e6 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Thu, 29 Aug 2024 07:50:18 +0200 Subject: [PATCH 119/124] fix wasm test --- .../function_calls}/target_feature_wasm.rs | 4 ++-- .../fail/function_calls/target_feature_wasm.stderr | 13 +++++++++++++ .../fail/intrinsics/intrinsic_target_feature.rs | 2 +- .../miri/tests/pass/shims/x86/intrinsics-sha.rs | 2 +- .../miri/tests/pass/shims/x86/intrinsics-x86-adx.rs | 2 +- .../tests/pass/shims/x86/intrinsics-x86-aes-vaes.rs | 2 +- .../miri/tests/pass/shims/x86/intrinsics-x86-avx.rs | 2 +- .../tests/pass/shims/x86/intrinsics-x86-avx2.rs | 2 +- .../tests/pass/shims/x86/intrinsics-x86-avx512.rs | 2 +- .../miri/tests/pass/shims/x86/intrinsics-x86-bmi.rs | 2 +- .../shims/x86/intrinsics-x86-pause-without-sse2.rs | 2 +- .../pass/shims/x86/intrinsics-x86-pclmulqdq.rs | 2 +- .../pass/shims/x86/intrinsics-x86-sse3-ssse3.rs | 2 +- .../tests/pass/shims/x86/intrinsics-x86-sse41.rs | 2 +- .../tests/pass/shims/x86/intrinsics-x86-sse42.rs | 2 +- 15 files changed, 28 insertions(+), 15 deletions(-) rename src/tools/miri/tests/{panic => fail/function_calls}/target_feature_wasm.rs (79%) create mode 100644 src/tools/miri/tests/fail/function_calls/target_feature_wasm.stderr diff --git a/src/tools/miri/tests/panic/target_feature_wasm.rs b/src/tools/miri/tests/fail/function_calls/target_feature_wasm.rs similarity index 79% rename from src/tools/miri/tests/panic/target_feature_wasm.rs rename to src/tools/miri/tests/fail/function_calls/target_feature_wasm.rs index c67d2983f78fc..bd400e8824ac3 100644 --- a/src/tools/miri/tests/panic/target_feature_wasm.rs +++ b/src/tools/miri/tests/fail/function_calls/target_feature_wasm.rs @@ -1,4 +1,4 @@ -//@only-target-wasm32: tests WASM-specific behavior +//@only-target-wasm: tests WASM-specific behavior //@compile-flags: -C target-feature=-simd128 fn main() { @@ -6,7 +6,7 @@ fn main() { // But if the compiler actually uses the target feature, it will lead to an error when the module is loaded. // We emulate this with an "unsupported" error. 
assert!(!cfg!(target_feature = "simd128")); - simd128_fn(); + simd128_fn(); //~ERROR: unavailable target features } #[target_feature(enable = "simd128")] diff --git a/src/tools/miri/tests/fail/function_calls/target_feature_wasm.stderr b/src/tools/miri/tests/fail/function_calls/target_feature_wasm.stderr new file mode 100644 index 0000000000000..dc0aca77f9eb5 --- /dev/null +++ b/src/tools/miri/tests/fail/function_calls/target_feature_wasm.stderr @@ -0,0 +1,13 @@ +error: abnormal termination: calling a function that requires unavailable target features: simd128 + --> $DIR/target_feature_wasm.rs:LL:CC + | +LL | simd128_fn(); + | ^^^^^^^^^^^^ calling a function that requires unavailable target features: simd128 + | + = note: BACKTRACE: + = note: inside `main` at $DIR/target_feature_wasm.rs:LL:CC + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to 1 previous error + diff --git a/src/tools/miri/tests/fail/intrinsics/intrinsic_target_feature.rs b/src/tools/miri/tests/fail/intrinsics/intrinsic_target_feature.rs index 860798f2ab156..eb5a16360fff8 100644 --- a/src/tools/miri/tests/fail/intrinsics/intrinsic_target_feature.rs +++ b/src/tools/miri/tests/fail/intrinsics/intrinsic_target_feature.rs @@ -7,7 +7,7 @@ //@ignore-target-avr //@ignore-target-s390x //@ignore-target-thumbv7em -//@ignore-target-wasm32 +//@ignore-target-wasm // Explicitly disable SSE4.1 because it is enabled by default on macOS //@compile-flags: -C target-feature=-sse4.1 diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-sha.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-sha.rs index e65fdc3fbed68..79ac4432dffac 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-sha.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-sha.rs @@ -6,7 +6,7 @@ //@ignore-target-avr //@ignore-target-s390x //@ignore-target-thumbv7em -//@ignore-target-wasm32 +//@ignore-target-wasm //@compile-flags: -C target-feature=+sha,+sse2,+ssse3,+sse4.1 #[cfg(target_arch = "x86")] diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-adx.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-adx.rs index 431e7f2c5eb60..0fd4b7c091097 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-adx.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-adx.rs @@ -6,7 +6,7 @@ //@ignore-target-avr //@ignore-target-s390x //@ignore-target-thumbv7em -//@ignore-target-wasm32 +//@ignore-target-wasm //@compile-flags: -C target-feature=+adx #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-aes-vaes.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-aes-vaes.rs index 7363c75361779..d4d1b6180a7b3 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-aes-vaes.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-aes-vaes.rs @@ -6,7 +6,7 @@ //@ignore-target-avr //@ignore-target-s390x //@ignore-target-thumbv7em -//@ignore-target-wasm32 +//@ignore-target-wasm //@compile-flags: -C target-feature=+aes,+vaes,+avx512f #![feature(avx512_target_feature, stdarch_x86_avx512)] diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-avx.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-avx.rs index 728f57d48f17e..3847a80be9053 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-avx.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-avx.rs @@ -6,7 +6,7 @@ //@ignore-target-avr //@ignore-target-s390x //@ignore-target-thumbv7em -//@ignore-target-wasm32 
+//@ignore-target-wasm //@compile-flags: -C target-feature=+avx #[cfg(target_arch = "x86")] diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-avx2.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-avx2.rs index 80d125bb85650..8b8d8880e3bed 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-avx2.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-avx2.rs @@ -6,7 +6,7 @@ //@ignore-target-avr //@ignore-target-s390x //@ignore-target-thumbv7em -//@ignore-target-wasm32 +//@ignore-target-wasm //@compile-flags: -C target-feature=+avx2 #[cfg(target_arch = "x86")] diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-avx512.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-avx512.rs index 66bfcb20f1c99..a40eddde97c15 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-avx512.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-avx512.rs @@ -6,7 +6,7 @@ //@ignore-target-avr //@ignore-target-s390x //@ignore-target-thumbv7em -//@ignore-target-wasm32 +//@ignore-target-wasm //@compile-flags: -C target-feature=+avx512f,+avx512vl,+avx512bitalg,+avx512vpopcntdq #![feature(avx512_target_feature)] diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-bmi.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-bmi.rs index 33424117c4542..02f57f4b451e1 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-bmi.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-bmi.rs @@ -6,7 +6,7 @@ //@ignore-target-avr //@ignore-target-s390x //@ignore-target-thumbv7em -//@ignore-target-wasm32 +//@ignore-target-wasm //@compile-flags: -C target-feature=+bmi1,+bmi2 #[cfg(target_arch = "x86")] diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-pause-without-sse2.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-pause-without-sse2.rs index c8b92fd545850..60da88df046ef 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-pause-without-sse2.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-pause-without-sse2.rs @@ -6,7 +6,7 @@ //@ignore-target-avr //@ignore-target-s390x //@ignore-target-thumbv7em -//@ignore-target-wasm32 +//@ignore-target-wasm //@compile-flags: -C target-feature=-sse2 #[cfg(target_arch = "x86")] diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-pclmulqdq.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-pclmulqdq.rs index 2f242dd5379d2..86ac5835a168b 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-pclmulqdq.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-pclmulqdq.rs @@ -6,7 +6,7 @@ //@ignore-target-avr //@ignore-target-s390x //@ignore-target-thumbv7em -//@ignore-target-wasm32 +//@ignore-target-wasm //@compile-flags: -C target-feature=+pclmulqdq #[cfg(target_arch = "x86")] diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse3-ssse3.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse3-ssse3.rs index 7566be4431b7f..0b3be7f3cbd00 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse3-ssse3.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse3-ssse3.rs @@ -6,7 +6,7 @@ //@ignore-target-avr //@ignore-target-s390x //@ignore-target-thumbv7em -//@ignore-target-wasm32 +//@ignore-target-wasm // SSSE3 implicitly enables SSE3 //@compile-flags: -C target-feature=+ssse3 diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse41.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse41.rs index 06607f3fd59e1..8cd4e6308e29f 100644 --- 
a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse41.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse41.rs @@ -6,7 +6,7 @@ //@ignore-target-avr //@ignore-target-s390x //@ignore-target-thumbv7em -//@ignore-target-wasm32 +//@ignore-target-wasm //@compile-flags: -C target-feature=+sse4.1 #[cfg(target_arch = "x86")] diff --git a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse42.rs b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse42.rs index 3ac53ea8b933d..c87eb51877431 100644 --- a/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse42.rs +++ b/src/tools/miri/tests/pass/shims/x86/intrinsics-x86-sse42.rs @@ -6,7 +6,7 @@ //@ignore-target-avr //@ignore-target-s390x //@ignore-target-thumbv7em -//@ignore-target-wasm32 +//@ignore-target-wasm //@compile-flags: -C target-feature=+sse4.2 #[cfg(target_arch = "x86")] From 14ec1202e5ac51cd9c91ca5bb1f68a6cf688e3aa Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 29 Aug 2024 08:08:00 +0200 Subject: [PATCH 120/124] fix: Fix proc-macro server crashing when parsing a non-lexable string into a TokenStream --- .../proc-macro-srv/src/server_impl/rust_analyzer_span.rs | 8 +++++++- .../crates/proc-macro-srv/src/server_impl/token_id.rs | 8 +++++++- .../crates/proc-macro-srv/src/server_impl/token_stream.rs | 2 +- 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs index 552d99f51ba12..bfc3ea5d03a56 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs @@ -142,7 +142,13 @@ impl server::TokenStream for RaSpanServer { stream.is_empty() } fn from_str(&mut self, src: &str) -> Self::TokenStream { - Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") + Self::TokenStream::from_str(src, self.call_site).unwrap_or_else(|e| { + Self::TokenStream::from_str( + &format!("compile_error!(\"failed to parse str to token stream: {e}\")"), + self.call_site, + ) + .unwrap() + }) } fn to_string(&mut self, stream: &Self::TokenStream) -> String { stream.to_string() diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs index 7720c6d83c38d..e478b1c853be7 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_id.rs @@ -131,7 +131,13 @@ impl server::TokenStream for TokenIdServer { stream.is_empty() } fn from_str(&mut self, src: &str) -> Self::TokenStream { - Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") + Self::TokenStream::from_str(src, self.call_site).unwrap_or_else(|e| { + Self::TokenStream::from_str( + &format!("compile_error!(\"failed to parse str to token stream: {e}\")"), + self.call_site, + ) + .unwrap() + }) } fn to_string(&mut self, stream: &Self::TokenStream) -> String { stream.to_string() diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs index 4d8d496418bf8..dbcb5a3143a65 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs @@ 
-131,7 +131,7 @@ pub(super) mod token_stream { call_site, src, ) - .ok_or("lexing error")?; + .ok_or_else(|| format!("lexing error: {src}"))?; Ok(TokenStream::with_subtree(subtree)) } From 03d67458eef99fc23ef8b0bf701bc75e6f672d1c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 29 Aug 2024 08:41:43 +0200 Subject: [PATCH 121/124] fix: Fix TokenStream::to_string implementation dropping quotation marks --- .../proc-macro-srv/src/server_impl/rust_analyzer_span.rs | 9 +++++++-- src/tools/rust-analyzer/crates/tt/src/lib.rs | 2 +- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs index bfc3ea5d03a56..d508c19dd7195 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs @@ -507,12 +507,17 @@ mod tests { close: span, kind: tt::DelimiterKind::Brace, }, - token_trees: Box::new([]), + token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { + kind: tt::LitKind::Str, + symbol: Symbol::intern("string"), + suffix: None, + span, + }))]), }), ], }; - assert_eq!(s.to_string(), "struct T {}"); + assert_eq!(s.to_string(), "struct T {\"string\"}"); } #[test] diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs index 7b72f9ff108d3..8d915d0a51e32 100644 --- a/src/tools/rust-analyzer/crates/tt/src/lib.rs +++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs @@ -603,7 +603,7 @@ pub fn pretty(tkns: &[TokenTree]) -> String { TokenTree::Leaf(Leaf::Ident(ident)) => { format!("{}{}", ident.is_raw.as_str(), ident.sym) } - TokenTree::Leaf(Leaf::Literal(literal)) => literal.symbol.as_str().to_owned(), + TokenTree::Leaf(Leaf::Literal(literal)) => format!("{literal}"), TokenTree::Leaf(Leaf::Punct(punct)) => format!("{}", punct.char), TokenTree::Subtree(subtree) => { let content = pretty(&subtree.token_trees); From 2ec71c8e852146528ebc74eda761ccdd3b52303a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Thu, 29 Aug 2024 10:02:05 +0300 Subject: [PATCH 122/124] Preparing for merge from rust-lang/rust --- src/tools/rust-analyzer/rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version index d4f1703d850c5..5f21a43f3be5e 100644 --- a/src/tools/rust-analyzer/rust-version +++ b/src/tools/rust-analyzer/rust-version @@ -1 +1 @@ -80eb5a8e910e5185d47cdefe3732d839c78a5e7e +6cf068db566de080dfa7ed24a216ea3aed2b98ce From 518b41c2bdaaf7d6a511802d212a6dc6e72ff7d0 Mon Sep 17 00:00:00 2001 From: Jubilee Young Date: Thu, 29 Aug 2024 12:13:19 -0700 Subject: [PATCH 123/124] Try latest backtrace --- library/backtrace | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/backtrace b/library/backtrace index fc37b22dc8a38..230570f2dac80 160000 --- a/library/backtrace +++ b/library/backtrace @@ -1 +1 @@ -Subproject commit fc37b22dc8a384d93f6b7b4817266eec6433875e +Subproject commit 230570f2dac80a601f5c0b30da00cc9480bd35eb From 0453d9bee8f7a6035d76baefce52b35db188387f Mon Sep 17 00:00:00 2001 From: The Miri Cronjob Bot Date: Fri, 30 Aug 2024 05:02:07 +0000 Subject: [PATCH 124/124] Preparing for merge from rustc --- src/tools/miri/rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/miri/rust-version 
b/src/tools/miri/rust-version index b0910b8116e54..c33d9ad86149b 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -acb4e8b6251f1d8da36f08e7a70fa23fc581839e +0d634185dfddefe09047881175f35c65d68dcff1