diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 764617eafb7bb..b56dee3efb554 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -61,15 +61,16 @@ use std::ops::Deref; use base_db::{CrateId, Edition, FileId}; use hir_expand::{ - name::Name, proc_macro::ProcMacroKind, HirFileId, InFile, MacroCallId, MacroDefId, + name::Name, proc_macro::ProcMacroKind, ErasedAstId, HirFileId, InFile, MacroCallId, MacroDefId, }; use itertools::Itertools; use la_arena::Arena; use rustc_hash::{FxHashMap, FxHashSet}; -use span::FileAstId; +use span::{FileAstId, ROOT_ERASED_FILE_AST_ID}; use stdx::format_to; use syntax::{ast, SmolStr}; use triomphe::Arc; +use tt::TextRange; use crate::{ db::DefDatabase, @@ -677,6 +678,25 @@ impl ModuleData { } } + pub fn definition_source_range(&self, db: &dyn DefDatabase) -> InFile { + match &self.origin { + &ModuleOrigin::File { definition, .. } | &ModuleOrigin::CrateRoot { definition } => { + InFile::new( + definition.into(), + ErasedAstId::new(definition.into(), ROOT_ERASED_FILE_AST_ID) + .to_range(db.upcast()), + ) + } + &ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new( + definition_tree_id.file_id(), + AstId::new(definition_tree_id.file_id(), definition).to_range(db.upcast()), + ), + ModuleOrigin::BlockExpr { block, .. } => { + InFile::new(block.file_id, block.to_range(db.upcast())) + } + } + } + /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. /// `None` for the crate root or block. pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option> { @@ -684,6 +704,13 @@ impl ModuleData { let value = decl.to_node(db.upcast()); Some(InFile { file_id: decl.file_id, value }) } + + /// Returns the range which declares this module, either a `mod foo;` or a `mod foo {}`. + /// `None` for the crate root or block. + pub fn declaration_source_range(&self, db: &dyn DefDatabase) -> Option> { + let decl = self.origin.declaration()?; + Some(InFile { file_id: decl.file_id, value: decl.to_range(db.upcast()) }) + } } #[derive(Debug, Clone, PartialEq, Eq)] diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs index a500c24ce8811..04a4851ddb723 100644 --- a/crates/hir-expand/src/files.rs +++ b/crates/hir-expand/src/files.rs @@ -10,7 +10,7 @@ use syntax::{AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, use crate::{ db::{self, ExpandDatabase}, - map_node_range_up, span_for_offset, MacroFileIdExt, + map_node_range_up, map_node_range_up_rooted, span_for_offset, MacroFileIdExt, }; /// `InFile` stores a value of `T` inside a particular file/syntax tree. @@ -38,6 +38,9 @@ impl AstId { pub fn to_node(&self, db: &dyn ExpandDatabase) -> N { self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)) } + pub fn to_range(&self, db: &dyn ExpandDatabase) -> TextRange { + self.to_ptr(db).text_range() + } pub fn to_in_file_node(&self, db: &dyn ExpandDatabase) -> crate::InFile { crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))) } @@ -49,6 +52,9 @@ impl AstId { pub type ErasedAstId = crate::InFile; impl ErasedAstId { + pub fn to_range(&self, db: &dyn ExpandDatabase) -> TextRange { + self.to_ptr(db).text_range() + } pub fn to_ptr(&self, db: &dyn ExpandDatabase) -> SyntaxNodePtr { db.ast_id_map(self.file_id).get_erased(self.value) } @@ -173,24 +179,8 @@ impl InFile<&SyntaxNode> { /// /// For attributes and derives, this will point back to the attribute only. 
/// For the entire item use [`InFile::original_file_range_full`]. - pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, - HirFileIdRepr::MacroFile(mac_file) => { - if let Some((res, ctxt)) = - map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range()) - { - // FIXME: Figure out an API that makes proper use of ctx, this only exists to - // keep pre-token map rewrite behaviour. - if ctxt.is_root() { - return res; - } - } - // Fall back to whole macro call. - let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); - loc.kind.original_call_range(db) - } - } + pub fn original_file_range_rooted(self, db: &dyn db::ExpandDatabase) -> FileRange { + self.map(SyntaxNode::text_range).original_node_file_range_rooted(db) } /// Falls back to the macro call range if the node cannot be mapped up fully. @@ -198,23 +188,7 @@ impl InFile<&SyntaxNode> { self, db: &dyn db::ExpandDatabase, ) -> FileRange { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, - HirFileIdRepr::MacroFile(mac_file) => { - if let Some((res, ctxt)) = - map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range()) - { - // FIXME: Figure out an API that makes proper use of ctx, this only exists to - // keep pre-token map rewrite behaviour. - if ctxt.is_root() { - return res; - } - } - // Fall back to whole macro call. - let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); - loc.kind.original_call_range_with_body(db) - } - } + self.map(SyntaxNode::text_range).original_node_file_range_with_macro_call_body(db) } /// Attempts to map the syntax node back up its macro calls. @@ -222,17 +196,10 @@ impl InFile<&SyntaxNode> { self, db: &dyn db::ExpandDatabase, ) -> Option<(FileRange, SyntaxContextId)> { - match self.file_id.repr() { - HirFileIdRepr::FileId(file_id) => { - Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT)) - } - HirFileIdRepr::MacroFile(mac_file) => { - map_node_range_up(db, &db.expansion_span_map(mac_file), self.value.text_range()) - } - } + self.map(SyntaxNode::text_range).original_node_file_range_opt(db) } - pub fn original_syntax_node( + pub fn original_syntax_node_rooted( self, db: &dyn db::ExpandDatabase, ) -> Option> { @@ -242,25 +209,21 @@ impl InFile<&SyntaxNode> { HirFileIdRepr::FileId(file_id) => { return Some(InRealFile { file_id, value: self.value.clone() }) } - HirFileIdRepr::MacroFile(m) => m, + HirFileIdRepr::MacroFile(m) if m.is_attr_macro(db) => m, + _ => return None, }; - if !file_id.is_attr_macro(db) { - return None; - } - let (FileRange { file_id, range }, ctx) = - map_node_range_up(db, &db.expansion_span_map(file_id), self.value.text_range())?; + let FileRange { file_id, range } = + map_node_range_up_rooted(db, &db.expansion_span_map(file_id), self.value.text_range())?; - // FIXME: Figure out an API that makes proper use of ctx, this only exists to - // keep pre-token map rewrite behavior. - if !ctx.is_root() { - return None; - } - - let anc = db.parse(file_id).syntax_node().covering_element(range); let kind = self.value.kind(); - // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes? 
- let value = anc.ancestors().find(|it| it.kind() == kind)?; + let value = db + .parse(file_id) + .syntax_node() + .covering_element(range) + .ancestors() + .take_while(|it| it.text_range() == range) + .find(|it| it.kind() == kind)?; Some(InRealFile::new(file_id, value)) } } @@ -355,8 +318,8 @@ impl InFile { match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value }, HirFileIdRepr::MacroFile(mac_file) => { - match map_node_range_up(db, &db.expansion_span_map(mac_file), self.value) { - Some((it, SyntaxContextId::ROOT)) => it, + match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) { + Some(it) => it, _ => { let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); loc.kind.original_call_range(db) @@ -366,6 +329,24 @@ impl InFile { } } + pub fn original_node_file_range_with_macro_call_body( + self, + db: &dyn db::ExpandDatabase, + ) -> FileRange { + match self.file_id.repr() { + HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value }, + HirFileIdRepr::MacroFile(mac_file) => { + match map_node_range_up_rooted(db, &db.expansion_span_map(mac_file), self.value) { + Some(it) => it, + _ => { + let loc = db.lookup_intern_macro_call(mac_file.macro_call_id); + loc.kind.original_call_range_with_body(db) + } + } + } + } + } + pub fn original_node_file_range_opt( self, db: &dyn db::ExpandDatabase, @@ -395,18 +376,12 @@ impl InFile { return None; } - let (FileRange { file_id, range }, ctx) = map_node_range_up( + let FileRange { file_id, range } = map_node_range_up_rooted( db, &db.expansion_span_map(file_id), self.value.syntax().text_range(), )?; - // FIXME: Figure out an API that makes proper use of ctx, this only exists to - // keep pre-token map rewrite behaviour. - if !ctx.is_root() { - return None; - } - // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes? let anc = db.parse(file_id).syntax_node().covering_element(range); let value = anc.ancestors().find_map(N::cast)?; diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 8136e7f0470af..924f0da6bdee8 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -25,13 +25,16 @@ pub mod span_map; mod cfg_process; mod fixup; use attrs::collect_attrs; +use rustc_hash::FxHashMap; use triomphe::Arc; use std::{fmt, hash::Hash}; use base_db::{salsa::impl_intern_value_trivial, CrateId, Edition, FileId}; use either::Either; -use span::{ErasedFileAstId, FileRange, HirFileIdRepr, Span, SyntaxContextData, SyntaxContextId}; +use span::{ + ErasedFileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor, SyntaxContextData, SyntaxContextId, +}; use syntax::{ ast::{self, AstNode}, SyntaxNode, SyntaxToken, TextRange, TextSize, @@ -683,6 +686,8 @@ impl ExpansionInfo { } /// Maps the passed in file range down into a macro expansion if it is the input to a macro call. + /// + /// Note this does a linear search through the entire backing vector of the spanmap. pub fn map_range_down( &self, span: Span, @@ -793,7 +798,34 @@ impl ExpansionInfo { } } +/// Maps up the text range out of the expansion hierarchy back into the original file its from only +/// considering the root spans contained. +/// Unlike [`map_node_range_up`], this will not return `None` if any anchors or syntax contexts differ. 
+pub fn map_node_range_up_rooted( + db: &dyn ExpandDatabase, + exp_map: &ExpansionSpanMap, + range: TextRange, +) -> Option { + let mut spans = exp_map.spans_for_range(range).filter(|span| span.ctx.is_root()); + let Span { range, anchor, ctx: _ } = spans.next()?; + let mut start = range.start(); + let mut end = range.end(); + + for span in spans { + if span.anchor != anchor { + return None; + } + start = start.min(span.range.start()); + end = end.max(span.range.end()); + } + let anchor_offset = + db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start(); + Some(FileRange { file_id: anchor.file_id, range: TextRange::new(start, end) + anchor_offset }) +} + /// Maps up the text range out of the expansion hierarchy back into the original file its from. +/// +/// this will return `None` if any anchors or syntax contexts differ. pub fn map_node_range_up( db: &dyn ExpandDatabase, exp_map: &ExpansionSpanMap, @@ -819,6 +851,29 @@ pub fn map_node_range_up( )) } +/// Maps up the text range out of the expansion hierarchy back into the original file its from. +/// This version will aggregate the ranges of all spans with the same anchor and syntax context. +pub fn map_node_range_up_aggregated( + db: &dyn ExpandDatabase, + exp_map: &ExpansionSpanMap, + range: TextRange, +) -> FxHashMap<(SpanAnchor, SyntaxContextId), TextRange> { + let mut map = FxHashMap::default(); + for span in exp_map.spans_for_range(range) { + let range = map.entry((span.anchor, span.ctx)).or_insert_with(|| span.range); + *range = TextRange::new( + range.start().min(span.range.start()), + range.end().max(span.range.end()), + ); + } + for ((anchor, _), range) in &mut map { + let anchor_offset = + db.ast_id_map(anchor.file_id.into()).get_erased(anchor.ast_id).text_range().start(); + *range += anchor_offset; + } + map +} + /// Looks up the span at the given offset. pub fn span_for_offset( db: &dyn ExpandDatabase, diff --git a/crates/hir-expand/src/span_map.rs b/crates/hir-expand/src/span_map.rs index ef86be67096a2..29fec163347f9 100644 --- a/crates/hir-expand/src/span_map.rs +++ b/crates/hir-expand/src/span_map.rs @@ -1,5 +1,5 @@ //! Span maps for real files and macro expansions. 
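The up-mapping helpers above boil down to the same range arithmetic: collect the expansion spans covering the given range, group them by anchor (and, for the aggregated variant, by syntax context as well), union the ranges within each group, and finally shift the result by the anchor node's offset in the original file. A minimal standalone sketch of the grouping step, with plain std types standing in for SpanAnchor, SyntaxContextId and TextRange (all names below are simplified placeholders, not rust-analyzer's actual API, and the anchor-offset shift is omitted):

use std::collections::HashMap;

// Simplified stand-ins for SpanAnchor, SyntaxContextId and TextRange.
type Anchor = u32;
type Ctx = u32;
type Range = (u32, u32); // (start, end)

struct Span {
    anchor: Anchor,
    ctx: Ctx,
    range: Range,
}

// Analogue of `map_node_range_up_aggregated`: one unioned range per (anchor, ctx) pair.
fn aggregate(spans: &[Span]) -> HashMap<(Anchor, Ctx), Range> {
    let mut map: HashMap<(Anchor, Ctx), Range> = HashMap::new();
    for span in spans {
        let entry = map.entry((span.anchor, span.ctx)).or_insert(span.range);
        entry.0 = entry.0.min(span.range.0);
        entry.1 = entry.1.max(span.range.1);
    }
    map
}

// Analogue of `map_node_range_up_rooted`: only root-context spans are considered,
// and the lookup fails as soon as two of them disagree on the anchor.
fn rooted(spans: &[Span], root_ctx: Ctx) -> Option<(Anchor, Range)> {
    let mut iter = spans.iter().filter(|span| span.ctx == root_ctx);
    let first = iter.next()?;
    let (mut start, mut end) = first.range;
    for span in iter {
        if span.anchor != first.anchor {
            return None;
        }
        start = start.min(span.range.0);
        end = end.max(span.range.1);
    }
    Some((first.anchor, (start, end)))
}

fn main() {
    let spans = [
        Span { anchor: 1, ctx: 0, range: (4, 8) },
        Span { anchor: 1, ctx: 0, range: (10, 14) },
        Span { anchor: 1, ctx: 7, range: (2, 3) }, // non-root context, ignored by `rooted`
    ];
    assert_eq!(rooted(&spans, 0), Some((1, (4, 14))));
    assert_eq!(aggregate(&spans).len(), 2);
}

In the real helpers the unioned range is additionally offset by `get_erased(anchor.ast_id).text_range().start()`, so the returned range is absolute within the anchor's file.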
-use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span}; +use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContextId}; use syntax::{AstNode, TextRange}; use triomphe::Arc; @@ -7,7 +7,7 @@ pub use span::RealSpanMap; use crate::db::ExpandDatabase; -pub type ExpansionSpanMap = span::SpanMap; +pub type ExpansionSpanMap = span::SpanMap; /// Spanmap for a macro file or a real file #[derive(Clone, Debug, PartialEq, Eq)] diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs index 2153a87d34712..d699067b5a645 100644 --- a/crates/hir-ty/src/tests.rs +++ b/crates/hir-ty/src/tests.rs @@ -164,7 +164,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour Some(value) => value, None => continue, }; - let range = node.as_ref().original_file_range(&db); + let range = node.as_ref().original_file_range_rooted(&db); if let Some(expected) = types.remove(&range) { let actual = if display_source { ty.display_source_code(&db, def.module(&db), true).unwrap() @@ -180,7 +180,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour Some(value) => value, None => continue, }; - let range = node.as_ref().original_file_range(&db); + let range = node.as_ref().original_file_range_rooted(&db); if let Some(expected) = types.remove(&range) { let actual = if display_source { ty.display_source_code(&db, def.module(&db), true).unwrap() @@ -211,7 +211,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour }) else { continue; }; - let range = node.as_ref().original_file_range(&db); + let range = node.as_ref().original_file_range_rooted(&db); let actual = format!( "expected {}, got {}", mismatch.expected.display_test(&db), diff --git a/crates/hir/src/has_source.rs b/crates/hir/src/has_source.rs index d10884517f92d..7cdcdd76d1851 100644 --- a/crates/hir/src/has_source.rs +++ b/crates/hir/src/has_source.rs @@ -9,6 +9,7 @@ use hir_def::{ }; use hir_expand::{HirFileId, InFile}; use syntax::ast; +use tt::TextRange; use crate::{ db::HirDatabase, Adt, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl, @@ -37,6 +38,12 @@ impl Module { def_map[self.id.local_id].definition_source(db.upcast()) } + /// Returns a node which defines this module. That is, a file or a `mod foo {}` with items. + pub fn definition_source_range(self, db: &dyn HirDatabase) -> InFile { + let def_map = self.id.def_map(db.upcast()); + def_map[self.id.local_id].definition_source_range(db.upcast()) + } + pub fn definition_source_file_id(self, db: &dyn HirDatabase) -> HirFileId { let def_map = self.id.def_map(db.upcast()); def_map[self.id.local_id].definition_source_file_id() @@ -71,6 +78,13 @@ impl Module { let def_map = self.id.def_map(db.upcast()); def_map[self.id.local_id].declaration_source(db.upcast()) } + + /// Returns a text range which declares this module, either a `mod foo;` or a `mod foo {}`. + /// `None` for the crate root. + pub fn declaration_source_range(self, db: &dyn HirDatabase) -> Option> { + let def_map = self.id.def_map(db.upcast()); + def_map[self.id.local_id].declaration_source_range(db.upcast()) + } } impl HasSource for Field { diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 02a5d2afaca6b..ca47b37d68808 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -960,7 +960,7 @@ impl<'db> SemanticsImpl<'db> { /// macro file the node resides in. 
pub fn original_range(&self, node: &SyntaxNode) -> FileRange { let node = self.find_file(node); - node.original_file_range(self.db.upcast()) + node.original_file_range_rooted(self.db.upcast()) } /// Attempts to map the node out of macro expanded files returning the original file range. @@ -984,9 +984,9 @@ impl<'db> SemanticsImpl<'db> { /// Attempts to map the node out of macro expanded files. /// This only work for attribute expansions, as other ones do not have nodes as input. - pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option { + pub fn original_syntax_node_rooted(&self, node: &SyntaxNode) -> Option { let InFile { file_id, .. } = self.find_file(node); - InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map( + InFile::new(file_id, node).original_syntax_node_rooted(self.db.upcast()).map( |InRealFile { file_id, value }| { self.cache(find_root(&value), file_id.into()); value @@ -997,7 +997,7 @@ impl<'db> SemanticsImpl<'db> { pub fn diagnostics_display_range(&self, src: InFile) -> FileRange { let root = self.parse_or_expand(src.file_id); let node = src.map(|it| it.to_node(&root)); - node.as_ref().original_file_range(self.db.upcast()) + node.as_ref().original_file_range_rooted(self.db.upcast()) } fn token_ancestors_with_macros( diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs index d9205eb54194e..3b88836c24bd4 100644 --- a/crates/hir/src/symbols.rs +++ b/crates/hir/src/symbols.rs @@ -49,7 +49,7 @@ impl DeclarationLocation { return FileRange { file_id, range: self.ptr.text_range() }; } let node = resolve_node(db, self.hir_file_id, &self.ptr); - node.as_ref().original_file_range(db.upcast()) + node.as_ref().original_file_range_rooted(db.upcast()) } } diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index fe2f8ed6417d1..ff051fa870f5f 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -198,7 +198,7 @@ fn get_adt_source( adt: &hir::Adt, fn_name: &str, ) -> Option<(Option, FileId)> { - let range = adt.source(ctx.sema.db)?.syntax().original_file_range(ctx.sema.db); + let range = adt.source(ctx.sema.db)?.syntax().original_file_range_rooted(ctx.sema.db); let file = ctx.sema.parse(range.file_id); let adt_source = ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?; diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index 50ec4347dc2a1..a90fe83857e93 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -206,7 +206,7 @@ pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< let fn_body = fn_source.value.body()?; let param_list = fn_source.value.param_list()?; - let FileRange { file_id, range } = fn_source.syntax().original_file_range(ctx.sema.db); + let FileRange { file_id, range } = fn_source.syntax().original_file_range_rooted(ctx.sema.db); if file_id == ctx.file_id() && range.contains(ctx.offset()) { cov_mark::hit!(inline_call_recursive); return None; diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs index 7394d63be5868..7946784150224 100644 --- a/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -96,7 +96,7 @@ fn complete_trait_impl_name( .parent() } }?; - let item = 
ctx.sema.original_syntax_node(&item)?; + let item = ctx.sema.original_syntax_node_rooted(&item)?; // item -> ASSOC_ITEM_LIST -> IMPL let impl_def = ast::Impl::cast(item.parent()?.parent()?)?; let replacement_range = { diff --git a/crates/ide-completion/src/completions/mod_.rs b/crates/ide-completion/src/completions/mod_.rs index ecf5b29e2c0c3..c2faa2d939d87 100644 --- a/crates/ide-completion/src/completions/mod_.rs +++ b/crates/ide-completion/src/completions/mod_.rs @@ -2,7 +2,7 @@ use std::iter; -use hir::{HirFileIdExt, Module, ModuleSource}; +use hir::{HirFileIdExt, Module}; use ide_db::{ base_db::{SourceDatabaseExt, VfsPath}, FxHashSet, RootDatabase, SymbolKind, @@ -57,7 +57,7 @@ pub(crate) fn complete_mod( .collect::>(); let module_declaration_file = - current_module.declaration_source(ctx.db).map(|module_declaration_source_file| { + current_module.declaration_source_range(ctx.db).map(|module_declaration_source_file| { module_declaration_source_file.file_id.original_file(ctx.db) }); @@ -148,9 +148,7 @@ fn module_chain_to_containing_module_file( ) -> Vec { let mut path = iter::successors(Some(current_module), |current_module| current_module.parent(db)) - .take_while(|current_module| { - matches!(current_module.definition_source(db).value, ModuleSource::Module(_)) - }) + .take_while(|current_module| current_module.is_inline(db)) .collect::>(); path.reverse(); path diff --git a/crates/ide-db/src/helpers.rs b/crates/ide-db/src/helpers.rs index 4ac8a7c4c4aa3..db44b1e72325a 100644 --- a/crates/ide-db/src/helpers.rs +++ b/crates/ide-db/src/helpers.rs @@ -71,7 +71,7 @@ pub fn visit_file_defs( let mut defs: VecDeque<_> = module.declarations(db).into(); while let Some(def) = defs.pop_front() { if let ModuleDef::Module(submodule) = def { - if let hir::ModuleSource::Module(_) = submodule.definition_source(db).value { + if submodule.is_inline(db) { defs.extend(submodule.declarations(db)); submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into())); } diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index 006d8882c11e0..a3ecc1036059a 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -190,22 +190,15 @@ impl SearchScope { let mut entries = IntMap::default(); let (file_id, range) = { - let InFile { file_id, value } = module.definition_source(db); + let InFile { file_id, value } = module.definition_source_range(db); if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db) { (file_id, Some(call_source.text_range())) } else { - ( - file_id.original_file(db), - match value { - ModuleSource::SourceFile(_) => None, - ModuleSource::Module(it) => Some(it.syntax().text_range()), - ModuleSource::BlockExpr(it) => Some(it.syntax().text_range()), - }, - ) + (file_id.original_file(db), Some(value)) } }; - entries.insert(file_id, range); + entries.entry(file_id).or_insert(range); let mut to_visit: Vec<_> = module.children(db).collect(); while let Some(module) = to_visit.pop() { diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs index db28928a24ea2..a0fad7c850c6b 100644 --- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs @@ -38,7 +38,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option Option { let adt_source = adt_union.source(ctx.sema.db)?; let adt_syntax = adt_source.syntax(); - let Some(field_list) = adt_source.value.record_field_list() else { - return None; - 
}; - let range = adt_syntax.original_file_range(ctx.sema.db); + let field_list = adt_source.value.record_field_list()?; + let range = adt_syntax.original_file_range_rooted(ctx.sema.db); let field_name = make::name(field_name); let (offset, record_field) = @@ -144,7 +142,7 @@ fn add_field_to_struct_fix( ) -> Option { let struct_source = adt_struct.source(ctx.sema.db)?; let struct_syntax = struct_source.syntax(); - let struct_range = struct_syntax.original_file_range(ctx.sema.db); + let struct_range = struct_syntax.original_file_range_rooted(ctx.sema.db); let field_list = struct_source.value.field_list(); match field_list { Some(FieldList::RecordFieldList(field_list)) => { diff --git a/crates/ide-ssr/src/matching.rs b/crates/ide-ssr/src/matching.rs index fb98e95684740..cfda1c692aea8 100644 --- a/crates/ide-ssr/src/matching.rs +++ b/crates/ide-ssr/src/matching.rs @@ -125,9 +125,12 @@ impl<'db, 'sema> Matcher<'db, 'sema> { let match_state = Matcher { sema, restrict_range: *restrict_range, rule }; // First pass at matching, where we check that node types and idents match. match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?; - match_state.validate_range(&sema.original_range(code))?; + let file_range = sema + .original_range_opt(code) + .ok_or(MatchFailed { reason: Some("def site definition".to_owned()) })?; + match_state.validate_range(&file_range)?; let mut the_match = Match { - range: sema.original_range(code), + range: file_range, matched_node: code.clone(), placeholder_values: FxHashMap::default(), ignored_comments: Vec::new(), @@ -175,7 +178,10 @@ impl<'db, 'sema> Matcher<'db, 'sema> { self.check_constraint(constraint, code)?; } if let Phase::Second(matches_out) = phase { - let original_range = self.sema.original_range(code); + let original_range = self + .sema + .original_range_opt(code) + .ok_or(MatchFailed { reason: Some("def site definition".to_owned()) })?; // We validated the range for the node when we started the match, so the placeholder // probably can't fail range validation, but just to be safe... self.validate_range(&original_range)?; @@ -487,7 +493,13 @@ impl<'db, 'sema> Matcher<'db, 'sema> { match_out.placeholder_values.insert( placeholder.ident.clone(), PlaceholderMatch::from_range(FileRange { - file_id: self.sema.original_range(code).file_id, + file_id: self + .sema + .original_range_opt(code) + .ok_or(MatchFailed { + reason: Some("def site definition".to_owned()), + })? + .file_id, range: first_matched_token .text_range() .cover(last_matched_token.text_range()), diff --git a/crates/ide-ssr/src/search.rs b/crates/ide-ssr/src/search.rs index 8d2d796122a2d..55a49da242404 100644 --- a/crates/ide-ssr/src/search.rs +++ b/crates/ide-ssr/src/search.rs @@ -190,12 +190,9 @@ impl MatchFinder<'_> { // When matching within a macro expansion, we only want to allow matches of // nodes that originated entirely from within the token tree of the macro call. // i.e. we don't want to match something that came from the macro itself. - self.slow_scan_node( - &expanded, - rule, - &Some(self.sema.original_range(tt.syntax())), - matches_out, - ); + if let Some(range) = self.sema.original_range_opt(tt.syntax()) { + self.slow_scan_node(&expanded, rule, &Some(range), matches_out); + } } } } @@ -227,7 +224,7 @@ impl MatchFinder<'_> { // There is no range restriction. 
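// The ide-ssr changes in this hunk and the previous one follow one pattern:
// `sema.original_range(code)`, which silently falls back to the surrounding
// macro call range when a node cannot be mapped back to a real file, is
// replaced with `sema.original_range_opt(code)`, so an unmappable node
// (e.g. one originating from the macro definition itself) now fails the match
// or the range check instead. A schematic sketch of that control-flow change,
// using hypothetical stand-in types rather than the real Semantics API:

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct FileRange { file_id: u32, start: u32, end: u32 }

// Stand-in for a syntax node: it either maps back to a real-file range or not.
struct Node { upmapped: Option<FileRange>, call_site: FileRange }

// Old behaviour: always yield *some* range by falling back to the macro call.
fn original_range(node: &Node) -> FileRange {
    node.upmapped.unwrap_or(node.call_site)
}

// New behaviour: surface the failure so the caller can reject the match.
fn original_range_opt(node: &Node) -> Option<FileRange> {
    node.upmapped
}

fn node_in_restriction(node: &Node, restriction: FileRange) -> bool {
    // With the old fallback, a def-site node was "located" at the call site and
    // could satisfy a range restriction it should never have satisfied.
    let Some(range) = original_range_opt(node) else { return false };
    range.file_id == restriction.file_id
        && restriction.start <= range.start
        && range.end <= restriction.end
}

fn main() {
    let restriction = FileRange { file_id: 1, start: 0, end: 100 };
    let def_site_node =
        Node { upmapped: None, call_site: FileRange { file_id: 1, start: 10, end: 20 } };
    // The fallback places the node inside the restriction...
    assert_eq!(original_range(&def_site_node), def_site_node.call_site);
    // ...while the Option-based check correctly rejects it.
    assert!(!node_in_restriction(&def_site_node, restriction));
}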
return true; } - let node_range = self.sema.original_range(code); + let Some(node_range) = self.sema.original_range_opt(code) else { return false }; for range in &self.restrict_ranges { if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) { return true; diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index caa10a1ed6895..2123c98605db2 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -852,7 +852,7 @@ fn orig_range( value: &SyntaxNode, ) -> UpmappingResult<(FileRange, Option)> { UpmappingResult { - call_site: (InFile::new(hir_file, value).original_file_range(db), None), + call_site: (InFile::new(hir_file, value).original_file_range_rooted(db), None), def_site: None, } } diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 5fe46444ff41c..79324bf3877ba 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -138,7 +138,9 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { }) { if let Some(def) = def { let file_id = match def { - Definition::Module(it) => it.declaration_source(db).map(|src| src.file_id), + Definition::Module(it) => { + it.declaration_source_range(db).map(|src| src.file_id) + } Definition::Function(it) => it.source(db).map(|src| src.file_id), _ => None, }; @@ -269,15 +271,10 @@ fn find_related_tests_in_module( Some(it) => it, _ => return, }; - let mod_source = parent_module.definition_source(sema.db); - let range = match &mod_source.value { - hir::ModuleSource::Module(m) => m.syntax().text_range(), - hir::ModuleSource::BlockExpr(b) => b.syntax().text_range(), - hir::ModuleSource::SourceFile(f) => f.syntax().text_range(), - }; + let mod_source = parent_module.definition_source_range(sema.db); let file_id = mod_source.file_id.original_file(sema.db); - let mod_scope = SearchScope::file_range(FileRange { file_id, range }); + let mod_scope = SearchScope::file_range(FileRange { file_id, range: mod_source.value }); let fn_pos = FilePosition { file_id, offset: fn_name.syntax().text_range().start() }; find_related_tests(sema, syntax, fn_pos, Some(mod_scope), tests) } @@ -405,14 +402,15 @@ fn runnable_mod_outline_definition( let attrs = def.attrs(sema.db); let cfg = attrs.cfg(); - match def.definition_source(sema.db).value { - hir::ModuleSource::SourceFile(_) => Some(Runnable { + if def.as_source_file_id(sema.db).is_some() { + Some(Runnable { use_name_in_title: false, nav: def.to_nav(sema.db).call_site(), kind: RunnableKind::TestMod { path }, cfg, - }), - _ => None, + }) + } else { + None } } diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index c7ffb0a41a519..cb8f8dff50d36 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -23,8 +23,11 @@ pub trait SpanMapper { fn span_for(&self, range: TextRange) -> S; } -impl SpanMapper for SpanMap { - fn span_for(&self, range: TextRange) -> S { +impl SpanMapper> for SpanMap +where + SpanData: Span, +{ + fn span_for(&self, range: TextRange) -> SpanData { self.span_at(range.start()) } } @@ -121,7 +124,7 @@ where pub fn token_tree_to_syntax_node( tt: &tt::Subtree>, entry_point: parser::TopEntryPoint, -) -> (Parse, SpanMap>) +) -> (Parse, SpanMap) where SpanData: Span, Ctx: Copy, @@ -833,7 +836,7 @@ where cursor: Cursor<'a, SpanData>, text_pos: TextSize, inner: SyntaxTreeBuilder, - token_map: SpanMap>, + token_map: SpanMap, } impl<'a, Ctx> TtTreeSink<'a, Ctx> @@ -850,7 +853,7 @@ where } } - fn finish(mut self) 
-> (Parse, SpanMap>) { + fn finish(mut self) -> (Parse, SpanMap) { self.token_map.finish(); (self.inner.finish(), self.token_map) } diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index ef184032bfb09..5c474908e7aed 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -1053,7 +1053,7 @@ fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id: }; let root = db.parse_or_expand(src.file_id); let node = src.map(|e| e.to_node(&root).syntax().clone()); - let original_range = node.as_ref().original_file_range(db); + let original_range = node.as_ref().original_file_range_rooted(db); let path = vfs.file_path(original_range.file_id); let line_index = db.line_index(original_range.file_id); let text_range = original_range.range; @@ -1069,7 +1069,7 @@ fn location_csv_pat(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, pat_id: Pa }; let root = db.parse_or_expand(src.file_id); let node = src.map(|e| e.to_node(&root).syntax().clone()); - let original_range = node.as_ref().original_file_range(db); + let original_range = node.as_ref().original_file_range_rooted(db); let path = vfs.file_path(original_range.file_id); let line_index = db.line_index(original_range.file_id); let text_range = original_range.range; @@ -1088,7 +1088,7 @@ fn expr_syntax_range<'a>( if let Ok(src) = src { let root = db.parse_or_expand(src.file_id); let node = src.map(|e| e.to_node(&root).syntax().clone()); - let original_range = node.as_ref().original_file_range(db); + let original_range = node.as_ref().original_file_range_rooted(db); let path = vfs.file_path(original_range.file_id); let line_index = db.line_index(original_range.file_id); let text_range = original_range.range; @@ -1109,7 +1109,7 @@ fn pat_syntax_range<'a>( if let Ok(src) = src { let root = db.parse_or_expand(src.file_id); let node = src.map(|e| e.to_node(&root).syntax().clone()); - let original_range = node.as_ref().original_file_range(db); + let original_range = node.as_ref().original_file_range_rooted(db); let path = vfs.file_path(original_range.file_id); let line_index = db.line_index(original_range.file_id); let text_range = original_range.range; diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs index 0fe3275863d70..b2624762dff05 100644 --- a/crates/span/src/lib.rs +++ b/crates/span/src/lib.rs @@ -56,6 +56,12 @@ pub struct SpanData { pub ctx: Ctx, } +impl SpanData { + pub fn eq_ignoring_ctx(self, other: Self) -> bool { + self.anchor == other.anchor && self.range == other.range + } +} + impl Span { #[deprecated = "dummy spans will panic if surfaced incorrectly, as such they should be replaced appropriately"] pub const DUMMY: Self = SpanData { diff --git a/crates/span/src/map.rs b/crates/span/src/map.rs index 9f8101c816e5f..7b42551099e9b 100644 --- a/crates/span/src/map.rs +++ b/crates/span/src/map.rs @@ -7,17 +7,20 @@ use stdx::{always, itertools::Itertools}; use syntax::{TextRange, TextSize}; use vfs::FileId; -use crate::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; +use crate::{ + ErasedFileAstId, Span, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID, +}; /// Maps absolute text ranges for the corresponding file to the relevant span data. 
#[derive(Debug, PartialEq, Eq, Clone, Hash)] pub struct SpanMap { - spans: Vec<(TextSize, S)>, - // FIXME: Should be - // spans: Vec<(TextSize, crate::SyntaxContextId)>, + spans: Vec<(TextSize, SpanData)>, } -impl SpanMap { +impl SpanMap +where + SpanData: Copy, +{ /// Creates a new empty [`SpanMap`]. pub fn empty() -> Self { Self { spans: Vec::new() } @@ -34,7 +37,7 @@ impl SpanMap { } /// Pushes a new span onto the [`SpanMap`]. - pub fn push(&mut self, offset: TextSize, span: S) { + pub fn push(&mut self, offset: TextSize, span: SpanData) { if cfg!(debug_assertions) { if let Some(&(last_offset, _)) = self.spans.last() { assert!( @@ -49,13 +52,12 @@ impl SpanMap { /// Returns all [`TextRange`]s that correspond to the given span. /// /// Note this does a linear search through the entire backing vector. - pub fn ranges_with_span(&self, span: S) -> impl Iterator + '_ + pub fn ranges_with_span(&self, span: SpanData) -> impl Iterator + '_ where - S: Eq, + S: Copy, { - // FIXME: This should ignore the syntax context! self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| { - if s != span { + if !s.eq_ignoring_ctx(span) { return None; } let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0); @@ -64,21 +66,21 @@ impl SpanMap { } /// Returns the span at the given position. - pub fn span_at(&self, offset: TextSize) -> S { + pub fn span_at(&self, offset: TextSize) -> SpanData { let entry = self.spans.partition_point(|&(it, _)| it <= offset); self.spans[entry].1 } /// Returns the spans associated with the given range. /// In other words, this will return all spans that correspond to all offsets within the given range. - pub fn spans_for_range(&self, range: TextRange) -> impl Iterator + '_ { + pub fn spans_for_range(&self, range: TextRange) -> impl Iterator> + '_ { let (start, end) = (range.start(), range.end()); let start_entry = self.spans.partition_point(|&(it, _)| it <= start); let end_entry = self.spans[start_entry..].partition_point(|&(it, _)| it <= end); // FIXME: this might be wrong? self.spans[start_entry..][..end_entry].iter().map(|&(_, s)| s) } - pub fn iter(&self) -> impl Iterator + '_ { + pub fn iter(&self) -> impl Iterator)> + '_ { self.spans.iter().copied() } }
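To make the `SpanMap` changes concrete, here is a small self-contained model of the lookup scheme used above: the map stores `(end_offset, span)` pairs sorted by offset, `span_at` binary-searches them with `partition_point`, and `ranges_with_span` compares spans while ignoring the syntax context via `eq_ignoring_ctx`. The types are simplified placeholders for illustration, not the real `span` crate API:

// Simplified stand-ins for SpanAnchor / SyntaxContextId / SpanData.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Span {
    anchor: u32,
    range: (u32, u32),
    ctx: u32,
}

impl Span {
    // Mirrors SpanData::eq_ignoring_ctx from crates/span/src/lib.rs.
    fn eq_ignoring_ctx(self, other: Self) -> bool {
        self.anchor == other.anchor && self.range == other.range
    }
}

// Each entry records the *end* offset of the region the span covers;
// the region starts where the previous entry ended.
struct SpanMap {
    spans: Vec<(u32, Span)>,
}

impl SpanMap {
    fn span_at(&self, offset: u32) -> Span {
        let entry = self.spans.partition_point(|&(end, _)| end <= offset);
        self.spans[entry].1
    }

    // Linear scan, matching spans regardless of their syntax context.
    fn ranges_with_span(&self, span: Span) -> Vec<(u32, u32)> {
        self.spans
            .iter()
            .enumerate()
            .filter(|(_, (_, s))| s.eq_ignoring_ctx(span))
            .map(|(idx, &(end, _))| {
                let start = if idx == 0 { 0 } else { self.spans[idx - 1].0 };
                (start, end)
            })
            .collect()
    }
}

fn main() {
    let a = Span { anchor: 1, range: (0, 5), ctx: 0 };
    let b = Span { anchor: 1, range: (0, 5), ctx: 9 }; // same span, different context
    let map = SpanMap { spans: vec![(10, a), (20, b), (30, a)] };
    assert_eq!(map.span_at(15), b);
    assert_eq!(map.ranges_with_span(a), vec![(0, 10), (10, 20), (20, 30)]);
}

This also shows why `ranges_with_span` wanted `eq_ignoring_ctx` in the first place: hygiene gives otherwise identical spans different syntax contexts, and a plain equality check would miss those occurrences.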