X-Git-Url: https://git.lizzy.rs/?a=blobdiff_plain;f=crates%2Fhir%2Fsrc%2Fsemantics.rs;h=75f6b025779587ec935b97b721005d16a043ba8f;hb=749eeef3e75a3acc993fdd454ebadaa7e319509a;hp=302d666318386294996130a9dc247a952b682fc6;hpb=274d9f90aed4320f223fe98916fc0cd71ad6f92f;p=rust.git

diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 302d6663183..75f6b025779 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -18,16 +18,16 @@ use syntax::{
     algo::skip_trivia_token,
     ast::{self, HasAttrs, HasGenericParams, HasLoopBody},
-    match_ast, AstNode, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize,
+    match_ast, AstNode, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
 };
 
 use crate::{
     db::HirDatabase,
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
     source_analyzer::{resolve_hir_path, resolve_hir_path_as_macro, SourceAnalyzer},
-    Access, AssocItem, Callable, ConstParam, Crate, Field, Function, HasSource, HirFileId, Impl,
-    InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path, ScopeDef, Trait,
-    Type, TypeAlias, TypeParam, VariantDef,
+    Access, AssocItem, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasSource,
+    HirFileId, Impl, InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path,
+    ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -43,6 +43,8 @@ pub enum PathResolution {
     SelfType(Impl),
     Macro(MacroDef),
     AssocItem(AssocItem),
+    BuiltinAttr(BuiltinAttr),
+    ToolModule(ToolModule),
 }
 
 impl PathResolution {
@@ -63,9 +65,11 @@ fn in_type_ns(&self) -> Option<TypeNs> {
             PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
                 Some(TypeNs::TypeAliasId((*alias).into()))
             }
-            PathResolution::Local(_) | PathResolution::Macro(_) | PathResolution::ConstParam(_) => {
-                None
-            }
+            PathResolution::BuiltinAttr(_)
+            | PathResolution::ToolModule(_)
+            | PathResolution::Local(_)
+            | PathResolution::Macro(_)
+            | PathResolution::ConstParam(_) => None,
             PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
             PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
             PathResolution::AssocItem(AssocItem::Const(_) | AssocItem::Function(_)) => None,
@@ -143,6 +147,10 @@ pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
         self.imp.parse(file_id)
     }
 
+    pub fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+        self.imp.parse_or_expand(file_id)
+    }
+
     pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
         self.imp.expand(macro_call)
     }
@@ -178,10 +186,12 @@ pub fn speculative_expand_attr_macro(
         self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
     }
 
+    /// Descend the token into macro calls to its first mapped counterpart.
     pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
-        self.imp.descend_into_macros(token).pop().unwrap()
+        self.imp.descend_into_macros_single(token)
     }
 
+    /// Descend the token into macro calls to all its mapped counterparts.
    pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         self.imp.descend_into_macros(token)
     }
@@ -226,6 +236,7 @@ pub fn token_ancestors_with_macros(
         token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
     }
 
+    /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
     pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
         self.imp.ancestors_with_macros(node)
     }
@@ -409,6 +420,12 @@ fn parse(&self, file_id: FileId) -> ast::SourceFile {
         tree
     }
 
+    fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+        let node = self.db.parse_or_expand(file_id)?;
+        self.cache(node.clone(), file_id);
+        Some(node)
+    }
+
     fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
         let sa = self.analyze(macro_call.syntax());
         let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
@@ -509,72 +526,102 @@ fn speculative_expand_attr(
         };
 
         if first == last {
-            self.descend_into_macros_impl(first, |InFile { value, .. }| {
-                if let Some(node) = value.ancestors().find_map(N::cast) {
-                    res.push(node)
-                }
-            });
+            self.descend_into_macros_impl(
+                first,
+                |InFile { value, .. }| {
+                    if let Some(node) = value.ancestors().find_map(N::cast) {
+                        res.push(node)
+                    }
+                },
+                false,
+            );
         } else {
             // Descend first and last token, then zip them to look for the node they belong to
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
-            self.descend_into_macros_impl(first, |token| {
-                scratch.push(token);
-            });
+            self.descend_into_macros_impl(
+                first,
+                |token| {
+                    scratch.push(token);
+                },
+                false,
+            );
 
             let mut scratch = scratch.into_iter();
-            self.descend_into_macros_impl(last, |InFile { value: last, file_id: last_fid }| {
-                if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
-                    if first_fid == last_fid {
-                        if let Some(p) = first.parent() {
-                            let range = first.text_range().cover(last.text_range());
-                            let node = find_root(&p)
-                                .covering_element(range)
-                                .ancestors()
-                                .take_while(|it| it.text_range() == range)
-                                .find_map(N::cast);
-                            if let Some(node) = node {
-                                res.push(node);
+            self.descend_into_macros_impl(
+                last,
+                |InFile { value: last, file_id: last_fid }| {
+                    if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
+                        if first_fid == last_fid {
+                            if let Some(p) = first.parent() {
+                                let range = first.text_range().cover(last.text_range());
+                                let node = find_root(&p)
+                                    .covering_element(range)
+                                    .ancestors()
+                                    .take_while(|it| it.text_range() == range)
+                                    .find_map(N::cast);
+                                if let Some(node) = node {
+                                    res.push(node);
+                                }
                             }
                         }
                     }
-                }
-            });
+                },
+                false,
+            );
         }
         res
     }
 
     fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
-        self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value));
+        self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value), false);
+        res
+    }
+
+    fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+        let mut res = token.clone();
+        self.descend_into_macros_impl(token, |InFile { value, .. }| res = value, true);
         res
     }
 
-    fn descend_into_macros_impl(&self, token: SyntaxToken, mut f: impl FnMut(InFile<SyntaxToken>)) {
+    fn descend_into_macros_impl(
+        &self,
+        token: SyntaxToken,
+        mut f: impl FnMut(InFile<SyntaxToken>),
+        single: bool,
+    ) {
         let _p = profile::span("descend_into_macros");
         let parent = match token.parent() {
             Some(it) => it,
             None => return,
         };
         let sa = self.analyze(&parent);
-        let mut stack: SmallVec<[_; 1]> = smallvec![InFile::new(sa.file_id, token)];
+        let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
         let mut cache = self.expansion_info_cache.borrow_mut();
         let mut mcache = self.macro_call_cache.borrow_mut();
 
         let mut process_expansion_for_token =
-            |stack: &mut SmallVec<_>, file_id, item, token: InFile<&_>| {
-                let mapped_tokens = cache
-                    .entry(file_id)
-                    .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                    .as_ref()?
-                    .map_token_down(self.db.upcast(), item, token)?;
+            |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
+                let expansion_info = cache
+                    .entry(macro_file)
+                    .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
+                    .as_ref()?;
+
+                {
+                    let InFile { file_id, value } = expansion_info.expanded();
+                    self.cache(value, file_id);
+                }
+
+                let mut mapped_tokens =
+                    expansion_info.map_token_down(self.db.upcast(), item, token)?;
 
                 let len = stack.len();
                 // requeue the tokens we got from mapping our current token down
-                stack.extend(mapped_tokens.inspect(|token| {
-                    if let Some(parent) = token.value.parent() {
-                        self.cache(find_root(&parent), token.file_id);
-                    }
-                }));
+                if single {
+                    stack.extend(mapped_tokens.next());
+                } else {
+                    stack.extend(mapped_tokens);
+                }
                 // if the length changed we have found a mapping for the token
                 (stack.len() != len).then(|| ())
             };
@@ -606,17 +653,15 @@ fn descend_into_macros_impl(&self, token: SyntaxToken, mut f: impl FnMut(InFile<SyntaxToken>)) {
             }
 
             // or are we inside a function-like macro call
-            if let Some(macro_call) = token.value.ancestors().find_map(ast::MacroCall::cast) {
-                let tt = macro_call.token_tree()?;
-                let l_delim = match tt.left_delimiter_token() {
-                    Some(it) => it.text_range().end(),
-                    None => tt.syntax().text_range().start(),
-                };
-                let r_delim = match tt.right_delimiter_token() {
-                    Some(it) => it.text_range().start(),
-                    None => tt.syntax().text_range().end(),
-                };
-                if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
+            if let Some(tt) =
+                // FIXME replace map.while_some with take_while once stable
+                token.value.ancestors().map(ast::TokenTree::cast).while_some().last()
+            {
+                let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
+                if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
+                    return None;
+                }
+                if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
                     return None;
                 }
 
@@ -808,7 +853,7 @@ fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
     fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<MacroDef> {
         let item_in_file = self.find_file(item.syntax().clone()).with_value(item.clone());
         let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(item_in_file))?;
-        Some(MacroDef { id: self.db.lookup_intern_macro(macro_call_id).def })
+        Some(MacroDef { id: self.db.lookup_intern_macro_call(macro_call_id).def })
     }
 
     fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
@@ -857,13 +902,13 @@ fn to_module_def(&self, file: FileId) -> impl Iterator<Item = Module> {
     }
 
     fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
-        let sa = self.analyze(node);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } = self.analyze(node);
+        SemanticsScope { db: self.db, file_id, resolver }
     }
 
     fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        let sa = self.analyze_with_offset(node, offset);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } = self.analyze_with_offset(node, offset);
+        SemanticsScope { db: self.db, file_id, resolver }
     }
 
     fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
@@ -884,9 +929,11 @@ fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
         self.analyze_impl(node, None)
     }
+
     fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
         self.analyze_impl(node, Some(offset))
     }
+
     fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
         let _p = profile::span("Semantics::analyze_impl");
         let node = self.find_file(node.clone());
@@ -1117,8 +1164,7 @@ pub fn krate(&self) -> Option<Crate> {
     }
 
     /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
-    // FIXME: rename to visible_traits to not repeat scope?
-    pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
+    pub fn visible_traits(&self) -> FxHashSet<TraitId> {
         let resolver = &self.resolver;
         resolver.traits_in_scope(self.db.upcast())
     }
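
The two new `PathResolution` variants ripple out to every exhaustive `match` on the enum, which is why the `in_type_ns` hunk above grows two arms. A minimal sketch of the obligation this puts on a downstream consumer (the `describe` function is hypothetical, not code from this patch; the variant list matches the enum as of this commit):

use hir::PathResolution;

fn describe(res: &PathResolution) -> &'static str {
    match res {
        PathResolution::Def(_) => "module-level definition",
        PathResolution::Local(_) => "local binding",
        PathResolution::TypeParam(_) => "type parameter",
        PathResolution::ConstParam(_) => "const parameter",
        PathResolution::SelfType(_) => "Self type of an impl",
        PathResolution::Macro(_) => "macro",
        PathResolution::AssocItem(_) => "associated item",
        // The two variants introduced by this change:
        PathResolution::BuiltinAttr(_) => "builtin attribute",
        PathResolution::ToolModule(_) => "tool module",
    }
}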
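The other API-visible change is the split between `descend_into_macros` and `descend_into_macros_single`: both map a token into the macro expansions it participates in, but the new `single: bool` flag makes the shared implementation stop at the first mapped token (`mapped_tokens.next()`) instead of collecting all of them. A rough usage sketch, assuming the usual `Semantics<RootDatabase>` setup from `ide_db`; the helper function itself is invented for illustration:

use hir::Semantics;
use ide_db::RootDatabase;
use syntax::SyntaxToken;

// Hypothetical helper: pick the descend API that matches a feature's needs.
fn tokens_to_inspect(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Vec<SyntaxToken> {
    // Features that must see every expansion site of the token
    // (e.g. highlighting) descend to all mapped counterparts.
    let all = sema.descend_into_macros(token.clone());

    // Features that only need one representative token (e.g. hover) can use
    // the cheaper single descent, which stops after the first mapping.
    let _first = sema.descend_into_macros_single(token);

    all.into_vec()
}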