git.lizzy.rs Git - rust.git/blobdiff - crates/hir/src/semantics.rs
internal: Record unresolved derive invocations in hir
[rust.git] / crates / hir / src / semantics.rs
index b6e5ee75313d33adcecbce894c2fa06860decad4..645b10f8534e11fb0c173152aef7bd50c2ada83f 100644 (file)
     resolver::{self, HasResolver, Resolver, TypeNs},
     AsMacroCall, FunctionId, TraitId, VariantId,
 };
-use hir_expand::{name::AsName, ExpansionInfo};
+use hir_expand::{name::AsName, ExpansionInfo, MacroCallId};
 use hir_ty::{associated_type_shorthand_candidates, Interner};
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{smallvec, SmallVec};
 use syntax::{
     algo::skip_trivia_token,
-    ast::{self, HasGenericParams, HasLoopBody},
-    match_ast, AstNode, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize,
+    ast::{self, HasAttrs, HasGenericParams, HasLoopBody},
+    match_ast, AstNode, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
 };
 
 use crate::{
     db::HirDatabase,
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
-    source_analyzer::{resolve_hir_path, SourceAnalyzer},
-    Access, AssocItem, Callable, ConstParam, Crate, Field, Function, HasSource, HirFileId, Impl,
-    InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path, ScopeDef, Trait,
-    Type, TypeAlias, TypeParam, VariantDef,
+    source_analyzer::{resolve_hir_path, resolve_hir_path_as_macro, SourceAnalyzer},
+    Access, AssocItem, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasSource,
+    HirFileId, Impl, InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path,
+    ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -43,6 +43,8 @@ pub enum PathResolution {
     SelfType(Impl),
     Macro(MacroDef),
     AssocItem(AssocItem),
+    BuiltinAttr(BuiltinAttr),
+    ToolModule(ToolModule),
 }
 
 impl PathResolution {
@@ -63,9 +65,11 @@ fn in_type_ns(&self) -> Option<TypeNs> {
             PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
                 Some(TypeNs::TypeAliasId((*alias).into()))
             }
-            PathResolution::Local(_) | PathResolution::Macro(_) | PathResolution::ConstParam(_) => {
-                None
-            }
+            PathResolution::BuiltinAttr(_)
+            | PathResolution::ToolModule(_)
+            | PathResolution::Local(_)
+            | PathResolution::Macro(_)
+            | PathResolution::ConstParam(_) => None,
             PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
             PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
             PathResolution::AssocItem(AssocItem::Const(_) | AssocItem::Function(_)) => None,
@@ -121,7 +125,10 @@ pub struct SemanticsImpl<'db> {
     pub db: &'db dyn HirDatabase,
     s2d_cache: RefCell<SourceToDefCache>,
     expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
+    // Root node to HirFileId cache
     cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
+    // MacroCall to its expansion's HirFileId cache
+    macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, HirFileId>>,
 }
 
 impl<DB> fmt::Debug for Semantics<'_, DB> {
@@ -140,6 +147,10 @@ pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
         self.imp.parse(file_id)
     }
 
+    pub fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+        self.imp.parse_or_expand(file_id)
+    }
+
     pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
         self.imp.expand(macro_call)
     }
@@ -149,6 +160,10 @@ pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
         self.imp.expand_attr_macro(item)
     }
 
+    pub fn resolve_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<Option<MacroDef>>> {
+        self.imp.resolve_derive_macro(derive)
+    }
+
     pub fn expand_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<SyntaxNode>> {
         self.imp.expand_derive_macro(derive)
     }
@@ -175,10 +190,12 @@ pub fn speculative_expand_attr_macro(
         self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
     }
 
+    /// Descend the token into macro calls to its first mapped counterpart.
     pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
-        self.imp.descend_into_macros(token).pop().unwrap()
+        self.imp.descend_into_macros_single(token)
     }
 
+    /// Descend the token into macro calls to all its mapped counterparts.
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         self.imp.descend_into_macros(token)
     }
@@ -197,7 +214,7 @@ pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
     }
 
     pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
-        self.imp.find_file(syntax_node.clone()).file_id
+        self.imp.find_file(syntax_node).file_id
     }
 
     pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
@@ -208,6 +225,10 @@ pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
         self.imp.original_range_opt(node)
     }
 
+    pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+        self.imp.original_ast_node(node)
+    }
+
     pub fn diagnostics_display_range(&self, diagnostics: InFile<SyntaxNodePtr>) -> FileRange {
         self.imp.diagnostics_display_range(diagnostics)
     }
@@ -219,6 +240,7 @@ pub fn token_ancestors_with_macros(
         token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
     }
 
+    /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
     pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
         self.imp.ancestors_with_macros(node)
     }
@@ -344,7 +366,7 @@ pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Fi
     }
 
     pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
-        let src = self.imp.find_file(src.syntax().clone()).with_value(src).cloned();
+        let src = self.imp.find_file(src.syntax()).with_value(src).cloned();
         T::to_def(&self.imp, src)
     }
 
@@ -392,6 +414,7 @@ fn new(db: &'db dyn HirDatabase) -> Self {
             s2d_cache: Default::default(),
             cache: Default::default(),
             expansion_info_cache: Default::default(),
+            macro_call_cache: Default::default(),
         }
     }
 
@@ -401,8 +424,14 @@ fn parse(&self, file_id: FileId) -> ast::SourceFile {
         tree
     }
 
+    fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+        let node = self.db.parse_or_expand(file_id)?;
+        self.cache(node.clone(), file_id);
+        Some(node)
+    }
+
     fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
-        let sa = self.analyze(macro_call.syntax());
+        let sa = self.analyze_no_infer(macro_call.syntax());
         let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
         let node = self.db.parse_or_expand(file_id)?;
         self.cache(node.clone(), file_id);
@@ -410,8 +439,7 @@ fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
     }
 
     fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
-        let sa = self.analyze(item.syntax());
-        let src = InFile::new(sa.file_id, item.clone());
+        let src = self.find_file(item.syntax()).with_value(item.clone());
         let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
         let file_id = macro_call_id.as_file();
         let node = self.db.parse_or_expand(file_id)?;
@@ -419,34 +447,43 @@ fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
         Some(node)
     }
 
+    fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroDef>>> {
+        let res = self
+            .derive_macro_calls(attr)?
+            .into_iter()
+            .map(|call| Some(MacroDef { id: self.db.lookup_intern_macro_call(call?).def }))
+            .collect();
+        Some(res)
+    }
+
     fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+        let res: Vec<_> = self
+            .derive_macro_calls(attr)?
+            .into_iter()
+            .flat_map(|call| {
+                let file_id = call?.as_file();
+                let node = self.db.parse_or_expand(file_id)?;
+                self.cache(node.clone(), file_id);
+                Some(node)
+            })
+            .collect();
+        Some(res)
+    }
+
+    fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> {
         let item = attr.syntax().parent().and_then(ast::Item::cast)?;
-        let sa = self.analyze(item.syntax());
-        let item = InFile::new(sa.file_id, &item);
-        let src = InFile::new(sa.file_id, attr.clone());
+        let file_id = self.find_file(item.syntax()).file_id;
+        let item = InFile::new(file_id, &item);
+        let src = InFile::new(file_id, attr.clone());
         self.with_ctx(|ctx| {
-            let macro_call_ids = ctx.attr_to_derive_macro_call(item, src)?;
-
-            let expansions: Vec<_> = macro_call_ids
-                .iter()
-                .map(|call| call.as_file())
-                .flat_map(|file_id| {
-                    let node = self.db.parse_or_expand(file_id)?;
-                    self.cache(node.clone(), file_id);
-                    Some(node)
-                })
-                .collect();
-            if expansions.is_empty() {
-                None
-            } else {
-                Some(expansions)
-            }
+            let res = ctx.attr_to_derive_macro_call(item, src)?;
+            Some(res.to_vec())
         })
     }
 
     fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
-        let sa = self.analyze(item.syntax());
-        let src = InFile::new(sa.file_id, item.clone());
+        let file_id = self.find_file(item.syntax()).file_id;
+        let src = InFile::new(file_id, item.clone());
         self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
     }
 
@@ -456,11 +493,12 @@ fn speculative_expand(
         speculative_args: &ast::TokenTree,
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, SyntaxToken)> {
-        let sa = self.analyze(actual_macro_call.syntax());
-        let macro_call = InFile::new(sa.file_id, actual_macro_call);
-        let krate = sa.resolver.krate()?;
+        let SourceAnalyzer { file_id, resolver, .. } =
+            self.analyze_no_infer(actual_macro_call.syntax());
+        let macro_call = InFile::new(file_id, actual_macro_call);
+        let krate = resolver.krate()?;
         let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
-            sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
+            resolver.resolve_path_as_macro(self.db.upcast(), &path)
         })?;
         hir_expand::db::expand_speculative(
             self.db.upcast(),
@@ -476,8 +514,8 @@ fn speculative_expand_attr(
         speculative_args: &ast::Item,
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, SyntaxToken)> {
-        let sa = self.analyze(actual_macro_call.syntax());
-        let macro_call = InFile::new(sa.file_id, actual_macro_call.clone());
+        let file_id = self.find_file(actual_macro_call.syntax()).file_id;
+        let macro_call = InFile::new(file_id, actual_macro_call.clone());
         let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?;
         hir_expand::db::expand_speculative(
             self.db.upcast(),
@@ -501,71 +539,102 @@ fn speculative_expand_attr(
         };
 
         if first == last {
-            self.descend_into_macros_impl(first, |InFile { value, .. }| {
-                if let Some(node) = value.ancestors().find_map(N::cast) {
-                    res.push(node)
-                }
-            });
+            self.descend_into_macros_impl(
+                first,
+                &mut |InFile { value, .. }| {
+                    if let Some(node) = value.ancestors().find_map(N::cast) {
+                        res.push(node)
+                    }
+                },
+                false,
+            );
         } else {
             // Descend first and last token, then zip them to look for the node they belong to
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
-            self.descend_into_macros_impl(first, |token| {
-                scratch.push(token);
-            });
+            self.descend_into_macros_impl(
+                first,
+                &mut |token| {
+                    scratch.push(token);
+                },
+                false,
+            );
 
             let mut scratch = scratch.into_iter();
-            self.descend_into_macros_impl(last, |InFile { value: last, file_id: last_fid }| {
-                if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
-                    if first_fid == last_fid {
-                        if let Some(p) = first.parent() {
-                            let range = first.text_range().cover(last.text_range());
-                            let node = find_root(&p)
-                                .covering_element(range)
-                                .ancestors()
-                                .take_while(|it| it.text_range() == range)
-                                .find_map(N::cast);
-                            if let Some(node) = node {
-                                res.push(node);
+            self.descend_into_macros_impl(
+                last,
+                &mut |InFile { value: last, file_id: last_fid }| {
+                    if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
+                        if first_fid == last_fid {
+                            if let Some(p) = first.parent() {
+                                let range = first.text_range().cover(last.text_range());
+                                let node = find_root(&p)
+                                    .covering_element(range)
+                                    .ancestors()
+                                    .take_while(|it| it.text_range() == range)
+                                    .find_map(N::cast);
+                                if let Some(node) = node {
+                                    res.push(node);
+                                }
                             }
                         }
                     }
-                }
-            });
+                },
+                false,
+            );
         }
         res
     }
 
     fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
-        self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value));
+        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res.push(value), false);
+        res
+    }
+
+    fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+        let mut res = token.clone();
+        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res = value, true);
         res
     }
 
-    fn descend_into_macros_impl(&self, token: SyntaxToken, mut f: impl FnMut(InFile<SyntaxToken>)) {
+    fn descend_into_macros_impl(
+        &self,
+        token: SyntaxToken,
+        f: &mut dyn FnMut(InFile<SyntaxToken>),
+        single: bool,
+    ) {
         let _p = profile::span("descend_into_macros");
         let parent = match token.parent() {
             Some(it) => it,
             None => return,
         };
-        let sa = self.analyze(&parent);
-        let mut stack: SmallVec<[_; 1]> = smallvec![InFile::new(sa.file_id, token)];
+        let sa = self.analyze_no_infer(&parent);
+        let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
         let mut cache = self.expansion_info_cache.borrow_mut();
+        let mut mcache = self.macro_call_cache.borrow_mut();
 
         let mut process_expansion_for_token =
-            |stack: &mut SmallVec<_>, file_id, item, token: InFile<&_>| {
-                let mapped_tokens = cache
-                    .entry(file_id)
-                    .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                    .as_ref()?
-                    .map_token_down(self.db.upcast(), item, token)?;
+            |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
+                let expansion_info = cache
+                    .entry(macro_file)
+                    .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
+                    .as_ref()?;
+
+                {
+                    let InFile { file_id, value } = expansion_info.expanded();
+                    self.cache(value, file_id);
+                }
+
+                let mut mapped_tokens =
+                    expansion_info.map_token_down(self.db.upcast(), item, token)?;
 
                 let len = stack.len();
                 // requeue the tokens we got from mapping our current token down
-                stack.extend(mapped_tokens.inspect(|token| {
-                    if let Some(parent) = token.value.parent() {
-                        self.cache(find_root(&parent), token.file_id);
-                    }
-                }));
+                if single {
+                    stack.extend(mapped_tokens.next());
+                } else {
+                    stack.extend(mapped_tokens);
+                }
                 // if the length changed we have found a mapping for the token
                 (stack.len() != len).then(|| ())
             };
@@ -578,14 +647,13 @@ fn descend_into_macros_impl(&self, token: SyntaxToken, mut f: impl FnMut(InFile<
             let was_not_remapped = (|| {
                 // are we inside an attribute macro call
                 let containing_attribute_macro_call = self.with_ctx(|ctx| {
-                    token
-                        .value
-                        .ancestors()
-                        .filter_map(ast::Item::cast)
-                        .filter_map(|item| {
-                            Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
-                        })
-                        .last()
+                    token.value.ancestors().filter_map(ast::Item::cast).find_map(|item| {
+                        if item.attrs().next().is_none() {
+                            // Don't force populate the dyn cache for items that don't have an attribute anyways
+                            return None;
+                        }
+                        Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
+                    })
                 });
                 if let Some((call_id, item)) = containing_attribute_macro_call {
                     let file_id = call_id.as_file();
@@ -598,21 +666,27 @@ fn descend_into_macros_impl(&self, token: SyntaxToken, mut f: impl FnMut(InFile<
                 }
 
                 // or are we inside a function-like macro call
-                if let Some(macro_call) = token.value.ancestors().find_map(ast::MacroCall::cast) {
-                    let tt = macro_call.token_tree()?;
-                    let l_delim = match tt.left_delimiter_token() {
-                        Some(it) => it.text_range().end(),
-                        None => tt.syntax().text_range().start(),
-                    };
-                    let r_delim = match tt.right_delimiter_token() {
-                        Some(it) => it.text_range().start(),
-                        None => tt.syntax().text_range().end(),
-                    };
-                    if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
+                if let Some(tt) =
+                    // FIXME replace map.while_some with take_while once stable
+                    token.value.ancestors().map(ast::TokenTree::cast).while_some().last()
+                {
+                    let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
+                    if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
+                        return None;
+                    }
+                    if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
                         return None;
                     }
 
-                    let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
+                    let mcall = token.with_value(macro_call);
+                    let file_id = match mcache.get(&mcall) {
+                        Some(&it) => it,
+                        None => {
+                            let it = sa.expand(self.db, mcall.as_ref())?;
+                            mcache.insert(mcall, it);
+                            it
+                        }
+                    };
                     return process_expansion_for_token(&mut stack, file_id, None, token.as_ref());
                 }
 
@@ -651,13 +725,18 @@ fn descend_node_at_offset(
     }
 
     fn original_range(&self, node: &SyntaxNode) -> FileRange {
-        let node = self.find_file(node.clone());
-        node.as_ref().original_file_range(self.db.upcast())
+        let node = self.find_file(node);
+        node.original_file_range(self.db.upcast())
     }
 
     fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
-        let node = self.find_file(node.clone());
-        node.as_ref().original_file_range_opt(self.db.upcast())
+        let node = self.find_file(node);
+        node.original_file_range_opt(self.db.upcast())
+    }
+
+    fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+        let InFile { file_id, .. } = self.find_file(node.syntax());
+        InFile::new(file_id, node).original_ast_node(self.db.upcast()).map(|it| it.value)
     }
 
     fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
@@ -678,7 +757,7 @@ fn ancestors_with_macros(
         &self,
         node: SyntaxNode,
     ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
-        let node = self.find_file(node);
+        let node = self.find_file(&node);
         node.ancestors_with_macros(self.db.upcast()).map(|it| it.value)
     }
 
@@ -699,7 +778,8 @@ fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimePar
             gpl.lifetime_params()
                 .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
         })?;
-        let src = self.find_file(lifetime_param.syntax().clone()).with_value(lifetime_param);
+        let file_id = self.find_file(lifetime_param.syntax()).file_id;
+        let src = InFile::new(file_id, lifetime_param);
         ToDef::to_def(self, src)
     }
 
@@ -721,7 +801,8 @@ fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
                     .map_or(false, |lt| lt.text() == text)
             })
         })?;
-        let src = self.find_file(label.syntax().clone()).with_value(label);
+        let file_id = self.find_file(label.syntax()).file_id;
+        let src = InFile::new(file_id, label);
         ToDef::to_def(self, src)
     }
 
@@ -755,7 +836,7 @@ fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId>
 
     fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
         let (func, subst) = self.analyze(call.syntax()).resolve_method_call(self.db, call)?;
-        let ty = self.db.value_ty(func.into()).substitute(&Interner, &subst);
+        let ty = self.db.value_ty(func.into()).substitute(Interner, &subst);
         let resolver = self.analyze(call.syntax()).resolver;
         let ty = Type::new_with_resolver(self.db, &resolver, ty)?;
         let mut res = ty.as_callable(self.db)?;
@@ -780,14 +861,14 @@ fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field>
 
     fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
         let sa = self.analyze(macro_call.syntax());
-        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
+        let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
         sa.resolve_macro_call(self.db, macro_call)
     }
 
     fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<MacroDef> {
-        let item_in_file = self.find_file(item.syntax().clone()).with_value(item.clone());
+        let item_in_file = self.find_file(item.syntax()).with_value(item.clone());
         let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(item_in_file))?;
-        Some(MacroDef { id: self.db.lookup_intern_macro(macro_call_id).def })
+        Some(MacroDef { id: self.db.lookup_intern_macro_call(macro_call_id).def })
     }
 
     fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
@@ -836,13 +917,14 @@ fn to_module_def(&self, file: FileId) -> impl Iterator<Item = Module> {
     }
 
     fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
-        let sa = self.analyze(node);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } = self.analyze_no_infer(node);
+        SemanticsScope { db: self.db, file_id, resolver }
     }
 
     fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        let sa = self.analyze_with_offset(node, offset);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } =
+            self.analyze_with_offset_no_infer(node, offset);
+        SemanticsScope { db: self.db, file_id, resolver }
     }
 
     fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
@@ -861,15 +943,25 @@ fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
     }
 
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
-        self.analyze_impl(node, None)
+        self.analyze_impl(node, None, true)
+    }
+
+    fn analyze_no_infer(&self, node: &SyntaxNode) -> SourceAnalyzer {
+        self.analyze_impl(node, None, false)
     }
-    fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
-        self.analyze_impl(node, Some(offset))
+
+    fn analyze_with_offset_no_infer(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
+        self.analyze_impl(node, Some(offset), false)
     }
-    fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
+
+    fn analyze_impl(
+        &self,
+        node: &SyntaxNode,
+        offset: Option<TextSize>,
+        infer_body: bool,
+    ) -> SourceAnalyzer {
         let _p = profile::span("Semantics::analyze_impl");
-        let node = self.find_file(node.clone());
-        let node = node.as_ref();
+        let node = self.find_file(node);
 
         let container = match self.with_ctx(|ctx| ctx.find_container(node)) {
             Some(it) => it,
@@ -878,7 +970,11 @@ fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAna
 
         let resolver = match container {
             ChildContainer::DefWithBodyId(def) => {
-                return SourceAnalyzer::new_for_body(self.db, def, node, offset)
+                return if infer_body {
+                    SourceAnalyzer::new_for_body(self.db, def, node, offset)
+                } else {
+                    SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
+                }
             }
             ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
             ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
@@ -899,7 +995,7 @@ fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
     }
 
     fn assert_contains_node(&self, node: &SyntaxNode) {
-        self.find_file(node.clone());
+        self.find_file(node);
     }
 
     fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
@@ -907,8 +1003,8 @@ fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
         cache.get(root_node).copied()
     }
 
-    fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
-        let root_node = find_root(&node);
+    fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
+        let root_node = find_root(node);
         let file_id = self.lookup(&root_node).unwrap_or_else(|| {
             panic!(
                 "\n\nFailed to lookup {:?} in this Semantics.\n\
@@ -1049,6 +1145,7 @@ fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def> {
     (crate::TypeParam, ast::TypeParam, type_param_to_def),
     (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
     (crate::ConstParam, ast::ConstParam, const_param_to_def),
+    (crate::GenericParam, ast::GenericParam, generic_param_to_def),
     (crate::MacroDef, ast::Macro, macro_to_def),
     (crate::Local, ast::IdentPat, bind_pat_to_def),
     (crate::Local, ast::SelfParam, self_param_to_def),
@@ -1096,8 +1193,7 @@ pub fn krate(&self) -> Option<Crate> {
     }
 
     /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
-    // FIXME: rename to visible_traits to not repeat scope?
-    pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
+    pub fn visible_traits(&self) -> FxHashSet<TraitId> {
         let resolver = &self.resolver;
         resolver.traits_in_scope(self.db.upcast())
     }
@@ -1134,4 +1230,14 @@ pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
         let path = Path::from_src(path.clone(), &ctx)?;
         resolve_hir_path(self.db, &self.resolver, &path)
     }
+
+    /// Resolve a path as-if it was written at the given scope. This is
+    /// necessarily a heuristic, as it doesn't take hygiene into account.
+    // FIXME: This special casing solely exists for attributes for now
+    // ideally we should have a path resolution infra that properly knows about overlapping namespaces
+    pub fn speculative_resolve_as_mac(&self, path: &ast::Path) -> Option<MacroDef> {
+        let ctx = body::LowerCtx::new(self.db.upcast(), self.file_id);
+        let path = Path::from_src(path.clone(), &ctx)?;
+        resolve_hir_path_as_macro(self.db, &self.resolver, &path)
+    }
 }