Add ConstParams to the HIR

diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 621ebcbe3868c59176ed23438289699d6001f5c9..cd689c86935e24ebad1be98f3685126b96d99bf9 100644 (file)
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -6,28 +6,28 @@
 
 use base_db::{FileId, FileRange};
 use hir_def::{
-    lang_item::LangItemTarget,
     resolver::{self, HasResolver, Resolver, TypeNs},
-    src::HasSource,
-    AsMacroCall, FunctionId, Lookup, TraitId, VariantId,
+    AsMacroCall, FunctionId, TraitId, VariantId,
 };
 use hir_expand::{hygiene::Hygiene, name::AsName, ExpansionInfo};
 use hir_ty::associated_type_shorthand_candidates;
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::{
-    algo::{find_node_at_offset, skip_trivia_token},
-    ast, AstNode, Direction, SmolStr, SyntaxNode, SyntaxToken, TextRange, TextSize,
+    algo::find_node_at_offset,
+    ast::{self, GenericParamsOwner, LoopBodyOwner},
+    match_ast, AstNode, SyntaxNode, SyntaxToken, TextSize,
 };
 
 use crate::{
+    code_model::Access,
     db::HirDatabase,
     diagnostics::Diagnostic,
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
     source_analyzer::{resolve_hir_path, SourceAnalyzer},
-    AssocItem, Callable, Crate, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef,
-    Module, ModuleDef, Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, TypeRef,
-    VariantDef,
+    AssocItem, Callable, ConstParam, Crate, Field, Function, HirFileId, Impl, InFile, Label,
+    LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path, ScopeDef, Trait, Type,
+    TypeAlias, TypeParam, VariantDef,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -38,7 +38,8 @@ pub enum PathResolution {
     Local(Local),
     /// A generic parameter
     TypeParam(TypeParam),
-    SelfType(ImplDef),
+    ConstParam(ConstParam),
+    SelfType(Impl),
     Macro(MacroDef),
     AssocItem(AssocItem),
 }
@@ -51,7 +52,7 @@ fn in_type_ns(&self) -> Option<TypeNs> {
                 Some(TypeNs::BuiltinType(*builtin))
             }
             PathResolution::Def(ModuleDef::Const(_))
-            | PathResolution::Def(ModuleDef::EnumVariant(_))
+            | PathResolution::Def(ModuleDef::Variant(_))
             | PathResolution::Def(ModuleDef::Function(_))
             | PathResolution::Def(ModuleDef::Module(_))
             | PathResolution::Def(ModuleDef::Static(_))
@@ -59,7 +60,9 @@ fn in_type_ns(&self) -> Option<TypeNs> {
             PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
                 Some(TypeNs::TypeAliasId((*alias).into()))
             }
-            PathResolution::Local(_) | PathResolution::Macro(_) => None,
+            PathResolution::Local(_) | PathResolution::Macro(_) | PathResolution::ConstParam(_) => {
+                None
+            }
             PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
             PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
             PathResolution::AssocItem(AssocItem::Const(_))
@@ -81,13 +84,6 @@ pub fn assoc_type_shorthand_candidates<R>(
     }
 }
 
-pub enum SelfKind {
-    Shared,
-    Mutable,
-    Consuming,
-    Copied,
-}
-
 /// Primary API to get semantic information, like types, from syntax trees.
 pub struct Semantics<'db, DB> {
     pub db: &'db DB,
@@ -185,6 +181,14 @@ pub fn find_node_at_offset_with_descend<N: AstNode>(
         self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
     }
 
+    pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+        self.imp.resolve_lifetime_param(lifetime)
+    }
+
+    pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+        self.imp.resolve_label(lifetime)
+    }
+
     pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
         self.imp.type_of_expr(expr)
     }
@@ -197,10 +201,6 @@ pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
         self.imp.type_of_self(param)
     }
 
-    pub fn method_reciever_kind(&self, call: &ast::MethodCallExpr) -> Option<SelfKind> {
-        self.imp.method_receiver_kind(call)
-    }
-
     pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
         self.imp.resolve_method_call(call).map(Function::from)
     }
@@ -220,8 +220,8 @@ pub fn resolve_record_field(
         self.imp.resolve_record_field(field)
     }
 
-    pub fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option<Field> {
-        self.imp.resolve_record_field_pat(field)
+    pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+        self.imp.resolve_record_pat_field(field)
     }
 
     pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
@@ -310,9 +310,8 @@ fn parse(&self, file_id: FileId) -> ast::SourceFile {
     }
 
     fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
-        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
-        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
-        let file_id = sa.expand(self.db, macro_call)?;
+        let sa = self.analyze(macro_call.syntax());
+        let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
         let node = self.db.parse_or_expand(file_id)?;
         self.cache(node.clone(), file_id);
         Some(node)
@@ -324,9 +323,8 @@ fn speculative_expand(
         hypothetical_args: &ast::TokenTree,
         token_to_map: SyntaxToken,
     ) -> Option<(SyntaxNode, SyntaxToken)> {
-        let macro_call =
-            self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
-        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
+        let sa = self.analyze(actual_macro_call.syntax());
+        let macro_call = InFile::new(sa.file_id, actual_macro_call);
         let krate = sa.resolver.krate()?;
         let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
             sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
@@ -342,10 +340,9 @@ fn speculative_expand(
     fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
         let _p = profile::span("descend_into_macros");
         let parent = token.parent();
-        let parent = self.find_file(parent);
-        let sa = self.analyze2(parent.as_ref(), None);
+        let sa = self.analyze(&parent);
 
-        let token = successors(Some(parent.with_value(token)), |token| {
+        let token = successors(Some(InFile::new(sa.file_id, token)), |token| {
             self.db.check_canceled();
             let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
             let tt = macro_call.token_tree()?;
@@ -385,7 +382,7 @@ fn descend_node_at_offset(
 
     fn original_range(&self, node: &SyntaxNode) -> FileRange {
         let node = self.find_file(node.clone());
-        original_range(self.db, node.as_ref())
+        node.as_ref().original_file_range(self.db.upcast())
     }
 
     fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
@@ -393,7 +390,7 @@ fn diagnostics_display_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
         let root = self.db.parse_or_expand(src.file_id).unwrap();
         let node = src.value.to_node(&root);
         self.cache(root, src.file_id);
-        original_range(self.db, src.with_value(&node))
+        src.with_value(&node).original_file_range(self.db.upcast())
     }
 
     fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
@@ -411,45 +408,62 @@ fn ancestors_at_offset_with_macros(
             .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
     }
 
+    fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+        let text = lifetime.text();
+        let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
+            let gpl = match_ast! {
+                match syn {
+                    ast::Fn(it) => it.generic_param_list()?,
+                    ast::TypeAlias(it) => it.generic_param_list()?,
+                    ast::Struct(it) => it.generic_param_list()?,
+                    ast::Enum(it) => it.generic_param_list()?,
+                    ast::Union(it) => it.generic_param_list()?,
+                    ast::Trait(it) => it.generic_param_list()?,
+                    ast::Impl(it) => it.generic_param_list()?,
+                    ast::WherePred(it) => it.generic_param_list()?,
+                    ast::ForType(it) => it.generic_param_list()?,
+                    _ => return None,
+                }
+            };
+            gpl.lifetime_params()
+                .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()) == Some(text))
+        })?;
+        let src = self.find_file(lifetime_param.syntax().clone()).with_value(lifetime_param);
+        ToDef::to_def(self, src)
+    }
+
+    fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+        let text = lifetime.text();
+        let label = lifetime.syntax().ancestors().find_map(|syn| {
+            let label = match_ast! {
+                match syn {
+                    ast::ForExpr(it) => it.label(),
+                    ast::WhileExpr(it) => it.label(),
+                    ast::LoopExpr(it) => it.label(),
+                    ast::EffectExpr(it) => it.label(),
+                    _ => None,
+                }
+            };
+            label.filter(|l| {
+                l.lifetime()
+                    .and_then(|lt| lt.lifetime_ident_token())
+                    .map_or(false, |lt| lt.text() == text)
+            })
+        })?;
+        let src = self.find_file(label.syntax().clone()).with_value(label);
+        ToDef::to_def(self, src)
+    }
+
     fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
-        self.analyze(expr.syntax()).type_of_expr(self.db, &expr)
+        self.analyze(expr.syntax()).type_of_expr(self.db, expr)
     }
 
     fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
-        self.analyze(pat.syntax()).type_of_pat(self.db, &pat)
+        self.analyze(pat.syntax()).type_of_pat(self.db, pat)
     }
 
     fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
-        self.analyze(param.syntax()).type_of_self(self.db, &param)
-    }
-
-    fn method_receiver_kind(&self, call: &ast::MethodCallExpr) -> Option<SelfKind> {
-        self.resolve_method_call(call).and_then(|func| {
-            let lookup = func.lookup(self.db.upcast());
-            let src = lookup.source(self.db.upcast());
-            let param_list = src.value.param_list()?;
-            let self_param = param_list.self_param()?;
-            if self_param.amp_token().is_some() {
-                return Some(if self_param.mut_token().is_some() {
-                    SelfKind::Mutable
-                } else {
-                    SelfKind::Shared
-                });
-            }
-
-            let ty = self.type_of_expr(&call.expr()?)?;
-            let krate = Function::from(func).krate(self.db)?;
-            let lang_item = self.db.lang_item(krate.id, SmolStr::new("copy"));
-            let copy_trait = match lang_item? {
-                LangItemTarget::TraitId(copy_trait) => Trait::from(copy_trait),
-                _ => return None,
-            };
-            Some(if ty.impls_trait(self.db, copy_trait, &[]) {
-                SelfKind::Copied
-            } else {
-                SelfKind::Consuming
-            })
-        })
+        self.analyze(param.syntax()).type_of_self(self.db, param)
     }
 
     fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
@@ -475,8 +489,8 @@ fn resolve_record_field(&self, field: &ast::RecordExprField) -> Option<(Field, O
         self.analyze(field.syntax()).resolve_record_field(self.db, field)
     }
 
-    fn resolve_record_field_pat(&self, field: &ast::RecordPatField) -> Option<Field> {
-        self.analyze(field.syntax()).resolve_record_field_pat(self.db, field)
+    fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+        self.analyze(field.syntax()).resolve_record_pat_field(self.db, field)
     }
 
     fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
@@ -531,15 +545,13 @@ fn to_module_def(&self, file: FileId) -> Option<Module> {
     }
 
     fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
-        let node = self.find_file(node.clone());
-        let resolver = self.analyze2(node.as_ref(), None).resolver;
-        SemanticsScope { db: self.db, file_id: node.file_id, resolver }
+        let sa = self.analyze(node);
+        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
     }
 
     fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        let node = self.find_file(node.clone());
-        let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
-        SemanticsScope { db: self.db, file_id: node.file_id, resolver }
+        let sa = self.analyze_with_offset(node, offset);
+        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
     }
 
     fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
@@ -549,21 +561,24 @@ fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
     }
 
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
-        let src = self.find_file(node.clone());
-        self.analyze2(src.as_ref(), None)
+        self.analyze_impl(node, None)
     }
+    fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
+        self.analyze_impl(node, Some(offset))
+    }
+    fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
+        let _p = profile::span("Semantics::analyze_impl");
+        let node = self.find_file(node.clone());
+        let node = node.as_ref();
 
-    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextSize>) -> SourceAnalyzer {
-        let _p = profile::span("Semantics::analyze2");
-
-        let container = match self.with_ctx(|ctx| ctx.find_container(src)) {
+        let container = match self.with_ctx(|ctx| ctx.find_container(node)) {
             Some(it) => it,
-            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
+            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), node),
         };
 
         let resolver = match container {
             ChildContainer::DefWithBodyId(def) => {
-                return SourceAnalyzer::new_for_body(self.db, def, src, offset)
+                return SourceAnalyzer::new_for_body(self.db, def, node, offset)
             }
             ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
             ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
@@ -573,7 +588,7 @@ fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextSize>) -> Source
             ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
             ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
         };
-        SourceAnalyzer::new_for_resolver(resolver, src)
+        SourceAnalyzer::new_for_resolver(resolver, node)
     }
 
     fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
@@ -615,7 +630,7 @@ fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
 
     fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
         method_call_expr
-            .expr()
+            .receiver()
             .and_then(|expr| {
                 let field_expr = match expr {
                     ast::Expr::FieldExpr(field_expr) => field_expr,
@@ -627,9 +642,11 @@ fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool
                 }
 
                 let func = self.resolve_method_call(&method_call_expr).map(Function::from)?;
-                let is_unsafe = func.has_self_param(self.db)
-                    && matches!(func.params(self.db).first(), Some(TypeRef::Reference(..)));
-                Some(is_unsafe)
+                let res = match func.self_param(self.db)?.access(self.db) {
+                    Access::Shared | Access::Exclusive => true,
+                    Access::Owned => false,
+                };
+                Some(res)
             })
             .unwrap_or(false)
     }
@@ -720,23 +737,45 @@ fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def> {
     (crate::Enum, ast::Enum, enum_to_def),
     (crate::Union, ast::Union, union_to_def),
     (crate::Trait, ast::Trait, trait_to_def),
-    (crate::ImplDef, ast::Impl, impl_to_def),
+    (crate::Impl, ast::Impl, impl_to_def),
     (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
     (crate::Const, ast::Const, const_to_def),
     (crate::Static, ast::Static, static_to_def),
     (crate::Function, ast::Fn, fn_to_def),
     (crate::Field, ast::RecordField, record_field_to_def),
     (crate::Field, ast::TupleField, tuple_field_to_def),
-    (crate::EnumVariant, ast::Variant, enum_variant_to_def),
+    (crate::Variant, ast::Variant, enum_variant_to_def),
     (crate::TypeParam, ast::TypeParam, type_param_to_def),
-    (crate::MacroDef, ast::MacroCall, macro_call_to_def), // this one is dubious, not all calls are macros
+    (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
+    (crate::ConstParam, ast::ConstParam, const_param_to_def),
+    (crate::MacroDef, ast::MacroRules, macro_rules_to_def),
     (crate::Local, ast::IdentPat, bind_pat_to_def),
+    (crate::Label, ast::Label, label_to_def),
 ];
 
 fn find_root(node: &SyntaxNode) -> SyntaxNode {
     node.ancestors().last().unwrap()
 }
 
+/// `SemanticsScope` encapsulates the notion of a scope (the set of visible
+/// names) at a particular program point.
+///
+/// It is a bit tricky, as scopes do not really exist inside the compiler.
+/// Rather, the compiler directly computes for each reference the definition it
+/// refers to. It might transiently compute the explicit scope map while doing
+/// so, but, generally, this is not something left after the analysis.
+///
+/// However, we do very much need explicit scopes for IDE purposes --
+/// completion, at its core, lists the contents of the current scope. The notion
+/// of scope is also useful to answer questions like "what would be the meaning
+/// of this piece of code if we inserted it into this position?".
+///
+/// So `SemanticsScope` is constructed from a specific program point (a syntax
+/// node or just a raw offset) and provides access to the set of visible names
+/// on a somewhat best-effort basis.
+///
+/// Note that if you are wondering "what does this specific existing name mean?",
+/// you'd better use the `resolve_` family of methods.
 #[derive(Debug)]
 pub struct SemanticsScope<'a> {
     pub db: &'a dyn HirDatabase,
@@ -792,68 +831,3 @@ pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
         resolve_hir_path(self.db, &self.resolver, &path)
     }
 }
-
-// FIXME: Change `HasSource` trait to work with `Semantics` and remove this?
-pub fn original_range(db: &dyn HirDatabase, node: InFile<&SyntaxNode>) -> FileRange {
-    if let Some(range) = original_range_opt(db, node) {
-        let original_file = range.file_id.original_file(db.upcast());
-        if range.file_id == original_file.into() {
-            return FileRange { file_id: original_file, range: range.value };
-        }
-
-        log::error!("Fail to mapping up more for {:?}", range);
-        return FileRange { file_id: range.file_id.original_file(db.upcast()), range: range.value };
-    }
-
-    // Fall back to whole macro call
-    if let Some(expansion) = node.file_id.expansion_info(db.upcast()) {
-        if let Some(call_node) = expansion.call_node() {
-            return FileRange {
-                file_id: call_node.file_id.original_file(db.upcast()),
-                range: call_node.value.text_range(),
-            };
-        }
-    }
-
-    FileRange { file_id: node.file_id.original_file(db.upcast()), range: node.value.text_range() }
-}
-
-fn original_range_opt(
-    db: &dyn HirDatabase,
-    node: InFile<&SyntaxNode>,
-) -> Option<InFile<TextRange>> {
-    let expansion = node.file_id.expansion_info(db.upcast())?;
-
-    // the input node has only one token ?
-    let single = skip_trivia_token(node.value.first_token()?, Direction::Next)?
-        == skip_trivia_token(node.value.last_token()?, Direction::Prev)?;
-
-    Some(node.value.descendants().find_map(|it| {
-        let first = skip_trivia_token(it.first_token()?, Direction::Next)?;
-        let first = ascend_call_token(db, &expansion, node.with_value(first))?;
-
-        let last = skip_trivia_token(it.last_token()?, Direction::Prev)?;
-        let last = ascend_call_token(db, &expansion, node.with_value(last))?;
-
-        if (!single && first == last) || (first.file_id != last.file_id) {
-            return None;
-        }
-
-        Some(first.with_value(first.value.text_range().cover(last.value.text_range())))
-    })?)
-}
-
-fn ascend_call_token(
-    db: &dyn HirDatabase,
-    expansion: &ExpansionInfo,
-    token: InFile<SyntaxToken>,
-) -> Option<InFile<SyntaxToken>> {
-    let (mapped, origin) = expansion.map_token_up(token.as_ref())?;
-    if origin != Origin::Call {
-        return None;
-    }
-    if let Some(info) = mapped.file_id.expansion_info(db.upcast()) {
-        return ascend_call_token(db, &info, mapped);
-    }
-    Some(mapped)
-}
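
As a quick orientation aid for the API surface this diff lands on, here is a minimal caller-side sketch (hypothetical, not part of the commit). It assumes `Semantics::new`, `Semantics::resolve_path`, and `ide_db::RootDatabase` from unchanged parts of rust-analyzer, and only exercises pieces touched above: the new `PathResolution::ConstParam` variant, the `Impl` rename, and the added `resolve_lifetime_param`/`resolve_label` entry points.

use hir::{PathResolution, Semantics};
use ide_db::RootDatabase; // assumed: the IDE database type, defined outside this file
use syntax::ast;

// Hypothetical helper exercising the renamed/added resolution entry points.
fn classify(sema: &Semantics<RootDatabase>, path: &ast::Path, lifetime: &ast::Lifetime) {
    // `PathResolution` now includes `ConstParam` and refers to the renamed `Impl`.
    match sema.resolve_path(path) {
        Some(PathResolution::ConstParam(_)) => { /* resolved to a const generic parameter */ }
        Some(PathResolution::SelfType(_impl_def)) => { /* `Self` inside an `impl` block */ }
        _ => {}
    }
    // New in this change: lifetimes and loop labels get dedicated resolvers.
    let _lifetime_param = sema.resolve_lifetime_param(lifetime);
    let _label = sema.resolve_label(lifetime);
}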