//! See `CompletionContext` structure.
-use hir::{Local, ScopeDef, Semantics, SemanticsScope, Type};
+use base_db::SourceDatabaseExt;
+use hir::{Local, Name, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo};
use ide_db::{
base_db::{FilePosition, SourceDatabase},
call_info::ActiveParameter,
};
use syntax::{
algo::find_node_at_offset,
- ast::{self, NameOrNameRef, NameOwner},
+ ast::{self, HasName, NameOrNameRef},
match_ast, AstNode, NodeOrToken,
SyntaxKind::{self, *},
SyntaxNode, SyntaxToken, TextRange, TextSize, T,
pub(super) in_loop_body: bool,
}
+/// Describes the pattern the cursor is completing in, recorded by `classify_name`.
+#[derive(Debug)]
+pub(super) struct PatternContext {
+ /// Whether the surrounding position requires an irrefutable pattern
+ /// (`let`, params, `for`) or allows refutable ones (match arms, conditions).
+ pub(super) refutability: PatternRefutability,
+ /// `Some` when the pattern is a function or closure parameter.
+ pub(super) is_param: Option<ParamKind>,
+}
+
+/// The kind of lifetime-like token under the cursor, recorded by `classify_lifetime`.
+#[derive(Debug)]
+pub(super) enum LifetimeContext {
+ /// Inside a lifetime parameter declaration; carries the param node as found
+ /// in the macro-expanded file, if any.
+ LifetimeParam(Option<ast::LifetimeParam>),
+ /// A lifetime in any other position where lifetimes are allowed.
+ Lifetime,
+ /// A label reference, i.e. the target of `break`/`continue`.
+ LabelRef,
+ /// A label definition (`'label: ...`).
+ LabelDef,
+}
+
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum CallKind {
Pat,
Mac,
Expr,
}
+
+/// Distinguishes which kind of callable a parameter pattern belongs to.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub(crate) enum ParamKind {
+ Function,
+ Closure,
+}
/// `CompletionContext` is created early during completion to figure out, where
/// exactly is the cursor, syntax-wise.
#[derive(Debug)]
pub(super) original_token: SyntaxToken,
/// The token before the cursor, in the macro-expanded file.
pub(super) token: SyntaxToken,
+ /// The crate of the current file.
pub(super) krate: Option<hir::Crate>,
pub(super) expected_name: Option<NameOrNameRef>,
pub(super) expected_type: Option<Type>,
pub(super) function_def: Option<ast::Fn>,
/// The parent impl of the cursor position if it exists.
pub(super) impl_def: Option<ast::Impl>,
- pub(super) name_ref_syntax: Option<ast::NameRef>,
-
- // potentially set if we are completing a lifetime
- pub(super) lifetime_syntax: Option<ast::Lifetime>,
- pub(super) lifetime_param_syntax: Option<ast::LifetimeParam>,
- pub(super) lifetime_allowed: bool,
- pub(super) is_label_ref: bool,
-
- // potentially set if we are completing a name
- pub(super) is_pat_or_const: Option<PatternRefutability>,
- pub(super) is_param: bool,
+ pub(super) name_syntax: Option<ast::NameLike>,
pub(super) completion_location: Option<ImmediateLocation>,
pub(super) prev_sibling: Option<ImmediatePrevSibling>,
pub(super) attribute_under_caret: Option<ast::Attr>,
pub(super) previous_token: Option<SyntaxToken>,
+ pub(super) lifetime_ctx: Option<LifetimeContext>,
+ pub(super) pattern_ctx: Option<PatternContext>,
pub(super) path_context: Option<PathCompletionContext>,
- pub(super) active_parameter: Option<ActiveParameter>,
pub(super) locals: Vec<(String, Local)>,
pub(super) incomplete_let: bool,
let fake_ident_token =
file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap();
- let krate = sema.to_module_def(position.file_id).map(|m| m.krate());
let original_token =
original_file.syntax().token_at_offset(position.offset).left_biased()?;
- let token = sema.descend_into_macros(original_token.clone());
+ let token = sema.descend_into_macros_single(original_token.clone());
let scope = sema.scope_at_offset(&token, position.offset);
+ let krate = scope.krate();
let mut locals = vec![];
scope.process_all_names(&mut |name, scope| {
if let ScopeDef::Local(local) = scope {
expected_type: None,
function_def: None,
impl_def: None,
- name_ref_syntax: None,
- lifetime_syntax: None,
- lifetime_param_syntax: None,
- lifetime_allowed: false,
- is_label_ref: false,
- is_pat_or_const: None,
- is_param: false,
+ name_syntax: None,
+ lifetime_ctx: None,
+ pattern_ctx: None,
completion_location: None,
prev_sibling: None,
attribute_under_caret: None,
previous_token: None,
path_context: None,
- active_parameter: ActiveParameter::at(db, position),
locals,
incomplete_let: false,
no_completion_required: false,
};
+ ctx.expand_and_fill(
+ original_file.syntax().clone(),
+ file_with_fake_ident.syntax().clone(),
+ position.offset,
+ fake_ident_token,
+ );
+ Some(ctx)
+ }
- let mut original_file = original_file.syntax().clone();
- let mut speculative_file = file_with_fake_ident.syntax().clone();
- let mut offset = position.offset;
- let mut fake_ident_token = fake_ident_token;
-
- // Are we inside a macro call?
- while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
- find_node_at_offset::<ast::MacroCall>(&original_file, offset),
- find_node_at_offset::<ast::MacroCall>(&speculative_file, offset),
- ) {
- if actual_macro_call.path().as_ref().map(|s| s.syntax().text())
- != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text())
- {
- break;
+ /// Do the attribute expansion at the current cursor position for both original file and fake file
+ /// as long as possible. As soon as one of the two expansions fails we stop to stay in sync.
+ fn expand_and_fill(
+ &mut self,
+ mut original_file: SyntaxNode,
+ mut speculative_file: SyntaxNode,
+ mut offset: TextSize,
+ mut fake_ident_token: SyntaxToken,
+ ) {
+ loop {
+ // Expand attributes
+ if let (Some(actual_item), Some(item_with_fake_ident)) = (
+ find_node_at_offset::<ast::Item>(&original_file, offset),
+ find_node_at_offset::<ast::Item>(&speculative_file, offset),
+ ) {
+ match (
+ self.sema.expand_attr_macro(&actual_item),
+ self.sema.speculative_expand_attr_macro(
+ &actual_item,
+ &item_with_fake_ident,
+ fake_ident_token.clone(),
+ ),
+ ) {
+ (Some(actual_expansion), Some(speculative_expansion)) => {
+ let new_offset = speculative_expansion.1.text_range().start();
+ // The fake ident ended up past the real expansion's range:
+ // the two files diverged, so stop expanding.
+ if new_offset > actual_expansion.text_range().end() {
+ break;
+ }
+ original_file = actual_expansion;
+ speculative_file = speculative_expansion.0;
+ fake_ident_token = speculative_expansion.1;
+ offset = new_offset;
+ continue;
+ }
+ // No attribute expansion on either side; fall through to fn-like macros.
+ (None, None) => (),
+ // Only one of the two expansions succeeded — out of sync, stop.
+ _ => break,
+ }
}
- let speculative_args = match macro_call_with_fake_ident.token_tree() {
- Some(tt) => tt,
- None => break,
- };
- if let (Some(actual_expansion), Some(speculative_expansion)) = (
- ctx.sema.expand(&actual_macro_call),
- ctx.sema.speculative_expand(
- &actual_macro_call,
- &speculative_args,
- fake_ident_token,
- ),
+
+ // Expand fn-like macro calls
+ if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
+ find_node_at_offset::<ast::MacroCall>(&original_file, offset),
+ find_node_at_offset::<ast::MacroCall>(&speculative_file, offset),
) {
- let new_offset = speculative_expansion.1.text_range().start();
- if new_offset > actual_expansion.text_range().end() {
+ let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
+ let mac_call_path1 =
+ macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
+ // Differing macro paths mean the speculative file diverged from the original.
+ if mac_call_path0 != mac_call_path1 {
+ break;
+ }
+ let speculative_args = match macro_call_with_fake_ident.token_tree() {
+ Some(tt) => tt,
+ None => break,
+ };
+
+ if let (Some(actual_expansion), Some(speculative_expansion)) = (
+ self.sema.expand(&actual_macro_call),
+ self.sema.speculative_expand(
+ &actual_macro_call,
+ &speculative_args,
+ fake_ident_token,
+ ),
+ ) {
+ let new_offset = speculative_expansion.1.text_range().start();
+ if new_offset > actual_expansion.text_range().end() {
+ break;
+ }
+ original_file = actual_expansion;
+ speculative_file = speculative_expansion.0;
+ fake_ident_token = speculative_expansion.1;
+ offset = new_offset;
+ } else {
break;
}
- original_file = actual_expansion;
- speculative_file = speculative_expansion.0;
- fake_ident_token = speculative_expansion.1;
- offset = new_offset;
} else {
break;
}
}
- ctx.fill(&original_file, speculative_file, offset);
- Some(ctx)
+
+ self.fill(&original_file, speculative_file, offset);
}
/// Checks whether completions in that particular case don't make much sense.
self.previous_token.as_ref().map_or(false, |tok| tok.kind() == kind)
}
- pub(crate) fn expects_assoc_item(&self) -> bool {
- matches!(self.completion_location, Some(ImmediateLocation::Trait | ImmediateLocation::Impl))
+ /// The receiver expression of the method call or field access under the
+ /// cursor, if any.
+ pub(crate) fn dot_receiver(&self) -> Option<&ast::Expr> {
+ match &self.completion_location {
+ Some(
+ ImmediateLocation::MethodCall { receiver, .. }
+ | ImmediateLocation::FieldAccess { receiver, .. },
+ ) => receiver.as_ref(),
+ _ => None,
+ }
pub(crate) fn has_dot_receiver(&self) -> bool {
)
}
- pub(crate) fn dot_receiver(&self) -> Option<&ast::Expr> {
- match &self.completion_location {
- Some(
- ImmediateLocation::MethodCall { receiver, .. }
- | ImmediateLocation::FieldAccess { receiver, .. },
- ) => receiver.as_ref(),
- _ => None,
- }
+ /// Whether the container at the cursor expects associated items, i.e. the
+ /// cursor is directly inside a trait or impl block.
+ pub(crate) fn expects_assoc_item(&self) -> bool {
+ matches!(self.completion_location, Some(ImmediateLocation::Trait | ImmediateLocation::Impl))
}
pub(crate) fn expects_non_trait_assoc_item(&self) -> bool {
}
+ /// Whether the cursor sits at a statement position inside a block's
+ /// statement list.
pub(crate) fn has_block_expr_parent(&self) -> bool {
- matches!(self.completion_location, Some(ImmediateLocation::BlockExpr))
+ matches!(self.completion_location, Some(ImmediateLocation::StmtList))
}
pub(crate) fn expects_ident_pat_or_ref_expr(&self) -> bool {
| ImmediateLocation::ModDeclaration(_)
| ImmediateLocation::RecordPat(_)
| ImmediateLocation::RecordExpr(_)
+ | ImmediateLocation::Rename
)
)
}
self.path_context.as_ref().and_then(|it| it.qualifier.as_ref())
}
+ /// Checks if an item is visible and not `doc(hidden)` at the completion site.
+ /// Private items still count as visible when the defining crate's sources are
+ /// editable (not a library source root); see `is_visible_impl`.
+ pub(crate) fn is_visible<I>(&self, item: &I) -> bool
+ where
+ I: hir::HasVisibility + hir::HasAttrs + hir::HasCrate + Copy,
+ {
+ self.is_visible_impl(&item.visibility(self.db), &item.attrs(self.db), item.krate(self.db))
+ }
+
+ /// Whether the given [`ScopeDef`] is `#[doc(hidden)]` as seen from the
+ /// completing crate. Defaults to `false` when the def's attrs or crate
+ /// cannot be determined.
+ pub(crate) fn is_scope_def_hidden(&self, scope_def: &ScopeDef) -> bool {
+ if let (Some(attrs), Some(krate)) = (scope_def.attrs(self.db), scope_def.krate(self.db)) {
+ return self.is_doc_hidden(&attrs, krate);
+ }
+
+ false
+ }
+
+ /// Check if an item is `#[doc(hidden)]`.
+ /// Defaults to `false` when the item's attrs or crate cannot be determined.
+ pub(crate) fn is_item_hidden(&self, item: &hir::ItemInNs) -> bool {
+ let attrs = item.attrs(self.db);
+ let krate = item.krate(self.db);
+ match (attrs, krate) {
+ (Some(attrs), Some(krate)) => self.is_doc_hidden(&attrs, krate),
+ _ => false,
+ }
+ }
+
+ /// Whether the token before the cursor is the `!` of a macro call.
+ pub(crate) fn is_immediately_after_macro_bang(&self) -> bool {
+ self.token.kind() == BANG && self.token.parent().map_or(false, |it| it.kind() == MACRO_CALL)
+ }
+
+ /// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items.
+ /// Every remaining name/def pair in scope is handed to `f`.
+ pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
+ self.scope.process_all_names(&mut |name, def| {
+ if self.is_scope_def_hidden(&def) {
+ return;
+ }
+
+ f(name, def);
+ })
+ }
+
+ /// Shared visibility check: an item is shown when `vis` is reachable from the
+ /// completing module, or — even when private — when the defining crate's source
+ /// root is editable (not a library). `doc(hidden)` items are additionally
+ /// filtered out via `is_doc_hidden`.
+ fn is_visible_impl(
+ &self,
+ vis: &hir::Visibility,
+ attrs: &hir::Attrs,
+ defining_crate: hir::Crate,
+ ) -> bool {
+ let module = match self.scope.module() {
+ Some(it) => it,
+ None => return false,
+ };
+ if !vis.is_visible_from(self.db, module.into()) {
+ // If the definition location is editable, also show private items
+ let root_file = defining_crate.root_file(self.db);
+ let source_root_id = self.db.file_source_root(root_file);
+ let is_editable = !self.db.source_root(source_root_id).is_library;
+ return is_editable;
+ }
+
+ !self.is_doc_hidden(attrs, defining_crate)
+ }
+
+ /// Whether `attrs` mark an item as `doc(hidden)` from the viewpoint of the
+ /// current crate; `doc(hidden)` only takes effect outside the defining crate.
+ /// An unknown current crate conservatively hides everything.
+ fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool {
+ let krate = match self.krate {
+ Some(it) => it,
+ None => return true,
+ };
+ if krate != defining_crate && attrs.has_doc_hidden() {
+ // `doc(hidden)` items are only completed within the defining crate.
+ return true;
+ }
+
+ false
+ }
+
fn fill_impl_def(&mut self) {
self.impl_def = self
.sema
cov_mark::hit!(expected_type_let_without_leading_char);
let ty = it.pat()
.and_then(|pat| self.sema.type_of_pat(&pat))
- .or_else(|| it.initializer().and_then(|it| self.sema.type_of_expr(&it)));
- let name = if let Some(ast::Pat::IdentPat(ident)) = it.pat() {
- ident.name().map(NameOrNameRef::Name)
- } else {
- None
+ .or_else(|| it.initializer().and_then(|it| self.sema.type_of_expr(&it)))
+ .map(TypeInfo::original);
+ let name = match it.pat() {
+ Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
+ Some(_) | None => None,
};
(ty, name)
})
.unwrap_or((None, None))
},
- ast::RecordExprFieldList(_it) => {
- cov_mark::hit!(expected_type_struct_field_without_leading_char);
+ ast::RecordExprFieldList(it) => {
// wouldn't try {} be nice...
(|| {
- let expr_field = self.token.prev_sibling_or_token()?
- .into_node()
- .and_then(ast::RecordExprField::cast)?;
- let (_, _, ty) = self.sema.resolve_record_field(&expr_field)?;
- Some((
- Some(ty),
- expr_field.field_name().map(NameOrNameRef::NameRef),
- ))
+ if self.token.kind() == T![..]
+ || self.token.prev_token().map(|t| t.kind()) == Some(T![..])
+ {
+ cov_mark::hit!(expected_type_struct_func_update);
+ let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
+ let ty = self.sema.type_of_expr(&record_expr.into())?;
+ Some((
+ Some(ty.original),
+ None
+ ))
+ } else {
+ cov_mark::hit!(expected_type_struct_field_without_leading_char);
+ let expr_field = self.token.prev_sibling_or_token()?
+ .into_node()
+ .and_then(ast::RecordExprField::cast)?;
+ let (_, _, ty) = self.sema.resolve_record_field(&expr_field)?;
+ Some((
+ Some(ty),
+ expr_field.field_name().map(NameOrNameRef::NameRef),
+ ))
+ }
})().unwrap_or((None, None))
},
ast::RecordExprField(it) => {
cov_mark::hit!(expected_type_struct_field_with_leading_char);
(
- it.expr().as_ref().and_then(|e| self.sema.type_of_expr(e)),
+ it.expr().as_ref().and_then(|e| self.sema.type_of_expr(e)).map(TypeInfo::original),
it.field_name().map(NameOrNameRef::NameRef),
)
},
ast::MatchExpr(it) => {
cov_mark::hit!(expected_type_match_arm_without_leading_char);
- let ty = it.expr()
- .and_then(|e| self.sema.type_of_expr(&e));
+ let ty = it.expr().and_then(|e| self.sema.type_of_expr(&e)).map(TypeInfo::original);
(ty, None)
},
ast::IfExpr(it) => {
cov_mark::hit!(expected_type_if_let_without_leading_char);
let ty = it.condition()
.and_then(|cond| cond.expr())
- .and_then(|e| self.sema.type_of_expr(&e));
+ .and_then(|e| self.sema.type_of_expr(&e))
+ .map(TypeInfo::original);
(ty, None)
},
ast::IdentPat(it) => {
cov_mark::hit!(expected_type_if_let_with_leading_char);
cov_mark::hit!(expected_type_match_arm_with_leading_char);
- let ty = self.sema.type_of_pat(&ast::Pat::from(it));
+ let ty = self.sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
(ty, None)
},
ast::Fn(it) => {
},
ast::ClosureExpr(it) => {
let ty = self.sema.type_of_expr(&it.into());
- ty.and_then(|ty| ty.as_callable(self.db))
+ ty.and_then(|ty| ty.original.as_callable(self.db))
.map(|c| (Some(c.return_type()), None))
.unwrap_or((None, None))
},
ast::Stmt(_it) => (None, None),
+ ast::Item(__) => (None, None),
_ => {
match node.parent() {
Some(n) => {
self.completion_location =
determine_location(&self.sema, original_file, offset, &name_like);
self.prev_sibling = determine_prev_sibling(&name_like);
+ self.name_syntax =
+ find_node_at_offset(original_file, name_like.syntax().text_range().start());
match name_like {
ast::NameLike::Lifetime(lifetime) => {
self.classify_lifetime(original_file, lifetime, offset);
lifetime: ast::Lifetime,
offset: TextSize,
) {
- self.lifetime_syntax =
- find_node_at_offset(original_file, lifetime.syntax().text_range().start());
if let Some(parent) = lifetime.syntax().parent() {
if parent.kind() == ERROR {
return;
}
- match_ast! {
+ self.lifetime_ctx = Some(match_ast! {
match parent {
- ast::LifetimeParam(_it) => {
- self.lifetime_allowed = true;
- self.lifetime_param_syntax =
- self.sema.find_node_at_offset_with_macros(original_file, offset);
- },
- ast::BreakExpr(_it) => self.is_label_ref = true,
- ast::ContinueExpr(_it) => self.is_label_ref = true,
- ast::Label(_it) => (),
- _ => self.lifetime_allowed = true,
+ ast::LifetimeParam(_it) => LifetimeContext::LifetimeParam(self.sema.find_node_at_offset_with_macros(original_file, offset)),
+ ast::BreakExpr(_it) => LifetimeContext::LabelRef,
+ ast::ContinueExpr(_it) => LifetimeContext::LabelRef,
+ ast::Label(_it) => LifetimeContext::LabelDef,
+ _ => LifetimeContext::Lifetime,
}
- }
+ });
}
}
fn classify_name(&mut self, name: ast::Name) {
- if let Some(bind_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) {
- self.is_pat_or_const = Some(PatternRefutability::Refutable);
- // if any of these is here our bind pat can't be a const pat anymore
- let complex_ident_pat = bind_pat.at_token().is_some()
- || bind_pat.ref_token().is_some()
- || bind_pat.mut_token().is_some();
- if complex_ident_pat {
- self.is_pat_or_const = None;
- } else {
- let irrefutable_pat = bind_pat.syntax().ancestors().find_map(|node| {
- match_ast! {
- match node {
- ast::LetStmt(it) => Some(it.pat()),
- ast::Param(it) => Some(it.pat()),
- _ => None,
- }
- }
- });
- if let Some(Some(pat)) = irrefutable_pat {
- // This check is here since we could be inside a pattern in the initializer expression of the let statement.
- if pat.syntax().text_range().contains_range(bind_pat.syntax().text_range()) {
- self.is_pat_or_const = Some(PatternRefutability::Irrefutable);
- }
- }
+ self.fill_impl_def();
- let is_name_in_field_pat = bind_pat
+ if let Some(bind_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) {
+ // Bail if the name is the shorthand of a record field pattern; no
+ // `PatternContext` is recorded for that position.
+ let is_name_in_field_pat = bind_pat
+ .syntax()
+ .parent()
+ .and_then(ast::RecordPatField::cast)
+ .map_or(false, |pat_field| pat_field.name_ref().is_none());
+ if is_name_in_field_pat {
+ return;
+ }
+ if bind_pat.is_simple_ident() {
+ let mut is_param = None;
+ // The first non-pattern ancestor decides whether this position is refutable.
+ let refutability = bind_pat
.syntax()
- .parent()
- .and_then(ast::RecordPatField::cast)
- .map_or(false, |pat_field| pat_field.name_ref().is_none());
- if is_name_in_field_pat {
- self.is_pat_or_const = None;
- }
+ .ancestors()
+ .skip_while(|it| ast::Pat::can_cast(it.kind()))
+ .next()
+ .map_or(PatternRefutability::Irrefutable, |node| {
+ match_ast! {
+ match node {
+ ast::LetStmt(__) => PatternRefutability::Irrefutable,
+ ast::Param(param) => {
+ // For closures the `ClosureExpr` node sits two
+ // ancestors above the `Param` node.
+ let is_closure_param = param
+ .syntax()
+ .ancestors()
+ .nth(2)
+ .and_then(ast::ClosureExpr::cast)
+ .is_some();
+ is_param = Some(if is_closure_param {
+ ParamKind::Closure
+ } else {
+ ParamKind::Function
+ });
+ PatternRefutability::Irrefutable
+ },
+ ast::MatchArm(__) => PatternRefutability::Refutable,
+ ast::Condition(__) => PatternRefutability::Refutable,
+ ast::ForExpr(__) => PatternRefutability::Irrefutable,
+ _ => PatternRefutability::Irrefutable,
+ }
+ }
+ });
+ self.pattern_ctx = Some(PatternContext { refutability, is_param });
}
-
- self.fill_impl_def();
}
-
- self.is_param |= is_node::<ast::Param>(name.syntax());
}
fn classify_name_ref(&mut self, original_file: &SyntaxNode, name_ref: ast::NameRef) {
self.fill_impl_def();
- self.name_ref_syntax =
- find_node_at_offset(original_file, name_ref.syntax().text_range().start());
-
self.function_def = self
.sema
.token_ancestors_with_macros(self.token.clone())
if let Some(stmt) = ast::ExprStmt::cast(node.clone()) {
return Some(stmt.syntax().text_range() == name_ref.syntax().text_range());
}
- if let Some(block) = ast::BlockExpr::cast(node) {
+ if let Some(stmt_list) = ast::StmtList::cast(node) {
return Some(
- block.tail_expr().map(|e| e.syntax().text_range())
+ stmt_list.tail_expr().map(|e| e.syntax().text_range())
== Some(name_ref.syntax().text_range()),
);
}
syntax.covering_element(range).ancestors().find_map(N::cast)
}
-fn is_node<N: AstNode>(node: &SyntaxNode) -> bool {
- match node.ancestors().find_map(N::cast) {
- None => false,
- Some(n) => n.syntax().text_range() == node.text_range(),
- }
-}
-
fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
if let Some(qual) = path.qualifier() {
return Some((qual, false));
fn check_expected_type_and_name(ra_fixture: &str, expect: Expect) {
let (db, pos) = position(ra_fixture);
- let completion_context = CompletionContext::new(&db, pos, &TEST_CONFIG).unwrap();
+ let config = TEST_CONFIG;
+ let completion_context = CompletionContext::new(&db, pos, &config).unwrap();
let ty = completion_context
.expected_type
expect![[r#"ty: u32, name: t"#]],
);
}
+
+ #[test]
+ fn expected_type_functional_update() {
+ cov_mark::check!(expected_type_struct_func_update);
+ // Completing after `..` in a record literal expects the struct type
+ // itself and no particular field name.
+ check_expected_type_and_name(
+ r#"
+struct Foo { field: u32 }
+fn foo() {
+ Foo {
+ ..$0
+ }
+}
+"#,
+ expect![[r#"ty: Foo, name: ?"#]],
+ );
+ }
}