11204: fix: `replace_qualified_name_with_use` does not use full item path for replacements r=Veykril a=Veykril
bors r+
Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
let infer = infer.as_ref();
let result = eval_const(
root,
- ConstEvalCtx {
+ &mut ConstEvalCtx {
exprs: &body.exprs,
pats: &body.pats,
local_data: HashMap::default(),
- infer,
+ infer: &mut |x| infer[x].clone(),
},
);
result
use chalk_ir::{IntTy, Scalar};
use hir_def::{
- builtin_type::BuiltinUint,
expr::{ArithOp, BinaryOp, Expr, Literal, Pat},
type_ref::ConstScalar,
};
use hir_expand::name::Name;
-use la_arena::Arena;
+use la_arena::{Arena, Idx};
-use crate::{Const, ConstData, ConstValue, InferenceResult, Interner, TyKind};
+use crate::{Const, ConstData, ConstValue, Interner, Ty, TyKind};
/// Extension trait for [`Const`]
pub trait ConstExt {
}
}
-#[derive(Clone)]
pub struct ConstEvalCtx<'a> {
pub exprs: &'a Arena<Expr>,
pub pats: &'a Arena<Pat>,
pub local_data: HashMap<Name, ComputedExpr>,
- pub infer: &'a InferenceResult,
+ pub infer: &'a mut dyn FnMut(Idx<Expr>) -> Ty,
}
#[derive(Debug, Clone)]
Panic(String),
}
-#[derive(Clone)]
+#[derive(Debug, Clone)]
pub enum ComputedExpr {
Literal(Literal),
Tuple(Box<[ComputedExpr]>),
}
}
-pub fn eval_const(expr: &Expr, mut ctx: ConstEvalCtx<'_>) -> Result<ComputedExpr, ConstEvalError> {
+pub fn eval_const(expr: &Expr, ctx: &mut ConstEvalCtx<'_>) -> Result<ComputedExpr, ConstEvalError> {
match expr {
Expr::Literal(l) => Ok(ComputedExpr::Literal(l.clone())),
&Expr::UnaryOp { expr, op } => {
- let ty = &ctx.infer[expr];
+ let ty = &(ctx.infer)(expr);
let ev = eval_const(&ctx.exprs[expr], ctx)?;
match op {
hir_def::expr::UnaryOp::Deref => Err(ConstEvalError::NotSupported("deref")),
}
}
&Expr::BinaryOp { lhs, rhs, op } => {
- let ty = &ctx.infer[lhs];
- let lhs = eval_const(&ctx.exprs[lhs], ctx.clone())?;
- let rhs = eval_const(&ctx.exprs[rhs], ctx.clone())?;
+ let ty = &(ctx.infer)(lhs);
+ let lhs = eval_const(&ctx.exprs[lhs], ctx)?;
+ let rhs = eval_const(&ctx.exprs[rhs], ctx)?;
let op = op.ok_or(ConstEvalError::IncompleteExpr)?;
let v1 = match lhs {
ComputedExpr::Literal(Literal::Int(v, _)) => v,
}
}
Expr::Block { statements, tail, .. } => {
+ let mut prev_values = HashMap::<Name, Option<ComputedExpr>>::default();
for statement in &**statements {
match statement {
&hir_def::expr::Statement::Let { pat, initializer, .. } => {
}
};
let value = match initializer {
- Some(x) => eval_const(&ctx.exprs[x], ctx.clone())?,
+ Some(x) => eval_const(&ctx.exprs[x], ctx)?,
None => continue,
};
- ctx.local_data.insert(name, value);
+ if !prev_values.contains_key(&name) {
+ let prev = ctx.local_data.insert(name.clone(), value);
+ prev_values.insert(name, prev);
+ } else {
+ ctx.local_data.insert(name, value);
+ }
}
&hir_def::expr::Statement::Expr { .. } => {
return Err(ConstEvalError::NotSupported("this kind of statement"))
}
}
}
- let tail_expr = match tail {
- &Some(x) => &ctx.exprs[x],
- None => return Ok(ComputedExpr::Tuple(Box::new([]))),
+ let r = match tail {
+ &Some(x) => eval_const(&ctx.exprs[x], ctx),
+ None => Ok(ComputedExpr::Tuple(Box::new([]))),
};
- eval_const(tail_expr, ctx)
+ // clean up local data, so the caller will receive the exact map that was passed to us
+ for (name, val) in prev_values {
+ match val {
+ Some(x) => ctx.local_data.insert(name, x),
+ None => ctx.local_data.remove(&name),
+ };
+ }
+ r
}
Expr::Path(p) => {
let name = p.mod_path().as_ident().ok_or(ConstEvalError::NotSupported("big paths"))?;
}
}
-// FIXME: support more than just evaluating literals
-pub fn eval_usize(expr: &Expr) -> Option<u64> {
- match expr {
- Expr::Literal(Literal::Uint(v, None | Some(BuiltinUint::Usize))) => (*v).try_into().ok(),
- _ => None,
+pub fn eval_usize(expr: Idx<Expr>, mut ctx: ConstEvalCtx<'_>) -> Option<u64> {
+ let expr = &ctx.exprs[expr];
+ if let Ok(ce) = eval_const(&expr, &mut ctx) {
+ match ce {
+ ComputedExpr::Literal(Literal::Int(x, _)) => return x.try_into().ok(),
+ ComputedExpr::Literal(Literal::Uint(x, _)) => return x.try_into().ok(),
+ _ => {}
+ }
}
+ None
}
/// Interns a possibly-unknown target usize
),
);
- let repeat_expr = &self.body.exprs[repeat];
- consteval::eval_usize(repeat_expr)
+ consteval::eval_usize(
+ repeat,
+ consteval::ConstEvalCtx {
+ exprs: &body.exprs,
+ pats: &body.pats,
+ local_data: Default::default(),
+ infer: &mut |x| self.infer_expr(x, &expected),
+ },
+ )
}
};
struct ErrorReplacer {
vars: usize,
}
- impl<'i> Folder<Interner> for ErrorReplacer {
+ impl Folder<Interner> for ErrorReplacer {
type Error = NoSolution;
fn as_dyn(&mut self) -> &mut dyn Folder<Interner, Error = Self::Error> {
use hir::{AsAssocItem, Semantics};
use ide_db::{
base_db::{AnchoredPath, FileId, FileLoader},
- defs::Definition,
+ defs::{Definition, IdentClass},
helpers::pick_best_token,
RootDatabase,
};
.filter_map(|token| {
let parent = token.parent()?;
if let Some(tt) = ast::TokenTree::cast(parent) {
- if let x @ Some(_) =
- try_lookup_include_path(sema, tt, token.clone(), position.file_id)
+ if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), position.file_id)
{
- return x;
+ return Some(vec![x]);
}
}
Some(
- Definition::from_token(sema, &token)
+ IdentClass::classify_token(sema, &token)?
+ .definitions()
.into_iter()
.flat_map(|def| {
try_find_trait_item_definition(sema.db, &def)
.unwrap_or_else(|| def_to_nav(sema.db, def))
})
- .collect::<Vec<_>>(),
+ .collect(),
)
})
.flatten()
tt: ast::TokenTree,
token: SyntaxToken,
file_id: FileId,
-) -> Option<Vec<NavigationTarget>> {
+) -> Option<NavigationTarget> {
let token = ast::String::cast(token)?;
let path = token.value()?.into_owned();
let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
}
let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
let size = sema.db.file_text(file_id).len().try_into().ok()?;
- Some(vec![NavigationTarget {
+ Some(NavigationTarget {
file_id,
full_range: TextRange::new(0.into(), size),
name: path.into(),
container_name: None,
description: None,
docs: None,
- }])
+ })
}
/// finds the trait definition of an impl'd item
use hir::Semantics;
use ide_db::{
base_db::{FileId, FilePosition},
- defs::Definition,
+ defs::{Definition, IdentClass},
helpers::{for_each_break_expr, for_each_tail_expr, node_ext::walk_expr, pick_best_token},
search::{FileReference, ReferenceCategory, SearchScope},
RootDatabase,
fn find_defs(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
sema.descend_into_macros(token)
.into_iter()
- .flat_map(|token| Definition::from_token(sema, &token))
+ .filter_map(|token| IdentClass::classify_token(sema, &token).map(IdentClass::definitions))
+ .flatten()
.collect()
}
use hir::{HasSource, Semantics};
use ide_db::{
base_db::FileRange,
- defs::Definition,
+ defs::{Definition, IdentClass},
helpers::{pick_best_token, FamousDefs},
FxIndexSet, RootDatabase,
};
.iter()
.filter_map(|token| {
let node = token.parent()?;
- let defs = Definition::from_token(sema, token);
- Some(defs.into_iter().zip(iter::once(node).cycle()))
+ let class = IdentClass::classify_token(sema, token)?;
+ Some(class.definitions().into_iter().zip(iter::once(node).cycle()))
})
.flatten()
.unique_by(|&(def, _)| def)
check(
r#"
/// This is a doc
+const FOO$0: usize = {
+ let b = 4;
+ let a = { let b = 2; let a = b; a } + { let a = 1; a + b };
+ a
+};
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: usize = 7
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ check(
+ r#"
+/// This is a doc
const FOO$0: usize = 2 - 3;
"#,
expect![[r#"
);
}
+#[test]
+fn array_repeat_exp() {
+ check(
+ r#"
+fn main() {
+ let til$0e4 = [0_u32; (4 * 8 * 8) / 32];
+}
+ "#,
+ expect![[r#"
+ *tile4*
+
+ ```rust
+ let tile4: [u32; 8]
+ ```
+ "#]],
+ );
+}
+
#[test]
fn hover_mod_def() {
check(
use hir::{db::DefDatabase, AsAssocItem, AssocItemContainer, Crate, Name, Semantics};
use ide_db::{
base_db::{CrateOrigin, FileId, FileLoader, FilePosition},
- defs::Definition,
+ defs::{Definition, IdentClass},
helpers::pick_best_token,
RootDatabase,
};
let navs = sema
.descend_into_macros(original_token.clone())
.into_iter()
- .map(|token| {
- Definition::from_token(sema, &token)
- .into_iter()
- .flat_map(|def| def_to_moniker(sema.db, def, current_crate))
- .collect::<Vec<_>>()
+ .filter_map(|token| {
+ IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| {
+ it.into_iter().flat_map(|def| def_to_moniker(sema.db, def, current_crate))
+ })
})
.flatten()
.unique()
use hir::{db::HirDatabase, Crate, Module, Semantics};
use ide_db::{
base_db::{FileId, FileRange, SourceDatabaseExt},
- defs::Definition,
+ defs::{Definition, IdentClass},
RootDatabase,
};
use rustc_hash::FxHashSet;
fn get_definition(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Definition> {
for token in sema.descend_into_macros(token) {
- let def = Definition::from_token(sema, &token);
- if let [x] = def.as_slice() {
- return Some(*x);
+ let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions);
+ if let Some(&[x]) = def.as_deref() {
+ return Some(x);
} else {
continue;
};
check_assist_not_applicable(
add_explicit_type,
r#"
+//- minicore: option
+
fn main() {
- let $0l = [0.0; 2+2];
+ let $0l = [0.0; Some(2).unwrap()];
}
"#,
);
impl FooB for Foo {
$0fn foo< 'lt>(& 'lt self){}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn macro_trait_dyn_absolute_path() {
+ // https://github.com/rust-analyzer/rust-analyzer/issues/11100
+ check_assist(
+ add_missing_impl_members,
+ r#"
+macro_rules! foo {
+ () => {
+ trait MacroTrait {
+ fn trait_method(_: &dyn ::core::marker::Sized);
+ }
+ }
+}
+foo!();
+struct Foo;
+
+impl MacroTrait for Foo {
+ $0
+}
+"#,
+ r#"
+macro_rules! foo {
+ () => {
+ trait MacroTrait {
+ fn trait_method(_: &dyn ::core::marker::Sized);
+ }
+ }
+}
+foo!();
+struct Foo;
+
+impl MacroTrait for Foo {
+ fn trait_method(_: &dyn ::core::marker::Sized) {
+ ${0:todo!()}
+ }
+
}
"#,
)
// Walk up the tree while we have the same binary operator
while let Some(parent_expr) = expr.syntax().parent().and_then(ast::BinExpr::cast) {
- if let Some(parent_op) = expr.op_kind() {
- if parent_op == op {
- expr = parent_expr
+ match expr.op_kind() {
+ Some(parent_op) if parent_op == op => {
+ expr = parent_expr;
}
+ _ => break,
}
}
cov_mark::check!(demorgan_double_parens);
check_assist(apply_demorgan, "fn f() { (x ||$0 x) }", "fn f() { !(!x && !x) }")
}
+
+ // https://github.com/rust-analyzer/rust-analyzer/issues/10963
+ #[test]
+ fn demorgan_doesnt_hang() {
+ check_assist(
+ apply_demorgan,
+ "fn f() { 1 || 3 &&$0 4 || 5 }",
+ "fn f() { !(!1 || !3 || !4) || 5 }",
+ )
+ }
}
}
}
+ let reference_modifier = match get_receiver_type(&ctx, &to_extract) {
+ Some(receiver_type) if receiver_type.is_mutable_reference() => "&mut ",
+ Some(receiver_type) if receiver_type.is_reference() => "&",
+ _ => "",
+ };
+
let anchor = Anchor::from(&to_extract)?;
let indent = anchor.syntax().prev_sibling_or_token()?.as_token()?.clone();
let target = to_extract.syntax().text_range();
match anchor {
Anchor::Before(_) | Anchor::Replace(_) => {
- format_to!(buf, "let {} = ", var_name)
+ format_to!(buf, "let {} = {}", var_name, reference_modifier)
+ }
+ Anchor::WrapInBlock(_) => {
+ format_to!(buf, "{{ let {} = {}", var_name, reference_modifier)
}
- Anchor::WrapInBlock(_) => format_to!(buf, "{{ let {} = ", var_name),
};
format_to!(buf, "{}", to_extract.syntax());
}
}
+fn get_receiver_type(ctx: &AssistContext, expression: &ast::Expr) -> Option<hir::Type> {
+ let receiver = get_receiver(expression.clone())?;
+ Some(ctx.sema.type_of_expr(&receiver)?.original())
+}
+
+/// In the expression `a.b.c.x()`, find `a`
+fn get_receiver(expression: ast::Expr) -> Option<ast::Expr> {
+ match expression {
+ ast::Expr::FieldExpr(field) if field.expr().is_some() => {
+ let nested_expression = &field.expr()?;
+ get_receiver(nested_expression.to_owned())
+ }
+ _ => Some(expression),
+ }
+}
+
#[derive(Debug)]
enum Anchor {
Before(SyntaxNode),
",
);
}
+
+ #[test]
+ fn test_extract_var_mutable_reference_parameter() {
+ check_assist(
+ extract_variable,
+ r#"
+struct S {
+ vec: Vec<u8>
+}
+
+fn foo(s: &mut S) {
+ $0s.vec$0.push(0);
+}"#,
+ r#"
+struct S {
+ vec: Vec<u8>
+}
+
+fn foo(s: &mut S) {
+ let $0var_name = &mut s.vec;
+ var_name.push(0);
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_mutable_reference_parameter_deep_nesting() {
+ check_assist(
+ extract_variable,
+ r#"
+struct Y {
+ field: X
+}
+struct X {
+ field: S
+}
+struct S {
+ vec: Vec<u8>
+}
+
+fn foo(f: &mut Y) {
+ $0f.field.field.vec$0.push(0);
+}"#,
+ r#"
+struct Y {
+ field: X
+}
+struct X {
+ field: S
+}
+struct S {
+ vec: Vec<u8>
+}
+
+fn foo(f: &mut Y) {
+ let $0var_name = &mut f.field.field.vec;
+ var_name.push(0);
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_reference_parameter() {
+ check_assist(
+ extract_variable,
+ r#"
+struct X;
+
+impl X {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: &S) {
+ $0s.sub$0.do_thing();
+}"#,
+ r#"
+struct X;
+
+impl X {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: &S) {
+ let $0x = &s.sub;
+ x.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_reference_parameter_deep_nesting() {
+ check_assist(
+ extract_variable,
+ r#"
+struct Z;
+impl Z {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct Y {
+ field: Z
+}
+
+struct X {
+ field: Y
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: &S) {
+ $0s.sub.field.field$0.do_thing();
+}"#,
+ r#"
+struct Z;
+impl Z {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct Y {
+ field: Z
+}
+
+struct X {
+ field: Y
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: &S) {
+ let $0z = &s.sub.field.field;
+ z.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_regular_parameter() {
+ check_assist(
+ extract_variable,
+ r#"
+struct X;
+
+impl X {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: S) {
+ $0s.sub$0.do_thing();
+}"#,
+ r#"
+struct X;
+
+impl X {
+ fn do_thing(&self) {
+
+ }
+}
+
+struct S {
+ sub: X
+}
+
+fn foo(s: S) {
+ let $0x = s.sub;
+ x.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_mutable_reference_local() {
+ check_assist(
+ extract_variable,
+ r#"
+struct X;
+
+struct S {
+ sub: X
+}
+
+impl S {
+ fn new() -> S {
+ S {
+ sub: X::new()
+ }
+ }
+}
+
+impl X {
+ fn new() -> X {
+ X { }
+ }
+ fn do_thing(&self) {
+
+ }
+}
+
+
+fn foo() {
+ let local = &mut S::new();
+ $0local.sub$0.do_thing();
+}"#,
+ r#"
+struct X;
+
+struct S {
+ sub: X
+}
+
+impl S {
+ fn new() -> S {
+ S {
+ sub: X::new()
+ }
+ }
+}
+
+impl X {
+ fn new() -> X {
+ X { }
+ }
+ fn do_thing(&self) {
+
+ }
+}
+
+
+fn foo() {
+ let local = &mut S::new();
+ let $0x = &mut local.sub;
+ x.do_thing();
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_extract_var_reference_local() {
+ check_assist(
+ extract_variable,
+ r#"
+struct X;
+
+struct S {
+ sub: X
+}
+
+impl S {
+ fn new() -> S {
+ S {
+ sub: X::new()
+ }
+ }
+}
+
+impl X {
+ fn new() -> X {
+ X { }
+ }
+ fn do_thing(&self) {
+
+ }
+}
+
+
+fn foo() {
+ let local = &S::new();
+ $0local.sub$0.do_thing();
+}"#,
+ r#"
+struct X;
+
+struct S {
+ sub: X
+}
+
+impl S {
+ fn new() -> S {
+ S {
+ sub: X::new()
+ }
+ }
+}
+
+impl X {
+ fn new() -> X {
+ X { }
+ }
+ fn do_thing(&self) {
+
+ }
+}
+
+
+fn foo() {
+ let local = &S::new();
+ let $0x = &local.sub;
+ x.do_thing();
+}"#,
+ );
+ }
}
use rustc_hash::FxHashMap;
use syntax::{
ast::{self, HasModuleItem},
- match_ast, AstNode,
+ match_ast, AstNode, SyntaxKind,
};
use crate::{
/// `spam: &mut Spam` insert text/label and `spam` lookup string will be
/// suggested.
pub(crate) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> {
- if !matches!(ctx.pattern_ctx, Some(PatternContext { is_param: Some(ParamKind::Function), .. }))
- {
+ let param_of_fn =
+ matches!(ctx.pattern_ctx, Some(PatternContext { is_param: Some(ParamKind::Function), .. }));
+
+ if !param_of_fn {
return None;
}
- let mut params = FxHashMap::default();
+ let mut file_params = FxHashMap::default();
- let me = ctx.token.ancestors().find_map(ast::Fn::cast);
- let mut process_fn = |func: ast::Fn| {
- if Some(&func) == me.as_ref() {
- return;
- }
- func.param_list().into_iter().flat_map(|it| it.params()).for_each(|param| {
+ let mut extract_params = |f: ast::Fn| {
+ f.param_list().into_iter().flat_map(|it| it.params()).for_each(|param| {
if let Some(pat) = param.pat() {
// FIXME: We should be able to turn these into SmolStr without having to allocate a String
- let text = param.syntax().text().to_string();
- let lookup = pat.syntax().text().to_string();
- params.entry(text).or_insert(lookup);
+ let whole_param = param.syntax().text().to_string();
+ let binding = pat.syntax().text().to_string();
+ file_params.entry(whole_param).or_insert(binding);
}
});
};
ast::SourceFile(it) => it.items().filter_map(|item| match item {
ast::Item::Fn(it) => Some(it),
_ => None,
- }).for_each(&mut process_fn),
+ }).for_each(&mut extract_params),
ast::ItemList(it) => it.items().filter_map(|item| match item {
ast::Item::Fn(it) => Some(it),
_ => None,
- }).for_each(&mut process_fn),
+ }).for_each(&mut extract_params),
ast::AssocItemList(it) => it.assoc_items().filter_map(|item| match item {
ast::AssocItem::Fn(it) => Some(it),
_ => None,
- }).for_each(&mut process_fn),
+ }).for_each(&mut extract_params),
_ => continue,
}
};
}
+ let function = ctx.token.ancestors().find_map(ast::Fn::cast)?;
+ let param_list = function.param_list()?;
+
+ remove_duplicated(&mut file_params, param_list.params());
+
let self_completion_items = ["self", "&self", "mut self", "&mut self"];
- if ctx.impl_def.is_some() && me?.param_list()?.params().next().is_none() {
+ if should_add_self_completions(ctx, param_list) {
self_completion_items.into_iter().for_each(|self_item| {
add_new_item_to_acc(ctx, acc, self_item.to_string(), self_item.to_string())
});
}
- params.into_iter().for_each(|(label, lookup)| add_new_item_to_acc(ctx, acc, label, lookup));
+ file_params.into_iter().try_for_each(|(whole_param, binding)| {
+ Some(add_new_item_to_acc(ctx, acc, surround_with_commas(ctx, whole_param), binding))
+ })?;
Some(())
}
+fn remove_duplicated(
+ file_params: &mut FxHashMap<String, String>,
+ fn_params: ast::AstChildren<ast::Param>,
+) {
+ fn_params.for_each(|param| {
+ let whole_param = param.syntax().text().to_string();
+ file_params.remove(&whole_param);
+
+ if let Some(pattern) = param.pat() {
+ let binding = pattern.syntax().text().to_string();
+ file_params.retain(|_, v| v != &binding);
+ }
+ })
+}
+
+fn should_add_self_completions(ctx: &CompletionContext, param_list: ast::ParamList) -> bool {
+ let inside_impl = ctx.impl_def.is_some();
+ let no_params = param_list.params().next().is_none() && param_list.self_param().is_none();
+
+ inside_impl && no_params
+}
+
+fn surround_with_commas(ctx: &CompletionContext, param: String) -> String {
+ match fallible_surround_with_commas(ctx, &param) {
+ Some(surrounded) => surrounded,
+ // fallback to the original parameter
+ None => param,
+ }
+}
+
+fn fallible_surround_with_commas(ctx: &CompletionContext, param: &str) -> Option<String> {
+ let next_token = {
+ let t = ctx.token.next_token()?;
+ match t.kind() {
+ SyntaxKind::WHITESPACE => t.next_token()?,
+ _ => t,
+ }
+ };
+
+ let trailing_comma_missing = matches!(next_token.kind(), SyntaxKind::IDENT);
+ let trailing = if trailing_comma_missing { "," } else { "" };
+
+ let previous_token = if matches!(ctx.token.kind(), SyntaxKind::IDENT | SyntaxKind::WHITESPACE) {
+ ctx.previous_token.as_ref()?
+ } else {
+ &ctx.token
+ };
+
+ let needs_leading = !matches!(previous_token.kind(), SyntaxKind::L_PAREN | SyntaxKind::COMMA);
+ let leading = if needs_leading { ", " } else { "" };
+
+ Some(format!("{}{}{}", leading, param, trailing))
+}
+
fn add_new_item_to_acc(
ctx: &CompletionContext,
acc: &mut Completions,
//! See `CompletionContext` structure.
+use std::iter;
+
use base_db::SourceDatabaseExt;
use hir::{Local, Name, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo};
use ide_db::{
mut fake_ident_token: SyntaxToken,
) {
let _p = profile::span("CompletionContext::expand_and_fill");
- loop {
- // Expand attributes
- if let (Some(actual_item), Some(item_with_fake_ident)) = (
- find_node_at_offset::<ast::Item>(&original_file, offset),
- find_node_at_offset::<ast::Item>(&speculative_file, offset),
- ) {
+ 'expansion: loop {
+ let parent_item =
+ |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
+ let ancestor_items = iter::successors(
+ Option::zip(
+ find_node_at_offset::<ast::Item>(&original_file, offset),
+ find_node_at_offset::<ast::Item>(&speculative_file, offset),
+ ),
+ |(a, b)| parent_item(a).zip(parent_item(b)),
+ );
+ for (actual_item, item_with_fake_ident) in ancestor_items {
match (
self.sema.expand_attr_macro(&actual_item),
self.sema.speculative_expand_attr_macro(
fake_ident_token.clone(),
),
) {
- (Some(actual_expansion), Some(speculative_expansion)) => {
- let new_offset = speculative_expansion.1.text_range().start();
+ // maybe parent items have attributes
+ (None, None) => (),
+ // successful expansions
+ (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+ let new_offset = fake_mapped_token.text_range().start();
if new_offset > actual_expansion.text_range().end() {
- break;
+ break 'expansion;
}
original_file = actual_expansion;
- speculative_file = speculative_expansion.0;
- fake_ident_token = speculative_expansion.1;
+ speculative_file = fake_expansion;
+ fake_ident_token = fake_mapped_token;
offset = new_offset;
- continue;
+ continue 'expansion;
}
- (None, None) => (),
- _ => break,
+ // exactly one expansion failed, inconsistent state so stop expanding completely
+ _ => break 'expansion,
}
}
None => break,
};
- if let (Some(actual_expansion), Some(speculative_expansion)) = (
+ match (
self.sema.expand(&actual_macro_call),
self.sema.speculative_expand(
&actual_macro_call,
&speculative_args,
- fake_ident_token,
+ fake_ident_token.clone(),
),
) {
- let new_offset = speculative_expansion.1.text_range().start();
- if new_offset > actual_expansion.text_range().end() {
- break;
+ // successful expansions
+ (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+ let new_offset = fake_mapped_token.text_range().start();
+ if new_offset > actual_expansion.text_range().end() {
+ break;
+ }
+ original_file = actual_expansion;
+ speculative_file = fake_expansion;
+ fake_ident_token = fake_mapped_token;
+ offset = new_offset;
+ continue;
}
- original_file = actual_expansion;
- speculative_file = speculative_expansion.0;
- fake_ident_token = speculative_expansion.1;
- offset = new_offset;
- } else {
- break;
+ _ => break,
}
- } else {
- break;
}
+
+ break;
}
self.fill(&original_file, speculative_file, offset);
fn baz(file$0 id: u32) {}
"#,
expect![[r#"
- bn file_id: usize
+ bn file_id: usize,
+ kw mut
+ "#]],
+ );
+}
+
+#[test]
+fn repeated_param_name() {
+ check(
+ r#"
+fn foo(file_id: usize) {}
+fn bar(file_id: u32, $0) {}
+"#,
+ expect![[r#"
kw mut
"#]],
);
#[test]
fn in_impl_after_self() {
- // FIXME: self completions should not be here
check(
r#"
struct A {}
}
"#,
expect![[r#"
- bn self
- bn &self
- bn mut self
- bn &mut self
bn file_id: usize
kw mut
sp Self
}
impl Definition {
- pub fn from_token(
- sema: &Semantics<RootDatabase>,
- token: &SyntaxToken,
- ) -> ArrayVec<Definition, 2> {
- let parent = match token.parent() {
- Some(parent) => parent,
- None => return Default::default(),
- };
- // resolve derives if possible
- if let Some(ident) = ast::Ident::cast(token.clone()) {
- let attr = ast::TokenTree::cast(parent.clone())
- .and_then(|tt| tt.parent_meta())
- .and_then(|meta| meta.parent_attr());
- if let Some(attr) = attr {
- return sema
- .resolve_derive_ident(&attr, &ident)
- .map(Into::into)
- .into_iter()
- .collect();
- }
- }
- Self::from_node(sema, &parent)
- }
-
- pub fn from_node(sema: &Semantics<RootDatabase>, node: &SyntaxNode) -> ArrayVec<Definition, 2> {
- let mut res = ArrayVec::new();
- (|| {
- match_ast! {
- match node {
- ast::Name(name) => {
- match NameClass::classify(&sema, &name)? {
- NameClass::Definition(it) | NameClass::ConstReference(it) => res.push(it),
- NameClass::PatFieldShorthand { local_def, field_ref } => {
- res.push(Definition::Local(local_def));
- res.push(Definition::Field(field_ref));
- }
- }
- },
- ast::NameRef(name_ref) => {
- match NameRefClass::classify(sema, &name_ref)? {
- NameRefClass::Definition(it) => res.push(it),
- NameRefClass::FieldShorthand { local_ref, field_ref } => {
- res.push(Definition::Local(local_ref));
- res.push(Definition::Field(field_ref));
- }
- }
- },
- ast::Lifetime(lifetime) => {
- let def = if let Some(x) = NameClass::classify_lifetime(&sema, &lifetime) {
- NameClass::defined(x)
- } else {
- NameRefClass::classify_lifetime(&sema, &lifetime).and_then(|class| match class {
- NameRefClass::Definition(it) => Some(it),
- _ => None,
- })
- };
- if let Some(def) = def {
- res.push(def);
- }
- },
- _ => (),
- }
- }
- Some(())
- })();
- res
- }
-
pub fn canonical_module_path(&self, db: &RootDatabase) -> Option<impl Iterator<Item = Module>> {
self.module(db).map(|it| it.path_to_root(db).into_iter().rev())
}
}
}
+pub enum IdentClass {
+ NameClass(NameClass),
+ NameRefClass(NameRefClass),
+}
+
+impl IdentClass {
+ pub fn classify_node(sema: &Semantics<RootDatabase>, node: &SyntaxNode) -> Option<IdentClass> {
+ match_ast! {
+ match node {
+ ast::Name(name) => NameClass::classify(sema, &name).map(IdentClass::NameClass),
+ ast::NameRef(name_ref) => NameRefClass::classify(sema, &name_ref).map(IdentClass::NameRefClass),
+ ast::Lifetime(lifetime) => {
+ NameClass::classify_lifetime(sema, &lifetime)
+ .map(IdentClass::NameClass)
+ .or_else(|| NameRefClass::classify_lifetime(sema, &lifetime).map(IdentClass::NameRefClass))
+ },
+ _ => None,
+ }
+ }
+ }
+
+ pub fn classify_token(
+ sema: &Semantics<RootDatabase>,
+ token: &SyntaxToken,
+ ) -> Option<IdentClass> {
+ let parent = token.parent()?;
+ // resolve derives if possible
+ if let Some(ident) = ast::Ident::cast(token.clone()) {
+ let attr = ast::TokenTree::cast(parent.clone())
+ .and_then(|tt| tt.parent_meta())
+ .and_then(|meta| meta.parent_attr());
+ if let Some(attr) = attr {
+ return NameRefClass::classify_derive(sema, &attr, &ident)
+ .map(IdentClass::NameRefClass);
+ }
+ }
+ Self::classify_node(sema, &parent)
+ }
+
+ pub fn definitions(self) -> ArrayVec<Definition, 2> {
+ let mut res = ArrayVec::new();
+ match self {
+ IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => {
+ res.push(it)
+ }
+ IdentClass::NameClass(NameClass::PatFieldShorthand { local_def, field_ref }) => {
+ res.push(Definition::Local(local_def));
+ res.push(Definition::Field(field_ref));
+ }
+ IdentClass::NameRefClass(NameRefClass::Definition(it)) => res.push(it),
+ IdentClass::NameRefClass(NameRefClass::FieldShorthand { local_ref, field_ref }) => {
+ res.push(Definition::Local(local_ref));
+ res.push(Definition::Field(field_ref));
+ }
+ }
+ res
+ }
+}
+
/// On a first blush, a single `ast::Name` defines a single definition at some
/// scope. That is, that, by just looking at the syntactical category, we can
/// unambiguously define the semantic category.
_ => None,
}
}
+
+ pub fn classify_derive(
+ sema: &Semantics<RootDatabase>,
+ attr: &ast::Attr,
+ ident: &ast::Ident,
+ ) -> Option<NameRefClass> {
+ sema.resolve_derive_ident(&attr, &ident).map(Definition::from).map(NameRefClass::Definition)
+ }
}
impl_from!(
LIFETIME_IDENT if is_next(|it| is_text(it), true) => {
mods.push(do_ws(after, tok));
}
- AS_KW => {
+ AS_KW | DYN_KW => {
mods.push(do_ws(after, tok));
}
T![;] => {