Merge #11204
author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>
           Wed, 5 Jan 2022 22:47:30 +0000 (22:47 +0000)
committer  GitHub <noreply@github.com>
           Wed, 5 Jan 2022 22:47:30 +0000 (22:47 +0000)
11204: fix: `replace_qualified_name_with_use` does not use full item path for replacements r=Veykril a=Veykril

bors r+

Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
19 files changed:
crates/hir/src/lib.rs
crates/hir_ty/src/consteval.rs
crates/hir_ty/src/infer/expr.rs
crates/hir_ty/src/lib.rs
crates/ide/src/goto_definition.rs
crates/ide/src/highlight_related.rs
crates/ide/src/hover.rs
crates/ide/src/hover/tests.rs
crates/ide/src/moniker.rs
crates/ide/src/static_index.rs
crates/ide_assists/src/handlers/add_explicit_type.rs
crates/ide_assists/src/handlers/add_missing_impl_members.rs
crates/ide_assists/src/handlers/apply_demorgan.rs
crates/ide_assists/src/handlers/extract_variable.rs
crates/ide_completion/src/completions/fn_param.rs
crates/ide_completion/src/context.rs
crates/ide_completion/src/tests/fn_param.rs
crates/ide_db/src/defs.rs
crates/ide_db/src/helpers/insert_whitespace_into_node.rs

diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 6306ae534da59b0bb1429087ddd292716a6eee01..9a1a893650cd64befee3e39009619b38916dae3f 100644
@@ -1540,11 +1540,11 @@ pub fn eval(self, db: &dyn HirDatabase) -> Result<ComputedExpr, ConstEvalError>
         let infer = infer.as_ref();
         let result = eval_const(
             root,
-            ConstEvalCtx {
+            &mut ConstEvalCtx {
                 exprs: &body.exprs,
                 pats: &body.pats,
                 local_data: HashMap::default(),
-                infer,
+                infer: &mut |x| infer[x].clone(),
             },
         );
         result
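
The shape of this change, a context that borrows a type-lookup callback instead of a finished InferenceResult, can be sketched in isolation. This is a simplified model with u32/String standing in for Idx<Expr>/Ty, not the real rust-analyzer types:

struct ConstEvalCtx<'a> {
    // the evaluator now asks for types on demand instead of indexing a
    // completed inference result
    infer: &'a mut dyn FnMut(u32) -> String,
}

fn eval_const(expr: u32, ctx: &mut ConstEvalCtx<'_>) -> String {
    (ctx.infer)(expr)
}

fn main() {
    let mut types = |idx: u32| format!("ty#{}", idx);
    let mut ctx = ConstEvalCtx { infer: &mut types };
    assert_eq!(eval_const(3, &mut ctx), "ty#3");
}
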
diff --git a/crates/hir_ty/src/consteval.rs b/crates/hir_ty/src/consteval.rs
index 0005a86b7f65f3fef318ed15ae80ba0be8e0bd50..68e6e0582e3700344d2c8f91805aa3584dd60618 100644
@@ -4,14 +4,13 @@
 
 use chalk_ir::{IntTy, Scalar};
 use hir_def::{
-    builtin_type::BuiltinUint,
     expr::{ArithOp, BinaryOp, Expr, Literal, Pat},
     type_ref::ConstScalar,
 };
 use hir_expand::name::Name;
-use la_arena::Arena;
+use la_arena::{Arena, Idx};
 
-use crate::{Const, ConstData, ConstValue, InferenceResult, Interner, TyKind};
+use crate::{Const, ConstData, ConstValue, Interner, Ty, TyKind};
 
 /// Extension trait for [`Const`]
 pub trait ConstExt {
@@ -41,12 +40,11 @@ fn is_unknown(&self) -> bool {
     }
 }
 
-#[derive(Clone)]
 pub struct ConstEvalCtx<'a> {
     pub exprs: &'a Arena<Expr>,
     pub pats: &'a Arena<Pat>,
     pub local_data: HashMap<Name, ComputedExpr>,
-    pub infer: &'a InferenceResult,
+    pub infer: &'a mut dyn FnMut(Idx<Expr>) -> Ty,
 }
 
 #[derive(Debug, Clone)]
@@ -57,7 +55,7 @@ pub enum ConstEvalError {
     Panic(String),
 }
 
-#[derive(Clone)]
+#[derive(Debug, Clone)]
 pub enum ComputedExpr {
     Literal(Literal),
     Tuple(Box<[ComputedExpr]>),
@@ -130,11 +128,11 @@ fn is_valid(scalar: &Scalar, value: i128) -> bool {
     }
 }
 
-pub fn eval_const(expr: &Expr, mut ctx: ConstEvalCtx<'_>) -> Result<ComputedExpr, ConstEvalError> {
+pub fn eval_const(expr: &Expr, ctx: &mut ConstEvalCtx<'_>) -> Result<ComputedExpr, ConstEvalError> {
     match expr {
         Expr::Literal(l) => Ok(ComputedExpr::Literal(l.clone())),
         &Expr::UnaryOp { expr, op } => {
-            let ty = &ctx.infer[expr];
+            let ty = &(ctx.infer)(expr);
             let ev = eval_const(&ctx.exprs[expr], ctx)?;
             match op {
                 hir_def::expr::UnaryOp::Deref => Err(ConstEvalError::NotSupported("deref")),
@@ -190,9 +188,9 @@ pub fn eval_const(expr: &Expr, mut ctx: ConstEvalCtx<'_>) -> Result<ComputedExpr
             }
         }
         &Expr::BinaryOp { lhs, rhs, op } => {
-            let ty = &ctx.infer[lhs];
-            let lhs = eval_const(&ctx.exprs[lhs], ctx.clone())?;
-            let rhs = eval_const(&ctx.exprs[rhs], ctx.clone())?;
+            let ty = &(ctx.infer)(lhs);
+            let lhs = eval_const(&ctx.exprs[lhs], ctx)?;
+            let rhs = eval_const(&ctx.exprs[rhs], ctx)?;
             let op = op.ok_or(ConstEvalError::IncompleteExpr)?;
             let v1 = match lhs {
                 ComputedExpr::Literal(Literal::Int(v, _)) => v,
@@ -241,6 +239,7 @@ pub fn eval_const(expr: &Expr, mut ctx: ConstEvalCtx<'_>) -> Result<ComputedExpr
             }
         }
         Expr::Block { statements, tail, .. } => {
+            let mut prev_values = HashMap::<Name, Option<ComputedExpr>>::default();
             for statement in &**statements {
                 match statement {
                     &hir_def::expr::Statement::Let { pat, initializer, .. } => {
@@ -252,21 +251,33 @@ pub fn eval_const(expr: &Expr, mut ctx: ConstEvalCtx<'_>) -> Result<ComputedExpr
                             }
                         };
                         let value = match initializer {
-                            Some(x) => eval_const(&ctx.exprs[x], ctx.clone())?,
+                            Some(x) => eval_const(&ctx.exprs[x], ctx)?,
                             None => continue,
                         };
-                        ctx.local_data.insert(name, value);
+                        if !prev_values.contains_key(&name) {
+                            let prev = ctx.local_data.insert(name.clone(), value);
+                            prev_values.insert(name, prev);
+                        } else {
+                            ctx.local_data.insert(name, value);
+                        }
                     }
                     &hir_def::expr::Statement::Expr { .. } => {
                         return Err(ConstEvalError::NotSupported("this kind of statement"))
                     }
                 }
             }
-            let tail_expr = match tail {
-                &Some(x) => &ctx.exprs[x],
-                None => return Ok(ComputedExpr::Tuple(Box::new([]))),
+            let r = match tail {
+                &Some(x) => eval_const(&ctx.exprs[x], ctx),
+                None => Ok(ComputedExpr::Tuple(Box::new([]))),
             };
-            eval_const(tail_expr, ctx)
+            // clean up local data, so the caller will receive the exact map that was passed to us
+            for (name, val) in prev_values {
+                match val {
+                    Some(x) => ctx.local_data.insert(name, x),
+                    None => ctx.local_data.remove(&name),
+                };
+            }
+            r
         }
         Expr::Path(p) => {
             let name = p.mod_path().as_ident().ok_or(ConstEvalError::NotSupported("big paths"))?;
@@ -280,12 +291,16 @@ pub fn eval_const(expr: &Expr, mut ctx: ConstEvalCtx<'_>) -> Result<ComputedExpr
     }
 }
 
-// FIXME: support more than just evaluating literals
-pub fn eval_usize(expr: &Expr) -> Option<u64> {
-    match expr {
-        Expr::Literal(Literal::Uint(v, None | Some(BuiltinUint::Usize))) => (*v).try_into().ok(),
-        _ => None,
+pub fn eval_usize(expr: Idx<Expr>, mut ctx: ConstEvalCtx<'_>) -> Option<u64> {
+    let expr = &ctx.exprs[expr];
+    if let Ok(ce) = eval_const(&expr, &mut ctx) {
+        match ce {
+            ComputedExpr::Literal(Literal::Int(x, _)) => return x.try_into().ok(),
+            ComputedExpr::Literal(Literal::Uint(x, _)) => return x.try_into().ok(),
+            _ => {}
+        }
     }
+    None
 }
 
 /// Interns a possibly-unknown target usize
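
The save-and-restore scheme in the Block arm above keeps shadowing correct now that the context is passed by &mut instead of cloned. A minimal sketch, assuming String/i64 stand-ins for Name/ComputedExpr:

use std::collections::HashMap;

fn eval_block(locals: &mut HashMap<String, i64>) -> i64 {
    // remember each shadowed binding once (first write wins) ...
    let mut prev: HashMap<String, Option<i64>> = HashMap::new();
    for (name, value) in [("b", 2), ("a", 2)] {
        let old = locals.insert(name.to_string(), value);
        prev.entry(name.to_string()).or_insert(old);
    }
    let result = locals["a"];
    // ... then restore, so the caller gets back exactly the map it passed in
    for (name, old) in prev {
        match old {
            Some(v) => locals.insert(name, v),
            None => locals.remove(&name),
        };
    }
    result
}

fn main() {
    let mut locals = HashMap::from([("b".to_string(), 4)]);
    assert_eq!(eval_block(&mut locals), 2);
    assert_eq!(locals.get("b"), Some(&4)); // outer `b` restored
    assert!(!locals.contains_key("a"));    // block-local `a` removed
}
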
diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs
index a892a680d7e4408f42a9e8f456acefdfef8ed8a5..54b1680214e9faeb8fc2ed989c29139d533492e2 100644
@@ -799,8 +799,15 @@ fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
                             ),
                         );
 
-                        let repeat_expr = &self.body.exprs[repeat];
-                        consteval::eval_usize(repeat_expr)
+                        consteval::eval_usize(
+                            repeat,
+                            consteval::ConstEvalCtx {
+                                exprs: &body.exprs,
+                                pats: &body.pats,
+                                local_data: Default::default(),
+                                infer: &mut |x| self.infer_expr(x, &expected),
+                            },
+                        )
                     }
                 };
 
diff --git a/crates/hir_ty/src/lib.rs b/crates/hir_ty/src/lib.rs
index 1950820a37272a363a698933ff6d3354f2c43500..f1280fcc101ce6ecb2bf6e21a026bf31a84b6c2f 100644
@@ -358,7 +358,7 @@ pub fn replace_errors_with_variables<T>(t: &T) -> Canonical<T::Result>
     struct ErrorReplacer {
         vars: usize,
     }
-    impl<'i> Folder<Interner> for ErrorReplacer {
+    impl Folder<Interner> for ErrorReplacer {
         type Error = NoSolution;
 
         fn as_dyn(&mut self) -> &mut dyn Folder<Interner, Error = Self::Error> {
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index 4d638e687ae35d0f7c199eacf38cfc58b78190bc..29fc1fd2e03bd2b9ecdea234ff949a9578bec4d0 100644
@@ -4,7 +4,7 @@
 use hir::{AsAssocItem, Semantics};
 use ide_db::{
     base_db::{AnchoredPath, FileId, FileLoader},
-    defs::Definition,
+    defs::{Definition, IdentClass},
     helpers::pick_best_token,
     RootDatabase,
 };
@@ -46,20 +46,20 @@ pub(crate) fn goto_definition(
         .filter_map(|token| {
             let parent = token.parent()?;
             if let Some(tt) = ast::TokenTree::cast(parent) {
-                if let x @ Some(_) =
-                    try_lookup_include_path(sema, tt, token.clone(), position.file_id)
+                if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), position.file_id)
                 {
-                    return x;
+                    return Some(vec![x]);
                 }
             }
             Some(
-                Definition::from_token(sema, &token)
+                IdentClass::classify_token(sema, &token)?
+                    .definitions()
                     .into_iter()
                     .flat_map(|def| {
                         try_find_trait_item_definition(sema.db, &def)
                             .unwrap_or_else(|| def_to_nav(sema.db, def))
                     })
-                    .collect::<Vec<_>>(),
+                    .collect(),
             )
         })
         .flatten()
@@ -74,7 +74,7 @@ fn try_lookup_include_path(
     tt: ast::TokenTree,
     token: SyntaxToken,
     file_id: FileId,
-) -> Option<Vec<NavigationTarget>> {
+) -> Option<NavigationTarget> {
     let token = ast::String::cast(token)?;
     let path = token.value()?.into_owned();
     let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
@@ -84,7 +84,7 @@ fn try_lookup_include_path(
     }
     let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
     let size = sema.db.file_text(file_id).len().try_into().ok()?;
-    Some(vec![NavigationTarget {
+    Some(NavigationTarget {
         file_id,
         full_range: TextRange::new(0.into(), size),
         name: path.into(),
@@ -93,7 +93,7 @@ fn try_lookup_include_path(
         container_name: None,
         description: None,
         docs: None,
-    }])
+    })
 }
 
 /// finds the trait definition of an impl'd item
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index 054b8f297b5fb0b666441562c05e8687a64c380e..b6d9e4021d9b8132e9403e0d5db993b0000d3fbf 100644
@@ -1,7 +1,7 @@
 use hir::Semantics;
 use ide_db::{
     base_db::{FileId, FilePosition},
-    defs::Definition,
+    defs::{Definition, IdentClass},
     helpers::{for_each_break_expr, for_each_tail_expr, node_ext::walk_expr, pick_best_token},
     search::{FileReference, ReferenceCategory, SearchScope},
     RootDatabase,
@@ -293,7 +293,8 @@ fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange
 fn find_defs(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
     sema.descend_into_macros(token)
         .into_iter()
-        .flat_map(|token| Definition::from_token(sema, &token))
+        .filter_map(|token| IdentClass::classify_token(sema, &token).map(IdentClass::definitions))
+        .flatten()
         .collect()
 }
 
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index 9d19d7c1c015e35fb4c6a14fd2733a0ced74c1c6..21bea25a5f77ae7b629ba3f09c12ac558af726c5 100644
@@ -9,7 +9,7 @@
 use hir::{HasSource, Semantics};
 use ide_db::{
     base_db::FileRange,
-    defs::Definition,
+    defs::{Definition, IdentClass},
     helpers::{pick_best_token, FamousDefs},
     FxIndexSet, RootDatabase,
 };
@@ -129,8 +129,8 @@ pub(crate) fn hover(
         .iter()
         .filter_map(|token| {
             let node = token.parent()?;
-            let defs = Definition::from_token(sema, token);
-            Some(defs.into_iter().zip(iter::once(node).cycle()))
+            let class = IdentClass::classify_token(sema, token)?;
+            Some(class.definitions().into_iter().zip(iter::once(node).cycle()))
         })
         .flatten()
         .unique_by(|&(def, _)| def)
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index 2bfda1aff2e75ac8586c70d64ccb3787ae9a75f6..ed76c84ab47e8e379953c11678edbe86320b161b 100644
@@ -3350,6 +3350,31 @@ fn hover_const_eval() {
     check(
         r#"
 /// This is a doc
+const FOO$0: usize = {
+    let b = 4;
+    let a = { let b = 2; let a = b; a } + { let a = 1; a + b };
+    a
+};
+"#,
+        expect![[r#"
+            *FOO*
+
+            ```rust
+            test
+            ```
+
+            ```rust
+            const FOO: usize = 7
+            ```
+
+            ---
+
+            This is a doc
+        "#]],
+    );
+    check(
+        r#"
+/// This is a doc
 const FOO$0: usize = 2 - 3;
 "#,
         expect![[r#"
@@ -3443,6 +3468,24 @@ fn foo() {
     );
 }
 
+#[test]
+fn array_repeat_exp() {
+    check(
+        r#"
+fn main() {
+    let til$0e4 = [0_u32; (4 * 8 * 8) / 32];
+}
+        "#,
+        expect![[r#"
+            *tile4*
+
+            ```rust
+            let tile4: [u32; 8]
+            ```
+            "#]],
+    );
+}
+
 #[test]
 fn hover_mod_def() {
     check(
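
For the record, the expected value in the first new hover test follows from the shadowing the evaluator now handles: the first inner block binds its own b = 2 and yields 2, the second yields 1 + 4 = 5 against the outer b = 4, so a = 2 + 5 = 7.
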
diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs
index c93c471f4948e7cf8e75bb31c9337c13367f1cf1..53f6d7ec7ad8bdd7aebbc508fa1e9de40d0b0c0d 100644
@@ -4,7 +4,7 @@
 use hir::{db::DefDatabase, AsAssocItem, AssocItemContainer, Crate, Name, Semantics};
 use ide_db::{
     base_db::{CrateOrigin, FileId, FileLoader, FilePosition},
-    defs::Definition,
+    defs::{Definition, IdentClass},
     helpers::pick_best_token,
     RootDatabase,
 };
@@ -82,11 +82,10 @@ pub(crate) fn moniker(
     let navs = sema
         .descend_into_macros(original_token.clone())
         .into_iter()
-        .map(|token| {
-            Definition::from_token(sema, &token)
-                .into_iter()
-                .flat_map(|def| def_to_moniker(sema.db, def, current_crate))
-                .collect::<Vec<_>>()
+        .filter_map(|token| {
+            IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| {
+                it.into_iter().flat_map(|def| def_to_moniker(sema.db, def, current_crate))
+            })
         })
         .flatten()
         .unique()
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index 77d202cc39c5774998cbaae6fe25221036d55b7f..d5bfbd18941c3c8395940567d0391d8945568b22 100644
@@ -6,7 +6,7 @@
 use hir::{db::HirDatabase, Crate, Module, Semantics};
 use ide_db::{
     base_db::{FileId, FileRange, SourceDatabaseExt},
-    defs::Definition,
+    defs::{Definition, IdentClass},
     RootDatabase,
 };
 use rustc_hash::FxHashSet;
@@ -195,9 +195,9 @@ pub fn compute(analysis: &Analysis) -> StaticIndex {
 
 fn get_definition(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Definition> {
     for token in sema.descend_into_macros(token) {
-        let def = Definition::from_token(sema, &token);
-        if let [x] = def.as_slice() {
-            return Some(*x);
+        let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions);
+        if let Some(&[x]) = def.as_deref() {
+            return Some(x);
         } else {
             continue;
         };
diff --git a/crates/ide_assists/src/handlers/add_explicit_type.rs b/crates/ide_assists/src/handlers/add_explicit_type.rs
index d7e1be900ffb0c9658ad93170dccfd5b767a7616..945edfd999b1d6e0d77b699dcf47c7876bdb77ff 100644
@@ -208,8 +208,10 @@ fn main() {
         check_assist_not_applicable(
             add_explicit_type,
             r#"
+//- minicore: option
+
 fn main() {
-    let $0l = [0.0; 2+2];
+    let $0l = [0.0; Some(2).unwrap()];
 }
 "#,
         );
diff --git a/crates/ide_assists/src/handlers/add_missing_impl_members.rs b/crates/ide_assists/src/handlers/add_missing_impl_members.rs
index a10eca10d11946c1b350b0e1189b56c2d91d2de1..b6af72cbac547cdb1bf718ef3d21332c88e4c04b 100644
@@ -938,6 +938,47 @@ fn foo<'lt>(&'lt self) {}
 impl FooB for Foo {
     $0fn foo< 'lt>(& 'lt self){}
 
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn macro_trait_dyn_absolute_path() {
+        // https://github.com/rust-analyzer/rust-analyzer/issues/11100
+        check_assist(
+            add_missing_impl_members,
+            r#"
+macro_rules! foo {
+    () => {
+        trait MacroTrait {
+            fn trait_method(_: &dyn ::core::marker::Sized);
+        }
+    }
+}
+foo!();
+struct Foo;
+
+impl MacroTrait for Foo {
+    $0
+}
+"#,
+            r#"
+macro_rules! foo {
+    () => {
+        trait MacroTrait {
+            fn trait_method(_: &dyn ::core::marker::Sized);
+        }
+    }
+}
+foo!();
+struct Foo;
+
+impl MacroTrait for Foo {
+    fn trait_method(_: &dyn ::core::marker::Sized) {
+        ${0:todo!()}
+    }
+
 }
 "#,
         )
diff --git a/crates/ide_assists/src/handlers/apply_demorgan.rs b/crates/ide_assists/src/handlers/apply_demorgan.rs
index b3fcf6578a3ba913e615e1c37efa4c1bb8308d2c..21907ab41fb93628abc6cde8fe283d836c42f33c 100644
@@ -42,10 +42,11 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext) -> Option<(
 
     // Walk up the tree while we have the same binary operator
     while let Some(parent_expr) = expr.syntax().parent().and_then(ast::BinExpr::cast) {
-        if let Some(parent_op) = expr.op_kind() {
-            if parent_op == op {
-                expr = parent_expr
+        match expr.op_kind() {
+            Some(parent_op) if parent_op == op => {
+                expr = parent_expr;
             }
+            _ => break,
         }
     }
 
@@ -220,4 +221,14 @@ fn demorgan_doesnt_double_parens() {
         cov_mark::check!(demorgan_double_parens);
         check_assist(apply_demorgan, "fn f() { (x ||$0 x) }", "fn f() { !(!x && !x) }")
     }
+
+    // https://github.com/rust-analyzer/rust-analyzer/issues/10963
+    #[test]
+    fn demorgan_doesnt_hang() {
+        check_assist(
+            apply_demorgan,
+            "fn f() { 1 || 3 &&$0 4 || 5 }",
+            "fn f() { !(!1 || !3 || !4) || 5 }",
+        )
+    }
 }
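
The loop rewrite above fixes a hang: in the old form, a parent whose operator differed from op satisfied the while let but took neither branch, so expr never advanced and the condition re-evaluated the same parent forever. The explicit _ => break arm, exercised by demorgan_doesnt_hang, terminates the walk instead.
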
diff --git a/crates/ide_assists/src/handlers/extract_variable.rs b/crates/ide_assists/src/handlers/extract_variable.rs
index 7a57ab3b9b7cc2b45722653605c480e6881d3ed3..aaed2b67fe8f56b087a5b5762e8d41129107411a 100644
@@ -52,6 +52,12 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option
         }
     }
 
+    let reference_modifier = match get_receiver_type(&ctx, &to_extract) {
+        Some(receiver_type) if receiver_type.is_mutable_reference() => "&mut ",
+        Some(receiver_type) if receiver_type.is_reference() => "&",
+        _ => "",
+    };
+
     let anchor = Anchor::from(&to_extract)?;
     let indent = anchor.syntax().prev_sibling_or_token()?.as_token()?.clone();
     let target = to_extract.syntax().text_range();
@@ -79,9 +85,11 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option
 
             match anchor {
                 Anchor::Before(_) | Anchor::Replace(_) => {
-                    format_to!(buf, "let {} = ", var_name)
+                    format_to!(buf, "let {} = {}", var_name, reference_modifier)
+                }
+                Anchor::WrapInBlock(_) => {
+                    format_to!(buf, "{{ let {} = {}", var_name, reference_modifier)
                 }
-                Anchor::WrapInBlock(_) => format_to!(buf, "{{ let {} = ", var_name),
             };
             format_to!(buf, "{}", to_extract.syntax());
 
@@ -146,6 +154,22 @@ fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> {
     }
 }
 
+fn get_receiver_type(ctx: &AssistContext, expression: &ast::Expr) -> Option<hir::Type> {
+    let receiver = get_receiver(expression.clone())?;
+    Some(ctx.sema.type_of_expr(&receiver)?.original())
+}
+
+/// In the expression `a.b.c.x()`, find `a`
+fn get_receiver(expression: ast::Expr) -> Option<ast::Expr> {
+    match expression {
+        ast::Expr::FieldExpr(field) if field.expr().is_some() => {
+            let nested_expression = &field.expr()?;
+            get_receiver(nested_expression.to_owned())
+        }
+        _ => Some(expression),
+    }
+}
+
 #[derive(Debug)]
 enum Anchor {
     Before(SyntaxNode),
@@ -900,4 +924,330 @@ fn extract_var_no_block_body() {
 ",
         );
     }
+
+    #[test]
+    fn test_extract_var_mutable_reference_parameter() {
+        check_assist(
+            extract_variable,
+            r#"
+struct S {
+    vec: Vec<u8>
+}
+
+fn foo(s: &mut S) {
+    $0s.vec$0.push(0);
+}"#,
+            r#"
+struct S {
+    vec: Vec<u8>
+}
+
+fn foo(s: &mut S) {
+    let $0var_name = &mut s.vec;
+    var_name.push(0);
+}"#,
+        );
+    }
+
+    #[test]
+    fn test_extract_var_mutable_reference_parameter_deep_nesting() {
+        check_assist(
+            extract_variable,
+            r#"
+struct Y {
+    field: X
+}
+struct X {
+    field: S
+}
+struct S {
+    vec: Vec<u8>
+}
+
+fn foo(f: &mut Y) {
+    $0f.field.field.vec$0.push(0);
+}"#,
+            r#"
+struct Y {
+    field: X
+}
+struct X {
+    field: S
+}
+struct S {
+    vec: Vec<u8>
+}
+
+fn foo(f: &mut Y) {
+    let $0var_name = &mut f.field.field.vec;
+    var_name.push(0);
+}"#,
+        );
+    }
+
+    #[test]
+    fn test_extract_var_reference_parameter() {
+        check_assist(
+            extract_variable,
+            r#"
+struct X;
+
+impl X {
+    fn do_thing(&self) {
+
+    }
+}
+
+struct S {
+    sub: X
+}
+
+fn foo(s: &S) {
+    $0s.sub$0.do_thing();
+}"#,
+            r#"
+struct X;
+
+impl X {
+    fn do_thing(&self) {
+
+    }
+}
+
+struct S {
+    sub: X
+}
+
+fn foo(s: &S) {
+    let $0x = &s.sub;
+    x.do_thing();
+}"#,
+        );
+    }
+
+    #[test]
+    fn test_extract_var_reference_parameter_deep_nesting() {
+        check_assist(
+            extract_variable,
+            r#"
+struct Z;
+impl Z {
+    fn do_thing(&self) {
+
+    }
+}
+
+struct Y {
+    field: Z
+}
+
+struct X {
+    field: Y
+}
+
+struct S {
+    sub: X
+}
+
+fn foo(s: &S) {
+    $0s.sub.field.field$0.do_thing();
+}"#,
+            r#"
+struct Z;
+impl Z {
+    fn do_thing(&self) {
+
+    }
+}
+
+struct Y {
+    field: Z
+}
+
+struct X {
+    field: Y
+}
+
+struct S {
+    sub: X
+}
+
+fn foo(s: &S) {
+    let $0z = &s.sub.field.field;
+    z.do_thing();
+}"#,
+        );
+    }
+
+    #[test]
+    fn test_extract_var_regular_parameter() {
+        check_assist(
+            extract_variable,
+            r#"
+struct X;
+
+impl X {
+    fn do_thing(&self) {
+
+    }
+}
+
+struct S {
+    sub: X
+}
+
+fn foo(s: S) {
+    $0s.sub$0.do_thing();
+}"#,
+            r#"
+struct X;
+
+impl X {
+    fn do_thing(&self) {
+
+    }
+}
+
+struct S {
+    sub: X
+}
+
+fn foo(s: S) {
+    let $0x = s.sub;
+    x.do_thing();
+}"#,
+        );
+    }
+
+    #[test]
+    fn test_extract_var_mutable_reference_local() {
+        check_assist(
+            extract_variable,
+            r#"
+struct X;
+
+struct S {
+    sub: X
+}
+
+impl S {
+    fn new() -> S {
+        S {
+            sub: X::new()
+        }
+    }
+}
+
+impl X {
+    fn new() -> X {
+        X { }
+    }
+    fn do_thing(&self) {
+
+    }
+}
+
+
+fn foo() {
+    let local = &mut S::new();
+    $0local.sub$0.do_thing();
+}"#,
+            r#"
+struct X;
+
+struct S {
+    sub: X
+}
+
+impl S {
+    fn new() -> S {
+        S {
+            sub: X::new()
+        }
+    }
+}
+
+impl X {
+    fn new() -> X {
+        X { }
+    }
+    fn do_thing(&self) {
+
+    }
+}
+
+
+fn foo() {
+    let local = &mut S::new();
+    let $0x = &mut local.sub;
+    x.do_thing();
+}"#,
+        );
+    }
+
+    #[test]
+    fn test_extract_var_reference_local() {
+        check_assist(
+            extract_variable,
+            r#"
+struct X;
+
+struct S {
+    sub: X
+}
+
+impl S {
+    fn new() -> S {
+        S {
+            sub: X::new()
+        }
+    }
+}
+
+impl X {
+    fn new() -> X {
+        X { }
+    }
+    fn do_thing(&self) {
+
+    }
+}
+
+
+fn foo() {
+    let local = &S::new();
+    $0local.sub$0.do_thing();
+}"#,
+            r#"
+struct X;
+
+struct S {
+    sub: X
+}
+
+impl S {
+    fn new() -> S {
+        S {
+            sub: X::new()
+        }
+    }
+}
+
+impl X {
+    fn new() -> X {
+        X { }
+    }
+    fn do_thing(&self) {
+
+    }
+}
+
+
+fn foo() {
+    let local = &S::new();
+    let $0x = &local.sub;
+    x.do_thing();
+}"#,
+        );
+    }
 }
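
The borrow the assist now chooses for the extracted binding reduces to one rule. A hedged sketch of that rule only (the real code queries hir::Type on the receiver, as shown above):

// which prefix `extract_variable` gives the new initializer, based on
// whether the receiver type is a reference and whether it is mutable
fn reference_modifier(is_reference: bool, is_mutable: bool) -> &'static str {
    match (is_reference, is_mutable) {
        (true, true) => "&mut ", // e.g. extracting `s.vec` when `s: &mut S`
        (true, false) => "&",    // e.g. extracting `s.sub` when `s: &S`
        _ => "",                 // owned receivers are taken as-is
    }
}

fn main() {
    assert_eq!(reference_modifier(true, true), "&mut ");
    assert_eq!(reference_modifier(false, false), "");
}
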
diff --git a/crates/ide_completion/src/completions/fn_param.rs b/crates/ide_completion/src/completions/fn_param.rs
index 5acda464982cbf21a20d75ccbaecc17b539763bd..bb4ce0a24dc00d13b29b8f757cb8c3755ee55c92 100644
@@ -3,7 +3,7 @@
 use rustc_hash::FxHashMap;
 use syntax::{
     ast::{self, HasModuleItem},
-    match_ast, AstNode,
+    match_ast, AstNode, SyntaxKind,
 };
 
 use crate::{
 /// `spam: &mut Spam` insert text/label and `spam` lookup string will be
 /// suggested.
 pub(crate) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) -> Option<()> {
-    if !matches!(ctx.pattern_ctx, Some(PatternContext { is_param: Some(ParamKind::Function), .. }))
-    {
+    let param_of_fn =
+        matches!(ctx.pattern_ctx, Some(PatternContext { is_param: Some(ParamKind::Function), .. }));
+
+    if !param_of_fn {
         return None;
     }
 
-    let mut params = FxHashMap::default();
+    let mut file_params = FxHashMap::default();
 
-    let me = ctx.token.ancestors().find_map(ast::Fn::cast);
-    let mut process_fn = |func: ast::Fn| {
-        if Some(&func) == me.as_ref() {
-            return;
-        }
-        func.param_list().into_iter().flat_map(|it| it.params()).for_each(|param| {
+    let mut extract_params = |f: ast::Fn| {
+        f.param_list().into_iter().flat_map(|it| it.params()).for_each(|param| {
             if let Some(pat) = param.pat() {
                 // FIXME: We should be able to turn these into SmolStr without having to allocate a String
-                let text = param.syntax().text().to_string();
-                let lookup = pat.syntax().text().to_string();
-                params.entry(text).or_insert(lookup);
+                let whole_param = param.syntax().text().to_string();
+                let binding = pat.syntax().text().to_string();
+                file_params.entry(whole_param).or_insert(binding);
             }
         });
     };
@@ -44,32 +42,93 @@ pub(crate) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
                 ast::SourceFile(it) => it.items().filter_map(|item| match item {
                     ast::Item::Fn(it) => Some(it),
                     _ => None,
-                }).for_each(&mut process_fn),
+                }).for_each(&mut extract_params),
                 ast::ItemList(it) => it.items().filter_map(|item| match item {
                     ast::Item::Fn(it) => Some(it),
                     _ => None,
-                }).for_each(&mut process_fn),
+                }).for_each(&mut extract_params),
                 ast::AssocItemList(it) => it.assoc_items().filter_map(|item| match item {
                     ast::AssocItem::Fn(it) => Some(it),
                     _ => None,
-                }).for_each(&mut process_fn),
+                }).for_each(&mut extract_params),
                 _ => continue,
             }
         };
     }
 
+    let function = ctx.token.ancestors().find_map(ast::Fn::cast)?;
+    let param_list = function.param_list()?;
+
+    remove_duplicated(&mut file_params, param_list.params());
+
     let self_completion_items = ["self", "&self", "mut self", "&mut self"];
-    if ctx.impl_def.is_some() && me?.param_list()?.params().next().is_none() {
+    if should_add_self_completions(ctx, param_list) {
         self_completion_items.into_iter().for_each(|self_item| {
             add_new_item_to_acc(ctx, acc, self_item.to_string(), self_item.to_string())
         });
     }
 
-    params.into_iter().for_each(|(label, lookup)| add_new_item_to_acc(ctx, acc, label, lookup));
+    file_params.into_iter().try_for_each(|(whole_param, binding)| {
+        Some(add_new_item_to_acc(ctx, acc, surround_with_commas(ctx, whole_param), binding))
+    })?;
 
     Some(())
 }
 
+fn remove_duplicated(
+    file_params: &mut FxHashMap<String, String>,
+    fn_params: ast::AstChildren<ast::Param>,
+) {
+    fn_params.for_each(|param| {
+        let whole_param = param.syntax().text().to_string();
+        file_params.remove(&whole_param);
+
+        if let Some(pattern) = param.pat() {
+            let binding = pattern.syntax().text().to_string();
+            file_params.retain(|_, v| v != &binding);
+        }
+    })
+}
+
+fn should_add_self_completions(ctx: &CompletionContext, param_list: ast::ParamList) -> bool {
+    let inside_impl = ctx.impl_def.is_some();
+    let no_params = param_list.params().next().is_none() && param_list.self_param().is_none();
+
+    inside_impl && no_params
+}
+
+fn surround_with_commas(ctx: &CompletionContext, param: String) -> String {
+    match fallible_surround_with_commas(ctx, &param) {
+        Some(surrounded) => surrounded,
+        // fall back to the original parameter
+        None => param,
+    }
+}
+
+fn fallible_surround_with_commas(ctx: &CompletionContext, param: &str) -> Option<String> {
+    let next_token = {
+        let t = ctx.token.next_token()?;
+        match t.kind() {
+            SyntaxKind::WHITESPACE => t.next_token()?,
+            _ => t,
+        }
+    };
+
+    let trailing_comma_missing = matches!(next_token.kind(), SyntaxKind::IDENT);
+    let trailing = if trailing_comma_missing { "," } else { "" };
+
+    let previous_token = if matches!(ctx.token.kind(), SyntaxKind::IDENT | SyntaxKind::WHITESPACE) {
+        ctx.previous_token.as_ref()?
+    } else {
+        &ctx.token
+    };
+
+    let needs_leading = !matches!(previous_token.kind(), SyntaxKind::L_PAREN | SyntaxKind::COMMA);
+    let leading = if needs_leading { ", " } else { "" };
+
+    Some(format!("{}{}{}", leading, param, trailing))
+}
+
 fn add_new_item_to_acc(
     ctx: &CompletionContext,
     acc: &mut Completions,
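
The comma placement implemented by fallible_surround_with_commas reduces to two token checks; a minimal sketch with the token inspection replaced by plain flags:

// add a leading ", " unless the completion sits right after `(` or `,`,
// and a trailing "," when another identifier follows on the same line
fn surround_with_commas(param: &str, after_open_or_comma: bool, ident_follows: bool) -> String {
    let leading = if after_open_or_comma { "" } else { ", " };
    let trailing = if ident_follows { "," } else { "" };
    format!("{}{}{}", leading, param, trailing)
}

fn main() {
    // completing in `bar(file_id: u32, $0)` needs no extra commas
    assert_eq!(surround_with_commas("pos: u32", true, false), "pos: u32");
    // completing in `baz(file$0 id: u32)` needs a trailing comma before `id`
    assert_eq!(surround_with_commas("file_id: usize", true, true), "file_id: usize,");
}
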
diff --git a/crates/ide_completion/src/context.rs b/crates/ide_completion/src/context.rs
index 6e43aa608ac62a2e58588577746481e7e6db9c42..2374d689cbb5582077a2e4c9cdd192ed605ef26a 100644
@@ -1,5 +1,7 @@
 //! See `CompletionContext` structure.
 
+use std::iter;
+
 use base_db::SourceDatabaseExt;
 use hir::{Local, Name, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo};
 use ide_db::{
@@ -431,12 +433,17 @@ fn expand_and_fill(
         mut fake_ident_token: SyntaxToken,
     ) {
         let _p = profile::span("CompletionContext::expand_and_fill");
-        loop {
-            // Expand attributes
-            if let (Some(actual_item), Some(item_with_fake_ident)) = (
-                find_node_at_offset::<ast::Item>(&original_file, offset),
-                find_node_at_offset::<ast::Item>(&speculative_file, offset),
-            ) {
+        'expansion: loop {
+            let parent_item =
+                |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
+            let ancestor_items = iter::successors(
+                Option::zip(
+                    find_node_at_offset::<ast::Item>(&original_file, offset),
+                    find_node_at_offset::<ast::Item>(&speculative_file, offset),
+                ),
+                |(a, b)| parent_item(a).zip(parent_item(b)),
+            );
+            for (actual_item, item_with_fake_ident) in ancestor_items {
                 match (
                     self.sema.expand_attr_macro(&actual_item),
                     self.sema.speculative_expand_attr_macro(
@@ -445,19 +452,22 @@ fn expand_and_fill(
                         fake_ident_token.clone(),
                     ),
                 ) {
-                    (Some(actual_expansion), Some(speculative_expansion)) => {
-                        let new_offset = speculative_expansion.1.text_range().start();
+                    // maybe parent items have attributes
+                    (None, None) => (),
+                    // successful expansions
+                    (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+                        let new_offset = fake_mapped_token.text_range().start();
                         if new_offset > actual_expansion.text_range().end() {
-                            break;
+                            break 'expansion;
                         }
                         original_file = actual_expansion;
-                        speculative_file = speculative_expansion.0;
-                        fake_ident_token = speculative_expansion.1;
+                        speculative_file = fake_expansion;
+                        fake_ident_token = fake_mapped_token;
                         offset = new_offset;
-                        continue;
+                        continue 'expansion;
                     }
-                    (None, None) => (),
-                    _ => break,
+                    // exactly one expansion failed, inconsistent state so stop expanding completely
+                    _ => break 'expansion,
                 }
             }
 
@@ -477,28 +487,31 @@ fn expand_and_fill(
                     None => break,
                 };
 
-                if let (Some(actual_expansion), Some(speculative_expansion)) = (
+                match (
                     self.sema.expand(&actual_macro_call),
                     self.sema.speculative_expand(
                         &actual_macro_call,
                         &speculative_args,
-                        fake_ident_token,
+                        fake_ident_token.clone(),
                     ),
                 ) {
-                    let new_offset = speculative_expansion.1.text_range().start();
-                    if new_offset > actual_expansion.text_range().end() {
-                        break;
+                    // successful expansions
+                    (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+                        let new_offset = fake_mapped_token.text_range().start();
+                        if new_offset > actual_expansion.text_range().end() {
+                            break;
+                        }
+                        original_file = actual_expansion;
+                        speculative_file = fake_expansion;
+                        fake_ident_token = fake_mapped_token;
+                        offset = new_offset;
+                        continue;
                     }
-                    original_file = actual_expansion;
-                    speculative_file = speculative_expansion.0;
-                    fake_ident_token = speculative_expansion.1;
-                    offset = new_offset;
-                } else {
-                    break;
+                    _ => break,
                 }
-            } else {
-                break;
             }
+
+            break;
         }
 
         self.fill(&original_file, speculative_file, offset);
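
The ancestor walk that now drives attribute-macro expansion is an iter::successors chain over paired items; in miniature (checked_sub stands in for parent_item):

use std::iter;

fn main() {
    // start from the innermost (real, speculative) pair and repeatedly take
    // both parents, stopping as soon as either side runs out
    let parent = |x: u32| x.checked_sub(1);
    let pairs: Vec<(u32, u32)> =
        iter::successors(Some((2_u32, 2_u32)), |&(a, b)| parent(a).zip(parent(b)))
            .collect();
    assert_eq!(pairs, vec![(2, 2), (1, 1), (0, 0)]);
}
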
diff --git a/crates/ide_completion/src/tests/fn_param.rs b/crates/ide_completion/src/tests/fn_param.rs
index 8a07aefafe34abcad7aa4cd4381f8b8205a8e500..940cecf395d87a36e9c93b5f0b89f9b7c1ae17a6 100644
@@ -46,7 +46,20 @@ fn bar(file_id: usize) {}
 fn baz(file$0 id: u32) {}
 "#,
         expect![[r#"
-            bn file_id: usize
+            bn file_id: usize,
+            kw mut
+        "#]],
+    );
+}
+
+#[test]
+fn repeated_param_name() {
+    check(
+        r#"
+fn foo(file_id: usize) {}
+fn bar(file_id: u32, $0) {}
+"#,
+        expect![[r#"
             kw mut
         "#]],
     );
@@ -126,7 +139,6 @@ fn new($0) {}
 
 #[test]
 fn in_impl_after_self() {
-    // FIXME: self completions should not be here
     check(
         r#"
 struct A {}
@@ -137,10 +149,6 @@ fn new(self, $0) {}
 }
 "#,
         expect![[r#"
-            bn self
-            bn &self
-            bn mut self
-            bn &mut self
             bn file_id: usize
             kw mut
             sp Self
diff --git a/crates/ide_db/src/defs.rs b/crates/ide_db/src/defs.rs
index 1501c4eda53aa9a4bb495d3473388b87193dc934..172acdbc3c402a53779a5acef58a174ccfc1a3ce 100644
@@ -42,74 +42,6 @@ pub enum Definition {
 }
 
 impl Definition {
-    pub fn from_token(
-        sema: &Semantics<RootDatabase>,
-        token: &SyntaxToken,
-    ) -> ArrayVec<Definition, 2> {
-        let parent = match token.parent() {
-            Some(parent) => parent,
-            None => return Default::default(),
-        };
-        // resolve derives if possible
-        if let Some(ident) = ast::Ident::cast(token.clone()) {
-            let attr = ast::TokenTree::cast(parent.clone())
-                .and_then(|tt| tt.parent_meta())
-                .and_then(|meta| meta.parent_attr());
-            if let Some(attr) = attr {
-                return sema
-                    .resolve_derive_ident(&attr, &ident)
-                    .map(Into::into)
-                    .into_iter()
-                    .collect();
-            }
-        }
-        Self::from_node(sema, &parent)
-    }
-
-    pub fn from_node(sema: &Semantics<RootDatabase>, node: &SyntaxNode) -> ArrayVec<Definition, 2> {
-        let mut res = ArrayVec::new();
-        (|| {
-            match_ast! {
-                match node {
-                    ast::Name(name) => {
-                        match NameClass::classify(&sema, &name)? {
-                            NameClass::Definition(it) | NameClass::ConstReference(it) => res.push(it),
-                            NameClass::PatFieldShorthand { local_def, field_ref } => {
-                                res.push(Definition::Local(local_def));
-                                res.push(Definition::Field(field_ref));
-                            }
-                        }
-                    },
-                    ast::NameRef(name_ref) => {
-                        match NameRefClass::classify(sema, &name_ref)? {
-                            NameRefClass::Definition(it) => res.push(it),
-                            NameRefClass::FieldShorthand { local_ref, field_ref } => {
-                                res.push(Definition::Local(local_ref));
-                                res.push(Definition::Field(field_ref));
-                            }
-                        }
-                    },
-                    ast::Lifetime(lifetime) => {
-                        let def = if let Some(x) = NameClass::classify_lifetime(&sema, &lifetime) {
-                            NameClass::defined(x)
-                        } else {
-                            NameRefClass::classify_lifetime(&sema, &lifetime).and_then(|class| match class {
-                                NameRefClass::Definition(it) => Some(it),
-                                _ => None,
-                            })
-                        };
-                        if let Some(def) = def {
-                            res.push(def);
-                        }
-                    },
-                    _ => (),
-                }
-            }
-            Some(())
-        })();
-        res
-    }
-
     pub fn canonical_module_path(&self, db: &RootDatabase) -> Option<impl Iterator<Item = Module>> {
         self.module(db).map(|it| it.path_to_root(db).into_iter().rev())
     }
@@ -184,6 +116,65 @@ pub fn name(&self, db: &RootDatabase) -> Option<Name> {
     }
 }
 
+pub enum IdentClass {
+    NameClass(NameClass),
+    NameRefClass(NameRefClass),
+}
+
+impl IdentClass {
+    pub fn classify_node(sema: &Semantics<RootDatabase>, node: &SyntaxNode) -> Option<IdentClass> {
+        match_ast! {
+            match node {
+                ast::Name(name) => NameClass::classify(sema, &name).map(IdentClass::NameClass),
+                ast::NameRef(name_ref) => NameRefClass::classify(sema, &name_ref).map(IdentClass::NameRefClass),
+                ast::Lifetime(lifetime) => {
+                    NameClass::classify_lifetime(sema, &lifetime)
+                        .map(IdentClass::NameClass)
+                        .or_else(|| NameRefClass::classify_lifetime(sema, &lifetime).map(IdentClass::NameRefClass))
+                },
+                _ => None,
+            }
+        }
+    }
+
+    pub fn classify_token(
+        sema: &Semantics<RootDatabase>,
+        token: &SyntaxToken,
+    ) -> Option<IdentClass> {
+        let parent = token.parent()?;
+        // resolve derives if possible
+        if let Some(ident) = ast::Ident::cast(token.clone()) {
+            let attr = ast::TokenTree::cast(parent.clone())
+                .and_then(|tt| tt.parent_meta())
+                .and_then(|meta| meta.parent_attr());
+            if let Some(attr) = attr {
+                return NameRefClass::classify_derive(sema, &attr, &ident)
+                    .map(IdentClass::NameRefClass);
+            }
+        }
+        Self::classify_node(sema, &parent)
+    }
+
+    pub fn definitions(self) -> ArrayVec<Definition, 2> {
+        let mut res = ArrayVec::new();
+        match self {
+            IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => {
+                res.push(it)
+            }
+            IdentClass::NameClass(NameClass::PatFieldShorthand { local_def, field_ref }) => {
+                res.push(Definition::Local(local_def));
+                res.push(Definition::Field(field_ref));
+            }
+            IdentClass::NameRefClass(NameRefClass::Definition(it)) => res.push(it),
+            IdentClass::NameRefClass(NameRefClass::FieldShorthand { local_ref, field_ref }) => {
+                res.push(Definition::Local(local_ref));
+                res.push(Definition::Field(field_ref));
+            }
+        }
+        res
+    }
+}
+
 /// On a first blush, a single `ast::Name` defines a single definition at some
 /// scope. That is, that, by just looking at the syntactical category, we can
 /// unambiguously define the semantic category.
@@ -465,6 +456,14 @@ pub fn classify_lifetime(
             _ => None,
         }
     }
+
+    pub fn classify_derive(
+        sema: &Semantics<RootDatabase>,
+        attr: &ast::Attr,
+        ident: &ast::Ident,
+    ) -> Option<NameRefClass> {
+        sema.resolve_derive_ident(&attr, &ident).map(Definition::from).map(NameRefClass::Definition)
+    }
 }
 
 impl_from!(
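
Call sites across this commit migrate from the removed Definition::from_token to the classify-then-definitions flow. A toy model of that shape, with u32 standing in for Definition:

enum IdentClass {
    NameDef(u32),
    PatFieldShorthand { local: u32, field: u32 },
}

impl IdentClass {
    // a successful classification carries one or two definitions
    fn definitions(self) -> Vec<u32> {
        match self {
            IdentClass::NameDef(d) => vec![d],
            IdentClass::PatFieldShorthand { local, field } => vec![local, field],
        }
    }
}

fn main() {
    // classification may fail, hence Option -> iterator of definitions
    let class = Some(IdentClass::PatFieldShorthand { local: 1, field: 2 });
    let defs: Vec<u32> = class.map(IdentClass::definitions).into_iter().flatten().collect();
    assert_eq!(defs, vec![1, 2]);
}
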
diff --git a/crates/ide_db/src/helpers/insert_whitespace_into_node.rs b/crates/ide_db/src/helpers/insert_whitespace_into_node.rs
index 251a4caa13278636788c7447599d68f4521ce4a3..9d31966cea67a4926453fc7d55cbdcb8e6f61759 100644
@@ -88,7 +88,7 @@ pub fn insert_ws_into(syn: SyntaxNode) -> SyntaxNode {
             LIFETIME_IDENT if is_next(|it| is_text(it), true) => {
                 mods.push(do_ws(after, tok));
             }
-            AS_KW => {
+            AS_KW | DYN_KW => {
                 mods.push(do_ws(after, tok));
             }
             T![;] => {