Merge #3309
author bors[bot] <26634292+bors[bot]@users.noreply.github.com>
Sat, 29 Feb 2020 15:36:03 +0000 (15:36 +0000)
committer GitHub <noreply@github.com>
Sat, 29 Feb 2020 15:36:03 +0000 (15:36 +0000)
3309: Find cargo toml up the fs r=matklad a=not-much-io

Currently rust-analyzer looks for Cargo.toml in the root of the project and, failing that, walks up the filesystem until it reaches the filesystem root.

Unfortunately this doesn't work automatically with (what I imagine is) a fairly common project structure, e.g. a repository mixing multiple languages:
```
js/
  ..
rust/
  Cargo.toml
  ...
```

Added this small change so rust-analyzer also glances one level deeper when no Cargo.toml is found in the root or up the filesystem.

## Why not go deeper?

Searching deeper could be problematic with large projects, vendored dependencies, etc.

## Why not add a Cargo.toml manual setting option?

Loosely related and a good idea; however, the convenience of having this handled automatically is hard to pass up.

## Testing?

Built a binary with extra logging and checked it in a project with such a structure:

```
[ERROR ra_project_model] find_cargo_toml()
[ERROR ra_project_model] find_cargo_toml_up_the_fs()
[ERROR ra_project_model] entities: ReadDir("/workspaces/my-project")
[ERROR ra_project_model] candidate: "/workspaces/my-project/rust/Cargo.toml", exists: true
```

## Edge Cases?

If you have multiple Cargo.toml files one level deeper AND none in the root, you will get whichever comes first (order undefined), for example:
```
crate1/
    Cargo.toml
crate2/
    Cargo.toml
... (no root Cargo.toml)
```

However, this layout is quite unusual and wouldn't have worked before either; it is only resolvable by manually choosing a Cargo.toml.
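
As a rough illustration, here is a minimal sketch of the resulting lookup order (not the actual `ra_project_model` code; `find_cargo_toml_in_child_dir` is a hypothetical helper name):

```
use std::fs;
use std::path::{Path, PathBuf};

// Sketch only: mirrors the order described above, not rust-analyzer's
// actual implementation.
fn find_cargo_toml(project_root: &Path) -> Option<PathBuf> {
    // 1. Check the project root itself.
    let candidate = project_root.join("Cargo.toml");
    if candidate.exists() {
        return Some(candidate);
    }
    // 2. Walk up the filesystem towards `/`.
    for ancestor in project_root.ancestors().skip(1) {
        let candidate = ancestor.join("Cargo.toml");
        if candidate.exists() {
            return Some(candidate);
        }
    }
    // 3. Glance exactly one level deeper; with several matches, whichever
    //    directory entry is yielded first wins (order undefined).
    find_cargo_toml_in_child_dir(project_root)
}

// Hypothetical helper: look for `<child>/Cargo.toml` in each direct child.
fn find_cargo_toml_in_child_dir(project_root: &Path) -> Option<PathBuf> {
    for entry in fs::read_dir(project_root).ok()?.flatten() {
        let candidate = entry.path().join("Cargo.toml");
        if candidate.exists() {
            return Some(candidate);
        }
    }
    None
}
```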

Co-authored-by: nmio <kristo.koert@gmail.com>
109 files changed:
.github/workflows/ci.yaml
Cargo.lock
crates/ra_assists/Cargo.toml
crates/ra_assists/src/assist_ctx.rs
crates/ra_assists/src/ast_transform.rs
crates/ra_assists/src/handlers/add_explicit_type.rs
crates/ra_assists/src/handlers/add_missing_impl_members.rs
crates/ra_assists/src/handlers/add_new.rs
crates/ra_assists/src/handlers/auto_import.rs
crates/ra_assists/src/handlers/change_visibility.rs
crates/ra_assists/src/handlers/early_return.rs
crates/ra_assists/src/handlers/fill_match_arms.rs
crates/ra_assists/src/handlers/inline_local_variable.rs
crates/ra_assists/src/handlers/introduce_variable.rs
crates/ra_assists/src/handlers/move_bounds.rs
crates/ra_assists/src/handlers/raw_string.rs
crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs
crates/ra_assists/src/lib.rs
crates/ra_assists/src/utils.rs
crates/ra_assists/src/utils/insert_use.rs [new file with mode: 0644]
crates/ra_cargo_watch/Cargo.toml
crates/ra_hir/src/from_id.rs
crates/ra_hir/src/lib.rs
crates/ra_hir/src/semantics.rs [new file with mode: 0644]
crates/ra_hir/src/source_analyzer.rs
crates/ra_hir/src/source_binder.rs
crates/ra_hir_def/Cargo.toml
crates/ra_hir_ty/src/infer.rs
crates/ra_hir_ty/src/infer/expr.rs
crates/ra_hir_ty/src/tests.rs
crates/ra_hir_ty/src/tests/coercion.rs
crates/ra_ide/Cargo.toml
crates/ra_ide/src/call_hierarchy.rs
crates/ra_ide/src/call_info.rs
crates/ra_ide/src/completion.rs
crates/ra_ide/src/completion/complete_dot.rs
crates/ra_ide/src/completion/complete_macro_in_item_position.rs
crates/ra_ide/src/completion/complete_path.rs
crates/ra_ide/src/completion/complete_pattern.rs
crates/ra_ide/src/completion/complete_postfix.rs
crates/ra_ide/src/completion/complete_record_literal.rs
crates/ra_ide/src/completion/complete_record_pattern.rs
crates/ra_ide/src/completion/complete_scope.rs
crates/ra_ide/src/completion/complete_trait_impl.rs
crates/ra_ide/src/completion/completion_context.rs
crates/ra_ide/src/diagnostics.rs
crates/ra_ide/src/display/navigation_target.rs
crates/ra_ide/src/expand.rs [deleted file]
crates/ra_ide/src/expand_macro.rs
crates/ra_ide/src/extend_selection.rs
crates/ra_ide/src/goto_definition.rs
crates/ra_ide/src/goto_type_definition.rs
crates/ra_ide/src/hover.rs
crates/ra_ide/src/impls.rs
crates/ra_ide/src/inlay_hints.rs
crates/ra_ide/src/lib.rs
crates/ra_ide/src/marks.rs
crates/ra_ide/src/mock_analysis.rs
crates/ra_ide/src/parent_module.rs
crates/ra_ide/src/references.rs
crates/ra_ide/src/references/classify.rs
crates/ra_ide/src/references/rename.rs
crates/ra_ide/src/runnables.rs
crates/ra_ide/src/snapshots/highlighting.html
crates/ra_ide/src/snapshots/rainbow_highlighting.html
crates/ra_ide/src/ssr.rs
crates/ra_ide/src/syntax_highlighting.rs
crates/ra_ide/src/syntax_highlighting/html.rs [new file with mode: 0644]
crates/ra_ide/src/syntax_highlighting/tags.rs [new file with mode: 0644]
crates/ra_ide/src/syntax_highlighting/tests.rs [new file with mode: 0644]
crates/ra_ide_db/Cargo.toml
crates/ra_ide_db/src/defs.rs
crates/ra_ide_db/src/imports_locator.rs
crates/ra_ide_db/src/line_index.rs
crates/ra_mbe/src/syntax_bridge.rs
crates/ra_parser/src/grammar/expressions.rs
crates/ra_prof/Cargo.toml
crates/ra_syntax/src/algo.rs
crates/ra_syntax/src/ast/edit.rs
crates/ra_syntax/src/ast/make.rs
crates/ra_syntax/src/ast/tokens.rs
crates/ra_syntax/test_data/parser/inline/ok/0130_let_stmt.rs
crates/ra_syntax/test_data/parser/inline/ok/0130_let_stmt.txt
crates/ra_text_edit/Cargo.toml
crates/rust-analyzer/Cargo.toml
crates/rust-analyzer/src/caps.rs
crates/rust-analyzer/src/cli/analysis_stats.rs
crates/rust-analyzer/src/conv.rs
crates/rust-analyzer/src/main_loop.rs
crates/rust-analyzer/src/main_loop/handlers.rs
crates/rust-analyzer/src/req.rs
crates/rust-analyzer/src/semantic_tokens.rs
crates/test_utils/Cargo.toml
crates/test_utils/src/lib.rs
editors/code/package-lock.json
editors/code/package.json
editors/code/src/client.ts
editors/code/src/config.ts
editors/code/src/inlay_hints.ts
editors/code/src/installation/download_artifact.ts
editors/code/src/installation/download_file.ts
editors/code/src/installation/server.ts
editors/code/src/main.ts
editors/code/src/util.ts
xtask/src/ast_src.rs
xtask/src/codegen/gen_assists_docs.rs
xtask/src/install.rs
xtask/src/lib.rs
xtask/src/not_bash.rs

index 8ab47106d0c69ab496264835597585cec629e826..3f41d32f7bae1b983b499dfe5c27f19ab715da25 100644 (file)
@@ -8,6 +8,16 @@ on:
       - trying
 
 jobs:
+  rust-audit:
+    name: Audit Rust vulnerabilities
+    runs-on: ubuntu-latest
+    steps:
+        - name: Checkout repository
+          uses: actions/checkout@v1
+
+        - run: cargo install cargo-audit
+        - run: cargo audit
+
   rust:
     name: Rust
     runs-on: ${{ matrix.os }}
@@ -79,7 +89,7 @@ jobs:
         if: matrix.os == 'windows-latest'
         run: Remove-Item ./target/debug/xtask.exe
 
-  type-script:
+  typescript:
     name: TypeScript
     runs-on: ubuntu-latest
     env:
@@ -96,7 +106,12 @@ jobs:
 
       - run: npm ci
         working-directory: ./editors/code
-      - run: npm run fmt
+
+      - run: npm audit
+        working-directory: ./editors/code
+
+      - run: npm run lint
         working-directory: ./editors/code
+
       - run: npm run package --scripts-prepend-node-path
         working-directory: ./editors/code
index e5400f5ebdcd59ad53e511598081e7e397c17654..49fddef4b9fd761bee60a299eaa39391d2bd959b 100644 (file)
@@ -662,9 +662,9 @@ dependencies = [
 
 [[package]]
 name = "lsp-types"
-version = "0.70.2"
+version = "0.71.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6743fb3902ab3dfa6ce030daeac6ff492e20bb0fee840739d16f6bfb0efaf91c"
+checksum = "efa6b75633b0c3412ee36fc416e6d9c1e4ff576b536217f4ac3f34ac83d9e564"
 dependencies = [
  "base64",
  "bitflags",
@@ -882,9 +882,9 @@ dependencies = [
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.8"
+version = "1.0.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3acb317c6ff86a4e579dfa00fc5e6cca91ecbb4e7eb2df0468805b674eb88548"
+checksum = "6c09721c6781493a2a492a96b5a5bf19b65917fe6728884e7c44dd0c60ca3435"
 dependencies = [
  "unicode-xid",
 ]
@@ -906,7 +906,6 @@ version = "0.1.0"
 name = "ra_assists"
 version = "0.1.0"
 dependencies = [
- "either",
  "format-buf",
  "join_to_string",
  "ra_db",
@@ -927,10 +926,8 @@ dependencies = [
  "cargo_metadata",
  "crossbeam-channel",
  "insta",
- "jod-thread",
  "log",
  "lsp-types",
- "parking_lot",
  "serde_json",
 ]
 
@@ -988,7 +985,6 @@ dependencies = [
  "drop_bomb",
  "either",
  "insta",
- "itertools",
  "log",
  "once_cell",
  "ra_arena",
@@ -1046,7 +1042,6 @@ version = "0.1.0"
 dependencies = [
  "either",
  "format-buf",
- "fst",
  "indexmap",
  "insta",
  "itertools",
@@ -1063,29 +1058,17 @@ dependencies = [
  "ra_syntax",
  "ra_text_edit",
  "rand",
- "rayon",
  "rustc-hash",
- "superslice",
  "test_utils",
- "unicase",
 ]
 
 [[package]]
 name = "ra_ide_db"
 version = "0.1.0"
 dependencies = [
- "either",
- "format-buf",
  "fst",
- "indexmap",
- "insta",
- "itertools",
- "join_to_string",
  "log",
- "once_cell",
- "ra_cfg",
  "ra_db",
- "ra_fmt",
  "ra_hir",
  "ra_prof",
  "ra_syntax",
@@ -1093,8 +1076,6 @@ dependencies = [
  "rayon",
  "rustc-hash",
  "superslice",
- "test_utils",
- "unicase",
 ]
 
 [[package]]
@@ -1122,7 +1103,6 @@ name = "ra_prof"
 version = "0.1.0"
 dependencies = [
  "backtrace",
- "itertools",
  "jemalloc-ctl",
  "jemallocator",
  "once_cell",
@@ -1165,7 +1145,6 @@ dependencies = [
 name = "ra_text_edit"
 version = "0.1.0"
 dependencies = [
- "test_utils",
  "text_unit",
 ]
 
@@ -1324,7 +1303,6 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "crossbeam-channel",
- "either",
  "env_logger",
  "globset",
  "itertools",
@@ -1534,9 +1512,9 @@ checksum = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f"
 
 [[package]]
 name = "syn"
-version = "1.0.15"
+version = "1.0.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a0294dc449adc58bb6592fff1a23d3e5e6e235afc6a0ffca2657d19e7bbffe5"
+checksum = "123bd9499cfb380418d509322d7a6d52e5315f064fe4b3ad18a53d6b92c07859"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1577,9 +1555,9 @@ dependencies = [
 
 [[package]]
 name = "text_unit"
-version = "0.1.9"
+version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e08bbcb7a3adbda0eb23431206b653bdad3d8dea311e72d36bf2215e27a42579"
+checksum = "20431e104bfecc1a40872578dbc390e10290a0e9c35fffe3ce6f73c15a9dbfc2"
 
 [[package]]
 name = "thin-dst"
@@ -1605,15 +1583,6 @@ dependencies = [
  "num_cpus",
 ]
 
-[[package]]
-name = "unicase"
-version = "2.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
-dependencies = [
- "version_check",
-]
-
 [[package]]
 name = "unicode-bidi"
 version = "0.3.4"
@@ -1656,12 +1625,6 @@ dependencies = [
  "serde",
 ]
 
-[[package]]
-name = "version_check"
-version = "0.9.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce"
-
 [[package]]
 name = "walkdir"
 version = "2.3.1"
index 12a933645fd7e7d8483fa70cd62cd7fa2a9b5b2e..d314dc8e67e7a152cb7ef8cd355944e86a6452f4 100644 (file)
@@ -11,7 +11,6 @@ doctest = false
 format-buf = "1.0.0"
 join_to_string = "0.1.3"
 rustc-hash = "1.1.0"
-either = "1.5.3"
 
 ra_syntax = { path = "../ra_syntax" }
 ra_text_edit = { path = "../ra_text_edit" }
index 5aab5fb8b673dd71f007a4f62ad6c6cf62ec2561..c25d2e3239b830d01787323fea3fe9a12405bed0 100644 (file)
@@ -1,6 +1,6 @@
 //! This module defines `AssistCtx` -- the API surface that is exposed to assists.
-use hir::{InFile, SourceAnalyzer, SourceBinder};
-use ra_db::{FileRange, SourceDatabase};
+use hir::Semantics;
+use ra_db::FileRange;
 use ra_fmt::{leading_indent, reindent};
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
@@ -74,29 +74,23 @@ pub(crate) fn into_resolved(self) -> Option<ResolvedAssist> {
 /// Note, however, that we don't actually use such two-phase logic at the
 /// moment, because the LSP API is pretty awkward in this place, and it's much
 /// easier to just compute the edit eagerly :-)
-#[derive(Debug)]
+#[derive(Clone)]
 pub(crate) struct AssistCtx<'a> {
+    pub(crate) sema: &'a Semantics<'a, RootDatabase>,
     pub(crate) db: &'a RootDatabase,
     pub(crate) frange: FileRange,
     source_file: SourceFile,
     should_compute_edit: bool,
 }
 
-impl Clone for AssistCtx<'_> {
-    fn clone(&self) -> Self {
-        AssistCtx {
-            db: self.db,
-            frange: self.frange,
-            source_file: self.source_file.clone(),
-            should_compute_edit: self.should_compute_edit,
-        }
-    }
-}
-
 impl<'a> AssistCtx<'a> {
-    pub fn new(db: &RootDatabase, frange: FileRange, should_compute_edit: bool) -> AssistCtx {
-        let parse = db.parse(frange.file_id);
-        AssistCtx { db, frange, source_file: parse.tree(), should_compute_edit }
+    pub fn new(
+        sema: &'a Semantics<'a, RootDatabase>,
+        frange: FileRange,
+        should_compute_edit: bool,
+    ) -> AssistCtx<'a> {
+        let source_file = sema.parse(frange.file_id);
+        AssistCtx { sema, db: sema.db, frange, source_file, should_compute_edit }
     }
 
     pub(crate) fn add_assist(
@@ -138,18 +132,6 @@ pub(crate) fn find_node_at_offset<N: AstNode>(&self) -> Option<N> {
     pub(crate) fn covering_element(&self) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), self.frange.range)
     }
-    pub(crate) fn source_binder(&self) -> SourceBinder<'a, RootDatabase> {
-        SourceBinder::new(self.db)
-    }
-    pub(crate) fn source_analyzer(
-        &self,
-        node: &SyntaxNode,
-        offset: Option<TextUnit>,
-    ) -> SourceAnalyzer {
-        let src = InFile::new(self.frange.file_id.into(), node);
-        self.source_binder().analyze(src, offset)
-    }
-
     pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), range)
     }
index c6d15af5fc6aa34a3de9911f8899e8e7d797f1a3..a74ac42d5d229431d0a6dd2a29f44c841231efe1 100644 (file)
@@ -1,15 +1,12 @@
 //! `AstTransformer`s are functions that replace nodes in an AST and can be easily combined.
 use rustc_hash::FxHashMap;
 
-use hir::{InFile, PathResolution};
+use hir::{PathResolution, SemanticsScope};
 use ra_ide_db::RootDatabase;
 use ra_syntax::ast::{self, AstNode};
 
 pub trait AstTransform<'a> {
-    fn get_substitution(
-        &self,
-        node: InFile<&ra_syntax::SyntaxNode>,
-    ) -> Option<ra_syntax::SyntaxNode>;
+    fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode>;
 
     fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a>;
     fn or<T: AstTransform<'a> + 'a>(self, other: T) -> Box<dyn AstTransform<'a> + 'a>
@@ -23,10 +20,7 @@ fn or<T: AstTransform<'a> + 'a>(self, other: T) -> Box<dyn AstTransform<'a> + 'a
 struct NullTransformer;
 
 impl<'a> AstTransform<'a> for NullTransformer {
-    fn get_substitution(
-        &self,
-        _node: InFile<&ra_syntax::SyntaxNode>,
-    ) -> Option<ra_syntax::SyntaxNode> {
+    fn get_substitution(&self, _node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
         None
     }
     fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {
@@ -35,14 +29,16 @@ fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTrans
 }
 
 pub struct SubstituteTypeParams<'a> {
-    db: &'a RootDatabase,
+    source_scope: &'a SemanticsScope<'a, RootDatabase>,
     substs: FxHashMap<hir::TypeParam, ast::TypeRef>,
     previous: Box<dyn AstTransform<'a> + 'a>,
 }
 
 impl<'a> SubstituteTypeParams<'a> {
     pub fn for_trait_impl(
+        source_scope: &'a SemanticsScope<'a, RootDatabase>,
         db: &'a RootDatabase,
+        // FIXME: there's implicit invariant that `trait_` and  `source_scope` match...
         trait_: hir::Trait,
         impl_block: ast::ImplBlock,
     ) -> SubstituteTypeParams<'a> {
@@ -56,7 +52,7 @@ pub fn for_trait_impl(
             .zip(substs.into_iter())
             .collect();
         return SubstituteTypeParams {
-            db,
+            source_scope,
             substs: substs_by_param,
             previous: Box::new(NullTransformer),
         };
@@ -80,15 +76,15 @@ fn get_syntactic_substs(impl_block: ast::ImplBlock) -> Option<Vec<ast::TypeRef>>
     }
     fn get_substitution_inner(
         &self,
-        node: InFile<&ra_syntax::SyntaxNode>,
+        node: &ra_syntax::SyntaxNode,
     ) -> Option<ra_syntax::SyntaxNode> {
-        let type_ref = ast::TypeRef::cast(node.value.clone())?;
+        let type_ref = ast::TypeRef::cast(node.clone())?;
         let path = match &type_ref {
             ast::TypeRef::PathType(path_type) => path_type.path()?,
             _ => return None,
         };
-        let analyzer = hir::SourceAnalyzer::new(self.db, node, None);
-        let resolution = analyzer.resolve_path(self.db, &path)?;
+        let path = hir::Path::from_ast(path)?;
+        let resolution = self.source_scope.resolve_hir_path(&path)?;
         match resolution {
             hir::PathResolution::TypeParam(tp) => Some(self.substs.get(&tp)?.syntax().clone()),
             _ => None,
@@ -97,10 +93,7 @@ fn get_substitution_inner(
 }
 
 impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> {
-    fn get_substitution(
-        &self,
-        node: InFile<&ra_syntax::SyntaxNode>,
-    ) -> Option<ra_syntax::SyntaxNode> {
+    fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
         self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node))
     }
     fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {
@@ -109,29 +102,34 @@ fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTrans
 }
 
 pub struct QualifyPaths<'a> {
+    target_scope: &'a SemanticsScope<'a, RootDatabase>,
+    source_scope: &'a SemanticsScope<'a, RootDatabase>,
     db: &'a RootDatabase,
-    from: Option<hir::Module>,
     previous: Box<dyn AstTransform<'a> + 'a>,
 }
 
 impl<'a> QualifyPaths<'a> {
-    pub fn new(db: &'a RootDatabase, from: Option<hir::Module>) -> Self {
-        Self { db, from, previous: Box::new(NullTransformer) }
+    pub fn new(
+        target_scope: &'a SemanticsScope<'a, RootDatabase>,
+        source_scope: &'a SemanticsScope<'a, RootDatabase>,
+        db: &'a RootDatabase,
+    ) -> Self {
+        Self { target_scope, source_scope, db, previous: Box::new(NullTransformer) }
     }
 
     fn get_substitution_inner(
         &self,
-        node: InFile<&ra_syntax::SyntaxNode>,
+        node: &ra_syntax::SyntaxNode,
     ) -> Option<ra_syntax::SyntaxNode> {
         // FIXME handle value ns?
-        let from = self.from?;
-        let p = ast::Path::cast(node.value.clone())?;
+        let from = self.target_scope.module()?;
+        let p = ast::Path::cast(node.clone())?;
         if p.segment().and_then(|s| s.param_list()).is_some() {
             // don't try to qualify `Fn(Foo) -> Bar` paths, they are in prelude anyway
             return None;
         }
-        let analyzer = hir::SourceAnalyzer::new(self.db, node, None);
-        let resolution = analyzer.resolve_path(self.db, &p)?;
+        let hir_path = hir::Path::from_ast(p.clone());
+        let resolution = self.source_scope.resolve_hir_path(&hir_path?)?;
         match resolution {
             PathResolution::Def(def) => {
                 let found_path = from.find_use_path(self.db, def)?;
@@ -140,7 +138,7 @@ fn get_substitution_inner(
                 let type_args = p
                     .segment()
                     .and_then(|s| s.type_arg_list())
-                    .map(|arg_list| apply(self, node.with_value(arg_list)));
+                    .map(|arg_list| apply(self, arg_list));
                 if let Some(type_args) = type_args {
                     let last_segment = path.segment().unwrap();
                     path = path.with_segment(last_segment.with_type_args(type_args))
@@ -157,11 +155,11 @@ fn get_substitution_inner(
     }
 }
 
-pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: InFile<N>) -> N {
-    let syntax = node.value.syntax();
-    let result = ra_syntax::algo::replace_descendants(syntax, &|element| match element {
+pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N {
+    let syntax = node.syntax();
+    let result = ra_syntax::algo::replace_descendants(syntax, |element| match element {
         ra_syntax::SyntaxElement::Node(n) => {
-            let replacement = transformer.get_substitution(node.with_value(&n))?;
+            let replacement = transformer.get_substitution(&n)?;
             Some(replacement.into())
         }
         _ => None,
@@ -170,10 +168,7 @@ pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: InFile<N>
 }
 
 impl<'a> AstTransform<'a> for QualifyPaths<'a> {
-    fn get_substitution(
-        &self,
-        node: InFile<&ra_syntax::SyntaxNode>,
-    ) -> Option<ra_syntax::SyntaxNode> {
+    fn get_substitution(&self, node: &ra_syntax::SyntaxNode) -> Option<ra_syntax::SyntaxNode> {
         self.get_substitution_inner(node).or_else(|| self.previous.get_substitution(node))
     }
     fn chain_before(self, other: Box<dyn AstTransform<'a> + 'a>) -> Box<dyn AstTransform<'a> + 'a> {
index 2cb9d2f48ebf4e0b5527e0767b338a711641f896..a63ef48b1d060505d3782f2a2a551aa6ce2f93c0 100644 (file)
@@ -51,14 +51,13 @@ pub(crate) fn add_explicit_type(ctx: AssistCtx) -> Option<Assist> {
         }
     }
     // Infer type
-    let db = ctx.db;
-    let analyzer = ctx.source_analyzer(stmt.syntax(), None);
-    let ty = analyzer.type_of(db, &expr)?;
+    let ty = ctx.sema.type_of_expr(&expr)?;
     // Assist not applicable if the type is unknown
     if ty.contains_unknown() {
         return None;
     }
 
+    let db = ctx.db;
     ctx.add_assist(
         AssistId("add_explicit_type"),
         format!("Insert explicit type '{}'", ty.display(db)),
index ab21388c8d4159a6974b632ca366a6ed7c24687c..4005014bdf3ff4f719167fafca7215b04c4ee39b 100644 (file)
@@ -1,4 +1,4 @@
-use hir::{HasSource, InFile};
+use hir::HasSource;
 use ra_syntax::{
     ast::{self, edit, make, AstNode, NameOwner},
     SmolStr,
@@ -104,9 +104,7 @@ fn add_missing_impl_members_inner(
     let impl_node = ctx.find_node_at_offset::<ast::ImplBlock>()?;
     let impl_item_list = impl_node.item_list()?;
 
-    let analyzer = ctx.source_analyzer(impl_node.syntax(), None);
-
-    let trait_ = resolve_target_trait(ctx.db, &analyzer, &impl_node)?;
+    let trait_ = resolve_target_trait(&ctx.sema, &impl_node)?;
 
     let def_name = |item: &ast::ImplItem| -> Option<SmolStr> {
         match item {
@@ -117,7 +115,7 @@ fn add_missing_impl_members_inner(
         .map(|it| it.text().clone())
     };
 
-    let missing_items = get_missing_impl_items(ctx.db, &analyzer, &impl_node)
+    let missing_items = get_missing_impl_items(&ctx.sema, &impl_node)
         .iter()
         .map(|i| match i {
             hir::AssocItem::Function(i) => ast::ImplItem::FnDef(i.source(ctx.db).value),
@@ -138,23 +136,17 @@ fn add_missing_impl_members_inner(
         return None;
     }
 
-    let db = ctx.db;
-    let file_id = ctx.frange.file_id;
-    let trait_file_id = trait_.source(db).file_id;
+    let sema = ctx.sema;
 
     ctx.add_assist(AssistId(assist_id), label, |edit| {
         let n_existing_items = impl_item_list.impl_items().count();
-        let module = hir::SourceAnalyzer::new(
-            db,
-            hir::InFile::new(file_id.into(), impl_node.syntax()),
-            None,
-        )
-        .module();
-        let ast_transform = QualifyPaths::new(db, module)
-            .or(SubstituteTypeParams::for_trait_impl(db, trait_, impl_node));
+        let source_scope = sema.scope_for_def(trait_);
+        let target_scope = sema.scope(impl_item_list.syntax());
+        let ast_transform = QualifyPaths::new(&target_scope, &source_scope, sema.db)
+            .or(SubstituteTypeParams::for_trait_impl(&source_scope, sema.db, trait_, impl_node));
         let items = missing_items
             .into_iter()
-            .map(|it| ast_transform::apply(&*ast_transform, InFile::new(trait_file_id, it)))
+            .map(|it| ast_transform::apply(&*ast_transform, it))
             .map(|it| match it {
                 ast::ImplItem::FnDef(def) => ast::ImplItem::FnDef(add_body(def)),
                 _ => it,
@@ -181,9 +173,10 @@ fn add_body(fn_def: ast::FnDef) -> ast::FnDef {
 
 #[cfg(test)]
 mod tests {
-    use super::*;
     use crate::helpers::{check_assist, check_assist_not_applicable};
 
+    use super::*;
+
     #[test]
     fn test_add_missing_impl_members() {
         check_assist(
index dd070e8ec0385dac2517c7aedee17f5c04678584..166e907fb40e0f10800b62cd3589006ef7083674 100644 (file)
@@ -1,5 +1,5 @@
 use format_buf::format;
-use hir::{Adt, InFile};
+use hir::Adt;
 use join_to_string::join;
 use ra_syntax::{
     ast::{
@@ -133,16 +133,11 @@ fn find_struct_impl(ctx: &AssistCtx, strukt: &ast::StructDef) -> Option<Option<a
     let module = strukt.syntax().ancestors().find(|node| {
         ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind())
     })?;
-    let mut sb = ctx.source_binder();
 
-    let struct_def = {
-        let src = InFile { file_id: ctx.frange.file_id.into(), value: strukt.clone() };
-        sb.to_def(src)?
-    };
+    let struct_def = ctx.sema.to_def(strukt)?;
 
     let block = module.descendants().filter_map(ast::ImplBlock::cast).find_map(|impl_blk| {
-        let src = InFile { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() };
-        let blk = sb.to_def(src)?;
+        let blk = ctx.sema.to_def(&impl_blk)?;
 
         // FIXME: handle e.g. `struct S<T>; impl<U> S<U> {}`
         // (we currently use the wrong type parameter)
index c4aea2a06703bb956d2d6c1aed19b1c1b64f0732..c8bf181f94005f92bc4a61d65fb02d1bb2aaef39 100644 (file)
@@ -1,10 +1,11 @@
 use crate::{
     assist_ctx::{Assist, AssistCtx},
-    insert_use_statement, AssistId,
+    utils::insert_use_statement,
+    AssistId,
 };
 use hir::{
-    db::HirDatabase, AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution,
-    SourceAnalyzer, Trait, Type,
+    AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution, Semantics, Trait,
+    Type,
 };
 use ra_ide_db::{imports_locator::ImportsLocator, RootDatabase};
 use ra_prof::profile;
@@ -51,7 +52,6 @@ pub(crate) fn auto_import(ctx: AssistCtx) -> Option<Assist> {
         group.add_assist(AssistId("auto_import"), format!("Import `{}`", &import), |edit| {
             edit.target(auto_import_assets.syntax_under_caret.text_range());
             insert_use_statement(
-                &auto_import_assets.syntax_under_caret,
                 &auto_import_assets.syntax_under_caret,
                 &import,
                 edit.text_edit_builder(),
@@ -78,14 +78,9 @@ fn new(ctx: &AssistCtx) -> Option<Self> {
 
     fn for_method_call(method_call: ast::MethodCallExpr, ctx: &AssistCtx) -> Option<Self> {
         let syntax_under_caret = method_call.syntax().to_owned();
-        let source_analyzer = ctx.source_analyzer(&syntax_under_caret, None);
-        let module_with_name_to_import = source_analyzer.module()?;
+        let module_with_name_to_import = ctx.sema.scope(&syntax_under_caret).module()?;
         Some(Self {
-            import_candidate: ImportCandidate::for_method_call(
-                &method_call,
-                &source_analyzer,
-                ctx.db,
-            )?,
+            import_candidate: ImportCandidate::for_method_call(&ctx.sema, &method_call)?,
             module_with_name_to_import,
             syntax_under_caret,
         })
@@ -97,14 +92,9 @@ fn for_regular_path(path_under_caret: ast::Path, ctx: &AssistCtx) -> Option<Self
             return None;
         }
 
-        let source_analyzer = ctx.source_analyzer(&syntax_under_caret, None);
-        let module_with_name_to_import = source_analyzer.module()?;
+        let module_with_name_to_import = ctx.sema.scope(&syntax_under_caret).module()?;
         Some(Self {
-            import_candidate: ImportCandidate::for_regular_path(
-                &path_under_caret,
-                &source_analyzer,
-                ctx.db,
-            )?,
+            import_candidate: ImportCandidate::for_regular_path(&ctx.sema, &path_under_caret)?,
             module_with_name_to_import,
             syntax_under_caret,
         })
@@ -229,25 +219,23 @@ enum ImportCandidate {
 
 impl ImportCandidate {
     fn for_method_call(
+        sema: &Semantics<RootDatabase>,
         method_call: &ast::MethodCallExpr,
-        source_analyzer: &SourceAnalyzer,
-        db: &impl HirDatabase,
     ) -> Option<Self> {
-        if source_analyzer.resolve_method_call(method_call).is_some() {
+        if sema.resolve_method_call(method_call).is_some() {
             return None;
         }
         Some(Self::TraitMethod(
-            source_analyzer.type_of(db, &method_call.expr()?)?,
+            sema.type_of_expr(&method_call.expr()?)?,
             method_call.name_ref()?.syntax().to_string(),
         ))
     }
 
     fn for_regular_path(
+        sema: &Semantics<RootDatabase>,
         path_under_caret: &ast::Path,
-        source_analyzer: &SourceAnalyzer,
-        db: &impl HirDatabase,
     ) -> Option<Self> {
-        if source_analyzer.resolve_path(db, path_under_caret).is_some() {
+        if sema.resolve_path(path_under_caret).is_some() {
             return None;
         }
 
@@ -256,17 +244,15 @@ fn for_regular_path(
             let qualifier_start = qualifier.syntax().descendants().find_map(ast::NameRef::cast)?;
             let qualifier_start_path =
                 qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?;
-            if let Some(qualifier_start_resolution) =
-                source_analyzer.resolve_path(db, &qualifier_start_path)
-            {
+            if let Some(qualifier_start_resolution) = sema.resolve_path(&qualifier_start_path) {
                 let qualifier_resolution = if qualifier_start_path == qualifier {
                     qualifier_start_resolution
                 } else {
-                    source_analyzer.resolve_path(db, &qualifier)?
+                    sema.resolve_path(&qualifier)?
                 };
                 if let PathResolution::Def(ModuleDef::Adt(assoc_item_path)) = qualifier_resolution {
                     Some(ImportCandidate::TraitAssocItem(
-                        assoc_item_path.ty(db),
+                        assoc_item_path.ty(sema.db),
                         segment.syntax().to_string(),
                     ))
                 } else {
index f325b6f9225bf92290255b228bbee215d76ed932..54e0a6c8405ca32be8ae4abc5acd5a58ffa71a8b 100644 (file)
@@ -2,8 +2,8 @@
     ast::{self, NameOwner, VisibilityOwner},
     AstNode,
     SyntaxKind::{
-        ATTR, COMMENT, ENUM_DEF, FN_DEF, IDENT, MODULE, STRUCT_DEF, TRAIT_DEF, VISIBILITY,
-        WHITESPACE,
+        ATTR, COMMENT, CONST_DEF, ENUM_DEF, FN_DEF, IDENT, MODULE, STRUCT_DEF, TRAIT_DEF,
+        VISIBILITY, WHITESPACE,
     },
     SyntaxNode, TextUnit, T,
 };
@@ -30,13 +30,13 @@ pub(crate) fn change_visibility(ctx: AssistCtx) -> Option<Assist> {
 
 fn add_vis(ctx: AssistCtx) -> Option<Assist> {
     let item_keyword = ctx.token_at_offset().find(|leaf| match leaf.kind() {
-        T![fn] | T![mod] | T![struct] | T![enum] | T![trait] => true,
+        T![const] | T![fn] | T![mod] | T![struct] | T![enum] | T![trait] => true,
         _ => false,
     });
 
     let (offset, target) = if let Some(keyword) = item_keyword {
         let parent = keyword.parent();
-        let def_kws = vec![FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF];
+        let def_kws = vec![CONST_DEF, FN_DEF, MODULE, STRUCT_DEF, ENUM_DEF, TRAIT_DEF];
         // Parent is not a definition, can't add visibility
         if !def_kws.iter().any(|&def_kw| def_kw == parent.kind()) {
             return None;
@@ -135,6 +135,11 @@ fn change_visibility_pub_crate_to_pub() {
         check_assist(change_visibility, "<|>pub(crate) fn foo() {}", "<|>pub fn foo() {}")
     }
 
+    #[test]
+    fn change_visibility_const() {
+        check_assist(change_visibility, "<|>const FOO = 3u8;", "<|>pub(crate) const FOO = 3u8;");
+    }
+
     #[test]
     fn change_visibility_handles_comment_attrs() {
         check_assist(
index 22f88884f4a17774671c516e251f8f7c031ddf84..f3167b4e59f90ab9dbf7f68c838a62323baacd35 100644 (file)
@@ -112,16 +112,19 @@ pub(crate) fn convert_to_guarded_return(ctx: AssistCtx) -> Option<Assist> {
             Some((path, bound_ident)) => {
                 // If-let.
                 let match_expr = {
-                    let happy_arm = make::match_arm(
-                        once(
-                            make::tuple_struct_pat(
-                                path,
-                                once(make::bind_pat(make::name("it")).into()),
-                            )
-                            .into(),
-                        ),
-                        make::expr_path(make::path_from_name_ref(make::name_ref("it"))),
-                    );
+                    let happy_arm = {
+                        let pat = make::tuple_struct_pat(
+                            path,
+                            once(make::bind_pat(make::name("it")).into()),
+                        );
+                        let expr = {
+                            let name_ref = make::name_ref("it");
+                            let segment = make::path_segment(name_ref);
+                            let path = make::path_unqualified(segment);
+                            make::expr_path(path)
+                        };
+                        make::match_arm(once(pat.into()), expr)
+                    };
 
                     let sad_arm = make::match_arm(
                         // FIXME: would be cool to use `None` or `Err(_)` if appropriate
index ae2437ed32f1f785cd277936b11f02e626b757fd..e5d8c639d121e1a23915731ae384255858f72d2a 100644 (file)
@@ -2,10 +2,11 @@
 
 use std::iter;
 
-use hir::{db::HirDatabase, Adt, HasSource};
+use hir::{db::HirDatabase, Adt, HasSource, Semantics};
 use ra_syntax::ast::{self, edit::IndentLevel, make, AstNode, NameOwner};
 
 use crate::{Assist, AssistCtx, AssistId};
+use ra_ide_db::RootDatabase;
 
 // Assist: fill_match_arms
 //
@@ -46,10 +47,9 @@ pub(crate) fn fill_match_arms(ctx: AssistCtx) -> Option<Assist> {
     };
 
     let expr = match_expr.expr()?;
-    let (enum_def, module) = {
-        let analyzer = ctx.source_analyzer(expr.syntax(), None);
-        (resolve_enum_def(ctx.db, &analyzer, &expr)?, analyzer.module()?)
-    };
+    let enum_def = resolve_enum_def(&ctx.sema, &expr)?;
+    let module = ctx.sema.scope(expr.syntax()).module()?;
+
     let variants = enum_def.variants(ctx.db);
     if variants.is_empty() {
         return None;
@@ -81,18 +81,11 @@ fn is_trivial(arm: &ast::MatchArm) -> bool {
     }
 }
 
-fn resolve_enum_def(
-    db: &impl HirDatabase,
-    analyzer: &hir::SourceAnalyzer,
-    expr: &ast::Expr,
-) -> Option<hir::Enum> {
-    let expr_ty = analyzer.type_of(db, &expr)?;
-
-    let result = expr_ty.autoderef(db).find_map(|ty| match ty.as_adt() {
+fn resolve_enum_def(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<hir::Enum> {
+    sema.type_of_expr(&expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
         Some(Adt::Enum(e)) => Some(e),
         _ => None,
-    });
-    result
+    })
 }
 
 fn build_pat(
index 91b588243eb4f40ec82f71bc19b2f219639337a5..53a72309b1d2a6f70f3198a05fa10195cc6536b3 100644 (file)
@@ -44,8 +44,7 @@ pub(crate) fn inline_local_variable(ctx: AssistCtx) -> Option<Assist> {
     } else {
         let_stmt.syntax().text_range()
     };
-    let analyzer = ctx.source_analyzer(bind_pat.syntax(), None);
-    let refs = analyzer.find_all_refs(&bind_pat);
+    let refs = ctx.sema.find_all_refs(&bind_pat);
     if refs.is_empty() {
         return None;
     };
index 7312ce6871e0e64ad54200a597c6c1aa8656b922..b453c51fbc55056f788f202c5de60e9b83df8062 100644 (file)
@@ -136,15 +136,13 @@ fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> {
 mod tests {
     use test_utils::covers;
 
-    use crate::helpers::{
-        check_assist_range, check_assist_range_not_applicable, check_assist_range_target,
-    };
+    use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target};
 
     use super::*;
 
     #[test]
     fn test_introduce_var_simple() {
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn foo() {
@@ -161,16 +159,13 @@ fn foo() {
     #[test]
     fn introduce_var_in_comment_is_not_applicable() {
         covers!(introduce_var_in_comment_is_not_applicable);
-        check_assist_range_not_applicable(
-            introduce_variable,
-            "fn main() { 1 + /* <|>comment<|> */ 1; }",
-        );
+        check_assist_not_applicable(introduce_variable, "fn main() { 1 + /* <|>comment<|> */ 1; }");
     }
 
     #[test]
     fn test_introduce_var_expr_stmt() {
         covers!(test_introduce_var_expr_stmt);
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn foo() {
@@ -181,7 +176,7 @@ fn foo() {
     let <|>var_name = 1 + 1;
 }",
         );
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn foo() {
@@ -198,7 +193,7 @@ fn foo() {
 
     #[test]
     fn test_introduce_var_part_of_expr_stmt() {
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn foo() {
@@ -215,7 +210,7 @@ fn foo() {
     #[test]
     fn test_introduce_var_last_expr() {
         covers!(test_introduce_var_last_expr);
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn foo() {
@@ -227,7 +222,7 @@ fn foo() {
     bar(var_name)
 }",
         );
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn foo() {
@@ -243,7 +238,7 @@ fn foo() {
 
     #[test]
     fn test_introduce_var_in_match_arm_no_block() {
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn main() {
@@ -268,7 +263,7 @@ fn main() {
 
     #[test]
     fn test_introduce_var_in_match_arm_with_block() {
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn main() {
@@ -300,7 +295,7 @@ fn main() {
 
     #[test]
     fn test_introduce_var_in_closure_no_block() {
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn main() {
@@ -317,7 +312,7 @@ fn main() {
 
     #[test]
     fn test_introduce_var_in_closure_with_block() {
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn main() {
@@ -334,7 +329,7 @@ fn main() {
 
     #[test]
     fn test_introduce_var_path_simple() {
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn main() {
@@ -352,7 +347,7 @@ fn main() {
 
     #[test]
     fn test_introduce_var_path_method() {
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn main() {
@@ -370,7 +365,7 @@ fn main() {
 
     #[test]
     fn test_introduce_var_return() {
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn foo() -> u32 {
@@ -388,7 +383,7 @@ fn foo() -> u32 {
 
     #[test]
     fn test_introduce_var_does_not_add_extra_whitespace() {
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn foo() -> u32 {
@@ -407,7 +402,7 @@ fn foo() -> u32 {
 ",
         );
 
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn foo() -> u32 {
@@ -424,7 +419,7 @@ fn foo() -> u32 {
 ",
         );
 
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn foo() -> u32 {
@@ -452,7 +447,7 @@ fn foo() -> u32 {
 
     #[test]
     fn test_introduce_var_break() {
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn main() {
@@ -474,7 +469,7 @@ fn main() {
 
     #[test]
     fn test_introduce_var_for_cast() {
-        check_assist_range(
+        check_assist(
             introduce_variable,
             "
 fn main() {
@@ -492,27 +487,20 @@ fn main() {
 
     #[test]
     fn test_introduce_var_for_return_not_applicable() {
-        check_assist_range_not_applicable(introduce_variable, "fn foo() { <|>return<|>; } ");
+        check_assist_not_applicable(introduce_variable, "fn foo() { <|>return<|>; } ");
     }
 
     #[test]
     fn test_introduce_var_for_break_not_applicable() {
-        check_assist_range_not_applicable(
-            introduce_variable,
-            "fn main() { loop { <|>break<|>; }; }",
-        );
+        check_assist_not_applicable(introduce_variable, "fn main() { loop { <|>break<|>; }; }");
     }
 
     // FIXME: This is not quite correct, but good enough(tm) for the sorting heuristic
     #[test]
     fn introduce_var_target() {
-        check_assist_range_target(
-            introduce_variable,
-            "fn foo() -> u32 { <|>return 2 + 2<|>; }",
-            "2 + 2",
-        );
+        check_assist_target(introduce_variable, "fn foo() -> u32 { <|>return 2 + 2<|>; }", "2 + 2");
 
-        check_assist_range_target(
+        check_assist_target(
             introduce_variable,
             "
 fn main() {
index 90793b5fc3a16fbb89d593f3f7fc9267be7d6e85..86b2353660b722ac8a72805aaa8eb1f0a080e58e 100644 (file)
@@ -72,7 +72,11 @@ pub(crate) fn move_bounds_to_where_clause(ctx: AssistCtx) -> Option<Assist> {
 }
 
 fn build_predicate(param: ast::TypeParam) -> Option<ast::WherePred> {
-    let path = make::path_from_name_ref(make::name_ref(&param.name()?.syntax().to_string()));
+    let path = {
+        let name_ref = make::name_ref(&param.name()?.syntax().to_string());
+        let segment = make::path_segment(name_ref);
+        make::path_unqualified(segment)
+    };
     let predicate = make::where_pred(path, param.type_bound_list()?.bounds());
     Some(predicate)
 }
index 2c0a1e12656de1b191ccdeb2358c10d780310d70..7e4b83f13fc302a168a36c9541e34da06377e422 100644 (file)
@@ -1,5 +1,6 @@
 use ra_syntax::{
-    ast, AstToken,
+    ast::{self, HasStringValue},
+    AstToken,
     SyntaxKind::{RAW_STRING, STRING},
     TextUnit,
 };
index eac452413429eb058c9636e6733203f5d0520524..94f5d6c5036f8792aa6111cd7524223c0de76bd1 100644 (file)
@@ -1,42 +1,12 @@
-use hir::{self, ModPath};
-use ra_syntax::{
-    ast::{self, NameOwner},
-    AstNode, Direction, SmolStr,
-    SyntaxKind::{PATH, PATH_SEGMENT},
-    SyntaxNode, TextRange, T,
-};
-use ra_text_edit::TextEditBuilder;
+use hir;
+use ra_syntax::{ast, AstNode, SmolStr, TextRange};
 
 use crate::{
     assist_ctx::{Assist, AssistCtx},
+    utils::insert_use_statement,
     AssistId,
 };
 
-/// Creates and inserts a use statement for the given path to import.
-/// The use statement is inserted in the scope most appropriate to the
-/// the cursor position given, additionally merged with the existing use imports.
-pub fn insert_use_statement(
-    // Ideally the position of the cursor, used to
-    position: &SyntaxNode,
-    // The statement to use as anchor (last resort)
-    anchor: &SyntaxNode,
-    path_to_import: &ModPath,
-    edit: &mut TextEditBuilder,
-) {
-    let target = path_to_import.to_string().split("::").map(SmolStr::new).collect::<Vec<_>>();
-    let container = position.ancestors().find_map(|n| {
-        if let Some(module) = ast::Module::cast(n.clone()) {
-            return module.item_list().map(|it| it.syntax().clone());
-        }
-        ast::SourceFile::cast(n).map(|it| it.syntax().clone())
-    });
-
-    if let Some(container) = container {
-        let action = best_action_for_target(container, anchor.clone(), &target);
-        make_assist(&action, &target, edit);
-    }
-}
-
 // Assist: replace_qualified_name_with_use
 //
 // Adds a use statement for a given fully-qualified name.
@@ -63,522 +33,25 @@ pub(crate) fn replace_qualified_name_with_use(ctx: AssistCtx) -> Option<Assist>
         return None;
     }
 
-    let module = path.syntax().ancestors().find_map(ast::Module::cast);
-    let position = match module.and_then(|it| it.item_list()) {
-        Some(item_list) => item_list.syntax().clone(),
-        None => {
-            let current_file = path.syntax().ancestors().find_map(ast::SourceFile::cast)?;
-            current_file.syntax().clone()
-        }
-    };
-
     ctx.add_assist(
         AssistId("replace_qualified_name_with_use"),
         "Replace qualified path with use",
         |edit| {
-            replace_with_use(&position, &path, &segments, edit.text_edit_builder());
+            let path_to_import = hir_path.mod_path().clone();
+            insert_use_statement(path.syntax(), &path_to_import, edit.text_edit_builder());
+
+            if let Some(last) = path.segment() {
+                // Here we are assuming the assist will provide a correct use statement
+                // so we can delete the path qualifier
+                edit.delete(TextRange::from_to(
+                    path.syntax().text_range().start(),
+                    last.syntax().text_range().start(),
+                ));
+            }
         },
     )
 }
 
-fn collect_path_segments_raw(
-    segments: &mut Vec<ast::PathSegment>,
-    mut path: ast::Path,
-) -> Option<usize> {
-    let oldlen = segments.len();
-    loop {
-        let mut children = path.syntax().children_with_tokens();
-        let (first, second, third) = (
-            children.next().map(|n| (n.clone(), n.kind())),
-            children.next().map(|n| (n.clone(), n.kind())),
-            children.next().map(|n| (n.clone(), n.kind())),
-        );
-        match (first, second, third) {
-            (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => {
-                path = ast::Path::cast(subpath.as_node()?.clone())?;
-                segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
-            }
-            (Some((segment, PATH_SEGMENT)), _, _) => {
-                segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
-                break;
-            }
-            (_, _, _) => return None,
-        }
-    }
-    // We need to reverse only the new added segments
-    let only_new_segments = segments.split_at_mut(oldlen).1;
-    only_new_segments.reverse();
-    Some(segments.len() - oldlen)
-}
-
-fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) {
-    let mut iter = segments.iter();
-    if let Some(s) = iter.next() {
-        buf.push_str(s);
-    }
-    for s in iter {
-        buf.push_str("::");
-        buf.push_str(s);
-    }
-}
-
-/// Returns the number of common segments.
-fn compare_path_segments(left: &[SmolStr], right: &[ast::PathSegment]) -> usize {
-    left.iter().zip(right).take_while(|(l, r)| compare_path_segment(l, r)).count()
-}
-
-fn compare_path_segment(a: &SmolStr, b: &ast::PathSegment) -> bool {
-    if let Some(kb) = b.kind() {
-        match kb {
-            ast::PathSegmentKind::Name(nameref_b) => a == nameref_b.text(),
-            ast::PathSegmentKind::SelfKw => a == "self",
-            ast::PathSegmentKind::SuperKw => a == "super",
-            ast::PathSegmentKind::CrateKw => a == "crate",
-            ast::PathSegmentKind::Type { .. } => false, // not allowed in imports
-        }
-    } else {
-        false
-    }
-}
-
-fn compare_path_segment_with_name(a: &SmolStr, b: &ast::Name) -> bool {
-    a == b.text()
-}
-
-#[derive(Clone, Debug)]
-enum ImportAction {
-    Nothing,
-    // Add a brand new use statement.
-    AddNewUse {
-        anchor: Option<SyntaxNode>, // anchor node
-        add_after_anchor: bool,
-    },
-
-    // To split an existing use statement creating a nested import.
-    AddNestedImport {
-        // how may segments matched with the target path
-        common_segments: usize,
-        path_to_split: ast::Path,
-        // the first segment of path_to_split we want to add into the new nested list
-        first_segment_to_split: Option<ast::PathSegment>,
-        // Wether to add 'self' in addition to the target path
-        add_self: bool,
-    },
-    // To add the target path to an existing nested import tree list.
-    AddInTreeList {
-        common_segments: usize,
-        // The UseTreeList where to add the target path
-        tree_list: ast::UseTreeList,
-        add_self: bool,
-    },
-}
-
-impl ImportAction {
-    fn add_new_use(anchor: Option<SyntaxNode>, add_after_anchor: bool) -> Self {
-        ImportAction::AddNewUse { anchor, add_after_anchor }
-    }
-
-    fn add_nested_import(
-        common_segments: usize,
-        path_to_split: ast::Path,
-        first_segment_to_split: Option<ast::PathSegment>,
-        add_self: bool,
-    ) -> Self {
-        ImportAction::AddNestedImport {
-            common_segments,
-            path_to_split,
-            first_segment_to_split,
-            add_self,
-        }
-    }
-
-    fn add_in_tree_list(
-        common_segments: usize,
-        tree_list: ast::UseTreeList,
-        add_self: bool,
-    ) -> Self {
-        ImportAction::AddInTreeList { common_segments, tree_list, add_self }
-    }
-
-    fn better(left: ImportAction, right: ImportAction) -> ImportAction {
-        if left.is_better(&right) {
-            left
-        } else {
-            right
-        }
-    }
-
-    fn is_better(&self, other: &ImportAction) -> bool {
-        match (self, other) {
-            (ImportAction::Nothing, _) => true,
-            (ImportAction::AddInTreeList { .. }, ImportAction::Nothing) => false,
-            (
-                ImportAction::AddNestedImport { common_segments: n, .. },
-                ImportAction::AddInTreeList { common_segments: m, .. },
-            )
-            | (
-                ImportAction::AddInTreeList { common_segments: n, .. },
-                ImportAction::AddNestedImport { common_segments: m, .. },
-            )
-            | (
-                ImportAction::AddInTreeList { common_segments: n, .. },
-                ImportAction::AddInTreeList { common_segments: m, .. },
-            )
-            | (
-                ImportAction::AddNestedImport { common_segments: n, .. },
-                ImportAction::AddNestedImport { common_segments: m, .. },
-            ) => n > m,
-            (ImportAction::AddInTreeList { .. }, _) => true,
-            (ImportAction::AddNestedImport { .. }, ImportAction::Nothing) => false,
-            (ImportAction::AddNestedImport { .. }, _) => true,
-            (ImportAction::AddNewUse { .. }, _) => false,
-        }
-    }
-}
-
-// Find out the best ImportAction to import target path against current_use_tree.
-// If current_use_tree has a nested import the function gets called recursively on every UseTree inside a UseTreeList.
-fn walk_use_tree_for_best_action(
-    current_path_segments: &mut Vec<ast::PathSegment>, // buffer containing path segments
-    current_parent_use_tree_list: Option<ast::UseTreeList>, // will be Some value if we are in a nested import
-    current_use_tree: ast::UseTree, // the use tree we are currently examinating
-    target: &[SmolStr],             // the path we want to import
-) -> ImportAction {
-    // We save the number of segments in the buffer so we can restore the correct segments
-    // before returning. Recursive call will add segments so we need to delete them.
-    let prev_len = current_path_segments.len();
-
-    let tree_list = current_use_tree.use_tree_list();
-    let alias = current_use_tree.alias();
-
-    let path = match current_use_tree.path() {
-        Some(path) => path,
-        None => {
-            // If the use item don't have a path, it means it's broken (syntax error)
-            return ImportAction::add_new_use(
-                current_use_tree
-                    .syntax()
-                    .ancestors()
-                    .find_map(ast::UseItem::cast)
-                    .map(|it| it.syntax().clone()),
-                true,
-            );
-        }
-    };
-
-    // This can happen only if current_use_tree is a direct child of a UseItem
-    if let Some(name) = alias.and_then(|it| it.name()) {
-        if compare_path_segment_with_name(&target[0], &name) {
-            return ImportAction::Nothing;
-        }
-    }
-
-    collect_path_segments_raw(current_path_segments, path.clone());
-
-    // We compare only the new segments added in the line just above.
-    // The first prev_len segments were already compared in 'parent' recursive calls.
-    let left = target.split_at(prev_len).1;
-    let right = current_path_segments.split_at(prev_len).1;
-    let common = compare_path_segments(left, &right);
-    let mut action = match common {
-        0 => ImportAction::add_new_use(
-            // e.g: target is std::fmt and we can have
-            // use foo::bar
-            // We add a brand new use statement
-            current_use_tree
-                .syntax()
-                .ancestors()
-                .find_map(ast::UseItem::cast)
-                .map(|it| it.syntax().clone()),
-            true,
-        ),
-        common if common == left.len() && left.len() == right.len() => {
-            // e.g: target is std::fmt and we can have
-            // 1- use std::fmt;
-            // 2- use std::fmt::{ ... }
-            if let Some(list) = tree_list {
-                // In case 2 we need to add self to the nested list
-                // unless it's already there
-                let has_self = list.use_trees().map(|it| it.path()).any(|p| {
-                    p.and_then(|it| it.segment())
-                        .and_then(|it| it.kind())
-                        .filter(|k| *k == ast::PathSegmentKind::SelfKw)
-                        .is_some()
-                });
-
-                if has_self {
-                    ImportAction::Nothing
-                } else {
-                    ImportAction::add_in_tree_list(current_path_segments.len(), list, true)
-                }
-            } else {
-                // Case 1
-                ImportAction::Nothing
-            }
-        }
-        common if common != left.len() && left.len() == right.len() => {
-            // e.g: target is std::fmt and we have
-            // use std::io;
-            // We need to split.
-            let segments_to_split = current_path_segments.split_at(prev_len + common).1;
-            ImportAction::add_nested_import(
-                prev_len + common,
-                path,
-                Some(segments_to_split[0].clone()),
-                false,
-            )
-        }
-        common if common == right.len() && left.len() > right.len() => {
-            // e.g: target is std::fmt and we can have
-            // 1- use std;
-            // 2- use std::{ ... };
-
-            // fallback action
-            let mut better_action = ImportAction::add_new_use(
-                current_use_tree
-                    .syntax()
-                    .ancestors()
-                    .find_map(ast::UseItem::cast)
-                    .map(|it| it.syntax().clone()),
-                true,
-            );
-            if let Some(list) = tree_list {
-                // Case 2, check recursively if the path is already imported in the nested list
-                for u in list.use_trees() {
-                    let child_action = walk_use_tree_for_best_action(
-                        current_path_segments,
-                        Some(list.clone()),
-                        u,
-                        target,
-                    );
-                    if child_action.is_better(&better_action) {
-                        better_action = child_action;
-                        if let ImportAction::Nothing = better_action {
-                            return better_action;
-                        }
-                    }
-                }
-            } else {
-                // Case 1, split adding self
-                better_action = ImportAction::add_nested_import(prev_len + common, path, None, true)
-            }
-            better_action
-        }
-        common if common == left.len() && left.len() < right.len() => {
-            // e.g: target is std::fmt and we can have
-            // use std::fmt::Debug;
-            let segments_to_split = current_path_segments.split_at(prev_len + common).1;
-            ImportAction::add_nested_import(
-                prev_len + common,
-                path,
-                Some(segments_to_split[0].clone()),
-                true,
-            )
-        }
-        common if common < left.len() && common < right.len() => {
-            // e.g.: target is std::fmt::nested::Debug
-            // use std::fmt::Display
-            let segments_to_split = current_path_segments.split_at(prev_len + common).1;
-            ImportAction::add_nested_import(
-                prev_len + common,
-                path,
-                Some(segments_to_split[0].clone()),
-                false,
-            )
-        }
-        _ => unreachable!(),
-    };
-
-    // If we are inside a UseTreeList, adding a use statement becomes adding to the existing
-    // tree list.
-    action = match (current_parent_use_tree_list, action.clone()) {
-        (Some(use_tree_list), ImportAction::AddNewUse { .. }) => {
-            ImportAction::add_in_tree_list(prev_len, use_tree_list, false)
-        }
-        (_, _) => action,
-    };
-
-    // We remove the segments added
-    current_path_segments.truncate(prev_len);
-    action
-}
-
-fn best_action_for_target(
-    container: SyntaxNode,
-    anchor: SyntaxNode,
-    target: &[SmolStr],
-) -> ImportAction {
-    let mut storage = Vec::with_capacity(16); // this should be the only allocation
-    let best_action = container
-        .children()
-        .filter_map(ast::UseItem::cast)
-        .filter_map(|it| it.use_tree())
-        .map(|u| walk_use_tree_for_best_action(&mut storage, None, u, target))
-        .fold(None, |best, a| match best {
-            Some(best) => Some(ImportAction::better(best, a)),
-            None => Some(a),
-        });
-
-    match best_action {
-        Some(action) => action,
-        None => {
-            // We have no action and no UseItem was found in the container, so we find
-            // another item and use it as the anchor.
-            // If there are no items above, we choose the target path itself as anchor.
-            // todo: we should include even whitespace blocks as anchor candidates
-            let anchor = container
-                .children()
-                .find(|n| n.text_range().start() < anchor.text_range().start())
-                .or_else(|| Some(anchor));
-
-            let add_after_anchor = anchor
-                .clone()
-                .and_then(ast::Attr::cast)
-                .map(|attr| attr.kind() == ast::AttrKind::Inner)
-                .unwrap_or(false);
-            ImportAction::add_new_use(anchor, add_after_anchor)
-        }
-    }
-}
-
-fn make_assist(action: &ImportAction, target: &[SmolStr], edit: &mut TextEditBuilder) {
-    match action {
-        ImportAction::AddNewUse { anchor, add_after_anchor } => {
-            make_assist_add_new_use(anchor, *add_after_anchor, target, edit)
-        }
-        ImportAction::AddInTreeList { common_segments, tree_list, add_self } => {
-            // We know that the first n segments already exist in the use statement we want
-            // to modify, so we want to add only the last target.len() - n segments.
-            let segments_to_add = target.split_at(*common_segments).1;
-            make_assist_add_in_tree_list(tree_list, segments_to_add, *add_self, edit)
-        }
-        ImportAction::AddNestedImport {
-            common_segments,
-            path_to_split,
-            first_segment_to_split,
-            add_self,
-        } => {
-            let segments_to_add = target.split_at(*common_segments).1;
-            make_assist_add_nested_import(
-                path_to_split,
-                first_segment_to_split,
-                segments_to_add,
-                *add_self,
-                edit,
-            )
-        }
-        _ => {}
-    }
-}
-
-fn make_assist_add_new_use(
-    anchor: &Option<SyntaxNode>,
-    after: bool,
-    target: &[SmolStr],
-    edit: &mut TextEditBuilder,
-) {
-    if let Some(anchor) = anchor {
-        let indent = ra_fmt::leading_indent(anchor);
-        let mut buf = String::new();
-        if after {
-            buf.push_str("\n");
-            if let Some(spaces) = &indent {
-                buf.push_str(spaces);
-            }
-        }
-        buf.push_str("use ");
-        fmt_segments_raw(target, &mut buf);
-        buf.push_str(";");
-        if !after {
-            buf.push_str("\n\n");
-            if let Some(spaces) = &indent {
-                buf.push_str(&spaces);
-            }
-        }
-        let position = if after { anchor.text_range().end() } else { anchor.text_range().start() };
-        edit.insert(position, buf);
-    }
-}
-
-fn make_assist_add_in_tree_list(
-    tree_list: &ast::UseTreeList,
-    target: &[SmolStr],
-    add_self: bool,
-    edit: &mut TextEditBuilder,
-) {
-    let last = tree_list.use_trees().last();
-    if let Some(last) = last {
-        let mut buf = String::new();
-        let comma = last.syntax().siblings(Direction::Next).find(|n| n.kind() == T![,]);
-        let offset = if let Some(comma) = comma {
-            comma.text_range().end()
-        } else {
-            buf.push_str(",");
-            last.syntax().text_range().end()
-        };
-        if add_self {
-            buf.push_str(" self")
-        } else {
-            buf.push_str(" ");
-        }
-        fmt_segments_raw(target, &mut buf);
-        edit.insert(offset, buf);
-    } else {
-    }
-}
-
-fn make_assist_add_nested_import(
-    path: &ast::Path,
-    first_segment_to_split: &Option<ast::PathSegment>,
-    target: &[SmolStr],
-    add_self: bool,
-    edit: &mut TextEditBuilder,
-) {
-    let use_tree = path.syntax().ancestors().find_map(ast::UseTree::cast);
-    if let Some(use_tree) = use_tree {
-        let (start, add_colon_colon) = if let Some(first_segment_to_split) = first_segment_to_split
-        {
-            (first_segment_to_split.syntax().text_range().start(), false)
-        } else {
-            (use_tree.syntax().text_range().end(), true)
-        };
-        let end = use_tree.syntax().text_range().end();
-
-        let mut buf = String::new();
-        if add_colon_colon {
-            buf.push_str("::");
-        }
-        buf.push_str("{");
-        if add_self {
-            buf.push_str("self, ");
-        }
-        fmt_segments_raw(target, &mut buf);
-        if !target.is_empty() {
-            buf.push_str(", ");
-        }
-        edit.insert(start, buf);
-        edit.insert(end, "}".to_string());
-    }
-}
-
-fn replace_with_use(
-    container: &SyntaxNode,
-    path: &ast::Path,
-    target: &[SmolStr],
-    edit: &mut TextEditBuilder,
-) {
-    let action = best_action_for_target(container.clone(), path.syntax().clone(), target);
-    make_assist(&action, target, edit);
-    if let Some(last) = path.segment() {
-        // Here we are assuming the assist will provide a correct use statement
-        // so we can delete the path qualifier
-        edit.delete(TextRange::from_to(
-            path.syntax().text_range().start(),
-            last.syntax().text_range().start(),
-        ));
-    }
-}
-
 fn collect_hir_path_segments(path: &hir::Path) -> Option<Vec<SmolStr>> {
     let mut ps = Vec::<SmolStr>::with_capacity(10);
     match path.kind() {
index d7998b0d1a5ca04951877dc1f4b1b830eae38e16..deeada2de52868c20204fa21a7cebac90bec2255 100644 (file)
@@ -18,7 +18,7 @@
 use ra_text_edit::TextEdit;
 
 pub(crate) use crate::assist_ctx::{Assist, AssistCtx, AssistHandler};
-pub use crate::handlers::replace_qualified_name_with_use::insert_use_statement;
+use hir::Semantics;
 
 /// Unique identifier of the assist, should not be shown to the user
 /// directly.
@@ -63,7 +63,8 @@ pub struct ResolvedAssist {
 /// Assists are returned in the "unresolved" state, that is only labels are
 /// returned, without actual edits.
 pub fn unresolved_assists(db: &RootDatabase, range: FileRange) -> Vec<AssistLabel> {
-    let ctx = AssistCtx::new(db, range, false);
+    let sema = Semantics::new(db);
+    let ctx = AssistCtx::new(&sema, range, false);
     handlers::all()
         .iter()
         .filter_map(|f| f(ctx.clone()))
@@ -77,7 +78,8 @@ pub fn unresolved_assists(db: &RootDatabase, range: FileRange) -> Vec<AssistLabe
 /// Assists are returned in the "resolved" state, that is with edit fully
 /// computed.
 pub fn resolved_assists(db: &RootDatabase, range: FileRange) -> Vec<ResolvedAssist> {
-    let ctx = AssistCtx::new(db, range, true);
+    let sema = Semantics::new(db);
+    let ctx = AssistCtx::new(&sema, range, true);
     let mut a = handlers::all()
         .iter()
         .filter_map(|f| f(ctx.clone()))
@@ -162,9 +164,10 @@ mod helpers {
     use ra_db::{fixture::WithFixture, FileId, FileRange, SourceDatabaseExt};
     use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase};
     use ra_syntax::TextRange;
-    use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range};
+    use test_utils::{add_cursor, assert_eq_text, extract_range_or_offset, RangeOrOffset};
 
     use crate::{AssistCtx, AssistHandler};
+    use hir::Semantics;
 
     pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
         let (mut db, file_id) = RootDatabase::with_single_file(text);
@@ -176,81 +179,66 @@ pub(crate) fn with_single_file(text: &str) -> (RootDatabase, FileId) {
     }
 
     pub(crate) fn check_assist(assist: AssistHandler, before: &str, after: &str) {
-        let (before_cursor_pos, before) = extract_offset(before);
-        let (db, file_id) = with_single_file(&before);
-        let frange =
-            FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
-        let assist =
-            assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable");
-        let action = assist.0[0].action.clone().unwrap();
-
-        let actual = action.edit.apply(&before);
-        let actual_cursor_pos = match action.cursor_position {
-            None => action
-                .edit
-                .apply_to_offset(before_cursor_pos)
-                .expect("cursor position is affected by the edit"),
-            Some(off) => off,
-        };
-        let actual = add_cursor(&actual, actual_cursor_pos);
-        assert_eq_text!(after, &actual);
-    }
-
-    pub(crate) fn check_assist_range(assist: AssistHandler, before: &str, after: &str) {
-        let (range, before) = extract_range(before);
-        let (db, file_id) = with_single_file(&before);
-        let frange = FileRange { file_id, range };
-        let assist =
-            assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable");
-        let action = assist.0[0].action.clone().unwrap();
-
-        let mut actual = action.edit.apply(&before);
-        if let Some(pos) = action.cursor_position {
-            actual = add_cursor(&actual, pos);
-        }
-        assert_eq_text!(after, &actual);
+        check(assist, before, ExpectedResult::After(after));
     }
 
+    // FIXME: instead of having a separate function here, maybe use
+    // `extract_ranges` and mark the target as `<target> </target>` in the
+    // fixture?
     pub(crate) fn check_assist_target(assist: AssistHandler, before: &str, target: &str) {
-        let (before_cursor_pos, before) = extract_offset(before);
-        let (db, file_id) = with_single_file(&before);
-        let frange =
-            FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
-        let assist =
-            assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable");
-        let action = assist.0[0].action.clone().unwrap();
-
-        let range = action.target.expect("expected target on action");
-        assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target);
+        check(assist, before, ExpectedResult::Target(target));
     }
 
-    pub(crate) fn check_assist_range_target(assist: AssistHandler, before: &str, target: &str) {
-        let (range, before) = extract_range(before);
-        let (db, file_id) = with_single_file(&before);
-        let frange = FileRange { file_id, range };
-        let assist =
-            assist(AssistCtx::new(&db, frange, true)).expect("code action is not applicable");
-        let action = assist.0[0].action.clone().unwrap();
-
-        let range = action.target.expect("expected target on action");
-        assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target);
+    pub(crate) fn check_assist_not_applicable(assist: AssistHandler, before: &str) {
+        check(assist, before, ExpectedResult::NotApplicable);
     }
 
-    pub(crate) fn check_assist_not_applicable(assist: AssistHandler, before: &str) {
-        let (before_cursor_pos, before) = extract_offset(before);
-        let (db, file_id) = with_single_file(&before);
-        let frange =
-            FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
-        let assist = assist(AssistCtx::new(&db, frange, true));
-        assert!(assist.is_none());
+    enum ExpectedResult<'a> {
+        NotApplicable,
+        After(&'a str),
+        Target(&'a str),
     }
 
-    pub(crate) fn check_assist_range_not_applicable(assist: AssistHandler, before: &str) {
-        let (range, before) = extract_range(before);
+    fn check(assist: AssistHandler, before: &str, expected: ExpectedResult) {
+        let (range_or_offset, before) = extract_range_or_offset(before);
+        let range: TextRange = range_or_offset.into();
+
         let (db, file_id) = with_single_file(&before);
         let frange = FileRange { file_id, range };
-        let assist = assist(AssistCtx::new(&db, frange, true));
-        assert!(assist.is_none());
+        let sema = Semantics::new(&db);
+        let assist_ctx = AssistCtx::new(&sema, frange, true);
+
+        match (assist(assist_ctx), expected) {
+            (Some(assist), ExpectedResult::After(after)) => {
+                let action = assist.0[0].action.clone().unwrap();
+
+                let mut actual = action.edit.apply(&before);
+                match action.cursor_position {
+                    None => {
+                        if let RangeOrOffset::Offset(before_cursor_pos) = range_or_offset {
+                            let off = action
+                                .edit
+                                .apply_to_offset(before_cursor_pos)
+                                .expect("cursor position is affected by the edit");
+                            actual = add_cursor(&actual, off)
+                        }
+                    }
+                    Some(off) => actual = add_cursor(&actual, off),
+                };
+
+                assert_eq_text!(after, &actual);
+            }
+            (Some(assist), ExpectedResult::Target(target)) => {
+                let action = assist.0[0].action.clone().unwrap();
+                let range = action.target.expect("expected target on action");
+                assert_eq_text!(&before[range.start().to_usize()..range.end().to_usize()], target);
+            }
+            (Some(_), ExpectedResult::NotApplicable) => panic!("assist should not be applicable!"),
+            (None, ExpectedResult::After(_)) | (None, ExpectedResult::Target(_)) => {
+                panic!("code action is not applicable")
+            }
+            (None, ExpectedResult::NotApplicable) => (),
+        };
     }
 }
 
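The rewritten helpers now funnel every assertion through the single `check` function, with `ExpectedResult` selecting the behavior. A rough sketch of what a handler test looks like under this scheme (the handler name and fixture text are illustrative, not part of this diff; `<|>` is the cursor marker that `extract_range_or_offset` strips):

```
#[test]
fn applies_assist_at_cursor() {
    // ExpectedResult::After: apply the assist and compare the edited text.
    check_assist(
        add_explicit_type,
        "fn main() { let v<|> = 1u32; }",
        "fn main() { let v<|>: u32 = 1u32; }",
    );
    // ExpectedResult::NotApplicable: the handler must return None here,
    // since the type is already written out.
    check_assist_not_applicable(add_explicit_type, "fn main() { let v<|>: u32 = 1u32; }");
}
```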
index 6ff44c95cb25716cdea820f919fd7dbeb5241a0c..d544caee7b159f40344ea507b1827e65e47eefc9 100644 (file)
@@ -1,16 +1,18 @@
 //! Assorted functions shared by several assists.
+pub(crate) mod insert_use;
 
+use hir::Semantics;
+use ra_ide_db::RootDatabase;
 use ra_syntax::{
     ast::{self, make, NameOwner},
     AstNode, T,
 };
-
-use hir::db::HirDatabase;
 use rustc_hash::FxHashSet;
 
+pub use insert_use::insert_use_statement;
+
 pub fn get_missing_impl_items(
-    db: &impl HirDatabase,
-    analyzer: &hir::SourceAnalyzer,
+    sema: &Semantics<RootDatabase>,
     impl_block: &ast::ImplBlock,
 ) -> Vec<hir::AssocItem> {
     // Names must be unique between constants and functions. However, type aliases
@@ -42,15 +44,17 @@ pub fn get_missing_impl_items(
         }
     }
 
-    resolve_target_trait(db, analyzer, impl_block).map_or(vec![], |target_trait| {
+    resolve_target_trait(sema, impl_block).map_or(vec![], |target_trait| {
         target_trait
-            .items(db)
+            .items(sema.db)
             .iter()
             .filter(|i| match i {
-                hir::AssocItem::Function(f) => !impl_fns_consts.contains(&f.name(db).to_string()),
-                hir::AssocItem::TypeAlias(t) => !impl_type.contains(&t.name(db).to_string()),
+                hir::AssocItem::Function(f) => {
+                    !impl_fns_consts.contains(&f.name(sema.db).to_string())
+                }
+                hir::AssocItem::TypeAlias(t) => !impl_type.contains(&t.name(sema.db).to_string()),
                 hir::AssocItem::Const(c) => c
-                    .name(db)
+                    .name(sema.db)
                     .map(|n| !impl_fns_consts.contains(&n.to_string()))
                     .unwrap_or_default(),
             })
@@ -60,8 +64,7 @@ pub fn get_missing_impl_items(
 }
 
 pub(crate) fn resolve_target_trait(
-    db: &impl HirDatabase,
-    analyzer: &hir::SourceAnalyzer,
+    sema: &Semantics<RootDatabase>,
     impl_block: &ast::ImplBlock,
 ) -> Option<hir::Trait> {
     let ast_path = impl_block
@@ -70,7 +73,7 @@ pub(crate) fn resolve_target_trait(
         .and_then(ast::PathType::cast)?
         .path()?;
 
-    match analyzer.resolve_path(db, &ast_path) {
+    match sema.resolve_path(&ast_path) {
         Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def),
         _ => None,
     }
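For call sites, the migration collapses the old `(db, analyzer)` pair into one `Semantics` handle, which still exposes the database as `sema.db`. A hand-written sketch, not part of the diff:

```
use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::ast;

// Hypothetical call site for the new signatures shown above.
fn missing_items(db: &RootDatabase, impl_block: &ast::ImplBlock) -> Vec<hir::AssocItem> {
    // Before: get_missing_impl_items(db, &analyzer, impl_block)
    let sema = Semantics::new(db);
    get_missing_impl_items(&sema, impl_block)
}
```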
diff --git a/crates/ra_assists/src/utils/insert_use.rs b/crates/ra_assists/src/utils/insert_use.rs
new file mode 100644 (file)
index 0000000..36fd2fc
--- /dev/null
@@ -0,0 +1,510 @@
+//! Handle syntactic aspects of inserting a new `use`.
+
+use hir::{self, ModPath};
+use ra_syntax::{
+    ast::{self, NameOwner},
+    AstNode, Direction, SmolStr,
+    SyntaxKind::{PATH, PATH_SEGMENT},
+    SyntaxNode, T,
+};
+use ra_text_edit::TextEditBuilder;
+
+/// Creates and inserts a use statement for the given path to import.
+/// The use statement is inserted in the scope most appropriate to
+/// the cursor position given, and merged with the existing use imports.
+pub fn insert_use_statement(
+    // Ideally the position of the cursor, used to
+    position: &SyntaxNode,
+    path_to_import: &ModPath,
+    edit: &mut TextEditBuilder,
+) {
+    let target = path_to_import.to_string().split("::").map(SmolStr::new).collect::<Vec<_>>();
+    let container = position.ancestors().find_map(|n| {
+        if let Some(module) = ast::Module::cast(n.clone()) {
+            return module.item_list().map(|it| it.syntax().clone());
+        }
+        ast::SourceFile::cast(n).map(|it| it.syntax().clone())
+    });
+
+    if let Some(container) = container {
+        let action = best_action_for_target(container, position.clone(), &target);
+        make_assist(&action, &target, edit);
+    }
+}
+
+fn collect_path_segments_raw(
+    segments: &mut Vec<ast::PathSegment>,
+    mut path: ast::Path,
+) -> Option<usize> {
+    let oldlen = segments.len();
+    loop {
+        let mut children = path.syntax().children_with_tokens();
+        let (first, second, third) = (
+            children.next().map(|n| (n.clone(), n.kind())),
+            children.next().map(|n| (n.clone(), n.kind())),
+            children.next().map(|n| (n.clone(), n.kind())),
+        );
+        match (first, second, third) {
+            (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => {
+                path = ast::Path::cast(subpath.as_node()?.clone())?;
+                segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
+            }
+            (Some((segment, PATH_SEGMENT)), _, _) => {
+                segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?);
+                break;
+            }
+            (_, _, _) => return None,
+        }
+    }
+    // We need to reverse only the newly added segments
+    let only_new_segments = segments.split_at_mut(oldlen).1;
+    only_new_segments.reverse();
+    Some(segments.len() - oldlen)
+}
+
+fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) {
+    let mut iter = segments.iter();
+    if let Some(s) = iter.next() {
+        buf.push_str(s);
+    }
+    for s in iter {
+        buf.push_str("::");
+        buf.push_str(s);
+    }
+}
+
+/// Returns the number of common segments.
+fn compare_path_segments(left: &[SmolStr], right: &[ast::PathSegment]) -> usize {
+    left.iter().zip(right).take_while(|(l, r)| compare_path_segment(l, r)).count()
+}
+
+fn compare_path_segment(a: &SmolStr, b: &ast::PathSegment) -> bool {
+    if let Some(kb) = b.kind() {
+        match kb {
+            ast::PathSegmentKind::Name(nameref_b) => a == nameref_b.text(),
+            ast::PathSegmentKind::SelfKw => a == "self",
+            ast::PathSegmentKind::SuperKw => a == "super",
+            ast::PathSegmentKind::CrateKw => a == "crate",
+            ast::PathSegmentKind::Type { .. } => false, // not allowed in imports
+        }
+    } else {
+        false
+    }
+}
+
+fn compare_path_segment_with_name(a: &SmolStr, b: &ast::Name) -> bool {
+    a == b.text()
+}
+
+#[derive(Clone, Debug)]
+enum ImportAction {
+    Nothing,
+    // Add a brand new use statement.
+    AddNewUse {
+        anchor: Option<SyntaxNode>, // anchor node
+        add_after_anchor: bool,
+    },
+
+    // To split an existing use statement, creating a nested import.
+    AddNestedImport {
+        // how many segments matched with the target path
+        common_segments: usize,
+        path_to_split: ast::Path,
+        // the first segment of path_to_split we want to add into the new nested list
+        first_segment_to_split: Option<ast::PathSegment>,
+        // Whether to add 'self' in addition to the target path
+        add_self: bool,
+    },
+    // To add the target path to an existing nested import tree list.
+    AddInTreeList {
+        common_segments: usize,
+        // The UseTreeList where the target path should be added
+        tree_list: ast::UseTreeList,
+        add_self: bool,
+    },
+}
+
+impl ImportAction {
+    fn add_new_use(anchor: Option<SyntaxNode>, add_after_anchor: bool) -> Self {
+        ImportAction::AddNewUse { anchor, add_after_anchor }
+    }
+
+    fn add_nested_import(
+        common_segments: usize,
+        path_to_split: ast::Path,
+        first_segment_to_split: Option<ast::PathSegment>,
+        add_self: bool,
+    ) -> Self {
+        ImportAction::AddNestedImport {
+            common_segments,
+            path_to_split,
+            first_segment_to_split,
+            add_self,
+        }
+    }
+
+    fn add_in_tree_list(
+        common_segments: usize,
+        tree_list: ast::UseTreeList,
+        add_self: bool,
+    ) -> Self {
+        ImportAction::AddInTreeList { common_segments, tree_list, add_self }
+    }
+
+    fn better(left: ImportAction, right: ImportAction) -> ImportAction {
+        if left.is_better(&right) {
+            left
+        } else {
+            right
+        }
+    }
+
+    fn is_better(&self, other: &ImportAction) -> bool {
+        match (self, other) {
+            (ImportAction::Nothing, _) => true,
+            (ImportAction::AddInTreeList { .. }, ImportAction::Nothing) => false,
+            (
+                ImportAction::AddNestedImport { common_segments: n, .. },
+                ImportAction::AddInTreeList { common_segments: m, .. },
+            )
+            | (
+                ImportAction::AddInTreeList { common_segments: n, .. },
+                ImportAction::AddNestedImport { common_segments: m, .. },
+            )
+            | (
+                ImportAction::AddInTreeList { common_segments: n, .. },
+                ImportAction::AddInTreeList { common_segments: m, .. },
+            )
+            | (
+                ImportAction::AddNestedImport { common_segments: n, .. },
+                ImportAction::AddNestedImport { common_segments: m, .. },
+            ) => n > m,
+            (ImportAction::AddInTreeList { .. }, _) => true,
+            (ImportAction::AddNestedImport { .. }, ImportAction::Nothing) => false,
+            (ImportAction::AddNestedImport { .. }, _) => true,
+            (ImportAction::AddNewUse { .. }, _) => false,
+        }
+    }
+}
+
+// Find out the best ImportAction to import the target path against current_use_tree.
+// If current_use_tree has a nested import, the function gets called recursively on every UseTree inside its UseTreeList.
+fn walk_use_tree_for_best_action(
+    current_path_segments: &mut Vec<ast::PathSegment>, // buffer containing path segments
+    current_parent_use_tree_list: Option<ast::UseTreeList>, // will be Some value if we are in a nested import
+    current_use_tree: ast::UseTree, // the use tree we are currently examining
+    target: &[SmolStr],             // the path we want to import
+) -> ImportAction {
+    // We save the number of segments in the buffer so we can restore the correct segments
+    // before returning. Recursive calls add segments, so we need to delete them.
+    let prev_len = current_path_segments.len();
+
+    let tree_list = current_use_tree.use_tree_list();
+    let alias = current_use_tree.alias();
+
+    let path = match current_use_tree.path() {
+        Some(path) => path,
+        None => {
+            // If the use item doesn't have a path, it means it's broken (syntax error)
+            return ImportAction::add_new_use(
+                current_use_tree
+                    .syntax()
+                    .ancestors()
+                    .find_map(ast::UseItem::cast)
+                    .map(|it| it.syntax().clone()),
+                true,
+            );
+        }
+    };
+
+    // This can happen only if current_use_tree is a direct child of a UseItem
+    if let Some(name) = alias.and_then(|it| it.name()) {
+        if compare_path_segment_with_name(&target[0], &name) {
+            return ImportAction::Nothing;
+        }
+    }
+
+    collect_path_segments_raw(current_path_segments, path.clone());
+
+    // We compare only the new segments added in the line just above.
+    // The first prev_len segments were already compared in 'parent' recursive calls.
+    let left = target.split_at(prev_len).1;
+    let right = current_path_segments.split_at(prev_len).1;
+    let common = compare_path_segments(left, &right);
+    let mut action = match common {
+        0 => ImportAction::add_new_use(
+            // e.g.: target is std::fmt and we can have
+            // use foo::bar
+            // We add a brand new use statement
+            current_use_tree
+                .syntax()
+                .ancestors()
+                .find_map(ast::UseItem::cast)
+                .map(|it| it.syntax().clone()),
+            true,
+        ),
+        common if common == left.len() && left.len() == right.len() => {
+            // e.g.: target is std::fmt and we can have
+            // 1- use std::fmt;
+            // 2- use std::fmt::{ ... }
+            if let Some(list) = tree_list {
+                // In case 2 we need to add self to the nested list
+                // unless it's already there
+                let has_self = list.use_trees().map(|it| it.path()).any(|p| {
+                    p.and_then(|it| it.segment())
+                        .and_then(|it| it.kind())
+                        .filter(|k| *k == ast::PathSegmentKind::SelfKw)
+                        .is_some()
+                });
+
+                if has_self {
+                    ImportAction::Nothing
+                } else {
+                    ImportAction::add_in_tree_list(current_path_segments.len(), list, true)
+                }
+            } else {
+                // Case 1
+                ImportAction::Nothing
+            }
+        }
+        common if common != left.len() && left.len() == right.len() => {
+            // e.g.: target is std::fmt and we have
+            // use std::io;
+            // We need to split.
+            let segments_to_split = current_path_segments.split_at(prev_len + common).1;
+            ImportAction::add_nested_import(
+                prev_len + common,
+                path,
+                Some(segments_to_split[0].clone()),
+                false,
+            )
+        }
+        common if common == right.len() && left.len() > right.len() => {
+            // e.g.: target is std::fmt and we can have
+            // 1- use std;
+            // 2- use std::{ ... };
+
+            // fallback action
+            let mut better_action = ImportAction::add_new_use(
+                current_use_tree
+                    .syntax()
+                    .ancestors()
+                    .find_map(ast::UseItem::cast)
+                    .map(|it| it.syntax().clone()),
+                true,
+            );
+            if let Some(list) = tree_list {
+                // Case 2, check recursively if the path is already imported in the nested list
+                for u in list.use_trees() {
+                    let child_action = walk_use_tree_for_best_action(
+                        current_path_segments,
+                        Some(list.clone()),
+                        u,
+                        target,
+                    );
+                    if child_action.is_better(&better_action) {
+                        better_action = child_action;
+                        if let ImportAction::Nothing = better_action {
+                            return better_action;
+                        }
+                    }
+                }
+            } else {
+                // Case 1, split adding self
+                better_action = ImportAction::add_nested_import(prev_len + common, path, None, true)
+            }
+            better_action
+        }
+        common if common == left.len() && left.len() < right.len() => {
+            // e.g.: target is std::fmt and we can have
+            // use std::fmt::Debug;
+            let segments_to_split = current_path_segments.split_at(prev_len + common).1;
+            ImportAction::add_nested_import(
+                prev_len + common,
+                path,
+                Some(segments_to_split[0].clone()),
+                true,
+            )
+        }
+        common if common < left.len() && common < right.len() => {
+            // e.g.: target is std::fmt::nested::Debug
+            // use std::fmt::Display
+            let segments_to_split = current_path_segments.split_at(prev_len + common).1;
+            ImportAction::add_nested_import(
+                prev_len + common,
+                path,
+                Some(segments_to_split[0].clone()),
+                false,
+            )
+        }
+        _ => unreachable!(),
+    };
+
+    // If we are inside a UseTreeList, adding a use statement becomes adding to the existing
+    // tree list.
+    action = match (current_parent_use_tree_list, action.clone()) {
+        (Some(use_tree_list), ImportAction::AddNewUse { .. }) => {
+            ImportAction::add_in_tree_list(prev_len, use_tree_list, false)
+        }
+        (_, _) => action,
+    };
+
+    // We remove the segments added
+    current_path_segments.truncate(prev_len);
+    action
+}
+
+fn best_action_for_target(
+    container: SyntaxNode,
+    anchor: SyntaxNode,
+    target: &[SmolStr],
+) -> ImportAction {
+    let mut storage = Vec::with_capacity(16); // this should be the only allocation
+    let best_action = container
+        .children()
+        .filter_map(ast::UseItem::cast)
+        .filter_map(|it| it.use_tree())
+        .map(|u| walk_use_tree_for_best_action(&mut storage, None, u, target))
+        .fold(None, |best, a| match best {
+            Some(best) => Some(ImportAction::better(best, a)),
+            None => Some(a),
+        });
+
+    match best_action {
+        Some(action) => action,
+        None => {
+            // We have no action and no UseItem was found in the container, so we find
+            // another item and use it as the anchor.
+            // If there are no items above, we choose the target path itself as anchor.
+            // todo: we should include even whitespace blocks as anchor candidates
+            let anchor = container.children().next().or_else(|| Some(anchor));
+
+            let add_after_anchor = anchor
+                .clone()
+                .and_then(ast::Attr::cast)
+                .map(|attr| attr.kind() == ast::AttrKind::Inner)
+                .unwrap_or(false);
+            ImportAction::add_new_use(anchor, add_after_anchor)
+        }
+    }
+}
+
+fn make_assist(action: &ImportAction, target: &[SmolStr], edit: &mut TextEditBuilder) {
+    match action {
+        ImportAction::AddNewUse { anchor, add_after_anchor } => {
+            make_assist_add_new_use(anchor, *add_after_anchor, target, edit)
+        }
+        ImportAction::AddInTreeList { common_segments, tree_list, add_self } => {
+            // We know that the first n segments already exist in the use statement we want
+            // to modify, so we want to add only the last target.len() - n segments.
+            let segments_to_add = target.split_at(*common_segments).1;
+            make_assist_add_in_tree_list(tree_list, segments_to_add, *add_self, edit)
+        }
+        ImportAction::AddNestedImport {
+            common_segments,
+            path_to_split,
+            first_segment_to_split,
+            add_self,
+        } => {
+            let segments_to_add = target.split_at(*common_segments).1;
+            make_assist_add_nested_import(
+                path_to_split,
+                first_segment_to_split,
+                segments_to_add,
+                *add_self,
+                edit,
+            )
+        }
+        _ => {}
+    }
+}
+
+fn make_assist_add_new_use(
+    anchor: &Option<SyntaxNode>,
+    after: bool,
+    target: &[SmolStr],
+    edit: &mut TextEditBuilder,
+) {
+    if let Some(anchor) = anchor {
+        let indent = ra_fmt::leading_indent(anchor);
+        let mut buf = String::new();
+        if after {
+            buf.push_str("\n");
+            if let Some(spaces) = &indent {
+                buf.push_str(spaces);
+            }
+        }
+        buf.push_str("use ");
+        fmt_segments_raw(target, &mut buf);
+        buf.push_str(";");
+        if !after {
+            buf.push_str("\n\n");
+            if let Some(spaces) = &indent {
+                buf.push_str(&spaces);
+            }
+        }
+        let position = if after { anchor.text_range().end() } else { anchor.text_range().start() };
+        edit.insert(position, buf);
+    }
+}
+
+fn make_assist_add_in_tree_list(
+    tree_list: &ast::UseTreeList,
+    target: &[SmolStr],
+    add_self: bool,
+    edit: &mut TextEditBuilder,
+) {
+    let last = tree_list.use_trees().last();
+    if let Some(last) = last {
+        let mut buf = String::new();
+        let comma = last.syntax().siblings(Direction::Next).find(|n| n.kind() == T![,]);
+        let offset = if let Some(comma) = comma {
+            comma.text_range().end()
+        } else {
+            buf.push_str(",");
+            last.syntax().text_range().end()
+        };
+        if add_self {
+            buf.push_str(" self")
+        } else {
+            buf.push_str(" ");
+        }
+        fmt_segments_raw(target, &mut buf);
+        edit.insert(offset, buf);
+    }
+}
+
+fn make_assist_add_nested_import(
+    path: &ast::Path,
+    first_segment_to_split: &Option<ast::PathSegment>,
+    target: &[SmolStr],
+    add_self: bool,
+    edit: &mut TextEditBuilder,
+) {
+    let use_tree = path.syntax().ancestors().find_map(ast::UseTree::cast);
+    if let Some(use_tree) = use_tree {
+        let (start, add_colon_colon) = if let Some(first_segment_to_split) = first_segment_to_split
+        {
+            (first_segment_to_split.syntax().text_range().start(), false)
+        } else {
+            (use_tree.syntax().text_range().end(), true)
+        };
+        let end = use_tree.syntax().text_range().end();
+
+        let mut buf = String::new();
+        if add_colon_colon {
+            buf.push_str("::");
+        }
+        buf.push_str("{");
+        if add_self {
+            buf.push_str("self, ");
+        }
+        fmt_segments_raw(target, &mut buf);
+        if !target.is_empty() {
+            buf.push_str(", ");
+        }
+        edit.insert(start, buf);
+        edit.insert(end, "}".to_string());
+    }
+}
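Taken together, `insert_use_statement` is the module's only public entry point: it picks one of the three `ImportAction`s and records the corresponding insertions on the edit builder. A caller inside an assist might wire it up roughly like this (a sketch; the helper name is invented, and `TextEditBuilder::default()`/`finish()` are assumed from `ra_text_edit`):

```
use ra_text_edit::{TextEdit, TextEditBuilder};

// Hypothetical helper: build the text edit that imports `path` at `position`.
fn import_edit(position: &SyntaxNode, path: &hir::ModPath) -> TextEdit {
    let mut builder = TextEditBuilder::default();
    insert_use_statement(position, path, &mut builder);
    builder.finish()
}
```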
index b09650d98d3aebbcfae9f9c182ce077ce57df35d..1fdbffea18cb949ab5542c490624a4969d4284e7 100644 (file)
@@ -6,13 +6,10 @@ authors = ["rust-analyzer developers"]
 
 [dependencies]
 crossbeam-channel = "0.4.0"
-lsp-types = { version = "0.70.1", features = ["proposed"] }
+lsp-types = { version = "0.71.0", features = ["proposed"] }
 log = "0.4.8"
 cargo_metadata = "0.9.1"
-jod-thread = "0.1.0"
-parking_lot = "0.10.0"
 serde_json = "1.0.48"
 
 [dev-dependencies]
 insta = "0.13.1"
-serde_json = "1.0.48"
index c16c17072b7d5bc3664b5fe435dfdd2ae02b7918..3aa7c487041b44c798ec8b0b15bc7c8d4345073d 100644 (file)
@@ -40,6 +40,7 @@ fn from(ty: $ty) -> $id {
     (hir_def::ConstId, crate::Const),
     (hir_def::FunctionId, crate::Function),
     (hir_def::ImplId, crate::ImplBlock),
+    (hir_def::TypeParamId, crate::TypeParam),
     (hir_expand::MacroDefId, crate::MacroDef)
 ];
 
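The visible macro arm (`fn from(ty: $ty) -> $id`) generates wrapper-to-id conversions, so the new pair makes code like the following compile (a sketch; `type_param` is an invented binding):

```
// crate::TypeParam -> hir_def::TypeParamId via the generated From impl
let raw_id: hir_def::TypeParamId = type_param.into();
```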
index 7a9745ebebc5619bec8ed894fd0c1909fe0c5698..3aa964fb664039b214484a6484330f7066c23bd4 100644 (file)
@@ -26,9 +26,10 @@ fn from(it: $sv) -> $e {
     }
 }
 
+mod semantics;
 pub mod db;
-pub mod source_analyzer;
-pub mod source_binder;
+mod source_analyzer;
+mod source_binder;
 
 pub mod diagnostics;
 
@@ -45,8 +46,8 @@ fn from(it: $sv) -> $e {
         StructField, Trait, Type, TypeAlias, TypeParam, Union, VariantDef,
     },
     has_source::HasSource,
-    source_analyzer::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
-    source_binder::SourceBinder,
+    semantics::{original_range, Semantics, SemanticsScope},
+    source_analyzer::PathResolution,
 };
 
 pub use hir_def::{
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
new file mode 100644 (file)
index 0000000..4a9cb7b
--- /dev/null
@@ -0,0 +1,405 @@
+//! See `Semantics`.
+
+use std::{cell::RefCell, fmt, iter::successors};
+
+use hir_def::{
+    resolver::{self, HasResolver, Resolver},
+    DefWithBodyId, TraitId,
+};
+use ra_db::{FileId, FileRange};
+use ra_syntax::{
+    algo::skip_trivia_token, ast, match_ast, AstNode, Direction, SyntaxNode, SyntaxToken,
+    TextRange, TextUnit,
+};
+use rustc_hash::{FxHashMap, FxHashSet};
+
+use crate::{
+    db::HirDatabase,
+    source_analyzer::{resolve_hir_path, ReferenceDescriptor, SourceAnalyzer},
+    source_binder::{ChildContainer, SourceBinder},
+    Function, HirFileId, InFile, Local, MacroDef, Module, ModuleDef, Name, Origin, Path,
+    PathResolution, ScopeDef, StructField, Trait, Type, TypeParam, VariantDef,
+};
+use hir_expand::ExpansionInfo;
+use ra_prof::profile;
+
+/// Primary API to get semantic information, like types, from syntax trees.
+pub struct Semantics<'db, DB> {
+    pub db: &'db DB,
+    sb: RefCell<SourceBinder>,
+    cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
+}
+
+impl<DB> fmt::Debug for Semantics<'_, DB> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "Semantics {{ ... }}")
+    }
+}
+
+impl<'db, DB: HirDatabase> Semantics<'db, DB> {
+    pub fn new(db: &DB) -> Semantics<DB> {
+        let sb = RefCell::new(SourceBinder::new());
+        Semantics { db, sb, cache: RefCell::default() }
+    }
+
+    pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
+        let tree = self.db.parse(file_id).tree();
+        self.cache(tree.syntax().clone(), file_id.into());
+        tree
+    }
+
+    pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
+        let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
+        let file_id = sa.expand(self.db, macro_call)?;
+        let node = self.db.parse_or_expand(file_id)?;
+        self.cache(node.clone(), file_id);
+        Some(node)
+    }
+
+    pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
+        let parent = token.parent();
+        let parent = self.find_file(parent);
+        let sa = self.analyze2(parent.as_ref(), None);
+
+        let token = successors(Some(parent.with_value(token)), |token| {
+            let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
+            let tt = macro_call.token_tree()?;
+            if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
+                return None;
+            }
+            let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
+            let token = file_id.expansion_info(self.db)?.map_token_down(token.as_ref())?;
+
+            self.cache(find_root(&token.value.parent()), token.file_id);
+
+            Some(token)
+        })
+        .last()
+        .unwrap();
+
+        token.value
+    }
+
+    pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
+        let node = self.find_file(node.clone());
+        original_range(self.db, node.as_ref())
+    }
+
+    pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
+        let node = self.find_file(node);
+        node.ancestors_with_macros(self.db).map(|it| it.value)
+    }
+
+    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
+        self.analyze(expr.syntax()).type_of(self.db, &expr)
+    }
+
+    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
+        self.analyze(pat.syntax()).type_of_pat(self.db, &pat)
+    }
+
+    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
+        self.analyze(call.syntax()).resolve_method_call(call)
+    }
+
+    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<StructField> {
+        self.analyze(field.syntax()).resolve_field(field)
+    }
+
+    pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<StructField> {
+        self.analyze(field.syntax()).resolve_record_field(field)
+    }
+
+    pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<VariantDef> {
+        self.analyze(record_lit.syntax()).resolve_record_literal(record_lit)
+    }
+
+    pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<VariantDef> {
+        self.analyze(record_pat.syntax()).resolve_record_pattern(record_pat)
+    }
+
+    pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
+        let sa = self.analyze(macro_call.syntax());
+        let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
+        sa.resolve_macro_call(self.db, macro_call)
+    }
+
+    pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+        self.analyze(path.syntax()).resolve_path(self.db, path)
+    }
+
+    pub fn resolve_bind_pat_to_const(&self, pat: &ast::BindPat) -> Option<ModuleDef> {
+        self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
+    }
+
+    // FIXME: use this instead?
+    // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
+
+    pub fn to_def<T: ToDef + Clone>(&self, src: &T) -> Option<T::Def> {
+        T::to_def(self, src)
+    }
+
+    pub fn to_module_def(&self, file: FileId) -> Option<Module> {
+        let mut sb = self.sb.borrow_mut();
+        sb.to_module_def(self.db, file)
+    }
+
+    pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db, DB> {
+        let node = self.find_file(node.clone());
+        let resolver = self.analyze2(node.as_ref(), None).resolver;
+        SemanticsScope { db: self.db, resolver }
+    }
+
+    pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextUnit) -> SemanticsScope<'db, DB> {
+        let node = self.find_file(node.clone());
+        let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver;
+        SemanticsScope { db: self.db, resolver }
+    }
+
+    pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db, DB> {
+        let resolver = def.id.resolver(self.db);
+        SemanticsScope { db: self.db, resolver }
+    }
+
+    // FIXME: we only use this in the `inline_local_variable` assist; ideally, we
+    // should switch to general reference search infra there.
+    pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
+        self.analyze(pat.syntax()).find_all_refs(pat)
+    }
+
+    fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
+        let src = self.find_file(node.clone());
+        self.analyze2(src.as_ref(), None)
+    }
+
+    fn analyze2(&self, src: InFile<&SyntaxNode>, offset: Option<TextUnit>) -> SourceAnalyzer {
+        let _p = profile("Semantics::analyze2");
+
+        let container = match self.sb.borrow_mut().find_container(self.db, src) {
+            Some(it) => it,
+            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
+        };
+
+        let resolver = match container {
+            ChildContainer::DefWithBodyId(def) => {
+                return SourceAnalyzer::new_for_body(self.db, def, src, offset)
+            }
+            ChildContainer::TraitId(it) => it.resolver(self.db),
+            ChildContainer::ImplId(it) => it.resolver(self.db),
+            ChildContainer::ModuleId(it) => it.resolver(self.db),
+            ChildContainer::EnumId(it) => it.resolver(self.db),
+            ChildContainer::VariantId(it) => it.resolver(self.db),
+            ChildContainer::GenericDefId(it) => it.resolver(self.db),
+        };
+        SourceAnalyzer::new_for_resolver(resolver, src)
+    }
+
+    fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
+        assert!(root_node.parent().is_none());
+        let mut cache = self.cache.borrow_mut();
+        let prev = cache.insert(root_node, file_id);
+        assert!(prev == None || prev == Some(file_id))
+    }
+
+    pub fn assert_contains_node(&self, node: &SyntaxNode) {
+        self.find_file(node.clone());
+    }
+
+    fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
+        let cache = self.cache.borrow();
+        cache.get(root_node).copied()
+    }
+
+    fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
+        let root_node = find_root(&node);
+        let file_id = self.lookup(&root_node).unwrap_or_else(|| {
+            panic!(
+                "\n\nFailed to lookup {:?} in this Semantics.\n\
+                 Make sure to use only query nodes, derived from this instance of Semantics.\n\
+                 root node:   {:?}\n\
+                 known nodes: {}\n\n",
+                node,
+                root_node,
+                self.cache
+                    .borrow()
+                    .keys()
+                    .map(|it| format!("{:?}", it))
+                    .collect::<Vec<_>>()
+                    .join(", ")
+            )
+        });
+        InFile::new(file_id, node)
+    }
+}
+
+pub trait ToDef: Sized + AstNode + 'static {
+    type Def;
+    fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: &Self) -> Option<Self::Def>;
+}
+
+macro_rules! to_def_impls {
+    ($(($def:path, $ast:path)),* ,) => {$(
+        impl ToDef for $ast {
+            type Def = $def;
+            fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: &Self)
+                -> Option<Self::Def>
+            {
+                let src = sema.find_file(src.syntax().clone()).with_value(src);
+                sema.sb.borrow_mut().to_id(sema.db, src.cloned()).map(Into::into)
+            }
+        }
+    )*}
+}
+
+to_def_impls![
+    (crate::Module, ast::Module),
+    (crate::Struct, ast::StructDef),
+    (crate::Enum, ast::EnumDef),
+    (crate::Union, ast::UnionDef),
+    (crate::Trait, ast::TraitDef),
+    (crate::ImplBlock, ast::ImplBlock),
+    (crate::TypeAlias, ast::TypeAliasDef),
+    (crate::Const, ast::ConstDef),
+    (crate::Static, ast::StaticDef),
+    (crate::Function, ast::FnDef),
+    (crate::StructField, ast::RecordFieldDef),
+    (crate::EnumVariant, ast::EnumVariant),
+    (crate::TypeParam, ast::TypeParam),
+    (crate::MacroDef, ast::MacroCall), // this one is dubious, not all calls are macros
+];
+
+impl ToDef for ast::BindPat {
+    type Def = Local;
+
+    fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: &Self) -> Option<Local> {
+        let src = sema.find_file(src.syntax().clone()).with_value(src);
+        let file_id = src.file_id;
+        let mut sb = sema.sb.borrow_mut();
+        let db = sema.db;
+        let parent: DefWithBodyId = src.value.syntax().ancestors().find_map(|it| {
+            let res = match_ast! {
+                match it {
+                    ast::ConstDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
+                    ast::StaticDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
+                    ast::FnDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
+                    _ => return None,
+                }
+            };
+            Some(res)
+        })?;
+        let (_body, source_map) = db.body_with_source_map(parent);
+        let src = src.cloned().map(ast::Pat::from);
+        let pat_id = source_map.node_pat(src.as_ref())?;
+        Some(Local { parent: parent.into(), pat_id })
+    }
+}
+
+fn find_root(node: &SyntaxNode) -> SyntaxNode {
+    node.ancestors().last().unwrap()
+}
+
+pub struct SemanticsScope<'a, DB> {
+    pub db: &'a DB,
+    resolver: Resolver,
+}
+
+impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> {
+    pub fn module(&self) -> Option<Module> {
+        Some(Module { id: self.resolver.module()? })
+    }
+
+    /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type`.
+    // FIXME: rename to visible_traits to not repeat scope?
+    pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
+        let resolver = &self.resolver;
+        resolver.traits_in_scope(self.db)
+    }
+
+    pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
+        let resolver = &self.resolver;
+
+        resolver.process_all_names(self.db, &mut |name, def| {
+            let def = match def {
+                resolver::ScopeDef::PerNs(it) => it.into(),
+                resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
+                resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
+                resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }),
+                resolver::ScopeDef::Local(pat_id) => {
+                    let parent = resolver.body_owner().unwrap().into();
+                    ScopeDef::Local(Local { parent, pat_id })
+                }
+            };
+            f(name, def)
+        })
+    }
+
+    pub fn resolve_hir_path(&self, path: &Path) -> Option<PathResolution> {
+        resolve_hir_path(self.db, &self.resolver, path)
+    }
+}
+
+// FIXME: Change `HasSource` trait to work with `Semantics` and remove this?
+pub fn original_range(db: &impl HirDatabase, node: InFile<&SyntaxNode>) -> FileRange {
+    if let Some(range) = original_range_opt(db, node) {
+        let original_file = range.file_id.original_file(db);
+        if range.file_id == original_file.into() {
+            return FileRange { file_id: original_file, range: range.value };
+        }
+
+        log::error!("Fail to mapping up more for {:?}", range);
+        return FileRange { file_id: range.file_id.original_file(db), range: range.value };
+    }
+
+    // Fall back to whole macro call
+    if let Some(expansion) = node.file_id.expansion_info(db) {
+        if let Some(call_node) = expansion.call_node() {
+            return FileRange {
+                file_id: call_node.file_id.original_file(db),
+                range: call_node.value.text_range(),
+            };
+        }
+    }
+
+    FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
+}
+
+fn original_range_opt(
+    db: &impl HirDatabase,
+    node: InFile<&SyntaxNode>,
+) -> Option<InFile<TextRange>> {
+    let expansion = node.file_id.expansion_info(db)?;
+
+    // Does the input node have only one token?
+    let single = skip_trivia_token(node.value.first_token()?, Direction::Next)?
+        == skip_trivia_token(node.value.last_token()?, Direction::Prev)?;
+
+    Some(node.value.descendants().find_map(|it| {
+        let first = skip_trivia_token(it.first_token()?, Direction::Next)?;
+        let first = ascend_call_token(db, &expansion, node.with_value(first))?;
+
+        let last = skip_trivia_token(it.last_token()?, Direction::Prev)?;
+        let last = ascend_call_token(db, &expansion, node.with_value(last))?;
+
+        if (!single && first == last) || (first.file_id != last.file_id) {
+            return None;
+        }
+
+        Some(first.with_value(first.value.text_range().extend_to(&last.value.text_range())))
+    })?)
+}
+
+fn ascend_call_token(
+    db: &impl HirDatabase,
+    expansion: &ExpansionInfo,
+    token: InFile<SyntaxToken>,
+) -> Option<InFile<SyntaxToken>> {
+    let (mapped, origin) = expansion.map_token_up(token.as_ref())?;
+    if origin != Origin::Call {
+        return None;
+    }
+    if let Some(info) = mapped.file_id.expansion_info(db) {
+        return ascend_call_token(db, &info, mapped);
+    }
+    Some(mapped)
+}
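As a usage sketch, an IDE feature built on `Semantics` first parses the file through it, so the root node lands in the cache that `find_file` consults, and then asks semantic questions about nodes of that tree. Everything below uses only methods defined above; the wrapper function itself is invented:

```
// Hypothetical caller: the type of the first expression in a file.
fn first_expr_type<DB: HirDatabase>(db: &DB, file_id: FileId) -> Option<Type> {
    let sema = Semantics::new(db);
    // Parse through `sema`, not `db`, so the root is registered in the cache;
    // querying a node from a foreign tree would hit the panic in `find_file`.
    let file = sema.parse(file_id);
    let expr = file.syntax().descendants().find_map(ast::Expr::cast)?;
    sema.type_of_expr(&expr)
}
```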
index efa3f8a79cb2535e2201d41e07c2025bcd68a47a..c650a9e08fb96bce2785d3009937c4994bf8ea04 100644 (file)
 use hir_def::{
     body::{
         scope::{ExprScopes, ScopeId},
-        BodySourceMap,
+        Body, BodySourceMap,
     },
-    expr::{ExprId, PatId},
-    resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs},
-    AsMacroCall, DefWithBodyId, TraitId,
+    expr::{ExprId, Pat, PatId},
+    resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
+    AsMacroCall, DefWithBodyId,
 };
-use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile, MacroCallId};
+use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile};
 use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment};
 use ra_syntax::{
     ast::{self, AstNode},
-    AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
+    AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
 };
-use rustc_hash::FxHashSet;
 
 use crate::{
-    db::HirDatabase, Adt, Const, DefWithBody, EnumVariant, Function, Local, MacroDef, Name, Path,
-    ScopeDef, Static, Struct, Trait, Type, TypeAlias, TypeParam,
+    db::HirDatabase, Adt, Const, EnumVariant, Function, Local, MacroDef, ModuleDef, Path, Static,
+    Struct, Trait, Type, TypeAlias, TypeParam,
 };
 
 /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
 /// original source files. It should not be used inside the HIR itself.
 #[derive(Debug)]
-pub struct SourceAnalyzer {
+pub(crate) struct SourceAnalyzer {
     file_id: HirFileId,
-    resolver: Resolver,
-    body_owner: Option<DefWithBody>,
+    pub(crate) resolver: Resolver,
+    body: Option<Arc<Body>>,
     body_source_map: Option<Arc<BodySourceMap>>,
     infer: Option<Arc<InferenceResult>>,
     scopes: Option<Arc<ExprScopes>>,
@@ -55,64 +54,20 @@ pub enum PathResolution {
     AssocItem(crate::AssocItem),
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct ScopeEntryWithSyntax {
-    pub(crate) name: Name,
-    pub(crate) ptr: Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>,
-}
-
-impl ScopeEntryWithSyntax {
-    pub fn name(&self) -> &Name {
-        &self.name
-    }
-
-    pub fn ptr(&self) -> Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>> {
-        self.ptr
-    }
-}
-
 #[derive(Debug)]
 pub struct ReferenceDescriptor {
     pub range: TextRange,
     pub name: String,
 }
 
-#[derive(Debug)]
-pub struct Expansion {
-    macro_call_id: MacroCallId,
-}
-
-impl Expansion {
-    pub fn map_token_down(
-        &self,
-        db: &impl HirDatabase,
-        token: InFile<&SyntaxToken>,
-    ) -> Option<InFile<SyntaxToken>> {
-        let exp_info = self.file_id().expansion_info(db)?;
-        exp_info.map_token_down(token)
-    }
-
-    pub fn file_id(&self) -> HirFileId {
-        self.macro_call_id.as_file()
-    }
-}
-
 impl SourceAnalyzer {
-    pub fn new(
-        db: &impl HirDatabase,
-        node: InFile<&SyntaxNode>,
-        offset: Option<TextUnit>,
-    ) -> SourceAnalyzer {
-        crate::source_binder::SourceBinder::new(db).analyze(node, offset)
-    }
-
     pub(crate) fn new_for_body(
         db: &impl HirDatabase,
         def: DefWithBodyId,
         node: InFile<&SyntaxNode>,
         offset: Option<TextUnit>,
     ) -> SourceAnalyzer {
-        let (_body, source_map) = db.body_with_source_map(def);
+        let (body, source_map) = db.body_with_source_map(def);
         let scopes = db.expr_scopes(def);
         let scope = match offset {
             None => scope_for(&scopes, &source_map, node),
@@ -121,7 +76,7 @@ pub(crate) fn new_for_body(
         let resolver = resolver_for_scope(db, def, scope);
         SourceAnalyzer {
             resolver,
-            body_owner: Some(def.into()),
+            body: Some(body),
             body_source_map: Some(source_map),
             infer: Some(db.infer(def)),
             scopes: Some(scopes),
@@ -135,7 +90,7 @@ pub(crate) fn new_for_resolver(
     ) -> SourceAnalyzer {
         SourceAnalyzer {
             resolver,
-            body_owner: None,
+            body: None,
             body_source_map: None,
             infer: None,
             scopes: None,
@@ -143,10 +98,6 @@ pub(crate) fn new_for_resolver(
         }
     }
 
-    pub fn module(&self) -> Option<crate::code_model::Module> {
-        Some(crate::code_model::Module { id: self.resolver.module()? })
-    }
-
     fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> {
         let src = InFile { file_id: self.file_id, value: expr };
         self.body_source_map.as_ref()?.node_expr(src)
@@ -180,7 +131,7 @@ fn trait_env(&self, db: &impl HirDatabase) -> Arc<TraitEnvironment> {
         TraitEnvironment::lower(db, &self.resolver)
     }
 
-    pub fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> {
+    pub(crate) fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> {
         let expr_id = if let Some(expr) = self.expand_expr(db, InFile::new(self.file_id, expr)) {
             self.body_source_map.as_ref()?.node_expr(expr.as_ref())?
         } else {
@@ -192,24 +143,27 @@ pub fn type_of(&self, db: &impl HirDatabase, expr: &ast::Expr) -> Option<Type> {
         Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } })
     }
 
-    pub fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> {
+    pub(crate) fn type_of_pat(&self, db: &impl HirDatabase, pat: &ast::Pat) -> Option<Type> {
         let pat_id = self.pat_id(pat)?;
         let ty = self.infer.as_ref()?[pat_id].clone();
         let environment = self.trait_env(db);
         Some(Type { krate: self.resolver.krate()?, ty: InEnvironment { value: ty, environment } })
     }
 
-    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
+    pub(crate) fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
         let expr_id = self.expr_id(&call.clone().into())?;
         self.infer.as_ref()?.method_resolution(expr_id).map(Function::from)
     }
 
-    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
+    pub(crate) fn resolve_field(&self, field: &ast::FieldExpr) -> Option<crate::StructField> {
         let expr_id = self.expr_id(&field.clone().into())?;
         self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
     }
 
-    pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<crate::StructField> {
+    pub(crate) fn resolve_record_field(
+        &self,
+        field: &ast::RecordField,
+    ) -> Option<crate::StructField> {
         let expr_id = match field.expr() {
             Some(it) => self.expr_id(&it)?,
             None => {
@@ -220,17 +174,23 @@ pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<crate::St
         self.infer.as_ref()?.record_field_resolution(expr_id).map(|it| it.into())
     }
 
-    pub fn resolve_record_literal(&self, record_lit: &ast::RecordLit) -> Option<crate::VariantDef> {
+    pub(crate) fn resolve_record_literal(
+        &self,
+        record_lit: &ast::RecordLit,
+    ) -> Option<crate::VariantDef> {
         let expr_id = self.expr_id(&record_lit.clone().into())?;
         self.infer.as_ref()?.variant_resolution_for_expr(expr_id).map(|it| it.into())
     }
 
-    pub fn resolve_record_pattern(&self, record_pat: &ast::RecordPat) -> Option<crate::VariantDef> {
+    pub(crate) fn resolve_record_pattern(
+        &self,
+        record_pat: &ast::RecordPat,
+    ) -> Option<crate::VariantDef> {
         let pat_id = self.pat_id(&record_pat.clone().into())?;
         self.infer.as_ref()?.variant_resolution_for_pat(pat_id).map(|it| it.into())
     }
 
-    pub fn resolve_macro_call(
+    pub(crate) fn resolve_macro_call(
         &self,
         db: &impl HirDatabase,
         macro_call: InFile<&ast::MacroCall>,
@@ -240,52 +200,29 @@ pub fn resolve_macro_call(
         self.resolver.resolve_path_as_macro(db, path.mod_path()).map(|it| it.into())
     }
 
-    pub fn resolve_hir_path(
+    pub(crate) fn resolve_bind_pat_to_const(
         &self,
         db: &impl HirDatabase,
-        path: &crate::Path,
-    ) -> Option<PathResolution> {
-        let types =
-            self.resolver.resolve_path_in_type_ns_fully(db, path.mod_path()).map(|ty| match ty {
-                TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
-                TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }),
-                TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
-                    PathResolution::Def(Adt::from(it).into())
-                }
-                TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
-                TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
-                TypeNs::BuiltinType(it) => PathResolution::Def(it.into()),
-                TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
-            });
-        let values =
-            self.resolver.resolve_path_in_value_ns_fully(db, path.mod_path()).and_then(|val| {
-                let res = match val {
-                    ValueNs::LocalBinding(pat_id) => {
-                        let var = Local { parent: self.body_owner?, pat_id };
-                        PathResolution::Local(var)
-                    }
-                    ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
-                    ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
-                    ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
-                    ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
-                    ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
-                };
-                Some(res)
-            });
-
-        let items = self
-            .resolver
-            .resolve_module_path_in_items(db, path.mod_path())
-            .take_types()
-            .map(|it| PathResolution::Def(it.into()));
-        types.or(values).or(items).or_else(|| {
-            self.resolver
-                .resolve_path_as_macro(db, path.mod_path())
-                .map(|def| PathResolution::Macro(def.into()))
-        })
+        pat: &ast::BindPat,
+    ) -> Option<ModuleDef> {
+        let pat_id = self.pat_id(&pat.clone().into())?;
+        let body = self.body.as_ref()?;
+        let path = match &body[pat_id] {
+            Pat::Path(path) => path,
+            _ => return None,
+        };
+        let res = resolve_hir_path(db, &self.resolver, &path)?;
+        match res {
+            PathResolution::Def(def) => Some(def),
+            _ => None,
+        }
     }
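
The new `resolve_bind_pat_to_const` handles a pattern-matching subtlety: a lone path in a pattern that resolves to a constant compares against that constant instead of introducing a binding. A plain-Rust illustration of the language behavior (none of the HIR types above are needed for it):

```
const ZERO: i32 = 0;

fn describe(n: i32) -> &'static str {
    match n {
        ZERO => "zero",    // resolves to the const: a comparison, not a binding
        _other => "other", // lowercase name: a fresh binding
    }
}

fn main() {
    assert_eq!(describe(0), "zero");
    assert_eq!(describe(5), "other");
}
```
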
 
-    pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<PathResolution> {
+    pub(crate) fn resolve_path(
+        &self,
+        db: &impl HirDatabase,
+        path: &ast::Path,
+    ) -> Option<PathResolution> {
         if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) {
             let expr_id = self.expr_id(&path_expr.into())?;
             if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) {
@@ -300,40 +237,24 @@ pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option<Pa
         }
         // This must be a normal source file rather than a macro file.
         let hir_path = crate::Path::from_ast(path.clone())?;
-        self.resolve_hir_path(db, &hir_path)
+        resolve_hir_path(db, &self.resolver, &hir_path)
     }
 
-    fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
+    fn resolve_local_name(
+        &self,
+        name_ref: &ast::NameRef,
+    ) -> Option<Either<AstPtr<ast::Pat>, AstPtr<ast::SelfParam>>> {
         let name = name_ref.as_name();
         let source_map = self.body_source_map.as_ref()?;
         let scopes = self.scopes.as_ref()?;
         let scope = scope_for(scopes, source_map, InFile::new(self.file_id, name_ref.syntax()))?;
         let entry = scopes.resolve_name_in_scope(scope, &name)?;
-        Some(ScopeEntryWithSyntax {
-            name: entry.name().clone(),
-            ptr: source_map.pat_syntax(entry.pat())?.value,
-        })
-    }
-
-    pub fn process_all_names(&self, db: &impl HirDatabase, f: &mut dyn FnMut(Name, ScopeDef)) {
-        self.resolver.process_all_names(db, &mut |name, def| {
-            let def = match def {
-                resolver::ScopeDef::PerNs(it) => it.into(),
-                resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
-                resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
-                resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(TypeParam { id }),
-                resolver::ScopeDef::Local(pat_id) => {
-                    let parent = self.resolver.body_owner().unwrap().into();
-                    ScopeDef::Local(Local { parent, pat_id })
-                }
-            };
-            f(name, def)
-        })
+        Some(source_map.pat_syntax(entry.pat())?.value)
     }
 
     // FIXME: we only use this in the `inline_local_variable` assist; ideally, we
     // should switch to general reference search infra there.
-    pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
+    pub(crate) fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
         let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
         let ptr = Either::Left(AstPtr::new(&ast::Pat::from(pat.clone())));
         fn_def
@@ -342,7 +263,7 @@ pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
             .filter_map(ast::NameRef::cast)
             .filter(|name_ref| match self.resolve_local_name(&name_ref) {
                 None => false,
-                Some(entry) => entry.ptr() == ptr,
+                Some(d_ptr) => d_ptr == ptr,
             })
             .map(|name_ref| ReferenceDescriptor {
                 name: name_ref.text().to_string(),
@@ -351,19 +272,14 @@ pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
             .collect()
     }
 
-    /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
-    pub fn traits_in_scope(&self, db: &impl HirDatabase) -> FxHashSet<TraitId> {
-        self.resolver.traits_in_scope(db)
-    }
-
-    pub fn expand(
+    pub(crate) fn expand(
         &self,
         db: &impl HirDatabase,
         macro_call: InFile<&ast::MacroCall>,
-    ) -> Option<Expansion> {
+    ) -> Option<HirFileId> {
         let macro_call_id =
             macro_call.as_call_id(db, |path| self.resolver.resolve_path_as_macro(db, &path))?;
-        Some(Expansion { macro_call_id })
+        Some(macro_call_id.as_file())
     }
 }
 
@@ -409,6 +325,47 @@ fn scope_for_offset(
         })
 }
 
+pub(crate) fn resolve_hir_path(
+    db: &impl HirDatabase,
+    resolver: &Resolver,
+    path: &crate::Path,
+) -> Option<PathResolution> {
+    let types = resolver.resolve_path_in_type_ns_fully(db, path.mod_path()).map(|ty| match ty {
+        TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
+        TypeNs::GenericParam(id) => PathResolution::TypeParam(TypeParam { id }),
+        TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => PathResolution::Def(Adt::from(it).into()),
+        TypeNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
+        TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
+        TypeNs::BuiltinType(it) => PathResolution::Def(it.into()),
+        TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+    });
+    let body_owner = resolver.body_owner();
+    let values = resolver.resolve_path_in_value_ns_fully(db, path.mod_path()).and_then(|val| {
+        let res = match val {
+            ValueNs::LocalBinding(pat_id) => {
+                let var = Local { parent: body_owner?.into(), pat_id };
+                PathResolution::Local(var)
+            }
+            ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
+            ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
+            ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
+            ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
+            ValueNs::EnumVariantId(it) => PathResolution::Def(EnumVariant::from(it).into()),
+        };
+        Some(res)
+    });
+
+    let items = resolver
+        .resolve_module_path_in_items(db, path.mod_path())
+        .take_types()
+        .map(|it| PathResolution::Def(it.into()));
+    types.or(values).or(items).or_else(|| {
+        resolver
+            .resolve_path_as_macro(db, path.mod_path())
+            .map(|def| PathResolution::Macro(def.into()))
+    })
+}
+
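
`resolve_hir_path` tries the type namespace, then the value namespace, then module items, then macros. The fallback order matters because Rust keeps separate namespaces, so one name can denote different items; a small self-contained example:

```
struct S {}       // `S` in the type namespace (a braced struct adds no value item)
const S: i32 = 1; // an unrelated `S` in the value namespace

fn main() {
    let s: S = S {}; // the annotation resolves `S` in the type namespace
    let n: i32 = S;  // the expression resolves `S` in the value namespace
    assert_eq!(n, 1);
    let _ = s;
}
```
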
 // XXX: during completion, cursor might be outside of any particular
 // expression. Try to figure out the correct scope...
 fn adjust(
index f3150f5784a5662de5e3e324b866652c8144f29c..4353e25ac847057ad0c132b86ef671d9bf1e2d8d 100644 (file)
     child_by_source::ChildBySource,
     dyn_map::DynMap,
     keys::{self, Key},
-    resolver::{HasResolver, Resolver},
     ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, GenericDefId, ImplId, ModuleId,
-    StaticId, StructFieldId, StructId, TraitId, TypeAliasId, UnionId, VariantId,
+    StaticId, StructFieldId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, VariantId,
 };
 use hir_expand::{name::AsName, AstId, InFile, MacroDefId, MacroDefKind};
+use ra_db::FileId;
 use ra_prof::profile;
 use ra_syntax::{
     ast::{self, NameOwner},
-    match_ast, AstNode, SyntaxNode, TextUnit,
+    match_ast, AstNode, SyntaxNode,
 };
 use rustc_hash::FxHashMap;
 
-use crate::{db::HirDatabase, Local, Module, SourceAnalyzer, TypeParam};
-use ra_db::FileId;
+use crate::{db::HirDatabase, Module};
 
-pub struct SourceBinder<'a, DB> {
-    pub db: &'a DB,
+pub(crate) struct SourceBinder {
     child_by_source_cache: FxHashMap<ChildContainer, DynMap>,
 }
 
-impl<DB: HirDatabase> SourceBinder<'_, DB> {
-    pub fn new(db: &DB) -> SourceBinder<DB> {
-        SourceBinder { db, child_by_source_cache: FxHashMap::default() }
-    }
-
-    pub fn analyze(
-        &mut self,
-        src: InFile<&SyntaxNode>,
-        offset: Option<TextUnit>,
-    ) -> SourceAnalyzer {
-        let _p = profile("SourceBinder::analyzer");
-        let container = match self.find_container(src) {
-            Some(it) => it,
-            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
-        };
-
-        let resolver = match container {
-            ChildContainer::DefWithBodyId(def) => {
-                return SourceAnalyzer::new_for_body(self.db, def, src, offset)
-            }
-            ChildContainer::TraitId(it) => it.resolver(self.db),
-            ChildContainer::ImplId(it) => it.resolver(self.db),
-            ChildContainer::ModuleId(it) => it.resolver(self.db),
-            ChildContainer::EnumId(it) => it.resolver(self.db),
-            ChildContainer::VariantId(it) => it.resolver(self.db),
-            ChildContainer::GenericDefId(it) => it.resolver(self.db),
-        };
-        SourceAnalyzer::new_for_resolver(resolver, src)
-    }
-
-    pub fn to_def<T: ToDef>(&mut self, src: InFile<T>) -> Option<T::Def> {
-        T::to_def(self, src)
+impl SourceBinder {
+    pub(crate) fn new() -> SourceBinder {
+        SourceBinder { child_by_source_cache: FxHashMap::default() }
     }
 
-    pub fn to_module_def(&mut self, file: FileId) -> Option<Module> {
+    pub(crate) fn to_module_def(&mut self, db: &impl HirDatabase, file: FileId) -> Option<Module> {
         let _p = profile("SourceBinder::to_module_def");
-        let (krate, local_id) = self.db.relevant_crates(file).iter().find_map(|&crate_id| {
-            let crate_def_map = self.db.crate_def_map(crate_id);
+        let (krate, local_id) = db.relevant_crates(file).iter().find_map(|&crate_id| {
+            let crate_def_map = db.crate_def_map(crate_id);
             let local_id = crate_def_map.modules_for_file(file).next()?;
             Some((crate_id, local_id))
         })?;
         Some(Module { id: ModuleId { krate, local_id } })
     }
 
-    fn to_id<T: ToId>(&mut self, src: InFile<T>) -> Option<T::ID> {
-        T::to_id(self, src)
+    pub(crate) fn to_id<T: ToId>(
+        &mut self,
+        db: &impl HirDatabase,
+        src: InFile<T>,
+    ) -> Option<T::ID> {
+        T::to_id(db, self, src)
     }
 
-    fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> {
-        for container in src.cloned().ancestors_with_macros(self.db).skip(1) {
+    pub(crate) fn find_container(
+        &mut self,
+        db: &impl HirDatabase,
+        src: InFile<&SyntaxNode>,
+    ) -> Option<ChildContainer> {
+        for container in src.cloned().ancestors_with_macros(db).skip(1) {
             let res: ChildContainer = match_ast! {
                 match (container.value) {
                     ast::TraitDef(it) => {
-                        let def: TraitId = self.to_id(container.with_value(it))?;
+                        let def: TraitId = self.to_id(db, container.with_value(it))?;
                         def.into()
                     },
                     ast::ImplBlock(it) => {
-                        let def: ImplId = self.to_id(container.with_value(it))?;
+                        let def: ImplId = self.to_id(db, container.with_value(it))?;
                         def.into()
                     },
                     ast::FnDef(it) => {
-                        let def: FunctionId = self.to_id(container.with_value(it))?;
+                        let def: FunctionId = self.to_id(db, container.with_value(it))?;
                         DefWithBodyId::from(def).into()
                     },
                     ast::StaticDef(it) => {
-                        let def: StaticId = self.to_id(container.with_value(it))?;
+                        let def: StaticId = self.to_id(db, container.with_value(it))?;
                         DefWithBodyId::from(def).into()
                     },
                     ast::ConstDef(it) => {
-                        let def: ConstId = self.to_id(container.with_value(it))?;
+                        let def: ConstId = self.to_id(db, container.with_value(it))?;
                         DefWithBodyId::from(def).into()
                     },
                     ast::EnumDef(it) => {
-                        let def: EnumId = self.to_id(container.with_value(it))?;
+                        let def: EnumId = self.to_id(db, container.with_value(it))?;
                         def.into()
                     },
                     ast::StructDef(it) => {
-                        let def: StructId = self.to_id(container.with_value(it))?;
+                        let def: StructId = self.to_id(db, container.with_value(it))?;
                         VariantId::from(def).into()
                     },
                     ast::UnionDef(it) => {
-                        let def: UnionId = self.to_id(container.with_value(it))?;
+                        let def: UnionId = self.to_id(db, container.with_value(it))?;
                         VariantId::from(def).into()
                     },
                     ast::Module(it) => {
-                        let def: ModuleId = self.to_id(container.with_value(it))?;
+                        let def: ModuleId = self.to_id(db, container.with_value(it))?;
                         def.into()
                     },
                     _ => { continue },
@@ -119,12 +96,11 @@ fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer>
             return Some(res);
         }
 
-        let c = self.to_module_def(src.file_id.original_file(self.db))?;
+        let c = self.to_module_def(db, src.file_id.original_file(db))?;
         Some(c.id.into())
     }
 
-    fn child_by_source(&mut self, container: ChildContainer) -> &DynMap {
-        let db = self.db;
+    fn child_by_source(&mut self, db: &impl HirDatabase, container: ChildContainer) -> &DynMap {
         self.child_by_source_cache.entry(container).or_insert_with(|| match container {
             ChildContainer::DefWithBodyId(it) => it.child_by_source(db),
             ChildContainer::ModuleId(it) => it.child_by_source(db),
@@ -137,49 +113,17 @@ fn child_by_source(&mut self, container: ChildContainer) -> &DynMap {
     }
 }
 
-pub trait ToId: Sized {
+pub(crate) trait ToId: Sized {
     type ID: Sized + Copy + 'static;
-    fn to_id<DB: HirDatabase>(sb: &mut SourceBinder<'_, DB>, src: InFile<Self>)
-        -> Option<Self::ID>;
-}
-
-pub trait ToDef: Sized + AstNode + 'static {
-    type Def;
-    fn to_def<DB: HirDatabase>(
-        sb: &mut SourceBinder<'_, DB>,
+    fn to_id<DB: HirDatabase>(
+        db: &DB,
+        sb: &mut SourceBinder,
         src: InFile<Self>,
-    ) -> Option<Self::Def>;
-}
-
-macro_rules! to_def_impls {
-    ($(($def:path, $ast:path)),* ,) => {$(
-        impl ToDef for $ast {
-            type Def = $def;
-            fn to_def<DB: HirDatabase>(sb: &mut SourceBinder<'_, DB>, src: InFile<Self>)
-                -> Option<Self::Def>
-            { sb.to_id(src).map(Into::into) }
-        }
-    )*}
+    ) -> Option<Self::ID>;
 }
 
-to_def_impls![
-    (crate::Module, ast::Module),
-    (crate::Struct, ast::StructDef),
-    (crate::Enum, ast::EnumDef),
-    (crate::Union, ast::UnionDef),
-    (crate::Trait, ast::TraitDef),
-    (crate::ImplBlock, ast::ImplBlock),
-    (crate::TypeAlias, ast::TypeAliasDef),
-    (crate::Const, ast::ConstDef),
-    (crate::Static, ast::StaticDef),
-    (crate::Function, ast::FnDef),
-    (crate::StructField, ast::RecordFieldDef),
-    (crate::EnumVariant, ast::EnumVariant),
-    (crate::MacroDef, ast::MacroCall), // this one is dubious, not all calls are macros
-];
-
 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
-enum ChildContainer {
+pub(crate) enum ChildContainer {
     DefWithBodyId(DefWithBodyId),
     ModuleId(ModuleId),
     TraitId(TraitId),
@@ -201,7 +145,7 @@ enum ChildContainer {
     GenericDefId
 }
 
-pub trait ToIdByKey: Sized + AstNode + 'static {
+pub(crate) trait ToIdByKey: Sized + AstNode + 'static {
     type ID: Sized + Copy + 'static;
     const KEY: Key<Self, Self::ID>;
 }
@@ -209,11 +153,11 @@ pub trait ToIdByKey: Sized + AstNode + 'static {
 impl<T: ToIdByKey> ToId for T {
     type ID = <T as ToIdByKey>::ID;
     fn to_id<DB: HirDatabase>(
-        sb: &mut SourceBinder<'_, DB>,
+        db: &DB,
+        sb: &mut SourceBinder,
         src: InFile<Self>,
     ) -> Option<Self::ID> {
-        let container = sb.find_container(src.as_ref().map(|it| it.syntax()))?;
-        let db = sb.db;
+        let container = sb.find_container(db, src.as_ref().map(|it| it.syntax()))?;
         let dyn_map =
             &*sb.child_by_source_cache.entry(container).or_insert_with(|| match container {
                 ChildContainer::DefWithBodyId(it) => it.child_by_source(db),
@@ -255,68 +199,44 @@ impl ToIdByKey for $ast {
 impl ToId for ast::MacroCall {
     type ID = MacroDefId;
     fn to_id<DB: HirDatabase>(
-        sb: &mut SourceBinder<'_, DB>,
+        db: &DB,
+        sb: &mut SourceBinder,
         src: InFile<Self>,
     ) -> Option<Self::ID> {
         let kind = MacroDefKind::Declarative;
 
-        let krate = sb.to_module_def(src.file_id.original_file(sb.db))?.id.krate;
+        let krate = sb.to_module_def(db, src.file_id.original_file(db))?.id.krate;
 
-        let ast_id =
-            Some(AstId::new(src.file_id, sb.db.ast_id_map(src.file_id).ast_id(&src.value)));
+        let ast_id = Some(AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.value)));
 
         Some(MacroDefId { krate: Some(krate), ast_id, kind })
     }
 }
 
-impl ToDef for ast::BindPat {
-    type Def = Local;
-
-    fn to_def<DB: HirDatabase>(sb: &mut SourceBinder<'_, DB>, src: InFile<Self>) -> Option<Local> {
-        let file_id = src.file_id;
-        let parent: DefWithBodyId = src.value.syntax().ancestors().find_map(|it| {
-            let res = match_ast! {
-                match it {
-                    ast::ConstDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
-                    ast::StaticDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
-                    ast::FnDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
-                    _ => return None,
-                }
-            };
-            Some(res)
-        })?;
-        let (_body, source_map) = sb.db.body_with_source_map(parent);
-        let src = src.map(ast::Pat::from);
-        let pat_id = source_map.node_pat(src.as_ref())?;
-        Some(Local { parent: parent.into(), pat_id })
-    }
-}
-
-impl ToDef for ast::TypeParam {
-    type Def = TypeParam;
+impl ToId for ast::TypeParam {
+    type ID = TypeParamId;
 
-    fn to_def<DB: HirDatabase>(
-        sb: &mut SourceBinder<'_, DB>,
-        src: InFile<ast::TypeParam>,
-    ) -> Option<TypeParam> {
-        let mut sb = SourceBinder::new(sb.db);
+    fn to_id<DB: HirDatabase>(
+        db: &DB,
+        sb: &mut SourceBinder,
+        src: InFile<Self>,
+    ) -> Option<Self::ID> {
         let file_id = src.file_id;
         let parent: GenericDefId = src.value.syntax().ancestors().find_map(|it| {
             let res = match_ast! {
                 match it {
-                    ast::FnDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
-                    ast::StructDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
-                    ast::EnumDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
-                    ast::TraitDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
-                    ast::TypeAliasDef(value) => { sb.to_id(InFile { value, file_id})?.into() },
-                    ast::ImplBlock(value) => { sb.to_id(InFile { value, file_id})?.into() },
+                    ast::FnDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
+                    ast::StructDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
+                    ast::EnumDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
+                    ast::TraitDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
+                    ast::TypeAliasDef(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
+                    ast::ImplBlock(value) => { sb.to_id(db, InFile { value, file_id})?.into() },
                     _ => return None,
                 }
             };
             Some(res)
         })?;
-        let &id = sb.child_by_source(parent.into())[keys::TYPE_PARAM].get(&src)?;
-        Some(TypeParam { id })
+        sb.child_by_source(db, parent.into())[keys::TYPE_PARAM].get(&src).copied()
     }
 }
 
@@ -324,7 +244,8 @@ impl ToId for ast::Module {
     type ID = ModuleId;
 
     fn to_id<DB: HirDatabase>(
-        sb: &mut SourceBinder<'_, DB>,
+        db: &DB,
+        sb: &mut SourceBinder,
         src: InFile<ast::Module>,
     ) -> Option<ModuleId> {
         {
@@ -333,7 +254,7 @@ fn to_id<DB: HirDatabase>(
                 .as_ref()
                 .map(|it| it.syntax())
                 .cloned()
-                .ancestors_with_macros(sb.db)
+                .ancestors_with_macros(db)
                 .skip(1)
                 .find_map(|it| {
                     let m = ast::Module::cast(it.value.clone())?;
@@ -341,15 +262,15 @@ fn to_id<DB: HirDatabase>(
                 });
 
             let parent_module = match parent_declaration {
-                Some(parent_declaration) => sb.to_id(parent_declaration)?,
+                Some(parent_declaration) => sb.to_id(db, parent_declaration)?,
                 None => {
-                    let file_id = src.file_id.original_file(sb.db);
-                    sb.to_module_def(file_id)?.id
+                    let file_id = src.file_id.original_file(db);
+                    sb.to_module_def(db, file_id)?.id
                 }
             };
 
             let child_name = src.value.name()?.as_name();
-            let def_map = sb.db.crate_def_map(parent_module.krate);
+            let def_map = db.crate_def_map(parent_module.krate);
             let child_id = *def_map[parent_module.local_id].children.get(&child_name)?;
             Some(ModuleId { krate: parent_module.krate, local_id: child_id })
         }
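
The tail of `ToId for ast::Module` resolves a child module purely by name in the parent's entry of the crate def map. A toy version of that lookup (the `ModuleData` shape here is an assumption for illustration, not the real `CrateDefMap` type):

```
use std::collections::HashMap;

// A def map indexed by local module id; each module lists its children by name.
#[derive(Default)]
struct ModuleData {
    children: HashMap<String, usize>,
}

fn child_module(def_map: &[ModuleData], parent: usize, name: &str) -> Option<usize> {
    def_map[parent].children.get(name).copied()
}

fn main() {
    let mut root = ModuleData::default();
    root.children.insert("foo".to_string(), 1);
    let def_map = vec![root, ModuleData::default()];
    assert_eq!(child_module(&def_map, 0, "foo"), Some(1));
}
```
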
index 5053d06886d1e1891e93f1b2fcdb650aa02691fa..fa25cc4fbb5efb6e0590e3a2fe7a3364c36cd876 100644 (file)
@@ -14,7 +14,6 @@ rustc-hash = "1.1.0"
 either = "1.5.3"
 anymap = "0.12.1"
 drop_bomb = "0.1.4"
-itertools = "0.8.2"
 
 ra_arena = { path = "../ra_arena" }
 ra_db = { path = "../ra_db" }
index 6e1d268dea48bdd09ec109305ea664dc5ac7fd28..569d46cc380e8e183b7268a261300c8aba48ec9b 100644 (file)
@@ -583,21 +583,52 @@ fn fallback_value(self) -> Ty {
 #[derive(Clone, PartialEq, Eq, Debug)]
 struct Expectation {
     ty: Ty,
-    // FIXME: In some cases, we need to be aware whether the expectation is that
-    // the type match exactly what we passed, or whether it just needs to be
-    // coercible to the expected type. See Expectation::rvalue_hint in rustc.
+    /// See the `rvalue_hint` method.
+    rvalue_hint: bool,
 }
 
 impl Expectation {
     /// The expectation that the type of the expression needs to equal the given
     /// type.
     fn has_type(ty: Ty) -> Self {
-        Expectation { ty }
+        Expectation { ty, rvalue_hint: false }
+    }
+
+    /// The following explanation is copied straight from rustc:
+    /// Provides an expectation for an rvalue expression given an *optional*
+    /// hint, which is not required for type safety (the resulting type might
+    /// be checked higher up, as is the case with `&expr` and `box expr`), but
+    /// is useful in determining the concrete type.
+    ///
+    /// The primary use case is where the expected type is a fat pointer,
+    /// like `&[isize]`. For example, consider the following statement:
+    ///
+    ///    let x: &[isize] = &[1, 2, 3];
+    ///
+    /// In this case, the expected type for the `&[1, 2, 3]` expression is
+    /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the
+    /// expectation `ExpectHasType([isize])`, that would be too strong --
+    /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`.
+    /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced
+    /// to the type `&[isize]`. Therefore, we propagate this more limited hint,
+    /// which still is useful, because it informs integer literals and the like.
+    /// See the test case `test/ui/coerce-expect-unsized.rs` and #20169
+    /// for examples of where this comes up.
+    fn rvalue_hint(ty: Ty) -> Self {
+        Expectation { ty, rvalue_hint: true }
     }
 
     /// This expresses no expectation on the type.
     fn none() -> Self {
-        Expectation { ty: Ty::Unknown }
+        Expectation { ty: Ty::Unknown, rvalue_hint: false }
+    }
+
+    fn coercion_target(&self) -> &Ty {
+        if self.rvalue_hint {
+            &Ty::Unknown
+        } else {
+            &self.ty
+        }
     }
 }
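
The `rvalue_hint` doc comment describes exactly the situation below, which compiles on stable Rust: only the reference expression as a whole is coerced to the unsized type, so the expectation for the inner expression must stay weak.

```
fn main() {
    // `[1, 2, 3]` has type `[isize; 3]`; only `&[1, 2, 3]` as a whole
    // coerces to the fat pointer `&[isize]`.
    let x: &[isize] = &[1, 2, 3];
    assert_eq!(x.len(), 3);

    // Expecting the unsized type on the array itself would be too strong:
    // let y: [isize] = [1, 2, 3]; // error: `[isize]` has no statically known size
}
```
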
 
index 9d5f756257e2018b8a7997bcc1aae30e16f1498c..3db5b2b5152786539821c877b4a817ee76d6b24a 100644 (file)
@@ -42,14 +42,14 @@ pub(super) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) ->
     /// Return the type after possible coercion.
     pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
         let ty = self.infer_expr_inner(expr, &expected);
-        let ty = if !self.coerce(&ty, &expected.ty) {
+        let ty = if !self.coerce(&ty, &expected.coercion_target()) {
             self.result
                 .type_mismatches
                 .insert(expr, TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() });
             // Return the actual type when there is a type mismatch;
             // this is needed for return-type mismatch diagnostics.
             ty
-        } else if expected.ty == Ty::Unknown {
+        } else if expected.coercion_target() == &Ty::Unknown {
             ty
         } else {
             expected.ty.clone()
@@ -297,7 +297,7 @@ fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
                             // FIXME: throw type error - expected mut reference but found shared ref,
                             // which cannot be coerced
                         }
-                        Expectation::has_type(Ty::clone(exp_inner))
+                        Expectation::rvalue_hint(Ty::clone(exp_inner))
                     } else {
                         Expectation::none()
                     };
@@ -542,7 +542,7 @@ fn infer_block(
         let ty = if let Some(expr) = tail {
             self.infer_expr_coerce(expr, expected)
         } else {
-            self.coerce(&Ty::unit(), &expected.ty);
+            self.coerce(&Ty::unit(), expected.coercion_target());
             Ty::unit()
         };
         if diverges {
index 240cc03a2c2eab61348c9af13ed2f03c67553ece..087edcc922b2875d3e6d64fa65b0cc4207c89ebe 100644 (file)
@@ -51,8 +51,8 @@ fn type_at(content: &str) -> String {
     type_at_pos(&db, file_pos)
 }
 
-fn infer(content: &str) -> String {
-    infer_with_mismatches(content, false)
+fn infer(ra_fixture: &str) -> String {
+    infer_with_mismatches(ra_fixture, false)
 }
 
 fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
index 60ad6e9be3ab728f34d02a351bd3ce09a88d57c4..1e303f5ce4be78853edd6eb36854449bba1692fe 100644 (file)
@@ -457,6 +457,37 @@ fn test() {
     );
 }
 
+#[test]
+fn coerce_autoderef_block() {
+    assert_snapshot!(
+        infer_with_mismatches(r#"
+struct String {}
+#[lang = "deref"]
+trait Deref { type Target; }
+impl Deref for String { type Target = str; }
+fn takes_ref_str(x: &str) {}
+fn returns_string() -> String { loop {} }
+fn test() {
+    takes_ref_str(&{ returns_string() });
+}
+"#, true),
+        @r###"
+    [127; 128) 'x': &str
+    [136; 138) '{}': ()
+    [169; 180) '{ loop {} }': String
+    [171; 178) 'loop {}': !
+    [176; 178) '{}': ()
+    [191; 236) '{     ... }); }': ()
+    [197; 210) 'takes_ref_str': fn takes_ref_str(&str) -> ()
+    [197; 233) 'takes_...g() })': ()
+    [211; 232) '&{ ret...ng() }': &String
+    [212; 232) '{ retu...ng() }': String
+    [214; 228) 'returns_string': fn returns_string() -> String
+    [214; 230) 'return...ring()': String
+    "###
+    );
+}
+
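
The new `coerce_autoderef_block` test mirrors this plain-Rust program, which compiles because the block's `String` result deref-coerces to `&str` behind the reference:

```
fn takes_ref_str(_: &str) {}

fn returns_string() -> String {
    String::from("hello")
}

fn main() {
    // The block has type `String`, so `&{ ... }` is `&String`, which
    // deref-coerces to `&str` at the call boundary.
    takes_ref_str(&{ returns_string() });
}
```
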
 #[test]
 fn closure_return_coerce() {
     assert_snapshot!(
index 3407d25989b97d364e55ad1c4ed7dda98ad822d1..7625fc8c8268d344a0317dfd0e35720101107395 100644 (file)
@@ -17,11 +17,7 @@ indexmap = "1.3.2"
 itertools = "0.8.2"
 join_to_string = "0.1.3"
 log = "0.4.8"
-rayon = "1.3.0"
-fst = { version = "0.3.5", default-features = false }
 rustc-hash = "1.1.0"
-unicase = "2.6.0"
-superslice = "1.0.0"
 rand = { version = "0.7.3", features = ["small_rng"] }
 once_cell = "1.3.1"
 
index 51ac59a718b0db75a0712c68e7d6fd34978b8c27..b00b6d43104ab5fbeac7c60fd4e144253dc50477 100644 (file)
@@ -2,13 +2,13 @@
 
 use indexmap::IndexMap;
 
-use hir::db::AstDatabase;
+use hir::Semantics;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{ast, match_ast, AstNode, TextRange};
 
 use crate::{
-    call_info::FnCallNode, display::ToNav, expand::descend_into_macros, goto_definition,
-    references, FilePosition, NavigationTarget, RangeInfo,
+    call_info::FnCallNode, display::ToNav, goto_definition, references, FilePosition,
+    NavigationTarget, RangeInfo,
 };
 
 #[derive(Debug, Clone)]
@@ -38,30 +38,31 @@ pub(crate) fn call_hierarchy(
 }
 
 pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
+    let sema = Semantics::new(db);
     // 1. Find all refs
     // 2. Loop through refs and determine unique fndef. This will become our `from: CallHierarchyItem,` in the reply.
     // 3. Add ranges relative to the start of the fndef.
     let refs = references::find_all_refs(db, position, None)?;
 
     let mut calls = CallLocations::default();
-    let mut sb = hir::SourceBinder::new(db);
 
     for reference in refs.info.references() {
         let file_id = reference.file_range.file_id;
-        let file = db.parse_or_expand(file_id.into())?;
+        let file = sema.parse(file_id);
+        let file = file.syntax();
         let token = file.token_at_offset(reference.file_range.range.start()).next()?;
-        let token = descend_into_macros(db, file_id, token);
-        let syntax = token.value.parent();
+        let token = sema.descend_into_macros(token);
+        let syntax = token.parent();
 
         // This target is the containing function
         if let Some(nav) = syntax.ancestors().find_map(|node| {
             match_ast! {
                 match node {
                     ast::FnDef(it) => {
-                        let def = sb.to_def(token.with_value(it))?;
-                        Some(def.to_nav(sb.db))
+                        let def = sema.to_def(&it)?;
+                        Some(def.to_nav(sema.db))
                     },
-                    _ => { None },
+                    _ => None,
                 }
             }
         }) {
@@ -74,11 +75,13 @@ pub(crate) fn incoming_calls(db: &RootDatabase, position: FilePosition) -> Optio
 }
 
 pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
+    let sema = Semantics::new(db);
     let file_id = position.file_id;
-    let file = db.parse_or_expand(file_id.into())?;
+    let file = sema.parse(file_id);
+    let file = file.syntax();
     let token = file.token_at_offset(position.offset).next()?;
-    let token = descend_into_macros(db, file_id, token);
-    let syntax = token.value.parent();
+    let token = sema.descend_into_macros(token);
+    let syntax = token.parent();
 
     let mut calls = CallLocations::default();
 
@@ -87,14 +90,11 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio
         .filter_map(|node| FnCallNode::with_node_exact(&node))
         .filter_map(|call_node| {
             let name_ref = call_node.name_ref()?;
-            let name_ref = token.with_value(name_ref.syntax());
-
-            let analyzer = hir::SourceAnalyzer::new(db, name_ref, None);
 
             if let Some(func_target) = match &call_node {
                 FnCallNode::CallExpr(expr) => {
                     //FIXME: Type::as_callable is broken
-                    let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
+                    let callable_def = sema.type_of_expr(&expr.expr()?)?.as_callable()?;
                     match callable_def {
                         hir::CallableDef::FunctionId(it) => {
                             let fn_def: hir::Function = it.into();
@@ -105,15 +105,15 @@ pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Optio
                     }
                 }
                 FnCallNode::MethodCallExpr(expr) => {
-                    let function = analyzer.resolve_method_call(&expr)?;
+                    let function = sema.resolve_method_call(&expr)?;
                     Some(function.to_nav(db))
                 }
-                FnCallNode::MacroCallExpr(expr) => {
-                    let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?;
+                FnCallNode::MacroCallExpr(macro_call) => {
+                    let macro_def = sema.resolve_macro_call(&macro_call)?;
                     Some(macro_def.to_nav(db))
                 }
             } {
-                Some((func_target, name_ref.value.text_range()))
+                Some((func_target, name_ref.syntax().text_range()))
             } else {
                 None
             }
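
`incoming_calls` buckets reference ranges by the navigation target of the enclosing function (steps 2 and 3 of the comment above). A plausible minimal shape for the `CallLocations` accumulator it uses, with `Target` and `Range` as simplified stand-ins for `NavigationTarget` and `TextRange`:

```
use std::collections::HashMap;

type Target = String;    // stand-in for NavigationTarget
type Range = (u32, u32); // stand-in for TextRange

#[derive(Default)]
struct CallLocations {
    calls: HashMap<Target, Vec<Range>>,
}

impl CallLocations {
    fn add(&mut self, target: &Target, range: Range) {
        self.calls.entry(target.clone()).or_default().push(range);
    }
}

fn main() {
    let mut calls = CallLocations::default();
    calls.add(&"foo".to_string(), (10, 13));
    calls.add(&"foo".to_string(), (42, 45));
    assert_eq!(calls.calls["foo"].len(), 2);
}
```
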
index 7c6322cb48309778a04552983f52ce22456cf0ab..2b35a380300ef59351de1e58249e463b649bc900 100644 (file)
@@ -1,48 +1,55 @@
 //! FIXME: write short doc here
-use hir::db::AstDatabase;
+use hir::Semantics;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
     ast::{self, ArgListOwner},
-    match_ast, AstNode, SyntaxNode,
+    match_ast, AstNode, SyntaxNode, SyntaxToken,
 };
 use test_utils::tested_by;
 
-use crate::{expand::descend_into_macros, CallInfo, FilePosition, FunctionSignature};
+use crate::{CallInfo, FilePosition, FunctionSignature};
 
 /// Computes parameter information for the given call expression.
 pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> {
-    let file = db.parse_or_expand(position.file_id.into())?;
+    let sema = Semantics::new(db);
+    let file = sema.parse(position.file_id);
+    let file = file.syntax();
     let token = file.token_at_offset(position.offset).next()?;
-    let token = descend_into_macros(db, position.file_id, token);
+    let token = sema.descend_into_macros(token);
+    call_info_for_token(&sema, token)
+}
 
+pub(crate) fn call_info_for_token(
+    sema: &Semantics<RootDatabase>,
+    token: SyntaxToken,
+) -> Option<CallInfo> {
     // Find the calling expression and its NameRef
-    let calling_node = FnCallNode::with_node(&token.value.parent())?;
-    let name_ref = calling_node.name_ref()?;
-    let name_ref = token.with_value(name_ref.syntax());
+    let calling_node = FnCallNode::with_node(&token.parent())?;
 
-    let analyzer = hir::SourceAnalyzer::new(db, name_ref, None);
     let (mut call_info, has_self) = match &calling_node {
-        FnCallNode::CallExpr(expr) => {
+        FnCallNode::CallExpr(call) => {
             //FIXME: Type::as_callable is broken
-            let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
+            let callable_def = sema.type_of_expr(&call.expr()?)?.as_callable()?;
             match callable_def {
                 hir::CallableDef::FunctionId(it) => {
                     let fn_def = it.into();
-                    (CallInfo::with_fn(db, fn_def), fn_def.has_self_param(db))
+                    (CallInfo::with_fn(sema.db, fn_def), fn_def.has_self_param(sema.db))
+                }
+                hir::CallableDef::StructId(it) => {
+                    (CallInfo::with_struct(sema.db, it.into())?, false)
                 }
-                hir::CallableDef::StructId(it) => (CallInfo::with_struct(db, it.into())?, false),
                 hir::CallableDef::EnumVariantId(it) => {
-                    (CallInfo::with_enum_variant(db, it.into())?, false)
+                    (CallInfo::with_enum_variant(sema.db, it.into())?, false)
                 }
             }
         }
-        FnCallNode::MethodCallExpr(expr) => {
-            let function = analyzer.resolve_method_call(&expr)?;
-            (CallInfo::with_fn(db, function), function.has_self_param(db))
+        FnCallNode::MethodCallExpr(method_call) => {
+            let function = sema.resolve_method_call(&method_call)?;
+            (CallInfo::with_fn(sema.db, function), function.has_self_param(sema.db))
         }
-        FnCallNode::MacroCallExpr(expr) => {
-            let macro_def = analyzer.resolve_macro_call(db, name_ref.with_value(&expr))?;
-            (CallInfo::with_macro(db, macro_def)?, false)
+        FnCallNode::MacroCallExpr(macro_call) => {
+            let macro_def = sema.resolve_macro_call(&macro_call)?;
+            (CallInfo::with_macro(sema.db, macro_def)?, false)
         }
     };
 
@@ -62,7 +69,7 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
                 let num_args_at_callsite = arg_list.args().count();
 
                 let arg_list_range = arg_list.syntax().text_range();
-                if !arg_list_range.contains_inclusive(position.offset) {
+                if !arg_list_range.contains_inclusive(token.text_range().start()) {
                     tested_by!(call_info_bad_offset);
                     return None;
                 }
@@ -71,7 +78,9 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
                     num_args_at_callsite,
                     arg_list
                         .args()
-                        .take_while(|arg| arg.syntax().text_range().end() < position.offset)
+                        .take_while(|arg| {
+                            arg.syntax().text_range().end() < token.text_range().start()
+                        })
                         .count(),
                 );
 
@@ -101,7 +110,13 @@ fn with_node(syntax: &SyntaxNode) -> Option<FnCallNode> {
             match_ast! {
                 match node {
                     ast::CallExpr(it) => { Some(FnCallNode::CallExpr(it)) },
-                    ast::MethodCallExpr(it) => { Some(FnCallNode::MethodCallExpr(it)) },
+                    ast::MethodCallExpr(it) => {
+                        let arg_list = it.arg_list()?;
+                        if !syntax.text_range().is_subrange(&arg_list.syntax().text_range()) {
+                            return None;
+                        }
+                        Some(FnCallNode::MethodCallExpr(it))
+                    },
                     ast::MacroCall(it) => { Some(FnCallNode::MacroCallExpr(it)) },
                     _ => { None },
                 }
index 4bdc6ba232b84c6ce2f9d13f6569177893167650..c378c2c627f87202de69269f449195971f9e6aa3 100644 (file)
@@ -17,7 +17,6 @@
 mod complete_macro_in_item_position;
 mod complete_trait_impl;
 
-use ra_db::SourceDatabase;
 use ra_ide_db::RootDatabase;
 
 #[cfg(test)]
@@ -57,8 +56,7 @@
 /// identifier prefix/fuzzy match should be done higher in the stack, together
 /// with ordering of completions (currently this is done by the client).
 pub(crate) fn completions(db: &RootDatabase, position: FilePosition) -> Option<Completions> {
-    let original_parse = db.parse(position.file_id);
-    let ctx = CompletionContext::new(db, &original_parse, position)?;
+    let ctx = CompletionContext::new(db, position)?;
 
     let mut acc = Completions::default();
 
index 2ca78c927fc2551d48f91efdc86887275d8e145e..a6e0158b2a1d6d861b4cb1531b48a3b98d3ff34b 100644 (file)
@@ -16,7 +16,7 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
         _ => return,
     };
 
-    let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) {
+    let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
         Some(ty) => ty,
         _ => return,
     };
@@ -55,7 +55,7 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Ty
 fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) {
     if let Some(krate) = ctx.module.map(|it| it.krate()) {
         let mut seen_methods = FxHashSet::default();
-        let traits_in_scope = ctx.analyzer.traits_in_scope(ctx.db);
+        let traits_in_scope = ctx.scope().traits_in_scope();
         receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| {
             if func.has_self_param(ctx.db) && seen_methods.insert(func.name(ctx.db)) {
                 acc.add_function(ctx, func);
index faadd1e3f4e4d0e52ea6811e1498cb6a64b2fe64..1866d9e6c6d9b197cc235dc330416d8f1e4eeb90 100644 (file)
@@ -5,7 +5,7 @@
 pub(super) fn complete_macro_in_item_position(acc: &mut Completions, ctx: &CompletionContext) {
     // Show only macros in top level.
     if ctx.is_new_item {
-        ctx.analyzer.process_all_names(ctx.db, &mut |name, res| {
+        ctx.scope().process_all_names(&mut |name, res| {
             if let hir::ScopeDef::MacroDef(mac) = res {
                 acc.add_macro(ctx, Some(name.to_string()), mac);
             }
index 2d7f09a6c02be2923e3c1ed51daefe58c02dc320..c626e90cc8bf02f4f7a9e627692ca8b892b9b8b8 100644 (file)
@@ -11,7 +11,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
         Some(path) => path.clone(),
         _ => return,
     };
-    let def = match ctx.analyzer.resolve_hir_path(ctx.db, &path) {
+    let def = match ctx.scope().resolve_hir_path(&path) {
         Some(PathResolution::Def(def)) => def,
         _ => return,
     };
@@ -49,7 +49,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
             // FIXME: complete T::AssocType
             let krate = ctx.module.map(|m| m.krate());
             if let Some(krate) = krate {
-                let traits_in_scope = ctx.analyzer.traits_in_scope(ctx.db);
+                let traits_in_scope = ctx.scope().traits_in_scope();
                 ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| {
                     match item {
                         hir::AssocItem::Function(func) => {
index fd03b1c40586ad07fa4f83d00accddbbdd79b724..c2c6ca002c399679a88caf4eceef7878b3d03c7c 100644 (file)
@@ -9,7 +9,7 @@ pub(super) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
     }
     // FIXME: ideally, we should look at the type we are matching against and
     // suggest variants + auto-imports
-    ctx.analyzer.process_all_names(ctx.db, &mut |name, res| {
+    ctx.scope().process_all_names(&mut |name, res| {
         let def = match &res {
             hir::ScopeDef::ModuleDef(def) => def,
             _ => return,
index 5470dc291fa4c4f2a52b8335356afb10065e9dc8..8a74f993ab62b3bc0dddeefd7afaa73d800bc138 100644 (file)
@@ -29,7 +29,7 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
         dot_receiver.syntax().text().to_string()
     };
 
-    let receiver_ty = match ctx.analyzer.type_of(ctx.db, &dot_receiver) {
+    let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
         Some(it) => it,
         None => return,
     };
index 577c394d22e645de841b00904e2ca58864a8e2ea..f98353d769cbec06b03775a300af032a3c9978bc 100644 (file)
@@ -5,10 +5,7 @@
 /// Complete fields in record literals.
 pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionContext) {
     let (ty, variant) = match ctx.record_lit_syntax.as_ref().and_then(|it| {
-        Some((
-            ctx.analyzer.type_of(ctx.db, &it.clone().into())?,
-            ctx.analyzer.resolve_record_literal(it)?,
-        ))
+        Some((ctx.sema.type_of_expr(&it.clone().into())?, ctx.sema.resolve_record_literal(it)?))
     }) {
         Some(it) => it,
         _ => return,
index a56c7e3a1fca7b2c7bc08822bf78d16aafb6c955..9bdeae49f98d6bb348f2dbd7514f4334b7e58e0a 100644 (file)
@@ -4,10 +4,7 @@
 
 pub(super) fn complete_record_pattern(acc: &mut Completions, ctx: &CompletionContext) {
     let (ty, variant) = match ctx.record_lit_pat.as_ref().and_then(|it| {
-        Some((
-            ctx.analyzer.type_of_pat(ctx.db, &it.clone().into())?,
-            ctx.analyzer.resolve_record_pattern(it)?,
-        ))
+        Some((ctx.sema.type_of_pat(&it.clone().into())?, ctx.sema.resolve_record_pattern(it)?))
     }) {
         Some(it) => it,
         _ => return,
index e2ee86dd15dd2c555d954834bdd2418f6881a10c..aad016d4aba7a21d5eaf70fd07c5428da261e80e 100644 (file)
@@ -7,9 +7,7 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) {
         return;
     }
 
-    ctx.analyzer.process_all_names(ctx.db, &mut |name, res| {
-        acc.add_resolution(ctx, name.to_string(), &res)
-    });
+    ctx.scope().process_all_names(&mut |name, res| acc.add_resolution(ctx, name.to_string(), &res));
 }
 
 #[cfg(test)]
index 83628e35c2b8f1df9726f4a6b99dd43738266c0b..9a27c164b705616e66c2792d45d1e2b4397baca1 100644 (file)
@@ -64,11 +64,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
     if let (Some(trigger), Some(impl_block)) = (trigger, impl_block) {
         match trigger.kind() {
             SyntaxKind::FN_DEF => {
-                for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block)
-                    .iter()
-                    .filter_map(|item| match item {
-                        hir::AssocItem::Function(fn_item) => Some(fn_item),
-                        _ => None,
+                for missing_fn in
+                    get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| {
+                        match item {
+                            hir::AssocItem::Function(fn_item) => Some(fn_item),
+                            _ => None,
+                        }
                     })
                 {
                     add_function_impl(&trigger, acc, ctx, &missing_fn);
@@ -76,11 +77,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
             }
 
             SyntaxKind::TYPE_ALIAS_DEF => {
-                for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block)
-                    .iter()
-                    .filter_map(|item| match item {
-                        hir::AssocItem::TypeAlias(type_item) => Some(type_item),
-                        _ => None,
+                for missing_fn in
+                    get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| {
+                        match item {
+                            hir::AssocItem::TypeAlias(type_item) => Some(type_item),
+                            _ => None,
+                        }
                     })
                 {
                     add_type_alias_impl(&trigger, acc, ctx, &missing_fn);
@@ -88,11 +90,12 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
             }
 
             SyntaxKind::CONST_DEF => {
-                for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block)
-                    .iter()
-                    .filter_map(|item| match item {
-                        hir::AssocItem::Const(const_item) => Some(const_item),
-                        _ => None,
+                for missing_fn in
+                    get_missing_impl_items(&ctx.sema, &impl_block).iter().filter_map(|item| {
+                        match item {
+                            hir::AssocItem::Const(const_item) => Some(const_item),
+                            _ => None,
+                        }
                     })
                 {
                     add_const_impl(&trigger, acc, ctx, &missing_fn);
index 8678a3234f14f489e3eda4e6ce3019897f6d3f79..81321a897f7c929ff111d91742f63c12843896fd 100644 (file)
@@ -1,9 +1,11 @@
 //! FIXME: write short doc here
 
+use hir::{Semantics, SemanticsScope};
+use ra_db::SourceDatabase;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
     algo::{find_covering_element, find_node_at_offset},
-    ast, AstNode, Parse, SourceFile,
+    ast, AstNode, SourceFile,
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, TextRange, TextUnit,
 };
@@ -15,8 +17,8 @@
 /// exactly is the cursor, syntax-wise.
 #[derive(Debug)]
 pub(crate) struct CompletionContext<'a> {
+    pub(super) sema: Semantics<'a, RootDatabase>,
     pub(super) db: &'a RootDatabase,
-    pub(super) analyzer: hir::SourceAnalyzer,
     pub(super) offset: TextUnit,
     pub(super) token: SyntaxToken,
     pub(super) module: Option<hir::Module>,
@@ -51,20 +53,26 @@ pub(crate) struct CompletionContext<'a> {
 impl<'a> CompletionContext<'a> {
     pub(super) fn new(
         db: &'a RootDatabase,
-        original_parse: &'a Parse<ast::SourceFile>,
         position: FilePosition,
     ) -> Option<CompletionContext<'a>> {
-        let mut sb = hir::SourceBinder::new(db);
-        let module = sb.to_module_def(position.file_id);
-        let token =
-            original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
-        let analyzer = sb.analyze(
-            hir::InFile::new(position.file_id.into(), &token.parent()),
-            Some(position.offset),
-        );
+        let sema = Semantics::new(db);
+
+        let original_file = sema.parse(position.file_id);
+
+        // Insert a fake ident to get a valid parse tree. We will use this file
+        // to determine context, though the original_file will be used for
+        // actual completion.
+        let file_with_fake_ident = {
+            let parse = db.parse(position.file_id);
+            let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string());
+            parse.reparse(&edit).tree()
+        };
+
+        let module = sema.to_module_def(position.file_id);
+        let token = original_file.syntax().token_at_offset(position.offset).left_biased()?;
         let mut ctx = CompletionContext {
+            sema,
             db,
-            analyzer,
             token,
             offset: position.offset,
             module,
@@ -87,7 +95,7 @@ pub(super) fn new(
             has_type_args: false,
             dot_receiver_is_ambiguous_float_literal: false,
         };
-        ctx.fill(&original_parse, position.offset);
+        ctx.fill(&original_file, file_with_fake_ident, position.offset);
         Some(ctx)
     }
 
@@ -100,29 +108,33 @@ pub(crate) fn source_range(&self) -> TextRange {
         }
     }
 
-    fn fill(&mut self, original_parse: &'a Parse<ast::SourceFile>, offset: TextUnit) {
-        // Insert a fake ident to get a valid parse tree. We will use this file
-        // to determine context, though the original_file will be used for
-        // actual completion.
-        let file = {
-            let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string());
-            original_parse.reparse(&edit).tree()
-        };
+    pub(crate) fn scope(&self) -> SemanticsScope<'_, RootDatabase> {
+        self.sema.scope_at_offset(&self.token.parent(), self.offset)
+    }
 
+    fn fill(
+        &mut self,
+        original_file: &ast::SourceFile,
+        file_with_fake_ident: ast::SourceFile,
+        offset: TextUnit,
+    ) {
         // First, let's try to complete a reference to some declaration.
-        if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), offset) {
+        if let Some(name_ref) =
+            find_node_at_offset::<ast::NameRef>(file_with_fake_ident.syntax(), offset)
+        {
             // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`.
             // See RFC#1685.
             if is_node::<ast::Param>(name_ref.syntax()) {
                 self.is_param = true;
                 return;
             }
-            self.classify_name_ref(original_parse.tree(), name_ref);
+            self.classify_name_ref(original_file, name_ref);
         }
 
         // Otherwise, see if this is a declaration. We can use heuristics to
         // suggest declaration names, see `CompletionKind::Magic`.
-        if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) {
+        if let Some(name) = find_node_at_offset::<ast::Name>(file_with_fake_ident.syntax(), offset)
+        {
             if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) {
                 let parent = bind_pat.syntax().parent();
                 if parent.clone().and_then(ast::MatchArm::cast).is_some()
@@ -136,13 +148,12 @@ fn fill(&mut self, original_parse: &'a Parse<ast::SourceFile>, offset: TextUnit)
                 return;
             }
             if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() {
-                self.record_lit_pat =
-                    find_node_at_offset(original_parse.tree().syntax(), self.offset);
+                self.record_lit_pat = find_node_at_offset(original_file.syntax(), self.offset);
             }
         }
     }
 
-    fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) {
+    fn classify_name_ref(&mut self, original_file: &SourceFile, name_ref: ast::NameRef) {
         self.name_ref_syntax =
             find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start());
         let name_range = name_ref.syntax().text_range();
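
The notable move in this file: the `intellijRulezz` fake-ident reparse now happens eagerly in `CompletionContext::new`, so `fill` receives the original tree and the doctored one as separate, explicitly named arguments. A minimal sketch of the trick itself, assuming `AtomTextEdit` comes from `ra_text_edit` as it does elsewhere in this codebase:

```
use ra_syntax::{ast, Parse, TextUnit};
use ra_text_edit::AtomTextEdit;

/// Insert a placeholder identifier at the cursor and reparse, so the tree
/// is syntactically valid even while the user is mid-token. The original
/// tree is still what actual completion results are computed against.
fn parse_with_fake_ident(
    original: &Parse<ast::SourceFile>,
    offset: TextUnit,
) -> ast::SourceFile {
    let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string());
    original.reparse(&edit).tree()
}
```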
index 9cf86b26d8b0b738b0b168b4bb6cbe70ef4c107d..a52f7fdd9f216bd604f553fed953a5cf76fd02ba 100644 (file)
@@ -2,7 +2,10 @@
 
 use std::cell::RefCell;
 
-use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink};
+use hir::{
+    diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink},
+    Semantics,
+};
 use itertools::Itertools;
 use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt};
 use ra_ide_db::RootDatabase;
@@ -24,7 +27,7 @@ pub enum Severity {
 
 pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> {
     let _p = profile("diagnostics");
-    let mut sb = hir::SourceBinder::new(db);
+    let sema = Semantics::new(db);
     let parse = db.parse(file_id);
     let mut res = Vec::new();
 
@@ -110,7 +113,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
             fix: Some(fix),
         })
     });
-    if let Some(m) = sb.to_module_def(file_id) {
+    if let Some(m) = sema.to_module_def(file_id) {
         m.diagnostics(db, &mut sink);
     };
     drop(sink);
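
This hunk is the commit's recurring shape in miniature: a shared `Semantics` handle replaces the `&mut SourceBinder`, and file-to-HIR lookups go through it. A hedged sketch of the new entry point, using only the calls visible above:

```
use hir::Semantics;
use ra_db::FileId;
use ra_ide_db::RootDatabase;

// Map a file to its HIR module through the Semantics facade. Note the
// shared borrow: no `&mut` binder threading anymore.
fn module_of(db: &RootDatabase, file_id: FileId) -> Option<hir::Module> {
    let sema = Semantics::new(db);
    sema.to_module_def(file_id)
}
```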
index c9d0058a621795540be5a7987beedc1d3e1be85f..5afb23764e917b556cf9f3637c588b869167488d 100644 (file)
@@ -1,7 +1,7 @@
 //! FIXME: write short doc here
 
 use either::Either;
-use hir::{AssocItem, FieldSource, HasSource, InFile, ModuleSource};
+use hir::{original_range, AssocItem, FieldSource, HasSource, InFile, ModuleSource};
 use ra_db::{FileId, SourceDatabase};
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
     TextRange,
 };
 
-use crate::{expand::original_range, references::NameDefinition, FileSymbol};
+use crate::{
+    // expand::original_range,
+    references::NameDefinition,
+    FileSymbol,
+};
 
 use super::short_label::ShortLabel;
 
diff --git a/crates/ra_ide/src/expand.rs b/crates/ra_ide/src/expand.rs
deleted file mode 100644 (file)
index 9f3aaa3..0000000
+++ /dev/null
@@ -1,102 +0,0 @@
-//! Utilities to work with files, produced by macros.
-use std::iter::successors;
-
-use hir::{InFile, Origin};
-use ra_db::FileId;
-use ra_ide_db::RootDatabase;
-use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken, TextRange};
-
-use crate::FileRange;
-
-pub(crate) fn original_range(db: &RootDatabase, node: InFile<&SyntaxNode>) -> FileRange {
-    if let Some((range, Origin::Call)) = original_range_and_origin(db, node) {
-        return range;
-    }
-
-    if let Some(expansion) = node.file_id.expansion_info(db) {
-        if let Some(call_node) = expansion.call_node() {
-            return FileRange {
-                file_id: call_node.file_id.original_file(db),
-                range: call_node.value.text_range(),
-            };
-        }
-    }
-
-    FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
-}
-
-fn original_range_and_origin(
-    db: &RootDatabase,
-    node: InFile<&SyntaxNode>,
-) -> Option<(FileRange, Origin)> {
-    let expansion = node.file_id.expansion_info(db)?;
-
-    // the input node has only one token ?
-    let single = node.value.first_token()? == node.value.last_token()?;
-
-    // FIXME: We should handle recursive macro expansions
-    let (range, origin) = node.value.descendants().find_map(|it| {
-        let first = it.first_token()?;
-        let last = it.last_token()?;
-
-        if !single && first == last {
-            return None;
-        }
-
-        // Try to map first and last tokens of node, and, if success, return the union range of mapped tokens
-        let (first, first_origin) = expansion.map_token_up(node.with_value(&first))?;
-        let (last, last_origin) = expansion.map_token_up(node.with_value(&last))?;
-
-        if first.file_id != last.file_id || first_origin != last_origin {
-            return None;
-        }
-
-        // FIXME: Add union method in TextRange
-        Some((
-            first.with_value(union_range(first.value.text_range(), last.value.text_range())),
-            first_origin,
-        ))
-    })?;
-
-    return Some((
-        FileRange { file_id: range.file_id.original_file(db), range: range.value },
-        origin,
-    ));
-
-    fn union_range(a: TextRange, b: TextRange) -> TextRange {
-        let start = a.start().min(b.start());
-        let end = a.end().max(b.end());
-        TextRange::from_to(start, end)
-    }
-}
-
-pub(crate) fn descend_into_macros(
-    db: &RootDatabase,
-    file_id: FileId,
-    token: SyntaxToken,
-) -> InFile<SyntaxToken> {
-    let src = InFile::new(file_id.into(), token);
-
-    let source_analyzer =
-        hir::SourceAnalyzer::new(db, src.with_value(src.value.parent()).as_ref(), None);
-
-    descend_into_macros_with_analyzer(db, &source_analyzer, src)
-}
-
-pub(crate) fn descend_into_macros_with_analyzer(
-    db: &RootDatabase,
-    source_analyzer: &hir::SourceAnalyzer,
-    src: InFile<SyntaxToken>,
-) -> InFile<SyntaxToken> {
-    successors(Some(src), |token| {
-        let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
-        let tt = macro_call.token_tree()?;
-        if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
-            return None;
-        }
-        let exp = source_analyzer.expand(db, token.with_value(&macro_call))?;
-        exp.map_token_down(db, token.as_ref())
-    })
-    .last()
-    .unwrap()
-}
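
The deleted module's two jobs move behind the `hir` API: `original_range` is now imported from `hir` (see the `display` hunk above), and `descend_into_macros` becomes a `Semantics` method. The loop it used is worth keeping in mind: `std::iter::successors` applies a fallible step and `.last()` keeps the final success. A self-contained sketch of that iteration shape with plain integers:

```
use std::iter::successors;

// Apply `step` until it fails, returning the last value reached. The old
// `descend_into_macros` had exactly this shape, with syntax tokens as
// values and "map this token one macro expansion down" as the step.
fn fixpoint<T: Copy>(start: T, step: impl Fn(T) -> Option<T>) -> T {
    successors(Some(start), |&x| step(x)).last().unwrap()
}

fn main() {
    // Halve while even: 40 -> 20 -> 10 -> 5, then the step fails and we stop.
    let out = fixpoint(40u32, |x| if x % 2 == 0 { Some(x / 2) } else { None });
    assert_eq!(out, 5);
}
```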
index af2783befb3d1e3f123fdcfa2b838f2248d8d283..5a079de2709d8b130e3f9b3ef6c11e503cc5962d 100644 (file)
@@ -1,7 +1,6 @@
 //! This modules implements "expand macro" functionality in the IDE
 
-use hir::db::AstDatabase;
-use ra_db::SourceDatabase;
+use hir::Semantics;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
     algo::{find_node_at_offset, replace_descendants},
@@ -17,13 +16,12 @@ pub struct ExpandedMacro {
 }
 
 pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
-    let parse = db.parse(position.file_id);
-    let file = parse.tree();
+    let sema = Semantics::new(db);
+    let file = sema.parse(position.file_id);
     let name_ref = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset)?;
     let mac = name_ref.syntax().ancestors().find_map(ast::MacroCall::cast)?;
 
-    let source = hir::InFile::new(position.file_id.into(), mac.syntax());
-    let expanded = expand_macro_recur(db, source, source.with_value(&mac))?;
+    let expanded = expand_macro_recur(&sema, &mac)?;
 
     // FIXME:
     // macro expansion may lose all white space information
@@ -33,21 +31,16 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
 }
 
 fn expand_macro_recur(
-    db: &RootDatabase,
-    source: hir::InFile<&SyntaxNode>,
-    macro_call: hir::InFile<&ast::MacroCall>,
+    sema: &Semantics<RootDatabase>,
+    macro_call: &ast::MacroCall,
 ) -> Option<SyntaxNode> {
-    let analyzer = hir::SourceAnalyzer::new(db, source, None);
-    let expansion = analyzer.expand(db, macro_call)?;
-    let macro_file_id = expansion.file_id();
-    let mut expanded: SyntaxNode = db.parse_or_expand(macro_file_id)?;
+    let mut expanded = sema.expand(macro_call)?;
 
     let children = expanded.descendants().filter_map(ast::MacroCall::cast);
     let mut replaces: FxHashMap<SyntaxElement, SyntaxElement> = FxHashMap::default();
 
     for child in children.into_iter() {
-        let node = hir::InFile::new(macro_file_id, &child);
-        if let Some(new_node) = expand_macro_recur(db, source, node) {
+        if let Some(new_node) = expand_macro_recur(sema, &child) {
             // Replace the whole node if it is root
             // `replace_descendants` will not replace the parent node
             // but `SyntaxNode::descendants` includes itself
@@ -59,7 +52,7 @@ fn expand_macro_recur(
         }
     }
 
-    Some(replace_descendants(&expanded, &|n| replaces.get(n).cloned()))
+    Some(replace_descendants(&expanded, |n| replaces.get(n).cloned()))
 }
 
 // FIXME: It would also be cool to share logic here and in the mbe tests,
@@ -120,10 +113,12 @@ fn is_text(k: SyntaxKind) -> bool {
 
 #[cfg(test)]
 mod tests {
-    use super::*;
-    use crate::mock_analysis::analysis_and_position;
     use insta::assert_snapshot;
 
+    use crate::mock_analysis::analysis_and_position;
+
+    use super::*;
+
     fn check_expand_macro(fixture: &str) -> ExpandedMacro {
         let (analysis, pos) = analysis_and_position(fixture);
         analysis.expand_macro(pos).unwrap().unwrap()
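
The compression here is the selling point: `sema.expand(macro_call)` stands in for the old three-step dance of `SourceAnalyzer::new`, `analyzer.expand`, and `db.parse_or_expand`. A minimal sketch of one expansion level, assuming the imports this file already uses:

```
use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::{ast, SyntaxNode};

// One level of macro expansion; None means the call could not be expanded.
// The recursion above then re-runs this on nested calls in the output.
fn expand_once(sema: &Semantics<RootDatabase>, call: &ast::MacroCall) -> Option<SyntaxNode> {
    sema.expand(call)
}
```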
index 1e7d0621a6cb98bd2a850d5afa4d8c8f2723353d..f5a06335128a40be39da0e38b4c869f77b7822ae 100644 (file)
@@ -2,26 +2,26 @@
 
 use std::iter::successors;
 
-use hir::db::AstDatabase;
-use ra_db::SourceDatabase;
+use hir::Semantics;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
-    algo::find_covering_element,
+    algo::{self, find_covering_element, skip_trivia_token},
     ast::{self, AstNode, AstToken},
-    Direction, NodeOrToken, SyntaxElement,
+    Direction, NodeOrToken,
     SyntaxKind::{self, *},
     SyntaxNode, SyntaxToken, TextRange, TextUnit, TokenAtOffset, T,
 };
 
-use crate::{expand::descend_into_macros, FileId, FileRange};
+use crate::FileRange;
 
 pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange {
-    let src = db.parse(frange.file_id).tree();
-    try_extend_selection(db, src.syntax(), frange).unwrap_or(frange.range)
+    let sema = Semantics::new(db);
+    let src = sema.parse(frange.file_id);
+    try_extend_selection(&sema, src.syntax(), frange).unwrap_or(frange.range)
 }
 
 fn try_extend_selection(
-    db: &RootDatabase,
+    sema: &Semantics<RootDatabase>,
     root: &SyntaxNode,
     frange: FileRange,
 ) -> Option<TextRange> {
@@ -86,7 +86,7 @@ fn try_extend_selection(
     // if we are in single token_tree, we maybe live in macro or attr
     if node.kind() == TOKEN_TREE {
         if let Some(macro_call) = node.ancestors().find_map(ast::MacroCall::cast) {
-            if let Some(range) = extend_tokens_from_range(db, frange.file_id, macro_call, range) {
+            if let Some(range) = extend_tokens_from_range(sema, macro_call, range) {
                 return Some(range);
             }
         }
@@ -96,7 +96,7 @@ fn try_extend_selection(
         return Some(node.text_range());
     }
 
-    let node = shallowest_node(&node.into()).unwrap();
+    let node = shallowest_node(&node.into());
 
     if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
         if let Some(range) = extend_list_item(&node) {
@@ -108,8 +108,7 @@ fn try_extend_selection(
 }
 
 fn extend_tokens_from_range(
-    db: &RootDatabase,
-    file_id: FileId,
+    sema: &Semantics<RootDatabase>,
     macro_call: ast::MacroCall,
     original_range: TextRange,
 ) -> Option<TextRange> {
@@ -119,54 +118,50 @@ fn extend_tokens_from_range(
         NodeOrToken::Token(it) => (it.clone(), it),
     };
 
-    let mut first_token = skip_whitespace(first_token, Direction::Next)?;
-    let mut last_token = skip_whitespace(last_token, Direction::Prev)?;
+    let mut first_token = skip_trivia_token(first_token, Direction::Next)?;
+    let mut last_token = skip_trivia_token(last_token, Direction::Prev)?;
 
     while !first_token.text_range().is_subrange(&original_range) {
-        first_token = skip_whitespace(first_token.next_token()?, Direction::Next)?;
+        first_token = skip_trivia_token(first_token.next_token()?, Direction::Next)?;
     }
     while !last_token.text_range().is_subrange(&original_range) {
-        last_token = skip_whitespace(last_token.prev_token()?, Direction::Prev)?;
+        last_token = skip_trivia_token(last_token.prev_token()?, Direction::Prev)?;
     }
 
     // compute original mapped token range
-    let expanded = {
-        let first_node = descend_into_macros(db, file_id, first_token.clone());
-        let first_node = first_node.map(|it| it.text_range());
-
-        let last_node = descend_into_macros(db, file_id, last_token.clone());
-        if last_node.file_id == file_id.into() || first_node.file_id != last_node.file_id {
-            return None;
+    let extended = {
+        let fst_expanded = sema.descend_into_macros(first_token.clone());
+        let lst_expanded = sema.descend_into_macros(last_token.clone());
+        let mut lca = algo::least_common_ancestor(&fst_expanded.parent(), &lst_expanded.parent())?;
+        lca = shallowest_node(&lca);
+        if lca.first_token() == Some(fst_expanded) && lca.last_token() == Some(lst_expanded) {
+            lca = lca.parent()?;
         }
-        first_node.map(|it| union_range(it, last_node.value.text_range()))
+        lca
     };
 
     // Compute parent node range
-    let src = db.parse_or_expand(expanded.file_id)?;
-    let parent = shallowest_node(&find_covering_element(&src, expanded.value))?.parent()?;
-
     let validate = |token: &SyntaxToken| {
-        let node = descend_into_macros(db, file_id, token.clone());
-        node.file_id == expanded.file_id
-            && node.value.text_range().is_subrange(&parent.text_range())
+        let expanded = sema.descend_into_macros(token.clone());
+        algo::least_common_ancestor(&extended, &expanded.parent()).as_ref() == Some(&extended)
     };
 
     // Find the first and last text range under expanded parent
     let first = successors(Some(first_token), |token| {
         let token = token.prev_token()?;
-        skip_whitespace(token, Direction::Prev)
+        skip_trivia_token(token, Direction::Prev)
     })
     .take_while(validate)
     .last()?;
 
     let last = successors(Some(last_token), |token| {
         let token = token.next_token()?;
-        skip_whitespace(token, Direction::Next)
+        skip_trivia_token(token, Direction::Next)
     })
     .take_while(validate)
     .last()?;
 
-    let range = union_range(first.text_range(), last.text_range());
+    let range = first.text_range().extend_to(&last.text_range());
     if original_range.is_subrange(&range) && original_range != range {
         Some(range)
     } else {
@@ -174,25 +169,9 @@ fn extend_tokens_from_range(
     }
 }
 
-fn skip_whitespace(mut token: SyntaxToken, direction: Direction) -> Option<SyntaxToken> {
-    while token.kind() == WHITESPACE {
-        token = match direction {
-            Direction::Next => token.next_token()?,
-            Direction::Prev => token.prev_token()?,
-        }
-    }
-    Some(token)
-}
-
-fn union_range(range: TextRange, r: TextRange) -> TextRange {
-    let start = range.start().min(r.start());
-    let end = range.end().max(r.end());
-    TextRange::from_to(start, end)
-}
-
 /// Find the shallowest node with same range, which allows us to traverse siblings.
-fn shallowest_node(node: &SyntaxElement) -> Option<SyntaxNode> {
-    node.ancestors().take_while(|n| n.text_range() == node.text_range()).last()
+fn shallowest_node(node: &SyntaxNode) -> SyntaxNode {
+    node.ancestors().take_while(|n| n.text_range() == node.text_range()).last().unwrap()
 }
 
 fn extend_single_word_in_comment_or_string(
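
Two local helpers disappear from this file: `skip_whitespace` is replaced by `ra_syntax`'s `skip_trivia_token`, and `union_range` by `TextRange::extend_to`. The union is just min-of-starts, max-of-ends; a self-contained sketch of the semantics being delegated:

```
// Smallest range covering both inputs: what the deleted `union_range` did
// and what `TextRange::extend_to` now provides.
fn union(a: (u32, u32), b: (u32, u32)) -> (u32, u32) {
    (a.0.min(b.0), a.1.max(b.1))
}

fn main() {
    assert_eq!(union((3, 7), (5, 12)), (3, 12));
    assert_eq!(union((0, 2), (8, 9)), (0, 9)); // disjoint inputs still merge
}
```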
index feff1ec3f694c4ef5294a8e941c3d19db582e0de..621ab982c350f3f3104ea3dc827c9338330b08d5 100644 (file)
@@ -1,7 +1,7 @@
 //! FIXME: write short doc here
 
-use hir::{db::AstDatabase, InFile, SourceBinder};
-use ra_ide_db::{symbol_index, RootDatabase};
+use hir::Semantics;
+use ra_ide_db::{defs::classify_name, symbol_index, RootDatabase};
 use ra_syntax::{
     ast::{self},
     match_ast, AstNode,
@@ -11,8 +11,7 @@
 
 use crate::{
     display::{ToNav, TryToNav},
-    expand::descend_into_macros,
-    references::{classify_name, classify_name_ref},
+    references::classify_name_ref,
     FilePosition, NavigationTarget, RangeInfo,
 };
 
@@ -20,18 +19,20 @@ pub(crate) fn goto_definition(
     db: &RootDatabase,
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
-    let file = db.parse_or_expand(position.file_id.into())?;
+    let sema = Semantics::new(db);
+    let file = sema.parse(position.file_id).syntax().clone();
     let original_token = pick_best(file.token_at_offset(position.offset))?;
-    let token = descend_into_macros(db, position.file_id, original_token.clone());
+    let token = sema.descend_into_macros(original_token.clone());
 
-    let mut sb = SourceBinder::new(db);
     let nav_targets = match_ast! {
-        match (token.value.parent()) {
+        match (token.parent()) {
             ast::NameRef(name_ref) => {
-                reference_definition(&mut sb, token.with_value(&name_ref)).to_vec()
+                reference_definition(&sema, &name_ref).to_vec()
             },
             ast::Name(name) => {
-                name_definition(&mut sb, token.with_value(&name))?
+                let def = classify_name(&sema, &name)?.definition();
+                let nav = def.try_to_nav(sema.db)?;
+                vec![nav]
             },
             _ => return None,
         }
@@ -68,36 +69,27 @@ fn to_vec(self) -> Vec<NavigationTarget> {
 }
 
 pub(crate) fn reference_definition(
-    sb: &mut SourceBinder<RootDatabase>,
-    name_ref: InFile<&ast::NameRef>,
+    sema: &Semantics<RootDatabase>,
+    name_ref: &ast::NameRef,
 ) -> ReferenceResult {
     use self::ReferenceResult::*;
 
-    let name_kind = classify_name_ref(sb, name_ref);
+    let name_kind = classify_name_ref(sema, name_ref);
     if let Some(def) = name_kind {
-        return match def.try_to_nav(sb.db) {
+        return match def.try_to_nav(sema.db) {
             Some(nav) => ReferenceResult::Exact(nav),
             None => ReferenceResult::Approximate(Vec::new()),
         };
     }
 
     // Fallback index based approach:
-    let navs = symbol_index::index_resolve(sb.db, name_ref.value)
+    let navs = symbol_index::index_resolve(sema.db, name_ref)
         .into_iter()
-        .map(|s| s.to_nav(sb.db))
+        .map(|s| s.to_nav(sema.db))
         .collect();
     Approximate(navs)
 }
 
-fn name_definition(
-    sb: &mut SourceBinder<RootDatabase>,
-    name: InFile<&ast::Name>,
-) -> Option<Vec<NavigationTarget>> {
-    let def = classify_name(sb, name)?;
-    let nav = def.try_to_nav(sb.db)?;
-    Some(vec![nav])
-}
-
 #[cfg(test)]
 mod tests {
     use test_utils::{assert_eq_text, covers};
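
`name_definition` is inlined away: the `ast::Name` arm now does classify-then-navigate directly. A minimal sketch of that pair for the `NameRef` side, borrowing the same crate-internal imports the file itself uses (`classify_name_ref`, `TryToNav`, `NavigationTarget`):

```
use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::ast;

use crate::{display::TryToNav, references::classify_name_ref, NavigationTarget};

// Exact resolution: classify the reference semantically, then map the
// definition to a navigation target. A None here falls through to the
// approximate symbol-index lookup.
fn exact_definition(
    sema: &Semantics<RootDatabase>,
    name_ref: &ast::NameRef,
) -> Option<NavigationTarget> {
    classify_name_ref(sema, name_ref)?.try_to_nav(sema.db)
}
```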
index 69940fc36a35847e438b784f6df8a2bc2f566c84..869a4708b7d6504c13fb5fbc07bf638fb5debd76 100644 (file)
@@ -1,31 +1,31 @@
 //! FIXME: write short doc here
 
-use hir::db::AstDatabase;
 use ra_ide_db::RootDatabase;
-use ra_syntax::{ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset};
+use ra_syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset};
 
-use crate::{
-    display::ToNav, expand::descend_into_macros, FilePosition, NavigationTarget, RangeInfo,
-};
+use crate::{display::ToNav, FilePosition, NavigationTarget, RangeInfo};
 
 pub(crate) fn goto_type_definition(
     db: &RootDatabase,
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
-    let file = db.parse_or_expand(position.file_id.into())?;
-    let token = pick_best(file.token_at_offset(position.offset))?;
-    let token = descend_into_macros(db, position.file_id, token);
-
-    let node = token
-        .value
-        .ancestors()
-        .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?;
-
-    let analyzer = hir::SourceAnalyzer::new(db, token.with_value(&node), None);
+    let sema = hir::Semantics::new(db);
+
+    let file: ast::SourceFile = sema.parse(position.file_id);
+    let token: SyntaxToken = pick_best(file.syntax().token_at_offset(position.offset))?;
+    let token: SyntaxToken = sema.descend_into_macros(token);
+
+    let (ty, node) = sema.ancestors_with_macros(token.parent()).find_map(|node| {
+        let ty = match_ast! {
+            match node {
+                ast::Expr(expr) => { sema.type_of_expr(&expr)? },
+                ast::Pat(pat) => { sema.type_of_pat(&pat)? },
+                _ => { return None },
+            }
+        };
 
-    let ty: hir::Type = ast::Expr::cast(node.clone())
-        .and_then(|e| analyzer.type_of(db, &e))
-        .or_else(|| ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p)))?;
+        Some((ty, node))
+    })?;
 
     let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt())?;
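
The rewrite leans on two patterns that recur through this commit: `ancestors_with_macros` to walk upward across expansion boundaries, and `match_ast!` to take the first ancestor that casts to an interesting AST type. A hedged sketch of the `match_ast!` shape in isolation:

```
use ra_syntax::{ast, match_ast, AstNode, SyntaxNode};

// Typed dispatch over a raw syntax node: try each listed cast in order,
// falling through to the `_` arm when nothing matches.
fn describe(node: SyntaxNode) -> &'static str {
    match_ast! {
        match node {
            ast::Expr(_it) => { "an expression" },
            ast::Pat(_it) => { "a pattern" },
            _ => { "something else" },
        }
    }
}
```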
 
index 1c6ca36df1d3785823528019de4bc30f7269d4bc..5073bb1cf8964b410748893db5440065b0cbee3b 100644 (file)
@@ -1,10 +1,11 @@
 //! FIXME: write short doc here
 
-use hir::{db::AstDatabase, Adt, HasSource, HirDisplay, SourceBinder};
-use ra_db::SourceDatabase;
-use ra_ide_db::{defs::NameDefinition, RootDatabase};
+use hir::{Adt, HasSource, HirDisplay, Semantics};
+use ra_ide_db::{
+    defs::{classify_name, NameDefinition},
+    RootDatabase,
+};
 use ra_syntax::{
-    algo::find_covering_element,
     ast::{self, DocCommentsOwner},
     match_ast, AstNode,
     SyntaxKind::*,
@@ -13,9 +14,8 @@
 
 use crate::{
     display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel},
-    expand::{descend_into_macros, original_range},
-    references::{classify_name, classify_name_ref},
-    FilePosition, FileRange, RangeInfo,
+    references::classify_name_ref,
+    FilePosition, RangeInfo,
 };
 
 /// Contains the results when hovering over an item
@@ -143,25 +143,25 @@ fn from_def_source<A, D>(db: &RootDatabase, def: D) -> Option<String>
 }
 
 pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> {
-    let file = db.parse_or_expand(position.file_id.into())?;
+    let sema = Semantics::new(db);
+    let file = sema.parse(position.file_id).syntax().clone();
     let token = pick_best(file.token_at_offset(position.offset))?;
-    let token = descend_into_macros(db, position.file_id, token);
+    let token = sema.descend_into_macros(token);
 
     let mut res = HoverResult::new();
 
-    let mut sb = SourceBinder::new(db);
     if let Some((node, name_kind)) = match_ast! {
-        match (token.value.parent()) {
+        match (token.parent()) {
             ast::NameRef(name_ref) => {
-                classify_name_ref(&mut sb, token.with_value(&name_ref)).map(|d| (name_ref.syntax().clone(), d))
+                classify_name_ref(&sema, &name_ref).map(|d| (name_ref.syntax().clone(), d))
             },
             ast::Name(name) => {
-                classify_name(&mut sb, token.with_value(&name)).map(|d| (name.syntax().clone(), d))
+                classify_name(&sema, &name).map(|d| (name.syntax().clone(), d.definition()))
             },
             _ => None,
         }
     } {
-        let range = original_range(db, token.with_value(&node)).range;
+        let range = sema.original_range(&node).range;
         res.extend(hover_text_from_name_kind(db, name_kind));
 
         if !res.is_empty() {
@@ -170,17 +170,28 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
     }
 
     let node = token
-        .value
         .ancestors()
         .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?;
 
-    let frange = original_range(db, token.with_value(&node));
-    res.extend(type_of(db, frange).map(rust_code_markup));
-    if res.is_empty() {
-        return None;
-    }
-    let range = node.text_range();
+    let ty = match_ast! {
+        match node {
+            ast::MacroCall(_it) => {
+                // If this node is a MACRO_CALL, it means that `descend_into_macros` failed to resolve.
+                // (e.g. expanding a builtin macro). So we give up here.
+                return None;
+            },
+            ast::Expr(it) => {
+                sema.type_of_expr(&it)
+            },
+            ast::Pat(it) => {
+                sema.type_of_pat(&it)
+            },
+            _ => None,
+        }
+    }?;
 
+    res.extend(Some(rust_code_markup(ty.display_truncated(db, None).to_string())));
+    let range = sema.original_range(&node).range;
     Some(RangeInfo::new(range, res))
 }
 
@@ -196,35 +207,13 @@ fn priority(n: &SyntaxToken) -> usize {
     }
 }
 
-pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
-    let parse = db.parse(frange.file_id);
-    let leaf_node = find_covering_element(parse.tree().syntax(), frange.range);
-    // if we picked identifier, expand to pattern/expression
-    let node = leaf_node
-        .ancestors()
-        .take_while(|it| it.text_range() == leaf_node.text_range())
-        .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?;
-    let analyzer =
-        hir::SourceAnalyzer::new(db, hir::InFile::new(frange.file_id.into(), &node), None);
-    let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
-    {
-        ty
-    } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, &p)) {
-        ty
-    } else {
-        return None;
-    };
-    Some(ty.display_truncated(db, None).to_string())
-}
-
 #[cfg(test)]
 mod tests {
-    use crate::mock_analysis::{
-        analysis_and_position, single_file_with_position, single_file_with_range,
-    };
     use ra_db::FileLoader;
     use ra_syntax::TextRange;
 
+    use crate::mock_analysis::{analysis_and_position, single_file_with_position};
+
     fn trim_markup(s: &str) -> &str {
         s.trim_start_matches("```rust\n").trim_end_matches("\n```")
     }
@@ -251,6 +240,11 @@ fn check_hover_result(fixture: &str, expected: &[&str]) -> String {
         content[hover.range].to_string()
     }
 
+    fn check_hover_no_result(fixture: &str) {
+        let (analysis, position) = analysis_and_position(fixture);
+        assert!(analysis.hover(position).unwrap().is_none());
+    }
+
     #[test]
     fn hover_shows_type_of_an_expression() {
         let (analysis, position) = single_file_with_position(
@@ -510,37 +504,6 @@ fn hover_for_param_edge() {
         assert_eq!(trim_markup_opt(hover.info.first()), Some("i32"));
     }
 
-    #[test]
-    fn test_type_of_for_function() {
-        let (analysis, range) = single_file_with_range(
-            "
-            pub fn foo() -> u32 { 1 };
-
-            fn main() {
-                let foo_test = <|>foo()<|>;
-            }
-            ",
-        );
-
-        let type_name = analysis.type_of(range).unwrap().unwrap();
-        assert_eq!("u32", &type_name);
-    }
-
-    #[test]
-    fn test_type_of_for_expr() {
-        let (analysis, range) = single_file_with_range(
-            "
-            fn main() {
-                let foo: usize = 1;
-                let bar = <|>1 + foo<|>;
-            }
-            ",
-        );
-
-        let type_name = analysis.type_of(range).unwrap().unwrap();
-        assert_eq!("usize", &type_name);
-    }
-
     #[test]
     fn test_hover_infer_associated_method_result() {
         let (analysis, position) = single_file_with_position(
@@ -754,6 +717,89 @@ fn foo(bar:u32) {
         assert_eq!(hover_on, "bar")
     }
 
+    #[test]
+    fn test_hover_through_expr_in_macro_recursive() {
+        let hover_on = check_hover_result(
+            "
+            //- /lib.rs
+            macro_rules! id_deep {
+                ($($tt:tt)*) => { $($tt)* }
+            }
+            macro_rules! id {
+                ($($tt:tt)*) => { id_deep!($($tt)*) }
+            }
+            fn foo(bar:u32) {
+                let a = id!(ba<|>r);
+            }
+            ",
+            &["u32"],
+        );
+
+        assert_eq!(hover_on, "bar")
+    }
+
+    #[test]
+    fn test_hover_through_func_in_macro_recursive() {
+        let hover_on = check_hover_result(
+            "
+            //- /lib.rs
+            macro_rules! id_deep {
+                ($($tt:tt)*) => { $($tt)* }
+            }
+            macro_rules! id {
+                ($($tt:tt)*) => { id_deep!($($tt)*) }
+            }
+            fn bar() -> u32 {
+                0
+            }
+            fn foo() {
+                let a = id!([0u32, bar(<|>)] );
+            }
+            ",
+            &["u32"],
+        );
+
+        assert_eq!(hover_on, "bar()")
+    }
+
+    #[test]
+    fn test_hover_through_literal_string_in_macro() {
+        let hover_on = check_hover_result(
+            r#"
+            //- /lib.rs
+            macro_rules! arr {
+                ($($tt:tt)*) => { [$($tt)*)] }
+            }
+            fn foo() {
+                let mastered_for_itunes = "";
+                let _ = arr!("Tr<|>acks", &mastered_for_itunes);
+            }
+            "#,
+            &["&str"],
+        );
+
+        assert_eq!(hover_on, "\"Tracks\"");
+    }
+
+    #[test]
+    fn test_hover_through_literal_string_in_builtin_macro() {
+        check_hover_no_result(
+            r#"
+            //- /lib.rs
+            #[rustc_builtin_macro]
+            macro_rules! assert {
+                ($cond:expr) => {{ /* compiler built-in */ }};
+                ($cond:expr,) => {{ /* compiler built-in */ }};
+                ($cond:expr, $($arg:tt)+) => {{ /* compiler built-in */ }};
+            }
+
+            fn foo() {
+                assert!("hel<|>lo");
+            }
+            "#,
+        );
+    }
+
     #[test]
     fn test_hover_non_ascii_space_doc() {
         check_hover_result(
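
With the standalone `type_of` API gone (see the `lib.rs` hunk below), hover computes and renders the type inline. A minimal sketch of that rendering step, using only calls visible in this diff:

```
use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::ast;

// Type of an expression, rendered the way hover does, with the same `None`
// truncation argument the code above passes to `display_truncated`.
fn type_markup(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<String> {
    let ty = sema.type_of_expr(expr)?;
    Some(ty.display_truncated(sema.db, None).to_string())
}
```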
index 64a2dadc85eb603757a1cf4fe65efd1d598ccd88..bf82b2a168598c17c0b3bd90ea116a0ca7e473a3 100644 (file)
@@ -1,7 +1,6 @@
 //! FIXME: write short doc here
 
-use hir::{Crate, ImplBlock, SourceBinder};
-use ra_db::SourceDatabase;
+use hir::{Crate, ImplBlock, Semantics};
 use ra_ide_db::RootDatabase;
 use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
 
@@ -11,21 +10,21 @@ pub(crate) fn goto_implementation(
     db: &RootDatabase,
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
-    let parse = db.parse(position.file_id);
-    let syntax = parse.tree().syntax().clone();
-    let mut sb = SourceBinder::new(db);
+    let sema = Semantics::new(db);
+    let source_file = sema.parse(position.file_id);
+    let syntax = source_file.syntax().clone();
 
-    let krate = sb.to_module_def(position.file_id)?.krate();
+    let krate = sema.to_module_def(position.file_id)?.krate();
 
     if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) {
         return Some(RangeInfo::new(
             nominal_def.syntax().text_range(),
-            impls_for_def(&mut sb, position, &nominal_def, krate)?,
+            impls_for_def(&sema, &nominal_def, krate)?,
         ));
     } else if let Some(trait_def) = find_node_at_offset::<ast::TraitDef>(&syntax, position.offset) {
         return Some(RangeInfo::new(
             trait_def.syntax().text_range(),
-            impls_for_trait(&mut sb, position, &trait_def, krate)?,
+            impls_for_trait(&sema, &trait_def, krate)?,
         ));
     }
 
@@ -33,49 +32,37 @@ pub(crate) fn goto_implementation(
 }
 
 fn impls_for_def(
-    sb: &mut SourceBinder<RootDatabase>,
-    position: FilePosition,
+    sema: &Semantics<RootDatabase>,
     node: &ast::NominalDef,
     krate: Crate,
 ) -> Option<Vec<NavigationTarget>> {
     let ty = match node {
-        ast::NominalDef::StructDef(def) => {
-            let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() };
-            sb.to_def(src)?.ty(sb.db)
-        }
-        ast::NominalDef::EnumDef(def) => {
-            let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() };
-            sb.to_def(src)?.ty(sb.db)
-        }
-        ast::NominalDef::UnionDef(def) => {
-            let src = hir::InFile { file_id: position.file_id.into(), value: def.clone() };
-            sb.to_def(src)?.ty(sb.db)
-        }
+        ast::NominalDef::StructDef(def) => sema.to_def(def)?.ty(sema.db),
+        ast::NominalDef::EnumDef(def) => sema.to_def(def)?.ty(sema.db),
+        ast::NominalDef::UnionDef(def) => sema.to_def(def)?.ty(sema.db),
     };
 
-    let impls = ImplBlock::all_in_crate(sb.db, krate);
+    let impls = ImplBlock::all_in_crate(sema.db, krate);
 
     Some(
         impls
             .into_iter()
-            .filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(sb.db)))
-            .map(|imp| imp.to_nav(sb.db))
+            .filter(|impl_block| ty.is_equal_for_find_impls(&impl_block.target_ty(sema.db)))
+            .map(|imp| imp.to_nav(sema.db))
             .collect(),
     )
 }
 
 fn impls_for_trait(
-    sb: &mut SourceBinder<RootDatabase>,
-    position: FilePosition,
+    sema: &Semantics<RootDatabase>,
     node: &ast::TraitDef,
     krate: Crate,
 ) -> Option<Vec<NavigationTarget>> {
-    let src = hir::InFile { file_id: position.file_id.into(), value: node.clone() };
-    let tr = sb.to_def(src)?;
+    let tr = sema.to_def(node)?;
 
-    let impls = ImplBlock::for_trait(sb.db, krate, tr);
+    let impls = ImplBlock::for_trait(sema.db, krate, tr);
 
-    Some(impls.into_iter().map(|imp| imp.to_nav(sb.db)).collect())
+    Some(impls.into_iter().map(|imp| imp.to_nav(sema.db)).collect())
 }
 
 #[cfg(test)]
index b42aa15235a98371c8fe3c8db05d4bca4e783e7c..35e3f782d1801068790c167508e584e85c6ec4fc 100644 (file)
@@ -1,12 +1,11 @@
 //! FIXME: write short doc here
 
-use hir::{Adt, HirDisplay, SourceAnalyzer, SourceBinder, Type};
-use once_cell::unsync::Lazy;
+use hir::{Adt, HirDisplay, Semantics, Type};
 use ra_ide_db::RootDatabase;
 use ra_prof::profile;
 use ra_syntax::{
     ast::{self, ArgListOwner, AstNode, TypeAscriptionOwner},
-    match_ast, SmolStr, SourceFile, SyntaxNode, TextRange,
+    match_ast, SmolStr, SyntaxNode, TextRange,
 };
 
 use crate::{FileId, FunctionSignature};
@@ -27,38 +26,36 @@ pub struct InlayHint {
 pub(crate) fn inlay_hints(
     db: &RootDatabase,
     file_id: FileId,
-    file: &SourceFile,
     max_inlay_hint_length: Option<usize>,
 ) -> Vec<InlayHint> {
-    let mut sb = SourceBinder::new(db);
+    let sema = Semantics::new(db);
+    let file = sema.parse(file_id);
     let mut res = Vec::new();
     for node in file.syntax().descendants() {
-        get_inlay_hints(&mut res, &mut sb, file_id, &node, max_inlay_hint_length);
+        get_inlay_hints(&mut res, &sema, &node, max_inlay_hint_length);
     }
     res
 }
 
 fn get_inlay_hints(
     acc: &mut Vec<InlayHint>,
-    sb: &mut SourceBinder<RootDatabase>,
-    file_id: FileId,
+    sema: &Semantics<RootDatabase>,
     node: &SyntaxNode,
     max_inlay_hint_length: Option<usize>,
 ) -> Option<()> {
     let _p = profile("get_inlay_hints");
-    let db = sb.db;
-    let analyzer = Lazy::new(move || sb.analyze(hir::InFile::new(file_id.into(), node), None));
+    let db = sema.db;
     match_ast! {
         match node {
             ast::CallExpr(it) => {
-                get_param_name_hints(acc, db, &analyzer, ast::Expr::from(it));
+                get_param_name_hints(acc, sema, ast::Expr::from(it));
             },
             ast::MethodCallExpr(it) => {
-                get_param_name_hints(acc, db, &analyzer, ast::Expr::from(it));
+                get_param_name_hints(acc, sema, ast::Expr::from(it));
             },
             ast::BindPat(it) => {
                 let pat = ast::Pat::from(it.clone());
-                let ty = analyzer.type_of_pat(db, &pat)?;
+                let ty = sema.type_of_pat(&pat)?;
 
                 if should_not_display_type_hint(db, &it, &ty) {
                     return None;
@@ -125,8 +122,7 @@ fn should_not_display_type_hint(db: &RootDatabase, bind_pat: &ast::BindPat, pat_
 
 fn get_param_name_hints(
     acc: &mut Vec<InlayHint>,
-    db: &RootDatabase,
-    analyzer: &SourceAnalyzer,
+    sema: &Semantics<RootDatabase>,
     expr: ast::Expr,
 ) -> Option<()> {
     let args = match &expr {
@@ -138,7 +134,7 @@ fn get_param_name_hints(
     // we need the args len to determine whether or not to skip the &self parameter
     .collect::<Vec<_>>();
 
-    let fn_signature = get_fn_signature(db, analyzer, &expr)?;
+    let fn_signature = get_fn_signature(sema, &expr)?;
     let n_params_to_skip =
         if fn_signature.has_self_param && fn_signature.parameter_names.len() > args.len() {
             1
@@ -184,28 +180,26 @@ fn should_show_param_hint(
     true
 }
 
-fn get_fn_signature(
-    db: &RootDatabase,
-    analyzer: &SourceAnalyzer,
-    expr: &ast::Expr,
-) -> Option<FunctionSignature> {
+fn get_fn_signature(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<FunctionSignature> {
     match expr {
         ast::Expr::CallExpr(expr) => {
             // FIXME: Type::as_callable is broken for closures
-            let callable_def = analyzer.type_of(db, &expr.expr()?)?.as_callable()?;
+            let callable_def = sema.type_of_expr(&expr.expr()?)?.as_callable()?;
             match callable_def {
                 hir::CallableDef::FunctionId(it) => {
-                    Some(FunctionSignature::from_hir(db, it.into()))
+                    Some(FunctionSignature::from_hir(sema.db, it.into()))
+                }
+                hir::CallableDef::StructId(it) => {
+                    FunctionSignature::from_struct(sema.db, it.into())
                 }
-                hir::CallableDef::StructId(it) => FunctionSignature::from_struct(db, it.into()),
                 hir::CallableDef::EnumVariantId(it) => {
-                    FunctionSignature::from_enum_variant(db, it.into())
+                    FunctionSignature::from_enum_variant(sema.db, it.into())
                 }
             }
         }
         ast::Expr::MethodCallExpr(expr) => {
-            let fn_def = analyzer.resolve_method_call(&expr)?;
-            Some(FunctionSignature::from_hir(db, fn_def))
+            let fn_def = sema.resolve_method_call(&expr)?;
+            Some(FunctionSignature::from_hir(sema.db, fn_def))
         }
         _ => None,
     }
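
Same migration, one wrinkle: the per-node `Lazy<SourceAnalyzer>` is gone, since a single `Semantics` can simply be passed by shared reference. The signature lookup now starts from `type_of_expr(..).as_callable()`; a hedged sketch of that first step, using only calls visible in this hunk:

```
use hir::Semantics;
use ra_ide_db::RootDatabase;
use ra_syntax::ast;

// Resolve the callee of a plain call expression to its callable definition.
// (Method calls go through `resolve_method_call` instead, as above. The
// upstream FIXME notes that `as_callable` is broken for closures.)
fn callee_def(
    sema: &Semantics<RootDatabase>,
    call: &ast::CallExpr,
) -> Option<hir::CallableDef> {
    sema.type_of_expr(&call.expr()?)?.as_callable()
}
```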
index 82e10bc7e6ad199b1a21f83791fe5a5a4cebc660..f61028f78c0b18377793e18cdc6082725b5e2c4b 100644 (file)
@@ -35,7 +35,6 @@
 mod matching_brace;
 mod display;
 mod inlay_hints;
-mod expand;
 mod expand_macro;
 mod ssr;
 
@@ -75,7 +74,9 @@
     runnables::{Runnable, RunnableKind, TestId},
     source_change::{FileSystemEdit, SourceChange, SourceFileEdit},
     ssr::SsrError,
-    syntax_highlighting::{tags, HighlightedRange},
+    syntax_highlighting::{
+        Highlight, HighlightModifier, HighlightModifiers, HighlightTag, HighlightedRange,
+    },
 };
 
 pub use hir::Documentation;
@@ -319,9 +320,7 @@ pub fn inlay_hints(
         file_id: FileId,
         max_inlay_hint_length: Option<usize>,
     ) -> Cancelable<Vec<InlayHint>> {
-        self.with_db(|db| {
-            inlay_hints::inlay_hints(db, file_id, &db.parse(file_id).tree(), max_inlay_hint_length)
-        })
+        self.with_db(|db| inlay_hints::inlay_hints(db, file_id, max_inlay_hint_length))
     }
 
     /// Returns the set of folding ranges.
@@ -425,9 +424,14 @@ pub fn runnables(&self, file_id: FileId) -> Cancelable<Vec<Runnable>> {
         self.with_db(|db| runnables::runnables(db, file_id))
     }
 
-    /// Computes syntax highlighting for the given file.
+    /// Computes syntax highlighting for the given file
     pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> {
-        self.with_db(|db| syntax_highlighting::highlight(db, file_id))
+        self.with_db(|db| syntax_highlighting::highlight(db, file_id, None))
+    }
+
+    /// Computes syntax highlighting for the given file range.
+    pub fn highlight_range(&self, frange: FileRange) -> Cancelable<Vec<HighlightedRange>> {
+        self.with_db(|db| syntax_highlighting::highlight(db, frange.file_id, Some(frange.range)))
     }
 
     /// Computes syntax highlighting for the given file.
@@ -451,11 +455,6 @@ pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> {
         self.with_db(|db| diagnostics::diagnostics(db, file_id))
     }
 
-    /// Computes the type of the expression at the given position.
-    pub fn type_of(&self, frange: FileRange) -> Cancelable<Option<String>> {
-        self.with_db(|db| hover::type_of(db, frange))
-    }
-
     /// Returns the edit required to rename reference at the position to the new
     /// name.
     pub fn rename(
index bcb67e3737c9bb68e5f0267af7f993ce4dbb3fd5..7b8b727b4aec8da59a646aa1fddacd42f85cf88e 100644 (file)
@@ -11,4 +11,5 @@
     call_info_bad_offset
     dont_complete_current_use
     test_resolve_parent_module_on_module_decl
+    search_filters_by_range
 );
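
`search_filters_by_range` is a coverage marker, not a test name by itself: the references diff below calls `tested_by!(search_filters_by_range)` on the early-continue path, and the matching test opens with `covers!(search_filters_by_range)`, failing if that path never runs. A self-contained sketch of the mechanism using a plain atomic counter:

```
use std::sync::atomic::{AtomicUsize, Ordering};

static MARK: AtomicUsize = AtomicUsize::new(0);

// The code under test bumps the marker on the interesting branch,
// standing in for `tested_by!(search_filters_by_range)`.
fn search(offset: u32, range: (u32, u32)) -> Option<u32> {
    if offset < range.0 || offset > range.1 {
        MARK.fetch_add(1, Ordering::SeqCst);
        return None;
    }
    Some(offset)
}

fn main() {
    // The test records the marker and asserts it moved, standing in for
    // `covers!(...)`: it fails if the guarded branch was never reached.
    let before = MARK.load(Ordering::SeqCst);
    assert_eq!(search(100, (0, 50)), None);
    assert!(MARK.load(Ordering::SeqCst) > before);
}
```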
index 081aaee8c92270c732483b407b9ffbe0636615f8..f4cd6deb7747e2d05328565fa3f287a9b1918004 100644 (file)
@@ -124,28 +124,28 @@ pub fn analysis(self) -> Analysis {
 }
 
 /// Creates analysis from a multi-file fixture, returns positions marked with <|>.
-pub fn analysis_and_position(fixture: &str) -> (Analysis, FilePosition) {
-    let (mock, position) = MockAnalysis::with_files_and_position(fixture);
+pub fn analysis_and_position(ra_fixture: &str) -> (Analysis, FilePosition) {
+    let (mock, position) = MockAnalysis::with_files_and_position(ra_fixture);
     (mock.analysis(), position)
 }
 
 /// Creates analysis for a single file.
-pub fn single_file(code: &str) -> (Analysis, FileId) {
+pub fn single_file(ra_fixture: &str) -> (Analysis, FileId) {
     let mut mock = MockAnalysis::new();
-    let file_id = mock.add_file("/main.rs", code);
+    let file_id = mock.add_file("/main.rs", ra_fixture);
     (mock.analysis(), file_id)
 }
 
 /// Creates analysis for a single file, returns position marked with <|>.
-pub fn single_file_with_position(code: &str) -> (Analysis, FilePosition) {
+pub fn single_file_with_position(ra_fixture: &str) -> (Analysis, FilePosition) {
     let mut mock = MockAnalysis::new();
-    let pos = mock.add_file_with_position("/main.rs", code);
+    let pos = mock.add_file_with_position("/main.rs", ra_fixture);
     (mock.analysis(), pos)
 }
 
 /// Creates analysis for a single file, returns range marked with a pair of <|>.
-pub fn single_file_with_range(code: &str) -> (Analysis, FileRange) {
+pub fn single_file_with_range(ra_fixture: &str) -> (Analysis, FileRange) {
     let mut mock = MockAnalysis::new();
-    let pos = mock.add_file_with_range("/main.rs", code);
+    let pos = mock.add_file_with_range("/main.rs", ra_fixture);
     (mock.analysis(), pos)
 }
index af14d6ab3a620f8771bb537b7e5f651dd2e908a6..2c4bdb039361594b9e28b5c1174596381a8ba8b4 100644 (file)
@@ -1,6 +1,7 @@
 //! FIXME: write short doc here
 
-use ra_db::{CrateId, FileId, FilePosition, SourceDatabase};
+use hir::Semantics;
+use ra_db::{CrateId, FileId, FilePosition};
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
     algo::find_node_at_offset,
 /// This returns `Vec` because a module may be included from several places. We
 /// don't handle this case yet though, so the Vec has length at most one.
 pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> {
-    let mut sb = hir::SourceBinder::new(db);
-    let parse = db.parse(position.file_id);
+    let sema = Semantics::new(db);
+    let source_file = sema.parse(position.file_id);
 
-    let mut module = find_node_at_offset::<ast::Module>(parse.tree().syntax(), position.offset);
+    let mut module = find_node_at_offset::<ast::Module>(source_file.syntax(), position.offset);
 
     // If cursor is literally on `mod foo`, go to the grandpa.
     if let Some(m) = &module {
@@ -30,8 +31,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
     }
 
     let module = match module {
-        Some(module) => sb.to_def(hir::InFile::new(position.file_id.into(), module)),
-        None => sb.to_module_def(position.file_id),
+        Some(module) => sema.to_def(&module),
+        None => sema.to_module_def(position.file_id),
     };
     let module = match module {
         None => return Vec::new(),
@@ -43,8 +44,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
 
 /// Returns `Vec` for the same reason as `parent_module`
 pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
-    let mut sb = hir::SourceBinder::new(db);
-    let module = match sb.to_module_def(file_id) {
+    let sema = Semantics::new(db);
+    let module = match sema.to_module_def(file_id) {
         Some(it) => it,
         None => return Vec::new(),
     };
index aadc2dbcb0e2c28f1dd7f8c85944da07cfcab95d..f763013aecc8c11d1d9a6c384766daf1a5aaf835 100644 (file)
 mod rename;
 mod search_scope;
 
-use crate::expand::descend_into_macros_with_analyzer;
-use hir::{InFile, SourceBinder};
+use hir::Semantics;
 use once_cell::unsync::Lazy;
-use ra_db::{SourceDatabase, SourceDatabaseExt};
+use ra_db::SourceDatabaseExt;
 use ra_ide_db::RootDatabase;
 use ra_prof::profile;
 use ra_syntax::{
     algo::find_node_at_offset,
     ast::{self, NameOwner},
-    match_ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset,
+    match_ast, AstNode, SyntaxKind, SyntaxNode, TextRange, TextUnit, TokenAtOffset,
 };
+use test_utils::tested_by;
 
 use crate::{display::TryToNav, FilePosition, FileRange, NavigationTarget, RangeInfo};
 
-pub(crate) use self::{
-    classify::{classify_name, classify_name_ref},
-    rename::rename,
-};
-pub(crate) use ra_ide_db::defs::NameDefinition;
+pub(crate) use self::{classify::classify_name_ref, rename::rename};
+pub(crate) use ra_ide_db::defs::{classify_name, NameDefinition};
 
 pub use self::search_scope::SearchScope;
 
@@ -114,8 +111,8 @@ pub(crate) fn find_all_refs(
     position: FilePosition,
     search_scope: Option<SearchScope>,
 ) -> Option<RangeInfo<ReferenceSearchResult>> {
-    let parse = db.parse(position.file_id);
-    let syntax = parse.tree().syntax().clone();
+    let sema = Semantics::new(db);
+    let syntax = sema.parse(position.file_id).syntax().clone();
 
     let (opt_name, search_kind) =
         if let Some(name) = get_struct_def_name_for_struc_litetal_search(&syntax, position) {
@@ -124,7 +121,7 @@ pub(crate) fn find_all_refs(
             (find_node_at_offset::<ast::Name>(&syntax, position.offset), ReferenceKind::Other)
         };
 
-    let RangeInfo { range, info: (name, def) } = find_name(db, &syntax, position, opt_name)?;
+    let RangeInfo { range, info: (name, def) } = find_name(&sema, &syntax, position, opt_name)?;
     let declaration = def.try_to_nav(db)?;
 
     let search_scope = {
@@ -152,19 +149,18 @@ pub(crate) fn find_all_refs(
 }
 
 fn find_name(
-    db: &RootDatabase,
+    sema: &Semantics<RootDatabase>,
     syntax: &SyntaxNode,
     position: FilePosition,
     opt_name: Option<ast::Name>,
 ) -> Option<RangeInfo<(String, NameDefinition)>> {
-    let mut sb = SourceBinder::new(db);
     if let Some(name) = opt_name {
-        let def = classify_name(&mut sb, InFile::new(position.file_id.into(), &name))?;
+        let def = classify_name(sema, &name)?.definition();
         let range = name.syntax().text_range();
         return Some(RangeInfo::new(range, (name.text().to_string(), def)));
     }
     let name_ref = find_node_at_offset::<ast::NameRef>(&syntax, position.offset)?;
-    let def = classify_name_ref(&mut sb, InFile::new(position.file_id.into(), &name_ref))?;
+    let def = classify_name_ref(sema, &name_ref)?;
     let range = name_ref.syntax().text_range();
     Some(RangeInfo::new(range, (name_ref.text().to_string(), def)))
 }
@@ -182,64 +178,53 @@ fn process_definition(
 
     for (file_id, search_range) in scope {
         let text = db.file_text(file_id);
+        let search_range =
+            search_range.unwrap_or(TextRange::offset_len(0.into(), TextUnit::of_str(&text)));
 
-        let parse = Lazy::new(|| SourceFile::parse(&text));
-        let mut sb = Lazy::new(|| SourceBinder::new(db));
-        let mut analyzer = None;
+        let sema = Semantics::new(db);
+        let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());
 
         for (idx, _) in text.match_indices(pat) {
             let offset = TextUnit::from_usize(idx);
+            if !search_range.contains_inclusive(offset) {
+                tested_by!(search_filters_by_range);
+                continue;
+            }
 
-            let (name_ref, range) = if let Some(name_ref) =
-                find_node_at_offset::<ast::NameRef>(parse.tree().syntax(), offset)
-            {
-                let range = name_ref.syntax().text_range();
-                (InFile::new(file_id.into(), name_ref), range)
-            } else {
-                // Handle macro token cases
-                let t = match parse.tree().syntax().token_at_offset(offset) {
-                    TokenAtOffset::None => continue,
-                    TokenAtOffset::Single(t) => t,
-                    TokenAtOffset::Between(_, t) => t,
-                };
-                let range = t.text_range();
-                let analyzer = analyzer.get_or_insert_with(|| {
-                    sb.analyze(InFile::new(file_id.into(), parse.tree().syntax()), None)
-                });
-                let expanded = descend_into_macros_with_analyzer(
-                    db,
-                    &analyzer,
-                    InFile::new(file_id.into(), t),
-                );
-                if let Some(token) = ast::NameRef::cast(expanded.value.parent()) {
-                    (expanded.with_value(token), range)
+            let name_ref =
+                if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&tree, offset) {
+                    name_ref
                 } else {
-                    continue;
-                }
-            };
+                    // Handle macro token cases
+                    let token = match tree.token_at_offset(offset) {
+                        TokenAtOffset::None => continue,
+                        TokenAtOffset::Single(t) => t,
+                        TokenAtOffset::Between(_, t) => t,
+                    };
+                    let expanded = sema.descend_into_macros(token);
+                    match ast::NameRef::cast(expanded.parent()) {
+                        Some(name_ref) => name_ref,
+                        _ => continue,
+                    }
+                };
 
-            if let Some(search_range) = search_range {
-                if !range.is_subrange(&search_range) {
-                    continue;
-                }
-            }
             // FIXME: reuse sb
             // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098
 
-            if let Some(d) = classify_name_ref(&mut sb, name_ref.as_ref()) {
+            if let Some(d) = classify_name_ref(&sema, &name_ref) {
                 if d == def {
-                    let kind = if is_record_lit_name_ref(&name_ref.value)
-                        || is_call_expr_name_ref(&name_ref.value)
-                    {
-                        ReferenceKind::StructLiteral
-                    } else {
-                        ReferenceKind::Other
-                    };
-
+                    let kind =
+                        if is_record_lit_name_ref(&name_ref) || is_call_expr_name_ref(&name_ref) {
+                            ReferenceKind::StructLiteral
+                        } else {
+                            ReferenceKind::Other
+                        };
+
+                    let file_range = sema.original_range(name_ref.syntax());
                     refs.push(Reference {
-                        file_range: FileRange { file_id, range },
+                        file_range,
                         kind,
-                        access: reference_access(&d, &name_ref.value),
+                        access: reference_access(&d, &name_ref),
                     });
                 }
             }
@@ -348,6 +333,8 @@ fn is_call_expr_name_ref(name_ref: &ast::NameRef) -> bool {
 
 #[cfg(test)]
 mod tests {
+    use test_utils::covers;
+
     use crate::{
         mock_analysis::{analysis_and_position, single_file_with_position, MockAnalysis},
         Declaration, Reference, ReferenceSearchResult, SearchScope,
@@ -455,6 +442,27 @@ fn main() {
         );
     }
 
+    #[test]
+    fn search_filters_by_range() {
+        covers!(search_filters_by_range);
+        let code = r#"
+            fn foo() {
+                let spam<|> = 92;
+                spam + spam
+            }
+            fn bar() {
+                let spam = 92;
+                spam + spam
+            }
+        "#;
+        let refs = get_all_refs(code);
+        check_result(
+            refs,
+            "spam BIND_PAT FileId(1) [44; 48) Other Write",
+            &["FileId(1) [71; 75) Other Read", "FileId(1) [78; 82) Other Read"],
+        );
+    }
+
     #[test]
     fn test_find_all_refs_for_param_inside() {
         let code = r#"
index 478e18871205d82a588052286ec07e70c4af3ff6..91b21429a65b0211623a698d8a0def7d42ddaa9b 100644 (file)
@@ -1,34 +1,32 @@
 //! Functions that are used to classify an element from its definition or reference.
 
-use hir::{InFile, PathResolution, SourceBinder};
+use hir::{PathResolution, Semantics};
+use ra_ide_db::defs::NameDefinition;
+use ra_ide_db::RootDatabase;
 use ra_prof::profile;
 use ra_syntax::{ast, AstNode};
 use test_utils::tested_by;
 
-use super::NameDefinition;
-use ra_ide_db::RootDatabase;
-
-pub use ra_ide_db::defs::{classify_name, from_module_def, from_struct_field};
+pub use ra_ide_db::defs::{from_module_def, from_struct_field};
 
 pub(crate) fn classify_name_ref(
-    sb: &mut SourceBinder<RootDatabase>,
-    name_ref: InFile<&ast::NameRef>,
+    sema: &Semantics<RootDatabase>,
+    name_ref: &ast::NameRef,
 ) -> Option<NameDefinition> {
     let _p = profile("classify_name_ref");
 
-    let parent = name_ref.value.syntax().parent()?;
-    let analyzer = sb.analyze(name_ref.map(|it| it.syntax()), None);
+    let parent = name_ref.syntax().parent()?;
 
     if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
         tested_by!(goto_def_for_methods);
-        if let Some(func) = analyzer.resolve_method_call(&method_call) {
+        if let Some(func) = sema.resolve_method_call(&method_call) {
             return Some(from_module_def(func.into()));
         }
     }
 
     if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
         tested_by!(goto_def_for_fields);
-        if let Some(field) = analyzer.resolve_field(&field_expr) {
+        if let Some(field) = sema.resolve_field(&field_expr) {
             return Some(from_struct_field(field));
         }
     }
@@ -36,22 +34,20 @@ pub(crate) fn classify_name_ref(
     if let Some(record_field) = ast::RecordField::cast(parent.clone()) {
         tested_by!(goto_def_for_record_fields);
         tested_by!(goto_def_for_field_init_shorthand);
-        if let Some(field_def) = analyzer.resolve_record_field(&record_field) {
+        if let Some(field_def) = sema.resolve_record_field(&record_field) {
             return Some(from_struct_field(field_def));
         }
     }
 
     if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) {
         tested_by!(goto_def_for_macros);
-        if let Some(macro_def) =
-            analyzer.resolve_macro_call(sb.db, name_ref.with_value(&macro_call))
-        {
+        if let Some(macro_def) = sema.resolve_macro_call(&macro_call) {
             return Some(NameDefinition::Macro(macro_def));
         }
     }
 
-    let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?;
-    let resolved = analyzer.resolve_path(sb.db, &path)?;
+    let path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
+    let resolved = sema.resolve_path(&path)?;
     let res = match resolved {
         PathResolution::Def(def) => from_module_def(def),
         PathResolution::AssocItem(item) => {
index bdb90020bbe4329cd41f676ba693ed55f381f869..5b4bcf434e40bda02d6cdf01e5249abe703889e1 100644 (file)
@@ -1,7 +1,7 @@
 //! FIXME: write short doc here
 
-use hir::ModuleSource;
-use ra_db::{RelativePath, RelativePathBuf, SourceDatabase, SourceDatabaseExt};
+use hir::{ModuleSource, Semantics};
+use ra_db::{RelativePath, RelativePathBuf, SourceDatabaseExt};
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
     algo::find_node_at_offset, ast, lex_single_valid_syntax_kind, AstNode, SyntaxKind, SyntaxNode,
@@ -24,15 +24,16 @@ pub(crate) fn rename(
         _ => return None,
     }
 
-    let parse = db.parse(position.file_id);
+    let sema = Semantics::new(db);
+    let source_file = sema.parse(position.file_id);
     if let Some((ast_name, ast_module)) =
-        find_name_and_module_at_offset(parse.tree().syntax(), position)
+        find_name_and_module_at_offset(source_file.syntax(), position)
     {
         let range = ast_name.syntax().text_range();
-        rename_mod(db, &ast_name, &ast_module, position, new_name)
+        rename_mod(&sema, &ast_name, &ast_module, position, new_name)
             .map(|info| RangeInfo::new(range, info))
     } else {
-        rename_reference(db, position, new_name)
+        rename_reference(sema.db, position, new_name)
     }
 }
 
@@ -54,7 +55,7 @@ fn source_edit_from_file_id_range(
 }
 
 fn rename_mod(
-    db: &RootDatabase,
+    sema: &Semantics<RootDatabase>,
     ast_name: &ast::Name,
     ast_module: &ast::Module,
     position: FilePosition,
@@ -62,13 +63,12 @@ fn rename_mod(
 ) -> Option<SourceChange> {
     let mut source_file_edits = Vec::new();
     let mut file_system_edits = Vec::new();
-    let module_src = hir::InFile { file_id: position.file_id.into(), value: ast_module.clone() };
-    if let Some(module) = hir::SourceBinder::new(db).to_def(module_src) {
-        let src = module.definition_source(db);
-        let file_id = src.file_id.original_file(db);
+    if let Some(module) = sema.to_def(ast_module) {
+        let src = module.definition_source(sema.db);
+        let file_id = src.file_id.original_file(sema.db);
         match src.value {
             ModuleSource::SourceFile(..) => {
-                let mod_path: RelativePathBuf = db.file_relative_path(file_id);
+                let mod_path: RelativePathBuf = sema.db.file_relative_path(file_id);
                 // mod is defined in path/to/dir/mod.rs
                 let dst_path = if mod_path.file_stem() == Some("mod") {
                     mod_path
@@ -82,7 +82,7 @@ fn rename_mod(
                 if let Some(path) = dst_path {
                     let move_file = FileSystemEdit::MoveFile {
                         src: file_id,
-                        dst_source_root: db.file_source_root(position.file_id),
+                        dst_source_root: sema.db.file_source_root(position.file_id),
                         dst_path: path,
                     };
                     file_system_edits.push(move_file);
@@ -98,7 +98,7 @@ fn rename_mod(
     };
     source_file_edits.push(edit);
 
-    if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(db, position, None) {
+    if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(sema.db, position, None) {
         let ref_edits = refs.references.into_iter().map(|reference| {
             source_edit_from_file_id_range(
                 reference.file_range.file_id,
index be2a67d0aeff81bc0f07ac3fa8e3808ebabce1f1..74877e90fec5d887fbf14cd3a59be292bc150530 100644 (file)
@@ -1,8 +1,7 @@
 //! FIXME: write short doc here
 
-use hir::{InFile, SourceBinder};
+use hir::Semantics;
 use itertools::Itertools;
-use ra_db::SourceDatabase;
 use ra_ide_db::RootDatabase;
 use ra_syntax::{
     ast::{self, AstNode, AttrsOwner, ModuleItemOwner, NameOwner},
@@ -42,46 +41,33 @@ pub enum RunnableKind {
 }
 
 pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
-    let parse = db.parse(file_id);
-    let mut sb = SourceBinder::new(db);
-    parse.tree().syntax().descendants().filter_map(|i| runnable(db, &mut sb, file_id, i)).collect()
+    let sema = Semantics::new(db);
+    let source_file = sema.parse(file_id);
+    source_file.syntax().descendants().filter_map(|i| runnable(&sema, i)).collect()
 }
 
-fn runnable(
-    db: &RootDatabase,
-    source_binder: &mut SourceBinder<RootDatabase>,
-    file_id: FileId,
-    item: SyntaxNode,
-) -> Option<Runnable> {
+fn runnable(sema: &Semantics<RootDatabase>, item: SyntaxNode) -> Option<Runnable> {
     match_ast! {
         match item {
-            ast::FnDef(it) => { runnable_fn(db, source_binder, file_id, it) },
-            ast::Module(it) => { runnable_mod(db, source_binder, file_id, it) },
-            _ => { None },
+            ast::FnDef(it) => { runnable_fn(sema, it) },
+            ast::Module(it) => { runnable_mod(sema, it) },
+            _ => None,
         }
     }
 }
 
-fn runnable_fn(
-    db: &RootDatabase,
-    source_binder: &mut SourceBinder<RootDatabase>,
-    file_id: FileId,
-    fn_def: ast::FnDef,
-) -> Option<Runnable> {
+fn runnable_fn(sema: &Semantics<RootDatabase>, fn_def: ast::FnDef) -> Option<Runnable> {
     let name_string = fn_def.name()?.text().to_string();
 
     let kind = if name_string == "main" {
         RunnableKind::Bin
     } else {
-        let test_id = if let Some(module) = source_binder
-            .to_def(InFile::new(file_id.into(), fn_def.clone()))
-            .map(|def| def.module(db))
-        {
+        let test_id = if let Some(module) = sema.to_def(&fn_def).map(|def| def.module(sema.db)) {
             let path = module
-                .path_to_root(db)
+                .path_to_root(sema.db)
                 .into_iter()
                 .rev()
-                .filter_map(|it| it.name(db))
+                .filter_map(|it| it.name(sema.db))
                 .map(|name| name.to_string())
                 .chain(std::iter::once(name_string))
                 .join("::");
@@ -115,12 +101,7 @@ fn has_test_related_attribute(fn_def: &ast::FnDef) -> bool {
         .any(|attribute_text| attribute_text.contains("test"))
 }
 
-fn runnable_mod(
-    db: &RootDatabase,
-    source_binder: &mut SourceBinder<RootDatabase>,
-    file_id: FileId,
-    module: ast::Module,
-) -> Option<Runnable> {
+fn runnable_mod(sema: &Semantics<RootDatabase>, module: ast::Module) -> Option<Runnable> {
     let has_test_function = module
         .item_list()?
         .items()
@@ -133,9 +114,10 @@ fn runnable_mod(
         return None;
     }
     let range = module.syntax().text_range();
-    let module = source_binder.to_def(InFile::new(file_id.into(), module))?;
+    let module = sema.to_def(&module)?;
 
-    let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::");
+    let path =
+        module.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::");
     Some(Runnable { range, kind: RunnableKind::TestMod { path } })
 }
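
This pattern repeats throughout the commit: every helper that used to thread `db`, a mutable `SourceBinder`, and a `file_id` through each call now takes a single `&Semantics<RootDatabase>`, and `sema.parse(file_id)` replaces both `db.parse(file_id)` and the explicit `InFile` wrapping. A minimal sketch of the resulting calling convention, assuming only the crate context visible in this diff (`RootDatabase`, `FileId`, `itertools`):

```
use hir::Semantics;
use itertools::Itertools;
use ra_db::FileId;
use ra_ide_db::RootDatabase;
use ra_syntax::{ast, AstNode};

// Collect the module path ("foo::bar::tests") of every function in a file.
fn fn_module_paths(db: &RootDatabase, file_id: FileId) -> Vec<String> {
    let sema = Semantics::new(db);         // one context object instead of db + SourceBinder
    let source_file = sema.parse(file_id); // sema remembers which file the tree came from
    source_file
        .syntax()
        .descendants()
        .filter_map(ast::FnDef::cast)
        .filter_map(|fn_def| sema.to_def(&fn_def)) // Option<hir::Function>, no InFile needed
        .map(|func| {
            func.module(sema.db)
                .path_to_root(sema.db)
                .into_iter()
                .rev()
                .filter_map(|it| it.name(sema.db))
                .map(|name| name.to_string())
                .join("::")
        })
        .collect()
}
```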
 
index a02dbaf2fe91bf18fb3958f19f487fc22e5abaae..495b07f690e74deb917a52621f0a821312269aae 100644 (file)
@@ -3,70 +3,78 @@
 body                { margin: 0; }
 pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
 
+.lifetime           { color: #DFAF8F; font-style: italic; }
 .comment            { color: #7F9F7F; }
-.string             { color: #CC9393; }
+.struct, .enum      { color: #7CB8BB; }
+.enum_variant       { color: #BDE0F3; }
+.string_literal     { color: #CC9393; }
 .field              { color: #94BFF3; }
 .function           { color: #93E0E3; }
 .parameter          { color: #94BFF3; }
 .text               { color: #DCDCCC; }
 .type               { color: #7CB8BB; }
-.type\.builtin     { color: #8CD0D3; }
-.type\.param       { color: #20999D; }
+.builtin_type       { color: #8CD0D3; }
+.type_param         { color: #DFAF8F; }
 .attribute          { color: #94BFF3; }
-.literal            { color: #BFEBBF; }
-.literal\.numeric  { color: #6A8759; }
+.numeric_literal    { color: #BFEBBF; }
 .macro              { color: #94BFF3; }
 .module             { color: #AFD8AF; }
 .variable           { color: #DCDCCC; }
-.variable\.mut     { color: #DCDCCC; text-decoration: underline; }
+.mutable            { text-decoration: underline; }
 
-.keyword            { color: #F0DFAF; }
-.keyword\.unsafe   { color: #DFAF8F; }
-.keyword\.control  { color: #F0DFAF; font-weight: bold; }
+.keyword            { color: #F0DFAF; font-weight: bold; }
+.keyword.unsafe     { color: #BC8383; font-weight: bold; }
+.control            { font-style: italic; }
 </style>
 <pre><code><span class="attribute">#</span><span class="attribute">[</span><span class="attribute">derive</span><span class="attribute">(</span><span class="attribute">Clone</span><span class="attribute">,</span><span class="attribute"> </span><span class="attribute">Debug</span><span class="attribute">)</span><span class="attribute">]</span>
-<span class="keyword">struct</span> <span class="type">Foo</span> {
-    <span class="keyword">pub</span> <span class="field">x</span>: <span class="type.builtin">i32</span>,
-    <span class="keyword">pub</span> <span class="field">y</span>: <span class="type.builtin">i32</span>,
+<span class="keyword">struct</span> <span class="struct declaration">Foo</span> {
+    <span class="keyword">pub</span> <span class="field declaration">x</span>: <span class="builtin_type">i32</span>,
+    <span class="keyword">pub</span> <span class="field declaration">y</span>: <span class="builtin_type">i32</span>,
 }
 
-<span class="keyword">fn</span> <span class="function">foo</span>&lt;<span class="type.param">T</span>&gt;() -&gt; <span class="type.param">T</span> {
-    <span class="macro">unimplemented</span><span class="macro">!</span>();
-    <span class="function">foo</span>::&lt;<span class="type.builtin">i32</span>&gt;();
+<span class="keyword">fn</span> <span class="function declaration">foo</span>&lt;<span class="lifetime declaration">'a</span>, <span class="type_param declaration">T</span>&gt;() -&gt; <span class="type_param">T</span> {
+    <span class="function">foo</span>::&lt;<span class="lifetime">'a</span>, <span class="builtin_type">i32</span>&gt;()
 }
 
 <span class="macro">macro_rules</span><span class="macro">!</span> def_fn {
     ($($tt:tt)*) =&gt; {$($tt)*}
 }
 
-<span class="macro">def_fn</span><span class="macro">!</span>{
-    <span class="keyword">fn</span> <span class="function">bar</span>() -&gt; <span class="type.builtin">u32</span> {
-        <span class="literal.numeric">100</span>
+<span class="macro">def_fn</span><span class="macro">!</span> {
+    <span class="keyword">fn</span> <span class="function declaration">bar</span>() -&gt; <span class="builtin_type">u32</span> {
+        <span class="numeric_literal">100</span>
     }
 }
 
 <span class="comment">// comment</span>
-<span class="keyword">fn</span> <span class="function">main</span>() {
-    <span class="macro">println</span><span class="macro">!</span>(<span class="string">"Hello, {}!"</span>, <span class="literal.numeric">92</span>);
+<span class="keyword">fn</span> <span class="function declaration">main</span>() {
+    <span class="macro">println</span><span class="macro">!</span>(<span class="string_literal">"Hello, {}!"</span>, <span class="numeric_literal">92</span>);
 
-    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut">vec</span> = Vec::new();
-    <span class="keyword.control">if</span> <span class="keyword">true</span> {
-        <span class="keyword">let</span> <span class="variable">x</span> = <span class="literal.numeric">92</span>;
-        <span class="variable.mut">vec</span>.push(<span class="type">Foo</span> { <span class="field">x</span>, <span class="field">y</span>: <span class="literal.numeric">1</span> });
+    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">vec</span> = Vec::new();
+    <span class="keyword control">if</span> <span class="keyword">true</span> {
+        <span class="keyword">let</span> <span class="variable declaration">x</span> = <span class="numeric_literal">92</span>;
+        <span class="variable mutable">vec</span>.push(<span class="struct">Foo</span> { <span class="field">x</span>, <span class="field">y</span>: <span class="numeric_literal">1</span> });
     }
-    <span class="keyword.unsafe">unsafe</span> { <span class="variable.mut">vec</span>.set_len(<span class="literal.numeric">0</span>); }
+    <span class="keyword unsafe">unsafe</span> { <span class="variable mutable">vec</span>.set_len(<span class="numeric_literal">0</span>); }
 
-    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut">x</span> = <span class="literal.numeric">42</span>;
-    <span class="keyword">let</span> <span class="variable.mut">y</span> = &<span class="keyword">mut</span> <span class="variable.mut">x</span>;
-    <span class="keyword">let</span> <span class="variable">z</span> = &<span class="variable.mut">y</span>;
+    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">x</span> = <span class="numeric_literal">42</span>;
+    <span class="keyword">let</span> <span class="variable declaration mutable">y</span> = &<span class="keyword">mut</span> <span class="variable mutable">x</span>;
+    <span class="keyword">let</span> <span class="variable declaration">z</span> = &<span class="variable mutable">y</span>;
 
-    <span class="variable.mut">y</span>;
+    <span class="variable mutable">y</span>;
 }
 
-<span class="keyword">enum</span> <span class="type">E</span>&lt;<span class="type.param">X</span>&gt; {
-    <span class="constant">V</span>(<span class="type.param">X</span>)
+<span class="keyword">enum</span> <span class="enum declaration">Option</span>&lt;<span class="type_param declaration">T</span>&gt; {
+    <span class="enum_variant declaration">Some</span>(<span class="type_param">T</span>),
+    <span class="enum_variant declaration">None</span>,
 }
+<span class="keyword">use</span> <span class="enum">Option</span>::*;
 
-<span class="keyword">impl</span>&lt;<span class="type.param">X</span>&gt; <span class="type">E</span>&lt;<span class="type.param">X</span>&gt; {
-    <span class="keyword">fn</span> <span class="function">new</span>&lt;<span class="type.param">T</span>&gt;() -&gt; <span class="type">E</span>&lt;<span class="type.param">T</span>&gt; {}
+<span class="keyword">impl</span>&lt;<span class="type_param declaration">T</span>&gt; <span class="enum">Option</span>&lt;<span class="type_param">T</span>&gt; {
+    <span class="keyword">fn</span> <span class="function declaration">and</span>&lt;<span class="type_param declaration">U</span>&gt;(<span class="keyword">self</span>, <span class="variable declaration">other</span>: <span class="enum">Option</span>&lt;<span class="type_param">U</span>&gt;) -&gt; <span class="enum">Option</span>&lt;(<span class="type_param">T</span>, <span class="type_param">U</span>)&gt; {
+        <span class="keyword control">match</span> <span class="variable">other</span> {
+            <span class="enum_variant">None</span> =&gt; <span class="macro">unimplemented</span><span class="macro">!</span>(),
+            <span class="variable declaration">Nope</span> =&gt; <span class="variable">Nope</span>,
+        }
+    }
 }</code></pre>
\ No newline at end of file
index 95f038f00011a7c2556dd5e2c9dc7f3f8c12d695..dddbfc0dd69ab70cd6b5bd91b40abd6e812dcbb0 100644 (file)
@@ -3,36 +3,38 @@
 body                { margin: 0; }
 pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
 
+.lifetime           { color: #DFAF8F; font-style: italic; }
 .comment            { color: #7F9F7F; }
-.string             { color: #CC9393; }
+.struct, .enum      { color: #7CB8BB; }
+.enum_variant       { color: #BDE0F3; }
+.string_literal     { color: #CC9393; }
 .field              { color: #94BFF3; }
 .function           { color: #93E0E3; }
 .parameter          { color: #94BFF3; }
 .text               { color: #DCDCCC; }
 .type               { color: #7CB8BB; }
-.type\.builtin     { color: #8CD0D3; }
-.type\.param       { color: #20999D; }
+.builtin_type       { color: #8CD0D3; }
+.type_param         { color: #DFAF8F; }
 .attribute          { color: #94BFF3; }
-.literal            { color: #BFEBBF; }
-.literal\.numeric  { color: #6A8759; }
+.numeric_literal    { color: #BFEBBF; }
 .macro              { color: #94BFF3; }
 .module             { color: #AFD8AF; }
 .variable           { color: #DCDCCC; }
-.variable\.mut     { color: #DCDCCC; text-decoration: underline; }
+.mutable            { text-decoration: underline; }
 
-.keyword            { color: #F0DFAF; }
-.keyword\.unsafe   { color: #DFAF8F; }
-.keyword\.control  { color: #F0DFAF; font-weight: bold; }
+.keyword            { color: #F0DFAF; font-weight: bold; }
+.keyword.unsafe     { color: #BC8383; font-weight: bold; }
+.control            { font-style: italic; }
 </style>
-<pre><code><span class="keyword">fn</span> <span class="function">main</span>() {
-    <span class="keyword">let</span> <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>;
-    <span class="keyword">let</span> <span class="variable" data-binding-hash="4303609361109701698" style="color: hsl(242,75%,88%);">x</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string();
-    <span class="keyword">let</span> <span class="variable" data-binding-hash="13865792086344377029" style="color: hsl(340,64%,86%);">y</span> = <span class="variable" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span>.to_string();
+<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span>() {
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string_literal">"hello"</span>;
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="2705725358298919760" style="color: hsl(17,51%,74%);">x</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string();
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="3365759661443752373" style="color: hsl(127,76%,66%);">y</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string();
 
-    <span class="keyword">let</span> <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span> = <span class="string">"other color please!"</span>;
-    <span class="keyword">let</span> <span class="variable" data-binding-hash="12461245066629867975" style="color: hsl(132,91%,68%);">y</span> = <span class="variable" data-binding-hash="7011301204224269512" style="color: hsl(198,45%,40%);">x</span>.to_string();
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span> = <span class="string_literal">"other color please!"</span>;
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="6717528807933952652" style="color: hsl(85,49%,84%);">y</span> = <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span>.to_string();
 }
 
-<span class="keyword">fn</span> <span class="function">bar</span>() {
-    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable.mut" data-binding-hash="2217585909179791122" style="color: hsl(280,74%,48%);">hello</span> = <span class="string">"hello"</span>;
+<span class="keyword">fn</span> <span class="function declaration">bar</span>() {
+    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string_literal">"hello"</span>;
 }</code></pre>
\ No newline at end of file
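
The rainbow hashes in this snapshot changed because `calc_binding_hash` (see the `syntax_highlighting.rs` hunk below) no longer mixes the `HirFileId` into the hash: only the binding's name and its shadow count contribute, so colors are now stable across files. The scheme itself is simple enough to state as a self-contained sketch (std only; the HSL rendering lives in `rainbowify` in the new `html.rs`):

```
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Deterministic per-binding seed: the same name at the same shadowing depth
// always hashes, and therefore colors, the same way.
fn binding_hash(name: &str, shadow_count: u32) -> u64 {
    let mut hasher = DefaultHasher::new();
    (name, shadow_count).hash(&mut hasher);
    hasher.finish()
}

fn main() {
    assert_eq!(binding_hash("hello", 0), binding_hash("hello", 0));
    // Shadowing (`let hello = ...` again) bumps the count and re-colors:
    println!("{:x} vs {:x}", binding_hash("hello", 0), binding_hash("hello", 1));
}
```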
index 902c29fc62078e1a232f565ee3cb913760ef4f37..c011a2e74f3c41f2a5aecc24a50261ad78821d2a 100644 (file)
@@ -3,9 +3,8 @@
 use crate::source_change::SourceFileEdit;
 use ra_ide_db::RootDatabase;
 use ra_syntax::ast::make::expr_from_text;
-use ra_syntax::AstNode;
-use ra_syntax::SyntaxElement;
-use ra_syntax::SyntaxNode;
+use ra_syntax::ast::{AstToken, Comment};
+use ra_syntax::{AstNode, SyntaxElement, SyntaxNode};
 use ra_text_edit::{TextEdit, TextEditBuilder};
 use rustc_hash::FxHashMap;
 use std::collections::HashMap;
@@ -72,6 +71,7 @@ struct SsrTemplate {
 struct Match {
     place: SyntaxNode,
     binding: Binding,
+    ignored_comments: Vec<Comment>,
 }
 
 #[derive(Debug)]
@@ -179,44 +179,61 @@ fn check(
         pattern: &SyntaxElement,
         code: &SyntaxElement,
         placeholders: &[Var],
-        match_: &mut Match,
-    ) -> bool {
+        mut match_: Match,
+    ) -> Option<Match> {
         match (pattern, code) {
             (SyntaxElement::Token(ref pattern), SyntaxElement::Token(ref code)) => {
-                pattern.text() == code.text()
+                if pattern.text() == code.text() {
+                    Some(match_)
+                } else {
+                    None
+                }
             }
             (SyntaxElement::Node(ref pattern), SyntaxElement::Node(ref code)) => {
                 if placeholders.iter().any(|n| n.0.as_str() == pattern.text()) {
                     match_.binding.insert(Var(pattern.text().to_string()), code.clone());
-                    true
+                    Some(match_)
                 } else {
-                    pattern.green().children().count() == code.green().children().count()
-                        && pattern
-                            .children_with_tokens()
-                            .zip(code.children_with_tokens())
-                            .all(|(a, b)| check(&a, &b, placeholders, match_))
+                    let mut pattern_children = pattern
+                        .children_with_tokens()
+                        .filter(|element| !element.kind().is_trivia());
+                    let mut code_children =
+                        code.children_with_tokens().filter(|element| !element.kind().is_trivia());
+                    let new_ignored_comments = code.children_with_tokens().filter_map(|element| {
+                        element.as_token().and_then(|token| Comment::cast(token.clone()))
+                    });
+                    match_.ignored_comments.extend(new_ignored_comments);
+                    let match_from_children = pattern_children
+                        .by_ref()
+                        .zip(code_children.by_ref())
+                        .fold(Some(match_), |accum, (a, b)| {
+                            accum.and_then(|match_| check(&a, &b, placeholders, match_))
+                        });
+                    match_from_children.and_then(|match_| {
+                        if pattern_children.count() == 0 && code_children.count() == 0 {
+                            Some(match_)
+                        } else {
+                            None
+                        }
+                    })
                 }
             }
-            _ => false,
+            _ => None,
         }
     }
     let kind = pattern.pattern.kind();
     let matches = code
-        .descendants_with_tokens()
+        .descendants()
         .filter(|n| n.kind() == kind)
         .filter_map(|code| {
-            let mut match_ =
-                Match { place: code.as_node().unwrap().clone(), binding: HashMap::new() };
-            if check(
+            let match_ =
+                Match { place: code.clone(), binding: HashMap::new(), ignored_comments: vec![] };
+            check(
                 &SyntaxElement::from(pattern.pattern.clone()),
-                &code,
+                &SyntaxElement::from(code),
                 &pattern.vars,
-                &mut match_,
-            ) {
-                Some(match_)
-            } else {
-                None
-            }
+                match_,
+            )
         })
         .collect();
     SsrMatches { matches }
@@ -225,18 +242,28 @@ fn check(
 fn replace(matches: &SsrMatches, template: &SsrTemplate) -> TextEdit {
     let mut builder = TextEditBuilder::default();
     for match_ in &matches.matches {
-        builder.replace(match_.place.text_range(), render_replace(&match_.binding, template));
+        builder.replace(
+            match_.place.text_range(),
+            render_replace(&match_.binding, &match_.ignored_comments, template),
+        );
     }
     builder.finish()
 }
 
-fn render_replace(binding: &Binding, template: &SsrTemplate) -> String {
+fn render_replace(
+    binding: &Binding,
+    ignored_comments: &[Comment],
+    template: &SsrTemplate,
+) -> String {
     let mut builder = TextEditBuilder::default();
     for element in template.template.descendants() {
         if let Some(var) = template.placeholders.get(&element) {
             builder.replace(element.text_range(), binding[var].to_string())
         }
     }
+    for comment in ignored_comments {
+        builder.insert(template.template.text_range().end(), comment.syntax().to_string())
+    }
     builder.finish().apply(&template.template.text().to_string())
 }
 
@@ -325,4 +352,66 @@ fn parse_match_replace() {
         let edit = replace(&matches, &query.template);
         assert_eq!(edit.apply(input), "fn main() { bar(1+2); }");
     }
+
+    fn assert_ssr_transform(query: &str, input: &str, result: &str) {
+        let query: SsrQuery = query.parse().unwrap();
+        let code = SourceFile::parse(input).tree();
+        let matches = find(&query.pattern, code.syntax());
+        let edit = replace(&matches, &query.template);
+        assert_eq!(edit.apply(input), result);
+    }
+
+    #[test]
+    fn ssr_function_to_method() {
+        assert_ssr_transform(
+            "my_function($a:expr, $b:expr) ==>> ($a).my_method($b)",
+            "loop { my_function( other_func(x, y), z + w) }",
+            "loop { (other_func(x, y)).my_method(z + w) }",
+        )
+    }
+
+    #[test]
+    fn ssr_nested_function() {
+        assert_ssr_transform(
+            "foo($a:expr, $b:expr, $c:expr) ==>> bar($c, baz($a, $b))",
+            "fn main { foo  (x + value.method(b), x+y-z, true && false) }",
+            "fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }",
+        )
+    }
+
+    #[test]
+    fn ssr_expected_spacing() {
+        assert_ssr_transform(
+            "foo($x:expr) + bar() ==>> bar($x)",
+            "fn main() { foo(5) + bar() }",
+            "fn main() { bar(5) }",
+        );
+    }
+
+    #[test]
+    fn ssr_with_extra_space() {
+        assert_ssr_transform(
+            "foo($x:expr  ) +    bar() ==>> bar($x)",
+            "fn main() { foo(  5 )  +bar(   ) }",
+            "fn main() { bar(5) }",
+        );
+    }
+
+    #[test]
+    fn ssr_keeps_nested_comment() {
+        assert_ssr_transform(
+            "foo($x:expr) ==>> bar($x)",
+            "fn main() { foo(other(5 /* using 5 */)) }",
+            "fn main() { bar(other(5 /* using 5 */)) }",
+        )
+    }
+
+    #[test]
+    fn ssr_keeps_comment() {
+        assert_ssr_transform(
+            "foo($x:expr) ==>> bar($x)",
+            "fn main() { foo(5 /* using 5 */) }",
+            "fn main() { bar(5)/* using 5 */ }",
+        )
+    }
 }
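
Taken together, the new `check` makes SSR matching whitespace-insensitive and comment-preserving: trivia is filtered out of both child streams before they are zipped, leftover children on either side fail the match, and comment tokens found in the matched code are collected into `ignored_comments` and re-emitted after the rendered template (hence `bar(5)/* using 5 */` in `ssr_keeps_comment`). A hedged usage sketch of the pipeline, driven exactly as the tests drive it and assuming this file's `SsrQuery`, `find`, `replace`, and `SsrError` items:

```
use ra_syntax::SourceFile;

// Apply one structured-search-replace query to a source string.
fn apply_ssr(query: &str, input: &str) -> Result<String, SsrError> {
    let query: SsrQuery = query.parse()?;              // "pattern ==>> template"
    let code = SourceFile::parse(input).tree();
    let matches = find(&query.pattern, code.syntax()); // trivia-insensitive matching
    let edit = replace(&matches, &query.template);     // comments re-attached after the template
    Ok(edit.apply(input))
}

// apply_ssr("foo($x:expr) ==>> bar($x)", "fn main() { foo(  5 /* five */ ) }")
//   == Ok("fn main() { bar(5)/* five */ }".to_string())
```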
index 812229b4e0b483a3056ecca0a750d027a7185ed3..b94b6a0224d5a4588ed746e277a5a77d776d039b 100644 (file)
-//! FIXME: write short doc here
+//! Implements syntax highlighting.
 
-use hir::{HirFileId, InFile, Name, SourceAnalyzer, SourceBinder};
-use ra_db::SourceDatabase;
-use ra_ide_db::{defs::NameDefinition, RootDatabase};
+mod tags;
+mod html;
+#[cfg(test)]
+mod tests;
+
+use hir::{Name, Semantics};
+use ra_ide_db::{
+    defs::{classify_name, NameClass, NameDefinition},
+    RootDatabase,
+};
 use ra_prof::profile;
 use ra_syntax::{
-    ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken, TextRange,
-    WalkEvent, T,
+    ast::{self, HasQuotes, HasStringValue},
+    AstNode, AstToken, Direction, NodeOrToken, SyntaxElement,
+    SyntaxKind::*,
+    SyntaxToken, TextRange, WalkEvent, T,
 };
 use rustc_hash::FxHashMap;
 
-use crate::{
-    expand::descend_into_macros_with_analyzer,
-    references::{classify_name, classify_name_ref},
-    FileId,
-};
+use crate::{call_info::call_info_for_token, references::classify_name_ref, Analysis, FileId};
 
-pub mod tags {
-    pub const FIELD: &str = "field";
-    pub const FUNCTION: &str = "function";
-    pub const MODULE: &str = "module";
-    pub const CONSTANT: &str = "constant";
-    pub const MACRO: &str = "macro";
-
-    pub const VARIABLE: &str = "variable";
-    pub const VARIABLE_MUT: &str = "variable.mut";
-
-    pub const TYPE: &str = "type";
-    pub const TYPE_BUILTIN: &str = "type.builtin";
-    pub const TYPE_SELF: &str = "type.self";
-    pub const TYPE_PARAM: &str = "type.param";
-    pub const TYPE_LIFETIME: &str = "type.lifetime";
-
-    pub const LITERAL_BYTE: &str = "literal.byte";
-    pub const LITERAL_NUMERIC: &str = "literal.numeric";
-    pub const LITERAL_CHAR: &str = "literal.char";
-
-    pub const LITERAL_COMMENT: &str = "comment";
-    pub const LITERAL_STRING: &str = "string";
-    pub const LITERAL_ATTRIBUTE: &str = "attribute";
-
-    pub const KEYWORD: &str = "keyword";
-    pub const KEYWORD_UNSAFE: &str = "keyword.unsafe";
-    pub const KEYWORD_CONTROL: &str = "keyword.control";
-}
+pub(crate) use html::highlight_as_html;
+pub use tags::{Highlight, HighlightModifier, HighlightModifiers, HighlightTag};
 
 #[derive(Debug)]
 pub struct HighlightedRange {
     pub range: TextRange,
-    pub tag: &'static str,
+    pub highlight: Highlight,
     pub binding_hash: Option<u64>,
 }
 
-fn is_control_keyword(kind: SyntaxKind) -> bool {
-    match kind {
-        T![for]
-        | T![loop]
-        | T![while]
-        | T![continue]
-        | T![break]
-        | T![if]
-        | T![else]
-        | T![match]
-        | T![return] => true,
-        _ => false,
-    }
-}
-
-pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
+pub(crate) fn highlight(
+    db: &RootDatabase,
+    file_id: FileId,
+    range_to_highlight: Option<TextRange>,
+) -> Vec<HighlightedRange> {
     let _p = profile("highlight");
-    let parse = db.parse(file_id);
-    let root = parse.tree().syntax().clone();
+    let sema = Semantics::new(db);
+
+    // Determine the root based on the given range.
+    let (root, range_to_highlight) = {
+        let source_file = sema.parse(file_id);
+        match range_to_highlight {
+            Some(range) => {
+                let node = match source_file.syntax().covering_element(range) {
+                    NodeOrToken::Node(it) => it,
+                    NodeOrToken::Token(it) => it.parent(),
+                };
+                (node, range)
+            }
+            None => (source_file.syntax().clone(), source_file.syntax().text_range()),
+        }
+    };
 
-    let mut sb = SourceBinder::new(db);
     let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();
     let mut res = Vec::new();
-    let analyzer = sb.analyze(InFile::new(file_id.into(), &root), None);
 
-    let mut in_macro_call = None;
+    let mut current_macro_call: Option<ast::MacroCall> = None;
 
+    // Walk all nodes, keeping track of whether we are inside a macro or not.
+    // When inside a macro call, expand it first and highlight the expanded code.
     for event in root.preorder_with_tokens() {
-        match event {
-            WalkEvent::Enter(node) => match node.kind() {
-                MACRO_CALL => {
-                    in_macro_call = Some(node.clone());
-                    if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) {
-                        res.push(HighlightedRange { range, tag: tags::MACRO, binding_hash: None });
-                    }
-                }
-                _ if in_macro_call.is_some() => {
-                    if let Some(token) = node.as_token() {
-                        if let Some((tag, binding_hash)) = highlight_token_tree(
-                            &mut sb,
-                            &analyzer,
-                            &mut bindings_shadow_count,
-                            InFile::new(file_id.into(), token.clone()),
-                        ) {
-                            res.push(HighlightedRange {
-                                range: node.text_range(),
-                                tag,
-                                binding_hash,
-                            });
-                        }
-                    }
-                }
-                _ => {
-                    if let Some((tag, binding_hash)) = highlight_node(
-                        &mut sb,
-                        &mut bindings_shadow_count,
-                        InFile::new(file_id.into(), node.clone()),
-                    ) {
-                        res.push(HighlightedRange { range: node.text_range(), tag, binding_hash });
-                    }
-                }
-            },
-            WalkEvent::Leave(node) => {
-                if let Some(m) = in_macro_call.as_ref() {
-                    if *m == node {
-                        in_macro_call = None;
-                    }
+        let event_range = match &event {
+            WalkEvent::Enter(it) => it.text_range(),
+            WalkEvent::Leave(it) => it.text_range(),
+        };
+
+        // Element is outside the requested range, no need to highlight it
+        if range_to_highlight.intersection(&event_range).is_none() {
+            continue;
+        }
+
+        // Track "inside macro" state
+        match event.clone().map(|it| it.into_node().and_then(ast::MacroCall::cast)) {
+            WalkEvent::Enter(Some(mc)) => {
+                current_macro_call = Some(mc.clone());
+                if let Some(range) = macro_call_range(&mc) {
+                    res.push(HighlightedRange {
+                        range,
+                        highlight: HighlightTag::Macro.into(),
+                        binding_hash: None,
+                    });
                 }
+                continue;
             }
+            WalkEvent::Leave(Some(mc)) => {
+                assert_eq!(current_macro_call, Some(mc));
+                current_macro_call = None;
+                continue;
+            }
+            _ => (),
+        }
+
+        let element = match event {
+            WalkEvent::Enter(it) => it,
+            WalkEvent::Leave(_) => continue,
+        };
+
+        let range = element.text_range();
+
+        let element_to_highlight = if current_macro_call.is_some() {
+            // Inside a macro -- expand it first
+            let token = match element.clone().into_token() {
+                Some(it) if it.parent().kind() == TOKEN_TREE => it,
+                _ => continue,
+            };
+            let token = sema.descend_into_macros(token);
+            let parent = token.parent();
+            // We only care about Name and NameRef
+            match (token.kind(), parent.kind()) {
+                (IDENT, NAME) | (IDENT, NAME_REF) => parent.into(),
+                _ => token.into(),
+            }
+        } else {
+            element.clone()
+        };
+
+        if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) {
+            let expanded = element_to_highlight.as_token().unwrap().clone();
+            if highlight_injection(&mut res, &sema, token, expanded).is_some() {
+                continue;
+            }
+        }
+
+        if let Some((highlight, binding_hash)) =
+            highlight_element(&sema, &mut bindings_shadow_count, element_to_highlight)
+        {
+            res.push(HighlightedRange { range, highlight, binding_hash });
         }
     }
 
     res
 }
 
-fn highlight_macro(node: InFile<SyntaxElement>) -> Option<TextRange> {
-    let macro_call = ast::MacroCall::cast(node.value.as_node()?.clone())?;
+fn macro_call_range(macro_call: &ast::MacroCall) -> Option<TextRange> {
     let path = macro_call.path()?;
     let name_ref = path.segment()?.name_ref()?;
 
@@ -144,101 +151,100 @@ fn highlight_macro(node: InFile<SyntaxElement>) -> Option<TextRange> {
     Some(TextRange::from_to(range_start, range_end))
 }
 
-fn highlight_token_tree(
-    sb: &mut SourceBinder<RootDatabase>,
-    analyzer: &SourceAnalyzer,
-    bindings_shadow_count: &mut FxHashMap<Name, u32>,
-    token: InFile<SyntaxToken>,
-) -> Option<(&'static str, Option<u64>)> {
-    if token.value.parent().kind() != TOKEN_TREE {
-        return None;
-    }
-    let token = descend_into_macros_with_analyzer(sb.db, analyzer, token);
-    let expanded = {
-        let parent = token.value.parent();
-        // We only care Name and Name_ref
-        match (token.value.kind(), parent.kind()) {
-            (IDENT, NAME) | (IDENT, NAME_REF) => token.with_value(parent.into()),
-            _ => token.map(|it| it.into()),
-        }
-    };
-
-    highlight_node(sb, bindings_shadow_count, expanded)
-}
-
-fn highlight_node(
-    sb: &mut SourceBinder<RootDatabase>,
+fn highlight_element(
+    sema: &Semantics<RootDatabase>,
     bindings_shadow_count: &mut FxHashMap<Name, u32>,
-    node: InFile<SyntaxElement>,
-) -> Option<(&'static str, Option<u64>)> {
-    let db = sb.db;
+    element: SyntaxElement,
+) -> Option<(Highlight, Option<u64>)> {
+    let db = sema.db;
     let mut binding_hash = None;
-    let tag = match node.value.kind() {
+    let highlight: Highlight = match element.kind() {
         FN_DEF => {
             bindings_shadow_count.clear();
             return None;
         }
-        COMMENT => tags::LITERAL_COMMENT,
-        STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => tags::LITERAL_STRING,
-        ATTR => tags::LITERAL_ATTRIBUTE,
-        // Special-case field init shorthand
-        NAME_REF if node.value.parent().and_then(ast::RecordField::cast).is_some() => tags::FIELD,
-        NAME_REF if node.value.ancestors().any(|it| it.kind() == ATTR) => return None,
-        NAME_REF => {
-            let name_ref = node.value.as_node().cloned().and_then(ast::NameRef::cast).unwrap();
-            let name_kind = classify_name_ref(sb, node.with_value(&name_ref));
+
+        // Highlight definitions depending on the "type" of the definition.
+        NAME => {
+            let name = element.into_node().and_then(ast::Name::cast).unwrap();
+            let name_kind = classify_name(sema, &name);
+
+            if let Some(NameClass::NameDefinition(NameDefinition::Local(local))) = &name_kind {
+                if let Some(name) = local.name(db) {
+                    let shadow_count = bindings_shadow_count.entry(name.clone()).or_default();
+                    *shadow_count += 1;
+                    binding_hash = Some(calc_binding_hash(&name, *shadow_count))
+                }
+            };
+
             match name_kind {
-                Some(name_kind) => {
-                    if let NameDefinition::Local(local) = &name_kind {
-                        if let Some(name) = local.name(db) {
-                            let shadow_count =
-                                bindings_shadow_count.entry(name.clone()).or_default();
-                            binding_hash =
-                                Some(calc_binding_hash(node.file_id, &name, *shadow_count))
-                        }
-                    };
-
-                    highlight_name(db, name_kind)
+                Some(NameClass::NameDefinition(def)) => {
+                    highlight_name(db, def) | HighlightModifier::Definition
                 }
-                _ => return None,
+                Some(NameClass::ConstReference(def)) => highlight_name(db, def),
+                None => highlight_name_by_syntax(name) | HighlightModifier::Definition,
             }
         }
-        NAME => {
-            let name = node.value.as_node().cloned().and_then(ast::Name::cast).unwrap();
-            let name_kind = classify_name(sb, node.with_value(&name));
 
-            if let Some(NameDefinition::Local(local)) = &name_kind {
+        // Highlight references like the definitions they resolve to
+
+        // Special-case field init shorthand
+        NAME_REF if element.parent().and_then(ast::RecordField::cast).is_some() => {
+            HighlightTag::Field.into()
+        }
+        NAME_REF if element.ancestors().any(|it| it.kind() == ATTR) => return None,
+        NAME_REF => {
+            let name_ref = element.into_node().and_then(ast::NameRef::cast).unwrap();
+            let name_kind = classify_name_ref(sema, &name_ref)?;
+
+            if let NameDefinition::Local(local) = &name_kind {
                 if let Some(name) = local.name(db) {
                     let shadow_count = bindings_shadow_count.entry(name.clone()).or_default();
-                    *shadow_count += 1;
-                    binding_hash = Some(calc_binding_hash(node.file_id, &name, *shadow_count))
+                    binding_hash = Some(calc_binding_hash(&name, *shadow_count))
                 }
             };
 
-            match name_kind {
-                Some(name_kind) => highlight_name(db, name_kind),
-                None => name.syntax().parent().map_or(tags::FUNCTION, |x| match x.kind() {
-                    STRUCT_DEF | ENUM_DEF | TRAIT_DEF | TYPE_ALIAS_DEF => tags::TYPE,
-                    TYPE_PARAM => tags::TYPE_PARAM,
-                    RECORD_FIELD_DEF => tags::FIELD,
-                    _ => tags::FUNCTION,
-                }),
+            highlight_name(db, name_kind)
+        }
+
+        // Simple token-based highlighting
+        COMMENT => HighlightTag::Comment.into(),
+        STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => HighlightTag::StringLiteral.into(),
+        ATTR => HighlightTag::Attribute.into(),
+        INT_NUMBER | FLOAT_NUMBER => HighlightTag::NumericLiteral.into(),
+        BYTE => HighlightTag::ByteLiteral.into(),
+        CHAR => HighlightTag::CharLiteral.into(),
+        LIFETIME => {
+            let h = Highlight::new(HighlightTag::Lifetime);
+            match element.parent().map(|it| it.kind()) {
+                Some(LIFETIME_PARAM) | Some(LABEL) => h | HighlightModifier::Definition,
+                _ => h,
+            }
+        }
+
+        k if k.is_keyword() => {
+            let h = Highlight::new(HighlightTag::Keyword);
+            match k {
+                T![break]
+                | T![continue]
+                | T![else]
+                | T![for]
+                | T![if]
+                | T![loop]
+                | T![match]
+                | T![return]
+                | T![while] => h | HighlightModifier::Control,
+                T![unsafe] => h | HighlightModifier::Unsafe,
+                _ => h,
             }
         }
-        INT_NUMBER | FLOAT_NUMBER => tags::LITERAL_NUMERIC,
-        BYTE => tags::LITERAL_BYTE,
-        CHAR => tags::LITERAL_CHAR,
-        LIFETIME => tags::TYPE_LIFETIME,
-        T![unsafe] => tags::KEYWORD_UNSAFE,
-        k if is_control_keyword(k) => tags::KEYWORD_CONTROL,
-        k if k.is_keyword() => tags::KEYWORD,
 
         _ => return None,
     };
 
-    return Some((tag, binding_hash));
+    return Some((highlight, binding_hash));
 
-    fn calc_binding_hash(file_id: HirFileId, name: &Name, shadow_count: u32) -> u64 {
+    fn calc_binding_hash(name: &Name, shadow_count: u32) -> u64 {
         fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 {
             use std::{collections::hash_map::DefaultHasher, hash::Hasher};
 
@@ -247,232 +253,98 @@ fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 {
             hasher.finish()
         }
 
-        hash((file_id, name, shadow_count))
-    }
-}
-
-pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
-    let parse = db.parse(file_id);
-
-    fn rainbowify(seed: u64) -> String {
-        use rand::prelude::*;
-        let mut rng = SmallRng::seed_from_u64(seed);
-        format!(
-            "hsl({h},{s}%,{l}%)",
-            h = rng.gen_range::<u16, _, _>(0, 361),
-            s = rng.gen_range::<u16, _, _>(42, 99),
-            l = rng.gen_range::<u16, _, _>(40, 91),
-        )
-    }
-
-    let mut ranges = highlight(db, file_id);
-    ranges.sort_by_key(|it| it.range.start());
-    // quick non-optimal heuristic to intersect token ranges and highlighted ranges
-    let mut frontier = 0;
-    let mut could_intersect: Vec<&HighlightedRange> = Vec::new();
-
-    let mut buf = String::new();
-    buf.push_str(&STYLE);
-    buf.push_str("<pre><code>");
-    let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.into_token());
-    for token in tokens {
-        could_intersect.retain(|it| token.text_range().start() <= it.range.end());
-        while let Some(r) = ranges.get(frontier) {
-            if r.range.start() <= token.text_range().end() {
-                could_intersect.push(r);
-                frontier += 1;
-            } else {
-                break;
-            }
-        }
-        let text = html_escape(&token.text());
-        let ranges = could_intersect
-            .iter()
-            .filter(|it| token.text_range().is_subrange(&it.range))
-            .collect::<Vec<_>>();
-        if ranges.is_empty() {
-            buf.push_str(&text);
-        } else {
-            let classes = ranges.iter().map(|x| x.tag).collect::<Vec<_>>().join(" ");
-            let binding_hash = ranges.first().and_then(|x| x.binding_hash);
-            let color = match (rainbow, binding_hash) {
-                (true, Some(hash)) => format!(
-                    " data-binding-hash=\"{}\" style=\"color: {};\"",
-                    hash,
-                    rainbowify(hash)
-                ),
-                _ => "".into(),
-            };
-            buf.push_str(&format!("<span class=\"{}\"{}>{}</span>", classes, color, text));
-        }
+        hash((name, shadow_count))
     }
-    buf.push_str("</code></pre>");
-    buf
 }
 
-fn highlight_name(db: &RootDatabase, def: NameDefinition) -> &'static str {
+fn highlight_name(db: &RootDatabase, def: NameDefinition) -> Highlight {
     match def {
-        NameDefinition::Macro(_) => tags::MACRO,
-        NameDefinition::StructField(_) => tags::FIELD,
-        NameDefinition::ModuleDef(hir::ModuleDef::Module(_)) => tags::MODULE,
-        NameDefinition::ModuleDef(hir::ModuleDef::Function(_)) => tags::FUNCTION,
-        NameDefinition::ModuleDef(hir::ModuleDef::Adt(_)) => tags::TYPE,
-        NameDefinition::ModuleDef(hir::ModuleDef::EnumVariant(_)) => tags::CONSTANT,
-        NameDefinition::ModuleDef(hir::ModuleDef::Const(_)) => tags::CONSTANT,
-        NameDefinition::ModuleDef(hir::ModuleDef::Static(_)) => tags::CONSTANT,
-        NameDefinition::ModuleDef(hir::ModuleDef::Trait(_)) => tags::TYPE,
-        NameDefinition::ModuleDef(hir::ModuleDef::TypeAlias(_)) => tags::TYPE,
-        NameDefinition::ModuleDef(hir::ModuleDef::BuiltinType(_)) => tags::TYPE_BUILTIN,
-        NameDefinition::SelfType(_) => tags::TYPE_SELF,
-        NameDefinition::TypeParam(_) => tags::TYPE_PARAM,
+        NameDefinition::Macro(_) => HighlightTag::Macro,
+        NameDefinition::StructField(_) => HighlightTag::Field,
+        NameDefinition::ModuleDef(def) => match def {
+            hir::ModuleDef::Module(_) => HighlightTag::Module,
+            hir::ModuleDef::Function(_) => HighlightTag::Function,
+            hir::ModuleDef::Adt(hir::Adt::Struct(_)) => HighlightTag::Struct,
+            hir::ModuleDef::Adt(hir::Adt::Enum(_)) => HighlightTag::Enum,
+            hir::ModuleDef::Adt(hir::Adt::Union(_)) => HighlightTag::Union,
+            hir::ModuleDef::EnumVariant(_) => HighlightTag::EnumVariant,
+            hir::ModuleDef::Const(_) => HighlightTag::Constant,
+            hir::ModuleDef::Static(_) => HighlightTag::Static,
+            hir::ModuleDef::Trait(_) => HighlightTag::Trait,
+            hir::ModuleDef::TypeAlias(_) => HighlightTag::TypeAlias,
+            hir::ModuleDef::BuiltinType(_) => HighlightTag::BuiltinType,
+        },
+        NameDefinition::SelfType(_) => HighlightTag::SelfType,
+        NameDefinition::TypeParam(_) => HighlightTag::TypeParam,
+        // FIXME: distinguish between locals and parameters
         NameDefinition::Local(local) => {
+            let mut h = Highlight::new(HighlightTag::Local);
             if local.is_mut(db) || local.ty(db).is_mutable_reference() {
-                tags::VARIABLE_MUT
-            } else {
-                tags::VARIABLE
+                h |= HighlightModifier::Mutable;
             }
+            return h;
         }
     }
+    .into()
 }
 
-//FIXME: like, real html escaping
-fn html_escape(text: &str) -> String {
-    text.replace("<", "&lt;").replace(">", "&gt;")
-}
-
-const STYLE: &str = "
-<style>
-body                { margin: 0; }
-pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
-
-.comment            { color: #7F9F7F; }
-.string             { color: #CC9393; }
-.field              { color: #94BFF3; }
-.function           { color: #93E0E3; }
-.parameter          { color: #94BFF3; }
-.text               { color: #DCDCCC; }
-.type               { color: #7CB8BB; }
-.type\\.builtin     { color: #8CD0D3; }
-.type\\.param       { color: #20999D; }
-.attribute          { color: #94BFF3; }
-.literal            { color: #BFEBBF; }
-.literal\\.numeric  { color: #6A8759; }
-.macro              { color: #94BFF3; }
-.module             { color: #AFD8AF; }
-.variable           { color: #DCDCCC; }
-.variable\\.mut     { color: #DCDCCC; text-decoration: underline; }
-
-.keyword            { color: #F0DFAF; }
-.keyword\\.unsafe   { color: #DFAF8F; }
-.keyword\\.control  { color: #F0DFAF; font-weight: bold; }
-</style>
-";
-
-#[cfg(test)]
-mod tests {
-    use std::fs;
-
-    use test_utils::{assert_eq_text, project_dir, read_text};
-
-    use crate::mock_analysis::{single_file, MockAnalysis};
-
-    #[test]
-    fn test_highlighting() {
-        let (analysis, file_id) = single_file(
-            r#"
-#[derive(Clone, Debug)]
-struct Foo {
-    pub x: i32,
-    pub y: i32,
-}
-
-fn foo<T>() -> T {
-    unimplemented!();
-    foo::<i32>();
-}
+fn highlight_name_by_syntax(name: ast::Name) -> Highlight {
+    let default = HighlightTag::Function.into();
 
-macro_rules! def_fn {
-    ($($tt:tt)*) => {$($tt)*}
-}
+    let parent = match name.syntax().parent() {
+        Some(it) => it,
+        _ => return default,
+    };
 
-def_fn!{
-    fn bar() -> u32 {
-        100
+    match parent.kind() {
+        STRUCT_DEF => HighlightTag::Struct.into(),
+        ENUM_DEF => HighlightTag::Enum.into(),
+        UNION_DEF => HighlightTag::Union.into(),
+        TRAIT_DEF => HighlightTag::Trait.into(),
+        TYPE_ALIAS_DEF => HighlightTag::TypeAlias.into(),
+        TYPE_PARAM => HighlightTag::TypeParam.into(),
+        RECORD_FIELD_DEF => HighlightTag::Field.into(),
+        _ => default,
     }
 }
 
-// comment
-fn main() {
-    println!("Hello, {}!", 92);
-
-    let mut vec = Vec::new();
-    if true {
-        let x = 92;
-        vec.push(Foo { x, y: 1 });
+fn highlight_injection(
+    acc: &mut Vec<HighlightedRange>,
+    sema: &Semantics<RootDatabase>,
+    literal: ast::RawString,
+    expanded: SyntaxToken,
+) -> Option<()> {
+    let call_info = call_info_for_token(&sema, expanded)?;
+    let idx = call_info.active_parameter?;
+    let name = call_info.signature.parameter_names.get(idx)?;
+    if name != "ra_fixture" {
+        return None;
     }
-    unsafe { vec.set_len(0); }
-
-    let mut x = 42;
-    let y = &mut x;
-    let z = &y;
-
-    y;
-}
-
-enum E<X> {
-    V(X)
-}
-
-impl<X> E<X> {
-    fn new<T>() -> E<T> {}
-}
-"#
-            .trim(),
-        );
-        let dst_file = project_dir().join("crates/ra_ide/src/snapshots/highlighting.html");
-        let actual_html = &analysis.highlight_as_html(file_id, false).unwrap();
-        let expected_html = &read_text(&dst_file);
-        fs::write(dst_file, &actual_html).unwrap();
-        assert_eq_text!(expected_html, actual_html);
+    let value = literal.value()?;
+    let (analysis, tmp_file_id) = Analysis::from_single_file(value);
+
+    if let Some(range) = literal.open_quote_text_range() {
+        acc.push(HighlightedRange {
+            range,
+            highlight: HighlightTag::StringLiteral.into(),
+            binding_hash: None,
+        })
     }
 
-    #[test]
-    fn test_rainbow_highlighting() {
-        let (analysis, file_id) = single_file(
-            r#"
-fn main() {
-    let hello = "hello";
-    let x = hello.to_string();
-    let y = hello.to_string();
-
-    let x = "other color please!";
-    let y = x.to_string();
-}
-
-fn bar() {
-    let mut hello = "hello";
-}
-"#
-            .trim(),
-        );
-        let dst_file = project_dir().join("crates/ra_ide/src/snapshots/rainbow_highlighting.html");
-        let actual_html = &analysis.highlight_as_html(file_id, true).unwrap();
-        let expected_html = &read_text(&dst_file);
-        fs::write(dst_file, &actual_html).unwrap();
-        assert_eq_text!(expected_html, actual_html);
+    for mut h in analysis.highlight(tmp_file_id).unwrap() {
+        if let Some(r) = literal.map_range_up(h.range) {
+            h.range = r;
+            acc.push(h)
+        }
     }
 
-    #[test]
-    fn accidentally_quadratic() {
-        let file = project_dir().join("crates/ra_syntax/test_data/accidentally_quadratic");
-        let src = fs::read_to_string(file).unwrap();
-
-        let mut mock = MockAnalysis::new();
-        let file_id = mock.add_file("/main.rs", &src);
-        let host = mock.analysis_host();
-
-        // let t = std::time::Instant::now();
-        let _ = host.analysis().highlight(file_id).unwrap();
-        // eprintln!("elapsed: {:?}", t.elapsed());
+    if let Some(range) = literal.close_quote_text_range() {
+        acc.push(HighlightedRange {
+            range,
+            highlight: HighlightTag::StringLiteral.into(),
+            binding_hash: None,
+        })
     }
+
+    Some(())
 }
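
The core trick in `highlight_injection` above is coordinate translation: the contents of a `ra_fixture` raw string are analyzed as a stand-alone file, so every highlight produced for it is relative to the literal's contents and has to be shifted by the contents' start offset before being pushed into the outer file's results (that is what `RawString::map_range_up`, added later in this commit, does). A minimal sketch of the translation with plain integer offsets instead of `TextRange`:

```
// Hypothetical stand-in for `RawString::map_range_up`: shift a range
// relative to the literal's contents into outer-file coordinates.
fn map_range_up(contents_start: u32, range: (u32, u32)) -> (u32, u32) {
    (range.0 + contents_start, range.1 + contents_start)
}

fn main() {
    // If the fixture text starts at offset 120 of the enclosing file,
    // a highlight at 3..7 inside it lands at 123..127 outside.
    assert_eq!(map_range_up(120, (3, 7)), (123, 127));
}
```
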
diff --git a/crates/ra_ide/src/syntax_highlighting/html.rs b/crates/ra_ide/src/syntax_highlighting/html.rs
new file mode 100644 (file)
index 0000000..e13766c
--- /dev/null
@@ -0,0 +1,106 @@
+//! Renders a bit of code as HTML.
+
+use ra_db::SourceDatabase;
+use ra_syntax::AstNode;
+
+use crate::{FileId, HighlightedRange, RootDatabase};
+
+use super::highlight;
+
+pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
+    let parse = db.parse(file_id);
+
+    fn rainbowify(seed: u64) -> String {
+        use rand::prelude::*;
+        let mut rng = SmallRng::seed_from_u64(seed);
+        format!(
+            "hsl({h},{s}%,{l}%)",
+            h = rng.gen_range::<u16, _, _>(0, 361),
+            s = rng.gen_range::<u16, _, _>(42, 99),
+            l = rng.gen_range::<u16, _, _>(40, 91),
+        )
+    }
+
+    let mut ranges = highlight(db, file_id, None);
+    ranges.sort_by_key(|it| it.range.start());
+    // quick non-optimal heuristic to intersect token ranges and highlighted ranges
+    let mut frontier = 0;
+    let mut could_intersect: Vec<&HighlightedRange> = Vec::new();
+
+    let mut buf = String::new();
+    buf.push_str(&STYLE);
+    buf.push_str("<pre><code>");
+    let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.into_token());
+    for token in tokens {
+        could_intersect.retain(|it| token.text_range().start() <= it.range.end());
+        while let Some(r) = ranges.get(frontier) {
+            if r.range.start() <= token.text_range().end() {
+                could_intersect.push(r);
+                frontier += 1;
+            } else {
+                break;
+            }
+        }
+        let text = html_escape(&token.text());
+        let ranges = could_intersect
+            .iter()
+            .filter(|it| token.text_range().is_subrange(&it.range))
+            .collect::<Vec<_>>();
+        if ranges.is_empty() {
+            buf.push_str(&text);
+        } else {
+            let classes = ranges
+                .iter()
+                .map(|it| it.highlight.to_string().replace('.', " "))
+                .collect::<Vec<_>>()
+                .join(" ");
+            let binding_hash = ranges.first().and_then(|x| x.binding_hash);
+            let color = match (rainbow, binding_hash) {
+                (true, Some(hash)) => format!(
+                    " data-binding-hash=\"{}\" style=\"color: {};\"",
+                    hash,
+                    rainbowify(hash)
+                ),
+                _ => "".into(),
+            };
+            buf.push_str(&format!("<span class=\"{}\"{}>{}</span>", classes, color, text));
+        }
+    }
+    buf.push_str("</code></pre>");
+    buf
+}
+
+//FIXME: like, real html escaping
+fn html_escape(text: &str) -> String {
+    text.replace("<", "&lt;").replace(">", "&gt;")
+}
+
+const STYLE: &str = "
+<style>
+body                { margin: 0; }
+pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime           { color: #DFAF8F; font-style: italic; }
+.comment            { color: #7F9F7F; }
+.struct, .enum      { color: #7CB8BB; }
+.enum_variant       { color: #BDE0F3; }
+.string_literal     { color: #CC9393; }
+.field              { color: #94BFF3; }
+.function           { color: #93E0E3; }
+.parameter          { color: #94BFF3; }
+.text               { color: #DCDCCC; }
+.type               { color: #7CB8BB; }
+.builtin_type       { color: #8CD0D3; }
+.type_param         { color: #DFAF8F; }
+.attribute          { color: #94BFF3; }
+.numeric_literal    { color: #BFEBBF; }
+.macro              { color: #94BFF3; }
+.module             { color: #AFD8AF; }
+.variable           { color: #DCDCCC; }
+.mutable            { text-decoration: underline; }
+
+.keyword            { color: #F0DFAF; font-weight: bold; }
+.keyword.unsafe     { color: #BC8383; font-weight: bold; }
+.control            { font-style: italic; }
+</style>
+";
diff --git a/crates/ra_ide/src/syntax_highlighting/tags.rs b/crates/ra_ide/src/syntax_highlighting/tags.rs
new file mode 100644 (file)
index 0000000..8835a5d
--- /dev/null
@@ -0,0 +1,175 @@
+//! Defines token tags we use for syntax highlighting.
+//! A tag is not unlike a CSS class.
+
+use std::{fmt, ops};
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct Highlight {
+    pub tag: HighlightTag,
+    pub modifiers: HighlightModifiers,
+}
+
+#[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct HighlightModifiers(u32);
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub enum HighlightTag {
+    Attribute,
+    BuiltinType,
+    ByteLiteral,
+    CharLiteral,
+    Comment,
+    Constant,
+    Enum,
+    EnumVariant,
+    Field,
+    Function,
+    Keyword,
+    Lifetime,
+    Macro,
+    Module,
+    NumericLiteral,
+    SelfType,
+    Static,
+    StringLiteral,
+    Struct,
+    Trait,
+    TypeAlias,
+    TypeParam,
+    Union,
+    Local,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+#[repr(u8)]
+pub enum HighlightModifier {
+    /// Used with keywords like `if` and `break`.
+    Control = 0,
+    /// `foo` in `fn foo(x: i32)` is a definition, `foo` in `foo(90 + 2)` is
+    /// not.
+    Definition,
+    Mutable,
+    Unsafe,
+}
+
+impl HighlightTag {
+    fn as_str(self) -> &'static str {
+        match self {
+            HighlightTag::Attribute => "attribute",
+            HighlightTag::BuiltinType => "builtin_type",
+            HighlightTag::ByteLiteral => "byte_literal",
+            HighlightTag::CharLiteral => "char_literal",
+            HighlightTag::Comment => "comment",
+            HighlightTag::Constant => "constant",
+            HighlightTag::Enum => "enum",
+            HighlightTag::EnumVariant => "enum_variant",
+            HighlightTag::Field => "field",
+            HighlightTag::Function => "function",
+            HighlightTag::Keyword => "keyword",
+            HighlightTag::Lifetime => "lifetime",
+            HighlightTag::Macro => "macro",
+            HighlightTag::Module => "module",
+            HighlightTag::NumericLiteral => "numeric_literal",
+            HighlightTag::SelfType => "self_type",
+            HighlightTag::Static => "static",
+            HighlightTag::StringLiteral => "string_literal",
+            HighlightTag::Struct => "struct",
+            HighlightTag::Trait => "trait",
+            HighlightTag::TypeAlias => "type_alias",
+            HighlightTag::TypeParam => "type_param",
+            HighlightTag::Union => "union",
+            HighlightTag::Local => "variable",
+        }
+    }
+}
+
+impl fmt::Display for HighlightTag {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(self.as_str(), f)
+    }
+}
+
+impl HighlightModifier {
+    const ALL: &'static [HighlightModifier] = &[
+        HighlightModifier::Control,
+        HighlightModifier::Definition,
+        HighlightModifier::Mutable,
+        HighlightModifier::Unsafe,
+    ];
+
+    fn as_str(self) -> &'static str {
+        match self {
+            HighlightModifier::Control => "control",
+            HighlightModifier::Definition => "declaration",
+            HighlightModifier::Mutable => "mutable",
+            HighlightModifier::Unsafe => "unsafe",
+        }
+    }
+
+    fn mask(self) -> u32 {
+        1 << (self as u32)
+    }
+}
+
+impl fmt::Display for HighlightModifier {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(self.as_str(), f)
+    }
+}
+
+impl fmt::Display for Highlight {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", self.tag)?;
+        for modifier in self.modifiers.iter() {
+            write!(f, ".{}", modifier)?
+        }
+        Ok(())
+    }
+}
+
+impl From<HighlightTag> for Highlight {
+    fn from(tag: HighlightTag) -> Highlight {
+        Highlight::new(tag)
+    }
+}
+
+impl Highlight {
+    pub(crate) fn new(tag: HighlightTag) -> Highlight {
+        Highlight { tag, modifiers: HighlightModifiers::default() }
+    }
+}
+
+impl ops::BitOr<HighlightModifier> for HighlightTag {
+    type Output = Highlight;
+
+    fn bitor(self, rhs: HighlightModifier) -> Highlight {
+        Highlight::new(self) | rhs
+    }
+}
+
+impl ops::BitOrAssign<HighlightModifier> for HighlightModifiers {
+    fn bitor_assign(&mut self, rhs: HighlightModifier) {
+        self.0 |= rhs.mask();
+    }
+}
+
+impl ops::BitOrAssign<HighlightModifier> for Highlight {
+    fn bitor_assign(&mut self, rhs: HighlightModifier) {
+        self.modifiers |= rhs;
+    }
+}
+
+impl ops::BitOr<HighlightModifier> for Highlight {
+    type Output = Highlight;
+
+    fn bitor(mut self, rhs: HighlightModifier) -> Highlight {
+        self |= rhs;
+        self
+    }
+}
+
+impl HighlightModifiers {
+    pub fn iter(self) -> impl Iterator<Item = HighlightModifier> {
+        HighlightModifier::ALL.iter().copied().filter(move |it| self.0 & it.mask() == it.mask())
+    }
+}
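
The operator impls are what make call sites read naturally: a tag can be OR-ed with modifiers to build a `Highlight` in one expression, and `Display` renders the dotted form that the HTML renderer splits back into CSS classes. A usage sketch against the types defined above (in-crate, since `Highlight::new` is `pub(crate)`):

```
#[test]
fn compose_highlight() {
    // Compose fluently; modifiers accumulate in the bitset.
    let h: Highlight = HighlightTag::Local | HighlightModifier::Mutable;
    assert_eq!(h.to_string(), "variable.mutable");

    let mut h = Highlight::new(HighlightTag::Keyword);
    h |= HighlightModifier::Unsafe;
    // Matches the `.keyword.unsafe` rule in the HTML style above.
    assert_eq!(h.to_string(), "keyword.unsafe");
}
```
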
diff --git a/crates/ra_ide/src/syntax_highlighting/tests.rs b/crates/ra_ide/src/syntax_highlighting/tests.rs
new file mode 100644 (file)
index 0000000..98c0307
--- /dev/null
@@ -0,0 +1,133 @@
+use std::fs;
+
+use test_utils::{assert_eq_text, project_dir, read_text};
+
+use crate::{
+    mock_analysis::{single_file, MockAnalysis},
+    FileRange, TextRange,
+};
+
+#[test]
+fn test_highlighting() {
+    let (analysis, file_id) = single_file(
+        r#"
+#[derive(Clone, Debug)]
+struct Foo {
+    pub x: i32,
+    pub y: i32,
+}
+
+fn foo<'a, T>() -> T {
+    foo::<'a, i32>()
+}
+
+macro_rules! def_fn {
+    ($($tt:tt)*) => {$($tt)*}
+}
+
+def_fn! {
+    fn bar() -> u32 {
+        100
+    }
+}
+
+// comment
+fn main() {
+    println!("Hello, {}!", 92);
+
+    let mut vec = Vec::new();
+    if true {
+        let x = 92;
+        vec.push(Foo { x, y: 1 });
+    }
+    unsafe { vec.set_len(0); }
+
+    let mut x = 42;
+    let y = &mut x;
+    let z = &y;
+
+    y;
+}
+
+enum Option<T> {
+    Some(T),
+    None,
+}
+use Option::*;
+
+impl<T> Option<T> {
+    fn and<U>(self, other: Option<U>) -> Option<(T, U)> {
+        match other {
+            None => unimplemented!(),
+            Nope => Nope,
+        }
+    }
+}
+"#
+        .trim(),
+    );
+    let dst_file = project_dir().join("crates/ra_ide/src/snapshots/highlighting.html");
+    let actual_html = &analysis.highlight_as_html(file_id, false).unwrap();
+    let expected_html = &read_text(&dst_file);
+    fs::write(dst_file, &actual_html).unwrap();
+    assert_eq_text!(expected_html, actual_html);
+}
+
+#[test]
+fn test_rainbow_highlighting() {
+    let (analysis, file_id) = single_file(
+        r#"
+fn main() {
+    let hello = "hello";
+    let x = hello.to_string();
+    let y = hello.to_string();
+
+    let x = "other color please!";
+    let y = x.to_string();
+}
+
+fn bar() {
+    let mut hello = "hello";
+}
+"#
+        .trim(),
+    );
+    let dst_file = project_dir().join("crates/ra_ide/src/snapshots/rainbow_highlighting.html");
+    let actual_html = &analysis.highlight_as_html(file_id, true).unwrap();
+    let expected_html = &read_text(&dst_file);
+    fs::write(dst_file, &actual_html).unwrap();
+    assert_eq_text!(expected_html, actual_html);
+}
+
+#[test]
+fn accidentally_quadratic() {
+    let file = project_dir().join("crates/ra_syntax/test_data/accidentally_quadratic");
+    let src = fs::read_to_string(file).unwrap();
+
+    let mut mock = MockAnalysis::new();
+    let file_id = mock.add_file("/main.rs", &src);
+    let host = mock.analysis_host();
+
+    // let t = std::time::Instant::now();
+    let _ = host.analysis().highlight(file_id).unwrap();
+    // eprintln!("elapsed: {:?}", t.elapsed());
+}
+
+#[test]
+fn test_ranges() {
+    let (analysis, file_id) = single_file(
+        r#"
+            #[derive(Clone, Debug)]
+            struct Foo {
+                pub x: i32,
+                pub y: i32,
+            }"#,
+    );
+
+    // The "x"
+    let highlights = &analysis
+        .highlight_range(FileRange { file_id, range: TextRange::offset_len(82.into(), 1.into()) })
+        .unwrap();
+
+    assert_eq!(&highlights[0].highlight.to_string(), "field.declaration");
+}
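
Note the order in `test_highlighting` and `test_rainbow_highlighting`: the actual HTML is written to the snapshot file *before* the assertion, so a failing run leaves the refreshed snapshot on disk and `git diff` shows exactly what changed. A minimal sketch of that update-in-place snapshot pattern (the helper is hypothetical, not part of `test_utils`):

```
use std::fs;

// Refresh the snapshot on disk, then compare, so a failure leaves a
// reviewable diff behind instead of just a panic message.
fn check_snapshot(path: &str, actual: &str) {
    let expected = fs::read_to_string(path).unwrap_or_default();
    fs::write(path, actual).unwrap();
    assert_eq!(expected, actual, "snapshot {} changed", path);
}

fn main() {
    fs::write("/tmp/snap.html", "<pre></pre>").unwrap();
    check_snapshot("/tmp/snap.html", "<pre></pre>");
}
```
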
index ee409e34e995fe4c0a52ec0bfc22face36b72ff6..7b285d28090f91a38ee829a28984200d934d9006 100644 (file)
@@ -11,30 +11,17 @@ doctest = false
 wasm = []
 
 [dependencies]
-either = "1.5.3"
-format-buf = "1.0.0"
-indexmap = "1.3.2"
-itertools = "0.8.2"
-join_to_string = "0.1.3"
 log = "0.4.8"
 rayon = "1.3.0"
 fst = { version = "0.3.5", default-features = false }
 rustc-hash = "1.1.0"
-unicase = "2.6.0"
 superslice = "1.0.0"
-once_cell = "1.3.1"
 
 ra_syntax = { path = "../ra_syntax" }
 ra_text_edit = { path = "../ra_text_edit" }
 ra_db = { path = "../ra_db" }
-ra_cfg = { path = "../ra_cfg" }
-ra_fmt = { path = "../ra_fmt" }
 ra_prof = { path = "../ra_prof" }
-test_utils = { path = "../test_utils" }
 
 # ra_ide should depend only on the top-level `hir` package. if you need
 # something from some `hir_xxx` subpackage, reexport the API via `hir`.
 hir = { path = "../ra_hir", package = "ra_hir" }
-
-[dev-dependencies]
-insta = "0.13.1"
index 04c214624b0d4a674240fa18440a97bef23c94f1..93f32ba855d8df0ae930f475985d44744a169f2e 100644 (file)
@@ -6,8 +6,8 @@
 // FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
 
 use hir::{
-    Adt, FieldSource, HasSource, ImplBlock, InFile, Local, MacroDef, Module, ModuleDef,
-    SourceBinder, StructField, TypeParam,
+    Adt, FieldSource, HasSource, ImplBlock, Local, MacroDef, Module, ModuleDef, Semantics,
+    StructField, TypeParam,
 };
 use ra_prof::profile;
 use ra_syntax::{
@@ -68,78 +68,97 @@ pub fn visibility(&self, db: &RootDatabase) -> Option<ast::Visibility> {
     }
 }
 
-pub fn classify_name(
-    sb: &mut SourceBinder<RootDatabase>,
-    name: InFile<&ast::Name>,
-) -> Option<NameDefinition> {
+pub enum NameClass {
+    NameDefinition(NameDefinition),
+    /// `None` in `if let None = Some(82) {}`
+    ConstReference(NameDefinition),
+}
+
+impl NameClass {
+    pub fn into_definition(self) -> Option<NameDefinition> {
+        match self {
+            NameClass::NameDefinition(it) => Some(it),
+            NameClass::ConstReference(_) => None,
+        }
+    }
+
+    pub fn definition(self) -> NameDefinition {
+        match self {
+            NameClass::NameDefinition(it) | NameClass::ConstReference(it) => it,
+        }
+    }
+}
+
+pub fn classify_name(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Option<NameClass> {
+    if let Some(bind_pat) = name.syntax().parent().and_then(ast::BindPat::cast) {
+        if let Some(def) = sema.resolve_bind_pat_to_const(&bind_pat) {
+            return Some(NameClass::ConstReference(NameDefinition::ModuleDef(def)));
+        }
+    }
+
+    classify_name_inner(sema, name).map(NameClass::NameDefinition)
+}
+
+fn classify_name_inner(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Option<NameDefinition> {
     let _p = profile("classify_name");
-    let parent = name.value.syntax().parent()?;
+    let parent = name.syntax().parent()?;
 
     match_ast! {
         match parent {
             ast::BindPat(it) => {
-                let src = name.with_value(it);
-                let local = sb.to_def(src)?;
+                let local = sema.to_def(&it)?;
                 Some(NameDefinition::Local(local))
             },
             ast::RecordFieldDef(it) => {
-                let src = name.with_value(it);
-                let field: hir::StructField = sb.to_def(src)?;
+                let field: hir::StructField = sema.to_def(&it)?;
                 Some(from_struct_field(field))
             },
             ast::Module(it) => {
-                let def = sb.to_def(name.with_value(it))?;
+                let def = sema.to_def(&it)?;
                 Some(from_module_def(def.into()))
             },
             ast::StructDef(it) => {
-                let src = name.with_value(it);
-                let def: hir::Struct = sb.to_def(src)?;
+                let def: hir::Struct = sema.to_def(&it)?;
+                Some(from_module_def(def.into()))
+            },
+            ast::UnionDef(it) => {
+                let def: hir::Union = sema.to_def(&it)?;
                 Some(from_module_def(def.into()))
             },
             ast::EnumDef(it) => {
-                let src = name.with_value(it);
-                let def: hir::Enum = sb.to_def(src)?;
+                let def: hir::Enum = sema.to_def(&it)?;
                 Some(from_module_def(def.into()))
             },
             ast::TraitDef(it) => {
-                let src = name.with_value(it);
-                let def: hir::Trait = sb.to_def(src)?;
+                let def: hir::Trait = sema.to_def(&it)?;
                 Some(from_module_def(def.into()))
             },
             ast::StaticDef(it) => {
-                let src = name.with_value(it);
-                let def: hir::Static = sb.to_def(src)?;
+                let def: hir::Static = sema.to_def(&it)?;
                 Some(from_module_def(def.into()))
             },
             ast::EnumVariant(it) => {
-                let src = name.with_value(it);
-                let def: hir::EnumVariant = sb.to_def(src)?;
+                let def: hir::EnumVariant = sema.to_def(&it)?;
                 Some(from_module_def(def.into()))
             },
             ast::FnDef(it) => {
-                let src = name.with_value(it);
-                let def: hir::Function = sb.to_def(src)?;
+                let def: hir::Function = sema.to_def(&it)?;
                 Some(from_module_def(def.into()))
             },
             ast::ConstDef(it) => {
-                let src = name.with_value(it);
-                let def: hir::Const = sb.to_def(src)?;
+                let def: hir::Const = sema.to_def(&it)?;
                 Some(from_module_def(def.into()))
             },
             ast::TypeAliasDef(it) => {
-                let src = name.with_value(it);
-                let def: hir::TypeAlias = sb.to_def(src)?;
+                let def: hir::TypeAlias = sema.to_def(&it)?;
                 Some(from_module_def(def.into()))
             },
             ast::MacroCall(it) => {
-                let src = name.with_value(it);
-                let def = sb.to_def(src.clone())?;
-
+                let def = sema.to_def(&it)?;
                 Some(NameDefinition::Macro(def))
             },
             ast::TypeParam(it) => {
-                let src = name.with_value(it);
-                let def = sb.to_def(src)?;
+                let def = sema.to_def(&it)?;
                 Some(NameDefinition::TypeParam(def))
             },
             _ => None,
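
`NameClass::ConstReference` captures a real ambiguity in Rust patterns: an identifier in a pattern refers to an existing constant or unit variant when one is in scope, and introduces a fresh binding otherwise, which is why `classify_name` asks the resolver (`resolve_bind_pat_to_const`) before running the syntactic classification. Plain Rust showing the two readings (the `None`/`Nope` arms in the highlighting fixture above exercise exactly this):

```
enum Mode { Fast, Slow }
use Mode::*;

fn describe(m: Mode) -> &'static str {
    match m {
        // `Fast` resolves to the existing unit variant: a const
        // reference, not a new binding.
        Fast => "fast path",
        // `other` has nothing to resolve to, so it introduces a fresh
        // binding that matches anything else.
        other => { let _ = other; "fallback" }
    }
}

fn main() {
    assert_eq!(describe(Fast), "fast path");
    assert_eq!(describe(Slow), "fallback");
}
```
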
index b8dd358a97b52c2272797351adb56ed387a4a6a5..e5fc3c47012782cdc39c1b2c4faf520917891b6b 100644 (file)
@@ -1,7 +1,7 @@
 //! This module contains import search functionality that is provided to the ra_assists module.
 //! Later, this should be moved out into a separate crate that is accessible from the ra_assists module.
 
-use hir::{db::HirDatabase, ModuleDef, SourceBinder};
+use hir::{ModuleDef, Semantics};
 use ra_prof::profile;
 use ra_syntax::{ast, AstNode, SyntaxKind::NAME};
 
 };
 
 pub struct ImportsLocator<'a> {
-    source_binder: SourceBinder<'a, RootDatabase>,
+    sema: Semantics<'a, RootDatabase>,
 }
 
 impl<'a> ImportsLocator<'a> {
     pub fn new(db: &'a RootDatabase) -> Self {
-        Self { source_binder: SourceBinder::new(db) }
+        Self { sema: Semantics::new(db) }
     }
 
     pub fn find_imports(&mut self, name_to_import: &str) -> Vec<ModuleDef> {
         let _p = profile("search_for_imports");
-        let db = self.source_binder.db;
+        let db = self.sema.db;
 
         let project_results = {
             let mut query = Query::new(name_to_import.to_string());
@@ -41,7 +41,7 @@ pub fn find_imports(&mut self, name_to_import: &str) -> Vec<ModuleDef> {
         project_results
             .into_iter()
             .chain(lib_results.into_iter())
-            .filter_map(|import_candidate| self.get_name_definition(db, &import_candidate))
+            .filter_map(|import_candidate| self.get_name_definition(&import_candidate))
             .filter_map(|name_definition_to_import| match name_definition_to_import {
                 NameDefinition::ModuleDef(module_def) => Some(module_def),
                 _ => None,
@@ -49,22 +49,17 @@ pub fn find_imports(&mut self, name_to_import: &str) -> Vec<ModuleDef> {
             .collect()
     }
 
-    fn get_name_definition(
-        &mut self,
-        db: &impl HirDatabase,
-        import_candidate: &FileSymbol,
-    ) -> Option<NameDefinition> {
+    fn get_name_definition(&mut self, import_candidate: &FileSymbol) -> Option<NameDefinition> {
         let _p = profile("get_name_definition");
-        let file_id = import_candidate.file_id.into();
-        let candidate_node = import_candidate.ptr.to_node(&db.parse_or_expand(file_id)?);
+        let file_id = import_candidate.file_id;
+
+        let candidate_node = import_candidate.ptr.to_node(self.sema.parse(file_id).syntax());
         let candidate_name_node = if candidate_node.kind() != NAME {
             candidate_node.children().find(|it| it.kind() == NAME)?
         } else {
             candidate_node
         };
-        classify_name(
-            &mut self.source_binder,
-            hir::InFile { file_id, value: &ast::Name::cast(candidate_name_node)? },
-        )
+        let name = ast::Name::cast(candidate_name_node)?;
+        classify_name(&self.sema, &name)?.into_definition()
     }
 }
index af7b759e5c665f02abbf5e8130383cbcb8913dfb..b9db5c276e97abab97d9b705331222f3ffe8bf8d 100644 (file)
@@ -1,7 +1,8 @@
 //! `LineIndex` maps flat `TextUnit` offsets into `(Line, Column)`
 //! representation.
+use std::iter;
 
-use ra_syntax::TextUnit;
+use ra_syntax::{TextRange, TextUnit};
 use rustc_hash::FxHashMap;
 use superslice::Ext;
 
@@ -87,6 +88,19 @@ pub fn offset(&self, line_col: LineCol) -> TextUnit {
         self.newlines[line_col.line as usize] + col
     }
 
+    pub fn lines(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ {
+        let lo = self.newlines.lower_bound(&range.start());
+        let hi = self.newlines.upper_bound(&range.end());
+        let all = iter::once(range.start())
+            .chain(self.newlines[lo..hi].iter().copied())
+            .chain(iter::once(range.end()));
+
+        all.clone()
+            .zip(all.skip(1))
+            .map(|(lo, hi)| TextRange::from_to(lo, hi))
+            .filter(|it| !it.is_empty())
+    }
+
     fn utf8_to_utf16_col(&self, line: u32, mut col: TextUnit) -> usize {
         if let Some(utf16_chars) = self.utf16_lines.get(&line) {
             let mut correction = TextUnit::from_usize(0);
@@ -221,4 +235,32 @@ fn test_string() {
 
         assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextUnit::from_usize(15));
     }
+
+    #[test]
+    fn test_splitlines() {
+        fn r(lo: u32, hi: u32) -> TextRange {
+            TextRange::from_to(lo.into(), hi.into())
+        }
+
+        let text = "a\nbb\nccc\n";
+        let line_index = LineIndex::new(text);
+
+        let actual = line_index.lines(r(0, 9)).collect::<Vec<_>>();
+        let expected = vec![r(0, 2), r(2, 5), r(5, 9)];
+        assert_eq!(actual, expected);
+
+        let text = "";
+        let line_index = LineIndex::new(text);
+
+        let actual = line_index.lines(r(0, 0)).collect::<Vec<_>>();
+        let expected = vec![];
+        assert_eq!(actual, expected);
+
+        let text = "\n";
+        let line_index = LineIndex::new(text);
+
+        let actual = line_index.lines(r(0, 1)).collect::<Vec<_>>();
+        let expected = vec![r(0, 1)];
+        assert_eq!(actual, expected)
+    }
 }
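
`LineIndex::lines` exists because LSP semantic tokens are, in practice, single-line: clients generally cannot render a token that spans a line break, so the new `handle_semantic_tokens` below splits each multi-line highlight at newline boundaries. A self-contained stand-in that reproduces the behavior checked by `test_splitlines`:

```
// Minimal stand-in for `LineIndex::lines`: split a byte range of
// `text` at newline boundaries, dropping empty pieces.
fn lines(text: &str, start: usize, end: usize) -> Vec<(usize, usize)> {
    let mut cuts = vec![start];
    // Each '\n' inside the range starts a new line right after it.
    cuts.extend(text[start..end].match_indices('\n').map(|(i, _)| start + i + 1));
    cuts.push(end);
    cuts.windows(2)
        .map(|w| (w[0], w[1]))
        .filter(|(lo, hi)| lo < hi)
        .collect()
}

fn main() {
    let text = "a\nbb\nccc\n";
    assert_eq!(lines(text, 0, 9), vec![(0, 2), (2, 5), (5, 9)]);
}
```
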
index ea2cac069e6432c6b985a714b5fbf4eb254c44c5..2aaf0215fa6b2ab464c261a2d4fd4169d5a234b9 100644 (file)
@@ -570,7 +570,7 @@ fn test_token_tree_last_child_is_white_space() {
         let token_tree = insert_children(
             &rbrace.parent().unwrap(),
             InsertPosition::Last,
-            &mut std::iter::once(space),
+            std::iter::once(space),
         );
 
         // Token Tree now is :
index 06c92645e06ef0638d5af244f7e771d4e01d4012..4163a2cf51c803aaa7eaec16b93aba1a2f80b4a6 100644 (file)
@@ -169,6 +169,7 @@ pub(super) fn stmt(p: &mut Parser, with_semi: StmtWithSemi) {
     //     let d: i32 = 92;
     //     let e: !;
     //     let _: ! = {};
+    //     let f = #[attr]||{};
     // }
     fn let_stmt(p: &mut Parser, m: Marker, with_semi: StmtWithSemi) {
         assert!(p.at(T![let]));
@@ -178,7 +179,7 @@ fn let_stmt(p: &mut Parser, m: Marker, with_semi: StmtWithSemi) {
             types::ascription(p);
         }
         if p.eat(T![=]) {
-            expressions::expr(p);
+            expressions::expr_with_attrs(p);
         }
 
         match with_semi {
index 823745795e8b45efb7a506c8a3f5b1378950a900..d15b089925c13602886032219d1c580a535bf6e1 100644 (file)
@@ -10,7 +10,6 @@ doctest = false
 
 [dependencies]
 once_cell = "1.3.1"
-itertools = "0.8.2"
 backtrace = { version = "0.3.44", optional = true }
 
 [target.'cfg(not(target_env = "msvc"))'.dependencies]
index 21fca99a6f2601e11e074aa05c1e5a504e0a546d..b87c7084e6f1d3e6bd39d969cd27baa17f946e03 100644 (file)
@@ -4,10 +4,11 @@
 
 use itertools::Itertools;
 use ra_text_edit::TextEditBuilder;
-use rustc_hash::FxHashMap;
+use rustc_hash::{FxHashMap, FxHashSet};
 
 use crate::{
-    AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
+    AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken,
+    TextRange, TextUnit,
 };
 
 /// Returns ancestors of the node at the offset, sorted by length. This should
@@ -37,6 +38,17 @@ pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) ->
     ancestors_at_offset(syntax, offset).find_map(N::cast)
 }
 
+/// Skips to the next non-`trivia` token
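+/// (in the given direction).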
+pub fn skip_trivia_token(mut token: SyntaxToken, direction: Direction) -> Option<SyntaxToken> {
+    while token.kind().is_trivia() {
+        token = match direction {
+            Direction::Next => token.next_token()?,
+            Direction::Prev => token.prev_token()?,
+        }
+    }
+    Some(token)
+}
+
 /// Finds the first sibling in the given direction which is not `trivia`
 pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option<SyntaxElement> {
     return match element {
@@ -56,6 +68,11 @@ pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxEleme
     root.covering_element(range)
 }
 
+pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option<SyntaxNode> {
+    let u_ancestors = u.ancestors().collect::<FxHashSet<SyntaxNode>>();
+    v.ancestors().find(|it| u_ancestors.contains(it))
+}
+
 #[derive(Debug, PartialEq, Eq, Clone, Copy)]
 pub enum InsertPosition<T> {
     First,
@@ -125,6 +142,15 @@ fn go(
 /// This is a type-unsafe low-level editing API, if you need to use it,
 /// prefer to create a type-safe abstraction on top of it instead.
 pub fn insert_children(
+    parent: &SyntaxNode,
+    position: InsertPosition<SyntaxElement>,
+    to_insert: impl IntoIterator<Item = SyntaxElement>,
+) -> SyntaxNode {
+    let mut to_insert = to_insert.into_iter();
+    _insert_children(parent, position, &mut to_insert)
+}
+
+fn _insert_children(
     parent: &SyntaxNode,
     position: InsertPosition<SyntaxElement>,
     to_insert: &mut dyn Iterator<Item = SyntaxElement>,
@@ -159,6 +185,15 @@ pub fn insert_children(
 /// This is a type-unsafe low-level editing API, if you need to use it,
 /// prefer to create a type-safe abstraction on top of it instead.
 pub fn replace_children(
+    parent: &SyntaxNode,
+    to_delete: RangeInclusive<SyntaxElement>,
+    to_insert: impl IntoIterator<Item = SyntaxElement>,
+) -> SyntaxNode {
+    let mut to_insert = to_insert.into_iter();
+    _replace_children(parent, to_delete, &mut to_insert)
+}
+
+fn _replace_children(
     parent: &SyntaxNode,
     to_delete: RangeInclusive<SyntaxElement>,
     to_insert: &mut dyn Iterator<Item = SyntaxElement>,
@@ -185,14 +220,21 @@ pub fn replace_children(
 /// to create a type-safe abstraction on top of it instead.
 pub fn replace_descendants(
     parent: &SyntaxNode,
-    map: &impl Fn(&SyntaxElement) -> Option<SyntaxElement>,
+    map: impl Fn(&SyntaxElement) -> Option<SyntaxElement>,
+) -> SyntaxNode {
+    _replace_descendants(parent, &map)
+}
+
+fn _replace_descendants(
+    parent: &SyntaxNode,
+    map: &dyn Fn(&SyntaxElement) -> Option<SyntaxElement>,
 ) -> SyntaxNode {
     //  FIXME: this could be made much faster.
     let new_children = parent.children_with_tokens().map(|it| go(map, it)).collect::<Vec<_>>();
     return with_children(parent, new_children);
 
     fn go(
-        map: &impl Fn(&SyntaxElement) -> Option<SyntaxElement>,
+        map: &dyn Fn(&SyntaxElement) -> Option<SyntaxElement>,
         element: SyntaxElement,
     ) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
         if let Some(replacement) = map(&element) {
@@ -204,7 +246,7 @@ fn go(
         match element {
             NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
             NodeOrToken::Node(it) => {
-                NodeOrToken::Node(replace_descendants(&it, map).green().clone())
+                NodeOrToken::Node(_replace_descendants(&it, map).green().clone())
             }
         }
     }
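
The `insert_children`/`_insert_children` split (and the matching `replace_children` and `replace_descendants` pairs) is a deliberate shape: the public function keeps the ergonomic generic signature, then immediately erases it to `&mut dyn Iterator` / `&dyn Fn`, so the non-trivial body is compiled once rather than re-monomorphized for every caller. A toy example of the same pattern:

```
// Generic facade for ergonomics at the call site...
pub fn sum_lengths(items: impl IntoIterator<Item = String>) -> usize {
    let mut items = items.into_iter();
    _sum_lengths(&mut items)
}

// ...erased inner function compiled exactly once.
fn _sum_lengths(items: &mut dyn Iterator<Item = String>) -> usize {
    items.map(|s| s.len()).sum()
}

fn main() {
    let v = vec!["ab".to_string(), "cde".to_string()];
    assert_eq!(sum_lengths(v), 5);
}
```
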
index 0e78d8b63772dd67328de2f74cbb60865fedfad5..d2630e9e99c168489c3b2f67691372c7c934bd24 100644 (file)
@@ -276,7 +276,7 @@ pub fn replace_descendants<N: AstNode, D: AstNode>(
         .into_iter()
         .map(|(from, to)| (from.syntax().clone().into(), to.syntax().clone().into()))
         .collect::<FxHashMap<SyntaxElement, _>>();
-    let new_syntax = algo::replace_descendants(parent.syntax(), &|n| map.get(n).cloned());
+    let new_syntax = algo::replace_descendants(parent.syntax(), |n| map.get(n).cloned());
     N::cast(new_syntax).unwrap()
 }
 
@@ -331,7 +331,7 @@ fn _increase_indent(self, node: SyntaxNode) -> SyntaxNode {
                 )
             })
             .collect();
-        algo::replace_descendants(&node, &|n| replacements.get(n).cloned())
+        algo::replace_descendants(&node, |n| replacements.get(n).cloned())
     }
 
     pub fn decrease_indent<N: AstNode>(self, node: N) -> N {
@@ -359,7 +359,7 @@ fn _decrease_indent(self, node: SyntaxNode) -> SyntaxNode {
                 )
             })
             .collect();
-        algo::replace_descendants(&node, &|n| replacements.get(n).cloned())
+        algo::replace_descendants(&node, |n| replacements.get(n).cloned())
     }
 }
 
@@ -389,7 +389,7 @@ fn insert_children<N: AstNode>(
     position: InsertPosition<SyntaxElement>,
     to_insert: impl IntoIterator<Item = SyntaxElement>,
 ) -> N {
-    let new_syntax = algo::insert_children(parent.syntax(), position, &mut to_insert.into_iter());
+    let new_syntax = algo::insert_children(parent.syntax(), position, to_insert);
     N::cast(new_syntax).unwrap()
 }
 
@@ -404,8 +404,7 @@ fn replace_children<N: AstNode>(
     to_replace: RangeInclusive<SyntaxElement>,
     to_insert: impl IntoIterator<Item = SyntaxElement>,
 ) -> N {
-    let new_syntax =
-        algo::replace_children(parent.syntax(), to_replace, &mut to_insert.into_iter());
+    let new_syntax = algo::replace_children(parent.syntax(), to_replace, to_insert);
     N::cast(new_syntax).unwrap()
 }
 
index 7c20fcc1047b2d0150c173a14949964d36689c13..3f11b747f0fa960c45694275e29d636d566dab23 100644 (file)
@@ -12,11 +12,14 @@ pub fn name_ref(text: &str) -> ast::NameRef {
     ast_from_text(&format!("fn f() {{ {}; }}", text))
 }
 
-pub fn path_from_name_ref(name_ref: ast::NameRef) -> ast::Path {
-    path_from_text(&name_ref.syntax().to_string())
+pub fn path_segment(name_ref: ast::NameRef) -> ast::PathSegment {
+    ast_from_text(&format!("use {};", name_ref.syntax()))
 }
-pub fn path_qualified(qual: ast::Path, name_ref: ast::NameRef) -> ast::Path {
-    path_from_text(&format!("{}::{}", qual.syntax(), name_ref.syntax()))
+pub fn path_unqualified(segment: ast::PathSegment) -> ast::Path {
+    path_from_text(&format!("use {}", segment.syntax()))
+}
+pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path {
+    path_from_text(&format!("{}::{}", qual.syntax(), segment.syntax()))
 }
 fn path_from_text(text: &str) -> ast::Path {
     ast_from_text(text)
index ed8661faf5db94417cfdb649450cc9ecdf529734..1a51b8d3b91799c4bb66411cd6fe9b147c33b94f 100644 (file)
@@ -110,6 +110,64 @@ pub fn spans_multiple_lines(&self) -> bool {
     }
 }
 
+pub struct QuoteOffsets {
+    pub quotes: [TextRange; 2],
+    pub contents: TextRange,
+}
+
+impl QuoteOffsets {
+    fn new(literal: &str) -> Option<QuoteOffsets> {
+        let left_quote = literal.find('"')?;
+        let right_quote = literal.rfind('"')?;
+        if left_quote == right_quote {
+            // `literal` only contains one quote
+            return None;
+        }
+
+        let start = TextUnit::from(0);
+        let left_quote = TextUnit::from_usize(left_quote) + TextUnit::of_char('"');
+        let right_quote = TextUnit::from_usize(right_quote);
+        let end = TextUnit::of_str(literal);
+
+        let res = QuoteOffsets {
+            quotes: [TextRange::from_to(start, left_quote), TextRange::from_to(right_quote, end)],
+            contents: TextRange::from_to(left_quote, right_quote),
+        };
+        Some(res)
+    }
+}
+
+pub trait HasQuotes: AstToken {
+    fn quote_offsets(&self) -> Option<QuoteOffsets> {
+        let text = self.text().as_str();
+        let offsets = QuoteOffsets::new(text)?;
+        let o = self.syntax().text_range().start();
+        let offsets = QuoteOffsets {
+            quotes: [offsets.quotes[0] + o, offsets.quotes[1] + o],
+            contents: offsets.contents + o,
+        };
+        Some(offsets)
+    }
+    fn open_quote_text_range(&self) -> Option<TextRange> {
+        self.quote_offsets().map(|it| it.quotes[0])
+    }
+
+    fn close_quote_text_range(&self) -> Option<TextRange> {
+        self.quote_offsets().map(|it| it.quotes[1])
+    }
+
+    fn text_range_between_quotes(&self) -> Option<TextRange> {
+        self.quote_offsets().map(|it| it.contents)
+    }
+}
+
+impl HasQuotes for String {}
+impl HasQuotes for RawString {}
+
+pub trait HasStringValue: HasQuotes {
+    fn value(&self) -> Option<std::string::String>;
+}
+
 pub struct String(SyntaxToken);
 
 impl AstToken for String {
@@ -124,21 +182,16 @@ fn syntax(&self) -> &SyntaxToken {
     }
 }
 
-impl String {
-    pub fn value(&self) -> Option<std::string::String> {
+impl HasStringValue for String {
+    fn value(&self) -> Option<std::string::String> {
         let text = self.text().as_str();
-        let usual_string_range = find_usual_string_range(text)?;
-        let start_of_inside = usual_string_range.start().to_usize() + 1;
-        let end_of_inside = usual_string_range.end().to_usize();
-        let inside_str = &text[start_of_inside..end_of_inside];
+        let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
 
-        let mut buf = std::string::String::with_capacity(inside_str.len());
+        let mut buf = std::string::String::with_capacity(text.len());
         let mut has_error = false;
-        rustc_lexer::unescape::unescape_str(inside_str, &mut |_, unescaped_char| {
-            match unescaped_char {
-                Ok(c) => buf.push(c),
-                Err(_) => has_error = true,
-            }
+        rustc_lexer::unescape::unescape_str(text, &mut |_, unescaped_char| match unescaped_char {
+            Ok(c) => buf.push(c),
+            Err(_) => has_error = true,
         });
 
         if has_error {
@@ -162,27 +215,18 @@ fn syntax(&self) -> &SyntaxToken {
     }
 }
 
-impl RawString {
-    pub fn value(&self) -> Option<std::string::String> {
+impl HasStringValue for RawString {
+    fn value(&self) -> Option<std::string::String> {
         let text = self.text().as_str();
-        let usual_string_range = find_usual_string_range(text)?;
-        let start_of_inside = usual_string_range.start().to_usize() + 1;
-        let end_of_inside = usual_string_range.end().to_usize();
-        let inside_str = &text[start_of_inside..end_of_inside];
-        Some(inside_str.to_string())
-    }
-}
-
-fn find_usual_string_range(s: &str) -> Option<TextRange> {
-    let left_quote = s.find('"')?;
-    let right_quote = s.rfind('"')?;
-    if left_quote == right_quote {
-        // `s` only contains one quote
-        None
-    } else {
-        Some(TextRange::from_to(
-            TextUnit::from(left_quote as u32),
-            TextUnit::from(right_quote as u32),
-        ))
+        let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+        Some(text.to_string())
+    }
+}
+
+impl RawString {
+    pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
+        let contents_range = self.text_range_between_quotes()?;
+        assert!(range.is_subrange(&TextRange::offset_len(0.into(), contents_range.len())));
+        Some(range + contents_range.start())
     }
 }
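
`QuoteOffsets::new` works for both string flavors because it only looks for the first and last `"`: for a raw string the `r#...` prefix is folded into the open-quote range, so `contents` is exactly the user-visible text, which is what `value` and `map_range_up` rely on. A self-contained sketch of the same computation over byte offsets:

```
// Returns ([open_quote, close_quote], contents) ranges, or None for
// an unterminated literal with a single quote.
fn quote_offsets(literal: &str) -> Option<([(usize, usize); 2], (usize, usize))> {
    let left = literal.find('"')?;
    let right = literal.rfind('"')?;
    if left == right {
        return None; // only one quote
    }
    let quotes = [(0, left + 1), (right, literal.len())];
    let contents = (left + 1, right);
    Some((quotes, contents))
}

fn main() {
    // For `r#"hello"#`, the `r#` prefix lands in the open-quote range,
    // so the contents range covers exactly `hello`.
    let (quotes, contents) = quote_offsets(r###"r#"hello"#"###).unwrap();
    assert_eq!(quotes, [(0, 3), (8, 10)]);
    assert_eq!(contents, (3, 8));
}
```
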
index 86803fe08aadb0cd86b6fc0b5276b0f7aab8ee9a..fa8ee49a23101f872274480bd1519351d175b96b 100644 (file)
@@ -5,4 +5,5 @@ fn foo() {
     let d: i32 = 92;
     let e: !;
     let _: ! = {};
+    let f = #[attr]||{};
 }
index 97166cfd5a749962bdde4e0e675e1458de334784..17739dfbd99b561bc9ba595c0ce31a1d26b7a18f 100644 (file)
@@ -1,5 +1,5 @@
-SOURCE_FILE@[0; 110)
-  FN_DEF@[0; 109)
+SOURCE_FILE@[0; 135)
+  FN_DEF@[0; 134)
     FN_KW@[0; 2) "fn"
     WHITESPACE@[2; 3) " "
     NAME@[3; 6)
@@ -8,8 +8,8 @@ SOURCE_FILE@[0; 110)
       L_PAREN@[6; 7) "("
       R_PAREN@[7; 8) ")"
     WHITESPACE@[8; 9) " "
-    BLOCK_EXPR@[9; 109)
-      BLOCK@[9; 109)
+    BLOCK_EXPR@[9; 134)
+      BLOCK@[9; 134)
         L_CURLY@[9; 10) "{"
         WHITESPACE@[10; 15) "\n    "
         LET_STMT@[15; 21)
@@ -97,6 +97,33 @@ SOURCE_FILE@[0; 110)
               L_CURLY@[104; 105) "{"
               R_CURLY@[105; 106) "}"
           SEMI@[106; 107) ";"
-        WHITESPACE@[107; 108) "\n"
-        R_CURLY@[108; 109) "}"
-  WHITESPACE@[109; 110) "\n"
+        WHITESPACE@[107; 112) "\n    "
+        LET_STMT@[112; 132)
+          LET_KW@[112; 115) "let"
+          WHITESPACE@[115; 116) " "
+          BIND_PAT@[116; 117)
+            NAME@[116; 117)
+              IDENT@[116; 117) "f"
+          WHITESPACE@[117; 118) " "
+          EQ@[118; 119) "="
+          WHITESPACE@[119; 120) " "
+          LAMBDA_EXPR@[120; 131)
+            ATTR@[120; 127)
+              POUND@[120; 121) "#"
+              L_BRACK@[121; 122) "["
+              PATH@[122; 126)
+                PATH_SEGMENT@[122; 126)
+                  NAME_REF@[122; 126)
+                    IDENT@[122; 126) "attr"
+              R_BRACK@[126; 127) "]"
+            PARAM_LIST@[127; 129)
+              PIPE@[127; 128) "|"
+              PIPE@[128; 129) "|"
+            BLOCK_EXPR@[129; 131)
+              BLOCK@[129; 131)
+                L_CURLY@[129; 130) "{"
+                R_CURLY@[130; 131) "}"
+          SEMI@[131; 132) ";"
+        WHITESPACE@[132; 133) "\n"
+        R_CURLY@[133; 134) "}"
+  WHITESPACE@[134; 135) "\n"
index a3214929970ba7c45009c5b885ee2b2bcd498382..cae28389dcfdd54438c028eb6eef76955df87f3a 100644 (file)
@@ -9,7 +9,5 @@ publish = false
 doctest = false
 
 [dependencies]
-text_unit = "0.1.9"
+text_unit = "0.1.10"
 
-[dev-dependencies]
-test_utils = { path = "../test_utils" }
index c5d6e3831b1c17c6abf21c5641033d61a468174f..d00545121bc8d5d36959c6086302744a9be86e1f 100644 (file)
@@ -15,13 +15,12 @@ path = "./src/bin/main.rs"
 [dependencies]
 anyhow = "1.0.26"
 crossbeam-channel = "0.4.0"
-either = "1.5.3"
 env_logger = { version = "0.7.1", default-features = false }
 globset = "0.4.4"
 itertools = "0.8.2"
 jod-thread = "0.1.0"
 log = "0.4.8"
-lsp-types = { version = "0.70.1", features = ["proposed"] }
+lsp-types = { version = "0.71.0", features = ["proposed"] }
 parking_lot = "0.10.0"
 pico-args = "0.3.1"
 rand = { version = "0.7.3", features = ["small_rng"] }
index 638987ee81967cd16dda30bf1e50ceaf027179f4..759bceb32a1243e2b957b35e9a90eb79cc2036da 100644 (file)
@@ -7,9 +7,9 @@
     CompletionOptions, DocumentOnTypeFormattingOptions, FoldingRangeProviderCapability,
     ImplementationProviderCapability, RenameOptions, RenameProviderCapability, SaveOptions,
     SelectionRangeProviderCapability, SemanticTokensDocumentProvider, SemanticTokensLegend,
-    SemanticTokensOptions, SemanticTokensServerCapabilities, ServerCapabilities,
-    SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
-    TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions,
+    SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability,
+    TextDocumentSyncKind, TextDocumentSyncOptions, TypeDefinitionProviderCapability,
+    WorkDoneProgressOptions,
 };
 
 pub fn server_capabilities() -> ServerCapabilities {
@@ -60,20 +60,19 @@ pub fn server_capabilities() -> ServerCapabilities {
         execute_command_provider: None,
         workspace: None,
         call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
-        semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensOptions(
+        semantic_tokens_provider: Some(
             SemanticTokensOptions {
                 legend: SemanticTokensLegend {
-                    token_types: semantic_tokens::supported_token_types().iter().cloned().collect(),
-                    token_modifiers: semantic_tokens::supported_token_modifiers()
-                        .iter()
-                        .cloned()
-                        .collect(),
+                    token_types: semantic_tokens::SUPPORTED_TYPES.iter().cloned().collect(),
+                    token_modifiers: semantic_tokens::SUPPORTED_MODIFIERS.iter().cloned().collect(),
                 },
 
                 document_provider: Some(SemanticTokensDocumentProvider::Bool(true)),
-                ..SemanticTokensOptions::default()
-            },
-        )),
+                range_provider: Some(true),
+                work_done_progress_options: Default::default(),
+            }
+            .into(),
+        ),
         experimental: Default::default(),
     }
 }
index 4d59db1ee787ebd1f9128390f4dc6ca73da1b434..d70d34bdc3828ac3ebe8d9d11b63cab9eea53d44 100644 (file)
@@ -4,8 +4,8 @@
 use std::{collections::HashSet, fmt::Write, path::Path, time::Instant};
 
 use hir::{
-    db::{DefDatabase, HirDatabase},
-    AssocItem, Crate, HasSource, HirDisplay, ModuleDef,
+    db::{AstDatabase, DefDatabase, HirDatabase},
+    original_range, AssocItem, Crate, HasSource, HirDisplay, ModuleDef,
 };
 use hir_def::FunctionId;
 use hir_ty::{Ty, TypeWalk};
@@ -188,13 +188,19 @@ pub fn analysis_stats(
                     let src = sm.expr_syntax(expr_id);
                     if let Some(src) = src {
                         // FIXME: it might be nice to have a function (on Analysis?) that goes from Source<T> -> (LineCol, LineCol) directly
-                        let original_file = src.file_id.original_file(db);
-                        let path = db.file_relative_path(original_file);
-                        let line_index = host.analysis().file_line_index(original_file).unwrap();
-                        let text_range = src.value.either(
-                            |it| it.syntax_node_ptr().range(),
-                            |it| it.syntax_node_ptr().range(),
-                        );
+                        // But also, we should just turn the type mismatches into diagnostics and provide these
+                        let root = db.parse_or_expand(src.file_id).unwrap();
+                        let node = src.map(|e| {
+                            e.either(
+                                |p| p.to_node(&root).syntax().clone(),
+                                |p| p.to_node(&root).syntax().clone(),
+                            )
+                        });
+                        let original_range = original_range(db, node.as_ref());
+                        let path = db.file_relative_path(original_range.file_id);
+                        let line_index =
+                            host.analysis().file_line_index(original_range.file_id).unwrap();
+                        let text_range = original_range.range;
                         let (start, end) = (
                             line_index.line_col(text_range.start()),
                             line_index.line_col(text_range.end()),
index 5fcb46b6174b2d839588f007cc8100e9a6ab386e..ff156307a55b4d6aacb6a739b648a1114683b020 100644 (file)
@@ -9,15 +9,22 @@
     WorkspaceEdit,
 };
 use ra_ide::{
-    tags, translate_offset_with_edit, CompletionItem, CompletionItemKind, FileId, FilePosition,
-    FileRange, FileSystemEdit, Fold, FoldKind, InsertTextFormat, LineCol, LineIndex,
-    NavigationTarget, RangeInfo, ReferenceAccess, Severity, SourceChange, SourceFileEdit,
+    translate_offset_with_edit, CompletionItem, CompletionItemKind, FileId, FilePosition,
+    FileRange, FileSystemEdit, Fold, FoldKind, Highlight, HighlightModifier, HighlightTag,
+    InsertTextFormat, LineCol, LineIndex, NavigationTarget, RangeInfo, ReferenceAccess, Severity,
+    SourceChange, SourceFileEdit,
 };
 use ra_syntax::{SyntaxKind, TextRange, TextUnit};
 use ra_text_edit::{AtomTextEdit, TextEdit};
 use ra_vfs::LineEndings;
 
-use crate::{req, semantic_tokens, world::WorldSnapshot, Result};
+use crate::{
+    req,
+    semantic_tokens::{self, ModifierSet, CONSTANT, CONTROL, MUTABLE, UNSAFE},
+    world::WorldSnapshot,
+    Result,
+};
+use semantic_tokens::{ATTRIBUTE, BUILTIN_TYPE, ENUM_MEMBER, LIFETIME, TYPE_ALIAS, UNION};
 
 pub trait Conv {
     type Output;
@@ -303,73 +310,54 @@ fn conv_with(self, ctx: &FoldConvCtx) -> lsp_types::FoldingRange {
     }
 }
 
-impl Conv for &'static str {
-    type Output = (SemanticTokenType, Vec<SemanticTokenModifier>);
-
-    fn conv(self) -> (SemanticTokenType, Vec<SemanticTokenModifier>) {
-        let token_type: SemanticTokenType = match self {
-            tags::FIELD => SemanticTokenType::MEMBER,
-            tags::FUNCTION => SemanticTokenType::FUNCTION,
-            tags::MODULE => SemanticTokenType::NAMESPACE,
-            tags::CONSTANT => {
-                return (
-                    SemanticTokenType::VARIABLE,
-                    vec![SemanticTokenModifier::STATIC, SemanticTokenModifier::READONLY],
-                )
-            }
-            tags::MACRO => SemanticTokenType::MACRO,
-
-            tags::VARIABLE => {
-                return (SemanticTokenType::VARIABLE, vec![SemanticTokenModifier::READONLY])
-            }
-            tags::VARIABLE_MUT => SemanticTokenType::VARIABLE,
+impl Conv for Highlight {
+    type Output = (u32, u32);
 
-            tags::TYPE => SemanticTokenType::TYPE,
-            tags::TYPE_BUILTIN => SemanticTokenType::TYPE,
-            tags::TYPE_SELF => {
-                return (SemanticTokenType::TYPE, vec![SemanticTokenModifier::REFERENCE])
-            }
-            tags::TYPE_PARAM => SemanticTokenType::TYPE_PARAMETER,
-            tags::TYPE_LIFETIME => {
-                return (SemanticTokenType::LABEL, vec![SemanticTokenModifier::REFERENCE])
+    fn conv(self) -> Self::Output {
+        let mut mods = ModifierSet::default();
+        let type_ = match self.tag {
+            HighlightTag::Struct => SemanticTokenType::STRUCT,
+            HighlightTag::Enum => SemanticTokenType::ENUM,
+            HighlightTag::Union => UNION,
+            HighlightTag::TypeAlias => TYPE_ALIAS,
+            HighlightTag::Trait => SemanticTokenType::INTERFACE,
+            HighlightTag::BuiltinType => BUILTIN_TYPE,
+            HighlightTag::SelfType => SemanticTokenType::TYPE,
+            HighlightTag::Field => SemanticTokenType::MEMBER,
+            HighlightTag::Function => SemanticTokenType::FUNCTION,
+            HighlightTag::Module => SemanticTokenType::NAMESPACE,
+            HighlightTag::Constant => {
+                mods |= CONSTANT;
+                mods |= SemanticTokenModifier::STATIC;
+                SemanticTokenType::VARIABLE
             }
-
-            tags::LITERAL_BYTE => SemanticTokenType::NUMBER,
-            tags::LITERAL_NUMERIC => SemanticTokenType::NUMBER,
-            tags::LITERAL_CHAR => SemanticTokenType::NUMBER,
-
-            tags::LITERAL_COMMENT => {
-                return (SemanticTokenType::COMMENT, vec![SemanticTokenModifier::DOCUMENTATION])
+            HighlightTag::Static => {
+                mods |= SemanticTokenModifier::STATIC;
+                SemanticTokenType::VARIABLE
             }
-
-            tags::LITERAL_STRING => SemanticTokenType::STRING,
-            tags::LITERAL_ATTRIBUTE => SemanticTokenType::KEYWORD,
-
-            tags::KEYWORD => SemanticTokenType::KEYWORD,
-            tags::KEYWORD_UNSAFE => SemanticTokenType::KEYWORD,
-            tags::KEYWORD_CONTROL => SemanticTokenType::KEYWORD,
-            unknown => panic!("Unknown semantic token: {}", unknown),
+            HighlightTag::EnumVariant => ENUM_MEMBER,
+            HighlightTag::Macro => SemanticTokenType::MACRO,
+            HighlightTag::Local => SemanticTokenType::VARIABLE,
+            HighlightTag::TypeParam => SemanticTokenType::TYPE_PARAMETER,
+            HighlightTag::Lifetime => LIFETIME,
+            HighlightTag::ByteLiteral | HighlightTag::NumericLiteral => SemanticTokenType::NUMBER,
+            HighlightTag::CharLiteral | HighlightTag::StringLiteral => SemanticTokenType::STRING,
+            HighlightTag::Comment => SemanticTokenType::COMMENT,
+            HighlightTag::Attribute => ATTRIBUTE,
+            HighlightTag::Keyword => SemanticTokenType::KEYWORD,
         };
 
-        (token_type, vec![])
-    }
-}
-
-impl Conv for (SemanticTokenType, Vec<SemanticTokenModifier>) {
-    type Output = (u32, u32);
-
-    fn conv(self) -> Self::Output {
-        let token_index =
-            semantic_tokens::supported_token_types().iter().position(|it| *it == self.0).unwrap();
-        let mut token_modifier_bitset = 0;
-        for modifier in self.1.iter() {
-            token_modifier_bitset |= semantic_tokens::supported_token_modifiers()
-                .iter()
-                .position(|it| it == modifier)
-                .unwrap();
+        for modifier in self.modifiers.iter() {
+            let modifier = match modifier {
+                HighlightModifier::Definition => SemanticTokenModifier::DECLARATION,
+                HighlightModifier::Control => CONTROL,
+                HighlightModifier::Mutable => MUTABLE,
+                HighlightModifier::Unsafe => UNSAFE,
+            };
+            mods |= modifier;
         }
 
-        (token_index as u32, token_modifier_bitset as u32)
+        (semantic_tokens::type_index(type_), mods.0)
     }
 }
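
The `(u32, u32)` output is the LSP wire encoding: an index into the legend's token-type list plus a bitset whose bit positions index the legend's modifier list. This also fixes a bug in the removed code, which OR-ed the modifier's *position* into the bitset instead of a shifted bit (so `DOCUMENTATION` at index 0 contributed nothing). A sketch of the encoding with an illustrative legend:

```
// Illustrative legend; the real lists live in semantic_tokens.rs.
const TYPES: &[&str] = &["comment", "keyword", "variable"];
const MODIFIERS: &[&str] = &["declaration", "static", "mutable"];

fn encode(type_: &str, mods: &[&str]) -> (u32, u32) {
    let idx = TYPES.iter().position(|t| *t == type_).unwrap() as u32;
    let mut bits = 0u32;
    for m in mods {
        // Shifted bit, not the raw index.
        bits |= 1 << MODIFIERS.iter().position(|it| it == m).unwrap();
    }
    (idx, bits)
}

fn main() {
    // "variable" with "static" + "mutable" -> (2, 0b110).
    assert_eq!(encode("variable", &["static", "mutable"]), (2, 0b110));
}
```
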
 
index 3d147d449e0f0bf4441a1df414107e2fc1ef8672..fe804aadaa9b0205deb0f881271a980feed175b2 100644 (file)
@@ -530,8 +530,9 @@ fn on_request(
         .on::<req::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)?
         .on::<req::CallHierarchyIncomingCalls>(handlers::handle_call_hierarchy_incoming)?
         .on::<req::CallHierarchyOutgoingCalls>(handlers::handle_call_hierarchy_outgoing)?
-        .on::<req::Ssr>(handlers::handle_ssr)?
         .on::<req::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
+        .on::<req::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range)?
+        .on::<req::Ssr>(handlers::handle_ssr)?
         .finish();
     Ok(())
 }
index e13e7c95a25f046e54a46ccfa27b2433b59849bd..6f517760f71a7df5751c29ad3545721b754307b8 100644 (file)
@@ -16,8 +16,8 @@
     CodeAction, CodeActionOrCommand, CodeActionResponse, CodeLens, Command, CompletionItem,
     Diagnostic, DocumentFormattingParams, DocumentHighlight, DocumentSymbol, FoldingRange,
     FoldingRangeParams, Hover, HoverContents, Location, MarkupContent, MarkupKind, Position,
-    PrepareRenameResponse, Range, RenameParams, SemanticTokenModifier, SemanticTokenType,
-    SemanticTokens, SemanticTokensParams, SemanticTokensResult, SymbolInformation,
+    PrepareRenameResponse, Range, RenameParams, SemanticTokens, SemanticTokensParams,
+    SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
     TextDocumentIdentifier, TextEdit, WorkspaceEdit,
 };
 use ra_ide::{
@@ -954,7 +954,7 @@ fn highlight(world: &WorldSnapshot, file_id: FileId) -> Result<Vec<Decoration>>
         .into_iter()
         .map(|h| Decoration {
             range: h.range.conv_with(&line_index),
-            tag: h.tag,
+            tag: h.highlight.to_string(),
             binding_hash: h.binding_hash.map(|x| x.to_string()),
         })
         .collect();
@@ -1078,14 +1078,41 @@ pub fn handle_semantic_tokens(
     let _p = profile("handle_semantic_tokens");
 
     let file_id = params.text_document.try_conv_with(&world)?;
+    let text = world.analysis().file_text(file_id)?;
     let line_index = world.analysis().file_line_index(file_id)?;
 
     let mut builder = SemanticTokensBuilder::default();
 
-    for h in world.analysis().highlight(file_id)?.into_iter() {
-        let type_and_modifiers: (SemanticTokenType, Vec<SemanticTokenModifier>) = h.tag.conv();
-        let (token_type, token_modifiers) = type_and_modifiers.conv();
-        builder.push(h.range.conv_with(&line_index), token_type, token_modifiers);
+    for highlight_range in world.analysis().highlight(file_id)?.into_iter() {
+        let (token_index, modifier_bitset) = highlight_range.highlight.conv();
+        for mut range in line_index.lines(highlight_range.range) {
+            if text[range].ends_with('\n') {
+                range = TextRange::from_to(range.start(), range.end() - TextUnit::of_char('\n'));
+            }
+            let range = range.conv_with(&line_index);
+            builder.push(range, token_index, modifier_bitset);
+        }
+    }
+
+    let tokens = SemanticTokens { data: builder.build(), ..Default::default() };
+
+    Ok(Some(tokens.into()))
+}
+
+pub fn handle_semantic_tokens_range(
+    world: WorldSnapshot,
+    params: SemanticTokensRangeParams,
+) -> Result<Option<SemanticTokensRangeResult>> {
+    let _p = profile("handle_semantic_tokens_range");
+
+    let frange = (&params.text_document, params.range).try_conv_with(&world)?;
+    let line_index = world.analysis().file_line_index(frange.file_id)?;
+
+    let mut builder = SemanticTokensBuilder::default();
+
+    for highlight_range in world.analysis().highlight_range(frange)?.into_iter() {
+        let (token_type, token_modifiers) = highlight_range.highlight.conv();
+        builder.push(highlight_range.range.conv_with(&line_index), token_type, token_modifiers);
     }
 
     let tokens = SemanticTokens { data: builder.build(), ..Default::default() };
index 3734899bc09b6ad965566655d9c54a37d0f418fb..fd6aef5971968831a6a01d171fb2ac6706536138 100644 (file)
     DocumentSymbolResponse, FileSystemWatcher, Hover, InitializeResult, MessageType,
     PartialResultParams, ProgressParams, ProgressParamsValue, ProgressToken,
     PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams, SelectionRange,
-    SelectionRangeParams, SemanticTokensParams, SemanticTokensResult, ServerCapabilities,
-    ShowMessageParams, SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams,
-    TextEdit, WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
+    SelectionRangeParams, SemanticTokensParams, SemanticTokensRangeParams,
+    SemanticTokensRangeResult, SemanticTokensResult, ServerCapabilities, ShowMessageParams,
+    SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams, TextEdit,
+    WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
 };
 
 pub enum AnalyzerStatus {}
@@ -111,7 +112,7 @@ pub struct PublishDecorationsParams {
 #[serde(rename_all = "camelCase")]
 pub struct Decoration {
     pub range: Range,
-    pub tag: &'static str,
+    pub tag: String,
     pub binding_hash: Option<String>,
 }
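`tag` becomes an owned `String` because it is now rendered from the structured `Highlight` (the `h.highlight.to_string()` call above) rather than drawn from a fixed set of static tags. For illustration, the serialized shape under the `camelCase` rename, with the `range` field omitted and an assumed tag text of `"variable.mutable"` (the exact `Display` output of `Highlight` is an assumption here):

```
use serde::Serialize; // serde with the "derive" feature, plus serde_json

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Decoration {
    tag: String,
    binding_hash: Option<String>, // serialized as "bindingHash"
}

fn main() {
    let deco = Decoration { tag: "variable.mutable".into(), binding_hash: None };
    // Prints: {"tag":"variable.mutable","bindingHash":null}
    println!("{}", serde_json::to_string(&deco).unwrap());
}
```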
 
index e6a8eb146f410ef5d6256b6cd4dca390d091abce..1b146e4d87e8647ee6adfeef175588160bcb6293 100644 (file)
@@ -1,8 +1,22 @@
 //! Semantic Tokens helpers
 
+use std::ops;
+
 use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType};
 
-const SUPPORTED_TYPES: &[SemanticTokenType] = &[
+pub(crate) const ATTRIBUTE: SemanticTokenType = SemanticTokenType::new("attribute");
+pub(crate) const BUILTIN_TYPE: SemanticTokenType = SemanticTokenType::new("builtinType");
+pub(crate) const ENUM_MEMBER: SemanticTokenType = SemanticTokenType::new("enumMember");
+pub(crate) const LIFETIME: SemanticTokenType = SemanticTokenType::new("lifetime");
+pub(crate) const TYPE_ALIAS: SemanticTokenType = SemanticTokenType::new("typeAlias");
+pub(crate) const UNION: SemanticTokenType = SemanticTokenType::new("union");
+
+pub(crate) const CONSTANT: SemanticTokenModifier = SemanticTokenModifier::new("constant");
+pub(crate) const CONTROL: SemanticTokenModifier = SemanticTokenModifier::new("control");
+pub(crate) const MUTABLE: SemanticTokenModifier = SemanticTokenModifier::new("mutable");
+pub(crate) const UNSAFE: SemanticTokenModifier = SemanticTokenModifier::new("unsafe");
+
+pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
     SemanticTokenType::COMMENT,
     SemanticTokenType::KEYWORD,
     SemanticTokenType::STRING,
     SemanticTokenType::VARIABLE,
     SemanticTokenType::PARAMETER,
     SemanticTokenType::LABEL,
+    ATTRIBUTE,
+    BUILTIN_TYPE,
+    ENUM_MEMBER,
+    LIFETIME,
+    TYPE_ALIAS,
+    UNION,
 ];
 
-const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
+pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
     SemanticTokenModifier::DOCUMENTATION,
     SemanticTokenModifier::DECLARATION,
     SemanticTokenModifier::DEFINITION,
     SemanticTokenModifier::ASYNC,
     SemanticTokenModifier::VOLATILE,
     SemanticTokenModifier::READONLY,
+    CONSTANT,
+    MUTABLE,
+    UNSAFE,
+    CONTROL,
 ];
 
-/// Token types that the server supports
-pub(crate) fn supported_token_types() -> &'static [SemanticTokenType] {
-    SUPPORTED_TYPES
-}
+#[derive(Default)]
+pub(crate) struct ModifierSet(pub(crate) u32);
 
-/// Token modifiers that the server supports
-pub(crate) fn supported_token_modifiers() -> &'static [SemanticTokenModifier] {
-    SUPPORTED_MODIFIERS
+impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
+    fn bitor_assign(&mut self, rhs: SemanticTokenModifier) {
+        let idx = SUPPORTED_MODIFIERS.iter().position(|it| it == &rhs).unwrap();
+        self.0 |= 1 << idx;
+    }
 }
 
 /// Tokens are encoded relative to each other.
@@ -92,3 +116,7 @@ pub fn build(self) -> Vec<SemanticToken> {
         self.data
     }
 }
+
+pub fn type_index(type_: SemanticTokenType) -> u32 {
+    SUPPORTED_TYPES.iter().position(|it| *it == type_).unwrap() as u32
+}
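Together, these helpers produce the `(token type index, modifier bitset)` pair that `SemanticTokensBuilder::push` consumes in the handlers above. A small sketch of that composition, assuming it sits in this module next to the definitions (the real glue is the `.conv()` impl, which is outside this hunk):

```
// Sketch: the two integers for a `variable` token that is both a
// declaration and mutable.
fn variable_decl_mut() -> (u32, u32) {
    // Index of the token type within SUPPORTED_TYPES.
    let token_index = type_index(SemanticTokenType::VARIABLE);

    // Each `|=` sets the bit at the modifier's position within
    // SUPPORTED_MODIFIERS; an unsupported modifier panics on `unwrap()`.
    let mut modifiers = ModifierSet::default();
    modifiers |= SemanticTokenModifier::DECLARATION;
    modifiers |= MUTABLE;

    (token_index, modifiers.0)
}
```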
index 971592b7370bff11261c88444bb974f9a6ce67f6..6a7c6d6f9efbc5ea90700ce3344d36637fba4654 100644 (file)
@@ -9,5 +9,5 @@ doctest = false
 
 [dependencies]
 difference = "2.0.0"
-text_unit = "0.1.9"
+text_unit = "0.1.10"
 serde_json = "1.0.48"
index e6e8d71103be39a5aacfb25bf32a7ce011985780..69deddcb5bb14619eecb116a85b5c70da1e180e7 100644 (file)
@@ -83,6 +83,7 @@ fn try_extract_range(text: &str) -> Option<(TextRange, String)> {
     Some((TextRange::from_to(start, end), text))
 }
 
+#[derive(Clone, Copy)]
 pub enum RangeOrOffset {
     Range(TextRange),
     Offset(TextUnit),
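Deriving `Copy` lets test fixtures hand the parsed position around by value; a tiny sketch (with `check` standing in for any consuming helper):

```
// Sketch: a Copy value can be consumed twice without a move error.
fn use_twice(pos: RangeOrOffset, check: impl Fn(RangeOrOffset)) {
    check(pos);
    check(pos); // fine: RangeOrOffset is Copy
}
```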
index 0288a468e015a92315c990813c376c05c9ca2f00..6901363fc564378e5277d23ac93cf25492502ee3 100644 (file)
                 "@types/node": "*"
             }
         },
-        "@types/throttle-debounce": {
-            "version": "2.1.0",
-            "resolved": "https://registry.npmjs.org/@types/throttle-debounce/-/throttle-debounce-2.1.0.tgz",
-            "integrity": "sha512-5eQEtSCoESnh2FsiLTxE121IiE60hnMqcb435fShf4bpLRjEu1Eoekht23y6zXS9Ts3l+Szu3TARnTsA0GkOkQ==",
-            "dev": true
-        },
         "@types/vscode": {
             "version": "1.42.0",
             "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.42.0.tgz",
             "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=",
             "dev": true
         },
-        "throttle-debounce": {
-            "version": "2.1.0",
-            "resolved": "https://registry.npmjs.org/throttle-debounce/-/throttle-debounce-2.1.0.tgz",
-            "integrity": "sha512-AOvyNahXQuU7NN+VVvOOX+uW6FPaWdAOdRP5HfwYxAfCzXTFKRMoIMk+n+po318+ktcChx+F1Dd91G3YHeMKyg=="
-        },
         "through": {
             "version": "2.3.8",
             "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
index dff535fcd1f599ac6651624468980fe3be4f5d3d..862de32103f6afc0736de8a1cebbbac8c81d6363 100644 (file)
     "engines": {
         "vscode": "^1.42.0"
     },
+    "enableProposedApi": true,
     "scripts": {
         "vscode:prepublish": "tsc && rollup -c",
         "package": "vsce package -o rust-analyzer.vsix",
         "watch": "tsc --watch",
-        "fmt": "tsfmt -r && eslint -c .eslintrc.js --ext ts ./src/ --fix"
+        "lint": "tsfmt --verify && eslint -c .eslintrc.js --ext ts ./src",
+        "fix": " tsfmt -r       && eslint -c .eslintrc.js --ext ts ./src --fix"
     },
     "dependencies": {
         "jsonc-parser": "^2.1.0",
         "node-fetch": "^2.6.0",
-        "throttle-debounce": "^2.1.0",
         "vscode-languageclient": "^6.1.1"
     },
     "devDependencies": {
@@ -35,7 +36,6 @@
         "@rollup/plugin-node-resolve": "^7.1.1",
         "@types/node": "^12.12.27",
         "@types/node-fetch": "^2.5.4",
-        "@types/throttle-debounce": "^2.1.0",
         "@types/vscode": "^1.42.0",
         "@typescript-eslint/eslint-plugin": "^2.20.0",
         "@typescript-eslint/parser": "^2.20.0",
                 "key": "ctrl+shift+j",
                 "when": "editorTextFocus && editorLangId == rust"
             },
-            {
-                "command": "rust-analyzer.run",
-                "key": "ctrl+r",
-                "when": "editorTextFocus && editorLangId == rust"
-            },
             {
                 "command": "rust-analyzer.onEnter",
                 "key": "enter",
-                "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust"
+                "when": "editorTextFocus && !suggestWidgetVisible && editorLangId == rust && !vim.active || vim.mode == 'Insert' && editorTextFocus && !suggestWidgetVisible && editorLangId == rust"
             }
         ],
         "configuration": {
             "type": "object",
             "title": "Rust Analyzer",
             "properties": {
+                "rust-analyzer.highlighting.semanticTokens": {
+                    "type": "boolean",
+                    "default": false,
+                    "description": "Use proposed semantic tokens API for syntax highlighting"
+                },
                 "rust-analyzer.highlightingOn": {
                     "type": "boolean",
                     "default": false,
                 "rust-analyzer.featureFlags": {
                     "type": "object",
                     "default": {},
-                    "description": "Fine grained feature flags to disable annoying features"
+                    "description": "Fine-grained feature flags to disable annoying features",
+                    "properties": {
+                        "lsp.diagnostics": {
+                            "type": "boolean",
+                            "description": "Whether to show diagnostics from `cargo check`"
+                        },
+                        "completion.insertion.add-call-parenthesis": {
+                            "type": "boolean",
+                            "description": "Whether to add parentheses when completing functions"
+                        },
+                        "completion.enable-postfix": {
+                            "type": "boolean",
+                            "description": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc."
+                        },
+                        "notifications.workspace-loaded": {
+                            "type": "boolean",
+                            "description": "Whether to show `workspace loaded` message"
+                        },
+                        "notifications.cargo-toml-not-found": {
+                            "type": "boolean",
+                            "description": "Whether to show `can't find Cargo.toml` error message"
+                        }
+                    }
                 },
                 "rust-analyzer.serverPath": {
                     "type": [
                     "highContrast": "#BEBEBE"
                 }
             }
+        ],
+        "semanticTokenTypes": [
+            {
+                "id": "attribute",
+                "description": "Style for attributes"
+            },
+            {
+                "id": "builtinType",
+                "description": "Style for builtin types"
+            },
+            {
+                "id": "lifetime",
+                "description": "Style for lifetimes"
+            },
+            {
+                "id": "typeAlias",
+                "description": "Style for type aliases"
+            },
+            {
+                "id": "union",
+                "description": "Style for C-style untagged unions"
+            }
+        ],
+        "semanticTokenModifiers": [
+            {
+                "id": "constant",
+                "description": "Style for compile-time constants"
+            },
+            {
+                "id": "control",
+                "description": "Style for control flow keywords"
+            },
+            {
+                "id": "mutable",
+                "description": "Style for mutable bindings"
+            },
+            {
+                "id": "unsafe",
+                "description": "Style for unsafe operations"
+            }
+        ],
+        "semanticTokenStyleDefaults": [
+            {
+                "selector": "attribute",
+                "scope": [
+                    "meta.attribute"
+                ]
+            },
+            {
+                "selector": "builtinType",
+                "scope": [
+                    "support.type.primitive"
+                ]
+            },
+            {
+                "selector": "lifetime",
+                "scope": [
+                    "entity.name.lifetime.rust"
+                ]
+            },
+            {
+                "selector": "typeAlias",
+                "scope": [
+                    "entity.name.typeAlias"
+                ]
+            },
+            {
+                "selector": "union",
+                "scope": [
+                    "entity.name.union"
+                ]
+            },
+            {
+                "selector": "keyword.unsafe",
+                "scope": [
+                    "keyword.other.unsafe"
+                ]
+            },
+            {
+                "selector": "keyword.control",
+                "scope": [
+                    "keyword.control"
+                ]
+            },
+            {
+                "selector": "variable.constant",
+                "scope": [
+                    "entity.name.constant"
+                ]
+            },
+            {
+                "selector": "*.mutable",
+                "light": {
+                    "fontStyle": "underline"
+                },
+                "dark": {
+                    "fontStyle": "underline"
+                },
+                "highContrast": {
+                    "fontStyle": "underline"
+                }
+            }
         ]
     }
 }
index aaf2ef40e3e3f1ead29f524f2c8de33f729aa027..540f7c9ea775d0af4dd92c42f9d4f90a2c9c8bdd 100644 (file)
@@ -3,6 +3,7 @@ import * as vscode from 'vscode';
 
 import { Config } from './config';
 import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed';
+import { SemanticTokensFeature, DocumentSemanticsTokensSignature } from 'vscode-languageclient/lib/semanticTokens.proposed';
 
 export async function createClient(config: Config, serverPath: string): Promise<lc.LanguageClient> {
     // '.' is the fallback if no folder is open
@@ -26,7 +27,7 @@ export async function createClient(config: Config, serverPath: string): Promise<
     const clientOptions: lc.LanguageClientOptions = {
         documentSelector: [{ scheme: 'file', language: 'rust' }],
         initializationOptions: {
-            publishDecorations: true,
+            publishDecorations: !config.highlightingSemanticTokens,
             lruCapacity: config.lruCapacity,
             maxInlayHintLength: config.maxInlayHintLength,
             cargoWatchEnable: cargoWatchOpts.enable,
@@ -41,6 +42,14 @@ export async function createClient(config: Config, serverPath: string): Promise<
             rustfmtArgs: config.rustfmtArgs,
         },
         traceOutputChannel,
+        middleware: {
+            // Workaround for https://github.com/microsoft/vscode-languageserver-node/issues/576
+            async provideDocumentSemanticTokens(document: vscode.TextDocument, token: vscode.CancellationToken, next: DocumentSemanticsTokensSignature) {
+                const res = await next(document, token);
+                if (res === undefined) throw new Error('busy');
+                return res;
+            }
+        } as any
     };
 
     const res = new lc.LanguageClient(
@@ -83,5 +92,10 @@ export async function createClient(config: Config, serverPath: string): Promise<
     // Here we want to just enable CallHierarchyFeature since it is available on stable.
     // Note that while the CallHierarchyFeature is stable, the LSP protocol is not.
     res.registerFeature(new CallHierarchyFeature(res));
+
+    if (config.highlightingSemanticTokens) {
+        res.registerFeature(new SemanticTokensFeature(res));
+    }
+
     return res;
 }
index 47e8cd45d0646dd5b43ae4c285e95619cf9ee823..bf915102c8050262294b8e4f592e269689194429 100644 (file)
@@ -22,6 +22,7 @@ export class Config {
     private static readonly requiresReloadOpts = [
         "cargoFeatures",
         "cargo-watch",
+        "highlighting.semanticTokens"
     ]
         .map(opt => `${Config.rootSection}.${opt}`);
 
@@ -143,6 +144,7 @@ export class Config {
     // We don't do runtime config validation here for simplicity. More on stackoverflow:
     // https://stackoverflow.com/questions/60135780/what-is-the-best-way-to-type-check-the-configuration-for-vscode-extension
 
+    get highlightingSemanticTokens() { return this.cfg.get("highlighting.semanticTokens") as boolean; }
     get highlightingOn() { return this.cfg.get("highlightingOn") as boolean; }
     get rainbowHighlightingOn() { return this.cfg.get("rainbowHighlightingOn") as boolean; }
     get lruCapacity() { return this.cfg.get("lruCapacity") as null | number; }
index 5951cf1b457dc5c157b4542d0c064f633b7b28f8..6871bc11184cfeb868f0fb863b6b362362acccf2 100644 (file)
@@ -42,12 +42,14 @@ export function activateInlayHints(ctx: Ctx) {
 const typeHintDecorationType = vscode.window.createTextEditorDecorationType({
     after: {
         color: new vscode.ThemeColor('rust_analyzer.inlayHint'),
+        fontStyle: "normal",
     },
 });
 
 const parameterHintDecorationType = vscode.window.createTextEditorDecorationType({
     before: {
         color: new vscode.ThemeColor('rust_analyzer.inlayHint'),
+        fontStyle: "normal",
     },
 });
 
index 356723abaf5287b3ba3d2465cd638d876e9d86f5..97e4d67c21d2f600a9401525e4aa706085c6214a 100644 (file)
@@ -1,11 +1,10 @@
 import * as vscode from "vscode";
 import * as path from "path";
 import { promises as fs } from "fs";
-import { strict as assert } from "assert";
 
 import { ArtifactReleaseInfo } from "./interfaces";
 import { downloadFile } from "./download_file";
-import { throttle } from "throttle-debounce";
+import { assert } from "../util";
 
 /**
  * Downloads an artifact from the given `downloadUrl`.
@@ -20,11 +19,10 @@ export async function downloadArtifact(
     installationDir: string,
     displayName: string,
 ) {
-    await fs.mkdir(installationDir).catch(err => assert.strictEqual(
-        err?.code,
-        "EEXIST",
+    await fs.mkdir(installationDir).catch(err => assert(
+        err?.code === "EEXIST",
         `Couldn't create directory "${installationDir}" to download ` +
-        `${artifactFileName} artifact: ${err.message}`
+        `${artifactFileName} artifact: ${err?.message}`
     ));
 
     const installationPath = path.join(installationDir, artifactFileName);
@@ -38,19 +36,15 @@ export async function downloadArtifact(
         async (progress, _cancellationToken) => {
             let lastPercentage = 0;
             const filePermissions = 0o755; // (rwx, r_x, r_x)
-            await downloadFile(downloadUrl, installationPath, filePermissions, throttle(
-                200,
-                /* noTrailing: */ true,
-                (readBytes, totalBytes) => {
-                    const newPercentage = (readBytes / totalBytes) * 100;
-                    progress.report({
-                        message: newPercentage.toFixed(0) + "%",
-                        increment: newPercentage - lastPercentage
-                    });
+            await downloadFile(downloadUrl, installationPath, filePermissions, (readBytes, totalBytes) => {
+                const newPercentage = (readBytes / totalBytes) * 100;
+                progress.report({
+                    message: newPercentage.toFixed(0) + "%",
+                    increment: newPercentage - lastPercentage
+                });
 
-                    lastPercentage = newPercentage;
-                })
-            );
+                lastPercentage = newPercentage;
+            });
         }
     );
 }
index 319cb995c4f7ff0cbe53b74482af2605fe0503a2..ee8949d61c4052dad087d24fa51a7c6d321ea0e8 100644 (file)
@@ -2,8 +2,7 @@ import fetch from "node-fetch";
 import * as fs from "fs";
 import * as stream from "stream";
 import * as util from "util";
-import { strict as assert } from "assert";
-import { log } from "../util";
+import { log, assert } from "../util";
 
 const pipeline = util.promisify(stream.pipeline);
 
index cb5e568448aedf979afd8fe3c1525eb6f4d11a34..6a6cf4f8c3ad97dc34168c3db16683a7a602e239 100644 (file)
@@ -1,13 +1,12 @@
 import * as vscode from "vscode";
 import * as path from "path";
-import { strict as assert } from "assert";
 import { promises as dns } from "dns";
 import { spawnSync } from "child_process";
 
 import { BinarySource } from "./interfaces";
 import { fetchArtifactReleaseInfo } from "./fetch_artifact_release_info";
 import { downloadArtifact } from "./download_artifact";
-import { log } from "../util";
+import { log, assert } from "../util";
 
 export async function ensureServerBinary(source: null | BinarySource): Promise<null | string> {
     if (!source) {
index 424ff1ac3d6a041e7c9565acf50c1f14623dfee4..ecf53cf775fed63580b5d31accdf91f3fca274d7 100644 (file)
@@ -89,7 +89,9 @@ export async function activate(context: vscode.ExtensionContext) {
 
     activateStatusDisplay(ctx);
 
-    activateHighlighting(ctx);
+    if (!ctx.config.highlightingSemanticTokens) {
+        activateHighlighting(ctx);
+    }
     activateInlayHints(ctx);
 }
 
index 68c2a94d0455833c6fd3cfe61b64295a721d69ae..f56c6badaf3cd62a4f9028ffaa17a9e791536150 100644 (file)
@@ -1,22 +1,31 @@
 import * as lc from "vscode-languageclient";
 import * as vscode from "vscode";
+import { strict as nativeAssert } from "assert";
 
-let enabled: boolean = false;
+export function assert(condition: boolean, explanation: string): asserts condition {
+    try {
+        nativeAssert(condition, explanation);
+    } catch (err) {
+        log.error(`Assertion failed:`, explanation);
+        throw err;
+    }
+}
 
 export const log = {
+    enabled: true,
     debug(message?: any, ...optionalParams: any[]): void {
-        if (!enabled) return;
+        if (!log.enabled) return;
         // eslint-disable-next-line no-console
         console.log(message, ...optionalParams);
     },
     error(message?: any, ...optionalParams: any[]): void {
-        if (!enabled) return;
+        if (!log.enabled) return;
         debugger;
         // eslint-disable-next-line no-console
         console.error(message, ...optionalParams);
     },
     setEnabled(yes: boolean): void {
-        enabled = yes;
+        log.enabled = yes;
     }
 };
 
index 2d9ae904b129a4c7df9a6053beae56d74edfcb41..98d8104e59cbc958113ffc0a00f52a35f7be1fc5 100644 (file)
@@ -1,3 +1,5 @@
+//! Defines input for code generation process.
+
 pub(crate) struct KindsSrc<'a> {
     pub(crate) punct: &'a [(&'a str, &'a str)],
     pub(crate) keywords: &'a [&'a str],
index 697e830df40ef16ef90f29254353f8644710f1c2..6da5ca89e7fba75badb72f1729ef388c320877a3 100644 (file)
@@ -1,3 +1,5 @@
+//! Generates `assists.md` documentation.
+
 use std::{fs, path::Path};
 
 use crate::{
index 3df021acc3059075f6455770e1283ed807bf917a..1d13b26da9711444f31e1496fc0fe0606557ef11 100644 (file)
@@ -4,7 +4,10 @@
 
 use anyhow::{bail, format_err, Context, Result};
 
-use crate::not_bash::{pushd, run};
+use crate::{
+    not_bash::{pushd, run},
+    project_root,
+};
 
 // Latest stable, feel free to send a PR if this lags behind.
 const REQUIRED_RUST_VERSION: u32 = 41;
@@ -24,6 +27,7 @@ pub struct ServerOpt {
 
 impl InstallCmd {
     pub fn run(self) -> Result<()> {
+        let _dir = pushd(project_root());
         let both = self.server.is_some() && self.client.is_some();
         if cfg!(target_os = "macos") {
             fix_path_for_mac().context("Fix path for mac")?
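The `_dir` binding above matters: `pushd` presumably returns an RAII guard that pops the directory when dropped, so it has to be bound to a named variable to keep the new working directory for the whole function. A minimal, self-contained sketch of the pattern (the guard type is illustrative, not the actual `not_bash` implementation):

```
// Sketch of an RAII "pushd" guard: the directory change lasts exactly
// as long as the returned value stays alive.
struct DirGuard;

impl Drop for DirGuard {
    fn drop(&mut self) {
        println!("popd"); // the real guard restores the directory here
    }
}

fn pushd_sketch(dir: &str) -> DirGuard {
    println!("pushd {}", dir);
    DirGuard
}

fn main() {
    let _dir = pushd_sketch("project root"); // guard lives until end of main
    println!("install steps run from the project root");
    // Note: `let _ = pushd_sketch(..)` would not bind the guard at all;
    // it would drop (and "popd") on that same line.
}
```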
index 2bcd76d60b0bfcdb07b52424175ac9b3e057f2ce..e5da726ac2181eeac48ac37141b027d5b7ee433b 100644 (file)
@@ -1,4 +1,6 @@
-//! FIXME: write short doc here
+//! Support library for `cargo xtask` command.
+//!
+//! See https://github.com/matklad/cargo-xtask/
 
 pub mod not_bash;
 pub mod install;
index d5577cce9239d84d71e9492f9bb0a83c83bee6be..40f706d9f87e361312d8789b32dde3147251ed3c 100644 (file)
@@ -1,4 +1,5 @@
 //! A bad shell -- small cross-platform module for writing glue code
+
 use std::{
     cell::RefCell,
     env,
@@ -130,6 +131,7 @@ fn with<F: FnOnce(&mut Env) -> T, T>(f: F) -> T {
     }
 
     fn pushd(&mut self, dir: PathBuf) {
+        let dir = self.cwd().join(dir);
         self.pushd_stack.push(dir)
     }
     fn popd(&mut self) {
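Joining the argument onto `self.cwd()` makes a relative `pushd` resolve against the shell's tracked directory rather than the process-wide cwd, so nested relative pushes compose; absolute arguments keep working because `PathBuf::join` discards the base when handed an absolute path. A quick illustration of that `join` behavior:

```
use std::path::PathBuf;

fn main() {
    let cwd = PathBuf::from("/repo");

    // Relative argument: resolved against the tracked cwd.
    assert_eq!(cwd.join("crates/ra_ide"), PathBuf::from("/repo/crates/ra_ide"));

    // Absolute argument: `join` replaces the base entirely, so the
    // previous behavior is preserved.
    assert_eq!(cwd.join("/tmp"), PathBuf::from("/tmp"));
}
```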