]> git.lizzy.rs Git - rust.git/commitdiff
Rename ra_ssr -> ssr
authorAleksey Kladov <aleksey.kladov@gmail.com>
Thu, 13 Aug 2020 14:45:10 +0000 (16:45 +0200)
committerAleksey Kladov <aleksey.kladov@gmail.com>
Thu, 13 Aug 2020 15:02:44 +0000 (17:02 +0200)
28 files changed:
Cargo.lock
crates/ra_ide/Cargo.toml
crates/ra_ide/src/lib.rs
crates/ra_ide/src/ssr.rs [deleted file]
crates/ra_ssr/Cargo.toml [deleted file]
crates/ra_ssr/src/errors.rs [deleted file]
crates/ra_ssr/src/lib.rs [deleted file]
crates/ra_ssr/src/matching.rs [deleted file]
crates/ra_ssr/src/nester.rs [deleted file]
crates/ra_ssr/src/parsing.rs [deleted file]
crates/ra_ssr/src/replacing.rs [deleted file]
crates/ra_ssr/src/resolving.rs [deleted file]
crates/ra_ssr/src/search.rs [deleted file]
crates/ra_ssr/src/tests.rs [deleted file]
crates/rust-analyzer/Cargo.toml
crates/rust-analyzer/src/bin/args.rs
crates/rust-analyzer/src/cli.rs
crates/rust-analyzer/src/cli/ssr.rs
crates/ssr/Cargo.toml [new file with mode: 0644]
crates/ssr/src/errors.rs [new file with mode: 0644]
crates/ssr/src/lib.rs [new file with mode: 0644]
crates/ssr/src/matching.rs [new file with mode: 0644]
crates/ssr/src/nester.rs [new file with mode: 0644]
crates/ssr/src/parsing.rs [new file with mode: 0644]
crates/ssr/src/replacing.rs [new file with mode: 0644]
crates/ssr/src/resolving.rs [new file with mode: 0644]
crates/ssr/src/search.rs [new file with mode: 0644]
crates/ssr/src/tests.rs [new file with mode: 0644]

index 8704e43860ae0feac36a29f90521112dc0b280d9..4d8b748392d0e4c262104b76a31894c8c828b6a2 100644 (file)
@@ -1135,28 +1135,14 @@ dependencies = [
  "oorandom",
  "profile",
  "ra_assists",
- "ra_ssr",
  "rustc-hash",
+ "ssr",
  "stdx",
  "syntax",
  "test_utils",
  "text_edit",
 ]
 
-[[package]]
-name = "ra_ssr"
-version = "0.1.0"
-dependencies = [
- "base_db",
- "expect",
- "hir",
- "ide_db",
- "rustc-hash",
- "syntax",
- "test_utils",
- "text_edit",
-]
-
 [[package]]
 name = "rayon"
 version = "1.3.1"
@@ -1254,11 +1240,11 @@ dependencies = [
  "profile",
  "project_model",
  "ra_ide",
- "ra_ssr",
  "rayon",
  "rustc-hash",
  "serde",
  "serde_json",
+ "ssr",
  "stdx",
  "syntax",
  "test_utils",
@@ -1456,6 +1442,20 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "ssr"
+version = "0.1.0"
+dependencies = [
+ "base_db",
+ "expect",
+ "hir",
+ "ide_db",
+ "rustc-hash",
+ "syntax",
+ "test_utils",
+ "text_edit",
+]
+
 [[package]]
 name = "stdx"
 version = "0.1.0"
index e25aad6cfb08f600026877ce9c03ad4e5e33002e..8519e9ccaec7a4a9021b854d33c4e7f5d075ffa5 100644 (file)
@@ -29,7 +29,7 @@ cfg = { path = "../cfg" }
 profile = { path = "../profile" }
 test_utils = { path = "../test_utils" }
 ra_assists = { path = "../ra_assists" }
-ra_ssr = { path = "../ra_ssr" }
+ssr = { path = "../ssr" }
 
 # ra_ide should depend only on the top-level `hir` package. if you need
 # something from some `hir_xxx` subpackage, reexport the API via `hir`.
index 66a234fff78d044e417369c1fbca8fe726e78a19..bbc9e4b8af2764353c8d78832f7db3cd6d4926bb 100644 (file)
@@ -39,7 +39,6 @@ macro_rules! eprintln {
 mod parent_module;
 mod references;
 mod runnables;
-mod ssr;
 mod status;
 mod syntax_highlighting;
 mod syntax_tree;
@@ -95,7 +94,7 @@ macro_rules! eprintln {
     RootDatabase,
 };
 pub use ra_assists::{Assist, AssistConfig, AssistId, AssistKind, ResolvedAssist};
-pub use ra_ssr::SsrError;
+pub use ssr::SsrError;
 pub use text_edit::{Indel, TextEdit};
 
 pub type Cancelable<T> = Result<T, Canceled>;
@@ -515,20 +514,23 @@ pub fn structural_search_replace(
         &self,
         query: &str,
         parse_only: bool,
-        position: FilePosition,
+        resolve_context: FilePosition,
         selections: Vec<FileRange>,
     ) -> Cancelable<Result<SourceChange, SsrError>> {
         self.with_db(|db| {
-            let edits = ssr::parse_search_replace(query, parse_only, db, position, selections)?;
+            let rule: ssr::SsrRule = query.parse()?;
+            let mut match_finder = ssr::MatchFinder::in_context(db, resolve_context, selections);
+            match_finder.add_rule(rule)?;
+            let edits = if parse_only { Vec::new() } else { match_finder.edits() };
             Ok(SourceChange::from(edits))
         })
     }
 
     /// Performs an operation on that may be Canceled.
-    fn with_db<F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe, T>(
-        &self,
-        f: F,
-    ) -> Cancelable<T> {
+    fn with_db<F, T>(&self, f: F) -> Cancelable<T>
+    where
+        F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
+    {
         self.db.catch_canceled(f)
     }
 }
diff --git a/crates/ra_ide/src/ssr.rs b/crates/ra_ide/src/ssr.rs
deleted file mode 100644 (file)
index a8a7041..0000000
+++ /dev/null
@@ -1,72 +0,0 @@
-use base_db::{FilePosition, FileRange};
-use ide_db::RootDatabase;
-
-use crate::SourceFileEdit;
-use ra_ssr::{MatchFinder, SsrError, SsrRule};
-
-// Feature: Structural Search and Replace
-//
-// Search and replace with named wildcards that will match any expression, type, path, pattern or item.
-// The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`.
-// A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
-// Within a macro call, a placeholder will match up until whatever token follows the placeholder.
-//
-// All paths in both the search pattern and the replacement template must resolve in the context
-// in which this command is invoked. Paths in the search pattern will then match the code if they
-// resolve to the same item, even if they're written differently. For example if we invoke the
-// command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers
-// to `foo::Bar` will match.
-//
-// Paths in the replacement template will be rendered appropriately for the context in which the
-// replacement occurs. For example if our replacement template is `foo::Bar` and we match some
-// code in the `foo` module, we'll insert just `Bar`.
-//
-// Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will
-// match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`.
-//
-// The scope of the search / replace will be restricted to the current selection if any, otherwise
-// it will apply to the whole workspace.
-//
-// Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
-//
-// Supported constraints:
-//
-// |===
-// | Constraint    | Restricts placeholder
-//
-// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`)
-// | not(a)        | Negates the constraint `a`
-// |===
-//
-// Available via the command `rust-analyzer.ssr`.
-//
-// ```rust
-// // Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)]
-//
-// // BEFORE
-// String::from(foo(y + 5, z))
-//
-// // AFTER
-// String::from((y + 5).foo(z))
-// ```
-//
-// |===
-// | Editor  | Action Name
-//
-// | VS Code | **Rust Analyzer: Structural Search Replace**
-// |===
-pub fn parse_search_replace(
-    rule: &str,
-    parse_only: bool,
-    db: &RootDatabase,
-    resolve_context: FilePosition,
-    selections: Vec<FileRange>,
-) -> Result<Vec<SourceFileEdit>, SsrError> {
-    let rule: SsrRule = rule.parse()?;
-    let mut match_finder = MatchFinder::in_context(db, resolve_context, selections);
-    match_finder.add_rule(rule)?;
-    if parse_only {
-        return Ok(Vec::new());
-    }
-    Ok(match_finder.edits())
-}
diff --git a/crates/ra_ssr/Cargo.toml b/crates/ra_ssr/Cargo.toml
deleted file mode 100644 (file)
index 4d22a8a..0000000
+++ /dev/null
@@ -1,23 +0,0 @@
-[package]
-edition = "2018"
-name = "ra_ssr"
-version = "0.1.0"
-authors = ["rust-analyzer developers"]
-license = "MIT OR Apache-2.0"
-description = "Structural search and replace of Rust code"
-repository = "https://github.com/rust-analyzer/rust-analyzer"
-
-[lib]
-doctest = false
-
-[dependencies]
-text_edit = { path = "../text_edit" }
-syntax = { path = "../syntax" }
-base_db = { path = "../base_db" }
-ide_db = { path = "../ide_db" }
-hir = { path = "../hir" }
-rustc-hash = "1.1.0"
-test_utils = { path = "../test_utils" }
-
-[dev-dependencies]
-expect = { path = "../expect" }
diff --git a/crates/ra_ssr/src/errors.rs b/crates/ra_ssr/src/errors.rs
deleted file mode 100644 (file)
index c02baca..0000000
+++ /dev/null
@@ -1,29 +0,0 @@
-//! Code relating to errors produced by SSR.
-
-/// Constructs an SsrError taking arguments like the format macro.
-macro_rules! _error {
-    ($fmt:expr) => {$crate::SsrError::new(format!($fmt))};
-    ($fmt:expr, $($arg:tt)+) => {$crate::SsrError::new(format!($fmt, $($arg)+))}
-}
-pub(crate) use _error as error;
-
-/// Returns from the current function with an error, supplied by arguments as for format!
-macro_rules! _bail {
-    ($($tokens:tt)*) => {return Err(crate::errors::error!($($tokens)*))}
-}
-pub(crate) use _bail as bail;
-
-#[derive(Debug, PartialEq)]
-pub struct SsrError(pub(crate) String);
-
-impl std::fmt::Display for SsrError {
-    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
-        write!(f, "Parse error: {}", self.0)
-    }
-}
-
-impl SsrError {
-    pub(crate) fn new(message: impl Into<String>) -> SsrError {
-        SsrError(message.into())
-    }
-}
diff --git a/crates/ra_ssr/src/lib.rs b/crates/ra_ssr/src/lib.rs
deleted file mode 100644 (file)
index b4e3510..0000000
+++ /dev/null
@@ -1,286 +0,0 @@
-//! Structural Search Replace
-//!
-//! Allows searching the AST for code that matches one or more patterns and then replacing that code
-//! based on a template.
-
-mod matching;
-mod nester;
-mod parsing;
-mod replacing;
-mod resolving;
-mod search;
-#[macro_use]
-mod errors;
-#[cfg(test)]
-mod tests;
-
-use crate::errors::bail;
-pub use crate::errors::SsrError;
-pub use crate::matching::Match;
-use crate::matching::MatchFailureReason;
-use base_db::{FileId, FilePosition, FileRange};
-use hir::Semantics;
-use ide_db::source_change::SourceFileEdit;
-use resolving::ResolvedRule;
-use rustc_hash::FxHashMap;
-use syntax::{ast, AstNode, SyntaxNode, TextRange};
-
-// A structured search replace rule. Create by calling `parse` on a str.
-#[derive(Debug)]
-pub struct SsrRule {
-    /// A structured pattern that we're searching for.
-    pattern: parsing::RawPattern,
-    /// What we'll replace it with.
-    template: parsing::RawPattern,
-    parsed_rules: Vec<parsing::ParsedRule>,
-}
-
-#[derive(Debug)]
-pub struct SsrPattern {
-    raw: parsing::RawPattern,
-    parsed_rules: Vec<parsing::ParsedRule>,
-}
-
-#[derive(Debug, Default)]
-pub struct SsrMatches {
-    pub matches: Vec<Match>,
-}
-
-/// Searches a crate for pattern matches and possibly replaces them with something else.
-pub struct MatchFinder<'db> {
-    /// Our source of information about the user's code.
-    sema: Semantics<'db, ide_db::RootDatabase>,
-    rules: Vec<ResolvedRule>,
-    resolution_scope: resolving::ResolutionScope<'db>,
-    restrict_ranges: Vec<FileRange>,
-}
-
-impl<'db> MatchFinder<'db> {
-    /// Constructs a new instance where names will be looked up as if they appeared at
-    /// `lookup_context`.
-    pub fn in_context(
-        db: &'db ide_db::RootDatabase,
-        lookup_context: FilePosition,
-        mut restrict_ranges: Vec<FileRange>,
-    ) -> MatchFinder<'db> {
-        restrict_ranges.retain(|range| !range.range.is_empty());
-        let sema = Semantics::new(db);
-        let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context);
-        MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges }
-    }
-
-    /// Constructs an instance using the start of the first file in `db` as the lookup context.
-    pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
-        use base_db::SourceDatabaseExt;
-        use ide_db::symbol_index::SymbolsDatabase;
-        if let Some(first_file_id) = db
-            .local_roots()
-            .iter()
-            .next()
-            .and_then(|root| db.source_root(root.clone()).iter().next())
-        {
-            Ok(MatchFinder::in_context(
-                db,
-                FilePosition { file_id: first_file_id, offset: 0.into() },
-                vec![],
-            ))
-        } else {
-            bail!("No files to search");
-        }
-    }
-
-    /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take
-    /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to
-    /// match to it.
-    pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> {
-        for parsed_rule in rule.parsed_rules {
-            self.rules.push(ResolvedRule::new(
-                parsed_rule,
-                &self.resolution_scope,
-                self.rules.len(),
-            )?);
-        }
-        Ok(())
-    }
-
-    /// Finds matches for all added rules and returns edits for all found matches.
-    pub fn edits(&self) -> Vec<SourceFileEdit> {
-        use base_db::SourceDatabaseExt;
-        let mut matches_by_file = FxHashMap::default();
-        for m in self.matches().matches {
-            matches_by_file
-                .entry(m.range.file_id)
-                .or_insert_with(|| SsrMatches::default())
-                .matches
-                .push(m);
-        }
-        let mut edits = vec![];
-        for (file_id, matches) in matches_by_file {
-            let edit =
-                replacing::matches_to_edit(&matches, &self.sema.db.file_text(file_id), &self.rules);
-            edits.push(SourceFileEdit { file_id, edit });
-        }
-        edits
-    }
-
-    /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
-    /// intend to do replacement, use `add_rule` instead.
-    pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> {
-        for parsed_rule in pattern.parsed_rules {
-            self.rules.push(ResolvedRule::new(
-                parsed_rule,
-                &self.resolution_scope,
-                self.rules.len(),
-            )?);
-        }
-        Ok(())
-    }
-
-    /// Returns matches for all added rules.
-    pub fn matches(&self) -> SsrMatches {
-        let mut matches = Vec::new();
-        let mut usage_cache = search::UsageCache::default();
-        for rule in &self.rules {
-            self.find_matches_for_rule(rule, &mut usage_cache, &mut matches);
-        }
-        nester::nest_and_remove_collisions(matches, &self.sema)
-    }
-
-    /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
-    /// them, while recording reasons why they don't match. This API is useful for command
-    /// line-based debugging where providing a range is difficult.
-    pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
-        use base_db::SourceDatabaseExt;
-        let file = self.sema.parse(file_id);
-        let mut res = Vec::new();
-        let file_text = self.sema.db.file_text(file_id);
-        let mut remaining_text = file_text.as_str();
-        let mut base = 0;
-        let len = snippet.len() as u32;
-        while let Some(offset) = remaining_text.find(snippet) {
-            let start = base + offset as u32;
-            let end = start + len;
-            self.output_debug_for_nodes_at_range(
-                file.syntax(),
-                FileRange { file_id, range: TextRange::new(start.into(), end.into()) },
-                &None,
-                &mut res,
-            );
-            remaining_text = &remaining_text[offset + snippet.len()..];
-            base = end;
-        }
-        res
-    }
-
-    fn output_debug_for_nodes_at_range(
-        &self,
-        node: &SyntaxNode,
-        range: FileRange,
-        restrict_range: &Option<FileRange>,
-        out: &mut Vec<MatchDebugInfo>,
-    ) {
-        for node in node.children() {
-            let node_range = self.sema.original_range(&node);
-            if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range)
-            {
-                continue;
-            }
-            if node_range.range == range.range {
-                for rule in &self.rules {
-                    // For now we ignore rules that have a different kind than our node, otherwise
-                    // we get lots of noise. If at some point we add support for restricting rules
-                    // to a particular kind of thing (e.g. only match type references), then we can
-                    // relax this. We special-case expressions, since function calls can match
-                    // method calls.
-                    if rule.pattern.node.kind() != node.kind()
-                        && !(ast::Expr::can_cast(rule.pattern.node.kind())
-                            && ast::Expr::can_cast(node.kind()))
-                    {
-                        continue;
-                    }
-                    out.push(MatchDebugInfo {
-                        matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
-                            .map_err(|e| MatchFailureReason {
-                                reason: e.reason.unwrap_or_else(|| {
-                                    "Match failed, but no reason was given".to_owned()
-                                }),
-                            }),
-                        pattern: rule.pattern.node.clone(),
-                        node: node.clone(),
-                    });
-                }
-            } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
-                if let Some(expanded) = self.sema.expand(&macro_call) {
-                    if let Some(tt) = macro_call.token_tree() {
-                        self.output_debug_for_nodes_at_range(
-                            &expanded,
-                            range,
-                            &Some(self.sema.original_range(tt.syntax())),
-                            out,
-                        );
-                    }
-                }
-            }
-            self.output_debug_for_nodes_at_range(&node, range, restrict_range, out);
-        }
-    }
-}
-
-pub struct MatchDebugInfo {
-    node: SyntaxNode,
-    /// Our search pattern parsed as an expression or item, etc
-    pattern: SyntaxNode,
-    matched: Result<Match, MatchFailureReason>,
-}
-
-impl std::fmt::Debug for MatchDebugInfo {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match &self.matched {
-            Ok(_) => writeln!(f, "Node matched")?,
-            Err(reason) => writeln!(f, "Node failed to match because: {}", reason.reason)?,
-        }
-        writeln!(
-            f,
-            "============ AST ===========\n\
-            {:#?}",
-            self.node
-        )?;
-        writeln!(f, "========= PATTERN ==========")?;
-        writeln!(f, "{:#?}", self.pattern)?;
-        writeln!(f, "============================")?;
-        Ok(())
-    }
-}
-
-impl SsrMatches {
-    /// Returns `self` with any nested matches removed and made into top-level matches.
-    pub fn flattened(self) -> SsrMatches {
-        let mut out = SsrMatches::default();
-        self.flatten_into(&mut out);
-        out
-    }
-
-    fn flatten_into(self, out: &mut SsrMatches) {
-        for mut m in self.matches {
-            for p in m.placeholder_values.values_mut() {
-                std::mem::replace(&mut p.inner_matches, SsrMatches::default()).flatten_into(out);
-            }
-            out.matches.push(m);
-        }
-    }
-}
-
-impl Match {
-    pub fn matched_text(&self) -> String {
-        self.matched_node.text().to_string()
-    }
-}
-
-impl std::error::Error for SsrError {}
-
-#[cfg(test)]
-impl MatchDebugInfo {
-    pub(crate) fn match_failure_reason(&self) -> Option<&str> {
-        self.matched.as_ref().err().map(|r| r.reason.as_str())
-    }
-}
diff --git a/crates/ra_ssr/src/matching.rs b/crates/ra_ssr/src/matching.rs
deleted file mode 100644 (file)
index ffc7202..0000000
+++ /dev/null
@@ -1,777 +0,0 @@
-//! This module is responsible for matching a search pattern against a node in the AST. In the
-//! process of matching, placeholder values are recorded.
-
-use crate::{
-    parsing::{Constraint, NodeKind, Placeholder},
-    resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo},
-    SsrMatches,
-};
-use base_db::FileRange;
-use hir::Semantics;
-use rustc_hash::FxHashMap;
-use std::{cell::Cell, iter::Peekable};
-use syntax::ast::{AstNode, AstToken};
-use syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken};
-use test_utils::mark;
-
-// Creates a match error. If we're currently attempting to match some code that we thought we were
-// going to match, as indicated by the --debug-snippet flag, then populate the reason field.
-macro_rules! match_error {
-    ($e:expr) => {{
-            MatchFailed {
-                reason: if recording_match_fail_reasons() {
-                    Some(format!("{}", $e))
-                } else {
-                    None
-                }
-            }
-    }};
-    ($fmt:expr, $($arg:tt)+) => {{
-        MatchFailed {
-            reason: if recording_match_fail_reasons() {
-                Some(format!($fmt, $($arg)+))
-            } else {
-                None
-            }
-        }
-    }};
-}
-
-// Fails the current match attempt, recording the supplied reason if we're recording match fail reasons.
-macro_rules! fail_match {
-    ($($args:tt)*) => {return Err(match_error!($($args)*))};
-}
-
-/// Information about a match that was found.
-#[derive(Debug)]
-pub struct Match {
-    pub(crate) range: FileRange,
-    pub(crate) matched_node: SyntaxNode,
-    pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>,
-    pub(crate) ignored_comments: Vec<ast::Comment>,
-    pub(crate) rule_index: usize,
-    /// The depth of matched_node.
-    pub(crate) depth: usize,
-    // Each path in the template rendered for the module in which the match was found.
-    pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>,
-}
-
-/// Represents a `$var` in an SSR query.
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub(crate) struct Var(pub String);
-
-/// Information about a placeholder bound in a match.
-#[derive(Debug)]
-pub(crate) struct PlaceholderMatch {
-    /// The node that the placeholder matched to. If set, then we'll search for further matches
-    /// within this node. It isn't set when we match tokens within a macro call's token tree.
-    pub(crate) node: Option<SyntaxNode>,
-    pub(crate) range: FileRange,
-    /// More matches, found within `node`.
-    pub(crate) inner_matches: SsrMatches,
-}
-
-#[derive(Debug)]
-pub(crate) struct MatchFailureReason {
-    pub(crate) reason: String,
-}
-
-/// An "error" indicating that matching failed. Use the fail_match! macro to create and return this.
-#[derive(Clone)]
-pub(crate) struct MatchFailed {
-    /// The reason why we failed to match. Only present when debug_active true in call to
-    /// `get_match`.
-    pub(crate) reason: Option<String>,
-}
-
-/// Checks if `code` matches the search pattern found in `search_scope`, returning information about
-/// the match, if it does. Since we only do matching in this module and searching is done by the
-/// parent module, we don't populate nested matches.
-pub(crate) fn get_match(
-    debug_active: bool,
-    rule: &ResolvedRule,
-    code: &SyntaxNode,
-    restrict_range: &Option<FileRange>,
-    sema: &Semantics<ide_db::RootDatabase>,
-) -> Result<Match, MatchFailed> {
-    record_match_fails_reasons_scope(debug_active, || {
-        Matcher::try_match(rule, code, restrict_range, sema)
-    })
-}
-
-/// Checks if our search pattern matches a particular node of the AST.
-struct Matcher<'db, 'sema> {
-    sema: &'sema Semantics<'db, ide_db::RootDatabase>,
-    /// If any placeholders come from anywhere outside of this range, then the match will be
-    /// rejected.
-    restrict_range: Option<FileRange>,
-    rule: &'sema ResolvedRule,
-}
-
-/// Which phase of matching we're currently performing. We do two phases because most attempted
-/// matches will fail and it means we can defer more expensive checks to the second phase.
-enum Phase<'a> {
-    /// On the first phase, we perform cheap checks. No state is mutated and nothing is recorded.
-    First,
-    /// On the second phase, we construct the `Match`. Things like what placeholders bind to is
-    /// recorded.
-    Second(&'a mut Match),
-}
-
-impl<'db, 'sema> Matcher<'db, 'sema> {
-    fn try_match(
-        rule: &ResolvedRule,
-        code: &SyntaxNode,
-        restrict_range: &Option<FileRange>,
-        sema: &'sema Semantics<'db, ide_db::RootDatabase>,
-    ) -> Result<Match, MatchFailed> {
-        let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule };
-        // First pass at matching, where we check that node types and idents match.
-        match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
-        match_state.validate_range(&sema.original_range(code))?;
-        let mut the_match = Match {
-            range: sema.original_range(code),
-            matched_node: code.clone(),
-            placeholder_values: FxHashMap::default(),
-            ignored_comments: Vec::new(),
-            rule_index: rule.index,
-            depth: 0,
-            rendered_template_paths: FxHashMap::default(),
-        };
-        // Second matching pass, where we record placeholder matches, ignored comments and maybe do
-        // any other more expensive checks that we didn't want to do on the first pass.
-        match_state.attempt_match_node(
-            &mut Phase::Second(&mut the_match),
-            &rule.pattern.node,
-            code,
-        )?;
-        the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count();
-        if let Some(template) = &rule.template {
-            the_match.render_template_paths(template, sema)?;
-        }
-        Ok(the_match)
-    }
-
-    /// Checks that `range` is within the permitted range if any. This is applicable when we're
-    /// processing a macro expansion and we want to fail the match if we're working with a node that
-    /// didn't originate from the token tree of the macro call.
-    fn validate_range(&self, range: &FileRange) -> Result<(), MatchFailed> {
-        if let Some(restrict_range) = &self.restrict_range {
-            if restrict_range.file_id != range.file_id
-                || !restrict_range.range.contains_range(range.range)
-            {
-                fail_match!("Node originated from a macro");
-            }
-        }
-        Ok(())
-    }
-
-    fn attempt_match_node(
-        &self,
-        phase: &mut Phase,
-        pattern: &SyntaxNode,
-        code: &SyntaxNode,
-    ) -> Result<(), MatchFailed> {
-        // Handle placeholders.
-        if let Some(placeholder) = self.get_placeholder(&SyntaxElement::Node(pattern.clone())) {
-            for constraint in &placeholder.constraints {
-                self.check_constraint(constraint, code)?;
-            }
-            if let Phase::Second(matches_out) = phase {
-                let original_range = self.sema.original_range(code);
-                // We validated the range for the node when we started the match, so the placeholder
-                // probably can't fail range validation, but just to be safe...
-                self.validate_range(&original_range)?;
-                matches_out.placeholder_values.insert(
-                    Var(placeholder.ident.to_string()),
-                    PlaceholderMatch::new(code, original_range),
-                );
-            }
-            return Ok(());
-        }
-        // We allow a UFCS call to match a method call, provided they resolve to the same function.
-        if let Some(pattern_ufcs) = self.rule.pattern.ufcs_function_calls.get(pattern) {
-            if let Some(code) = ast::MethodCallExpr::cast(code.clone()) {
-                return self.attempt_match_ufcs_to_method_call(phase, pattern_ufcs, &code);
-            }
-            if let Some(code) = ast::CallExpr::cast(code.clone()) {
-                return self.attempt_match_ufcs_to_ufcs(phase, pattern_ufcs, &code);
-            }
-        }
-        if pattern.kind() != code.kind() {
-            fail_match!(
-                "Pattern had `{}` ({:?}), code had `{}` ({:?})",
-                pattern.text(),
-                pattern.kind(),
-                code.text(),
-                code.kind()
-            );
-        }
-        // Some kinds of nodes have special handling. For everything else, we fall back to default
-        // matching.
-        match code.kind() {
-            SyntaxKind::RECORD_EXPR_FIELD_LIST => {
-                self.attempt_match_record_field_list(phase, pattern, code)
-            }
-            SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code),
-            SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code),
-            _ => self.attempt_match_node_children(phase, pattern, code),
-        }
-    }
-
-    fn attempt_match_node_children(
-        &self,
-        phase: &mut Phase,
-        pattern: &SyntaxNode,
-        code: &SyntaxNode,
-    ) -> Result<(), MatchFailed> {
-        self.attempt_match_sequences(
-            phase,
-            PatternIterator::new(pattern),
-            code.children_with_tokens(),
-        )
-    }
-
-    fn attempt_match_sequences(
-        &self,
-        phase: &mut Phase,
-        pattern_it: PatternIterator,
-        mut code_it: SyntaxElementChildren,
-    ) -> Result<(), MatchFailed> {
-        let mut pattern_it = pattern_it.peekable();
-        loop {
-            match phase.next_non_trivial(&mut code_it) {
-                None => {
-                    if let Some(p) = pattern_it.next() {
-                        fail_match!("Part of the pattern was unmatched: {:?}", p);
-                    }
-                    return Ok(());
-                }
-                Some(SyntaxElement::Token(c)) => {
-                    self.attempt_match_token(phase, &mut pattern_it, &c)?;
-                }
-                Some(SyntaxElement::Node(c)) => match pattern_it.next() {
-                    Some(SyntaxElement::Node(p)) => {
-                        self.attempt_match_node(phase, &p, &c)?;
-                    }
-                    Some(p) => fail_match!("Pattern wanted '{}', code has {}", p, c.text()),
-                    None => fail_match!("Pattern reached end, code has {}", c.text()),
-                },
-            }
-        }
-    }
-
-    fn attempt_match_token(
-        &self,
-        phase: &mut Phase,
-        pattern: &mut Peekable<PatternIterator>,
-        code: &syntax::SyntaxToken,
-    ) -> Result<(), MatchFailed> {
-        phase.record_ignored_comments(code);
-        // Ignore whitespace and comments.
-        if code.kind().is_trivia() {
-            return Ok(());
-        }
-        if let Some(SyntaxElement::Token(p)) = pattern.peek() {
-            // If the code has a comma and the pattern is about to close something, then accept the
-            // comma without advancing the pattern. i.e. ignore trailing commas.
-            if code.kind() == SyntaxKind::COMMA && is_closing_token(p.kind()) {
-                return Ok(());
-            }
-            // Conversely, if the pattern has a comma and the code doesn't, skip that part of the
-            // pattern and continue to match the code.
-            if p.kind() == SyntaxKind::COMMA && is_closing_token(code.kind()) {
-                pattern.next();
-            }
-        }
-        // Consume an element from the pattern and make sure it matches.
-        match pattern.next() {
-            Some(SyntaxElement::Token(p)) => {
-                if p.kind() != code.kind() || p.text() != code.text() {
-                    fail_match!(
-                        "Pattern wanted token '{}' ({:?}), but code had token '{}' ({:?})",
-                        p.text(),
-                        p.kind(),
-                        code.text(),
-                        code.kind()
-                    )
-                }
-            }
-            Some(SyntaxElement::Node(p)) => {
-                // Not sure if this is actually reachable.
-                fail_match!(
-                    "Pattern wanted {:?}, but code had token '{}' ({:?})",
-                    p,
-                    code.text(),
-                    code.kind()
-                );
-            }
-            None => {
-                fail_match!("Pattern exhausted, while code remains: `{}`", code.text());
-            }
-        }
-        Ok(())
-    }
-
-    fn check_constraint(
-        &self,
-        constraint: &Constraint,
-        code: &SyntaxNode,
-    ) -> Result<(), MatchFailed> {
-        match constraint {
-            Constraint::Kind(kind) => {
-                kind.matches(code)?;
-            }
-            Constraint::Not(sub) => {
-                if self.check_constraint(&*sub, code).is_ok() {
-                    fail_match!("Constraint {:?} failed for '{}'", constraint, code.text());
-                }
-            }
-        }
-        Ok(())
-    }
-
-    /// Paths are matched based on whether they refer to the same thing, even if they're written
-    /// differently.
-    fn attempt_match_path(
-        &self,
-        phase: &mut Phase,
-        pattern: &SyntaxNode,
-        code: &SyntaxNode,
-    ) -> Result<(), MatchFailed> {
-        if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) {
-            let pattern_path = ast::Path::cast(pattern.clone()).unwrap();
-            let code_path = ast::Path::cast(code.clone()).unwrap();
-            if let (Some(pattern_segment), Some(code_segment)) =
-                (pattern_path.segment(), code_path.segment())
-            {
-                // Match everything within the segment except for the name-ref, which is handled
-                // separately via comparing what the path resolves to below.
-                self.attempt_match_opt(
-                    phase,
-                    pattern_segment.generic_arg_list(),
-                    code_segment.generic_arg_list(),
-                )?;
-                self.attempt_match_opt(
-                    phase,
-                    pattern_segment.param_list(),
-                    code_segment.param_list(),
-                )?;
-            }
-            if matches!(phase, Phase::Second(_)) {
-                let resolution = self
-                    .sema
-                    .resolve_path(&code_path)
-                    .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?;
-                if pattern_resolved.resolution != resolution {
-                    fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text());
-                }
-            }
-        } else {
-            return self.attempt_match_node_children(phase, pattern, code);
-        }
-        Ok(())
-    }
-
-    fn attempt_match_opt<T: AstNode>(
-        &self,
-        phase: &mut Phase,
-        pattern: Option<T>,
-        code: Option<T>,
-    ) -> Result<(), MatchFailed> {
-        match (pattern, code) {
-            (Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()),
-            (None, None) => Ok(()),
-            (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()),
-            (None, Some(c)) => {
-                fail_match!("Nothing in pattern to match code `{}`", c.syntax().text())
-            }
-        }
-    }
-
-    /// We want to allow the records to match in any order, so we have special matching logic for
-    /// them.
-    fn attempt_match_record_field_list(
-        &self,
-        phase: &mut Phase,
-        pattern: &SyntaxNode,
-        code: &SyntaxNode,
-    ) -> Result<(), MatchFailed> {
-        // Build a map keyed by field name.
-        let mut fields_by_name = FxHashMap::default();
-        for child in code.children() {
-            if let Some(record) = ast::RecordExprField::cast(child.clone()) {
-                if let Some(name) = record.field_name() {
-                    fields_by_name.insert(name.text().clone(), child.clone());
-                }
-            }
-        }
-        for p in pattern.children_with_tokens() {
-            if let SyntaxElement::Node(p) = p {
-                if let Some(name_element) = p.first_child_or_token() {
-                    if self.get_placeholder(&name_element).is_some() {
-                        // If the pattern is using placeholders for field names then order
-                        // independence doesn't make sense. Fall back to regular ordered
-                        // matching.
-                        return self.attempt_match_node_children(phase, pattern, code);
-                    }
-                    if let Some(ident) = only_ident(name_element) {
-                        let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| {
-                            match_error!(
-                                "Placeholder has record field '{}', but code doesn't",
-                                ident
-                            )
-                        })?;
-                        self.attempt_match_node(phase, &p, &code_record)?;
-                    }
-                }
-            }
-        }
-        if let Some(unmatched_fields) = fields_by_name.keys().next() {
-            fail_match!(
-                "{} field(s) of a record literal failed to match, starting with {}",
-                fields_by_name.len(),
-                unmatched_fields
-            );
-        }
-        Ok(())
-    }
-
-    /// Outside of token trees, a placeholder can only match a single AST node, whereas in a token
-    /// tree it can match a sequence of tokens. Note, that this code will only be used when the
-    /// pattern matches the macro invocation. For matches within the macro call, we'll already have
-    /// expanded the macro.
-    fn attempt_match_token_tree(
-        &self,
-        phase: &mut Phase,
-        pattern: &SyntaxNode,
-        code: &syntax::SyntaxNode,
-    ) -> Result<(), MatchFailed> {
-        let mut pattern = PatternIterator::new(pattern).peekable();
-        let mut children = code.children_with_tokens();
-        while let Some(child) = children.next() {
-            if let Some(placeholder) = pattern.peek().and_then(|p| self.get_placeholder(p)) {
-                pattern.next();
-                let next_pattern_token = pattern
-                    .peek()
-                    .and_then(|p| match p {
-                        SyntaxElement::Token(t) => Some(t.clone()),
-                        SyntaxElement::Node(n) => n.first_token(),
-                    })
-                    .map(|p| p.text().to_string());
-                let first_matched_token = child.clone();
-                let mut last_matched_token = child;
-                // Read code tokens until we reach one equal to the next token from our pattern
-                // or we reach the end of the token tree.
-                while let Some(next) = children.next() {
-                    match &next {
-                        SyntaxElement::Token(t) => {
-                            if Some(t.to_string()) == next_pattern_token {
-                                pattern.next();
-                                break;
-                            }
-                        }
-                        SyntaxElement::Node(n) => {
-                            if let Some(first_token) = n.first_token() {
-                                if Some(first_token.to_string()) == next_pattern_token {
-                                    if let Some(SyntaxElement::Node(p)) = pattern.next() {
-                                        // We have a subtree that starts with the next token in our pattern.
-                                        self.attempt_match_token_tree(phase, &p, &n)?;
-                                        break;
-                                    }
-                                }
-                            }
-                        }
-                    };
-                    last_matched_token = next;
-                }
-                if let Phase::Second(match_out) = phase {
-                    match_out.placeholder_values.insert(
-                        Var(placeholder.ident.to_string()),
-                        PlaceholderMatch::from_range(FileRange {
-                            file_id: self.sema.original_range(code).file_id,
-                            range: first_matched_token
-                                .text_range()
-                                .cover(last_matched_token.text_range()),
-                        }),
-                    );
-                }
-                continue;
-            }
-            // Match literal (non-placeholder) tokens.
-            match child {
-                SyntaxElement::Token(token) => {
-                    self.attempt_match_token(phase, &mut pattern, &token)?;
-                }
-                SyntaxElement::Node(node) => match pattern.next() {
-                    Some(SyntaxElement::Node(p)) => {
-                        self.attempt_match_token_tree(phase, &p, &node)?;
-                    }
-                    Some(SyntaxElement::Token(p)) => fail_match!(
-                        "Pattern has token '{}', code has subtree '{}'",
-                        p.text(),
-                        node.text()
-                    ),
-                    None => fail_match!("Pattern has nothing, code has '{}'", node.text()),
-                },
-            }
-        }
-        if let Some(p) = pattern.next() {
-            fail_match!("Reached end of token tree in code, but pattern still has {:?}", p);
-        }
-        Ok(())
-    }
-
-    fn attempt_match_ufcs_to_method_call(
-        &self,
-        phase: &mut Phase,
-        pattern_ufcs: &UfcsCallInfo,
-        code: &ast::MethodCallExpr,
-    ) -> Result<(), MatchFailed> {
-        use ast::ArgListOwner;
-        let code_resolved_function = self
-            .sema
-            .resolve_method_call(code)
-            .ok_or_else(|| match_error!("Failed to resolve method call"))?;
-        if pattern_ufcs.function != code_resolved_function {
-            fail_match!("Method call resolved to a different function");
-        }
-        if code_resolved_function.has_self_param(self.sema.db) {
-            if let (Some(pattern_type), Some(expr)) = (&pattern_ufcs.qualifier_type, &code.expr()) {
-                self.check_expr_type(pattern_type, expr)?;
-            }
-        }
-        // Check arguments.
-        let mut pattern_args = pattern_ufcs
-            .call_expr
-            .arg_list()
-            .ok_or_else(|| match_error!("Pattern function call has no args"))?
-            .args();
-        self.attempt_match_opt(phase, pattern_args.next(), code.expr())?;
-        let mut code_args =
-            code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args();
-        loop {
-            match (pattern_args.next(), code_args.next()) {
-                (None, None) => return Ok(()),
-                (p, c) => self.attempt_match_opt(phase, p, c)?,
-            }
-        }
-    }
-
-    fn attempt_match_ufcs_to_ufcs(
-        &self,
-        phase: &mut Phase,
-        pattern_ufcs: &UfcsCallInfo,
-        code: &ast::CallExpr,
-    ) -> Result<(), MatchFailed> {
-        use ast::ArgListOwner;
-        // Check that the first argument is the expected type.
-        if let (Some(pattern_type), Some(expr)) = (
-            &pattern_ufcs.qualifier_type,
-            &code.arg_list().and_then(|code_args| code_args.args().next()),
-        ) {
-            self.check_expr_type(pattern_type, expr)?;
-        }
-        self.attempt_match_node_children(phase, pattern_ufcs.call_expr.syntax(), code.syntax())
-    }
-
-    fn check_expr_type(
-        &self,
-        pattern_type: &hir::Type,
-        expr: &ast::Expr,
-    ) -> Result<(), MatchFailed> {
-        use hir::HirDisplay;
-        let code_type = self.sema.type_of_expr(&expr).ok_or_else(|| {
-            match_error!("Failed to get receiver type for `{}`", expr.syntax().text())
-        })?;
-        if !code_type
-            .autoderef(self.sema.db)
-            .any(|deref_code_type| *pattern_type == deref_code_type)
-        {
-            fail_match!(
-                "Pattern type `{}` didn't match code type `{}`",
-                pattern_type.display(self.sema.db),
-                code_type.display(self.sema.db)
-            );
-        }
-        Ok(())
-    }
-
-    fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> {
-        only_ident(element.clone()).and_then(|ident| self.rule.get_placeholder(&ident))
-    }
-}
-
-impl Match {
-    fn render_template_paths(
-        &mut self,
-        template: &ResolvedPattern,
-        sema: &Semantics<ide_db::RootDatabase>,
-    ) -> Result<(), MatchFailed> {
-        let module = sema
-            .scope(&self.matched_node)
-            .module()
-            .ok_or_else(|| match_error!("Matched node isn't in a module"))?;
-        for (path, resolved_path) in &template.resolved_paths {
-            if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
-                let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| {
-                    match_error!("Failed to render template path `{}` at match location")
-                })?;
-                self.rendered_template_paths.insert(path.clone(), mod_path);
-            }
-        }
-        Ok(())
-    }
-}
-
-impl Phase<'_> {
-    fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> {
-        loop {
-            let c = code_it.next();
-            if let Some(SyntaxElement::Token(t)) = &c {
-                self.record_ignored_comments(t);
-                if t.kind().is_trivia() {
-                    continue;
-                }
-            }
-            return c;
-        }
-    }
-
-    fn record_ignored_comments(&mut self, token: &SyntaxToken) {
-        if token.kind() == SyntaxKind::COMMENT {
-            if let Phase::Second(match_out) = self {
-                if let Some(comment) = ast::Comment::cast(token.clone()) {
-                    match_out.ignored_comments.push(comment);
-                }
-            }
-        }
-    }
-}
-
-fn is_closing_token(kind: SyntaxKind) -> bool {
-    kind == SyntaxKind::R_PAREN || kind == SyntaxKind::R_CURLY || kind == SyntaxKind::R_BRACK
-}
-
-pub(crate) fn record_match_fails_reasons_scope<F, T>(debug_active: bool, f: F) -> T
-where
-    F: Fn() -> T,
-{
-    RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(debug_active));
-    let res = f();
-    RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(false));
-    res
-}
-
-// For performance reasons, we don't want to record the reason why every match fails, only the bit
-// of code that the user indicated they thought would match. We use a thread local to indicate when
-// we are trying to match that bit of code. This saves us having to pass a boolean into all the bits
-// of code that can make the decision to not match.
-thread_local! {
-    pub static RECORDING_MATCH_FAIL_REASONS: Cell<bool> = Cell::new(false);
-}
-
-fn recording_match_fail_reasons() -> bool {
-    RECORDING_MATCH_FAIL_REASONS.with(|c| c.get())
-}
-
-impl PlaceholderMatch {
-    fn new(node: &SyntaxNode, range: FileRange) -> Self {
-        Self { node: Some(node.clone()), range, inner_matches: SsrMatches::default() }
-    }
-
-    fn from_range(range: FileRange) -> Self {
-        Self { node: None, range, inner_matches: SsrMatches::default() }
-    }
-}
-
-impl NodeKind {
-    fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> {
-        let ok = match self {
-            Self::Literal => {
-                mark::hit!(literal_constraint);
-                ast::Literal::can_cast(node.kind())
-            }
-        };
-        if !ok {
-            fail_match!("Code '{}' isn't of kind {:?}", node.text(), self);
-        }
-        Ok(())
-    }
-}
-
-// If `node` contains nothing but an ident then return it, otherwise return None.
-fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> {
-    match element {
-        SyntaxElement::Token(t) => {
-            if t.kind() == SyntaxKind::IDENT {
-                return Some(t);
-            }
-        }
-        SyntaxElement::Node(n) => {
-            let mut children = n.children_with_tokens();
-            if let (Some(only_child), None) = (children.next(), children.next()) {
-                return only_ident(only_child);
-            }
-        }
-    }
-    None
-}
-
-struct PatternIterator {
-    iter: SyntaxElementChildren,
-}
-
-impl Iterator for PatternIterator {
-    type Item = SyntaxElement;
-
-    fn next(&mut self) -> Option<SyntaxElement> {
-        while let Some(element) = self.iter.next() {
-            if !element.kind().is_trivia() {
-                return Some(element);
-            }
-        }
-        None
-    }
-}
-
-impl PatternIterator {
-    fn new(parent: &SyntaxNode) -> Self {
-        Self { iter: parent.children_with_tokens() }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use crate::{MatchFinder, SsrRule};
-
-    #[test]
-    fn parse_match_replace() {
-        let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
-        let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
-
-        let (db, position, selections) = crate::tests::single_file(input);
-        let mut match_finder = MatchFinder::in_context(&db, position, selections);
-        match_finder.add_rule(rule).unwrap();
-        let matches = match_finder.matches();
-        assert_eq!(matches.matches.len(), 1);
-        assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)");
-        assert_eq!(matches.matches[0].placeholder_values.len(), 1);
-        assert_eq!(
-            matches.matches[0].placeholder_values[&Var("x".to_string())]
-                .node
-                .as_ref()
-                .unwrap()
-                .text(),
-            "1+2"
-        );
-
-        let edits = match_finder.edits();
-        assert_eq!(edits.len(), 1);
-        let edit = &edits[0];
-        let mut after = input.to_string();
-        edit.edit.apply(&mut after);
-        assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");
-    }
-}
diff --git a/crates/ra_ssr/src/nester.rs b/crates/ra_ssr/src/nester.rs
deleted file mode 100644 (file)
index 6ac355d..0000000
+++ /dev/null
@@ -1,94 +0,0 @@
-//! Converts a flat collection of matches into a nested form suitable for replacement. When there
-//! are multiple matches for a node, or that overlap, priority is given to the earlier rule. Nested
-//! matches are only permitted if the inner match is contained entirely within a placeholder of an
-//! outer match.
-//!
-//! For example, if our search pattern is `foo(foo($a))` and the code had `foo(foo(foo(foo(42))))`,
-//! then we'll get 3 matches, however only the outermost and innermost matches can be accepted. The
-//! middle match would take the second `foo` from the outer match.
-
-use crate::{Match, SsrMatches};
-use rustc_hash::FxHashMap;
-use syntax::SyntaxNode;
-
-pub(crate) fn nest_and_remove_collisions(
-    mut matches: Vec<Match>,
-    sema: &hir::Semantics<ide_db::RootDatabase>,
-) -> SsrMatches {
-    // We sort the matches by depth then by rule index. Sorting by depth means that by the time we
-    // see a match, any parent matches or conflicting matches will have already been seen. Sorting
-    // by rule_index means that if there are two matches for the same node, the rule added first
-    // will take precedence.
-    matches.sort_by(|a, b| a.depth.cmp(&b.depth).then_with(|| a.rule_index.cmp(&b.rule_index)));
-    let mut collector = MatchCollector::default();
-    for m in matches {
-        collector.add_match(m, sema);
-    }
-    collector.into()
-}
-
-#[derive(Default)]
-struct MatchCollector {
-    matches_by_node: FxHashMap<SyntaxNode, Match>,
-}
-
-impl MatchCollector {
-    /// Attempts to add `m` to matches. If it conflicts with an existing match, it is discarded. If
-    /// it is entirely within a placeholder of an existing match, then it is added as a child
-    /// match of the existing match.
-    fn add_match(&mut self, m: Match, sema: &hir::Semantics<ide_db::RootDatabase>) {
-        let matched_node = m.matched_node.clone();
-        if let Some(existing) = self.matches_by_node.get_mut(&matched_node) {
-            try_add_sub_match(m, existing, sema);
-            return;
-        }
-        for ancestor in sema.ancestors_with_macros(m.matched_node.clone()) {
-            if let Some(existing) = self.matches_by_node.get_mut(&ancestor) {
-                try_add_sub_match(m, existing, sema);
-                return;
-            }
-        }
-        self.matches_by_node.insert(matched_node, m);
-    }
-}
-
-/// Attempts to add `m` as a sub-match of `existing`.
-fn try_add_sub_match(m: Match, existing: &mut Match, sema: &hir::Semantics<ide_db::RootDatabase>) {
-    for p in existing.placeholder_values.values_mut() {
-        // Note, no need to check if p.range.file is equal to m.range.file, since we
-        // already know we're within `existing`.
-        if p.range.range.contains_range(m.range.range) {
-            // Convert the inner matches in `p` into a temporary MatchCollector. When
-            // we're done, we then convert it back into an SsrMatches. If we expected
-            // lots of inner matches, it might be worthwhile keeping a MatchCollector
-    // around for each placeholder match. However we expect most placeholders
-            // will have 0 and a few will have 1. More than that should hopefully be
-            // exceptional.
-            let mut collector = MatchCollector::default();
-            for m in std::mem::replace(&mut p.inner_matches.matches, Vec::new()) {
-                collector.matches_by_node.insert(m.matched_node.clone(), m);
-            }
-            collector.add_match(m, sema);
-            p.inner_matches = collector.into();
-            break;
-        }
-    }
-}
-
-impl From<MatchCollector> for SsrMatches {
-    fn from(mut match_collector: MatchCollector) -> Self {
-        let mut matches = SsrMatches::default();
-        for (_, m) in match_collector.matches_by_node.drain() {
-            matches.matches.push(m);
-        }
-        matches.matches.sort_by(|a, b| {
-            // Order matches by file_id then by start range. This should be sufficient since ranges
-            // shouldn't be overlapping.
-            a.range
-                .file_id
-                .cmp(&b.range.file_id)
-                .then_with(|| a.range.range.start().cmp(&b.range.range.start()))
-        });
-        matches
-    }
-}
diff --git a/crates/ra_ssr/src/parsing.rs b/crates/ra_ssr/src/parsing.rs
deleted file mode 100644 (file)
index 9570e96..0000000
+++ /dev/null
@@ -1,389 +0,0 @@
-//! This file contains code for parsing SSR rules, which look something like `foo($a) ==>> bar($b)`.
-//! We first split everything before and after the separator `==>>`. Next, both the search pattern
-//! and the replacement template get tokenized by the Rust tokenizer. Tokens are then searched for
-//! placeholders, which start with `$`. For replacement templates, this is the final form. For
-//! search patterns, we go further and parse the pattern as each kind of thing that we can match.
-//! e.g. expressions, type references etc.
-
-use crate::errors::bail;
-use crate::{SsrError, SsrPattern, SsrRule};
-use rustc_hash::{FxHashMap, FxHashSet};
-use std::str::FromStr;
-use syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T};
-use test_utils::mark;
-
-#[derive(Debug)]
-pub(crate) struct ParsedRule {
-    pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
-    pub(crate) pattern: SyntaxNode,
-    pub(crate) template: Option<SyntaxNode>,
-}
-
-#[derive(Debug)]
-pub(crate) struct RawPattern {
-    tokens: Vec<PatternElement>,
-}
-
-// Part of a search or replace pattern.
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) enum PatternElement {
-    Token(Token),
-    Placeholder(Placeholder),
-}
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) struct Placeholder {
-    /// The name of this placeholder. e.g. for "$a", this would be "a"
-    pub(crate) ident: SmolStr,
-    /// A unique name used in place of this placeholder when we parse the pattern as Rust code.
-    stand_in_name: String,
-    pub(crate) constraints: Vec<Constraint>,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) enum Constraint {
-    Kind(NodeKind),
-    Not(Box<Constraint>),
-}
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) enum NodeKind {
-    Literal,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub(crate) struct Token {
-    kind: SyntaxKind,
-    pub(crate) text: SmolStr,
-}
-
-impl ParsedRule {
-    fn new(
-        pattern: &RawPattern,
-        template: Option<&RawPattern>,
-    ) -> Result<Vec<ParsedRule>, SsrError> {
-        let raw_pattern = pattern.as_rust_code();
-        let raw_template = template.map(|t| t.as_rust_code());
-        let raw_template = raw_template.as_ref().map(|s| s.as_str());
-        let mut builder = RuleBuilder {
-            placeholders_by_stand_in: pattern.placeholders_by_stand_in(),
-            rules: Vec::new(),
-        };
-        builder.try_add(ast::Expr::parse(&raw_pattern), raw_template.map(ast::Expr::parse));
-        builder.try_add(ast::Type::parse(&raw_pattern), raw_template.map(ast::Type::parse));
-        builder.try_add(ast::Item::parse(&raw_pattern), raw_template.map(ast::Item::parse));
-        builder.try_add(ast::Path::parse(&raw_pattern), raw_template.map(ast::Path::parse));
-        builder.try_add(ast::Pat::parse(&raw_pattern), raw_template.map(ast::Pat::parse));
-        builder.build()
-    }
-}
-
-struct RuleBuilder {
-    placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
-    rules: Vec<ParsedRule>,
-}
-
-impl RuleBuilder {
-    fn try_add<T: AstNode>(&mut self, pattern: Result<T, ()>, template: Option<Result<T, ()>>) {
-        match (pattern, template) {
-            (Ok(pattern), Some(Ok(template))) => self.rules.push(ParsedRule {
-                placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
-                pattern: pattern.syntax().clone(),
-                template: Some(template.syntax().clone()),
-            }),
-            (Ok(pattern), None) => self.rules.push(ParsedRule {
-                placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
-                pattern: pattern.syntax().clone(),
-                template: None,
-            }),
-            _ => {}
-        }
-    }
-
-    fn build(mut self) -> Result<Vec<ParsedRule>, SsrError> {
-        if self.rules.is_empty() {
-            bail!("Not a valid Rust expression, type, item, path or pattern");
-        }
-        // If any rules contain paths, then we reject any rules that don't contain paths. Allowing a
-        // mix leads to strange semantics, since the path-based rules only match things where the
-        // path refers to semantically the same thing, whereas the non-path-based rules could match
-        // anything. Specifically, if we have a rule like `foo ==>> bar` we only want to match the
-        // `foo` that is in the current scope, not any `foo`. However "foo" can be parsed as a
-        // pattern (IDENT_PAT -> NAME -> IDENT). Allowing such a rule through would result in
-        // renaming everything called `foo` to `bar`. It'd also be slow, since without a path, we'd
-        // have to use the slow-scan search mechanism.
-        if self.rules.iter().any(|rule| contains_path(&rule.pattern)) {
-            let old_len = self.rules.len();
-            self.rules.retain(|rule| contains_path(&rule.pattern));
-            if self.rules.len() < old_len {
-                mark::hit!(pattern_is_a_single_segment_path);
-            }
-        }
-        Ok(self.rules)
-    }
-}
-
-/// Returns whether there are any paths in `node`.
-fn contains_path(node: &SyntaxNode) -> bool {
-    node.kind() == SyntaxKind::PATH
-        || node.descendants().any(|node| node.kind() == SyntaxKind::PATH)
-}
-
-impl FromStr for SsrRule {
-    type Err = SsrError;
-
-    fn from_str(query: &str) -> Result<SsrRule, SsrError> {
-        let mut it = query.split("==>>");
-        let pattern = it.next().expect("at least empty string").trim();
-        let template = it
-            .next()
-            .ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))?
-            .trim()
-            .to_string();
-        if it.next().is_some() {
-            return Err(SsrError("More than one delimiter found".into()));
-        }
-        let raw_pattern = pattern.parse()?;
-        let raw_template = template.parse()?;
-        let parsed_rules = ParsedRule::new(&raw_pattern, Some(&raw_template))?;
-        let rule = SsrRule { pattern: raw_pattern, template: raw_template, parsed_rules };
-        validate_rule(&rule)?;
-        Ok(rule)
-    }
-}
-
-impl FromStr for RawPattern {
-    type Err = SsrError;
-
-    fn from_str(pattern_str: &str) -> Result<RawPattern, SsrError> {
-        Ok(RawPattern { tokens: parse_pattern(pattern_str)? })
-    }
-}
-
-impl RawPattern {
-    /// Returns this search pattern as Rust source code that we can feed to the Rust parser.
-    fn as_rust_code(&self) -> String {
-        let mut res = String::new();
-        for t in &self.tokens {
-            res.push_str(match t {
-                PatternElement::Token(token) => token.text.as_str(),
-                PatternElement::Placeholder(placeholder) => placeholder.stand_in_name.as_str(),
-            });
-        }
-        res
-    }
-
-    pub(crate) fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> {
-        let mut res = FxHashMap::default();
-        for t in &self.tokens {
-            if let PatternElement::Placeholder(placeholder) = t {
-                res.insert(SmolStr::new(placeholder.stand_in_name.clone()), placeholder.clone());
-            }
-        }
-        res
-    }
-}
-
-impl FromStr for SsrPattern {
-    type Err = SsrError;
-
-    fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> {
-        let raw_pattern = pattern_str.parse()?;
-        let parsed_rules = ParsedRule::new(&raw_pattern, None)?;
-        Ok(SsrPattern { raw: raw_pattern, parsed_rules })
-    }
-}
-
-/// Returns `pattern_str`, parsed as a search or replace pattern. Placeholders (tokens starting
-/// with `$`) are parsed into `PatternElement::Placeholder`; all other tokens, including
-/// whitespace, are passed through unchanged as `PatternElement::Token`.
-fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
-    let mut res = Vec::new();
-    let mut placeholder_names = FxHashSet::default();
-    let mut tokens = tokenize(pattern_str)?.into_iter();
-    while let Some(token) = tokens.next() {
-        if token.kind == T![$] {
-            let placeholder = parse_placeholder(&mut tokens)?;
-            if !placeholder_names.insert(placeholder.ident.clone()) {
-                bail!("Name `{}` repeats more than once", placeholder.ident);
-            }
-            res.push(PatternElement::Placeholder(placeholder));
-        } else {
-            res.push(PatternElement::Token(token));
-        }
-    }
-    Ok(res)
-}
-
-/// Checks for errors in a rule. e.g. the replace pattern referencing placeholders that the search
-/// pattern didn't define.
-fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
-    let mut defined_placeholders = FxHashSet::default();
-    for p in &rule.pattern.tokens {
-        if let PatternElement::Placeholder(placeholder) = p {
-            defined_placeholders.insert(&placeholder.ident);
-        }
-    }
-    let mut undefined = Vec::new();
-    for p in &rule.template.tokens {
-        if let PatternElement::Placeholder(placeholder) = p {
-            if !defined_placeholders.contains(&placeholder.ident) {
-                undefined.push(format!("${}", placeholder.ident));
-            }
-            if !placeholder.constraints.is_empty() {
-                bail!("Replacement placeholders cannot have constraints");
-            }
-        }
-    }
-    if !undefined.is_empty() {
-        bail!("Replacement contains undefined placeholders: {}", undefined.join(", "));
-    }
-    Ok(())
-}
-
-fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
-    let mut start = 0;
-    let (raw_tokens, errors) = syntax::tokenize(source);
-    if let Some(first_error) = errors.first() {
-        bail!("Failed to parse pattern: {}", first_error);
-    }
-    let mut tokens: Vec<Token> = Vec::new();
-    for raw_token in raw_tokens {
-        let token_len = usize::from(raw_token.len);
-        tokens.push(Token {
-            kind: raw_token.kind,
-            text: SmolStr::new(&source[start..start + token_len]),
-        });
-        start += token_len;
-    }
-    Ok(tokens)
-}
-
-fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> {
-    let mut name = None;
-    let mut constraints = Vec::new();
-    if let Some(token) = tokens.next() {
-        match token.kind {
-            SyntaxKind::IDENT => {
-                name = Some(token.text);
-            }
-            T!['{'] => {
-                let token =
-                    tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?;
-                if token.kind == SyntaxKind::IDENT {
-                    name = Some(token.text);
-                }
-                loop {
-                    let token = tokens
-                        .next()
-                        .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?;
-                    match token.kind {
-                        T![:] => {
-                            constraints.push(parse_constraint(tokens)?);
-                        }
-                        T!['}'] => break,
-                        _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text),
-                    }
-                }
-            }
-            _ => {
-                bail!("Placeholders should either be $name or ${{name:constraints}}");
-            }
-        }
-    }
-    let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?;
-    Ok(Placeholder::new(name, constraints))
-}
-
-fn parse_constraint(tokens: &mut std::vec::IntoIter<Token>) -> Result<Constraint, SsrError> {
-    let constraint_type = tokens
-        .next()
-        .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))?
-        .text
-        .to_string();
-    match constraint_type.as_str() {
-        "kind" => {
-            expect_token(tokens, "(")?;
-            let t = tokens.next().ok_or_else(|| {
-                SsrError::new("Unexpected end of constraint while looking for kind")
-            })?;
-            if t.kind != SyntaxKind::IDENT {
-                bail!("Expected ident, found {:?} while parsing kind constraint", t.kind);
-            }
-            expect_token(tokens, ")")?;
-            Ok(Constraint::Kind(NodeKind::from(&t.text)?))
-        }
-        "not" => {
-            expect_token(tokens, "(")?;
-            let sub = parse_constraint(tokens)?;
-            expect_token(tokens, ")")?;
-            Ok(Constraint::Not(Box::new(sub)))
-        }
-        x => bail!("Unsupported constraint type '{}'", x),
-    }
-}
-
-fn expect_token(tokens: &mut std::vec::IntoIter<Token>, expected: &str) -> Result<(), SsrError> {
-    if let Some(t) = tokens.next() {
-        if t.text == expected {
-            return Ok(());
-        }
-        bail!("Expected {} found {}", expected, t.text);
-    }
-    bail!("Expected {} found end of stream", expected);
-}
-
-impl NodeKind {
-    fn from(name: &SmolStr) -> Result<NodeKind, SsrError> {
-        Ok(match name.as_str() {
-            "literal" => NodeKind::Literal,
-            _ => bail!("Unknown node kind '{}'", name),
-        })
-    }
-}
-
-impl Placeholder {
-    fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self {
-        Self { stand_in_name: format!("__placeholder_{}", name), constraints, ident: name }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn parser_happy_case() {
-        fn token(kind: SyntaxKind, text: &str) -> PatternElement {
-            PatternElement::Token(Token { kind, text: SmolStr::new(text) })
-        }
-        fn placeholder(name: &str) -> PatternElement {
-            PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new()))
-        }
-        let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
-        assert_eq!(
-            result.pattern.tokens,
-            vec![
-                token(SyntaxKind::IDENT, "foo"),
-                token(T!['('], "("),
-                placeholder("a"),
-                token(T![,], ","),
-                token(SyntaxKind::WHITESPACE, " "),
-                placeholder("b"),
-                token(T![')'], ")"),
-            ]
-        );
-        assert_eq!(
-            result.template.tokens,
-            vec![
-                token(SyntaxKind::IDENT, "bar"),
-                token(T!['('], "("),
-                placeholder("b"),
-                token(T![,], ","),
-                token(SyntaxKind::WHITESPACE, " "),
-                placeholder("a"),
-                token(T![')'], ")"),
-            ]
-        );
-    }
-}
diff --git a/crates/ra_ssr/src/replacing.rs b/crates/ra_ssr/src/replacing.rs
deleted file mode 100644 (file)
index 8f8fe61..0000000
+++ /dev/null
@@ -1,194 +0,0 @@
-//! Code for applying replacement templates for matches that have previously been found.
-
-use crate::matching::Var;
-use crate::{resolving::ResolvedRule, Match, SsrMatches};
-use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::ast::{self, AstToken};
-use syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize};
-use text_edit::TextEdit;
-
-/// Returns a text edit that will replace each match in `matches` with its corresponding replacement
-/// template. Placeholders in the template will have been substituted with whatever they matched to
-/// in the original code.
-pub(crate) fn matches_to_edit(
-    matches: &SsrMatches,
-    file_src: &str,
-    rules: &[ResolvedRule],
-) -> TextEdit {
-    matches_to_edit_at_offset(matches, file_src, 0.into(), rules)
-}
-
-fn matches_to_edit_at_offset(
-    matches: &SsrMatches,
-    file_src: &str,
-    relative_start: TextSize,
-    rules: &[ResolvedRule],
-) -> TextEdit {
-    let mut edit_builder = TextEdit::builder();
-    for m in &matches.matches {
-        edit_builder.replace(
-            m.range.range.checked_sub(relative_start).unwrap(),
-            render_replace(m, file_src, rules),
-        );
-    }
-    edit_builder.finish()
-}
-
-struct ReplacementRenderer<'a> {
-    match_info: &'a Match,
-    file_src: &'a str,
-    rules: &'a [ResolvedRule],
-    rule: &'a ResolvedRule,
-    out: String,
-    // Map from a range within `out` to a token in `template` that represents a placeholder. This is
-    // used to validate that the generated source code doesn't split any placeholder expansions (see
-    // below).
-    placeholder_tokens_by_range: FxHashMap<TextRange, SyntaxToken>,
-    // Which placeholder tokens need to be wrapped in parenthesis in order to ensure that when `out`
-    // is parsed, placeholders don't get split. e.g. if a template of `$a.to_string()` results in `1
-    // + 2.to_string()` then the placeholder value `1 + 2` was split and needs parenthesis.
-    placeholder_tokens_requiring_parenthesis: FxHashSet<SyntaxToken>,
-}
-
-fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String {
-    let rule = &rules[match_info.rule_index];
-    let template = rule
-        .template
-        .as_ref()
-        .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern");
-    let mut renderer = ReplacementRenderer {
-        match_info,
-        file_src,
-        rules,
-        rule,
-        out: String::new(),
-        placeholder_tokens_requiring_parenthesis: FxHashSet::default(),
-        placeholder_tokens_by_range: FxHashMap::default(),
-    };
-    renderer.render_node(&template.node);
-    renderer.maybe_rerender_with_extra_parenthesis(&template.node);
-    for comment in &match_info.ignored_comments {
-        renderer.out.push_str(&comment.syntax().to_string());
-    }
-    renderer.out
-}
-
-impl ReplacementRenderer<'_> {
-    fn render_node_children(&mut self, node: &SyntaxNode) {
-        for node_or_token in node.children_with_tokens() {
-            self.render_node_or_token(&node_or_token);
-        }
-    }
-
-    fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) {
-        match node_or_token {
-            SyntaxElement::Token(token) => {
-                self.render_token(&token);
-            }
-            SyntaxElement::Node(child_node) => {
-                self.render_node(&child_node);
-            }
-        }
-    }
-
-    fn render_node(&mut self, node: &SyntaxNode) {
-        use syntax::ast::AstNode;
-        if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) {
-            self.out.push_str(&mod_path.to_string());
-            // Emit everything except for the segment's name-ref, since we already effectively
-            // emitted that as part of `mod_path`.
-            if let Some(path) = ast::Path::cast(node.clone()) {
-                if let Some(segment) = path.segment() {
-                    for node_or_token in segment.syntax().children_with_tokens() {
-                        if node_or_token.kind() != SyntaxKind::NAME_REF {
-                            self.render_node_or_token(&node_or_token);
-                        }
-                    }
-                }
-            }
-        } else {
-            self.render_node_children(&node);
-        }
-    }
-
-    fn render_token(&mut self, token: &SyntaxToken) {
-        if let Some(placeholder) = self.rule.get_placeholder(&token) {
-            if let Some(placeholder_value) =
-                self.match_info.placeholder_values.get(&Var(placeholder.ident.to_string()))
-            {
-                let range = &placeholder_value.range.range;
-                let mut matched_text =
-                    self.file_src[usize::from(range.start())..usize::from(range.end())].to_owned();
-                let edit = matches_to_edit_at_offset(
-                    &placeholder_value.inner_matches,
-                    self.file_src,
-                    range.start(),
-                    self.rules,
-                );
-                let needs_parenthesis =
-                    self.placeholder_tokens_requiring_parenthesis.contains(token);
-                edit.apply(&mut matched_text);
-                if needs_parenthesis {
-                    self.out.push('(');
-                }
-                self.placeholder_tokens_by_range.insert(
-                    TextRange::new(
-                        TextSize::of(&self.out),
-                        TextSize::of(&self.out) + TextSize::of(&matched_text),
-                    ),
-                    token.clone(),
-                );
-                self.out.push_str(&matched_text);
-                if needs_parenthesis {
-                    self.out.push(')');
-                }
-            } else {
-                // We validated that all placeholder references were valid before we
-                // started, so this shouldn't happen.
-                panic!(
-                    "Internal error: replacement referenced unknown placeholder {}",
-                    placeholder.ident
-                );
-            }
-        } else {
-            self.out.push_str(token.text().as_str());
-        }
-    }
-
-    // Checks if the resulting code, when parsed doesn't split any placeholders due to different
-    // order of operations between the search pattern and the replacement template. If any do, then
-    // we rerender the template and wrap the problematic placeholders with parenthesis.
-    fn maybe_rerender_with_extra_parenthesis(&mut self, template: &SyntaxNode) {
-        if let Some(node) = parse_as_kind(&self.out, template.kind()) {
-            self.remove_node_ranges(node);
-            if self.placeholder_tokens_by_range.is_empty() {
-                return;
-            }
-            self.placeholder_tokens_requiring_parenthesis =
-                self.placeholder_tokens_by_range.values().cloned().collect();
-            self.out.clear();
-            self.render_node(template);
-        }
-    }
-
-    fn remove_node_ranges(&mut self, node: SyntaxNode) {
-        self.placeholder_tokens_by_range.remove(&node.text_range());
-        for child in node.children() {
-            self.remove_node_ranges(child);
-        }
-    }
-}
-
-fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option<SyntaxNode> {
-    use syntax::ast::AstNode;
-    if ast::Expr::can_cast(kind) {
-        if let Ok(expr) = ast::Expr::parse(code) {
-            return Some(expr.syntax().clone());
-        }
-    } else if ast::Item::can_cast(kind) {
-        if let Ok(item) = ast::Item::parse(code) {
-            return Some(item.syntax().clone());
-        }
-    }
-    None
-}
diff --git a/crates/ra_ssr/src/resolving.rs b/crates/ra_ssr/src/resolving.rs
deleted file mode 100644 (file)
index 020fd79..0000000
+++ /dev/null
@@ -1,299 +0,0 @@
-//! This module is responsible for resolving paths within rules.
-
-use crate::errors::error;
-use crate::{parsing, SsrError};
-use base_db::FilePosition;
-use parsing::Placeholder;
-use rustc_hash::FxHashMap;
-use syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken};
-use test_utils::mark;
-
-pub(crate) struct ResolutionScope<'db> {
-    scope: hir::SemanticsScope<'db>,
-    hygiene: hir::Hygiene,
-    node: SyntaxNode,
-}
-
-pub(crate) struct ResolvedRule {
-    pub(crate) pattern: ResolvedPattern,
-    pub(crate) template: Option<ResolvedPattern>,
-    pub(crate) index: usize,
-}
-
-pub(crate) struct ResolvedPattern {
-    pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
-    pub(crate) node: SyntaxNode,
-    // Paths in `node` that we've resolved.
-    pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
-    pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, UfcsCallInfo>,
-    pub(crate) contains_self: bool,
-}
-
-pub(crate) struct ResolvedPath {
-    pub(crate) resolution: hir::PathResolution,
-    /// The depth of the ast::Path that was resolved within the pattern.
-    pub(crate) depth: u32,
-}
-
-pub(crate) struct UfcsCallInfo {
-    pub(crate) call_expr: ast::CallExpr,
-    pub(crate) function: hir::Function,
-    pub(crate) qualifier_type: Option<hir::Type>,
-}
-
-impl ResolvedRule {
-    pub(crate) fn new(
-        rule: parsing::ParsedRule,
-        resolution_scope: &ResolutionScope,
-        index: usize,
-    ) -> Result<ResolvedRule, SsrError> {
-        let resolver =
-            Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in };
-        let resolved_template = if let Some(template) = rule.template {
-            Some(resolver.resolve_pattern_tree(template)?)
-        } else {
-            None
-        };
-        Ok(ResolvedRule {
-            pattern: resolver.resolve_pattern_tree(rule.pattern)?,
-            template: resolved_template,
-            index,
-        })
-    }
-
-    pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> {
-        if token.kind() != SyntaxKind::IDENT {
-            return None;
-        }
-        self.pattern.placeholders_by_stand_in.get(token.text())
-    }
-}
-
-struct Resolver<'a, 'db> {
-    resolution_scope: &'a ResolutionScope<'db>,
-    placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
-}
-
-impl Resolver<'_, '_> {
-    fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
-        use syntax::ast::AstNode;
-        use syntax::{SyntaxElement, T};
-        let mut resolved_paths = FxHashMap::default();
-        self.resolve(pattern.clone(), 0, &mut resolved_paths)?;
-        let ufcs_function_calls = resolved_paths
-            .iter()
-            .filter_map(|(path_node, resolved)| {
-                if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) {
-                    if let Some(call_expr) = ast::CallExpr::cast(grandparent.clone()) {
-                        if let hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) =
-                            resolved.resolution
-                        {
-                            let qualifier_type = self.resolution_scope.qualifier_type(path_node);
-                            return Some((
-                                grandparent,
-                                UfcsCallInfo { call_expr, function, qualifier_type },
-                            ));
-                        }
-                    }
-                }
-                None
-            })
-            .collect();
-        let contains_self =
-            pattern.descendants_with_tokens().any(|node_or_token| match node_or_token {
-                SyntaxElement::Token(t) => t.kind() == T![self],
-                _ => false,
-            });
-        Ok(ResolvedPattern {
-            node: pattern,
-            resolved_paths,
-            placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
-            ufcs_function_calls,
-            contains_self,
-        })
-    }
-
-    fn resolve(
-        &self,
-        node: SyntaxNode,
-        depth: u32,
-        resolved_paths: &mut FxHashMap<SyntaxNode, ResolvedPath>,
-    ) -> Result<(), SsrError> {
-        use syntax::ast::AstNode;
-        if let Some(path) = ast::Path::cast(node.clone()) {
-            if is_self(&path) {
-                // Self cannot be resolved like other paths.
-                return Ok(());
-            }
-            // Check if this is an appropriate place in the path to resolve. If the path is
-            // something like `a::B::<i32>::c` then we want to resolve `a::B`. If the path contains
-            // a placeholder. e.g. `a::$b::c` then we want to resolve `a`.
-            if !path_contains_type_arguments(path.qualifier())
-                && !self.path_contains_placeholder(&path)
-            {
-                let resolution = self
-                    .resolution_scope
-                    .resolve_path(&path)
-                    .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?;
-                if self.ok_to_use_path_resolution(&resolution) {
-                    resolved_paths.insert(node, ResolvedPath { resolution, depth });
-                    return Ok(());
-                }
-            }
-        }
-        for node in node.children() {
-            self.resolve(node, depth + 1, resolved_paths)?;
-        }
-        Ok(())
-    }
-
-    /// Returns whether `path` contains a placeholder, but ignores any placeholders within type
-    /// arguments.
-    fn path_contains_placeholder(&self, path: &ast::Path) -> bool {
-        if let Some(segment) = path.segment() {
-            if let Some(name_ref) = segment.name_ref() {
-                if self.placeholders_by_stand_in.contains_key(name_ref.text()) {
-                    return true;
-                }
-            }
-        }
-        if let Some(qualifier) = path.qualifier() {
-            return self.path_contains_placeholder(&qualifier);
-        }
-        false
-    }
-
-    fn ok_to_use_path_resolution(&self, resolution: &hir::PathResolution) -> bool {
-        match resolution {
-            hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) => {
-                if function.has_self_param(self.resolution_scope.scope.db) {
-                    // If we don't use this path resolution, then we won't be able to match method
-                    // calls. e.g. `Foo::bar($s)` should match `x.bar()`.
-                    true
-                } else {
-                    mark::hit!(replace_associated_trait_default_function_call);
-                    false
-                }
-            }
-            hir::PathResolution::AssocItem(_) => {
-                // Not a function. Could be a constant or an associated type.
-                mark::hit!(replace_associated_trait_constant);
-                false
-            }
-            _ => true,
-        }
-    }
-}
-
-impl<'db> ResolutionScope<'db> {
-    pub(crate) fn new(
-        sema: &hir::Semantics<'db, ide_db::RootDatabase>,
-        resolve_context: FilePosition,
-    ) -> ResolutionScope<'db> {
-        use syntax::ast::AstNode;
-        let file = sema.parse(resolve_context.file_id);
-        // Find a node at the requested position, falling back to the whole file.
-        let node = file
-            .syntax()
-            .token_at_offset(resolve_context.offset)
-            .left_biased()
-            .map(|token| token.parent())
-            .unwrap_or_else(|| file.syntax().clone());
-        let node = pick_node_for_resolution(node);
-        let scope = sema.scope(&node);
-        ResolutionScope {
-            scope,
-            hygiene: hir::Hygiene::new(sema.db, resolve_context.file_id.into()),
-            node,
-        }
-    }
-
-    /// Returns the function in which SSR was invoked, if any.
-    pub(crate) fn current_function(&self) -> Option<SyntaxNode> {
-        self.node.ancestors().find(|node| node.kind() == SyntaxKind::FN).map(|node| node.clone())
-    }
-
-    fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
-        let hir_path = hir::Path::from_src(path.clone(), &self.hygiene)?;
-        // First try resolving the whole path. This will work for things like
-        // `std::collections::HashMap`, but will fail for things like
-        // `std::collections::HashMap::new`.
-        if let Some(resolution) = self.scope.resolve_hir_path(&hir_path) {
-            return Some(resolution);
-        }
-        // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap` and if
-        // that succeeds, then iterate through the candidates on the resolved type with the provided
-        // name.
-        let resolved_qualifier = self.scope.resolve_hir_path_qualifier(&hir_path.qualifier()?)?;
-        if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
-            adt.ty(self.scope.db).iterate_path_candidates(
-                self.scope.db,
-                self.scope.module()?.krate(),
-                &self.scope.traits_in_scope(),
-                Some(hir_path.segments().last()?.name),
-                |_ty, assoc_item| Some(hir::PathResolution::AssocItem(assoc_item)),
-            )
-        } else {
-            None
-        }
-    }
-
-    fn qualifier_type(&self, path: &SyntaxNode) -> Option<hir::Type> {
-        use syntax::ast::AstNode;
-        if let Some(path) = ast::Path::cast(path.clone()) {
-            if let Some(qualifier) = path.qualifier() {
-                if let Some(resolved_qualifier) = self.resolve_path(&qualifier) {
-                    if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
-                        return Some(adt.ty(self.scope.db));
-                    }
-                }
-            }
-        }
-        None
-    }
-}
-
-fn is_self(path: &ast::Path) -> bool {
-    path.segment().map(|segment| segment.self_token().is_some()).unwrap_or(false)
-}
-
-/// Returns a suitable node for resolving paths in the current scope. If we create a scope based on
-/// a statement node, then we can't resolve local variables that were defined in the current scope
-/// (only in parent scopes). So we find another node, ideally a child of the statement where local
-/// variable resolution is permitted.
-fn pick_node_for_resolution(node: SyntaxNode) -> SyntaxNode {
-    match node.kind() {
-        SyntaxKind::EXPR_STMT => {
-            if let Some(n) = node.first_child() {
-                mark::hit!(cursor_after_semicolon);
-                return n;
-            }
-        }
-        SyntaxKind::LET_STMT | SyntaxKind::IDENT_PAT => {
-            if let Some(next) = node.next_sibling() {
-                return pick_node_for_resolution(next);
-            }
-        }
-        SyntaxKind::NAME => {
-            if let Some(parent) = node.parent() {
-                return pick_node_for_resolution(parent);
-            }
-        }
-        _ => {}
-    }
-    node
-}
-
-/// Returns whether `path` or any of its qualifiers contains type arguments.
-fn path_contains_type_arguments(path: Option<ast::Path>) -> bool {
-    if let Some(path) = path {
-        if let Some(segment) = path.segment() {
-            if segment.generic_arg_list().is_some() {
-                mark::hit!(type_arguments_within_path);
-                return true;
-            }
-        }
-        return path_contains_type_arguments(path.qualifier());
-    }
-    false
-}
diff --git a/crates/ra_ssr/src/search.rs b/crates/ra_ssr/src/search.rs
deleted file mode 100644 (file)
index 8509cfa..0000000
+++ /dev/null
@@ -1,282 +0,0 @@
-//! Searching for matches.
-
-use crate::{
-    matching,
-    resolving::{ResolvedPath, ResolvedPattern, ResolvedRule},
-    Match, MatchFinder,
-};
-use base_db::{FileId, FileRange};
-use ide_db::{
-    defs::Definition,
-    search::{Reference, SearchScope},
-};
-use rustc_hash::FxHashSet;
-use syntax::{ast, AstNode, SyntaxKind, SyntaxNode};
-use test_utils::mark;
-
-/// A cache for the results of find_usages. This is for when we have multiple patterns that have the
-/// same path. e.g. if the pattern was `foo::Bar` that can parse as a path, an expression, a type
-/// and as a pattern. In each, the usages of `foo::Bar` are the same and we'd like to avoid finding
-/// them more than once.
-#[derive(Default)]
-pub(crate) struct UsageCache {
-    usages: Vec<(Definition, Vec<Reference>)>,
-}
-
-impl<'db> MatchFinder<'db> {
-    /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
-    /// replacement impossible, so further processing is required in order to properly nest matches
-    /// and remove overlapping matches. This is done in the `nesting` module.
-    pub(crate) fn find_matches_for_rule(
-        &self,
-        rule: &ResolvedRule,
-        usage_cache: &mut UsageCache,
-        matches_out: &mut Vec<Match>,
-    ) {
-        if rule.pattern.contains_self {
-            // If the pattern contains `self` we restrict the scope of the search to just the
-            // current method. No other method can reference the same `self`. This makes the
-            // behavior of `self` consistent with other variables.
-            if let Some(current_function) = self.resolution_scope.current_function() {
-                self.slow_scan_node(&current_function, rule, &None, matches_out);
-            }
-            return;
-        }
-        if pick_path_for_usages(&rule.pattern).is_none() {
-            self.slow_scan(rule, matches_out);
-            return;
-        }
-        self.find_matches_for_pattern_tree(rule, &rule.pattern, usage_cache, matches_out);
-    }
-
-    fn find_matches_for_pattern_tree(
-        &self,
-        rule: &ResolvedRule,
-        pattern: &ResolvedPattern,
-        usage_cache: &mut UsageCache,
-        matches_out: &mut Vec<Match>,
-    ) {
-        if let Some(resolved_path) = pick_path_for_usages(pattern) {
-            let definition: Definition = resolved_path.resolution.clone().into();
-            for reference in self.find_usages(usage_cache, definition) {
-                if let Some(node_to_match) = self.find_node_to_match(resolved_path, reference) {
-                    if !is_search_permitted_ancestors(&node_to_match) {
-                        mark::hit!(use_declaration_with_braces);
-                        continue;
-                    }
-                    self.try_add_match(rule, &node_to_match, &None, matches_out);
-                }
-            }
-        }
-    }
-
-    fn find_node_to_match(
-        &self,
-        resolved_path: &ResolvedPath,
-        reference: &Reference,
-    ) -> Option<SyntaxNode> {
-        let file = self.sema.parse(reference.file_range.file_id);
-        let depth = resolved_path.depth as usize;
-        let offset = reference.file_range.range.start();
-        if let Some(path) =
-            self.sema.find_node_at_offset_with_descend::<ast::Path>(file.syntax(), offset)
-        {
-            self.sema.ancestors_with_macros(path.syntax().clone()).skip(depth).next()
-        } else if let Some(path) =
-            self.sema.find_node_at_offset_with_descend::<ast::MethodCallExpr>(file.syntax(), offset)
-        {
-            // If the pattern contained a path and we found a reference to that path that wasn't
-            // itself a path, but was a method call, then we need to adjust how far up to try
-            // matching by how deep the path was within a CallExpr. The structure would have been
-            // CallExpr, PathExpr, Path - i.e. a depth offset of 2. We don't need to check if the
-            // path was part of a CallExpr because if it wasn't then all that will happen is we'll
-            // fail to match, which is the desired behavior.
-            const PATH_DEPTH_IN_CALL_EXPR: usize = 2;
-            if depth < PATH_DEPTH_IN_CALL_EXPR {
-                return None;
-            }
-            self.sema
-                .ancestors_with_macros(path.syntax().clone())
-                .skip(depth - PATH_DEPTH_IN_CALL_EXPR)
-                .next()
-        } else {
-            None
-        }
-    }
-
-    fn find_usages<'a>(
-        &self,
-        usage_cache: &'a mut UsageCache,
-        definition: Definition,
-    ) -> &'a [Reference] {
-        // Logically if a lookup succeeds we should just return it. Unfortunately returning it would
-        // extend the lifetime of the borrow, then we wouldn't be able to do the insertion on a
-        // cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two
-        // lookups in the case of a cache hit.
-        if usage_cache.find(&definition).is_none() {
-            let usages = definition.find_usages(&self.sema, Some(self.search_scope()));
-            usage_cache.usages.push((definition, usages));
-            return &usage_cache.usages.last().unwrap().1;
-        }
-        usage_cache.find(&definition).unwrap()
-    }
-
-    /// Returns the scope within which we want to search. We don't want un unrestricted search
-    /// scope, since we don't want to find references in external dependencies.
-    fn search_scope(&self) -> SearchScope {
-        // FIXME: We should ideally have a test that checks that we edit local roots and not library
-        // roots. This probably would require some changes to fixtures, since currently everything
-        // seems to get put into a single source root.
-        let mut files = Vec::new();
-        self.search_files_do(|file_id| {
-            files.push(file_id);
-        });
-        SearchScope::files(&files)
-    }
-
-    fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
-        self.search_files_do(|file_id| {
-            let file = self.sema.parse(file_id);
-            let code = file.syntax();
-            self.slow_scan_node(code, rule, &None, matches_out);
-        })
-    }
-
-    fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
-        if self.restrict_ranges.is_empty() {
-            // Unrestricted search.
-            use base_db::SourceDatabaseExt;
-            use ide_db::symbol_index::SymbolsDatabase;
-            for &root in self.sema.db.local_roots().iter() {
-                let sr = self.sema.db.source_root(root);
-                for file_id in sr.iter() {
-                    callback(file_id);
-                }
-            }
-        } else {
-            // Search is restricted, deduplicate file IDs (generally only one).
-            let mut files = FxHashSet::default();
-            for range in &self.restrict_ranges {
-                if files.insert(range.file_id) {
-                    callback(range.file_id);
-                }
-            }
-        }
-    }
-
-    fn slow_scan_node(
-        &self,
-        code: &SyntaxNode,
-        rule: &ResolvedRule,
-        restrict_range: &Option<FileRange>,
-        matches_out: &mut Vec<Match>,
-    ) {
-        if !is_search_permitted(code) {
-            return;
-        }
-        self.try_add_match(rule, &code, restrict_range, matches_out);
-        // If we've got a macro call, we already tried matching it pre-expansion, which is the only
-        // way to match the whole macro, now try expanding it and matching the expansion.
-        if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
-            if let Some(expanded) = self.sema.expand(&macro_call) {
-                if let Some(tt) = macro_call.token_tree() {
-                    // When matching within a macro expansion, we only want to allow matches of
-                    // nodes that originated entirely from within the token tree of the macro call.
-                    // i.e. we don't want to match something that came from the macro itself.
-                    self.slow_scan_node(
-                        &expanded,
-                        rule,
-                        &Some(self.sema.original_range(tt.syntax())),
-                        matches_out,
-                    );
-                }
-            }
-        }
-        for child in code.children() {
-            self.slow_scan_node(&child, rule, restrict_range, matches_out);
-        }
-    }
-
-    fn try_add_match(
-        &self,
-        rule: &ResolvedRule,
-        code: &SyntaxNode,
-        restrict_range: &Option<FileRange>,
-        matches_out: &mut Vec<Match>,
-    ) {
-        if !self.within_range_restrictions(code) {
-            mark::hit!(replace_nonpath_within_selection);
-            return;
-        }
-        if let Ok(m) = matching::get_match(false, rule, code, restrict_range, &self.sema) {
-            matches_out.push(m);
-        }
-    }
-
-    /// Returns whether `code` is within one of our range restrictions if we have any. No range
-    /// restrictions is considered unrestricted and always returns true.
-    fn within_range_restrictions(&self, code: &SyntaxNode) -> bool {
-        if self.restrict_ranges.is_empty() {
-            // There is no range restriction.
-            return true;
-        }
-        let node_range = self.sema.original_range(code);
-        for range in &self.restrict_ranges {
-            if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) {
-                return true;
-            }
-        }
-        false
-    }
-}
-
-/// Returns whether we support matching within `node` and all of its ancestors.
-fn is_search_permitted_ancestors(node: &SyntaxNode) -> bool {
-    if let Some(parent) = node.parent() {
-        if !is_search_permitted_ancestors(&parent) {
-            return false;
-        }
-    }
-    is_search_permitted(node)
-}
-
-/// Returns whether we support matching within this kind of node.
-fn is_search_permitted(node: &SyntaxNode) -> bool {
-    // FIXME: Properly handle use declarations. At the moment, if our search pattern is `foo::bar`
-    // and the code is `use foo::{baz, bar}`, we'll match `bar`, since it resolves to `foo::bar`.
-    // However we'll then replace just the part we matched `bar`. We probably need to instead remove
-    // `bar` and insert a new use declaration.
-    node.kind() != SyntaxKind::USE
-}
-
-impl UsageCache {
-    fn find(&mut self, definition: &Definition) -> Option<&[Reference]> {
-        // We expect a very small number of cache entries (generally 1), so a linear scan should be
-        // fast enough and avoids the need to implement Hash for Definition.
-        for (d, refs) in &self.usages {
-            if d == definition {
-                return Some(refs);
-            }
-        }
-        None
-    }
-}
-
-/// Returns a path that's suitable for path resolution. We exclude builtin types, since they aren't
-/// something that we can find references to. We then somewhat arbitrarily pick the path that is the
-/// longest as this is hopefully more likely to be less common, making it faster to find.
-fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> {
-    // FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are
-    // private to the current module, then we definitely would want to pick them over say a path
-    // from std. Possibly we should go further than this and intersect the search scopes for all
-    // resolved paths then search only in that scope.
-    pattern
-        .resolved_paths
-        .iter()
-        .filter(|(_, p)| {
-            !matches!(p.resolution, hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_)))
-        })
-        .map(|(node, resolved)| (node.text().len(), resolved))
-        .max_by(|(a, _), (b, _)| a.cmp(b))
-        .map(|(_, resolved)| resolved)
-}
diff --git a/crates/ra_ssr/src/tests.rs b/crates/ra_ssr/src/tests.rs
deleted file mode 100644 (file)
index 0d0a000..0000000
+++ /dev/null
@@ -1,1174 +0,0 @@
-use crate::{MatchFinder, SsrRule};
-use base_db::{salsa::Durability, FileId, FilePosition, FileRange, SourceDatabaseExt};
-use expect::{expect, Expect};
-use rustc_hash::FxHashSet;
-use std::sync::Arc;
-use test_utils::{mark, RangeOrOffset};
-
-fn parse_error_text(query: &str) -> String {
-    format!("{}", query.parse::<SsrRule>().unwrap_err())
-}
-
-#[test]
-fn parser_empty_query() {
-    assert_eq!(parse_error_text(""), "Parse error: Cannot find delimiter `==>>`");
-}
-
-#[test]
-fn parser_no_delimiter() {
-    assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delimiter `==>>`");
-}
-
-#[test]
-fn parser_two_delimiters() {
-    assert_eq!(
-        parse_error_text("foo() ==>> a ==>> b "),
-        "Parse error: More than one delimiter found"
-    );
-}
-
-#[test]
-fn parser_repeated_name() {
-    assert_eq!(
-        parse_error_text("foo($a, $a) ==>>"),
-        "Parse error: Name `a` repeats more than once"
-    );
-}
-
-#[test]
-fn parser_invalid_pattern() {
-    assert_eq!(
-        parse_error_text(" ==>> ()"),
-        "Parse error: Not a valid Rust expression, type, item, path or pattern"
-    );
-}
-
-#[test]
-fn parser_invalid_template() {
-    assert_eq!(
-        parse_error_text("() ==>> )"),
-        "Parse error: Not a valid Rust expression, type, item, path or pattern"
-    );
-}
-
-#[test]
-fn parser_undefined_placeholder_in_replacement() {
-    assert_eq!(
-        parse_error_text("42 ==>> $a"),
-        "Parse error: Replacement contains undefined placeholders: $a"
-    );
-}
-
-/// `code` may optionally contain a cursor marker `<|>`. If it doesn't, then the position will be
-/// the start of the file. If there's a second cursor marker, then we'll return a single range.
-pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Vec<FileRange>) {
-    use base_db::fixture::WithFixture;
-    use ide_db::symbol_index::SymbolsDatabase;
-    let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) {
-        ide_db::RootDatabase::with_range_or_offset(code)
-    } else {
-        let (db, file_id) = ide_db::RootDatabase::with_single_file(code);
-        (db, file_id, RangeOrOffset::Offset(0.into()))
-    };
-    let selections;
-    let position;
-    match range_or_offset {
-        RangeOrOffset::Range(range) => {
-            position = FilePosition { file_id, offset: range.start() };
-            selections = vec![FileRange { file_id, range: range }];
-        }
-        RangeOrOffset::Offset(offset) => {
-            position = FilePosition { file_id, offset };
-            selections = vec![];
-        }
-    }
-    let mut local_roots = FxHashSet::default();
-    local_roots.insert(base_db::fixture::WORKSPACE);
-    db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
-    (db, position, selections)
-}
-
-fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
-    assert_ssr_transforms(&[rule], input, expected);
-}
-
-fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
-    let (db, position, selections) = single_file(input);
-    let mut match_finder = MatchFinder::in_context(&db, position, selections);
-    for rule in rules {
-        let rule: SsrRule = rule.parse().unwrap();
-        match_finder.add_rule(rule).unwrap();
-    }
-    let edits = match_finder.edits();
-    if edits.is_empty() {
-        panic!("No edits were made");
-    }
-    assert_eq!(edits[0].file_id, position.file_id);
-    // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters
-    // stuff.
-    let mut actual = db.file_text(position.file_id).to_string();
-    edits[0].edit.apply(&mut actual);
-    expected.assert_eq(&actual);
-}
-
-fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet: &str) {
-    let debug_info = match_finder.debug_where_text_equal(file_id, snippet);
-    println!(
-        "Match debug info: {} nodes had text exactly equal to '{}'",
-        debug_info.len(),
-        snippet
-    );
-    for (index, d) in debug_info.iter().enumerate() {
-        println!("Node #{}\n{:#?}\n", index, d);
-    }
-}
-
-fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
-    let (db, position, selections) = single_file(code);
-    let mut match_finder = MatchFinder::in_context(&db, position, selections);
-    match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
-    let matched_strings: Vec<String> =
-        match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
-    if matched_strings != expected && !expected.is_empty() {
-        print_match_debug_info(&match_finder, position.file_id, &expected[0]);
-    }
-    assert_eq!(matched_strings, expected);
-}
-
-fn assert_no_match(pattern: &str, code: &str) {
-    let (db, position, selections) = single_file(code);
-    let mut match_finder = MatchFinder::in_context(&db, position, selections);
-    match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
-    let matches = match_finder.matches().flattened().matches;
-    if !matches.is_empty() {
-        print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text());
-        panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches);
-    }
-}
-
-fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
-    let (db, position, selections) = single_file(code);
-    let mut match_finder = MatchFinder::in_context(&db, position, selections);
-    match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
-    let mut reasons = Vec::new();
-    for d in match_finder.debug_where_text_equal(position.file_id, snippet) {
-        if let Some(reason) = d.match_failure_reason() {
-            reasons.push(reason.to_owned());
-        }
-    }
-    assert_eq!(reasons, vec![expected_reason]);
-}
-
-#[test]
-fn ssr_function_to_method() {
-    assert_ssr_transform(
-        "my_function($a, $b) ==>> ($a).my_method($b)",
-        "fn my_function() {} fn main() { loop { my_function( other_func(x, y), z + w) } }",
-        expect![["fn my_function() {} fn main() { loop { (other_func(x, y)).my_method(z + w) } }"]],
-    )
-}
-
-#[test]
-fn ssr_nested_function() {
-    assert_ssr_transform(
-        "foo($a, $b, $c) ==>> bar($c, baz($a, $b))",
-        r#"
-            //- /lib.rs crate:foo
-            fn foo() {}
-            fn bar() {}
-            fn baz() {}
-            fn main { foo  (x + value.method(b), x+y-z, true && false) }
-            "#,
-        expect![[r#"
-            fn foo() {}
-            fn bar() {}
-            fn baz() {}
-            fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }
-        "#]],
-    )
-}
-
-#[test]
-fn ssr_expected_spacing() {
-    assert_ssr_transform(
-        "foo($x) + bar() ==>> bar($x)",
-        "fn foo() {} fn bar() {} fn main() { foo(5) + bar() }",
-        expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
-    );
-}
-
-#[test]
-fn ssr_with_extra_space() {
-    assert_ssr_transform(
-        "foo($x  ) +    bar() ==>> bar($x)",
-        "fn foo() {} fn bar() {} fn main() { foo(  5 )  +bar(   ) }",
-        expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
-    );
-}
-
-#[test]
-fn ssr_keeps_nested_comment() {
-    assert_ssr_transform(
-        "foo($x) ==>> bar($x)",
-        "fn foo() {} fn bar() {} fn main() { foo(other(5 /* using 5 */)) }",
-        expect![["fn foo() {} fn bar() {} fn main() { bar(other(5 /* using 5 */)) }"]],
-    )
-}
-
-#[test]
-fn ssr_keeps_comment() {
-    assert_ssr_transform(
-        "foo($x) ==>> bar($x)",
-        "fn foo() {} fn bar() {} fn main() { foo(5 /* using 5 */) }",
-        expect![["fn foo() {} fn bar() {} fn main() { bar(5)/* using 5 */ }"]],
-    )
-}
-
-#[test]
-fn ssr_struct_lit() {
-    assert_ssr_transform(
-        "Foo{a: $a, b: $b} ==>> Foo::new($a, $b)",
-        r#"
-            struct Foo() {}
-            impl Foo { fn new() {} }
-            fn main() { Foo{b:2, a:1} }
-            "#,
-        expect![[r#"
-            struct Foo() {}
-            impl Foo { fn new() {} }
-            fn main() { Foo::new(1, 2) }
-        "#]],
-    )
-}
-
-#[test]
-fn ignores_whitespace() {
-    assert_matches("1+2", "fn f() -> i32 {1  +  2}", &["1  +  2"]);
-    assert_matches("1 + 2", "fn f() -> i32 {1+2}", &["1+2"]);
-}
-
-#[test]
-fn no_match() {
-    assert_no_match("1 + 3", "fn f() -> i32 {1  +  2}");
-}
-
-#[test]
-fn match_fn_definition() {
-    assert_matches("fn $a($b: $t) {$c}", "fn f(a: i32) {bar()}", &["fn f(a: i32) {bar()}"]);
-}
-
-#[test]
-fn match_struct_definition() {
-    let code = r#"
-        struct Option<T> {}
-        struct Bar {}
-        struct Foo {name: Option<String>}"#;
-    assert_matches("struct $n {$f: Option<String>}", code, &["struct Foo {name: Option<String>}"]);
-}
-
-#[test]
-fn match_expr() {
-    let code = r#"
-        fn foo() {}
-        fn f() -> i32 {foo(40 + 2, 42)}"#;
-    assert_matches("foo($a, $b)", code, &["foo(40 + 2, 42)"]);
-    assert_no_match("foo($a, $b, $c)", code);
-    assert_no_match("foo($a)", code);
-}
-
-#[test]
-fn match_nested_method_calls() {
-    assert_matches(
-        "$a.z().z().z()",
-        "fn f() {h().i().j().z().z().z().d().e()}",
-        &["h().i().j().z().z().z()"],
-    );
-}
-
-// Make sure that our node matching semantics don't differ within macro calls.
-#[test]
-fn match_nested_method_calls_with_macro_call() {
-    assert_matches(
-        "$a.z().z().z()",
-        r#"
-            macro_rules! m1 { ($a:expr) => {$a}; }
-            fn f() {m1!(h().i().j().z().z().z().d().e())}"#,
-        &["h().i().j().z().z().z()"],
-    );
-}
-
-#[test]
-fn match_complex_expr() {
-    let code = r#"
-        fn foo() {} fn bar() {}
-        fn f() -> i32 {foo(bar(40, 2), 42)}"#;
-    assert_matches("foo($a, $b)", code, &["foo(bar(40, 2), 42)"]);
-    assert_no_match("foo($a, $b, $c)", code);
-    assert_no_match("foo($a)", code);
-    assert_matches("bar($a, $b)", code, &["bar(40, 2)"]);
-}
-
-// Trailing commas in the code should be ignored.
-#[test]
-fn match_with_trailing_commas() {
-    // Code has comma, pattern doesn't.
-    assert_matches("foo($a, $b)", "fn foo() {} fn f() {foo(1, 2,);}", &["foo(1, 2,)"]);
-    assert_matches("Foo{$a, $b}", "struct Foo {} fn f() {Foo{1, 2,};}", &["Foo{1, 2,}"]);
-
-    // Pattern has comma, code doesn't.
-    assert_matches("foo($a, $b,)", "fn foo() {} fn f() {foo(1, 2);}", &["foo(1, 2)"]);
-    assert_matches("Foo{$a, $b,}", "struct Foo {} fn f() {Foo{1, 2};}", &["Foo{1, 2}"]);
-}
-
-#[test]
-fn match_type() {
-    assert_matches("i32", "fn f() -> i32 {1  +  2}", &["i32"]);
-    assert_matches(
-        "Option<$a>",
-        "struct Option<T> {} fn f() -> Option<i32> {42}",
-        &["Option<i32>"],
-    );
-    assert_no_match(
-        "Option<$a>",
-        "struct Option<T> {} struct Result<T, E> {} fn f() -> Result<i32, ()> {42}",
-    );
-}
-
-#[test]
-fn match_struct_instantiation() {
-    let code = r#"
-        struct Foo {bar: i32, baz: i32}
-        fn f() {Foo {bar: 1, baz: 2}}"#;
-    assert_matches("Foo {bar: 1, baz: 2}", code, &["Foo {bar: 1, baz: 2}"]);
-    // Now with placeholders for all parts of the struct.
-    assert_matches("Foo {$a: $b, $c: $d}", code, &["Foo {bar: 1, baz: 2}"]);
-    assert_matches("Foo {}", "struct Foo {} fn f() {Foo {}}", &["Foo {}"]);
-}
-
-#[test]
-fn match_path() {
-    let code = r#"
-        mod foo {
-            pub fn bar() {}
-        }
-        fn f() {foo::bar(42)}"#;
-    assert_matches("foo::bar", code, &["foo::bar"]);
-    assert_matches("$a::bar", code, &["foo::bar"]);
-    assert_matches("foo::$b", code, &["foo::bar"]);
-}
-
-#[test]
-fn match_pattern() {
-    assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]);
-}
-
-// If our pattern has a full path, e.g. a::b::c() and the code has c(), but c resolves to
-// a::b::c, then we should match.
-#[test]
-fn match_fully_qualified_fn_path() {
-    let code = r#"
-        mod a {
-            pub mod b {
-                pub fn c(_: i32) {}
-            }
-        }
-        use a::b::c;
-        fn f1() {
-            c(42);
-        }
-        "#;
-    assert_matches("a::b::c($a)", code, &["c(42)"]);
-}
-
-#[test]
-fn match_resolved_type_name() {
-    let code = r#"
-        mod m1 {
-            pub mod m2 {
-                pub trait Foo<T> {}
-            }
-        }
-        mod m3 {
-            trait Foo<T> {}
-            fn f1(f: Option<&dyn Foo<bool>>) {}
-        }
-        mod m4 {
-            use crate::m1::m2::Foo;
-            fn f1(f: Option<&dyn Foo<i32>>) {}
-        }
-        "#;
-    assert_matches("m1::m2::Foo<$t>", code, &["Foo<i32>"]);
-}
-
-#[test]
-fn type_arguments_within_path() {
-    mark::check!(type_arguments_within_path);
-    let code = r#"
-        mod foo {
-            pub struct Bar<T> {t: T}
-            impl<T> Bar<T> {
-                pub fn baz() {}
-            }
-        }
-        fn f1() {foo::Bar::<i32>::baz();}
-        "#;
-    assert_no_match("foo::Bar::<i64>::baz()", code);
-    assert_matches("foo::Bar::<i32>::baz()", code, &["foo::Bar::<i32>::baz()"]);
-}
-
-#[test]
-fn literal_constraint() {
-    mark::check!(literal_constraint);
-    let code = r#"
-        enum Option<T> { Some(T), None }
-        use Option::Some;
-        fn f1() {
-            let x1 = Some(42);
-            let x2 = Some("foo");
-            let x3 = Some(x1);
-            let x4 = Some(40 + 2);
-            let x5 = Some(true);
-        }
-        "#;
-    assert_matches("Some(${a:kind(literal)})", code, &["Some(42)", "Some(\"foo\")", "Some(true)"]);
-    assert_matches("Some(${a:not(kind(literal))})", code, &["Some(x1)", "Some(40 + 2)"]);
-}
-
-#[test]
-fn match_reordered_struct_instantiation() {
-    assert_matches(
-        "Foo {aa: 1, b: 2, ccc: 3}",
-        "struct Foo {} fn f() {Foo {b: 2, ccc: 3, aa: 1}}",
-        &["Foo {b: 2, ccc: 3, aa: 1}"],
-    );
-    assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {b: 1}}");
-    assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {a: 2}}");
-    assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {a: 1}}");
-    assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {b: 2}}");
-    assert_no_match("Foo {a: 1, }", "struct Foo {} fn f() {Foo {a: 1, b: 2}}");
-    assert_no_match("Foo {a: 1, z: 9}", "struct Foo {} fn f() {Foo {a: 1}}");
-}
-
-#[test]
-fn match_macro_invocation() {
-    assert_matches(
-        "foo!($a)",
-        "macro_rules! foo {() => {}} fn() {foo(foo!(foo()))}",
-        &["foo!(foo())"],
-    );
-    assert_matches(
-        "foo!(41, $a, 43)",
-        "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43)}",
-        &["foo!(41, 42, 43)"],
-    );
-    assert_no_match("foo!(50, $a, 43)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}");
-    assert_no_match("foo!(41, $a, 50)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}");
-    assert_matches(
-        "foo!($a())",
-        "macro_rules! foo {() => {}} fn() {foo!(bar())}",
-        &["foo!(bar())"],
-    );
-}
-
-// When matching within a macro expansion, we only allow matches of nodes that originated from
-// the macro call, not from the macro definition.
-#[test]
-fn no_match_expression_from_macro() {
-    assert_no_match(
-        "$a.clone()",
-        r#"
-            macro_rules! m1 {
-                () => {42.clone()}
-            }
-            fn f1() {m1!()}
-            "#,
-    );
-}
-
-// We definitely don't want to allow matching of an expression that part originates from the
-// macro call `42` and part from the macro definition `.clone()`.
-#[test]
-fn no_match_split_expression() {
-    assert_no_match(
-        "$a.clone()",
-        r#"
-            macro_rules! m1 {
-                ($x:expr) => {$x.clone()}
-            }
-            fn f1() {m1!(42)}
-            "#,
-    );
-}
-
-#[test]
-fn replace_function_call() {
-    // This test also makes sure that we ignore empty-ranges.
-    assert_ssr_transform(
-        "foo() ==>> bar()",
-        "fn foo() {<|><|>} fn bar() {} fn f1() {foo(); foo();}",
-        expect![["fn foo() {} fn bar() {} fn f1() {bar(); bar();}"]],
-    );
-}
-
-#[test]
-fn replace_function_call_with_placeholders() {
-    assert_ssr_transform(
-        "foo($a, $b) ==>> bar($b, $a)",
-        "fn foo() {} fn bar() {} fn f1() {foo(5, 42)}",
-        expect![["fn foo() {} fn bar() {} fn f1() {bar(42, 5)}"]],
-    );
-}
-
-#[test]
-fn replace_nested_function_calls() {
-    assert_ssr_transform(
-        "foo($a) ==>> bar($a)",
-        "fn foo() {} fn bar() {} fn f1() {foo(foo(42))}",
-        expect![["fn foo() {} fn bar() {} fn f1() {bar(bar(42))}"]],
-    );
-}
-
-#[test]
-fn replace_associated_function_call() {
-    assert_ssr_transform(
-        "Foo::new() ==>> Bar::new()",
-        r#"
-            struct Foo {}
-            impl Foo { fn new() {} }
-            struct Bar {}
-            impl Bar { fn new() {} }
-            fn f1() {Foo::new();}
-            "#,
-        expect![[r#"
-            struct Foo {}
-            impl Foo { fn new() {} }
-            struct Bar {}
-            impl Bar { fn new() {} }
-            fn f1() {Bar::new();}
-        "#]],
-    );
-}
-
-#[test]
-fn replace_associated_trait_default_function_call() {
-    mark::check!(replace_associated_trait_default_function_call);
-    assert_ssr_transform(
-        "Bar2::foo() ==>> Bar2::foo2()",
-        r#"
-            trait Foo { fn foo() {} }
-            pub struct Bar {}
-            impl Foo for Bar {}
-            pub struct Bar2 {}
-            impl Foo for Bar2 {}
-            impl Bar2 { fn foo2() {} }
-            fn main() {
-                Bar::foo();
-                Bar2::foo();
-            }
-        "#,
-        expect![[r#"
-            trait Foo { fn foo() {} }
-            pub struct Bar {}
-            impl Foo for Bar {}
-            pub struct Bar2 {}
-            impl Foo for Bar2 {}
-            impl Bar2 { fn foo2() {} }
-            fn main() {
-                Bar::foo();
-                Bar2::foo2();
-            }
-        "#]],
-    );
-}
-
-#[test]
-fn replace_associated_trait_constant() {
-    mark::check!(replace_associated_trait_constant);
-    assert_ssr_transform(
-        "Bar2::VALUE ==>> Bar2::VALUE_2222",
-        r#"
-            trait Foo { const VALUE: i32; const VALUE_2222: i32; }
-            pub struct Bar {}
-            impl Foo for Bar { const VALUE: i32 = 1;  const VALUE_2222: i32 = 2; }
-            pub struct Bar2 {}
-            impl Foo for Bar2 { const VALUE: i32 = 1;  const VALUE_2222: i32 = 2; }
-            impl Bar2 { fn foo2() {} }
-            fn main() {
-                Bar::VALUE;
-                Bar2::VALUE;
-            }
-            "#,
-        expect![[r#"
-            trait Foo { const VALUE: i32; const VALUE_2222: i32; }
-            pub struct Bar {}
-            impl Foo for Bar { const VALUE: i32 = 1;  const VALUE_2222: i32 = 2; }
-            pub struct Bar2 {}
-            impl Foo for Bar2 { const VALUE: i32 = 1;  const VALUE_2222: i32 = 2; }
-            impl Bar2 { fn foo2() {} }
-            fn main() {
-                Bar::VALUE;
-                Bar2::VALUE_2222;
-            }
-        "#]],
-    );
-}
-
-#[test]
-fn replace_path_in_different_contexts() {
-    // Note the <|> inside module a::b which marks the point where the rule is interpreted. We
-    // replace foo with bar, but both need different path qualifiers in different contexts. In f4,
-    // foo is unqualified because of a use statement, however the replacement needs to be fully
-    // qualified.
-    assert_ssr_transform(
-        "c::foo() ==>> c::bar()",
-        r#"
-            mod a {
-                pub mod b {<|>
-                    pub mod c {
-                        pub fn foo() {}
-                        pub fn bar() {}
-                        fn f1() { foo() }
-                    }
-                    fn f2() { c::foo() }
-                }
-                fn f3() { b::c::foo() }
-            }
-            use a::b::c::foo;
-            fn f4() { foo() }
-            "#,
-        expect![[r#"
-            mod a {
-                pub mod b {
-                    pub mod c {
-                        pub fn foo() {}
-                        pub fn bar() {}
-                        fn f1() { bar() }
-                    }
-                    fn f2() { c::bar() }
-                }
-                fn f3() { b::c::bar() }
-            }
-            use a::b::c::foo;
-            fn f4() { a::b::c::bar() }
-            "#]],
-    );
-}
-
-#[test]
-fn replace_associated_function_with_generics() {
-    assert_ssr_transform(
-        "c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()",
-        r#"
-            mod c {
-                pub struct Foo<T> {v: T}
-                impl<T> Foo<T> { pub fn new() {} }
-                fn f1() {
-                    Foo::<i32>::new();
-                }
-            }
-            mod d {
-                pub struct Bar<T> {v: T}
-                impl<T> Bar<T> { pub fn default() {} }
-                fn f1() {
-                    super::c::Foo::<i32>::new();
-                }
-            }
-            "#,
-        expect![[r#"
-            mod c {
-                pub struct Foo<T> {v: T}
-                impl<T> Foo<T> { pub fn new() {} }
-                fn f1() {
-                    crate::d::Bar::<i32>::default();
-                }
-            }
-            mod d {
-                pub struct Bar<T> {v: T}
-                impl<T> Bar<T> { pub fn default() {} }
-                fn f1() {
-                    Bar::<i32>::default();
-                }
-            }
-            "#]],
-    );
-}
-
-#[test]
-fn replace_type() {
-    assert_ssr_transform(
-        "Result<(), $a> ==>> Option<$a>",
-        "struct Result<T, E> {} struct Option<T> {} fn f1() -> Result<(), Vec<Error>> {foo()}",
-        expect![[
-            "struct Result<T, E> {} struct Option<T> {} fn f1() -> Option<Vec<Error>> {foo()}"
-        ]],
-    );
-}
-
-#[test]
-fn replace_macro_invocations() {
-    assert_ssr_transform(
-        "try!($a) ==>> $a?",
-        "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(try!(foo()));}",
-        expect![["macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}"]],
-    );
-    assert_ssr_transform(
-        "foo!($a($b)) ==>> foo($b, $a)",
-        "macro_rules! foo {() => {}} fn f1() {foo!(abc(def() + 2));}",
-        expect![["macro_rules! foo {() => {}} fn f1() {foo(def() + 2, abc);}"]],
-    );
-}
-
-#[test]
-fn replace_binary_op() {
-    assert_ssr_transform(
-        "$a + $b ==>> $b + $a",
-        "fn f() {2 * 3 + 4 * 5}",
-        expect![["fn f() {4 * 5 + 2 * 3}"]],
-    );
-    assert_ssr_transform(
-        "$a + $b ==>> $b + $a",
-        "fn f() {1 + 2 + 3 + 4}",
-        expect![[r#"fn f() {4 + (3 + (2 + 1))}"#]],
-    );
-}
-
-#[test]
-fn match_binary_op() {
-    assert_matches("$a + $b", "fn f() {1 + 2 + 3 + 4}", &["1 + 2", "1 + 2 + 3", "1 + 2 + 3 + 4"]);
-}
-
-#[test]
-fn multiple_rules() {
-    assert_ssr_transforms(
-        &["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"],
-        "fn add() {} fn add_one() {} fn f() -> i32 {3 + 2 + 1}",
-        expect![["fn add() {} fn add_one() {} fn f() -> i32 {add_one(add(3, 2))}"]],
-    )
-}
-
-#[test]
-fn multiple_rules_with_nested_matches() {
-    assert_ssr_transforms(
-        &["foo1($a) ==>> bar1($a)", "foo2($a) ==>> bar2($a)"],
-        r#"
-            fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
-            fn f() {foo1(foo2(foo1(foo2(foo1(42)))))}
-            "#,
-        expect![[r#"
-            fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
-            fn f() {bar1(bar2(bar1(bar2(bar1(42)))))}
-        "#]],
-    )
-}
-
-#[test]
-fn match_within_macro_invocation() {
-    let code = r#"
-            macro_rules! foo {
-                ($a:stmt; $b:expr) => {
-                    $b
-                };
-            }
-            struct A {}
-            impl A {
-                fn bar() {}
-            }
-            fn f1() {
-                let aaa = A {};
-                foo!(macro_ignores_this(); aaa.bar());
-            }
-        "#;
-    assert_matches("$a.bar()", code, &["aaa.bar()"]);
-}
-
-#[test]
-fn replace_within_macro_expansion() {
-    assert_ssr_transform(
-        "$a.foo() ==>> bar($a)",
-        r#"
-            macro_rules! macro1 {
-                ($a:expr) => {$a}
-            }
-            fn bar() {}
-            fn f() {macro1!(5.x().foo().o2())}
-            "#,
-        expect![[r#"
-            macro_rules! macro1 {
-                ($a:expr) => {$a}
-            }
-            fn bar() {}
-            fn f() {macro1!(bar(5.x()).o2())}
-            "#]],
-    )
-}
-
-#[test]
-fn replace_outside_and_within_macro_expansion() {
-    assert_ssr_transform(
-        "foo($a) ==>> bar($a)",
-        r#"
-            fn foo() {} fn bar() {}
-            macro_rules! macro1 {
-                ($a:expr) => {$a}
-            }
-            fn f() {foo(foo(macro1!(foo(foo(42)))))}
-            "#,
-        expect![[r#"
-            fn foo() {} fn bar() {}
-            macro_rules! macro1 {
-                ($a:expr) => {$a}
-            }
-            fn f() {bar(bar(macro1!(bar(bar(42)))))}
-        "#]],
-    )
-}
-
-#[test]
-fn preserves_whitespace_within_macro_expansion() {
-    assert_ssr_transform(
-        "$a + $b ==>> $b - $a",
-        r#"
-            macro_rules! macro1 {
-                ($a:expr) => {$a}
-            }
-            fn f() {macro1!(1   *   2 + 3 + 4}
-            "#,
-        expect![[r#"
-            macro_rules! macro1 {
-                ($a:expr) => {$a}
-            }
-            fn f() {macro1!(4 - (3 - 1   *   2)}
-            "#]],
-    )
-}
-
-#[test]
-fn add_parenthesis_when_necessary() {
-    assert_ssr_transform(
-        "foo($a) ==>> $a.to_string()",
-        r#"
-        fn foo(_: i32) {}
-        fn bar3(v: i32) {
-            foo(1 + 2);
-            foo(-v);
-        }
-        "#,
-        expect![[r#"
-            fn foo(_: i32) {}
-            fn bar3(v: i32) {
-                (1 + 2).to_string();
-                (-v).to_string();
-            }
-        "#]],
-    )
-}
-
-#[test]
-fn match_failure_reasons() {
-    let code = r#"
-        fn bar() {}
-        macro_rules! foo {
-            ($a:expr) => {
-                1 + $a + 2
-            };
-        }
-        fn f1() {
-            bar(1, 2);
-            foo!(5 + 43.to_string() + 5);
-        }
-        "#;
-    assert_match_failure_reason(
-        "bar($a, 3)",
-        code,
-        "bar(1, 2)",
-        r#"Pattern wanted token '3' (INT_NUMBER), but code had token '2' (INT_NUMBER)"#,
-    );
-    assert_match_failure_reason(
-        "42.to_string()",
-        code,
-        "43.to_string()",
-        r#"Pattern wanted token '42' (INT_NUMBER), but code had token '43' (INT_NUMBER)"#,
-    );
-}
-
-#[test]
-fn overlapping_possible_matches() {
-    // There are three possible matches here, however the middle one, `foo(foo(foo(42)))` shouldn't
-    // match because it overlaps with the outer match. The inner match is permitted since it is
-    // contained entirely within the placeholder of the outer match.
-    assert_matches(
-        "foo(foo($a))",
-        "fn foo() {} fn main() {foo(foo(foo(foo(42))))}",
-        &["foo(foo(42))", "foo(foo(foo(foo(42))))"],
-    );
-}
-
-#[test]
-fn use_declaration_with_braces() {
-    // It would be OK for a path rule to match and alter a use declaration. We shouldn't mess it up
-    // though. In particular, we must not change `use foo::{baz, bar}` to `use foo::{baz,
-    // foo2::bar2}`.
-    mark::check!(use_declaration_with_braces);
-    assert_ssr_transform(
-        "foo::bar ==>> foo2::bar2",
-        r#"
-        mod foo { pub fn bar() {} pub fn baz() {} }
-        mod foo2 { pub fn bar2() {} }
-        use foo::{baz, bar};
-        fn main() { bar() }
-        "#,
-        expect![["
-        mod foo { pub fn bar() {} pub fn baz() {} }
-        mod foo2 { pub fn bar2() {} }
-        use foo::{baz, bar};
-        fn main() { foo2::bar2() }
-        "]],
-    )
-}
-
-#[test]
-fn ufcs_matches_method_call() {
-    let code = r#"
-    struct Foo {}
-    impl Foo {
-        fn new(_: i32) -> Foo { Foo {} }
-        fn do_stuff(&self, _: i32) {}
-    }
-    struct Bar {}
-    impl Bar {
-        fn new(_: i32) -> Bar { Bar {} }
-        fn do_stuff(&self, v: i32) {}
-    }
-    fn main() {
-        let b = Bar {};
-        let f = Foo {};
-        b.do_stuff(1);
-        f.do_stuff(2);
-        Foo::new(4).do_stuff(3);
-        // Too many / too few args - should never match
-        f.do_stuff(2, 10);
-        f.do_stuff();
-    }
-    "#;
-    assert_matches("Foo::do_stuff($a, $b)", code, &["f.do_stuff(2)", "Foo::new(4).do_stuff(3)"]);
-    // The arguments need special handling in the case of a function call matching a method call
-    // and the first argument is different.
-    assert_matches("Foo::do_stuff($a, 2)", code, &["f.do_stuff(2)"]);
-    assert_matches("Foo::do_stuff(Foo::new(4), $b)", code, &["Foo::new(4).do_stuff(3)"]);
-
-    assert_ssr_transform(
-        "Foo::do_stuff(Foo::new($a), $b) ==>> Bar::new($b).do_stuff($a)",
-        code,
-        expect![[r#"
-            struct Foo {}
-            impl Foo {
-                fn new(_: i32) -> Foo { Foo {} }
-                fn do_stuff(&self, _: i32) {}
-            }
-            struct Bar {}
-            impl Bar {
-                fn new(_: i32) -> Bar { Bar {} }
-                fn do_stuff(&self, v: i32) {}
-            }
-            fn main() {
-                let b = Bar {};
-                let f = Foo {};
-                b.do_stuff(1);
-                f.do_stuff(2);
-                Bar::new(3).do_stuff(4);
-                // Too many / too few args - should never match
-                f.do_stuff(2, 10);
-                f.do_stuff();
-            }
-        "#]],
-    );
-}
-
-#[test]
-fn pattern_is_a_single_segment_path() {
-    mark::check!(pattern_is_a_single_segment_path);
-    // The first function should not be altered because the `foo` in scope at the cursor position is
-    // a different `foo`. This case is special because "foo" can be parsed as a pattern (IDENT_PAT ->
-    // NAME -> IDENT), which contains no path. If we're not careful we'll end up matching the `foo`
-    // in `let foo` from the first function. Whether we should match the `let foo` in the second
-    // function is less clear. At the moment, we don't. Doing so sounds like a rename operation,
-    // which isn't really what SSR is for, especially since the replacement `bar` must be able to be
-    // resolved, which means if we rename `foo` we'll get a name collision.
-    assert_ssr_transform(
-        "foo ==>> bar",
-        r#"
-        fn f1() -> i32 {
-            let foo = 1;
-            let bar = 2;
-            foo
-        }
-        fn f1() -> i32 {
-            let foo = 1;
-            let bar = 2;
-            foo<|>
-        }
-        "#,
-        expect![[r#"
-            fn f1() -> i32 {
-                let foo = 1;
-                let bar = 2;
-                foo
-            }
-            fn f1() -> i32 {
-                let foo = 1;
-                let bar = 2;
-                bar
-            }
-        "#]],
-    );
-}
-
-#[test]
-fn replace_local_variable_reference() {
-    // The pattern references a local variable `foo` in the block containing the cursor. We should
-    // only replace references to this variable `foo`, not other variables that just happen to have
-    // the same name.
-    mark::check!(cursor_after_semicolon);
-    assert_ssr_transform(
-        "foo + $a ==>> $a - foo",
-        r#"
-            fn bar1() -> i32 {
-                let mut res = 0;
-                let foo = 5;
-                res += foo + 1;
-                let foo = 10;
-                res += foo + 2;<|>
-                res += foo + 3;
-                let foo = 15;
-                res += foo + 4;
-                res
-            }
-            "#,
-        expect![[r#"
-            fn bar1() -> i32 {
-                let mut res = 0;
-                let foo = 5;
-                res += foo + 1;
-                let foo = 10;
-                res += 2 - foo;
-                res += 3 - foo;
-                let foo = 15;
-                res += foo + 4;
-                res
-            }
-        "#]],
-    )
-}
-
-#[test]
-fn replace_path_within_selection() {
-    assert_ssr_transform(
-        "foo ==>> bar",
-        r#"
-        fn main() {
-            let foo = 41;
-            let bar = 42;
-            do_stuff(foo);
-            do_stuff(foo);<|>
-            do_stuff(foo);
-            do_stuff(foo);<|>
-            do_stuff(foo);
-        }"#,
-        expect![[r#"
-            fn main() {
-                let foo = 41;
-                let bar = 42;
-                do_stuff(foo);
-                do_stuff(foo);
-                do_stuff(bar);
-                do_stuff(bar);
-                do_stuff(foo);
-            }"#]],
-    );
-}
-
-#[test]
-fn replace_nonpath_within_selection() {
-    mark::check!(replace_nonpath_within_selection);
-    assert_ssr_transform(
-        "$a + $b ==>> $b * $a",
-        r#"
-        fn main() {
-            let v = 1 + 2;<|>
-            let v2 = 3 + 3;
-            let v3 = 4 + 5;<|>
-            let v4 = 6 + 7;
-        }"#,
-        expect![[r#"
-            fn main() {
-                let v = 1 + 2;
-                let v2 = 3 * 3;
-                let v3 = 5 * 4;
-                let v4 = 6 + 7;
-            }"#]],
-    );
-}
-
-#[test]
-fn replace_self() {
-    // `foo(self)` occurs twice in the code, however only the first occurrence is the `self` that's
-    // in scope where the rule is invoked.
-    assert_ssr_transform(
-        "foo(self) ==>> bar(self)",
-        r#"
-        struct S1 {}
-        fn foo(_: &S1) {}
-        fn bar(_: &S1) {}
-        impl S1 {
-            fn f1(&self) {
-                foo(self)<|>
-            }
-            fn f2(&self) {
-                foo(self)
-            }
-        }
-        "#,
-        expect![[r#"
-            struct S1 {}
-            fn foo(_: &S1) {}
-            fn bar(_: &S1) {}
-            impl S1 {
-                fn f1(&self) {
-                    bar(self)
-                }
-                fn f2(&self) {
-                    foo(self)
-                }
-            }
-        "#]],
-    );
-}
-
-#[test]
-fn match_trait_method_call() {
-    // `Bar::foo` and `Bar2::foo` resolve to the same function. Make sure we only match if the type
-    // matches what's in the pattern. Also checks that we handle autoderef.
-    let code = r#"
-        pub struct Bar {}
-        pub struct Bar2 {}
-        pub trait Foo {
-            fn foo(&self, _: i32) {}
-        }
-        impl Foo for Bar {}
-        impl Foo for Bar2 {}
-        fn main() {
-            let v1 = Bar {};
-            let v2 = Bar2 {};
-            let v1_ref = &v1;
-            let v2_ref = &v2;
-            v1.foo(1);
-            v2.foo(2);
-            Bar::foo(&v1, 3);
-            Bar2::foo(&v2, 4);
-            v1_ref.foo(5);
-            v2_ref.foo(6);
-        }
-        "#;
-    assert_matches("Bar::foo($a, $b)", code, &["v1.foo(1)", "Bar::foo(&v1, 3)", "v1_ref.foo(5)"]);
-    assert_matches("Bar2::foo($a, $b)", code, &["v2.foo(2)", "Bar2::foo(&v2, 4)", "v2_ref.foo(6)"]);
-}
index da8c09c09a74b5ffbe8c9df5a165637b2ae98824..749cf648c6f7d25b91ed0f2030955c4e8dfda438 100644 (file)
@@ -48,7 +48,7 @@ toolchain = { path = "../toolchain" }
 # This should only be used in CLI
 base_db = { path = "../base_db" }
 ide_db = { path = "../ide_db" }
-ra_ssr = { path = "../ra_ssr" }
+ssr = { path = "../ssr" }
 hir = { path = "../hir" }
 hir_def = { path = "../hir_def" }
 hir_ty = { path = "../hir_ty" }
index d3081e88bd94176e4ad71d454b64b5f77c9f1514..0bc92431a9a0b7d69c15f11478fed5e56e61cfc9 100644 (file)
@@ -7,8 +7,8 @@
 
 use anyhow::{bail, Result};
 use pico_args::Arguments;
-use ra_ssr::{SsrPattern, SsrRule};
 use rust_analyzer::cli::{AnalysisStatsCmd, BenchCmd, BenchWhat, Position, Verbosity};
+use ssr::{SsrPattern, SsrRule};
 use vfs::AbsPathBuf;
 
 pub(crate) struct Args {
index 420abaccb93c609b55048269539a7e6013616bd8..b237a94d113ad7164a6d6ca3294522667c783963 100644 (file)
 use ra_ide::Analysis;
 use syntax::{AstNode, SourceFile};
 
-pub use analysis_bench::{BenchCmd, BenchWhat, Position};
-pub use analysis_stats::AnalysisStatsCmd;
-pub use diagnostics::diagnostics;
-pub use load_cargo::load_cargo;
-pub use ssr::{apply_ssr_rules, search_for_patterns};
+pub use self::{
+    analysis_bench::{BenchCmd, BenchWhat, Position},
+    analysis_stats::AnalysisStatsCmd,
+    diagnostics::diagnostics,
+    load_cargo::load_cargo,
+    ssr::{apply_ssr_rules, search_for_patterns},
+};
 
 #[derive(Clone, Copy)]
 pub enum Verbosity {
index 1357a93e1693413d4d69ab931a84c7e5507993a5..c11e109437191c97884162ca281ab5bda42d49e2 100644 (file)
@@ -1,7 +1,7 @@
 //! Applies structured search replace rules from the command line.
 
 use crate::cli::{load_cargo::load_cargo, Result};
-use ra_ssr::{MatchFinder, SsrPattern, SsrRule};
+use ssr::{MatchFinder, SsrPattern, SsrRule};
 
 pub fn apply_ssr_rules(rules: Vec<SsrRule>) -> Result<()> {
     use base_db::SourceDatabaseExt;
diff --git a/crates/ssr/Cargo.toml b/crates/ssr/Cargo.toml
new file mode 100644 (file)
index 0000000..cd05eee
--- /dev/null
@@ -0,0 +1,24 @@
+[package]
+name = "ssr"
+version = "0.1.0"
+description = "Structural search and replace of Rust code"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-analyzer/rust-analyzer"
+authors = ["rust-analyzer developers"]
+edition = "2018"
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash = "1.1.0"
+
+text_edit = { path = "../text_edit" }
+syntax = { path = "../syntax" }
+base_db = { path = "../base_db" }
+ide_db = { path = "../ide_db" }
+hir = { path = "../hir" }
+test_utils = { path = "../test_utils" }
+
+[dev-dependencies]
+expect = { path = "../expect" }
diff --git a/crates/ssr/src/errors.rs b/crates/ssr/src/errors.rs
new file mode 100644 (file)
index 0000000..c02baca
--- /dev/null
@@ -0,0 +1,29 @@
+//! Code relating to errors produced by SSR.
+
+/// Constructs an SsrError taking arguments like the format macro.
+macro_rules! _error {
+    ($fmt:expr) => {$crate::SsrError::new(format!($fmt))};
+    ($fmt:expr, $($arg:tt)+) => {$crate::SsrError::new(format!($fmt, $($arg)+))}
+}
+pub(crate) use _error as error;
+
+/// Returns from the current function with an error, supplied by arguments as for format!
+macro_rules! _bail {
+    ($($tokens:tt)*) => {return Err(crate::errors::error!($($tokens)*))}
+}
+pub(crate) use _bail as bail;
+
+#[derive(Debug, PartialEq)]
+pub struct SsrError(pub(crate) String);
+
+impl std::fmt::Display for SsrError {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(f, "Parse error: {}", self.0)
+    }
+}
+
+impl SsrError {
+    pub(crate) fn new(message: impl Into<String>) -> SsrError {
+        SsrError(message.into())
+    }
+}
diff --git a/crates/ssr/src/lib.rs b/crates/ssr/src/lib.rs
new file mode 100644 (file)
index 0000000..292bd5b
--- /dev/null
@@ -0,0 +1,338 @@
+//! Structural Search Replace
+//!
+//! Allows searching the AST for code that matches one or more patterns and then replacing that code
+//! based on a template.
+
+// Feature: Structural Search and Replace
+//
+// Search and replace with named wildcards that will match any expression, type, path, pattern or item.
+// The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`.
+// A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
+// Within a macro call, a placeholder will match up until whatever token follows the placeholder.
+//
+// All paths in both the search pattern and the replacement template must resolve in the context
+// in which this command is invoked. Paths in the search pattern will then match the code if they
+// resolve to the same item, even if they're written differently. For example if we invoke the
+// command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers
+// to `foo::Bar` will match.
+//
+// Paths in the replacement template will be rendered appropriately for the context in which the
+// replacement occurs. For example if our replacement template is `foo::Bar` and we match some
+// code in the `foo` module, we'll insert just `Bar`.
+//
+// Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will
+// match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`.
+//
+// The scope of the search / replace will be restricted to the current selection if any, otherwise
+// it will apply to the whole workspace.
+//
+// Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
+//
+// Supported constraints:
+//
+// |===
+// | Constraint    | Restricts placeholder
+//
+// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`)
+// | not(a)        | Negates the constraint `a`
+// |===
+//
+// Available via the command `rust-analyzer.ssr`.
+//
+// ```rust
+// // Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)]
+//
+// // BEFORE
+// String::from(foo(y + 5, z))
+//
+// // AFTER
+// String::from((y + 5).foo(z))
+// ```
+//
+// |===
+// | Editor  | Action Name
+//
+// | VS Code | **Rust Analyzer: Structural Search Replace**
+// |===
+
+mod matching;
+mod nester;
+mod parsing;
+mod replacing;
+mod resolving;
+mod search;
+#[macro_use]
+mod errors;
+#[cfg(test)]
+mod tests;
+
+use crate::errors::bail;
+pub use crate::errors::SsrError;
+pub use crate::matching::Match;
+use crate::matching::MatchFailureReason;
+use base_db::{FileId, FilePosition, FileRange};
+use hir::Semantics;
+use ide_db::source_change::SourceFileEdit;
+use resolving::ResolvedRule;
+use rustc_hash::FxHashMap;
+use syntax::{ast, AstNode, SyntaxNode, TextRange};
+
+// A structured search replace rule. Created by calling `parse` on a str.
+#[derive(Debug)]
+pub struct SsrRule {
+    /// A structured pattern that we're searching for.
+    pattern: parsing::RawPattern,
+    /// What we'll replace it with.
+    template: parsing::RawPattern,
+    parsed_rules: Vec<parsing::ParsedRule>,
+}
+
+#[derive(Debug)]
+pub struct SsrPattern {
+    raw: parsing::RawPattern,
+    parsed_rules: Vec<parsing::ParsedRule>,
+}
+
+#[derive(Debug, Default)]
+pub struct SsrMatches {
+    pub matches: Vec<Match>,
+}
+
+/// Searches a crate for pattern matches and possibly replaces them with something else.
+pub struct MatchFinder<'db> {
+    /// Our source of information about the user's code.
+    sema: Semantics<'db, ide_db::RootDatabase>,
+    rules: Vec<ResolvedRule>,
+    resolution_scope: resolving::ResolutionScope<'db>,
+    restrict_ranges: Vec<FileRange>,
+}
+
+impl<'db> MatchFinder<'db> {
+    /// Constructs a new instance where names will be looked up as if they appeared at
+    /// `lookup_context`.
+    pub fn in_context(
+        db: &'db ide_db::RootDatabase,
+        lookup_context: FilePosition,
+        mut restrict_ranges: Vec<FileRange>,
+    ) -> MatchFinder<'db> {
+        restrict_ranges.retain(|range| !range.range.is_empty());
+        let sema = Semantics::new(db);
+        let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context);
+        MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges }
+    }
+
+    /// Constructs an instance using the start of the first file in `db` as the lookup context.
+    pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
+        use base_db::SourceDatabaseExt;
+        use ide_db::symbol_index::SymbolsDatabase;
+        if let Some(first_file_id) = db
+            .local_roots()
+            .iter()
+            .next()
+            .and_then(|root| db.source_root(root.clone()).iter().next())
+        {
+            Ok(MatchFinder::in_context(
+                db,
+                FilePosition { file_id: first_file_id, offset: 0.into() },
+                vec![],
+            ))
+        } else {
+            bail!("No files to search");
+        }
+    }
+
+    /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take
+    /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to
+    /// match to it.
+    pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> {
+        for parsed_rule in rule.parsed_rules {
+            self.rules.push(ResolvedRule::new(
+                parsed_rule,
+                &self.resolution_scope,
+                self.rules.len(),
+            )?);
+        }
+        Ok(())
+    }
+
+    /// Finds matches for all added rules and returns edits for all found matches.
+    pub fn edits(&self) -> Vec<SourceFileEdit> {
+        use base_db::SourceDatabaseExt;
+        let mut matches_by_file = FxHashMap::default();
+        for m in self.matches().matches {
+            matches_by_file
+                .entry(m.range.file_id)
+                .or_insert_with(|| SsrMatches::default())
+                .matches
+                .push(m);
+        }
+        let mut edits = vec![];
+        for (file_id, matches) in matches_by_file {
+            let edit =
+                replacing::matches_to_edit(&matches, &self.sema.db.file_text(file_id), &self.rules);
+            edits.push(SourceFileEdit { file_id, edit });
+        }
+        edits
+    }
+
+    /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
+    /// intend to do replacement, use `add_rule` instead.
+    pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> {
+        for parsed_rule in pattern.parsed_rules {
+            self.rules.push(ResolvedRule::new(
+                parsed_rule,
+                &self.resolution_scope,
+                self.rules.len(),
+            )?);
+        }
+        Ok(())
+    }
+
+    /// Returns matches for all added rules.
+    pub fn matches(&self) -> SsrMatches {
+        let mut matches = Vec::new();
+        let mut usage_cache = search::UsageCache::default();
+        for rule in &self.rules {
+            self.find_matches_for_rule(rule, &mut usage_cache, &mut matches);
+        }
+        nester::nest_and_remove_collisions(matches, &self.sema)
+    }
+
+    /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
+    /// them, while recording reasons why they don't match. This API is useful for command
+    /// line-based debugging where providing a range is difficult.
+    pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
+        use base_db::SourceDatabaseExt;
+        let file = self.sema.parse(file_id);
+        let mut res = Vec::new();
+        let file_text = self.sema.db.file_text(file_id);
+        let mut remaining_text = file_text.as_str();
+        let mut base = 0;
+        let len = snippet.len() as u32;
+        while let Some(offset) = remaining_text.find(snippet) {
+            let start = base + offset as u32;
+            let end = start + len;
+            self.output_debug_for_nodes_at_range(
+                file.syntax(),
+                FileRange { file_id, range: TextRange::new(start.into(), end.into()) },
+                &None,
+                &mut res,
+            );
+            remaining_text = &remaining_text[offset + snippet.len()..];
+            base = end;
+        }
+        res
+    }
+
+    fn output_debug_for_nodes_at_range(
+        &self,
+        node: &SyntaxNode,
+        range: FileRange,
+        restrict_range: &Option<FileRange>,
+        out: &mut Vec<MatchDebugInfo>,
+    ) {
+        for node in node.children() {
+            let node_range = self.sema.original_range(&node);
+            if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range)
+            {
+                continue;
+            }
+            if node_range.range == range.range {
+                for rule in &self.rules {
+                    // For now we ignore rules that have a different kind than our node, otherwise
+                    // we get lots of noise. If at some point we add support for restricting rules
+                    // to a particular kind of thing (e.g. only match type references), then we can
+                    // relax this. We special-case expressions, since function calls can match
+                    // method calls.
+                    if rule.pattern.node.kind() != node.kind()
+                        && !(ast::Expr::can_cast(rule.pattern.node.kind())
+                            && ast::Expr::can_cast(node.kind()))
+                    {
+                        continue;
+                    }
+                    out.push(MatchDebugInfo {
+                        matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
+                            .map_err(|e| MatchFailureReason {
+                                reason: e.reason.unwrap_or_else(|| {
+                                    "Match failed, but no reason was given".to_owned()
+                                }),
+                            }),
+                        pattern: rule.pattern.node.clone(),
+                        node: node.clone(),
+                    });
+                }
+            } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
+                if let Some(expanded) = self.sema.expand(&macro_call) {
+                    if let Some(tt) = macro_call.token_tree() {
+                        self.output_debug_for_nodes_at_range(
+                            &expanded,
+                            range,
+                            &Some(self.sema.original_range(tt.syntax())),
+                            out,
+                        );
+                    }
+                }
+            }
+            self.output_debug_for_nodes_at_range(&node, range, restrict_range, out);
+        }
+    }
+}
+
+pub struct MatchDebugInfo {
+    node: SyntaxNode,
+    /// Our search pattern parsed as an expression or item, etc
+    pattern: SyntaxNode,
+    matched: Result<Match, MatchFailureReason>,
+}
+
+impl std::fmt::Debug for MatchDebugInfo {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match &self.matched {
+            Ok(_) => writeln!(f, "Node matched")?,
+            Err(reason) => writeln!(f, "Node failed to match because: {}", reason.reason)?,
+        }
+        writeln!(
+            f,
+            "============ AST ===========\n\
+            {:#?}",
+            self.node
+        )?;
+        writeln!(f, "========= PATTERN ==========")?;
+        writeln!(f, "{:#?}", self.pattern)?;
+        writeln!(f, "============================")?;
+        Ok(())
+    }
+}
+
+impl SsrMatches {
+    /// Returns `self` with any nested matches removed and made into top-level matches.
+    pub fn flattened(self) -> SsrMatches {
+        let mut out = SsrMatches::default();
+        self.flatten_into(&mut out);
+        out
+    }
+
+    fn flatten_into(self, out: &mut SsrMatches) {
+        for mut m in self.matches {
+            for p in m.placeholder_values.values_mut() {
+                std::mem::replace(&mut p.inner_matches, SsrMatches::default()).flatten_into(out);
+            }
+            out.matches.push(m);
+        }
+    }
+}
+
+impl Match {
+    pub fn matched_text(&self) -> String {
+        self.matched_node.text().to_string()
+    }
+}
+
+impl std::error::Error for SsrError {}
+
+#[cfg(test)]
+impl MatchDebugInfo {
+    pub(crate) fn match_failure_reason(&self) -> Option<&str> {
+        self.matched.as_ref().err().map(|r| r.reason.as_str())
+    }
+}
diff --git a/crates/ssr/src/matching.rs b/crates/ssr/src/matching.rs
new file mode 100644 (file)
index 0000000..ffc7202
--- /dev/null
@@ -0,0 +1,777 @@
+//! This module is responsible for matching a search pattern against a node in the AST. In the
+//! process of matching, placeholder values are recorded.
+
+use crate::{
+    parsing::{Constraint, NodeKind, Placeholder},
+    resolving::{ResolvedPattern, ResolvedRule, UfcsCallInfo},
+    SsrMatches,
+};
+use base_db::FileRange;
+use hir::Semantics;
+use rustc_hash::FxHashMap;
+use std::{cell::Cell, iter::Peekable};
+use syntax::ast::{AstNode, AstToken};
+use syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken};
+use test_utils::mark;
+
+// Creates a match error. If we're currently attempting to match some code that we thought we were
+// going to match, as indicated by the --debug-snippet flag, then populate the reason field.
+macro_rules! match_error {
+    // Single-expression form: the argument is rendered via `format!("{}", $e)`,
+    // i.e. with its Display impl. NOTE(review): this means any `{}` inside a
+    // bare string literal passed here is emitted verbatim, NOT substituted —
+    // callers wanting interpolation must use the fmt+args form below.
+    ($e:expr) => {{
+            MatchFailed {
+                reason: if recording_match_fail_reasons() {
+                    Some(format!("{}", $e))
+                } else {
+                    None
+                }
+            }
+    }};
+    // Format-string form: behaves like `format!`, but the formatting is only
+    // performed when fail reasons are being recorded.
+    ($fmt:expr, $($arg:tt)+) => {{
+        MatchFailed {
+            reason: if recording_match_fail_reasons() {
+                Some(format!($fmt, $($arg)+))
+            } else {
+                None
+            }
+        }
+    }};
+}
+
+// Fails the current match attempt, recording the supplied reason if we're recording match fail reasons.
+// Expands to an early `return Err(..)`, so it may only be used inside functions
+// returning `Result<_, MatchFailed>`.
+macro_rules! fail_match {
+    ($($args:tt)*) => {return Err(match_error!($($args)*))};
+}
+
+/// Information about a match that was found.
+#[derive(Debug)]
+pub struct Match {
+    // File and text range that the match covers in the original source.
+    pub(crate) range: FileRange,
+    // The node that matched the whole pattern.
+    pub(crate) matched_node: SyntaxNode,
+    // What each `$placeholder` in the pattern bound to.
+    pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>,
+    // Comments encountered (and skipped) while matching; preserved so that
+    // replacement can carry them over.
+    pub(crate) ignored_comments: Vec<ast::Comment>,
+    // Index of the rule (within the set of added rules) that produced this match.
+    pub(crate) rule_index: usize,
+    /// The depth of matched_node.
+    pub(crate) depth: usize,
+    // Each path in the template rendered for the module in which the match was found.
+    pub(crate) rendered_template_paths: FxHashMap<SyntaxNode, hir::ModPath>,
+}
+
+/// Represents a `$var` in an SSR query.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+// The wrapped String is the placeholder's identifier (see where matches insert
+// `Var(placeholder.ident.to_string())`).
+pub(crate) struct Var(pub String);
+
+/// Information about a placeholder bound in a match.
+#[derive(Debug)]
+pub(crate) struct PlaceholderMatch {
+    /// The node that the placeholder matched to. If set, then we'll search for further matches
+    /// within this node. It isn't set when we match tokens within a macro call's token tree.
+    pub(crate) node: Option<SyntaxNode>,
+    // Always present, even when `node` is `None` (token-tree matches record
+    // only the covered range).
+    pub(crate) range: FileRange,
+    /// More matches, found within `node`.
+    pub(crate) inner_matches: SsrMatches,
+}
+
+// Human-readable explanation of a match failure, used by MatchDebugInfo.
+#[derive(Debug)]
+pub(crate) struct MatchFailureReason {
+    pub(crate) reason: String,
+}
+
+/// An "error" indicating that matching failed. Use the fail_match! macro to create and return this.
+#[derive(Clone)]
+pub(crate) struct MatchFailed {
+    /// The reason why we failed to match. Only present when debug_active true in call to
+    /// `get_match`.
+    pub(crate) reason: Option<String>,
+}
+
+/// Checks if `code` matches the search pattern found in `search_scope`, returning information about
+/// the match, if it does. Since we only do matching in this module and searching is done by the
+/// parent module, we don't populate nested matches.
+///
+/// `debug_active` enables recording of failure reasons (via the thread-local
+/// flag) for the duration of this one match attempt.
+pub(crate) fn get_match(
+    debug_active: bool,
+    rule: &ResolvedRule,
+    code: &SyntaxNode,
+    restrict_range: &Option<FileRange>,
+    sema: &Semantics<ide_db::RootDatabase>,
+) -> Result<Match, MatchFailed> {
+    record_match_fails_reasons_scope(debug_active, || {
+        Matcher::try_match(rule, code, restrict_range, sema)
+    })
+}
+
+/// Checks if our search pattern matches a particular node of the AST.
+struct Matcher<'db, 'sema> {
+    sema: &'sema Semantics<'db, ide_db::RootDatabase>,
+    /// If any placeholders come from anywhere outside of this range, then the match will be
+    /// rejected.
+    restrict_range: Option<FileRange>,
+    // The rule being matched; borrowed for the duration of one match attempt.
+    rule: &'sema ResolvedRule,
+}
+
+/// Which phase of matching we're currently performing. We do two phases because most attempted
+/// matches will fail and it means we can defer more expensive checks to the second phase.
+enum Phase<'a> {
+    /// On the first phase, we perform cheap checks. No state is mutated and nothing is recorded.
+    First,
+    /// On the second phase, we construct the `Match`. Things like what placeholders bind to is
+    /// recorded.
+    Second(&'a mut Match),
+}
+
+impl<'db, 'sema> Matcher<'db, 'sema> {
+    /// Runs the two-phase match of `rule`'s pattern against `code`, returning
+    /// the populated `Match` on success.
+    fn try_match(
+        rule: &ResolvedRule,
+        code: &SyntaxNode,
+        restrict_range: &Option<FileRange>,
+        sema: &'sema Semantics<'db, ide_db::RootDatabase>,
+    ) -> Result<Match, MatchFailed> {
+        let match_state = Matcher { sema, restrict_range: restrict_range.clone(), rule };
+        // First pass at matching, where we check that node types and idents match.
+        match_state.attempt_match_node(&mut Phase::First, &rule.pattern.node, code)?;
+        // NOTE(review): `sema.original_range(code)` is computed here and again
+        // just below; the first result could be reused.
+        match_state.validate_range(&sema.original_range(code))?;
+        let mut the_match = Match {
+            range: sema.original_range(code),
+            matched_node: code.clone(),
+            placeholder_values: FxHashMap::default(),
+            ignored_comments: Vec::new(),
+            rule_index: rule.index,
+            depth: 0,
+            rendered_template_paths: FxHashMap::default(),
+        };
+        // Second matching pass, where we record placeholder matches, ignored comments and maybe do
+        // any other more expensive checks that we didn't want to do on the first pass.
+        match_state.attempt_match_node(
+            &mut Phase::Second(&mut the_match),
+            &rule.pattern.node,
+            code,
+        )?;
+        the_match.depth = sema.ancestors_with_macros(the_match.matched_node.clone()).count();
+        if let Some(template) = &rule.template {
+            the_match.render_template_paths(template, sema)?;
+        }
+        Ok(the_match)
+    }
+
+    /// Checks that `range` is within the permitted range if any. This is applicable when we're
+    /// processing a macro expansion and we want to fail the match if we're working with a node that
+    /// didn't originate from the token tree of the macro call.
+    fn validate_range(&self, range: &FileRange) -> Result<(), MatchFailed> {
+        if let Some(restrict_range) = &self.restrict_range {
+            // Reject ranges in a different file as well as ranges not fully
+            // contained by the restriction.
+            if restrict_range.file_id != range.file_id
+                || !restrict_range.range.contains_range(range.range)
+            {
+                fail_match!("Node originated from a macro");
+            }
+        }
+        Ok(())
+    }
+
+    /// Core recursive entry point: matches the pattern node `pattern` against
+    /// the code node `code`, dispatching to specialized matchers for
+    /// placeholders, UFCS calls, record literals, token trees and paths.
+    fn attempt_match_node(
+        &self,
+        phase: &mut Phase,
+        pattern: &SyntaxNode,
+        code: &SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        // Handle placeholders.
+        if let Some(placeholder) = self.get_placeholder(&SyntaxElement::Node(pattern.clone())) {
+            for constraint in &placeholder.constraints {
+                self.check_constraint(constraint, code)?;
+            }
+            if let Phase::Second(matches_out) = phase {
+                let original_range = self.sema.original_range(code);
+                // We validated the range for the node when we started the match, so the placeholder
+                // probably can't fail range validation, but just to be safe...
+                self.validate_range(&original_range)?;
+                matches_out.placeholder_values.insert(
+                    Var(placeholder.ident.to_string()),
+                    PlaceholderMatch::new(code, original_range),
+                );
+            }
+            return Ok(());
+        }
+        // We allow a UFCS call to match a method call, provided they resolve to the same function.
+        if let Some(pattern_ufcs) = self.rule.pattern.ufcs_function_calls.get(pattern) {
+            if let Some(code) = ast::MethodCallExpr::cast(code.clone()) {
+                return self.attempt_match_ufcs_to_method_call(phase, pattern_ufcs, &code);
+            }
+            if let Some(code) = ast::CallExpr::cast(code.clone()) {
+                return self.attempt_match_ufcs_to_ufcs(phase, pattern_ufcs, &code);
+            }
+        }
+        if pattern.kind() != code.kind() {
+            fail_match!(
+                "Pattern had `{}` ({:?}), code had `{}` ({:?})",
+                pattern.text(),
+                pattern.kind(),
+                code.text(),
+                code.kind()
+            );
+        }
+        // Some kinds of nodes have special handling. For everything else, we fall back to default
+        // matching.
+        match code.kind() {
+            SyntaxKind::RECORD_EXPR_FIELD_LIST => {
+                self.attempt_match_record_field_list(phase, pattern, code)
+            }
+            SyntaxKind::TOKEN_TREE => self.attempt_match_token_tree(phase, pattern, code),
+            SyntaxKind::PATH => self.attempt_match_path(phase, pattern, code),
+            _ => self.attempt_match_node_children(phase, pattern, code),
+        }
+    }
+
+    /// Default matching: pair up the children (nodes and tokens) of `pattern`
+    /// and `code` in order.
+    fn attempt_match_node_children(
+        &self,
+        phase: &mut Phase,
+        pattern: &SyntaxNode,
+        code: &SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        self.attempt_match_sequences(
+            phase,
+            PatternIterator::new(pattern),
+            code.children_with_tokens(),
+        )
+    }
+
+    /// Walks two child sequences in lock-step (trivia already filtered from the
+    /// pattern side, skipped on the code side), failing if either sequence runs
+    /// out while the other still has elements.
+    fn attempt_match_sequences(
+        &self,
+        phase: &mut Phase,
+        pattern_it: PatternIterator,
+        mut code_it: SyntaxElementChildren,
+    ) -> Result<(), MatchFailed> {
+        let mut pattern_it = pattern_it.peekable();
+        loop {
+            match phase.next_non_trivial(&mut code_it) {
+                None => {
+                    if let Some(p) = pattern_it.next() {
+                        fail_match!("Part of the pattern was unmatched: {:?}", p);
+                    }
+                    return Ok(());
+                }
+                Some(SyntaxElement::Token(c)) => {
+                    self.attempt_match_token(phase, &mut pattern_it, &c)?;
+                }
+                Some(SyntaxElement::Node(c)) => match pattern_it.next() {
+                    Some(SyntaxElement::Node(p)) => {
+                        self.attempt_match_node(phase, &p, &c)?;
+                    }
+                    Some(p) => fail_match!("Pattern wanted '{}', code has {}", p, c.text()),
+                    None => fail_match!("Pattern reached end, code has {}", c.text()),
+                },
+            }
+        }
+    }
+
+    /// Matches a single code token against the next pattern element, with
+    /// special tolerance for trailing commas in either direction.
+    fn attempt_match_token(
+        &self,
+        phase: &mut Phase,
+        pattern: &mut Peekable<PatternIterator>,
+        code: &syntax::SyntaxToken,
+    ) -> Result<(), MatchFailed> {
+        phase.record_ignored_comments(code);
+        // Ignore whitespace and comments.
+        if code.kind().is_trivia() {
+            return Ok(());
+        }
+        if let Some(SyntaxElement::Token(p)) = pattern.peek() {
+            // If the code has a comma and the pattern is about to close something, then accept the
+            // comma without advancing the pattern. i.e. ignore trailing commas.
+            if code.kind() == SyntaxKind::COMMA && is_closing_token(p.kind()) {
+                return Ok(());
+            }
+            // Conversely, if the pattern has a comma and the code doesn't, skip that part of the
+            // pattern and continue to match the code.
+            if p.kind() == SyntaxKind::COMMA && is_closing_token(code.kind()) {
+                pattern.next();
+            }
+        }
+        // Consume an element from the pattern and make sure it matches.
+        match pattern.next() {
+            Some(SyntaxElement::Token(p)) => {
+                // Tokens must agree on both kind and exact text.
+                if p.kind() != code.kind() || p.text() != code.text() {
+                    fail_match!(
+                        "Pattern wanted token '{}' ({:?}), but code had token '{}' ({:?})",
+                        p.text(),
+                        p.kind(),
+                        code.text(),
+                        code.kind()
+                    )
+                }
+            }
+            Some(SyntaxElement::Node(p)) => {
+                // Not sure if this is actually reachable.
+                fail_match!(
+                    "Pattern wanted {:?}, but code had token '{}' ({:?})",
+                    p,
+                    code.text(),
+                    code.kind()
+                );
+            }
+            None => {
+                fail_match!("Pattern exhausted, while code remains: `{}`", code.text());
+            }
+        }
+        Ok(())
+    }
+
+    /// Evaluates a placeholder constraint against the code node. `Not` inverts
+    /// the result of its sub-constraint.
+    fn check_constraint(
+        &self,
+        constraint: &Constraint,
+        code: &SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        match constraint {
+            Constraint::Kind(kind) => {
+                kind.matches(code)?;
+            }
+            Constraint::Not(sub) => {
+                // If the negated sub-constraint holds, the `Not` constraint fails.
+                if self.check_constraint(&*sub, code).is_ok() {
+                    fail_match!("Constraint {:?} failed for '{}'", constraint, code.text());
+                }
+            }
+        }
+        Ok(())
+    }
+
+    /// Paths are matched based on whether they refer to the same thing, even if they're written
+    /// differently.
+    fn attempt_match_path(
+        &self,
+        phase: &mut Phase,
+        pattern: &SyntaxNode,
+        code: &SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        if let Some(pattern_resolved) = self.rule.pattern.resolved_paths.get(pattern) {
+            let pattern_path = ast::Path::cast(pattern.clone()).unwrap();
+            let code_path = ast::Path::cast(code.clone()).unwrap();
+            if let (Some(pattern_segment), Some(code_segment)) =
+                (pattern_path.segment(), code_path.segment())
+            {
+                // Match everything within the segment except for the name-ref, which is handled
+                // separately via comparing what the path resolves to below.
+                self.attempt_match_opt(
+                    phase,
+                    pattern_segment.generic_arg_list(),
+                    code_segment.generic_arg_list(),
+                )?;
+                self.attempt_match_opt(
+                    phase,
+                    pattern_segment.param_list(),
+                    code_segment.param_list(),
+                )?;
+            }
+            // Resolution is comparatively expensive, so it's deferred to the
+            // second phase.
+            if matches!(phase, Phase::Second(_)) {
+                let resolution = self
+                    .sema
+                    .resolve_path(&code_path)
+                    .ok_or_else(|| match_error!("Failed to resolve path `{}`", code.text()))?;
+                if pattern_resolved.resolution != resolution {
+                    fail_match!("Pattern had path `{}` code had `{}`", pattern.text(), code.text());
+                }
+            }
+        } else {
+            // The pattern path didn't resolve to anything; fall back to purely
+            // syntactic matching.
+            return self.attempt_match_node_children(phase, pattern, code);
+        }
+        Ok(())
+    }
+
+    /// Matches two optional AST nodes: both absent succeeds, both present
+    /// recurses, one-sided presence fails with an explanatory message.
+    fn attempt_match_opt<T: AstNode>(
+        &self,
+        phase: &mut Phase,
+        pattern: Option<T>,
+        code: Option<T>,
+    ) -> Result<(), MatchFailed> {
+        match (pattern, code) {
+            (Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()),
+            (None, None) => Ok(()),
+            (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()),
+            (None, Some(c)) => {
+                fail_match!("Nothing in pattern to match code `{}`", c.syntax().text())
+            }
+        }
+    }
+
+    /// We want to allow the records to match in any order, so we have special matching logic for
+    /// them.
+    fn attempt_match_record_field_list(
+        &self,
+        phase: &mut Phase,
+        pattern: &SyntaxNode,
+        code: &SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        // Build a map keyed by field name.
+        let mut fields_by_name = FxHashMap::default();
+        for child in code.children() {
+            if let Some(record) = ast::RecordExprField::cast(child.clone()) {
+                if let Some(name) = record.field_name() {
+                    fields_by_name.insert(name.text().clone(), child.clone());
+                }
+            }
+        }
+        for p in pattern.children_with_tokens() {
+            if let SyntaxElement::Node(p) = p {
+                if let Some(name_element) = p.first_child_or_token() {
+                    if self.get_placeholder(&name_element).is_some() {
+                        // If the pattern is using placeholders for field names then order
+                        // independence doesn't make sense. Fall back to regular ordered
+                        // matching.
+                        return self.attempt_match_node_children(phase, pattern, code);
+                    }
+                    if let Some(ident) = only_ident(name_element) {
+                        // Fields are removed as they match, so leftovers below
+                        // indicate code fields the pattern didn't mention.
+                        let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| {
+                            match_error!(
+                                "Placeholder has record field '{}', but code doesn't",
+                                ident
+                            )
+                        })?;
+                        self.attempt_match_node(phase, &p, &code_record)?;
+                    }
+                }
+            }
+        }
+        // NOTE(review): `unmatched_fields` is a single field name (the first
+        // remaining key), despite the plural name.
+        if let Some(unmatched_fields) = fields_by_name.keys().next() {
+            fail_match!(
+                "{} field(s) of a record literal failed to match, starting with {}",
+                fields_by_name.len(),
+                unmatched_fields
+            );
+        }
+        Ok(())
+    }
+
+    /// Outside of token trees, a placeholder can only match a single AST node, whereas in a token
+    /// tree it can match a sequence of tokens. Note, that this code will only be used when the
+    /// pattern matches the macro invocation. For matches within the macro call, we'll already have
+    /// expanded the macro.
+    fn attempt_match_token_tree(
+        &self,
+        phase: &mut Phase,
+        pattern: &SyntaxNode,
+        code: &syntax::SyntaxNode,
+    ) -> Result<(), MatchFailed> {
+        let mut pattern = PatternIterator::new(pattern).peekable();
+        let mut children = code.children_with_tokens();
+        while let Some(child) = children.next() {
+            if let Some(placeholder) = pattern.peek().and_then(|p| self.get_placeholder(p)) {
+                pattern.next();
+                // The placeholder is greedy up to (but excluding) the token
+                // that follows it in the pattern, if any.
+                let next_pattern_token = pattern
+                    .peek()
+                    .and_then(|p| match p {
+                        SyntaxElement::Token(t) => Some(t.clone()),
+                        SyntaxElement::Node(n) => n.first_token(),
+                    })
+                    .map(|p| p.text().to_string());
+                let first_matched_token = child.clone();
+                let mut last_matched_token = child;
+                // Read code tokens util we reach one equal to the next token from our pattern
+                // or we reach the end of the token tree.
+                while let Some(next) = children.next() {
+                    match &next {
+                        SyntaxElement::Token(t) => {
+                            if Some(t.to_string()) == next_pattern_token {
+                                pattern.next();
+                                break;
+                            }
+                        }
+                        SyntaxElement::Node(n) => {
+                            if let Some(first_token) = n.first_token() {
+                                if Some(first_token.to_string()) == next_pattern_token {
+                                    if let Some(SyntaxElement::Node(p)) = pattern.next() {
+                                        // We have a subtree that starts with the next token in our pattern.
+                                        self.attempt_match_token_tree(phase, &p, &n)?;
+                                        break;
+                                    }
+                                }
+                            }
+                        }
+                    };
+                    last_matched_token = next;
+                }
+                if let Phase::Second(match_out) = phase {
+                    // Token-tree placeholders record only the covered range —
+                    // no node — so no nested matching happens inside them.
+                    match_out.placeholder_values.insert(
+                        Var(placeholder.ident.to_string()),
+                        PlaceholderMatch::from_range(FileRange {
+                            file_id: self.sema.original_range(code).file_id,
+                            range: first_matched_token
+                                .text_range()
+                                .cover(last_matched_token.text_range()),
+                        }),
+                    );
+                }
+                continue;
+            }
+            // Match literal (non-placeholder) tokens.
+            match child {
+                SyntaxElement::Token(token) => {
+                    self.attempt_match_token(phase, &mut pattern, &token)?;
+                }
+                SyntaxElement::Node(node) => match pattern.next() {
+                    Some(SyntaxElement::Node(p)) => {
+                        self.attempt_match_token_tree(phase, &p, &node)?;
+                    }
+                    Some(SyntaxElement::Token(p)) => fail_match!(
+                        "Pattern has token '{}', code has subtree '{}'",
+                        p.text(),
+                        node.text()
+                    ),
+                    None => fail_match!("Pattern has nothing, code has '{}'", node.text()),
+                },
+            }
+        }
+        if let Some(p) = pattern.next() {
+            fail_match!("Reached end of token tree in code, but pattern still has {:?}", p);
+        }
+        Ok(())
+    }
+
+    /// Matches a UFCS pattern like `Foo::bar($a, $b)` against a method call
+    /// `x.bar(y)`: the functions must resolve to the same definition, the
+    /// receiver matches the first pattern argument, then remaining arguments
+    /// pair up positionally.
+    fn attempt_match_ufcs_to_method_call(
+        &self,
+        phase: &mut Phase,
+        pattern_ufcs: &UfcsCallInfo,
+        code: &ast::MethodCallExpr,
+    ) -> Result<(), MatchFailed> {
+        use ast::ArgListOwner;
+        let code_resolved_function = self
+            .sema
+            .resolve_method_call(code)
+            .ok_or_else(|| match_error!("Failed to resolve method call"))?;
+        if pattern_ufcs.function != code_resolved_function {
+            fail_match!("Method call resolved to a different function");
+        }
+        if code_resolved_function.has_self_param(self.sema.db) {
+            if let (Some(pattern_type), Some(expr)) = (&pattern_ufcs.qualifier_type, &code.expr()) {
+                self.check_expr_type(pattern_type, expr)?;
+            }
+        }
+        // Check arguments.
+        let mut pattern_args = pattern_ufcs
+            .call_expr
+            .arg_list()
+            .ok_or_else(|| match_error!("Pattern function call has no args"))?
+            .args();
+        // The method receiver corresponds to the UFCS call's first argument.
+        self.attempt_match_opt(phase, pattern_args.next(), code.expr())?;
+        let mut code_args =
+            code.arg_list().ok_or_else(|| match_error!("Code method call has no args"))?.args();
+        loop {
+            match (pattern_args.next(), code_args.next()) {
+                (None, None) => return Ok(()),
+                // attempt_match_opt fails on a one-sided Some, ending the loop.
+                (p, c) => self.attempt_match_opt(phase, p, c)?,
+            }
+        }
+    }
+
+    /// Matches a UFCS pattern against a UFCS call in the code: checks the
+    /// receiver (first argument) type, then matches the calls structurally.
+    fn attempt_match_ufcs_to_ufcs(
+        &self,
+        phase: &mut Phase,
+        pattern_ufcs: &UfcsCallInfo,
+        code: &ast::CallExpr,
+    ) -> Result<(), MatchFailed> {
+        use ast::ArgListOwner;
+        // Check that the first argument is the expected type.
+        if let (Some(pattern_type), Some(expr)) = (
+            &pattern_ufcs.qualifier_type,
+            &code.arg_list().and_then(|code_args| code_args.args().next()),
+        ) {
+            self.check_expr_type(pattern_type, expr)?;
+        }
+        self.attempt_match_node_children(phase, pattern_ufcs.call_expr.syntax(), code.syntax())
+    }
+
+    /// Checks that `expr`'s type — possibly after auto-deref — equals
+    /// `pattern_type`, mirroring how a method receiver would be adjusted.
+    fn check_expr_type(
+        &self,
+        pattern_type: &hir::Type,
+        expr: &ast::Expr,
+    ) -> Result<(), MatchFailed> {
+        use hir::HirDisplay;
+        let code_type = self.sema.type_of_expr(&expr).ok_or_else(|| {
+            match_error!("Failed to get receiver type for `{}`", expr.syntax().text())
+        })?;
+        if !code_type
+            .autoderef(self.sema.db)
+            .any(|deref_code_type| *pattern_type == deref_code_type)
+        {
+            fail_match!(
+                "Pattern type `{}` didn't match code type `{}`",
+                pattern_type.display(self.sema.db),
+                code_type.display(self.sema.db)
+            );
+        }
+        Ok(())
+    }
+
+    /// If `element` consists of nothing but a single identifier that names a
+    /// placeholder in the current rule, returns that placeholder.
+    fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> {
+        only_ident(element.clone()).and_then(|ident| self.rule.get_placeholder(&ident))
+    }
+}
+
+impl Match {
+    /// For each resolved path in the replacement template, computes how that
+    /// path should be written from the module containing this match and stores
+    /// it in `rendered_template_paths`.
+    fn render_template_paths(
+        &mut self,
+        template: &ResolvedPattern,
+        sema: &Semantics<ide_db::RootDatabase>,
+    ) -> Result<(), MatchFailed> {
+        let module = sema
+            .scope(&self.matched_node)
+            .module()
+            .ok_or_else(|| match_error!("Matched node isn't in a module"))?;
+        for (path, resolved_path) in &template.resolved_paths {
+            if let hir::PathResolution::Def(module_def) = resolved_path.resolution {
+                let mod_path = module.find_use_path(sema.db, module_def).ok_or_else(|| {
+                    // BUG(review): this format string contains `{}` but no
+                    // argument. The single-expression arm of `match_error!`
+                    // renders its argument via `format!("{}", $e)`, so the
+                    // `{}` is emitted literally instead of naming the failing
+                    // path. Should be `match_error!("... `{}` ...", path)`.
+                    match_error!("Failed to render template path `{}` at match location")
+                })?;
+                self.rendered_template_paths.insert(path.clone(), mod_path);
+            }
+        }
+        Ok(())
+    }
+}
+
+impl Phase<'_> {
+    /// Advances `code_it` past whitespace/comments, returning the next
+    /// non-trivia element (or `None` at the end). Comments that are skipped are
+    /// recorded when we're in the second phase.
+    fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> {
+        loop {
+            let c = code_it.next();
+            if let Some(SyntaxElement::Token(t)) = &c {
+                self.record_ignored_comments(t);
+                if t.kind().is_trivia() {
+                    continue;
+                }
+            }
+            return c;
+        }
+    }
+
+    /// Appends `token` to the match's `ignored_comments` — only when it is a
+    /// comment and we're in the recording (second) phase.
+    fn record_ignored_comments(&mut self, token: &SyntaxToken) {
+        if token.kind() == SyntaxKind::COMMENT {
+            if let Phase::Second(match_out) = self {
+                if let Some(comment) = ast::Comment::cast(token.clone()) {
+                    match_out.ignored_comments.push(comment);
+                }
+            }
+        }
+    }
+}
+
+// True for tokens that close a delimited group: `)`, `}` or `]`. Used by the
+// trailing-comma tolerance in `attempt_match_token`.
+fn is_closing_token(kind: SyntaxKind) -> bool {
+    kind == SyntaxKind::R_PAREN || kind == SyntaxKind::R_CURLY || kind == SyntaxKind::R_BRACK
+}
+
+/// Runs `f` with fail-reason recording enabled iff `debug_active`, restoring
+/// the flag to `false` afterwards.
+// NOTE(review): the bound could be `FnOnce() -> T` since `f` is called exactly
+// once; `Fn` needlessly restricts callers.
+pub(crate) fn record_match_fails_reasons_scope<F, T>(debug_active: bool, f: F) -> T
+where
+    F: Fn() -> T,
+{
+    RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(debug_active));
+    let res = f();
+    RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(false));
+    res
+}
+
+// For performance reasons, we don't want to record the reason why every match fails, only the bit
+// of code that the user indicated they thought would match. We use a thread local to indicate when
+// we are trying to match that bit of code. This saves us having to pass a boolean into all the bits
+// of code that can make the decision to not match.
+thread_local! {
+    pub static RECORDING_MATCH_FAIL_REASONS: Cell<bool> = Cell::new(false);
+}
+
+// Reads the thread-local flag set by `record_match_fails_reasons_scope`.
+fn recording_match_fail_reasons() -> bool {
+    RECORDING_MATCH_FAIL_REASONS.with(|c| c.get())
+}
+
+impl PlaceholderMatch {
+    /// A placeholder bound to a full AST node (nested matching will happen
+    /// inside `node` later).
+    fn new(node: &SyntaxNode, range: FileRange) -> Self {
+        Self { node: Some(node.clone()), range, inner_matches: SsrMatches::default() }
+    }
+
+    /// A placeholder bound only to a text range — used for token-tree matches,
+    /// where there is no single node.
+    fn from_range(range: FileRange) -> Self {
+        Self { node: None, range, inner_matches: SsrMatches::default() }
+    }
+}
+
+impl NodeKind {
+    /// Checks whether `node` is of this kind (currently only `Literal`),
+    /// failing the match with an explanation otherwise.
+    fn matches(&self, node: &SyntaxNode) -> Result<(), MatchFailed> {
+        let ok = match self {
+            Self::Literal => {
+                // Coverage marker so tests can assert this path was exercised.
+                mark::hit!(literal_constraint);
+                ast::Literal::can_cast(node.kind())
+            }
+        };
+        if !ok {
+            fail_match!("Code '{}' isn't of kind {:?}", node.text(), self);
+        }
+        Ok(())
+    }
+}
+
+// If `node` contains nothing but an ident then return it, otherwise return None.
+// Recurses through single-child wrapper nodes (e.g. a path whose only content
+// is one identifier).
+fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> {
+    match element {
+        SyntaxElement::Token(t) => {
+            if t.kind() == SyntaxKind::IDENT {
+                return Some(t);
+            }
+        }
+        SyntaxElement::Node(n) => {
+            // Exactly one child (token or node): descend; otherwise give up.
+            let mut children = n.children_with_tokens();
+            if let (Some(only_child), None) = (children.next(), children.next()) {
+                return only_ident(only_child);
+            }
+        }
+    }
+    None
+}
+
+/// Iterates over a pattern node's children, skipping trivia (whitespace and
+/// comments) so the matcher only ever sees significant elements.
+struct PatternIterator {
+    iter: SyntaxElementChildren,
+}
+
+impl Iterator for PatternIterator {
+    type Item = SyntaxElement;
+
+    fn next(&mut self) -> Option<SyntaxElement> {
+        // Skip over trivia until we find a significant element.
+        while let Some(element) = self.iter.next() {
+            if !element.kind().is_trivia() {
+                return Some(element);
+            }
+        }
+        None
+    }
+}
+
+impl PatternIterator {
+    fn new(parent: &SyntaxNode) -> Self {
+        Self { iter: parent.children_with_tokens() }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::{MatchFinder, SsrRule};
+
+    // End-to-end smoke test: parse a rule, find its single match, check the
+    // placeholder binding, then apply the resulting edit.
+    #[test]
+    fn parse_match_replace() {
+        let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
+        let input = "fn foo() {} fn bar() {} fn main() { foo(1+2); }";
+
+        let (db, position, selections) = crate::tests::single_file(input);
+        let mut match_finder = MatchFinder::in_context(&db, position, selections);
+        match_finder.add_rule(rule).unwrap();
+        let matches = match_finder.matches();
+        assert_eq!(matches.matches.len(), 1);
+        assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)");
+        assert_eq!(matches.matches[0].placeholder_values.len(), 1);
+        // `$x` should have captured the argument expression verbatim.
+        assert_eq!(
+            matches.matches[0].placeholder_values[&Var("x".to_string())]
+                .node
+                .as_ref()
+                .unwrap()
+                .text(),
+            "1+2"
+        );
+
+        let edits = match_finder.edits();
+        assert_eq!(edits.len(), 1);
+        let edit = &edits[0];
+        let mut after = input.to_string();
+        edit.edit.apply(&mut after);
+        assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");
+    }
+}
diff --git a/crates/ssr/src/nester.rs b/crates/ssr/src/nester.rs
new file mode 100644 (file)
index 0000000..6ac355d
--- /dev/null
@@ -0,0 +1,94 @@
+//! Converts a flat collection of matches into a nested form suitable for replacement. When there
+//! are multiple matches for a node, or that overlap, priority is given to the earlier rule. Nested
+//! matches are only permitted if the inner match is contained entirely within a placeholder of an
+//! outer match.
+//!
+//! For example, if our search pattern is `foo(foo($a))` and the code had `foo(foo(foo(foo(42))))`,
+//! then we'll get 3 matches, however only the outermost and innermost matches can be accepted. The
+//! middle match would take the second `foo` from the outer match.
+
+use crate::{Match, SsrMatches};
+use rustc_hash::FxHashMap;
+use syntax::SyntaxNode;
+
+/// Builds an `SsrMatches` from a flat list of matches, discarding matches that
+/// conflict with a higher-priority match and nesting matches that fall
+/// entirely within a placeholder of an outer match.
+pub(crate) fn nest_and_remove_collisions(
+    mut matches: Vec<Match>,
+    sema: &hir::Semantics<ide_db::RootDatabase>,
+) -> SsrMatches {
+    // We sort the matches by depth then by rule index. Sorting by depth means that by the time we
+    // see a match, any parent matches or conflicting matches will have already been seen. Sorting
+    // by rule_index means that if there are two matches for the same node, the rule added first
+    // will take precedence.
+    matches.sort_by(|a, b| a.depth.cmp(&b.depth).then_with(|| a.rule_index.cmp(&b.rule_index)));
+    let mut collector = MatchCollector::default();
+    for m in matches {
+        collector.add_match(m, sema);
+    }
+    collector.into()
+}
+
+#[derive(Default)]
+struct MatchCollector {
+    // Top-level match for each matched node. Sub-matches live inside the
+    // placeholder values of their parent match, not in this map.
+    matches_by_node: FxHashMap<SyntaxNode, Match>,
+}
+
+impl MatchCollector {
+    /// Attempts to add `m` to matches. If it conflicts with an existing match, it is discarded. If
+    /// it is entirely within a placeholder of an existing match, then it is added as a child
+    /// match of the existing match.
+    fn add_match(&mut self, m: Match, sema: &hir::Semantics<ide_db::RootDatabase>) {
+        let matched_node = m.matched_node.clone();
+        if let Some(existing) = self.matches_by_node.get_mut(&matched_node) {
+            try_add_sub_match(m, existing, sema);
+            return;
+        }
+        // Walk up the ancestors (through macro expansions as well) looking for
+        // an existing match that contains `m`. Since matches are added in
+        // depth order, any containing match is already in the map.
+        for ancestor in sema.ancestors_with_macros(m.matched_node.clone()) {
+            if let Some(existing) = self.matches_by_node.get_mut(&ancestor) {
+                try_add_sub_match(m, existing, sema);
+                return;
+            }
+        }
+        self.matches_by_node.insert(matched_node, m);
+    }
+}
+
+/// Attempts to add `m` as a sub-match of `existing`. If `m` doesn't fit within
+/// any placeholder of `existing`, it is silently discarded (it conflicts with
+/// the higher-priority `existing` match).
+fn try_add_sub_match(m: Match, existing: &mut Match, sema: &hir::Semantics<ide_db::RootDatabase>) {
+    for p in existing.placeholder_values.values_mut() {
+        // Note, no need to check if p.range.file is equal to m.range.file, since we
+        // already know we're within `existing`.
+        if p.range.range.contains_range(m.range.range) {
+            // Convert the inner matches in `p` into a temporary MatchCollector. When
+            // we're done, we then convert it back into an SsrMatches. If we expected
+            // lots of inner matches, it might be worthwhile keeping a MatchCollector
+            // around for each placeholder match. However we expect most placeholders
+            // will have 0 and a few will have 1. More than that should hopefully be
+            // exceptional.
+            let mut collector = MatchCollector::default();
+            for m in std::mem::replace(&mut p.inner_matches.matches, Vec::new()) {
+                collector.matches_by_node.insert(m.matched_node.clone(), m);
+            }
+            collector.add_match(m, sema);
+            p.inner_matches = collector.into();
+            break;
+        }
+    }
+}
+
+impl From<MatchCollector> for SsrMatches {
+    /// Flattens the collector's map into an `SsrMatches`, ordered by file and
+    /// start offset so downstream edits are produced in a stable order.
+    fn from(mut match_collector: MatchCollector) -> Self {
+        let mut matches = SsrMatches::default();
+        for (_, m) in match_collector.matches_by_node.drain() {
+            matches.matches.push(m);
+        }
+        matches.matches.sort_by(|a, b| {
+            // Order matches by file_id then by start range. This should be sufficient since ranges
+            // shouldn't be overlapping.
+            a.range
+                .file_id
+                .cmp(&b.range.file_id)
+                .then_with(|| a.range.range.start().cmp(&b.range.range.start()))
+        });
+        matches
+    }
+}
diff --git a/crates/ssr/src/parsing.rs b/crates/ssr/src/parsing.rs
new file mode 100644 (file)
index 0000000..9570e96
--- /dev/null
@@ -0,0 +1,389 @@
+//! This file contains code for parsing SSR rules, which look something like `foo($a) ==>> bar($b)`.
+//! We first split everything before and after the separator `==>>`. Next, both the search pattern
+//! and the replacement template get tokenized by the Rust tokenizer. Tokens are then searched for
+//! placeholders, which start with `$`. For replacement templates, this is the final form. For
+//! search patterns, we go further and parse the pattern as each kind of thing that we can match.
+//! e.g. expressions, type references etc.
+
+use crate::errors::bail;
+use crate::{SsrError, SsrPattern, SsrRule};
+use rustc_hash::{FxHashMap, FxHashSet};
+use std::str::FromStr;
+use syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T};
+use test_utils::mark;
+
+/// A rule (search pattern plus optional replacement template) parsed as one
+/// particular kind of Rust syntax — expression, type, item, path or pattern.
+#[derive(Debug)]
+pub(crate) struct ParsedRule {
+    pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
+    pub(crate) pattern: SyntaxNode,
+    pub(crate) template: Option<SyntaxNode>,
+}
+
+/// A search or replace pattern as a flat sequence of tokens and placeholders,
+/// prior to being parsed as Rust code.
+#[derive(Debug)]
+pub(crate) struct RawPattern {
+    tokens: Vec<PatternElement>,
+}
+
+// Part of a search or replace pattern.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum PatternElement {
+    Token(Token),
+    Placeholder(Placeholder),
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) struct Placeholder {
+    /// The name of this placeholder. e.g. for "$a", this would be "a"
+    pub(crate) ident: SmolStr,
+    /// A unique name used in place of this placeholder when we parse the pattern as Rust code.
+    stand_in_name: String,
+    /// Constraints such as `kind(literal)` that restrict what this
+    /// placeholder may match.
+    pub(crate) constraints: Vec<Constraint>,
+}
+
+/// A restriction on what a placeholder is permitted to match.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum Constraint {
+    Kind(NodeKind),
+    Not(Box<Constraint>),
+}
+
+/// Node kinds that can be named in a `kind(...)` constraint.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(crate) enum NodeKind {
+    Literal,
+}
+
+/// A single token produced by the Rust tokenizer, together with its text.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(crate) struct Token {
+    kind: SyntaxKind,
+    pub(crate) text: SmolStr,
+}
+
+impl ParsedRule {
+    /// Attempts to parse the pattern (and template, when present) as each
+    /// supported kind of syntax node — expression, type, item, path and
+    /// pattern — producing one rule per kind for which parsing succeeds.
+    fn new(
+        pattern: &RawPattern,
+        template: Option<&RawPattern>,
+    ) -> Result<Vec<ParsedRule>, SsrError> {
+        let raw_pattern = pattern.as_rust_code();
+        let raw_template = template.map(|t| t.as_rust_code());
+        let raw_template = raw_template.as_ref().map(|s| s.as_str());
+        let mut builder = RuleBuilder {
+            placeholders_by_stand_in: pattern.placeholders_by_stand_in(),
+            rules: Vec::new(),
+        };
+        builder.try_add(ast::Expr::parse(&raw_pattern), raw_template.map(ast::Expr::parse));
+        builder.try_add(ast::Type::parse(&raw_pattern), raw_template.map(ast::Type::parse));
+        builder.try_add(ast::Item::parse(&raw_pattern), raw_template.map(ast::Item::parse));
+        builder.try_add(ast::Path::parse(&raw_pattern), raw_template.map(ast::Path::parse));
+        builder.try_add(ast::Pat::parse(&raw_pattern), raw_template.map(ast::Pat::parse));
+        builder.build()
+    }
+}
+
+/// Accumulates the per-syntax-kind parses of a rule as they are attempted.
+struct RuleBuilder {
+    placeholders_by_stand_in: FxHashMap<SmolStr, Placeholder>,
+    rules: Vec<ParsedRule>,
+}
+
+impl RuleBuilder {
+    /// Records a rule for one syntactic kind, but only if the pattern parsed
+    /// and, when a template was supplied, the template parsed as well.
+    fn try_add<T: AstNode>(&mut self, pattern: Result<T, ()>, template: Option<Result<T, ()>>) {
+        match (pattern, template) {
+            (Ok(pattern), Some(Ok(template))) => self.rules.push(ParsedRule {
+                placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
+                pattern: pattern.syntax().clone(),
+                template: Some(template.syntax().clone()),
+            }),
+            (Ok(pattern), None) => self.rules.push(ParsedRule {
+                placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
+                pattern: pattern.syntax().clone(),
+                template: None,
+            }),
+            _ => {}
+        }
+    }
+
+    /// Returns the accumulated rules, erroring if nothing parsed. See the
+    /// comment below for why path-less parses are dropped when any parse
+    /// contains a path.
+    fn build(mut self) -> Result<Vec<ParsedRule>, SsrError> {
+        if self.rules.is_empty() {
+            bail!("Not a valid Rust expression, type, item, path or pattern");
+        }
+        // If any rules contain paths, then we reject any rules that don't contain paths. Allowing a
+        // mix leads to strange semantics, since the path-based rules only match things where the
+        // path refers to semantically the same thing, whereas the non-path-based rules could match
+        // anything. Specifically, if we have a rule like `foo ==>> bar` we only want to match the
+        // `foo` that is in the current scope, not any `foo`. However "foo" can be parsed as a
+        // pattern (IDENT_PAT -> NAME -> IDENT). Allowing such a rule through would result in
+        // renaming everything called `foo` to `bar`. It'd also be slow, since without a path, we'd
+        // have to use the slow-scan search mechanism.
+        if self.rules.iter().any(|rule| contains_path(&rule.pattern)) {
+            let old_len = self.rules.len();
+            self.rules.retain(|rule| contains_path(&rule.pattern));
+            if self.rules.len() < old_len {
+                mark::hit!(pattern_is_a_single_segment_path);
+            }
+        }
+        Ok(self.rules)
+    }
+}
+
+/// Returns whether there are any paths in `node`. Rules without any path must
+/// fall back to the slow-scan search mechanism (see `RuleBuilder::build`).
+fn contains_path(node: &SyntaxNode) -> bool {
+    node.kind() == SyntaxKind::PATH
+        || node.descendants().any(|node| node.kind() == SyntaxKind::PATH)
+}
+
+impl FromStr for SsrRule {
+    type Err = SsrError;
+
+    /// Parses a rule of the form `search_pattern ==>> replace_template`.
+    fn from_str(query: &str) -> Result<SsrRule, SsrError> {
+        let mut it = query.split("==>>");
+        let pattern = it.next().expect("at least empty string").trim();
+        let template = it
+            .next()
+            .ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))?
+            .trim()
+            .to_string();
+        // Exactly one `==>>` delimiter is permitted.
+        if it.next().is_some() {
+            return Err(SsrError("More than one delimiter found".into()));
+        }
+        let raw_pattern = pattern.parse()?;
+        let raw_template = template.parse()?;
+        let parsed_rules = ParsedRule::new(&raw_pattern, Some(&raw_template))?;
+        let rule = SsrRule { pattern: raw_pattern, template: raw_template, parsed_rules };
+        validate_rule(&rule)?;
+        Ok(rule)
+    }
+}
+
+impl FromStr for RawPattern {
+    type Err = SsrError;
+
+    /// Tokenizes `pattern_str` into a flat list of tokens and placeholders.
+    fn from_str(pattern_str: &str) -> Result<RawPattern, SsrError> {
+        Ok(RawPattern { tokens: parse_pattern(pattern_str)? })
+    }
+}
+
+impl RawPattern {
+    /// Returns this search pattern as Rust source code that we can feed to the Rust parser.
+    /// Placeholders are rendered as their stand-in identifiers.
+    fn as_rust_code(&self) -> String {
+        let mut res = String::new();
+        for t in &self.tokens {
+            res.push_str(match t {
+                PatternElement::Token(token) => token.text.as_str(),
+                PatternElement::Placeholder(placeholder) => placeholder.stand_in_name.as_str(),
+            });
+        }
+        res
+    }
+
+    /// Returns a map from stand-in name (e.g. `__placeholder_a`) to the
+    /// placeholder it stands in for.
+    pub(crate) fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> {
+        let mut res = FxHashMap::default();
+        for t in &self.tokens {
+            if let PatternElement::Placeholder(placeholder) = t {
+                res.insert(SmolStr::new(placeholder.stand_in_name.clone()), placeholder.clone());
+            }
+        }
+        res
+    }
+}
+
+impl FromStr for SsrPattern {
+    type Err = SsrError;
+
+    /// Parses a search-only pattern (no replacement template).
+    fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> {
+        let raw_pattern = pattern_str.parse()?;
+        let parsed_rules = ParsedRule::new(&raw_pattern, None)?;
+        Ok(SsrPattern { raw: raw_pattern, parsed_rules })
+    }
+}
+
+/// Returns `pattern_str`, parsed as a search or replace pattern: a flat list
+/// of tokens in which `$`-prefixed placeholders have been recognised. Errors
+/// if the same placeholder name is used more than once. (An earlier version
+/// took a `remove_whitespace` flag; whitespace tokens are now always kept.)
+fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
+    let mut res = Vec::new();
+    let mut placeholder_names = FxHashSet::default();
+    let mut tokens = tokenize(pattern_str)?.into_iter();
+    while let Some(token) = tokens.next() {
+        if token.kind == T![$] {
+            let placeholder = parse_placeholder(&mut tokens)?;
+            if !placeholder_names.insert(placeholder.ident.clone()) {
+                bail!("Name `{}` repeats more than once", placeholder.ident);
+            }
+            res.push(PatternElement::Placeholder(placeholder));
+        } else {
+            res.push(PatternElement::Token(token));
+        }
+    }
+    Ok(res)
+}
+
+/// Checks for errors in a rule. e.g. the replace pattern referencing placeholders that the search
+/// pattern didn't define.
+fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
+    let mut defined_placeholders = FxHashSet::default();
+    for p in &rule.pattern.tokens {
+        if let PatternElement::Placeholder(placeholder) = p {
+            defined_placeholders.insert(&placeholder.ident);
+        }
+    }
+    let mut undefined = Vec::new();
+    for p in &rule.template.tokens {
+        if let PatternElement::Placeholder(placeholder) = p {
+            if !defined_placeholders.contains(&placeholder.ident) {
+                undefined.push(format!("${}", placeholder.ident));
+            }
+            // Constraints may only appear in the search pattern, not the template.
+            if !placeholder.constraints.is_empty() {
+                bail!("Replacement placeholders cannot have constraints");
+            }
+        }
+    }
+    if !undefined.is_empty() {
+        bail!("Replacement contains undefined placeholders: {}", undefined.join(", "));
+    }
+    Ok(())
+}
+
+/// Runs the Rust tokenizer over `source`, failing on the first tokenization
+/// error. Token texts are recovered by walking `source` using each raw
+/// token's length.
+fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
+    let mut start = 0;
+    let (raw_tokens, errors) = syntax::tokenize(source);
+    if let Some(first_error) = errors.first() {
+        bail!("Failed to parse pattern: {}", first_error);
+    }
+    let mut tokens: Vec<Token> = Vec::new();
+    for raw_token in raw_tokens {
+        let token_len = usize::from(raw_token.len);
+        tokens.push(Token {
+            kind: raw_token.kind,
+            text: SmolStr::new(&source[start..start + token_len]),
+        });
+        start += token_len;
+    }
+    Ok(tokens)
+}
+
+/// Parses what follows a `$`: either a bare identifier (`$name`) or the braced
+/// form with optional constraints (`${name:constraint(...)}`).
+fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> {
+    let mut name = None;
+    let mut constraints = Vec::new();
+    if let Some(token) = tokens.next() {
+        match token.kind {
+            SyntaxKind::IDENT => {
+                name = Some(token.text);
+            }
+            T!['{'] => {
+                // Braced form: `{name}` or `{name:constraint:...}`.
+                let token =
+                    tokens.next().ok_or_else(|| SsrError::new("Unexpected end of placeholder"))?;
+                if token.kind == SyntaxKind::IDENT {
+                    name = Some(token.text);
+                }
+                loop {
+                    let token = tokens
+                        .next()
+                        .ok_or_else(|| SsrError::new("Placeholder is missing closing brace '}'"))?;
+                    match token.kind {
+                        T![:] => {
+                            constraints.push(parse_constraint(tokens)?);
+                        }
+                        T!['}'] => break,
+                        _ => bail!("Unexpected token while parsing placeholder: '{}'", token.text),
+                    }
+                }
+            }
+            _ => {
+                bail!("Placeholders should either be $name or ${{name:constraints}}");
+            }
+        }
+    }
+    let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?;
+    Ok(Placeholder::new(name, constraints))
+}
+
+/// Parses a single constraint, e.g. `kind(literal)` or `not(kind(literal))`.
+/// `not(...)` recurses, so constraints may nest.
+fn parse_constraint(tokens: &mut std::vec::IntoIter<Token>) -> Result<Constraint, SsrError> {
+    let constraint_type = tokens
+        .next()
+        .ok_or_else(|| SsrError::new("Found end of placeholder while looking for a constraint"))?
+        .text
+        .to_string();
+    match constraint_type.as_str() {
+        "kind" => {
+            expect_token(tokens, "(")?;
+            let t = tokens.next().ok_or_else(|| {
+                SsrError::new("Unexpected end of constraint while looking for kind")
+            })?;
+            if t.kind != SyntaxKind::IDENT {
+                bail!("Expected ident, found {:?} while parsing kind constraint", t.kind);
+            }
+            expect_token(tokens, ")")?;
+            Ok(Constraint::Kind(NodeKind::from(&t.text)?))
+        }
+        "not" => {
+            expect_token(tokens, "(")?;
+            let sub = parse_constraint(tokens)?;
+            expect_token(tokens, ")")?;
+            Ok(Constraint::Not(Box::new(sub)))
+        }
+        x => bail!("Unsupported constraint type '{}'", x),
+    }
+}
+
+/// Consumes the next token, erroring unless its text is exactly `expected`.
+fn expect_token(tokens: &mut std::vec::IntoIter<Token>, expected: &str) -> Result<(), SsrError> {
+    if let Some(t) = tokens.next() {
+        if t.text == expected {
+            return Ok(());
+        }
+        bail!("Expected {} found {}", expected, t.text);
+    }
+    bail!("Expected {} found end of stream", expected);
+}
+
+impl NodeKind {
+    /// Maps a `kind(...)` constraint name to a `NodeKind`.
+    fn from(name: &SmolStr) -> Result<NodeKind, SsrError> {
+        Ok(match name.as_str() {
+            "literal" => NodeKind::Literal,
+            _ => bail!("Unknown node kind '{}'", name),
+        })
+    }
+}
+
+impl Placeholder {
+    /// Creates a placeholder, deriving the `__placeholder_<name>` stand-in
+    /// identifier used when the pattern is parsed as Rust code.
+    fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self {
+        Self { stand_in_name: format!("__placeholder_{}", name), constraints, ident: name }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    /// Checks that both sides of a rule tokenize into the expected mix of
+    /// plain tokens and placeholders.
+    #[test]
+    fn parser_happy_case() {
+        fn token(kind: SyntaxKind, text: &str) -> PatternElement {
+            PatternElement::Token(Token { kind, text: SmolStr::new(text) })
+        }
+        fn placeholder(name: &str) -> PatternElement {
+            PatternElement::Placeholder(Placeholder::new(SmolStr::new(name), Vec::new()))
+        }
+        let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
+        assert_eq!(
+            result.pattern.tokens,
+            vec![
+                token(SyntaxKind::IDENT, "foo"),
+                token(T!['('], "("),
+                placeholder("a"),
+                token(T![,], ","),
+                token(SyntaxKind::WHITESPACE, " "),
+                placeholder("b"),
+                token(T![')'], ")"),
+            ]
+        );
+        assert_eq!(
+            result.template.tokens,
+            vec![
+                token(SyntaxKind::IDENT, "bar"),
+                token(T!['('], "("),
+                placeholder("b"),
+                token(T![,], ","),
+                token(SyntaxKind::WHITESPACE, " "),
+                placeholder("a"),
+                token(T![')'], ")"),
+            ]
+        );
+    }
+}
diff --git a/crates/ssr/src/replacing.rs b/crates/ssr/src/replacing.rs
new file mode 100644 (file)
index 0000000..8f8fe61
--- /dev/null
@@ -0,0 +1,194 @@
+//! Code for applying replacement templates for matches that have previously been found.
+
+use crate::matching::Var;
+use crate::{resolving::ResolvedRule, Match, SsrMatches};
+use rustc_hash::{FxHashMap, FxHashSet};
+use syntax::ast::{self, AstToken};
+use syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, TextSize};
+use text_edit::TextEdit;
+
+/// Returns a text edit that will replace each match in `matches` with its corresponding replacement
+/// template. Placeholders in the template will have been substituted with whatever they matched to
+/// in the original code.
+pub(crate) fn matches_to_edit(
+    matches: &SsrMatches,
+    file_src: &str,
+    rules: &[ResolvedRule],
+) -> TextEdit {
+    // Top-level matches carry absolute file offsets, so the relative start is zero.
+    matches_to_edit_at_offset(matches, file_src, 0.into(), rules)
+}
+
+/// As `matches_to_edit`, but produces an edit relative to `relative_start`.
+/// Used when rendering inner matches, whose replacement is applied to a
+/// substring of the file starting at that offset.
+fn matches_to_edit_at_offset(
+    matches: &SsrMatches,
+    file_src: &str,
+    relative_start: TextSize,
+    rules: &[ResolvedRule],
+) -> TextEdit {
+    let mut edit_builder = TextEdit::builder();
+    for m in &matches.matches {
+        edit_builder.replace(
+            m.range.range.checked_sub(relative_start).unwrap(),
+            render_replace(m, file_src, rules),
+        );
+    }
+    edit_builder.finish()
+}
+
+/// State for rendering the replacement text of a single match.
+struct ReplacementRenderer<'a> {
+    /// The match being replaced.
+    match_info: &'a Match,
+    /// Full text of the file containing the match.
+    file_src: &'a str,
+    /// All rules; needed for rendering inner matches within placeholder values.
+    rules: &'a [ResolvedRule],
+    /// The rule that produced `match_info` (`rules[match_info.rule_index]`).
+    rule: &'a ResolvedRule,
+    /// The replacement text built so far.
+    out: String,
+    // Map from a range within `out` to a token in `template` that represents a placeholder. This is
+    // used to validate that the generated source code doesn't split any placeholder expansions (see
+    // below).
+    placeholder_tokens_by_range: FxHashMap<TextRange, SyntaxToken>,
+    // Which placeholder tokens need to be wrapped in parenthesis in order to ensure that when `out`
+    // is parsed, placeholders don't get split. e.g. if a template of `$a.to_string()` results in `1
+    // + 2.to_string()` then the placeholder value `1 + 2` was split and needs parenthesis.
+    placeholder_tokens_requiring_parenthesis: FxHashSet<SyntaxToken>,
+}
+
+/// Renders the replacement template of the rule that produced `match_info`,
+/// substituting placeholder values. Comments recorded as ignored on the match
+/// are appended at the end so they aren't lost.
+fn render_replace(match_info: &Match, file_src: &str, rules: &[ResolvedRule]) -> String {
+    let rule = &rules[match_info.rule_index];
+    let template = rule
+        .template
+        .as_ref()
+        .expect("You called MatchFinder::edits after calling MatchFinder::add_search_pattern");
+    let mut renderer = ReplacementRenderer {
+        match_info,
+        file_src,
+        rules,
+        rule,
+        out: String::new(),
+        placeholder_tokens_requiring_parenthesis: FxHashSet::default(),
+        placeholder_tokens_by_range: FxHashMap::default(),
+    };
+    renderer.render_node(&template.node);
+    // A second rendering pass may be needed to add parenthesis around
+    // placeholders that would otherwise be split by operator precedence.
+    renderer.maybe_rerender_with_extra_parenthesis(&template.node);
+    for comment in &match_info.ignored_comments {
+        renderer.out.push_str(&comment.syntax().to_string());
+    }
+    renderer.out
+}
+
+impl ReplacementRenderer<'_> {
+    /// Renders each child of `node`, in source order.
+    fn render_node_children(&mut self, node: &SyntaxNode) {
+        for node_or_token in node.children_with_tokens() {
+            self.render_node_or_token(&node_or_token);
+        }
+    }
+
+    /// Dispatches to `render_token` or `render_node` as appropriate.
+    fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) {
+        match node_or_token {
+            SyntaxElement::Token(token) => {
+                self.render_token(&token);
+            }
+            SyntaxElement::Node(child_node) => {
+                self.render_node(&child_node);
+            }
+        }
+    }
+
+    /// Renders `node`. If a rendered module path was recorded for this node,
+    /// that path is emitted instead of the original path text, followed by
+    /// everything in the final segment other than its name-ref.
+    fn render_node(&mut self, node: &SyntaxNode) {
+        use syntax::ast::AstNode;
+        if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) {
+            self.out.push_str(&mod_path.to_string());
+            // Emit everything except for the segment's name-ref, since we already effectively
+            // emitted that as part of `mod_path`.
+            if let Some(path) = ast::Path::cast(node.clone()) {
+                if let Some(segment) = path.segment() {
+                    for node_or_token in segment.syntax().children_with_tokens() {
+                        if node_or_token.kind() != SyntaxKind::NAME_REF {
+                            self.render_node_or_token(&node_or_token);
+                        }
+                    }
+                }
+            }
+        } else {
+            self.render_node_children(&node);
+        }
+    }
+
+    /// Renders `token`. If it is a placeholder stand-in, the text the
+    /// placeholder matched is emitted instead, with any inner matches replaced
+    /// recursively and parenthesis added when required.
+    fn render_token(&mut self, token: &SyntaxToken) {
+        if let Some(placeholder) = self.rule.get_placeholder(&token) {
+            if let Some(placeholder_value) =
+                self.match_info.placeholder_values.get(&Var(placeholder.ident.to_string()))
+            {
+                let range = &placeholder_value.range.range;
+                let mut matched_text =
+                    self.file_src[usize::from(range.start())..usize::from(range.end())].to_owned();
+                // Apply replacements for matches nested inside this placeholder.
+                let edit = matches_to_edit_at_offset(
+                    &placeholder_value.inner_matches,
+                    self.file_src,
+                    range.start(),
+                    self.rules,
+                );
+                let needs_parenthesis =
+                    self.placeholder_tokens_requiring_parenthesis.contains(token);
+                edit.apply(&mut matched_text);
+                if needs_parenthesis {
+                    self.out.push('(');
+                }
+                // Record where this placeholder's expansion landed in `out` so
+                // the rerender pass can detect split expansions.
+                self.placeholder_tokens_by_range.insert(
+                    TextRange::new(
+                        TextSize::of(&self.out),
+                        TextSize::of(&self.out) + TextSize::of(&matched_text),
+                    ),
+                    token.clone(),
+                );
+                self.out.push_str(&matched_text);
+                if needs_parenthesis {
+                    self.out.push(')');
+                }
+            } else {
+                // We validated that all placeholder references were valid before we
+                // started, so this shouldn't happen.
+                panic!(
+                    "Internal error: replacement referenced unknown placeholder {}",
+                    placeholder.ident
+                );
+            }
+        } else {
+            self.out.push_str(token.text().as_str());
+        }
+    }
+
+    // Checks if the resulting code, when parsed doesn't split any placeholders due to different
+    // order of operations between the search pattern and the replacement template. If any do, then
+    // we rerender the template and wrap the problematic placeholders with parenthesis.
+    fn maybe_rerender_with_extra_parenthesis(&mut self, template: &SyntaxNode) {
+        if let Some(node) = parse_as_kind(&self.out, template.kind()) {
+            self.remove_node_ranges(node);
+            if self.placeholder_tokens_by_range.is_empty() {
+                return;
+            }
+            // Any ranges that remain didn't correspond to a single node in the
+            // reparse: those placeholder expansions were split and need parenthesis.
+            self.placeholder_tokens_requiring_parenthesis =
+                self.placeholder_tokens_by_range.values().cloned().collect();
+            self.out.clear();
+            self.render_node(template);
+        }
+    }
+
+    /// Removes the range of `node` and of all its descendants from
+    /// `placeholder_tokens_by_range`.
+    fn remove_node_ranges(&mut self, node: SyntaxNode) {
+        self.placeholder_tokens_by_range.remove(&node.text_range());
+        for child in node.children() {
+            self.remove_node_ranges(child);
+        }
+    }
+}
+
+/// Attempts to parse `code` as the same broad syntactic kind as the template
+/// (expression or item), returning the parsed root node on success.
+fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option<SyntaxNode> {
+    use syntax::ast::AstNode;
+    if ast::Expr::can_cast(kind) {
+        if let Ok(expr) = ast::Expr::parse(code) {
+            return Some(expr.syntax().clone());
+        }
+    } else if ast::Item::can_cast(kind) {
+        if let Ok(item) = ast::Item::parse(code) {
+            return Some(item.syntax().clone());
+        }
+    }
+    None
+}
diff --git a/crates/ssr/src/resolving.rs b/crates/ssr/src/resolving.rs
new file mode 100644 (file)
index 0000000..020fd79
--- /dev/null
@@ -0,0 +1,299 @@
+//! This module is responsible for resolving paths within rules.
+
+use crate::errors::error;
+use crate::{parsing, SsrError};
+use base_db::FilePosition;
+use parsing::Placeholder;
+use rustc_hash::FxHashMap;
+use syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken};
+use test_utils::mark;
+
+/// The scope at the position from which SSR was invoked. Paths in rules are resolved
+/// relative to this scope.
+pub(crate) struct ResolutionScope<'db> {
+    scope: hir::SemanticsScope<'db>,
+    // Used when lowering an `ast::Path` to a `hir::Path` for resolution.
+    hygiene: hir::Hygiene,
+    // The node picked for resolution; also used to locate the enclosing function.
+    node: SyntaxNode,
+}
+
+/// A parsed rule whose paths have been resolved.
+pub(crate) struct ResolvedRule {
+    pub(crate) pattern: ResolvedPattern,
+    // `None` when the rule is a search-only pattern with no replacement.
+    pub(crate) template: Option<ResolvedPattern>,
+    // Index identifying this rule, assigned by the caller of `ResolvedRule::new`.
+    pub(crate) index: usize,
+}
+
+pub(crate) struct ResolvedPattern {
+    pub(crate) placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
+    pub(crate) node: SyntaxNode,
+    // Paths in `node` that we've resolved.
+    pub(crate) resolved_paths: FxHashMap<SyntaxNode, ResolvedPath>,
+    // Call expressions in `node` (keyed by the CallExpr node) whose callee path resolved to
+    // an associated function - i.e. UFCS calls like `Foo::bar(x)`.
+    pub(crate) ufcs_function_calls: FxHashMap<SyntaxNode, UfcsCallInfo>,
+    // Whether the pattern contains a `self` token anywhere.
+    pub(crate) contains_self: bool,
+}
+
+/// A path in the pattern together with what it resolved to.
+pub(crate) struct ResolvedPath {
+    pub(crate) resolution: hir::PathResolution,
+    /// The depth of the ast::Path that was resolved within the pattern.
+    pub(crate) depth: u32,
+}
+
+/// Information about a call written in universal function-call syntax, e.g. `Foo::bar(x)`.
+pub(crate) struct UfcsCallInfo {
+    pub(crate) call_expr: ast::CallExpr,
+    pub(crate) function: hir::Function,
+    // The type of the path qualifier (e.g. the type of `Foo` in `Foo::bar`), if it resolved
+    // to an ADT.
+    pub(crate) qualifier_type: Option<hir::Type>,
+}
+
+impl ResolvedRule {
+    /// Resolves all paths in `rule`'s pattern and (when present) its replacement template.
+    pub(crate) fn new(
+        rule: parsing::ParsedRule,
+        resolution_scope: &ResolutionScope,
+        index: usize,
+    ) -> Result<ResolvedRule, SsrError> {
+        let resolver =
+            Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in };
+        let resolved_template = if let Some(template) = rule.template {
+            Some(resolver.resolve_pattern_tree(template)?)
+        } else {
+            None
+        };
+        Ok(ResolvedRule {
+            pattern: resolver.resolve_pattern_tree(rule.pattern)?,
+            template: resolved_template,
+            index,
+        })
+    }
+
+    /// Returns the placeholder that `token` stands in for, if any. Only identifier tokens
+    /// can be placeholder stand-ins.
+    pub(crate) fn get_placeholder(&self, token: &SyntaxToken) -> Option<&Placeholder> {
+        if token.kind() != SyntaxKind::IDENT {
+            return None;
+        }
+        self.pattern.placeholders_by_stand_in.get(token.text())
+    }
+}
+
+/// Helper that walks a pattern tree and resolves the paths within it.
+struct Resolver<'a, 'db> {
+    resolution_scope: &'a ResolutionScope<'db>,
+    placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
+}
+
+impl Resolver<'_, '_> {
+    /// Resolves every eligible path in `pattern`, then derives the UFCS-call and
+    /// contains-`self` information from the results.
+    fn resolve_pattern_tree(&self, pattern: SyntaxNode) -> Result<ResolvedPattern, SsrError> {
+        use syntax::ast::AstNode;
+        use syntax::{SyntaxElement, T};
+        let mut resolved_paths = FxHashMap::default();
+        self.resolve(pattern.clone(), 0, &mut resolved_paths)?;
+        // A path whose grandparent is a call expression and which resolved to an associated
+        // function is a UFCS call, e.g. `Foo::bar(x)` (Path -> PathExpr -> CallExpr).
+        let ufcs_function_calls = resolved_paths
+            .iter()
+            .filter_map(|(path_node, resolved)| {
+                if let Some(grandparent) = path_node.parent().and_then(|parent| parent.parent()) {
+                    if let Some(call_expr) = ast::CallExpr::cast(grandparent.clone()) {
+                        if let hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) =
+                            resolved.resolution
+                        {
+                            let qualifier_type = self.resolution_scope.qualifier_type(path_node);
+                            return Some((
+                                grandparent,
+                                UfcsCallInfo { call_expr, function, qualifier_type },
+                            ));
+                        }
+                    }
+                }
+                None
+            })
+            .collect();
+        let contains_self =
+            pattern.descendants_with_tokens().any(|node_or_token| match node_or_token {
+                SyntaxElement::Token(t) => t.kind() == T![self],
+                _ => false,
+            });
+        Ok(ResolvedPattern {
+            node: pattern,
+            resolved_paths,
+            placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
+            ufcs_function_calls,
+            contains_self,
+        })
+    }
+
+    /// Recursively resolves paths within `node`, recording successful resolutions together
+    /// with their depth in `resolved_paths`.
+    fn resolve(
+        &self,
+        node: SyntaxNode,
+        depth: u32,
+        resolved_paths: &mut FxHashMap<SyntaxNode, ResolvedPath>,
+    ) -> Result<(), SsrError> {
+        use syntax::ast::AstNode;
+        if let Some(path) = ast::Path::cast(node.clone()) {
+            if is_self(&path) {
+                // Self cannot be resolved like other paths.
+                return Ok(());
+            }
+            // Check if this is an appropriate place in the path to resolve. If the path is
+            // something like `a::B::<i32>::c` then we want to resolve `a::B`. If the path contains
+            // a placeholder, e.g. `a::$b::c`, then we want to resolve `a`.
+            if !path_contains_type_arguments(path.qualifier())
+                && !self.path_contains_placeholder(&path)
+            {
+                let resolution = self
+                    .resolution_scope
+                    .resolve_path(&path)
+                    .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?;
+                if self.ok_to_use_path_resolution(&resolution) {
+                    resolved_paths.insert(node, ResolvedPath { resolution, depth });
+                    return Ok(());
+                }
+            }
+        }
+        // Either this wasn't a path or we chose not to resolve it at this level; recurse.
+        for node in node.children() {
+            self.resolve(node, depth + 1, resolved_paths)?;
+        }
+        Ok(())
+    }
+
+    /// Returns whether `path` contains a placeholder, but ignores any placeholders within type
+    /// arguments.
+    fn path_contains_placeholder(&self, path: &ast::Path) -> bool {
+        if let Some(segment) = path.segment() {
+            if let Some(name_ref) = segment.name_ref() {
+                if self.placeholders_by_stand_in.contains_key(name_ref.text()) {
+                    return true;
+                }
+            }
+        }
+        if let Some(qualifier) = path.qualifier() {
+            return self.path_contains_placeholder(&qualifier);
+        }
+        false
+    }
+
+    /// Decides whether a successful path resolution should actually be used for matching.
+    fn ok_to_use_path_resolution(&self, resolution: &hir::PathResolution) -> bool {
+        match resolution {
+            hir::PathResolution::AssocItem(hir::AssocItem::Function(function)) => {
+                if function.has_self_param(self.resolution_scope.scope.db) {
+                    // If we don't use this path resolution, then we won't be able to match method
+                    // calls. e.g. `Foo::bar($s)` should match `x.bar()`.
+                    true
+                } else {
+                    mark::hit!(replace_associated_trait_default_function_call);
+                    false
+                }
+            }
+            hir::PathResolution::AssocItem(_) => {
+                // Not a function. Could be a constant or an associated type.
+                mark::hit!(replace_associated_trait_constant);
+                false
+            }
+            _ => true,
+        }
+    }
+}
+
+impl<'db> ResolutionScope<'db> {
+    /// Builds a resolution scope anchored at `resolve_context` within the given file.
+    pub(crate) fn new(
+        sema: &hir::Semantics<'db, ide_db::RootDatabase>,
+        resolve_context: FilePosition,
+    ) -> ResolutionScope<'db> {
+        use syntax::ast::AstNode;
+        let file = sema.parse(resolve_context.file_id);
+        // Find a node at the requested position, falling back to the whole file.
+        let node = file
+            .syntax()
+            .token_at_offset(resolve_context.offset)
+            .left_biased()
+            .map(|token| token.parent())
+            .unwrap_or_else(|| file.syntax().clone());
+        let node = pick_node_for_resolution(node);
+        let scope = sema.scope(&node);
+        ResolutionScope {
+            scope,
+            hygiene: hir::Hygiene::new(sema.db, resolve_context.file_id.into()),
+            node,
+        }
+    }
+
+    /// Returns the function in which SSR was invoked, if any.
+    pub(crate) fn current_function(&self) -> Option<SyntaxNode> {
+        self.node.ancestors().find(|node| node.kind() == SyntaxKind::FN).map(|node| node.clone())
+    }
+
+    fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
+        let hir_path = hir::Path::from_src(path.clone(), &self.hygiene)?;
+        // First try resolving the whole path. This will work for things like
+        // `std::collections::HashMap`, but will fail for things like
+        // `std::collections::HashMap::new`.
+        if let Some(resolution) = self.scope.resolve_hir_path(&hir_path) {
+            return Some(resolution);
+        }
+        // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap`) and if
+        // that succeeds, then iterate through the candidates on the resolved type with the provided
+        // name.
+        let resolved_qualifier = self.scope.resolve_hir_path_qualifier(&hir_path.qualifier()?)?;
+        if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
+            adt.ty(self.scope.db).iterate_path_candidates(
+                self.scope.db,
+                self.scope.module()?.krate(),
+                &self.scope.traits_in_scope(),
+                Some(hir_path.segments().last()?.name),
+                |_ty, assoc_item| Some(hir::PathResolution::AssocItem(assoc_item)),
+            )
+        } else {
+            None
+        }
+    }
+
+    /// Resolves the qualifier of `path` (when `path` is an `ast::Path` with a qualifier) and
+    /// returns its type if the qualifier resolved to an ADT.
+    fn qualifier_type(&self, path: &SyntaxNode) -> Option<hir::Type> {
+        use syntax::ast::AstNode;
+        if let Some(path) = ast::Path::cast(path.clone()) {
+            if let Some(qualifier) = path.qualifier() {
+                if let Some(resolved_qualifier) = self.resolve_path(&qualifier) {
+                    if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
+                        return Some(adt.ty(self.scope.db));
+                    }
+                }
+            }
+        }
+        None
+    }
+}
+
+/// Returns whether `path` is a bare `self`.
+fn is_self(path: &ast::Path) -> bool {
+    path.segment().map(|segment| segment.self_token().is_some()).unwrap_or(false)
+}
+
+/// Returns a suitable node for resolving paths in the current scope. If we create a scope based on
+/// a statement node, then we can't resolve local variables that were defined in the current scope
+/// (only in parent scopes). So we find another node, ideally a child of the statement where local
+/// variable resolution is permitted.
+fn pick_node_for_resolution(node: SyntaxNode) -> SyntaxNode {
+    match node.kind() {
+        SyntaxKind::EXPR_STMT => {
+            // Descend into the statement's expression.
+            if let Some(n) = node.first_child() {
+                mark::hit!(cursor_after_semicolon);
+                return n;
+            }
+        }
+        SyntaxKind::LET_STMT | SyntaxKind::IDENT_PAT => {
+            // A let-binding's own scope excludes the bound name; try the following sibling.
+            if let Some(next) = node.next_sibling() {
+                return pick_node_for_resolution(next);
+            }
+        }
+        SyntaxKind::NAME => {
+            // A bare name isn't a scope; climb to its parent.
+            if let Some(parent) = node.parent() {
+                return pick_node_for_resolution(parent);
+            }
+        }
+        _ => {}
+    }
+    node
+}
+
+/// Returns whether `path` or any of its qualifiers contains type arguments.
+fn path_contains_type_arguments(path: Option<ast::Path>) -> bool {
+    if let Some(path) = path {
+        if let Some(segment) = path.segment() {
+            if segment.generic_arg_list().is_some() {
+                mark::hit!(type_arguments_within_path);
+                return true;
+            }
+        }
+        // Recurse through the qualifier chain, e.g. check `a::B` within `a::B::<i32>::c`.
+        return path_contains_type_arguments(path.qualifier());
+    }
+    false
+}
diff --git a/crates/ssr/src/search.rs b/crates/ssr/src/search.rs
new file mode 100644 (file)
index 0000000..8509cfa
--- /dev/null
@@ -0,0 +1,282 @@
+//! Searching for matches.
+
+use crate::{
+    matching,
+    resolving::{ResolvedPath, ResolvedPattern, ResolvedRule},
+    Match, MatchFinder,
+};
+use base_db::{FileId, FileRange};
+use ide_db::{
+    defs::Definition,
+    search::{Reference, SearchScope},
+};
+use rustc_hash::FxHashSet;
+use syntax::{ast, AstNode, SyntaxKind, SyntaxNode};
+use test_utils::mark;
+
+/// A cache for the results of find_usages. This is for when we have multiple patterns that have the
+/// same path. e.g. if the pattern was `foo::Bar` that can parse as a path, an expression, a type
+/// and as a pattern. In each, the usages of `foo::Bar` are the same and we'd like to avoid finding
+/// them more than once.
+#[derive(Default)]
+pub(crate) struct UsageCache {
+    // Entries are appended as definitions are looked up; see `UsageCache::find`.
+    usages: Vec<(Definition, Vec<Reference>)>,
+}
+
+impl<'db> MatchFinder<'db> {
+    /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make
+    /// replacement impossible, so further processing is required in order to properly nest matches
+    /// and remove overlapping matches. This is done in the `nester` module.
+    pub(crate) fn find_matches_for_rule(
+        &self,
+        rule: &ResolvedRule,
+        usage_cache: &mut UsageCache,
+        matches_out: &mut Vec<Match>,
+    ) {
+        if rule.pattern.contains_self {
+            // If the pattern contains `self` we restrict the scope of the search to just the
+            // current method. No other method can reference the same `self`. This makes the
+            // behavior of `self` consistent with other variables.
+            if let Some(current_function) = self.resolution_scope.current_function() {
+                self.slow_scan_node(&current_function, rule, &None, matches_out);
+            }
+            return;
+        }
+        if pick_path_for_usages(&rule.pattern).is_none() {
+            // No usable path in the pattern: fall back to scanning every node of every file.
+            self.slow_scan(rule, matches_out);
+            return;
+        }
+        self.find_matches_for_pattern_tree(rule, &rule.pattern, usage_cache, matches_out);
+    }
+
+    // Fast path: find references to a path in the pattern, then attempt a match at each
+    // reference site instead of scanning whole files.
+    fn find_matches_for_pattern_tree(
+        &self,
+        rule: &ResolvedRule,
+        pattern: &ResolvedPattern,
+        usage_cache: &mut UsageCache,
+        matches_out: &mut Vec<Match>,
+    ) {
+        if let Some(resolved_path) = pick_path_for_usages(pattern) {
+            let definition: Definition = resolved_path.resolution.clone().into();
+            for reference in self.find_usages(usage_cache, definition) {
+                if let Some(node_to_match) = self.find_node_to_match(resolved_path, reference) {
+                    if !is_search_permitted_ancestors(&node_to_match) {
+                        mark::hit!(use_declaration_with_braces);
+                        continue;
+                    }
+                    self.try_add_match(rule, &node_to_match, &None, matches_out);
+                }
+            }
+        }
+    }
+
+    // Given a reference to the path we searched for, walks back up the tree by the depth the
+    // path had within the pattern to find the node the whole pattern should match against.
+    fn find_node_to_match(
+        &self,
+        resolved_path: &ResolvedPath,
+        reference: &Reference,
+    ) -> Option<SyntaxNode> {
+        let file = self.sema.parse(reference.file_range.file_id);
+        let depth = resolved_path.depth as usize;
+        let offset = reference.file_range.range.start();
+        if let Some(path) =
+            self.sema.find_node_at_offset_with_descend::<ast::Path>(file.syntax(), offset)
+        {
+            self.sema.ancestors_with_macros(path.syntax().clone()).skip(depth).next()
+        } else if let Some(path) =
+            self.sema.find_node_at_offset_with_descend::<ast::MethodCallExpr>(file.syntax(), offset)
+        {
+            // If the pattern contained a path and we found a reference to that path that wasn't
+            // itself a path, but was a method call, then we need to adjust how far up to try
+            // matching by how deep the path was within a CallExpr. The structure would have been
+            // CallExpr, PathExpr, Path - i.e. a depth offset of 2. We don't need to check if the
+            // path was part of a CallExpr because if it wasn't then all that will happen is we'll
+            // fail to match, which is the desired behavior.
+            const PATH_DEPTH_IN_CALL_EXPR: usize = 2;
+            if depth < PATH_DEPTH_IN_CALL_EXPR {
+                return None;
+            }
+            self.sema
+                .ancestors_with_macros(path.syntax().clone())
+                .skip(depth - PATH_DEPTH_IN_CALL_EXPR)
+                .next()
+        } else {
+            None
+        }
+    }
+
+    // Returns (finding and caching if necessary) all references to `definition` within the
+    // search scope.
+    fn find_usages<'a>(
+        &self,
+        usage_cache: &'a mut UsageCache,
+        definition: Definition,
+    ) -> &'a [Reference] {
+        // Logically if a lookup succeeds we should just return it. Unfortunately returning it would
+        // extend the lifetime of the borrow, then we wouldn't be able to do the insertion on a
+        // cache miss. This is a limitation of NLL and is fixed with Polonius. For now we do two
+        // lookups in the case of a cache hit.
+        if usage_cache.find(&definition).is_none() {
+            let usages = definition.find_usages(&self.sema, Some(self.search_scope()));
+            usage_cache.usages.push((definition, usages));
+            return &usage_cache.usages.last().unwrap().1;
+        }
+        usage_cache.find(&definition).unwrap()
+    }
+
+    /// Returns the scope within which we want to search. We don't want an unrestricted search
+    /// scope, since we don't want to find references in external dependencies.
+    fn search_scope(&self) -> SearchScope {
+        // FIXME: We should ideally have a test that checks that we edit local roots and not library
+        // roots. This probably would require some changes to fixtures, since currently everything
+        // seems to get put into a single source root.
+        let mut files = Vec::new();
+        self.search_files_do(|file_id| {
+            files.push(file_id);
+        });
+        SearchScope::files(&files)
+    }
+
+    // Fallback: scan every node of every file in scope.
+    fn slow_scan(&self, rule: &ResolvedRule, matches_out: &mut Vec<Match>) {
+        self.search_files_do(|file_id| {
+            let file = self.sema.parse(file_id);
+            let code = file.syntax();
+            self.slow_scan_node(code, rule, &None, matches_out);
+        })
+    }
+
+    // Invokes `callback` once for each file that should be searched.
+    fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
+        if self.restrict_ranges.is_empty() {
+            // Unrestricted search.
+            use base_db::SourceDatabaseExt;
+            use ide_db::symbol_index::SymbolsDatabase;
+            for &root in self.sema.db.local_roots().iter() {
+                let sr = self.sema.db.source_root(root);
+                for file_id in sr.iter() {
+                    callback(file_id);
+                }
+            }
+        } else {
+            // Search is restricted, deduplicate file IDs (generally only one).
+            let mut files = FxHashSet::default();
+            for range in &self.restrict_ranges {
+                if files.insert(range.file_id) {
+                    callback(range.file_id);
+                }
+            }
+        }
+    }
+
+    // Recursively tries to match `rule` at `code` and each of its descendants, also descending
+    // into macro expansions.
+    fn slow_scan_node(
+        &self,
+        code: &SyntaxNode,
+        rule: &ResolvedRule,
+        restrict_range: &Option<FileRange>,
+        matches_out: &mut Vec<Match>,
+    ) {
+        if !is_search_permitted(code) {
+            return;
+        }
+        self.try_add_match(rule, &code, restrict_range, matches_out);
+        // If we've got a macro call, we already tried matching it pre-expansion, which is the only
+        // way to match the whole macro, now try expanding it and matching the expansion.
+        if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
+            if let Some(expanded) = self.sema.expand(&macro_call) {
+                if let Some(tt) = macro_call.token_tree() {
+                    // When matching within a macro expansion, we only want to allow matches of
+                    // nodes that originated entirely from within the token tree of the macro call.
+                    // i.e. we don't want to match something that came from the macro itself.
+                    self.slow_scan_node(
+                        &expanded,
+                        rule,
+                        &Some(self.sema.original_range(tt.syntax())),
+                        matches_out,
+                    );
+                }
+            }
+        }
+        for child in code.children() {
+            self.slow_scan_node(&child, rule, restrict_range, matches_out);
+        }
+    }
+
+    // Attempts a single match of `rule` against `code`, pushing it on success.
+    fn try_add_match(
+        &self,
+        rule: &ResolvedRule,
+        code: &SyntaxNode,
+        restrict_range: &Option<FileRange>,
+        matches_out: &mut Vec<Match>,
+    ) {
+        if !self.within_range_restrictions(code) {
+            mark::hit!(replace_nonpath_within_selection);
+            return;
+        }
+        if let Ok(m) = matching::get_match(false, rule, code, restrict_range, &self.sema) {
+            matches_out.push(m);
+        }
+    }
+
+    /// Returns whether `code` is within one of our range restrictions if we have any. No range
+    /// restrictions is considered unrestricted and always returns true.
+    fn within_range_restrictions(&self, code: &SyntaxNode) -> bool {
+        if self.restrict_ranges.is_empty() {
+            // There is no range restriction.
+            return true;
+        }
+        let node_range = self.sema.original_range(code);
+        for range in &self.restrict_ranges {
+            if range.file_id == node_range.file_id && range.range.contains_range(node_range.range) {
+                return true;
+            }
+        }
+        false
+    }
+}
+
+/// Returns whether we support matching within `node` and all of its ancestors.
+fn is_search_permitted_ancestors(node: &SyntaxNode) -> bool {
+    if let Some(parent) = node.parent() {
+        if !is_search_permitted_ancestors(&parent) {
+            return false;
+        }
+    }
+    is_search_permitted(node)
+}
+
+/// Returns whether we support matching within this kind of node.
+fn is_search_permitted(node: &SyntaxNode) -> bool {
+    // FIXME: Properly handle use declarations. At the moment, if our search pattern is `foo::bar`
+    // and the code is `use foo::{baz, bar}`, we'll match `bar`, since it resolves to `foo::bar`.
+    // However we'll then replace just the part we matched `bar`. We probably need to instead remove
+    // `bar` and insert a new use declaration.
+    node.kind() != SyntaxKind::USE
+}
+
+impl UsageCache {
+    /// Looks up previously-cached references for `definition`.
+    fn find(&mut self, definition: &Definition) -> Option<&[Reference]> {
+        // We expect a very small number of cache entries (generally 1), so a linear scan should be
+        // fast enough and avoids the need to implement Hash for Definition.
+        for (d, refs) in &self.usages {
+            if d == definition {
+                return Some(refs);
+            }
+        }
+        None
+    }
+}
+
+/// Returns a path that's suitable for path resolution. We exclude builtin types, since they aren't
+/// something that we can find references to. We then somewhat arbitrarily pick the path that is the
+/// longest as this is hopefully more likely to be less common, making it faster to find.
+fn pick_path_for_usages(pattern: &ResolvedPattern) -> Option<&ResolvedPath> {
+    // FIXME: Take the scope of the resolved path into account. e.g. if there are any paths that are
+    // private to the current module, then we definitely would want to pick them over say a path
+    // from std. Possibly we should go further than this and intersect the search scopes for all
+    // resolved paths then search only in that scope.
+    pattern
+        .resolved_paths
+        .iter()
+        .filter(|(_, p)| {
+            !matches!(p.resolution, hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_)))
+        })
+        .map(|(node, resolved)| (node.text().len(), resolved))
+        .max_by(|(a, _), (b, _)| a.cmp(b))
+        .map(|(_, resolved)| resolved)
+}
diff --git a/crates/ssr/src/tests.rs b/crates/ssr/src/tests.rs
new file mode 100644 (file)
index 0000000..0d0a000
--- /dev/null
@@ -0,0 +1,1174 @@
+use crate::{MatchFinder, SsrRule};
+use base_db::{salsa::Durability, FileId, FilePosition, FileRange, SourceDatabaseExt};
+use expect::{expect, Expect};
+use rustc_hash::FxHashSet;
+use std::sync::Arc;
+use test_utils::{mark, RangeOrOffset};
+
+// Parses `query` as an SSR rule and returns the resulting error message; panics if parsing
+// unexpectedly succeeds.
+fn parse_error_text(query: &str) -> String {
+    format!("{}", query.parse::<SsrRule>().unwrap_err())
+}
+
+// Tests for the error messages produced when parsing malformed SSR rules.
+#[test]
+fn parser_empty_query() {
+    assert_eq!(parse_error_text(""), "Parse error: Cannot find delimiter `==>>`");
+}
+
+#[test]
+fn parser_no_delimiter() {
+    assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delimiter `==>>`");
+}
+
+#[test]
+fn parser_two_delimiters() {
+    assert_eq!(
+        parse_error_text("foo() ==>> a ==>> b "),
+        "Parse error: More than one delimiter found"
+    );
+}
+
+#[test]
+fn parser_repeated_name() {
+    assert_eq!(
+        parse_error_text("foo($a, $a) ==>>"),
+        "Parse error: Name `a` repeats more than once"
+    );
+}
+
+#[test]
+fn parser_invalid_pattern() {
+    assert_eq!(
+        parse_error_text(" ==>> ()"),
+        "Parse error: Not a valid Rust expression, type, item, path or pattern"
+    );
+}
+
+#[test]
+fn parser_invalid_template() {
+    assert_eq!(
+        parse_error_text("() ==>> )"),
+        "Parse error: Not a valid Rust expression, type, item, path or pattern"
+    );
+}
+
+#[test]
+fn parser_undefined_placeholder_in_replacement() {
+    assert_eq!(
+        parse_error_text("42 ==>> $a"),
+        "Parse error: Replacement contains undefined placeholders: $a"
+    );
+}
+
+/// `code` may optionally contain a cursor marker `<|>`. If it doesn't, then the position will be
+/// the start of the file. If there's a second cursor marker, then we'll return a single range.
+pub(crate) fn single_file(code: &str) -> (ide_db::RootDatabase, FilePosition, Vec<FileRange>) {
+    use base_db::fixture::WithFixture;
+    use ide_db::symbol_index::SymbolsDatabase;
+    let (mut db, file_id, range_or_offset) = if code.contains(test_utils::CURSOR_MARKER) {
+        ide_db::RootDatabase::with_range_or_offset(code)
+    } else {
+        let (db, file_id) = ide_db::RootDatabase::with_single_file(code);
+        (db, file_id, RangeOrOffset::Offset(0.into()))
+    };
+    let selections;
+    let position;
+    match range_or_offset {
+        RangeOrOffset::Range(range) => {
+            // Two markers: the position is the range's start and the range itself is a selection.
+            position = FilePosition { file_id, offset: range.start() };
+            selections = vec![FileRange { file_id, range: range }];
+        }
+        RangeOrOffset::Offset(offset) => {
+            position = FilePosition { file_id, offset };
+            selections = vec![];
+        }
+    }
+    // Register the fixture's workspace as a local root so searches cover it.
+    let mut local_roots = FxHashSet::default();
+    local_roots.insert(base_db::fixture::WORKSPACE);
+    db.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
+    (db, position, selections)
+}
+
+// Applies a single SSR rule to `input` and checks the result against `expected`.
+fn assert_ssr_transform(rule: &str, input: &str, expected: Expect) {
+    assert_ssr_transforms(&[rule], input, expected);
+}
+
+// Applies all `rules` to `input` and checks the edited text of the file at the cursor
+// position against `expected`.
+fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
+    let (db, position, selections) = single_file(input);
+    let mut match_finder = MatchFinder::in_context(&db, position, selections);
+    for rule in rules {
+        let rule: SsrRule = rule.parse().unwrap();
+        match_finder.add_rule(rule).unwrap();
+    }
+    let edits = match_finder.edits();
+    if edits.is_empty() {
+        panic!("No edits were made");
+    }
+    assert_eq!(edits[0].file_id, position.file_id);
+    // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters
+    // stuff.
+    let mut actual = db.file_text(position.file_id).to_string();
+    edits[0].edit.apply(&mut actual);
+    expected.assert_eq(&actual);
+}
+
+// Prints why each node whose text equals `snippet` did or didn't match, to aid debugging
+// failing assertions.
+fn print_match_debug_info(match_finder: &MatchFinder, file_id: FileId, snippet: &str) {
+    let debug_info = match_finder.debug_where_text_equal(file_id, snippet);
+    println!(
+        "Match debug info: {} nodes had text exactly equal to '{}'",
+        debug_info.len(),
+        snippet
+    );
+    for (index, d) in debug_info.iter().enumerate() {
+        println!("Node #{}\n{:#?}\n", index, d);
+    }
+}
+
+// Searches `code` with `pattern` and checks that exactly the `expected` snippets matched.
+fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
+    let (db, position, selections) = single_file(code);
+    let mut match_finder = MatchFinder::in_context(&db, position, selections);
+    match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
+    let matched_strings: Vec<String> =
+        match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
+    if matched_strings != expected && !expected.is_empty() {
+        // Dump debug info for the first expected snippet before the assertion fails below.
+        print_match_debug_info(&match_finder, position.file_id, &expected[0]);
+    }
+    assert_eq!(matched_strings, expected);
+}
+
+// Searches `code` with `pattern` and checks that nothing matched.
+fn assert_no_match(pattern: &str, code: &str) {
+    let (db, position, selections) = single_file(code);
+    let mut match_finder = MatchFinder::in_context(&db, position, selections);
+    match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
+    let matches = match_finder.matches().flattened().matches;
+    if !matches.is_empty() {
+        print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text());
+        panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches);
+    }
+}
+
+// Checks that the nodes in `code` whose text equals `snippet` failed to match `pattern` for
+// exactly `expected_reason`.
+fn assert_match_failure_reason(pattern: &str, code: &str, snippet: &str, expected_reason: &str) {
+    let (db, position, selections) = single_file(code);
+    let mut match_finder = MatchFinder::in_context(&db, position, selections);
+    match_finder.add_search_pattern(pattern.parse().unwrap()).unwrap();
+    let mut reasons = Vec::new();
+    for d in match_finder.debug_where_text_equal(position.file_id, snippet) {
+        if let Some(reason) = d.match_failure_reason() {
+            reasons.push(reason.to_owned());
+        }
+    }
+    assert_eq!(reasons, vec![expected_reason]);
+}
+
+// End-to-end replacement tests: apply a rule and compare the transformed file text.
+#[test]
+fn ssr_function_to_method() {
+    assert_ssr_transform(
+        "my_function($a, $b) ==>> ($a).my_method($b)",
+        "fn my_function() {} fn main() { loop { my_function( other_func(x, y), z + w) } }",
+        expect![["fn my_function() {} fn main() { loop { (other_func(x, y)).my_method(z + w) } }"]],
+    )
+}
+
+#[test]
+fn ssr_nested_function() {
+    assert_ssr_transform(
+        "foo($a, $b, $c) ==>> bar($c, baz($a, $b))",
+        r#"
+            //- /lib.rs crate:foo
+            fn foo() {}
+            fn bar() {}
+            fn baz() {}
+            fn main { foo  (x + value.method(b), x+y-z, true && false) }
+            "#,
+        expect![[r#"
+            fn foo() {}
+            fn bar() {}
+            fn baz() {}
+            fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }
+        "#]],
+    )
+}
+
+#[test]
+fn ssr_expected_spacing() {
+    assert_ssr_transform(
+        "foo($x) + bar() ==>> bar($x)",
+        "fn foo() {} fn bar() {} fn main() { foo(5) + bar() }",
+        expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
+    );
+}
+
+#[test]
+fn ssr_with_extra_space() {
+    assert_ssr_transform(
+        "foo($x  ) +    bar() ==>> bar($x)",
+        "fn foo() {} fn bar() {} fn main() { foo(  5 )  +bar(   ) }",
+        expect![["fn foo() {} fn bar() {} fn main() { bar(5) }"]],
+    );
+}
+
+#[test]
+fn ssr_keeps_nested_comment() {
+    assert_ssr_transform(
+        "foo($x) ==>> bar($x)",
+        "fn foo() {} fn bar() {} fn main() { foo(other(5 /* using 5 */)) }",
+        expect![["fn foo() {} fn bar() {} fn main() { bar(other(5 /* using 5 */)) }"]],
+    )
+}
+
+#[test]
+fn ssr_keeps_comment() {
+    assert_ssr_transform(
+        "foo($x) ==>> bar($x)",
+        "fn foo() {} fn bar() {} fn main() { foo(5 /* using 5 */) }",
+        expect![["fn foo() {} fn bar() {} fn main() { bar(5)/* using 5 */ }"]],
+    )
+}
+
+#[test]
+fn ssr_struct_lit() {
+    assert_ssr_transform(
+        "Foo{a: $a, b: $b} ==>> Foo::new($a, $b)",
+        r#"
+            struct Foo() {}
+            impl Foo { fn new() {} }
+            fn main() { Foo{b:2, a:1} }
+            "#,
+        expect![[r#"
+            struct Foo() {}
+            impl Foo { fn new() {} }
+            fn main() { Foo::new(1, 2) }
+        "#]],
+    )
+}
+
+#[test]
+fn ignores_whitespace() {
+    assert_matches("1+2", "fn f() -> i32 {1  +  2}", &["1  +  2"]);
+    assert_matches("1 + 2", "fn f() -> i32 {1+2}", &["1+2"]);
+}
+
+#[test]
+fn no_match() {
+    assert_no_match("1 + 3", "fn f() -> i32 {1  +  2}");
+}
+
+#[test]
+fn match_fn_definition() {
+    assert_matches("fn $a($b: $t) {$c}", "fn f(a: i32) {bar()}", &["fn f(a: i32) {bar()}"]);
+}
+
+#[test]
+fn match_struct_definition() {
+    let code = r#"
+        struct Option<T> {}
+        struct Bar {}
+        struct Foo {name: Option<String>}"#;
+    assert_matches("struct $n {$f: Option<String>}", code, &["struct Foo {name: Option<String>}"]);
+}
+
+#[test]
+fn match_expr() {
+    let code = r#"
+        fn foo() {}
+        fn f() -> i32 {foo(40 + 2, 42)}"#;
+    assert_matches("foo($a, $b)", code, &["foo(40 + 2, 42)"]);
+    assert_no_match("foo($a, $b, $c)", code);
+    assert_no_match("foo($a)", code);
+}
+
+#[test]
+fn match_nested_method_calls() {
+    assert_matches(
+        "$a.z().z().z()",
+        "fn f() {h().i().j().z().z().z().d().e()}",
+        &["h().i().j().z().z().z()"],
+    );
+}
+
+// Make sure that our node matching semantics don't differ within macro calls.
+#[test]
+fn match_nested_method_calls_with_macro_call() {
+    assert_matches(
+        "$a.z().z().z()",
+        r#"
+            macro_rules! m1 { ($a:expr) => {$a}; }
+            fn f() {m1!(h().i().j().z().z().z().d().e())}"#,
+        &["h().i().j().z().z().z()"],
+    );
+}
+
+#[test]
+fn match_complex_expr() {
+    let code = r#"
+        fn foo() {} fn bar() {}
+        fn f() -> i32 {foo(bar(40, 2), 42)}"#;
+    assert_matches("foo($a, $b)", code, &["foo(bar(40, 2), 42)"]);
+    assert_no_match("foo($a, $b, $c)", code);
+    assert_no_match("foo($a)", code);
+    assert_matches("bar($a, $b)", code, &["bar(40, 2)"]);
+}
+
+// Trailing commas in the code should be ignored.
+#[test]
+fn match_with_trailing_commas() {
+    // Code has comma, pattern doesn't.
+    assert_matches("foo($a, $b)", "fn foo() {} fn f() {foo(1, 2,);}", &["foo(1, 2,)"]);
+    assert_matches("Foo{$a, $b}", "struct Foo {} fn f() {Foo{1, 2,};}", &["Foo{1, 2,}"]);
+
+    // Pattern has comma, code doesn't.
+    assert_matches("foo($a, $b,)", "fn foo() {} fn f() {foo(1, 2);}", &["foo(1, 2)"]);
+    assert_matches("Foo{$a, $b,}", "struct Foo {} fn f() {Foo{1, 2};}", &["Foo{1, 2}"]);
+}
+
+#[test]
+fn match_type() {
+    assert_matches("i32", "fn f() -> i32 {1  +  2}", &["i32"]);
+    assert_matches(
+        "Option<$a>",
+        "struct Option<T> {} fn f() -> Option<i32> {42}",
+        &["Option<i32>"],
+    );
+    assert_no_match(
+        "Option<$a>",
+        "struct Option<T> {} struct Result<T, E> {} fn f() -> Result<i32, ()> {42}",
+    );
+}
+
+#[test]
+fn match_struct_instantiation() {
+    let code = r#"
+        struct Foo {bar: i32, baz: i32}
+        fn f() {Foo {bar: 1, baz: 2}}"#;
+    assert_matches("Foo {bar: 1, baz: 2}", code, &["Foo {bar: 1, baz: 2}"]);
+    // Now with placeholders for all parts of the struct.
+    assert_matches("Foo {$a: $b, $c: $d}", code, &["Foo {bar: 1, baz: 2}"]);
+    assert_matches("Foo {}", "struct Foo {} fn f() {Foo {}}", &["Foo {}"]);
+}
+
+#[test]
+fn match_path() {
+    let code = r#"
+        mod foo {
+            pub fn bar() {}
+        }
+        fn f() {foo::bar(42)}"#;
+    assert_matches("foo::bar", code, &["foo::bar"]);
+    assert_matches("$a::bar", code, &["foo::bar"]);
+    assert_matches("foo::$b", code, &["foo::bar"]);
+}
+
+#[test]
+fn match_pattern() {
+    assert_matches("Some($a)", "struct Some(); fn f() {if let Some(x) = foo() {}}", &["Some(x)"]);
+}
+
+// If our pattern has a full path, e.g. a::b::c() and the code has c(), but c resolves to
+// a::b::c, then we should match.
+#[test]
+fn match_fully_qualified_fn_path() {
+    let code = r#"
+        mod a {
+            pub mod b {
+                pub fn c(_: i32) {}
+            }
+        }
+        use a::b::c;
+        fn f1() {
+            c(42);
+        }
+        "#;
+    assert_matches("a::b::c($a)", code, &["c(42)"]);
+}
+
+#[test]
+fn match_resolved_type_name() {
+    let code = r#"
+        mod m1 {
+            pub mod m2 {
+                pub trait Foo<T> {}
+            }
+        }
+        mod m3 {
+            trait Foo<T> {}
+            fn f1(f: Option<&dyn Foo<bool>>) {}
+        }
+        mod m4 {
+            use crate::m1::m2::Foo;
+            fn f1(f: Option<&dyn Foo<i32>>) {}
+        }
+        "#;
+    assert_matches("m1::m2::Foo<$t>", code, &["Foo<i32>"]);
+}
+
+#[test]
+fn type_arguments_within_path() {
+    mark::check!(type_arguments_within_path);
+    let code = r#"
+        mod foo {
+            pub struct Bar<T> {t: T}
+            impl<T> Bar<T> {
+                pub fn baz() {}
+            }
+        }
+        fn f1() {foo::Bar::<i32>::baz();}
+        "#;
+    assert_no_match("foo::Bar::<i64>::baz()", code);
+    assert_matches("foo::Bar::<i32>::baz()", code, &["foo::Bar::<i32>::baz()"]);
+}
+
+#[test]
+fn literal_constraint() {
+    mark::check!(literal_constraint);
+    let code = r#"
+        enum Option<T> { Some(T), None }
+        use Option::Some;
+        fn f1() {
+            let x1 = Some(42);
+            let x2 = Some("foo");
+            let x3 = Some(x1);
+            let x4 = Some(40 + 2);
+            let x5 = Some(true);
+        }
+        "#;
+    assert_matches("Some(${a:kind(literal)})", code, &["Some(42)", "Some(\"foo\")", "Some(true)"]);
+    assert_matches("Some(${a:not(kind(literal))})", code, &["Some(x1)", "Some(40 + 2)"]);
+}
+
+#[test]
+fn match_reordered_struct_instantiation() {
+    assert_matches(
+        "Foo {aa: 1, b: 2, ccc: 3}",
+        "struct Foo {} fn f() {Foo {b: 2, ccc: 3, aa: 1}}",
+        &["Foo {b: 2, ccc: 3, aa: 1}"],
+    );
+    assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {b: 1}}");
+    assert_no_match("Foo {a: 1}", "struct Foo {} fn f() {Foo {a: 2}}");
+    assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {a: 1}}");
+    assert_no_match("Foo {a: 1, b: 2}", "struct Foo {} fn f() {Foo {b: 2}}");
+    assert_no_match("Foo {a: 1, }", "struct Foo {} fn f() {Foo {a: 1, b: 2}}");
+    assert_no_match("Foo {a: 1, z: 9}", "struct Foo {} fn f() {Foo {a: 1}}");
+}
+
+#[test]
+fn match_macro_invocation() {
+    assert_matches(
+        "foo!($a)",
+        "macro_rules! foo {() => {}} fn() {foo(foo!(foo()))}",
+        &["foo!(foo())"],
+    );
+    assert_matches(
+        "foo!(41, $a, 43)",
+        "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43)}",
+        &["foo!(41, 42, 43)"],
+    );
+    assert_no_match("foo!(50, $a, 43)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}");
+    assert_no_match("foo!(41, $a, 50)", "macro_rules! foo {() => {}} fn() {foo!(41, 42, 43}");
+    assert_matches(
+        "foo!($a())",
+        "macro_rules! foo {() => {}} fn() {foo!(bar())}",
+        &["foo!(bar())"],
+    );
+}
+
+// When matching within a macro expansion, we only allow matches of nodes that originated from
+// the macro call, not from the macro definition.
+#[test]
+fn no_match_expression_from_macro() {
+    assert_no_match(
+        "$a.clone()",
+        r#"
+            macro_rules! m1 {
+                () => {42.clone()}
+            }
+            fn f1() {m1!()}
+            "#,
+    );
+}
+
+// We definitely don't want to allow matching of an expression where part originates from the
+// macro call `42` and part from the macro definition `.clone()`.
+#[test]
+fn no_match_split_expression() {
+    assert_no_match(
+        "$a.clone()",
+        r#"
+            macro_rules! m1 {
+                ($x:expr) => {$x.clone()}
+            }
+            fn f1() {m1!(42)}
+            "#,
+    );
+}
+
+#[test]
+fn replace_function_call() {
+    // This test also makes sure that we ignore empty-ranges.
+    assert_ssr_transform(
+        "foo() ==>> bar()",
+        "fn foo() {<|><|>} fn bar() {} fn f1() {foo(); foo();}",
+        expect![["fn foo() {} fn bar() {} fn f1() {bar(); bar();}"]],
+    );
+}
+
+#[test]
+fn replace_function_call_with_placeholders() {
+    assert_ssr_transform(
+        "foo($a, $b) ==>> bar($b, $a)",
+        "fn foo() {} fn bar() {} fn f1() {foo(5, 42)}",
+        expect![["fn foo() {} fn bar() {} fn f1() {bar(42, 5)}"]],
+    );
+}
+
+#[test]
+fn replace_nested_function_calls() {
+    assert_ssr_transform(
+        "foo($a) ==>> bar($a)",
+        "fn foo() {} fn bar() {} fn f1() {foo(foo(42))}",
+        expect![["fn foo() {} fn bar() {} fn f1() {bar(bar(42))}"]],
+    );
+}
+
+#[test]
+fn replace_associated_function_call() {
+    assert_ssr_transform(
+        "Foo::new() ==>> Bar::new()",
+        r#"
+            struct Foo {}
+            impl Foo { fn new() {} }
+            struct Bar {}
+            impl Bar { fn new() {} }
+            fn f1() {Foo::new();}
+            "#,
+        expect![[r#"
+            struct Foo {}
+            impl Foo { fn new() {} }
+            struct Bar {}
+            impl Bar { fn new() {} }
+            fn f1() {Bar::new();}
+        "#]],
+    );
+}
+
+#[test]
+fn replace_associated_trait_default_function_call() {
+    mark::check!(replace_associated_trait_default_function_call);
+    assert_ssr_transform(
+        "Bar2::foo() ==>> Bar2::foo2()",
+        r#"
+            trait Foo { fn foo() {} }
+            pub struct Bar {}
+            impl Foo for Bar {}
+            pub struct Bar2 {}
+            impl Foo for Bar2 {}
+            impl Bar2 { fn foo2() {} }
+            fn main() {
+                Bar::foo();
+                Bar2::foo();
+            }
+        "#,
+        expect![[r#"
+            trait Foo { fn foo() {} }
+            pub struct Bar {}
+            impl Foo for Bar {}
+            pub struct Bar2 {}
+            impl Foo for Bar2 {}
+            impl Bar2 { fn foo2() {} }
+            fn main() {
+                Bar::foo();
+                Bar2::foo2();
+            }
+        "#]],
+    );
+}
+
+#[test]
+fn replace_associated_trait_constant() {
+    mark::check!(replace_associated_trait_constant);
+    assert_ssr_transform(
+        "Bar2::VALUE ==>> Bar2::VALUE_2222",
+        r#"
+            trait Foo { const VALUE: i32; const VALUE_2222: i32; }
+            pub struct Bar {}
+            impl Foo for Bar { const VALUE: i32 = 1;  const VALUE_2222: i32 = 2; }
+            pub struct Bar2 {}
+            impl Foo for Bar2 { const VALUE: i32 = 1;  const VALUE_2222: i32 = 2; }
+            impl Bar2 { fn foo2() {} }
+            fn main() {
+                Bar::VALUE;
+                Bar2::VALUE;
+            }
+            "#,
+        expect![[r#"
+            trait Foo { const VALUE: i32; const VALUE_2222: i32; }
+            pub struct Bar {}
+            impl Foo for Bar { const VALUE: i32 = 1;  const VALUE_2222: i32 = 2; }
+            pub struct Bar2 {}
+            impl Foo for Bar2 { const VALUE: i32 = 1;  const VALUE_2222: i32 = 2; }
+            impl Bar2 { fn foo2() {} }
+            fn main() {
+                Bar::VALUE;
+                Bar2::VALUE_2222;
+            }
+        "#]],
+    );
+}
+
+#[test]
+fn replace_path_in_different_contexts() {
+    // Note the <|> inside module a::b which marks the point where the rule is interpreted. We
+    // replace foo with bar, but both need different path qualifiers in different contexts. In f4,
+    // foo is unqualified because of a use statement, however the replacement needs to be fully
+    // qualified.
+    assert_ssr_transform(
+        "c::foo() ==>> c::bar()",
+        r#"
+            mod a {
+                pub mod b {<|>
+                    pub mod c {
+                        pub fn foo() {}
+                        pub fn bar() {}
+                        fn f1() { foo() }
+                    }
+                    fn f2() { c::foo() }
+                }
+                fn f3() { b::c::foo() }
+            }
+            use a::b::c::foo;
+            fn f4() { foo() }
+            "#,
+        expect![[r#"
+            mod a {
+                pub mod b {
+                    pub mod c {
+                        pub fn foo() {}
+                        pub fn bar() {}
+                        fn f1() { bar() }
+                    }
+                    fn f2() { c::bar() }
+                }
+                fn f3() { b::c::bar() }
+            }
+            use a::b::c::foo;
+            fn f4() { a::b::c::bar() }
+            "#]],
+    );
+}
+
+#[test]
+fn replace_associated_function_with_generics() {
+    assert_ssr_transform(
+        "c::Foo::<$a>::new() ==>> d::Bar::<$a>::default()",
+        r#"
+            mod c {
+                pub struct Foo<T> {v: T}
+                impl<T> Foo<T> { pub fn new() {} }
+                fn f1() {
+                    Foo::<i32>::new();
+                }
+            }
+            mod d {
+                pub struct Bar<T> {v: T}
+                impl<T> Bar<T> { pub fn default() {} }
+                fn f1() {
+                    super::c::Foo::<i32>::new();
+                }
+            }
+            "#,
+        expect![[r#"
+            mod c {
+                pub struct Foo<T> {v: T}
+                impl<T> Foo<T> { pub fn new() {} }
+                fn f1() {
+                    crate::d::Bar::<i32>::default();
+                }
+            }
+            mod d {
+                pub struct Bar<T> {v: T}
+                impl<T> Bar<T> { pub fn default() {} }
+                fn f1() {
+                    Bar::<i32>::default();
+                }
+            }
+            "#]],
+    );
+}
+
+#[test]
+fn replace_type() {
+    assert_ssr_transform(
+        "Result<(), $a> ==>> Option<$a>",
+        "struct Result<T, E> {} struct Option<T> {} fn f1() -> Result<(), Vec<Error>> {foo()}",
+        expect![[
+            "struct Result<T, E> {} struct Option<T> {} fn f1() -> Option<Vec<Error>> {foo()}"
+        ]],
+    );
+}
+
+#[test]
+fn replace_macro_invocations() {
+    assert_ssr_transform(
+        "try!($a) ==>> $a?",
+        "macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(try!(foo()));}",
+        expect![["macro_rules! try {() => {}} fn f1() -> Result<(), E> {bar(foo()?);}"]],
+    );
+    assert_ssr_transform(
+        "foo!($a($b)) ==>> foo($b, $a)",
+        "macro_rules! foo {() => {}} fn f1() {foo!(abc(def() + 2));}",
+        expect![["macro_rules! foo {() => {}} fn f1() {foo(def() + 2, abc);}"]],
+    );
+}
+
+#[test]
+fn replace_binary_op() {
+    assert_ssr_transform(
+        "$a + $b ==>> $b + $a",
+        "fn f() {2 * 3 + 4 * 5}",
+        expect![["fn f() {4 * 5 + 2 * 3}"]],
+    );
+    assert_ssr_transform(
+        "$a + $b ==>> $b + $a",
+        "fn f() {1 + 2 + 3 + 4}",
+        expect![[r#"fn f() {4 + (3 + (2 + 1))}"#]],
+    );
+}
+
+#[test]
+fn match_binary_op() {
+    assert_matches("$a + $b", "fn f() {1 + 2 + 3 + 4}", &["1 + 2", "1 + 2 + 3", "1 + 2 + 3 + 4"]);
+}
+
+#[test]
+fn multiple_rules() {
+    assert_ssr_transforms(
+        &["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"],
+        "fn add() {} fn add_one() {} fn f() -> i32 {3 + 2 + 1}",
+        expect![["fn add() {} fn add_one() {} fn f() -> i32 {add_one(add(3, 2))}"]],
+    )
+}
+
+#[test]
+fn multiple_rules_with_nested_matches() {
+    assert_ssr_transforms(
+        &["foo1($a) ==>> bar1($a)", "foo2($a) ==>> bar2($a)"],
+        r#"
+            fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
+            fn f() {foo1(foo2(foo1(foo2(foo1(42)))))}
+            "#,
+        expect![[r#"
+            fn foo1() {} fn foo2() {} fn bar1() {} fn bar2() {}
+            fn f() {bar1(bar2(bar1(bar2(bar1(42)))))}
+        "#]],
+    )
+}
+
+#[test]
+fn match_within_macro_invocation() {
+    let code = r#"
+            macro_rules! foo {
+                ($a:stmt; $b:expr) => {
+                    $b
+                };
+            }
+            struct A {}
+            impl A {
+                fn bar() {}
+            }
+            fn f1() {
+                let aaa = A {};
+                foo!(macro_ignores_this(); aaa.bar());
+            }
+        "#;
+    assert_matches("$a.bar()", code, &["aaa.bar()"]);
+}
+
+#[test]
+fn replace_within_macro_expansion() {
+    assert_ssr_transform(
+        "$a.foo() ==>> bar($a)",
+        r#"
+            macro_rules! macro1 {
+                ($a:expr) => {$a}
+            }
+            fn bar() {}
+            fn f() {macro1!(5.x().foo().o2())}
+            "#,
+        expect![[r#"
+            macro_rules! macro1 {
+                ($a:expr) => {$a}
+            }
+            fn bar() {}
+            fn f() {macro1!(bar(5.x()).o2())}
+            "#]],
+    )
+}
+
+#[test]
+fn replace_outside_and_within_macro_expansion() {
+    assert_ssr_transform(
+        "foo($a) ==>> bar($a)",
+        r#"
+            fn foo() {} fn bar() {}
+            macro_rules! macro1 {
+                ($a:expr) => {$a}
+            }
+            fn f() {foo(foo(macro1!(foo(foo(42)))))}
+            "#,
+        expect![[r#"
+            fn foo() {} fn bar() {}
+            macro_rules! macro1 {
+                ($a:expr) => {$a}
+            }
+            fn f() {bar(bar(macro1!(bar(bar(42)))))}
+        "#]],
+    )
+}
+
+#[test]
+fn preserves_whitespace_within_macro_expansion() {
+    assert_ssr_transform(
+        "$a + $b ==>> $b - $a",
+        r#"
+            macro_rules! macro1 {
+                ($a:expr) => {$a}
+            }
+            fn f() {macro1!(1   *   2 + 3 + 4}
+            "#,
+        expect![[r#"
+            macro_rules! macro1 {
+                ($a:expr) => {$a}
+            }
+            fn f() {macro1!(4 - (3 - 1   *   2)}
+            "#]],
+    )
+}
+
+#[test]
+fn add_parenthesis_when_necessary() {
+    assert_ssr_transform(
+        "foo($a) ==>> $a.to_string()",
+        r#"
+        fn foo(_: i32) {}
+        fn bar3(v: i32) {
+            foo(1 + 2);
+            foo(-v);
+        }
+        "#,
+        expect![[r#"
+            fn foo(_: i32) {}
+            fn bar3(v: i32) {
+                (1 + 2).to_string();
+                (-v).to_string();
+            }
+        "#]],
+    )
+}
+
+#[test]
+fn match_failure_reasons() {
+    let code = r#"
+        fn bar() {}
+        macro_rules! foo {
+            ($a:expr) => {
+                1 + $a + 2
+            };
+        }
+        fn f1() {
+            bar(1, 2);
+            foo!(5 + 43.to_string() + 5);
+        }
+        "#;
+    assert_match_failure_reason(
+        "bar($a, 3)",
+        code,
+        "bar(1, 2)",
+        r#"Pattern wanted token '3' (INT_NUMBER), but code had token '2' (INT_NUMBER)"#,
+    );
+    assert_match_failure_reason(
+        "42.to_string()",
+        code,
+        "43.to_string()",
+        r#"Pattern wanted token '42' (INT_NUMBER), but code had token '43' (INT_NUMBER)"#,
+    );
+}
+
+#[test]
+fn overlapping_possible_matches() {
+    // There are three possible matches here, however the middle one, `foo(foo(foo(42)))` shouldn't
+    // match because it overlaps with the outer match. The inner match is permitted since it is
+    // contained entirely within the placeholder of the outer match.
+    assert_matches(
+        "foo(foo($a))",
+        "fn foo() {} fn main() {foo(foo(foo(foo(42))))}",
+        &["foo(foo(42))", "foo(foo(foo(foo(42))))"],
+    );
+}
+
+#[test]
+fn use_declaration_with_braces() {
+    // It would be OK for a path rule to match and alter a use declaration. We shouldn't mess it up
+    // though. In particular, we must not change `use foo::{baz, bar}` to `use foo::{baz,
+    // foo2::bar2}`.
+    mark::check!(use_declaration_with_braces);
+    assert_ssr_transform(
+        "foo::bar ==>> foo2::bar2",
+        r#"
+        mod foo { pub fn bar() {} pub fn baz() {} }
+        mod foo2 { pub fn bar2() {} }
+        use foo::{baz, bar};
+        fn main() { bar() }
+        "#,
+        expect![["
+        mod foo { pub fn bar() {} pub fn baz() {} }
+        mod foo2 { pub fn bar2() {} }
+        use foo::{baz, bar};
+        fn main() { foo2::bar2() }
+        "]],
+    )
+}
+
+#[test]
+fn ufcs_matches_method_call() {
+    let code = r#"
+    struct Foo {}
+    impl Foo {
+        fn new(_: i32) -> Foo { Foo {} }
+        fn do_stuff(&self, _: i32) {}
+    }
+    struct Bar {}
+    impl Bar {
+        fn new(_: i32) -> Bar { Bar {} }
+        fn do_stuff(&self, v: i32) {}
+    }
+    fn main() {
+        let b = Bar {};
+        let f = Foo {};
+        b.do_stuff(1);
+        f.do_stuff(2);
+        Foo::new(4).do_stuff(3);
+        // Too many / too few args - should never match
+        f.do_stuff(2, 10);
+        f.do_stuff();
+    }
+    "#;
+    assert_matches("Foo::do_stuff($a, $b)", code, &["f.do_stuff(2)", "Foo::new(4).do_stuff(3)"]);
+    // The arguments need special handling in the case of a function call matching a method call
+    // and the first argument is different.
+    assert_matches("Foo::do_stuff($a, 2)", code, &["f.do_stuff(2)"]);
+    assert_matches("Foo::do_stuff(Foo::new(4), $b)", code, &["Foo::new(4).do_stuff(3)"]);
+
+    assert_ssr_transform(
+        "Foo::do_stuff(Foo::new($a), $b) ==>> Bar::new($b).do_stuff($a)",
+        code,
+        expect![[r#"
+            struct Foo {}
+            impl Foo {
+                fn new(_: i32) -> Foo { Foo {} }
+                fn do_stuff(&self, _: i32) {}
+            }
+            struct Bar {}
+            impl Bar {
+                fn new(_: i32) -> Bar { Bar {} }
+                fn do_stuff(&self, v: i32) {}
+            }
+            fn main() {
+                let b = Bar {};
+                let f = Foo {};
+                b.do_stuff(1);
+                f.do_stuff(2);
+                Bar::new(3).do_stuff(4);
+                // Too many / too few args - should never match
+                f.do_stuff(2, 10);
+                f.do_stuff();
+            }
+        "#]],
+    );
+}
+
+#[test]
+fn pattern_is_a_single_segment_path() {
+    mark::check!(pattern_is_a_single_segment_path);
+    // The first function should not be altered because the `foo` in scope at the cursor position is
+    // a different `foo`. This case is special because "foo" can be parsed as a pattern (IDENT_PAT ->
+    // NAME -> IDENT), which contains no path. If we're not careful we'll end up matching the `foo`
+    // in `let foo` from the first function. Whether we should match the `let foo` in the second
+    // function is less clear. At the moment, we don't. Doing so sounds like a rename operation,
+    // which isn't really what SSR is for, especially since the replacement `bar` must be able to be
+    // resolved, which means if we rename `foo` we'll get a name collision.
+    assert_ssr_transform(
+        "foo ==>> bar",
+        r#"
+        fn f1() -> i32 {
+            let foo = 1;
+            let bar = 2;
+            foo
+        }
+        fn f1() -> i32 {
+            let foo = 1;
+            let bar = 2;
+            foo<|>
+        }
+        "#,
+        expect![[r#"
+            fn f1() -> i32 {
+                let foo = 1;
+                let bar = 2;
+                foo
+            }
+            fn f1() -> i32 {
+                let foo = 1;
+                let bar = 2;
+                bar
+            }
+        "#]],
+    );
+}
+
+#[test]
+fn replace_local_variable_reference() {
+    // The pattern references a local variable `foo` in the block containing the cursor. We should
+    // only replace references to this variable `foo`, not other variables that just happen to have
+    // the same name.
+    mark::check!(cursor_after_semicolon);
+    assert_ssr_transform(
+        "foo + $a ==>> $a - foo",
+        r#"
+            fn bar1() -> i32 {
+                let mut res = 0;
+                let foo = 5;
+                res += foo + 1;
+                let foo = 10;
+                res += foo + 2;<|>
+                res += foo + 3;
+                let foo = 15;
+                res += foo + 4;
+                res
+            }
+            "#,
+        expect![[r#"
+            fn bar1() -> i32 {
+                let mut res = 0;
+                let foo = 5;
+                res += foo + 1;
+                let foo = 10;
+                res += 2 - foo;
+                res += 3 - foo;
+                let foo = 15;
+                res += foo + 4;
+                res
+            }
+        "#]],
+    )
+}
+
+#[test]
+fn replace_path_within_selection() {
+    assert_ssr_transform(
+        "foo ==>> bar",
+        r#"
+        fn main() {
+            let foo = 41;
+            let bar = 42;
+            do_stuff(foo);
+            do_stuff(foo);<|>
+            do_stuff(foo);
+            do_stuff(foo);<|>
+            do_stuff(foo);
+        }"#,
+        expect![[r#"
+            fn main() {
+                let foo = 41;
+                let bar = 42;
+                do_stuff(foo);
+                do_stuff(foo);
+                do_stuff(bar);
+                do_stuff(bar);
+                do_stuff(foo);
+            }"#]],
+    );
+}
+
+#[test]
+fn replace_nonpath_within_selection() {
+    mark::check!(replace_nonpath_within_selection);
+    assert_ssr_transform(
+        "$a + $b ==>> $b * $a",
+        r#"
+        fn main() {
+            let v = 1 + 2;<|>
+            let v2 = 3 + 3;
+            let v3 = 4 + 5;<|>
+            let v4 = 6 + 7;
+        }"#,
+        expect![[r#"
+            fn main() {
+                let v = 1 + 2;
+                let v2 = 3 * 3;
+                let v3 = 5 * 4;
+                let v4 = 6 + 7;
+            }"#]],
+    );
+}
+
+#[test]
+fn replace_self() {
+    // `foo(self)` occurs twice in the code, however only the first occurrence is the `self` that's
+    // in scope where the rule is invoked.
+    assert_ssr_transform(
+        "foo(self) ==>> bar(self)",
+        r#"
+        struct S1 {}
+        fn foo(_: &S1) {}
+        fn bar(_: &S1) {}
+        impl S1 {
+            fn f1(&self) {
+                foo(self)<|>
+            }
+            fn f2(&self) {
+                foo(self)
+            }
+        }
+        "#,
+        expect![[r#"
+            struct S1 {}
+            fn foo(_: &S1) {}
+            fn bar(_: &S1) {}
+            impl S1 {
+                fn f1(&self) {
+                    bar(self)
+                }
+                fn f2(&self) {
+                    foo(self)
+                }
+            }
+        "#]],
+    );
+}
+
+#[test]
+fn match_trait_method_call() {
+    // `Bar::foo` and `Bar2::foo` resolve to the same function. Make sure we only match if the type
+    // matches what's in the pattern. Also checks that we handle autoderef.
+    let code = r#"
+        pub struct Bar {}
+        pub struct Bar2 {}
+        pub trait Foo {
+            fn foo(&self, _: i32) {}
+        }
+        impl Foo for Bar {}
+        impl Foo for Bar2 {}
+        fn main() {
+            let v1 = Bar {};
+            let v2 = Bar2 {};
+            let v1_ref = &v1;
+            let v2_ref = &v2;
+            v1.foo(1);
+            v2.foo(2);
+            Bar::foo(&v1, 3);
+            Bar2::foo(&v2, 4);
+            v1_ref.foo(5);
+            v2_ref.foo(6);
+        }
+        "#;
+    assert_matches("Bar::foo($a, $b)", code, &["v1.foo(1)", "Bar::foo(&v1, 3)", "v1_ref.foo(5)"]);
+    assert_matches("Bar2::foo($a, $b)", code, &["v2.foo(2)", "Bar2::foo(&v2, 4)", "v2_ref.foo(6)"]);
+}