git.lizzy.rs Git - rust.git/commitdiff
clippy::redundant_borrow
author    Maan2003 <manmeetmann2003@gmail.com>
          Sun, 13 Jun 2021 03:54:16 +0000 (09:24 +0530)
committer Maan2003 <manmeetmann2003@gmail.com>
          Sun, 13 Jun 2021 03:54:16 +0000 (09:24 +0530)
114 files changed:
crates/base_db/src/fixture.rs
crates/hir/src/lib.rs
crates/hir/src/semantics.rs
crates/hir/src/source_analyzer.rs
crates/hir_def/src/body/lower.rs
crates/hir_def/src/body/scope.rs
crates/hir_def/src/generics.rs
crates/hir_def/src/item_tree/lower.rs
crates/hir_def/src/nameres/collector.rs
crates/hir_def/src/nameres/path_resolution.rs
crates/hir_def/src/path/lower.rs
crates/hir_def/src/resolver.rs
crates/hir_def/src/type_ref.rs
crates/hir_expand/src/builtin_macro.rs
crates/hir_expand/src/input.rs
crates/hir_expand/src/proc_macro.rs
crates/hir_ty/src/diagnostics/match_check/deconstruct_pat.rs
crates/hir_ty/src/diagnostics/match_check/usefulness.rs
crates/hir_ty/src/infer.rs
crates/hir_ty/src/infer/coerce.rs
crates/hir_ty/src/infer/expr.rs
crates/hir_ty/src/infer/pat.rs
crates/hir_ty/src/infer/path.rs
crates/hir_ty/src/interner.rs
crates/hir_ty/src/lower.rs
crates/hir_ty/src/method_resolution.rs
crates/ide/src/diagnostics.rs
crates/ide/src/diagnostics/fixes/create_field.rs
crates/ide/src/diagnostics/fixes/fill_missing_fields.rs
crates/ide/src/doc_links.rs
crates/ide/src/extend_selection.rs
crates/ide/src/goto_definition.rs
crates/ide/src/hover.rs
crates/ide/src/inlay_hints.rs
crates/ide/src/join_lines.rs
crates/ide/src/lib.rs
crates/ide/src/references.rs
crates/ide/src/references/rename.rs
crates/ide/src/runnables.rs
crates/ide/src/syntax_highlighting.rs
crates/ide/src/syntax_highlighting/highlight.rs
crates/ide/src/syntax_highlighting/html.rs
crates/ide/src/syntax_highlighting/inject.rs
crates/ide/src/typing/on_enter.rs
crates/ide_assists/src/handlers/convert_comment_block.rs
crates/ide_assists/src/handlers/early_return.rs
crates/ide_assists/src/handlers/extract_function.rs
crates/ide_assists/src/handlers/fill_match_arms.rs
crates/ide_assists/src/handlers/fix_visibility.rs
crates/ide_assists/src/handlers/generate_enum_is_method.rs
crates/ide_assists/src/handlers/generate_enum_projection_method.rs
crates/ide_assists/src/handlers/generate_function.rs
crates/ide_assists/src/handlers/generate_getter.rs
crates/ide_assists/src/handlers/generate_new.rs
crates/ide_assists/src/handlers/generate_setter.rs
crates/ide_assists/src/handlers/remove_dbg.rs
crates/ide_assists/src/handlers/reorder_fields.rs
crates/ide_assists/src/handlers/replace_derive_with_manual_impl.rs
crates/ide_assists/src/handlers/replace_if_let_with_match.rs
crates/ide_assists/src/handlers/wrap_return_type_in_result.rs
crates/ide_assists/src/tests.rs
crates/ide_assists/src/utils.rs
crates/ide_assists/src/utils/suggest_name.rs
crates/ide_completion/src/completions/dot.rs
crates/ide_completion/src/completions/postfix.rs
crates/ide_completion/src/completions/postfix/format_like.rs
crates/ide_completion/src/completions/qualified_path.rs
crates/ide_completion/src/context.rs
crates/ide_completion/src/patterns.rs
crates/ide_completion/src/render.rs
crates/ide_completion/src/render/pattern.rs
crates/ide_db/src/call_info.rs
crates/ide_db/src/helpers/import_assets.rs
crates/ide_db/src/helpers/merge_imports.rs
crates/ide_db/src/search.rs
crates/ide_ssr/src/matching.rs
crates/ide_ssr/src/replacing.rs
crates/ide_ssr/src/resolving.rs
crates/ide_ssr/src/search.rs
crates/ide_ssr/src/tests.rs
crates/mbe/src/expander/matcher.rs
crates/mbe/src/expander/transcriber.rs
crates/mbe/src/lib.rs
crates/mbe/src/parser.rs
crates/mbe/src/subtree_source.rs
crates/mbe/src/syntax_bridge.rs
crates/mbe/src/tests/expand.rs
crates/mbe/src/tt_iter.rs
crates/proc_macro_api/src/msg.rs
crates/project_model/src/build_data.rs
crates/project_model/src/cargo_workspace.rs
crates/project_model/src/sysroot.rs
crates/project_model/src/workspace.rs
crates/rust-analyzer/src/cargo_target_spec.rs
crates/rust-analyzer/src/cli/load_cargo.rs
crates/rust-analyzer/src/diagnostics.rs
crates/rust-analyzer/src/diagnostics/to_proto.rs
crates/rust-analyzer/src/global_state.rs
crates/rust-analyzer/src/handlers.rs
crates/rust-analyzer/src/lsp_utils.rs
crates/rust-analyzer/src/main_loop.rs
crates/rust-analyzer/src/to_proto.rs
crates/rust-analyzer/tests/slow-tests/main.rs
crates/rust-analyzer/tests/slow-tests/support.rs
crates/syntax/src/ast/edit.rs
crates/syntax/src/ast/token_ext.rs
crates/syntax/src/parsing.rs
crates/syntax/src/parsing/lexer.rs
crates/syntax/src/parsing/reparsing.rs
crates/syntax/src/tests.rs
crates/vfs/src/file_set.rs
xtask/src/codegen/gen_lint_completions.rs
xtask/src/codegen/gen_syntax.rs
xtask/src/metrics.rs

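Every hunk below makes the same shape of change: a `&` is dropped where the expression is already a reference (or coerces to one), so the extra borrow is only created to be immediately dereferenced again by the compiler. A minimal sketch of the before/after pattern, using illustrative names that do not appear in this commit:

    fn print_len(s: &str) {
        println!("{}", s.len());
    }

    fn main() {
        let name = String::from("rust-analyzer");
        let name_ref: &String = &name;

        // Before: `name_ref` is already a reference, so `&name_ref` adds a
        // layer (&&String) that the compiler auto-derefs straight back down
        // to &str -- this is what the lint flags as a redundant borrow.
        print_len(&name_ref);

        // After: pass the existing reference directly; deref coercion turns
        // &String into &str without the extra borrow.
        print_len(name_ref);
    }
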
index 69ceba735617b9e0eaf93d6b3bd11e4d1f7150ae..da4afb5ebcf7fa7797a9da4faa42d7346f7ae603 100644 (file)
@@ -190,7 +190,7 @@ fn from(f: Fixture) -> FileMeta {
             edition: f
                 .edition
                 .as_ref()
-                .map_or(Edition::Edition2018, |v| Edition::from_str(&v).unwrap()),
+                .map_or(Edition::Edition2018, |v| Edition::from_str(v).unwrap()),
             env: f.env.into_iter().collect(),
             introduce_new_source_root: f.introduce_new_source_root,
         }
index 2468c0dc61a77c7485e1f70e4b4a546ffc7c301c..f6eb2326215ec4fc062f2d4b533b5532e0357a89 100644 (file)
@@ -1112,7 +1112,7 @@ pub fn diagnostics(
                                     .collect();
                                 sink.push(MissingFields {
                                     file: source_ptr.file_id,
-                                    field_list_parent: AstPtr::new(&record_expr),
+                                    field_list_parent: AstPtr::new(record_expr),
                                     field_list_parent_path: record_expr
                                         .path()
                                         .map(|path| AstPtr::new(&path)),
@@ -2531,13 +2531,13 @@ fn walk_type(db: &dyn HirDatabase, type_: &Type, cb: &mut impl FnMut(Type)) {
             match ty.kind(&Interner) {
                 TyKind::Adt(_, substs) => {
                     cb(type_.derived(ty.clone()));
-                    walk_substs(db, type_, &substs, cb);
+                    walk_substs(db, type_, substs, cb);
                 }
                 TyKind::AssociatedType(_, substs) => {
                     if let Some(_) = ty.associated_type_parent_trait(db) {
                         cb(type_.derived(ty.clone()));
                     }
-                    walk_substs(db, type_, &substs, cb);
+                    walk_substs(db, type_, substs, cb);
                 }
                 TyKind::OpaqueType(_, subst) => {
                     if let Some(bounds) = ty.impl_trait_bounds(db) {
@@ -2577,7 +2577,7 @@ fn walk_type(db: &dyn HirDatabase, type_: &Type, cb: &mut impl FnMut(Type)) {
                 TyKind::FnDef(_, substs)
                 | TyKind::Tuple(_, substs)
                 | TyKind::Closure(.., substs) => {
-                    walk_substs(db, type_, &substs, cb);
+                    walk_substs(db, type_, substs, cb);
                 }
                 TyKind::Function(hir_ty::FnPointer { substitution, .. }) => {
                     walk_substs(db, type_, &substitution.0, cb);
index d522d5245a9509325c1eb0915431429213ccc7d7..613266e0706f07684924aea9225e17d4ef8e8460 100644 (file)
@@ -192,7 +192,7 @@ pub fn find_node_at_offset_with_descend<N: AstNode>(
         node: &SyntaxNode,
         offset: TextSize,
     ) -> Option<N> {
-        if let Some(it) = find_node_at_offset(&node, offset) {
+        if let Some(it) = find_node_at_offset(node, offset) {
             return Some(it);
         }
 
@@ -744,7 +744,7 @@ fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool
                     return None;
                 }
 
-                let func = self.resolve_method_call(&method_call_expr).map(Function::from)?;
+                let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
                 let res = match func.self_param(self.db)?.access(self.db) {
                     Access::Shared | Access::Exclusive => true,
                     Access::Owned => false,
index 37a0504150c9facc9e36396f68d3ae14f626b313..c9744d81d221ddfc03c02ccc36102b8e6fdf6b40 100644 (file)
@@ -222,7 +222,7 @@ pub(crate) fn resolve_bind_pat_to_const(
             Pat::Path(path) => path,
             _ => return None,
         };
-        let res = resolve_hir_path(db, &self.resolver, &path)?;
+        let res = resolve_hir_path(db, &self.resolver, path)?;
         match res {
             PathResolution::Def(def) => Some(def),
             _ => None,
@@ -329,7 +329,7 @@ pub(crate) fn record_literal_missing_fields(
 
         let (variant, missing_fields, _exhaustive) =
             record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
-        let res = self.missing_fields(db, krate, &substs, variant, missing_fields);
+        let res = self.missing_fields(db, krate, substs, variant, missing_fields);
         Some(res)
     }
 
@@ -347,7 +347,7 @@ pub(crate) fn record_pattern_missing_fields(
 
         let (variant, missing_fields, _exhaustive) =
             record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
-        let res = self.missing_fields(db, krate, &substs, variant, missing_fields);
+        let res = self.missing_fields(db, krate, substs, variant, missing_fields);
         Some(res)
     }
 
index da1fdac33aec5371dfb1d336cdb23278143fad7c..a8bd36a0a072be8301442e76d7ecc181e2b407e3 100644 (file)
@@ -1002,16 +1002,16 @@ fn from(ast_lit_kind: ast::LiteralKind) -> Self {
                 if let builtin @ Some(_) = lit.suffix().and_then(BuiltinFloat::from_suffix) {
                     return Literal::Float(Default::default(), builtin);
                 } else if let builtin @ Some(_) =
-                    lit.suffix().and_then(|it| BuiltinInt::from_suffix(&it))
+                    lit.suffix().and_then(|it| BuiltinInt::from_suffix(it))
                 {
                     Literal::Int(lit.value().unwrap_or(0) as i128, builtin)
                 } else {
-                    let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(&it));
+                    let builtin = lit.suffix().and_then(|it| BuiltinUint::from_suffix(it));
                     Literal::Uint(lit.value().unwrap_or(0), builtin)
                 }
             }
             LiteralKind::FloatNumber(lit) => {
-                let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(&it));
+                let ty = lit.suffix().and_then(|it| BuiltinFloat::from_suffix(it));
                 Literal::Float(Default::default(), ty)
             }
             LiteralKind::ByteString(bs) => {
index 6764de3a75e4b4105938b182bec80000def974d2..58a1fc81cba1a358531dd9270be0a0ceedca8608 100644 (file)
@@ -198,7 +198,7 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
         }
         Expr::Lambda { args, body: body_expr, .. } => {
             let scope = scopes.new_scope(scope);
-            scopes.add_params_bindings(body, scope, &args);
+            scopes.add_params_bindings(body, scope, args);
             compute_expr_scopes(*body_expr, body, scopes, scope);
         }
         Expr::Match { expr, arms } => {
index 44d22b918704c6fcbccae4b3506963601bdf165f..6933f6e3c334a6ee2cb7fcd8ea6fa0e2ac785528 100644 (file)
@@ -280,7 +280,7 @@ fn fill_params(
             sm.type_params.insert(param_id, Either::Right(type_param.clone()));
 
             let type_ref = TypeRef::Path(name.into());
-            self.fill_bounds(&lower_ctx, &type_param, Either::Left(type_ref));
+            self.fill_bounds(lower_ctx, &type_param, Either::Left(type_ref));
         }
         for lifetime_param in params.lifetime_params() {
             let name =
@@ -289,7 +289,7 @@ fn fill_params(
             let param_id = self.lifetimes.alloc(param);
             sm.lifetime_params.insert(param_id, lifetime_param.clone());
             let lifetime_ref = LifetimeRef::new_name(name);
-            self.fill_bounds(&lower_ctx, &lifetime_param, Either::Right(lifetime_ref));
+            self.fill_bounds(lower_ctx, &lifetime_param, Either::Right(lifetime_ref));
         }
         for const_param in params.const_params() {
             let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
index cfda7cb321cdb1a3afa21be139f5c840830b28a5..3f90bda742e24c9b0b95d5197d42459934e160a6 100644 (file)
@@ -823,7 +823,7 @@ fn is_intrinsic_fn_unsafe(name: &Name) -> bool {
         known::type_name,
         known::variant_count,
     ]
-    .contains(&name)
+    .contains(name)
 }
 
 fn lower_abi(abi: ast::Abi) -> Interned<str> {
@@ -855,7 +855,7 @@ fn lower_use_tree(&mut self, tree: ast::UseTree) -> Option<UseTree> {
                 // E.g. `use something::{inner}` (prefix is `None`, path is `something`)
                 // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
                 Some(path) => {
-                    match ModPath::from_src(self.db, path, &self.hygiene) {
+                    match ModPath::from_src(self.db, path, self.hygiene) {
                         Some(it) => Some(it),
                         None => return None, // FIXME: report errors somewhere
                     }
@@ -874,7 +874,7 @@ fn lower_use_tree(&mut self, tree: ast::UseTree) -> Option<UseTree> {
         } else {
             let is_glob = tree.star_token().is_some();
             let path = match tree.path() {
-                Some(path) => Some(ModPath::from_src(self.db, path, &self.hygiene)?),
+                Some(path) => Some(ModPath::from_src(self.db, path, self.hygiene)?),
                 None => None,
             };
             let alias = tree.rename().map(|a| {
index 93f30f23dd8ac10909968a623ace406df5dfb5c2..6fab58f159387a4b31429886bf3dfbcb96d7bacd 100644 (file)
@@ -500,7 +500,7 @@ fn inject_prelude(&mut self, crate_attrs: &Attrs) {
             let (per_ns, _) = self.def_map.resolve_path(
                 self.db,
                 self.def_map.root,
-                &path,
+                path,
                 BuiltinShadowMode::Other,
             );
 
@@ -722,7 +722,7 @@ fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialRe
         if import.is_extern_crate {
             let res = self.def_map.resolve_name_in_extern_prelude(
                 self.db,
-                &import
+                import
                     .path
                     .as_ident()
                     .expect("extern crate should have been desugared to one-element path"),
@@ -1351,7 +1351,7 @@ fn collect(&mut self, items: &[ModItem]) {
                     let imports = Import::from_use(
                         self.def_collector.db,
                         krate,
-                        &self.item_tree,
+                        self.item_tree,
                         ItemTreeId::new(self.file_id, import_id),
                     );
                     self.def_collector.unresolved_imports.extend(imports.into_iter().map(
@@ -1368,7 +1368,7 @@ fn collect(&mut self, items: &[ModItem]) {
                         import: Import::from_extern_crate(
                             self.def_collector.db,
                             krate,
-                            &self.item_tree,
+                            self.item_tree,
                             ItemTreeId::new(self.file_id, import_id),
                         ),
                         status: PartialResolvedImport::Unresolved,
@@ -1889,7 +1889,7 @@ fn collect_macro_call(&mut self, mac: &MacroCall) {
                     self.def_collector.def_map.with_ancestor_maps(
                         self.def_collector.db,
                         self.module_id,
-                        &mut |map, module| map[module].scope.get_legacy_macro(&name),
+                        &mut |map, module| map[module].scope.get_legacy_macro(name),
                     )
                 })
             },
@@ -1993,7 +1993,7 @@ fn do_collect_defs(db: &dyn DefDatabase, def_map: DefMap) -> DefMap {
     }
 
     fn do_resolve(code: &str) -> DefMap {
-        let (db, _file_id) = TestDB::with_single_file(&code);
+        let (db, _file_id) = TestDB::with_single_file(code);
         let krate = db.test_crate();
 
         let edition = db.crate_graph()[krate].edition;
index c984148c3a605e70cb309eac8c6ff45cd07c270b..629bc7952c23e6640bf0cbec5fcf7bd2c7518eba 100644 (file)
@@ -93,7 +93,7 @@ pub(crate) fn resolve_visibility(
         let mut vis = match visibility {
             RawVisibility::Module(path) => {
                 let (result, remaining) =
-                    self.resolve_path(db, original_module, &path, BuiltinShadowMode::Module);
+                    self.resolve_path(db, original_module, path, BuiltinShadowMode::Module);
                 if remaining.is_some() {
                     return None;
                 }
@@ -205,7 +205,7 @@ pub(super) fn resolve_path_fp_with_macro_single(
                     None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
                 };
                 log::debug!("resolving {:?} in crate root (+ extern prelude)", segment);
-                self.resolve_name_in_crate_root_or_extern_prelude(db, &segment)
+                self.resolve_name_in_crate_root_or_extern_prelude(db, segment)
             }
             PathKind::Plain => {
                 let (_, segment) = match segments.next() {
@@ -222,7 +222,7 @@ pub(super) fn resolve_path_fp_with_macro_single(
                     if path.segments().len() == 1 { shadow } else { BuiltinShadowMode::Module };
 
                 log::debug!("resolving {:?} in module", segment);
-                self.resolve_name_in_module(db, original_module, &segment, prefer_module)
+                self.resolve_name_in_module(db, original_module, segment, prefer_module)
             }
             PathKind::Super(lvl) => {
                 let mut module = original_module;
@@ -269,7 +269,7 @@ pub(super) fn resolve_path_fp_with_macro_single(
                     Some((_, segment)) => segment,
                     None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
                 };
-                if let Some(def) = self.extern_prelude.get(&segment) {
+                if let Some(def) = self.extern_prelude.get(segment) {
                     log::debug!("absolute path {:?} resolved to crate {:?}", path, def);
                     PerNs::types(*def, Visibility::Public)
                 } else {
@@ -319,13 +319,13 @@ pub(super) fn resolve_path_fp_with_macro_single(
                     };
 
                     // Since it is a qualified path here, it should not contains legacy macros
-                    module_data.scope.get(&segment)
+                    module_data.scope.get(segment)
                 }
                 ModuleDefId::AdtId(AdtId::EnumId(e)) => {
                     // enum variant
                     cov_mark::hit!(can_import_enum_variant);
                     let enum_data = db.enum_data(e);
-                    match enum_data.variant(&segment) {
+                    match enum_data.variant(segment) {
                         Some(local_id) => {
                             let variant = EnumVariantId { parent: e, local_id };
                             match &*enum_data.variants[local_id].variant_data {
index f6220aa9280652608ba68824a2cf5d163f4b6a97..27345d07ce708de3af116323025f2d7bc0cfe250 100644 (file)
@@ -208,13 +208,13 @@ fn lower_generic_args_from_fn_path(
     let params = params?;
     let mut param_types = Vec::new();
     for param in params.params() {
-        let type_ref = TypeRef::from_ast_opt(&ctx, param.ty());
+        let type_ref = TypeRef::from_ast_opt(ctx, param.ty());
         param_types.push(type_ref);
     }
     let arg = GenericArg::Type(TypeRef::Tuple(param_types));
     args.push(arg);
     if let Some(ret_type) = ret_type {
-        let type_ref = TypeRef::from_ast_opt(&ctx, ret_type.ty());
+        let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty());
         bindings.push(AssociatedTypeBinding {
             name: name![Output],
             type_ref: Some(type_ref),
index fb8a6f260631e0f56748ab3e10a171912a507d9c..d4681fa3e66976eeb13a93c52abc008106327c25 100644 (file)
@@ -133,7 +133,7 @@ fn resolve_module_path(
             Some(it) => it,
             None => return PerNs::none(),
         };
-        let (module_res, segment_index) = item_map.resolve_path(db, module, &path, shadow);
+        let (module_res, segment_index) = item_map.resolve_path(db, module, path, shadow);
         if segment_index.is_some() {
             return PerNs::none();
         }
@@ -150,7 +150,7 @@ pub fn resolve_module_path_in_trait_items(
         path: &ModPath,
     ) -> Option<TraitId> {
         let (item_map, module) = self.module_scope()?;
-        let (module_res, ..) = item_map.resolve_path(db, module, &path, BuiltinShadowMode::Module);
+        let (module_res, ..) = item_map.resolve_path(db, module, path, BuiltinShadowMode::Module);
         match module_res.take_types()? {
             ModuleDefId::TraitId(it) => Some(it),
             _ => None,
@@ -325,7 +325,7 @@ pub fn resolve_path_as_macro(
         path: &ModPath,
     ) -> Option<MacroDefId> {
         let (item_map, module) = self.module_scope()?;
-        item_map.resolve_path(db, module, &path, BuiltinShadowMode::Other).0.take_macros()
+        item_map.resolve_path(db, module, path, BuiltinShadowMode::Other).0.take_macros()
     }
 
     pub fn process_all_names(&self, db: &dyn DefDatabase, f: &mut dyn FnMut(Name, ScopeDef)) {
@@ -561,7 +561,7 @@ fn resolve_path_in_value_ns(
         path: &ModPath,
     ) -> Option<ResolveValueResult> {
         let (module_def, idx) =
-            self.def_map.resolve_path_locally(db, self.module_id, &path, BuiltinShadowMode::Other);
+            self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
         match idx {
             None => {
                 let value = to_value_ns(module_def)?;
@@ -591,7 +591,7 @@ fn resolve_path_in_type_ns(
         path: &ModPath,
     ) -> Option<(TypeNs, Option<usize>)> {
         let (module_def, idx) =
-            self.def_map.resolve_path_locally(db, self.module_id, &path, BuiltinShadowMode::Other);
+            self.def_map.resolve_path_locally(db, self.module_id, path, BuiltinShadowMode::Other);
         let res = to_type_ns(module_def)?;
         Some((res, idx))
     }
index cbde6b94026ed3e7a9306092f5c6cb5edb00554f..ffe4999738a1285ab17075ffe291b19f77f2d4dd 100644 (file)
@@ -128,7 +128,7 @@ impl TypeRef {
     /// Converts an `ast::TypeRef` to a `hir::TypeRef`.
     pub fn from_ast(ctx: &LowerCtx, node: ast::Type) -> Self {
         match node {
-            ast::Type::ParenType(inner) => TypeRef::from_ast_opt(&ctx, inner.ty()),
+            ast::Type::ParenType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
             ast::Type::TupleType(inner) => {
                 TypeRef::Tuple(inner.fields().map(|it| TypeRef::from_ast(ctx, it)).collect())
             }
@@ -142,7 +142,7 @@ pub fn from_ast(ctx: &LowerCtx, node: ast::Type) -> Self {
                     .unwrap_or(TypeRef::Error)
             }
             ast::Type::PtrType(inner) => {
-                let inner_ty = TypeRef::from_ast_opt(&ctx, inner.ty());
+                let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
                 let mutability = Mutability::from_mutable(inner.mut_token().is_some());
                 TypeRef::RawPtr(Box::new(inner_ty), mutability)
             }
@@ -156,13 +156,13 @@ pub fn from_ast(ctx: &LowerCtx, node: ast::Type) -> Self {
                     .map(ConstScalar::usize_from_literal_expr)
                     .unwrap_or(ConstScalar::Unknown);
 
-                TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty())), len)
+                TypeRef::Array(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())), len)
             }
             ast::Type::SliceType(inner) => {
-                TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.ty())))
+                TypeRef::Slice(Box::new(TypeRef::from_ast_opt(ctx, inner.ty())))
             }
             ast::Type::RefType(inner) => {
-                let inner_ty = TypeRef::from_ast_opt(&ctx, inner.ty());
+                let inner_ty = TypeRef::from_ast_opt(ctx, inner.ty());
                 let lifetime = inner.lifetime().map(|lt| LifetimeRef::new(&lt));
                 let mutability = Mutability::from_mutable(inner.mut_token().is_some());
                 TypeRef::Reference(Box::new(inner_ty), lifetime, mutability)
@@ -180,7 +180,7 @@ pub fn from_ast(ctx: &LowerCtx, node: ast::Type) -> Self {
                         is_varargs = param.dotdotdot_token().is_some();
                     }
 
-                    pl.params().map(|p| p.ty()).map(|it| TypeRef::from_ast_opt(&ctx, it)).collect()
+                    pl.params().map(|p| p.ty()).map(|it| TypeRef::from_ast_opt(ctx, it)).collect()
                 } else {
                     Vec::new()
                 };
@@ -188,7 +188,7 @@ pub fn from_ast(ctx: &LowerCtx, node: ast::Type) -> Self {
                 TypeRef::Fn(params, is_varargs)
             }
             // for types are close enough for our purposes to the inner type for now...
-            ast::Type::ForType(inner) => TypeRef::from_ast_opt(&ctx, inner.ty()),
+            ast::Type::ForType(inner) => TypeRef::from_ast_opt(ctx, inner.ty()),
             ast::Type::ImplTraitType(inner) => {
                 TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
             }
@@ -229,7 +229,7 @@ fn go(type_ref: &TypeRef, f: &mut impl FnMut(&TypeRef)) {
                 TypeRef::RawPtr(type_ref, _)
                 | TypeRef::Reference(type_ref, ..)
                 | TypeRef::Array(type_ref, _)
-                | TypeRef::Slice(type_ref) => go(&type_ref, f),
+                | TypeRef::Slice(type_ref) => go(type_ref, f),
                 TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => {
                     for bound in bounds {
                         match bound.as_ref() {
index 0b310ba2fc106de5f47358d90a0b03906f0b7256..51572226e38e064f338aeee27b3edcaa66d42e32 100644 (file)
@@ -354,7 +354,7 @@ fn concat_expand(
                 // concat works with string and char literals, so remove any quotes.
                 // It also works with integer, float and boolean literals, so just use the rest
                 // as-is.
-                let component = unquote_str(&it).unwrap_or_else(|| it.text.to_string());
+                let component = unquote_str(it).unwrap_or_else(|| it.text.to_string());
                 text.push_str(&component);
             }
             // handle boolean literals
@@ -417,7 +417,7 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, mbe::ExpandError> {
     tt.token_trees
         .get(0)
         .and_then(|tt| match tt {
-            tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(&it),
+            tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(it),
             _ => None,
         })
         .ok_or_else(|| mbe::ExpandError::ConversionError)
@@ -561,7 +561,7 @@ mod tests {
     use syntax::ast::NameOwner;
 
     fn expand_builtin_macro(ra_fixture: &str) -> String {
-        let (db, file_id) = TestDB::with_single_file(&ra_fixture);
+        let (db, file_id) = TestDB::with_single_file(ra_fixture);
         let parsed = db.parse(file_id);
         let mut macro_rules: Vec<_> =
             parsed.syntax_node().descendants().filter_map(ast::MacroRules::cast).collect();
index 82dc7f326a6aacceb99bb622f10bdc75dab86a5c..bc3ecc59301a76f295fbf48aa2f611eb211cd384 100644 (file)
@@ -78,7 +78,7 @@ mod tests {
     use super::*;
 
     fn test_remove_derives_up_to(attr: usize, ra_fixture: &str, expect: Expect) {
-        let (db, file_id) = TestDB::with_single_file(&ra_fixture);
+        let (db, file_id) = TestDB::with_single_file(ra_fixture);
         let parsed = db.parse(file_id);
 
         let mut items: Vec<_> =
index dbe1b446e01ac14015806a06e6d311f4135a641e..3ad2d3bf738b0062b59e1b341ae25b4edd5330a0 100644 (file)
@@ -51,7 +51,7 @@ pub fn expand(
                 // Proc macros have access to the environment variables of the invoking crate.
                 let env = &krate_graph[calling_crate].env;
 
-                proc_macro.expander.expand(&tt, attr_arg, &env).map_err(mbe::ExpandError::from)
+                proc_macro.expander.expand(tt, attr_arg, env).map_err(mbe::ExpandError::from)
             }
             None => Err(mbe::ExpandError::UnresolvedProcMacro),
         }
index 222141bd6a5536300a4be8ac5de01127a0adac63..088d2791e307ce69837ab44c05c2ad11029453d6 100644 (file)
@@ -528,7 +528,7 @@ pub(super) fn new(pcx: PatCtxt<'_>) -> Self {
                 smallvec![NonExhaustive]
             }
             TyKind::Never => SmallVec::new(),
-            _ if cx.is_uninhabited(&pcx.ty) => SmallVec::new(),
+            _ if cx.is_uninhabited(pcx.ty) => SmallVec::new(),
             TyKind::Adt(..) | TyKind::Tuple(..) | TyKind::Ref(..) => smallvec![Single],
             // This type is one for which we cannot list constructors, like `str` or `f64`.
             _ => smallvec![NonExhaustive],
index bd76a606cb120dbb03122c7ee8d6212cfe2448e4..f5ac714446d4fc5d277475bbf091eb8ce295e332 100644 (file)
@@ -645,7 +645,7 @@ fn union(&mut self, other: Self) {
             (Seq { subpats: s_set }, Seq { subpats: mut o_set }) => {
                 s_set.retain(|i, s_sub_set| {
                     // Missing entries count as full.
-                    let o_sub_set = o_set.remove(&i).unwrap_or(Full);
+                    let o_sub_set = o_set.remove(i).unwrap_or(Full);
                     s_sub_set.union(o_sub_set);
                     // We drop full entries.
                     !s_sub_set.is_full()
@@ -656,7 +656,7 @@ fn union(&mut self, other: Self) {
             (Alt { subpats: s_set, .. }, Alt { subpats: mut o_set, .. }) => {
                 s_set.retain(|i, s_sub_set| {
                     // Missing entries count as empty.
-                    let o_sub_set = o_set.remove(&i).unwrap_or(Empty);
+                    let o_sub_set = o_set.remove(i).unwrap_or(Empty);
                     s_sub_set.union(o_sub_set);
                     // We drop empty entries.
                     !s_sub_set.is_empty()
@@ -898,7 +898,7 @@ fn apply_constructor(
                 } else {
                     witnesses
                         .into_iter()
-                        .map(|witness| witness.apply_constructor(pcx, &ctor, ctor_wild_subpatterns))
+                        .map(|witness| witness.apply_constructor(pcx, ctor, ctor_wild_subpatterns))
                         .collect()
                 };
                 WithWitnesses(new_witnesses)
index f023c1fb7de3f37cb73551309371491b15b5ab3f..9590c2e47bc8c238a6f1b8be3ed45e6aa474c54f 100644 (file)
@@ -782,7 +782,7 @@ fn only_has_type(&self, table: &mut unify::InferenceTable) -> Option<Ty> {
     fn adjust_for_branches(&self, table: &mut unify::InferenceTable) -> Expectation {
         match self {
             Expectation::HasType(ety) => {
-                let ety = table.resolve_ty_shallow(&ety);
+                let ety = table.resolve_ty_shallow(ety);
                 if !ety.is_ty_var() {
                     Expectation::HasType(ety)
                 } else {
index 03b97e7db5d643ef5764178603145dccf8954524..8647d74372a6192727a6046e12ecf053277d25f3 100644 (file)
@@ -109,7 +109,7 @@ fn coerce_inner(&mut self, from_ty: Ty, to_ty: &Ty) -> InferResult {
         }
 
         // Consider coercing the subtype to a DST
-        if let Ok(ret) = self.try_coerce_unsized(&from_ty, &to_ty) {
+        if let Ok(ret) = self.try_coerce_unsized(&from_ty, to_ty) {
             return Ok(ret);
         }
 
index e34f194fff8a0a79dbddc71a1d6ec94155bb68c1..4805c0a008c619dedbb1aa1f1e75f0d1051bcc54 100644 (file)
@@ -54,7 +54,7 @@ pub(super) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) ->
     /// Infer type of expression with possibly implicit coerce to the expected type.
     /// Return the type after possible coercion.
     pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
-        let ty = self.infer_expr_inner(expr, &expected);
+        let ty = self.infer_expr_inner(expr, expected);
         let ty = if let Some(target) = expected.only_has_type(&mut self.table) {
             if !self.coerce(&ty, &target) {
                 self.result
@@ -135,11 +135,11 @@ fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
                 let mut both_arms_diverge = Diverges::Always;
 
                 let mut result_ty = self.table.new_type_var();
-                let then_ty = self.infer_expr_inner(*then_branch, &expected);
+                let then_ty = self.infer_expr_inner(*then_branch, expected);
                 both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
                 result_ty = self.coerce_merge_branch(Some(*then_branch), &result_ty, &then_ty);
                 let else_ty = match else_branch {
-                    Some(else_branch) => self.infer_expr_inner(*else_branch, &expected),
+                    Some(else_branch) => self.infer_expr_inner(*else_branch, expected),
                     None => TyBuilder::unit(),
                 };
                 both_arms_diverge &= self.diverges;
@@ -330,8 +330,8 @@ fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
                 .infer_method_call(
                     tgt_expr,
                     *receiver,
-                    &args,
-                    &method_name,
+                    args,
+                    method_name,
                     generic_args.as_deref(),
                 ),
             Expr::Match { expr, arms } => {
@@ -993,7 +993,7 @@ fn substs_for_method_call(
     }
 
     fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
-        let callable_ty = self.resolve_ty_shallow(&callable_ty);
+        let callable_ty = self.resolve_ty_shallow(callable_ty);
         if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(&Interner) {
             let def: CallableDefId = from_chalk(self.db, *fn_def);
             let generic_predicates = self.db.generic_predicates(def.into());
index 25dff7e49d41a4dc74a04ddbea532fd9d74a45eb..8f5db1f407fef5c7337b1b59de655b8841379b1d 100644 (file)
@@ -192,7 +192,7 @@ pub(super) fn infer_pat(
             Pat::Path(path) => {
                 // FIXME use correct resolver for the surrounding expression
                 let resolver = self.resolver.clone();
-                self.infer_path(&resolver, &path, pat.into()).unwrap_or(self.err_ty())
+                self.infer_path(&resolver, path, pat.into()).unwrap_or(self.err_ty())
             }
             Pat::Bind { mode, name: _, subpat } => {
                 let mode = if mode == &BindingAnnotation::Unannotated {
index 14c99eafd31bcf82644e179454ceac417430f7b7..056cdb5d5ba1b90a1d5a779b2976ae673e2e4f58 100644 (file)
@@ -43,11 +43,11 @@ fn resolve_value_path(
             }
             let ty = self.make_ty(type_ref);
             let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
-            let ctx = crate::lower::TyLoweringContext::new(self.db, &resolver);
+            let ctx = crate::lower::TyLoweringContext::new(self.db, resolver);
             let (ty, _) = ctx.lower_ty_relative_path(ty, None, remaining_segments_for_ty);
             self.resolve_ty_assoc_item(
                 ty,
-                &path.segments().last().expect("path had at least one segment").name,
+                path.segments().last().expect("path had at least one segment").name,
                 id,
             )?
         } else {
@@ -154,7 +154,7 @@ fn resolve_assoc_item(
                 let segment =
                     remaining_segments.last().expect("there should be at least one segment here");
 
-                self.resolve_ty_assoc_item(ty, &segment.name, id)
+                self.resolve_ty_assoc_item(ty, segment.name, id)
             }
         }
     }
index 29ffdd9b7f3940da7059ceca683e1a94d28ea422..5fef878e899e6156b6d0901f1c5953d4fef04edf 100644 (file)
@@ -331,7 +331,7 @@ fn program_clauses_data<'a>(
         &self,
         clauses: &'a Self::InternedProgramClauses,
     ) -> &'a [chalk_ir::ProgramClause<Self>] {
-        &clauses
+        clauses
     }
 
     fn intern_quantified_where_clauses<E>(
@@ -373,7 +373,7 @@ fn canonical_var_kinds_data<'a>(
         &self,
         canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
     ) -> &'a [chalk_ir::CanonicalVarKind<Self>] {
-        &canonical_var_kinds
+        canonical_var_kinds
     }
 
     fn intern_constraints<E>(
@@ -413,7 +413,7 @@ fn variances_data<'a>(
         &self,
         variances: &'a Self::InternedVariances,
     ) -> &'a [chalk_ir::Variance] {
-        &variances
+        variances
     }
 }
 
index c83933c73d492258194b3f9811b585854e686fb7..0b8f21e5df1be0362f0881d25832be51139a09d4 100644 (file)
@@ -238,7 +238,7 @@ pub fn lower_ty_ext(&self, type_ref: &TypeRef) -> (Ty, Option<TypeNs>) {
                         // away instead of two.
                         let actual_opaque_type_data = self
                             .with_debruijn(DebruijnIndex::INNERMOST, |ctx| {
-                                ctx.lower_impl_trait(&bounds)
+                                ctx.lower_impl_trait(bounds)
                             });
                         self.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data;
 
@@ -421,7 +421,7 @@ pub(crate) fn lower_partly_resolved_path(
                     let found = self
                         .db
                         .trait_data(trait_ref.hir_trait_id())
-                        .associated_type_by_name(&segment.name);
+                        .associated_type_by_name(segment.name);
                     match found {
                         Some(associated_ty) => {
                             // FIXME handle type parameters on the segment
@@ -505,7 +505,7 @@ pub(crate) fn lower_partly_resolved_path(
     pub(crate) fn lower_path(&self, path: &Path) -> (Ty, Option<TypeNs>) {
         // Resolve the path (in type namespace)
         if let Some(type_ref) = path.type_anchor() {
-            let (ty, res) = self.lower_ty_ext(&type_ref);
+            let (ty, res) = self.lower_ty_ext(type_ref);
             return self.lower_ty_relative_path(ty, res, path.segments());
         }
         let (resolution, remaining_index) =
index a23527f7df253837510a1f5a625c07cea4cae755..8c00a636995389f7414beba841efaab9ab6c40da 100644 (file)
@@ -372,7 +372,7 @@ pub(crate) fn lookup_method(
         db,
         env,
         krate,
-        &traits_in_scope,
+        traits_in_scope,
         visible_from_module,
         Some(name),
         LookupMode::MethodCall,
@@ -484,7 +484,7 @@ fn iterate_method_candidates_impl(
         LookupMode::Path => {
             // No autoderef for path lookups
             iterate_method_candidates_for_self_ty(
-                &ty,
+                ty,
                 db,
                 env,
                 krate,
@@ -513,7 +513,7 @@ fn iterate_method_candidates_with_autoref(
         db,
         env.clone(),
         krate,
-        &traits_in_scope,
+        traits_in_scope,
         visible_from_module,
         name,
         &mut callback,
@@ -531,7 +531,7 @@ fn iterate_method_candidates_with_autoref(
         db,
         env.clone(),
         krate,
-        &traits_in_scope,
+        traits_in_scope,
         visible_from_module,
         name,
         &mut callback,
@@ -549,7 +549,7 @@ fn iterate_method_candidates_with_autoref(
         db,
         env,
         krate,
-        &traits_in_scope,
+        traits_in_scope,
         visible_from_module,
         name,
         &mut callback,
@@ -593,7 +593,7 @@ fn iterate_method_candidates_by_receiver(
             db,
             env.clone(),
             krate,
-            &traits_in_scope,
+            traits_in_scope,
             name,
             Some(receiver_ty),
             &mut callback,
@@ -870,7 +870,7 @@ fn transform_receiver_ty(
             .fill_with_unknown()
             .build(),
         AssocContainerId::ImplId(impl_id) => {
-            let impl_substs = inherent_impl_substs(db, env, impl_id, &self_ty)?;
+            let impl_substs = inherent_impl_substs(db, env, impl_id, self_ty)?;
             TyBuilder::subst_for_def(db, function_id)
                 .use_parent_substs(&impl_substs)
                 .fill_with_unknown()
index 4193aabf51fc6120c029754f91c9e8861cc6a2f0..31d5cfedca0fd3f2a9fee3fc90222604f12ae945 100644 (file)
@@ -208,7 +208,7 @@ pub(crate) fn diagnostics(
     match sema.to_module_def(file_id) {
         Some(m) => m.diagnostics(db, &mut sink, internal_diagnostics),
         None => {
-            sink.push(UnlinkedFile { file_id, node: SyntaxNodePtr::new(&parse.tree().syntax()) });
+            sink.push(UnlinkedFile { file_id, node: SyntaxNodePtr::new(parse.tree().syntax()) });
         }
     }
 
@@ -222,7 +222,7 @@ fn diagnostic_with_fix<D: DiagnosticWithFixes>(
     resolve: &AssistResolveStrategy,
 ) -> Diagnostic {
     Diagnostic::error(sema.diagnostics_display_range(d.display_source()).range, d.message())
-        .with_fixes(d.fixes(&sema, resolve))
+        .with_fixes(d.fixes(sema, resolve))
         .with_code(Some(d.code()))
 }
 
@@ -232,7 +232,7 @@ fn warning_with_fix<D: DiagnosticWithFixes>(
     resolve: &AssistResolveStrategy,
 ) -> Diagnostic {
     Diagnostic::hint(sema.diagnostics_display_range(d.display_source()).range, d.message())
-        .with_fixes(d.fixes(&sema, resolve))
+        .with_fixes(d.fixes(sema, resolve))
         .with_code(Some(d.code()))
 }
 
index a5f457dcea537635ca08d2f2d8e0a8ea0b1c24f6..f6e45967ad2d75005b5a7fcbaf5a613ba3040fc5 100644 (file)
@@ -18,7 +18,7 @@ fn fixes(
     ) -> Option<Vec<Assist>> {
         let root = sema.db.parse_or_expand(self.file)?;
         missing_record_expr_field_fixes(
-            &sema,
+            sema,
             self.file.original_file(sema.db),
             &self.field.to_node(&root),
         )
index b5dd64c08c36ad576bd6f3e5a28d2d441a782a7f..c76f6008a3096257b04d2d14d5e009623812276c 100644 (file)
@@ -37,7 +37,7 @@ fn fixes(
 
         let edit = {
             let mut builder = TextEdit::builder();
-            algo::diff(&old_field_list.syntax(), &new_field_list.syntax())
+            algo::diff(old_field_list.syntax(), new_field_list.syntax())
                 .into_text_edit(&mut builder);
             builder.finish()
         };
@@ -45,7 +45,7 @@ fn fixes(
             "fill_missing_fields",
             "Fill struct fields",
             SourceChange::from_text_edit(self.file.original_file(sema.db), edit),
-            sema.original_range(&field_list_parent.syntax()).range,
+            sema.original_range(field_list_parent.syntax()).range,
         )])
     }
 }
index ec3828ab2d2ff0a7db7949318c93edfbadda7ac2..774952d962af5f0f095cde88294a486ecb263eae 100644 (file)
@@ -151,18 +151,18 @@ pub(crate) fn resolve_doc_path_for_def(
 ) -> Option<hir::ModuleDef> {
     match def {
         Definition::ModuleDef(def) => match def {
-            hir::ModuleDef::Module(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::Function(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::Adt(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::Variant(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::Const(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::Static(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::Trait(it) => it.resolve_doc_path(db, &link, ns),
-            hir::ModuleDef::TypeAlias(it) => it.resolve_doc_path(db, &link, ns),
+            hir::ModuleDef::Module(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::Function(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::Adt(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::Variant(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::Const(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::Static(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::Trait(it) => it.resolve_doc_path(db, link, ns),
+            hir::ModuleDef::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
             hir::ModuleDef::BuiltinType(_) => None,
         },
-        Definition::Macro(it) => it.resolve_doc_path(db, &link, ns),
-        Definition::Field(it) => it.resolve_doc_path(db, &link, ns),
+        Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
+        Definition::Field(it) => it.resolve_doc_path(db, link, ns),
         Definition::SelfType(_)
         | Definition::Local(_)
         | Definition::GenericParam(_)
index 7032889acc0fc254b6294d7680160d37782f1c05..c7ec87edf14463895d276b3b58a181e406e878d8 100644 (file)
@@ -328,7 +328,7 @@ mod tests {
     use super::*;
 
     fn do_check(before: &str, afters: &[&str]) {
-        let (analysis, position) = fixture::position(&before);
+        let (analysis, position) = fixture::position(before);
         let before = analysis.file_text(position.file_id).unwrap();
         let range = TextRange::empty(position.offset);
         let mut frange = FileRange { file_id: position.file_id, range };
index 2d36c34e99d3f1da1de2222fb9990703006795fa..27a292d83edaad56da60414ccf3b3dec1590ccd1 100644 (file)
@@ -57,7 +57,7 @@ pub(crate) fn goto_definition(
             },
             ast::Name(name) => {
                 let def = NameClass::classify(&sema, &name)?.referenced_or_defined(sema.db);
-                try_find_trait_item_definition(&sema.db, &def)
+                try_find_trait_item_definition(sema.db, &def)
                     .or_else(|| def.try_to_nav(sema.db))
             },
             ast::Lifetime(lt) => if let Some(name_class) = NameClass::classify_lifetime(&sema, &lt) {
index 1c6d36939b23ccee6012cba353e4632a2bf0ef44..b4b3b45b565a1c8ce2b9e49e14c25c591f66b55c 100644 (file)
@@ -288,7 +288,7 @@ fn runnable_action(
 ) -> Option<HoverAction> {
     match def {
         Definition::ModuleDef(it) => match it {
-            ModuleDef::Module(it) => runnable_mod(&sema, it).map(|it| HoverAction::Runnable(it)),
+            ModuleDef::Module(it) => runnable_mod(sema, it).map(|it| HoverAction::Runnable(it)),
             ModuleDef::Function(func) => {
                 let src = func.source(sema.db)?;
                 if src.file_id != file_id.into() {
@@ -297,7 +297,7 @@ fn runnable_action(
                     return None;
                 }
 
-                runnable_fn(&sema, func).map(HoverAction::Runnable)
+                runnable_fn(sema, func).map(HoverAction::Runnable)
             }
             _ => None,
         },
@@ -432,7 +432,7 @@ fn hover_for_definition(
     return match def {
         Definition::Macro(it) => match &it.source(db)?.value {
             Either::Left(mac) => {
-                let label = macro_label(&mac);
+                let label = macro_label(mac);
                 from_def_source_labeled(db, it, Some(label), mod_path)
             }
             Either::Right(_) => {
@@ -516,7 +516,7 @@ fn hover_for_keyword(
     if !token.kind().is_keyword() {
         return None;
     }
-    let famous_defs = FamousDefs(&sema, sema.scope(&token.parent()?).krate());
+    let famous_defs = FamousDefs(sema, sema.scope(&token.parent()?).krate());
     // std exposes {}_keyword modules with docstrings on the root to document keywords
     let keyword_mod = format!("{}_keyword", token.text());
     let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;
index 821c61403bdfd24dbfd891e2aad6ee2771118e53..9cd33d0e450bb9ea2935a4b3e7fff40b0fa09466 100644 (file)
@@ -96,7 +96,7 @@ fn get_chaining_hints(
     }
 
     let krate = sema.scope(expr.syntax()).module().map(|it| it.krate());
-    let famous_defs = FamousDefs(&sema, krate);
+    let famous_defs = FamousDefs(sema, krate);
 
     let mut tokens = expr
         .syntax()
@@ -165,7 +165,7 @@ fn get_param_name_hints(
             };
             Some((param_name, arg))
         })
-        .filter(|(param_name, arg)| !should_hide_param_name_hint(sema, &callable, param_name, &arg))
+        .filter(|(param_name, arg)| !should_hide_param_name_hint(sema, &callable, param_name, arg))
         .map(|(param_name, arg)| InlayHint {
             range: arg.syntax().text_range(),
             kind: InlayKind::ParameterHint,
@@ -187,7 +187,7 @@ fn get_bind_pat_hints(
     }
 
     let krate = sema.scope(pat.syntax()).module().map(|it| it.krate());
-    let famous_defs = FamousDefs(&sema, krate);
+    let famous_defs = FamousDefs(sema, krate);
 
     let ty = sema.type_of_pat(&pat.clone().into())?;
 
index c67ccd1a9f7e9731dae814878da28abae7799381..93d3760bf2598965ddde7e001e74ab506a323c0c 100644 (file)
@@ -60,7 +60,7 @@ fn remove_newlines(edit: &mut TextEditBuilder, token: &SyntaxToken, range: TextR
         let pos: TextSize = (pos as u32).into();
         let offset = token.text_range().start() + range.start() + pos;
         if !edit.invalidates_offset(offset) {
-            remove_newline(edit, &token, offset);
+            remove_newline(edit, token, offset);
         }
     }
 }
index 97c9e5d2b4a7b1b0f573fcefde031c47fd44daf7..0511efae3834e96043ec24a2188c4bd5a87a2492 100644 (file)
@@ -282,20 +282,20 @@ pub fn syntax_tree(
         file_id: FileId,
         text_range: Option<TextRange>,
     ) -> Cancellable<String> {
-        self.with_db(|db| syntax_tree::syntax_tree(&db, file_id, text_range))
+        self.with_db(|db| syntax_tree::syntax_tree(db, file_id, text_range))
     }
 
     pub fn view_hir(&self, position: FilePosition) -> Cancellable<String> {
-        self.with_db(|db| view_hir::view_hir(&db, position))
+        self.with_db(|db| view_hir::view_hir(db, position))
     }
 
     pub fn view_item_tree(&self, file_id: FileId) -> Cancellable<String> {
-        self.with_db(|db| view_item_tree::view_item_tree(&db, file_id))
+        self.with_db(|db| view_item_tree::view_item_tree(db, file_id))
     }
 
     /// Renders the crate graph to GraphViz "dot" syntax.
     pub fn view_crate_graph(&self) -> Cancellable<Result<String, String>> {
-        self.with_db(|db| view_crate_graph::view_crate_graph(&db))
+        self.with_db(|db| view_crate_graph::view_crate_graph(db))
     }
 
     pub fn expand_macro(&self, position: FilePosition) -> Cancellable<Option<ExpandedMacro>> {
@@ -315,7 +315,7 @@ pub fn join_lines(&self, frange: FileRange) -> Cancellable<TextEdit> {
     /// up minor stuff like continuing the comment.
     /// The edit will be a snippet (with `$0`).
     pub fn on_enter(&self, position: FilePosition) -> Cancellable<Option<TextEdit>> {
-        self.with_db(|db| typing::on_enter(&db, position))
+        self.with_db(|db| typing::on_enter(db, position))
     }
 
     /// Returns an edit which should be applied after a character was typed.
@@ -331,7 +331,7 @@ pub fn on_char_typed(
         if !typing::TRIGGER_CHARS.contains(char_typed) {
             return Ok(None);
         }
-        self.with_db(|db| typing::on_char_typed(&db, position, char_typed))
+        self.with_db(|db| typing::on_char_typed(db, position, char_typed))
     }
 
     /// Returns a tree representation of symbols in the file. Useful to draw a
index f8b64a669048b984cd95f218d34fbff1964854c3..a0fdead2c169704dac4ee2b11ab4b006dec43c9a 100644 (file)
@@ -62,7 +62,7 @@ pub(crate) fn find_all_refs(
         if let Some(name) = get_name_of_item_declaration(&syntax, position) {
             (NameClass::classify(sema, &name)?.referenced_or_defined(sema.db), true)
         } else {
-            (find_def(&sema, &syntax, position)?, false)
+            (find_def(sema, &syntax, position)?, false)
         };
 
     let mut usages = def.usages(sema).set_scope(search_scope).include_self_refs().all();
index 7dfc5043ee99ee7f78b931b5dd738b9db5d4706f..02b171bdaa38daa30f3cc84aac5124b372b55333 100644 (file)
@@ -64,7 +64,7 @@ pub(crate) fn prepare_rename(
         }
     };
     let name_like = sema
-        .find_node_at_offset_with_descend(&syntax, position.offset)
+        .find_node_at_offset_with_descend(syntax, position.offset)
         .ok_or_else(|| format_err!("No references found at position"))?;
     let node = match &name_like {
         ast::NameLike::Name(it) => it.syntax(),
@@ -104,7 +104,7 @@ pub(crate) fn rename_with_semantics(
 
     let def = find_definition(sema, syntax, position)?;
     match def {
-        Definition::ModuleDef(ModuleDef::Module(module)) => rename_mod(&sema, module, new_name),
+        Definition::ModuleDef(ModuleDef::Module(module)) => rename_mod(sema, module, new_name),
         Definition::SelfType(_) => bail!("Cannot rename `Self`"),
         Definition::ModuleDef(ModuleDef::BuiltinType(_)) => bail!("Cannot rename builtin type"),
         def => rename_reference(sema, def, new_name),
@@ -323,7 +323,7 @@ fn rename_reference(
     }
     let mut source_change = SourceChange::default();
     source_change.extend(usages.iter().map(|(&file_id, references)| {
-        (file_id, source_edit_from_references(&references, def, new_name))
+        (file_id, source_edit_from_references(references, def, new_name))
     }));
 
     let (file_id, edit) = source_edit_from_def(sema, def, new_name)?;
@@ -413,7 +413,7 @@ fn rename_self_to_param(
     let mut source_change = SourceChange::default();
     source_change.insert_source_edit(file_id.original_file(sema.db), edit);
     source_change.extend(usages.iter().map(|(&file_id, references)| {
-        (file_id, source_edit_from_references(&references, def, new_name))
+        (file_id, source_edit_from_references(references, def, new_name))
     }));
     Ok(source_change)
 }
index 55205495118fe43d9b2a06c25fd94b7f7e46b2dc..03faabadca688c551d31d514d46d2156ffbed069 100644 (file)
@@ -158,7 +158,7 @@ fn find_related_tests(
     search_scope: Option<SearchScope>,
     tests: &mut FxHashSet<Runnable>,
 ) {
-    if let Some(refs) = references::find_all_refs(&sema, position, search_scope) {
+    if let Some(refs) = references::find_all_refs(sema, position, search_scope) {
         for (file_id, refs) in refs.references {
             let file = sema.parse(file_id);
             let file = file.syntax();
@@ -169,10 +169,10 @@ fn find_related_tests(
             });
 
             for fn_def in functions {
-                if let Some(runnable) = as_test_runnable(&sema, &fn_def) {
+                if let Some(runnable) = as_test_runnable(sema, &fn_def) {
                     // direct test
                     tests.insert(runnable);
-                } else if let Some(module) = parent_test_module(&sema, &fn_def) {
+                } else if let Some(module) = parent_test_module(sema, &fn_def) {
                     // indirect test
                     find_related_tests_in_module(sema, &fn_def, &module, tests);
                 }
@@ -203,7 +203,7 @@ fn find_related_tests_in_module(
 }
 
 fn as_test_runnable(sema: &Semantics<RootDatabase>, fn_def: &ast::Fn) -> Option<Runnable> {
-    if test_related_attribute(&fn_def).is_some() {
+    if test_related_attribute(fn_def).is_some() {
         let function = sema.to_def(fn_def)?;
         runnable_fn(sema, function)
     } else {
index b03f1c71f170303413adae3fd5c5fdf93aa13d43..e186b82b7ca36183d4e74d976956852398165361 100644 (file)
@@ -323,7 +323,7 @@ fn traverse(
         if let Some(token) = element.as_token().cloned().and_then(ast::String::cast) {
             if token.is_raw() {
                 let expanded = element_to_highlight.as_token().unwrap().clone();
-                if inject::ra_fixture(hl, &sema, token, expanded).is_some() {
+                if inject::ra_fixture(hl, sema, token, expanded).is_some() {
                     continue;
                 }
             }
@@ -334,7 +334,7 @@ fn traverse(
         }
 
         if let Some((mut highlight, binding_hash)) = highlight::element(
-            &sema,
+            sema,
             krate,
             &mut bindings_shadow_count,
             syntactic_name_ref_highlighting,
index 84012227d832213ac7cc268abde767f287c7a072..8c0e553c035975911c94e1a16e6e383b705742ba 100644 (file)
@@ -449,12 +449,12 @@ fn highlight_method_call(
     krate: Option<hir::Crate>,
     method_call: &ast::MethodCallExpr,
 ) -> Option<Highlight> {
-    let func = sema.resolve_method_call(&method_call)?;
+    let func = sema.resolve_method_call(method_call)?;
 
     let mut h = SymbolKind::Function.into();
     h |= HlMod::Associated;
 
-    if func.is_unsafe(sema.db) || sema.is_unsafe_method_call(&method_call) {
+    if func.is_unsafe(sema.db) || sema.is_unsafe_method_call(method_call) {
         h |= HlMod::Unsafe;
     }
     if func.is_async(sema.db) {
index 5327af845c2a9a96a3e7a87315dd983bec38867b..478facfee8b268a695bb30f14929427272760315 100644 (file)
@@ -23,7 +23,7 @@ fn rainbowify(seed: u64) -> String {
     let hl_ranges = highlight(db, file_id, None, false);
     let text = parse.tree().syntax().to_string();
     let mut buf = String::new();
-    buf.push_str(&STYLE);
+    buf.push_str(STYLE);
     buf.push_str("<pre><code>");
     for r in &hl_ranges {
         let chunk = html_escape(&text[r.range]);
index 4269d339ebfdcc6ed7a33d06c59cedbf34beeabb..883252c0e534dc3eb72f31864451a9b76b8c0b60 100644 (file)
@@ -23,7 +23,7 @@ pub(super) fn ra_fixture(
     literal: ast::String,
     expanded: SyntaxToken,
 ) -> Option<()> {
-    let active_parameter = ActiveParameter::at_token(&sema, expanded)?;
+    let active_parameter = ActiveParameter::at_token(sema, expanded)?;
     if !active_parameter.ident().map_or(false, |name| name.text().starts_with("ra_fixture")) {
         return None;
     }
@@ -124,7 +124,7 @@ pub(super) fn doc_comment(
     }
 
     for attr in attributes.by_key("doc").attrs() {
-        let InFile { file_id, value: src } = attrs_source_map.source_of(&attr);
+        let InFile { file_id, value: src } = attrs_source_map.source_of(attr);
         if file_id != node.file_id {
             continue;
         }
index 81c4d95b1b5d2191f76ed8307a16a102cded9c0e..5cba9d11d004d982c9700063ff137c428f49793f 100644 (file)
@@ -88,12 +88,12 @@ fn on_enter_in_comment(
         if comment.text().ends_with(' ') {
             cov_mark::hit!(continues_end_of_line_comment_with_space);
             remove_trailing_whitespace = true;
-        } else if !followed_by_comment(&comment) {
+        } else if !followed_by_comment(comment) {
             return None;
         }
     }
 
-    let indent = node_indent(&file, comment.syntax())?;
+    let indent = node_indent(file, comment.syntax())?;
     let inserted = format!("\n{}{} $0", indent, prefix);
     let delete = if remove_trailing_whitespace {
         let trimmed_len = comment.text().trim_end().len() as u32;
@@ -188,7 +188,7 @@ mod tests {
     use crate::fixture;
 
     fn apply_on_enter(before: &str) -> Option<String> {
-        let (analysis, position) = fixture::position(&before);
+        let (analysis, position) = fixture::position(before);
         let result = analysis.on_enter(position).unwrap()?;
 
         let mut actual = analysis.file_text(position.file_id).unwrap().to_string();
index d202a85f958883845b0ee53f593fcf006dfaab77..749e8685bf18e24e8af4f513545e3b86202b2b52 100644 (file)
@@ -88,7 +88,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
             // We pick a single indentation level for the whole block comment based on the
             // comment where the assist was invoked. This will be prepended to the
             // contents of each line comment when they're put into the block comment.
-            let indentation = IndentLevel::from_token(&comment.syntax());
+            let indentation = IndentLevel::from_token(comment.syntax());
 
             let block_comment_body =
                 comments.into_iter().map(|c| line_comment_text(indentation, c)).join("\n");
@@ -167,7 +167,7 @@ fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String {
     if contents.is_empty() {
         contents.to_owned()
     } else {
-        indentation.to_string() + &contents
+        indentation.to_string() + contents
     }
 }
 
index 5eb6a57f07757c883d7132d89af6d8dc44668fa0..ef4a7cb50c8d9a753228a9ad8b0cf32a655957cc 100644 (file)
@@ -108,7 +108,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext)
         "Convert to guarded return",
         target,
         |edit| {
-            let if_indent_level = IndentLevel::from_node(&if_expr.syntax());
+            let if_indent_level = IndentLevel::from_node(if_expr.syntax());
             let new_block = match if_let_pat {
                 None => {
                     // If.
@@ -174,7 +174,7 @@ fn replace(
                         .take_while(|i| *i != end_of_then),
                 );
                 replace_children(
-                    &parent_block.syntax(),
+                    parent_block.syntax(),
                     RangeInclusive::new(
                         if_expr.clone().syntax().clone().into(),
                         if_expr.syntax().clone().into(),
index a2dba915cea37c5609bd264492071fa64c61b20e..63d28480afd46ed27d29ddfe81f66642af34c427 100644 (file)
@@ -76,7 +76,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext) -> Option
     let module = ctx.sema.scope(&insert_after).module()?;
 
     let vars_defined_in_body_and_outlive =
-        vars_defined_in_body_and_outlive(ctx, &body, &node.parent().as_ref().unwrap_or(&node));
+        vars_defined_in_body_and_outlive(ctx, &body, node.parent().as_ref().unwrap_or(&node));
     let ret_ty = body_return_ty(ctx, &body)?;
 
     // FIXME: we compute variables that outlive here just to check `never!` condition
@@ -808,7 +808,7 @@ trait HasTokenAtOffset {
 
 impl HasTokenAtOffset for SyntaxNode {
     fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken> {
-        SyntaxNode::token_at_offset(&self, offset)
+        SyntaxNode::token_at_offset(self, offset)
     }
 }
 
@@ -854,7 +854,7 @@ fn vars_defined_in_body_and_outlive(
     body: &FunctionBody,
     parent: &SyntaxNode,
 ) -> Vec<OutlivedLocal> {
-    let vars_defined_in_body = vars_defined_in_body(&body, ctx);
+    let vars_defined_in_body = vars_defined_in_body(body, ctx);
     vars_defined_in_body
         .into_iter()
         .filter_map(|var| var_outlives_body(ctx, body, var, parent))
@@ -868,7 +868,7 @@ fn is_defined_before(
     src: &hir::InFile<Either<ast::IdentPat, ast::SelfParam>>,
 ) -> bool {
     src.file_id.original_file(ctx.db()) == ctx.frange.file_id
-        && !body.contains_node(&either_syntax(&src.value))
+        && !body.contains_node(either_syntax(&src.value))
 }
 
 fn either_syntax(value: &Either<ast::IdentPat, ast::SelfParam>) -> &SyntaxNode {
index 3d2cd739aeaf0b1b197b0dda0903fc048351cfce..c8bc923f5d6d01834e6192b4aeecbee59c13314b 100644 (file)
@@ -212,7 +212,7 @@ fn variants(self, db: &RootDatabase) -> Vec<ExtendedVariant> {
 }
 
 fn resolve_enum_def(sema: &Semantics<RootDatabase>, expr: &ast::Expr) -> Option<ExtendedEnum> {
-    sema.type_of_expr(&expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
+    sema.type_of_expr(expr)?.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
         Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)),
         _ => {
             if ty.is_bool() {
@@ -228,7 +228,7 @@ fn resolve_tuple_of_enum_def(
     sema: &Semantics<RootDatabase>,
     expr: &ast::Expr,
 ) -> Option<Vec<ExtendedEnum>> {
-    sema.type_of_expr(&expr)?
+    sema.type_of_expr(expr)?
         .tuple_fields(sema.db)
         .iter()
         .map(|ty| {
index 89f7b2c2c37975d063d550f7cf677a0f63972c7f..9b432e92ffc95bef98ab20775c349ac1838014c8 100644 (file)
@@ -43,7 +43,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext) -> O
         _ => return None,
     };
 
-    let current_module = ctx.sema.scope(&path.syntax()).module()?;
+    let current_module = ctx.sema.scope(path.syntax()).module()?;
     let target_module = def.module(ctx.db())?;
 
     let vis = target_module.visibility_of(ctx.db(), &def)?;
index a9f71a70329ea4a6f00b9f1285db5dfa576efa1b..24939f2622adb050417169a5822d4ae4088ddd88 100644 (file)
@@ -47,7 +47,7 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext) ->
     let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text()));
 
     // Return early if we've found an existing new fn
-    let impl_def = find_struct_impl(&ctx, &parent_enum, &fn_name)?;
+    let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?;
 
     let target = variant.syntax().text_range();
     acc.add(
index e2f572ba371608f4a11ea0cc4cad3b7d80c8f0a0..986fb2315562d609e58ed9d101e8f218eb2b8edc 100644 (file)
@@ -136,7 +136,7 @@ fn generate_enum_projection_method(
         format!("{}_{}", props.fn_name_prefix, &to_lower_snake_case(&variant_name.text()));
 
     // Return early if we've found an existing new fn
-    let impl_def = find_struct_impl(&ctx, &parent_enum, &fn_name)?;
+    let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?;
 
     let target = variant.syntax().text_range();
     acc.add(AssistId(assist_id, AssistKind::Generate), assist_description, target, |builder| {
index bc9fc524b90e075c15480e8f0872fc995f935f77..706c995ac6fc3b5e14632a01aca336f6bd007663 100644 (file)
@@ -59,7 +59,7 @@ pub(crate) fn generate_function(acc: &mut Assists, ctx: &AssistContext) -> Optio
         None => None,
     };
 
-    let function_builder = FunctionBuilder::from_call(&ctx, &call, &path, target_module)?;
+    let function_builder = FunctionBuilder::from_call(ctx, &call, &path, target_module)?;
 
     let target = call.syntax().text_range();
     acc.add(
@@ -128,12 +128,12 @@ fn from_call(
                 file = in_file;
                 target
             }
-            None => next_space_for_fn_after_call_site(&call)?,
+            None => next_space_for_fn_after_call_site(call)?,
         };
         let needs_pub = target_module.is_some();
         let target_module = target_module.or_else(|| ctx.sema.scope(target.syntax()).module())?;
-        let fn_name = fn_name(&path)?;
-        let (type_params, params) = fn_args(ctx, target_module, &call)?;
+        let fn_name = fn_name(path)?;
+        let (type_params, params) = fn_args(ctx, target_module, call)?;
 
         // should_render_snippet intends to express a rough level of confidence about
         // the correctness of the return type.
index 09971226e1e2cb6ae58c9eaebaa6c1bf37de716d..cc020c92c522ba868a5649bdbad93d4347b2cd68 100644 (file)
@@ -75,7 +75,7 @@ pub(crate) fn generate_getter_impl(
     if mutable {
         format_to!(fn_name, "_mut");
     }
-    let impl_def = find_struct_impl(&ctx, &ast::Adt::Struct(strukt.clone()), fn_name.as_str())?;
+    let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), fn_name.as_str())?;
 
     let (id, label) = if mutable {
         ("generate_getter_mut", "Generate a mut getter method")
index 959a1f86cb576812ed5d9bb1bd8b2ef5f3f1eee9..b65e8387b00b5ff60c36ff296f7d1b6b48ce2c80 100644 (file)
@@ -36,7 +36,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
     };
 
     // Return early if we've found an existing new fn
-    let impl_def = find_struct_impl(&ctx, &ast::Adt::Struct(strukt.clone()), "new")?;
+    let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), "new")?;
 
     let target = strukt.syntax().text_range();
     acc.add(AssistId("generate_new", AssistKind::Generate), "Generate `new`", target, |builder| {
index 288cf745d5ff8c800f6e2b2b531c2a5c04bbb7d6..5bdf6b3f44a3956b481c300a5d08e4992d2314df 100644 (file)
@@ -39,7 +39,7 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext) -> Option<
     // Return early if we've found an existing fn
     let fn_name = to_lower_snake_case(&field_name.to_string());
     let impl_def = find_struct_impl(
-        &ctx,
+        ctx,
         &ast::Adt::Struct(strukt.clone()),
         format!("set_{}", fn_name).as_str(),
     )?;
index c8226550fbe46980cd68a6e9651a2db1d84908bd..b20fe992d89c20f7914b46ed6f473fc79b3a291a 100644 (file)
@@ -85,7 +85,7 @@ fn whitespace_start(it: SyntaxElement) -> Option<TextSize> {
 }
 
 fn adjusted_macro_contents(macro_call: &ast::MacroCall) -> Option<String> {
-    let contents = get_valid_macrocall_contents(&macro_call, "dbg")?;
+    let contents = get_valid_macrocall_contents(macro_call, "dbg")?;
     let macro_text_with_brackets = macro_call.token_tree()?.syntax().text();
     let macro_text_in_brackets = macro_text_with_brackets.slice(TextRange::new(
         TextSize::of('('),
index 933acead1b66b6b96e9448200a7269846e8970db..f6a926042c139c3919d10975d0575580b64d902d 100644 (file)
@@ -28,7 +28,7 @@ pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext) -> Option<(
         .or_else(|| ctx.find_node_at_offset::<ast::RecordPat>().map(Either::Right))?;
 
     let path = record.as_ref().either(|it| it.path(), |it| it.path())?;
-    let ranks = compute_fields_ranks(&path, &ctx)?;
+    let ranks = compute_fields_ranks(&path, ctx)?;
     let get_rank_of_field =
         |of: Option<_>| *ranks.get(&of.unwrap_or_default()).unwrap_or(&usize::MAX);
 
index 10d9cec3136bbe7c376f284633be1a9d740eba36..f9474c9f58331a78ada41551dbe9ed2abe68980b 100644 (file)
@@ -112,7 +112,7 @@ fn add_assist(
             let insert_pos = adt.syntax().text_range().end();
             let impl_def_with_items =
                 impl_def_from_trait(&ctx.sema, &annotated_name, trait_, trait_path);
-            update_attribute(builder, &input, &trait_name, &attr);
+            update_attribute(builder, input, &trait_name, attr);
             let trait_path = format!("{}", trait_path);
             match (ctx.config.snippet_cap, impl_def_with_items) {
                 (None, _) => {
index aee880625f8d1a34d5813955d417716df4697a45..9404aa26dbe93068065c968dafa8c1f92a3f1ab7 100644 (file)
@@ -169,7 +169,7 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext)
 }
 
 fn is_pat_wildcard_or_sad(sema: &hir::Semantics<RootDatabase>, pat: &ast::Pat) -> bool {
-    sema.type_of_pat(&pat)
+    sema.type_of_pat(pat)
         .and_then(|ty| TryEnum::from_ty(sema, &ty))
         .map(|it| it.sad_pattern().syntax().text() == pat.syntax().text())
         .unwrap_or_else(|| matches!(pat, ast::Pat::WildcardPat(_)))
index 2f1da82c7dc1f120fe65d24169736ce72ed444d5..140e27356fedd4f6c63da3579253b942bba1d433 100644 (file)
@@ -123,7 +123,7 @@ fn collect_jump_exprs(&mut self, block_expr: &BlockExpr, collect_break: bool) {
     fn handle_exprs(&mut self, expr: &Expr, collect_break: bool) {
         match expr {
             Expr::BlockExpr(block_expr) => {
-                self.collect_jump_exprs(&block_expr, collect_break);
+                self.collect_jump_exprs(block_expr, collect_break);
             }
             Expr::ReturnExpr(ret_expr) => {
                 if let Some(ret_expr_arg) = &ret_expr.expr() {
index 2b7c2d581e9afe3bf081bd1db8712bdb5a8f9bc3..bdf9cb71c5fcd287a863b3cdcb87909e868ccb9e 100644 (file)
@@ -74,7 +74,7 @@ pub(crate) fn check_assist_unresolved(assist: Handler, ra_fixture: &str) {
 #[track_caller]
 fn check_doc_test(assist_id: &str, before: &str, after: &str) {
     let after = trim_indent(after);
-    let (db, file_id, selection) = RootDatabase::with_range_or_offset(&before);
+    let (db, file_id, selection) = RootDatabase::with_range_or_offset(before);
     let before = db.file_text(file_id).to_string();
     let frange = FileRange { file_id, range: selection.into() };
 
index 30128a24a222ed872edc1554bc212b5931c41ac2..068df005bfaa218cc8e9343cfc1210c8b354628b 100644 (file)
@@ -492,7 +492,7 @@ pub(crate) fn add_method_to_adt(
     let start_offset = impl_def
         .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf))
         .unwrap_or_else(|| {
-            buf = generate_impl_text(&adt, &buf);
+            buf = generate_impl_text(adt, &buf);
             adt.syntax().text_range().end()
         });
 
index b3aabeab37adc77cd146275250ae3c0dbbacf229..cb8bc8b2fe502cfde95d71faae29fbdbb738d2d4 100644 (file)
@@ -187,7 +187,7 @@ fn from_method_call(expr: &ast::Expr) -> Option<String> {
         }
     }
 
-    normalize(&name)
+    normalize(name)
 }
 
 fn from_param(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>) -> Option<String> {
index 8ad57a0692aa2ba2f266ee0805f80bc094efb747..9552875c11fa53d87bb349ab35fb1f988cc4d9a3 100644 (file)
@@ -13,7 +13,7 @@ pub(crate) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
         _ => return complete_undotted_self(acc, ctx),
     };
 
-    let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
+    let receiver_ty = match ctx.sema.type_of_expr(dot_receiver) {
         Some(ty) => ty,
         _ => return,
     };
index 86eb2171424bab8a72e873085ba551ea3d23a4bb..9f98b21be220856607fdd8fd73216ffff4c29846 100644 (file)
@@ -34,7 +34,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
 
     let receiver_text = get_receiver_text(dot_receiver, receiver_is_ambiguous_float_literal);
 
-    let receiver_ty = match ctx.sema.type_of_expr(&dot_receiver) {
+    let receiver_ty = match ctx.sema.type_of_expr(dot_receiver) {
         Some(it) => it,
         None => return,
     };
@@ -50,7 +50,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
                 postfix_snippet(
                     ctx,
                     cap,
-                    &dot_receiver,
+                    dot_receiver,
                     "ifl",
                     "if let Ok {}",
                     &format!("if let Ok($1) = {} {{\n    $0\n}}", receiver_text),
@@ -60,7 +60,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
                 postfix_snippet(
                     ctx,
                     cap,
-                    &dot_receiver,
+                    dot_receiver,
                     "while",
                     "while let Ok {}",
                     &format!("while let Ok($1) = {} {{\n    $0\n}}", receiver_text),
@@ -71,7 +71,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
                 postfix_snippet(
                     ctx,
                     cap,
-                    &dot_receiver,
+                    dot_receiver,
                     "ifl",
                     "if let Some {}",
                     &format!("if let Some($1) = {} {{\n    $0\n}}", receiver_text),
@@ -81,7 +81,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
                 postfix_snippet(
                     ctx,
                     cap,
-                    &dot_receiver,
+                    dot_receiver,
                     "while",
                     "while let Some {}",
                     &format!("while let Some($1) = {} {{\n    $0\n}}", receiver_text),
@@ -93,7 +93,7 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
         postfix_snippet(
             ctx,
             cap,
-            &dot_receiver,
+            dot_receiver,
             "if",
             "if expr {}",
             &format!("if {} {{\n    $0\n}}", receiver_text),
@@ -102,22 +102,22 @@ pub(crate) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
         postfix_snippet(
             ctx,
             cap,
-            &dot_receiver,
+            dot_receiver,
             "while",
             "while expr {}",
             &format!("while {} {{\n    $0\n}}", receiver_text),
         )
         .add_to(acc);
-        postfix_snippet(ctx, cap, &dot_receiver, "not", "!expr", &format!("!{}", receiver_text))
+        postfix_snippet(ctx, cap, dot_receiver, "not", "!expr", &format!("!{}", receiver_text))
             .add_to(acc);
     }
 
-    postfix_snippet(ctx, cap, &dot_receiver, "ref", "&expr", &format!("&{}", receiver_text))
+    postfix_snippet(ctx, cap, dot_receiver, "ref", "&expr", &format!("&{}", receiver_text))
         .add_to(acc);
     postfix_snippet(
         ctx,
         cap,
-        &dot_receiver,
+        dot_receiver,
         "refm",
         "&mut expr",
         &format!("&mut {}", receiver_text),
index 9ebe1dcc0a388cd9b1308e9dd4c0702e9ed8901e..f619f8b5223aba85c04af3f10833d0dd62e76e52 100644 (file)
@@ -53,7 +53,7 @@ pub(crate) fn add_format_like_completions(
         for (label, macro_name) in KINDS {
             let snippet = parser.into_suggestion(macro_name);
 
-            postfix_snippet(ctx, cap, &dot_receiver, label, macro_name, &snippet).add_to(acc);
+            postfix_snippet(ctx, cap, dot_receiver, label, macro_name, &snippet).add_to(acc);
         }
     }
 }
index 58d4dd9ee81805ca19aba75e832ec8e4df605cd4..6083537b73e4dc2735292201aef334c66ee1fc9e 100644 (file)
@@ -15,7 +15,7 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon
         None => return,
     };
 
-    let resolution = match ctx.sema.resolve_path(&path) {
+    let resolution = match ctx.sema.resolve_path(path) {
         Some(res) => res,
         None => return,
     };
index 2c2a4aa6bf1de96496ef0886561b79f555abb56b..e4abe2742b6a9999932e849fc601e036ed932e75 100644 (file)
@@ -467,7 +467,7 @@ fn fill(
         self.expected_type = expected_type;
         self.expected_name = expected_name;
 
-        let name_like = match find_node_at_offset(&&file_with_fake_ident, offset) {
+        let name_like = match find_node_at_offset(&file_with_fake_ident, offset) {
             Some(it) => it,
             None => return,
         };
index 81d7a1a1d471b61a9c1ecb1d360a981025958caa..c567ac63dc963ac4e4feb0127fc07a97b60fdc5e 100644 (file)
@@ -115,12 +115,12 @@ pub(crate) fn determine_location(
 ) -> Option<ImmediateLocation> {
     let node = match name_like {
         ast::NameLike::NameRef(name_ref) => {
-            if ast::RecordExprField::for_field_name(&name_ref).is_some() {
+            if ast::RecordExprField::for_field_name(name_ref).is_some() {
                 return sema
                     .find_node_at_offset_with_macros(original_file, offset)
                     .map(ImmediateLocation::RecordExpr);
             }
-            if ast::RecordPatField::for_field_name_ref(&name_ref).is_some() {
+            if ast::RecordPatField::for_field_name_ref(name_ref).is_some() {
                 return sema
                     .find_node_at_offset_with_macros(original_file, offset)
                     .map(ImmediateLocation::RecordPat);
@@ -128,7 +128,7 @@ pub(crate) fn determine_location(
             maximize_name_ref(name_ref)
         }
         ast::NameLike::Name(name) => {
-            if ast::RecordPatField::for_field_name(&name).is_some() {
+            if ast::RecordPatField::for_field_name(name).is_some() {
                 return sema
                     .find_node_at_offset_with_macros(original_file, offset)
                     .map(ImmediateLocation::RecordPat);
index d3db55c35b81e703649129e1fc679bf92dba092d..a5508163124a58705200fcc1dfc2d2520140e044 100644 (file)
@@ -86,7 +86,7 @@ fn snippet_cap(&self) -> Option<SnippetCap> {
     }
 
     fn db(&self) -> &'a RootDatabase {
-        &self.completion.db
+        self.completion.db
     }
 
     fn source_range(&self) -> TextRange {
index b4e80f424a2019641c1f908c31bd002ee235e8dc..3717a0409a9ffbc753d04945b8400203c2207ab8 100644 (file)
@@ -75,10 +75,10 @@ fn render_pat(
 ) -> Option<String> {
     let mut pat = match kind {
         StructKind::Tuple if ctx.snippet_cap().is_some() => {
-            render_tuple_as_pat(&fields, &name, fields_omitted)
+            render_tuple_as_pat(fields, name, fields_omitted)
         }
         StructKind::Record => {
-            render_record_as_pat(ctx.db(), ctx.snippet_cap(), &fields, &name, fields_omitted)
+            render_record_as_pat(ctx.db(), ctx.snippet_cap(), fields, name, fields_omitted)
         }
         _ => return None,
     };
@@ -86,7 +86,7 @@ fn render_pat(
     if ctx.completion.is_param {
         pat.push(':');
         pat.push(' ');
-        pat.push_str(&name);
+        pat.push_str(name);
     }
     if ctx.snippet_cap().is_some() {
         pat.push_str("$0");
index 933bcad55b24a77daddbd352ad006138f03fe2e8..4795e25650bee16c4346aa1b2c5a24a74f8f6c7a 100644 (file)
@@ -162,7 +162,7 @@ pub fn at(db: &RootDatabase, position: FilePosition) -> Option<Self> {
     }
 
     pub fn at_token(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Self> {
-        let (signature, active_parameter) = call_info_impl(&sema, token)?;
+        let (signature, active_parameter) = call_info_impl(sema, token)?;
 
         let idx = active_parameter?;
         let mut params = signature.params(sema.db);
index ae52dd8bb87bcc39d3b8edfcfe54a341eb22e53b..9634d872e49ca3608659e70c3bf0b2fb3812f030 100644 (file)
@@ -323,7 +323,7 @@ fn import_for_item(
     }
 
     let segment_import =
-        find_import_for_segment(db, original_item_candidate, &unresolved_first_segment)?;
+        find_import_for_segment(db, original_item_candidate, unresolved_first_segment)?;
     let trait_item_to_import = item_as_assoc(db, original_item)
         .and_then(|assoc| assoc.containing_trait(db))
         .map(|trait_| ItemInNs::from(ModuleDef::from(trait_)));
@@ -383,7 +383,7 @@ fn find_import_for_segment(
         original_item
     } else {
         let matching_module =
-            module_with_segment_name(db, &unresolved_first_segment, original_item)?;
+            module_with_segment_name(db, unresolved_first_segment, original_item)?;
         ItemInNs::from(ModuleDef::from(matching_module))
     })
 }
index 0dbabb44fba674986575f3b9fd2b0495aecc29b5..ec29476a492d86ae902f4e0e3f97a7852808ee66 100644 (file)
@@ -124,7 +124,7 @@ fn recursive_merge(
                             .map(|tree_list| tree_list.use_trees().any(tree_is_self))
                             .unwrap_or(false)
                     };
-                    match (tree_contains_self(&lhs_t), tree_contains_self(&rhs_t)) {
+                    match (tree_contains_self(lhs_t), tree_contains_self(&rhs_t)) {
                         (true, false) => continue,
                         (false, true) => {
                             *lhs_t = rhs_t;
index 8152630f5808c74e541c8105e6ac6d7a6bd1989b..8bfbba4bbdddebcd06eb20651aefe888ab347bf9 100644 (file)
@@ -409,7 +409,7 @@ fn search(self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) {
                     if let Some(ast::NameLike::NameRef(name_ref)) =
                         sema.find_node_at_offset_with_descend(&tree, offset)
                     {
-                        if self.found_self_ty_name_ref(&self_ty, &name_ref, sink) {
+                        if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
                             return;
                         }
                     }
@@ -424,7 +424,7 @@ fn found_self_ty_name_ref(
         name_ref: &ast::NameRef,
         sink: &mut dyn FnMut(FileId, FileReference) -> bool,
     ) -> bool {
-        match NameRefClass::classify(self.sema, &name_ref) {
+        match NameRefClass::classify(self.sema, name_ref) {
             Some(NameRefClass::Definition(Definition::SelfType(impl_)))
                 if impl_.self_ty(self.sema.db) == *self_ty =>
             {
@@ -464,13 +464,13 @@ fn found_name_ref(
         name_ref: &ast::NameRef,
         sink: &mut dyn FnMut(FileId, FileReference) -> bool,
     ) -> bool {
-        match NameRefClass::classify(self.sema, &name_ref) {
+        match NameRefClass::classify(self.sema, name_ref) {
             Some(NameRefClass::Definition(def)) if def == self.def => {
                 let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
                 let reference = FileReference {
                     range,
                     name: ast::NameLike::NameRef(name_ref.clone()),
-                    access: reference_access(&def, &name_ref),
+                    access: reference_access(&def, name_ref),
                 };
                 sink(file_id, reference)
             }
@@ -480,7 +480,7 @@ fn found_name_ref(
                     let reference = FileReference {
                         range,
                         name: ast::NameLike::NameRef(name_ref.clone()),
-                        access: reference_access(&def, &name_ref),
+                        access: reference_access(&def, name_ref),
                     };
                     sink(file_id, reference)
                 } else {
@@ -491,10 +491,10 @@ fn found_name_ref(
                 let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
                 let access = match self.def {
                     Definition::Field(_) if field == self.def => {
-                        reference_access(&field, &name_ref)
+                        reference_access(&field, name_ref)
                     }
                     Definition::Local(l) if local == l => {
-                        reference_access(&Definition::Local(local), &name_ref)
+                        reference_access(&Definition::Local(local), name_ref)
                     }
                     _ => return false,
                 };
index b3072fb9f43ecfecaee45fd7d5fecd30c134d403..fb92a0ccc727956d35d4ba5a0caf546b2e0fc29a 100644 (file)
@@ -382,7 +382,7 @@ fn attempt_match_opt<T: AstNode>(
         code: Option<T>,
     ) -> Result<(), MatchFailed> {
         match (pattern, code) {
-            (Some(p), Some(c)) => self.attempt_match_node(phase, &p.syntax(), &c.syntax()),
+            (Some(p), Some(c)) => self.attempt_match_node(phase, p.syntax(), c.syntax()),
             (None, None) => Ok(()),
             (Some(p), None) => fail_match!("Pattern `{}` had nothing to match", p.syntax().text()),
             (None, Some(c)) => {
@@ -478,7 +478,7 @@ fn attempt_match_token_tree(
                                 if Some(first_token.text()) == next_pattern_token.as_deref() {
                                     if let Some(SyntaxElement::Node(p)) = pattern.next() {
                                         // We have a subtree that starts with the next token in our pattern.
-                                        self.attempt_match_token_tree(phase, &p, &n)?;
+                                        self.attempt_match_token_tree(phase, &p, n)?;
                                         break;
                                     }
                                 }
@@ -609,7 +609,7 @@ fn check_expr_type(
         expr: &ast::Expr,
     ) -> Result<usize, MatchFailed> {
         use hir::HirDisplay;
-        let code_type = self.sema.type_of_expr(&expr).ok_or_else(|| {
+        let code_type = self.sema.type_of_expr(expr).ok_or_else(|| {
             match_error!("Failed to get receiver type for `{}`", expr.syntax().text())
         })?;
         // Temporary needed to make the borrow checker happy.
index c9ccc1961007892f51926f6122e3efdc3f07bdba..9265af7c13a65dcb4b9f2df41dd6daf2534eecf0 100644 (file)
@@ -84,16 +84,16 @@ fn render_node_children(&mut self, node: &SyntaxNode) {
     fn render_node_or_token(&mut self, node_or_token: &SyntaxElement) {
         match node_or_token {
             SyntaxElement::Token(token) => {
-                self.render_token(&token);
+                self.render_token(token);
             }
             SyntaxElement::Node(child_node) => {
-                self.render_node(&child_node);
+                self.render_node(child_node);
             }
         }
     }
 
     fn render_node(&mut self, node: &SyntaxNode) {
-        if let Some(mod_path) = self.match_info.rendered_template_paths.get(&node) {
+        if let Some(mod_path) = self.match_info.rendered_template_paths.get(node) {
             self.out.push_str(&mod_path.to_string());
             // Emit everything except for the segment's name-ref, since we already effectively
             // emitted that as part of `mod_path`.
@@ -107,12 +107,12 @@ fn render_node(&mut self, node: &SyntaxNode) {
                 }
             }
         } else {
-            self.render_node_children(&node);
+            self.render_node_children(node);
         }
     }
 
     fn render_token(&mut self, token: &SyntaxToken) {
-        if let Some(placeholder) = self.rule.get_placeholder(&token) {
+        if let Some(placeholder) = self.rule.get_placeholder(token) {
             if let Some(placeholder_value) =
                 self.match_info.placeholder_values.get(&placeholder.ident)
             {
index 541da4122f7a68bc0fad64f96580e4e85b1afa4d..a66a7a4a8400d769fbb0522270f5018436ac63ce 100644 (file)
@@ -211,7 +211,7 @@ fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
         // First try resolving the whole path. This will work for things like
         // `std::collections::HashMap`, but will fail for things like
         // `std::collections::HashMap::new`.
-        if let Some(resolution) = self.scope.speculative_resolve(&path) {
+        if let Some(resolution) = self.scope.speculative_resolve(path) {
             return Some(resolution);
         }
         // Resolution failed, try resolving the qualifier (e.g. `std::collections::HashMap` and if
index 28cef742c4f8cd5b8b8bb1f52c3306605a8a6494..f2056919ed0e168105560570a652dbcad4715844 100644 (file)
@@ -173,7 +173,7 @@ fn slow_scan_node(
         if !is_search_permitted(code) {
             return;
         }
-        self.try_add_match(rule, &code, restrict_range, matches_out);
+        self.try_add_match(rule, code, restrict_range, matches_out);
         // If we've got a macro call, we already tried matching it pre-expansion, which is the only
         // way to match the whole macro, now try expanding it and matching the expansion.
         if let Some(macro_call) = ast::MacroCall::cast(code.clone()) {
index 1d8565dc0ede1914dae579a6aadeaed03c0ccdb9..5dd0d600fe7c91a40f9b254684ffef25160e7206 100644 (file)
@@ -129,7 +129,7 @@ fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
     let matched_strings: Vec<String> =
         match_finder.matches().flattened().matches.iter().map(|m| m.matched_text()).collect();
     if matched_strings != expected && !expected.is_empty() {
-        print_match_debug_info(&match_finder, position.file_id, &expected[0]);
+        print_match_debug_info(&match_finder, position.file_id, expected[0]);
     }
     assert_eq!(matched_strings, expected);
 }
index c982eb58f1fe823d9cd90fc6276946ffbead62e1..c0e1705c06f1167b0f5e3b7951d7322e1b7190c9 100644 (file)
@@ -121,7 +121,7 @@ fn add_err(&mut self, err: ExpandError) {
 
 /// Matching errors are added to the `Match`.
 pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree) -> Match {
-    let mut res = match_loop(pattern, &input);
+    let mut res = match_loop(pattern, input);
     res.bound_count = count(res.bindings.bindings());
     return res;
 
@@ -202,7 +202,7 @@ fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Frag
     }
 
     fn push_nested(&mut self, parent: &mut BindingsIdx, child: &BindingsIdx) {
-        let BindingsIdx(idx, nidx) = self.copy(&child);
+        let BindingsIdx(idx, nidx) = self.copy(child);
         self.nodes[parent.0].push(LinkNode::Node(Rc::new(BindingKind::Nested(idx, nidx))));
     }
 
@@ -221,7 +221,7 @@ fn build(self, idx: &BindingsIdx) -> Bindings {
 
     fn build_inner(&self, bindings: &mut Bindings, link_nodes: &[LinkNode<Rc<BindingKind>>]) {
         let mut nodes = Vec::new();
-        self.collect_nodes(&link_nodes, &mut nodes);
+        self.collect_nodes(link_nodes, &mut nodes);
 
         for cmd in nodes {
             match &**cmd {
@@ -282,7 +282,7 @@ fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Binding
 
         nested_refs.into_iter().for_each(|iter| {
             let mut child_bindings = Bindings::default();
-            self.build_inner(&mut child_bindings, &iter);
+            self.build_inner(&mut child_bindings, iter);
             nested.push(child_bindings)
         })
     }
@@ -417,7 +417,7 @@ macro_rules! try_push {
                     let sep_len = item.sep.as_ref().map_or(0, Separator::tt_count);
                     if item.sep.is_some() && sep_idx != sep_len {
                         let sep = item.sep.as_ref().unwrap();
-                        if src.clone().expect_separator(&sep, sep_idx) {
+                        if src.clone().expect_separator(sep, sep_idx) {
                             item.dot.next();
                             item.sep_parsed = Some(sep_idx + 1);
                             try_push!(next_items, item);
@@ -487,7 +487,7 @@ macro_rules! try_push {
                                 item.meta_result = Some((fork, match_res));
                                 try_push!(bb_items, item);
                             } else {
-                                bindings_builder.push_optional(&mut item.bindings, &name);
+                                bindings_builder.push_optional(&mut item.bindings, name);
                                 item.dot.next();
                                 cur_items.push(item);
                             }
@@ -495,7 +495,7 @@ macro_rules! try_push {
                         Some(err) => {
                             res.add_err(err);
                             if let Some(fragment) = match_res.value {
-                                bindings_builder.push_fragment(&mut item.bindings, &name, fragment);
+                                bindings_builder.push_fragment(&mut item.bindings, name, fragment);
                             }
                             item.is_error = true;
                             error_items.push(item);
@@ -504,7 +504,7 @@ macro_rules! try_push {
                 }
             }
             OpDelimited::Op(Op::Leaf(leaf)) => {
-                if let Err(err) = match_leaf(&leaf, &mut src.clone()) {
+                if let Err(err) = match_leaf(leaf, &mut src.clone()) {
                     res.add_err(err);
                     item.is_error = true;
                 } else {
@@ -640,10 +640,10 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
                 let (iter, match_res) = item.meta_result.take().unwrap();
                 match match_res.value {
                     Some(fragment) => {
-                        bindings_builder.push_fragment(&mut item.bindings, &name, fragment);
+                        bindings_builder.push_fragment(&mut item.bindings, name, fragment);
                     }
                     None if match_res.err.is_none() => {
-                        bindings_builder.push_optional(&mut item.bindings, &name);
+                        bindings_builder.push_optional(&mut item.bindings, name);
                     }
                     _ => {}
                 }
index dd7fa97d726ccb19ea132c05a2bad1d4481dedce..9a9c1a467ffc9a75b792f37e6aa1ad87d44c3a71 100644 (file)
@@ -55,7 +55,7 @@ pub(super) fn transcribe(
     template: &MetaTemplate,
     bindings: &Bindings,
 ) -> ExpandResult<tt::Subtree> {
-    let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() };
+    let mut ctx = ExpandCtx { bindings: bindings, nesting: Vec::new() };
     let mut arena: Vec<tt::TokenTree> = Vec::new();
     expand_subtree(&mut ctx, template, None, &mut arena)
 }
@@ -91,12 +91,12 @@ fn expand_subtree(
             Op::Leaf(tt) => arena.push(tt.clone().into()),
             Op::Subtree { tokens, delimiter } => {
                 let ExpandResult { value: tt, err: e } =
-                    expand_subtree(ctx, &tokens, *delimiter, arena);
+                    expand_subtree(ctx, tokens, *delimiter, arena);
                 err = err.or(e);
                 arena.push(tt.into());
             }
             Op::Var { name, id, .. } => {
-                let ExpandResult { value: fragment, err: e } = expand_var(ctx, &name, *id);
+                let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id);
                 err = err.or(e);
                 push_fragment(arena, fragment);
             }
@@ -141,7 +141,7 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr, id: tt::TokenId) -> ExpandResult
         .into();
         ExpandResult::ok(Fragment::Tokens(tt))
     } else {
-        ctx.bindings.get(&v, &mut ctx.nesting).map_or_else(
+        ctx.bindings.get(v, &mut ctx.nesting).map_or_else(
             |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) },
             |b| ExpandResult::ok(b.clone()),
         )
index 380a5074431c918f6ca180f89ef0a64bbe5147cc..8c8528aafe12d6e8db53ab3224d76d15a129dbfe 100644 (file)
@@ -280,8 +280,8 @@ fn parse(src: &mut TtIter, expect_arrow: bool) -> Result<Rule, ParseError> {
             .expect_subtree()
             .map_err(|()| ParseError::Expected("expected subtree".to_string()))?;
 
-        let lhs = MetaTemplate(parse_pattern(&lhs)?);
-        let rhs = MetaTemplate(parse_template(&rhs)?);
+        let lhs = MetaTemplate(parse_pattern(lhs)?);
+        let rhs = MetaTemplate(parse_template(rhs)?);
 
         Ok(crate::Rule { lhs, rhs })
     }
@@ -290,7 +290,7 @@ fn parse(src: &mut TtIter, expect_arrow: bool) -> Result<Rule, ParseError> {
 fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
     for op in pattern.iter() {
         match op {
-            Op::Subtree { tokens, .. } => validate(&tokens)?,
+            Op::Subtree { tokens, .. } => validate(tokens)?,
             Op::Repeat { tokens: subtree, separator, .. } => {
                 // Checks that no repetition which could match an empty token
                 // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558
index 04c0d3e7543d026c809328c84429f914311dfed1..deed884d2ddc864e8fe817f9762224e75a3b1a45 100644 (file)
@@ -42,7 +42,7 @@ pub(crate) fn peek(&self) -> Option<OpDelimited<'a>> {
     }
 
     pub(crate) fn reset(&self) -> Self {
-        Self { inner: &self.inner, idx: 0, delimited: self.delimited }
+        Self { inner: self.inner, idx: 0, delimited: self.delimited }
     }
 }
 
@@ -126,11 +126,11 @@ pub(crate) fn tt_count(&self) -> usize {
 }
 
 pub(crate) fn parse_template(template: &tt::Subtree) -> Result<Vec<Op>, ParseError> {
-    parse_inner(&template, Mode::Template).into_iter().collect()
+    parse_inner(template, Mode::Template).into_iter().collect()
 }
 
 pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Result<Vec<Op>, ParseError> {
-    parse_inner(&pattern, Mode::Pattern).into_iter().collect()
+    parse_inner(pattern, Mode::Pattern).into_iter().collect()
 }
 
 #[derive(Clone, Copy)]
@@ -140,7 +140,7 @@ enum Mode {
 }
 
 fn parse_inner(tt: &tt::Subtree, mode: Mode) -> Vec<Result<Op, ParseError>> {
-    let mut src = TtIter::new(&tt);
+    let mut src = TtIter::new(tt);
     std::iter::from_fn(move || {
         let first = src.next()?;
         Some(next_op(first, &mut src, mode))
@@ -171,7 +171,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul
             match second {
                 tt::TokenTree::Subtree(subtree) => {
                     let (separator, kind) = parse_repeat(src)?;
-                    let tokens = parse_inner(&subtree, mode)
+                    let tokens = parse_inner(subtree, mode)
                         .into_iter()
                         .collect::<Result<Vec<Op>, ParseError>>()?;
                     Op::Repeat { tokens: MetaTemplate(tokens), separator, kind }
@@ -191,7 +191,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul
                         Op::Var { name, kind, id }
                     }
                     tt::Leaf::Literal(lit) => {
-                        if is_boolean_literal(&lit) {
+                        if is_boolean_literal(lit) {
                             let name = lit.text.clone();
                             let kind = eat_fragment_kind(src, mode)?;
                             let id = lit.id;
@@ -206,7 +206,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul
         tt::TokenTree::Leaf(tt) => Op::Leaf(tt.clone()),
         tt::TokenTree::Subtree(subtree) => {
             let tokens =
-                parse_inner(&subtree, mode).into_iter().collect::<Result<Vec<Op>, ParseError>>()?;
+                parse_inner(subtree, mode).into_iter().collect::<Result<Vec<Op>, ParseError>>()?;
             Op::Subtree { tokens: MetaTemplate(tokens), delimiter: subtree.delimiter }
         }
     };
index bde370fdb1605e95f69a625e2a5a834689afb29a..ee80807ad42bbf212a9cf408d27cc3580661bf9d 100644 (file)
@@ -22,7 +22,7 @@ impl<'a> SubtreeTokenSource {
     #[cfg(test)]
     pub(crate) fn text(&self) -> SmolStr {
         match self.cached.get(self.curr.1) {
-            Some(ref tt) => tt.text.clone(),
+            Some(tt) => tt.text.clone(),
             _ => SmolStr::new(""),
         }
     }
@@ -59,7 +59,7 @@ pub(crate) fn new(buffer: &TokenBuffer) -> SubtreeTokenSource {
 
             current = match tt {
                 Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
-                    cached.push(convert_leaf(&leaf));
+                    cached.push(convert_leaf(leaf));
                     cursor.bump()
                 }
                 Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
@@ -114,7 +114,7 @@ fn bump(&mut self) {
     /// Is the current token a specified keyword?
     fn is_keyword(&self, kw: &str) -> bool {
         match self.cached.get(self.curr.1) {
-            Some(ref t) => t.text == *kw,
+            Some(t) => t.text == *kw,
             _ => false,
         }
     }
index 978c75747a207d3f75388be0a51596f55c4f28f4..cdc22425d9abcdf853fed044a31c6718667c86d7 100644 (file)
@@ -633,7 +633,7 @@ fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
                     }
                 }
             };
-            self.buf += &text;
+            self.buf += text;
             self.text_pos += TextSize::of(text);
         }
 
index 75c88687c9db4bab7ad0dfdc7993ccff6ca7de92..c788e427e7e07813e6081d99150fb0af41de582a 100644 (file)
@@ -490,7 +490,7 @@ macro_rules! structs {
 
 fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree {
     if let tt::TokenTree::Subtree(subtree) = tt {
-        return &subtree;
+        return subtree;
     }
     unreachable!("It is not a subtree");
 }
index bd54f2442bfa3670a995c48c1564b2f342fd1582..5a4eca7bf00a2781d53129de3c78f42669b91a87 100644 (file)
@@ -115,7 +115,7 @@ fn error(&mut self, _error: parser::ParseError) {
             }
         }
 
-        let buffer = TokenBuffer::from_tokens(&self.inner.as_slice());
+        let buffer = TokenBuffer::from_tokens(self.inner.as_slice());
         let mut src = SubtreeTokenSource::new(&buffer);
         let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
 
index 14eed42890cafdebb8850c22f4eeea7268594f29..899895578682cd4d2e92a686bc768391fd2f8748 100644 (file)
@@ -59,7 +59,7 @@ fn read(inp: &mut impl BufRead, buf: &mut String) -> io::Result<Option<Self>> {
         Ok(match read_json(inp, buf)? {
             None => None,
             Some(text) => {
-                let mut deserializer = serde_json::Deserializer::from_str(&text);
+                let mut deserializer = serde_json::Deserializer::from_str(text);
                 // Note that some proc-macro generate very deep syntax tree
                 // We have to disable the current limit of serde here
                 deserializer.disable_recursion_limit();
index 33a4f81686f3af2228af166d87233e9d9aeba889..53cb4bae7388a3719712988afb63c4d9b652b2a8 100644 (file)
@@ -184,7 +184,7 @@ fn collect(
 
                 // Copy-pasted from existing cargo_metadata. It seems like we
                 // should be using sered_stacker here?
-                let mut deserializer = serde_json::Deserializer::from_str(&line);
+                let mut deserializer = serde_json::Deserializer::from_str(line);
                 deserializer.disable_recursion_limit();
                 let message = Message::deserialize(&mut deserializer)
                     .unwrap_or(Message::TextLine(line.to_string()));
index b8ad083640c9f41bb4fb9d4442a1b8c4f98329fc..ac079f83e6d6eed18c77ea6ef6785f19313abe72 100644 (file)
@@ -278,7 +278,7 @@ pub fn from_cargo_metadata(
                 id, edition, name, manifest_path, version, metadata, ..
             } = meta_pkg;
             let meta = from_value::<PackageMetadata>(metadata.clone()).unwrap_or_default();
-            let is_member = ws_members.contains(&id);
+            let is_member = ws_members.contains(id);
             let edition = edition
                 .parse::<Edition>()
                 .with_context(|| format!("Failed to parse edition {}", edition))?;
index 4e39d6dd3c64f85c5ee2fa413c4a2b26a99c6a38..a22f79c15d8969d0d61a31dc4555370eda55c293 100644 (file)
@@ -142,12 +142,12 @@ fn discover_sysroot_src_dir(
         log::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core);
     }
 
-    get_rust_src(&sysroot_path)
+    get_rust_src(sysroot_path)
         .or_else(|| {
             let mut rustup = Command::new(toolchain::rustup());
             rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]);
             utf8_stdout(rustup).ok()?;
-            get_rust_src(&sysroot_path)
+            get_rust_src(sysroot_path)
         })
         .ok_or_else(|| {
             format_err!(
index 84990075f728e4d56be8d31d9156284d56cdf0aa..ef0f3c9e424e98c26d2ef16057b325078813183e 100644 (file)
@@ -185,7 +185,7 @@ pub fn load_inline(
 
     pub fn load_detached_files(detached_files: Vec<AbsPathBuf>) -> Result<ProjectWorkspace> {
         let sysroot = Sysroot::discover(
-            &detached_files.first().ok_or_else(|| format_err!("No detached files to load"))?,
+            detached_files.first().ok_or_else(|| format_err!("No detached files to load"))?,
         )?;
         let rustc_cfg = rustc_cfg::get(None, None);
         Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
@@ -324,7 +324,7 @@ pub fn to_crate_graph(
     pub fn collect_build_data_configs(&self, collector: &mut BuildDataCollector) {
         match self {
             ProjectWorkspace::Cargo { cargo, .. } => {
-                collector.add_config(&cargo.workspace_root(), cargo.build_data_config().clone());
+                collector.add_config(cargo.workspace_root(), cargo.build_data_config().clone());
             }
             _ => {}
         }
@@ -348,7 +348,7 @@ fn project_json_to_crate_graph(
         .crates()
         .filter_map(|(crate_id, krate)| {
             let file_path = &krate.root_module;
-            let file_id = load(&file_path)?;
+            let file_id = load(file_path)?;
             Some((crate_id, krate, file_id))
         })
         .map(|(crate_id, krate, file_id)| {
@@ -534,7 +534,7 @@ fn detached_files_to_crate_graph(
     cfg_options.extend(rustc_cfg);
 
     for detached_file in detached_files {
-        let file_id = match load(&detached_file) {
+        let file_id = match load(detached_file) {
             Some(file_id) => file_id,
             None => {
                 log::error!("Failed to load detached file {:?}", detached_file);
@@ -602,7 +602,7 @@ fn handle_rustc_crates(
                         crate_graph,
                         &rustc_workspace[pkg],
                         rustc_build_data_map.and_then(|it| it.get(&rustc_workspace[pkg].id)),
-                        &cfg_options,
+                        cfg_options,
                         proc_macro_loader,
                         file_id,
                         &rustc_workspace[tgt].name,
@@ -685,7 +685,7 @@ fn add_target_crate_root(
     let proc_macro = build_data
         .as_ref()
         .and_then(|it| it.proc_macro_dylib_path.as_ref())
-        .map(|it| proc_macro_loader(&it))
+        .map(|it| proc_macro_loader(it))
         .unwrap_or_default();
 
     let display_name = CrateDisplayName::from_canonical_name(cargo_name.to_string());
index f4cd43448d0c99209edf71ed1c575bae66e6a80d..5d854715299098e84f25f2f53f6aadcfaa8f650e 100644 (file)
@@ -123,7 +123,7 @@ pub(crate) fn for_file(
         let res = CargoTargetSpec {
             workspace_root: cargo_ws.workspace_root().to_path_buf(),
             cargo_toml: package_data.manifest.clone(),
-            package: cargo_ws.package_flag(&package_data),
+            package: cargo_ws.package_flag(package_data),
             target: target_data.name.clone(),
             target_kind: target_data.kind,
         };
index 19cb1c046d8275d859ca2a4ef5ac657bf26e8850..b5f5519b43ef2b856fec20c3f5548cec07c04b64 100644 (file)
@@ -126,7 +126,7 @@ fn load_crate_graph(
             }
         }
     }
-    let source_roots = source_root_config.partition(&vfs);
+    let source_roots = source_root_config.partition(vfs);
     analysis_change.set_roots(source_roots);
 
     analysis_change.set_crate_graph(crate_graph);
index d4b9db362b0e8d3d087edee6054c4681089b8c4a..2f63c26ce516ff655ac48009414438ee12c863c1 100644 (file)
@@ -47,7 +47,7 @@ pub(crate) fn add_check_diagnostic(
     ) {
         let diagnostics = self.check.entry(file_id).or_default();
         for existing_diagnostic in diagnostics.iter() {
-            if are_diagnostics_equal(&existing_diagnostic, &diagnostic) {
+            if are_diagnostics_equal(existing_diagnostic, &diagnostic) {
                 return;
             }
         }
index 82dd0da9a444b8f567a937b00aa09621dbb2266f..8594d923cd014d6df81e82a71713cba4aee029b7 100644 (file)
@@ -224,7 +224,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
 
     let mut message = rd.message.clone();
     for child in &rd.children {
-        let child = map_rust_child_diagnostic(config, workspace_root, &child);
+        let child = map_rust_child_diagnostic(config, workspace_root, child);
         match child {
             MappedRustChildDiagnostic::SubDiagnostic(sub) => {
                 subdiagnostics.push(sub);
@@ -268,7 +268,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
     primary_spans
         .iter()
         .flat_map(|primary_span| {
-            let primary_location = primary_location(config, workspace_root, &primary_span);
+            let primary_location = primary_location(config, workspace_root, primary_span);
 
             let mut message = message.clone();
             if needs_primary_span_label {
@@ -298,7 +298,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
                 // generated that code.
                 let is_in_macro_call = i != 0;
 
-                let secondary_location = location(config, workspace_root, &span);
+                let secondary_location = location(config, workspace_root, span);
                 if secondary_location == primary_location {
                     continue;
                 }
index 582a89667761e4fb606e4d022ddb13dc5636647d..583900cfeef2becc62209e7da78ed97f3090c52e 100644 (file)
@@ -194,7 +194,7 @@ pub(crate) fn process_changes(&mut self) -> bool {
                 change.change_file(file.file_id, text);
             }
             if has_fs_changes {
-                let roots = self.source_root_config.partition(&vfs);
+                let roots = self.source_root_config.partition(vfs);
                 change.set_roots(roots);
             }
             change
@@ -291,7 +291,7 @@ pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable<LineIndex>
     }
 
     pub(crate) fn url_file_version(&self, url: &Url) -> Option<i32> {
-        let path = from_proto::vfs_path(&url).ok()?;
+        let path = from_proto::vfs_path(url).ok()?;
         Some(self.mem_docs.get(&path)?.version)
     }
 
@@ -300,7 +300,7 @@ pub(crate) fn anchored_path(&self, path: &AnchoredPathBuf) -> Url {
         base.pop();
         let path = base.join(&path.path).unwrap();
         let path = path.as_path().unwrap();
-        url_from_abs_path(&path)
+        url_from_abs_path(path)
     }
 
     pub(crate) fn cargo_target_for_crate_root(
@@ -312,7 +312,7 @@ pub(crate) fn cargo_target_for_crate_root(
         let path = path.as_path()?;
         self.workspaces.iter().find_map(|ws| match ws {
             ProjectWorkspace::Cargo { cargo, .. } => {
-                cargo.target_by_root(&path).map(|it| (cargo, it))
+                cargo.target_by_root(path).map(|it| (cargo, it))
             }
             ProjectWorkspace::Json { .. } => None,
             ProjectWorkspace::DetachedFiles { .. } => None,
@@ -323,7 +323,7 @@ pub(crate) fn cargo_target_for_crate_root(
 pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {
     let path = vfs.file_path(id);
     let path = path.as_path().unwrap();
-    url_from_abs_path(&path)
+    url_from_abs_path(path)
 }
 
 pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> {
index 40dd0da3e71e7c28d92fe235ec3ef9bbc1433ec9..59339d4015e016bfa2fc454743da8f1d8756282a 100644 (file)
@@ -1396,7 +1396,7 @@ pub(crate) fn handle_semantic_tokens_full_delta(
 
     if let Some(prev_id) = &cached_tokens.result_id {
         if *prev_id == params.previous_result_id {
-            let delta = to_proto::semantic_token_delta(&cached_tokens, &semantic_tokens);
+            let delta = to_proto::semantic_token_delta(cached_tokens, &semantic_tokens);
             *cached_tokens = semantic_tokens;
             return Ok(Some(delta.into()));
         }
@@ -1540,7 +1540,7 @@ fn runnable_action_links(
     snap: &GlobalStateSnapshot,
     runnable: Runnable,
 ) -> Option<lsp_ext::CommandLinkGroup> {
-    let cargo_spec = CargoTargetSpec::for_file(&snap, runnable.nav.file_id).ok()?;
+    let cargo_spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id).ok()?;
     let hover_config = snap.config.hover();
     if !hover_config.runnable() || should_skip_target(&runnable, cargo_spec.as_ref()) {
         return None;
@@ -1624,7 +1624,7 @@ fn run_rustfmt(
     text_document: TextDocumentIdentifier,
     range: Option<lsp_types::Range>,
 ) -> Result<Option<Vec<lsp_types::TextEdit>>> {
-    let file_id = from_proto::file_id(&snap, &text_document.uri)?;
+    let file_id = from_proto::file_id(snap, &text_document.uri)?;
     let file = snap.analysis.file_text(file_id)?;
     let crate_ids = snap.analysis.crate_for(file_id)?;
 
@@ -1671,7 +1671,7 @@ fn run_rustfmt(
                     .into());
                 }
 
-                let frange = from_proto::file_range(&snap, text_document, range)?;
+                let frange = from_proto::file_range(snap, text_document, range)?;
                 let start_line = line_index.index.line_col(frange.range.start()).line;
                 let end_line = line_index.index.line_col(frange.range.end()).line;
 
index 8000b5490f0e1718250fb42f9ebca5904f0f2027..087c26a71fe11a827146316bcaf878d3121569f5 100644 (file)
@@ -124,7 +124,7 @@ fn covers(&self, line: u32) -> bool {
         match change.range {
             Some(range) => {
                 if !index_valid.covers(range.end.line) {
-                    line_index.index = Arc::new(ide::LineIndex::new(&old_text));
+                    line_index.index = Arc::new(ide::LineIndex::new(old_text));
                 }
                 index_valid = IndexValid::UpToLineExclusive(range.start.line);
                 let range = from_proto::text_range(&line_index, range);
index 31d8ea9e7fed5a0b38f8b44986c885b536b6a75a..fa5fc6fbfdb54c89593f5243778ac16a80adc9e0 100644 (file)
@@ -740,7 +740,7 @@ fn maybe_update_diagnostics(&mut self) {
         let subscriptions = self
             .mem_docs
             .keys()
-            .map(|path| self.vfs.read().0.file_id(&path).unwrap())
+            .map(|path| self.vfs.read().0.file_id(path).unwrap())
             .collect::<Vec<_>>();
 
         log::trace!("updating notifications for {:?}", subscriptions);
index 7428a3043ba1944198ddd770e565edb159127f2f..e53cd3c7ba541b59440823d49f233713f8f78a69 100644 (file)
@@ -405,7 +405,7 @@ pub(crate) fn semantic_tokens(
                 text_range =
                     TextRange::new(text_range.start(), text_range.end() - TextSize::of('\n'));
             }
-            let range = range(&line_index, text_range);
+            let range = range(line_index, text_range);
             builder.push(range, token_index, modifier_bitset);
         }
     }
@@ -781,7 +781,7 @@ pub(crate) fn snippet_workspace_edit(
         document_changes.extend_from_slice(&ops);
     }
     for (file_id, edit) in source_change.source_file_edits {
-        let edit = snippet_text_document_edit(&snap, source_change.is_snippet, file_id, edit)?;
+        let edit = snippet_text_document_edit(snap, source_change.is_snippet, file_id, edit)?;
         document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit));
     }
     let mut workspace_edit = lsp_ext::SnippetWorkspaceEdit {
@@ -957,7 +957,7 @@ pub(crate) fn code_lens(
             let annotation_range = range(&line_index, annotation.range);
 
             let action = run.action();
-            let r = runnable(&snap, run)?;
+            let r = runnable(snap, run)?;
 
             let command = if debug {
                 command::debug_single(&r)
@@ -1236,12 +1236,12 @@ fn main() {
         assert_eq!(folds.len(), 4);
 
         let line_index = LineIndex {
-            index: Arc::new(ide::LineIndex::new(&text)),
+            index: Arc::new(ide::LineIndex::new(text)),
             endings: LineEndings::Unix,
             encoding: OffsetEncoding::Utf16,
         };
         let converted: Vec<lsp_types::FoldingRange> =
-            folds.into_iter().map(|it| folding_range(&text, &line_index, true, it)).collect();
+            folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect();
 
         let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)];
         assert_eq!(converted.len(), expected_lines.len());
index 9e89209eae4d7bc4a7b5aa59d8bea06b23f95662..3585132d45e8b6302810655e1d20ec1ddf3c810f 100644 (file)
@@ -493,7 +493,7 @@ fn preserves_dos_line_endings() {
     }
 
     let server = Project::with_fixture(
-        &"
+        "
 //- /Cargo.toml
 [package]
 name = \"foo\"
@@ -758,7 +758,7 @@ pub fn foo(_input: TokenStream) -> TokenStream {
         ```rust
         fn bar()
         ```"#]]
-    .assert_eq(&value);
+    .assert_eq(value);
 }
 
 #[test]
@@ -795,7 +795,7 @@ fn main() {}
 
 "#;
     let server =
-        Project::with_fixture(&code).tmp_dir(tmp_dir).server().wait_until_workspace_is_loaded();
+        Project::with_fixture(code).tmp_dir(tmp_dir).server().wait_until_workspace_is_loaded();
 
     //rename same level file
     server.request::<WillRenameFiles>(
index 75e677762a602e446213970961a2232228daa94d..e22c295f93589ec982572bd17e50b0df9dafd1bf 100644 (file)
@@ -323,7 +323,7 @@ fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a Valu
 
             if !l.is_empty() {
                 assert!(!r.is_empty());
-                Some((&l[0], &r[0]))
+                Some((l[0], r[0]))
             } else {
                 assert_eq!(r.len(), 0);
                 None
index 19107ee3812d4c1c585afc1ff69e9ae86e7765ab..8698687d898a63d095f569b31065c84d438aa6e9 100644 (file)
@@ -30,7 +30,7 @@ pub fn split_prefix(&self, prefix: &ast::Path) -> ast::UseTree {
         let suffix = if self.path().as_ref() == Some(prefix) && self.use_tree_list().is_none() {
             make::path_unqualified(make::path_segment_self())
         } else {
-            match split_path_prefix(&prefix) {
+            match split_path_prefix(prefix) {
                 Some(it) => it,
                 None => return self.clone(),
             }
index 4b1e1ccee29cd7e0a5158b39d99e11f26effe484..ad52d9f54066f922407135b048384980d8ebfcf8 100644 (file)
@@ -242,7 +242,7 @@ pub fn value(&self) -> Option<Cow<'_, [u8]>> {
             (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
             (Ok(c), true) => {
                 buf.reserve_exact(text.len());
-                buf.extend_from_slice(&text[..char_range.start].as_bytes());
+                buf.extend_from_slice(text[..char_range.start].as_bytes());
                 buf.push(c as u8);
             }
             (Err(_), _) => has_error = true,
index 431ed06999d8e956a1aaeabb9f70c9e1223f8b9a..001921343d1dbde88ce1015f0dd304a9e1160f48 100644 (file)
@@ -15,7 +15,7 @@
 pub(crate) use crate::parsing::{lexer::*, reparsing::incremental_reparse};
 
 pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
-    let (tokens, lexer_errors) = tokenize(&text);
+    let (tokens, lexer_errors) = tokenize(text);
 
     let mut token_source = TextTokenSource::new(text, &tokens);
     let mut tree_sink = TextTreeSink::new(text, &tokens);
@@ -33,7 +33,7 @@ pub(crate) fn parse_text_fragment<T: AstNode>(
     text: &str,
     fragment_kind: parser::FragmentKind,
 ) -> Result<T, ()> {
-    let (tokens, lexer_errors) = tokenize(&text);
+    let (tokens, lexer_errors) = tokenize(text);
     if !lexer_errors.is_empty() {
         return Err(());
     }
index 7c8d0a4c48c409b73b0eb0687e2ef2be2201ff13..ae4844e486e586669e427fce6b3fcd9fbfa4b8d1 100644 (file)
@@ -144,7 +144,7 @@ fn rustc_token_kind_to_syntax_kind(
             }
 
             rustc_lexer::TokenKind::RawIdent => IDENT,
-            rustc_lexer::TokenKind::Literal { kind, .. } => return match_literal_kind(&kind),
+            rustc_lexer::TokenKind::Literal { kind, .. } => return match_literal_kind(kind),
 
             rustc_lexer::TokenKind::Lifetime { starts_with_number: false } => LIFETIME_IDENT,
             rustc_lexer::TokenKind::Lifetime { starts_with_number: true } => {
index 304f47b3dc9fbdf43c985d84f783ffaa2d913e21..186cc9e74c8c8fcdfe24a9070146992a1ea1ce9c 100644 (file)
@@ -26,11 +26,11 @@ pub(crate) fn incremental_reparse(
     edit: &Indel,
     errors: Vec<SyntaxError>,
 ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
-    if let Some((green, new_errors, old_range)) = reparse_token(node, &edit) {
+    if let Some((green, new_errors, old_range)) = reparse_token(node, edit) {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
 
-    if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) {
+    if let Some((green, new_errors, old_range)) = reparse_block(node, edit) {
         return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
     }
     None
@@ -52,7 +52,7 @@ fn reparse_token(
                 }
             }
 
-            let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit);
+            let mut new_text = get_text_after_edit(prev_token.clone().into(), edit);
             let (new_token_kind, new_err) = lex_single_syntax_kind(&new_text)?;
 
             if new_token_kind != prev_token_kind
index 9f24261717fc9ce6eb27ac1062617987d6f2e175..4961ca08dd0ceca99c9ae205deec3aa4850029d4 100644 (file)
@@ -69,13 +69,13 @@ fn parser_tests() {
     dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], "rast", |text, path| {
         let parse = SourceFile::parse(text);
         let errors = parse.errors();
-        assert_errors_are_absent(&errors, path);
+        assert_errors_are_absent(errors, path);
         parse.debug_dump()
     });
     dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], "rast", |text, path| {
         let parse = SourceFile::parse(text);
         let errors = parse.errors();
-        assert_errors_are_present(&errors, path);
+        assert_errors_are_present(errors, path);
         parse.debug_dump()
     });
 }
index 0a4590c8db1abc2d8690ebce6af2a061ee1664f1..0011f73c960e81aa062dd68cdfa592dc01c0f53e 100644 (file)
@@ -111,7 +111,7 @@ pub fn partition(&self, vfs: &Vfs) -> Vec<FileSet> {
         let mut scratch_space = Vec::new();
         let mut res = vec![FileSet::default(); self.len()];
         for (file_id, path) in vfs.iter() {
-            let root = self.classify(&path, &mut scratch_space);
+            let root = self.classify(path, &mut scratch_space);
             res[root].insert(file_id, path.clone())
         }
         res
index 3b54b24894d1acf3772685231bed944917d40e62..4aebb02bde8f8c9f5d2f670cf85131e8d0362991 100644 (file)
@@ -28,7 +28,7 @@ pub(crate) fn generate_lint_completions() -> Result<()> {
     contents.push('\n');
 
     cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?;
-    generate_descriptor_clippy(&mut contents, &Path::new("./target/clippy_lints.json"))?;
+    generate_descriptor_clippy(&mut contents, Path::new("./target/clippy_lints.json"))?;
     let contents = reformat(&contents)?;
 
     let destination = project_root().join("crates/ide_db/src/helpers/generated_lints.rs");
index b0b9e30db2a7f9a4c60a62e692847cd25f210e31..5435da76e414c0197405c81cc71aceba49cd37fb 100644 (file)
@@ -258,7 +258,7 @@ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
     for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
         res.push_str(chunk);
         if let Some(doc) = docs.next() {
-            write_doc_comment(&doc, &mut res);
+            write_doc_comment(doc, &mut res);
         }
     }
 
@@ -294,14 +294,14 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> {
 
     let full_keywords_values = &grammar.keywords;
     let full_keywords =
-        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(&kw)));
+        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)));
 
     let all_keywords_values =
         grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
     let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
     let all_keywords = all_keywords_values
         .iter()
-        .map(|name| format_ident!("{}_KW", to_upper_snake_case(&name)))
+        .map(|name| format_ident!("{}_KW", to_upper_snake_case(name)))
         .collect::<Vec<_>>();
 
     let literals =
index 34679062f9ffaad37b5439c95f938b6ca3d676a7..7b190d425f4702e29de7aa96d5c3d0aaaa26a3b7 100644 (file)
@@ -71,7 +71,7 @@ fn measure_build(&mut self) -> Result<()> {
         Ok(())
     }
     fn measure_analysis_stats_self(&mut self) -> Result<()> {
-        self.measure_analysis_stats_path("self", &".")
+        self.measure_analysis_stats_path("self", ".")
     }
     fn measure_analysis_stats(&mut self, bench: &str) -> Result<()> {
         self.measure_analysis_stats_path(