git.lizzy.rs Git - rust.git/commitdiff
Merge #7020
author    bors[bot] <26634292+bors[bot]@users.noreply.github.com>
          Wed, 23 Dec 2020 19:50:04 +0000 (19:50 +0000)
committer GitHub <noreply@github.com>
          Wed, 23 Dec 2020 19:50:04 +0000 (19:50 +0000)
7020: Implement const pat and expr inference r=flodiebold a=Veykril

Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
13 files changed:
Cargo.lock
crates/hir/src/code_model.rs
crates/hir_ty/Cargo.toml
crates/hir_ty/src/traits/chalk.rs
crates/ide/src/diagnostics.rs
crates/ide/src/lib.rs
crates/ide/src/references/rename.rs
crates/rust-analyzer/Cargo.toml
crates/rust-analyzer/src/caps.rs
crates/rust-analyzer/src/handlers.rs
crates/rust-analyzer/src/lib.rs
crates/rust-analyzer/src/main_loop.rs
crates/syntax/Cargo.toml

index 891cff55e372955e94f1b616862a377d6f34859b..fd04ec3c520f00150409145dc2a92dd3b94a6b9f 100644 (file)
@@ -168,9 +168,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "chalk-derive"
-version = "0.43.0"
+version = "0.45.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2d9e0c8adcced1ab0fea5cb8a38647922893d5b495e363e1814299fd380469b"
+checksum = "ec7dacf94958d1a930b95d049d9443860859af59eadc77849392093eb577bcee"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -180,19 +180,20 @@ dependencies = [
 
 [[package]]
 name = "chalk-ir"
-version = "0.43.0"
+version = "0.45.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5218266a5709bc4943de997e64d3fab41c9e9f68efd54a898de53135e987bd3"
+checksum = "a1a5b38ede247def17da87f4badb62396a5753db6048e2011d3089d8b3796c67"
 dependencies = [
+ "bitflags",
  "chalk-derive",
  "lazy_static",
 ]
 
 [[package]]
 name = "chalk-recursive"
-version = "0.43.0"
+version = "0.45.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed8f34f13fd4f30251f9f6f1dc56f80363201390ecbcac2fdfc8e33036cd9c4a"
+checksum = "7a18db146d7a023edc20ad094e8c2284451f7888719645004979617d1f17c041"
 dependencies = [
  "chalk-derive",
  "chalk-ir",
@@ -203,9 +204,9 @@ dependencies = [
 
 [[package]]
 name = "chalk-solve"
-version = "0.43.0"
+version = "0.45.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "379c9f584488346044709d4c638c38d61a06fe593d4de2ac5f15fd2b0ba4cd9d"
+checksum = "7f73e0de04a0f394e47ed8118e00541bcf681d7c3c2ef500fa743eb4cf3a4850"
 dependencies = [
  "chalk-derive",
  "chalk-ir",
@@ -507,9 +508,9 @@ checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
 
 [[package]]
 name = "heck"
-version = "0.3.1"
+version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205"
+checksum = "87cbf45460356b7deeb5e3415b5563308c0a9b057c85e12b06ad551f98d0a6ac"
 dependencies = [
  "unicode-segmentation",
 ]
@@ -1074,9 +1075,9 @@ dependencies = [
 
 [[package]]
 name = "parking_lot_core"
-version = "0.8.1"
+version = "0.8.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7c6d9b8427445284a09c55be860a15855ab580a417ccad9da88f5a06787ced0"
+checksum = "9ccb628cad4f84851442432c60ad8e1f607e29752d0bf072cbd0baf28aa34272"
 dependencies = [
  "cfg-if 1.0.0",
  "instant",
@@ -1358,6 +1359,7 @@ dependencies = [
  "rustc-hash",
  "serde",
  "serde_json",
+ "serde_path_to_error",
  "ssr",
  "stdx",
  "syntax",
@@ -1376,9 +1378,9 @@ dependencies = [
 
 [[package]]
 name = "rustc-ap-rustc_lexer"
-version = "691.0.0"
+version = "695.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44bc89d9ca7a78fb82e103b389362c55f03800745f8ba14e068b805cfaf783ec"
+checksum = "390bad134705b0bff02cd9541ac66df751a91c3cc734c3369cd6151ca269caed"
 dependencies = [
  "unicode-xid",
 ]
@@ -1526,6 +1528,15 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "serde_path_to_error"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42f6109f0506e20f7e0f910e51a0079acf41da8e0694e6442527c4ddf5a2b158"
+dependencies = [
+ "serde",
+]
+
 [[package]]
 name = "serde_repr"
 version = "0.1.6"
index 1d7e5ddd7f6a9aa9710aa36c94fe2391de8abffe..6d17d546acf92ec4d9ac16227ef01d753380cdcc 100644 (file)
@@ -374,8 +374,6 @@ pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) {
         let crate_def_map = db.crate_def_map(self.id.krate);
         crate_def_map.add_diagnostics(db.upcast(), self.id.local_id, sink);
         for decl in self.declarations(db) {
-            decl.diagnostics(db, sink);
-
             match decl {
                 crate::ModuleDef::Function(f) => f.diagnostics(db, sink),
                 crate::ModuleDef::Module(m) => {
@@ -384,7 +382,9 @@ pub fn diagnostics(self, db: &dyn HirDatabase, sink: &mut DiagnosticSink) {
                         m.diagnostics(db, sink)
                     }
                 }
-                _ => (),
+                _ => {
+                    decl.diagnostics(db, sink);
+                }
             }
         }
 
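
The hir change above stops calling the generic `decl.diagnostics(db, sink)` for every declaration up front and instead moves that call into the catch-all arm, so declarations that already have a dedicated arm (functions, nested modules) appear to be visited only once; the new incorrect-case test added to `crates/ide/src/diagnostics.rs` below checks that exactly one diagnostic is produced. A minimal, self-contained sketch of the same dispatch pattern, with made-up types purely for illustration:

    // Illustrative sketch only, not rust-analyzer code: the generic path
    // runs solely in the catch-all arm, so a declaration handled by a
    // dedicated arm is reported exactly once.
    enum Decl {
        Function,
        Module,
        Other,
    }

    fn emit_diagnostics(decl: &Decl, sink: &mut Vec<String>) {
        match decl {
            Decl::Function => sink.push("function-specific checks".into()),
            Decl::Module => sink.push("module-specific checks".into()),
            _ => sink.push("generic declaration checks".into()),
        }
    }

    fn main() {
        let mut sink = Vec::new();
        emit_diagnostics(&Decl::Function, &mut sink);
        assert_eq!(sink.len(), 1); // no duplicate report
    }
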
index 289e812fe57b6f21ca3c2b54fe7de0f931df1414..965c1780a74c519e7ee472fb4768d1e41089d8a8 100644 (file)
@@ -17,9 +17,9 @@ ena = "0.14.0"
 log = "0.4.8"
 rustc-hash = "1.1.0"
 scoped-tls = "1"
-chalk-solve = { version = "0.43", default-features = false }
-chalk-ir = "0.43"
-chalk-recursive = "0.43"
+chalk-solve = { version = "0.45", default-features = false }
+chalk-ir = "0.45"
+chalk-recursive = "0.45"
 
 stdx = { path = "../stdx", version = "0.0.0" }
 hir_def = { path = "../hir_def", version = "0.0.0" }
index 69eae6f79c9ec85eacab0f189aa3acf6d6603ef9..2196af677675f2e32fe3b9f939e351a9e0553a99 100644 (file)
@@ -56,8 +56,13 @@ fn trait_datum(&self, trait_id: TraitId) -> Arc<TraitDatum> {
     fn adt_datum(&self, struct_id: AdtId) -> Arc<StructDatum> {
         self.db.struct_datum(self.krate, struct_id)
     }
-    fn adt_repr(&self, _struct_id: AdtId) -> rust_ir::AdtRepr {
-        rust_ir::AdtRepr { repr_c: false, repr_packed: false }
+    fn adt_repr(&self, _struct_id: AdtId) -> Arc<rust_ir::AdtRepr<Interner>> {
+        // FIXME: keep track of these
+        Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None })
+    }
+    fn discriminant_type(&self, _ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> {
+        // FIXME: keep track of this
+        chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32)).intern(&Interner)
     }
     fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> {
         self.db.impl_datum(self.krate, impl_id)
@@ -457,6 +462,7 @@ fn well_known_trait_from_lang_attr(name: &str) -> Option<WellKnownTrait> {
         "fn" => WellKnownTrait::Fn,
         "unsize" => WellKnownTrait::Unsize,
         "coerce_unsized" => WellKnownTrait::CoerceUnsized,
+        "discriminant_kind" => WellKnownTrait::DiscriminantKind,
         _ => return None,
     })
 }
@@ -473,6 +479,7 @@ fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str {
         WellKnownTrait::Unsize => "unsize",
         WellKnownTrait::Unpin => "unpin",
         WellKnownTrait::CoerceUnsized => "coerce_unsized",
+        WellKnownTrait::DiscriminantKind => "discriminant_kind",
     }
 }
 
index 3ad30f0c9e22c627d8cd231f3bfec87fbb021a01..b2714cb6915afe1816f727f974441a82fc9e2cd2 100644 (file)
@@ -895,4 +895,17 @@ pub fn some_fn() -> TestStruct {
 "#,
         );
     }
+
+    #[test]
+    fn test_single_incorrect_case_diagnostic_in_function_name_issue_6970() {
+        let input = r#"fn FOO<|>() {}"#;
+        let expected = r#"fn foo() {}"#;
+
+        let (analysis, file_position) = fixture::position(input);
+        let diagnostics =
+            analysis.diagnostics(&DiagnosticsConfig::default(), file_position.file_id).unwrap();
+        assert_eq!(diagnostics.len(), 1);
+
+        check_fixes(input, expected);
+    }
 }
index dbad9a84f6a0244764fca82e100006acd314bca2..52c7f97759ffb5a1364106209cd3a6152f3aa716 100644 (file)
@@ -535,6 +535,14 @@ pub fn prepare_rename(
         self.with_db(|db| references::rename::prepare_rename(db, position))
     }
 
+    pub fn will_rename_file(
+        &self,
+        file_id: FileId,
+        new_name_stem: &str,
+    ) -> Cancelable<Option<SourceChange>> {
+        self.with_db(|db| references::rename::will_rename_file(db, file_id, new_name_stem))
+    }
+
     pub fn structural_search_replace(
         &self,
         query: &str,
index cd721b7ebcb382442d871037c702e297423fbd9f..15c95f2398cd7bb098d6d23437a1d7c2991767a1 100644 (file)
@@ -6,7 +6,7 @@
 };
 
 use hir::{Module, ModuleDef, ModuleSource, Semantics};
-use ide_db::base_db::{AnchoredPathBuf, FileRange, SourceDatabaseExt};
+use ide_db::base_db::{AnchoredPathBuf, FileId, FileRange, SourceDatabaseExt};
 use ide_db::{
     defs::{Definition, NameClass, NameRefClass},
     RootDatabase,
@@ -110,6 +110,23 @@ pub(crate) fn rename_with_semantics(
     }
 }
 
+pub(crate) fn will_rename_file(
+    db: &RootDatabase,
+    file_id: FileId,
+    new_name_stem: &str,
+) -> Option<SourceChange> {
+    let sema = Semantics::new(db);
+    let module = sema.to_module_def(file_id)?;
+
+    let decl = module.declaration_source(db)?;
+    let range = decl.value.name()?.syntax().text_range();
+
+    let position = FilePosition { file_id: decl.file_id.original_file(db), offset: range.start() };
+    let mut change = rename_mod(&sema, position, module, new_name_stem).ok()?.info;
+    change.file_system_edits.clear();
+    Some(change)
+}
+
 fn find_module_at_offset(
     sema: &Semantics<RootDatabase>,
     position: FilePosition,
index 0b4d3f4ebbe70f60834d1f8d8fb55f698beb82ab..53e70eaf7b508de7d910690ac1277c38b5aa48a0 100644 (file)
@@ -28,6 +28,7 @@ oorandom = "11.1.2"
 rustc-hash = "1.1.0"
 serde = { version = "1.0.106", features = ["derive"] }
 serde_json = { version = "1.0.48", features = ["preserve_order"] }
+serde_path_to_error = "0.1"
 threadpool = "1.7.1"
 rayon = "1.5"
 mimalloc = { version = "0.1.19", default-features = false, optional = true }
index de5eb93b50683a9cda82988db35f6e143b90abb0..80e46bf7f15edc545cbefd2ff76d0f2987086de3 100644 (file)
@@ -5,12 +5,14 @@
 use lsp_types::{
     CallHierarchyServerCapability, ClientCapabilities, CodeActionKind, CodeActionOptions,
     CodeActionProviderCapability, CodeLensOptions, CompletionOptions,
-    DocumentOnTypeFormattingOptions, FoldingRangeProviderCapability, HoverProviderCapability,
-    ImplementationProviderCapability, OneOf, RenameOptions, SaveOptions,
+    DocumentOnTypeFormattingOptions, FileOperationFilter, FileOperationPattern,
+    FileOperationPatternKind, FileOperationRegistrationOptions, FoldingRangeProviderCapability,
+    HoverProviderCapability, ImplementationProviderCapability, OneOf, RenameOptions, SaveOptions,
     SelectionRangeProviderCapability, SemanticTokensFullOptions, SemanticTokensLegend,
     SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability,
     TextDocumentSyncKind, TextDocumentSyncOptions, TypeDefinitionProviderCapability,
-    WorkDoneProgressOptions,
+    WorkDoneProgressOptions, WorkspaceFileOperationsServerCapabilities,
+    WorkspaceServerCapabilities,
 };
 use rustc_hash::FxHashSet;
 use serde_json::json;
@@ -68,7 +70,26 @@ pub fn server_capabilities(client_caps: &ClientCapabilities) -> ServerCapabilities {
         document_link_provider: None,
         color_provider: None,
         execute_command_provider: None,
-        workspace: None,
+        workspace: Some(WorkspaceServerCapabilities {
+            workspace_folders: None,
+            file_operations: Some(WorkspaceFileOperationsServerCapabilities {
+                did_create: None,
+                will_create: None,
+                did_rename: None,
+                will_rename: Some(FileOperationRegistrationOptions {
+                    filters: vec![FileOperationFilter {
+                        scheme: Some(String::from("file")),
+                        pattern: FileOperationPattern {
+                            glob: String::from("**/*.rs"),
+                            matches: Some(FileOperationPatternKind::File),
+                            options: None,
+                        },
+                    }],
+                }),
+                did_delete: None,
+                will_delete: None,
+            }),
+        }),
         call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
         semantic_tokens_provider: Some(
             SemanticTokensOptions {
index 66f8bee991579feea8218dca29cc620d8a67e44b..25692793b9764d7f34254f55eb8b5d485ff99ae0 100644 (file)
@@ -5,11 +5,13 @@
 use std::{
     io::Write as _,
     process::{self, Stdio},
+    sync::Arc,
 };
 
 use ide::{
-    CompletionResolveCapability, FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData,
-    NavigationTarget, Query, RangeInfo, Runnable, RunnableKind, SearchScope, SymbolKind, TextEdit,
+    AssistConfig, CompletionResolveCapability, FileId, FilePosition, FileRange, HoverAction,
+    HoverGotoTypeData, LineIndex, NavigationTarget, Query, RangeInfo, Runnable, RunnableKind,
+    SearchScope, SourceChange, SymbolKind, TextEdit,
 };
 use itertools::Itertools;
 use lsp_server::ErrorCode;
@@ -400,6 +402,45 @@ fn exec_query(snap: &GlobalStateSnapshot, query: Query) -> Result<Vec<SymbolInformation>> {
     }
 }
 
+pub(crate) fn handle_will_rename_files(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::RenameFilesParams,
+) -> Result<Option<lsp_types::WorkspaceEdit>> {
+    let _p = profile::span("handle_will_rename_files");
+
+    let source_changes: Vec<SourceChange> = params
+        .files
+        .into_iter()
+        .filter_map(|file_rename| {
+            let from = Url::parse(&file_rename.old_uri).ok()?;
+            let to = Url::parse(&file_rename.new_uri).ok()?;
+
+            let from_path = from.to_file_path().ok()?;
+            let to_path = to.to_file_path().ok()?;
+
+            // Limit to single-level moves for now.
+            match (from_path.parent(), to_path.parent()) {
+                (Some(p1), Some(p2)) if p1 == p2 => {
+                    let new_name = to_path.file_stem()?;
+                    let new_name = new_name.to_str()?;
+                    Some((snap.url_to_file_id(&from).ok()?, new_name.to_string()))
+                }
+                _ => None,
+            }
+        })
+        .filter_map(|(file_id, new_name)| {
+            snap.analysis.will_rename_file(file_id, &new_name).ok()?
+        })
+        .collect();
+
+    // Drop file system edits since we're just renaming things on the same level
+    let edits = source_changes.into_iter().map(|it| it.source_file_edits).flatten().collect();
+    let source_change = SourceChange::from_edits(edits, Vec::new());
+
+    let workspace_edit = to_proto::workspace_edit(&snap, source_change)?;
+    Ok(Some(workspace_edit))
+}
+
 pub(crate) fn handle_goto_definition(
     snap: GlobalStateSnapshot,
     params: lsp_types::GotoDefinitionParams,
@@ -865,58 +906,8 @@ pub(crate) fn handle_formatting(
     }
 }
 
-fn handle_fixes(
-    snap: &GlobalStateSnapshot,
-    params: &lsp_types::CodeActionParams,
-    res: &mut Vec<lsp_ext::CodeAction>,
-) -> Result<()> {
-    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
-    let line_index = snap.analysis.file_line_index(file_id)?;
-    let range = from_proto::text_range(&line_index, params.range);
-
-    match &params.context.only {
-        Some(v) => {
-            if !v.iter().any(|it| {
-                it == &lsp_types::CodeActionKind::EMPTY
-                    || it == &lsp_types::CodeActionKind::QUICKFIX
-            }) {
-                return Ok(());
-            }
-        }
-        None => {}
-    };
-
-    let diagnostics = snap.analysis.diagnostics(&snap.config.diagnostics, file_id)?;
-
-    for fix in diagnostics
-        .into_iter()
-        .filter_map(|d| d.fix)
-        .filter(|fix| fix.fix_trigger_range.intersect(range).is_some())
-    {
-        let edit = to_proto::snippet_workspace_edit(&snap, fix.source_change)?;
-        let action = lsp_ext::CodeAction {
-            title: fix.label.to_string(),
-            group: None,
-            kind: Some(CodeActionKind::QUICKFIX),
-            edit: Some(edit),
-            is_preferred: Some(false),
-            data: None,
-        };
-        res.push(action);
-    }
-
-    for fix in snap.check_fixes.get(&file_id).into_iter().flatten() {
-        let fix_range = from_proto::text_range(&line_index, fix.range);
-        if fix_range.intersect(range).is_none() {
-            continue;
-        }
-        res.push(fix.action.clone());
-    }
-    Ok(())
-}
-
 pub(crate) fn handle_code_action(
-    mut snap: GlobalStateSnapshot,
+    snap: GlobalStateSnapshot,
     params: lsp_types::CodeActionParams,
 ) -> Result<Option<Vec<lsp_ext::CodeAction>>> {
     let _p = profile::span("handle_code_action");
@@ -932,24 +923,35 @@ pub(crate) fn handle_code_action(
     let range = from_proto::text_range(&line_index, params.range);
     let frange = FileRange { file_id, range };
 
-    snap.config.assist.allowed = params
-        .clone()
-        .context
-        .only
-        .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
+    let assists_config = AssistConfig {
+        allowed: params
+            .clone()
+            .context
+            .only
+            .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()),
+        ..snap.config.assist
+    };
 
     let mut res: Vec<lsp_ext::CodeAction> = Vec::new();
 
-    handle_fixes(&snap, &params, &mut res)?;
+    let include_quick_fixes = match &params.context.only {
+        Some(v) => v.iter().any(|it| {
+            it == &lsp_types::CodeActionKind::EMPTY || it == &lsp_types::CodeActionKind::QUICKFIX
+        }),
+        None => true,
+    };
+    if include_quick_fixes {
+        add_quick_fixes(&snap, frange, &line_index, &mut res)?;
+    }
 
     if snap.config.client_caps.code_action_resolve {
         for (index, assist) in
-            snap.analysis.unresolved_assists(&snap.config.assist, frange)?.into_iter().enumerate()
+            snap.analysis.unresolved_assists(&assists_config, frange)?.into_iter().enumerate()
         {
             res.push(to_proto::unresolved_code_action(&snap, params.clone(), assist, index)?);
         }
     } else {
-        for assist in snap.analysis.resolved_assists(&snap.config.assist, frange)?.into_iter() {
+        for assist in snap.analysis.resolved_assists(&assists_config, frange)?.into_iter() {
             res.push(to_proto::resolved_code_action(&snap, assist)?);
         }
     }
@@ -957,6 +959,40 @@ pub(crate) fn handle_code_action(
     Ok(Some(res))
 }
 
+fn add_quick_fixes(
+    snap: &GlobalStateSnapshot,
+    frange: FileRange,
+    line_index: &Arc<LineIndex>,
+    acc: &mut Vec<lsp_ext::CodeAction>,
+) -> Result<()> {
+    let diagnostics = snap.analysis.diagnostics(&snap.config.diagnostics, frange.file_id)?;
+
+    for fix in diagnostics
+        .into_iter()
+        .filter_map(|d| d.fix)
+        .filter(|fix| fix.fix_trigger_range.intersect(frange.range).is_some())
+    {
+        let edit = to_proto::snippet_workspace_edit(&snap, fix.source_change)?;
+        let action = lsp_ext::CodeAction {
+            title: fix.label.to_string(),
+            group: None,
+            kind: Some(CodeActionKind::QUICKFIX),
+            edit: Some(edit),
+            is_preferred: Some(false),
+            data: None,
+        };
+        acc.push(action);
+    }
+
+    for fix in snap.check_fixes.get(&frange.file_id).into_iter().flatten() {
+        let fix_range = from_proto::text_range(&line_index, fix.range);
+        if fix_range.intersect(frange.range).is_some() {
+            acc.push(fix.action.clone());
+        }
+    }
+    Ok(())
+}
+
 pub(crate) fn handle_code_action_resolve(
     mut snap: GlobalStateSnapshot,
     mut code_action: lsp_ext::CodeAction,
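
The `handle_will_rename_files` handler added above only acts on renames that keep the file in the same directory, takes the new file stem as the new module name, and drops file-system edits because the client performs the actual move itself. A small standalone sketch of that same-parent/stem check; the helper is illustrative and not part of the commit:

    use std::path::Path;

    // Illustrative helper mirroring the restriction in `handle_will_rename_files`:
    // accept only renames within the same directory and use the new file stem
    // as the new module name.
    fn new_module_name(old: &Path, new: &Path) -> Option<String> {
        match (old.parent(), new.parent()) {
            (Some(p1), Some(p2)) if p1 == p2 => Some(new.file_stem()?.to_str()?.to_string()),
            _ => None,
        }
    }

    fn main() {
        let old = Path::new("/project/src/foo.rs");
        assert_eq!(
            new_module_name(old, Path::new("/project/src/bar.rs")).as_deref(),
            Some("bar")
        );
        // A move into another directory is skipped.
        assert_eq!(new_module_name(old, Path::new("/project/src/nested/foo.rs")), None);
    }
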
index 79fe30e5301ee4e80991e8546163e25025d95e0c..d538ad69a1243fcd7151ccab6aaca503f47fd096 100644 (file)
@@ -46,7 +46,7 @@ macro_rules! eprintln {
 pub type Result<T, E = Error> = std::result::Result<T, E>;
 
 pub fn from_json<T: DeserializeOwned>(what: &'static str, json: serde_json::Value) -> Result<T> {
-    let res = T::deserialize(&json)
+    let res = serde_path_to_error::deserialize(&json)
         .map_err(|e| format!("Failed to deserialize {}: {}; {}", what, e, json))?;
     Ok(res)
 }
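
The `from_json` change above replaces plain `T::deserialize` with `serde_path_to_error::deserialize`, whose error value is prefixed with the path to the field that failed, which makes it much easier to see which setting in a JSON payload was malformed. A small self-contained sketch of that behaviour with a made-up config struct:

    use serde::Deserialize;

    #[derive(Deserialize, Debug)]
    struct Config {
        cargo: Cargo,
    }

    #[derive(Deserialize, Debug)]
    struct Cargo {
        all_features: bool,
    }

    fn main() {
        // The field deliberately has the wrong type.
        let json = serde_json::json!({ "cargo": { "all_features": "yes" } });
        let err = serde_path_to_error::deserialize::<_, Config>(&json).unwrap_err();
        // Prints something like:
        // cargo.all_features: invalid type: string "yes", expected a boolean
        println!("{}", err);
    }
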
index ec3d5e0600ffdab656cbe66861d26894c4eb7222..5d55dc96e2a18e687d6144a579595b4e2c7bbfc6 100644 (file)
@@ -485,6 +485,7 @@ fn on_request(&mut self, request_received: Instant, req: Request) -> Result<()>
             .on::<lsp_types::request::SemanticTokensRangeRequest>(
                 handlers::handle_semantic_tokens_range,
             )
+            .on::<lsp_types::request::WillRenameFiles>(handlers::handle_will_rename_files)
             .on::<lsp_ext::Ssr>(handlers::handle_ssr)
             .finish();
         Ok(())
index c6a6f11e13375f1262cae62aef6a2fff64e5846c..21015591c79e44f4f3b410a6b19661fe552ea8b4 100644 (file)
@@ -13,7 +13,7 @@ doctest = false
 [dependencies]
 itertools = "0.9.0"
 rowan = "0.10.0"
-rustc_lexer = { version = "691.0.0", package = "rustc-ap-rustc_lexer" }
+rustc_lexer = { version = "695.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
 arrayvec = "0.5.1"
 once_cell = "1.3.1"