Before submitting, please make sure that you're not running into one of these known issues:
1. extension doesn't load in VSCodium: #11080
- 2. VS Code Marketplace serves old stable version: #11098
- 3. on-the-fly diagnostics are mostly unimplemented (`cargo check` diagnostics will be shown when saving a file): #3107
+ 2. on-the-fly diagnostics are mostly unimplemented (`cargo check` diagnostics will be shown when saving a file): #3107
Otherwise please try to provide information which will help us to fix the issue faster. Minimal reproducible examples with few dependencies are especially lovely <3.
-->
**rust-analyzer version**: (eg. output of "Rust Analyzer: Show RA Version" command)
**rustc version**: (eg. output of `rustc -V`)
+
+**relevant settings**: (eg. client settings, or environment variables like `CARGO`, `RUSTUP_HOME` or `CARGO_HOME`)
[[package]]
name = "arbitrary"
-version = "1.0.3"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "510c76ecefdceada737ea728f4f9a84bd2e1ef29f1ba555e560940fe279954de"
+checksum = "c38b6b6b79f671c25e1a3e785b7b82d7562ffc9cd3efdc98627e5668a2472490"
[[package]]
name = "arrayvec"
[[package]]
name = "derive_arbitrary"
-version = "1.0.2"
+version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b24629208e87a2d8b396ff43b15c4afb0a69cea3fbbaa9ed9b92b7c02f0aed73"
+checksum = "98e23c06c035dac87bd802d98f368df73a7f2cb05a66ffbd1f377e821fac4af9"
dependencies = [
"proc-macro2",
"quote",
"cfg",
"cov-mark",
"either",
+ "expect-test",
"hashbrown 0.12.0",
"itertools",
"la-arena",
[[package]]
name = "memmap2"
-version = "0.5.2"
+version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fe3179b85e1fd8b14447cbebadb75e45a1002f541b925f0bfec366d56a81c56d"
+checksum = "057a3db23999c867821a7a59feb06a578fcb03685e983dff90daf9e7d24ac08f"
dependencies = [
"libc",
]
[[package]]
name = "ntapi"
-version = "0.3.6"
+version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44"
+checksum = "c28774a7fd2fbb4f0babd8237ce554b73af68021b5f695a3cebd6c59bac0980f"
dependencies = [
"winapi",
]
[[package]]
name = "serde_json"
-version = "1.0.78"
+version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d23c1ba4cf0efd44be32017709280b32d1cea5c3f1275c3b6d9e8bc54f758085"
+checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95"
dependencies = [
"indexmap",
"itoa",
[[package]]
name = "ungrammar"
-version = "1.14.9"
+version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66be59c2fd880e3d76d1a6cf6d34114008f1d8af2748d4ad9d39ea712f14fda9"
+checksum = "ed01567101450f7d600508e7680df6005ae4fe97119d79b0364cc5910ff39732"
[[package]]
name = "unicase"
db
}
+ /// Creates a test database from `ra_fixture`, registering the given
+ /// extra proc macros in addition to those declared by the fixture.
+ ///
+ /// Panics if the fixture contains a cursor/position marker, since this
+ /// constructor returns no `FilePosition`.
+ fn with_files_extra_proc_macros(
+ ra_fixture: &str,
+ proc_macros: Vec<(String, ProcMacro)>,
+ ) -> Self {
+ let fixture = ChangeFixture::parse_with_proc_macros(ra_fixture, proc_macros);
+ let mut db = Self::default();
+ fixture.change.apply(&mut db);
+ assert!(fixture.file_position.is_none());
+ db
+ }
+
fn with_position(ra_fixture: &str) -> (Self, FilePosition) {
let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture);
let offset = range_or_offset.expect_offset();
impl ChangeFixture {
pub fn parse(ra_fixture: &str) -> ChangeFixture {
- let (mini_core, proc_macros, fixture) = Fixture::parse(ra_fixture);
+ Self::parse_with_proc_macros(ra_fixture, Vec::new())
+ }
+
+ pub fn parse_with_proc_macros(
+ ra_fixture: &str,
+ mut proc_macros: Vec<(String, ProcMacro)>,
+ ) -> ChangeFixture {
+ let (mini_core, proc_macro_names, fixture) = Fixture::parse(ra_fixture);
let mut change = Change::new();
let mut files = Vec::new();
}
}
- if !proc_macros.is_empty() {
+ if !proc_macro_names.is_empty() {
let proc_lib_file = file_id;
file_id.0 += 1;
- let (proc_macro, source) = test_proc_macros(&proc_macros);
+ proc_macros.extend(default_test_proc_macros());
+ let (proc_macro, source) = filter_test_proc_macros(&proc_macro_names, proc_macros);
let mut fs = FileSet::default();
fs.insert(
proc_lib_file,
}
}
-fn test_proc_macros(proc_macros: &[String]) -> (Vec<ProcMacro>, String) {
- // The source here is only required so that paths to the macros exist and are resolvable.
- let source = r#"
+fn default_test_proc_macros() -> [(String, ProcMacro); 4] {
+ [
+ (
+ r#"
#[proc_macro_attribute]
pub fn identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
item
}
+"#
+ .into(),
+ ProcMacro {
+ name: "identity".into(),
+ kind: crate::ProcMacroKind::Attr,
+ expander: Arc::new(IdentityProcMacroExpander),
+ },
+ ),
+ (
+ r#"
#[proc_macro_derive(DeriveIdentity)]
pub fn derive_identity(item: TokenStream) -> TokenStream {
item
}
+"#
+ .into(),
+ ProcMacro {
+ name: "DeriveIdentity".into(),
+ kind: crate::ProcMacroKind::CustomDerive,
+ expander: Arc::new(IdentityProcMacroExpander),
+ },
+ ),
+ (
+ r#"
#[proc_macro_attribute]
pub fn input_replace(attr: TokenStream, _item: TokenStream) -> TokenStream {
attr
}
+"#
+ .into(),
+ ProcMacro {
+ name: "input_replace".into(),
+ kind: crate::ProcMacroKind::Attr,
+ expander: Arc::new(AttributeInputReplaceProcMacroExpander),
+ },
+ ),
+ (
+ r#"
#[proc_macro]
pub fn mirror(input: TokenStream) -> TokenStream {
input
}
-"#;
- let proc_macros = [
- ProcMacro {
- name: "identity".into(),
- kind: crate::ProcMacroKind::Attr,
- expander: Arc::new(IdentityProcMacroExpander),
- },
- ProcMacro {
- name: "DeriveIdentity".into(),
- kind: crate::ProcMacroKind::CustomDerive,
- expander: Arc::new(IdentityProcMacroExpander),
- },
- ProcMacro {
- name: "input_replace".into(),
- kind: crate::ProcMacroKind::Attr,
- expander: Arc::new(AttributeInputReplaceProcMacroExpander),
- },
- ProcMacro {
- name: "mirror".into(),
- kind: crate::ProcMacroKind::FuncLike,
- expander: Arc::new(MirrorProcMacroExpander),
- },
+"#
+ .into(),
+ ProcMacro {
+ name: "mirror".into(),
+ kind: crate::ProcMacroKind::FuncLike,
+ expander: Arc::new(MirrorProcMacroExpander),
+ },
+ ),
]
- .into_iter()
- .filter(|pm| proc_macros.iter().any(|name| name == &stdx::to_lower_snake_case(&pm.name)))
- .collect();
- (proc_macros, source.into())
+}
+
+/// Keeps only the proc macros from `proc_macro_defs` whose
+/// lower-snake-case name appears in `proc_macro_names`, returning the
+/// selected macros together with the concatenated source stubs that
+/// declare them.
+fn filter_test_proc_macros(
+ proc_macro_names: &[String],
+ proc_macro_defs: Vec<(String, ProcMacro)>,
+) -> (Vec<ProcMacro>, String) {
+ // The source here is only required so that paths to the macros exist and are resolvable.
+ let mut source = String::new();
+ let mut proc_macros = Vec::new();
+
+ for (c, p) in proc_macro_defs {
+ // Match on the lower-snake-case form of the macro name (so e.g. a
+ // CamelCase derive name matches its snake_case fixture reference).
+ if !proc_macro_names.iter().any(|name| name == &stdx::to_lower_snake_case(&p.name)) {
+ continue;
+ }
+ proc_macros.push(p);
+ source += &c;
+ }
+
+ (proc_macros, source)
}
#[derive(Debug, Clone, Copy)]
use stdx::{format_to, impl_from};
use syntax::{
ast::{self, HasAttrs as _, HasDocComments, HasName},
- AstNode, AstPtr, SmolStr, SyntaxKind, SyntaxNodePtr,
+ AstNode, AstPtr, SmolStr, SyntaxNodePtr, T,
};
use tt::{Ident, Leaf, Literal, TokenTree};
DefDiagnosticKind::UnresolvedProcMacro { ast } => {
let mut precise_location = None;
- let (node, name) = match ast {
+ let (node, macro_name) = match ast {
MacroCallKind::FnLike { ast_id, .. } => {
let node = ast_id.to_node(db.upcast());
(ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))), None)
}
- MacroCallKind::Derive { ast_id, derive_name, .. } => {
+ MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => {
let node = ast_id.to_node(db.upcast());
// Compute the precise location of the macro name's token in the derive
// list.
- // FIXME: This does not handle paths to the macro, but neither does the
- // rest of r-a.
- let derive_attrs =
- node.attrs().filter_map(|attr| match attr.as_simple_call() {
- Some((name, args)) if name == "derive" => Some(args),
- _ => None,
- });
- 'outer: for attr in derive_attrs {
- let tokens =
- attr.syntax().children_with_tokens().filter_map(|elem| match elem {
- syntax::NodeOrToken::Node(_) => None,
+ let token = (|| {
+ let derive_attr = node.attrs().nth(*derive_attr_index as usize)?;
+ derive_attr
+ .syntax()
+ .children_with_tokens()
+ .filter_map(|elem| match elem {
syntax::NodeOrToken::Token(tok) => Some(tok),
- });
- for token in tokens {
- if token.kind() == SyntaxKind::IDENT && token.text() == &**derive_name {
- precise_location = Some(token.text_range());
- break 'outer;
- }
- }
- }
-
+ _ => None,
+ })
+ .group_by(|t| t.kind() == T![,])
+ .into_iter()
+ .filter(|&(comma, _)| !comma)
+ .nth(*derive_index as usize)
+ .and_then(|(_, mut g)| g.find(|t| t.kind() == T![ident]))
+ })();
+ precise_location = token.as_ref().map(|tok| tok.text_range());
(
ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
- Some(derive_name.clone()),
+ token.as_ref().map(ToString::to_string),
)
}
- MacroCallKind::Attr { ast_id, invoc_attr_index, attr_name, .. } => {
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
let node = ast_id.to_node(db.upcast());
let attr = node
.doc_comments_and_attrs()
.unwrap_or_else(|| panic!("cannot find attribute #{}", invoc_attr_index));
(
ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
- Some(attr_name.clone()),
+ attr.path()
+ .and_then(|path| path.segment())
+ .and_then(|seg| seg.name_ref())
+ .as_ref()
+ .map(ToString::to_string),
)
}
};
- acc.push(
- UnresolvedProcMacro { node, precise_location, macro_name: name.map(Into::into) }
- .into(),
- );
+ acc.push(UnresolvedProcMacro { node, precise_location, macro_name }.into());
}
DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
db.function_data(self.id).has_body()
}
+ /// If this function is declared as a proc macro (carries a
+ /// `proc_macro`, `proc_macro_attribute` or `proc_macro_derive`
+ /// attribute), resolves it to the matching `MacroDef` by name among
+ /// the crate def map's exported proc macros; otherwise returns `None`.
+ pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option<MacroDef> {
+ let function_data = db.function_data(self.id);
+ let attrs = &function_data.attrs;
+ // Bail out early unless one of the three proc-macro attributes is present.
+ if !(attrs.is_proc_macro()
+ || attrs.is_proc_macro_attribute()
+ || attrs.is_proc_macro_derive())
+ {
+ return None;
+ }
+ let loc = self.id.lookup(db.upcast());
+ let krate = loc.krate(db);
+ let def_map = db.crate_def_map(krate.into());
+ let name = &function_data.name;
+ // NOTE(review): lookup is by function name only — assumes exported
+ // proc macro names are unique within the crate; confirm upstream.
+ let mut exported_proc_macros = def_map.exported_proc_macros();
+ exported_proc_macros.find(|(_, mac_name)| mac_name == name).map(|(id, _)| MacroDef { id })
+ }
+
/// A textual representation of the HIR of this function for debugging purposes.
pub fn debug_hir(self, db: &dyn HirDatabase) -> String {
let body = db.body(self.id.into());
}
}
+ /// Returns true if this macro is a built-in attr macro whose expander
+ /// reports itself as a derive (`exp.is_derive()`).
+ pub fn is_builtin_derive(&self) -> bool {
+ matches!(self.id.kind, MacroDefKind::BuiltInAttr(exp, _) if exp.is_derive())
+ }
+
pub fn is_attr(&self) -> bool {
matches!(self.kind(), MacroKind::Attr)
}
pub fn is_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
let src = self.source(db)?;
- let item = src.file_id.is_builtin_derive(db.upcast())?;
- let hygenic = hir_expand::hygiene::Hygiene::new(db.upcast(), item.file_id);
-
- // FIXME: handle `cfg_attr`
- let attr = item
- .value
- .attrs()
- .filter_map(|it| {
- let path = ModPath::from_src(db.upcast(), it.path()?, &hygenic)?;
- if path.as_ident()?.to_smol_str() == "derive" {
- Some(it)
- } else {
- None
- }
- })
- .last()?;
-
- Some(item.with_value(attr))
+ src.file_id.is_builtin_derive(db.upcast())
}
}
use std::{cell::RefCell, fmt, iter};
use base_db::{FileId, FileRange};
-use either::Either;
use hir_def::{
body,
resolver::{self, HasResolver, Resolver, TypeNs},
use syntax::{
algo::skip_trivia_token,
ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
- match_ast, AstNode, AstToken, Direction, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken,
- TextSize, T,
+ match_ast, AstNode, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
};
use crate::{
db::HirDatabase,
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{resolve_hir_path, SourceAnalyzer},
- Access, AssocItem, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasAttrs as _,
- HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef,
- Name, Path, ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
+ Access, AssocItem, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasSource,
+ HirFileId, Impl, InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path,
+ ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
};
#[derive(Debug, Clone, PartialEq, Eq)]
self.imp.is_attr_macro_call(item)
}
+ /// Whether the given ADT has derive macro invocations recorded for it.
+ /// Delegates to the internal `SemanticsImpl`.
+ pub fn is_derive_annotated(&self, item: &ast::Adt) -> bool {
+ self.imp.is_derive_annotated(item)
+ }
+
pub fn speculative_expand(
&self,
actual_macro_call: &ast::MacroCall,
self.imp.resolve_bind_pat_to_const(pat)
}
- pub fn resolve_derive_ident(
- &self,
- derive: &ast::Attr,
- ident: &ast::Ident,
- ) -> Option<PathResolution> {
- self.imp.resolve_derive_ident(derive, ident)
- }
-
pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
self.imp.record_literal_missing_fields(literal)
}
let adt = InFile::new(file_id, &adt);
let src = InFile::new(file_id, attr.clone());
self.with_ctx(|ctx| {
- let (_, res) = ctx.attr_to_derive_macro_call(adt, src)?;
+ let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?;
Some(res.to_vec())
})
}
+ // Maps the ADT into its file and asks the source-to-def context
+ // whether any derive macro calls are recorded for it.
+ fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
+ let file_id = self.find_file(adt.syntax()).file_id;
+ let adt = InFile::new(file_id, adt);
+ self.with_ctx(|ctx| ctx.has_derives(adt))
+ }
+
fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
let file_id = self.find_file(item.syntax()).file_id;
let src = InFile::new(file_id, item.clone());
// FIXME replace map.while_some with take_while once stable
token.value.ancestors().map(ast::TokenTree::cast).while_some().last()
{
- let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
+ let parent = tt.syntax().parent()?;
+ // check for derive attribute here
+ let macro_call = match_ast! {
+ match parent {
+ ast::MacroCall(mcall) => mcall,
+ // attribute we failed expansion for earlier, this might be a derive invocation
+ // so try downmapping the token into the pseudo derive expansion
+ // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
+ ast::Meta(meta) => {
+ let attr = meta.parent_attr()?;
+ let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
+ let call_id = self.with_ctx(|ctx| {
+ let (_, call_id, _) = ctx.attr_to_derive_macro_call(
+ token.with_value(&adt),
+ token.with_value(attr),
+ )?;
+ Some(call_id)
+ })?;
+ let file_id = call_id.as_file();
+ return process_expansion_for_token(
+ &mut stack,
+ file_id,
+ Some(adt.into()),
+ token.as_ref(),
+ );
+ },
+ _ => return None,
+ }
+ };
+
if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
return None;
}
self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
}
- fn resolve_derive_ident(
- &self,
- derive: &ast::Attr,
- ident: &ast::Ident,
- ) -> Option<PathResolution> {
- debug_assert!(ident.syntax().parent().and_then(ast::TokenTree::cast).is_some());
- debug_assert!(ident.syntax().ancestors().any(|anc| anc == *derive.syntax()));
- // derive macros are always at depth 2, tokentree -> meta -> attribute
- let syntax = ident.syntax();
-
- let tt = derive.token_tree()?;
- let file = self.find_file(derive.syntax());
- let adt = derive.syntax().parent().and_then(ast::Adt::cast)?;
- let adt_def = ToDef::to_def(self, file.with_value(adt.clone()))?;
- let res = self.with_ctx(|ctx| {
- let (attr_id, derives) = ctx.attr_to_derive_macro_call(
- file.with_value(&adt),
- file.with_value(derive.clone()),
- )?;
- let attrs = adt_def.attrs(self.db);
- let mut derive_paths = attrs.get(attr_id)?.parse_path_comma_token_tree()?;
-
- let derive_idx = tt
- .syntax()
- .children_with_tokens()
- .filter_map(SyntaxElement::into_token)
- .take_while(|tok| tok != syntax)
- .filter(|t| t.kind() == T![,])
- .count();
- let path_segment_idx = syntax
- .siblings_with_tokens(Direction::Prev)
- .filter_map(SyntaxElement::into_token)
- .take_while(|tok| matches!(tok.kind(), T![:] | T![ident]))
- .filter(|tok| tok.kind() == T![ident])
- .count();
-
- let mut mod_path = derive_paths.nth(derive_idx)?;
-
- if path_segment_idx < mod_path.len() {
- // the path for the given ident is a qualifier, resolve to module if possible
- while path_segment_idx < mod_path.len() {
- mod_path.pop_segment();
- }
- Some(Either::Left(mod_path))
- } else {
- // otherwise fetch the derive
- Some(Either::Right(derives[derive_idx]))
- }
- })?;
-
- match res {
- Either::Left(path) => {
- let len = path.len();
- resolve_hir_path(
- self.db,
- &self.scope(derive.syntax()).resolver,
- &Path::from_known_path(path, vec![None; len]),
- )
- .filter(|res| matches!(res, PathResolution::Def(ModuleDef::Module(_))))
- }
- Either::Right(derive) => derive
- .map(|call| MacroDef { id: self.db.lookup_intern_macro_call(call).def })
- .map(PathResolution::Macro),
- }
- }
-
fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
self.analyze(literal.syntax())
.record_literal_missing_fields(self.db, literal)
&mut self,
item: InFile<&ast::Adt>,
src: InFile<ast::Attr>,
- ) -> Option<(AttrId, &[Option<MacroCallId>])> {
+ ) -> Option<(AttrId, MacroCallId, &[Option<MacroCallId>])> {
let map = self.dyn_map(item)?;
- map[keys::DERIVE_MACRO_CALL].get(&src.value).map(|(id, ids)| (*id, &**ids))
+ map[keys::DERIVE_MACRO_CALL]
+ .get(&src.value)
+ .map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids))
+ }
+ /// Whether any `DERIVE_MACRO_CALL` entries are recorded for `adt` in
+ /// its dyn map (absent map counts as no derives).
+ pub(super) fn has_derives(&mut self, adt: InFile<&ast::Adt>) -> bool {
+ self.dyn_map(adt).as_ref().map_or(false, |map| !map[keys::DERIVE_MACRO_CALL].is_empty())
}
fn to_def<Ast: AstNode + 'static, ID: Copy + 'static>(
return builtin.map(PathResolution::BuiltinAttr);
}
return match resolve_hir_path_as_macro(db, &self.resolver, &hir_path) {
- res @ Some(m) if m.is_attr() => res.map(PathResolution::Macro),
+ Some(m) => Some(PathResolution::Macro(m)),
// this labels any path that starts with a tool module as the tool itself, this is technically wrong
// but there is no benefit in differentiating these two cases for the time being
- _ => path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
+ None => path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
match self.resolver.krate() {
Some(krate) => ToolModule::by_name(db, krate.into(), &name_ref.text()),
None => ToolModule::builtin(&name_ref.text()),
pub fn by_key(&self, key: &'static str) -> AttrQuery<'_> {
AttrQuery { attrs: self, key }
}
+}
+impl Attrs {
pub fn cfg(&self) -> Option<CfgExpr> {
let mut cfgs = self.by_key("cfg").tt_values().map(CfgExpr::parse).collect::<Vec<_>>();
match cfgs.len() {
matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "hidden")
})
}
+
+ /// Whether the attribute list contains `proc_macro`.
+ pub fn is_proc_macro(&self) -> bool {
+ self.by_key("proc_macro").exists()
+ }
+
+ /// Whether the attribute list contains `proc_macro_attribute`.
+ pub fn is_proc_macro_attribute(&self) -> bool {
+ self.by_key("proc_macro_attribute").exists()
+ }
+
+ /// Whether the attribute list contains `proc_macro_derive`.
+ pub fn is_proc_macro_derive(&self) -> bool {
+ self.by_key("proc_macro_derive").exists()
+ }
}
impl AttrsWithOwner {
) -> Result<ExpandResult<Option<(Mark, T)>>, UnresolvedMacro> {
if self.recursion_limit(db).check(self.recursion_limit + 1).is_err() {
cov_mark::hit!(your_stack_belongs_to_me);
- return Ok(ExpandResult::str_err(
+ return Ok(ExpandResult::only_err(ExpandError::Other(
"reached recursion limit during macro expansion".into(),
- ));
+ )));
}
let macro_call = InFile::new(self.current_file_id, ¯o_call);
}
return ExpandResult::only_err(err.unwrap_or_else(|| {
- mbe::ExpandError::Other("failed to parse macro invocation".into())
+ ExpandError::Other("failed to parse macro invocation".into())
}));
}
};
db::DefDatabase,
expr::{
dummy_expr_id, Array, BindingAnnotation, Expr, ExprId, Label, LabelId, Literal, MatchArm,
- MatchGuard, Pat, PatId, RecordFieldPat, RecordLitField, Statement,
+ Pat, PatId, RecordFieldPat, RecordLitField, Statement,
},
intern::Interned,
item_scope::BuiltinShadowMode,
fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId {
self.make_expr(expr, Err(SyntheticSyntax))
}
- fn unit(&mut self) -> ExprId {
- self.alloc_expr_desugared(Expr::Tuple { exprs: Box::default() })
- }
fn missing_expr(&mut self) -> ExprId {
self.alloc_expr_desugared(Expr::Missing)
}
}
});
- let condition = match e.condition() {
- None => self.missing_expr(),
- Some(condition) => match condition.pat() {
- None => self.collect_expr_opt(condition.expr()),
- // if let -- desugar to match
- Some(pat) => {
- let pat = self.collect_pat(pat);
- let match_expr = self.collect_expr_opt(condition.expr());
- let placeholder_pat = self.missing_pat();
- let arms = vec![
- MatchArm { pat, expr: then_branch, guard: None },
- MatchArm {
- pat: placeholder_pat,
- expr: else_branch.unwrap_or_else(|| self.unit()),
- guard: None,
- },
- ]
- .into();
- return Some(
- self.alloc_expr(Expr::Match { expr: match_expr, arms }, syntax_ptr),
- );
- }
- },
- };
+ let condition = self.collect_expr_opt(e.condition());
self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr)
}
+ ast::Expr::LetExpr(e) => {
+ let pat = self.collect_pat_opt(e.pat());
+ let expr = self.collect_expr_opt(e.expr());
+ self.alloc_expr(Expr::Let { pat, expr }, syntax_ptr)
+ }
ast::Expr::BlockExpr(e) => match e.modifier() {
Some(ast::BlockModifier::Try(_)) => {
let body = self.collect_block(e);
let label = e.label().map(|label| self.collect_label(label));
let body = self.collect_block_opt(e.loop_body());
- let condition = match e.condition() {
- None => self.missing_expr(),
- Some(condition) => match condition.pat() {
- None => self.collect_expr_opt(condition.expr()),
- // if let -- desugar to match
- Some(pat) => {
- cov_mark::hit!(infer_resolve_while_let);
- let pat = self.collect_pat(pat);
- let match_expr = self.collect_expr_opt(condition.expr());
- let placeholder_pat = self.missing_pat();
- let break_ =
- self.alloc_expr_desugared(Expr::Break { expr: None, label: None });
- let arms = vec![
- MatchArm { pat, expr: body, guard: None },
- MatchArm { pat: placeholder_pat, expr: break_, guard: None },
- ]
- .into();
- let match_expr =
- self.alloc_expr_desugared(Expr::Match { expr: match_expr, arms });
- return Some(
- self.alloc_expr(Expr::Loop { body: match_expr, label }, syntax_ptr),
- );
- }
- },
- };
+ let condition = self.collect_expr_opt(e.condition());
self.alloc_expr(Expr::While { condition, body, label }, syntax_ptr)
}
self.check_cfg(&arm).map(|()| MatchArm {
pat: self.collect_pat_opt(arm.pat()),
expr: self.collect_expr_opt(arm.expr()),
- guard: arm.guard().map(|guard| match guard.pat() {
- Some(pat) => MatchGuard::IfLet {
- pat: self.collect_pat(pat),
- expr: self.collect_expr_opt(guard.expr()),
- },
- None => {
- MatchGuard::If { expr: self.collect_expr_opt(guard.expr()) }
- }
- }),
+ guard: arm
+ .guard()
+ .map(|guard| self.collect_expr_opt(guard.condition())),
})
})
.collect()
use crate::{
body::Body,
db::DefDatabase,
- expr::{Expr, ExprId, LabelId, MatchGuard, Pat, PatId, Statement},
+ expr::{Expr, ExprId, LabelId, Pat, PatId, Statement},
BlockId, DefWithBodyId,
};
fn new(body: &Body) -> ExprScopes {
let mut scopes =
ExprScopes { scopes: Arena::default(), scope_by_expr: FxHashMap::default() };
- let root = scopes.root_scope();
+ let mut root = scopes.root_scope();
scopes.add_params_bindings(body, root, &body.params);
- compute_expr_scopes(body.body_expr, body, &mut scopes, root);
+ compute_expr_scopes(body.body_expr, body, &mut scopes, &mut root);
scopes
}
match stmt {
Statement::Let { pat, initializer, else_branch, .. } => {
if let Some(expr) = initializer {
- compute_expr_scopes(*expr, body, scopes, scope);
+ compute_expr_scopes(*expr, body, scopes, &mut scope);
}
if let Some(expr) = else_branch {
- compute_expr_scopes(*expr, body, scopes, scope);
+ compute_expr_scopes(*expr, body, scopes, &mut scope);
}
scope = scopes.new_scope(scope);
scopes.add_bindings(body, scope, *pat);
}
Statement::Expr { expr, .. } => {
- compute_expr_scopes(*expr, body, scopes, scope);
+ compute_expr_scopes(*expr, body, scopes, &mut scope);
}
}
}
if let Some(expr) = tail {
- compute_expr_scopes(expr, body, scopes, scope);
+ compute_expr_scopes(expr, body, scopes, &mut scope);
}
}
-fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope: ScopeId) {
+fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope: &mut ScopeId) {
let make_label =
|label: &Option<LabelId>| label.map(|label| (label, body.labels[label].name.clone()));
- scopes.set_scope(expr, scope);
+ scopes.set_scope(expr, *scope);
match &body[expr] {
Expr::Block { statements, tail, id, label } => {
- let scope = scopes.new_block_scope(scope, *id, make_label(label));
+ let scope = scopes.new_block_scope(*scope, *id, make_label(label));
// Overwrite the old scope for the block expr, so that every block scope can be found
// via the block itself (important for blocks that only contain items, no expressions).
scopes.set_scope(expr, scope);
}
Expr::For { iterable, pat, body: body_expr, label } => {
compute_expr_scopes(*iterable, body, scopes, scope);
- let scope = scopes.new_labeled_scope(scope, make_label(label));
+ let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
scopes.add_bindings(body, scope, *pat);
- compute_expr_scopes(*body_expr, body, scopes, scope);
+ compute_expr_scopes(*body_expr, body, scopes, &mut scope);
}
Expr::While { condition, body: body_expr, label } => {
- let scope = scopes.new_labeled_scope(scope, make_label(label));
- compute_expr_scopes(*condition, body, scopes, scope);
- compute_expr_scopes(*body_expr, body, scopes, scope);
+ let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
+ compute_expr_scopes(*condition, body, scopes, &mut scope);
+ compute_expr_scopes(*body_expr, body, scopes, &mut scope);
}
Expr::Loop { body: body_expr, label } => {
- let scope = scopes.new_labeled_scope(scope, make_label(label));
- compute_expr_scopes(*body_expr, body, scopes, scope);
+ let mut scope = scopes.new_labeled_scope(*scope, make_label(label));
+ compute_expr_scopes(*body_expr, body, scopes, &mut scope);
}
Expr::Lambda { args, body: body_expr, .. } => {
- let scope = scopes.new_scope(scope);
+ let mut scope = scopes.new_scope(*scope);
scopes.add_params_bindings(body, scope, args);
- compute_expr_scopes(*body_expr, body, scopes, scope);
+ compute_expr_scopes(*body_expr, body, scopes, &mut scope);
}
Expr::Match { expr, arms } => {
compute_expr_scopes(*expr, body, scopes, scope);
for arm in arms.iter() {
- let mut scope = scopes.new_scope(scope);
+ let mut scope = scopes.new_scope(*scope);
scopes.add_bindings(body, scope, arm.pat);
- match arm.guard {
- Some(MatchGuard::If { expr: guard }) => {
- scopes.set_scope(guard, scope);
- compute_expr_scopes(guard, body, scopes, scope);
- }
- Some(MatchGuard::IfLet { pat, expr: guard }) => {
- scopes.set_scope(guard, scope);
- compute_expr_scopes(guard, body, scopes, scope);
- scope = scopes.new_scope(scope);
- scopes.add_bindings(body, scope, pat);
- }
- _ => {}
- };
- scopes.set_scope(arm.expr, scope);
- compute_expr_scopes(arm.expr, body, scopes, scope);
+ if let Some(guard) = arm.guard {
+ scope = scopes.new_scope(scope);
+ compute_expr_scopes(guard, body, scopes, &mut scope);
+ }
+ compute_expr_scopes(arm.expr, body, scopes, &mut scope);
+ }
+ }
+ &Expr::If { condition, then_branch, else_branch } => {
+ let mut then_branch_scope = scopes.new_scope(*scope);
+ compute_expr_scopes(condition, body, scopes, &mut then_branch_scope);
+ compute_expr_scopes(then_branch, body, scopes, &mut then_branch_scope);
+ if let Some(else_branch) = else_branch {
+ compute_expr_scopes(else_branch, body, scopes, scope);
}
}
+ &Expr::Let { pat, expr } => {
+ compute_expr_scopes(expr, body, scopes, scope);
+ *scope = scopes.new_scope(*scope);
+ scopes.add_bindings(body, *scope, pat);
+ }
e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)),
};
}
}
#[test]
- fn while_let_desugaring() {
- cov_mark::check!(infer_resolve_while_let);
+ fn while_let_adds_binding() {
do_check_local_name(
r#"
fn test() {
"#,
75,
);
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<f32> = None;
+ while (((let Option::Some(_) = foo))) && let Option::Some(spam) = foo {
+ spam$0
+ }
+}
+"#,
+ 107,
+ );
+ }
+
+ #[test]
+ fn match_guard_if_let() {
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<f32> = None;
+ match foo {
+ _ if let Option::Some(spam) = foo => spam$0,
+ }
+}
+"#,
+ 93,
+ );
+ }
+
+ #[test]
+ fn let_chains_can_reference_previous_lets() {
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<i32> = None;
+ if let Some(spam) = foo && spa$0m > 1 && let Some(spam) = foo && spam > 1 {}
+}
+"#,
+ 61,
+ );
+ do_check_local_name(
+ r#"
+fn test() {
+ let foo: Option<i32> = None;
+ if let Some(spam) = foo && spam > 1 && let Some(spam) = foo && sp$0am > 1 {}
+}
+"#,
+ 100,
+ );
}
}
self.derive_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
|(ast_id, calls)| {
let adt = ast_id.to_node(db.upcast());
- calls.for_each(|(attr_id, calls)| {
+ calls.for_each(|(attr_id, call_id, calls)| {
if let Some(Either::Left(attr)) =
adt.doc_comments_and_attrs().nth(attr_id.ast_index as usize)
{
- res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, calls.into()));
+ res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
}
});
},
fn insert(map: &mut DynMap, key: Self::K, value: Self::V);
fn get<'a>(map: &'a DynMap, key: &Self::K) -> Option<&'a Self::V>;
+ fn is_empty(map: &DynMap) -> bool;
}
impl<K: Hash + Eq + 'static, V: 'static> Policy for (K, V) {
fn get<'a>(map: &'a DynMap, key: &K) -> Option<&'a V> {
map.map.get::<FxHashMap<K, V>>()?.get(key)
}
+ // A DynMap with no underlying FxHashMap for this (K, V) pair counts
+ // as empty.
+ fn is_empty(map: &DynMap) -> bool {
+ map.map.get::<FxHashMap<K, V>>().map_or(true, |it| it.is_empty())
+ }
}
pub struct DynMap {
pub fn get(&self, key: &P::K) -> Option<&P::V> {
P::get(&self.map, key)
}
+
+ /// Whether this keyed view of the map contains no entries
+ /// (delegates to the policy's `is_empty`).
+ pub fn is_empty(&self) -> bool {
+ P::is_empty(&self.map)
+ }
}
impl<P: Policy> Index<Key<P::K, P::V, P>> for DynMap {
then_branch: ExprId,
else_branch: Option<ExprId>,
},
+ Let {
+ pat: PatId,
+ expr: ExprId,
+ },
Block {
id: BlockId,
statements: Box<[Statement]>,
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct MatchArm {
pub pat: PatId,
- pub guard: Option<MatchGuard>,
+ pub guard: Option<ExprId>,
pub expr: ExprId,
}
-#[derive(Debug, Clone, Eq, PartialEq)]
-pub enum MatchGuard {
- If { expr: ExprId },
-
- IfLet { pat: PatId, expr: ExprId },
-}
-
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct RecordLitField {
pub name: Name,
f(else_branch);
}
}
+ Expr::Let { expr, .. } => {
+ f(*expr);
+ }
Expr::Block { statements, tail, .. } => {
for stmt in statements.iter() {
match stmt {
attr_macros: FxHashMap<AstId<ast::Item>, MacroCallId>,
/// The derive macro invocations in this scope, keyed by the owner item over the actual derive attributes
/// paired with the derive macro invocations for the specific attribute.
- derive_macros:
- FxHashMap<AstId<ast::Adt>, SmallVec<[(AttrId, SmallVec<[Option<MacroCallId>; 1]>); 1]>>,
+ derive_macros: FxHashMap<
+ AstId<ast::Adt>,
+ SmallVec<[(AttrId, MacroCallId, SmallVec<[Option<MacroCallId>; 1]>); 1]>,
+ >,
}
pub(crate) static BUILTIN_SCOPE: Lazy<FxHashMap<Name, PerNs>> = Lazy::new(|| {
idx: usize,
) {
if let Some(derives) = self.derive_macros.get_mut(&adt) {
- if let Some((_, invocs)) = derives.iter_mut().find(|&&mut (id, _)| id == attr_id) {
+ if let Some((.., invocs)) = derives.iter_mut().find(|&&mut (id, ..)| id == attr_id) {
invocs[idx] = Some(call);
}
}
&mut self,
adt: AstId<ast::Adt>,
attr_id: AttrId,
+ call_id: MacroCallId,
len: usize,
) {
- self.derive_macros.entry(adt).or_default().push((attr_id, smallvec![None; len]));
+ self.derive_macros.entry(adt).or_default().push((attr_id, call_id, smallvec![None; len]));
}
pub(crate) fn derive_macro_invocs(
&self,
) -> impl Iterator<
- Item = (AstId<ast::Adt>, impl Iterator<Item = (AttrId, &[Option<MacroCallId>])>),
+ Item = (
+ AstId<ast::Adt>,
+ impl Iterator<Item = (AttrId, MacroCallId, &[Option<MacroCallId>])>,
+ ),
> + '_ {
- self.derive_macros
- .iter()
- .map(|(k, v)| (*k, v.iter().map(|(attr_id, invocs)| (*attr_id, &**invocs))))
+ self.derive_macros.iter().map(|(k, v)| {
+ (*k, v.iter().map(|&(attr_id, call_id, ref invocs)| (attr_id, call_id, &**invocs)))
+ })
}
pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
w!(self, "]");
}
TypeRef::Fn(args_and_ret, varargs) => {
- let (ret, args) =
+ let ((_, return_type), args) =
args_and_ret.split_last().expect("TypeRef::Fn is missing return type");
w!(self, "fn(");
- for (i, arg) in args.iter().enumerate() {
+ for (i, (_, typeref)) in args.iter().enumerate() {
if i != 0 {
w!(self, ", ");
}
- self.print_type_ref(arg);
+ self.print_type_ref(&typeref);
}
if *varargs {
if !args.is_empty() {
w!(self, "...");
}
w!(self, ") -> ");
- self.print_type_ref(ret);
+ self.print_type_ref(&return_type);
}
TypeRef::Macro(_ast_id) => {
w!(self, "<macro>");
pub const MACRO: Key<ast::Macro, MacroDefId> = Key::new();
pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
-pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, Box<[Option<MacroCallId>]>)> = Key::new();
+pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> =
+ Key::new();
/// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are
/// equal if they point to exactly the same object.
let key = AstPtr::new(key);
map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(&key)
}
+ fn is_empty(map: &DynMap) -> bool {
+ map.map.get::<FxHashMap<AstPtr<AST>, ID>>().map_or(true, |it| it.is_empty())
+ }
}
ast_id_map::FileAstId,
eager::{expand_eager_macro, ErrorEmitted, ErrorSink},
hygiene::Hygiene,
- AstId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
- UnresolvedMacro,
+ AstId, ExpandError, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId,
+ MacroDefKind, UnresolvedMacro,
};
use item_tree::ExternBlock;
use la_arena::Idx;
db: &dyn db::DefDatabase,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
- error_sink: &mut dyn FnMut(mbe::ExpandError),
+ error_sink: &mut dyn FnMut(ExpandError),
) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro>;
}
db: &dyn db::DefDatabase,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
- mut error_sink: &mut dyn FnMut(mbe::ExpandError),
+ mut error_sink: &mut dyn FnMut(ExpandError),
) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro> {
let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
self.value.path().and_then(|path| path::ModPath::from_src(db.upcast(), path, &h));
let path = match error_sink
- .option(path, || mbe::ExpandError::Other("malformed macro invocation".into()))
+ .option(path, || ExpandError::Other("malformed macro invocation".into()))
{
Ok(path) => path,
Err(error) => {
};
macro_call_as_call_id(
+ db,
&AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
expands_to,
- db,
krate,
resolver,
error_sink,
}
fn macro_call_as_call_id(
+ db: &dyn db::DefDatabase,
call: &AstIdWithPath<ast::MacroCall>,
expand_to: ExpandTo,
- db: &dyn db::DefDatabase,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
- error_sink: &mut dyn FnMut(mbe::ExpandError),
+ error_sink: &mut dyn FnMut(ExpandError),
) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro> {
let def: MacroDefId =
resolver(call.path.clone()).ok_or_else(|| UnresolvedMacro { path: call.path.clone() })?;
}
fn derive_macro_as_call_id(
+ db: &dyn db::DefDatabase,
item_attr: &AstIdWithPath<ast::Adt>,
derive_attr: AttrId,
- db: &dyn db::DefDatabase,
+ derive_pos: u32,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
) -> Result<MacroCallId, UnresolvedMacro> {
let def: MacroDefId = resolver(item_attr.path.clone())
.ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
- let last_segment = item_attr
- .path
- .segments()
- .last()
- .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
let res = def.as_lazy_macro(
db.upcast(),
krate,
MacroCallKind::Derive {
ast_id: item_attr.ast_id,
- derive_name: last_segment.to_string().into_boxed_str(),
+ derive_index: derive_pos,
derive_attr_index: derive_attr.ast_index,
},
);
}
fn attr_macro_as_call_id(
+ db: &dyn db::DefDatabase,
item_attr: &AstIdWithPath<ast::Item>,
macro_attr: &Attr,
- db: &dyn db::DefDatabase,
krate: CrateId,
def: MacroDefId,
+ is_derive: bool,
) -> MacroCallId {
- let attr_path = &item_attr.path;
- let last_segment = attr_path.segments().last().expect("empty attribute path");
let mut arg = match macro_attr.input.as_deref() {
Some(attr::AttrInput::TokenTree(tt, map)) => (tt.clone(), map.clone()),
_ => Default::default(),
krate,
MacroCallKind::Attr {
ast_id: item_attr.ast_id,
- attr_name: last_segment.to_string().into_boxed_str(),
- attr_args: arg,
+ attr_args: Arc::new(arg),
invoc_attr_index: macro_attr.id.ast_index,
+ is_derive,
},
);
res
mod builtin_derive_macro;
mod proc_macros;
-use std::{iter, ops::Range};
+use std::{iter, ops::Range, sync::Arc};
use ::mbe::TokenMap;
-use base_db::{fixture::WithFixture, SourceDatabase};
+use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
use expect_test::Expect;
use hir_expand::{
db::{AstDatabase, TokenExpander},
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
- let db = TestDB::with_files(ra_fixture);
+ let extra_proc_macros = vec![(
+ r#"
+#[proc_macro_attribute]
+pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream {
+ item
+}
+"#
+ .into(),
+ ProcMacro {
+ name: "identity_when_valid".into(),
+ kind: base_db::ProcMacroKind::Attr,
+ expander: Arc::new(IdentityWhenValidProcMacroExpander),
+ },
+ )];
+ let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros);
let krate = db.crate_graph().iter().next().unwrap();
let def_map = db.crate_def_map(krate);
let local_id = def_map.root();
let range: Range<usize> = range.into();
if show_token_ids {
- if let Some((tree, map)) = arg.as_deref() {
+ if let Some((tree, map, _)) = arg.as_deref() {
let tt_range = call.token_tree().unwrap().syntax().text_range();
let mut ranges = Vec::new();
extract_id_ranges(&mut ranges, &map, &tree);
}
for decl_id in def_map[local_id].scope.declarations() {
- if let ModuleDefId::AdtId(AdtId::StructId(struct_id)) = decl_id {
- let src = struct_id.lookup(&db).source(&db);
+        // FIXME: I'm sure there's already a better way to do this
+ let src = match decl_id {
+ ModuleDefId::AdtId(AdtId::StructId(struct_id)) => {
+ Some(struct_id.lookup(&db).source(&db).syntax().cloned())
+ }
+ ModuleDefId::FunctionId(function_id) => {
+ Some(function_id.lookup(&db).source(&db).syntax().cloned())
+ }
+ _ => None,
+ };
+ if let Some(src) = src {
if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
- let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
+ let pp = pretty_print_macro_expansion(src.value, None);
format_to!(expanded_text, "\n{}", pp)
}
}
}
res
}
+
+// Identity mapping, but only works when the input is syntactically valid. This
+// simulates common proc macros that unnecessarily parse their input and return
+// compile errors.
+#[derive(Debug)]
+struct IdentityWhenValidProcMacroExpander;
+impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
+ fn expand(
+ &self,
+ subtree: &Subtree,
+ _: Option<&Subtree>,
+ _: &base_db::Env,
+ ) -> Result<Subtree, base_db::ProcMacroExpansionError> {
+ let (parse, _) =
+ ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
+ if parse.errors().is_empty() {
+ Ok(subtree.clone())
+ } else {
+ panic!("got invalid macro input: {:?}", parse.errors());
+ }
+ }
+}
check(
r#"
macro_rules! stmts {
- () => { let _ = 0; }
+ () => { fn foo() {} }
}
fn f() { let _ = stmts!/*+errors*/(); }
"#,
expect![[r#"
macro_rules! stmts {
- () => { let _ = 0; }
+ () => { fn foo() {} }
}
fn f() { let _ = /* parse error: expected expression */
-let _ = 0;; }
+fn foo() {}; }
"#]],
)
}
#[attr2] struct S;"##]],
);
}
+
+#[test]
+fn attribute_macro_syntax_completion_1() {
+ // this is just the case where the input is actually valid
+ check(
+ r#"
+//- proc_macros: identity_when_valid
+#[proc_macros::identity_when_valid]
+fn foo() { bar.baz(); blub }
+"#,
+ expect![[r##"
+#[proc_macros::identity_when_valid]
+fn foo() { bar.baz(); blub }
+
+fn foo() {
+ bar.baz();
+ blub
+}"##]],
+ );
+}
+
+#[test]
+fn attribute_macro_syntax_completion_2() {
+ // common case of dot completion while typing
+ check(
+ r#"
+//- proc_macros: identity_when_valid
+#[proc_macros::identity_when_valid]
+fn foo() { bar.; blub }
+"#,
+ expect![[r##"
+#[proc_macros::identity_when_valid]
+fn foo() { bar.; blub }
+
+fn foo() {
+ bar. ;
+ blub
+}"##]],
+ );
+}
None => return Err(UnresolvedMacro { path: ast_id.path.clone() }),
};
- Ok(ResolvedAttr::Macro(attr_macro_as_call_id(&ast_id, attr, db, self.krate, def)))
+ Ok(ResolvedAttr::Macro(attr_macro_as_call_id(db, &ast_id, attr, self.krate, def, false)))
}
pub(crate) fn is_builtin_or_registered_attr(&self, path: &ModPath) -> bool {
fn resolve_macros(&mut self) -> ReachedFixedPoint {
let mut macros = std::mem::take(&mut self.unresolved_macros);
let mut resolved = Vec::new();
+ let mut push_resolved = |directive: &MacroDirective, call_id| {
+ resolved.push((directive.module_id, directive.depth, directive.container, call_id));
+ };
let mut res = ReachedFixedPoint::Yes;
macros.retain(|directive| {
let resolver = |path| {
match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to } => {
let call_id = macro_call_as_call_id(
+ self.db,
ast_id,
*expand_to,
- self.db,
self.def_map.krate,
&resolver,
&mut |_err| (),
);
if let Ok(Ok(call_id)) = call_id {
- resolved.push((
- directive.module_id,
- call_id,
- directive.depth,
- directive.container,
- ));
+ push_resolved(directive, call_id);
res = ReachedFixedPoint::No;
return false;
}
}
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
let call_id = derive_macro_as_call_id(
+ self.db,
ast_id,
*derive_attr,
- self.db,
+ *derive_pos as u32,
self.def_map.krate,
&resolver,
);
*derive_pos,
);
- resolved.push((
- directive.module_id,
- call_id,
- directive.depth,
- directive.container,
- ));
+ push_resolved(directive, call_id);
res = ReachedFixedPoint::No;
return false;
}
len = idx;
}
+ // We treat the #[derive] macro as an attribute call, but we do not resolve it for nameres collection.
+ // This is just a trick to be able to resolve the input to derives as proper paths.
+ // Check the comment in [`builtin_attr_macro`].
+ let call_id = attr_macro_as_call_id(
+ self.db,
+ file_ast_id,
+ attr,
+ self.def_map.krate,
+ def,
+ true,
+ );
self.def_map.modules[directive.module_id]
.scope
- .init_derive_attribute(ast_id, attr.id, len + 1);
+ .init_derive_attribute(ast_id, attr.id, call_id, len + 1);
}
None => {
let diag = DefDiagnostic::malformed_derive(
}
// Not resolved to a derive helper or the derive attribute, so try to treat as a normal attribute.
- let call_id =
- attr_macro_as_call_id(file_ast_id, attr, self.db, self.def_map.krate, def);
+ let call_id = attr_macro_as_call_id(
+ self.db,
+ file_ast_id,
+ attr,
+ self.def_map.krate,
+ def,
+ false,
+ );
let loc: MacroCallLoc = self.db.lookup_intern_macro_call(call_id);
// Skip #[test]/#[bench] expansion, which would merely result in more memory usage
.scope
.add_attr_macro_invoc(ast_id, call_id);
- resolved.push((
- directive.module_id,
- call_id,
- directive.depth,
- directive.container,
- ));
+ push_resolved(directive, call_id);
res = ReachedFixedPoint::No;
return false;
}
// Attribute resolution can add unresolved macro invocations, so concatenate the lists.
self.unresolved_macros.extend(macros);
- for (module_id, macro_call_id, depth, container) in resolved {
+ for (module_id, depth, container, macro_call_id) in resolved {
self.collect_macro_expansion(module_id, macro_call_id, depth, container);
}
match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to } => {
let macro_call_as_call_id = macro_call_as_call_id(
+ self.db,
ast_id,
*expand_to,
- self.db,
self.def_map.krate,
|path| {
let resolved_res = self.def_map.resolve_path_fp_with_macro(
// Case 1: try to resolve in legacy scope and expand macro_rules
let mut error = None;
match macro_call_as_call_id(
+ self.def_collector.db,
&ast_id,
mac.expand_to,
- self.def_collector.db,
self.def_collector.def_map.krate,
|path| {
path.as_ident().and_then(|name| {
impl Attrs {
#[rustfmt::skip]
pub(super) fn parse_proc_macro_decl(&self, func_name: &Name) -> Option<ProcMacroDef> {
- if self.by_key("proc_macro").exists() {
+ if self.is_proc_macro() {
Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::FnLike })
- } else if self.by_key("proc_macro_attribute").exists() {
+ } else if self.is_proc_macro_attribute() {
Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Attr })
} else if self.by_key("proc_macro_derive").exists() {
- let derive = self.by_key("proc_macro_derive").tt_values().next().unwrap();
+ let derive = self.by_key("proc_macro_derive").tt_values().next()?;
match &*derive.token_trees {
// `#[proc_macro_derive(Trait)]`
//! HIR for references to types. Paths in these are not yet resolved. They can
//! be directly created from an ast::TypeRef, without further queries.
-use hir_expand::{name::Name, AstId, InFile};
+use hir_expand::{
+ name::{AsName, Name},
+ AstId, InFile,
+};
use std::convert::TryInto;
-use syntax::ast;
+use syntax::ast::{self, HasName};
use crate::{body::LowerCtx, intern::Interned, path::Path};
Array(Box<TypeRef>, ConstScalar),
Slice(Box<TypeRef>),
/// A fn pointer. Last element of the vector is the return type.
- Fn(Vec<TypeRef>, bool /*varargs*/),
+ Fn(Vec<(Option<Name>, TypeRef)>, bool /*varargs*/),
// For
ImplTrait(Vec<Interned<TypeBound>>),
DynTrait(Vec<Interned<TypeBound>>),
is_varargs = param.dotdotdot_token().is_some();
}
- pl.params().map(|p| p.ty()).map(|it| TypeRef::from_ast_opt(ctx, it)).collect()
+ pl.params()
+ .map(|it| {
+ let type_ref = TypeRef::from_ast_opt(ctx, it.ty());
+ let name = match it.pat() {
+ Some(ast::Pat::IdentPat(it)) => Some(
+ it.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing),
+ ),
+ _ => None,
+ };
+ (name, type_ref)
+ })
+ .collect()
} else {
Vec::new()
};
- params.push(ret_ty);
+ params.push((None, ret_ty));
TypeRef::Fn(params, is_varargs)
}
// for types are close enough for our purposes to the inner type for now...
fn go(type_ref: &TypeRef, f: &mut impl FnMut(&TypeRef)) {
f(type_ref);
match type_ref {
- TypeRef::Fn(types, _) | TypeRef::Tuple(types) => {
- types.iter().for_each(|t| go(t, f))
+ TypeRef::Fn(params, _) => {
+ params.iter().for_each(|(_, param_type)| go(¶m_type, f))
}
+ TypeRef::Tuple(types) => types.iter().for_each(|t| go(t, f)),
TypeRef::RawPtr(type_ref, _)
| TypeRef::Reference(type_ref, ..)
| TypeRef::Array(type_ref, _)
tt = { path = "../tt", version = "0.0.0" }
mbe = { path = "../mbe", version = "0.0.0" }
limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.2.0-pre.1"
//! Builtin attributes.
-use mbe::ExpandResult;
+use itertools::Itertools;
use syntax::ast;
-use crate::{db::AstDatabase, name, AstId, CrateId, MacroCallId, MacroDefId, MacroDefKind};
+use crate::{
+ db::AstDatabase, name, AstId, CrateId, ExpandResult, MacroCallId, MacroCallKind, MacroDefId,
+ MacroDefKind,
+};
macro_rules! register_builtin {
( $(($name:ident, $variant:ident) => $expand:ident),* ) => {
(bench, Bench) => dummy_attr_expand,
(cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand,
- (derive, Derive) => dummy_attr_expand,
+ (derive, Derive) => derive_attr_expand,
(global_allocator, GlobalAllocator) => dummy_attr_expand,
(test, Test) => dummy_attr_expand,
(test_case, TestCase) => dummy_attr_expand
) -> ExpandResult<tt::Subtree> {
ExpandResult::ok(tt.clone())
}
+
+/// We generate a very specific expansion here, as we do not actually expand the `#[derive]` attribute
+/// itself in name res, but we do want to expand it to something for the IDE layer, so that the input
+/// derive attributes can be downmapped, and resolved as proper paths.
+/// This is basically a hack that simplifies the hacks we need in a lot of ide layer places to
+/// somewhat inconsistently resolve derive attributes.
+///
+/// As such, we expand `#[derive(Foo, bar::Bar)]` into
+/// ```
+/// #[Foo]
+/// #[bar::Bar]
+/// ();
+/// ```
+/// which allows fallback path resolution in hir::Semantics to properly identify our derives.
+/// Since we do not expand the attribute in nameres though, we keep the original item.
+///
+/// The ideal expansion here would be for the `#[derive]` to re-emit the annotated item and somehow
+/// use the input paths in its output as well.
+/// But that would bring two problems with it: for one, every derive would duplicate the item token tree
+/// wasting a lot of memory, and it would also require some way to use a path in a way that makes it
+/// always resolve as a derive without nameres recollecting them.
+/// So this hacky approach is a lot more friendly for us, though it does require a bit of support in
+/// [`hir::Semantics`] to make this work.
+fn derive_attr_expand(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+ tt: &tt::Subtree,
+) -> ExpandResult<tt::Subtree> {
+ let loc = db.lookup_intern_macro_call(id);
+ let derives = match &loc.kind {
+ MacroCallKind::Attr { attr_args, .. } => &attr_args.0,
+ _ => return ExpandResult::ok(tt.clone()),
+ };
+
+ let mk_leaf = |char| {
+ tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
+ char,
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }))
+ };
+
+ let mut token_trees = Vec::new();
+ for (comma, group) in &derives
+ .token_trees
+ .iter()
+ .filter_map(|tt| match tt {
+ tt::TokenTree::Leaf(l) => Some(l),
+ tt::TokenTree::Subtree(_) => None,
+ })
+ .group_by(|l| matches!(l, tt::Leaf::Punct(tt::Punct { char: ',', .. })))
+ {
+ if comma {
+ continue;
+ }
+ token_trees.push(mk_leaf('#'));
+ token_trees.push(mk_leaf('['));
+ token_trees.extend(group.cloned().map(tt::TokenTree::Leaf));
+ token_trees.push(mk_leaf(']'));
+ }
+ token_trees.push(mk_leaf('('));
+ token_trees.push(mk_leaf(')'));
+ token_trees.push(mk_leaf(';'));
+ ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees })
+}
use tracing::debug;
-use mbe::ExpandResult;
use syntax::{
ast::{self, AstNode, HasGenericParams, HasModuleItem, HasName},
match_ast,
};
+use tt::TokenId;
-use crate::{db::AstDatabase, name, quote, AstId, CrateId, MacroCallId, MacroDefId, MacroDefKind};
+use crate::{
+ db::AstDatabase, name, quote, AstId, CrateId, ExpandError, ExpandResult, MacroCallId,
+ MacroDefId, MacroDefKind,
+};
macro_rules! register_builtin {
( $($trait:ident => $expand:ident),* ) => {
type_params: usize,
}
-fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
- let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems); // FragmentKind::Items doesn't parse attrs?
+fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
+ let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems);
let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
debug!("derive node didn't parse");
- mbe::ExpandError::UnexpectedToken
+ ExpandError::Other("invalid item definition".into())
})?;
let item = macro_items.items().next().ok_or_else(|| {
debug!("no module item parsed");
- mbe::ExpandError::NoMatchingRule
+ ExpandError::Other("no item found".into())
})?;
let node = item.syntax();
let (name, params) = match_ast! {
ast::Union(it) => (it.name(), it.generic_param_list()),
_ => {
debug!("unexpected node is {:?}", node);
- return Err(mbe::ExpandError::ConversionError)
+ return Err(ExpandError::Other("expected struct, enum or union".into()))
},
}
};
let name = name.ok_or_else(|| {
debug!("parsed item has no name");
- mbe::ExpandError::NoMatchingRule
- })?;
- let name_token_id = token_map.token_by_range(name.syntax().text_range()).ok_or_else(|| {
- debug!("name token not found");
- mbe::ExpandError::ConversionError
+ ExpandError::Other("missing name".into())
})?;
+ let name_token_id = token_map
+ .token_by_range(name.syntax().text_range())
+ .unwrap_or_else(|| TokenId::unspecified());
let name_token = tt::Ident { id: name_token_id, text: name.text().into() };
let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count());
Ok(BasicAdtInfo { name: name_token, type_params })
//! Builtin macro
-use crate::{
- db::AstDatabase, name, quote, AstId, CrateId, MacroCallId, MacroCallLoc, MacroDefId,
- MacroDefKind,
-};
use base_db::{AnchoredPath, Edition, FileId};
use cfg::CfgExpr;
use either::Either;
-use mbe::{parse_exprs_with_sep, parse_to_token_tree, ExpandResult};
+use mbe::{parse_exprs_with_sep, parse_to_token_tree};
use syntax::ast::{self, AstToken};
+use crate::{
+ db::AstDatabase, name, quote, AstId, CrateId, ExpandError, ExpandResult, MacroCallId,
+ MacroCallLoc, MacroDefId, MacroDefKind,
+};
+
macro_rules! register_builtin {
( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
let mut args = parse_exprs_with_sep(tt, ',');
if args.is_empty() {
- return ExpandResult::only_err(mbe::ExpandError::NoMatchingRule);
+ return ExpandResult::only_err(mbe::ExpandError::NoMatchingRule.into());
}
for arg in &mut args {
// Remove `key =`.
let text = it.text.as_str();
if text.starts_with('"') && text.ends_with('"') {
// FIXME: does not handle raw strings
- mbe::ExpandError::Other(text[1..text.len() - 1].into())
+ ExpandError::Other(text[1..text.len() - 1].into())
} else {
- mbe::ExpandError::BindingError("`compile_error!` argument must be a string".into())
+ ExpandError::Other("`compile_error!` argument must be a string".into())
}
}
- _ => mbe::ExpandError::BindingError("`compile_error!` argument must be a string".into()),
+ _ => ExpandError::Other("`compile_error!` argument must be a string".into()),
};
ExpandResult { value: ExpandedEager::new(quote! {}), err: Some(err) }
}
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
_ => {
- err.get_or_insert(mbe::ExpandError::UnexpectedToken);
+ err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
}
}
}
}
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
_ => {
- err.get_or_insert(mbe::ExpandError::UnexpectedToken);
+ err.get_or_insert(mbe::ExpandError::UnexpectedToken.into());
}
}
}
call_id: MacroCallId,
path_str: &str,
allow_recursion: bool,
-) -> Result<FileId, mbe::ExpandError> {
+) -> Result<FileId, ExpandError> {
let call_site = call_id.as_file().original_file(db);
let path = AnchoredPath { anchor: call_site, path: path_str };
- let res = db.resolve_path(path).ok_or_else(|| {
- mbe::ExpandError::Other(format!("failed to load file `{path_str}`").into())
- })?;
+ let res = db
+ .resolve_path(path)
+ .ok_or_else(|| ExpandError::Other(format!("failed to load file `{path_str}`").into()))?;
// Prevent include itself
if res == call_site && !allow_recursion {
- Err(mbe::ExpandError::Other(format!("recursive inclusion of `{path_str}`").into()))
+ Err(ExpandError::Other(format!("recursive inclusion of `{path_str}`").into()))
} else {
Ok(res)
}
}
-fn parse_string(tt: &tt::Subtree) -> Result<String, mbe::ExpandError> {
+fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
tt.token_trees
.get(0)
.and_then(|tt| match tt {
tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(it),
_ => None,
})
- .ok_or(mbe::ExpandError::ConversionError)
+ .ok_or(mbe::ExpandError::ConversionError.into())
}
fn include_expand(
// The only variable rust-analyzer ever sets is `OUT_DIR`, so only diagnose that to avoid
// unnecessary diagnostics for eg. `CARGO_PKG_NAME`.
if key == "OUT_DIR" {
- err = Some(mbe::ExpandError::Other(
+ err = Some(ExpandError::Other(
r#"`OUT_DIR` not set, enable "run build scripts" to fix"#.into(),
));
}
use base_db::{salsa, SourceDatabase};
use either::Either;
use limit::Limit;
-use mbe::{syntax_node_to_token_tree, ExpandError, ExpandResult};
+use mbe::syntax_node_to_token_tree;
use rustc_hash::FxHashSet;
use syntax::{
algo::diff,
};
use crate::{
- ast_id_map::AstIdMap, hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander,
- BuiltinFnLikeExpander, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
- MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
+ ast_id_map::AstIdMap, fixup, hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander,
+ BuiltinFnLikeExpander, ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr,
+ MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile,
+ ProcMacroExpander,
};
/// Total limit on the number of tokens produced by any macro invocation.
db: &dyn AstDatabase,
id: MacroCallId,
tt: &tt::Subtree,
- ) -> mbe::ExpandResult<tt::Subtree> {
+ ) -> ExpandResult<tt::Subtree> {
match self {
- TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt),
- TokenExpander::Builtin(it) => it.expand(db, id, tt),
+ TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt).map_err(Into::into),
+ TokenExpander::Builtin(it) => it.expand(db, id, tt).map_err(Into::into),
TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt),
TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
TokenExpander::ProcMacro(_) => {
/// Lowers syntactic macro call to a token tree representation.
#[salsa::transparent]
- fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
+ fn macro_arg(
+ &self,
+ id: MacroCallId,
+ ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>;
/// Extracts syntax node, corresponding to a macro call. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
// Build the subtree and token mapping for the speculative args
let censor = censor_for_macro_input(&loc, &speculative_args);
- let (mut tt, spec_args_tmap) =
- mbe::syntax_node_to_token_tree_censored(&speculative_args, &censor);
+ let mut fixups = fixup::fixup_syntax(&speculative_args);
+ fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
+ let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+ &speculative_args,
+ fixups.token_map,
+ fixups.next_id,
+ fixups.replace,
+ fixups.append,
+ );
let (attr_arg, token_id) = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
- let speculative_expansion = if let MacroDefKind::ProcMacro(expander, ..) = loc.def.kind {
+ let mut speculative_expansion = if let MacroDefKind::ProcMacro(expander, ..) = loc.def.kind {
tt.delimiter = None;
expander.expand(db, loc.krate, &tt, attr_arg.as_ref())
} else {
};
let expand_to = macro_expand_to(db, actual_macro_call);
+ fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
let range = rev_tmap.first_range_by_token(token_id, token_to_map.kind())?;
}
}
-fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
+fn macro_arg(
+ db: &dyn AstDatabase,
+ id: MacroCallId,
+) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
let arg = db.macro_arg_text(id)?;
let loc = db.lookup_intern_macro_call(id);
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
- let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, &censor);
+ let mut fixups = fixup::fixup_syntax(&node);
+ fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
+ let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+ &node,
+ fixups.token_map,
+ fixups.next_id,
+ fixups.replace,
+ fixups.append,
+ );
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter = None;
}
- Some(Arc::new((tt, tmap)))
+ Some(Arc::new((tt, tmap, fixups.undo_info)))
}
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
ast::Item::cast(node.clone())?
.attrs()
.take(derive_attr_index as usize + 1)
+ // FIXME
.filter(|attr| attr.simple_name().as_deref() == Some("derive"))
.map(|it| it.syntax().clone())
.collect()
}
+ MacroCallKind::Attr { is_derive: true, .. } => return None,
MacroCallKind::Attr { invoc_attr_index, .. } => {
cov_mark::hit!(attribute_macro_attr_censoring);
ast::Item::cast(node.clone())?
let macro_arg = match db.macro_arg(id) {
Some(it) => it,
- None => return ExpandResult::str_err("Failed to lower macro args to token tree".into()),
+ None => {
+ return ExpandResult::only_err(ExpandError::Other(
+ "Failed to lower macro args to token tree".into(),
+ ))
+ }
};
let expander = match db.macro_def(loc.def) {
// FIXME: This is weird -- we effectively report macro *definition*
// errors lazily, when we try to expand the macro. Instead, they should
// be reported at the definition site (when we construct a def map).
- Err(err) => return ExpandResult::str_err(format!("invalid macro definition: {}", err)),
+ Err(err) => {
+ return ExpandResult::only_err(ExpandError::Other(
+ format!("invalid macro definition: {}", err).into(),
+ ))
+ }
};
- let ExpandResult { value: tt, err } = expander.expand(db, id, ¯o_arg.0);
+ let ExpandResult { value: mut tt, err } = expander.expand(db, id, ¯o_arg.0);
// Set a hard limit for the expanded tt
let count = tt.count();
- // XXX: Make ExpandResult a real error and use .map_err instead?
if TOKEN_LIMIT.check(count).is_err() {
- return ExpandResult::str_err(format!(
- "macro invocation exceeds token limit: produced {} tokens, limit is {}",
- count,
- TOKEN_LIMIT.inner(),
+ return ExpandResult::only_err(ExpandError::Other(
+ format!(
+ "macro invocation exceeds token limit: produced {} tokens, limit is {}",
+ count,
+ TOKEN_LIMIT.inner(),
+ )
+ .into(),
));
}
+ fixup::reverse_fixups(&mut tt, ¯o_arg.1, ¯o_arg.2);
+
ExpandResult { value: Some(Arc::new(tt)), err }
}
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
let macro_arg = match db.macro_arg(id) {
Some(it) => it,
- None => return ExpandResult::str_err("No arguments for proc-macro".to_string()),
+ None => {
+ return ExpandResult::only_err(ExpandError::Other("No arguments for proc-macro".into()))
+ }
};
let expander = match loc.def.kind {
use std::sync::Arc;
use base_db::CrateId;
-use mbe::ExpandResult;
use syntax::{ted, SyntaxNode};
use crate::{
db::AstDatabase,
hygiene::Hygiene,
mod_path::ModPath,
- EagerCallInfo, ExpandTo, InFile, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
- MacroDefKind, UnresolvedMacro,
+ EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
+ MacroCallLoc, MacroDefId, MacroDefKind, UnresolvedMacro,
};
#[derive(Debug)]
}
pub trait ErrorSink {
- fn emit(&mut self, err: mbe::ExpandError);
+ fn emit(&mut self, err: ExpandError);
fn option<T>(
&mut self,
opt: Option<T>,
- error: impl FnOnce() -> mbe::ExpandError,
+ error: impl FnOnce() -> ExpandError,
) -> Result<T, ErrorEmitted> {
match opt {
Some(it) => Ok(it),
fn option_with<T>(
&mut self,
opt: impl FnOnce() -> Option<T>,
- error: impl FnOnce() -> mbe::ExpandError,
+ error: impl FnOnce() -> ExpandError,
) -> Result<T, ErrorEmitted> {
self.option(opt(), error)
}
- fn result<T>(&mut self, res: Result<T, mbe::ExpandError>) -> Result<T, ErrorEmitted> {
+ fn result<T>(&mut self, res: Result<T, ExpandError>) -> Result<T, ErrorEmitted> {
match res {
Ok(it) => Ok(it),
Err(e) => {
}
}
-impl ErrorSink for &'_ mut dyn FnMut(mbe::ExpandError) {
- fn emit(&mut self, err: mbe::ExpandError) {
+impl ErrorSink for &'_ mut dyn FnMut(ExpandError) {
+ fn emit(&mut self, err: ExpandError) {
self(err);
}
}
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
- diagnostic_sink: &mut dyn FnMut(mbe::ExpandError),
+ diagnostic_sink: &mut dyn FnMut(ExpandError),
) -> Result<Result<MacroCallId, ErrorEmitted>, UnresolvedMacro> {
let hygiene = Hygiene::new(db, macro_call.file_id);
let parsed_args = macro_call
if let MacroDefKind::BuiltInEager(eager, _) = def.kind {
let res = eager.expand(db, arg_id, &subtree);
if let Some(err) = res.err {
- diagnostic_sink(err);
+ diagnostic_sink(err.into());
}
let loc = MacroCallLoc {
curr: InFile<SyntaxNode>,
krate: CrateId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
- mut diagnostic_sink: &mut dyn FnMut(mbe::ExpandError),
+ mut diagnostic_sink: &mut dyn FnMut(ExpandError),
) -> Result<Result<SyntaxNode, ErrorEmitted>, UnresolvedMacro> {
let original = curr.value.clone_for_update();
let def = match child.path().and_then(|path| ModPath::from_src(db, path, &hygiene)) {
Some(path) => macro_resolver(path.clone()).ok_or_else(|| UnresolvedMacro { path })?,
None => {
- diagnostic_sink(mbe::ExpandError::Other("malformed macro invocation".into()));
+ diagnostic_sink(ExpandError::Other("malformed macro invocation".into()));
continue;
}
};
--- /dev/null
+//! To make attribute macros work reliably when typing, we need to take care to
+//! fix up syntax errors in the code we're passing to them.
+use std::mem;
+
+use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
+use rustc_hash::FxHashMap;
+use syntax::{
+ ast::{self, AstNode},
+ match_ast, SyntaxKind, SyntaxNode, TextRange,
+};
+use tt::Subtree;
+
+/// The result of calculating fixes for a syntax node -- a bunch of changes
+/// (appending to and replacing nodes), the information that is needed to
+/// reverse those changes afterwards, and a token map.
+#[derive(Debug)]
+pub(crate) struct SyntaxFixups {
+    /// Synthetic tokens to insert after a node's own tokens.
+    pub(crate) append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    /// Synthetic tokens that replace a node entirely.
+    pub(crate) replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    /// Data `reverse_fixups` needs to restore the original subtrees.
+    pub(crate) undo_info: SyntaxFixupUndoInfo,
+    /// Token map accumulated while lowering replaced nodes to token trees.
+    pub(crate) token_map: TokenMap,
+    /// Next free token id, to be passed on to
+    /// `mbe::syntax_node_to_token_tree_with_modifications`.
+    pub(crate) next_id: u32,
+}
+
+/// This is the information needed to reverse the fixups.
+#[derive(Debug, PartialEq, Eq)]
+pub struct SyntaxFixupUndoInfo {
+    /// Original token subtrees of replaced nodes, indexed by their
+    /// `SyntheticTokenId`.
+    original: Vec<Subtree>,
+}
+
+// Sentinel id for purely synthetic tokens that have no original counterpart;
+// `reverse_fixups` simply drops tokens carrying this id.
+const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
+
+/// Walks `node` and computes the changes (see [`SyntaxFixups`]) needed to turn
+/// it into something syntactically valid, so that attribute macros get
+/// well-formed input even while the user is still typing.
+pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
+    let mut append = FxHashMap::default();
+    let mut replace = FxHashMap::default();
+    let mut preorder = node.preorder();
+    let mut original = Vec::new();
+    let mut token_map = TokenMap::default();
+    let mut next_id = 0;
+    while let Some(event) = preorder.next() {
+        let node = match event {
+            syntax::WalkEvent::Enter(node) => node,
+            syntax::WalkEvent::Leave(_) => continue,
+        };
+
+        if can_handle_error(&node) && has_error_to_handle(&node) {
+            // the node contains an error node, we have to completely replace it by something valid
+            let (original_tree, new_tmap, new_next_id) =
+                mbe::syntax_node_to_token_tree_with_modifications(
+                    &node,
+                    mem::take(&mut token_map),
+                    next_id,
+                    Default::default(),
+                    Default::default(),
+                );
+            token_map = new_tmap;
+            next_id = new_next_id;
+            // Remember the original subtree so `reverse_fixups` can put it
+            // back; the synthetic id doubles as the index into `original`.
+            let idx = original.len() as u32;
+            original.push(original_tree);
+            let replacement = SyntheticToken {
+                kind: SyntaxKind::IDENT,
+                text: "__ra_fixup".into(),
+                range: node.text_range(),
+                id: SyntheticTokenId(idx),
+            };
+            replace.insert(node.clone(), vec![replacement]);
+            // Everything below this node is covered by the replacement.
+            preorder.skip_subtree();
+            continue;
+        }
+
+        // In some other situations, we can fix things by just appending some tokens.
+        let end_range = TextRange::empty(node.text_range().end());
+        match_ast! {
+            match node {
+                ast::FieldExpr(it) => {
+                    if it.name_ref().is_none() {
+                        // incomplete field access: some_expr.|
+                        append.insert(node.clone(), vec![
+                            SyntheticToken {
+                                kind: SyntaxKind::IDENT,
+                                text: "__ra_fixup".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                        ]);
+                    }
+                },
+                ast::ExprStmt(it) => {
+                    // expression statement missing its trailing semicolon
+                    if it.semicolon_token().is_none() {
+                        append.insert(node.clone(), vec![
+                            SyntheticToken {
+                                kind: SyntaxKind::SEMICOLON,
+                                text: ";".into(),
+                                range: end_range,
+                                id: EMPTY_ID,
+                            },
+                        ]);
+                    }
+                },
+                _ => (),
+            }
+        }
+    }
+    SyntaxFixups {
+        append,
+        replace,
+        token_map,
+        next_id,
+        undo_info: SyntaxFixupUndoInfo { original },
+    }
+}
+
+/// Does `node` have a direct child that is a syntax error node?
+fn has_error(node: &SyntaxNode) -> bool {
+    node.children().any(|c| c.kind() == SyntaxKind::ERROR)
+}
+
+/// We only know how to replace expression nodes by a valid placeholder.
+fn can_handle_error(node: &SyntaxNode) -> bool {
+    ast::Expr::can_cast(node.kind())
+}
+
+/// Does `node` contain an error that has to be handled at this level, i.e. an
+/// error not nested inside a descendant that could be replaced itself?
+fn has_error_to_handle(node: &SyntaxNode) -> bool {
+    has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
+}
+
+/// Reverses the changes made by `fixup_syntax`: drops purely synthetic tokens
+/// (those carrying `EMPTY_ID`) and swaps placeholder tokens back for the
+/// original subtrees recorded in `undo_info`.
+pub(crate) fn reverse_fixups(
+    tt: &mut Subtree,
+    token_map: &TokenMap,
+    undo_info: &SyntaxFixupUndoInfo,
+) {
+    // Remove tokens that were synthesized out of thin air.
+    // NOTE(review): the `is_none() ||` clause is redundant -- `None` already
+    // compares unequal to `Some(EMPTY_ID)` -- but it is harmless.
+    tt.token_trees.retain(|tt| match tt {
+        tt::TokenTree::Leaf(leaf) => {
+            token_map.synthetic_token_id(leaf.id()).is_none()
+                || token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID)
+        }
+        _ => true,
+    });
+    tt.token_trees.iter_mut().for_each(|tt| match tt {
+        tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, undo_info),
+        tt::TokenTree::Leaf(leaf) => {
+            // A remaining synthetic id is the index of the original subtree
+            // that this placeholder token replaced.
+            if let Some(id) = token_map.synthetic_token_id(leaf.id()) {
+                let original = &undo_info.original[id.0 as usize];
+                *tt = tt::TokenTree::Subtree(original.clone());
+            }
+        }
+    });
+}
+
+#[cfg(test)]
+mod tests {
+    use expect_test::{expect, Expect};
+
+    use super::reverse_fixups;
+
+    /// Parses `ra_fixture`, applies the syntax fixups, and checks that
+    /// 1) the fixed-up token tree matches `expect`,
+    /// 2) the fixed-up tree parses without syntax errors, and
+    /// 3) reversing the fixups reproduces the original input.
+    #[track_caller]
+    fn check(ra_fixture: &str, mut expect: Expect) {
+        let parsed = syntax::SourceFile::parse(ra_fixture);
+        let fixups = super::fixup_syntax(&parsed.syntax_node());
+        let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+            &parsed.syntax_node(),
+            fixups.token_map,
+            fixups.next_id,
+            fixups.replace,
+            fixups.append,
+        );
+
+        let mut actual = tt.to_string();
+        actual.push_str("\n");
+
+        expect.indent(false);
+        expect.assert_eq(&actual);
+
+        // the fixed-up tree should be syntactically valid
+        let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
+        assert_eq!(
+            parse.errors(),
+            &[],
+            "parse has syntax errors. parse tree:\n{:#?}",
+            parse.syntax_node()
+        );
+
+        reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
+
+        // the fixed-up + reversed version should be equivalent to the original input
+        // (but token IDs don't matter)
+        let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
+        assert_eq!(tt.to_string(), original_as_tt.to_string());
+    }
+
+    #[test]
+    fn incomplete_field_expr_1() {
+        check(
+            r#"
+fn foo() {
+    a.
+}
+"#,
+            expect![[r#"
+fn foo () {a . __ra_fixup}
+"#]],
+        )
+    }
+
+    #[test]
+    fn incomplete_field_expr_2() {
+        check(
+            r#"
+fn foo() {
+    a. ;
+}
+"#,
+            expect![[r#"
+fn foo () {a . __ra_fixup ;}
+"#]],
+        )
+    }
+
+    #[test]
+    fn incomplete_field_expr_3() {
+        check(
+            r#"
+fn foo() {
+    a. ;
+    bar();
+}
+"#,
+            expect![[r#"
+fn foo () {a . __ra_fixup ; bar () ;}
+"#]],
+        )
+    }
+
+    #[test]
+    fn field_expr_before_call() {
+        // another case that easily happens while typing
+        check(
+            r#"
+fn foo() {
+    a.b
+    bar();
+}
+"#,
+            expect![[r#"
+fn foo () {a . b ; bar () ;}
+"#]],
+        )
+    }
+
+    #[test]
+    fn extraneous_comma() {
+        // the whole erroneous call expression gets replaced by `__ra_fixup`
+        check(
+            r#"
+fn foo() {
+    bar(,);
+}
+"#,
+            expect![[r#"
+fn foo () {__ra_fixup ;}
+"#]],
+        )
+    }
+}
use crate::{
db::{self, AstDatabase},
+ fixup,
name::{AsName, Name},
HirFileId, HirFileIdRepr, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
};
attr_input_or_mac_def_start: Option<InFile<TextSize>>,
macro_def: Arc<TokenExpander>,
- macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
+ macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
macro_arg_shift: mbe::Shift,
exp_map: Arc<mbe::TokenMap>,
}
pub mod quote;
pub mod eager;
pub mod mod_path;
+mod fixup;
-pub use mbe::{ExpandError, ExpandResult, Origin};
+pub use mbe::{Origin, ValueResult};
-use std::{hash::Hash, iter, sync::Arc};
+use std::{fmt, hash::Hash, iter, sync::Arc};
use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange, ProcMacroKind};
use either::Either;
proc_macro::ProcMacroExpander,
};
+/// Result of a macro expansion: a value together with an optional
+/// [`ExpandError`].
+pub type ExpandResult<T> = ValueResult<T, ExpandError>;
+
+/// An error produced while expanding a macro.
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum ExpandError {
+    /// The invoked proc-macro could not be resolved.
+    UnresolvedProcMacro,
+    /// An error reported by the `mbe` (macro-by-example) expander.
+    Mbe(mbe::ExpandError),
+    /// A free-form error message.
+    Other(Box<str>),
+}
+
+impl From<mbe::ExpandError> for ExpandError {
+    fn from(mbe: mbe::ExpandError) -> Self {
+        Self::Mbe(mbe)
+    }
+}
+
+impl fmt::Display for ExpandError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            ExpandError::UnresolvedProcMacro => f.write_str("unresolved proc-macro"),
+            ExpandError::Mbe(it) => it.fmt(f),
+            ExpandError::Other(it) => f.write_str(it),
+        }
+    }
+}
+
/// Input to the analyzer is a set of files, where each file is identified by
/// `FileId` and contains source code. However, another source of source code in
/// Rust are macros: each macro can be thought of as producing a "temporary
},
Derive {
ast_id: AstId<ast::Adt>,
- derive_name: Box<str>,
/// Syntactical index of the invoking `#[derive]` attribute.
///
/// Outer attributes are counted first, then inner attributes. This does not support
/// out-of-line modules, which may have attributes spread across 2 files!
derive_attr_index: u32,
+ /// Index of the derive macro in the derive attribute
+ derive_index: u32,
},
Attr {
ast_id: AstId<ast::Item>,
- attr_name: Box<str>,
- attr_args: (tt::Subtree, mbe::TokenMap),
+ attr_args: Arc<(tt::Subtree, mbe::TokenMap)>,
/// Syntactical index of the invoking `#[attribute]`.
///
/// Outer attributes are counted first, then inner attributes. This does not support
/// out-of-line modules, which may have attributes spread across 2 files!
invoc_attr_index: u32,
+ /// Whether this attribute is the `#[derive]` attribute.
+ is_derive: bool,
},
}
let arg_tt = loc.kind.arg(db)?;
+ let macro_def = db.macro_def(loc.def).ok()?;
+ let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?;
+ let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
+
let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) {
ast::Macro::MacroRules(mac) => mac.token_tree()?,
+ ast::Macro::MacroDef(_)
+ if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) =>
+ {
+ return None
+ }
ast::Macro::MacroDef(mac) => mac.body()?,
};
Some(InFile::new(id.file_id, def_tt))
_ => None,
});
- let macro_def = db.macro_def(loc.def).ok()?;
- let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?;
- let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
-
Some(ExpansionInfo {
expanded: InFile::new(self, parse.syntax_node()),
arg: InFile::new(loc.kind.file_id(), arg_tt),
}
/// Indicate it is macro file generated for builtin derive
- pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option<InFile<ast::Item>> {
+ pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option<InFile<ast::Attr>> {
match self.0 {
HirFileIdRepr::FileId(_) => None,
HirFileIdRepr::MacroFile(macro_file) => {
let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
- let item = match loc.def.kind {
+ let attr = match loc.def.kind {
MacroDefKind::BuiltInDerive(..) => loc.kind.to_node(db),
_ => return None,
};
- Some(item.with_value(ast::Item::cast(item.value.clone())?))
+ Some(attr.with_value(ast::Attr::cast(attr.value.clone())?))
}
}
}
}
}
- /// Return whether this file is an include macro
+ /// Return whether this file is an attr macro
pub fn is_attr_macro(&self, db: &dyn db::AstDatabase) -> bool {
match self.0 {
HirFileIdRepr::MacroFile(macro_file) => {
}
}
+ /// Return whether this file is the pseudo expansion of the derive attribute.
+ /// See [`crate::builtin_attr_macro::derive_attr_expand`].
+ pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::AstDatabase) -> bool {
+ match self.0 {
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.kind, MacroCallKind::Attr { is_derive: true, .. })
+ }
+ _ => false,
+ }
+ }
+
pub fn is_macro(self) -> bool {
matches!(self.0, HirFileIdRepr::MacroFile(_))
}
MacroCallKind::FnLike { ast_id, .. } => {
ast_id.with_value(ast_id.to_node(db).syntax().clone())
}
- MacroCallKind::Derive { ast_id, .. } => {
- ast_id.with_value(ast_id.to_node(db).syntax().clone())
+ MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
+ // FIXME: handle `cfg_attr`
+ ast_id.with_value(ast_id.to_node(db)).map(|it| {
+ it.doc_comments_and_attrs()
+ .nth(*derive_attr_index as usize)
+ .and_then(|it| match it {
+ Either::Left(attr) => Some(attr.syntax().clone()),
+ Either::Right(_) => None,
+ })
+ .unwrap_or_else(|| it.syntax().clone())
+ })
+ }
+ MacroCallKind::Attr { ast_id, is_derive: true, invoc_attr_index, .. } => {
+ // FIXME: handle `cfg_attr`
+ ast_id.with_value(ast_id.to_node(db)).map(|it| {
+ it.doc_comments_and_attrs()
+ .nth(*invoc_attr_index as usize)
+ .and_then(|it| match it {
+ Either::Left(attr) => Some(attr.syntax().clone()),
+ Either::Right(_) => None,
+ })
+ .unwrap_or_else(|| it.syntax().clone())
+ })
}
MacroCallKind::Attr { ast_id, .. } => {
ast_id.with_value(ast_id.to_node(db).syntax().clone())
match self {
MacroCallKind::FnLike { expand_to, .. } => *expand_to,
MacroCallKind::Derive { .. } => ExpandTo::Items,
+ MacroCallKind::Attr { is_derive: true, .. } => ExpandTo::Statements,
MacroCallKind::Attr { .. } => ExpandTo::Items, // is this always correct?
}
}
attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
macro_def: Arc<TokenExpander>,
- macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
+ macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
/// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
/// and as such we need to shift tokens if they are part of an attributes input instead of their item.
macro_arg_shift: mbe::Shift,
let token_range = token.value.text_range();
match &loc.kind {
- MacroCallKind::Attr { attr_args: (_, map), invoc_attr_index, .. } => {
+ MacroCallKind::Attr { attr_args, invoc_attr_index, is_derive, .. } => {
let attr = item
.doc_comments_and_attrs()
.nth(*invoc_attr_index as usize)
let relative_range =
token.value.text_range().checked_sub(attr_input_start)?;
// shift by the item's tree's max id
- let token_id =
- self.macro_arg_shift.shift(map.token_by_range(relative_range)?);
+ let token_id = attr_args.1.token_by_range(relative_range)?;
+ let token_id = if *is_derive {
+ // we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens
+ token_id
+ } else {
+ self.macro_arg_shift.shift(token_id)
+ };
Some(token_id)
}
_ => None,
// Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item.
let (token_map, tt) = match &loc.kind {
- MacroCallKind::Attr { attr_args: (_, arg_token_map), .. } => {
+ MacroCallKind::Attr { attr_args, is_derive: true, .. } => {
+ (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
+ }
+ MacroCallKind::Attr { attr_args, .. } => {
// try unshifting the the token id, if unshifting fails, the token resides in the non-item attribute input
// note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this
match self.macro_arg_shift.unshift(token_id) {
Some(unshifted) => {
token_id = unshifted;
- (arg_token_map, self.attr_input_or_mac_def.clone()?.syntax().cloned())
+ (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
}
None => (&self.macro_arg.1, self.arg.clone()),
}
}
}
+impl InFile<SyntaxToken> {
+ pub fn upmap(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxToken>> {
+ let expansion = self.file_id.expansion_info(db)?;
+ expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it)
+ }
+}
+
fn ascend_node_border_tokens(
db: &dyn db::AstDatabase,
InFile { file_id, value: node }: InFile<&SyntaxNode>,
expansion: &ExpansionInfo,
token: InFile<SyntaxToken>,
) -> Option<InFile<SyntaxToken>> {
- let (mapped, origin) = expansion.map_token_up(db, token.as_ref())?;
- if origin != Origin::Call {
- return None;
- }
- if let Some(info) = mapped.file_id.expansion_info(db) {
- return ascend_call_token(db, &info, mapped);
+ let mut mapping = expansion.map_token_up(db, token.as_ref())?;
+ while let (mapped, Origin::Call) = mapping {
+ match mapped.file_id.expansion_info(db) {
+ Some(info) => mapping = info.map_token_up(db, mapped.as_ref())?,
+ None => return Some(mapped),
+ }
}
- Some(mapped)
+ None
}
impl InFile<SyntaxToken> {
MACRO_TYPE => ExpandTo::Type,
ARG_LIST | TRY_EXPR | TUPLE_EXPR | PAREN_EXPR | ARRAY_EXPR | FOR_EXPR | PATH_EXPR
- | CLOSURE_EXPR | CONDITION | BREAK_EXPR | RETURN_EXPR | MATCH_EXPR | MATCH_ARM
- | MATCH_GUARD | RECORD_EXPR_FIELD | CALL_EXPR | INDEX_EXPR | METHOD_CALL_EXPR
- | FIELD_EXPR | AWAIT_EXPR | CAST_EXPR | REF_EXPR | PREFIX_EXPR | RANGE_EXPR
- | BIN_EXPR => ExpandTo::Expr,
+ | CLOSURE_EXPR | BREAK_EXPR | RETURN_EXPR | MATCH_EXPR | MATCH_ARM | MATCH_GUARD
+ | RECORD_EXPR_FIELD | CALL_EXPR | INDEX_EXPR | METHOD_CALL_EXPR | FIELD_EXPR
+ | AWAIT_EXPR | CAST_EXPR | REF_EXPR | PREFIX_EXPR | RANGE_EXPR | BIN_EXPR
+ | LET_EXPR => ExpandTo::Expr,
LET_STMT => {
// FIXME: Handle LHS Pattern
ExpandTo::Expr
//! Proc Macro Expander stub
use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
-use mbe::ExpandResult;
-use crate::db::AstDatabase;
+use crate::{db::AstDatabase, ExpandError, ExpandResult};
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub struct ProcMacroExpander {
let krate_graph = db.crate_graph();
let proc_macro = match krate_graph[self.krate].proc_macro.get(id.0 as usize) {
Some(proc_macro) => proc_macro,
- None => return ExpandResult::str_err("No proc-macro found.".to_string()),
+ None => {
+ return ExpandResult::only_err(ExpandError::Other(
+ "No proc-macro found.".into(),
+ ))
+ }
};
// Proc macros have access to the environment variables of the invoking crate.
{
ExpandResult {
value: tt.clone(),
- err: Some(mbe::ExpandError::Other(text.into())),
+ err: Some(ExpandError::Other(text.into())),
}
}
ProcMacroExpansionError::System(text)
| ProcMacroExpansionError::Panic(text) => {
- ExpandResult::only_err(mbe::ExpandError::Other(text.into()))
+ ExpandResult::only_err(ExpandError::Other(text.into()))
}
},
}
}
- None => ExpandResult::only_err(mbe::ExpandError::UnresolvedProcMacro),
+ None => ExpandResult::only_err(ExpandError::UnresolvedProcMacro),
}
}
}
inner.hir_fmt(f)?;
write!(f, "]")?;
}
- TypeRef::Fn(tys, is_varargs) => {
+ TypeRef::Fn(parameters, is_varargs) => {
// FIXME: Function pointer qualifiers.
write!(f, "fn(")?;
- f.write_joined(&tys[..tys.len() - 1], ", ")?;
- if *is_varargs {
- write!(f, "{}...", if tys.len() == 1 { "" } else { ", " })?;
- }
- write!(f, ")")?;
- let ret_ty = tys.last().unwrap();
- match ret_ty {
- TypeRef::Tuple(tup) if tup.is_empty() => {}
- _ => {
- write!(f, " -> ")?;
- ret_ty.hir_fmt(f)?;
+ if let Some(((_, return_type), function_parameters)) = parameters.split_last() {
+ for index in 0..function_parameters.len() {
+ let (param_name, param_type) = &function_parameters[index];
+ if let Some(name) = param_name {
+ write!(f, "{}: ", name)?;
+ }
+
+ param_type.hir_fmt(f)?;
+
+ if index != function_parameters.len() - 1 {
+ write!(f, ", ")?;
+ }
+ }
+ if *is_varargs {
+ write!(f, "{}...", if parameters.len() == 1 { "" } else { ", " })?;
+ }
+ write!(f, ")")?;
+ match &return_type {
+ TypeRef::Tuple(tup) if tup.is_empty() => {}
+ _ => {
+ write!(f, " -> ")?;
+ return_type.hir_fmt(f)?;
+ }
}
}
}
use chalk_ir::{cast::Cast, fold::Shift, Mutability, TyVariableKind};
use hir_def::{
- expr::{
- ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, MatchGuard, Ordering, Statement,
- UnaryOp,
- },
+ expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, Ordering, Statement, UnaryOp},
path::{GenericArg, GenericArgs},
resolver::resolver_for_expr,
FieldId, FunctionId, ItemContainerId, Lookup,
coerce.complete()
}
+ &Expr::Let { pat, expr } => {
+ let input_ty = self.infer_expr(expr, &Expectation::none());
+ self.infer_pat(pat, &input_ty, BindingMode::default());
+ TyKind::Scalar(Scalar::Bool).intern(Interner)
+ }
Expr::Block { statements, tail, label, id: _ } => {
let old_resolver = mem::replace(
&mut self.resolver,
for arm in arms.iter() {
self.diverges = Diverges::Maybe;
let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
- match arm.guard {
- Some(MatchGuard::If { expr: guard_expr }) => {
- self.infer_expr(
- guard_expr,
- &Expectation::has_type(
- TyKind::Scalar(Scalar::Bool).intern(Interner),
- ),
- );
- }
- Some(MatchGuard::IfLet { expr, pat }) => {
- let input_ty = self.infer_expr(expr, &Expectation::none());
- let _pat_ty = self.infer_pat(pat, &input_ty, BindingMode::default());
- }
- _ => {}
+ if let Some(guard_expr) = arm.guard {
+ self.infer_expr(
+ guard_expr,
+ &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+ );
}
let arm_ty = self.infer_expr_inner(arm.expr, &expected);
use hir_def::{
expr::{BindingAnnotation, Expr, Literal, Pat, PatId, RecordFieldPat},
path::Path,
+ type_ref::ConstScalar,
};
use hir_expand::name::Name;
Adjust, Adjustment, AutoBorrow, BindingMode, Expectation, InferenceContext, TypeMismatch,
},
lower::lower_to_chalk_mutability,
- static_lifetime, Interner, Substitution, Ty, TyBuilder, TyExt, TyKind,
+ static_lifetime, ConcreteConst, ConstValue, Interner, Substitution, Ty, TyBuilder, TyExt,
+ TyKind,
};
impl<'a> InferenceContext<'a> {
self.infer_pat(pat_id, &elem_ty, default_bm);
}
- let pat_ty = match expected.kind(Interner) {
- TyKind::Array(_, const_) => TyKind::Array(elem_ty, const_.clone()),
- _ => TyKind::Slice(elem_ty),
- }
- .intern(Interner);
if let &Some(slice_pat_id) = slice {
- self.infer_pat(slice_pat_id, &pat_ty, default_bm);
+ let rest_pat_ty = match expected.kind(Interner) {
+ TyKind::Array(_, length) => {
+ let length = match length.data(Interner).value {
+ ConstValue::Concrete(ConcreteConst {
+ interned: ConstScalar::Usize(length),
+ }) => length.checked_sub((prefix.len() + suffix.len()) as u64),
+ _ => None,
+ };
+ TyKind::Array(elem_ty.clone(), crate::consteval::usize_const(length))
+ }
+ _ => TyKind::Slice(elem_ty.clone()),
+ }
+ .intern(Interner);
+ self.infer_pat(slice_pat_id, &rest_pat_ty, default_bm);
}
- pat_ty
+ match expected.kind(Interner) {
+ TyKind::Array(_, const_) => TyKind::Array(elem_ty, const_.clone()),
+ _ => TyKind::Slice(elem_ty),
+ }
+ .intern(Interner)
}
Pat::Wild => expected.clone(),
Pat::Range { start, end } => {
TypeRef::Placeholder => TyKind::Error.intern(Interner),
TypeRef::Fn(params, is_varargs) => {
let substs = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
- Substitution::from_iter(Interner, params.iter().map(|tr| ctx.lower_ty(tr)))
+ Substitution::from_iter(Interner, params.iter().map(|(_, tr)| ctx.lower_ty(tr)))
});
TyKind::Function(FnPointer {
num_binders: 0, // FIXME lower `for<'a> fn()` correctly
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
- !0..6 '1isize': isize
39..442 '{ ...!(); }': ()
73..94 'spam!(...am!())': {unknown}
100..119 'for _ ...!() {}': ()
117..119 '{}': ()
124..134 '|| spam!()': || -> isize
140..156 'while ...!() {}': ()
+ 146..153 'spam!()': bool
154..156 '{}': ()
161..174 'break spam!()': !
180..194 'return spam!()': !
!0..6 '1isize': isize
!0..6 '1isize': isize
!0..6 '1isize': isize
- !0..6 '1isize': isize
53..456 '{ ...!(); }': ()
87..108 'spam!(...am!())': {unknown}
114..133 'for _ ...!() {}': ()
131..133 '{}': ()
138..148 '|| spam!()': || -> isize
154..170 'while ...!() {}': ()
+ 160..167 'spam!()': bool
168..170 '{}': ()
175..188 'break spam!()': !
194..208 'return spam!()': !
139..140 'g': {unknown}
143..144 'e': {unknown}
157..204 'if let... }': ()
+ 160..175 'let [val] = opt': bool
164..169 '[val]': [{unknown}]
165..168 'val': {unknown}
172..175 'opt': [{unknown}]
190..191 'h': {unknown}
194..197 'val': {unknown}
210..236 'if let...rue {}': ()
+ 213..233 'let x ... &true': bool
217..225 'x @ true': &bool
221..225 'true': bool
221..225 'true': bool
37..38 'x': &i32
46..208 '{ ...) {} }': ()
52..75 'if let...y() {}': ()
+ 55..72 'let "f... any()': bool
59..64 '"foo"': &str
59..64 '"foo"': &str
67..70 'any': fn any<&str>() -> &str
67..72 'any()': &str
73..75 '{}': ()
80..99 'if let...y() {}': ()
+ 83..96 'let 1 = any()': bool
87..88 '1': i32
87..88 '1': i32
91..94 'any': fn any<i32>() -> i32
91..96 'any()': i32
97..99 '{}': ()
104..126 'if let...y() {}': ()
+ 107..123 'let 1u... any()': bool
111..115 '1u32': u32
111..115 '1u32': u32
118..121 'any': fn any<u32>() -> u32
118..123 'any()': u32
124..126 '{}': ()
131..153 'if let...y() {}': ()
+ 134..150 'let 1f... any()': bool
138..142 '1f32': f32
138..142 '1f32': f32
145..148 'any': fn any<f32>() -> f32
145..150 'any()': f32
151..153 '{}': ()
158..179 'if let...y() {}': ()
+ 161..176 'let 1.0 = any()': bool
165..168 '1.0': f64
165..168 '1.0': f64
171..174 'any': fn any<f64>() -> f64
171..176 'any()': f64
177..179 '{}': ()
184..206 'if let...y() {}': ()
+ 187..203 'let tr... any()': bool
191..195 'true': bool
191..195 'true': bool
198..201 'any': fn any<bool>() -> bool
8..9 'x': &i32
17..75 '{ ...2 {} }': ()
23..45 'if let...u32 {}': ()
+ 26..42 'let 1....= 2u32': bool
30..35 '1..76': u32
38..42 '2u32': u32
43..45 '{}': ()
50..73 'if let...u32 {}': ()
+ 53..70 'let 1....= 2u32': bool
57..63 '1..=76': u32
66..70 '2u32': u32
71..73 '{}': ()
);
}
+#[test]
+fn slice_pattern_correctly_handles_array_length() {
+    // A rest pattern (`middle @ ..`) matched against `[i32; 5]` with two
+    // other prefix/suffix elements around it must infer to `[i32; 2]`
+    // (array length minus prefix and suffix lengths).
+    check_infer(
+        r#"
+fn main() {
+    let [head, middle @ .., tail, tail2] = [1, 2, 3, 4, 5];
+}
+    "#,
+        expect![[r#"
+            10..73 '{ ... 5]; }': ()
+            20..52 '[head,...tail2]': [i32; 5]
+            21..25 'head': i32
+            27..38 'middle @ ..': [i32; 2]
+            36..38 '..': [i32; 2]
+            40..44 'tail': i32
+            46..51 'tail2': i32
+            55..70 '[1, 2, 3, 4, 5]': [i32; 5]
+            56..57 '1': i32
+            59..60 '2': i32
+            62..63 '3': i32
+            65..66 '4': i32
+            68..69 '5': i32
+        "#]],
+    );
+}
+
#[test]
fn pattern_lookup_in_value_ns() {
check_types(
176..193 'Thing ...1i32 }': Thing<i32>
187..191 '1i32': i32
199..240 'if let... }': ()
+ 202..221 'let Th... } = z': bool
206..217 'Thing { t }': Thing<i32>
214..215 't': i32
220..221 'z': Thing<i32>
visit_file_defs(&Semantics::new(db), file_id, &mut |def| {
let range = match def {
Definition::Const(konst) if config.annotate_references => {
- konst.source(db).and_then(|node| name_range(&node, file_id))
+ konst.source(db).and_then(|node| name_range(db, node, file_id))
}
Definition::Trait(trait_) if config.annotate_references || config.annotate_impls => {
- trait_.source(db).and_then(|node| name_range(&node, file_id))
+ trait_.source(db).and_then(|node| name_range(db, node, file_id))
}
Definition::Adt(adt) => match adt {
hir::Adt::Enum(enum_) => {
.variants(db)
.into_iter()
.map(|variant| {
- variant.source(db).and_then(|node| name_range(&node, file_id))
+ variant.source(db).and_then(|node| name_range(db, node, file_id))
})
.filter_map(std::convert::identity)
.for_each(|range| {
})
}
if config.annotate_references || config.annotate_impls {
- enum_.source(db).and_then(|node| name_range(&node, file_id))
+ enum_.source(db).and_then(|node| name_range(db, node, file_id))
} else {
None
}
}
_ => {
if config.annotate_references || config.annotate_impls {
- adt.source(db).and_then(|node| name_range(&node, file_id))
+ adt.source(db).and_then(|node| name_range(db, node, file_id))
} else {
None
}
});
}
- fn name_range<T: HasName>(node: &InFile<T>, file_id: FileId) -> Option<TextRange> {
- if node.file_id == file_id.into() {
- node.value.name().map(|it| it.syntax().text_range())
- } else {
- // Node is outside the file we are adding annotations to (e.g. macros).
- None
+ fn name_range<T: HasName>(
+ db: &RootDatabase,
+ node: InFile<T>,
+ source_file_id: FileId,
+ ) -> Option<TextRange> {
+ if let Some(InFile { file_id, value }) = node.original_ast_node(db) {
+ if file_id == source_file_id.into() {
+ return value.name().map(|it| it.syntax().text_range());
+ }
}
+ None
}
});
helpers::{insert_whitespace_into_node::insert_ws_into, pick_best_token},
RootDatabase,
};
-use itertools::Itertools;
-use syntax::{ast, ted, AstNode, SyntaxKind, SyntaxNode};
+use syntax::{ast, ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T};
use crate::FilePosition;
// struct Bar;
// ```
- let derive = sema.descend_into_macros(tok.clone()).iter().find_map(|descended| {
- let attr = descended.ancestors().find_map(ast::Attr::cast)?;
- let (path, tt) = attr.as_simple_call()?;
- if path == "derive" {
- let mut tt = tt.syntax().children_with_tokens().skip(1).join("");
- tt.pop();
- let expansions = sema.expand_derive_macro(&attr)?;
- Some(ExpandedMacro {
- name: tt,
- expansion: expansions.into_iter().map(insert_ws_into).join(""),
- })
- } else {
- None
+ let derive = sema.descend_into_macros(tok.clone()).into_iter().find_map(|descended| {
+ let hir_file = sema.hir_file_for(&descended.parent()?);
+ if !hir_file.is_derive_attr_pseudo_expansion(db) {
+ return None;
}
+
+ let name = descended.ancestors().filter_map(ast::Path::cast).last()?.to_string();
+ // up map out of the #[derive] expansion
+ let token = hir::InFile::new(hir_file, descended).upmap(db)?.value;
+ let attr = token.ancestors().find_map(ast::Attr::cast)?;
+ let expansions = sema.expand_derive_macro(&attr)?;
+ let idx = attr
+ .token_tree()?
+ .token_trees_and_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .take_while(|it| it != &token)
+ .filter(|it| it.kind() == T![,])
+ .count();
+ Some(ExpandedMacro {
+ name,
+ expansion: expansions.get(idx).cloned().map(insert_ws_into)?.to_string(),
+ })
});
if derive.is_some() {
struct Foo {}
"#,
expect![[r#"
- Copy, Clone
+ Copy
impl < >core::marker::Copy for Foo< >{}
+ "#]],
+ );
+ check(
+ r#"
+//- minicore: copy, clone, derive
+
+#[derive(Copy, Cl$0one)]
+struct Foo {}
+"#,
+ expect![[r#"
+ Clone
impl < >core::clone::Clone for Foo< >{}
"#]],
let sema = &hir::Semantics::new(db);
let file = sema.parse(file_id).syntax().clone();
- let offset = if !range.is_empty() {
+ if !range.is_empty() {
return hover_ranged(&file, range, sema, config);
- } else {
- range.start()
- };
+ }
+ let offset = range.start();
let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] => 3,
let descended = sema.descend_into_macros(original_token.clone());
// FIXME: Definition should include known lints and the like instead of having this special case here
- if let Some(res) = descended.iter().find_map(|token| {
+ let hovered_lint = descended.iter().find_map(|token| {
let attr = token.ancestors().find_map(ast::Attr::cast)?;
render::try_for_lint(&attr, token)
- }) {
+ });
+ if let Some(res) = hovered_lint {
return Some(RangeInfo::new(original_token.text_range(), res));
}
if result.is_none() {
// fallbacks, show keywords or types
- if let Some(res) = render::keyword(sema, config, &original_token) {
+
+ let res = descended.iter().find_map(|token| render::keyword(sema, config, &token));
+ if let Some(res) = res {
return Some(RangeInfo::new(original_token.text_range(), res));
}
let res = descended
sema: &Semantics<RootDatabase>,
config: &HoverConfig,
) -> Option<RangeInfo<HoverResult>> {
+ // FIXME: make this work in attributes
let expr_or_pat = file.covering_element(range).ancestors().find_map(|it| {
match_ast! {
match it {
algo, ast,
display::{fn_as_proc_macro_label, macro_label},
match_ast, AstNode, Direction,
- SyntaxKind::{CONDITION, LET_STMT},
+ SyntaxKind::{LET_EXPR, LET_STMT},
SyntaxToken, T,
};
}
let parent = token.parent()?;
let famous_defs = FamousDefs(sema, sema.scope(&parent).krate());
- let keyword_mod = if token.kind() == T![fn] && ast::FnPtrType::cast(parent).is_some() {
- // treat fn keyword inside function pointer type as primitive
- format!("prim_{}", token.text())
- } else {
- // std exposes {}_keyword modules with docstrings on the root to document keywords
- format!("{}_keyword", token.text())
- };
+
+ let KeywordHint { description, keyword_mod, actions } = keyword_hints(sema, token, parent);
+
let doc_owner = find_std_module(&famous_defs, &keyword_mod)?;
let docs = doc_owner.attrs(sema.db).docs()?;
let markup = process_markup(
sema.db,
Definition::Module(doc_owner),
- &markup(Some(docs.into()), token.text().into(), None)?,
+ &markup(Some(docs.into()), description, None)?,
config,
);
- Some(HoverResult { markup, actions: Default::default() })
+ Some(HoverResult { markup, actions })
}
pub(super) fn try_for_lint(attr: &ast::Attr, token: &SyntaxToken) -> Option<HoverResult> {
let let_kw = if ident
.syntax()
.parent()
- .map_or(false, |p| p.kind() == LET_STMT || p.kind() == CONDITION)
+ .map_or(false, |p| p.kind() == LET_STMT || p.kind() == LET_EXPR)
{
"let "
} else {
};
markup(None, desc, None)
}
+
+struct KeywordHint {
+ description: String,
+ keyword_mod: String,
+ actions: Vec<HoverAction>,
+}
+
+impl KeywordHint {
+ fn new(description: String, keyword_mod: String) -> Self {
+ Self { description, keyword_mod, actions: Vec::default() }
+ }
+}
+
+fn keyword_hints(
+ sema: &Semantics<RootDatabase>,
+ token: &SyntaxToken,
+ parent: syntax::SyntaxNode,
+) -> KeywordHint {
+ match token.kind() {
+ T![await] | T![loop] | T![match] | T![unsafe] | T![as] | T![try] | T![if] | T![else] => {
+ let keyword_mod = format!("{}_keyword", token.text());
+
+ match ast::Expr::cast(parent).and_then(|site| sema.type_of_expr(&site)) {
+ // ignore the unit type ()
+ Some(ty) if !ty.adjusted.as_ref().unwrap_or(&ty.original).is_unit() => {
+ let mut targets: Vec<hir::ModuleDef> = Vec::new();
+ let mut push_new_def = |item: hir::ModuleDef| {
+ if !targets.contains(&item) {
+ targets.push(item);
+ }
+ };
+ walk_and_push_ty(sema.db, &ty.original, &mut push_new_def);
+
+ let ty = ty.adjusted();
+ let description = format!("{}: {}", token.text(), ty.display(sema.db));
+
+ KeywordHint {
+ description,
+ keyword_mod,
+ actions: vec![HoverAction::goto_type_from_targets(sema.db, targets)],
+ }
+ }
+ _ => KeywordHint {
+ description: token.text().to_string(),
+ keyword_mod,
+ actions: Vec::new(),
+ },
+ }
+ }
+
+ T![fn] => {
+ let module = match ast::FnPtrType::cast(parent) {
+ // treat fn keyword inside function pointer type as primitive
+ Some(_) => format!("prim_{}", token.text()),
+ None => format!("{}_keyword", token.text()),
+ };
+ KeywordHint::new(token.text().to_string(), module)
+ }
+
+ _ => KeywordHint::new(token.text().to_string(), format!("{}_keyword", token.text())),
+ }
+}
assert!(hover.is_none(), "hover not expected but found: {:?}", hover.unwrap());
}
+#[track_caller]
fn check(ra_fixture: &str, expect: Expect) {
let (analysis, position) = fixture::position(ra_fixture);
let hover = analysis
);
}
+#[test]
+fn test_hover_function_show_types() {
+ check(
+ r#"fn foo$0(a: i32, b:i32) -> i32 { 0 }"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo(a: i32, b: i32) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_pointer_show_identifiers() {
+ check(
+ r#"type foo$0 = fn(a: i32, b: i32) -> i32;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type foo = fn(a: i32, b: i32) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_pointer_no_identifier() {
+ check(
+ r#"type foo$0 = fn(i32, _: i32) -> i32;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type foo = fn(i32, i32) -> i32
+ ```
+ "#]],
+ );
+}
+
#[test]
fn test_hover_trait_show_qualifiers() {
check_actions(
let expr = match_ast! {
match let_node {
ast::LetStmt(it) => it.initializer(),
- ast::Condition(it) => it.expr(),
+ ast::LetExpr(it) => it.expr(),
_ => None,
}
}?;
match node {
ast::LetStmt(it) => return it.ty().is_some(),
ast::Param(it) => return it.ty().is_some(),
- ast::MatchArm(_it) => return pat_is_enum_variant(db, bind_pat, pat_ty),
- ast::IfExpr(it) => {
- return it.condition().and_then(|condition| condition.pat()).is_some()
- && pat_is_enum_variant(db, bind_pat, pat_ty);
- },
- ast::WhileExpr(it) => {
- return it.condition().and_then(|condition| condition.pat()).is_some()
- && pat_is_enum_variant(db, bind_pat, pat_ty);
- },
+ ast::MatchArm(_) => return pat_is_enum_variant(db, bind_pat, pat_ty),
+ ast::LetExpr(_) => return pat_is_enum_variant(db, bind_pat, pat_ty),
+ ast::IfExpr(_) => return false,
+ ast::WhileExpr(_) => return false,
ast::ForExpr(it) => {
// We *should* display hint only if user provided "in {expr}" and we know the type of expr (and it's not unit).
// Type of expr should be iterable.
let syntax = sema.parse(position.file_id).syntax().clone();
let make_searcher = |literal_search: bool| {
move |def: Definition| {
- let mut usages =
- def.usages(sema).set_scope(search_scope.clone()).include_self_refs().all();
let declaration = match def {
Definition::Module(module) => {
Some(NavigationTarget::from_module_to_decl(sema.db, module))
nav,
}
});
+ let mut usages =
+ def.usages(sema).set_scope(search_scope.clone()).include_self_refs().all();
if literal_search {
retain_adt_literal_usages(&mut usages, def, sema);
}
"#]],
)
}
+
+ #[test]
+ fn attr() {
+ check(
+ r#"
+//- proc_macros: identity
+
+#[proc_macros::$0identity]
+fn func() {}
+"#,
+ expect![[r#"
+ identity Attribute FileId(1) 1..107 32..40
+
+ FileId(0) 16..24
+ "#]],
+ );
+ check(
+ r#"
+#[proc_macro_attribute]
+fn func$0() {}
+"#,
+ expect![[r#"
+ func Attribute FileId(0) 0..36 27..31
+
+ (no references)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn derive() {
+ check(
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive
+
+#[derive(proc_macros::DeriveIdentity$0)]
+struct Foo;
+"#,
+ expect![[r#"
+ derive_identity Derive FileId(2) 1..107 45..60
+
+ FileId(0) 23..37
+ "#]],
+ )
+ }
}
)
}
+ #[test]
+ fn test_rename_mod_in_macro() {
+ check(
+ "bar",
+ r#"
+//- /foo.rs
+
+//- /lib.rs
+macro_rules! submodule {
+ ($name:ident) => {
+ mod $name;
+ };
+}
+
+submodule!($0foo);
+"#,
+ r#"
+macro_rules! submodule {
+ ($name:ident) => {
+ mod $name;
+ };
+}
+
+submodule!(bar);
+"#,
+ )
+ }
+
#[test]
fn test_enum_variant_from_module_1() {
cov_mark::check!(rename_non_local);
ty_args.format_with(", ", |ty, cb| cb(&ty.display(db)))
);
}
- format_to!(path, "::{}", def_name);
- return Some(path);
+ return Some(format!(r#""{}::{}""#, path, def_name));
}
}
}
},
kind: DocTest {
test_id: Path(
- "Data::foo",
+ "\"Data::foo\"",
),
},
cfg: None,
},
kind: DocTest {
test_id: Path(
- "foo::Foo::foo",
+ "\"foo::Foo::foo\"",
),
},
cfg: None,
},
kind: DocTest {
test_id: Path(
- "Foo<T, U>::t",
+ "\"Foo<T, U>::t\"",
),
},
cfg: None,
continue;
}
Some(item) if sema.is_attr_macro_call(&item) => current_attr_call = Some(item),
+ Some(item) if current_attr_call.is_none() => {
+ let adt = match item {
+ ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
+ ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
+ ast::Item::Union(it) => Some(ast::Adt::Union(it)),
+ _ => None,
+ };
+ match adt {
+ Some(adt) if sema.is_derive_annotated(&adt) => {
+ current_attr_call = Some(adt.into());
+ }
+ _ => (),
+ }
+ }
None if ast::Attr::can_cast(node.kind()) => inside_attribute = true,
_ => (),
},
syntactic_name_ref_highlighting,
node,
),
- NodeOrToken::Token(token) => highlight::token(sema, krate, token).zip(Some(None)),
+ NodeOrToken::Token(token) => highlight::token(sema, token).zip(Some(None)),
};
if let Some((mut highlight, binding_hash)) = element {
if inside_attribute {
Highlight, HlMod, HlTag,
};
-pub(super) fn token(
- sema: &Semantics<RootDatabase>,
- krate: Option<hir::Crate>,
- token: SyntaxToken,
-) -> Option<Highlight> {
+pub(super) fn token(sema: &Semantics<RootDatabase>, token: SyntaxToken) -> Option<Highlight> {
if let Some(comment) = ast::Comment::cast(token.clone()) {
let h = HlTag::Comment;
return Some(match comment.kind().doc {
INT_NUMBER | FLOAT_NUMBER => HlTag::NumericLiteral.into(),
BYTE => HlTag::ByteLiteral.into(),
CHAR => HlTag::CharLiteral.into(),
- IDENT => {
- let tt = ast::TokenTree::cast(token.parent()?)?;
- let ident = ast::Ident::cast(token)?;
+ IDENT if token.parent().and_then(ast::TokenTree::cast).is_some() => {
// from this point on we are inside a token tree, this only happens for identifiers
// that were not mapped down into macro invocations
- (|| {
- let attr = tt.parent_meta()?.parent_attr()?;
- let res = sema.resolve_derive_ident(&attr, &ident)?;
- Some(highlight_def(sema, krate, Definition::from(res)))
- })()
- .unwrap_or_else(|| HlTag::None.into())
+ HlTag::None.into()
}
p if p.is_punct() => punctuation(sema, token, p),
k if k.is_keyword() => keyword(sema, token, k)?,
insert_use::{insert_use, ImportScope},
mod_path_to_ast,
};
-use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxElement};
+use syntax::{ast, AstNode, NodeOrToken, SyntaxElement};
use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
{
ImportAssets::for_ident_pat(&ctx.sema, &pat).zip(Some(pat.syntax().clone().into()))
} else {
- // FIXME: Descend?
- let ident = ctx.find_token_at_offset()?;
- ImportAssets::for_derive_ident(&ctx.sema, &ident).zip(Some(ident.syntax().clone().into()))
+ None
}
}
use ide_db::{
helpers::{
for_each_tail_expr,
- node_ext::{block_as_lone_tail, preorder_expr},
+ node_ext::{block_as_lone_tail, is_pattern_cond, preorder_expr},
FamousDefs,
},
RootDatabase,
return None;
}
- let cond = expr.condition().filter(|cond| !cond.is_pattern_cond())?;
- let cond = cond.expr()?;
+ let cond = expr.condition().filter(|cond| !is_pattern_cond(cond.clone()))?;
let then = expr.then_branch()?;
let else_ = match expr.else_branch()? {
ast::ElseBranch::Block(b) => b,
_ => receiver,
};
let if_expr = make::expr_if(
- make::condition(cond, None),
+ cond,
closure_body.reset_indent(),
Some(ast::ElseBranch::Block(make::block_expr(None, Some(none_path)))),
)
use std::iter::once;
+use ide_db::helpers::node_ext::{is_pattern_cond, single_let};
use syntax::{
ast::{
self,
let cond = if_expr.condition()?;
// Check if there is an IfLet that we can handle.
- let if_let_pat = match cond.pat() {
- None => None, // No IfLet, supported.
- Some(ast::Pat::TupleStructPat(pat)) if pat.fields().count() == 1 => {
- let path = pat.path()?;
- if path.qualifier().is_some() {
- return None;
- }
+ let (if_let_pat, cond_expr) = if is_pattern_cond(cond.clone()) {
+ let let_ = single_let(cond)?;
+ match let_.pat() {
+ Some(ast::Pat::TupleStructPat(pat)) if pat.fields().count() == 1 => {
+ let path = pat.path()?;
+ if path.qualifier().is_some() {
+ return None;
+ }
- let bound_ident = pat.fields().next().unwrap();
- if !ast::IdentPat::can_cast(bound_ident.syntax().kind()) {
- return None;
- }
+ let bound_ident = pat.fields().next().unwrap();
+ if !ast::IdentPat::can_cast(bound_ident.syntax().kind()) {
+ return None;
+ }
- Some((path, bound_ident))
+ (Some((path, bound_ident)), let_.expr()?)
+ }
+ _ => return None, // Unsupported IfLet.
}
- Some(_) => return None, // Unsupported IfLet.
+ } else {
+ (None, cond)
};
- let cond_expr = cond.expr()?;
let then_block = if_expr.then_branch()?;
let then_block = then_block.stmt_list()?;
let then_branch =
make::block_expr(once(make::expr_stmt(early_expression).into()), None);
let cond = invert_boolean_expression(cond_expr);
- make::expr_if(make::condition(cond, None), then_branch, None)
- .indent(if_indent_level)
+ make::expr_if(cond, then_branch, None).indent(if_indent_level)
};
new_expr.syntax().clone_for_update()
}
use std::iter::once;
+use ide_db::helpers::node_ext::is_pattern_cond;
use syntax::{
ast::{
self,
let while_expr = while_kw.parent().and_then(ast::WhileExpr::cast)?;
let while_body = while_expr.loop_body()?;
let while_cond = while_expr.condition()?;
- let while_cond_expr = while_cond.expr()?;
let target = while_expr.syntax().text_range();
acc.add(
let break_block =
make::block_expr(once(make::expr_stmt(make::expr_break(None)).into()), None)
.indent(while_indent_level);
- let block_expr = match while_cond.pat() {
- Some(_) => {
- let if_expr = make::expr_if(while_cond, while_body, Some(break_block.into()));
- let stmts = once(make::expr_stmt(if_expr).into());
- make::block_expr(stmts, None)
- }
- None => {
- let if_cond = make::condition(invert_boolean_expression(while_cond_expr), None);
- let if_expr = make::expr_if(if_cond, break_block, None);
- let stmts =
- once(make::expr_stmt(if_expr).into()).chain(while_body.statements());
- make::block_expr(stmts, while_body.tail_expr())
- }
+ let block_expr = if is_pattern_cond(while_cond.clone()) {
+ let if_expr = make::expr_if(while_cond, while_body, Some(break_block.into()));
+ let stmts = once(make::expr_stmt(if_expr).into());
+ make::block_expr(stmts, None)
+ } else {
+ let if_cond = invert_boolean_expression(while_cond);
+ let if_expr = make::expr_if(if_cond, break_block, None);
+ let stmts = once(make::expr_stmt(if_expr).into()).chain(while_body.statements());
+ make::block_expr(stmts, while_body.tail_expr())
};
let replacement = make::expr_loop(block_expr.indent(while_indent_level));
let stmt = make::expr_stmt(action);
let block = make::block_expr(iter::once(stmt.into()), None);
let controlflow_break_path = make::path_from_text("ControlFlow::Break");
- let condition = make::condition(
+ let condition = make::expr_let(
+ make::tuple_struct_pat(
+ controlflow_break_path,
+ iter::once(make::wildcard_pat().into()),
+ )
+ .into(),
call_expr,
- Some(
- make::tuple_struct_pat(
- controlflow_break_path,
- iter::once(make::wildcard_pat().into()),
- )
- .into(),
- ),
);
- make::expr_if(condition, block, None)
+ make::expr_if(condition.into(), block, None)
}
FlowHandler::IfOption { action } => {
let path = make::ext::ident_path("Some");
let value_pat = make::ext::simple_ident_pat(make::name("value"));
let pattern = make::tuple_struct_pat(path, iter::once(value_pat.into()));
- let cond = make::condition(call_expr, Some(pattern.into()));
+ let cond = make::expr_let(pattern.into(), call_expr);
let value = make::expr_path(make::ext::ident_path("value"));
let action_expr = action.make_result_handler(Some(value));
let action_stmt = make::expr_stmt(action_expr);
let then = make::block_expr(iter::once(action_stmt.into()), None);
- make::expr_if(cond, then, None)
+ make::expr_if(cond.into(), then, None)
}
FlowHandler::MatchOption { none } => {
let some_name = "value";
ast::Item::Enum(it) => replacements.push((it.visibility(), it.syntax().clone())),
ast::Item::ExternCrate(it) => replacements.push((it.visibility(), it.syntax().clone())),
ast::Item::Fn(it) => replacements.push((it.visibility(), it.syntax().clone())),
- ast::Item::Impl(it) => impls.push(it),
+ //Associated item's visibility should not be changed
+ ast::Item::Impl(it) if it.for_token().is_none() => impls.push(it),
ast::Item::MacroRules(it) => replacements.push((it.visibility(), it.syntax().clone())),
ast::Item::MacroDef(it) => replacements.push((it.visibility(), it.syntax().clone())),
ast::Item::Module(it) => replacements.push((it.visibility(), it.syntax().clone())),
vis: Option<ast::Visibility>,
node_or_token_opt: Option<syntax::SyntaxElement>,
) -> Option<()> {
- if let Some(vis) = vis {
- if vis.syntax().text() == "pub" {
- ted::replace(vis.syntax(), make::visibility_pub_crate().syntax().clone_for_update());
- }
- } else {
+ if let None = vis {
if let Some(node_or_token) = node_or_token_opt {
let pub_crate_vis = make::visibility_pub_crate().clone_for_update();
if let Some(node) = node_or_token.as_node() {
pub(crate) inner: SomeType,
}
- pub(crate) struct PrivateStruct1 {
- pub(crate) inner: i32,
+ pub struct PrivateStruct1 {
+ pub inner: i32,
}
impl PrivateStruct {
pub(crate) struct A {}
impl A {
- pub(crate) fn new_a() -> i32 {
+ pub fn new_a() -> i32 {
2
}
}
pub struct PrivateStruct;
$0struct Strukt {
- field: PrivateStruct,
+ field: PrivateStruct,
}$0
struct Strukt1 {
use super::PrivateStruct;
pub(crate) struct Strukt {
- pub(crate) field: PrivateStruct,
+ pub(crate) field: PrivateStruct,
}
}
use super::A;
impl A {
- pub(crate) fn new_a() -> i32 {
+ pub fn new_a() -> i32 {
2
}
}
use super::super::foo::A;
impl A {
- pub(crate) fn new_a() -> i32 {
+ pub fn new_a() -> i32 {
2
}
}
",
)
}
+
+ #[test]
+ fn test_do_not_apply_visibility_modifier_to_trait_impl_items() {
+ check_assist(
+ extract_module,
+ r"
+ trait ATrait {
+ fn function();
+ }
+
+ struct A {}
+
+$0impl ATrait for A {
+ fn function() {}
+}$0
+ ",
+ r"
+ trait ATrait {
+ fn function();
+ }
+
+ struct A {}
+
+mod modname {
+ use super::A;
+
+ use super::ATrait;
+
+ impl ATrait for A {
+ fn function() {}
+ }
+}
+ ",
+ )
+ }
}
+use ide_db::helpers::node_ext::is_pattern_cond;
use syntax::{
ast::{self, AstNode},
T,
return None;
}
+ let cond = expr.condition()?;
// This assist should not apply for if-let.
- if expr.condition()?.is_pattern_cond() {
+ if is_pattern_cond(cond.clone()) {
return None;
}
- let cond = expr.condition()?.expr()?;
let then_node = expr.then_branch()?.syntax().clone();
let else_block = match expr.else_branch()? {
ast::ElseBranch::Block(it) => it,
);
}
+ #[test]
+ fn test_merge_with_nested_self_item() {
+ check_assist(
+ merge_imports,
+ r"
+use std$0::{fmt::{Write, Display}};
+use std::{fmt::{self, Debug}};
+",
+ r"
+use std::{fmt::{Write, Display, self, Debug}};
+",
+ );
+ }
+
+ #[test]
+ fn test_merge_with_nested_self_item2() {
+ check_assist(
+ merge_imports,
+ r"
+use std$0::{fmt::{self, Debug}};
+use std::{fmt::{Write, Display}};
+",
+ r"
+use std::{fmt::{self, Debug, Write, Display}};
+",
+ );
+ }
+
+ #[test]
+ fn test_merge_self_with_nested_self_item() {
+ check_assist(
+ merge_imports,
+ r"
+use std::{fmt$0::{self, Debug}, fmt::{Write, Display}};
+",
+ r"
+use std::{fmt::{self, Debug, Write, Display}};
+",
+ );
+ }
+
+ #[test]
+ fn test_merge_nested_self_and_empty() {
+ check_assist(
+ merge_imports,
+ r"
+use foo::$0{bar::{self}};
+use foo::{bar};
+",
+ r"
+use foo::{bar::{self}};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_nested_empty_and_self() {
+ check_assist(
+ merge_imports,
+ r"
+use foo::$0{bar};
+use foo::{bar::{self}};
+",
+ r"
+use foo::{bar::{self}};
+",
+ )
+ }
+
+ #[test]
+ fn test_merge_nested_list_self_and_glob() {
+ check_assist(
+ merge_imports,
+ r"
+use std$0::{fmt::*};
+use std::{fmt::{self, Display}};
+",
+ r"
+use std::{fmt::{self, *, Display}};
+",
+ )
+ }
+
#[test]
fn test_merge_single_wildcard_diff_prefixes() {
check_assist(
use syntax::{
- ast::{
- edit::AstNodeEdit, make, AstNode, BlockExpr, Condition, ElseBranch, Expr, IfExpr, MatchArm,
- Pat,
- },
+ ast::{edit::AstNodeEdit, make, AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat},
SyntaxKind::WHITESPACE,
};
}
let space_before_guard = guard.syntax().prev_sibling_or_token();
- // FIXME: support `if let` guards too
- if guard.let_token().is_some() {
- return None;
- }
- let guard_condition = guard.expr()?;
+ let guard_condition = guard.condition()?;
let arm_expr = match_arm.expr()?;
- let if_expr = make::expr_if(
- make::condition(guard_condition, None),
- make::block_expr(None, Some(arm_expr.clone())),
- None,
- )
- .indent(arm_expr.indent_level());
+ let if_expr =
+ make::expr_if(guard_condition, make::block_expr(None, Some(arm_expr.clone())), None)
+ .indent(arm_expr.indent_level());
let target = guard.syntax().text_range();
acc.add(
)
}
-// Parses an if-else-if chain to get the conditons and the then branches until we encounter an else
+// Parses an if-else-if chain to get the conditions and the then branches until we encounter an else
// branch or the end.
-fn parse_if_chain(if_expr: IfExpr) -> Option<(Vec<(Condition, BlockExpr)>, Option<BlockExpr>)> {
+fn parse_if_chain(if_expr: IfExpr) -> Option<(Vec<(Expr, BlockExpr)>, Option<BlockExpr>)> {
let mut conds_blocks = Vec::new();
let mut curr_if = if_expr;
let tail = loop {
let cond = curr_if.condition()?;
- // Not support moving if let to arm guard
- if cond.is_pattern_cond() {
- return None;
- }
conds_blocks.push((cond, curr_if.then_branch()?));
match curr_if.else_branch() {
Some(ElseBranch::IfExpr(e)) => {
);
}
+ #[test]
+ fn move_let_guard_to_arm_body_works() {
+ check_assist(
+ move_guard_to_arm_body,
+ r#"
+fn main() {
+ match 92 {
+ x $0if (let 1 = x) => false,
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x => if (let 1 = x) {
+ false
+ },
+ _ => true
+ }
+}
+"#,
+ );
+ }
+
#[test]
fn move_guard_to_arm_body_works_complex_match() {
check_assist(
}
#[test]
- fn move_arm_cond_to_match_guard_if_let_not_works() {
- check_assist_not_applicable(
+ fn move_arm_cond_to_match_guard_if_let_works() {
+ check_assist(
move_arm_cond_to_match_guard,
r#"
fn main() {
match 92 {
- x => if let 62 = x { $0false },
+ x => if let 62 = x && true { $0false },
+ _ => true
+ }
+}
+"#,
+ r#"
+fn main() {
+ match 92 {
+ x if let 62 = x && true => false,
_ => true
}
}
#[test]
fn move_arm_cond_to_match_guard_elseif_iflet() {
- check_assist_not_applicable(
+ check_assist(
move_arm_cond_to_match_guard,
r#"
fn main() {
4
},
}
-}
-"#,
- )
+}"#,
+ r#"
+fn main() {
+ match 92 {
+ 3 => 0,
+ x if x > 10 => 1,
+ x if x > 5 => 2,
+ x if let 4 = 4 => {
+ 42;
+ 3
+ }
+ x => 4,
+ }
+}"#,
+ );
}
#[test]
-use hir::ModuleDef;
-use ide_db::helpers::insert_whitespace_into_node::insert_ws_into;
-use ide_db::helpers::{
- get_path_at_cursor_in_tt, import_assets::NameToImport, mod_path_to_ast,
- parse_tt_as_comma_sep_paths,
+use hir::{InFile, ModuleDef};
+use ide_db::{
+ helpers::{
+ import_assets::NameToImport, insert_whitespace_into_node::insert_ws_into, mod_path_to_ast,
+ },
+ items_locator,
};
-use ide_db::items_locator;
use itertools::Itertools;
use syntax::{
- ast::{self, AstNode, AstToken, HasName},
+ ast::{self, AstNode, HasName},
SyntaxKind::WHITESPACE,
};
// Converts a `derive` impl into a manual one.
//
// ```
+// # //- minicore: derive
// # trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
// #[derive(Deb$0ug, Display)]
// struct S;
acc: &mut Assists,
ctx: &AssistContext,
) -> Option<()> {
- let attr = ctx.find_node_at_offset::<ast::Attr>()?;
- let (name, args) = attr.as_simple_call()?;
- if name != "derive" {
+ let attr = ctx.find_node_at_offset_with_descend::<ast::Attr>()?;
+ let path = attr.path()?;
+ let hir_file = ctx.sema.hir_file_for(attr.syntax());
+ if !hir_file.is_derive_attr_pseudo_expansion(ctx.db()) {
return None;
}
- if !args.syntax().text_range().contains(ctx.offset()) {
- cov_mark::hit!(outside_of_attr_args);
+ let InFile { file_id, value } = hir_file.call_node(ctx.db())?;
+ if file_id.is_macro() {
+ // FIXME: make this work in macro files
return None;
}
+ // collect the derive paths from the #[derive] expansion
+ let current_derives = ctx
+ .sema
+ .parse_or_expand(hir_file)?
+ .descendants()
+ .filter_map(ast::Attr::cast)
+ .filter_map(|attr| attr.path())
+ .collect::<Vec<_>>();
- let ident = args.syntax().token_at_offset(ctx.offset()).find_map(ast::Ident::cast)?;
- let trait_path = get_path_at_cursor_in_tt(&ident)?;
- let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
+ let adt = value.parent().and_then(ast::Adt::cast)?;
+ let attr = ast::Attr::cast(value)?;
+ let args = attr.token_tree()?;
let current_module = ctx.sema.scope(adt.syntax()).module()?;
let current_crate = current_module.krate();
let found_traits = items_locator::items_with_name(
&ctx.sema,
current_crate,
- NameToImport::exact_case_sensitive(trait_path.segments().last()?.to_string()),
+ NameToImport::exact_case_sensitive(path.segments().last()?.to_string()),
items_locator::AssocItemSearch::Exclude,
Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
)
});
let mut no_traits_found = true;
- let current_derives = parse_tt_as_comma_sep_paths(args.clone())?;
- let current_derives = current_derives.as_slice();
for (replace_trait_path, trait_) in found_traits.inspect(|_| no_traits_found = false) {
add_assist(
acc,
&attr,
¤t_derives,
&args,
- &trait_path,
+ &path,
&replace_trait_path,
Some(trait_),
&adt,
)?;
}
if no_traits_found {
- add_assist(acc, ctx, &attr, ¤t_derives, &args, &trait_path, &trait_path, None, &adt)?;
+ add_assist(acc, ctx, &attr, ¤t_derives, &args, &path, &path, None, &adt)?;
}
Some(())
}
let impl_def_with_items =
impl_def_from_trait(&ctx.sema, adt, &annotated_name, trait_, replace_trait_path);
update_attribute(builder, old_derives, old_tree, old_trait_path, attr);
- let trait_path = format!("{}", replace_trait_path);
+ let trait_path = replace_trait_path.to_string();
match (ctx.config.snippet_cap, impl_def_with_items) {
(None, _) => {
builder.insert(insert_pos, generate_trait_impl_text(adt, &trait_path, ""))
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: fmt
+//- minicore: fmt, derive
#[derive(Debu$0g)]
struct Foo {
bar: String,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: fmt
+//- minicore: fmt, derive
#[derive(Debu$0g)]
struct Foo(String, usize);
"#,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: fmt
+//- minicore: fmt, derive
#[derive(Debu$0g)]
struct Foo;
"#,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: fmt
+//- minicore: fmt, derive
#[derive(Debu$0g)]
enum Foo {
Bar,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: fmt
+//- minicore: fmt, derive
#[derive(Debu$0g)]
enum Foo {
Bar(usize, usize),
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: fmt
+//- minicore: fmt, derive
#[derive(Debu$0g)]
enum Foo {
Bar {
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: default
+//- minicore: default, derive
#[derive(Defau$0lt)]
struct Foo {
foo: usize,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: default
+//- minicore: default, derive
#[derive(Defau$0lt)]
struct Foo(usize);
"#,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: default
+//- minicore: default, derive
#[derive(Defau$0lt)]
struct Foo;
"#,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: hash
+//- minicore: hash, derive
#[derive(Has$0h)]
struct Foo {
bin: usize,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: hash
+//- minicore: hash, derive
#[derive(Has$0h)]
struct Foo(usize, usize);
"#,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: hash
+//- minicore: hash, derive
#[derive(Has$0h)]
enum Foo {
Bar,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: clone
+//- minicore: clone, derive
#[derive(Clo$0ne)]
struct Foo {
bin: usize,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: clone
+//- minicore: clone, derive
#[derive(Clo$0ne)]
struct Foo(usize, usize);
"#,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: clone
+//- minicore: clone, derive
#[derive(Clo$0ne)]
struct Foo;
"#,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: clone
+//- minicore: clone, derive
#[derive(Clo$0ne)]
enum Foo {
Bar,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: clone
+//- minicore: clone, derive
#[derive(Clo$0ne)]
enum Foo {
Bar(String),
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: clone
+//- minicore: clone, derive
#[derive(Clo$0ne)]
enum Foo {
Bar {
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: ord
+//- minicore: ord, derive
#[derive(Partial$0Ord)]
struct Foo {
bin: usize,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: ord
+//- minicore: ord, derive
#[derive(Partial$0Ord)]
struct Foo {
bin: usize,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: ord
+//- minicore: ord, derive
#[derive(Partial$0Ord)]
struct Foo(usize, usize, usize);
"#,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: eq
+//- minicore: eq, derive
#[derive(Partial$0Eq)]
struct Foo {
bin: usize,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: eq
+//- minicore: eq, derive
#[derive(Partial$0Eq)]
struct Foo(usize, usize);
"#,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: eq
+//- minicore: eq, derive
#[derive(Partial$0Eq)]
struct Foo;
"#,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: eq
+//- minicore: eq, derive
#[derive(Partial$0Eq)]
enum Foo {
Bar,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: eq
+//- minicore: eq, derive
#[derive(Partial$0Eq)]
enum Foo {
Bar(String),
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: eq
+//- minicore: eq, derive
#[derive(Partial$0Eq)]
enum Foo {
Bar {
check_assist(
replace_derive_with_manual_impl,
r#"
+//- minicore: derive
mod foo {
pub trait Bar {
type Qux;
)
}
#[test]
- fn add_custom_impl_for_unique_input() {
+ fn add_custom_impl_for_unique_input_unknown() {
check_assist(
replace_derive_with_manual_impl,
r#"
+//- minicore: derive
#[derive(Debu$0g)]
struct Foo {
bar: String,
check_assist(
replace_derive_with_manual_impl,
r#"
+//- minicore: derive
#[derive(Debug$0)]
pub struct Foo {
bar: String,
check_assist(
replace_derive_with_manual_impl,
r#"
+//- minicore: derive
#[derive(Display, Debug$0, Serialize)]
struct Foo {}
"#,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: default
+//- minicore: default, derive
#[derive(Defau$0lt)]
struct Foo<T, U> {
foo: T,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: clone
+//- minicore: clone, derive
#[derive(Clo$0ne)]
struct Foo<T: Clone>(T, usize);
"#,
check_assist_not_applicable(
replace_derive_with_manual_impl,
r#"
+//- minicore: derive
#[derive($0)]
struct Foo {}
"#,
check_assist_not_applicable(
replace_derive_with_manual_impl,
r#"
+//- minicore: derive, fmt
#[derive$0(Debug)]
struct Foo {}
"#,
check_assist_not_applicable(
replace_derive_with_manual_impl,
r#"
+//- minicore: derive, fmt
#[derive(Debug)$0]
struct Foo {}
"#,
check_assist_not_applicable(
replace_derive_with_manual_impl,
r#"
+//- minicore: derive
#[allow(non_camel_$0case_types)]
struct Foo {}
"#,
#[test]
fn works_at_start_of_file() {
- cov_mark::check!(outside_of_attr_args);
check_assist_not_applicable(
replace_derive_with_manual_impl,
r#"
+//- minicore: derive, fmt
$0#[derive(Debug)]
struct S;
"#,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: clone
+//- minicore: clone, derive
#[derive(std::fmt::Debug, Clo$0ne)]
pub struct Foo;
"#,
check_assist(
replace_derive_with_manual_impl,
r#"
-//- minicore: fmt
+//- minicore: fmt, derive
#[derive(core::fmt::Deb$0ug, Clone)]
pub struct Foo;
"#,
use std::iter::{self, successors};
use either::Either;
-use ide_db::{defs::NameClass, ty_filter::TryEnum, RootDatabase};
+use ide_db::{
+ defs::NameClass,
+ helpers::node_ext::{is_pattern_cond, single_let},
+ ty_filter::TryEnum,
+ RootDatabase,
+};
use syntax::{
ast::{
self,
None
}
});
- let scrutinee_to_be_expr = if_expr.condition()?.expr()?;
+ let scrutinee_to_be_expr = if_expr.condition()?;
+ let scrutinee_to_be_expr = match single_let(scrutinee_to_be_expr.clone()) {
+ Some(cond) => cond.expr()?,
+ None => scrutinee_to_be_expr,
+ };
let mut pat_seen = false;
let mut cond_bodies = Vec::new();
for if_expr in if_exprs {
let cond = if_expr.condition()?;
- let expr = cond.expr()?;
- let cond = match cond.pat() {
- Some(pat) => {
+ let cond = match single_let(cond.clone()) {
+ Some(let_) => {
+ let pat = let_.pat()?;
+ let expr = let_.expr()?;
+ // FIXME: If one `let` is wrapped in parentheses and the second is not,
+ // we'll exit here.
if scrutinee_to_be_expr.syntax().text() != expr.syntax().text() {
// Only if all condition expressions are equal we can merge them into a match
return None;
pat_seen = true;
Either::Left(pat)
}
- None => Either::Right(expr),
+ // Multiple `let`, unsupported.
+ None if is_pattern_cond(cond.clone()) => return None,
+ None => Either::Right(cond),
};
let body = if_expr.then_branch()?;
cond_bodies.push((cond, body));
}
}
- let condition = make::condition(scrutinee, Some(if_let_pat));
+ let condition = make::expr_let(if_let_pat, scrutinee);
let then_block = make_block_expr(then_expr.reset_indent());
let else_expr = if is_empty_expr(&else_expr) { None } else { Some(else_expr) };
let if_let_expr = make::expr_if(
- condition,
+ condition.into(),
then_block,
else_expr.map(make_block_expr).map(ast::ElseBranch::Block),
)
)
}
+ #[test]
+ fn test_if_let_with_match_let_chain() {
+ check_assist_not_applicable(
+ replace_if_let_with_match,
+ r#"
+fn main() {
+ if $0let true = true && let Some(1) = None {}
+}
+"#,
+ )
+ }
+
#[test]
fn test_if_let_with_match_basic() {
check_assist(
let block =
make::ext::empty_block_expr().indent(IndentLevel::from_node(let_stmt.syntax()));
- let if_ = make::expr_if(make::condition(init, Some(pat)), block, None);
+ let if_ = make::expr_if(make::expr_let(pat, init).into(), block, None);
let stmt = make::expr_stmt(if_);
edit.replace_ast(ast::Stmt::from(let_stmt), ast::Stmt::from(stmt));
ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
};
- shorten_paths(scope.as_syntax_node(), &path.clone_for_update());
+ shorten_paths(scope.as_syntax_node(), &path);
+ let path = drop_generic_args(&path);
// stick the found import in front of the to be replaced path
let path = match path_to_qualifier.and_then(|it| mod_path_to_ast(&it).qualifier()) {
Some(qualifier) => make::path_concat(qualifier, path),
)
}
-/// Adds replacements to `re` that shorten `path` in all descendants of `node`.
+fn drop_generic_args(path: &ast::Path) -> ast::Path {
+ let path = path.clone_for_update();
+ if let Some(segment) = path.segment() {
+ if let Some(generic_args) = segment.generic_arg_list() {
+ ted::remove(generic_args.syntax());
+ }
+ }
+ path
+}
+
+/// Mutates `node` to shorten `path` in all descendants of `node`.
fn shorten_paths(node: &SyntaxNode, path: &ast::Path) {
for child in node.children() {
match_ast! {
fn main() {
drop(0);
}
+",
+ );
+ }
+
+ #[test]
+ fn replace_should_drop_generic_args_in_use() {
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+mod std {
+ pub mod mem {
+ pub fn drop<T>(_: T) {}
+ }
+}
+
+fn main() {
+ std::mem::drop::<usize>$0(0);
+}
+",
+ r"
+use std::mem::drop;
+
+mod std {
+ pub mod mem {
+ pub fn drop<T>(_: T) {}
+ }
+}
+
+fn main() {
+ drop::<usize>(0);
+}
",
);
}
check_doc_test(
"replace_derive_with_manual_impl",
r#####"
+//- minicore: derive
trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
#[derive(Deb$0ug, Display)]
struct S;
}
if let hir::Adt::Struct(strukt) = ctx.expected_type.as_ref()?.as_adt()? {
- let module = if let Some(module) = ctx.module { module } else { strukt.module(ctx.db) };
+ if ctx.path_qual().is_none() {
+ let module = if let Some(module) = ctx.module { module } else { strukt.module(ctx.db) };
+ let path = module.find_use_path(ctx.db, hir::ModuleDef::from(strukt));
- let path = module.find_use_path(ctx.db, hir::ModuleDef::from(strukt));
-
- acc.add_struct_literal(ctx, strukt, path, None);
+ acc.add_struct_literal(ctx, strukt, path, None);
+ }
}
Some(())
(
hir::AssocItem::Function(fn_item),
ImplCompletionKind::All | ImplCompletionKind::Fn,
- ) => add_function_impl(&trigger, acc, ctx, fn_item, hir_impl),
+ ) => add_function_impl(acc, ctx, &trigger, fn_item, hir_impl),
(
hir::AssocItem::TypeAlias(type_item),
ImplCompletionKind::All | ImplCompletionKind::TypeAlias,
- ) => add_type_alias_impl(&trigger, acc, ctx, type_item),
+ ) => add_type_alias_impl(acc, ctx, &trigger, type_item),
(
hir::AssocItem::Const(const_item),
ImplCompletionKind::All | ImplCompletionKind::Const,
- ) => add_const_impl(&trigger, acc, ctx, const_item, hir_impl),
+ ) => add_const_impl(acc, ctx, &trigger, const_item, hir_impl),
_ => {}
}
});
}
fn add_function_impl(
- fn_def_node: &SyntaxNode,
acc: &mut Completions,
ctx: &CompletionContext,
+ fn_def_node: &SyntaxNode,
func: hir::Function,
impl_def: hir::Impl,
) {
}
fn add_type_alias_impl(
- type_def_node: &SyntaxNode,
acc: &mut Completions,
ctx: &CompletionContext,
+ type_def_node: &SyntaxNode,
type_alias: hir::TypeAlias,
) {
let alias_name = type_alias.name(ctx.db).to_smol_str();
}
fn add_const_impl(
- const_def_node: &SyntaxNode,
acc: &mut Completions,
ctx: &CompletionContext,
+ const_def_node: &SyntaxNode,
const_: hir::Const,
impl_def: hir::Impl,
) {
(ty, name)
},
+ ast::LetExpr(it) => {
+ cov_mark::hit!(expected_type_if_let_without_leading_char);
+ let ty = it.pat()
+ .and_then(|pat| self.sema.type_of_pat(&pat))
+ .or_else(|| it.expr().and_then(|it| self.sema.type_of_expr(&it)))
+ .map(TypeInfo::original);
+ (ty, None)
+ },
ast::ArgList(_) => {
cov_mark::hit!(expected_type_fn_param);
ActiveParameter::at_token(
(ty, None)
},
ast::IfExpr(it) => {
- cov_mark::hit!(expected_type_if_let_without_leading_char);
let ty = it.condition()
- .and_then(|cond| cond.expr())
.and_then(|e| self.sema.type_of_expr(&e))
.map(TypeInfo::original);
(ty, None)
return (PatternRefutability::Irrefutable, has_type_ascription)
},
ast::MatchArm(_) => PatternRefutability::Refutable,
- ast::Condition(_) => PatternRefutability::Refutable,
+ ast::LetExpr(_) => PatternRefutability::Refutable,
ast::ForExpr(_) => PatternRefutability::Irrefutable,
_ => PatternRefutability::Irrefutable,
}
struct Foo;
"#,
expect![[r#"
- at input_replace pub macro input_replace
- at identity pub macro identity
+ at identity pub macro identity
"#]],
)
}
)
}
+ #[test]
+ fn derive_no_attrs() {
+ check_derive(
+ r#"
+//- proc_macros: identity
+//- minicore: derive
+#[derive($0)] struct Test;
+"#,
+ expect![[r#""#]],
+ );
+ check_derive(
+ r#"
+//- proc_macros: identity
+//- minicore: derive
+#[derive(i$0)] struct Test;
+"#,
+ expect![[r#""#]],
+ );
+ }
+
#[test]
fn derive_flyimport() {
check_derive(
use stdx::impl_from;
use syntax::{
ast::{self, AstNode},
- match_ast, AstToken, SyntaxKind, SyntaxNode, SyntaxToken,
+ match_ast, SyntaxKind, SyntaxNode, SyntaxToken,
};
use crate::RootDatabase;
token: &SyntaxToken,
) -> Option<IdentClass> {
let parent = token.parent()?;
- // resolve derives if possible
- if let Some(ident) = ast::Ident::cast(token.clone()) {
- let attr = ast::TokenTree::cast(parent.clone())
- .and_then(|tt| tt.parent_meta())
- .and_then(|meta| meta.parent_attr());
- if let Some(attr) = attr {
- return NameRefClass::classify_derive(sema, &attr, &ident)
- .map(IdentClass::NameRefClass);
- }
- }
Self::classify_node(sema, &parent)
}
Definition::Macro(sema.to_def(&ast::Macro::MacroDef(it))?)
}
ast::Item::Const(it) => Definition::Const(sema.to_def(&it)?),
- ast::Item::Fn(it) => Definition::Function(sema.to_def(&it)?),
+ ast::Item::Fn(it) => {
+ let def = sema.to_def(&it)?;
+ def.as_proc_macro(sema.db)
+ .map(Definition::Macro)
+ .unwrap_or(Definition::Function(def))
+ }
ast::Item::Module(it) => Definition::Module(sema.to_def(&it)?),
ast::Item::Static(it) => Definition::Static(sema.to_def(&it)?),
ast::Item::Trait(it) => Definition::Trait(sema.to_def(&it)?),
_ => None,
}
}
-
- pub fn classify_derive(
- sema: &Semantics<RootDatabase>,
- attr: &ast::Attr,
- ident: &ast::Ident,
- ) -> Option<NameRefClass> {
- sema.resolve_derive_ident(&attr, &ident).map(Definition::from).map(NameRefClass::Definition)
- }
}
impl_from!(
pub mod rust_doc;
pub mod format_string;
-use std::{collections::VecDeque, iter};
+use std::collections::VecDeque;
use base_db::FileId;
-use hir::{ItemInNs, MacroDef, ModuleDef, Name, PathResolution, Semantics};
+use hir::{ItemInNs, MacroDef, ModuleDef, Name, Semantics};
use itertools::Itertools;
use syntax::{
ast::{self, make, HasLoopBody},
- AstNode, AstToken, Direction, SyntaxElement, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent,
- T,
+ AstNode, AstToken, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent, T,
};
use crate::{defs::Definition, RootDatabase};
}
}
-/// Parses and returns the derive path at the cursor position in the given attribute, if it is a derive.
-/// This special case is required because the derive macro is a compiler builtin that discards the input derives.
-///
-/// The returned path is synthesized from TokenTree tokens and as such cannot be used with the [`Semantics`].
-pub fn get_path_in_derive_attr(
- sema: &hir::Semantics<RootDatabase>,
- attr: &ast::Attr,
- cursor: &ast::Ident,
-) -> Option<ast::Path> {
- let path = attr.path()?;
- let tt = attr.token_tree()?;
- if !tt.syntax().text_range().contains_range(cursor.syntax().text_range()) {
- return None;
- }
- let scope = sema.scope(attr.syntax());
- let resolved_attr = sema.resolve_path(&path)?;
- let derive = FamousDefs(sema, scope.krate()).core_macros_builtin_derive()?;
- if PathResolution::Macro(derive) != resolved_attr {
- return None;
- }
- get_path_at_cursor_in_tt(cursor)
-}
-
-/// Parses the path the identifier is part of inside a token tree.
-pub fn get_path_at_cursor_in_tt(cursor: &ast::Ident) -> Option<ast::Path> {
- let cursor = cursor.syntax();
- let first = cursor
- .siblings_with_tokens(Direction::Prev)
- .filter_map(SyntaxElement::into_token)
- .take_while(|tok| tok.kind() != T!['('] && tok.kind() != T![,])
- .last()?;
- let path_tokens = first
- .siblings_with_tokens(Direction::Next)
- .filter_map(SyntaxElement::into_token)
- .take_while(|tok| tok != cursor);
-
- syntax::hacks::parse_expr_from_str(&path_tokens.chain(iter::once(cursor.clone())).join(""))
- .and_then(|expr| match expr {
- ast::Expr::PathExpr(it) => it.path(),
- _ => None,
- })
-}
-
/// Picks the token with the highest rank returned by the passed in function.
pub fn pick_best_token(
tokens: TokenAtOffset<SyntaxToken>,
) -> Option<SyntaxToken> {
tokens.max_by_key(move |t| f(t.kind()))
}
+pub fn pick_token<T: AstToken>(mut tokens: TokenAtOffset<SyntaxToken>) -> Option<T> {
+ tokens.find_map(T::cast)
+}
/// Converts the mod path struct into its ast representation.
pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path {
| ast::Expr::TryExpr(_)
| ast::Expr::TupleExpr(_)
| ast::Expr::WhileExpr(_)
+ | ast::Expr::LetExpr(_)
| ast::Expr::YieldExpr(_) => cb(expr),
}
}
use syntax::{
ast::{self, HasName},
utils::path_to_string_stripping_turbo_fish,
- AstNode, AstToken, SyntaxNode,
+ AstNode, SyntaxNode,
};
use crate::{
- helpers::get_path_in_derive_attr,
items_locator::{self, AssocItemSearch, DEFAULT_QUERY_SEARCH_LIMIT},
RootDatabase,
};
})
}
- pub fn for_derive_ident(sema: &Semantics<RootDatabase>, ident: &ast::Ident) -> Option<Self> {
- let attr = ident.syntax().ancestors().find_map(ast::Attr::cast)?;
- let path = get_path_in_derive_attr(sema, &attr, ident)?;
-
- if let Some(_) = path.qualifier() {
- return None;
- }
-
- let name = NameToImport::exact_case_sensitive(path.segment()?.name_ref()?.to_string());
- let candidate_node = attr.syntax().clone();
- Some(Self {
- import_candidate: ImportCandidate::Path(PathImportCandidate { qualifier: None, name }),
- module_with_candidate: sema.scope(&candidate_node).module()?,
- candidate_node,
- })
- }
-
pub fn for_fuzzy_path(
module_with_candidate: Module,
qualifier: Option<ast::Path>,
let tree_contains_self = |tree: &ast::UseTree| {
tree.use_tree_list()
.map(|tree_list| tree_list.use_trees().any(|it| tree_is_self(&it)))
- .unwrap_or(false)
+ // Glob imports aren't part of the use-tree lists,
+ // so they need to be handled explicitly
+ .or_else(|| tree.star_token().map(|_| false))
};
match (tree_contains_self(lhs_t), tree_contains_self(&rhs_t)) {
- (true, false) => continue,
- (false, true) => {
+ (Some(true), None) => continue,
+ (None, Some(true)) => {
ted::replace(lhs_t.syntax(), rhs_t.syntax());
*lhs_t = rhs_t;
continue;
_ => false,
}
}
+
+/// Returns the `let` only if there is exactly one (that is, `let pat = expr`
+/// or `((let pat = expr))`, but not `let pat = expr && expr` or `non_let_expr`).
+pub fn single_let(expr: ast::Expr) -> Option<ast::LetExpr> {
+ match expr {
+ ast::Expr::ParenExpr(expr) => expr.expr().and_then(single_let),
+ ast::Expr::LetExpr(expr) => Some(expr),
+ _ => None,
+ }
+}
+
+pub fn is_pattern_cond(expr: ast::Expr) -> bool {
+ match expr {
+ ast::Expr::BinExpr(expr)
+ if expr.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And)) =>
+ {
+ expr.lhs()
+ .map(is_pattern_cond)
+ .or_else(|| expr.rhs().map(is_pattern_cond))
+ .unwrap_or(false)
+ }
+ ast::Expr::ParenExpr(expr) => expr.expr().map_or(false, is_pattern_cond),
+ ast::Expr::LetExpr(_) => true,
+ _ => false,
+ }
+}
source_change.push_file_system_edit(move_file);
}
- if let Some(InFile { file_id, value: decl_source }) = module.declaration_source(sema.db) {
- let file_id = file_id.original_file(sema.db);
- match decl_source.name() {
- Some(name) => source_change.insert_source_edit(
- file_id,
- TextEdit::replace(name.syntax().text_range(), new_name.to_string()),
- ),
+ if let Some(src) = module.declaration_source(sema.db) {
+ let file_id = src.file_id.original_file(sema.db);
+ match src.value.name() {
+ Some(name) => {
+ if let Some(file_range) =
+ src.with_value(name.syntax()).original_file_range_opt(sema.db)
+ {
+ source_change.insert_source_edit(
+ file_id,
+ TextEdit::replace(file_range.range, new_name.to_string()),
+ )
+ };
+ }
_ => never!("Module source node is missing a name"),
}
}
+
let def = Definition::Module(module);
let usages = def.usages(sema).all();
let ref_edits = usages.iter().map(|(&file_id, references)| {
let mut new_item = item.clone();
new_item.bindings = bindings_builder.copy(&new_item.bindings);
new_item.dot.next();
- let mut vars = Vec::new();
- collect_vars(&mut vars, tokens);
- for var in vars {
- bindings_builder.push_empty(&mut new_item.bindings, &var);
- }
+ collect_vars(
+ &mut |s| {
+ bindings_builder.push_empty(&mut new_item.bindings, &s);
+ },
+ tokens,
+ );
cur_items.push(new_item);
}
cur_items.push(MatchState {
src = it;
res.unmatched_tts += src.len();
}
- res.add_err(ExpandError::binding_error("leftover tokens"));
+ res.add_err(ExpandError::LeftoverTokens);
if let Some(error_reover_item) = error_recover_item {
res.bindings = bindings_builder.build(&error_reover_item);
fn match_leaf(lhs: &tt::Leaf, src: &mut TtIter) -> Result<(), ExpandError> {
let rhs = src
.expect_leaf()
- .map_err(|()| ExpandError::BindingError(format!("expected leaf: `{lhs}`").into()))?;
+ .map_err(|()| ExpandError::binding_error(format!("expected leaf: `{lhs}`")))?;
match (lhs, rhs) {
(
tt::Leaf::Punct(tt::Punct { char: lhs, .. }),
input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens))
}
-fn collect_vars(buf: &mut Vec<SmolStr>, pattern: &MetaTemplate) {
+fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {
for op in pattern.iter() {
match op {
- Op::Var { name, .. } => buf.push(name.clone()),
+ Op::Var { name, .. } => collector_fun(name.clone()),
Op::Leaf(_) => (),
- Op::Subtree { tokens, .. } => collect_vars(buf, tokens),
- Op::Repeat { tokens, .. } => collect_vars(buf, tokens),
+ Op::Subtree { tokens, .. } => collect_vars(collector_fun, tokens),
+ Op::Repeat { tokens, .. } => collect_vars(collector_fun, tokens),
}
}
}
-
impl MetaTemplate {
fn iter_delimited<'a>(&'a self, delimited: Option<&'a tt::Delimiter>) -> OpDelimitedIter<'a> {
OpDelimitedIter { inner: &self.0, idx: 0, delimited }
fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<&Fragment, ExpandError> {
macro_rules! binding_err {
- ($($arg:tt)*) => { ExpandError::BindingError(format!($($arg)*).into()) };
+ ($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) };
}
let mut b: &Binding =
);
return ExpandResult {
value: Fragment::Tokens(Subtree::default().into()),
- err: Some(ExpandError::Other("Expand exceed limit".into())),
+ err: Some(ExpandError::LimitExceeded),
};
}
pub use crate::{
syntax_bridge::{
parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
- syntax_node_to_token_tree_censored, token_tree_to_syntax_node,
+ syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken,
+ SyntheticTokenId,
},
token_map::TokenMap,
};
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum ExpandError {
+ BindingError(Box<Box<str>>),
+ LeftoverTokens,
+ ConversionError,
+ LimitExceeded,
NoMatchingRule,
UnexpectedToken,
- BindingError(Box<str>),
- ConversionError,
- // FIXME: no way mbe should know about proc macros.
- UnresolvedProcMacro,
- Other(Box<str>),
}
impl ExpandError {
- fn binding_error(e: &str) -> ExpandError {
- ExpandError::BindingError(e.into())
+ fn binding_error(e: impl Into<Box<str>>) -> ExpandError {
+ ExpandError::BindingError(Box::new(e.into()))
}
}
ExpandError::UnexpectedToken => f.write_str("unexpected token in input"),
ExpandError::BindingError(e) => f.write_str(e),
ExpandError::ConversionError => f.write_str("could not convert tokens"),
- ExpandError::UnresolvedProcMacro => f.write_str("unresolved proc macro"),
- ExpandError::Other(e) => f.write_str(e),
+ ExpandError::LimitExceeded => f.write_str("Expand exceed limit"),
+ ExpandError::LeftoverTokens => f.write_str("leftover tokens"),
}
}
}
Ok(())
}
+pub type ExpandResult<T> = ValueResult<T, ExpandError>;
+
#[derive(Debug, Clone, Eq, PartialEq)]
-pub struct ExpandResult<T> {
+pub struct ValueResult<T, E> {
pub value: T,
- pub err: Option<ExpandError>,
+ pub err: Option<E>,
}
-impl<T> ExpandResult<T> {
+impl<T, E> ValueResult<T, E> {
pub fn ok(value: T) -> Self {
Self { value, err: None }
}
- pub fn only_err(err: ExpandError) -> Self
+ pub fn only_err(err: E) -> Self
where
T: Default,
{
Self { value: Default::default(), err: Some(err) }
}
- pub fn str_err(err: String) -> Self
- where
- T: Default,
- {
- Self::only_err(ExpandError::Other(err.into()))
+ pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ValueResult<U, E> {
+ ValueResult { value: f(self.value), err: self.err }
}
- pub fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> {
- ExpandResult { value: f(self.value), err: self.err }
+ pub fn map_err<E2>(self, f: impl FnOnce(E) -> E2) -> ValueResult<T, E2> {
+ ValueResult { value: self.value, err: self.err.map(f) }
}
- pub fn result(self) -> Result<T, ExpandError> {
+ pub fn result(self) -> Result<T, E> {
self.err.map_or(Ok(self.value), Err)
}
}
-impl<T: Default> From<Result<T, ExpandError>> for ExpandResult<T> {
- fn from(result: Result<T, ExpandError>) -> Self {
+impl<T: Default, E> From<Result<T, E>> for ValueResult<T, E> {
+ fn from(result: Result<T, E>) -> Self {
result.map_or_else(Self::only_err, Self::ok)
}
}
//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
-use rustc_hash::{FxHashMap, FxHashSet};
-use stdx::non_empty_vec::NonEmptyVec;
+use rustc_hash::FxHashMap;
+use stdx::{always, non_empty_vec::NonEmptyVec};
use syntax::{
ast::{self, make::tokens::doc_comment},
AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
/// Convert the syntax node to a `TokenTree` (what macro
/// will consume).
pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
- syntax_node_to_token_tree_censored(node, &Default::default())
+ let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications(
+ node,
+ Default::default(),
+ 0,
+ Default::default(),
+ Default::default(),
+ );
+ (subtree, token_map)
}
/// Convert the syntax node to a `TokenTree` (what macro will consume)
/// with the censored range excluded.
-pub fn syntax_node_to_token_tree_censored(
+pub fn syntax_node_to_token_tree_with_modifications(
node: &SyntaxNode,
- censor: &FxHashSet<SyntaxNode>,
-) -> (tt::Subtree, TokenMap) {
+ existing_token_map: TokenMap,
+ next_id: u32,
+ replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+ append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+) -> (tt::Subtree, TokenMap, u32) {
let global_offset = node.text_range().start();
- let mut c = Convertor::new(node, global_offset, censor);
+ let mut c = Convertor::new(node, global_offset, existing_token_map, next_id, replace, append);
let subtree = convert_tokens(&mut c);
c.id_alloc.map.shrink_to_fit();
- (subtree, c.id_alloc.map)
+ always!(c.replace.is_empty(), "replace: {:?}", c.replace);
+ always!(c.append.is_empty(), "append: {:?}", c.append);
+ (subtree, c.id_alloc.map, c.id_alloc.next_id)
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct SyntheticTokenId(pub u32);
+
+#[derive(Debug, Clone)]
+pub struct SyntheticToken {
+ pub kind: SyntaxKind,
+ pub text: SmolStr,
+ pub range: TextRange,
+ pub id: SyntheticTokenId,
}
// The following items are what `rustc` macro can be parsed into :
Some(it) => it,
None => break,
};
+ let synth_id = token.synthetic_id(&conv);
let kind = token.kind(&conv);
if kind == COMMENT {
if let Some(tokens) = conv.convert_doc_comment(&token) {
// FIXME: There has to be a better way to do this
// Add the comments token id to the converted doc string
- let id = conv.id_alloc().alloc(range);
+ let id = conv.id_alloc().alloc(range, synth_id);
result.extend(tokens.into_iter().map(|mut tt| {
if let tt::TokenTree::Subtree(sub) = &mut tt {
if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
continue;
}
let tt = if kind.is_punct() && kind != UNDERSCORE {
- assert_eq!(range.len(), TextSize::of('.'));
+ if synth_id.is_none() {
+ assert_eq!(range.len(), TextSize::of('.'));
+ }
if let Some(delim) = subtree.delimiter {
let expected = match delim.kind {
panic!("Token from lexer must be single char: token = {:#?}", token);
}
};
- tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into()
+ tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) })
+ .into()
} else {
macro_rules! make_leaf {
($i:ident) => {
- tt::$i { id: conv.id_alloc().alloc(range), text: token.to_text(conv) }.into()
+ tt::$i { id: conv.id_alloc().alloc(range, synth_id), text: token.to_text(conv) }
+ .into()
};
}
let leaf: tt::Leaf = match kind {
let apostrophe = tt::Leaf::from(tt::Punct {
char: '\'',
spacing: tt::Spacing::Joint,
- id: conv.id_alloc().alloc(r),
+ id: conv.id_alloc().alloc(r, synth_id),
});
result.push(apostrophe.into());
let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
let ident = tt::Leaf::from(tt::Ident {
text: SmolStr::new(&token.to_text(conv)[1..]),
- id: conv.id_alloc().alloc(r),
+ id: conv.id_alloc().alloc(r, synth_id),
});
result.push(ident.into());
continue;
conv.id_alloc().close_delim(entry.idx, None);
let leaf: tt::Leaf = tt::Punct {
- id: conv.id_alloc().alloc(entry.open_range),
+ id: conv.id_alloc().alloc(entry.open_range, None),
char: match entry.subtree.delimiter.unwrap().kind {
tt::DelimiterKind::Parenthesis => '(',
tt::DelimiterKind::Brace => '{',
}
impl TokenIdAlloc {
- fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
+ fn alloc(
+ &mut self,
+ absolute_range: TextRange,
+ synthetic_id: Option<SyntheticTokenId>,
+ ) -> tt::TokenId {
let relative_range = absolute_range - self.global_offset;
let token_id = tt::TokenId(self.next_id);
self.next_id += 1;
self.map.insert(token_id, relative_range);
+ if let Some(id) = synthetic_id {
+ self.map.insert_synthetic(token_id, id);
+ }
token_id
}
fn to_char(&self, ctx: &Ctx) -> Option<char>;
fn to_text(&self, ctx: &Ctx) -> SmolStr;
+
+ fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
}
trait TokenConvertor: Sized {
fn to_text(&self, ctx: &RawConvertor<'_>) -> SmolStr {
ctx.lexed.text(*self).into()
}
+
+ fn synthetic_id(&self, _ctx: &RawConvertor<'a>) -> Option<SyntheticTokenId> {
+ None
+ }
}
impl<'a> TokenConvertor for RawConvertor<'a> {
}
}
-struct Convertor<'c> {
+struct Convertor {
id_alloc: TokenIdAlloc,
current: Option<SyntaxToken>,
+ current_synthetic: Vec<SyntheticToken>,
preorder: PreorderWithTokens,
- censor: &'c FxHashSet<SyntaxNode>,
+ replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+ append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
range: TextRange,
punct_offset: Option<(SyntaxToken, TextSize)>,
}
-impl<'c> Convertor<'c> {
+impl Convertor {
fn new(
node: &SyntaxNode,
global_offset: TextSize,
- censor: &'c FxHashSet<SyntaxNode>,
- ) -> Convertor<'c> {
+ existing_token_map: TokenMap,
+ next_id: u32,
+ mut replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+ mut append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+ ) -> Convertor {
let range = node.text_range();
let mut preorder = node.preorder_with_tokens();
- let first = Self::next_token(&mut preorder, censor);
+ let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
Convertor {
- id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
+ id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } },
current: first,
+ current_synthetic: synthetic,
preorder,
range,
- censor,
+ replace,
+ append,
punct_offset: None,
}
}
fn next_token(
preorder: &mut PreorderWithTokens,
- censor: &FxHashSet<SyntaxNode>,
- ) -> Option<SyntaxToken> {
+ replace: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+ append: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+ ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
while let Some(ev) = preorder.next() {
let ele = match ev {
WalkEvent::Enter(ele) => ele,
+ WalkEvent::Leave(SyntaxElement::Node(node)) => {
+ if let Some(mut v) = append.remove(&node) {
+ if !v.is_empty() {
+ v.reverse();
+ return (None, v);
+ }
+ }
+ continue;
+ }
_ => continue,
};
match ele {
- SyntaxElement::Token(t) => return Some(t),
- SyntaxElement::Node(node) if censor.contains(&node) => preorder.skip_subtree(),
- SyntaxElement::Node(_) => (),
+ SyntaxElement::Token(t) => return (Some(t), Vec::new()),
+ SyntaxElement::Node(node) => {
+ if let Some(mut v) = replace.remove(&node) {
+ preorder.skip_subtree();
+ if !v.is_empty() {
+ v.reverse();
+ return (None, v);
+ }
+ }
+ }
}
}
- None
+ (None, Vec::new())
}
}
#[derive(Debug)]
enum SynToken {
Ordinary(SyntaxToken),
+ // FIXME is this supposed to be `Punct`?
Punch(SyntaxToken, TextSize),
+ Synthetic(SyntheticToken),
}
impl SynToken {
- fn token(&self) -> &SyntaxToken {
+ fn token(&self) -> Option<&SyntaxToken> {
match self {
- SynToken::Ordinary(it) | SynToken::Punch(it, _) => it,
+ SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it),
+ SynToken::Synthetic(_) => None,
}
}
}
-impl<'a> SrcToken<Convertor<'a>> for SynToken {
- fn kind(&self, _ctx: &Convertor<'a>) -> SyntaxKind {
- self.token().kind()
+impl SrcToken<Convertor> for SynToken {
+ fn kind(&self, _ctx: &Convertor) -> SyntaxKind {
+ match self {
+ SynToken::Ordinary(token) => token.kind(),
+ SynToken::Punch(token, _) => token.kind(),
+ SynToken::Synthetic(token) => token.kind,
+ }
}
- fn to_char(&self, _ctx: &Convertor<'a>) -> Option<char> {
+ fn to_char(&self, _ctx: &Convertor) -> Option<char> {
match self {
SynToken::Ordinary(_) => None,
SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
+ SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
+ SynToken::Synthetic(_) => None,
}
}
- fn to_text(&self, _ctx: &Convertor<'a>) -> SmolStr {
- self.token().text().into()
+ fn to_text(&self, _ctx: &Convertor) -> SmolStr {
+ match self {
+ SynToken::Ordinary(token) => token.text().into(),
+ SynToken::Punch(token, _) => token.text().into(),
+ SynToken::Synthetic(token) => token.text.clone(),
+ }
+ }
+
+ fn synthetic_id(&self, _ctx: &Convertor) -> Option<SyntheticTokenId> {
+ match self {
+ SynToken::Synthetic(token) => Some(token.id),
+ _ => None,
+ }
}
}
-impl TokenConvertor for Convertor<'_> {
+impl TokenConvertor for Convertor {
type Token = SynToken;
fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
- convert_doc_comment(token.token())
+ convert_doc_comment(token.token()?)
}
fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
}
}
+ if let Some(synth_token) = self.current_synthetic.pop() {
+ if self.current_synthetic.is_empty() {
+ let (new_current, new_synth) =
+ Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+ self.current = new_current;
+ self.current_synthetic = new_synth;
+ }
+ let range = synth_token.range;
+ return Some((SynToken::Synthetic(synth_token), range));
+ }
+
let curr = self.current.clone()?;
if !&self.range.contains_range(curr.text_range()) {
return None;
}
- self.current = Self::next_token(&mut self.preorder, self.censor);
+ let (new_current, new_synth) =
+ Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+ self.current = new_current;
+ self.current_synthetic = new_synth;
let token = if curr.kind().is_punct() {
self.punct_offset = Some((curr.clone(), 0.into()));
let range = curr.text_range();
}
}
+ if let Some(synth_token) = self.current_synthetic.last() {
+ return Some(SynToken::Synthetic(synth_token.clone()));
+ }
+
let curr = self.current.clone()?;
if !self.range.contains_range(curr.text_range()) {
return None;
use parser::{SyntaxKind, T};
use syntax::{TextRange, TextSize};
+use crate::syntax_bridge::SyntheticTokenId;
+
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
enum TokenTextRange {
Token(TextRange),
pub struct TokenMap {
/// Maps `tt::TokenId` to the *relative* source range.
entries: Vec<(tt::TokenId, TokenTextRange)>,
+ pub synthetic_entries: Vec<(tt::TokenId, SyntheticTokenId)>,
}
impl TokenMap {
.filter_map(move |(_, range)| range.by_kind(kind))
}
+ pub fn synthetic_token_id(&self, token_id: tt::TokenId) -> Option<SyntheticTokenId> {
+ self.synthetic_entries.iter().find(|(tid, _)| *tid == token_id).map(|(_, id)| *id)
+ }
+
pub fn first_range_by_token(
&self,
token_id: tt::TokenId,
pub(crate) fn shrink_to_fit(&mut self) {
self.entries.shrink_to_fit();
+ self.synthetic_entries.shrink_to_fit();
}
pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
self.entries.push((token_id, TokenTextRange::Token(relative_range)));
}
+ pub(crate) fn insert_synthetic(&mut self, token_id: tt::TokenId, id: SyntheticTokenId) {
+ self.synthetic_entries.push((token_id, id));
+ }
+
pub(crate) fn insert_delim(
&mut self,
token_id: tt::TokenId,
}
let err = if error || !cursor.is_root() {
- Some(ExpandError::BindingError(format!("expected {entry_point:?}").into()))
+ Some(ExpandError::binding_error(format!("expected {entry_point:?}")))
} else {
None
};
expr_bp(p, None, r, 1);
}
+/// Parses the expression in `let pattern = expression`.
+/// It needs to be parsed with lower precedence than `&&`, so that
+/// `if let true = true && false` is parsed as `if (let true = true) && (false)`
+/// and not `if let true = (true && false)`.
+fn expr_let(p: &mut Parser) {
+ let r = Restrictions { forbid_structs: true, prefer_stmt: false };
+ expr_bp(p, None, r, 5);
+}
+
pub(super) fn stmt(p: &mut Parser, semicolon: Semicolon) {
if p.eat(T![;]) {
return;
T![%] if p.at(T![%=]) => (1, T![%=]),
T![%] => (11, T![%]),
T![&] if p.at(T![&=]) => (1, T![&=]),
+ // If you update this, remember to update `expr_let()` too.
T![&] if p.at(T![&&]) => (4, T![&&]),
T![&] => (8, T![&]),
T![/] if p.at(T![/=]) => (1, T![/=]),
T!['('] => tuple_expr(p),
T!['['] => array_expr(p),
T![|] => closure_expr(p),
- T![move] if la == T![|] => closure_expr(p),
- T![async] if la == T![|] || (la == T![move] && p.nth(2) == T![|]) => closure_expr(p),
+ T![static] | T![async] | T![move] if la == T![|] => closure_expr(p),
+ T![static] | T![async] if la == T![move] && p.nth(2) == T![|] => closure_expr(p),
+ T![static] if la == T![async] && p.nth(2) == T![|] => closure_expr(p),
+ T![static] if la == T![async] && p.nth(2) == T![move] && p.nth(3) == T![|] => {
+ closure_expr(p)
+ }
T![if] => if_expr(p),
+ T![let] => let_expr(p),
T![loop] => loop_expr(p, None),
T![box] => box_expr(p, None),
// async || {};
// move || {};
// async move || {};
+// static || {};
+// static move || {};
+// static async || {};
+// static async move || {};
// }
fn closure_expr(p: &mut Parser) -> CompletedMarker {
assert!(
|| (p.at(T![move]) && p.nth(1) == T![|])
|| (p.at(T![async]) && p.nth(1) == T![|])
|| (p.at(T![async]) && p.nth(1) == T![move] && p.nth(2) == T![|])
+ || (p.at(T![static]) && p.nth(1) == T![|])
+ || (p.at(T![static]) && p.nth(1) == T![move] && p.nth(2) == T![|])
+ || (p.at(T![static]) && p.nth(1) == T![async] && p.nth(2) == T![|])
+ || (p.at(T![static])
+ && p.nth(1) == T![async]
+ && p.nth(2) == T![move]
+ && p.nth(3) == T![|])
);
let m = p.start();
+ p.eat(T![static]);
p.eat(T![async]);
p.eat(T![move]);
params::param_list_closure(p);
assert!(p.at(T![if]));
let m = p.start();
p.bump(T![if]);
- condition(p);
+ expr_no_struct(p);
block_expr(p);
if p.at(T![else]) {
p.bump(T![else]);
assert!(p.at(T![while]));
let m = m.unwrap_or_else(|| p.start());
p.bump(T![while]);
- condition(p);
+ expr_no_struct(p);
block_expr(p);
m.complete(p, WHILE_EXPR)
}
m.complete(p, FOR_EXPR)
}
-// test cond
-// fn foo() { if let Some(_) = None {} }
-// fn bar() {
-// if let Some(_) | Some(_) = None {}
-// if let | Some(_) = None {}
-// while let Some(_) | Some(_) = None {}
-// while let | Some(_) = None {}
+// test let_expr
+// fn foo() {
+// if let Some(_) = None && true {}
+// while 1 == 5 && (let None = None) {}
// }
-fn condition(p: &mut Parser) {
+fn let_expr(p: &mut Parser) -> CompletedMarker {
let m = p.start();
- if p.eat(T![let]) {
- patterns::pattern_top(p);
- p.expect(T![=]);
- }
- expr_no_struct(p);
- m.complete(p, CONDITION);
+ p.bump(T![let]);
+ patterns::pattern_top(p);
+ p.expect(T![=]);
+ expr_let(p);
+ m.complete(p, LET_EXPR)
}
// test match_expr
assert!(p.at(T![if]));
let m = p.start();
p.bump(T![if]);
- if p.eat(T![let]) {
- patterns::pattern_top(p);
- p.expect(T![=]);
- }
expr(p);
m.complete(p, MATCH_GUARD)
}
IDENT if p.at_contextual_kw(T![macro_rules]) && p.nth(1) == BANG => macro_rules(p, m),
T![const] if (la == IDENT || la == T![_] || la == T![mut]) => consts::konst(p, m),
- T![static] => consts::static_(p, m),
+ T![static] if (la == IDENT || la == T![_] || la == T![mut]) => consts::static_(p, m),
_ => return Err(m),
};
CLOSURE_EXPR,
IF_EXPR,
WHILE_EXPR,
- CONDITION,
LOOP_EXPR,
FOR_EXPR,
CONTINUE_EXPR,
STMT_LIST,
RETURN_EXPR,
YIELD_EXPR,
+ LET_EXPR,
MATCH_EXPR,
MATCH_ARM_LIST,
MATCH_ARM,
TopEntryPoint::Expr,
"let _ = 0;",
expect![[r#"
- ERROR
- LET_KW "let"
- WHITESPACE " "
- UNDERSCORE "_"
- WHITESPACE " "
- EQ "="
- WHITESPACE " "
- INT_NUMBER "0"
- SEMICOLON ";"
- error 0: expected expression
- "#]],
+ ERROR
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0"
+ SEMICOLON ";"
+ "#]],
);
}
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- LITERAL
- TRUE_KW "true"
+ LITERAL
+ TRUE_KW "true"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
fn foo() {
- let foo =
+ let foo = 11
let bar = 1;
let
let baz = 92;
IDENT "foo"
WHITESPACE " "
EQ "="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "11"
WHITESPACE "\n "
LET_STMT
LET_KW "let"
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- LITERAL
- TRUE_KW "true"
+ LITERAL
+ TRUE_KW "true"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
WHILE_EXPR
WHILE_KW "while"
WHITESPACE " "
- CONDITION
- LITERAL
- TRUE_KW "true"
+ LITERAL
+ TRUE_KW "true"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n"
-error 24: expected expression
-error 24: expected SEMICOLON
-error 49: expected pattern
-error 49: expected SEMICOLON
-error 75: expected pattern
-error 75: expected SEMICOLON
-error 98: expected pattern
-error 98: expected SEMICOLON
-error 124: expected pattern
-error 124: expected SEMICOLON
+error 27: expected SEMICOLON
+error 52: expected pattern
+error 52: expected SEMICOLON
+error 78: expected pattern
+error 78: expected SEMICOLON
+error 101: expected pattern
+error 101: expected SEMICOLON
+error 127: expected pattern
+error 127: expected SEMICOLON
ERROR
PLUS "+"
WHITESPACE " "
- EXPR_STMT
- TUPLE_EXPR
- L_PAREN "("
- FOR_EXPR
- FOR_KW "for"
- PATH_PAT
- PATH
- PATH_SEGMENT
- L_ANGLE "<"
- ERROR
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
- WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ FOR_EXPR
+ FOR_KW "for"
+ PATH_PAT
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ ERROR
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ BIN_EXPR
BIN_EXPR
BIN_EXPR
BIN_EXPR
- BIN_EXPR
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Trait"
- L_ANGLE "<"
- ERROR
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
- ERROR
- R_PAREN ")"
- WHITESPACE " "
- PLUS "+"
- WHITESPACE " "
- PAREN_EXPR
- L_PAREN "("
PATH_EXPR
PATH
PATH_SEGMENT
NAME_REF
- IDENT "Copy"
+ IDENT "Trait"
+ L_ANGLE "<"
+ ERROR
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ ERROR
R_PAREN ")"
- R_ANGLE ">"
- ERROR
- SEMICOLON ";"
- WHITESPACE "\n "
- LET_STMT
- LET_KW "let"
- WHITESPACE " "
- WILDCARD_PAT
- UNDERSCORE "_"
- COLON ":"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_PAREN ")"
+ R_ANGLE ">"
+ ERROR
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ ERROR
+ COLON ":"
WHITESPACE " "
- DYN_TRAIT_TYPE
- TYPE_BOUND_LIST
- TYPE_BOUND
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Box"
- GENERIC_ARG_LIST
- L_ANGLE "<"
- TYPE_ARG
- PAREN_TYPE
- L_PAREN "("
- FOR_TYPE
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Trait"
- GENERIC_ARG_LIST
- L_ANGLE "<"
- LIFETIME_ARG
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
- R_PAREN ")"
- WHITESPACE " "
- PLUS "+"
- WHITESPACE " "
- TYPE_BOUND
- L_PAREN "("
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Copy"
- R_PAREN ")"
- WHITESPACE " "
- PLUS "+"
- WHITESPACE " "
- TYPE_BOUND
+ BIN_EXPR
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ L_ANGLE "<"
+ TUPLE_EXPR
L_PAREN "("
- QUESTION "?"
- PATH_TYPE
+ FOR_EXPR
+ FOR_KW "for"
+ PATH_PAT
+ PATH
+ PATH_SEGMENT
+ L_ANGLE "<"
+ ERROR
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ BIN_EXPR
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ L_ANGLE "<"
+ ERROR
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ ERROR
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Copy"
+ R_PAREN ")"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ ERROR
+ QUESTION "?"
+ PATH_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "Sized"
R_PAREN ")"
- ERROR
- R_ANGLE ">"
- SEMICOLON ";"
+ R_ANGLE ">"
+ ERROR
+ SEMICOLON ";"
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n"
error 179: expected expression
error 180: expected a block
error 180: expected COMMA
-error 180: expected expression
-error 180: expected R_PAREN
-error 180: expected SEMICOLON
-error 215: expected COMMA
-error 215: expected R_ANGLE
-error 235: expected SEMICOLON
-error 235: expected expression
+error 190: expected EQ
+error 190: expected expression
+error 191: expected COMMA
+error 201: expected type
+error 204: expected IN_KW
+error 211: expected expression
+error 214: expected expression
+error 228: expected expression
+error 229: expected R_PAREN
+error 229: expected a block
+error 229: expected COMMA
+error 236: expected expression
+error 237: expected COMMA
+error 237: expected expression
+error 237: expected R_PAREN
+++ /dev/null
-SOURCE_FILE@0..197
- FN@0..37
- FN_KW@0..2 "fn"
- WHITESPACE@2..3 " "
- NAME@3..6
- IDENT@3..6 "foo"
- PARAM_LIST@6..8
- L_PAREN@6..7 "("
- R_PAREN@7..8 ")"
- WHITESPACE@8..9 " "
- BLOCK_EXPR@9..37
- STMT_LIST@9..37
- L_CURLY@9..10 "{"
- WHITESPACE@10..11 " "
- IF_EXPR@11..35
- IF_KW@11..13 "if"
- WHITESPACE@13..14 " "
- CONDITION@14..32
- LET_KW@14..17 "let"
- WHITESPACE@17..18 " "
- TUPLE_STRUCT_PAT@18..25
- PATH@18..22
- PATH_SEGMENT@18..22
- NAME_REF@18..22
- IDENT@18..22 "Some"
- L_PAREN@22..23 "("
- WILDCARD_PAT@23..24
- UNDERSCORE@23..24 "_"
- R_PAREN@24..25 ")"
- WHITESPACE@25..26 " "
- EQ@26..27 "="
- WHITESPACE@27..28 " "
- PATH_EXPR@28..32
- PATH@28..32
- PATH_SEGMENT@28..32
- NAME_REF@28..32
- IDENT@28..32 "None"
- WHITESPACE@32..33 " "
- BLOCK_EXPR@33..35
- STMT_LIST@33..35
- L_CURLY@33..34 "{"
- R_CURLY@34..35 "}"
- WHITESPACE@35..36 " "
- R_CURLY@36..37 "}"
- WHITESPACE@37..38 "\n"
- FN@38..196
- FN_KW@38..40 "fn"
- WHITESPACE@40..41 " "
- NAME@41..44
- IDENT@41..44 "bar"
- PARAM_LIST@44..46
- L_PAREN@44..45 "("
- R_PAREN@45..46 ")"
- WHITESPACE@46..47 " "
- BLOCK_EXPR@47..196
- STMT_LIST@47..196
- L_CURLY@47..48 "{"
- WHITESPACE@48..53 "\n "
- EXPR_STMT@53..87
- IF_EXPR@53..87
- IF_KW@53..55 "if"
- WHITESPACE@55..56 " "
- CONDITION@56..84
- LET_KW@56..59 "let"
- WHITESPACE@59..60 " "
- OR_PAT@60..77
- TUPLE_STRUCT_PAT@60..67
- PATH@60..64
- PATH_SEGMENT@60..64
- NAME_REF@60..64
- IDENT@60..64 "Some"
- L_PAREN@64..65 "("
- WILDCARD_PAT@65..66
- UNDERSCORE@65..66 "_"
- R_PAREN@66..67 ")"
- WHITESPACE@67..68 " "
- PIPE@68..69 "|"
- WHITESPACE@69..70 " "
- TUPLE_STRUCT_PAT@70..77
- PATH@70..74
- PATH_SEGMENT@70..74
- NAME_REF@70..74
- IDENT@70..74 "Some"
- L_PAREN@74..75 "("
- WILDCARD_PAT@75..76
- UNDERSCORE@75..76 "_"
- R_PAREN@76..77 ")"
- WHITESPACE@77..78 " "
- EQ@78..79 "="
- WHITESPACE@79..80 " "
- PATH_EXPR@80..84
- PATH@80..84
- PATH_SEGMENT@80..84
- NAME_REF@80..84
- IDENT@80..84 "None"
- WHITESPACE@84..85 " "
- BLOCK_EXPR@85..87
- STMT_LIST@85..87
- L_CURLY@85..86 "{"
- R_CURLY@86..87 "}"
- WHITESPACE@87..92 "\n "
- EXPR_STMT@92..118
- IF_EXPR@92..118
- IF_KW@92..94 "if"
- WHITESPACE@94..95 " "
- CONDITION@95..115
- LET_KW@95..98 "let"
- WHITESPACE@98..99 " "
- PIPE@99..100 "|"
- WHITESPACE@100..101 " "
- TUPLE_STRUCT_PAT@101..108
- PATH@101..105
- PATH_SEGMENT@101..105
- NAME_REF@101..105
- IDENT@101..105 "Some"
- L_PAREN@105..106 "("
- WILDCARD_PAT@106..107
- UNDERSCORE@106..107 "_"
- R_PAREN@107..108 ")"
- WHITESPACE@108..109 " "
- EQ@109..110 "="
- WHITESPACE@110..111 " "
- PATH_EXPR@111..115
- PATH@111..115
- PATH_SEGMENT@111..115
- NAME_REF@111..115
- IDENT@111..115 "None"
- WHITESPACE@115..116 " "
- BLOCK_EXPR@116..118
- STMT_LIST@116..118
- L_CURLY@116..117 "{"
- R_CURLY@117..118 "}"
- WHITESPACE@118..123 "\n "
- EXPR_STMT@123..160
- WHILE_EXPR@123..160
- WHILE_KW@123..128 "while"
- WHITESPACE@128..129 " "
- CONDITION@129..157
- LET_KW@129..132 "let"
- WHITESPACE@132..133 " "
- OR_PAT@133..150
- TUPLE_STRUCT_PAT@133..140
- PATH@133..137
- PATH_SEGMENT@133..137
- NAME_REF@133..137
- IDENT@133..137 "Some"
- L_PAREN@137..138 "("
- WILDCARD_PAT@138..139
- UNDERSCORE@138..139 "_"
- R_PAREN@139..140 ")"
- WHITESPACE@140..141 " "
- PIPE@141..142 "|"
- WHITESPACE@142..143 " "
- TUPLE_STRUCT_PAT@143..150
- PATH@143..147
- PATH_SEGMENT@143..147
- NAME_REF@143..147
- IDENT@143..147 "Some"
- L_PAREN@147..148 "("
- WILDCARD_PAT@148..149
- UNDERSCORE@148..149 "_"
- R_PAREN@149..150 ")"
- WHITESPACE@150..151 " "
- EQ@151..152 "="
- WHITESPACE@152..153 " "
- PATH_EXPR@153..157
- PATH@153..157
- PATH_SEGMENT@153..157
- NAME_REF@153..157
- IDENT@153..157 "None"
- WHITESPACE@157..158 " "
- BLOCK_EXPR@158..160
- STMT_LIST@158..160
- L_CURLY@158..159 "{"
- R_CURLY@159..160 "}"
- WHITESPACE@160..165 "\n "
- WHILE_EXPR@165..194
- WHILE_KW@165..170 "while"
- WHITESPACE@170..171 " "
- CONDITION@171..191
- LET_KW@171..174 "let"
- WHITESPACE@174..175 " "
- PIPE@175..176 "|"
- WHITESPACE@176..177 " "
- TUPLE_STRUCT_PAT@177..184
- PATH@177..181
- PATH_SEGMENT@177..181
- NAME_REF@177..181
- IDENT@177..181 "Some"
- L_PAREN@181..182 "("
- WILDCARD_PAT@182..183
- UNDERSCORE@182..183 "_"
- R_PAREN@183..184 ")"
- WHITESPACE@184..185 " "
- EQ@185..186 "="
- WHITESPACE@186..187 " "
- PATH_EXPR@187..191
- PATH@187..191
- PATH_SEGMENT@187..191
- NAME_REF@187..191
- IDENT@187..191 "None"
- WHITESPACE@191..192 " "
- BLOCK_EXPR@192..194
- STMT_LIST@192..194
- L_CURLY@192..193 "{"
- R_CURLY@193..194 "}"
- WHITESPACE@194..195 "\n"
- R_CURLY@195..196 "}"
- WHITESPACE@196..197 "\n"
+++ /dev/null
-fn foo() { if let Some(_) = None {} }
-fn bar() {
- if let Some(_) | Some(_) = None {}
- if let | Some(_) = None {}
- while let Some(_) | Some(_) = None {}
- while let | Some(_) = None {}
-}
+++ /dev/null
-SOURCE_FILE
- FN
- FN_KW "fn"
- WHITESPACE " "
- NAME
- IDENT "foo"
- PARAM_LIST
- L_PAREN "("
- R_PAREN ")"
- WHITESPACE " "
- BLOCK_EXPR
- STMT_LIST
- L_CURLY "{"
- WHITESPACE " "
- IF_EXPR
- IF_KW "if"
- WHITESPACE " "
- CONDITION
- LET_KW "let"
- WHITESPACE " "
- TUPLE_STRUCT_PAT
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Some"
- L_PAREN "("
- WILDCARD_PAT
- UNDERSCORE "_"
- R_PAREN ")"
- WHITESPACE " "
- EQ "="
- WHITESPACE " "
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "None"
- WHITESPACE " "
- BLOCK_EXPR
- STMT_LIST
- L_CURLY "{"
- R_CURLY "}"
- WHITESPACE " "
- R_CURLY "}"
- WHITESPACE "\n"
- FN
- FN_KW "fn"
- WHITESPACE " "
- NAME
- IDENT "bar"
- PARAM_LIST
- L_PAREN "("
- R_PAREN ")"
- WHITESPACE " "
- BLOCK_EXPR
- STMT_LIST
- L_CURLY "{"
- WHITESPACE "\n "
- EXPR_STMT
- IF_EXPR
- IF_KW "if"
- WHITESPACE " "
- CONDITION
- LET_KW "let"
- WHITESPACE " "
- OR_PAT
- TUPLE_STRUCT_PAT
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Some"
- L_PAREN "("
- WILDCARD_PAT
- UNDERSCORE "_"
- R_PAREN ")"
- WHITESPACE " "
- PIPE "|"
- WHITESPACE " "
- TUPLE_STRUCT_PAT
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Some"
- L_PAREN "("
- WILDCARD_PAT
- UNDERSCORE "_"
- R_PAREN ")"
- WHITESPACE " "
- EQ "="
- WHITESPACE " "
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "None"
- WHITESPACE " "
- BLOCK_EXPR
- STMT_LIST
- L_CURLY "{"
- R_CURLY "}"
- WHITESPACE "\n "
- EXPR_STMT
- IF_EXPR
- IF_KW "if"
- WHITESPACE " "
- CONDITION
- LET_KW "let"
- WHITESPACE " "
- PIPE "|"
- WHITESPACE " "
- TUPLE_STRUCT_PAT
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Some"
- L_PAREN "("
- WILDCARD_PAT
- UNDERSCORE "_"
- R_PAREN ")"
- WHITESPACE " "
- EQ "="
- WHITESPACE " "
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "None"
- WHITESPACE " "
- BLOCK_EXPR
- STMT_LIST
- L_CURLY "{"
- R_CURLY "}"
- WHITESPACE "\n "
- EXPR_STMT
- WHILE_EXPR
- WHILE_KW "while"
- WHITESPACE " "
- CONDITION
- LET_KW "let"
- WHITESPACE " "
- OR_PAT
- TUPLE_STRUCT_PAT
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Some"
- L_PAREN "("
- WILDCARD_PAT
- UNDERSCORE "_"
- R_PAREN ")"
- WHITESPACE " "
- PIPE "|"
- WHITESPACE " "
- TUPLE_STRUCT_PAT
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Some"
- L_PAREN "("
- WILDCARD_PAT
- UNDERSCORE "_"
- R_PAREN ")"
- WHITESPACE " "
- EQ "="
- WHITESPACE " "
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "None"
- WHITESPACE " "
- BLOCK_EXPR
- STMT_LIST
- L_CURLY "{"
- R_CURLY "}"
- WHITESPACE "\n "
- WHILE_EXPR
- WHILE_KW "while"
- WHITESPACE " "
- CONDITION
- LET_KW "let"
- WHITESPACE " "
- PIPE "|"
- WHITESPACE " "
- TUPLE_STRUCT_PAT
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Some"
- L_PAREN "("
- WILDCARD_PAT
- UNDERSCORE "_"
- R_PAREN ")"
- WHITESPACE " "
- EQ "="
- WHITESPACE " "
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "None"
- WHITESPACE " "
- BLOCK_EXPR
- STMT_LIST
- L_CURLY "{"
- R_CURLY "}"
- WHITESPACE "\n"
- R_CURLY "}"
- WHITESPACE "\n"
--- /dev/null
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BIN_EXPR
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "None"
+ WHITESPACE " "
+ AMP2 "&&"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "5"
+ WHITESPACE " "
+ AMP2 "&&"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "None"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "None"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
--- /dev/null
+fn foo() {
+ if let Some(_) = None && true {}
+ while 1 == 5 && (let None = None) {}
+}
--- /dev/null
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ IF_EXPR
+ IF_KW "if"
+ WHITESPACE " "
+ BIN_EXPR
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ TUPLE_STRUCT_PAT
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Some"
+ L_PAREN "("
+ WILDCARD_PAT
+ UNDERSCORE "_"
+ R_PAREN ")"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "None"
+ WHITESPACE " "
+ AMP2 "&&"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n "
+ WHILE_EXPR
+ WHILE_KW "while"
+ WHITESPACE " "
+ BIN_EXPR
+ BIN_EXPR
+ LITERAL
+ INT_NUMBER "1"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "5"
+ WHITESPACE " "
+ AMP2 "&&"
+ WHITESPACE " "
+ PAREN_EXPR
+ L_PAREN "("
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "None"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "None"
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
WHILE_EXPR
WHILE_KW "while"
WHITESPACE " "
- CONDITION
- LITERAL
- TRUE_KW "true"
+ LITERAL
+ TRUE_KW "true"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
WHILE_EXPR
WHILE_KW "while"
WHITESPACE " "
- CONDITION
+ LET_EXPR
LET_KW "let"
WHITESPACE " "
TUPLE_STRUCT_PAT
WHILE_EXPR
WHILE_KW "while"
WHITESPACE " "
- CONDITION
- BLOCK_EXPR
- STMT_LIST
- L_CURLY "{"
- WHITESPACE " "
- LITERAL
- TRUE_KW "true"
- WHITESPACE " "
- R_CURLY "}"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ R_CURLY "}"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- LITERAL
- TRUE_KW "true"
+ LITERAL
+ TRUE_KW "true"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- LITERAL
- TRUE_KW "true"
+ LITERAL
+ TRUE_KW "true"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- LITERAL
- TRUE_KW "true"
+ LITERAL
+ TRUE_KW "true"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- LITERAL
- FALSE_KW "false"
+ LITERAL
+ FALSE_KW "false"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "S"
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- BLOCK_EXPR
- STMT_LIST
- L_CURLY "{"
- WHITESPACE " "
- LITERAL
- TRUE_KW "true"
- WHITESPACE " "
- R_CURLY "}"
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ LITERAL
+ TRUE_KW "true"
+ WHITESPACE " "
+ R_CURLY "}"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- BREAK_EXPR
- BREAK_KW "break"
+ BREAK_EXPR
+ BREAK_KW "break"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
WHILE_EXPR
WHILE_KW "while"
WHITESPACE " "
- CONDITION
- BREAK_EXPR
- BREAK_KW "break"
+ BREAK_EXPR
+ BREAK_KW "break"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- LITERAL
- TRUE_KW "true"
+ LITERAL
+ TRUE_KW "true"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
WHILE_EXPR
WHILE_KW "while"
WHITESPACE " "
- CONDITION
- LITERAL
- TRUE_KW "true"
+ LITERAL
+ TRUE_KW "true"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
async || {};
move || {};
async move || {};
+ static || {};
+ static move || {};
+ static async || {};
+ static async move || {};
}
L_CURLY "{"
R_CURLY "}"
SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ STATIC_KW "static"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ STATIC_KW "static"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ STATIC_KW "static"
+ WHITESPACE " "
+ ASYNC_KW "async"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
+ WHITESPACE "\n "
+ EXPR_STMT
+ CLOSURE_EXPR
+ STATIC_KW "static"
+ WHITESPACE " "
+ ASYNC_KW "async"
+ WHITESPACE " "
+ MOVE_KW "move"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ SEMICOLON ";"
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n"
WHITESPACE " "
WHILE_KW "while"
WHITESPACE " "
- CONDITION
- LITERAL
- TRUE_KW "true"
+ LITERAL
+ TRUE_KW "true"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
MATCH_GUARD
IF_KW "if"
WHITESPACE " "
- LET_KW "let"
- WHITESPACE " "
- IDENT_PAT
- NAME
- IDENT "foo"
- WHITESPACE " "
- EQ "="
- WHITESPACE " "
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "bar"
+ LET_EXPR
+ LET_KW "let"
+ WHITESPACE " "
+ IDENT_PAT
+ NAME
+ IDENT "foo"
+ WHITESPACE " "
+ EQ "="
+ WHITESPACE " "
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
WHITESPACE " "
FAT_ARROW "=>"
WHITESPACE " "
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- CALL_EXPR
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "condition_not_met"
- ARG_LIST
- L_PAREN "("
- R_PAREN ")"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "condition_not_met"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- CALL_EXPR
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "condition_not_met"
- ARG_LIST
- L_PAREN "("
- R_PAREN ")"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "condition_not_met"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- CALL_EXPR
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "foo"
- ARG_LIST
- L_PAREN "("
- R_PAREN ")"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "foo"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- CALL_EXPR
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "bar"
- ARG_LIST
- L_PAREN "("
- R_PAREN ")"
+ CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "bar"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
WHILE_EXPR
WHILE_KW "while"
WHITESPACE " "
- CONDITION
- PREFIX_EXPR
- BANG "!"
- METHOD_CALL_EXPR
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "x"
- DOT "."
- NAME_REF
- IDENT "get"
- ARG_LIST
- L_PAREN "("
- R_PAREN ")"
+ PREFIX_EXPR
+ BANG "!"
+ METHOD_CALL_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "x"
+ DOT "."
+ NAME_REF
+ IDENT "get"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
WHILE_EXPR
WHILE_KW "while"
WHITESPACE " "
- CONDITION
- PAREN_EXPR
- L_PAREN "("
- RETURN_EXPR
- RETURN_KW "return"
- R_PAREN ")"
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- PAREN_EXPR
- L_PAREN "("
- RETURN_EXPR
- RETURN_KW "return"
- R_PAREN ")"
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- PAREN_EXPR
- L_PAREN "("
- RETURN_EXPR
- RETURN_KW "return"
- R_PAREN ")"
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- PAREN_EXPR
- L_PAREN "("
- RETURN_EXPR
- RETURN_KW "return"
- R_PAREN ")"
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- PAREN_EXPR
- L_PAREN "("
- RETURN_EXPR
- RETURN_KW "return"
- R_PAREN ")"
+ PAREN_EXPR
+ L_PAREN "("
+ RETURN_EXPR
+ RETURN_KW "return"
+ R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- BREAK_EXPR
- BREAK_KW "break"
+ BREAK_EXPR
+ BREAK_KW "break"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- BIN_EXPR
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "i"
- WHITESPACE " "
- EQ2 "=="
- WHITESPACE " "
- LITERAL
- INT_NUMBER "1"
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i"
+ WHITESPACE " "
+ EQ2 "=="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "1"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- BIN_EXPR
- PATH_EXPR
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "u8"
- WHITESPACE " "
- NEQ "!="
- WHITESPACE " "
- LITERAL
- INT_NUMBER "0u8"
+ BIN_EXPR
+ PATH_EXPR
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
+ WHITESPACE " "
+ NEQ "!="
+ WHITESPACE " "
+ LITERAL
+ INT_NUMBER "0u8"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
+ LET_EXPR
LET_KW "let"
WHITESPACE " "
TUPLE_STRUCT_PAT
IF_EXPR
IF_KW "if"
WHITESPACE " "
- CONDITION
- BIN_EXPR
- CAST_EXPR
- METHOD_CALL_EXPR
- LITERAL
- FLOAT_NUMBER "1.0f32"
- DOT "."
- NAME_REF
- IDENT "floor"
- ARG_LIST
- L_PAREN "("
- R_PAREN ")"
- WHITESPACE " "
- AS_KW "as"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "i64"
+ BIN_EXPR
+ CAST_EXPR
+ METHOD_CALL_EXPR
+ LITERAL
+ FLOAT_NUMBER "1.0f32"
+ DOT "."
+ NAME_REF
+ IDENT "floor"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
WHITESPACE " "
- NEQ "!="
+ AS_KW "as"
WHITESPACE " "
- CAST_EXPR
- METHOD_CALL_EXPR
- LITERAL
- FLOAT_NUMBER "1.0f32"
- DOT "."
- NAME_REF
- IDENT "floor"
- ARG_LIST
- L_PAREN "("
- R_PAREN ")"
- WHITESPACE " "
- AS_KW "as"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "i64"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i64"
+ WHITESPACE " "
+ NEQ "!="
+ WHITESPACE " "
+ CAST_EXPR
+ METHOD_CALL_EXPR
+ LITERAL
+ FLOAT_NUMBER "1.0f32"
+ DOT "."
+ NAME_REF
+ IDENT "floor"
+ ARG_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "i64"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
WHILE_EXPR
WHILE_KW "while"
WHITESPACE " "
- CONDITION
- LITERAL
- TRUE_KW "true"
+ LITERAL
+ TRUE_KW "true"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
pub kind: TargetKind,
/// Is this target a proc-macro
pub is_proc_macro: bool,
+ /// Required features of the target without which it won't build
+ pub required_features: Vec<String>,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
root: AbsPathBuf::assert(PathBuf::from(&meta_tgt.src_path)),
kind: TargetKind::new(meta_tgt.kind.as_slice()),
is_proc_macro,
+ required_features: meta_tgt.required_features.clone(),
});
pkg_data.targets.push(tgt);
}
),
moniker_provider: None,
experimental: Some(json!({
+ "externalDocs": true,
+ "hoverRange": true,
"joinLines": true,
- "openCargoToml": true,
- "ssr": true,
+ "matchingBrace": true,
+ "moveItem": true,
"onEnter": true,
+ "openCargoToml": true,
"parentModule": true,
- "hoverRange": true,
"runnables": {
"kinds": [ "cargo" ],
},
+ "ssr": true,
"workspaceSymbolScopeKindFiltering": true,
})),
}
//! See `CargoTargetSpec`
+use std::mem;
+
use cfg::{CfgAtom, CfgExpr};
use ide::{FileId, RunnableKind, TestId};
use project_model::{self, ManifestPath, TargetKind};
pub(crate) package: String,
pub(crate) target: String,
pub(crate) target_kind: TargetKind,
+ pub(crate) required_features: Vec<String>,
}
impl CargoTargetSpec {
pub(crate) fn runnable_args(
snap: &GlobalStateSnapshot,
- spec: Option<CargoTargetSpec>,
+ mut spec: Option<CargoTargetSpec>,
kind: &RunnableKind,
cfg: &Option<CfgExpr>,
) -> Result<(Vec<String>, Vec<String>)> {
let mut args = Vec::new();
let mut extra_args = Vec::new();
+
+ let target_required_features =
+ spec.as_mut().map(|spec| mem::take(&mut spec.required_features)).unwrap_or(Vec::new());
+
match kind {
RunnableKind::Test { test_id, attr } => {
args.push("test".to_string());
let cargo_config = snap.config.cargo();
if cargo_config.all_features {
args.push("--all-features".to_string());
+
+ for feature in target_required_features {
+ args.push("--features".to_string());
+ args.push(feature);
+ }
} else {
let mut features = Vec::new();
if let Some(cfg) = cfg.as_ref() {
required_features(cfg, &mut features);
}
- for feature in cargo_config.features {
- features.push(feature.clone());
- }
+
+ features.extend(cargo_config.features);
+ features.extend(target_required_features);
+
features.dedup();
for feature in features {
args.push("--features".to_string());
package: cargo_ws.package_flag(package_data),
target: target_data.name.clone(),
target_kind: target_data.kind,
+ required_features: target_data.required_features.clone(),
};
Ok(Some(res))
/// snapshot of the file systems, and `analysis_host`, which stores our
/// incremental salsa database.
///
-/// Note that this struct has more than on impl in various modules!
+/// Note that this struct has more than one impl in various modules!
pub(crate) struct GlobalState {
sender: Sender<lsp_server::Message>,
req_queue: ReqQueue,
let char_typed = params.ch.chars().next().unwrap_or('\0');
let text = snap.analysis.file_text(position.file_id)?;
- if !text[usize::from(position.offset)..].starts_with(char_typed) {
- // Add `always!` here once VS Code bug is fixed:
- // https://github.com/rust-analyzer/rust-analyzer/issues/10002
+ if stdx::never!(!text[usize::from(position.offset)..].starts_with(char_typed)) {
return Ok(None);
}
expect-test = "1.2.0-pre.1"
proc-macro2 = "1.0.8"
quote = "1.0.2"
-ungrammar = "=1.14.9"
+ungrammar = "=1.15.0"
test_utils = { path = "../test_utils" }
sourcegen = { path = "../sourcegen" }
impl ast::HasAttrs for IfExpr {}
impl IfExpr {
pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
- pub fn condition(&self) -> Option<Condition> { support::child(&self.syntax) }
+ pub fn condition(&self) -> Option<Expr> { support::child(&self.syntax) }
pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) }
}
impl ast::HasLoopBody for WhileExpr {}
impl WhileExpr {
pub fn while_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![while]) }
- pub fn condition(&self) -> Option<Condition> { support::child(&self.syntax) }
+ pub fn condition(&self) -> Option<Expr> { support::child(&self.syntax) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LetExpr {}
+impl LetExpr {
+ pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct StmtList {
pub(crate) syntax: SyntaxNode,
pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
}
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Condition {
- pub(crate) syntax: SyntaxNode,
-}
-impl Condition {
- pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
- pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
- pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
- pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
-}
-
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct MatchArmList {
pub(crate) syntax: SyntaxNode,
}
impl MatchGuard {
pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
- pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
- pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
- pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
- pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn condition(&self) -> Option<Expr> { support::child(&self.syntax) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
TupleExpr(TupleExpr),
WhileExpr(WhileExpr),
YieldExpr(YieldExpr),
+ LetExpr(LetExpr),
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
+impl AstNode for LetExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LET_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
impl AstNode for StmtList {
fn can_cast(kind: SyntaxKind) -> bool { kind == STMT_LIST }
fn cast(syntax: SyntaxNode) -> Option<Self> {
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
-impl AstNode for Condition {
- fn can_cast(kind: SyntaxKind) -> bool { kind == CONDITION }
- fn cast(syntax: SyntaxNode) -> Option<Self> {
- if Self::can_cast(syntax.kind()) {
- Some(Self { syntax })
- } else {
- None
- }
- }
- fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
impl AstNode for MatchArmList {
fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM_LIST }
fn cast(syntax: SyntaxNode) -> Option<Self> {
impl From<YieldExpr> for Expr {
fn from(node: YieldExpr) -> Expr { Expr::YieldExpr(node) }
}
+impl From<LetExpr> for Expr {
+ fn from(node: LetExpr) -> Expr { Expr::LetExpr(node) }
+}
impl AstNode for Expr {
fn can_cast(kind: SyntaxKind) -> bool {
match kind {
| INDEX_EXPR | LITERAL | LOOP_EXPR | MACRO_CALL | MACRO_STMTS | MATCH_EXPR
| METHOD_CALL_EXPR | PAREN_EXPR | PATH_EXPR | PREFIX_EXPR | RANGE_EXPR
| RECORD_EXPR | REF_EXPR | RETURN_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR
- | YIELD_EXPR => true,
+ | YIELD_EXPR | LET_EXPR => true,
_ => false,
}
}
TUPLE_EXPR => Expr::TupleExpr(TupleExpr { syntax }),
WHILE_EXPR => Expr::WhileExpr(WhileExpr { syntax }),
YIELD_EXPR => Expr::YieldExpr(YieldExpr { syntax }),
+ LET_EXPR => Expr::LetExpr(LetExpr { syntax }),
_ => return None,
};
Some(res)
Expr::TupleExpr(it) => &it.syntax,
Expr::WhileExpr(it) => &it.syntax,
Expr::YieldExpr(it) => &it.syntax,
+ Expr::LetExpr(it) => &it.syntax,
}
}
}
| TUPLE_EXPR
| WHILE_EXPR
| YIELD_EXPR
+ | LET_EXPR
| STMT_LIST
| RECORD_EXPR_FIELD_LIST
| RECORD_EXPR_FIELD
std::fmt::Display::fmt(self.syntax(), f)
}
}
+impl std::fmt::Display for LetExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
impl std::fmt::Display for StmtList {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
std::fmt::Display::fmt(self.syntax(), f)
}
}
-impl std::fmt::Display for Condition {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- std::fmt::Display::fmt(self.syntax(), f)
- }
-}
impl std::fmt::Display for MatchArmList {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
expr_from_text(&format!("match {} {}", expr, match_arm_list))
}
pub fn expr_if(
- condition: ast::Condition,
+ condition: ast::Expr,
then_branch: ast::BlockExpr,
else_branch: Option<ast::ElseBranch>,
) -> ast::Expr {
fn expr_from_text(text: &str) -> ast::Expr {
ast_from_text(&format!("const C: () = {};", text))
}
-
-pub fn condition(expr: ast::Expr, pattern: Option<ast::Pat>) -> ast::Condition {
- match pattern {
- None => ast_from_text(&format!("const _: () = while {} {{}};", expr)),
- Some(pattern) => {
- ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr))
- }
- }
+pub fn expr_let(pattern: ast::Pat, expr: ast::Expr) -> ast::LetExpr { // Build a `let <pat> = <expr>` expression node.
+    ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr)) // parse a throwaway `while let` and extract the LetExpr from it
}
pub fn arg_list(args: impl IntoIterator<Item = ast::Expr>) -> ast::ArgList {
}
}
-impl ast::Condition {
- pub fn is_pattern_cond(&self) -> bool {
- self.let_token().is_some()
- }
-}
-
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum FieldKind {
Name(ast::NameRef),
}
impl ast::TokenTree {
+    pub fn token_trees_and_tokens(
+        &self,
+    ) -> impl Iterator<Item = NodeOrToken<ast::TokenTree, SyntaxToken>> { // Iterate direct children as nested token trees or plain tokens.
+        self.syntax().children_with_tokens().filter_map(|not| match not {
+            NodeOrToken::Node(node) => ast::TokenTree::cast(node).map(NodeOrToken::Node), // keep only TokenTree child nodes; other node kinds are dropped
+            NodeOrToken::Token(t) => Some(NodeOrToken::Token(t)),
+        })
+    }
+
pub fn left_delimiter_token(&self) -> Option<SyntaxToken> {
self.syntax()
.first_child_or_token()?
"CLOSURE_EXPR",
"IF_EXPR",
"WHILE_EXPR",
- "CONDITION",
"LOOP_EXPR",
"FOR_EXPR",
"CONTINUE_EXPR",
"STMT_LIST",
"RETURN_EXPR",
"YIELD_EXPR",
+ "LET_EXPR",
"MATCH_EXPR",
"MATCH_ARM_LIST",
"MATCH_ARM",
ast::PtrType(it) => validate_trait_object_ptr_ty(it, &mut errors),
ast::FnPtrType(it) => validate_trait_object_fn_ptr_ret_ty(it, &mut errors),
ast::MacroRules(it) => validate_macro_rules(it, &mut errors),
+ ast::LetExpr(it) => validate_let_expr(it, &mut errors),
_ => (),
}
}
errors.push(SyntaxError::new("const globals cannot be mutable", mut_token.text_range()));
}
}
+
+fn validate_let_expr(let_: ast::LetExpr, errors: &mut Vec<SyntaxError>) { // Emit an error unless the `let` sits in a valid `if`/`while`/match-guard condition position.
+    let mut token = let_.syntax().clone(); // NOTE(review): despite the name, this holds a SyntaxNode and walks ancestors, not tokens.
+    loop {
+        token = match token.parent() {
+            Some(it) => it,
+            None => break, // reached the root without meeting a condition position
+        };
+
+        if ast::ParenExpr::can_cast(token.kind()) {
+            continue; // parentheses are transparent for `let`-chain placement
+        } else if let Some(it) = ast::BinExpr::cast(token.clone()) {
+            if it.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And)) {
+                continue; // `&&` chains may contain `let` (let-chains)
+            }
+        } else if ast::IfExpr::can_cast(token.kind())
+            || ast::WhileExpr::can_cast(token.kind())
+            || ast::MatchGuard::can_cast(token.kind())
+        {
+            // It must be part of the condition since the expressions are inside a block.
+            return; // valid position: condition of `if`/`while`, or a match guard
+        }
+
+        break; // any other ancestor kind makes this `let` expression invalid
+    }
+    errors.push(SyntaxError::new(
+        "`let` expressions are not supported here",
+        let_.syntax().text_range(),
+    ));
+}
IF_EXPR@134..257
IF_KW@134..136 "if"
WHITESPACE@136..137 " "
- CONDITION@137..141
- LITERAL@137..141
- TRUE_KW@137..141 "true"
+ LITERAL@137..141
+ TRUE_KW@137..141 "true"
WHITESPACE@141..142 " "
BLOCK_EXPR@142..257
STMT_LIST@142..257
WHILE_EXPR@262..347
WHILE_KW@262..267 "while"
WHITESPACE@267..268 " "
- CONDITION@268..272
- LITERAL@268..272
- TRUE_KW@268..272 "true"
+ LITERAL@268..272
+ TRUE_KW@268..272 "true"
WHITESPACE@272..273 " "
BLOCK_EXPR@273..347
STMT_LIST@273..347
--- /dev/null
+SOURCE_FILE@0..282\r
+ FN@0..281\r
+ FN_KW@0..2 "fn"\r
+ WHITESPACE@2..3 " "\r
+ NAME@3..6\r
+ IDENT@3..6 "foo"\r
+ PARAM_LIST@6..8\r
+ L_PAREN@6..7 "("\r
+ R_PAREN@7..8 ")"\r
+ WHITESPACE@8..9 " "\r
+ BLOCK_EXPR@9..281\r
+ STMT_LIST@9..281\r
+ L_CURLY@9..10 "{"\r
+ WHITESPACE@10..15 "\n "\r
+ CONST@15..42\r
+ CONST_KW@15..20 "const"\r
+ WHITESPACE@20..21 " "\r
+ UNDERSCORE@21..22 "_"\r
+ COLON@22..23 ":"\r
+ WHITESPACE@23..24 " "\r
+ TUPLE_TYPE@24..26\r
+ L_PAREN@24..25 "("\r
+ R_PAREN@25..26 ")"\r
+ WHITESPACE@26..27 " "\r
+ EQ@27..28 "="\r
+ WHITESPACE@28..29 " "\r
+ LET_EXPR@29..41\r
+ LET_KW@29..32 "let"\r
+ WHITESPACE@32..33 " "\r
+ WILDCARD_PAT@33..34\r
+ UNDERSCORE@33..34 "_"\r
+ WHITESPACE@34..35 " "\r
+ EQ@35..36 "="\r
+ WHITESPACE@36..37 " "\r
+ PATH_EXPR@37..41\r
+ PATH@37..41\r
+ PATH_SEGMENT@37..41\r
+ NAME_REF@37..41\r
+ IDENT@37..41 "None"\r
+ SEMICOLON@41..42 ";"\r
+ WHITESPACE@42..48 "\n\n "\r
+ LET_STMT@48..83\r
+ LET_KW@48..51 "let"\r
+ WHITESPACE@51..52 " "\r
+ WILDCARD_PAT@52..53\r
+ UNDERSCORE@52..53 "_"\r
+ WHITESPACE@53..54 " "\r
+ EQ@54..55 "="\r
+ WHITESPACE@55..56 " "\r
+ IF_EXPR@56..82\r
+ IF_KW@56..58 "if"\r
+ WHITESPACE@58..59 " "\r
+ LITERAL@59..63\r
+ TRUE_KW@59..63 "true"\r
+ WHITESPACE@63..64 " "\r
+ BLOCK_EXPR@64..82\r
+ STMT_LIST@64..82\r
+ L_CURLY@64..65 "{"\r
+ WHITESPACE@65..66 " "\r
+ PAREN_EXPR@66..80\r
+ L_PAREN@66..67 "("\r
+ LET_EXPR@67..79\r
+ LET_KW@67..70 "let"\r
+ WHITESPACE@70..71 " "\r
+ WILDCARD_PAT@71..72\r
+ UNDERSCORE@71..72 "_"\r
+ WHITESPACE@72..73 " "\r
+ EQ@73..74 "="\r
+ WHITESPACE@74..75 " "\r
+ PATH_EXPR@75..79\r
+ PATH@75..79\r
+ PATH_SEGMENT@75..79\r
+ NAME_REF@75..79\r
+ IDENT@75..79 "None"\r
+ R_PAREN@79..80 ")"\r
+ WHITESPACE@80..81 " "\r
+ R_CURLY@81..82 "}"\r
+ SEMICOLON@82..83 ";"\r
+ WHITESPACE@83..89 "\n\n "\r
+ IF_EXPR@89..279\r
+ IF_KW@89..91 "if"\r
+ WHITESPACE@91..92 " "\r
+ BIN_EXPR@92..114\r
+ LITERAL@92..96\r
+ TRUE_KW@92..96 "true"\r
+ WHITESPACE@96..97 " "\r
+ AMP2@97..99 "&&"\r
+ WHITESPACE@99..100 " "\r
+ PAREN_EXPR@100..114\r
+ L_PAREN@100..101 "("\r
+ LET_EXPR@101..113\r
+ LET_KW@101..104 "let"\r
+ WHITESPACE@104..105 " "\r
+ WILDCARD_PAT@105..106\r
+ UNDERSCORE@105..106 "_"\r
+ WHITESPACE@106..107 " "\r
+ EQ@107..108 "="\r
+ WHITESPACE@108..109 " "\r
+ PATH_EXPR@109..113\r
+ PATH@109..113\r
+ PATH_SEGMENT@109..113\r
+ NAME_REF@109..113\r
+ IDENT@109..113 "None"\r
+ R_PAREN@113..114 ")"\r
+ WHITESPACE@114..115 " "\r
+ BLOCK_EXPR@115..279\r
+ STMT_LIST@115..279\r
+ L_CURLY@115..116 "{"\r
+ WHITESPACE@116..125 "\n "\r
+ EXPR_STMT@125..140\r
+ PAREN_EXPR@125..139\r
+ L_PAREN@125..126 "("\r
+ LET_EXPR@126..138\r
+ LET_KW@126..129 "let"\r
+ WHITESPACE@129..130 " "\r
+ WILDCARD_PAT@130..131\r
+ UNDERSCORE@130..131 "_"\r
+ WHITESPACE@131..132 " "\r
+ EQ@132..133 "="\r
+ WHITESPACE@133..134 " "\r
+ PATH_EXPR@134..138\r
+ PATH@134..138\r
+ PATH_SEGMENT@134..138\r
+ NAME_REF@134..138\r
+ IDENT@134..138 "None"\r
+ R_PAREN@138..139 ")"\r
+ SEMICOLON@139..140 ";"\r
+ WHITESPACE@140..149 "\n "\r
+ WHILE_EXPR@149..273\r
+ WHILE_KW@149..154 "while"\r
+ WHITESPACE@154..155 " "\r
+ LET_EXPR@155..167\r
+ LET_KW@155..158 "let"\r
+ WHITESPACE@158..159 " "\r
+ WILDCARD_PAT@159..160\r
+ UNDERSCORE@159..160 "_"\r
+ WHITESPACE@160..161 " "\r
+ EQ@161..162 "="\r
+ WHITESPACE@162..163 " "\r
+ PATH_EXPR@163..167\r
+ PATH@163..167\r
+ PATH_SEGMENT@163..167\r
+ NAME_REF@163..167\r
+ IDENT@163..167 "None"\r
+ WHITESPACE@167..168 " "\r
+ BLOCK_EXPR@168..273\r
+ STMT_LIST@168..273\r
+ L_CURLY@168..169 "{"\r
+ WHITESPACE@169..182 "\n "\r
+ MATCH_EXPR@182..263\r
+ MATCH_KW@182..187 "match"\r
+ WHITESPACE@187..188 " "\r
+ PATH_EXPR@188..192\r
+ PATH@188..192\r
+ PATH_SEGMENT@188..192\r
+ NAME_REF@188..192\r
+ IDENT@188..192 "None"\r
+ WHITESPACE@192..193 " "\r
+ MATCH_ARM_LIST@193..263\r
+ L_CURLY@193..194 "{"\r
+ WHITESPACE@194..211 "\n "\r
+ MATCH_ARM@211..249\r
+ WILDCARD_PAT@211..212\r
+ UNDERSCORE@211..212 "_"\r
+ WHITESPACE@212..213 " "\r
+ MATCH_GUARD@213..228\r
+ IF_KW@213..215 "if"\r
+ WHITESPACE@215..216 " "\r
+ LET_EXPR@216..228\r
+ LET_KW@216..219 "let"\r
+ WHITESPACE@219..220 " "\r
+ WILDCARD_PAT@220..221\r
+ UNDERSCORE@220..221 "_"\r
+ WHITESPACE@221..222 " "\r
+ EQ@222..223 "="\r
+ WHITESPACE@223..224 " "\r
+ PATH_EXPR@224..228\r
+ PATH@224..228\r
+ PATH_SEGMENT@224..228\r
+ NAME_REF@224..228\r
+ IDENT@224..228 "None"\r
+ WHITESPACE@228..229 " "\r
+ FAT_ARROW@229..231 "=>"\r
+ WHITESPACE@231..232 " "\r
+ BLOCK_EXPR@232..249\r
+ STMT_LIST@232..249\r
+ L_CURLY@232..233 "{"\r
+ WHITESPACE@233..234 " "\r
+ LET_STMT@234..247\r
+ LET_KW@234..237 "let"\r
+ WHITESPACE@237..238 " "\r
+ WILDCARD_PAT@238..239\r
+ UNDERSCORE@238..239 "_"\r
+ WHITESPACE@239..240 " "\r
+ EQ@240..241 "="\r
+ WHITESPACE@241..242 " "\r
+ PATH_EXPR@242..246\r
+ PATH@242..246\r
+ PATH_SEGMENT@242..246\r
+ NAME_REF@242..246\r
+ IDENT@242..246 "None"\r
+ SEMICOLON@246..247 ";"\r
+ WHITESPACE@247..248 " "\r
+ R_CURLY@248..249 "}"\r
+ WHITESPACE@249..262 "\n "\r
+ R_CURLY@262..263 "}"\r
+ WHITESPACE@263..272 "\n "\r
+ R_CURLY@272..273 "}"\r
+ WHITESPACE@273..278 "\n "\r
+ R_CURLY@278..279 "}"\r
+ WHITESPACE@279..280 "\n"\r
+ R_CURLY@280..281 "}"\r
+ WHITESPACE@281..282 "\n"\r
+error 29..41: `let` expressions are not supported here\r
+error 67..79: `let` expressions are not supported here\r
+error 126..138: `let` expressions are not supported here\r
--- /dev/null
+fn foo() {
+ const _: () = let _ = None;
+
+ let _ = if true { (let _ = None) };
+
+ if true && (let _ = None) {
+ (let _ = None);
+ while let _ = None {
+ match None {
+ _ if let _ = None => { let _ = None; }
+ }
+ }
+ }
+}
pub id: TokenId,
}
+impl Leaf {
+    pub fn id(&self) -> TokenId { // TokenId of this leaf, uniform across all three variants.
+        match self {
+            Leaf::Literal(l) => l.id,
+            Leaf::Punct(p) => p.id,
+            Leaf::Ident(i) => i.id,
+        }
+    }
+}
+
fn print_debug_subtree(f: &mut fmt::Formatter<'_>, subtree: &Subtree, level: usize) -> fmt::Result {
let align = " ".repeat(level);
After adding a new inline test you need to run `cargo test -p xtask` and also update the test data as described above.
-Note [`api_walkthrough`](https://github.com/rust-analyzer/rust-analyzer/blob/2fb6af89eb794f775de60b82afe56b6f986c2a40/crates/ra_syntax/src/lib.rs#L190-L348)
+Note [`api_walkthrough`](https://github.com/rust-analyzer/rust-analyzer/blob/2fb6af89eb794f775de60b82afe56b6f986c2a40/crates/ra_syntax/src/lib.rs#L190-L348)
in particular: it shows off various methods of working with syntax tree.
See [#93](https://github.com/rust-analyzer/rust-analyzer/pull/93) for an example PR which fixes a bug in the grammar.
textDocument: TextDocumentIdentifier;
/// Position where SSR was invoked.
position: Position;
+ /// Current selections. Search/replace will be restricted to these if non-empty.
+ selections: Range[];
}
```
$ rustup component add rust-src
```
+=== Toolchain
+
+Only the latest stable standard library source is officially supported for use with rust-analyzer.
+If you are using an older toolchain or have an override set, rust-analyzer may fail to understand the Rust source.
+You will either need to update your toolchain or use an older version of rust-analyzer that is compatible with your toolchain.
+
+If you are using an override in your project, you can still force rust-analyzer to use the stable toolchain via the environment variable `RUSTUP_TOOLCHAIN`.
+For example, with VS Code or coc-rust-analyzer:
+
+[source,json]
+----
+{ "rust-analyzer.server.extraEnv": { "RUSTUP_TOOLCHAIN": "stable" } }
+----
+
=== VS Code
This is the best supported editor at the moment.
* same configurations as VSCode extension, `rust-analyzer.server.path`, `rust-analyzer.cargo.features` etc.
* same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc.
* inlay hints for variables and method chaining, _Neovim Only_
- * semantic highlighting is not implemented yet
Note: for code actions, use `coc-codeaction-cursor` and `coc-codeaction-selected`; `coc-codeaction` and `coc-codeaction-line` are unlikely to be useful.
endif
----
-=== Sublime Text 3
-
-Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+=== Sublime Text
-You also need the `LSP` package.
-To install it:
+==== Sublime Text 4:
+* Follow the instructions in link:https://github.com/sublimelsp/LSP-rust-analyzer[LSP-rust-analyzer].
-1. If you've never installed a Sublime Text package, install Package Control:
- * Open the command palette (Win/Linux: `ctrl+shift+p`, Mac: `cmd+shift+p`)
- * Type `Install Package Control`, press enter
-2. In the command palette, run `Package control: Install package`, and in the list that pops up, type `LSP` and press enter.
+NOTE: Install link:https://packagecontrol.io/packages/LSP-file-watcher-chokidar[LSP-file-watcher-chokidar] to enable file watching (`workspace/didChangeWatchedFiles`).
-Finally, with your Rust project open, in the command palette, run `LSP: Enable Language Server In Project` or `LSP: Enable Language Server Globally`, then select `rust-analyzer` in the list that pops up to enable the rust-analyzer LSP.
-The latter means that rust-analyzer is enabled by default in Rust projects.
+==== Sublime Text 3:
+* Install the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+* Install the link:https://packagecontrol.io/packages/LSP[LSP package].
+* From the command palette, run `LSP: Enable Language Server Globally` and select `rust-analyzer`.
-If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the bottom bar, and after waiting a bit, functionality like tooltips on hovering over variables should become available.
+If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the status bar, and after waiting a bit, functionality such as tooltips on hovering over variables should become available.
If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> section on installing the language server binary.