8221: Prefer adding `mod` declaration to lib.rs over file.rs in UnlinkedFile fix r=Veykril a=Veykril
When there is a `lib.rs` and `main.rs` in one crate, one usually wants the `lib.rs` file to declare the modules.
bors r+
Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
_c: Count::new(),
},
expander,
+ statements_in_scope: Vec::new(),
}
.collect(params, body)
}
expander: Expander,
body: Body,
source_map: BodySourceMap,
+ statements_in_scope: Vec<Statement>,
}
impl ExprCollector<'_> {
ids[0]
}
ast::Expr::MacroStmts(e) => {
- // FIXME: these statements should be held by some hir containter
- for stmt in e.statements() {
- self.collect_stmt(stmt);
- }
- if let Some(expr) = e.expr() {
- self.collect_expr(expr)
- } else {
- self.alloc_expr(Expr::Missing, syntax_ptr)
- }
+ e.statements().for_each(|s| self.collect_stmt(s));
+ let tail = e
+ .expr()
+ .map(|e| self.collect_expr(e))
+ .unwrap_or_else(|| self.alloc_expr(Expr::Missing, syntax_ptr.clone()));
+
+ self.alloc_expr(Expr::MacroStmts { tail }, syntax_ptr)
}
})
}
}
}
- fn collect_stmt(&mut self, s: ast::Stmt) -> Option<Vec<Statement>> {
- let stmt = match s {
+ fn collect_stmt(&mut self, s: ast::Stmt) {
+ match s {
ast::Stmt::LetStmt(stmt) => {
- self.check_cfg(&stmt)?;
-
+ if self.check_cfg(&stmt).is_none() {
+ return;
+ }
let pat = self.collect_pat_opt(stmt.pat());
let type_ref = stmt.ty().map(|it| TypeRef::from_ast(&self.ctx(), it));
let initializer = stmt.initializer().map(|e| self.collect_expr(e));
- vec![Statement::Let { pat, type_ref, initializer }]
+ self.statements_in_scope.push(Statement::Let { pat, type_ref, initializer });
}
ast::Stmt::ExprStmt(stmt) => {
- self.check_cfg(&stmt)?;
+ if self.check_cfg(&stmt).is_none() {
+ return;
+ }
// Note that macro could be expended to multiple statements
if let Some(ast::Expr::MacroCall(m)) = stmt.expr() {
let syntax_ptr = AstPtr::new(&stmt.expr().unwrap());
- let mut stmts = vec![];
self.collect_macro_call(m, syntax_ptr.clone(), false, |this, expansion| {
match expansion {
Some(expansion) => {
let statements: ast::MacroStmts = expansion;
- statements.statements().for_each(|stmt| {
- if let Some(mut r) = this.collect_stmt(stmt) {
- stmts.append(&mut r);
- }
- });
+ statements.statements().for_each(|stmt| this.collect_stmt(stmt));
if let Some(expr) = statements.expr() {
- stmts.push(Statement::Expr(this.collect_expr(expr)));
+ let expr = this.collect_expr(expr);
+ this.statements_in_scope.push(Statement::Expr(expr));
}
}
None => {
- stmts.push(Statement::Expr(
- this.alloc_expr(Expr::Missing, syntax_ptr.clone()),
- ));
+ let expr = this.alloc_expr(Expr::Missing, syntax_ptr.clone());
+ this.statements_in_scope.push(Statement::Expr(expr));
}
}
});
- stmts
} else {
- vec![Statement::Expr(self.collect_expr_opt(stmt.expr()))]
+ let expr = self.collect_expr_opt(stmt.expr());
+ self.statements_in_scope.push(Statement::Expr(expr));
}
}
ast::Stmt::Item(item) => {
- self.check_cfg(&item)?;
-
- return None;
+ if self.check_cfg(&item).is_none() {
+ return;
+ }
}
- };
-
- Some(stmt)
+ }
}
fn collect_block(&mut self, block: ast::BlockExpr) -> ExprId {
let module = if has_def_map { def_map.root() } else { self.expander.module };
let prev_def_map = mem::replace(&mut self.expander.def_map, def_map);
let prev_local_module = mem::replace(&mut self.expander.module, module);
+ let prev_statements = std::mem::take(&mut self.statements_in_scope);
+
+ block.statements().for_each(|s| self.collect_stmt(s));
- let statements =
- block.statements().filter_map(|s| self.collect_stmt(s)).flatten().collect();
let tail = block.tail_expr().map(|e| self.collect_expr(e));
+ let statements = std::mem::replace(&mut self.statements_in_scope, prev_statements);
let syntax_node_ptr = AstPtr::new(&block.into());
let expr_id = self.alloc_expr(
Expr::Block { id: block_id, statements, tail, label: None },
Unsafe {
body: ExprId,
},
+ MacroStmts {
+ tail: ExprId,
+ },
Array(Array),
Literal(Literal),
}
f(*repeat)
}
},
+ Expr::MacroStmts { tail } => f(*tail),
Expr::Literal(_) => {}
}
}
// still need to collect inner items.
ctx.lower_inner_items(e.syntax())
},
- ast::ExprStmt(stmt) => {
- // Macros can expand to stmt. We return an empty item tree in this case, but
- // still need to collect inner items.
- ctx.lower_inner_items(stmt.syntax())
- },
- ast::Item(item) => {
- // Macros can expand to stmt and other item, and we add it as top level item
- ctx.lower_single_item(item)
- },
_ => {
panic!("cannot create item tree from {:?} {}", syntax, syntax);
},
self.tree
}
- pub(super) fn lower_single_item(mut self, item: ast::Item) -> ItemTree {
- self.tree.top_level = self
- .lower_mod_item(&item, false)
- .map(|item| item.0)
- .unwrap_or_else(|| Default::default());
- self.tree
- }
-
pub(super) fn lower_inner_items(mut self, within: &SyntaxNode) -> ItemTree {
self.collect_inner_items(within);
self.tree
use base_db::{salsa, SourceDatabase};
use mbe::{ExpandError, ExpandResult, MacroRules};
use parser::FragmentKind;
-use syntax::{algo::diff, ast::NameOwner, AstNode, GreenNode, Parse, SyntaxKind::*, SyntaxNode};
+use syntax::{
+ algo::diff,
+ ast::{MacroStmts, NameOwner},
+ AstNode, GreenNode, Parse,
+ SyntaxKind::*,
+ SyntaxNode,
+};
use crate::{
ast_id_map::AstIdMap, hygiene::HygieneFrame, BuiltinDeriveExpander, BuiltinFnLikeExpander,
None => return ExpandResult { value: None, err: result.err },
};
- log::debug!("expanded = {}", tt.as_debug_string());
-
let fragment_kind = to_fragment_kind(db, macro_call_id);
+ log::debug!("expanded = {}", tt.as_debug_string());
+ log::debug!("kind = {:?}", fragment_kind);
+
let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) {
Ok(it) => it,
Err(err) => {
+ log::debug!(
+ "failed to parse expanstion to {:?} = {}",
+ fragment_kind,
+ tt.as_debug_string()
+ );
return ExpandResult::only_err(err);
}
};
return ExpandResult::only_err(err);
}
};
-
- if !diff(&node, &call_node.value).is_empty() {
- ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) }
- } else {
+ if is_self_replicating(&node, &call_node.value) {
return ExpandResult::only_err(err);
+ } else {
+ ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) }
+ }
+ }
+ None => {
+ log::debug!("parse = {:?}", parse.syntax_node().kind());
+ ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None }
+ }
+ }
+}
+
+fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool {
+ if diff(from, to).is_empty() {
+ return true;
+ }
+ if let Some(stmts) = MacroStmts::cast(from.clone()) {
+ if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) {
+ return true;
+ }
+ if let Some(expr) = stmts.expr() {
+ if diff(expr.syntax(), to).is_empty() {
+ return true;
}
}
- None => ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None },
}
+ false
}
fn hygiene_frame(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
let parent = match syn.parent() {
Some(it) => it,
- None => {
- // FIXME:
- // If it is root, which means the parent HirFile
- // MacroKindFile must be non-items
- // return expr now.
- return FragmentKind::Expr;
- }
+ None => return FragmentKind::Statements,
};
match parent.kind() {
MACRO_ITEMS | SOURCE_FILE => FragmentKind::Items,
- MACRO_STMTS => FragmentKind::Statement,
+ MACRO_STMTS => FragmentKind::Statements,
ITEM_LIST => FragmentKind::Items,
LET_STMT => {
- // FIXME: Handle Pattern
+ // FIXME: Handle LHS Pattern
FragmentKind::Expr
}
EXPR_STMT => FragmentKind::Statements,
None => self.table.new_float_var(),
},
},
+ Expr::MacroStmts { tail } => self.infer_expr(*tail, expected),
};
// use a new type variable if we got unknown here
let ty = self.insert_type_vars_shallow(ty);
"#,
expect![[r#"
!0..8 'leta=();': ()
+ !0..8 'leta=();': ()
+ !3..4 'a': ()
+ !5..7 '()': ()
57..84 '{ ...); } }': ()
"#]],
);
}
+#[test]
+fn recurisve_macro_expanded_in_stmts() {
+ check_infer(
+ r#"
+ macro_rules! ng {
+ ([$($tts:tt)*]) => {
+ $($tts)*;
+ };
+ ([$($tts:tt)*] $head:tt $($rest:tt)*) => {
+ ng! {
+ [$($tts)* $head] $($rest)*
+ }
+ };
+ }
+ fn foo() {
+ ng!([] let a = 3);
+ let b = a;
+ }
+ "#,
+ expect![[r#"
+ !0..7 'leta=3;': {unknown}
+ !0..7 'leta=3;': {unknown}
+ !0..13 'ng!{[leta=3]}': {unknown}
+ !0..13 'ng!{[leta=]3}': {unknown}
+ !0..13 'ng!{[leta]=3}': {unknown}
+ !3..4 'a': i32
+ !5..6 '3': i32
+ 196..237 '{ ...= a; }': ()
+ 229..230 'b': i32
+ 233..234 'a': i32
+ "#]],
+ );
+}
+
#[test]
fn recursive_inner_item_macro_rules() {
check_infer(
"#,
expect![[r#"
!0..1 '1': i32
- !0..7 'mac!($)': {unknown}
+ !0..26 'macro_...>{1};}': {unknown}
+ !0..26 'macro_...>{1};}': {unknown}
107..143 '{ ...!(); }': ()
129..130 'a': i32
"#]],
"struct Foo<'a, T: Foo<'a>> {$0}",
"struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n $0\n}",
);
+ check_assist(
+ generate_impl,
+ r#"
+ struct MyOwnArray<T, const S: usize> {}$0"#,
+ r#"
+ struct MyOwnArray<T, const S: usize> {}
+
+ impl<T, const S: usize> MyOwnArray<T, S> {
+ $0
+ }"#,
+ );
check_assist(
generate_impl,
r#"
check_assist(
generate_impl,
r#"
- struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String> {}$0"#,
+ struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {}$0"#,
r#"
- struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String> {}
+ struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {}
- impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b> Defaulted<'a, 'b, T> {
+ impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> Defaulted<'a, 'b, T, S> {
$0
}"#,
);
}
buf
});
- let generics = lifetimes.chain(type_params).format(", ");
+ let const_params = generic_params.const_params().map(|t| t.syntax().to_string());
+ let generics = lifetimes.chain(type_params).chain(const_params).format(", ");
format_to!(buf, "<{}>", generics);
}
buf.push(' ');
.type_params()
.filter_map(|it| it.name())
.map(|it| SmolStr::from(it.text()));
- format_to!(buf, "<{}>", lifetime_params.chain(type_params).format(", "))
+ let const_params = generic_params
+ .const_params()
+ .filter_map(|it| it.name())
+ .map(|it| SmolStr::from(it.text()));
+ format_to!(buf, "<{}>", lifetime_params.chain(type_params).chain(const_params).format(", "))
}
match adt.where_clause() {
-use crate::RootDatabase;
use base_db::{fixture::ChangeFixture, FilePosition};
use expect_test::{expect, Expect};
use test_utils::RangeOrOffset;
+use crate::RootDatabase;
+
/// Creates analysis from a multi-file fixture, returns positions marked with $0.
pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
let change_fixture = ChangeFixture::parse(ra_fixture);
//! Handle syntactic aspects of inserting a new `use`.
use std::{cmp::Ordering, iter::successors};
-use crate::RootDatabase;
use hir::Semantics;
use itertools::{EitherOrBoth, Itertools};
use syntax::{
AstToken, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
};
+use crate::RootDatabase;
+
pub use hir::PrefixKind;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-//! See `Label`
+//! See [`Label`]
use std::fmt;
/// A type to specify UI label, like an entry in the list of assists. Enforces
use rustc_hash::FxHashMap;
use syntax::{ast, match_ast, AstNode, TextRange, TextSize};
-use crate::defs::NameClass;
use crate::{
- defs::{Definition, NameRefClass},
+ defs::{Definition, NameClass, NameRefClass},
RootDatabase,
};
}
}
+ /// Inserts a [`TextEdit`] for the given [`FileId`]. This properly handles merging existing
+ /// edits for a file if some already exist.
pub fn insert_source_edit(&mut self, file_id: FileId, edit: TextEdit) {
match self.source_file_edits.entry(file_id) {
Entry::Occupied(mut entry) => {
resolve_target_trait(sema, impl_def).map_or(vec![], |target_trait| {
target_trait
.items(sema.db)
- .iter()
+ .into_iter()
.filter(|i| match i {
hir::AssocItem::Function(f) => {
!impl_fns_consts.contains(&f.name(sema.db).to_string())
.map(|n| !impl_fns_consts.contains(&n.to_string()))
.unwrap_or_default(),
})
- .cloned()
.collect()
})
}
-use crate::RootDatabase;
use base_db::{fixture::ChangeFixture, FilePosition};
use expect_test::{expect, Expect};
use hir::Semantics;
use syntax::ast::{self, AstNode};
use test_utils::RangeOrOffset;
+use crate::RootDatabase;
+
/// Creates analysis from a multi-file fixture, returns positions marked with $0.
pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
let change_fixture = ChangeFixture::parse(ra_fixture);
//! Use case for structures in this module is, for example, situation when you need to process
//! only certain `Enum`s.
-use crate::RootDatabase;
-use hir::{Adt, Semantics, Type};
use std::iter;
+
+use hir::{Adt, Semantics, Type};
use syntax::ast::{self, make};
+use crate::RootDatabase;
+
/// Enum types that implement `std::ops::Try` trait.
#[derive(Clone, Copy)]
pub enum TryEnum {
use crate::{
ast::{self, support, AstNode, AstToken, AttrsOwner, NameOwner, SyntaxNode},
- SmolStr, SyntaxElement, SyntaxToken, T,
+ SmolStr, SyntaxElement, SyntaxToken, TokenText, T,
};
impl ast::Lifetime {
- pub fn text(&self) -> SmolStr {
+ pub fn text(&self) -> TokenText {
text_of_first_token(self.syntax())
}
}
impl ast::Name {
- pub fn text(&self) -> SmolStr {
+ pub fn text(&self) -> TokenText {
text_of_first_token(self.syntax())
}
}
impl ast::NameRef {
- pub fn text(&self) -> SmolStr {
+ pub fn text(&self) -> TokenText {
text_of_first_token(self.syntax())
}
}
}
-fn text_of_first_token(node: &SyntaxNode) -> SmolStr {
- node.green().children().next().and_then(|it| it.into_token()).unwrap().text().into()
+fn text_of_first_token(node: &SyntaxNode) -> TokenText {
+ let first_token =
+ node.green().children().next().and_then(|it| it.into_token()).unwrap().to_owned();
+
+ TokenText(first_token)
}
pub enum Macro {
}
impl NameOrNameRef {
- pub fn text(&self) -> SmolStr {
+ pub fn text(&self) -> TokenText {
match self {
NameOrNameRef::Name(name) => name.text(),
NameOrNameRef::NameRef(name_ref) => name_ref.text(),
mod parsing;
mod validation;
mod ptr;
+mod token_text;
#[cfg(test)]
mod tests;
SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren, SyntaxToken,
SyntaxTreeBuilder,
},
+ token_text::TokenText,
};
pub use parser::{SyntaxKind, T};
pub use rowan::{
--- /dev/null
+//! Yet another version of owned string, backed by a syntax tree token.
+
+use std::{cmp::Ordering, fmt, ops};
+
+pub struct TokenText(pub(crate) rowan::GreenToken);
+
+impl TokenText {
+ pub fn as_str(&self) -> &str {
+ self.0.text()
+ }
+}
+
+impl ops::Deref for TokenText {
+ type Target = str;
+
+ fn deref(&self) -> &str {
+ self.as_str()
+ }
+}
+impl AsRef<str> for TokenText {
+ fn as_ref(&self) -> &str {
+ self.as_str()
+ }
+}
+
+impl From<TokenText> for String {
+ fn from(token_text: TokenText) -> Self {
+ token_text.as_str().into()
+ }
+}
+
+impl PartialEq<&'_ str> for TokenText {
+ fn eq(&self, other: &&str) -> bool {
+ self.as_str() == *other
+ }
+}
+impl PartialEq<TokenText> for &'_ str {
+ fn eq(&self, other: &TokenText) -> bool {
+ other == self
+ }
+}
+impl PartialEq<String> for TokenText {
+ fn eq(&self, other: &String) -> bool {
+ self.as_str() == other.as_str()
+ }
+}
+impl PartialEq<TokenText> for String {
+ fn eq(&self, other: &TokenText) -> bool {
+ other == self
+ }
+}
+impl PartialEq for TokenText {
+ fn eq(&self, other: &TokenText) -> bool {
+ self.as_str() == other.as_str()
+ }
+}
+impl Eq for TokenText {}
+impl Ord for TokenText {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.as_str().cmp(other.as_str())
+ }
+}
+impl PartialOrd for TokenText {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+impl fmt::Display for TokenText {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(self.as_str(), f)
+ }
+}
+impl fmt::Debug for TokenText {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(self.as_str(), f)
+ }
+}
If your editor can't find the binary even though the binary is on your `$PATH`, the likely explanation is that it doesn't see the same `$PATH` as the shell, see https://github.com/rust-analyzer/rust-analyzer/issues/1811[this issue].
On Unix, running the editor from a shell or changing the `.desktop` file to set the environment should help.
+==== `rustup`
+
+`rust-analyzer` is available in `rustup`, but only in the nightly toolchain:
+
+[source,bash]
+----
+$ rustup +nightly component add rust-analyzer-preview
+----
+
==== Arch Linux
The `rust-analyzer` binary can be installed from the repos or AUR (Arch User Repository):