7353: Add LifetimeParam and ConstParam to CompletionItemKind r=matklad a=Veykril
Adds `LifetimeParam` and `ConstParam` to `CompletionItemKind`, mapping both to `TypeParam` in the protocol conversion since the protocol has no direct equivalents — so observable behavior does not change.
`ConstParam` could arguably be mapped to `Const` instead; it is unclear whether that would be an improvement.
Additions were solely inspired by (the single) test output for const params.
Also sorts the variants of `CompletionItemKind` and its to_proto match.
Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
toolchain: stable
profile: minimal
override: true
+ components: rust-src
- name: Install Nodejs
uses: actions/setup-node@v1
if: github.ref != 'refs/heads/release'
run: cargo xtask dist --nightly --client 0.3.$GITHUB_RUN_NUMBER-nightly
- - name: Nightly analysis-stats check
- if: github.ref != 'refs/heads/release'
+ - name: Run analysis-stats on rust-analyzer
run: target/${{ env.RA_TARGET }}/release/rust-analyzer analysis-stats .
+ - name: Run analysis-stats on rust std library
+ run: target/${{ env.RA_TARGET }}/release/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std
+
- name: Upload artifacts
uses: actions/upload-artifact@v1
with:
[[package]]
name = "backtrace"
-version = "0.3.55"
+version = "0.3.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ef5140344c85b01f9bbb4d4b7288a8aa4b3287ccef913a14bcc78a1063623598"
+checksum = "9d117600f438b1707d4e4ae15d3595657288f8235a0eb593e80ecc98ab34e1bc"
dependencies = [
"addr2line",
"cfg-if 1.0.0",
"libc",
"miniz_oxide",
- "object 0.22.0",
+ "object",
"rustc-demangle",
]
[[package]]
name = "chalk-derive"
-version = "0.47.0"
+version = "0.50.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f00f6342a387edc822002d36a381e117afcac9f744951ff75fbf4a218edea5c"
+checksum = "ac605cf409013573e971d7292d4bec6f5495b19d5f98fc9d8b1a12270c3888e0"
dependencies = [
"proc-macro2",
"quote",
[[package]]
name = "chalk-ir"
-version = "0.47.0"
+version = "0.50.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c686e69913591ae753e5526e73cbee39db3d9b0a92cc9078ab780cabf1c70aa9"
+checksum = "fa1dbfb3c2c8b67edb5cd981f720550e43579090574f786145731f90c5d401ff"
dependencies = [
"bitflags",
"chalk-derive",
[[package]]
name = "chalk-recursive"
-version = "0.47.0"
+version = "0.50.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "310fdcac0340dab4163b766baa8067266e3b909108d1ac1b5246c033bde63975"
+checksum = "0882e2a3ba66901717a64f8bb0655e809f800ac6abed05cb605e7a41d4bf8999"
dependencies = [
"chalk-derive",
"chalk-ir",
[[package]]
name = "chalk-solve"
-version = "0.47.0"
+version = "0.50.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3c3252116111c3548f1164ab8d98c67c49848b3bde10dd11b650fd023e91c72"
+checksum = "0d43cce07150eac39771ff4b198537cefef744734b2218a89c682295b54cd8d0"
dependencies = [
"chalk-derive",
"chalk-ir",
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28b9d6de7f49e22cf97ad17fc4036ece69300032f45f78f30b4a4482cdc3f4a6"
+[[package]]
+name = "countme"
+version = "2.0.0-pre.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c5716604cba7c02a846ecad3f4a3fd2d2b641faccc2a24a51efb21aff0d01f35"
+dependencies = [
+ "dashmap",
+ "once_cell",
+ "rustc-hash",
+]
+
[[package]]
name = "crc32fast"
version = "1.2.1"
"const_fn",
"crossbeam-utils 0.8.1",
"lazy_static",
- "memoffset 0.6.1",
+ "memoffset",
"scopeguard",
]
"lazy_static",
]
+[[package]]
+name = "dashmap"
+version = "4.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e77a43b28d0668df09411cb0bc9a8c2adc40f9a048afe863e05fd43251e8e39c"
+dependencies = [
+ "cfg-if 1.0.0",
+ "num_cpus",
+]
+
[[package]]
name = "dissimilar"
version = "1.0.2"
[[package]]
name = "filetime"
-version = "0.2.13"
+version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c122a393ea57648015bf06fbd3d372378992e86b9ff5a7a497b076a28c79efe"
+checksum = "1d34cfa13a63ae058bfa601fe9e313bbdb3746427c1459185464ce0fcf62e1e8"
dependencies = [
"cfg-if 1.0.0",
"libc",
- "redox_syscall",
+ "redox_syscall 0.2.4",
"winapi 0.3.9",
]
"winapi 0.3.9",
]
-[[package]]
-name = "memoffset"
-version = "0.5.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa"
-dependencies = [
- "autocfg",
-]
-
[[package]]
name = "memoffset"
version = "0.6.1"
"libc",
]
-[[package]]
-name = "object"
-version = "0.22.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d3b63360ec3cb337817c2dbd47ab4a0f170d285d8e5a2064600f3def1402397"
-
[[package]]
name = "object"
version = "0.23.0"
"cfg-if 1.0.0",
"instant",
"libc",
- "redox_syscall",
+ "redox_syscall 0.1.57",
"smallvec",
"winapi 0.3.9",
]
[[package]]
name = "pico-args"
-version = "0.3.4"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28b9b4df73455c861d7cbf8be42f01d3b373ed7f02e378d55fa84eafc6f638b1"
+checksum = "d70072c20945e1ab871c472a285fc772aefd4f5407723c206242f2c6f94595d6"
[[package]]
name = "pin-project-lite"
"libloading",
"mbe",
"memmap",
- "object 0.23.0",
+ "object",
"proc_macro_api",
"proc_macro_test",
"serde_derive",
version = "0.0.0"
dependencies = [
"cfg-if 1.0.0",
+ "countme",
"jemalloc-ctl",
"la-arena",
"libc",
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce"
+[[package]]
+name = "redox_syscall"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05ec8ca9416c5ea37062b502703cd7fcb207736bc294f6e0cf367ac6fc234570"
+dependencies = [
+ "bitflags",
+]
+
[[package]]
name = "regex"
version = "1.4.3"
[[package]]
name = "rowan"
-version = "0.10.6"
+version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a0734142c18710f7214dc21908e2f054e973b908dbb1a602a3e6691615aaaae"
+checksum = "24c2d78254049413f9d73495f883e7fa0b7a7d4b88468cd72a3bbbd0ad585cd1"
dependencies = [
+ "countme",
"hashbrown",
+ "memoffset",
"rustc-hash",
- "smol_str",
"text-size",
- "triomphe",
]
[[package]]
[[package]]
name = "rustc-ap-rustc_lexer"
-version = "697.0.0"
+version = "700.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67adbe260a0a11910624d6d28c0304fcf7b063e666682111005c83b09f73429d"
+checksum = "5ed36784376b69c941d7aa36e960a52ac712e2663960357121a4d9f2cc58e225"
dependencies = [
"unicode-xid",
]
[[package]]
name = "serde"
-version = "1.0.119"
+version = "1.0.120"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9bdd36f49e35b61d49efd8aa7fc068fd295961fd2286d0b2ee9a4c7a14e99cc3"
+checksum = "166b2349061381baf54a58e4b13c89369feb0ef2eaa57198899e2312aac30aab"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.119"
+version = "1.0.120"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "552954ce79a059ddd5fd68c271592374bd15cab2274970380c000118aeffe1cd"
+checksum = "0ca2a8cb5805ce9e3b95435e3765b7b553cecc762d938d409434338386cb5775"
dependencies = [
"proc-macro2",
"quote",
"text_edit",
]
-[[package]]
-name = "stable_deref_trait"
-version = "1.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
-
[[package]]
name = "stdx"
version = "0.0.0"
"tracing-subscriber",
]
-[[package]]
-name = "triomphe"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e9d872053cf9e5a833d8c1dd772cdc38ab66a908129d6f73c049c986161d07c"
-dependencies = [
- "memoffset 0.5.6",
- "serde",
- "stable_deref_trait",
-]
-
[[package]]
name = "tt"
version = "0.0.0"
it.text_range().end()
}
GeneratedFunctionTarget::InEmptyItemList(it) => {
- let indent = IndentLevel::from_node(it.syntax());
+ let indent = IndentLevel::from_node(&it);
leading_ws = format!("\n{}", indent + 1);
fn_def = fn_def.indent(indent + 1);
trailing_ws = format!("\n{}", indent);
- it.syntax().text_range().start() + TextSize::of('{')
+ it.text_range().start() + TextSize::of('{')
}
};
// Insertion target for a generated function: either directly behind an
// existing item, or inside an otherwise empty item list / block.
enum GeneratedFunctionTarget {
    BehindItem(SyntaxNode),
-    InEmptyItemList(ast::ItemList),
+    InEmptyItemList(SyntaxNode),
}
impl GeneratedFunctionTarget {
    /// Returns the syntax node this target is anchored to.
    fn syntax(&self) -> &SyntaxNode {
        match self {
            GeneratedFunctionTarget::BehindItem(it) => it,
-            GeneratedFunctionTarget::InEmptyItemList(it) => it.syntax(),
+            GeneratedFunctionTarget::InEmptyItemList(it) => it,
        }
    }
}
if let Some(last_item) = it.item_list().and_then(|it| it.items().last()) {
GeneratedFunctionTarget::BehindItem(last_item.syntax().clone())
} else {
- GeneratedFunctionTarget::InEmptyItemList(it.item_list()?)
+ GeneratedFunctionTarget::InEmptyItemList(it.item_list()?.syntax().clone())
+ }
+ }
+ hir::ModuleSource::BlockExpr(it) => {
+ if let Some(last_item) =
+ it.statements().take_while(|stmt| matches!(stmt, ast::Stmt::Item(_))).last()
+ {
+ GeneratedFunctionTarget::BehindItem(last_item.syntax().clone())
+ } else {
+ GeneratedFunctionTarget::InEmptyItemList(it.syntax().clone())
}
}
};
use itertools::Itertools;
use stdx::format_to;
-use syntax::ast::{self, AstNode, AttrsOwner, GenericParamsOwner, NameOwner};
+use syntax::{
+ ast::{self, AstNode, AttrsOwner, GenericParamsOwner, NameOwner},
+ SmolStr,
+};
use crate::{AssistContext, AssistId, AssistKind, Assists};
format_to!(buf, "{}", type_params.syntax());
}
buf.push_str(" ");
- buf.push_str(name.text().as_str());
+ buf.push_str(name.text());
if let Some(type_params) = type_params {
let lifetime_params = type_params
.lifetime_params()
.filter_map(|it| it.lifetime())
- .map(|it| it.text().clone());
+ .map(|it| SmolStr::from(it.text()));
let type_params = type_params
.type_params()
.filter_map(|it| it.name())
- .map(|it| it.text().clone());
+ .map(|it| SmolStr::from(it.text()));
let generic_params = lifetime_params.chain(type_params).format(", ");
format_to!(buf, "<{}>", generic_params)
use stdx::format_to;
use syntax::{
ast::{self, AstNode, GenericParamsOwner, NameOwner, StructKind, VisibilityOwner},
- T,
+ SmolStr, T,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
format_to!(buf, "{}", type_params.syntax());
}
buf.push_str(" ");
- buf.push_str(strukt.name().unwrap().text().as_str());
+ buf.push_str(strukt.name().unwrap().text());
if let Some(type_params) = type_params {
let lifetime_params = type_params
.lifetime_params()
.filter_map(|it| it.lifetime())
- .map(|it| it.text().clone());
+ .map(|it| SmolStr::from(it.text()));
let type_params =
- type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone());
+ type_params.type_params().filter_map(|it| it.name()).map(|it| SmolStr::from(it.text()));
format_to!(buf, "<{}>", lifetime_params.chain(type_params).format(", "))
}
return None;
}
- let text = token.text().as_str();
+ let text = token.text();
if !text.starts_with("r#") && text.ends_with('#') {
return None;
}
use itertools::Itertools;
use syntax::{
ast::{self, make, AstNode},
- Direction, SmolStr,
+ Direction,
SyntaxKind::{IDENT, WHITESPACE},
TextSize,
};
) -> Option<()> {
let attr = ctx.find_node_at_offset::<ast::Attr>()?;
- let attr_name = attr
+ let has_derive = attr
.syntax()
.descendants_with_tokens()
.filter(|t| t.kind() == IDENT)
.find_map(syntax::NodeOrToken::into_token)
- .filter(|t| t.text() == "derive")?
- .text()
- .clone();
+ .filter(|t| t.text() == "derive")
+ .is_some();
+ if !has_derive {
+ return None;
+ }
- let trait_token =
- ctx.token_at_offset().find(|t| t.kind() == IDENT && *t.text() != attr_name)?;
+ let trait_token = ctx.token_at_offset().find(|t| t.kind() == IDENT && t.text() != "derive")?;
let trait_path = make::path_unqualified(make::path_segment(make::name_ref(trait_token.text())));
let annotated_name = attr.syntax().siblings(Direction::Next).find_map(ast::Name::cast)?;
.syntax()
.descendants_with_tokens()
.filter(|t| t.kind() == IDENT)
- .filter_map(|t| t.into_token().map(|t| t.text().clone()))
+ .filter_map(|t| t.into_token().map(|t| t.text().to_string()))
.filter(|t| t != trait_name.text())
- .collect::<Vec<SmolStr>>();
+ .collect::<Vec<_>>();
let has_more_derives = !new_attr_input.is_empty();
if has_more_derives {
let method = mce.name_ref()?;
let arg_list = mce.arg_list()?;
- let method = match method.text().as_str() {
+ let method = match method.text() {
"is_some" => "is_none",
"is_none" => "is_some",
"is_ok" => "is_err",
use hir_def::{
attr::{Attrs, Documentation},
path::ModPath,
+ per_ns::PerNs,
resolver::HasResolver,
AttrDefId, GenericParamId, ModuleDefId,
};
let path = ast::Path::parse(link).ok()?;
let modpath = ModPath::from_src(path, &Hygiene::new_unhygienic()).unwrap();
let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath);
+ if resolved == PerNs::none() {
+ if let Some(trait_id) = resolver.resolve_module_path_in_trait_items(db.upcast(), &modpath) {
+ return Some(ModuleDefId::TraitId(trait_id));
+ };
+ }
let def = match ns {
Some(Namespace::Types) => resolved.take_types()?,
Some(Namespace::Values) => resolved.take_values()?,
}
pub fn root_module(self, db: &dyn HirDatabase) -> Module {
- let module_id = db.crate_def_map(self.id).root;
+ let module_id = db.crate_def_map(self.id).root();
Module::new(self, module_id)
}
/// in the module tree of any target in `Cargo.toml`.
pub fn crate_root(self, db: &dyn HirDatabase) -> Module {
let def_map = db.crate_def_map(self.id.krate);
- self.with_module_id(def_map.root)
+ self.with_module_id(def_map.root())
}
/// Iterates over all child modules.
/// early, in `hir_expand`, where modules simply do not exist yet.
pub fn module(self, db: &dyn HirDatabase) -> Option<Module> {
let krate = self.id.krate;
- let module_id = db.crate_def_map(krate).root;
+ let module_id = db.crate_def_map(krate).root();
Some(Module::new(Crate { id: krate }, module_id))
}
//! FIXME: write short doc here
pub use hir_def::db::{
- AttrsQuery, BodyQuery, BodyWithSourceMapQuery, ConstDataQuery, CrateDefMapQueryQuery,
- CrateLangItemsQuery, DefDatabase, DefDatabaseStorage, EnumDataQuery, ExprScopesQuery,
- FunctionDataQuery, GenericParamsQuery, ImplDataQuery, ImportMapQuery, InternConstQuery,
- InternDatabase, InternDatabaseStorage, InternEnumQuery, InternFunctionQuery, InternImplQuery,
- InternStaticQuery, InternStructQuery, InternTraitQuery, InternTypeAliasQuery, InternUnionQuery,
- ItemTreeQuery, LangItemQuery, StaticDataQuery, StructDataQuery, TraitDataQuery,
- TypeAliasDataQuery, UnionDataQuery,
+ AttrsQuery, BlockDefMapQuery, BodyQuery, BodyWithSourceMapQuery, ConstDataQuery,
+ CrateDefMapQueryQuery, CrateLangItemsQuery, DefDatabase, DefDatabaseStorage, EnumDataQuery,
+ ExprScopesQuery, FunctionDataQuery, GenericParamsQuery, ImplDataQuery, ImportMapQuery,
+ InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery, InternFunctionQuery,
+ InternImplQuery, InternStaticQuery, InternStructQuery, InternTraitQuery, InternTypeAliasQuery,
+ InternUnionQuery, ItemTreeQuery, LangItemQuery, StaticDataQuery, StructDataQuery,
+ TraitDataQuery, TypeAliasDataQuery, UnionDataQuery,
};
pub use hir_expand::db::{
AstDatabase, AstDatabaseStorage, AstIdMapQuery, HygieneFrameQuery, InternEagerExpansionQuery,
mod_data.definition_source(db).as_ref().map(|src| match src {
ModuleSource::SourceFile(file) => file as &dyn AttrsOwner,
ModuleSource::Module(module) => module as &dyn AttrsOwner,
+ ModuleSource::BlockExpr(block) => block as &dyn AttrsOwner,
}),
),
}
let mut err = None;
let call_id =
- macro_call.as_call_id_with_errors(db, self.crate_def_map.krate, resolver, &mut |e| {
+ macro_call.as_call_id_with_errors(db, self.crate_def_map.krate(), resolver, &mut |e| {
err.get_or_insert(e);
});
let call_id = match call_id {
use super::*;
fn lower(ra_fixture: &str) -> Arc<Body> {
- let (db, file_id) = crate::test_db::TestDB::with_single_file(ra_fixture);
+ let db = crate::test_db::TestDB::with_files(ra_fixture);
let krate = db.crate_graph().iter().next().unwrap();
let def_map = db.crate_def_map(krate);
- let module = def_map.modules_for_file(file_id).next().unwrap();
- let module = &def_map[module];
- let fn_def = match module.scope.declarations().next().unwrap() {
- ModuleDefId::FunctionId(it) => it,
- _ => panic!(),
- };
+ let mut fn_def = None;
+ 'outer: for (_, module) in def_map.modules() {
+ for decl in module.scope.declarations() {
+ match decl {
+ ModuleDefId::FunctionId(it) => {
+ fn_def = Some(it);
+ break 'outer;
+ }
+ _ => {}
+ }
+ }
+ }
- db.body(fn_def.into())
+ db.body(fn_def.unwrap().into())
}
fn check_diagnostics(ra_fixture: &str) {
);
}
+#[test]
+fn macro_resolve() {
+    // Regression test for a path resolution bug introduced with inner item handling.
+    // Lowering this fixture must succeed: `vec!` is defined at the crate root and
+    // invoked from inside a nested module's function body.
+    lower(
+        r"
+macro_rules! vec {
+    () => { () };
+    ($elem:expr; $n:expr) => { () };
+    ($($x:expr),+ $(,)?) => { () };
+}
+mod m {
+    fn outer() {
+        let _ = vec![FileSet::default(); self.len()];
+    }
+}
+    ",
+    );
+}
+
#[test]
fn cfg_diagnostics() {
check_diagnostics(
use std::sync::Arc;
use base_db::{salsa, CrateId, SourceDatabase, Upcast};
-use hir_expand::{db::AstDatabase, HirFileId};
+use hir_expand::{db::AstDatabase, AstId, HirFileId};
use la_arena::ArenaMap;
-use syntax::SmolStr;
+use syntax::{ast, SmolStr};
use crate::{
adt::{EnumData, StructData},
#[salsa::invoke(DefMap::crate_def_map_query)]
fn crate_def_map_query(&self, krate: CrateId) -> Arc<DefMap>;
+ #[salsa::invoke(DefMap::block_def_map_query)]
+ fn block_def_map(&self, krate: CrateId, block: AstId<ast::BlockExpr>) -> Arc<DefMap>;
+
#[salsa::invoke(StructData::struct_data_query)]
fn struct_data(&self, id: StructId) -> Arc<StructData>;
#[salsa::invoke(StructData::union_data_query)]
if item == ItemInNs::Types(from.into()) {
// - if the item is the module we're in, use `self`
Some(ModPath::from_segments(PathKind::Super(0), Vec::new()))
- } else if let Some(parent_id) = def_map.modules[from.local_id].parent {
+ } else if let Some(parent_id) = def_map[from.local_id].parent {
// - if the item is the parent module, use `super` (this is not used recursively, since `super::super` is ugly)
if item
== ItemInNs::Types(ModuleDefId::ModuleId(ModuleId {
// - if the item is already in scope, return the name under which it is
let def_map = db.crate_def_map(from.krate);
- let from_scope: &crate::item_scope::ItemScope = &def_map.modules[from.local_id].scope;
+ let from_scope: &crate::item_scope::ItemScope = &def_map[from.local_id].scope;
let scope_name =
if let Some((name, _)) = from_scope.name_of(item) { Some(name.clone()) } else { None };
if prefixed.is_none() && scope_name.is_some() {
if item
== ItemInNs::Types(ModuleDefId::ModuleId(ModuleId {
krate: from.krate,
- local_id: def_map.root,
+ local_id: def_map.root(),
}))
{
return Some(ModPath::from_segments(PathKind::Crate, Vec::new()));
}
// - if the item is the crate root of a dependency crate, return the name from the extern prelude
- for (name, def_id) in &def_map.extern_prelude {
+ for (name, def_id) in def_map.extern_prelude() {
if item == ItemInNs::Types(*def_id) {
let name = scope_name.unwrap_or_else(|| name.clone());
return Some(ModPath::from_segments(PathKind::Plain, vec![name]));
}
// - if the item is in the prelude, return the name from there
- if let Some(prelude_module) = def_map.prelude {
+ if let Some(prelude_module) = def_map.prelude() {
let prelude_def_map = db.crate_def_map(prelude_module.krate);
let prelude_scope: &crate::item_scope::ItemScope =
- &prelude_def_map.modules[prelude_module.local_id].scope;
+ &prelude_def_map[prelude_module.local_id].scope;
if let Some((name, vis)) = prelude_scope.name_of(item) {
if vis.is_visible_from(db, from) {
return Some(ModPath::from_segments(PathKind::Plain, vec![name.clone()]));
// - otherwise, look for modules containing (reexporting) it and import it from one of those
- let crate_root = ModuleId { local_id: def_map.root, krate: from.krate };
+ let crate_root = ModuleId { local_id: def_map.root(), krate: from.krate };
let crate_attrs = db.attrs(crate_root.into());
let prefer_no_std = crate_attrs.by_key("no_std").exists();
let mut best_path = None;
// Compute the initial worklist. We start with all direct child modules of `from` as well as all
// of its (recursive) parent modules.
- let data = &def_map.modules[from.local_id];
+ let data = &def_map[from.local_id];
let mut worklist = data
.children
.values()
let mut parent = data.parent;
while let Some(p) = parent {
worklist.push(ModuleId { krate: from.krate, local_id: p });
- parent = def_map.modules[p].parent;
+ parent = def_map[p].parent;
}
let mut seen: FxHashSet<_> = FxHashSet::default();
// We look only into modules that are public(ly reexported), starting with the crate root.
let empty = ImportPath { segments: vec![] };
- let root = ModuleId { krate, local_id: def_map.root };
+ let root = ModuleId { krate, local_id: def_map.root() };
let mut worklist = vec![(root, empty)];
while let Some((module, mod_path)) = worklist.pop() {
let ext_def_map;
HirFileId, InFile,
};
use la_arena::{Arena, Idx, RawIdx};
+use profile::Count;
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use syntax::{ast, match_ast};
/// The item tree of a source file.
#[derive(Debug, Eq, PartialEq)]
pub struct ItemTree {
+ _c: Count<Self>,
+
top_level: SmallVec<[ModItem; 1]>,
attrs: FxHashMap<AttrOwner, RawAttrs>,
- inner_items: FxHashMap<FileAstId<ast::Item>, SmallVec<[ModItem; 1]>>,
data: Option<Box<ItemTreeData>>,
}
impl ItemTree {
- pub fn item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
+ pub(crate) fn item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
let _p = profile::span("item_tree_query").detail(|| format!("{:?}", file_id));
let syntax = if let Some(node) = db.parse_or_expand(file_id) {
node
fn empty() -> Self {
Self {
+ _c: Count::new(),
top_level: Default::default(),
attrs: Default::default(),
- inner_items: Default::default(),
data: Default::default(),
}
}
macro_defs,
vis,
generics,
+ inner_items,
} = &mut **data;
imports.shrink_to_fit();
vis.arena.shrink_to_fit();
generics.arena.shrink_to_fit();
+
+ inner_items.shrink_to_fit();
}
}
self.raw_attrs(of).clone().filter(db, krate)
}
- /// Returns the lowered inner items that `ast` corresponds to.
- ///
- /// Most AST items are lowered to a single `ModItem`, but some (eg. `use` items) may be lowered
- /// to multiple items in the `ItemTree`.
- pub fn inner_items(&self, ast: FileAstId<ast::Item>) -> &[ModItem] {
- &self.inner_items[&ast]
+ pub fn all_inner_items(&self) -> impl Iterator<Item = ModItem> + '_ {
+ match &self.data {
+ Some(data) => Some(data.inner_items.values().flatten().copied()).into_iter().flatten(),
+ None => None.into_iter().flatten(),
+ }
}
- pub fn all_inner_items(&self) -> impl Iterator<Item = ModItem> + '_ {
- self.inner_items.values().flatten().copied()
+ pub fn inner_items_of_block(&self, block: FileAstId<ast::BlockExpr>) -> &[ModItem] {
+ match &self.data {
+ Some(data) => data.inner_items.get(&block).map(|it| &**it).unwrap_or(&[]),
+ None => &[],
+ }
}
pub fn source<S: ItemTreeNode>(&self, db: &dyn DefDatabase, of: ItemTreeId<S>) -> S::Source {
vis: ItemVisibilities,
generics: GenericParamsStorage,
+
+ inner_items: FxHashMap<FileAstId<ast::BlockExpr>, SmallVec<[ModItem; 1]>>,
}
#[derive(Debug, Eq, PartialEq, Hash)]
use smallvec::SmallVec;
use syntax::{
ast::{self, ModuleItemOwner},
- SyntaxNode,
+ SyntaxNode, WalkEvent,
};
use crate::{
file: HirFileId,
source_ast_id_map: Arc<AstIdMap>,
body_ctx: crate::body::LowerCtx,
- inner_items: Vec<ModItem>,
forced_visibility: Option<RawVisibilityId>,
}
file,
source_ast_id_map: db.ast_id_map(file),
body_ctx: crate::body::LowerCtx::new(db, file),
- inner_items: Vec::new(),
forced_visibility: None,
}
}
}
fn lower_mod_item(&mut self, item: &ast::Item, inner: bool) -> Option<ModItems> {
- assert!(inner || self.inner_items.is_empty());
-
// Collect inner items for 1-to-1-lowered items.
match item {
ast::Item::Struct(_)
fn collect_inner_items(&mut self, container: &SyntaxNode) {
let forced_vis = self.forced_visibility.take();
- let mut inner_items = mem::take(&mut self.tree.inner_items);
- inner_items.extend(container.descendants().skip(1).filter_map(ast::Item::cast).filter_map(
- |item| {
- let ast_id = self.source_ast_id_map.ast_id(&item);
- Some((ast_id, self.lower_mod_item(&item, true)?.0))
- },
- ));
- self.tree.inner_items = inner_items;
+
+ let mut block_stack = Vec::new();
+ for event in container.preorder().skip(1) {
+ match event {
+ WalkEvent::Enter(node) => {
+ match_ast! {
+ match node {
+ ast::BlockExpr(block) => {
+ block_stack.push(self.source_ast_id_map.ast_id(&block));
+ },
+ ast::Item(item) => {
+ let mod_items = self.lower_mod_item(&item, true);
+ let current_block = block_stack.last();
+ if let (Some(mod_items), Some(block)) = (mod_items, current_block) {
+ if !mod_items.0.is_empty() {
+ self.data().inner_items.entry(*block).or_default().extend(mod_items.0.iter().copied());
+ }
+ }
+ },
+ _ => {}
+ }
+ }
+ }
+ WalkEvent::Leave(node) => {
+ if ast::BlockExpr::cast(node).is_some() {
+ block_stack.pop();
+ }
+ }
+ }
+ }
+
self.forced_visibility = forced_vis;
}
let crate_def_map = db.crate_def_map(krate);
- for (_, module_data) in crate_def_map.modules.iter() {
+ for (_, module_data) in crate_def_map.modules() {
for impl_def in module_data.scope.impls() {
lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDefId)
}
use base_db::{CrateId, Edition, FileId};
use hir_expand::{diagnostics::DiagnosticSink, name::Name, InFile};
use la_arena::Arena;
+use profile::Count;
use rustc_hash::FxHashMap;
use stdx::format_to;
-use syntax::ast;
+use syntax::{ast, AstNode};
use crate::{
db::DefDatabase,
/// Contains all top-level defs from a macro-expanded crate
#[derive(Debug, PartialEq, Eq)]
pub struct DefMap {
- pub root: LocalModuleId,
- pub modules: Arena<ModuleData>,
- pub(crate) krate: CrateId,
+ _c: Count<Self>,
+ parent: Option<Arc<DefMap>>,
+ root: LocalModuleId,
+ modules: Arena<ModuleData>,
+ krate: CrateId,
/// The prelude module for this crate. This either comes from an import
/// marked with the `prelude_import` attribute, or (in the normal case) from
/// a dependency (`std` or `core`).
- pub(crate) prelude: Option<ModuleId>,
- pub(crate) extern_prelude: FxHashMap<Name, ModuleDefId>,
+ prelude: Option<ModuleId>,
+ extern_prelude: FxHashMap<Name, ModuleDefId>,
edition: Edition,
diagnostics: Vec<DefDiagnostic>,
Inline {
definition: AstId<ast::Module>,
},
+ /// Pseudo-module introduced by a block scope (contains only inner items).
+ BlockExpr {
+ block: AstId<ast::BlockExpr>,
+ },
}
impl Default for ModuleOrigin {
match self {
ModuleOrigin::File { declaration: module, .. }
| ModuleOrigin::Inline { definition: module, .. } => Some(*module),
- ModuleOrigin::CrateRoot { .. } => None,
+ ModuleOrigin::CrateRoot { .. } | ModuleOrigin::BlockExpr { .. } => None,
}
}
pub fn is_inline(&self) -> bool {
match self {
- ModuleOrigin::Inline { .. } => true,
+ ModuleOrigin::Inline { .. } | ModuleOrigin::BlockExpr { .. } => true,
ModuleOrigin::CrateRoot { .. } | ModuleOrigin::File { .. } => false,
}
}
definition.file_id,
ModuleSource::Module(definition.to_node(db.upcast())),
),
+ ModuleOrigin::BlockExpr { block } => {
+ InFile::new(block.file_id, ModuleSource::BlockExpr(block.to_node(db.upcast())))
+ }
}
}
}
let _p = profile::span("crate_def_map_query").detail(|| {
db.crate_graph()[krate].display_name.as_deref().unwrap_or_default().to_string()
});
- let def_map = {
- let edition = db.crate_graph()[krate].edition;
- let mut modules: Arena<ModuleData> = Arena::default();
- let root = modules.alloc(ModuleData::default());
- DefMap {
- krate,
- edition,
- extern_prelude: FxHashMap::default(),
- prelude: None,
- root,
- modules,
- diagnostics: Vec::new(),
- }
- };
- let def_map = collector::collect_defs(db, def_map);
+ let edition = db.crate_graph()[krate].edition;
+ let def_map = DefMap::empty(krate, edition);
+ let def_map = collector::collect_defs(db, def_map, None);
Arc::new(def_map)
}
+    /// Computes the `DefMap` for the inner items of a block expression.
+    ///
+    /// If the block contains no inner items, the parent's `DefMap` is returned
+    /// unchanged, which keeps `DefMap` parent chains short.
+    pub(crate) fn block_def_map_query(
+        db: &dyn DefDatabase,
+        krate: CrateId,
+        block: AstId<ast::BlockExpr>,
+    ) -> Arc<DefMap> {
+        let item_tree = db.item_tree(block.file_id);
+        let block_items = item_tree.inner_items_of_block(block.value);
+
+        // Resolve the lexically enclosing DefMap (nearest enclosing block, or the crate's).
+        let parent = parent_def_map(db, krate, block);
+
+        if block_items.is_empty() {
+            // If there are no inner items, nothing new is brought into scope, so we can just return
+            // the parent DefMap. This keeps DefMap parent chains short.
+            return parent;
+        }
+
+        let mut def_map = DefMap::empty(krate, parent.edition);
+        def_map.parent = Some(parent);
+
+        // Collect definitions only from the block's inner items.
+        let def_map = collector::collect_defs(db, def_map, Some(block.value));
+        Arc::new(def_map)
+    }
+
+    /// Creates an empty `DefMap` for `krate` containing a single empty root
+    /// module and no parent.
+    fn empty(krate: CrateId, edition: Edition) -> DefMap {
+        let mut modules: Arena<ModuleData> = Arena::default();
+        let root = modules.alloc(ModuleData::default());
+        DefMap {
+            _c: Count::new(),
+            parent: None,
+            krate,
+            edition,
+            extern_prelude: FxHashMap::default(),
+            prelude: None,
+            root,
+            modules,
+            diagnostics: Vec::new(),
+        }
+    }
+
pub fn add_diagnostics(
&self,
db: &dyn DefDatabase,
.map(|(id, _data)| id)
}
+    /// Iterates over all modules in this `DefMap`, including the root.
+    pub fn modules(&self) -> impl Iterator<Item = (LocalModuleId, &ModuleData)> + '_ {
+        self.modules.iter()
+    }
+
+    /// The root module of this `DefMap` (the crate root, or — for a block-scope
+    /// map — the pseudo-module introduced by the block).
+    pub fn root(&self) -> LocalModuleId {
+        self.root
+    }
+
+    /// The crate this `DefMap` belongs to.
+    pub(crate) fn krate(&self) -> CrateId {
+        self.krate
+    }
+
+    /// The prelude module in effect for this crate, if any.
+    pub(crate) fn prelude(&self) -> Option<ModuleId> {
+        self.prelude
+    }
+
+    /// Iterates over the extern prelude: names resolving to dependency crate roots.
+    pub(crate) fn extern_prelude(&self) -> impl Iterator<Item = (&Name, &ModuleDefId)> + '_ {
+        self.extern_prelude.iter()
+    }
+
pub(crate) fn resolve_path(
&self,
db: &dyn DefDatabase,
// even), as this should be a great debugging aid.
pub fn dump(&self) -> String {
let mut buf = String::new();
- go(&mut buf, self, "crate", self.root);
+ let mut current_map = self;
+        while let Some(parent) = &current_map.parent {
+ go(&mut buf, current_map, "block scope", current_map.root);
+ current_map = &**parent;
+ }
+ go(&mut buf, current_map, "crate", current_map.root);
return buf;
fn go(buf: &mut String, map: &DefMap, path: &str, module: LocalModuleId) {
}
}
+/// Returns the `DefMap` lexically enclosing `block`: the def map of the nearest
+/// ancestor block expression, or the crate-level def map if the block is not
+/// nested inside another block.
+fn parent_def_map(
+    db: &dyn DefDatabase,
+    krate: CrateId,
+    block: AstId<ast::BlockExpr>,
+) -> Arc<DefMap> {
+    // FIXME: store this info in the item tree instead of reparsing here
+    let ast_id_map = db.ast_id_map(block.file_id);
+    let block_ptr = ast_id_map.get(block.value);
+    let root = match db.parse_or_expand(block.file_id) {
+        Some(it) => it,
+        None => {
+            // The file failed to parse/expand; fall back to an empty map.
+            // NOTE(review): hard-codes Edition2018 for the fallback — confirm acceptable.
+            return Arc::new(DefMap::empty(krate, Edition::Edition2018));
+        }
+    };
+    let ast = block_ptr.to_node(&root);
+
+    // Walk up the syntax tree; the first enclosing block expression owns the
+    // parent DefMap. `skip(1)` skips `block`'s own node.
+    for ancestor in ast.syntax().ancestors().skip(1) {
+        if let Some(block_expr) = ast::BlockExpr::cast(ancestor) {
+            let ancestor_id = ast_id_map.ast_id(&block_expr);
+            let ast_id = InFile::new(block.file_id, ancestor_id);
+            let parent_map = db.block_def_map(krate, ast_id);
+            return parent_map;
+        }
+    }
+
+    // No enclosing block scope, so the parent is the crate-level DefMap.
+    db.crate_def_map(krate)
+}
+
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ModuleSource {
SourceFile(ast::SourceFile),
Module(ast::Module),
+ BlockExpr(ast::BlockExpr),
}
mod diagnostics {
const EXPANSION_DEPTH_LIMIT: usize = 128;
const FIXED_POINT_LIMIT: usize = 8192;
-pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap) -> DefMap {
+pub(super) fn collect_defs(
+ db: &dyn DefDatabase,
+ mut def_map: DefMap,
+ block: Option<FileAstId<ast::BlockExpr>>,
+) -> DefMap {
let crate_graph = db.crate_graph();
// populate external prelude
exports_proc_macros: false,
from_glob_import: Default::default(),
};
+ match block {
+ Some(block) => {
+ collector.seed_with_inner(block);
+ }
+ None => {
+ collector.seed_with_top_level();
+ }
+ }
collector.collect();
collector.finish()
}
}
impl DefCollector<'_> {
- fn collect(&mut self) {
+ fn seed_with_top_level(&mut self) {
let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id;
let item_tree = self.db.item_tree(file_id.into());
let module_id = self.def_map.root;
}
.collect(item_tree.top_level_items());
}
+ }
+
+ fn seed_with_inner(&mut self, block: FileAstId<ast::BlockExpr>) {
+ let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id;
+ let item_tree = self.db.item_tree(file_id.into());
+ let module_id = self.def_map.root;
+ self.def_map.modules[module_id].origin = ModuleOrigin::CrateRoot { definition: file_id };
+ if item_tree
+ .top_level_attrs(self.db, self.def_map.krate)
+ .cfg()
+ .map_or(true, |cfg| self.cfg_options.check(&cfg) != Some(false))
+ {
+ ModCollector {
+ def_collector: &mut *self,
+ macro_depth: 0,
+ module_id,
+ file_id: file_id.into(),
+ item_tree: &item_tree,
+ mod_dir: ModDir::root(),
+ }
+ .collect(item_tree.inner_items_of_block(block));
+ }
+ }
+ fn collect(&mut self) {
// main name resolution fixed-point loop.
let mut i = 0;
loop {
mod tests {
use crate::{db::DefDatabase, test_db::TestDB};
use base_db::{fixture::WithFixture, SourceDatabase};
- use la_arena::Arena;
use super::*;
exports_proc_macros: false,
from_glob_import: Default::default(),
};
+ collector.seed_with_top_level();
collector.collect();
collector.def_map
}
let (db, _file_id) = TestDB::with_single_file(&code);
let krate = db.test_crate();
- let def_map = {
- let edition = db.crate_graph()[krate].edition;
- let mut modules: Arena<ModuleData> = Arena::default();
- let root = modules.alloc(ModuleData::default());
- DefMap {
- krate,
- edition,
- extern_prelude: FxHashMap::default(),
- prelude: None,
- root,
- modules,
- diagnostics: Vec::new(),
- }
- };
+ let edition = db.crate_graph()[krate].edition;
+ let def_map = DefMap::empty(krate, edition);
do_collect_defs(&db, def_map)
}
original_module: LocalModuleId,
path: &ModPath,
shadow: BuiltinShadowMode,
+ ) -> ResolvePathResult {
+ let mut result = ResolvePathResult::empty(ReachedFixedPoint::No);
+ result.segment_index = Some(usize::max_value());
+
+ let mut current_map = self;
+ loop {
+ let new = current_map.resolve_path_fp_with_macro_single(
+ db,
+ mode,
+ original_module,
+ path,
+ shadow,
+ );
+
+ // Merge `new` into `result`.
+ result.resolved_def = result.resolved_def.or(new.resolved_def);
+ if result.reached_fixedpoint == ReachedFixedPoint::No {
+ result.reached_fixedpoint = new.reached_fixedpoint;
+ }
+ // FIXME: this doesn't seem right; what if the different namespace resolutions come from different crates?
+ result.krate = result.krate.or(new.krate);
+ result.segment_index = result.segment_index.min(new.segment_index);
+
+ match ¤t_map.parent {
+ Some(map) => current_map = map,
+ None => return result,
+ }
+ }
+ }
+
+ pub(super) fn resolve_path_fp_with_macro_single(
+ &self,
+ db: &dyn DefDatabase,
+ mode: ResolveMode,
+ original_module: LocalModuleId,
+ path: &ModPath,
+ shadow: BuiltinShadowMode,
) -> ResolvePathResult {
let mut segments = path.segments.iter().enumerate();
let mut curr_per_ns: PerNs = match path.kind {
mod mod_resolution;
mod diagnostics;
mod primitives;
+mod block;
use std::sync::Arc;
use base_db::{fixture::WithFixture, SourceDatabase};
use expect_test::{expect, Expect};
+use hir_expand::db::AstDatabase;
use test_utils::mark;
use crate::{db::DefDatabase, nameres::*, test_db::TestDB};
db.crate_def_map(krate)
}
+/// Test helper: computes the block-level `DefMap` for the `BlockExpr`
+/// containing the `$0` cursor marker in the fixture.
+fn compute_block_def_map(ra_fixture: &str) -> Arc<DefMap> {
+    let (db, position) = TestDB::with_position(ra_fixture);
+    let module = db.module_for_file(position.file_id);
+    let ast_map = db.ast_id_map(position.file_id.into());
+    let ast = db.parse(position.file_id);
+    // Locate the innermost block expression at the cursor; the fixture must
+    // place `$0` inside a block, otherwise this unwrap panics.
+    let block: ast::BlockExpr =
+        syntax::algo::find_node_at_offset(&ast.syntax_node(), position.offset).unwrap();
+    let block_id = ast_map.ast_id(&block);
+
+    db.block_def_map(module.krate, InFile::new(position.file_id.into(), block_id))
+}
+
fn check(ra_fixture: &str, expect: Expect) {
let def_map = compute_crate_def_map(ra_fixture);
let actual = def_map.dump();
expect.assert_eq(&actual);
}
+/// Like `check`, but dumps the `DefMap` of the block containing the `$0`
+/// cursor marker (including its parent maps) rather than the crate def map.
+fn check_at(ra_fixture: &str, expect: Expect) {
+    let def_map = compute_block_def_map(ra_fixture);
+    let actual = def_map.dump();
+    expect.assert_eq(&actual);
+}
+
#[test]
fn crate_def_map_smoke_test() {
check(
--- /dev/null
+//! Tests for name resolution inside block expressions.
+use super::*;
+
+// Items defined inside a block shadow same-named items from outer scopes.
+#[test]
+fn inner_item_smoke() {
+    check_at(
+        r#"
+struct inner {}
+fn outer() {
+    $0
+    fn inner() {}
+}
+"#,
+        expect![[r#"
+            block scope
+            inner: v
+            crate
+            inner: t
+            outer: v
+        "#]],
+    );
+}
+
+// Plain, `crate::`- and `self::`-qualified imports all resolve from a block.
+#[test]
+fn use_from_crate() {
+    check_at(
+        r#"
+struct Struct;
+fn outer() {
+    use Struct;
+    use crate::Struct as CrateStruct;
+    use self::Struct as SelfStruct;
+    $0
+}
+"#,
+        expect![[r#"
+            block scope
+            CrateStruct: t v
+            SelfStruct: t v
+            Struct: t v
+            crate
+            Struct: t v
+            outer: v
+        "#]],
+    );
+}
+
+// A single import can pick up a type from an outer scope and a value from the
+// block scope; both namespaces are merged under the alias.
+#[test]
+fn merge_namespaces() {
+    check_at(
+        r#"
+struct name {}
+fn outer() {
+    fn name() {}
+
+    use name as imported; // should import both `name`s
+
+    $0
+}
+"#,
+        expect![[r#"
+            block scope
+            imported: t v
+            name: v
+            crate
+            name: t
+            outer: v
+        "#]],
+    );
+}
+
+// Each nested block gets its own scope; the dump shows the chain of parent
+// block scopes up to the crate root.
+#[test]
+fn nested_blocks() {
+    check_at(
+        r#"
+fn outer() {
+    struct inner1 {}
+    fn inner() {
+        use inner1;
+        use outer;
+        fn inner2() {}
+        $0
+    }
+}
+"#,
+        expect![[r#"
+            block scope
+            inner1: t
+            inner2: v
+            outer: v
+            block scope
+            inner: v
+            inner1: t
+            crate
+            outer: v
+        "#]],
+    );
+}
self.resolve_module_path(db, path, BuiltinShadowMode::Module)
}
+    /// Resolves `path` in the module scope and returns it only if it names a
+    /// trait; any other kind of definition yields `None`.
+    pub fn resolve_module_path_in_trait_items(
+        &self,
+        db: &dyn DefDatabase,
+        path: &ModPath,
+    ) -> Option<TraitId> {
+        let (item_map, module) = self.module_scope()?;
+        let (module_res, ..) = item_map.resolve_path(db, module, &path, BuiltinShadowMode::Module);
+        // Only the *types* namespace can contain a trait.
+        match module_res.take_types()? {
+            ModuleDefId::TraitId(it) => Some(it),
+            _ => None,
+        }
+    }
+
pub fn resolve_path_in_type_ns(
&self,
db: &dyn DefDatabase,
let mut traits = FxHashSet::default();
for scope in &self.scopes {
if let Scope::ModuleScope(m) = scope {
- if let Some(prelude) = m.crate_def_map.prelude {
+ if let Some(prelude) = m.crate_def_map.prelude() {
let prelude_def_map = db.crate_def_map(prelude.krate);
traits.extend(prelude_def_map[prelude.local_id].scope.traits());
}
pub fn module(&self) -> Option<ModuleId> {
let (def_map, local_id) = self.module_scope()?;
- Some(ModuleId { krate: def_map.krate, local_id })
+ Some(ModuleId { krate: def_map.krate(), local_id })
}
pub fn krate(&self) -> Option<CrateId> {
- self.module_scope().map(|t| t.0.krate)
+ self.module_scope().map(|t| t.0.krate())
}
pub fn where_predicates_in_scope<'a>(
seen.insert((name.clone(), scope));
f(name.clone(), ScopeDef::PerNs(scope));
});
- m.crate_def_map.extern_prelude.iter().for_each(|(name, &def)| {
+ m.crate_def_map.extern_prelude().for_each(|(name, &def)| {
f(name.clone(), ScopeDef::PerNs(PerNs::types(def, Visibility::Public)));
});
BUILTIN_SCOPE.iter().for_each(|(name, &def)| {
f(name.clone(), ScopeDef::PerNs(def));
});
- if let Some(prelude) = m.crate_def_map.prelude {
+ if let Some(prelude) = m.crate_def_map.prelude() {
let prelude_def_map = db.crate_def_map(prelude.krate);
prelude_def_map[prelude.local_id].scope.entries().for_each(|(name, def)| {
let seen_tuple = (name.clone(), def);
pub(crate) fn module_for_file(&self, file_id: FileId) -> crate::ModuleId {
for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate);
- for (local_id, data) in crate_def_map.modules.iter() {
+ for (local_id, data) in crate_def_map.modules() {
if data.origin.file_id() == Some(file_id) {
return crate::ModuleId { krate, local_id };
}
let crate_graph = self.crate_graph();
for krate in crate_graph.iter() {
let crate_def_map = self.crate_def_map(krate);
- for (module_id, _) in crate_def_map.modules.iter() {
+ for (module_id, _) in crate_def_map.modules() {
let file_id = crate_def_map[module_id].origin.file_id();
files.extend(file_id)
}
let crate_def_map = self.crate_def_map(krate);
let mut sink = DiagnosticSinkBuilder::new().build(&mut cb);
- for (module_id, module) in crate_def_map.modules.iter() {
+ for (module_id, module) in crate_def_map.modules() {
crate_def_map.add_diagnostics(self, module_id, &mut sink);
for decl in module.scope.declarations() {
};
use la_arena::{Arena, Idx};
-use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
+use syntax::{ast, match_ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
/// `AstId` points to an AST node in a specific file.
pub struct FileAstId<N: AstNode> {
// get lower ids then children. That is, adding a new child does not
// change parent's id. This means that, say, adding a new function to a
// trait does not change ids of top-level items, which helps caching.
- bdfs(node, |it| match ast::Item::cast(it) {
- Some(module_item) => {
- res.alloc(module_item.syntax());
- true
+ bdfs(node, |it| {
+ match_ast! {
+ match it {
+ ast::Item(module_item) => {
+ res.alloc(module_item.syntax());
+ true
+ },
+ ast::BlockExpr(block) => {
+ res.alloc(block.syntax());
+ true
+ },
+ _ => false,
+ }
}
- None => false,
});
res
}
debug!("name token not found");
mbe::ExpandError::ConversionError
})?;
- let name_token = tt::Ident { id: name_token_id, text: name.text().clone() };
+ let name_token = tt::Ident { id: name_token_id, text: name.text().into() };
let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count());
Ok(BasicAdtInfo { name: name_token, type_params })
}
};
let loc = db.lookup_intern_macro(id);
let arg = loc.kind.arg(db)?;
- Some(arg.green().clone())
+ Some(arg.green().to_owned())
}
fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
}
 pub fn new_lifetime(lt: &ast::Lifetime) -> Name {
-        Self::new_text(lt.text().clone())
+        // `text()` now yields a `&str`; build the owned `SmolStr` via `Into`.
+        Self::new_text(lt.text().into())
     }
/// Shortcut to create inline plain text name
}
/// Resolve a name from the text of token.
-    fn resolve(raw_text: &SmolStr) -> Name {
+    fn resolve(raw_text: &str) -> Name {
         let raw_start = "r#";
-        if raw_text.as_str().starts_with(raw_start) {
+        if raw_text.starts_with(raw_start) {
+            // Strip the raw-identifier prefix so `r#type` and `type` intern
+            // to the same name text.
             Name::new_text(SmolStr::new(&raw_text[raw_start.len()..]))
         } else {
-            Name::new_text(raw_text.clone())
+            Name::new_text(raw_text.into())
         }
     }
log = "0.4.8"
rustc-hash = "1.1.0"
scoped-tls = "1"
-chalk-solve = { version = "0.47", default-features = false }
-chalk-ir = "0.47"
-chalk-recursive = "0.47"
+chalk-solve = { version = "0.50", default-features = false }
+chalk-ir = "0.50"
+chalk-recursive = "0.50"
la-arena = { version = "0.2.0", path = "../../lib/arena" }
stdx = { path = "../stdx", version = "0.0.0" }
let crate_def_map = self.crate_def_map(krate);
let mut fns = Vec::new();
- for (module_id, _) in crate_def_map.modules.iter() {
+ for (module_id, _) in crate_def_map.modules() {
for decl in crate_def_map[module_id].scope.declarations() {
let mut sink = DiagnosticSinkBuilder::new().build(&mut cb);
validate_module_item(self, krate, decl, &mut sink);
Expr::Box { expr } => {
let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
if let Some(box_) = self.resolve_boxed_box() {
- Ty::apply_one(TypeCtor::Adt(box_), inner_ty)
+ let mut sb = Substs::build_for_type_ctor(self.db, TypeCtor::Adt(box_));
+ sb = sb.push(inner_ty);
+ sb = sb.fill(repeat_with(|| self.table.new_type_var()));
+ Ty::apply(TypeCtor::Adt(box_), sb.build())
} else {
Ty::Unknown
}
let mut impls = Self { map: FxHashMap::default() };
let crate_def_map = db.crate_def_map(krate);
- for (_module_id, module_data) in crate_def_map.modules.iter() {
+ for (_module_id, module_data) in crate_def_map.modules() {
for impl_id in module_data.scope.impls() {
let target_trait = match db.impl_trait(impl_id) {
Some(tr) => tr.value.trait_,
let mut map: FxHashMap<_, Vec<_>> = FxHashMap::default();
let crate_def_map = db.crate_def_map(krate);
- for (_module_id, module_data) in crate_def_map.modules.iter() {
+ for (_module_id, module_data) in crate_def_map.modules() {
for impl_id in module_data.scope.impls() {
let data = db.impl_data(impl_id);
if data.target_trait.is_some() {
pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
for &krate in self.relevant_crates(file_id).iter() {
let crate_def_map = self.crate_def_map(krate);
- for (local_id, data) in crate_def_map.modules.iter() {
+ for (local_id, data) in crate_def_map.modules() {
if data.origin.file_id() == Some(file_id) {
return ModuleId { krate, local_id };
}
let crate_graph = self.crate_graph();
for krate in crate_graph.iter() {
let crate_def_map = self.crate_def_map(krate);
- for (module_id, _) in crate_def_map.modules.iter() {
+ for (module_id, _) in crate_def_map.modules() {
let file_id = crate_def_map[module_id].origin.file_id();
files.extend(file_id)
}
);
}
+// With a two-parameter `Box<T, A>` (allocator-aware std), `box expr` must
+// still infer: the expected output shows the extra allocator argument left as
+// a fresh `{unknown}` type variable rather than dropped.
+#[test]
+fn infer_box_with_allocator() {
+    check_types(
+        r#"
+//- /main.rs crate:main deps:std
+fn test() {
+    let x = box 1;
+    let t = (x, box x, box &1, box [1]);
+    t;
+} //^ (Box<i32, {unknown}>, Box<Box<i32, {unknown}>, {unknown}>, Box<&i32, {unknown}>, Box<[i32; _], {unknown}>)
+
+//- /std.rs crate:std
+#[prelude_import] use prelude::*;
+mod boxed {
+    #[lang = "owned_box"]
+    pub struct Box<T: ?Sized, A: Allocator> {
+        inner: *mut T,
+        allocator: A,
+    }
+}
+"#,
+    );
+}
+
#[test]
fn infer_adt_self() {
check_types(
node: InFile<&dyn ast::NameOwner>,
kind: SymbolKind,
) -> NavigationTarget {
- let name =
- node.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_"));
+ let name = node.value.name().map(|it| it.text().into()).unwrap_or_else(|| "_".into());
let focus_range =
node.value.name().map(|it| node.with_value(it.syntax()).original_file_range(db).range);
let frange = node.map(|it| it.syntax()).original_file_range(db);
ModuleSource::Module(node) => {
(node.syntax(), node.name().map(|it| it.syntax().text_range()))
}
+ ModuleSource::BlockExpr(node) => (node.syntax(), None),
};
let frange = src.with_value(syntax).original_file_range(db);
NavigationTarget::from_syntax(frange.file_id, name, focus, frange.range, SymbolKind::Module)
}
}
+// Block expressions have no meaningful short label (no name to show), so the
+// impl intentionally returns `None`.
+impl ShortLabel for ast::BlockExpr {
+    fn short_label(&self) -> Option<String> {
+        None
+    }
+}
+
impl ShortLabel for ast::TypeAlias {
fn short_label(&self) -> Option<String> {
short_label_from_node(self, "type ")
impl ShortLabel for ast::ConstParam {
fn short_label(&self) -> Option<String> {
let mut buf = "const ".to_owned();
- buf.push_str(self.name()?.text().as_str());
+ buf.push_str(self.name()?.text());
if let Some(type_ref) = self.ty() {
format_to!(buf, ": {}", type_ref.syntax());
}
{
let mut buf = node.visibility().map(|v| format!("{} ", v.syntax())).unwrap_or_default();
buf.push_str(label);
- buf.push_str(node.name()?.text().as_str());
+ buf.push_str(node.name()?.text());
Some(buf)
}
}?;
let krate = resolved.module(db)?.krate();
let canonical_path = resolved.canonical_path(db)?;
- let new_target = get_doc_url(db, &krate)?
+ let mut new_url = get_doc_url(db, &krate)?
.join(&format!("{}/", krate.display_name(db)?))
.ok()?
.join(&canonical_path.replace("::", "/"))
.ok()?
.join(&get_symbol_filename(db, &resolved)?)
- .ok()?
- .into_string();
+ .ok()?;
+
+ if let ModuleDef::Trait(t) = resolved {
+ let items = t.items(db);
+ if let Some(field_or_assoc_item) = items.iter().find_map(|assoc_item| {
+ if let Some(name) = assoc_item.name(db) {
+ if link.to_string() == format!("{}::{}", canonical_path, name) {
+ return Some(FieldOrAssocItem::AssocItem(*assoc_item));
+ }
+ }
+ None
+ }) {
+ if let Some(fragment) = get_symbol_fragment(db, &field_or_assoc_item) {
+ new_url = new_url.join(&fragment).ok()?;
+ }
+ };
+ }
+
+ let new_target = new_url.into_string();
let new_title = strip_prefixes_suffixes(title);
Some((new_target, new_title.to_string()))
}
let ws_text = ws.text();
let suffix = TextRange::new(offset, ws.text_range().end()) - ws.text_range().start();
let prefix = TextRange::new(ws.text_range().start(), offset) - ws.text_range().start();
- let ws_suffix = &ws_text.as_str()[suffix];
- let ws_prefix = &ws_text.as_str()[prefix];
+ let ws_suffix = &ws_text[suffix];
+ let ws_prefix = &ws_text[prefix];
if ws_text.contains('\n') && !ws_suffix.contains('\n') {
if let Some(node) = ws.next_sibling_or_token() {
let start = match ws_prefix.rfind('\n') {
use hir::{HasAttrs, ModuleDef, Semantics};
use ide_db::{
defs::{Definition, NameClass, NameRefClass},
- symbol_index, RootDatabase,
+ RootDatabase,
};
use syntax::{
ast, match_ast, AstNode, AstToken, SyntaxKind::*, SyntaxToken, TextSize, TokenAtOffset, T,
};
use crate::{
- display::{ToNav, TryToNav},
- doc_links::extract_definitions_from_markdown,
- runnables::doc_owner_to_def,
+ display::TryToNav, doc_links::extract_definitions_from_markdown, runnables::doc_owner_to_def,
FilePosition, NavigationTarget, RangeInfo,
};
return Some(RangeInfo::new(original_token.text_range(), vec![nav]));
}
- let nav_targets = match_ast! {
+ let nav = match_ast! {
match parent {
ast::NameRef(name_ref) => {
- reference_definition(&sema, Either::Right(&name_ref)).to_vec()
+ reference_definition(&sema, Either::Right(&name_ref))
},
ast::Name(name) => {
let def = NameClass::classify(&sema, &name)?.referenced_or_defined(sema.db);
- let nav = def.try_to_nav(sema.db)?;
- vec![nav]
+ def.try_to_nav(sema.db)
},
ast::Lifetime(lt) => if let Some(name_class) = NameClass::classify_lifetime(&sema, <) {
let def = name_class.referenced_or_defined(sema.db);
- let nav = def.try_to_nav(sema.db)?;
- vec![nav]
+ def.try_to_nav(sema.db)
} else {
- reference_definition(&sema, Either::Left(<)).to_vec()
+ reference_definition(&sema, Either::Left(<))
},
_ => return None,
}
};
- Some(RangeInfo::new(original_token.text_range(), nav_targets))
+ Some(RangeInfo::new(original_token.text_range(), nav.into_iter().collect()))
}
fn def_for_doc_comment(
}
}
-#[derive(Debug)]
-pub(crate) enum ReferenceResult {
- Exact(NavigationTarget),
- Approximate(Vec<NavigationTarget>),
-}
-
-impl ReferenceResult {
- fn to_vec(self) -> Vec<NavigationTarget> {
- match self {
- ReferenceResult::Exact(target) => vec![target],
- ReferenceResult::Approximate(vec) => vec,
- }
- }
-}
-
+/// Resolves the definition a name reference (or lifetime) points to and
+/// converts it into a `NavigationTarget`. Returns `None` when classification
+/// or navigation-target conversion fails; the previous index-based fallback
+/// has been removed.
 pub(crate) fn reference_definition(
     sema: &Semantics<RootDatabase>,
     name_ref: Either<&ast::Lifetime, &ast::NameRef>,
-) -> ReferenceResult {
+) -> Option<NavigationTarget> {
     let name_kind = name_ref.either(
         |lifetime| NameRefClass::classify_lifetime(sema, lifetime),
         |name_ref| NameRefClass::classify(sema, name_ref),
-    );
-    if let Some(def) = name_kind {
-        let def = def.referenced(sema.db);
-        return match def.try_to_nav(sema.db) {
-            Some(nav) => ReferenceResult::Exact(nav),
-            None => ReferenceResult::Approximate(Vec::new()),
-        };
-    }
-
-    // Fallback index based approach:
-    let name = name_ref.either(ast::Lifetime::text, ast::NameRef::text);
-    let navs =
-        symbol_index::index_resolve(sema.db, name).into_iter().map(|s| s.to_nav(sema.db)).collect();
-    ReferenceResult::Approximate(navs)
+    )?;
+    let def = name_kind.referenced(sema.db);
+    def.try_to_nav(sema.db)
 }
#[cfg(test)]
fn goto_def_for_extern_crate() {
check(
r#"
- //- /main.rs crate:main deps:std
- extern crate std$0;
- //- /std/lib.rs crate:std
- // empty
- //^ file
- "#,
+//- /main.rs crate:main deps:std
+extern crate std$0;
+//- /std/lib.rs crate:std
+// empty
+//^ file
+"#,
)
}
fn goto_def_for_renamed_extern_crate() {
check(
r#"
- //- /main.rs crate:main deps:std
- extern crate std as abc$0;
- //- /std/lib.rs crate:std
- // empty
- //^ file
- "#,
+//- /main.rs crate:main deps:std
+extern crate std as abc$0;
+//- /std/lib.rs crate:std
+// empty
+//^ file
+"#,
)
}
fn goto_def_for_macros_from_other_crates() {
check(
r#"
-//- /lib.rs
+//- /lib.rs crate:main deps:foo
use foo::foo;
fn bar() {
$0foo!();
}
-//- /foo/lib.rs
+//- /foo/lib.rs crate:foo
#[macro_export]
macro_rules! foo { () => { () } }
//^^^
fn goto_def_for_macros_in_use_tree() {
check(
r#"
-//- /lib.rs
+//- /lib.rs crate:main deps:foo
use foo::foo$0;
-//- /foo/lib.rs
+//- /foo/lib.rs crate:foo
#[macro_export]
macro_rules! foo { () => { () } }
//^^^
fn goto_def_for_macro_container() {
check(
r#"
-//- /lib.rs
+//- /lib.rs crate:main deps:foo
foo::module$0::mac!();
-//- /foo/lib.rs
+//- /foo/lib.rs crate:foo
pub mod module {
//^^^^^^
#[macro_export]
match it.definition_source(db).value {
ModuleSource::Module(it) => it.short_label(),
ModuleSource::SourceFile(it) => it.short_label(),
+ ModuleSource::BlockExpr(it) => it.short_label(),
},
mod_path,
),
"#]],
);
}
+    #[test]
+    fn test_hover_intra_link_reference_to_trait_method() {
+        // Intra-doc link `[buzz]: Foo::buzz` should resolve to the trait
+        // method's anchor (`#tymethod.buzz`) in the generated docs.rs URL.
+        check(
+            r#"
+pub trait Foo {
+    fn buzz() -> usize;
+}
+/// [Foo][buzz]
+///
+/// [buzz]: Foo::buzz
+pub struct B$0ar
+"#,
+            expect![[r#"
+                *Bar*
+
+                ```rust
+                test
+                ```
+
+                ```rust
+                pub struct Bar
+                ```
+
+                ---
+
+                [Foo](https://docs.rs/test/*/test/trait.Foo.html#tymethod.buzz)
+            "#]],
+        );
+    }
#[test]
fn test_hover_external_url() {
match expr {
ast::Expr::MethodCallExpr(method_call_expr) => {
let name_ref = method_call_expr.name_ref()?;
- match name_ref.text().as_str() {
+ match name_ref.text() {
"clone" => method_call_expr.receiver().map(|rec| rec.to_string()),
name_ref => Some(name_ref.to_owned()),
}
// The node is either the first or the last in the file
let suff = &token.text()[TextRange::new(
offset - token.text_range().start() + TextSize::of('\n'),
- TextSize::of(token.text().as_str()),
+ TextSize::of(token.text()),
)];
let spaces = suff.bytes().take_while(|&b| b == b' ').count();
ast::{self, AstNode, AttrsOwner},
match_ast, SyntaxNode,
};
+use test_utils::mark;
use crate::{
display::{ToNav, TryToNav},
+// Entry point: allocates one accumulator and lets `runnables_mod` push into
+// it, so nested modules share a single Vec instead of allocating per module.
 pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
     let sema = Semantics::new(db);
     let module = match sema.to_module_def(file_id) {
-        None => return vec![],
+        None => return Vec::new(),
         Some(it) => it,
     };
-    runnables_mod(&sema, module)
+    let mut res = Vec::new();
+    runnables_mod(&sema, &mut res, module);
+    res
 }
-fn runnables_mod(sema: &Semantics<RootDatabase>, module: hir::Module) -> Vec<Runnable> {
-    let mut res: Vec<Runnable> = module
-        .declarations(sema.db)
-        .into_iter()
-        .filter_map(|def| {
-            let runnable = match def {
-                hir::ModuleDef::Module(it) => runnable_mod(&sema, it),
-                hir::ModuleDef::Function(it) => runnable_fn(&sema, it),
-                _ => None,
-            };
-            runnable.or_else(|| module_def_doctest(&sema, def))
-        })
-        .collect();
+// Appends all runnables of `module` to `acc`: direct declarations, impl
+// items, then recursion into *inline* submodules only.
+fn runnables_mod(sema: &Semantics<RootDatabase>, acc: &mut Vec<Runnable>, module: hir::Module) {
+    acc.extend(module.declarations(sema.db).into_iter().filter_map(|def| {
+        let runnable = match def {
+            hir::ModuleDef::Module(it) => runnable_mod(&sema, it),
+            hir::ModuleDef::Function(it) => runnable_fn(&sema, it),
+            _ => None,
+        };
+        runnable.or_else(|| module_def_doctest(&sema, def))
+    }));
-    res.extend(module.impl_defs(sema.db).into_iter().flat_map(|it| it.items(sema.db)).filter_map(
+    acc.extend(module.impl_defs(sema.db).into_iter().flat_map(|it| it.items(sema.db)).filter_map(
         |def| match def {
             hir::AssocItem::Function(it) => {
                 runnable_fn(&sema, it).or_else(|| module_def_doctest(&sema, it.into()))
             },
         ));
-    res.extend(module.declarations(sema.db).into_iter().flat_map(|def| match def {
-        hir::ModuleDef::Module(it) => runnables_mod(sema, it),
-        _ => vec![],
-    }));
-
-    res
+    // Only recurse into inline (`mod m { … }`) submodules: outline submodules
+    // live in their own file, which gets its own `runnables` call.
+    for def in module.declarations(sema.db) {
+        if let hir::ModuleDef::Module(submodule) = def {
+            match submodule.definition_source(sema.db).value {
+                hir::ModuleSource::Module(_) => runnables_mod(sema, acc, submodule),
+                hir::ModuleSource::SourceFile(_) => mark::hit!(dont_recurse_in_outline_submodules),
+                hir::ModuleSource::BlockExpr(_) => {} // inner items aren't runnable
+            }
+        }
+    }
 }
pub(crate) fn runnable_fn(sema: &Semantics<RootDatabase>, def: hir::Function) -> Option<Runnable> {
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
+ use test_utils::mark;
use crate::fixture;
"#]],
);
}
+
+    // Runnables in an outline submodule (`mod m;` in another file) must not be
+    // reported for the parent file; the coverage mark proves the branch fired.
+    #[test]
+    fn dont_recurse_in_outline_submodules() {
+        mark::check!(dont_recurse_in_outline_submodules);
+        check(
+            r#"
+//- /lib.rs
+$0
+mod m;
+//- /m.rs
+mod tests {
+    #[test]
+    fn t() {}
+}
+"#,
+            &[],
+            expect![[r#"
+                []
+            "#]],
+        );
+    }
}
format_to!(buf, "{}\n", syntax_tree_stats(db));
format_to!(buf, "{} (macros)\n", macro_syntax_tree_stats(db));
format_to!(buf, "{} total\n", memory_usage());
+ format_to!(buf, "\ncounts:\n{}", profile::countme::get_all());
if let Some(file_id) = file_id {
format_to!(buf, "\nfile info:\n");
None => format_to!(buf, "does not belong to any crate"),
}
}
+
buf
}
let parent = string.syntax().parent();
let name = parent.parent().and_then(ast::MacroCall::cast)?.path()?.segment()?.name_ref()?;
- if !matches!(name.text().as_str(), "format_args" | "format_args_nl") {
+ if !matches!(name.text(), "format_args" | "format_args_nl") {
return None;
}
None => (),
}
- let line: &str = comment.text().as_str();
+ let line: &str = comment.text();
let range = comment.syntax().text_range();
let mut pos = TextSize::of(comment.prefix());
// DefDatabase
hir::db::ItemTreeQuery
+ hir::db::BlockDefMapQuery
hir::db::CrateDefMapQueryQuery
hir::db::StructDataQuery
hir::db::UnionDataQuery
hir::AssocItem::TypeAlias(it) => Some(*it),
_ => None,
})
- .find(|alias| alias.name(sema.db).to_string() == **name_ref.text())
+ .find(|alias| &alias.name(sema.db).to_string() == name_ref.text())
{
return Some(NameRefClass::Definition(Definition::ModuleDef(
ModuleDef::TypeAlias(ty),
PathSegmentKind::SelfKw => ImportGroup::ThisModule,
PathSegmentKind::SuperKw => ImportGroup::SuperModule,
PathSegmentKind::CrateKw => ImportGroup::ThisCrate,
- PathSegmentKind::Name(name) => match name.text().as_str() {
+ PathSegmentKind::Name(name) => match name.text() {
"std" => ImportGroup::Std,
"core" => ImportGroup::Std,
_ => ImportGroup::ExternCrate,
// so do nothing.
}
}
+ ModuleSource::BlockExpr(b) => {
+ if is_first {
+ let range = Some(b.syntax().text_range());
+ res.insert(file_id, range);
+ } else {
+ // We have already added the enclosing file to the search scope,
+ // so do nothing.
+ }
+ }
ModuleSource::SourceFile(_) => {
res.insert(file_id, None);
}
let mut res = FxHashMap::default();
let range = match module_src.value {
ModuleSource::Module(m) => Some(m.syntax().text_range()),
+ ModuleSource::BlockExpr(b) => Some(b.syntax().text_range()),
ModuleSource::SourceFile(_) => None,
};
res.insert(file_id, range);
let def_map = db.crate_def_map(krate);
let mut files = Vec::new();
- let mut modules = vec![def_map.root];
+ let mut modules = vec![def_map.root()];
while let Some(module) = modules.pop() {
let data = &def_map[module];
files.extend(data.origin.file_id());
query.search(&buf)
}
-pub fn index_resolve(db: &RootDatabase, name: &SmolStr) -> Vec<FileSymbol> {
+pub fn index_resolve(db: &RootDatabase, name: &str) -> Vec<FileSymbol> {
let mut query = Query::new(name.to_string());
query.exact();
query.limit(4);
 fn decl<N: NameOwner>(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
     let name = node.name()?;
     let name_range = name.syntax().text_range();
-    let name = name.text().clone();
+    // `text()` now yields a `&str`; convert into the owned `SmolStr` stored
+    // in the symbol index.
+    let name = name.text().into();
     let ptr = SyntaxNodePtr::new(node.syntax());
     Some((name, ptr, name_range))
 }
}
     fn to_text(&self) -> SmolStr {
-        self.token().text().clone()
+        // Allocate an owned `SmolStr` from the borrowed token text.
+        self.token().text().into()
     }
}
self.text_pos += TextSize::of(text);
}
- let text = SmolStr::new(self.buf.as_str());
+ self.inner.token(kind, self.buf.as_str());
self.buf.clear();
- self.inner.token(kind, text);
-
// Add whitespace between adjoint puncts
let next = last.bump();
if let (
cfg-if = "1"
libc = "0.2.73"
la-arena = { version = "0.2.0", path = "../../lib/arena" }
+countme = { version = "2.0.0-pre.2", features = ["enable"] }
jemalloc-ctl = { version = "0.3.3", optional = true }
[target.'cfg(target_os = "linux")'.dependencies]
use std::{
cell::RefCell,
collections::{BTreeMap, HashSet},
+ env,
io::{stderr, Write},
sync::{
atomic::{AtomicBool, Ordering},
/// env RA_PROFILE=foo|bar|baz // enabled only selected entries
/// env RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 ms
pub fn init() {
- let spec = std::env::var("RA_PROFILE").unwrap_or_default();
+ countme::enable(env::var("RA_COUNT").is_ok());
+ let spec = env::var("RA_PROFILE").unwrap_or_default();
init_from(&spec);
}
stop_watch::{StopWatch, StopWatchSpan},
};
+pub use countme;
+/// Include `_c: Count<Self>` field in important structs to count them.
+///
+/// To view the counts, run with `RA_COUNT=1`. The overhead of disabled count is
+/// almost zero.
+pub use countme::Count;
+
thread_local!(static IN_SCOPE: RefCell<bool> = RefCell::new(false));
/// Allows to check if the current code is withing some dynamic scope, can be
--- /dev/null
+//! Handles build script specific information
+
+use std::{
+ ffi::OsStr,
+ io::BufReader,
+ path::{Path, PathBuf},
+ process::{Command, Stdio},
+};
+
+use anyhow::Result;
+use cargo_metadata::{BuildScript, Message, Package, PackageId};
+use itertools::Itertools;
+use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::FxHashMap;
+use stdx::JodChild;
+
+use crate::{cfg_flag::CfgFlag, CargoConfig};
+
+/// Maps each cargo `PackageId` to the build-script data collected for it.
+#[derive(Debug, Clone, Default)]
+pub(crate) struct BuildDataMap {
+    data: FxHashMap<PackageId, BuildData>,
+}
+#[derive(Debug, Clone, Default, PartialEq, Eq)]
+pub struct BuildData {
+    /// List of config flags defined by this package's build script
+    pub cfgs: Vec<CfgFlag>,
+    /// List of cargo-related environment variables with their value
+    ///
+    /// If the package has a build script which defines environment variables,
+    /// they can also be found here.
+    pub envs: Vec<(String, String)>,
+    /// Directory where a build script might place its output
+    pub out_dir: Option<AbsPathBuf>,
+    /// Path to the proc-macro library file if this package exposes proc-macros
+    pub proc_macro_dylib_path: Option<AbsPathBuf>,
+}
+
+impl BuildDataMap {
+    /// Runs `cargo check --message-format=json` on the workspace and collects
+    /// build-script output (cfgs, env, OUT_DIR) and proc-macro dylib paths
+    /// from the emitted JSON message stream.
+    pub(crate) fn new(
+        cargo_toml: &AbsPath,
+        cargo_features: &CargoConfig,
+        packages: &Vec<Package>,
+        progress: &dyn Fn(String),
+    ) -> Result<BuildDataMap> {
+        let mut cmd = Command::new(toolchain::cargo());
+        cmd.args(&["check", "--workspace", "--message-format=json", "--manifest-path"])
+            .arg(cargo_toml.as_ref());
+
+        // --all-targets includes tests, benches and examples in addition to the
+        // default lib and bins. This is an independent concept from the --targets
+        // flag below.
+        cmd.arg("--all-targets");
+
+        if let Some(target) = &cargo_features.target {
+            cmd.args(&["--target", target]);
+        }
+
+        if cargo_features.all_features {
+            cmd.arg("--all-features");
+        } else {
+            if cargo_features.no_default_features {
+                // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures`
+                // https://github.com/oli-obk/cargo_metadata/issues/79
+                cmd.arg("--no-default-features");
+            }
+            if !cargo_features.features.is_empty() {
+                cmd.arg("--features");
+                cmd.arg(cargo_features.features.join(" "));
+            }
+        }
+
+        cmd.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null());
+
+        // JodChild kills the child on drop, so an early `?` return below
+        // doesn't leak the cargo process.
+        let mut child = cmd.spawn().map(JodChild)?;
+        let child_stdout = child.stdout.take().unwrap();
+        let stdout = BufReader::new(child_stdout);
+
+        let mut res = BuildDataMap::default();
+        for message in cargo_metadata::Message::parse_stream(stdout) {
+            if let Ok(message) = message {
+                match message {
+                    Message::BuildScriptExecuted(BuildScript {
+                        package_id,
+                        out_dir,
+                        cfgs,
+                        env,
+                        ..
+                    }) => {
+                        let cfgs = {
+                            let mut acc = Vec::new();
+                            for cfg in cfgs {
+                                match cfg.parse::<CfgFlag>() {
+                                    Ok(it) => acc.push(it),
+                                    Err(err) => {
+                                        anyhow::bail!("invalid cfg from cargo-metadata: {}", err)
+                                    }
+                                };
+                            }
+                            acc
+                        };
+                        let res = res.data.entry(package_id.clone()).or_default();
+                        // cargo_metadata crate returns default (empty) path for
+                        // older cargos, which is not absolute, so work around that.
+                        if out_dir != PathBuf::default() {
+                            let out_dir = AbsPathBuf::assert(out_dir);
+                            res.out_dir = Some(out_dir);
+                            res.cfgs = cfgs;
+                        }
+
+                        res.envs = env;
+                    }
+                    Message::CompilerArtifact(message) => {
+                        progress(format!("metadata {}", message.target.name));
+
+                        if message.target.kind.contains(&"proc-macro".to_string()) {
+                            let package_id = message.package_id;
+                            // Skip rmeta file
+                            if let Some(filename) =
+                                message.filenames.iter().find(|name| is_dylib(name))
+                            {
+                                let filename = AbsPathBuf::assert(filename.clone());
+                                let res = res.data.entry(package_id.clone()).or_default();
+                                res.proc_macro_dylib_path = Some(filename);
+                            }
+                        }
+                    }
+                    Message::CompilerMessage(message) => {
+                        progress(message.target.name.clone());
+                    }
+                    Message::Unknown => (),
+                    Message::BuildFinished(_) => {}
+                    Message::TextLine(_) => {}
+                }
+            }
+        }
+        res.inject_cargo_env(packages);
+        Ok(res)
+    }
+
+    /// Builds a map with only the synthetic `CARGO_*` env vars, without
+    /// running any build scripts (used when check-based loading is disabled).
+    pub(crate) fn with_cargo_env(packages: &Vec<Package>) -> Self {
+        let mut res = Self::default();
+        res.inject_cargo_env(packages);
+        res
+    }
+
+    /// Looks up the collected build data for a package, if any.
+    pub(crate) fn get(&self, id: &PackageId) -> Option<&BuildData> {
+        self.data.get(id)
+    }
+
+    /// Adds the compile-time `CARGO_*` variables (and `OUT_DIR`, when known)
+    /// to every package's env list.
+    fn inject_cargo_env(&mut self, packages: &Vec<Package>) {
+        for meta_pkg in packages {
+            let resource = self.data.entry(meta_pkg.id.clone()).or_default();
+            inject_cargo_env(meta_pkg, &mut resource.envs);
+
+            if let Some(out_dir) = &resource.out_dir {
+                // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
+                if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) {
+                    resource.envs.push(("OUT_DIR".to_string(), out_dir));
+                }
+            }
+        }
+    }
+}
+
+// FIXME: File a better way to know if it is a dylib
+fn is_dylib(path: &Path) -> bool {
+ match path.extension().and_then(OsStr::to_str).map(|it| it.to_string().to_lowercase()) {
+ None => false,
+ Some(ext) => matches!(ext.as_str(), "dll" | "dylib" | "so"),
+ }
+}
+
+/// Recreates the compile-time environment variables that Cargo sets.
+///
+/// Should be synced with <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
+fn inject_cargo_env(package: &cargo_metadata::Package, env: &mut Vec<(String, String)>) {
+    // FIXME: Missing variables:
+    // CARGO, CARGO_PKG_HOMEPAGE, CARGO_CRATE_NAME, CARGO_BIN_NAME, CARGO_BIN_EXE_<name>
+
+    // CARGO_MANIFEST_DIR is the directory containing Cargo.toml, i.e. the
+    // manifest path with the file name popped off.
+    let mut manifest_dir = package.manifest_path.clone();
+    manifest_dir.pop();
+    if let Some(cargo_manifest_dir) = manifest_dir.to_str() {
+        env.push(("CARGO_MANIFEST_DIR".into(), cargo_manifest_dir.into()));
+    }
+
+    env.push(("CARGO_PKG_VERSION".into(), package.version.to_string()));
+    env.push(("CARGO_PKG_VERSION_MAJOR".into(), package.version.major.to_string()));
+    env.push(("CARGO_PKG_VERSION_MINOR".into(), package.version.minor.to_string()));
+    env.push(("CARGO_PKG_VERSION_PATCH".into(), package.version.patch.to_string()));
+
+    // Pre-release identifiers are dot-joined, matching cargo's own format.
+    let pre = package.version.pre.iter().map(|id| id.to_string()).format(".");
+    env.push(("CARGO_PKG_VERSION_PRE".into(), pre.to_string()));
+
+    let authors = package.authors.join(";");
+    env.push(("CARGO_PKG_AUTHORS".into(), authors));
+
+    env.push(("CARGO_PKG_NAME".into(), package.name.clone()));
+    env.push(("CARGO_PKG_DESCRIPTION".into(), package.description.clone().unwrap_or_default()));
+    //env.push(("CARGO_PKG_HOMEPAGE".into(), package.homepage.clone().unwrap_or_default()));
+    env.push(("CARGO_PKG_REPOSITORY".into(), package.repository.clone().unwrap_or_default()));
+    env.push(("CARGO_PKG_LICENSE".into(), package.license.clone().unwrap_or_default()));
+
+    let license_file =
+        package.license_file.as_ref().map(|buf| buf.display().to_string()).unwrap_or_default();
+    env.push(("CARGO_PKG_LICENSE_FILE".into(), license_file));
+}
//! FIXME: write short doc here
-use std::{
- convert::TryInto,
- ffi::OsStr,
- io::BufReader,
- ops,
- path::{Path, PathBuf},
- process::{Command, Stdio},
-};
+use std::{convert::TryInto, ops, process::Command};
use anyhow::{Context, Result};
use base_db::Edition;
-use cargo_metadata::{BuildScript, CargoOpt, Message, MetadataCommand, PackageId};
-use itertools::Itertools;
+use cargo_metadata::{CargoOpt, MetadataCommand};
use la_arena::{Arena, Idx};
use paths::{AbsPath, AbsPathBuf};
use rustc_hash::FxHashMap;
-use stdx::JodChild;
-use crate::cfg_flag::CfgFlag;
+use crate::build_data::{BuildData, BuildDataMap};
use crate::utf8_stdout;
/// `CargoWorkspace` represents the logical structure of, well, a Cargo
pub dependencies: Vec<PackageDependency>,
/// Rust edition for this package
pub edition: Edition,
- /// List of features to activate
- pub features: Vec<String>,
- /// List of config flags defined by this package's build script
- pub cfgs: Vec<CfgFlag>,
- /// List of cargo-related environment variables with their value
- ///
- /// If the package has a build script which defines environment variables,
- /// they can also be found here.
- pub envs: Vec<(String, String)>,
- /// Directory where a build script might place its output
- pub out_dir: Option<AbsPathBuf>,
- /// Path to the proc-macro library file if this package exposes proc-macros
- pub proc_macro_dylib_path: Option<AbsPathBuf>,
+ /// Features provided by the crate, mapped to the features required by that feature.
+ pub features: FxHashMap<String, Vec<String>>,
+ /// List of features enabled on this package
+ pub active_features: Vec<String>,
+ /// Build script related data for this package
+ pub build_data: BuildData,
}
#[derive(Debug, Clone, Eq, PartialEq)]
)
})?;
- let mut out_dir_by_id = FxHashMap::default();
- let mut cfgs = FxHashMap::default();
- let mut envs = FxHashMap::default();
- let mut proc_macro_dylib_paths = FxHashMap::default();
- if config.load_out_dirs_from_check {
- let resources = load_extern_resources(cargo_toml, config, progress)?;
- out_dir_by_id = resources.out_dirs;
- cfgs = resources.cfgs;
- envs = resources.env;
- proc_macro_dylib_paths = resources.proc_dylib_paths;
- }
+ let resources = if config.load_out_dirs_from_check {
+ BuildDataMap::new(cargo_toml, config, &meta.packages, progress)?
+ } else {
+ BuildDataMap::with_cargo_env(&meta.packages)
+ };
let mut pkg_by_id = FxHashMap::default();
let mut packages = Arena::default();
meta.packages.sort_by(|a, b| a.id.cmp(&b.id));
for meta_pkg in meta.packages {
let id = meta_pkg.id.clone();
- inject_cargo_env(&meta_pkg, envs.entry(id).or_default());
+ let build_data = resources.get(&id).cloned().unwrap_or_default();
let cargo_metadata::Package { id, edition, name, manifest_path, version, .. } =
meta_pkg;
is_member,
edition,
dependencies: Vec::new(),
- features: Vec::new(),
- cfgs: cfgs.get(&id).cloned().unwrap_or_default(),
- envs: envs.get(&id).cloned().unwrap_or_default(),
- out_dir: out_dir_by_id.get(&id).cloned(),
- proc_macro_dylib_path: proc_macro_dylib_paths.get(&id).cloned(),
+ features: meta_pkg.features.into_iter().collect(),
+ active_features: Vec::new(),
+ build_data,
});
let pkg_data = &mut packages[pkg];
pkg_by_id.insert(id, pkg);
let dep = PackageDependency { name: dep_node.name, pkg };
packages[source].dependencies.push(dep);
}
- packages[source].features.extend(node.features);
+ packages[source].active_features.extend(node.features);
}
let workspace_root = AbsPathBuf::assert(meta.workspace_root);
self.packages.iter().filter(|(_, v)| v.name == name).count() == 1
}
}
-
-#[derive(Debug, Clone, Default)]
-pub(crate) struct ExternResources {
- out_dirs: FxHashMap<PackageId, AbsPathBuf>,
- proc_dylib_paths: FxHashMap<PackageId, AbsPathBuf>,
- cfgs: FxHashMap<PackageId, Vec<CfgFlag>>,
- env: FxHashMap<PackageId, Vec<(String, String)>>,
-}
-
-pub(crate) fn load_extern_resources(
- cargo_toml: &Path,
- cargo_features: &CargoConfig,
- progress: &dyn Fn(String),
-) -> Result<ExternResources> {
- let mut cmd = Command::new(toolchain::cargo());
- cmd.args(&["check", "--workspace", "--message-format=json", "--manifest-path"]).arg(cargo_toml);
-
- // --all-targets includes tests, benches and examples in addition to the
- // default lib and bins. This is an independent concept from the --targets
- // flag below.
- cmd.arg("--all-targets");
-
- if let Some(target) = &cargo_features.target {
- cmd.args(&["--target", target]);
- }
-
- if cargo_features.all_features {
- cmd.arg("--all-features");
- } else {
- if cargo_features.no_default_features {
- // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures`
- // https://github.com/oli-obk/cargo_metadata/issues/79
- cmd.arg("--no-default-features");
- }
- if !cargo_features.features.is_empty() {
- cmd.arg("--features");
- cmd.arg(cargo_features.features.join(" "));
- }
- }
-
- cmd.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null());
-
- let mut child = cmd.spawn().map(JodChild)?;
- let child_stdout = child.stdout.take().unwrap();
- let stdout = BufReader::new(child_stdout);
-
- let mut res = ExternResources::default();
- for message in cargo_metadata::Message::parse_stream(stdout) {
- if let Ok(message) = message {
- match message {
- Message::BuildScriptExecuted(BuildScript {
- package_id,
- out_dir,
- cfgs,
- env,
- ..
- }) => {
- let cfgs = {
- let mut acc = Vec::new();
- for cfg in cfgs {
- match cfg.parse::<CfgFlag>() {
- Ok(it) => acc.push(it),
- Err(err) => {
- anyhow::bail!("invalid cfg from cargo-metadata: {}", err)
- }
- };
- }
- acc
- };
- // cargo_metadata crate returns default (empty) path for
- // older cargos, which is not absolute, so work around that.
- if out_dir != PathBuf::default() {
- let out_dir = AbsPathBuf::assert(out_dir);
- res.out_dirs.insert(package_id.clone(), out_dir);
- res.cfgs.insert(package_id.clone(), cfgs);
- }
-
- res.env.insert(package_id, env);
- }
- Message::CompilerArtifact(message) => {
- progress(format!("metadata {}", message.target.name));
-
- if message.target.kind.contains(&"proc-macro".to_string()) {
- let package_id = message.package_id;
- // Skip rmeta file
- if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name))
- {
- let filename = AbsPathBuf::assert(filename.clone());
- res.proc_dylib_paths.insert(package_id, filename);
- }
- }
- }
- Message::CompilerMessage(message) => {
- progress(message.target.name.clone());
- }
- Message::Unknown => (),
- Message::BuildFinished(_) => {}
- Message::TextLine(_) => {}
- }
- }
- }
- Ok(res)
-}
-
-// FIXME: File a better way to know if it is a dylib
-fn is_dylib(path: &Path) -> bool {
- match path.extension().and_then(OsStr::to_str).map(|it| it.to_string().to_lowercase()) {
- None => false,
- Some(ext) => matches!(ext.as_str(), "dll" | "dylib" | "so"),
- }
-}
-
-/// Recreates the compile-time environment variables that Cargo sets.
-///
-/// Should be synced with <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
-fn inject_cargo_env(package: &cargo_metadata::Package, env: &mut Vec<(String, String)>) {
- // FIXME: Missing variables:
- // CARGO, CARGO_PKG_HOMEPAGE, CARGO_CRATE_NAME, CARGO_BIN_NAME, CARGO_BIN_EXE_<name>
-
- let mut manifest_dir = package.manifest_path.clone();
- manifest_dir.pop();
- if let Some(cargo_manifest_dir) = manifest_dir.to_str() {
- env.push(("CARGO_MANIFEST_DIR".into(), cargo_manifest_dir.into()));
- }
-
- env.push(("CARGO_PKG_VERSION".into(), package.version.to_string()));
- env.push(("CARGO_PKG_VERSION_MAJOR".into(), package.version.major.to_string()));
- env.push(("CARGO_PKG_VERSION_MINOR".into(), package.version.minor.to_string()));
- env.push(("CARGO_PKG_VERSION_PATCH".into(), package.version.patch.to_string()));
-
- let pre = package.version.pre.iter().map(|id| id.to_string()).format(".");
- env.push(("CARGO_PKG_VERSION_PRE".into(), pre.to_string()));
-
- let authors = package.authors.join(";");
- env.push(("CARGO_PKG_AUTHORS".into(), authors));
-
- env.push(("CARGO_PKG_NAME".into(), package.name.clone()));
- env.push(("CARGO_PKG_DESCRIPTION".into(), package.description.clone().unwrap_or_default()));
- //env.push(("CARGO_PKG_HOMEPAGE".into(), package.homepage.clone().unwrap_or_default()));
- env.push(("CARGO_PKG_REPOSITORY".into(), package.repository.clone().unwrap_or_default()));
- env.push(("CARGO_PKG_LICENSE".into(), package.license.clone().unwrap_or_default()));
-
- let license_file =
- package.license_file.as_ref().map(|buf| buf.display().to_string()).unwrap_or_default();
- env.push(("CARGO_PKG_LICENSE_FILE".into(), license_file));
-}
mod sysroot;
mod workspace;
mod rustc_cfg;
+mod build_data;
use std::{
fs::{read_dir, ReadDir},
let pkg_root = cargo[pkg].root().to_path_buf();
let mut include = vec![pkg_root.clone()];
- include.extend(cargo[pkg].out_dir.clone());
+ include.extend(cargo[pkg].build_data.out_dir.clone());
let mut exclude = vec![pkg_root.join(".git")];
if is_member {
let edition = pkg.edition;
let cfg_options = {
let mut opts = cfg_options.clone();
- for feature in pkg.features.iter() {
+ for feature in pkg.active_features.iter() {
opts.insert_key_value("feature".into(), feature.into());
}
- opts.extend(pkg.cfgs.iter().cloned());
+ opts.extend(pkg.build_data.cfgs.iter().cloned());
opts
};
let mut env = Env::default();
- for (k, v) in &pkg.envs {
+ for (k, v) in &pkg.build_data.envs {
env.set(k, v.clone());
}
- if let Some(out_dir) = &pkg.out_dir {
- // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
- if let Some(out_dir) = out_dir.to_str().map(|s| s.to_owned()) {
- env.set("OUT_DIR", out_dir);
- }
- }
- let proc_macro =
- pkg.proc_macro_dylib_path.as_ref().map(|it| proc_macro_loader(&it)).unwrap_or_default();
+ let proc_macro = pkg
+ .build_data
+ .proc_macro_dylib_path
+ .as_ref()
+ .map(|it| proc_macro_loader(&it))
+ .unwrap_or_default();
let display_name = CrateDisplayName::from_canonical_name(pkg.name.clone());
let crate_id = crate_graph.add_crate_root(
log = "0.4.8"
lsp-types = { version = "0.86.0", features = ["proposed"] }
parking_lot = "0.11.0"
-pico-args = "0.3.1"
+pico-args = "0.4.0"
oorandom = "11.1.2"
rustc-hash = "1.1.0"
serde = { version = "1.0.106", features = ["derive"] }
let mut matches = Arguments::from_env();
if matches.contains("--version") {
- matches.finish()?;
+ finish_args(matches)?;
return Ok(Args {
verbosity: Verbosity::Normal,
log_file: None,
let subcommand = match matches.subcommand()? {
Some(it) => it,
None => {
- matches.finish()?;
+ finish_args(matches)?;
return Ok(Args { verbosity, log_file, command: Command::RunServer });
}
};
load_output_dirs: matches.contains("--load-output-dirs"),
with_proc_macro: matches.contains("--with-proc-macro"),
path: matches
- .free_from_str()?
+ .opt_free_from_str()?
.ok_or_else(|| format_err!("expected positional argument"))?,
}),
"analysis-bench" => Command::Bench(BenchCmd {
load_output_dirs: matches.contains("--load-output-dirs"),
with_proc_macro: matches.contains("--with-proc-macro"),
path: matches
- .free_from_str()?
+ .opt_free_from_str()?
.ok_or_else(|| format_err!("expected positional argument"))?,
}),
"diagnostics" => Command::Diagnostics {
load_output_dirs: matches.contains("--load-output-dirs"),
with_proc_macro: matches.contains("--with-proc-macro"),
path: matches
- .free_from_str()?
+ .opt_free_from_str()?
.ok_or_else(|| format_err!("expected positional argument"))?,
},
"proc-macro" => Command::ProcMacro,
"ssr" => Command::Ssr {
rules: {
let mut acc = Vec::new();
- while let Some(rule) = matches.free_from_str()? {
+ while let Some(rule) = matches.opt_free_from_str()? {
acc.push(rule);
}
acc
debug_snippet: matches.opt_value_from_str("--debug")?,
patterns: {
let mut acc = Vec::new();
- while let Some(rule) = matches.free_from_str()? {
+ while let Some(rule) = matches.opt_free_from_str()? {
acc.push(rule);
}
acc
return Ok(Args { verbosity, log_file: None, command: Command::Help });
}
};
- matches.finish()?;
+ finish_args(matches)?;
Ok(Args { verbosity, log_file, command })
}
}
+
+/// Consumes the remaining command-line arguments, bailing out with an error if
+/// any were left unparsed (pico-args 0.4's `finish` returns leftovers instead
+/// of erroring, so the check is done explicitly here).
+fn finish_args(args: Arguments) -> Result<()> {
+    if !args.finish().is_empty() {
+        bail!("Unused arguments.");
+    }
+    Ok(())
+}
//! errors.
use std::{
+ env,
path::PathBuf,
time::{SystemTime, UNIX_EPOCH},
};
report_metric("total memory", memory.allocated.megabytes() as u64, "MB");
}
+ if env::var("RA_COUNT").is_ok() {
+ eprintln!("{}", profile::countme::get_all());
+ }
+
if self.memory_usage && verbosity.is_verbose() {
print_memory_usage(host, vfs);
}
RustfmtConfig::Rustfmt { extra_args } => {
let mut cmd = process::Command::new(toolchain::rustfmt());
cmd.args(extra_args);
+ // try to chdir to the file so we can respect `rustfmt.toml`
+ // FIXME: use `rustfmt --config-path` once
+ // https://github.com/rust-lang/rustfmt/issues/4660 gets fixed
+ match params.text_document.uri.to_file_path() {
+ Ok(mut path) => {
+ // pop off file name
+ if path.pop() && path.is_dir() {
+ cmd.current_dir(path);
+ }
+ }
+ Err(_) => {
+ log::error!(
+ "Unable to get file path for {}, rustfmt.toml might be ignored",
+ params.text_document.uri
+ );
+ }
+ }
if let Some(&crate_id) = crate_ids.first() {
// Assume all crates are in the same edition
let edition = snap.analysis.crate_edition(crate_id)?;
use ide_db::base_db::FileRange;
use rustc_hash::FxHashMap;
use std::{cell::Cell, iter::Peekable};
-use syntax::ast::{AstNode, AstToken};
use syntax::{ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken};
+use syntax::{
+ ast::{AstNode, AstToken},
+ SmolStr,
+};
use test_utils::mark;
// Creates a match error. If we're currently attempting to match some code that we thought we were
code: &SyntaxNode,
) -> Result<(), MatchFailed> {
// Build a map keyed by field name.
- let mut fields_by_name = FxHashMap::default();
+ let mut fields_by_name: FxHashMap<SmolStr, SyntaxNode> = FxHashMap::default();
for child in code.children() {
if let Some(record) = ast::RecordExprField::cast(child.clone()) {
if let Some(name) = record.field_name() {
- fields_by_name.insert(name.text().clone(), child.clone());
+ fields_by_name.insert(name.text().into(), child.clone());
}
}
}
}
SyntaxElement::Node(n) => {
if let Some(first_token) = n.first_token() {
- if Some(first_token.text().as_str())
- == next_pattern_token.as_deref()
- {
+ if Some(first_token.text()) == next_pattern_token.as_deref() {
if let Some(SyntaxElement::Node(p)) = pattern.next() {
// We have a subtree that starts with the next token in our pattern.
self.attempt_match_token_tree(phase, &p, &n)?;
);
}
} else {
- self.out.push_str(token.text().as_str());
+ self.out.push_str(token.text());
}
}
None,
|_ty, assoc_item| {
let item_name = assoc_item.name(self.scope.db)?;
- if item_name.to_string().as_str() == name.text().as_str() {
+ if item_name.to_string().as_str() == name.text() {
Some(hir::PathResolution::AssocItem(assoc_item))
} else {
None
[dependencies]
itertools = "0.10.0"
-rowan = "0.10.3"
-rustc_lexer = { version = "697.0.0", package = "rustc-ap-rustc_lexer" }
+rowan = "0.12"
+rustc_lexer = { version = "700.0.0", package = "rustc-ap-rustc_lexer" }
rustc-hash = "1.1.0"
arrayvec = "0.5.1"
once_cell = "1.3.1"
indexmap = "1.4.0"
-# This crate transitively depends on `smol_str` via `rowan`.
-# ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here
-# to reduce number of compilations
smol_str = { version = "0.1.15", features = ["serde"] }
serde = { version = "1.0.106", features = ["derive"] }
fmt,
hash::BuildHasherDefault,
ops::{self, RangeInclusive},
+ ptr,
};
use indexmap::IndexMap;
&& lhs.text_range().len() == rhs.text_range().len()
&& match (&lhs, &rhs) {
(NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
- lhs.green() == rhs.green() || lhs.text() == rhs.text()
+ ptr::eq(lhs.green(), rhs.green()) || lhs.text() == rhs.text()
}
(NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
_ => false,
fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
match element {
- NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()),
+ NodeOrToken::Node(it) => NodeOrToken::Node(it.green().to_owned()),
NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
}
}
fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
match element {
- NodeOrToken::Node(it) => it.green().clone().into(),
+ NodeOrToken::Node(it) => it.green().to_owned().into(),
NodeOrToken::Token(it) => it.green().clone().into(),
}
}
use crate::{
syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
- SmolStr, SyntaxKind,
+ SyntaxKind,
};
pub use self::{
fn syntax(&self) -> &SyntaxToken;
- fn text(&self) -> &SmolStr {
+ fn text(&self) -> &str {
self.syntax().text()
}
}
}
fn unroot(n: SyntaxNode) -> SyntaxNode {
- SyntaxNode::new_root(n.green().clone())
+ SyntaxNode::new_root(n.green().to_owned())
}
pub mod tokens {
.syntax()
.descendants_with_tokens()
.filter_map(|it| it.into_token())
- .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
+ .find(|it| it.kind() == WHITESPACE && it.text() == " ")
.unwrap()
}
.syntax()
.descendants_with_tokens()
.filter_map(|it| it.into_token())
- .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
+ .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
.unwrap()
}
.syntax()
.descendants_with_tokens()
.filter_map(|it| it.into_token())
- .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n\n")
+ .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
.unwrap()
}
};
impl ast::Lifetime {
- pub fn text(&self) -> &SmolStr {
+ pub fn text(&self) -> &str {
text_of_first_token(self.syntax())
}
}
impl ast::Name {
- pub fn text(&self) -> &SmolStr {
+ pub fn text(&self) -> &str {
text_of_first_token(self.syntax())
}
}
impl ast::NameRef {
- pub fn text(&self) -> &SmolStr {
+ pub fn text(&self) -> &str {
text_of_first_token(self.syntax())
}
}
}
-fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
+fn text_of_first_token(node: &SyntaxNode) -> &str {
node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
}
pub fn simple_name(&self) -> Option<SmolStr> {
let path = self.path()?;
match (path.segment(), path.qualifier()) {
- (Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()),
+ (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
_ => None,
}
}
match kind {
CommentKind { shape, doc: Some(_) } => {
let prefix = kind.prefix();
- let text = &self.text().as_str()[prefix.len()..];
+ let text = &self.text()[prefix.len()..];
let ws = text.chars().next().filter(|c| c.is_whitespace());
let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]);
match shape {
pub fn value(&self) -> Option<Cow<'_, str>> {
if self.is_raw() {
- let text = self.text().as_str();
+ let text = self.text();
let text =
&text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
return Some(Cow::Borrowed(text));
}
- let text = self.text().as_str();
+ let text = self.text();
let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
let mut buf = String::new();
}
pub fn quote_offsets(&self) -> Option<QuoteOffsets> {
- let text = self.text().as_str();
+ let text = self.text();
let offsets = QuoteOffsets::new(text)?;
let o = self.syntax().text_range().start();
let offsets = QuoteOffsets {
fn char_ranges(
&self,
) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
- let text = self.text().as_str();
+ let text = self.text();
let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
pub fn value(&self) -> Option<u128> {
let token = self.syntax();
- let mut text = token.text().as_str();
+ let mut text = token.text();
if let Some(suffix) = self.suffix() {
text = &text[..text.len() - suffix.len()]
}
};
pub use parser::{SyntaxKind, T};
pub use rowan::{
- Direction, GreenNode, NodeOrToken, SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset,
- WalkEvent,
+ Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent,
};
+pub use smol_str::SmolStr;
/// `Parse` is the result of the parsing: a syntax tree and a collection of
/// errors.
new_text.pop();
}
- let new_token =
- GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into());
+ let new_token = GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), &new_text);
Some((
prev_token.replace_with(new_token),
new_err.into_iter().collect(),
ast,
parsing::Token,
syntax_node::GreenNode,
- SmolStr, SyntaxError,
+ SyntaxError,
SyntaxKind::{self, *},
SyntaxTreeBuilder, TextRange, TextSize,
};
fn do_token(&mut self, kind: SyntaxKind, len: TextSize, n_tokens: usize) {
let range = TextRange::at(self.text_pos, len);
- let text: SmolStr = self.text[range].into();
+ let text = &self.text[range];
self.text_pos += len;
self.token_pos += n_tokens;
self.inner.token(kind, text);
use rowan::{GreenNodeBuilder, Language};
-use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize};
+use crate::{Parse, SyntaxError, SyntaxKind, TextSize};
pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken};
Parse::new(green, errors)
}
- pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {
+ pub fn token(&mut self, kind: SyntaxKind, text: &str) {
let kind = RustLanguage::kind_to_raw(kind);
self.inner.token(kind, text)
}
}
let token = literal.token();
- let text = token.text().as_str();
+ let text = token.text();
// FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
In particular, I have `export RA_PROFILE='*>10'` in my shell profile.
+We also have a "counting" profiler which counts number of instances of popular structs.
+It is enabled by `RA_COUNT=1`.
+
To measure time for from-scratch analysis, use something like this:
```
It is explicitly OK for a reviewer to flag only some nits in the PR, and then send a follow-up cleanup PR for things which are easier to explain by example, cc-ing the original author.
Sending small cleanup PRs (like renaming a single local variable) is encouraged.
+When reviewing pull requests prefer extending this document to leaving
+non-reusable comments on the pull request itself.
+
# General
## Scale of Changes
Formatting ensures that you can use your editor's "number of selected characters" feature to correlate offsets with test's source code.
+## Marked Tests
+
+Use
+[`mark::hit! / mark::check!`](https://github.com/rust-analyzer/rust-analyzer/blob/71fe719dd5247ed8615641d9303d7ca1aa201c2f/crates/test_utils/src/mark.rs)
+when testing specific conditions.
+Do not place several marks into a single test or condition.
+Do not reuse marks between several tests.
+
+**Rationale:** marks provide an easy way to find the canonical test for each bit of code.
+This makes the relationship between code and tests much easier to understand.
+
## Function Preconditions
Express function preconditions in types and force the caller to provide them (rather than checking in callee):
**Rationale:** less typing in the common case, uniformity.
+Use `Vec::new` rather than `vec![]`. **Rationale:** uniformity, strength
+reduction.
+
## Functions Over Objects
Avoid creating "doer" objects.
Runtime performance obeys 80%/20% rule -- only a small fraction of code is hot.
Compile time **does not** obey this rule -- all code has to be compiled.
+## Appropriate String Types
+
+When interfacing with OS APIs, use `OsString`, even if the original source of data is utf-8 encoded.
+**Rationale:** cleanly delineates the boundary when the data goes into the OS-land.
+
+Use `AbsPathBuf` and `AbsPath` over `std::Path`.
+**Rationale:** rust-analyzer is a long-lived process which handles several projects at the same time.
+It is important not to leak cwd by accident.
# Premature Pessimization
**Rationale:** reveals the costs.
It is also more efficient when the caller already owns the allocation.
-## Collection types
+## Collection Types
Prefer `rustc_hash::FxHashMap` and `rustc_hash::FxHashSet` instead of the ones in `std::collections`.
**Rationale:** they use a hasher that's significantly faster and using them consistently will reduce code size by some small amount.
+## Avoid Intermediate Collections
+
+When writing a recursive function to compute a set of things, use an accumulator parameter instead of returning a fresh collection.
+Accumulator goes first in the list of arguments.
+
+```rust
+// GOOD
+pub fn reachable_nodes(node: Node) -> FxHashSet<Node> {
+ let mut res = FxHashSet::default();
+ go(&mut res, node);
+ res
+}
+fn go(acc: &mut FxHashSet<Node>, node: Node) {
+ acc.insert(node);
+ for n in node.neighbors() {
+ go(acc, n);
+ }
+}
+
+// BAD
+pub fn reachable_nodes(node: Node) -> FxHashSet<Node> {
+ let mut res = FxHashSet::default();
+ res.insert(node);
+ for n in node.neighbors() {
+ res.extend(reachable_nodes(n));
+ }
+ res
+}
+```
+
+**Rationale:** re-uses allocations, and the accumulator style is more concise for complex cases.
+
# Style
## Order of Imports
[dependencies]
anyhow = "1.0.26"
flate2 = "1.0"
-pico-args = "0.3.1"
+pico-args = "0.4.0"
proc-macro2 = "1.0.8"
quote = "1.0.2"
ungrammar = "1.9"
use crate::{
codegen::{self, extract_comment_blocks_with_empty_lines, reformat, Location, Mode, PREAMBLE},
- project_root, rust_files, Result,
+ project_root, rust_files_in, Result,
};
pub fn generate_assists_tests(mode: Mode) -> Result<()> {
impl Assist {
fn collect() -> Result<Vec<Assist>> {
let mut res = Vec::new();
- for path in rust_files(&project_root().join("crates/assists/src/handlers")) {
+ for path in rust_files_in(&project_root().join("crates/assists/src/handlers")) {
collect_file(&mut res, path.as_path())?;
}
res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
impl Diagnostic {
fn collect() -> Result<Vec<Diagnostic>> {
let mut res = Vec::new();
- for path in rust_files(&project_root()) {
+ for path in rust_files() {
collect_file(&mut res, path)?;
}
res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
impl Feature {
fn collect() -> Result<Vec<Feature>> {
let mut res = Vec::new();
- for path in rust_files(&project_root()) {
+ for path in rust_files() {
collect_file(&mut res, path)?;
}
res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
.to_path_buf()
}
-pub fn rust_files(path: &Path) -> impl Iterator<Item = PathBuf> {
+pub fn rust_files() -> impl Iterator<Item = PathBuf> {
+ rust_files_in(&project_root().join("crates"))
+}
+
+pub fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> {
let iter = WalkDir::new(path);
return iter
.into_iter()
use std::env;
+use anyhow::bail;
use codegen::CodegenCmd;
use pico_args::Arguments;
use xshell::{cmd, cp, pushd};
let client_opt = args.opt_value_from_str("--client")?;
- args.finish()?;
+ finish_args(args)?;
InstallCmd {
client: if server { None } else { Some(client_opt.unwrap_or_default()) },
}
"codegen" => {
let features = args.contains("--features");
- args.finish()?;
+ finish_args(args)?;
CodegenCmd { features }.run()
}
"format" => {
- args.finish()?;
+ finish_args(args)?;
run_rustfmt(Mode::Overwrite)
}
"install-pre-commit-hook" => {
- args.finish()?;
+ finish_args(args)?;
pre_commit::install_hook()
}
"lint" => {
- args.finish()?;
+ finish_args(args)?;
run_clippy()
}
"fuzz-tests" => {
- args.finish()?;
+ finish_args(args)?;
run_fuzzer()
}
"pre-cache" => {
- args.finish()?;
+ finish_args(args)?;
PreCacheCmd.run()
}
"release" => {
let dry_run = args.contains("--dry-run");
- args.finish()?;
+ finish_args(args)?;
ReleaseCmd { dry_run }.run()
}
"promote" => {
let dry_run = args.contains("--dry-run");
- args.finish()?;
+ finish_args(args)?;
PromoteCmd { dry_run }.run()
}
"dist" => {
let nightly = args.contains("--nightly");
let client_version: Option<String> = args.opt_value_from_str("--client")?;
- args.finish()?;
+ finish_args(args)?;
DistCmd { nightly, client_version }.run()
}
"metrics" => {
let dry_run = args.contains("--dry-run");
- args.finish()?;
+ finish_args(args)?;
MetricsCmd { dry_run }.run()
}
"bb" => {
- let suffix: String = args.free_from_str()?.unwrap();
- args.finish()?;
+ let suffix: String = args.free_from_str()?;
+ finish_args(args)?;
cmd!("cargo build --release").run()?;
cp("./target/release/rust-analyzer", format!("./target/rust-analyzer-{}", suffix))?;
Ok(())
}
}
}
+
+/// Consumes the remaining command-line arguments, bailing out with an error if
+/// any were left unparsed (pico-args 0.4's `finish` returns leftovers instead
+/// of erroring, so the check is done explicitly here).
+fn finish_args(args: Arguments) -> Result<()> {
+    if !args.finish().is_empty() {
+        bail!("Unused arguments.");
+    }
+    Ok(())
+}
#[test]
fn rust_files_are_tidy() {
let mut tidy_docs = TidyDocs::default();
- for path in rust_files(&project_root().join("crates")) {
+ for path in rust_files() {
let text = read_file(&path).unwrap();
check_todo(&path, &text);
check_dbg(&path, &text);