#[serde(flatten)]
msg: RawMessage,
}
- let text = to_string(&JsonRpc {
- jsonrpc: "2.0",
- msg: self,
- })?;
+ let text = to_string(&JsonRpc { jsonrpc: "2.0", msg: self })?;
write_msg_text(w, &text)?;
Ok(())
}
R: Request,
R::Params: serde::Serialize,
{
- RawRequest {
- id,
- method: R::METHOD.to_string(),
- params: to_value(params).unwrap(),
- }
+ RawRequest { id, method: R::METHOD.to_string(), params: to_value(params).unwrap() }
}
pub fn cast<R>(self) -> ::std::result::Result<(u64, R::Params), RawRequest>
where
R: Request,
R::Result: serde::Serialize,
{
- RawResponse {
- id,
- result: Some(to_value(&result).unwrap()),
- error: None,
- }
+ RawResponse { id, result: Some(to_value(&result).unwrap()), error: None }
}
pub fn err(id: u64, code: i32, message: String) -> RawResponse {
- let error = RawResponseError {
- code,
- message,
- data: None,
- };
- RawResponse {
- id,
- result: None,
- error: Some(error),
- }
+ let error = RawResponseError { code, message, data: None };
+ RawResponse { id, result: None, error: Some(error) }
}
}
N: Notification,
N::Params: serde::Serialize,
{
- RawNotification {
- method: N::METHOD.to_string(),
- params: to_value(params).unwrap(),
- }
+ RawNotification { method: N::METHOD.to_string(), params: to_value(params).unwrap() }
}
pub fn is<N>(&self) -> bool
where
}
let mut parts = buf.splitn(2, ": ");
let header_name = parts.next().unwrap();
- let header_value = parts
- .next()
- .ok_or_else(|| format_err!("malformed header: {:?}", buf))?;
+ let header_value =
+ parts.next().ok_or_else(|| format_err!("malformed header: {:?}", buf))?;
if header_name == "Content-Length" {
size = Some(header_value.parse::<usize>()?);
}
let writer = thread::spawn(move || {
let stdout = stdout();
let mut stdout = stdout.lock();
- writer_receiver
- .into_iter()
- .try_for_each(|it| it.write(&mut stdout))?;
+ writer_receiver.into_iter().try_for_each(|it| it.write(&mut stdout))?;
Ok(())
});
let (reader_sender, reader_receiver) = bounded::<RawMessage>(16);
impl<ID: ArenaId, T: fmt::Debug> fmt::Debug for Arena<ID, T> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- fmt.debug_struct("Arena")
- .field("len", &self.len())
- .field("data", &self.data)
- .finish()
+ fmt.debug_struct("Arena").field("len", &self.len()).field("data", &self.data).finish()
}
}
ID::from_raw(id)
}
pub fn iter<'a>(&'a self) -> impl Iterator<Item = (ID, &'a T)> {
- self.data
- .iter()
- .enumerate()
- .map(|(idx, value)| (ID::from_raw(RawId(idx as u32)), value))
+ self.data.iter().enumerate().map(|(idx, value)| (ID::from_raw(RawId(idx as u32)), value))
}
}
impl<ID: ArenaId, T> Default for Arena<ID, T> {
fn default() -> Arena<ID, T> {
- Arena {
- data: Vec::new(),
- _ty: PhantomData,
- }
+ Arena { data: Vec::new(), _ty: PhantomData }
}
}
where
I: IntoIterator<Item = T>,
{
- Arena {
- data: Vec::from_iter(iter),
- _ty: PhantomData,
- }
+ Arena { data: Vec::from_iter(iter), _ty: PhantomData }
}
}
}
pub fn iter(&self) -> impl Iterator<Item = (ID, &T)> {
- self.v
- .iter()
- .enumerate()
- .filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_ref()?)))
+ self.v.iter().enumerate().filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_ref()?)))
}
fn to_idx(id: ID) -> usize {
impl<ID, T> Default for ArenaMap<ID, T> {
fn default() -> Self {
- ArenaMap {
- v: Vec::new(),
- _ty: PhantomData,
- }
+ ArenaMap { v: Vec::new(), _ty: PhantomData }
}
}
// Insert `derive` after doc comments.
fn derive_insertion_offset(nominal: &ast::NominalDef) -> Option<TextUnit> {
- let non_ws_child = nominal
- .syntax()
- .children()
- .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?;
+ let non_ws_child =
+ nominal.syntax().children().find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?;
Some(non_ws_child.range().start())
}
buf.push_str(" ");
buf.push_str(name.text().as_str());
if let Some(type_params) = type_params {
- let lifetime_params = type_params
- .lifetime_params()
- .filter_map(|it| it.lifetime())
- .map(|it| it.text());
- let type_params = type_params
- .type_params()
- .filter_map(|it| it.name())
- .map(|it| it.text());
- join(lifetime_params.chain(type_params))
- .surround_with("<", ">")
- .to_buf(&mut buf);
+ let lifetime_params =
+ type_params.lifetime_params().filter_map(|it| it.lifetime()).map(|it| it.text());
+ let type_params =
+ type_params.type_params().filter_map(|it| it.name()).map(|it| it.text());
+ join(lifetime_params.chain(type_params)).surround_with("<", ">").to_buf(&mut buf);
}
buf.push_str(" {\n");
edit.set_cursor(start_offset + TextUnit::of_str(&buf));
#[test]
fn test_add_impl() {
- check_assist(
- add_impl,
- "struct Foo {<|>}\n",
- "struct Foo {}\n\nimpl Foo {\n<|>\n}\n",
- );
+ check_assist(add_impl, "struct Foo {<|>}\n", "struct Foo {}\n\nimpl Foo {\n<|>\n}\n");
check_assist(
add_impl,
"struct Foo<T: Clone> {<|>}",
F: FnOnce(AssistCtx<DB>) -> T,
{
let source_file = &db.parse(frange.file_id);
- let ctx = AssistCtx {
- db,
- frange,
- source_file,
- should_compute_edit,
- };
+ let ctx = AssistCtx { db, frange, source_file, should_compute_edit };
f(ctx)
}
label: impl Into<String>,
f: impl FnOnce(&mut AssistBuilder),
) -> Option<Assist> {
- let label = AssistLabel {
- label: label.into(),
- };
+ let label = AssistLabel { label: label.into() };
if !self.should_compute_edit {
return Some(Assist::Unresolved(label));
}
}
fn build(self) -> AssistAction {
- AssistAction {
- edit: self.edit.finish(),
- cursor_position: self.cursor_position,
- }
+ AssistAction { edit: self.edit.finish(), cursor_position: self.cursor_position }
}
}
#[test]
fn change_visibility_adds_pub_crate_to_items() {
- check_assist(
- change_visibility,
- "<|>fn foo() {}",
- "<|>pub(crate) fn foo() {}",
- );
- check_assist(
- change_visibility,
- "f<|>n foo() {}",
- "<|>pub(crate) fn foo() {}",
- );
- check_assist(
- change_visibility,
- "<|>struct Foo {}",
- "<|>pub(crate) struct Foo {}",
- );
- check_assist(
- change_visibility,
- "<|>mod foo {}",
- "<|>pub(crate) mod foo {}",
- );
- check_assist(
- change_visibility,
- "<|>trait Foo {}",
- "<|>pub(crate) trait Foo {}",
- );
+ check_assist(change_visibility, "<|>fn foo() {}", "<|>pub(crate) fn foo() {}");
+ check_assist(change_visibility, "f<|>n foo() {}", "<|>pub(crate) fn foo() {}");
+ check_assist(change_visibility, "<|>struct Foo {}", "<|>pub(crate) struct Foo {}");
+ check_assist(change_visibility, "<|>mod foo {}", "<|>pub(crate) mod foo {}");
+ check_assist(change_visibility, "<|>trait Foo {}", "<|>pub(crate) trait Foo {}");
check_assist(change_visibility, "m<|>od {}", "<|>pub(crate) mod {}");
check_assist(
change_visibility,
#[test]
fn change_visibility_pub_to_pub_crate() {
- check_assist(
- change_visibility,
- "<|>pub fn foo() {}",
- "<|>pub(crate) fn foo() {}",
- )
+ check_assist(change_visibility, "<|>pub fn foo() {}", "<|>pub(crate) fn foo() {}")
}
#[test]
fn change_visibility_pub_crate_to_pub() {
- check_assist(
- change_visibility,
- "<|>pub(crate) fn foo() {}",
- "<|>pub fn foo() {}",
- )
+ check_assist(change_visibility, "<|>pub(crate) fn foo() {}", "<|>pub fn foo() {}")
}
#[test]
let node_expr = syntax_mapping.node_expr(expr)?;
let match_expr_ty = infer_result[node_expr].clone();
let enum_def = match match_expr_ty {
- Ty::Adt {
- def_id: AdtDef::Enum(e),
- ..
- } => e,
+ Ty::Adt { def_id: AdtDef::Enum(e), .. } => e,
_ => return None,
};
let enum_name = enum_def.name(ctx.db)?;
return Some((node, false));
}
- if let Some(expr) = node
- .parent()
- .and_then(ast::Block::cast)
- .and_then(|it| it.expr())
- {
+ if let Some(expr) = node.parent().and_then(ast::Block::cast).and_then(|it| it.expr()) {
if expr.syntax() == node {
return Some((node, false));
}
}
fn non_trivia_sibling(node: &SyntaxNode, direction: Direction) -> Option<&SyntaxNode> {
- node.siblings(direction)
- .skip(1)
- .find(|node| !node.kind().is_trivia())
+ node.siblings(direction).skip(1).find(|node| !node.kind().is_trivia())
}
#[cfg(test)]
) {
let (before_cursor_pos, before) = extract_offset(before);
let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
- let frange = FileRange {
- file_id,
- range: TextRange::offset_len(before_cursor_pos, 0.into()),
- };
+ let frange =
+ FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
let assist =
AssistCtx::with_ctx(&db, frange, true, assist).expect("code action is not applicable");
let action = match assist {
) {
let (before_cursor_pos, before) = extract_offset(before);
let (db, _source_root, file_id) = MockDatabase::with_single_file(&before);
- let frange = FileRange {
- file_id,
- range: TextRange::offset_len(before_cursor_pos, 0.into()),
- };
+ let frange =
+ FileRange { file_id, range: TextRange::offset_len(before_cursor_pos, 0.into()) };
let assist = AssistCtx::with_ctx(&db, frange, true, assist);
assert!(assist.is_none());
}
) -> String {
let mut buf = String::new();
buf.push_str(&format!("match {} {{\n", expr.syntax().text()));
- buf.push_str(&format!(
- " {} => {}\n",
- pat1.syntax().text(),
- format_arm(arm1)
- ));
+ buf.push_str(&format!(" {} => {}\n", pat1.syntax().text(), format_arm(arm1)));
buf.push_str(&format!(" _ => {}\n", format_arm(arm2)));
buf.push_str("}");
buf
use crate::{AssistCtx, Assist};
pub(crate) fn split_import(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> {
- let colon_colon = ctx
- .leaf_at_offset()
- .find(|leaf| leaf.kind() == COLONCOLON)?;
+ let colon_colon = ctx.leaf_at_offset().find(|leaf| leaf.kind() == COLONCOLON)?;
let path = colon_colon.parent().and_then(ast::Path::cast)?;
let top_path = generate(Some(path), |it| it.parent_path()).last()?;
.setting(clap::AppSettings::SubcommandRequiredElseHelp)
.subcommand(
SubCommand::with_name("render-test")
- .arg(
- Arg::with_name("line")
- .long("--line")
- .required(true)
- .takes_value(true),
- )
- .arg(
- Arg::with_name("file")
- .long("--file")
- .required(true)
- .takes_value(true),
- ),
+ .arg(Arg::with_name("line").long("--line").required(true).takes_value(true))
+ .arg(Arg::with_name("file").long("--file").required(true).takes_value(true)),
)
.subcommand(SubCommand::with_name("parse").arg(Arg::with_name("no-dump").long("--no-dump")))
.subcommand(SubCommand::with_name("symbols"))
.iter()
.map(|r| (1 + u32::from(r.start()), 1 + u32::from(r.end())))
.map(|(s, e)| format!("({} {})", s, e));
- join(ranges)
- .separator(" ")
- .surround_with("(", ")")
- .to_string()
+ join(ranges).separator(" ").surround_with("(", ")").to_string()
}
impl CrateData {
fn new(file_id: FileId) -> CrateData {
- CrateData {
- file_id,
- dependencies: Vec::new(),
- }
+ CrateData { file_id, dependencies: Vec::new() }
}
fn add_dep(&mut self, name: SmolStr, crate_id: CrateId) {
self.arena[&crate_id].file_id
}
pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
- let (&crate_id, _) = self
- .arena
- .iter()
- .find(|(_crate_id, data)| data.file_id == file_id)?;
+ let (&crate_id, _) = self.arena.iter().find(|(_crate_id, data)| data.file_id == file_id)?;
Some(crate_id)
}
pub fn dependencies<'a>(
let crate1 = graph.add_crate_root(FileId(1u32));
let crate2 = graph.add_crate_root(FileId(2u32));
let crate3 = graph.add_crate_root(FileId(3u32));
- assert!(graph
- .add_dep(crate1, SmolStr::new("crate2"), crate2)
- .is_ok());
- assert!(graph
- .add_dep(crate2, SmolStr::new("crate3"), crate3)
- .is_ok());
- assert!(graph
- .add_dep(crate3, SmolStr::new("crate1"), crate1)
- .is_err());
+ assert!(graph.add_dep(crate1, SmolStr::new("crate2"), crate2).is_ok());
+ assert!(graph.add_dep(crate2, SmolStr::new("crate3"), crate3).is_ok());
+ assert!(graph.add_dep(crate3, SmolStr::new("crate1"), crate1).is_err());
}
#[test]
let crate1 = graph.add_crate_root(FileId(1u32));
let crate2 = graph.add_crate_root(FileId(2u32));
let crate3 = graph.add_crate_root(FileId(3u32));
- assert!(graph
- .add_dep(crate1, SmolStr::new("crate2"), crate2)
- .is_ok());
- assert!(graph
- .add_dep(crate2, SmolStr::new("crate3"), crate3)
- .is_ok());
+ assert!(graph.add_dep(crate1, SmolStr::new("crate2"), crate2).is_ok());
+ assert!(graph.add_dep(crate2, SmolStr::new("crate3"), crate3).is_ok());
}
}
fn source_root_crates(db: &impl SourceDatabase, id: SourceRootId) -> Arc<Vec<CrateId>> {
let root = db.source_root(id);
let graph = db.crate_graph();
- let res = root
- .files
- .values()
- .filter_map(|&it| graph.crate_id_for_crate_root(it))
- .collect::<Vec<_>>();
+ let res =
+ root.files.values().filter_map(|&it| graph.crate_id_for_crate_root(it)).collect::<Vec<_>>();
Arc::new(res)
}
LOC: Clone + Eq + Hash,
{
fn default() -> Self {
- Loc2IdMap {
- id2loc: Arena::default(),
- loc2id: FxHashMap::default(),
- }
+ Loc2IdMap { id2loc: Arena::default(), loc2id: FxHashMap::default() }
}
}
LOC: Clone + Eq + Hash,
{
fn default() -> Self {
- LocationIntener {
- map: Default::default(),
- }
+ LocationIntener { map: Default::default() }
}
}
}
fn variants(enum_def: &ast::EnumDef) -> impl Iterator<Item = &ast::EnumVariant> {
- enum_def
- .variant_list()
- .into_iter()
- .flat_map(|it| it.variants())
+ enum_def.variant_list().into_iter().flat_map(|it| it.variants())
}
impl EnumVariant {
(file_id, var)
}
pub(crate) fn variant_data(&self, db: &impl PersistentHirDatabase) -> Arc<VariantData> {
- db.enum_data(self.parent).variants[self.id]
- .variant_data
- .clone()
+ db.enum_data(self.parent).variants[self.id].variant_data.clone()
}
}
};
let field_sources = match struct_flavor {
- ast::StructFlavor::Tuple(fl) => fl
- .fields()
- .map(|it| FieldSource::Pos(it.to_owned()))
- .collect(),
- ast::StructFlavor::Named(fl) => fl
- .fields()
- .map(|it| FieldSource::Named(it.to_owned()))
- .collect(),
+ ast::StructFlavor::Tuple(fl) => {
+ fl.fields().map(|it| FieldSource::Pos(it.to_owned())).collect()
+ }
+ ast::StructFlavor::Named(fl) => {
+ fl.fields().map(|it| FieldSource::Named(it.to_owned())).collect()
+ }
ast::StructFlavor::Unit => Vec::new(),
};
let field = field_sources
Trait(Trait),
Type(Type),
}
-impl_froms!(
- ModuleDef: Module,
- Function,
- Struct,
- Enum,
- EnumVariant,
- Const,
- Static,
- Trait,
- Type
-);
+impl_froms!(ModuleDef: Module, Function, Struct, Enum, EnumVariant, Const, Static, Trait, Type);
pub enum ModuleSource {
SourceFile(TreeArc<ast::SourceFile>),
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub enum Problem {
- UnresolvedModule {
- candidate: RelativePathBuf,
- },
- NotDirOwner {
- move_to: RelativePathBuf,
- candidate: RelativePathBuf,
- },
+ UnresolvedModule { candidate: RelativePathBuf },
+ NotDirOwner { move_to: RelativePathBuf, candidate: RelativePathBuf },
}
impl Module {
impl Docs for Module {
fn docs(&self, db: &impl HirDatabase) -> Option<Documentation> {
- self.declaration_source(db)
- .and_then(|it| docs_from_ast(&*it.1))
+ self.declaration_source(db).and_then(|it| docs_from_ast(&*it.1))
}
}
impl StructField {
pub fn name(&self, db: &impl HirDatabase) -> Name {
- self.parent.variant_data(db).fields().unwrap()[self.id]
- .name
- .clone()
+ self.parent.variant_data(db).fields().unwrap()[self.id].name.clone()
}
pub fn source(&self, db: &impl PersistentHirDatabase) -> (HirFileId, FieldSource) {
.fields()
.into_iter()
.flat_map(|it| it.iter())
- .map(|(id, _)| StructField {
- parent: (*self).into(),
- id,
- })
+ .map(|(id, _)| StructField { parent: (*self).into(), id })
.collect()
}
.into_iter()
.flat_map(|it| it.iter())
.find(|(_id, data)| data.name == *name)
- .map(|(id, _)| StructField {
- parent: (*self).into(),
- id,
- })
+ .map(|(id, _)| StructField { parent: (*self).into(), id })
}
pub fn generic_params(&self, db: &impl PersistentHirDatabase) -> Arc<GenericParams> {
let r = self.module(db).resolver(db);
// ...and add generic params, if present
let p = self.generic_params(db);
- let r = if !p.params.is_empty() {
- r.push_generic_params_scope(p)
- } else {
- r
- };
+ let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
r
}
}
let r = self.module(db).resolver(db);
// ...and add generic params, if present
let p = self.generic_params(db);
- let r = if !p.params.is_empty() {
- r.push_generic_params_scope(p)
- } else {
- r
- };
+ let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
r
}
}
.fields()
.into_iter()
.flat_map(|it| it.iter())
- .map(|(id, _)| StructField {
- parent: (*self).into(),
- id,
- })
+ .map(|(id, _)| StructField { parent: (*self).into(), id })
.collect()
}
.into_iter()
.flat_map(|it| it.iter())
.find(|(_id, data)| data.name == *name)
- .map(|(id, _)| StructField {
- parent: (*self).into(),
- id,
- })
+ .map(|(id, _)| StructField { parent: (*self).into(), id })
}
}
pub fn scopes(&self, db: &impl HirDatabase) -> ScopesWithSyntaxMapping {
let scopes = db.expr_scopes(*self);
let syntax_mapping = db.body_syntax_mapping(*self);
- ScopesWithSyntaxMapping {
- scopes,
- syntax_mapping,
- }
+ ScopesWithSyntaxMapping { scopes, syntax_mapping }
}
pub fn signature(&self, db: &impl HirDatabase) -> Arc<FnSignature> {
.unwrap_or_else(|| self.module(db).resolver(db));
// ...and add generic params, if present
let p = self.generic_params(db);
- let r = if !p.params.is_empty() {
- r.push_generic_params_scope(p)
- } else {
- r
- };
+ let r = if !p.params.is_empty() { r.push_generic_params_scope(p) } else { r };
r
}
}
func: Function,
) -> Arc<FnSignature> {
let (_, node) = func.source(db);
- let name = node
- .name()
- .map(|n| n.as_name())
- .unwrap_or_else(Name::missing);
+ let name = node.name().map(|n| n.as_name()).unwrap_or_else(Name::missing);
let mut params = Vec::new();
let mut has_self_param = false;
if let Some(param_list) = node.param_list() {
TypeRef::unit()
};
- let sig = FnSignature {
- name,
- params,
- ret_type,
- has_self_param,
- };
+ let sig = FnSignature { name, params, ret_type, has_self_param };
Arc::new(sig)
}
}
crate_graph
.dependencies(self.crate_id)
.map(|dep| {
- let krate = Crate {
- crate_id: dep.crate_id(),
- };
+ let krate = Crate { crate_id: dep.crate_id() };
let name = dep.as_name();
CrateDependency { krate, name }
})
let module_tree = db.module_tree(*self);
let module_id = module_tree.modules().next()?;
- let module = Module {
- krate: *self,
- module_id,
- };
+ let module = Module { krate: *self, module_id };
Some(module)
}
}
impl Module {
fn with_module_id(&self, module_id: ModuleId) -> Module {
- Module {
- module_id,
- krate: self.krate,
- }
+ Module { module_id, krate: self.krate }
}
pub(crate) fn name_impl(&self, db: &impl HirDatabase) -> Option<Name> {
) -> Option<(FileId, TreeArc<ast::Module>)> {
let module_tree = db.module_tree(self.krate);
let link = self.module_id.parent_link(&module_tree)?;
- let file_id = link
- .owner(&module_tree)
- .file_id(&module_tree)
- .as_original_file();
+ let file_id = link.owner(&module_tree).file_id(&module_tree).as_original_file();
let src = link.source(&module_tree, db);
Some((file_id, src))
}
}
pub fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
- self.expr_syntax_mapping
- .get(&SyntaxNodePtr::new(node.syntax()))
- .cloned()
+ self.expr_syntax_mapping.get(&SyntaxNodePtr::new(node.syntax())).cloned()
}
pub fn pat_syntax(&self, pat: PatId) -> Option<SyntaxNodePtr> {
}
pub fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
- self.pat_syntax_mapping
- .get(&SyntaxNodePtr::new(node.syntax()))
- .cloned()
+ self.pat_syntax_mapping.get(&SyntaxNodePtr::new(node.syntax())).cloned()
}
pub fn body(&self) -> &Arc<Body> {
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Statement {
- Let {
- pat: PatId,
- type_ref: Option<TypeRef>,
- initializer: Option<ExprId>,
- },
+ Let { pat: PatId, type_ref: Option<TypeRef>, initializer: Option<ExprId> },
Expr(ExprId),
}
match self {
Expr::Missing => {}
Expr::Path(_) => {}
- Expr::If {
- condition,
- then_branch,
- else_branch,
- } => {
+ Expr::If { condition, then_branch, else_branch } => {
f(*condition);
f(*then_branch);
if let Some(else_branch) = else_branch {
args.iter().map(|pat| *pat).for_each(f);
}
Pat::Ref { pat, .. } => f(*pat),
- Pat::Slice {
- prefix,
- rest,
- suffix,
- } => {
+ Pat::Slice { prefix, rest, suffix } => {
let total_iter = prefix.iter().chain(rest.iter()).chain(suffix.iter());
total_iter.map(|pat| *pat).for_each(f);
}
}
fn empty_block(&mut self) -> ExprId {
- let block = Expr::Block {
- statements: Vec::new(),
- tail: None,
- };
+ let block = Expr::Block { statements: Vec::new(), tail: None };
self.exprs.alloc(block)
}
.unwrap_or_else(|| self.empty_block());
let placeholder_pat = self.pats.alloc(Pat::Missing);
let arms = vec![
- MatchArm {
- pats: vec![pat],
- expr: then_branch,
- guard: None,
- },
- MatchArm {
- pats: vec![placeholder_pat],
- expr: else_branch,
- guard: None,
- },
+ MatchArm { pats: vec![pat], expr: then_branch, guard: None },
+ MatchArm { pats: vec![placeholder_pat], expr: else_branch, guard: None },
];
- self.alloc_expr(
- Expr::Match {
- expr: match_expr,
- arms,
- },
- syntax_ptr,
- )
+ self.alloc_expr(Expr::Match { expr: match_expr, arms }, syntax_ptr)
} else {
let condition = self.collect_expr_opt(e.condition().and_then(|c| c.expr()));
let then_branch = self.collect_block_opt(e.then_branch());
self.collect_expr(expr)
}
});
- self.alloc_expr(
- Expr::If {
- condition,
- then_branch,
- else_branch,
- },
- syntax_ptr,
- )
+ self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr)
}
}
ast::ExprKind::BlockExpr(e) => self.collect_block_opt(e.block()),
let iterable = self.collect_expr_opt(e.iterable());
let pat = self.collect_pat_opt(e.pat());
let body = self.collect_block_opt(e.loop_body());
- self.alloc_expr(
- Expr::For {
- iterable,
- pat,
- body,
- },
- syntax_ptr,
- )
+ self.alloc_expr(Expr::For { iterable, pat, body }, syntax_ptr)
}
ast::ExprKind::CallExpr(e) => {
let callee = self.collect_expr_opt(e.expr());
} else {
Vec::new()
};
- let method_name = e
- .name_ref()
- .map(|nr| nr.as_name())
- .unwrap_or_else(Name::missing);
- self.alloc_expr(
- Expr::MethodCall {
- receiver,
- method_name,
- args,
- },
- syntax_ptr,
- )
+ let method_name = e.name_ref().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
+ self.alloc_expr(Expr::MethodCall { receiver, method_name, args }, syntax_ptr)
}
ast::ExprKind::MatchExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr)
}
ast::ExprKind::PathExpr(e) => {
- let path = e
- .path()
- .and_then(Path::from_ast)
- .map(Expr::Path)
- .unwrap_or(Expr::Missing);
+ let path =
+ e.path().and_then(Path::from_ast).map(Expr::Path).unwrap_or(Expr::Missing);
self.alloc_expr(path, syntax_ptr)
}
ast::ExprKind::ContinueExpr(_e) => {
Vec::new()
};
let spread = e.spread().map(|s| self.collect_expr(s));
- self.alloc_expr(
- Expr::StructLit {
- path,
- fields,
- spread,
- },
- syntax_ptr,
- )
+ self.alloc_expr(Expr::StructLit { path, fields, spread }, syntax_ptr)
}
ast::ExprKind::FieldExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
- let name = e
- .name_ref()
- .map(|nr| nr.as_name())
- .unwrap_or_else(Name::missing);
+ let name = e.name_ref().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
self.alloc_expr(Expr::Field { expr, name }, syntax_ptr)
}
ast::ExprKind::TryExpr(e) => {
}
}
let body = self.collect_expr_opt(e.body());
- self.alloc_expr(
- Expr::Lambda {
- args,
- arg_types,
- body,
- },
- syntax_ptr,
- )
+ self.alloc_expr(Expr::Lambda { args, arg_types, body }, syntax_ptr)
}
ast::ExprKind::BinExpr(e) => {
let lhs = self.collect_expr_opt(e.lhs());
let lit = match child.flavor() {
LiteralFlavor::IntNumber { suffix } => {
- let known_name = suffix
- .map(Name::new)
- .and_then(|name| UncertainIntTy::from_name(&name));
+ let known_name =
+ suffix.map(Name::new).and_then(|name| UncertainIntTy::from_name(&name));
Literal::Int(
Default::default(),
let pat = self.collect_pat_opt(stmt.pat());
let type_ref = stmt.type_ref().map(TypeRef::from_ast);
let initializer = stmt.initializer().map(|e| self.collect_expr(e));
- Statement::Let {
- pat,
- type_ref,
- initializer,
- }
+ Statement::Let { pat, type_ref, initializer }
}
ast::StmtKind::ExprStmt(stmt) => {
Statement::Expr(self.collect_expr_opt(stmt.expr()))
})
.collect();
let tail = block.expr().map(|e| self.collect_expr(e));
- self.alloc_expr(
- Expr::Block { statements, tail },
- SyntaxNodePtr::new(block.syntax()),
- )
+ self.alloc_expr(Expr::Block { statements, tail }, SyntaxNodePtr::new(block.syntax()))
}
fn collect_block_opt(&mut self, block: Option<&ast::Block>) -> ExprId {
fn collect_pat(&mut self, pat: &ast::Pat) -> PatId {
let pattern = match pat.kind() {
ast::PatKind::BindPat(bp) => {
- let name = bp
- .name()
- .map(|nr| nr.as_name())
- .unwrap_or_else(Name::missing);
+ let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing);
let annotation = BindingAnnotation::new(bp.is_mutable(), bp.is_ref());
let subpat = bp.pat().map(|subpat| self.collect_pat(subpat));
- Pat::Bind {
- name,
- mode: annotation,
- subpat,
- }
+ Pat::Bind { name, mode: annotation, subpat }
}
ast::PatKind::TupleStructPat(p) => {
let path = p.path().and_then(Path::from_ast);
ast::PatKind::PlaceholderPat(_) => Pat::Wild,
ast::PatKind::StructPat(p) => {
let path = p.path().and_then(Path::from_ast);
- let field_pat_list = p
- .field_pat_list()
- .expect("every struct should have a field list");
+ let field_pat_list =
+ p.field_pat_list().expect("every struct should have a field list");
let mut fields: Vec<_> = field_pat_list
.bind_pats()
.map(|bind_pat| {
if let Some(param_list) = node.param_list() {
if let Some(self_param) = param_list.self_param() {
let self_param = SyntaxNodePtr::new(
- self_param
- .self_kw()
- .expect("self param without self keyword")
- .syntax(),
+ self_param.self_kw().expect("self param without self keyword").syntax(),
);
let param_pat = self.alloc_pat(
Pat::Bind {
}
fn root_scope(&mut self) -> ScopeId {
- self.scopes.alloc(ScopeData {
- parent: None,
- entries: vec![],
- })
+ self.scopes.alloc(ScopeData { parent: None, entries: vec![] })
}
fn new_scope(&mut self, parent: ScopeId) -> ScopeId {
- self.scopes.alloc(ScopeData {
- parent: Some(parent),
- entries: vec![],
- })
+ self.scopes.alloc(ScopeData { parent: Some(parent), entries: vec![] })
}
fn add_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) {
Pat::Bind { name, .. } => {
// bind can have a subpattern, but it's actually not allowed
// to bind to things in there
- let entry = ScopeEntry {
- name: name.clone(),
- pat,
- };
+ let entry = ScopeEntry { name: name.clone(), pat };
self.scopes[scope].entries.push(entry)
}
p => p.walk_child_pats(|pat| self.add_bindings(body, scope, pat)),
fn add_params_bindings(&mut self, scope: ScopeId, params: &[PatId]) {
let body = Arc::clone(&self.body);
- params
- .iter()
- .for_each(|pat| self.add_bindings(&body, scope, *pat));
+ params.iter().for_each(|pat| self.add_bindings(&body, scope, *pat));
}
fn set_scope(&mut self, node: ExprId, scope: ScopeId) {
impl ScopesWithSyntaxMapping {
fn scope_chain<'a>(&'a self, node: &SyntaxNode) -> impl Iterator<Item = ScopeId> + 'a {
- generate(self.scope_for(node), move |&scope| {
- self.scopes.scopes[scope].parent
- })
+ generate(self.scope_for(node), move |&scope| self.scopes.scopes[scope].parent)
}
pub fn scope_for_offset(&self, offset: TextUnit) -> Option<ScopeId> {
.filter_map(|(id, scope)| Some((self.syntax_mapping.expr_syntax(*id)?, scope)))
// find containing scope
.min_by_key(|(ptr, _scope)| {
- (
- !(ptr.range().start() <= offset && offset <= ptr.range().end()),
- ptr.range().len(),
- )
+ (!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
})
.map(|(ptr, scope)| self.adjust(ptr, *scope, offset))
}
) {
for stmt in statements {
match stmt {
- Statement::Let {
- pat, initializer, ..
- } => {
+ Statement::Let { pat, initializer, .. } => {
if let Some(expr) = initializer {
scopes.set_scope(*expr, scope);
compute_expr_scopes(*expr, body, scopes, scope);
Expr::Block { statements, tail } => {
compute_block_scopes(&statements, *tail, body, scopes, scope);
}
- Expr::For {
- iterable,
- pat,
- body: body_expr,
- } => {
+ Expr::For { iterable, pat, body: body_expr } => {
compute_expr_scopes(*iterable, body, scopes, scope);
let scope = scopes.new_scope(scope);
scopes.add_bindings(body, scope, *pat);
compute_expr_scopes(*body_expr, body, scopes, scope);
}
- Expr::Lambda {
- args,
- body: body_expr,
- ..
- } => {
+ Expr::Lambda { args, body: body_expr, .. } => {
let scope = scopes.new_scope(scope);
scopes.add_params_bindings(scope, &args);
compute_expr_scopes(*body_expr, body, scopes, scope);
let file = SourceFile::parse(&code);
let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
- let irrelevant_function = Function {
- id: crate::ids::FunctionId::from_raw(0.into()),
- };
+ let irrelevant_function = Function { id: crate::ids::FunctionId::from_raw(0.into()) };
let body_hir = expr::collect_fn_body_syntax(irrelevant_function, fn_def);
let scopes = ExprScopes::new(Arc::clone(body_hir.body()));
let scopes = ScopesWithSyntaxMapping {
let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
- let irrelevant_function = Function {
- id: crate::ids::FunctionId::from_raw(0.into()),
- };
+ let irrelevant_function = Function { id: crate::ids::FunctionId::from_raw(0.into()) };
let body_hir = expr::collect_fn_body_syntax(irrelevant_function, fn_def);
let scopes = ExprScopes::new(Arc::clone(body_hir.body()));
let scopes = ScopesWithSyntaxMapping {
fn fill_params(&mut self, params: &ast::TypeParamList) {
for (idx, type_param) in params.type_params().enumerate() {
- let name = type_param
- .name()
- .map(AsName::as_name)
- .unwrap_or_else(Name::missing);
- let param = GenericParam {
- idx: idx as u32,
- name,
- };
+ let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing);
+ let param = GenericParam { idx: idx as u32, name };
self.params.push(param);
}
}
impl<N: AstNode> Clone for ItemLoc<N> {
fn clone(&self) -> ItemLoc<N> {
- ItemLoc {
- module: self.module,
- raw: self.raw,
- _ty: PhantomData,
- }
+ ItemLoc { module: self.module, raw: self.raw, _ty: PhantomData }
}
}
impl<'a, DB: PersistentHirDatabase> LocationCtx<&'a DB> {
pub(crate) fn new(db: &'a DB, module: Module, file_id: HirFileId) -> LocationCtx<&'a DB> {
- LocationCtx {
- db,
- module,
- file_id,
- }
+ LocationCtx { db, module, file_id }
}
pub(crate) fn to_def<N, DEF>(self, ast: &N) -> DEF
where
fn interner(interner: &HirInterner) -> &LocationIntener<ItemLoc<N>, Self>;
fn from_ast(ctx: LocationCtx<&impl PersistentHirDatabase>, ast: &N) -> Self {
let items = ctx.db.file_items(ctx.file_id);
- let raw = SourceItemId {
- file_id: ctx.file_id,
- item_id: items.id_of(ctx.file_id, ast.syntax()),
- };
- let loc = ItemLoc {
- module: ctx.module,
- raw,
- _ty: PhantomData,
- };
+ let raw =
+ SourceItemId { file_id: ctx.file_id, item_id: items.id_of(ctx.file_id, ast.syntax()) };
+ let loc = ItemLoc { module: ctx.module, raw, _ty: PhantomData };
Self::interner(ctx.db.as_ref()).loc2id(&loc)
}
let int = Self::interner(db.as_ref());
let loc = int.id2loc(self);
let syntax = db.file_item(loc.raw);
- let ast = N::cast(&syntax)
- .unwrap_or_else(|| panic!("invalid ItemLoc: {:?}", loc.raw))
- .to_owned();
+ let ast =
+ N::cast(&syntax).unwrap_or_else(|| panic!("invalid ItemLoc: {:?}", loc.raw)).to_owned();
(loc.raw.file_id, ast)
}
fn module(self, db: &impl HirDatabase) -> Module {
impl SourceFileItems {
pub(crate) fn new(file_id: HirFileId, source_file: &SourceFile) -> SourceFileItems {
- let mut res = SourceFileItems {
- file_id,
- arena: Arena::default(),
- };
+ let mut res = SourceFileItems { file_id, arena: Arena::default() };
res.init(source_file);
res
}
item: ImplItem,
) -> Option<ImplBlock> {
let impl_id = *module_impl_blocks.impls_by_def.get(&item)?;
- Some(ImplBlock {
- module_impl_blocks,
- impl_id,
- })
+ Some(ImplBlock { module_impl_blocks, impl_id })
}
pub(crate) fn from_id(module_impl_blocks: Arc<ModuleImplBlocks>, impl_id: ImplId) -> ImplBlock {
- ImplBlock {
- module_impl_blocks,
- impl_id,
- }
+ ImplBlock { module_impl_blocks, impl_id }
}
pub fn id(&self) -> ImplId {
} else {
Vec::new()
};
- ImplData {
- target_trait,
- target_type,
- items,
- }
+ ImplData { target_trait, target_type, items }
}
pub fn target_trait(&self) -> Option<&TypeRef> {
let file_id: HirFileId = file_id.into();
let node = match &module_source {
ModuleSource::SourceFile(node) => node.syntax(),
- ModuleSource::Module(node) => node
- .item_list()
- .expect("inline module should have item list")
- .syntax(),
+ ModuleSource::Module(node) => {
+ node.item_list().expect("inline module should have item list").syntax()
+ }
};
for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) {
let input = {
let arg = macro_call.token_tree()?.syntax();
- MacroInput {
- text: arg.text().to_string(),
- }
+ MacroInput { text: arg.text().to_string() }
};
Some((def, input))
}
let ptr = SyntaxNodePtr::new(array_expr.syntax());
let src_range = TextRange::offset_len(0.into(), TextUnit::of_str(&input.text));
let ranges_map = vec![(src_range, array_expr.syntax().range())];
- let res = MacroExpansion {
- text,
- ranges_map,
- ptr,
- };
+ let res = MacroExpansion { text, ranges_map, ptr };
Some(res)
}
fn expand_query_group(self, input: MacroInput) -> Option<MacroExpansion> {
let anchor = "trait ";
let pos = input.text.find(anchor)? + anchor.len();
- let trait_name = input.text[pos..]
- .chars()
- .take_while(|c| c.is_alphabetic())
- .collect::<String>();
+ let trait_name =
+ input.text[pos..].chars().take_while(|c| c.is_alphabetic()).collect::<String>();
if trait_name.is_empty() {
return None;
}
let name = trait_def.name()?;
let ptr = SyntaxNodePtr::new(trait_def.syntax());
let ranges_map = vec![(src_range, name.syntax().range())];
- let res = MacroExpansion {
- text,
- ranges_map,
- ptr,
- };
+ let res = MacroExpansion { text, ranges_map, ptr };
Some(res)
}
}
let mut source_root = SourceRoot::default();
for entry in parse_fixture(fixture) {
if entry.text.contains(CURSOR_MARKER) {
- assert!(
- position.is_none(),
- "only one marker (<|>) per fixture is allowed"
- );
+ assert!(position.is_none(), "only one marker (<|>) per fixture is allowed");
position = Some(self.add_file_with_position(
source_root_id,
&mut source_root,
file_id: HirFileId,
decl_id: Option<SourceFileItemId>,
) -> Option<ModuleId> {
- let (res, _) = self
- .mods
- .iter()
- .find(|(_, m)| (m.file_id, m.decl_id) == (file_id, decl_id))?;
+ let (res, _) =
+ self.mods.iter().find(|(_, m)| (m.file_id, m.decl_id) == (file_id, decl_id))?;
Some(res)
}
decl_id: Option<SourceFileItemId>,
) -> ModuleId {
let is_root = parent.is_none();
- let id = self.alloc_mod(ModuleData {
- file_id,
- decl_id,
- parent,
- children: Vec::new(),
- });
+ let id = self.alloc_mod(ModuleData { file_id, decl_id, parent, children: Vec::new() });
for sub in db.submodules(file_id, decl_id).iter() {
let link = self.alloc_link(LinkData {
- source: SourceItemId {
- file_id,
- item_id: sub.decl_id,
- },
+ source: SourceItemId { file_id, item_id: sub.decl_id },
name: sub.name.clone(),
owner: id,
points_to: Vec::new(),
Some(tree.links[link].owner)
}
pub(crate) fn crate_root(self, tree: &ModuleTree) -> ModuleId {
- generate(Some(self), move |it| it.parent(tree))
- .last()
- .unwrap()
+ generate(Some(self), move |it| it.parent(tree)).last().unwrap()
}
pub(crate) fn child(self, tree: &ModuleTree, name: &Name) -> Option<ModuleId> {
let link = tree.mods[self]
impl<T> Default for PerNs<T> {
fn default() -> Self {
- PerNs {
- types: None,
- values: None,
- }
+ PerNs { types: None, values: None }
}
}
impl<T> PerNs<T> {
pub fn none() -> PerNs<T> {
- PerNs {
- types: None,
- values: None,
- }
+ PerNs { types: None, values: None }
}
pub fn values(t: T) -> PerNs<T> {
- PerNs {
- types: None,
- values: Some(t),
- }
+ PerNs { types: None, values: Some(t) }
}
pub fn types(t: T) -> PerNs<T> {
- PerNs {
- types: Some(t),
- values: None,
- }
+ PerNs { types: Some(t), values: None }
}
pub fn both(types: T, values: T) -> PerNs<T> {
- PerNs {
- types: Some(types),
- values: Some(values),
- }
+ PerNs { types: Some(types), values: Some(values) }
}
pub fn is_none(&self) -> bool {
}
pub fn as_ref(&self) -> PerNs<&T> {
- PerNs {
- types: self.types.as_ref(),
- values: self.values.as_ref(),
- }
+ PerNs { types: self.types.as_ref(), values: self.values.as_ref() }
}
pub fn combine(self, other: PerNs<T>) -> PerNs<T> {
- PerNs {
- types: self.types.or(other.types),
- values: self.values.or(other.values),
- }
+ PerNs { types: self.types.or(other.types), values: self.values.or(other.values) }
}
pub fn and_then<U>(self, f: impl Fn(T) -> Option<U>) -> PerNs<U> {
- PerNs {
- types: self.types.and_then(&f),
- values: self.values.and_then(&f),
- }
+ PerNs { types: self.types.and_then(&f), values: self.values.and_then(&f) }
}
pub fn map<U>(self, f: impl Fn(T) -> U) -> PerNs<U> {
- PerNs {
- types: self.types.map(&f),
- values: self.values.map(&f),
- }
+ PerNs { types: self.types.map(&f), values: self.values.map(&f) }
}
}
for dep in self.krate.dependencies(self.db) {
log::debug!("crate dep {:?} -> {:?}", dep.name, dep.krate);
if let Some(module) = dep.krate.root_module(self.db) {
- self.result
- .extern_prelude
- .insert(dep.name.clone(), module.into());
+ self.result.extern_prelude.insert(dep.name.clone(), module.into());
}
}
}
for (import_id, import_data) in input.imports.iter() {
if let Some(last_segment) = import_data.path.segments.iter().last() {
if !import_data.is_glob {
- let name = import_data
- .alias
- .clone()
- .unwrap_or_else(|| last_segment.name.clone());
- module_items.items.insert(
- name,
- Resolution {
- def: PerNs::none(),
- import: Some(import_id),
- },
- );
+ let name =
+ import_data.alias.clone().unwrap_or_else(|| last_segment.name.clone());
+ module_items
+ .items
+ .insert(name, Resolution { def: PerNs::none(), import: Some(import_id) });
}
}
}
// Populate modules
for (name, module_id) in module_id.children(&self.module_tree) {
- let module = Module {
- module_id,
- krate: self.krate,
- };
+ let module = Module { module_id, krate: self.krate };
self.add_module_item(&mut module_items, name, PerNs::types(module.into()));
}
if import.is_glob {
return ReachedFixedPoint::Yes;
};
- let original_module = Module {
- krate: self.krate,
- module_id,
- };
+ let original_module = Module { krate: self.krate, module_id };
let (def, reached_fixedpoint) =
- self.result
- .resolve_path_fp(self.db, original_module, &import.path);
+ self.result.resolve_path_fp(self.db, original_module, &import.path);
if reached_fixedpoint == ReachedFixedPoint::Yes {
let last_segment = import.path.segments.last().unwrap();
- let name = import
- .alias
- .clone()
- .unwrap_or_else(|| last_segment.name.clone());
+ let name = import.alias.clone().unwrap_or_else(|| last_segment.name.clone());
log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
// extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
}
}
self.update(module_id, |items| {
- let res = Resolution {
- def,
- import: Some(import_id),
- };
+ let res = Resolution { def, import: Some(import_id) };
items.items.insert(name, res);
});
}
let module_tree = db.module_tree(krate);
let input = module_tree
.modules()
- .map(|module_id| {
- (
- module_id,
- db.lower_module_module(Module { krate, module_id }),
- )
- })
+ .map(|module_id| (module_id, db.lower_module_module(Module { krate, module_id })))
.collect::<FxHashMap<_, _>>();
let resolver = Resolver::new(db, &input, krate);
let mut source_map = ImportSourceMap::default();
let mut res = LoweredModule::default();
match source {
- ModuleSource::SourceFile(it) => res.fill(
- &mut source_map,
- db,
- module,
- file_id,
- &mut it.items_with_macros(),
- ),
+ ModuleSource::SourceFile(it) => {
+ res.fill(&mut source_map, db, module, file_id, &mut it.items_with_macros())
+ }
ModuleSource::Module(it) => {
if let Some(item_list) = it.item_list() {
res.fill(
}
ast::ItemOrMacro::Macro(macro_call) => {
let item_id = file_items.id_of_unchecked(macro_call.syntax());
- let loc = MacroCallLoc {
- module,
- source_item_id: SourceItemId { file_id, item_id },
- };
+ let loc =
+ MacroCallLoc { module, source_item_id: SourceItemId { file_id, item_id } };
let id = loc.id(db);
let file_id = HirFileId::from(id);
//FIXME: expand recursively
ast::ModuleItemKind::FnDef(it) => {
if let Some(name) = it.name() {
let func = Function { id: ctx.to_def(it) };
- self.declarations
- .insert(name.as_name(), PerNs::values(func.into()));
+ self.declarations.insert(name.as_name(), PerNs::values(func.into()));
}
}
ast::ModuleItemKind::TraitDef(it) => {
if let Some(name) = it.name() {
let t = Trait { id: ctx.to_def(it) };
- self.declarations
- .insert(name.as_name(), PerNs::types(t.into()));
+ self.declarations.insert(name.as_name(), PerNs::types(t.into()));
}
}
ast::ModuleItemKind::TypeDef(it) => {
if let Some(name) = it.name() {
let t = Type { id: ctx.to_def(it) };
- self.declarations
- .insert(name.as_name(), PerNs::types(t.into()));
+ self.declarations.insert(name.as_name(), PerNs::types(t.into()));
}
}
ast::ModuleItemKind::ImplBlock(_) => {
ast::ModuleItemKind::ConstDef(it) => {
if let Some(name) = it.name() {
let c = Const { id: ctx.to_def(it) };
- self.declarations
- .insert(name.as_name(), PerNs::values(c.into()));
+ self.declarations.insert(name.as_name(), PerNs::values(c.into()));
}
}
ast::ModuleItemKind::StaticDef(it) => {
if let Some(name) = it.name() {
let s = Static { id: ctx.to_def(it) };
- self.declarations
- .insert(name.as_name(), PerNs::values(s.into()));
+ self.declarations.insert(name.as_name(), PerNs::values(s.into()));
}
}
ast::ModuleItemKind::Module(_) => {
.collect::<Vec<_>>();
lines.sort();
let actual = lines.join("\n");
- let expected = expected
- .trim()
- .lines()
- .map(|it| it.trim())
- .collect::<Vec<_>>()
- .join("\n");
+ let expected = expected.trim().lines().map(|it| it.trim()).collect::<Vec<_>>().join("\n");
assert_eq_text!(&expected, &actual);
fn dump_resolution(resolution: &Resolution) -> &'static str {
- match (
- resolution.def.types.is_some(),
- resolution.def.values.is_some(),
- ) {
+ match (resolution.def.types.is_some(), resolution.def.values.is_some()) {
(true, true) => "t v",
(true, false) => "t",
(false, true) => "v",
let mut crate_graph = CrateGraph::default();
let main_crate = crate_graph.add_crate_root(main_id);
let lib_crate = crate_graph.add_crate_root(lib_id);
- crate_graph
- .add_dep(main_crate, "test_crate".into(), lib_crate)
- .unwrap();
+ crate_graph.add_dep(main_crate, "test_crate".into(), lib_crate).unwrap();
db.set_crate_graph(Arc::new(crate_graph));
let mut crate_graph = CrateGraph::default();
let main_crate = crate_graph.add_crate_root(main_id);
let lib_crate = crate_graph.add_crate_root(lib_id);
- crate_graph
- .add_dep(main_crate, "alloc".into(), lib_crate)
- .unwrap();
+ crate_graph.add_dep(main_crate, "alloc".into(), lib_crate).unwrap();
db.set_crate_graph(Arc::new(crate_graph));
let mut crate_graph = CrateGraph::default();
let main_crate = crate_graph.add_crate_root(main_id);
let lib_crate = crate_graph.add_crate_root(lib_id);
- crate_graph
- .add_dep(main_crate, "test_crate".into(), lib_crate)
- .unwrap();
+ crate_graph.add_dep(main_crate, "test_crate".into(), lib_crate).unwrap();
db.set_crate_graph(Arc::new(crate_graph));
let mut crate_graph = CrateGraph::default();
let main_crate = crate_graph.add_crate_root(main_id);
let lib_crate = crate_graph.add_crate_root(lib_id);
- crate_graph
- .add_dep(main_crate, "test_crate".into(), lib_crate)
- .unwrap();
+ crate_graph.add_dep(main_crate, "test_crate".into(), lib_crate).unwrap();
db.set_crate_graph(Arc::new(crate_graph));
let events = db.log_executed(|| {
db.item_map(krate);
});
- assert!(
- !format!("{:?}", events).contains("item_map"),
- "{:#?}",
- events
- )
+ assert!(!format!("{:?}", events).contains("item_map"), "{:#?}", events)
}
}
match segment.kind()? {
ast::PathSegmentKind::Name(name) => {
- let args = segment
- .type_arg_list()
- .and_then(GenericArgs::from_ast)
- .map(Arc::new);
- let segment = PathSegment {
- name: name.as_name(),
- args_and_bindings: args,
- };
+ let args =
+ segment.type_arg_list().and_then(GenericArgs::from_ast).map(Arc::new);
+ let segment = PathSegment { name: name.as_name(), args_and_bindings: args };
segments.push(segment);
}
ast::PathSegmentKind::CrateKw => {
fn from(name: Name) -> Path {
Path {
kind: PathKind::Plain,
- segments: vec![PathSegment {
- name,
- args_and_bindings: None,
- }],
+ segments: vec![PathSegment { name, args_and_bindings: None }],
}
}
}
}
fn convert_path(prefix: Option<Path>, path: &ast::Path) -> Option<Path> {
- let prefix = if let Some(qual) = path.qualifier() {
- Some(convert_path(prefix, qual)?)
- } else {
- prefix
- };
+ let prefix =
+ if let Some(qual) = path.qualifier() { Some(convert_path(prefix, qual)?) } else { prefix };
let segment = path.segment()?;
let res = match segment.kind()? {
ast::PathSegmentKind::Name(name) => {
- let mut res = prefix.unwrap_or_else(|| Path {
- kind: PathKind::Plain,
- segments: Vec::with_capacity(1),
- });
+ let mut res = prefix
+ .unwrap_or_else(|| Path { kind: PathKind::Plain, segments: Vec::with_capacity(1) });
res.segments.push(PathSegment {
name: name.as_name(),
args_and_bindings: None, // no type args in use
if prefix.is_some() {
return None;
}
- Path {
- kind: PathKind::Crate,
- segments: Vec::new(),
- }
+ Path { kind: PathKind::Crate, segments: Vec::new() }
}
ast::PathSegmentKind::SelfKw => {
if prefix.is_some() {
return None;
}
- Path {
- kind: PathKind::Self_,
- segments: Vec::new(),
- }
+ Path { kind: PathKind::Self_, segments: Vec::new() }
}
ast::PathSegmentKind::SuperKw => {
if prefix.is_some() {
return None;
}
- Path {
- kind: PathKind::Super,
- segments: Vec::new(),
- }
+ Path { kind: PathKind::Super, segments: Vec::new() }
}
};
Some(res)
source_item_id: SourceItemId,
) -> TreeArc<SyntaxNode> {
let source_file = db.hir_parse(source_item_id.file_id);
- db.file_items(source_item_id.file_id)[source_item_id.item_id]
- .to_node(&source_file)
- .to_owned()
+ db.file_items(source_item_id.file_id)[source_item_id.item_id].to_node(&source_file).to_owned()
}
expr_scopes: Arc<ExprScopes>,
scope_id: ScopeId,
) -> Resolver {
- self.push_scope(Scope::ExprScope(ExprScope {
- expr_scopes,
- scope_id,
- }))
+ self.push_scope(Scope::ExprScope(ExprScope { expr_scopes, scope_id }))
}
}
}
}
Scope::ExprScope(e) => {
- let entry = e
- .expr_scopes
- .entries(e.scope_id)
- .iter()
- .find(|entry| entry.name() == name);
+ let entry =
+ e.expr_scopes.entries(e.scope_id).iter().find(|entry| entry.name() == name);
match entry {
Some(e) => PerNs::values(Resolution::LocalBinding(e.pat())),
None => PerNs::none(),
// def: m.module.into(),
// }),
// );
- m.item_map[m.module.module_id]
- .entries()
- .for_each(|(name, res)| {
- f(name.clone(), res.def.map(Resolution::Def));
- });
+ m.item_map[m.module.module_id].entries().for_each(|(name, res)| {
+ f(name.clone(), res.def.map(Resolution::Def));
+ });
m.item_map.extern_prelude.iter().for_each(|(name, def)| {
f(name.clone(), PerNs::types(Resolution::Def(*def)));
});
}
Scope::GenericParams(gp) => {
for param in &gp.params {
- f(
- param.name.clone(),
- PerNs::types(Resolution::GenericParam(param.idx)),
- )
+ f(param.name.clone(), PerNs::types(Resolution::GenericParam(param.idx)))
}
}
Scope::ImplBlockScope(i) => {
- f(
- Name::self_type(),
- PerNs::types(Resolution::SelfType(i.clone())),
- );
+ f(Name::self_type(), PerNs::types(Resolution::SelfType(i.clone())));
}
Scope::ExprScope(e) => {
e.expr_scopes.entries(e.scope_id).iter().for_each(|e| {
- f(
- e.name().clone(),
- PerNs::values(Resolution::LocalBinding(e.pat())),
- );
+ f(e.name().clone(), PerNs::values(Resolution::LocalBinding(e.pat())));
});
}
}
file_id: FileId,
child: &SyntaxNode,
) -> Option<Module> {
- if let Some(m) = child
- .ancestors()
- .filter_map(ast::Module::cast)
- .find(|it| !it.has_semi())
- {
+ if let Some(m) = child.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi()) {
module_from_inline(db, file_id.into(), m)
} else {
module_from_file_id(db, file_id.into())
decl_id: Option<SourceFileItemId>,
) -> Option<Module> {
let source_root_id = db.file_source_root(file_id.as_original_file());
- db.source_root_crates(source_root_id)
- .iter()
- .map(|&crate_id| Crate { crate_id })
- .find_map(|krate| {
+ db.source_root_crates(source_root_id).iter().map(|&crate_id| Crate { crate_id }).find_map(
+ |krate| {
let module_tree = db.module_tree(krate);
let module_id = module_tree.find_module_by_source(file_id, decl_id)?;
Some(Module { krate, module_id })
- })
+ },
+ )
}
pub fn function_from_position(db: &impl HirDatabase, position: FilePosition) -> Option<Function> {
let (file_id, _) = module.definition_source(db);
let file_id = file_id.into();
let ctx = LocationCtx::new(db, module, file_id);
- Function {
- id: ctx.to_def(fn_def),
- }
+ Function { id: ctx.to_def(fn_def) }
}
pub fn function_from_child_node(
let (file_id, _) = module.definition_source(db);
let file_id = file_id.into();
let ctx = LocationCtx::new(db, module, file_id);
- Struct {
- id: ctx.to_def(struct_def),
- }
+ Struct { id: ctx.to_def(struct_def) }
}
pub fn enum_from_module(db: &impl HirDatabase, module: Module, enum_def: &ast::EnumDef) -> Enum {
let (file_id, _) = module.definition_source(db);
let file_id = file_id.into();
let ctx = LocationCtx::new(db, module, file_id);
- Enum {
- id: ctx.to_def(enum_def),
- }
+ Enum { id: ctx.to_def(enum_def) }
}
pub fn trait_from_module(
let (file_id, _) = module.definition_source(db);
let file_id = file_id.into();
let ctx = LocationCtx::new(db, module, file_id);
- Trait {
- id: ctx.to_def(trait_def),
- }
+ Trait { id: ctx.to_def(trait_def) }
}
pub fn macro_symbols(db: &impl HirDatabase, file_id: FileId) -> Vec<(SmolStr, TextRange)> {
match type_ref {
TypeRef::Never => Ty::Never,
TypeRef::Tuple(inner) => {
- let inner_tys = inner
- .iter()
- .map(|tr| Ty::from_hir(db, resolver, tr))
- .collect::<Vec<_>>();
+ let inner_tys =
+ inner.iter().map(|tr| Ty::from_hir(db, resolver, tr)).collect::<Vec<_>>();
Ty::Tuple(inner_tys.into())
}
TypeRef::Path(path) => Ty::from_hir_path(db, resolver, path),
}
TypeRef::Placeholder => Ty::Unknown,
TypeRef::Fn(params) => {
- let mut inner_tys = params
- .iter()
- .map(|tr| Ty::from_hir(db, resolver, tr))
- .collect::<Vec<_>>();
- let return_ty = inner_tys
- .pop()
- .expect("TypeRef::Fn should always have at least return type");
- let sig = FnSig {
- input: inner_tys,
- output: return_ty,
- };
+ let mut inner_tys =
+ params.iter().map(|tr| Ty::from_hir(db, resolver, tr)).collect::<Vec<_>>();
+ let return_ty =
+ inner_tys.pop().expect("TypeRef::Fn should always have at least return type");
+ let sig = FnSig { input: inner_tys, output: return_ty };
Ty::FnPtr(Arc::new(sig))
}
TypeRef::Error => Ty::Unknown,
resolved: TypableDef,
) -> Substs {
let mut substs = Vec::new();
- let last = path
- .segments
- .last()
- .expect("path should have at least one segment");
+ let last = path.segments.last().expect("path should have at least one segment");
let (def_generics, segment) = match resolved {
TypableDef::Function(func) => (func.generic_params(db), last),
TypableDef::Struct(s) => (s.generic_params(db), last),
}
// add placeholders for args that were not provided
// TODO: handle defaults
- let supplied_params = segment
- .args_and_bindings
- .as_ref()
- .map(|ga| ga.args.len())
- .unwrap_or(0);
+ let supplied_params =
+ segment.args_and_bindings.as_ref().map(|ga| ga.args.len()).unwrap_or(0);
for _ in supplied_params..def_generics.params.len() {
substs.push(Ty::Unknown);
}
/// `Option<u32>` afterwards.)
pub fn apply_substs(self, substs: Substs) -> Ty {
match self {
- Ty::Adt { def_id, name, .. } => Ty::Adt {
- def_id,
- name,
- substs,
- },
- Ty::FnDef { def, name, sig, .. } => Ty::FnDef {
- def,
- name,
- sig,
- substs,
- },
+ Ty::Adt { def_id, name, .. } => Ty::Adt { def_id, name, substs },
+ Ty::FnDef { def, name, sig, .. } => Ty::FnDef { def, name, sig, substs },
_ => self,
}
}
if ts.len() == 1 {
write!(f, "({},)", ts[0])
} else {
- join(ts.iter())
- .surround_with("(", ")")
- .separator(", ")
- .to_fmt(f)
+ join(ts.iter()).surround_with("(", ")").separator(", ").to_fmt(f)
}
}
Ty::FnPtr(sig) => {
- join(sig.input.iter())
- .surround_with("fn(", ")")
- .separator(", ")
- .to_fmt(f)?;
+ join(sig.input.iter()).surround_with("fn(", ")").separator(", ").to_fmt(f)?;
write!(f, " -> {}", sig.output)
}
- Ty::FnDef {
- name, substs, sig, ..
- } => {
+ Ty::FnDef { name, substs, sig, .. } => {
write!(f, "fn {}", name)?;
if substs.0.len() > 0 {
- join(substs.0.iter())
- .surround_with("<", ">")
- .separator(", ")
- .to_fmt(f)?;
+ join(substs.0.iter()).surround_with("<", ">").separator(", ").to_fmt(f)?;
}
- join(sig.input.iter())
- .surround_with("(", ")")
- .separator(", ")
- .to_fmt(f)?;
+ join(sig.input.iter()).surround_with("(", ")").separator(", ").to_fmt(f)?;
write!(f, " -> {}", sig.output)
}
Ty::Adt { name, substs, .. } => {
write!(f, "{}", name)?;
if substs.0.len() > 0 {
- join(substs.0.iter())
- .surround_with("<", ">")
- .separator(", ")
- .to_fmt(f)?;
+ join(substs.0.iter()).surround_with("<", ">").separator(", ").to_fmt(f)?;
}
Ok(())
}
let resolver = def.resolver(db);
let generics = def.generic_params(db);
let name = def.name(db);
- let input = signature
- .params()
- .iter()
- .map(|tr| Ty::from_hir(db, &resolver, tr))
- .collect::<Vec<_>>();
+ let input =
+ signature.params().iter().map(|tr| Ty::from_hir(db, &resolver, tr)).collect::<Vec<_>>();
let output = Ty::from_hir(db, &resolver, signature.ret_type());
let sig = Arc::new(FnSig { input, output });
let substs = make_substs(&generics);
- Ty::FnDef {
- def,
- sig,
- name,
- substs,
- }
+ Ty::FnDef { def, sig, name, substs }
}
fn make_substs(generics: &GenericParams) -> Substs {
- Substs(
- generics
- .params
- .iter()
- .map(|_p| Ty::Unknown)
- .collect::<Vec<_>>()
- .into(),
- )
+ Substs(generics.params.iter().map(|_p| Ty::Unknown).collect::<Vec<_>>().into())
}
fn type_for_struct(db: &impl HirDatabase, s: Struct) -> Ty {
}
fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs) -> bool {
- substs1
- .0
- .iter()
- .zip(substs2.0.iter())
- .all(|(t1, t2)| self.unify(t1, t2))
+ substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify(t1, t2))
}
fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
},
(Ty::Bool, _) | (Ty::Str, _) | (Ty::Never, _) | (Ty::Char, _) => ty1 == ty2,
(
- Ty::Adt {
- def_id: def_id1,
- substs: substs1,
- ..
- },
- Ty::Adt {
- def_id: def_id2,
- substs: substs2,
- ..
- },
+ Ty::Adt { def_id: def_id1, substs: substs1, .. },
+ Ty::Adt { def_id: def_id2, substs: substs2, .. },
) if def_id1 == def_id2 => self.unify_substs(substs1, substs2),
(Ty::Slice(t1), Ty::Slice(t2)) => self.unify(t1, t2),
(Ty::RawPtr(t1, m1), Ty::RawPtr(t2, m2)) if m1 == m2 => self.unify(t1, t2),
(Ty::Ref(t1, m1), Ty::Ref(t2, m2)) if m1 == m2 => self.unify(t1, t2),
(Ty::FnPtr(sig1), Ty::FnPtr(sig2)) if sig1 == sig2 => true,
- (Ty::Tuple(ts1), Ty::Tuple(ts2)) if ts1.len() == ts2.len() => ts1
- .iter()
- .zip(ts2.iter())
- .all(|(t1, t2)| self.unify(t1, t2)),
+ (Ty::Tuple(ts1), Ty::Tuple(ts2)) if ts1.len() == ts2.len() => {
+ ts1.iter().zip(ts2.iter()).all(|(t1, t2)| self.unify(t1, t2))
+ }
(Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2)))
| (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2)))
| (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2))) => {
| (Ty::Infer(InferTy::FloatVar(tv)), other)
| (other, Ty::Infer(InferTy::FloatVar(tv))) => {
// the type var is unknown since we tried to resolve it
- self.var_unification_table
- .union_value(*tv, TypeVarValue::Known(other.clone()));
+ self.var_unification_table.union_value(*tv, TypeVarValue::Known(other.clone()));
true
}
_ => false,
}
fn new_type_var(&mut self) -> Ty {
- Ty::Infer(InferTy::TypeVar(
- self.var_unification_table.new_key(TypeVarValue::Unknown),
- ))
+ Ty::Infer(InferTy::TypeVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
}
fn new_integer_var(&mut self) -> Ty {
- Ty::Infer(InferTy::IntVar(
- self.var_unification_table.new_key(TypeVarValue::Unknown),
- ))
+ Ty::Infer(InferTy::IntVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
}
fn new_float_var(&mut self) -> Ty {
- Ty::Infer(InferTy::FloatVar(
- self.var_unification_table.new_key(TypeVarValue::Unknown),
- ))
+ Ty::Infer(InferTy::FloatVar(self.var_unification_table.new_key(TypeVarValue::Unknown)))
}
/// Replaces Ty::Unknown by a new type var, so we can maybe still infer it.
for subpat in subpats {
let matching_field = def.and_then(|it| it.field(self.db, &subpat.name));
- let expected_ty = matching_field
- .map_or(Ty::Unknown, |field| field.ty(self.db))
- .subst(&substs);
+ let expected_ty =
+ matching_field.map_or(Ty::Unknown, |field| field.ty(self.db)).subst(&substs);
self.infer_pat(subpat.pat, &expected_ty);
}
let subty = self.infer_pat(*pat, expectation);
Ty::Ref(subty.into(), *mutability)
}
- Pat::TupleStruct {
- path: ref p,
- args: ref subpats,
- } => self.infer_tuple_struct_pat(p.as_ref(), subpats, expected),
- Pat::Struct {
- path: ref p,
- args: ref fields,
- } => self.infer_struct_pat(p.as_ref(), fields, expected),
+ Pat::TupleStruct { path: ref p, args: ref subpats } => {
+ self.infer_tuple_struct_pat(p.as_ref(), subpats, expected)
+ }
+ Pat::Struct { path: ref p, args: ref fields } => {
+ self.infer_struct_pat(p.as_ref(), fields, expected)
+ }
Pat::Path(path) => {
// TODO use correct resolver for the surrounding expression
let resolver = self.resolver.clone();
- self.infer_path_expr(&resolver, &path)
- .unwrap_or(Ty::Unknown)
- }
- Pat::Bind {
- mode,
- name: _name,
- subpat,
- } => {
+ self.infer_path_expr(&resolver, &path).unwrap_or(Ty::Unknown)
+ }
+ Pat::Bind { mode, name: _name, subpat } => {
let subty = if let Some(subpat) = subpat {
self.infer_pat(*subpat, expected)
} else {
let body = Arc::clone(&self.body); // avoid borrow checker problem
let ty = match &body[tgt_expr] {
Expr::Missing => Ty::Unknown,
- Expr::If {
- condition,
- then_branch,
- else_branch,
- } => {
+ Expr::If { condition, then_branch, else_branch } => {
// if let is desugared to match, so this is always simple if
self.infer_expr(*condition, &Expectation::has_type(Ty::Bool));
let then_ty = self.infer_expr(*then_branch, expected);
self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
Ty::unit()
}
- Expr::For {
- iterable,
- body,
- pat,
- } => {
+ Expr::For { iterable, body, pat } => {
let _iterable_ty = self.infer_expr(*iterable, &Expectation::none());
self.infer_pat(*pat, &Ty::Unknown);
self.infer_expr(*body, &Expectation::has_type(Ty::unit()));
Ty::unit()
}
- Expr::Lambda {
- body,
- args,
- arg_types,
- } => {
+ Expr::Lambda { body, args, arg_types } => {
assert_eq!(args.len(), arg_types.len());
for (arg_pat, arg_type) in args.iter().zip(arg_types.iter()) {
Ty::FnPtr(sig) => (sig.input.clone(), sig.output.clone()),
Ty::FnDef { substs, sig, .. } => {
let ret_ty = sig.output.clone().subst(&substs);
- let param_tys = sig
- .input
- .iter()
- .map(|ty| ty.clone().subst(&substs))
- .collect();
+ let param_tys =
+ sig.input.iter().map(|ty| ty.clone().subst(&substs)).collect();
(param_tys, ret_ty)
}
_ => {
}
ret_ty
}
- Expr::MethodCall {
- receiver,
- args,
- method_name,
- } => {
+ Expr::MethodCall { receiver, args, method_name } => {
let receiver_ty = self.infer_expr(*receiver, &Expectation::none());
let resolved = receiver_ty.clone().lookup_method(self.db, method_name);
let method_ty = match resolved {
let (expected_receiver_ty, param_tys, ret_ty) = match &method_ty {
Ty::FnPtr(sig) => {
if !sig.input.is_empty() {
- (
- sig.input[0].clone(),
- sig.input[1..].to_vec(),
- sig.output.clone(),
- )
+ (sig.input[0].clone(), sig.input[1..].to_vec(), sig.output.clone())
} else {
(Ty::Unknown, Vec::new(), sig.output.clone())
}
}
Ty::Never
}
- Expr::StructLit {
- path,
- fields,
- spread,
- } => {
+ Expr::StructLit { path, fields, spread } => {
let (ty, def_id) = self.resolve_variant(path.as_ref());
let substs = ty.substs().unwrap_or_else(Substs::empty);
for field in fields {
let i = name.to_string().parse::<usize>().ok();
i.and_then(|i| fields.get(i).cloned())
}
- Ty::Adt {
- def_id: AdtDef::Struct(s),
- ref substs,
- ..
- } => s.field(self.db, name).map(|field| {
- self.write_field_resolution(tgt_expr, field);
- field.ty(self.db).subst(substs)
- }),
+ Ty::Adt { def_id: AdtDef::Struct(s), ref substs, .. } => {
+ s.field(self.db, name).map(|field| {
+ self.write_field_resolution(tgt_expr, field);
+ field.ty(self.db).subst(substs)
+ })
+ }
_ => None,
})
.unwrap_or(Ty::Unknown);
) -> Ty {
for stmt in statements {
match stmt {
- Statement::Let {
- pat,
- type_ref,
- initializer,
- } => {
- let decl_ty = type_ref
- .as_ref()
- .map(|tr| self.make_ty(tr))
- .unwrap_or(Ty::Unknown);
+ Statement::Let { pat, type_ref, initializer } => {
+ let decl_ty =
+ type_ref.as_ref().map(|tr| self.make_ty(tr)).unwrap_or(Ty::Unknown);
let decl_ty = self.insert_type_vars(decl_ty);
let ty = if let Some(expr) = initializer {
let expr_ty = self.infer_expr(*expr, &Expectation::has_type(decl_ty));
}
}
}
- let ty = if let Some(expr) = tail {
- self.infer_expr(expr, expected)
- } else {
- Ty::unit()
- };
+ let ty = if let Some(expr) = tail { self.infer_expr(expr, expected) } else { Ty::unit() };
ty
}
}
fn infer_body(&mut self) {
- self.infer_expr(
- self.body.body_expr(),
- &Expectation::has_type(self.return_ty.clone()),
- );
+ self.infer_expr(self.body.body_expr(), &Expectation::has_type(self.return_ty.clone()));
}
}
ty: &Ty,
) -> impl Iterator<Item = (Module, ImplBlock)> + 'a {
let fingerprint = TyFingerprint::for_impl(ty);
- fingerprint
- .and_then(|f| self.impls.get(&f))
- .into_iter()
- .flat_map(|i| i.iter())
- .map(move |(module_id, impl_id)| {
- let module = Module {
- krate: self.krate,
- module_id: *module_id,
- };
+ fingerprint.and_then(|f| self.impls.get(&f)).into_iter().flat_map(|i| i.iter()).map(
+ move |(module_id, impl_id)| {
+ let module = Module { krate: self.krate, module_id: *module_id };
let module_impl_blocks = db.impls_in_module(module);
(module, ImplBlock::from_id(module_impl_blocks, *impl_id))
- })
+ },
+ )
}
pub fn lookup_impl_blocks_for_trait<'a>(
tr: &Trait,
) -> impl Iterator<Item = (Module, ImplBlock)> + 'a {
let id = tr.id;
- self.impls_by_trait
- .get(&id)
- .into_iter()
- .flat_map(|i| i.iter())
- .map(move |(module_id, impl_id)| {
- let module = Module {
- krate: self.krate,
- module_id: *module_id,
- };
+ self.impls_by_trait.get(&id).into_iter().flat_map(|i| i.iter()).map(
+ move |(module_id, impl_id)| {
+ let module = Module { krate: self.krate, module_id: *module_id };
let module_impl_blocks = db.impls_in_module(module);
(module, ImplBlock::from_id(module_impl_blocks, *impl_id))
- })
+ },
+ )
}
fn collect_recursive(&mut self, db: &impl HirDatabase, module: &Module) {
let (db, _, file_id) = MockDatabase::with_single_file(content);
let source_file = db.parse(file_id);
let mut acc = String::new();
- for fn_def in source_file
- .syntax()
- .descendants()
- .filter_map(ast::FnDef::cast)
- {
+ for fn_def in source_file.syntax().descendants().filter_map(ast::FnDef::cast) {
let func = source_binder::function_from_source(&db, file_id, fn_def).unwrap();
let inference_result = func.infer(&db);
let body_syntax_mapping = func.body_syntax_mapping(&db);
"
.to_string();
- db.query_mut(ra_db::FileTextQuery)
- .set(pos.file_id, Arc::new(new_text));
+ db.query_mut(ra_db::FileTextQuery).set(pos.file_id, Arc::new(new_text));
{
let events = db.log_executed(|| {
ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()),
TupleType(inner) => TypeRef::Tuple(inner.fields().map(TypeRef::from_ast).collect()),
NeverType(..) => TypeRef::Never,
- PathType(inner) => inner
- .path()
- .and_then(Path::from_ast)
- .map(TypeRef::Path)
- .unwrap_or(TypeRef::Error),
+ PathType(inner) => {
+ inner.path().and_then(Path::from_ast).map(TypeRef::Path).unwrap_or(TypeRef::Error)
+ }
PointerType(inner) => {
let inner_ty = TypeRef::from_ast_opt(inner.type_ref());
let mutability = Mutability::from_mutable(inner.is_mut());
FnPointerType(inner) => {
let ret_ty = TypeRef::from_ast_opt(inner.ret_type().and_then(|rt| rt.type_ref()));
let mut params = if let Some(pl) = inner.param_list() {
- pl.params()
- .map(|p| p.type_ref())
- .map(TypeRef::from_ast_opt)
- .collect()
+ pl.params().map(|p| p.type_ref()).map(TypeRef::from_ast_opt).collect()
} else {
Vec::new()
};
.into_iter()
.map(|(label, action)| {
let file_id = frange.file_id;
- let file_edit = SourceFileEdit {
- file_id,
- edit: action.edit,
- };
+ let file_edit = SourceFileEdit { file_id, edit: action.edit };
SourceChange {
label: label.label,
source_file_edits: vec![file_edit],
// Resolve the function's NameRef (NOTE: this isn't entirely accurate).
let file_symbols = crate::symbol_index::index_resolve(db, name_ref);
- let symbol = file_symbols
- .into_iter()
- .find(|it| it.ptr.kind() == FN_DEF)?;
+ let symbol = file_symbols.into_iter().find(|it| it.ptr.kind() == FN_DEF)?;
let fn_file = db.parse(symbol.file_id);
let fn_def = symbol.ptr.to_node(&fn_file);
let fn_def = ast::FnDef::cast(fn_def).unwrap();
let start = arg_list_range.start();
let range_search = TextRange::from_to(start, position.offset);
- let mut commas: usize = arg_list
- .syntax()
- .text()
- .slice(range_search)
- .to_string()
- .matches(',')
- .count();
+ let mut commas: usize =
+ arg_list.syntax().text().slice(range_search).to_string().matches(',').count();
// If we have a method call eat the first param since it's just self.
if has_self {
_ => return None,
}),
- FnCallNode::MethodCallExpr(call_expr) => call_expr
- .syntax()
- .children()
- .filter_map(ast::NameRef::cast)
- .nth(0),
+ FnCallNode::MethodCallExpr(call_expr) => {
+ call_expr.syntax().children().filter_map(ast::NameRef::cast).nth(0)
+ }
}
}
let label = crate::completion::function_label(node)?;
let doc = function.docs(db);
- Some(CallInfo {
- parameters: param_list(node),
- label,
- doc,
- active_parameter: None,
- })
+ Some(CallInfo { parameters: param_list(node), label, doc, active_parameter: None })
}
}
// Maybe use param.pat here? See if we can just extract the name?
//res.extend(param_list.params().map(|p| p.syntax().text().to_string()));
res.extend(
- param_list
- .params()
- .filter_map(|p| p.pat())
- .map(|pat| pat.syntax().text().to_string()),
+ param_list.params().filter_map(|p| p.pat()).map(|pat| pat.syntax().text().to_string()),
);
}
res
"#,
);
- assert_eq!(
- info.parameters,
- vec!["&mut self".to_string(), "ctx".to_string()]
- );
+ assert_eq!(info.parameters, vec!["&mut self".to_string(), "ctx".to_string()]);
assert_eq!(info.active_parameter, Some(1));
assert_eq!(
info.doc.map(|it| it.into()),
path: RelativePathBuf,
text: Arc<String>,
) {
- let file = AddFile {
- file_id,
- path,
- text,
- };
- self.roots_changed
- .entry(root_id)
- .or_default()
- .added
- .push(file);
+ let file = AddFile { file_id, path, text };
+ self.roots_changed.entry(root_id).or_default().added.push(file);
}
pub fn change_file(&mut self, file_id: FileId, new_text: Arc<String>) {
pub fn remove_file(&mut self, root_id: SourceRootId, file_id: FileId, path: RelativePathBuf) {
let file = RemoveFile { file_id, path };
- self.roots_changed
- .entry(root_id)
- .or_default()
- .removed
- .push(file);
+ self.roots_changed.entry(root_id).or_default().removed.push(file);
}
pub fn add_library(&mut self, data: LibraryData) {
let mut root_change = RootChange::default();
root_change.added = files
.into_iter()
- .map(|(file_id, path, text)| AddFile {
- file_id,
- path,
- text,
- })
+ .map(|(file_id, path, text)| AddFile { file_id, path, text })
.collect();
- LibraryData {
- root_id,
- root_change,
- symbol_index,
- }
+ LibraryData { root_id, root_change, symbol_index }
}
}
self.last_gc_check = time::Instant::now();
let retained_trees = syntax_tree_stats(self).retained;
if retained_trees > 100 {
- log::info!(
- "automatic garbadge collection, {} retained trees",
- retained_trees
- );
+ log::info!("automatic garbadge collection, {} retained trees", retained_trees);
self.collect_garbage();
}
}
pub(crate) fn collect_garbage(&mut self) {
self.last_gc = time::Instant::now();
- let sweep = SweepStrategy::default()
- .discard_values()
- .sweep_all_revisions();
+ let sweep = SweepStrategy::default().discard_values().sweep_all_revisions();
self.query(ra_db::ParseQuery).sweep(sweep);
fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) {
for receiver in receiver.autoderef(ctx.db) {
match receiver {
- Ty::Adt {
- def_id, ref substs, ..
- } => {
+ Ty::Adt { def_id, ref substs, .. } => {
match def_id {
AdtDef::Struct(s) => {
for field in s.fields(ctx.db) {
node: &'a N,
params: &mut FxHashMap<String, (u32, &'a ast::Param)>,
) {
- node.functions()
- .filter_map(|it| it.param_list())
- .flat_map(|it| it.params())
- .for_each(|param| {
+ node.functions().filter_map(|it| it.param_list()).flat_map(|it| it.params()).for_each(
+ |param| {
let text = param.syntax().text().to_string();
params.entry(text).or_insert((0, param)).0 += 1;
- })
+ },
+ )
}
}
hir::ModuleDef::Enum(e) => {
e.variants(ctx.db).into_iter().for_each(|variant| {
if let Some(name) = variant.name(ctx.db) {
- let detail_types = variant
- .fields(ctx.db)
- .into_iter()
- .map(|field| field.ty(ctx.db));
- let detail = join(detail_types)
- .separator(", ")
- .surround_with("(", ")")
- .to_string();
+ let detail_types =
+ variant.fields(ctx.db).into_iter().map(|field| field.ty(ctx.db));
+ let detail =
+ join(detail_types).separator(", ").surround_with("(", ")").to_string();
CompletionItem::new(
CompletionKind::Reference,
fn postfix_snippet(ctx: &CompletionContext, label: &str, snippet: &str) -> Builder {
let replace_range = ctx.source_range();
- let receiver_range = ctx
- .dot_receiver
- .expect("no receiver available")
- .syntax()
- .range();
+ let receiver_range = ctx.dot_receiver.expect("no receiver available").syntax().range();
let delete_range = TextRange::from_to(receiver_range.start(), replace_range.start());
let mut builder = TextEditBuilder::default();
builder.delete(delete_range);
let names = ctx.resolver.all_names();
names.into_iter().for_each(|(name, res)| {
- CompletionItem::new(
- CompletionKind::Reference,
- ctx.source_range(),
- name.to_string(),
- )
- .from_resolution(ctx, &res)
- .add_to(acc)
+ CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.to_string())
+ .from_resolution(ctx, &res)
+ .add_to(acc)
});
}
}
fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) {
let name_range = name_ref.syntax().range();
- let top_node = name_ref
- .syntax()
- .ancestors()
- .take_while(|it| it.range() == name_range)
- .last()
- .unwrap();
+ let top_node =
+ name_ref.syntax().ancestors().take_while(|it| it.range() == name_range).last().unwrap();
match top_node.parent().map(|it| it.kind()) {
Some(SOURCE_FILE) | Some(ITEM_LIST) => {
}
/// What string is used for filtering.
pub fn lookup(&self) -> &str {
- self.lookup
- .as_ref()
- .map(|it| it.as_str())
- .unwrap_or_else(|| self.label())
+ self.lookup.as_ref().map(|it| it.as_str()).unwrap_or_else(|| self.label())
}
pub fn insert_text_format(&self) -> InsertTextFormat {
) -> Builder {
use hir::ModuleDef::*;
- let def = resolution
- .as_ref()
- .take_types()
- .or_else(|| resolution.as_ref().take_values());
+ let def = resolution.as_ref().take_types().or_else(|| resolution.as_ref().take_values());
let def = match def {
None => return self,
Some(it) => it,
};
let completions = completions(&analysis.db, position).unwrap();
let completion_items: Vec<CompletionItem> = completions.into();
- let mut kind_completions: Vec<CompletionItem> = completion_items
- .into_iter()
- .filter(|c| c.completion_kind == kind)
- .collect();
+ let mut kind_completions: Vec<CompletionItem> =
+ completion_items.into_iter().filter(|c| c.completion_kind == kind).collect();
kind_completions.sort_by_key(|c| c.label.clone());
assert_debug_snapshot_matches!(test_name, kind_completions);
}
let source_root = db.file_source_root(file_id);
let diag = match problem {
Problem::UnresolvedModule { candidate } => {
- let create_file = FileSystemEdit::CreateFile {
- source_root,
- path: candidate.clone(),
- };
+ let create_file =
+ FileSystemEdit::CreateFile { source_root, path: candidate.clone() };
let fix = SourceChange {
label: "create module".to_string(),
source_file_edits: Vec::new(),
dst_source_root: source_root,
dst_path: move_to.clone(),
};
- let create_file = FileSystemEdit::CreateFile {
- source_root,
- path: move_to.join(candidate),
- };
+ let create_file =
+ FileSystemEdit::CreateFile { source_root, path: move_to.join(candidate) };
let fix = SourceChange {
label: "move file and create module".to_string(),
source_file_edits: Vec::new(),
}
fn find_macro_call(node: &SyntaxNode, range: TextRange) -> Option<&ast::MacroCall> {
- find_covering_node(node, range)
- .ancestors()
- .find_map(ast::MacroCall::cast)
+ find_covering_node(node, range).ancestors().find_map(ast::MacroCall::cast)
}
#[cfg(test)]
hir::source_binder::function_from_child_node(db, file_id, name_ref.syntax())
{
// Check if it is a method
- if let Some(method_call) = name_ref
- .syntax()
- .parent()
- .and_then(ast::MethodCallExpr::cast)
- {
+ if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) {
tested_by!(goto_definition_works_for_methods);
let infer_result = function.infer(db);
let syntax_mapping = function.body_syntax_mapping(db);
let expr = ast::Expr::cast(method_call.syntax()).unwrap();
- if let Some(func) = syntax_mapping
- .node_expr(expr)
- .and_then(|it| infer_result.method_resolution(it))
+ if let Some(func) =
+ syntax_mapping.node_expr(expr).and_then(|it| infer_result.method_resolution(it))
{
return Exact(NavigationTarget::from_function(db, func));
};
let infer_result = function.infer(db);
let syntax_mapping = function.body_syntax_mapping(db);
let expr = ast::Expr::cast(field_expr.syntax()).unwrap();
- if let Some(field) = syntax_mapping
- .node_expr(expr)
- .and_then(|it| infer_result.field_resolution(it))
+ if let Some(field) =
+ syntax_mapping.node_expr(expr).and_then(|it| infer_result.field_resolution(it))
{
return Exact(NavigationTarget::from_field(db, field));
};
}
// Try name resolution
let resolver = hir::source_binder::resolver_for_node(db, file_id, name_ref.syntax());
- if let Some(path) = name_ref
- .syntax()
- .ancestors()
- .find_map(ast::Path::cast)
- .and_then(hir::Path::from_ast)
+ if let Some(path) =
+ name_ref.syntax().ancestors().find_map(ast::Path::cast).and_then(hir::Path::from_ast)
{
let resolved = resolver.resolve_path(db, &path);
- match resolved
- .clone()
- .take_types()
- .or_else(|| resolved.take_values())
- {
+ match resolved.clone().take_types().or_else(|| resolved.take_values()) {
Some(Resolution::Def(def)) => return Exact(NavigationTarget::from_def(db, def)),
Some(Resolution::LocalBinding(pat)) => {
let body = resolver.body().expect("no body for local binding");
let syntax_mapping = body.syntax_mapping(db);
- let ptr = syntax_mapping
- .pat_syntax(pat)
- .expect("pattern not found in syntax mapping");
- let name = path
- .as_ident()
- .cloned()
- .expect("local binding from a multi-segment path");
+ let ptr =
+ syntax_mapping.pat_syntax(pat).expect("pattern not found in syntax mapping");
+ let name =
+ path.as_ident().cloned().expect("local binding from a multi-segment path");
let nav = NavigationTarget::from_scope_entry(file_id, name, ptr);
return Exact(nav);
}
}
if range.is_none() {
let node = find_leaf_at_offset(file.syntax(), position.offset).find_map(|leaf| {
- leaf.ancestors()
- .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some())
+ leaf.ancestors().find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some())
})?;
- let frange = FileRange {
- file_id: position.file_id,
- range: node.range(),
- };
+ let frange = FileRange { file_id: position.file_id, range: node.range() };
res.extend(type_of(db, frange).map(Into::into));
range = Some(node.range());
};
where
T: ast::NameOwner + ast::VisibilityOwner,
{
- let mut string = node
- .visibility()
- .map(|v| format!("{} ", v.syntax().text()))
- .unwrap_or_default();
+ let mut string =
+ node.visibility().map(|v| format!("{} ", v.syntax().text())).unwrap_or_default();
string.push_str(label);
node.name()?.syntax().text().push_to(&mut string);
Some(string)
let navs = analysis.goto_implementation(pos).unwrap().unwrap().info;
assert_eq!(navs.len(), expected.len());
- navs.into_iter()
- .enumerate()
- .for_each(|(i, nav)| nav.assert_match(expected[i]));
+ navs.into_iter().enumerate().for_each(|(i, nav)| nav.assert_match(expected[i]));
}
#[test]
impl Foo {}
impl Foo {}
",
- &[
- "impl IMPL_BLOCK FileId(1) [12; 23)",
- "impl IMPL_BLOCK FileId(1) [24; 35)",
- ],
+ &["impl IMPL_BLOCK FileId(1) [12; 23)", "impl IMPL_BLOCK FileId(1) [24; 35)"],
);
}
impl super::Foo {}
}
",
- &[
- "impl IMPL_BLOCK FileId(1) [24; 42)",
- "impl IMPL_BLOCK FileId(1) [57; 75)",
- ],
+ &["impl IMPL_BLOCK FileId(1) [24; 42)", "impl IMPL_BLOCK FileId(1) [57; 75)"],
);
}
//- /b.rs
impl crate::Foo {}
",
- &[
- "impl IMPL_BLOCK FileId(2) [0; 18)",
- "impl IMPL_BLOCK FileId(3) [0; 18)",
- ],
+ &["impl IMPL_BLOCK FileId(2) [0; 18)", "impl IMPL_BLOCK FileId(3) [0; 18)"],
);
}
//- /b.rs
impl crate::T for crate::Foo {}
",
- &[
- "impl IMPL_BLOCK FileId(2) [0; 31)",
- "impl IMPL_BLOCK FileId(3) [0; 31)",
- ],
+ &["impl IMPL_BLOCK FileId(2) [0; 31)", "impl IMPL_BLOCK FileId(3) [0; 31)"],
);
}
}
#[derive(Debug)]
pub enum FileSystemEdit {
- CreateFile {
- source_root: SourceRootId,
- path: RelativePathBuf,
- },
- MoveFile {
- src: FileId,
- dst_source_root: SourceRootId,
- dst_path: RelativePathBuf,
- },
+ CreateFile { source_root: SourceRootId, path: RelativePathBuf },
+ MoveFile { src: FileId, dst_source_root: SourceRootId, dst_path: RelativePathBuf },
}
#[derive(Debug)]
/// Returns a snapshot of the current state, which you can query for
/// semantic information.
pub fn analysis(&self) -> Analysis {
- Analysis {
- db: self.db.snapshot(),
- }
+ Analysis { db: self.db.snapshot() }
}
/// Applies changes to the current state of the world. If there are
impl SourceChange {
pub(crate) fn from_local_edit(file_id: FileId, edit: LocalEdit) -> SourceChange {
- let file_edit = SourceFileEdit {
- file_id,
- edit: edit.edit,
- };
+ let file_edit = SourceFileEdit { file_id, edit: edit.edit };
SourceChange {
label: edit.label,
source_file_edits: vec![file_edit],
file_system_edits: vec![],
- cursor_position: edit
- .cursor_position
- .map(|offset| FilePosition { offset, file_id }),
+ cursor_position: edit.cursor_position.map(|offset| FilePosition { offset, file_id }),
}
}
}
let mut res = MockAnalysis::new();
for entry in parse_fixture(fixture) {
if entry.text.contains(CURSOR_MARKER) {
- assert!(
- position.is_none(),
- "only one marker (<|>) per fixture is allowed"
- );
+ assert!(position.is_none(), "only one marker (<|>) per fixture is allowed");
position = Some(res.add_file_with_position(&entry.meta, &entry.text));
} else {
res.add_file(&entry.meta, &entry.text);
let other_crate = crate_graph.add_crate_root(file_id);
let crate_name = path.parent().unwrap().file_name().unwrap();
if let Some(root_crate) = root_crate {
- crate_graph
- .add_dep(root_crate, crate_name.into(), other_crate)
- .unwrap();
+ crate_graph.add_dep(root_crate, crate_name.into(), other_crate).unwrap();
}
}
change.add_file(source_root, file_id, path, Arc::new(contents));
pub(crate) fn from_module(db: &RootDatabase, module: hir::Module) -> NavigationTarget {
let (file_id, source) = module.definition_source(db);
- let name = module
- .name(db)
- .map(|it| it.to_string().into())
- .unwrap_or_default();
+ let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default();
match source {
ModuleSource::SourceFile(node) => {
NavigationTarget::from_syntax(file_id, name, None, node.syntax())
}
pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget {
- let name = module
- .name(db)
- .map(|it| it.to_string().into())
- .unwrap_or_default();
+ let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default();
if let Some((file_id, source)) = module.declaration_source(db) {
return NavigationTarget::from_syntax(file_id, name, None, source.syntax());
}
}
}
}
- let result = text_edit_bulder
- .finish()
- .apply(&*analysis.file_text(file_id.unwrap()));
+ let result = text_edit_bulder.finish().apply(&*analysis.file_text(file_id.unwrap()));
assert_eq_text!(expected, &*result);
}
}
pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
let source_file = db.parse(file_id);
- source_file
- .syntax()
- .descendants()
- .filter_map(|i| runnable(db, file_id, i))
- .collect()
+ source_file.syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect()
}
fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option<Runnable> {
let kind = if name == "main" {
RunnableKind::Bin
} else if fn_def.has_atom_attr("test") {
- RunnableKind::Test {
- name: name.to_string(),
- }
+ RunnableKind::Test { name: name.to_string() }
} else if fn_def.has_atom_attr("bench") {
- RunnableKind::Bench {
- name: name.to_string(),
- }
+ RunnableKind::Bench { name: name.to_string() }
} else {
return None;
};
- Some(Runnable {
- range: fn_def.syntax().range(),
- kind,
- })
+ Some(Runnable { range: fn_def.syntax().range(), kind })
}
fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Option<Runnable> {
let module = hir::source_binder::module_from_child_node(db, file_id, module.syntax())?;
// FIXME: thread cancellation instead of `.ok`ing
- let path = module
- .path_to_root(db)
- .into_iter()
- .rev()
- .filter_map(|it| it.name(db))
- .join("::");
- Some(Runnable {
- range,
- kind: RunnableKind::TestMod { path },
- })
+ let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::");
+ Some(Runnable { range, kind: RunnableKind::TestMod { path } })
}
#[cfg(test)]
pub(crate) fn status(db: &RootDatabase) -> String {
let files_stats = db.query(FileTextQuery).entries::<FilesStats>();
let syntax_tree_stats = syntax_tree_stats(db);
- let symbols_stats = db
- .query(LibrarySymbolsQuery)
- .entries::<LibrarySymbolsStats>();
+ let symbols_stats = db.query(LibrarySymbolsQuery).entries::<LibrarySymbolsStats>();
let n_defs = {
let interner: &hir::HirInterner = db.as_ref();
interner.len()
impl fmt::Display for SyntaxTreeStats {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- write!(
- fmt,
- "{} trees, {} ({}) retained",
- self.total, self.retained, self.retained_size,
- )
+ write!(fmt, "{} trees, {} ({}) retained", self.total, self.retained, self.retained_size,)
}
}
#[cfg(not(feature = "jemalloc"))]
fn current() -> MemoryStats {
- MemoryStats {
- allocated: Bytes(0),
- resident: Bytes(0),
- }
+ MemoryStats { allocated: Bytes(0), resident: Bytes(0) }
}
}
impl fmt::Display for MemoryStats {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- write!(
- fmt,
- "{} allocated {} resident",
- self.allocated, self.resident,
- )
+ write!(fmt, "{} allocated {} resident", self.allocated, self.resident,)
}
}
}
let snap = Snap(db.snapshot());
- files
- .par_iter()
- .map_with(snap, |db, &file_id| db.0.file_symbols(file_id))
- .collect()
+ files.par_iter().map_with(snap, |db, &file_id| db.0.file_symbols(file_id)).collect()
};
query.search(&buf)
}
pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
let source_file = db.parse(file_id);
let mut res = ra_ide_api_light::highlight(source_file.syntax());
- for macro_call in source_file
- .syntax()
- .descendants()
- .filter_map(ast::MacroCall::cast)
- {
+ for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
if let Some((off, exp)) = hir::MacroDef::ast_expand(macro_call) {
- let mapped_ranges = ra_ide_api_light::highlight(&exp.syntax())
- .into_iter()
- .filter_map(|r| {
+ let mapped_ranges =
+ ra_ide_api_light::highlight(&exp.syntax()).into_iter().filter_map(|r| {
let mapped_range = exp.map_range_back(r.range)?;
- let res = HighlightedRange {
- range: mapped_range + off,
- tag: r.tag,
- };
+ let res = HighlightedRange { range: mapped_range + off, tag: r.tag };
Some(res)
});
res.extend(mapped_ranges);
single_use_tree: &ast::UseTree,
) -> Option<TextEdit> {
let use_tree_list_node = single_use_tree.syntax().parent()?;
- if single_use_tree
- .path()?
- .segment()?
- .syntax()
- .first_child()?
- .kind()
- == SyntaxKind::SELF_KW
- {
+ if single_use_tree.path()?.segment()?.syntax().first_child()?.kind() == SyntaxKind::SELF_KW {
let start = use_tree_list_node.prev_sibling()?.range().start();
let end = use_tree_list_node.range().end();
let range = TextRange::from_to(start, end);
for node in file.syntax().descendants() {
func(&mut diagnostics, node);
}
- let diagnostic = diagnostics
- .pop()
- .unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before));
+ let diagnostic =
+ diagnostics.pop().unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before));
let fix = diagnostic.fix.unwrap();
let actual = fix.edit.apply(&before);
assert_eq_text!(after, &actual);
",
check_unnecessary_braces_in_use_statement,
);
- check_apply(
- "use {b};",
- "use b;",
- check_unnecessary_braces_in_use_statement,
- );
- check_apply(
- "use a::{c};",
- "use a::c;",
- check_unnecessary_braces_in_use_statement,
- );
- check_apply(
- "use a::{self};",
- "use a;",
- check_unnecessary_braces_in_use_statement,
- );
+ check_apply("use {b};", "use b;", check_unnecessary_braces_in_use_statement);
+ check_apply("use a::{c};", "use a::c;", check_unnecessary_braces_in_use_statement);
+ check_apply("use a::{self};", "use a;", check_unnecessary_braces_in_use_statement);
check_apply(
"use a::{c, d::{e}};",
"use a::{c, d::e};",
let node = find_covering_node(root, range);
// Using shallowest node with same range allows us to traverse siblings.
- let node = node
- .ancestors()
- .take_while(|n| n.range() == node.range())
- .last()
- .unwrap();
+ let node = node.ancestors().take_while(|n| n.range() == node.range()).last().unwrap();
if range == node.range() {
if string_kinds.contains(&node.kind()) {
}
if let Some(comma_node) = nearby_comma(node, Direction::Prev) {
- return Some(TextRange::from_to(
- comma_node.range().start(),
- node.range().end(),
- ));
+ return Some(TextRange::from_to(comma_node.range().start(), node.range().end()));
}
if let Some(comma_node) = nearby_comma(node, Direction::Next) {
.filter(|node| is_single_line_ws(node))
.unwrap_or(comma_node);
- return Some(TextRange::from_to(
- node.range().start(),
- final_node.range().end(),
- ));
+ return Some(TextRange::from_to(node.range().start(), final_node.range().end()));
}
return None;
#[test]
fn test_extend_selection_list() {
do_check(r#"fn foo(<|>x: i32) {}"#, &["x", "x: i32"]);
- do_check(
- r#"fn foo(<|>x: i32, y: i32) {}"#,
- &["x", "x: i32", "x: i32, "],
- );
- do_check(
- r#"fn foo(<|>x: i32,y: i32) {}"#,
- &["x", "x: i32", "x: i32,"],
- );
- do_check(
- r#"fn foo(x: i32, <|>y: i32) {}"#,
- &["y", "y: i32", ", y: i32"],
- );
- do_check(
- r#"fn foo(x: i32, <|>y: i32, ) {}"#,
- &["y", "y: i32", ", y: i32"],
- );
- do_check(
- r#"fn foo(x: i32,<|>y: i32) {}"#,
- &["y", "y: i32", ",y: i32"],
- );
+ do_check(r#"fn foo(<|>x: i32, y: i32) {}"#, &["x", "x: i32", "x: i32, "]);
+ do_check(r#"fn foo(<|>x: i32,y: i32) {}"#, &["x", "x: i32", "x: i32,"]);
+ do_check(r#"fn foo(x: i32, <|>y: i32) {}"#, &["y", "y: i32", ", y: i32"]);
+ do_check(r#"fn foo(x: i32, <|>y: i32, ) {}"#, &["y", "y: i32", ", y: i32"]);
+ do_check(r#"fn foo(x: i32,<|>y: i32) {}"#, &["y", "y: i32", ",y: i32"]);
- do_check(
- r#"const FOO: [usize; 2] = [ 22<|> , 33];"#,
- &["22", "22 , "],
- );
+ do_check(r#"const FOO: [usize; 2] = [ 22<|> , 33];"#, &["22", "22 , "]);
do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|>];"#, &["33", ", 33"]);
- do_check(
- r#"const FOO: [usize; 2] = [ 22 , 33<|> ,];"#,
- &["33", ", 33"],
- );
+ do_check(r#"const FOO: [usize; 2] = [ 22 , 33<|> ,];"#, &["33", ", 33"]);
do_check(
r#"
<|>
}
"#,
- &[
- "\n \n",
- "{\n \n}",
- "/// bla\n/// bla\nstruct B {\n \n}",
- ],
+ &["\n \n", "{\n \n}", "/// bla\n/// bla\nstruct B {\n \n}"],
)
}
// Fold items that span multiple lines
if let Some(kind) = fold_kind(node.kind()) {
if node.text().contains('\n') {
- res.push(Fold {
- range: node.range(),
- kind,
- });
+ res.push(Fold { range: node.range(), kind });
}
}
// Fold groups of comments
if node.kind() == COMMENT && !visited_comments.contains(&node) {
if let Some(range) = contiguous_range_for_comment(node, &mut visited_comments) {
- res.push(Fold {
- range,
- kind: FoldKind::Comment,
- })
+ res.push(Fold { range, kind: FoldKind::Comment })
}
}
// Fold groups of imports
if node.kind() == USE_ITEM && !visited_imports.contains(&node) {
if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) {
- res.push(Fold {
- range,
- kind: FoldKind::Imports,
- })
+ res.push(Fold { range, kind: FoldKind::Imports })
}
}
if let Some(range) =
contiguous_range_for_group_unless(node, has_visibility, &mut visited_mods)
{
- res.push(Fold {
- range,
- kind: FoldKind::Mods,
- })
+ res.push(Fold { range, kind: FoldKind::Mods })
}
}
}
}
fn has_visibility(node: &SyntaxNode) -> bool {
- ast::Module::cast(node)
- .and_then(|m| m.visibility())
- .is_some()
+ ast::Module::cast(node).and_then(|m| m.visibility()).is_some()
}
fn contiguous_range_for_group<'a>(
}
if first != last {
- Some(TextRange::from_to(
- first.range().start(),
- last.range().end(),
- ))
+ Some(TextRange::from_to(first.range().start(), last.range().end()))
} else {
// The group consists of only one element, therefore it cannot be folded
None
}
if first != last {
- Some(TextRange::from_to(
- first.range().start(),
- last.range().end(),
- ))
+ Some(TextRange::from_to(first.range().start(), last.range().end()))
} else {
// The group consists of only one element, therefore it cannot be folded
None
fold_kinds.len(),
"The amount of fold kinds is different than the expected amount"
);
- for ((fold, range), fold_kind) in folds
- .into_iter()
- .zip(ranges.into_iter())
- .zip(fold_kinds.into_iter())
+ for ((fold, range), fold_kind) in
+ folds.into_iter().zip(ranges.into_iter()).zip(fold_kinds.into_iter())
{
assert_eq!(fold.range.start(), range.start());
assert_eq!(fold.range.end(), range.end());
fn main() <fold>{
}</fold>"#;
- let folds = &[
- FoldKind::Mods,
- FoldKind::Mods,
- FoldKind::Mods,
- FoldKind::Block,
- ];
+ let folds = &[FoldKind::Mods, FoldKind::Mods, FoldKind::Mods, FoldKind::Block];
do_check(text, folds);
}
}
fn prev_leaf(node: &SyntaxNode) -> Option<&SyntaxNode> {
- generate(node.ancestors().find_map(SyntaxNode::prev_sibling), |it| {
- it.last_child()
- })
- .last()
+ generate(node.ancestors().find_map(SyntaxNode::prev_sibling), |it| it.last_child()).last()
}
pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> {
}
}
- LocalEdit {
- label: "join lines".to_string(),
- edit: edit.finish(),
- cursor_position: None,
- }
+ LocalEdit { label: "join lines".to_string(), edit: edit.finish(), cursor_position: None }
}
fn remove_newline(
)];
let spaces = suff.bytes().take_while(|&b| b == b' ').count();
- edit.replace(
- TextRange::offset_len(offset, ((spaces + 1) as u32).into()),
- " ".to_string(),
- );
+ edit.replace(TextRange::offset_len(offset, ((spaces + 1) as u32).into()), " ".to_string());
return;
}
edit.delete(TextRange::from_to(prev.range().start(), node.range().end()));
} else if prev.kind() == COMMA && next.kind() == R_CURLY {
// Removes: comma, newline (incl. surrounding whitespace)
- let space = if let Some(left) = prev.prev_sibling() {
- compute_ws(left, next)
- } else {
- " "
- };
+ let space = if let Some(left) = prev.prev_sibling() { compute_ws(left, next) } else { " " };
edit.replace(
TextRange::from_to(prev.range().start(), node.range().end()),
space.to_string(),
let block = ast::Block::cast(node.parent()?)?;
let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?;
let expr = extract_trivial_expression(block)?;
- edit.replace(
- block_expr.syntax().range(),
- expr.syntax().text().to_string(),
- );
+ edit.replace(block_expr.syntax().range(), expr.syntax().text().to_string());
Some(())
}
fn join_single_use_tree(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> {
let use_tree_list = ast::UseTreeList::cast(node.parent()?)?;
let (tree,) = use_tree_list.use_trees().collect_tuple()?;
- edit.replace(
- use_tree_list.syntax().range(),
- tree.syntax().text().to_string(),
- );
+ edit.replace(use_tree_list.syntax().range(), tree.syntax().text().to_string());
Some(())
}
}
pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
- const BRACES: &[SyntaxKind] = &[
- L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE,
- ];
+ const BRACES: &[SyntaxKind] =
+ &[L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE];
let (brace_node, brace_idx) = find_leaf_at_offset(file.syntax(), offset)
.filter_map(|node| {
let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
.next()?;
let parent = brace_node.parent()?;
let matching_kind = BRACES[brace_idx ^ 1];
- let matching_node = parent
- .children()
- .find(|node| node.kind() == matching_kind)?;
+ let matching_node = parent.children().find(|node| node.kind() == matching_kind)?;
Some(matching_node.range().start())
}
continue;
}
};
- res.push(HighlightedRange {
- range: node.range(),
- tag,
- })
+ res.push(HighlightedRange { range: node.range(), tag })
}
res
}
let char_len = TextUnit::of_char(c);
if char_len.to_usize() > 1 {
- utf16_chars.push(Utf16Char {
- start: curr_col,
- end: curr_col + char_len,
- });
+ utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + char_len });
}
curr_col += char_len;
utf16_lines.insert(line, utf16_chars);
}
- LineIndex {
- newlines,
- utf16_lines,
- }
+ LineIndex { newlines, utf16_lines }
}
pub fn line_col(&self, offset: TextUnit) -> LineCol {
let line_start_offset = self.newlines[line];
let col = offset - line_start_offset;
- LineCol {
- line: line as u32,
- col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32,
- }
+ LineCol { line: line as u32, col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32 }
}
pub fn offset(&self, line_col: LineCol) -> TextUnit {
#[cfg(test)]
/// Simple reference implementation to use in proptests
pub fn to_line_col(text: &str, offset: TextUnit) -> LineCol {
- let mut res = LineCol {
- line: 0,
- col_utf16: 0,
- };
+ let mut res = LineCol { line: 0, col_utf16: 0 };
for (i, c) in text.char_indices() {
if i + c.len_utf8() > offset.to_usize() {
// if it's an invalid offset, inside a multibyte char
fn test_line_index() {
let text = "hello\nworld";
let index = LineIndex::new(text);
- assert_eq!(
- index.line_col(0.into()),
- LineCol {
- line: 0,
- col_utf16: 0
- }
- );
- assert_eq!(
- index.line_col(1.into()),
- LineCol {
- line: 0,
- col_utf16: 1
- }
- );
- assert_eq!(
- index.line_col(5.into()),
- LineCol {
- line: 0,
- col_utf16: 5
- }
- );
- assert_eq!(
- index.line_col(6.into()),
- LineCol {
- line: 1,
- col_utf16: 0
- }
- );
- assert_eq!(
- index.line_col(7.into()),
- LineCol {
- line: 1,
- col_utf16: 1
- }
- );
- assert_eq!(
- index.line_col(8.into()),
- LineCol {
- line: 1,
- col_utf16: 2
- }
- );
- assert_eq!(
- index.line_col(10.into()),
- LineCol {
- line: 1,
- col_utf16: 4
- }
- );
- assert_eq!(
- index.line_col(11.into()),
- LineCol {
- line: 1,
- col_utf16: 5
- }
- );
- assert_eq!(
- index.line_col(12.into()),
- LineCol {
- line: 1,
- col_utf16: 6
- }
- );
+ assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 });
+ assert_eq!(index.line_col(1.into()), LineCol { line: 0, col_utf16: 1 });
+ assert_eq!(index.line_col(5.into()), LineCol { line: 0, col_utf16: 5 });
+ assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 0 });
+ assert_eq!(index.line_col(7.into()), LineCol { line: 1, col_utf16: 1 });
+ assert_eq!(index.line_col(8.into()), LineCol { line: 1, col_utf16: 2 });
+ assert_eq!(index.line_col(10.into()), LineCol { line: 1, col_utf16: 4 });
+ assert_eq!(index.line_col(11.into()), LineCol { line: 1, col_utf16: 5 });
+ assert_eq!(index.line_col(12.into()), LineCol { line: 1, col_utf16: 6 });
let text = "\nhello\nworld";
let index = LineIndex::new(text);
- assert_eq!(
- index.line_col(0.into()),
- LineCol {
- line: 0,
- col_utf16: 0
- }
- );
- assert_eq!(
- index.line_col(1.into()),
- LineCol {
- line: 1,
- col_utf16: 0
- }
- );
- assert_eq!(
- index.line_col(2.into()),
- LineCol {
- line: 1,
- col_utf16: 1
- }
- );
- assert_eq!(
- index.line_col(6.into()),
- LineCol {
- line: 1,
- col_utf16: 5
- }
- );
- assert_eq!(
- index.line_col(7.into()),
- LineCol {
- line: 2,
- col_utf16: 0
- }
- );
+ assert_eq!(index.line_col(0.into()), LineCol { line: 0, col_utf16: 0 });
+ assert_eq!(index.line_col(1.into()), LineCol { line: 1, col_utf16: 0 });
+ assert_eq!(index.line_col(2.into()), LineCol { line: 1, col_utf16: 1 });
+ assert_eq!(index.line_col(6.into()), LineCol { line: 1, col_utf16: 5 });
+ assert_eq!(index.line_col(7.into()), LineCol { line: 2, col_utf16: 0 });
}
fn arb_text_with_offset() -> BoxedStrategy<(TextUnit, String)> {
- arb_text()
- .prop_flat_map(|text| (arb_offset(&text), Just(text)))
- .boxed()
+ arb_text().prop_flat_map(|text| (arb_offset(&text), Just(text))).boxed()
}
fn to_line_col(text: &str, offset: TextUnit) -> LineCol {
- let mut res = LineCol {
- line: 0,
- col_utf16: 0,
- };
+ let mut res = LineCol { line: 0, col_utf16: 0 };
for (i, c) in text.char_indices() {
if i + c.len_utf8() > offset.to_usize() {
// if it's an invalid offset, inside a multibyte char
assert_eq!(col_index.utf16_lines.len(), 1);
assert_eq!(col_index.utf16_lines[&1].len(), 1);
- assert_eq!(
- col_index.utf16_lines[&1][0],
- Utf16Char {
- start: 17.into(),
- end: 20.into()
- }
- );
+ assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
// UTF-8 to UTF-16, no changes
assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
assert_eq!(col_index.utf16_lines.len(), 1);
assert_eq!(col_index.utf16_lines[&1].len(), 2);
- assert_eq!(
- col_index.utf16_lines[&1][0],
- Utf16Char {
- start: 17.into(),
- end: 20.into()
- }
- );
- assert_eq!(
- col_index.utf16_lines[&1][1],
- Utf16Char {
- start: 21.into(),
- end: 24.into()
- }
- );
+ assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
+ assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() });
// UTF-8 to UTF-16
assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
impl<'a> LineIndexStepIter<'a> {
fn from(line_index: &LineIndex) -> LineIndexStepIter {
- let mut x = LineIndexStepIter {
- line_index,
- next_newline_idx: 0,
- utf16_chars: None,
- };
+ let mut x = LineIndexStepIter { line_index, next_newline_idx: 0, utf16_chars: None };
// skip first newline since it's not real
x.next();
x
.as_mut()
.and_then(|(newline, x)| {
let x = x.next()?;
- Some(Step::Utf16Char(TextRange::from_to(
- *newline + x.start,
- *newline + x.end,
- )))
+ Some(Step::Utf16Char(TextRange::from_to(*newline + x.start, *newline + x.end)))
})
.or_else(|| {
let next_newline = *self.line_index.newlines.get(self.next_newline_idx)?;
impl<'a> Edits<'a> {
fn from_text_edit(text_edit: &'a TextEdit) -> Edits<'a> {
- let mut x = Edits {
- edits: text_edit.as_atoms(),
- current: None,
- acc_diff: 0,
- };
+ let mut x = Edits { edits: text_edit.as_atoms(), current: None, acc_diff: 0 };
x.advance_edit();
x
}
Some((next, rest)) => {
let delete = self.translate_range(next.delete);
let diff = next.insert.len() as i64 - next.delete.len().to_usize() as i64;
- self.current = Some(TranslatedEdit {
- delete,
- insert: &next.insert,
- diff,
- });
+ self.current = Some(TranslatedEdit { delete, insert: &next.insert, diff });
self.edits = rest;
}
None => {
fn next_inserted_steps(&mut self) -> Option<OffsetStepIter<'a>> {
let cur = self.current.as_ref()?;
- let res = Some(OffsetStepIter {
- offset: cur.delete.start(),
- text: &cur.insert,
- });
+ let res = Some(OffsetStepIter { offset: cur.delete.start(), text: &cur.insert });
self.advance_edit();
res
}
if step_pos <= edit.delete.start() {
NextSteps::Use
} else if step_pos <= edit.delete.end() {
- let iter = OffsetStepIter {
- offset: edit.delete.start(),
- text: &edit.insert,
- };
+ let iter = OffsetStepIter { offset: edit.delete.start(), text: &edit.insert };
// empty slice to avoid returning steps again
edit.insert = &edit.insert[edit.insert.len()..];
NextSteps::ReplaceMany(iter)
} else {
- let iter = OffsetStepIter {
- offset: edit.delete.start(),
- text: &edit.insert,
- };
+ let iter = OffsetStepIter { offset: edit.delete.start(), text: &edit.insert };
// empty slice to avoid returning steps again
edit.insert = &edit.insert[edit.insert.len()..];
self.advance_edit();
impl RunningLineCol {
fn new() -> RunningLineCol {
- RunningLineCol {
- line: 0,
- last_newline: TextUnit::from(0),
- col_adjust: TextUnit::from(0),
- }
+ RunningLineCol { line: 0, last_newline: TextUnit::from(0), col_adjust: TextUnit::from(0) }
}
fn to_line_col(&self, offset: TextUnit) -> LineCol {
let edited_text = x.edit.apply(&x.text);
let arb_offset = arb_offset(&edited_text);
(Just(x), Just(edited_text), arb_offset).prop_map(|(x, edited_text, offset)| {
- ArbTextWithEditAndOffset {
- text: x.text,
- edit: x.edit,
- edited_text,
- offset,
- }
+ ArbTextWithEditAndOffset { text: x.text, edit: x.edit, edited_text, offset }
})
})
.boxed()
node_range: node.syntax().range(),
kind: node.syntax().kind(),
detail,
- deprecated: node
- .attrs()
- .filter_map(|x| x.as_named())
- .any(|x| x == "deprecated"),
+ deprecated: node.attrs().filter_map(|x| x.as_named()).any(|x| x == "deprecated"),
})
}
let target_trait = im.target_trait();
let label = match target_trait {
None => format!("impl {}", target_type.syntax().text()),
- Some(t) => format!(
- "impl {} for {}",
- t.syntax().text(),
- target_type.syntax().text(),
- ),
+ Some(t) => {
+ format!("impl {} for {}", t.syntax().text(), target_type.syntax().text(),)
+ }
};
let node = StructureNode {
use crate::{LocalEdit, TextEditBuilder, formatting::leading_indent};
pub fn on_enter(file: &SourceFile, offset: TextUnit) -> Option<LocalEdit> {
- let comment = find_leaf_at_offset(file.syntax(), offset)
- .left_biased()
- .and_then(ast::Comment::cast)?;
+ let comment =
+ find_leaf_at_offset(file.syntax(), offset).left_biased().and_then(ast::Comment::cast)?;
if let ast::CommentFlavor::Multiline = comment.flavor() {
return None;
if expr_range.contains(eq_offset) && eq_offset != expr_range.start() {
return None;
}
- if file
- .syntax()
- .text()
- .slice(eq_offset..expr_range.start())
- .contains('\n')
- {
+ if file.syntax().text().slice(eq_offset..expr_range.start()).contains('\n') {
return None;
}
} else {
let current_indent_len = TextUnit::of_str(current_indent);
// Make sure dot is a part of call chain
- let field_expr = whitespace
- .syntax()
- .parent()
- .and_then(ast::FieldExpr::cast)?;
+ let field_expr = whitespace.syntax().parent().and_then(ast::FieldExpr::cast)?;
let prev_indent = leading_indent(field_expr.syntax())?;
let target_indent = format!(" {}", prev_indent);
let target_indent_len = TextUnit::of_str(&target_indent);
pub fn server_capabilities() -> ServerCapabilities {
ServerCapabilities {
- text_document_sync: Some(TextDocumentSyncCapability::Options(
- TextDocumentSyncOptions {
- open_close: Some(true),
- change: Some(TextDocumentSyncKind::Full),
- will_save: None,
- will_save_wait_until: None,
- save: None,
- },
- )),
+ text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
+ open_close: Some(true),
+ change: Some(TextDocumentSyncKind::Full),
+ will_save: None,
+ will_save_wait_until: None,
+ save: None,
+ })),
hover_provider: Some(true),
completion_provider: Some(CompletionOptions {
resolve_provider: None,
document_symbol_provider: Some(true),
workspace_symbol_provider: Some(true),
code_action_provider: Some(CodeActionProviderCapability::Simple(true)),
- code_lens_provider: Some(CodeLensOptions {
- resolve_provider: Some(true),
- }),
+ code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(true) }),
document_formatting_provider: Some(true),
document_range_formatting_provider: None,
document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions {
None => return Ok(None),
};
let file_id = world.analysis().crate_root(crate_id)?;
- let path = world
- .vfs
- .read()
- .file2path(ra_vfs::VfsFile(file_id.0.into()));
+ let path = world.vfs.read().file2path(ra_vfs::VfsFile(file_id.0.into()));
let res = world.workspaces.iter().find_map(|ws| {
let tgt = ws.cargo.target_by_root(&path)?;
let res = CargoTargetSpec {
fn conv_with(mut self, ctx: &LineIndex) -> ::lsp_types::CompletionItem {
let atom_text_edit = AtomTextEdit::replace(self.source_range(), self.insert_text());
let text_edit = (&atom_text_edit).conv_with(ctx);
- let additional_text_edits = if let Some(edit) = self.take_text_edit() {
- Some(edit.conv_with(ctx))
- } else {
- None
- };
+ let additional_text_edits =
+ if let Some(edit) = self.take_text_edit() { Some(edit.conv_with(ctx)) } else { None };
let mut res = lsp_types::CompletionItem {
label: self.label().to_string(),
type Output = TextUnit;
fn conv_with(self, line_index: &LineIndex) -> TextUnit {
- let line_col = LineCol {
- line: self.line as u32,
- col_utf16: self.character as u32,
- };
+ let line_col = LineCol { line: self.line as u32, col_utf16: self.character as u32 };
line_index.offset(line_col)
}
}
type Output = Range;
fn conv_with(self, line_index: &LineIndex) -> Range {
- Range::new(
- self.start().conv_with(line_index),
- self.end().conv_with(line_index),
- )
+ Range::new(self.start().conv_with(line_index), self.end().conv_with(line_index))
}
}
type Output = TextRange;
fn conv_with(self, line_index: &LineIndex) -> TextRange {
- TextRange::from_to(
- self.start.conv_with(line_index),
- self.end.conv_with(line_index),
- )
+ TextRange::from_to(self.start.conv_with(line_index), self.end.conv_with(line_index))
}
}
changes: None,
document_changes: Some(DocumentChanges::Operations(document_changes)),
};
- Ok(req::SourceChange {
- label: self.label,
- workspace_edit,
- cursor_position,
- })
+ Ok(req::SourceChange { label: self.label, workspace_edit, cursor_position })
}
}
version: None,
};
let line_index = world.analysis().file_line_index(self.file_id);
- let edits = self
- .edit
- .as_atoms()
- .iter()
- .map_conv_with(&line_index)
- .collect();
- Ok(TextDocumentEdit {
- text_document,
- edits,
- })
+ let edits = self.edit.as_atoms().iter().map_conv_with(&line_index).collect();
+ Ok(TextDocumentEdit { text_document, edits })
}
}
let uri = world.path_to_uri(source_root, &path)?;
ResourceOp::Create(CreateFile { uri, options: None })
}
- FileSystemEdit::MoveFile {
- src,
- dst_source_root,
- dst_path,
- } => {
+ FileSystemEdit::MoveFile { src, dst_source_root, dst_path } => {
let old_uri = world.file_id_to_uri(src)?;
let new_uri = world.path_to_uri(dst_source_root, &dst_path)?;
- ResourceOp::Rename(RenameFile {
- old_uri,
- new_uri,
- options: None,
- })
+ ResourceOp::Rename(RenameFile { old_uri, new_uri, options: None })
}
};
Ok(res)
let target_range = target.info.full_range().conv_with(&tgt_line_index);
- let target_selection_range = target
- .info
- .focus_range()
- .map(|it| it.conv_with(&tgt_line_index))
- .unwrap_or(target_range);
+ let target_selection_range =
+ target.info.focus_range().map(|it| it.conv_with(&tgt_line_index)).unwrap_or(target_range);
let res = LocationLink {
origin_selection_range: Some(target.range.conv_with(line_index)),
fn main_inner() -> Result<()> {
let (receiver, sender, threads) = stdio_transport();
let cwd = ::std::env::current_dir()?;
- run_server(
- ra_lsp_server::server_capabilities(),
- receiver,
- sender,
- |params, r, s| {
- let root = params
- .root_uri
- .and_then(|it| it.to_file_path().ok())
- .unwrap_or(cwd);
- let supports_decorations = params
- .initialization_options
- .and_then(|v| InitializationOptions::deserialize(v).ok())
- .and_then(|it| it.publish_decorations)
- == Some(true);
- ra_lsp_server::main_loop(false, root, supports_decorations, r, s)
- },
- )?;
+ run_server(ra_lsp_server::server_capabilities(), receiver, sender, |params, r, s| {
+ let root = params.root_uri.and_then(|it| it.to_file_path().ok()).unwrap_or(cwd);
+ let supports_decorations = params
+ .initialization_options
+ .and_then(|v| InitializationOptions::deserialize(v).ok())
+ .and_then(|it| it.publish_decorations)
+ == Some(true);
+ ra_lsp_server::main_loop(false, root, supports_decorations, r, s)
+ })?;
log::info!("shutting down IO...");
threads.join()?;
log::info!("... IO is down");
};
#[derive(Debug, Fail)]
-#[fail(
- display = "Language Server request failed with {}. ({})",
- code, message
-)]
+#[fail(display = "Language Server request failed with {}. ({})", code, message)]
pub struct LspError {
pub code: i32,
pub message: String,
}
};
ws_worker.shutdown();
- ws_watcher
- .shutdown()
- .map_err(|_| format_err!("ws watcher died"))?;
+ ws_watcher.shutdown().map_err(|_| format_err!("ws watcher died"))?;
let mut state = ServerWorldState::new(ws_root.clone(), workspaces);
log::info!("server initialized, serving requests");
);
log::info!("waiting for tasks to finish...");
- task_receiver
- .into_iter()
- .for_each(|task| on_task(task, msg_sender, &mut pending_requests));
+ task_receiver.into_iter().for_each(|task| on_task(task, msg_sender, &mut pending_requests));
log::info!("...tasks have finished");
log::info!("joining threadpool...");
drop(pool);
impl fmt::Debug for Event {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let debug_verbose_not = |not: &RawNotification, f: &mut fmt::Formatter| {
- f.debug_struct("RawNotification")
- .field("method", ¬.method)
- .finish()
+ f.debug_struct("RawNotification").field("method", ¬.method).finish()
};
match self {
sender: &Sender<Task>,
req: RawRequest,
) -> Result<Option<RawRequest>> {
- let mut pool_dispatcher = PoolDispatcher {
- req: Some(req),
- res: None,
- pool,
- world,
- sender,
- };
+ let mut pool_dispatcher = PoolDispatcher { req: Some(req), res: None, pool, world, sender };
let req = pool_dispatcher
.on::<req::AnalyzerStatus>(handlers::handle_analyzer_status)?
.on::<req::SyntaxTree>(handlers::handle_syntax_tree)?
let not = match not.cast::<req::DidOpenTextDocument>() {
Ok(params) => {
let uri = params.text_document.uri;
- let path = uri
- .to_file_path()
- .map_err(|()| format_err!("invalid uri: {}", uri))?;
- if let Some(file_id) = state
- .vfs
- .write()
- .add_file_overlay(&path, params.text_document.text)
+ let path = uri.to_file_path().map_err(|()| format_err!("invalid uri: {}", uri))?;
+ if let Some(file_id) =
+ state.vfs.write().add_file_overlay(&path, params.text_document.text)
{
subs.add_sub(FileId(file_id.0.into()));
}
let not = match not.cast::<req::DidChangeTextDocument>() {
Ok(mut params) => {
let uri = params.text_document.uri;
- let path = uri
- .to_file_path()
- .map_err(|()| format_err!("invalid uri: {}", uri))?;
- let text = params
- .content_changes
- .pop()
- .ok_or_else(|| format_err!("empty changes"))?
- .text;
+ let path = uri.to_file_path().map_err(|()| format_err!("invalid uri: {}", uri))?;
+ let text =
+ params.content_changes.pop().ok_or_else(|| format_err!("empty changes"))?.text;
state.vfs.write().change_file_overlay(path.as_path(), text);
return Ok(());
}
let not = match not.cast::<req::DidCloseTextDocument>() {
Ok(params) => {
let uri = params.text_document.uri;
- let path = uri
- .to_file_path()
- .map_err(|()| format_err!("invalid uri: {}", uri))?;
+ let path = uri.to_file_path().map_err(|()| format_err!("invalid uri: {}", uri))?;
if let Some(file_id) = state.vfs.write().remove_file_overlay(path.as_path()) {
subs.remove_sub(FileId(file_id.0.into()));
}
- let params = req::PublishDiagnosticsParams {
- uri,
- diagnostics: Vec::new(),
- };
+ let params = req::PublishDiagnosticsParams { uri, diagnostics: Vec::new() };
let not = RawNotification::new::<req::PublishDiagnostics>(&params);
msg_sender.send(RawMessage::Notification(not)).unwrap();
return Ok(());
.into_iter()
.map_conv_with(&line_index)
.map(|range| FileRange { file_id, range })
- .map(|frange| {
- world
- .analysis()
- .extend_selection(frange)
- .map(|it| it.conv_with(&line_index))
- })
+ .map(|frange| world.analysis().extend_selection(frange).map(|it| it.conv_with(&line_index)))
.collect::<Cancelable<Vec<_>>>()?;
Ok(req::ExtendSelectionResult { selections })
}
.into_iter()
.map_conv_with(&line_index)
.map(|offset| {
- world
- .analysis()
- .matching_brace(FilePosition { file_id, offset })
- .unwrap_or(offset)
+ world.analysis().matching_brace(FilePosition { file_id, offset }).unwrap_or(offset)
})
.map_conv_with(&line_index)
.collect();
let all_symbols = params.query.contains('#');
let libs = params.query.contains('*');
let query = {
- let query: String = params
- .query
- .chars()
- .filter(|&c| c != '#' && c != '*')
- .collect();
+ let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect();
let mut q = Query::new(query);
if !all_symbols {
q.only_types();
Some(items) => items,
};
let line_index = world.analysis().file_line_index(position.file_id);
- let items = items
- .into_iter()
- .map(|item| item.conv_with(&line_index))
- .collect();
+ let items = items.into_iter().map(|item| item.conv_with(&line_index)).collect();
Ok(Some(req::CompletionResponse::Array(items)))
}
.into());
}
- let optional_change = world
- .analysis()
- .rename(FilePosition { file_id, offset }, &*params.new_name)?;
+ let optional_change =
+ world.analysis().rename(FilePosition { file_id, offset }, &*params.new_name)?;
let change = match optional_change {
None => return Ok(None),
Some(it) => it,
let line_index = world.analysis().file_line_index(file_id);
let offset = params.position.conv_with(&line_index);
- let refs = world
- .analysis()
- .find_all_refs(FilePosition { file_id, offset })?;
+ let refs = world.analysis().find_all_refs(FilePosition { file_id, offset })?;
Ok(Some(
- refs.into_iter()
- .filter_map(|r| to_location(r.0, r.1, &world, &line_index).ok())
- .collect(),
+ refs.into_iter().filter_map(|r| to_location(r.0, r.1, &world, &line_index).ok()).collect(),
))
}
use std::process;
let mut rustfmt = process::Command::new("rustfmt");
- rustfmt
- .stdin(process::Stdio::piped())
- .stdout(process::Stdio::piped());
+ rustfmt.stdin(process::Stdio::piped()).stdout(process::Stdio::piped());
if let Ok(path) = params.text_document.uri.to_file_path() {
if let Some(parent) = path.parent() {
let line_index = world.analysis().file_line_index(file_id);
let range = params.range.conv_with(&line_index);
- let assists = world
- .analysis()
- .assists(FileRange { file_id, range })?
- .into_iter();
+ let assists = world.analysis().assists(FileRange { file_id, range })?.into_iter();
let fixes = world
.analysis()
.diagnostics(file_id)?
to_value(locations).unwrap(),
]),
};
- Ok(CodeLens {
- range: code_lens.range,
- command: Some(cmd),
- data: None,
- })
+ Ok(CodeLens { range: code_lens.range, command: Some(cmd), data: None })
}
None => Ok(CodeLens {
range: code_lens.range,
- command: Some(Command {
- title: "Error".into(),
- ..Default::default()
- }),
+ command: Some(Command { title: "Error".into(), ..Default::default() }),
data: None,
}),
}
let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id);
- let refs = world
- .analysis()
- .find_all_refs(params.try_conv_with(&world)?)?;
+ let refs = world.analysis().find_all_refs(params.try_conv_with(&world)?)?;
Ok(Some(
refs.into_iter()
- .map(|r| DocumentHighlight {
- range: r.1.conv_with(&line_index),
- kind: None,
- })
+ .map(|r| DocumentHighlight { range: r.1.conv_with(&line_index), kind: None })
.collect(),
))
}
file_id: FileId,
) -> Result<req::PublishDecorationsParams> {
let uri = world.file_id_to_uri(file_id)?;
- Ok(req::PublishDecorationsParams {
- uri,
- decorations: highlight(&world, file_id)?,
- })
+ Ok(req::PublishDecorationsParams { uri, decorations: highlight(&world, file_id)? })
}
fn highlight(world: &ServerWorld, file_id: FileId) -> Result<Vec<Decoration>> {
.analysis()
.highlight(file_id)?
.into_iter()
- .map(|h| Decoration {
- range: h.range.conv_with(&line_index),
- tag: h.tag,
- })
+ .map(|h| Decoration { range: h.range.conv_with(&line_index), tag: h.tag })
.collect();
Ok(res)
}
impl Subscriptions {
pub fn new() -> Subscriptions {
- Subscriptions {
- subs: FxHashSet::default(),
- }
+ Subscriptions { subs: FxHashSet::default() }
}
pub fn add_sub(&mut self, file_id: FileId) {
self.subs.insert(file_id);
impl CargoWorkspace {
pub fn from_cargo_metadata(cargo_toml: &Path) -> Result<CargoWorkspace> {
let mut meta = MetadataCommand::new();
- meta.manifest_path(cargo_toml)
- .features(CargoOpt::AllFeatures);
+ meta.manifest_path(cargo_toml).features(CargoOpt::AllFeatures);
if let Some(parent) = cargo_toml.parent() {
meta.current_dir(parent);
}
- let meta = meta
- .exec()
- .map_err(|e| format_err!("cargo metadata failed: {}", e))?;
+ let meta = meta.exec().map_err(|e| format_err!("cargo metadata failed: {}", e))?;
let mut pkg_by_id = FxHashMap::default();
let mut packages = Arena::default();
let mut targets = Arena::default();
for node in resolve.nodes {
let source = pkg_by_id[&node.id];
for dep_node in node.deps {
- let dep = PackageDependency {
- name: dep_node.name.into(),
- pkg: pkg_by_id[&dep_node.pkg],
- };
+ let dep =
+ PackageDependency { name: dep_node.name.into(), pkg: pkg_by_id[&dep_node.pkg] };
packages[source].dependencies.push(dep);
}
}
self.packages.iter().map(|(id, _pkg)| id)
}
pub fn target_by_root(&self, root: &Path) -> Option<Target> {
- self.packages()
- .filter_map(|pkg| pkg.targets(self).find(|it| it.root(self) == root))
- .next()
+ self.packages().filter_map(|pkg| pkg.targets(self).find(|it| it.root(self) == root)).next()
}
}
);
}
- let mut sysroot = Sysroot {
- crates: Arena::default(),
- };
+ let mut sysroot = Sysroot { crates: Arena::default() };
for name in SYSROOT_CRATES.trim().lines() {
let root = src.join(format!("lib{}", name)).join("lib.rs");
if root.exists() {
}
fn by_name(&self, name: &str) -> Option<SysrootCrate> {
- self.crates
- .iter()
- .find(|(_id, data)| data.name == name)
- .map(|(id, _data)| id)
+ self.crates.iter().find(|(_id, data)| data.name == name).map(|(id, _data)| id)
}
}
}
}
- let libstd = ws
- .sysroot
- .std()
- .and_then(|it| sysroot_crates.get(&it).map(|&it| it));
+ let libstd = ws.sysroot.std().and_then(|it| sysroot_crates.get(&it).map(|&it| it));
let mut pkg_to_lib_crate = FxHashMap::default();
let mut pkg_crates = FxHashMap::default();
lib_tgt = Some(crate_id);
pkg_to_lib_crate.insert(pkg, crate_id);
}
- pkg_crates
- .entry(pkg)
- .or_insert_with(Vec::new)
- .push(crate_id);
+ pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id);
}
}
libs.push((SourceRootId(root.0.into()), files));
}
}
- VfsChange::AddFile {
- root,
- file,
- path,
- text,
- } => {
- change.add_file(
- SourceRootId(root.0.into()),
- FileId(file.0.into()),
- path,
- text,
- );
+ VfsChange::AddFile { root, file, path, text } => {
+ change.add_file(SourceRootId(root.0.into()), FileId(file.0.into()), path, text);
}
VfsChange::RemoveFile { root, file, path } => {
change.remove_file(SourceRootId(root.0.into()), FileId(file.0.into()), path)
}
pub fn uri_to_file_id(&self, uri: &Url) -> Result<FileId> {
- let path = uri
- .to_file_path()
- .map_err(|()| format_err!("invalid uri: {}", uri))?;
+ let path = uri.to_file_path().map_err(|()| format_err!("invalid uri: {}", uri))?;
let file = self
.vfs
.read()
);
server.wait_for_feedback("workspace loaded");
server.request::<Runnables>(
- RunnablesParams {
- text_document: server.doc_id("lib.rs"),
- position: None,
- },
+ RunnablesParams { text_document: server.doc_id("lib.rs"), position: None },
json!([
{
"args": [ "test", "--", "foo", "--nocapture" ],
"#,
);
server.wait_for_feedback("workspace loaded");
- let empty_context = || CodeActionContext {
- diagnostics: Vec::new(),
- only: None,
- };
+ let empty_context = || CodeActionContext { diagnostics: Vec::new(), only: None };
server.request::<CodeActionRequest>(
CodeActionParams {
text_document: server.doc_id("src/lib.rs"),
pub fn doc_id(&self, rel_path: &str) -> TextDocumentIdentifier {
let path = self.dir.path().join(rel_path);
- TextDocumentIdentifier {
- uri: Url::from_file_path(path).unwrap(),
- }
+ TextDocumentIdentifier { uri: Url::from_file_path(path).unwrap() }
}
pub fn request<R>(&self, params: R::Params, expected_resp: Value)
}
fn send_request_(&self, r: RawRequest) -> Value {
let id = r.id;
- self.worker
- .as_ref()
- .unwrap()
- .send(RawMessage::Request(r))
- .unwrap();
+ self.worker.as_ref().unwrap().send(RawMessage::Request(r)).unwrap();
while let Some(msg) = self.recv() {
match msg {
RawMessage::Request(req) => panic!("unexpected request: {:?}", req),
})
}
fn send_notification(&self, not: RawNotification) {
- self.worker
- .as_ref()
- .unwrap()
- .send(RawMessage::Notification(not))
- .unwrap();
+ self.worker.as_ref().unwrap().send(RawMessage::Notification(not)).unwrap();
}
}
"#;
let source_file = ast::SourceFile::parse(macro_definition);
- let macro_definition = source_file
- .syntax()
- .descendants()
- .find_map(ast::MacroCall::cast)
- .unwrap();
+ let macro_definition =
+ source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
let source_file = ast::SourceFile::parse(macro_invocation);
- let macro_invocation = source_file
- .syntax()
- .descendants()
- .find_map(ast::MacroCall::cast)
- .unwrap();
+ let macro_invocation =
+ source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
let definition_tt = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
let invocation_tt = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
fn create_rules(macro_definition: &str) -> MacroRules {
let source_file = ast::SourceFile::parse(macro_definition);
- let macro_definition = source_file
- .syntax()
- .descendants()
- .find_map(ast::MacroCall::cast)
- .unwrap();
+ let macro_definition =
+ source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
let definition_tt = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
crate::MacroRules::parse(&definition_tt).unwrap()
fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) {
let source_file = ast::SourceFile::parse(invocation);
- let macro_invocation = source_file
- .syntax()
- .descendants()
- .find_map(ast::MacroCall::cast)
- .unwrap();
+ let macro_invocation =
+ source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
let invocation_tt = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
}
_ => return None,
},
- crate::TokenTree::Repeat(crate::Repeat {
- subtree,
- kind: _,
- separator,
- }) => {
+ crate::TokenTree::Repeat(crate::Repeat { subtree, kind: _, separator }) => {
while let Some(nested) = match_lhs(subtree, input) {
res.push_nested(nested)?;
if let Some(separator) = *separator {
.map(|it| expand_tt(it, bindings, nesting))
.collect::<Option<Vec<_>>>()?;
- Some(tt::Subtree {
- token_trees,
- delimiter: template.delimiter,
- })
+ Some(tt::Subtree { token_trees, delimiter: template.delimiter })
}
fn expand_tt(
token_trees.push(t.into())
}
nesting.pop().unwrap();
- tt::Subtree {
- token_trees,
- delimiter: tt::Delimiter::None,
- }
- .into()
+ tt::Subtree { token_trees, delimiter: tt::Delimiter::None }.into()
}
crate::TokenTree::Leaf(leaf) => match leaf {
- crate::Leaf::Ident(ident) => tt::Leaf::from(tt::Ident {
- text: ident.text.clone(),
- })
- .into(),
+ crate::Leaf::Ident(ident) => {
+ tt::Leaf::from(tt::Ident { text: ident.text.clone() }).into()
+ }
crate::Leaf::Punct(punct) => tt::Leaf::from(punct.clone()).into(),
crate::Leaf::Var(v) => bindings.get(&v.text, nesting)?.clone(),
- crate::Leaf::Literal(l) => tt::Leaf::from(tt::Literal {
- text: l.text.clone(),
- })
- .into(),
+ crate::Leaf::Literal(l) => tt::Leaf::from(tt::Literal { text: l.text.clone() }).into(),
},
};
Some(res)
};
token_trees.push(child);
}
- Some(crate::Subtree {
- token_trees,
- delimiter: tt.delimiter,
- })
+ Some(crate::Subtree { token_trees, delimiter: tt.delimiter })
}
fn parse_var(p: &mut TtCursor) -> Option<crate::Var> {
_ => return None,
};
p.bump();
- Some(crate::Repeat {
- subtree,
- kind,
- separator,
- })
+ Some(crate::Repeat { subtree, kind, separator })
}
for char in child.leaf_text().unwrap().chars() {
if let Some(char) = prev {
token_trees.push(
- tt::Leaf::from(tt::Punct {
- char,
- spacing: tt::Spacing::Joint,
- })
- .into(),
+ tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Joint }).into(),
);
}
prev = Some(char)
}
if let Some(char) = prev {
- token_trees.push(
- tt::Leaf::from(tt::Punct {
- char,
- spacing: tt::Spacing::Alone,
- })
- .into(),
- );
+ token_trees
+ .push(tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Alone }).into());
}
} else {
let child: tt::TokenTree = if child.kind() == TOKEN_TREE {
let text = child.leaf_text().unwrap().clone();
tt::Leaf::from(tt::Ident { text }).into()
} else if child.kind().is_literal() {
- tt::Leaf::from(tt::Literal {
- text: child.leaf_text().unwrap().clone(),
- })
- .into()
+ tt::Leaf::from(tt::Literal { text: child.leaf_text().unwrap().clone() }).into()
} else {
return None;
};
}
}
- let res = tt::Subtree {
- delimiter,
- token_trees,
- };
+ let res = tt::Subtree { delimiter, token_trees };
Some(res)
}
}
pub fn visitor_ctx<'a, T, C>(ctx: C) -> impl VisitorCtx<'a, Output = T, Ctx = C> {
- EmptyVisitorCtx {
- ph: PhantomData,
- ctx,
- }
+ EmptyVisitorCtx { ph: PhantomData, ctx }
}
pub trait Visitor<'a>: Sized {
N: AstNode + 'a,
F: FnOnce(&'a N) -> Self::Output,
{
- Vis {
- inner: self,
- f,
- ph: PhantomData,
- }
+ Vis { inner: self, f, ph: PhantomData }
}
}
N: AstNode + 'a,
F: FnOnce(&'a N, Self::Ctx) -> Self::Output,
{
- VisCtx {
- inner: self,
- f,
- ph: PhantomData,
- }
+ VisCtx { inner: self, f, ph: PhantomData }
}
}
let line = comment.text().as_str();
// Determine if the prefix or prefix + 1 char is stripped
- let pos = if line
- .chars()
- .nth(prefix_len)
- .map(|c| c.is_whitespace())
- .unwrap_or(false)
- {
- prefix_len + 1
- } else {
- prefix_len
- };
+ let pos =
+ if line.chars().nth(prefix_len).map(|c| c.is_whitespace()).unwrap_or(false) {
+ prefix_len + 1
+ } else {
+ prefix_len
+ };
line[pos..].to_owned()
})
impl PathSegment {
pub fn parent_path(&self) -> &Path {
- self.syntax()
- .parent()
- .and_then(Path::cast)
- .expect("segments are always nested in paths")
+ self.syntax().parent().and_then(Path::cast).expect("segments are always nested in paths")
}
pub fn kind(&self) -> Option<PathSegmentKind> {
impl<'a, N> AstChildren<'a, N> {
fn new(parent: &'a SyntaxNode) -> Self {
- AstChildren {
- inner: parent.children(),
- ph: PhantomData,
- }
+ AstChildren { inner: parent.children(), ph: PhantomData }
}
}
let borrowed = self.syntax().children().any(|n| n.kind() == AMP);
if borrowed {
// check for a `mut` coming after the & -- `mut &self` != `&mut self`
- if self
- .syntax()
- .children()
- .skip_while(|n| n.kind() != AMP)
- .any(|n| n.kind() == MUT_KW)
+ if self.syntax().children().skip_while(|n| n.kind() != AMP).any(|n| n.kind() == MUT_KW)
{
SelfParamFlavor::MutRef
} else {
"#,
);
let module = file.syntax().descendants().find_map(Module::cast).unwrap();
- assert_eq!(
- "doc1\n```\nfn foo() {\n // ...\n}\n```",
- module.doc_comment_text().unwrap()
- );
+ assert_eq!("doc1\n```\nfn foo() {\n // ...\n}\n```", module.doc_comment_text().unwrap());
}
const EXPR_FIRST: TokenSet = LHS_FIRST;
pub(super) fn expr(p: &mut Parser) -> BlockLike {
- let r = Restrictions {
- forbid_structs: false,
- prefer_stmt: false,
- };
+ let r = Restrictions { forbid_structs: false, prefer_stmt: false };
expr_bp(p, r, 1)
}
pub(super) fn expr_stmt(p: &mut Parser) -> BlockLike {
- let r = Restrictions {
- forbid_structs: false,
- prefer_stmt: true,
- };
+ let r = Restrictions { forbid_structs: false, prefer_stmt: true };
expr_bp(p, r, 1)
}
fn expr_no_struct(p: &mut Parser) {
- let r = Restrictions {
- forbid_structs: true,
- prefer_stmt: false,
- };
+ let r = Restrictions { forbid_structs: true, prefer_stmt: false };
expr_bp(p, r, 1);
}
}
}
p.expect(R_PAREN);
- m.complete(
- p,
- if saw_expr && !saw_comma {
- PAREN_EXPR
- } else {
- TUPLE_EXPR
- },
- )
+ m.complete(p, if saw_expr && !saw_comma { PAREN_EXPR } else { TUPLE_EXPR })
}
// test array_expr
IMPL_BLOCK
}
_ => {
- return if has_mods {
- MaybeItem::Modifiers
- } else {
- MaybeItem::None
- };
+ return if has_mods { MaybeItem::Modifiers } else { MaybeItem::None };
}
};
}
fn list_(p: &mut Parser, flavor: Flavor) {
- let (bra, ket) = if flavor.type_required() {
- (L_PAREN, R_PAREN)
- } else {
- (PIPE, PIPE)
- };
+ let (bra, ket) = if flavor.type_required() { (L_PAREN, R_PAREN) } else { (PIPE, PIPE) };
assert!(p.at(bra));
let m = p.start();
p.bump();
pub(super) const PATTERN_FIRST: TokenSet = expressions::LITERAL_FIRST
.union(paths::PATH_FIRST)
- .union(token_set![
- REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE
- ]);
+ .union(token_set![REF_KW, MUT_KW, L_PAREN, L_BRACK, AMP, UNDERSCORE]);
pub(super) fn pattern(p: &mut Parser) {
pattern_r(p, PAT_RECOVERY_SET)
impl<'s> Ptr<'s> {
/// Creates a new `Ptr` from a string.
pub fn new(text: &'s str) -> Ptr<'s> {
- Ptr {
- text,
- len: 0.into(),
- }
+ Ptr { text, len: 0.into() }
}
/// Gets the length of the remaining string.
//! [rfc#2256]: <https://github.com/rust-lang/rfcs/pull/2256>
//! [RFC.md]: <https://github.com/matklad/libsyntax2/blob/master/docs/RFC.md>
-#![forbid(
- missing_debug_implementations,
- unconditional_recursion,
- future_incompatible
-)]
+#![forbid(missing_debug_implementations, unconditional_recursion, future_incompatible)]
#![deny(bad_style, missing_docs)]
#![allow(missing_docs)]
//#![warn(unreachable_pub)] // rust-lang/rust#47816
}
pub fn reparse(&self, edit: &AtomTextEdit) -> TreeArc<SourceFile> {
- self.incremental_reparse(edit)
- .unwrap_or_else(|| self.full_reparse(edit))
+ self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
}
pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<TreeArc<SourceFile>> {
impl Marker {
fn new(pos: u32) -> Marker {
- Marker {
- pos,
- bomb: DropBomb::new("Marker must be either completed or abandoned"),
- }
+ Marker { pos, bomb: DropBomb::new("Marker must be either completed or abandoned") }
}
/// Finishes the syntax tree node and assigns `kind` to it,
parser(&mut parser_api);
parser_api.0.into_events()
};
- EventProcessor::new(sink, text, tokens, &mut events)
- .process()
- .finish()
+ EventProcessor::new(sink, text, tokens, &mut events).process().finish()
}
/// Implementation details of `Parser`, extracted
/// Append one Error event to the back of events.
pub(super) fn error(&mut self, msg: String) {
- self.push_event(Event::Error {
- msg: ParseError(msg),
- })
+ self.push_event(Event::Error { msg: ParseError(msg) })
}
    /// Complete an event by appending a `Finish` event.
pub(super) fn complete(&mut self, pos: u32, kind: SyntaxKind) {
match self.events[pos as usize] {
- Event::Start {
- kind: ref mut slot, ..
- } => {
+ Event::Start { kind: ref mut slot, .. } => {
*slot = kind;
}
_ => unreachable!(),
let idx = pos as usize;
if idx == self.events.len() - 1 {
match self.events.pop() {
- Some(Event::Start {
- kind: TOMBSTONE,
- forward_parent: None,
- }) => (),
+ Some(Event::Start { kind: TOMBSTONE, forward_parent: None }) => (),
_ => unreachable!(),
}
}
pub(super) fn precede(&mut self, pos: u32) -> u32 {
let new_pos = self.start();
match self.events[pos as usize] {
- Event::Start {
- ref mut forward_parent,
- ..
- } => {
+ Event::Start { ref mut forward_parent, .. } => {
*forward_parent = Some(new_pos - pos);
}
_ => unreachable!(),
impl Event {
pub(crate) fn tombstone() -> Self {
- Event::Start {
- kind: TOMBSTONE,
- forward_parent: None,
- }
+ Event::Start { kind: TOMBSTONE, forward_parent: None }
}
}
tokens: &'a [Token],
events: &'a mut [Event],
) -> EventProcessor<'a, S> {
- EventProcessor {
- sink,
- text_pos: 0.into(),
- text,
- token_pos: 0,
- tokens,
- events,
- }
+ EventProcessor { sink, text_pos: 0.into(), text, token_pos: 0, tokens, events }
}
/// Generate the syntax tree with the control of events.
for i in 0..self.events.len() {
match mem::replace(&mut self.events[i], Event::tombstone()) {
- Event::Start {
- kind: TOMBSTONE, ..
- } => (),
+ Event::Start { kind: TOMBSTONE, .. } => (),
- Event::Start {
- kind,
- forward_parent,
- } => {
+ Event::Start { kind, forward_parent } => {
                    // For events[A, B, C], B is A's forward_parent and C is B's forward_parent.
                    // In normal control flow the parent-child relation would be `A -> B -> C`,
                    // but with the magic forward_parent it is recorded as `C <- B <- A`.
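                    // Illustratively: such a chain is created by `precede` above, which pushes a
                    // fresh Start event and records the offset to it in the earlier event's
                    // `forward_parent`; the loop below walks those offsets, collecting the kinds
                    // into `forward_parents`, and the collected kinds are then replayed
                    // outermost-first, matching the `C <- B <- A` order described above.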
idx += fwd as usize;
// append `A`'s forward_parent `B`
fp = match mem::replace(&mut self.events[idx], Event::tombstone()) {
- Event::Start {
- kind,
- forward_parent,
- } => {
+ Event::Start { kind, forward_parent } => {
forward_parents.push(kind);
forward_parent
}
.sum::<TextUnit>();
self.leaf(kind, len, n_raw_tokens);
}
- Event::Error { msg } => self.sink.error(SyntaxError::new(
- SyntaxErrorKind::ParseError(msg),
- self.text_pos,
- )),
+ Event::Error { msg } => self
+ .sink
+ .error(SyntaxError::new(SyntaxErrorKind::ParseError(msg), self.text_pos)),
}
}
self.sink
self.sink.start_branch(kind);
return;
}
- let n_trivias = self.tokens[self.token_pos..]
- .iter()
- .take_while(|it| it.kind.is_trivia())
- .count();
+ let n_trivias =
+ self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count();
let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias];
let mut trivia_end =
self.text_pos + leading_trivias.iter().map(|it| it.len).sum::<TextUnit>();
len += token.len;
}
- ParserInput {
- text,
- start_offsets,
- tokens,
- }
+ ParserInput { text, start_offsets, tokens }
}
/// Get the syntax kind of token at given input position.
impl SyntaxNodePtr {
pub fn new(node: &SyntaxNode) -> SyntaxNodePtr {
- SyntaxNodePtr {
- range: node.range(),
- kind: node.kind(),
- }
+ SyntaxNodePtr { range: node.range(), kind: node.kind() }
}
pub fn to_node(self, source_file: &SourceFile) -> &SyntaxNode {
generate(Some(source_file.syntax()), |&node| {
- node.children()
- .find(|it| self.range.is_subrange(&it.range()))
+ node.children().find(|it| self.range.is_subrange(&it.range()))
})
.find(|it| it.range() == self.range && it.kind() == self.kind)
.unwrap_or_else(|| panic!("can't resolve local ptr to SyntaxNode: {:?}", self))
impl<N: AstNode> AstPtr<N> {
pub fn new(node: &N) -> AstPtr<N> {
- AstPtr {
- raw: SyntaxNodePtr::new(node.syntax()),
- _ty: PhantomData,
- }
+ AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData }
}
pub fn to_node(self, source_file: &SourceFile) -> &N {
use crate::{ast, AstNode};
let file = SourceFile::parse("struct Foo { f: u32, }");
- let field = file
- .syntax()
- .descendants()
- .find_map(ast::NamedFieldDef::cast)
- .unwrap();
+ let field = file.syntax().descendants().find_map(ast::NamedFieldDef::cast).unwrap();
let ptr = SyntaxNodePtr::new(field.syntax());
let field_syntax = ptr.to_node(&file);
assert_eq!(field.syntax(), &*field_syntax);
type ParseFn = fn(&mut Parser);
fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxNode, ParseFn)> {
let node = algo::find_covering_node(node, range);
- return node
- .ancestors()
- .filter_map(|node| reparser(node).map(|r| (node, r)))
- .next();
+ return node.ancestors().filter_map(|node| reparser(node).map(|r| (node, r))).next();
fn reparser(node: &SyntaxNode) -> Option<ParseFn> {
let res = match node.kind() {
let fully_reparsed = SourceFile::parse(&after);
let incrementally_reparsed = {
let f = SourceFile::parse(&before);
- let edit = AtomTextEdit {
- delete: range,
- insert: replace_with.to_string(),
- };
+ let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() };
let (node, green, new_errors) =
reparser(f.syntax(), &edit).expect("cannot incrementally reparse");
let green_root = node.replace_with(green);
}
pub fn advance(&mut self) -> char {
- let next = self
- .peek()
- .expect("cannot advance if end of input is reached");
+ let next = self.peek().expect("cannot advance if end of input is reached");
self.pos += next.len_utf8();
next
}
Some(self.parse_escape(start))
} else {
let end = self.get_pos();
- Some(StringComponent::new(
- TextRange::from_to(start, end),
- CodePoint,
- ))
+ Some(StringComponent::new(TextRange::from_to(start, end), CodePoint))
}
}
fn closed_char_component(src: &str) -> StringComponent {
let (has_closing_quote, components) = parse(src);
assert!(has_closing_quote, "char should have closing quote");
- assert!(
- components.len() == 1,
- "Literal: {}\nComponents: {:#?}",
- src,
- components
- );
+ assert!(components.len() == 1, "Literal: {}\nComponents: {:#?}", src, components);
components[0].clone()
}
_ => {}
}
}
- errors.extend(
- node.attrs()
- .map(|attr| SyntaxError::new(InvalidBlockAttr, attr.syntax().range())),
- )
+ errors
+ .extend(node.attrs().map(|attr| SyntaxError::new(InvalidBlockAttr, attr.syntax().range())))
}
}
if let Some(range) = components.suffix {
- errors.push(SyntaxError::new(
- InvalidSuffix,
- range + literal_range.start(),
- ));
+ errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start()));
}
if len == 0 {
AsciiCodeEscape => validate_byte_code_escape(text, range, errors),
UnicodeEscape => errors.push(SyntaxError::new(UnicodeEscapeForbidden, range)),
CodePoint => {
- let c = text
- .chars()
- .next()
- .expect("Code points should be one character long");
+ let c = text.chars().next().expect("Code points should be one character long");
// These bytes must always be escaped
if c == '\t' || c == '\r' || c == '\n' {
} else if text.chars().count() < 4 {
errors.push(SyntaxError::new(TooShortByteCodeEscape, range));
} else {
- assert!(
- text.chars().count() == 4,
- "ByteCodeEscape cannot be longer than 4 chars"
- );
+ assert!(text.chars().count() == 4, "ByteCodeEscape cannot be longer than 4 chars");
if u8::from_str_radix(&text[2..], 16).is_err() {
errors.push(SyntaxError::new(MalformedByteCodeEscape, range));
fn assert_valid_byte(literal: &str) {
let file = build_file(literal);
- assert!(
- file.errors().len() == 0,
- "Errors for literal '{}': {:?}",
- literal,
- file.errors()
- );
+ assert!(file.errors().len() == 0, "Errors for literal '{}': {:?}", literal, file.errors());
}
fn assert_invalid_byte(literal: &str) {
#[test]
fn test_invalid_unicode_escape() {
- let well_formed = [
- r"\u{FF}",
- r"\u{0}",
- r"\u{F}",
- r"\u{10FFFF}",
- r"\u{1_0__FF___FF_____}",
- ];
+ let well_formed = [r"\u{FF}", r"\u{0}", r"\u{F}", r"\u{10FFFF}", r"\u{1_0__FF___FF_____}"];
for c in &well_formed {
assert_invalid_byte(c);
}
}
if let Some(range) = components.suffix {
- errors.push(SyntaxError::new(
- InvalidSuffix,
- range + literal_range.start(),
- ));
+ errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start()));
}
}
fn assert_valid_str(literal: &str) {
let file = build_file(literal);
- assert!(
- file.errors().len() == 0,
- "Errors for literal '{}': {:?}",
- literal,
- file.errors()
- );
+ assert!(file.errors().len() == 0, "Errors for literal '{}': {:?}", literal, file.errors());
}
fn assert_invalid_str(literal: &str) {
#[test]
fn test_invalid_unicode_escape() {
- let well_formed = [
- r"\u{FF}",
- r"\u{0}",
- r"\u{F}",
- r"\u{10FFFF}",
- r"\u{1_0__FF___FF_____}",
- ];
+ let well_formed = [r"\u{FF}", r"\u{0}", r"\u{F}", r"\u{10FFFF}", r"\u{1_0__FF___FF_____}"];
for c in &well_formed {
assert_invalid_str(c);
}
}
if let Some(range) = components.suffix {
- errors.push(SyntaxError::new(
- InvalidSuffix,
- range + literal_range.start(),
- ));
+ errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start()));
}
if len == 0 {
fn assert_valid_char(literal: &str) {
let file = build_file(literal);
- assert!(
- file.errors().len() == 0,
- "Errors for literal '{}': {:?}",
- literal,
- file.errors()
- );
+ assert!(file.errors().len() == 0, "Errors for literal '{}': {:?}", literal, file.errors());
}
fn assert_invalid_char(literal: &str) {
#[test]
fn test_valid_unicode_escape() {
- let valid = [
- r"\u{FF}",
- r"\u{0}",
- r"\u{F}",
- r"\u{10FFFF}",
- r"\u{1_0__FF___FF_____}",
- ];
+ let valid = [r"\u{FF}", r"\u{0}", r"\u{F}", r"\u{10FFFF}", r"\u{1_0__FF___FF_____}"];
for c in &valid {
assert_valid_char(c);
}
}
if let Some(range) = components.suffix {
- errors.push(SyntaxError::new(
- InvalidSuffix,
- range + literal_range.start(),
- ));
+ errors.push(SyntaxError::new(InvalidSuffix, range + literal_range.start()));
}
}
fn assert_valid_str(literal: &str) {
let file = build_file(literal);
- assert!(
- file.errors().len() == 0,
- "Errors for literal '{}': {:?}",
- literal,
- file.errors()
- );
+ assert!(file.errors().len() == 0, "Errors for literal '{}': {:?}", literal, file.errors());
}
fn assert_invalid_str(literal: &str) {
#[test]
fn test_valid_unicode_escape() {
- let valid = [
- r"\u{FF}",
- r"\u{0}",
- r"\u{F}",
- r"\u{10FFFF}",
- r"\u{1_0__FF___FF_____}",
- ];
+ let valid = [r"\u{FF}", r"\u{0}", r"\u{F}", r"\u{10FFFF}", r"\u{1_0__FF___FF_____}"];
for c in &valid {
assert_valid_str(c);
}
impl GreenBuilder {
pub(crate) fn new() -> GreenBuilder {
- GreenBuilder {
- errors: Vec::new(),
- inner: GreenNodeBuilder::new(),
- }
+ GreenBuilder { errors: Vec::new(), inner: GreenNodeBuilder::new() }
}
}
impl SyntaxError {
pub fn new<L: Into<Location>>(kind: SyntaxErrorKind, loc: L) -> SyntaxError {
- SyntaxError {
- kind,
- location: loc.into(),
- }
+ SyntaxError { kind, location: loc.into() }
}
pub fn kind(&self) -> SyntaxErrorKind {
InvalidByteEscape => write!(f, "Invalid escape sequence"),
TooShortByteCodeEscape => write!(f, "Escape sequence should have two digits"),
MalformedByteCodeEscape => write!(f, "Escape sequence should be a hexadecimal number"),
- UnicodeEscapeForbidden => write!(
- f,
- "Unicode escapes are not allowed in byte literals or byte strings"
- ),
+ UnicodeEscapeForbidden => {
+ write!(f, "Unicode escapes are not allowed in byte literals or byte strings")
+ }
TooShortAsciiCodeEscape => write!(f, "Escape sequence should have two digits"),
AsciiCodeEscapeOutOfRange => {
write!(f, "Escape sequence should be between \\x00 and \\x7F")
impl<'a> SyntaxText<'a> {
pub(crate) fn new(node: &'a SyntaxNode) -> SyntaxText<'a> {
- SyntaxText {
- node,
- range: node.range(),
- }
+ SyntaxText { node, range: node.range() }
}
pub fn chunks(&self) -> impl Iterator<Item = &'a str> {
let range = range.restrict(self.range).unwrap_or_else(|| {
panic!("invalid slice, range: {:?}, slice: {:?}", self.range, range)
});
- SyntaxText {
- node: self.node,
- range,
- }
+ SyntaxText { node: self.node, range }
}
pub fn char_at(&self, offset: impl Into<TextUnit>) -> Option<char> {
#[test]
fn parser_tests() {
- dir_tests(
- &test_data_dir(),
- &["parser/inline/ok", "parser/ok"],
- |text, path| {
- let file = SourceFile::parse(text);
- let errors = file.errors();
- assert_eq!(
- &*errors,
- &[] as &[ra_syntax::SyntaxError],
- "There should be no errors in the file {:?}",
- path.display()
- );
- dump_tree(file.syntax())
- },
- );
- dir_tests(
- &test_data_dir(),
- &["parser/err", "parser/inline/err"],
- |text, path| {
- let file = SourceFile::parse(text);
- let errors = file.errors();
- assert_ne!(
- &*errors,
- &[] as &[ra_syntax::SyntaxError],
- "There should be errors in the file {:?}",
- path.display()
- );
- dump_tree(file.syntax())
- },
- );
+ dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
+ let file = SourceFile::parse(text);
+ let errors = file.errors();
+ assert_eq!(
+ &*errors,
+ &[] as &[ra_syntax::SyntaxError],
+ "There should be no errors in the file {:?}",
+ path.display()
+ );
+ dump_tree(file.syntax())
+ });
+ dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
+ let file = SourceFile::parse(text);
+ let errors = file.errors();
+ assert_ne!(
+ &*errors,
+ &[] as &[ra_syntax::SyntaxError],
+ "There should be errors in the file {:?}",
+ path.display()
+ );
+ dump_tree(file.syntax())
+ });
}
#[test]
let text = read_text(entry.path());
let node = SourceFile::parse(&text);
let errors = node.errors();
- assert_eq!(
- &*errors,
- &[],
- "There should be no errors in the file {:?}",
- entry
- );
+ assert_eq!(&*errors, &[], "There should be no errors in the file {:?}", entry);
}
assert!(
count > 30,
impl AtomTextEdit {
pub fn replace(range: TextRange, replace_with: String) -> AtomTextEdit {
- AtomTextEdit {
- delete: range,
- insert: replace_with,
- }
+ AtomTextEdit { delete: range, insert: replace_with }
}
pub fn delete(range: TextRange) -> AtomTextEdit {
}
fn text_offsets(text: &str) -> Vec<TextUnit> {
- text.char_indices()
- .map(|(i, _)| TextUnit::from_usize(i))
- .collect()
+ text.char_indices().map(|(i, _)| TextUnit::from_usize(i)).collect()
}
pub fn arb_offset(text: &str) -> BoxedStrategy<TextUnit> {
)
.boxed()
}
- &[x] => arb_text()
- .prop_map(move |text| AtomTextEdit::insert(x, text))
- .boxed(),
+ &[x] => arb_text().prop_map(move |text| AtomTextEdit::insert(x, text)).boxed(),
_ => unreachable!(),
})
.collect();
TextEdit::from_atoms(self.atoms)
}
pub fn invalidates_offset(&self, offset: TextUnit) -> bool {
- self.atoms
- .iter()
- .any(|atom| atom.delete.contains_inclusive(offset))
+ self.atoms.iter().any(|atom| atom.delete.contains_inclusive(offset))
}
}
use crate::{RootConfig, Roots, VfsRoot};
pub(crate) enum Task {
- AddRoot {
- root: VfsRoot,
- config: Arc<RootConfig>,
- },
+ AddRoot { root: VfsRoot, config: Arc<RootConfig> },
}
#[derive(Debug)]
pub enum TaskResult {
- BulkLoadRoot {
- root: VfsRoot,
- files: Vec<(RelativePathBuf, String)>,
- },
- AddSingleFile {
- root: VfsRoot,
- path: RelativePathBuf,
- text: String,
- },
- ChangeSingleFile {
- root: VfsRoot,
- path: RelativePathBuf,
- text: String,
- },
- RemoveSingleFile {
- root: VfsRoot,
- path: RelativePathBuf,
- },
+ BulkLoadRoot { root: VfsRoot, files: Vec<(RelativePathBuf, String)> },
+ AddSingleFile { root: VfsRoot, path: RelativePathBuf, text: String },
+ ChangeSingleFile { root: VfsRoot, path: RelativePathBuf, text: String },
+ RemoveSingleFile { root: VfsRoot, path: RelativePathBuf },
}
#[derive(Debug)]
},
);
- Worker {
- worker,
- worker_handle,
- }
+ Worker { worker, worker_handle }
}
pub(crate) fn sender(&self) -> &Sender<Task> {
Some((path, text))
})
.collect();
- sender
- .send(TaskResult::BulkLoadRoot { root, files })
- .unwrap();
+ sender.send(TaskResult::BulkLoadRoot { root, files }).unwrap();
log::debug!("... loaded {}", config.root.as_path().display());
}
}
ChangeKind::Write => {
if let Some(text) = read_to_string(&path) {
- sender
- .send(TaskResult::ChangeSingleFile {
- root,
- path: rel_path,
- text,
- })
- .unwrap();
+ sender.send(TaskResult::ChangeSingleFile { root, path: rel_path, text }).unwrap();
}
}
- ChangeKind::Remove => sender
- .send(TaskResult::RemoveSingleFile {
- root,
- path: rel_path,
- })
- .unwrap(),
+ ChangeKind::Remove => {
+ sender.send(TaskResult::RemoveSingleFile { root, path: rel_path }).unwrap()
+ }
}
}
}
fn read_to_string(path: &Path) -> Option<String> {
- fs::read_to_string(&path)
- .map_err(|e| log::warn!("failed to read file {}", e))
- .ok()
+ fs::read_to_string(&path).map_err(|e| log::warn!("failed to read file {}", e)).ok()
}
impl RootConfig {
fn new(root: PathBuf, excluded_dirs: Vec<PathBuf>) -> RootConfig {
- RootConfig {
- root,
- excluded_dirs,
- }
+ RootConfig { root, excluded_dirs }
}
    /// Checks if root contains a path and returns a root-relative path.
pub(crate) fn contains(&self, path: &Path) -> Option<RelativePathBuf> {
Roots { roots }
}
pub(crate) fn find(&self, path: &Path) -> Option<(VfsRoot, RelativePathBuf)> {
- self.roots
- .iter()
- .find_map(|(root, data)| data.contains(path).map(|it| (root, it)))
+ self.roots.iter().find_map(|(root, data)| data.contains(path).map(|it| (root, it)))
}
}
for (root, config) in roots.iter() {
root2files.insert(root, Default::default());
- worker
- .sender()
- .send(io::Task::AddRoot {
- root,
- config: Arc::clone(config),
- })
- .unwrap();
+ worker.sender().send(io::Task::AddRoot { root, config: Arc::clone(config) }).unwrap();
}
- let res = Vfs {
- roots,
- files: Arena::default(),
- root2files,
- worker,
- pending_changes: Vec::new(),
- };
+ let res =
+ Vfs { roots, files: Arena::default(), root2files, worker, pending_changes: Vec::new() };
let vfs_roots = res.roots.iter().map(|(id, _)| id).collect();
(res, vfs_roots)
}
let text = fs::read_to_string(path).unwrap_or_default();
let text = Arc::new(text);
let file = self.add_file(root, rel_path.clone(), Arc::clone(&text), false);
- let change = VfsChange::AddFile {
- file,
- text,
- root,
- path: rel_path,
- };
+ let change = VfsChange::AddFile { file, text, root, path: rel_path };
self.pending_changes.push(change);
Some(file)
};
cur_files.push((file, path, text));
}
- let change = VfsChange::AddRoot {
- root,
- files: cur_files,
- };
+ let change = VfsChange::AddRoot { root, files: cur_files };
self.pending_changes.push(change);
}
TaskResult::AddSingleFile { root, path, text } => {
) -> Option<VfsFile> {
let text = Arc::new(text);
let file = self.add_file(root, path.clone(), text.clone(), is_overlay);
- self.pending_changes.push(VfsChange::AddFile {
- file,
- root,
- path,
- text,
- });
+ self.pending_changes.push(VfsChange::AddFile { file, root, path, text });
Some(file)
}
}
let text = Arc::new(text);
self.change_file(file, text.clone(), is_overlay);
- self.pending_changes
- .push(VfsChange::ChangeFile { file, text });
+ self.pending_changes.push(VfsChange::ChangeFile { file, text });
}
fn do_remove_file(
return;
}
self.remove_file(file);
- self.pending_changes
- .push(VfsChange::RemoveFile { root, path, file });
+ self.pending_changes.push(VfsChange::RemoveFile { root, path, file });
}
pub fn add_file_overlay(&mut self, path: &Path, text: String) -> Option<VfsFile> {
text: Arc<String>,
is_overlayed: bool,
) -> VfsFile {
- let data = VfsFileData {
- root,
- path,
- text,
- is_overlayed,
- };
+ let data = VfsFileData { root, path, text, is_overlayed };
let file = self.files.alloc(data);
self.root2files.get_mut(root).unwrap().insert(file);
file
}
fn find_file(&self, root: VfsRoot, path: &RelativePath) -> Option<VfsFile> {
- self.root2files[root]
- .iter()
- .map(|&it| it)
- .find(|&file| self.files[file].path == path)
+ self.root2files[root].iter().map(|&it| it).find(|&file| self.files[file].path == path)
}
}
#[derive(Debug, Clone)]
pub enum VfsChange {
- AddRoot {
- root: VfsRoot,
- files: Vec<(VfsFile, RelativePathBuf, Arc<String>)>,
- },
- AddFile {
- root: VfsRoot,
- file: VfsFile,
- path: RelativePathBuf,
- text: Arc<String>,
- },
- RemoveFile {
- root: VfsRoot,
- file: VfsFile,
- path: RelativePathBuf,
- },
- ChangeFile {
- file: VfsFile,
- text: Arc<String>,
- },
+ AddRoot { root: VfsRoot, files: Vec<(VfsFile, RelativePathBuf, Arc<String>)> },
+ AddFile { root: VfsRoot, file: VfsFile, path: RelativePathBuf, text: Arc<String> },
+ RemoveFile { root: VfsRoot, file: VfsFile, path: RelativePathBuf },
+ ChangeFile { file: VfsFile, text: Arc<String> },
}
fn process_tasks(vfs: &mut Vfs, num_tasks: u32) {
for _ in 0..num_tasks {
- let task = vfs
- .task_receiver()
- .recv_timeout(Duration::from_secs(3))
- .unwrap();
+ let task = vfs.task_receiver().recv_timeout(Duration::from_secs(3)).unwrap();
log::debug!("{:?}", task);
vfs.handle_task(task);
}
fn test_vfs_works() -> std::io::Result<()> {
// Logger::with_str("vfs=debug,ra_vfs=debug").start().unwrap();
- let files = [
- ("a/foo.rs", "hello"),
- ("a/bar.rs", "world"),
- ("a/b/baz.rs", "nested hello"),
- ];
+ let files = [("a/foo.rs", "hello"), ("a/bar.rs", "world"), ("a/b/baz.rs", "nested hello")];
let dir = tempdir().unwrap();
for (path, text) in files.iter() {
})
.collect::<HashSet<_>>();
- let expected_files = [
- ("foo.rs", "hello"),
- ("bar.rs", "world"),
- ("baz.rs", "nested hello"),
- ]
- .iter()
- .map(|(path, text)| (path.to_string(), text.to_string()))
- .collect::<HashSet<_>>();
+ let expected_files = [("foo.rs", "hello"), ("bar.rs", "world"), ("baz.rs", "nested hello")]
+ .iter()
+ .map(|(path, text)| (path.to_string(), text.to_string()))
+ .collect::<HashSet<_>>();
assert_eq!(files, expected_files);
}
);
vfs.add_file_overlay(&dir.path().join("a/b/spam.rs"), "spam".to_string());
- assert_match!(
- vfs.commit_changes().as_slice(),
- [VfsChange::AddFile { text, path, .. }],
- {
- assert_eq!(text.as_str(), "spam");
- assert_eq!(path, "spam.rs");
- }
- );
+ assert_match!(vfs.commit_changes().as_slice(), [VfsChange::AddFile { text, path, .. }], {
+ assert_eq!(text.as_str(), "spam");
+ assert_eq!(path, "spam.rs");
+ });
vfs.remove_file_overlay(&dir.path().join("a/b/spam.rs"));
assert_match!(
fs::create_dir_all(dir.path().join("a/sub1/sub2")).unwrap();
fs::write(dir.path().join("a/sub1/sub2/new.rs"), "new hello").unwrap();
process_tasks(&mut vfs, 1);
- assert_match!(
- vfs.commit_changes().as_slice(),
- [VfsChange::AddFile { text, path, .. }],
- {
- assert_eq!(text.as_str(), "new hello");
- assert_eq!(path, "sub1/sub2/new.rs");
- }
- );
+ assert_match!(vfs.commit_changes().as_slice(), [VfsChange::AddFile { text, path, .. }], {
+ assert_eq!(text.as_str(), "new hello");
+ assert_eq!(path, "sub1/sub2/new.rs");
+ });
- fs::rename(
- &dir.path().join("a/sub1/sub2/new.rs"),
- &dir.path().join("a/sub1/sub2/new1.rs"),
- )
- .unwrap();
+ fs::rename(&dir.path().join("a/sub1/sub2/new.rs"), &dir.path().join("a/sub1/sub2/new1.rs"))
+ .unwrap();
process_tasks(&mut vfs, 2);
assert_match!(
vfs.commit_changes().as_slice(),
- [VfsChange::RemoveFile {
- path: removed_path, ..
- }, VfsChange::AddFile {
- text,
- path: added_path,
- ..
- }],
+ [VfsChange::RemoveFile { path: removed_path, .. }, VfsChange::AddFile { text, path: added_path, .. }],
{
assert_eq!(removed_path, "sub1/sub2/new.rs");
assert_eq!(added_path, "sub1/sub2/new1.rs");
stack.push(from);
} else if text.starts_with(&close) {
text = &text[close.len()..];
- let from = stack
- .pop()
- .unwrap_or_else(|| panic!("unmatched </{}>", tag));
+ let from = stack.pop().unwrap_or_else(|| panic!("unmatched </{}>", tag));
let to = TextUnit::of_str(&res);
ranges.push(TextRange::from_to(from, to));
}
macro_rules! flush {
() => {
if let Some(meta) = meta {
- res.push(FixtureEntry {
- meta: meta.to_string(),
- text: buf.clone(),
- });
+ res.push(FixtureEntry { meta: meta.to_string(), text: buf.clone() });
buf.clear();
}
};
let mut l = l.iter().collect::<Vec<_>>();
let mut r = r.iter().collect::<Vec<_>>();
- l.retain(
- |l| match r.iter().position(|r| find_mismatch(l, r).is_none()) {
- Some(i) => {
- r.remove(i);
- false
- }
- None => true,
- },
- );
+ l.retain(|l| match r.iter().position(|r| find_mismatch(l, r).is_none()) {
+ Some(i) => {
+ r.remove(i);
+ false
+ }
+ None => true,
+ });
if !l.is_empty() {
assert!(!r.is_empty());
return Some((expected, actual));
}
- l.values()
- .zip(r.values())
- .filter_map(|(l, r)| find_mismatch(l, r))
- .nth(0)
+ l.values().zip(r.values()).filter_map(|(l, r)| find_mismatch(l, r)).nth(0)
}
(&Null, &Null) => None,
// magic string literal "{...}" acts as wildcard for any sub-JSON
pub fn project_dir() -> PathBuf {
let dir = env!("CARGO_MANIFEST_DIR");
- PathBuf::from(dir)
- .parent()
- .unwrap()
- .parent()
- .unwrap()
- .to_owned()
+ PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
}
/// Read file and normalize newlines.
impl MarkChecker {
pub fn new(mark: &'static AtomicUsize) -> MarkChecker {
let value_on_entry = mark.load(Ordering::SeqCst);
- MarkChecker {
- mark,
- value_on_entry,
- }
+ MarkChecker { mark, value_on_entry }
}
}
fn worker_chan<I, O>(buf: usize) -> (Worker<I, O>, Receiver<I>, Sender<O>) {
let (input_sender, input_receiver) = bounded::<I>(buf);
let (output_sender, output_receiver) = unbounded::<O>();
- (
- Worker {
- inp: input_sender,
- out: output_receiver,
- },
- input_receiver,
- output_sender,
- )
+ (Worker { inp: input_sender, out: output_receiver }, input_receiver, output_sender)
}
.current_dir(&root)
.output()?;
if !output.status.success() {
- bail!(
- "`git diff --diff-filter=MAR --name-only --cached` exited with {}",
- output.status
- );
+ bail!("`git diff --diff-filter=MAR --name-only --cached` exited with {}", output.status);
}
for line in String::from_utf8(output.stdout)?.lines() {
- run(
- &format!(
- "git update-index --add {}",
- root.join(line).to_string_lossy()
- ),
- ".",
- )?;
+ run(&format!("git update-index --add {}", root.join(line).to_string_lossy()), ".")?;
}
Ok(())
}
None => continue 'outer,
}
};
- let text: String = itertools::join(
- block.map(|(_, line)| line).chain(::std::iter::once("")),
- "\n",
- );
+ let text: String =
+ itertools::join(block.map(|(_, line)| line).chain(::std::iter::once("")), "\n");
assert!(!text.trim().is_empty() && text.ends_with('\n'));
res.push((start_line, Test { name, text, ok }))
}
}
pub fn project_root() -> PathBuf {
- Path::new(&env!("CARGO_MANIFEST_DIR"))
- .ancestors()
- .nth(2)
- .unwrap()
- .to_path_buf()
+ Path::new(&env!("CARGO_MANIFEST_DIR")).ancestors().nth(2).unwrap().to_path_buf()
}
pub fn run(cmdline: &str, dir: &str) -> Result<()> {
let project_dir = project_root().join(dir);
let mut args = cmdline.split_whitespace();
let exec = args.next().unwrap();
- let status = Command::new(exec)
- .args(args)
- .current_dir(project_dir)
- .status()?;
+ let status = Command::new(exec).args(args).current_dir(project_dir).status()?;
if !status.success() {
bail!("`{}` exited with {}", cmdline, status);
}
};
if mode == Verify {
- run(
- &format!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN),
- ".",
- )?;
+ run(&format!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN), ".")?;
} else {
run(&format!("rustup run {} -- cargo fmt", TOOLCHAIN), ".")?;
}
pub fn install_rustfmt() -> Result<()> {
run(&format!("rustup install {}", TOOLCHAIN), ".")?;
- run(
- &format!("rustup component add rustfmt --toolchain {}", TOOLCHAIN),
- ".",
- )
+ run(&format!("rustup component add rustfmt --toolchain {}", TOOLCHAIN), ".")
}
pub fn install_format_hook() -> Result<()> {
_ => run("cargo install cargo-fuzz", ".")?,
};
- run(
- "rustup run nightly -- cargo fuzz run parser",
- "./crates/ra_syntax",
- )
+ run("rustup run nightly -- cargo fuzz run parser", "./crates/ra_syntax")
}
pub fn gen_tests(mode: Mode) -> Result<()> {
file_name[5..file_name.len() - 3].to_string()
};
let text = fs::read_to_string(&path)?;
- let test = Test {
- name: name.clone(),
- text,
- ok,
- };
+ let test = Test { name: name.clone(), text, ok };
if let Some(old) = res.insert(name, (path, test)) {
println!("Duplicate test: {:?}", old);
}
.subcommand(SubCommand::with_name("format-hook"))
.subcommand(SubCommand::with_name("fuzz-tests"))
.get_matches();
- match matches
- .subcommand_name()
- .expect("Subcommand must be specified")
- {
+ match matches.subcommand_name().expect("Subcommand must be specified") {
"install-code" => install_code_extension()?,
"gen-tests" => gen_tests(Overwrite)?,
"gen-syntax" => generate(Overwrite)?,
"./editors/code",
)?;
} else {
- run(
- r"code --install-extension ./ra-lsp-0.0.1.vsix --force",
- "./editors/code",
- )?;
+ run(r"code --install-extension ./ra-lsp-0.0.1.vsix --force", "./editors/code")?;
}
Ok(())
}
#[test]
fn generated_tests_are_fresh() {
if let Err(error) = gen_tests(Verify) {
- panic!(
- "{}. Please update tests by running `cargo gen-tests`",
- error
- );
+ panic!("{}. Please update tests by running `cargo gen-tests`", error);
}
}
#[test]
fn check_code_formatting() {
if let Err(error) = run_rustfmt(Verify) {
- panic!(
- "{}. Please format the code by running `cargo format`",
- error
- );
+ panic!("{}. Please format the code by running `cargo format`", error);
}
}