X-Git-Url: https://git.lizzy.rs/?a=blobdiff_plain;f=crates%2Fide_db%2Fsrc%2Fhelpers.rs;h=944b69c1acf4ad6e01d4dafb357fb0a12933c051;hb=8848186213bb7f54f034c2f6f5fab724bfc3b451;hp=173e55b33f6ac7b6579b96d4a0065639be65740e;hpb=e77fc481adb98d95643e00f5fa5f02cbef698014;p=rust.git

diff --git a/crates/ide_db/src/helpers.rs b/crates/ide_db/src/helpers.rs
index 173e55b33f6..944b69c1acf 100644
--- a/crates/ide_db/src/helpers.rs
+++ b/crates/ide_db/src/helpers.rs
@@ -4,20 +4,23 @@
 pub mod import_assets;
 pub mod insert_use;
 pub mod merge_imports;
+pub mod insert_whitespace_into_node;
 pub mod node_ext;
 pub mod rust_doc;
+pub mod format_string;
 
 use std::collections::VecDeque;
 
 use base_db::FileId;
-use either::Either;
 use hir::{ItemInNs, MacroDef, ModuleDef, Name, Semantics};
+use itertools::Itertools;
 use syntax::{
     ast::{self, make, HasLoopBody},
-    AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent, T,
+    AstNode, AstToken, Preorder, RustLanguage, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent,
+    T,
 };
 
-use crate::RootDatabase;
+use crate::{defs::Definition, RootDatabase};
 
 pub use self::famous_defs::FamousDefs;
 
@@ -29,35 +32,6 @@ pub fn item_name(db: &RootDatabase, item: ItemInNs) -> Option<Name> {
     }
 }
 
-/// Resolves the path at the cursor token as a derive macro if it inside a token tree of a derive attribute.
-pub fn try_resolve_derive_input_at(
-    sema: &hir::Semantics<RootDatabase>,
-    derive_attr: &ast::Attr,
-    cursor: &SyntaxToken,
-) -> Option<MacroDef> {
-    use itertools::Itertools;
-    if cursor.kind() != T![ident] {
-        return None;
-    }
-    let tt = match derive_attr.as_simple_call() {
-        Some((name, tt))
-            if name == "derive" && tt.syntax().text_range().contains_range(cursor.text_range()) =>
-        {
-            tt
-        }
-        _ => return None,
-    };
-    let tokens: Vec<_> = cursor
-        .siblings_with_tokens(Direction::Prev)
-        .flat_map(SyntaxElement::into_token)
-        .take_while(|tok| tok.kind() != T!['('] && tok.kind() != T![,])
-        .collect();
-    let path = ast::Path::parse(&tokens.into_iter().rev().join("")).ok()?;
-    sema.scope(tt.syntax())
-        .speculative_resolve_as_mac(&path)
-        .filter(|mac| mac.kind() == hir::MacroKind::Derive)
-}
-
 /// Picks the token with the highest rank returned by the passed in function.
 pub fn pick_best_token(
     tokens: TokenAtOffset<SyntaxToken>,
@@ -65,6 +39,9 @@ pub fn pick_best_token(
 ) -> Option<SyntaxToken> {
     tokens.max_by_key(move |t| f(t.kind()))
 }
+pub fn pick_token<T: AstToken>(mut tokens: TokenAtOffset<SyntaxToken>) -> Option<T> {
+    tokens.find_map(T::cast)
+}
 
 /// Converts the mod path struct into its ast representation.
 pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path {
@@ -85,7 +62,7 @@ pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path {
     segments.extend(
         path.segments()
             .iter()
-            .map(|segment| make::path_segment(make::name_ref(&segment.to_string()))),
+            .map(|segment| make::path_segment(make::name_ref(&segment.to_smol_str()))),
     );
     make::path_from_segments(segments, is_abs)
 }
@@ -94,7 +71,7 @@ pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path {
 pub fn visit_file_defs(
     sema: &Semantics<RootDatabase>,
     file_id: FileId,
-    cb: &mut dyn FnMut(Either<hir::ModuleDef, hir::Impl>),
+    cb: &mut dyn FnMut(Definition),
 ) {
     let db = sema.db;
     let module = match sema.to_module_def(file_id) {
@@ -106,12 +83,12 @@ pub fn visit_file_defs(
         if let ModuleDef::Module(submodule) = def {
             if let hir::ModuleSource::Module(_) = submodule.definition_source(db).value {
                 defs.extend(submodule.declarations(db));
-                submodule.impl_defs(db).into_iter().for_each(|impl_| cb(Either::Right(impl_)));
+                submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into()));
             }
         }
-        cb(Either::Left(def));
+        cb(def.into());
     }
-    module.impl_defs(db).into_iter().for_each(|impl_| cb(Either::Right(impl_)));
+    module.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into()));
 }
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -209,49 +186,153 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
         | ast::Expr::TryExpr(_)
         | ast::Expr::TupleExpr(_)
         | ast::Expr::WhileExpr(_)
+        | ast::Expr::LetExpr(_)
         | ast::Expr::YieldExpr(_) => cb(expr),
     }
 }
 
-/// Calls `cb` on each break expr inside of `body` that is applicable for the given label.
-pub fn for_each_break_expr(
+pub fn for_each_break_and_continue_expr(
+    label: Option<ast::Label>,
+    body: Option<ast::StmtList>,
+    cb: &mut dyn FnMut(ast::Expr),
+) {
+    let label = label.and_then(|lbl| lbl.lifetime());
+    if let Some(b) = body {
+        let tree_depth_iterator = TreeWithDepthIterator::new(b);
+        for (expr, depth) in tree_depth_iterator {
+            match expr {
+                ast::Expr::BreakExpr(b)
+                    if (depth == 0 && b.lifetime().is_none())
+                        || eq_label_lt(&label, &b.lifetime()) =>
+                {
+                    cb(ast::Expr::BreakExpr(b));
+                }
+                ast::Expr::ContinueExpr(c)
+                    if (depth == 0 && c.lifetime().is_none())
+                        || eq_label_lt(&label, &c.lifetime()) =>
+                {
+                    cb(ast::Expr::ContinueExpr(c));
+                }
+                _ => (),
+            }
+        }
+    }
+}
+
+fn for_each_break_expr(
     label: Option<ast::Label>,
     body: Option<ast::StmtList>,
     cb: &mut dyn FnMut(ast::BreakExpr),
 ) {
     let label = label.and_then(|lbl| lbl.lifetime());
-    let mut depth = 0;
     if let Some(b) = body {
-        let preorder = &mut b.syntax().preorder();
-        let ev_as_expr = |ev| match ev {
-            WalkEvent::Enter(it) => Some(WalkEvent::Enter(ast::Expr::cast(it)?)),
-            WalkEvent::Leave(it) => Some(WalkEvent::Leave(ast::Expr::cast(it)?)),
-        };
-        let eq_label = |lt: Option<ast::Lifetime>| {
-            lt.zip(label.as_ref()).map_or(false, |(lt, lbl)| lt.text() == lbl.text())
-        };
-        while let Some(node) = preorder.find_map(ev_as_expr) {
-            match node {
-                WalkEvent::Enter(expr) => match expr {
-                    ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_) => {
-                        depth += 1
-                    }
-                    ast::Expr::BlockExpr(e) if e.label().is_some() => depth += 1,
-                    ast::Expr::BreakExpr(b)
-                        if (depth == 0 && b.lifetime().is_none()) || eq_label(b.lifetime()) =>
-                    {
-                        cb(b);
-                    }
-                    _ => (),
-                },
-                WalkEvent::Leave(expr) => match expr {
-                    ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_) => {
-                        depth -= 1
-                    }
-                    ast::Expr::BlockExpr(e) if e.label().is_some() => depth -= 1,
-                    _ => (),
-                },
+        let tree_depth_iterator = TreeWithDepthIterator::new(b);
+        for (expr, depth) in tree_depth_iterator {
+            match expr {
+                ast::Expr::BreakExpr(b)
+                    if (depth == 0 && b.lifetime().is_none())
+                        || eq_label_lt(&label, &b.lifetime()) =>
+                {
+                    cb(b);
+                }
+                _ => (),
             }
         }
     }
 }
+
+fn eq_label_lt(lt1: &Option<ast::Lifetime>, lt2: &Option<ast::Lifetime>) -> bool {
+    lt1.as_ref().zip(lt2.as_ref()).map_or(false, |(lt, lbl)| lt.text() == lbl.text())
+}
+
+struct TreeWithDepthIterator {
+    preorder: Preorder<RustLanguage>,
+    depth: i32,
+}
+
+impl TreeWithDepthIterator {
+    fn new(body: ast::StmtList) -> Self {
+        let preorder = body.syntax().preorder();
+        Self { preorder, depth: 0 }
+    }
+}
+
+impl<'a> Iterator for TreeWithDepthIterator {
+    type Item = (ast::Expr, i32);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        while let Some((event, expr)) = self.preorder.find_map(|ev| match ev {
+            WalkEvent::Enter(it) => Some(WalkEvent::Enter(())).zip(ast::Expr::cast(it)),
+            WalkEvent::Leave(it) => Some(WalkEvent::Leave(())).zip(ast::Expr::cast(it)),
+        }) {
+            match (event, expr) {
+                (
+                    WalkEvent::Enter(_),
+                    ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_),
+                ) => {
+                    self.depth += 1;
+                }
+                (
+                    WalkEvent::Leave(_),
+                    ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_),
+                ) => {
+                    self.depth -= 1;
+                }
+                (WalkEvent::Enter(_), ast::Expr::BlockExpr(e)) if e.label().is_some() => {
+                    self.depth += 1;
+                }
+                (WalkEvent::Leave(_), ast::Expr::BlockExpr(e)) if e.label().is_some() => {
+                    self.depth -= 1;
+                }
+                (WalkEvent::Enter(_), expr) => return Some((expr, self.depth)),
+                _ => (),
+            }
+        }
+        None
+    }
+}
+
+/// Checks if the given lint is equal or is contained by the other lint which may or may not be a group.
+pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
+    if lint == lint_is {
+        return true;
+    }
+
+    if let Some(group) = generated_lints::DEFAULT_LINT_GROUPS
+        .iter()
+        .chain(generated_lints::CLIPPY_LINT_GROUPS.iter())
+        .chain(generated_lints::RUSTDOC_LINT_GROUPS.iter())
+        .find(|&check| check.lint.label == lint_is)
+    {
+        group.children.contains(&lint)
+    } else {
+        false
+    }
+}
+
+/// Parses the input token tree as comma separated plain paths.
+pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
+    let r_paren = input.r_paren_token();
+    let tokens =
+        input.syntax().children_with_tokens().skip(1).map_while(|it| match it.into_token() {
+            // seeing a keyword means the attribute is unclosed so stop parsing here
+            Some(tok) if tok.kind().is_keyword() => None,
+            // don't include the right token tree parenthesis if it exists
+            tok @ Some(_) if tok == r_paren => None,
+            // only nodes that we can find are other TokenTrees, those are unexpected in this parse though
+            None => None,
+            Some(tok) => Some(tok),
        });
+    let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
+    let paths = input_expressions
+        .into_iter()
+        .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+        .filter_map(|mut tokens| {
+            syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
+                ast::Expr::PathExpr(it) => it.path(),
+                _ => None,
+            })
+        })
+        .collect();
+    Some(paths)
+}
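
The depth bookkeeping behind `TreeWithDepthIterator` is the heart of the new `for_each_break_and_continue_expr` helper: the body is walked in preorder, entering a nested `loop`/`while`/`for` (or a labelled block) bumps a counter and leaving it decrements the counter again, so `depth == 0` picks out `break`/`continue` expressions that still belong to the body being inspected, while labelled ones are matched by label regardless of depth. A minimal standalone sketch of that filtering over a toy tree; the `Node` type and `collect_breaks` function below are illustrative only, not rust-analyzer API:

    // Toy stand-in for the preorder walk: recursion depth plays the role of
    // the Enter/Leave counter in `TreeWithDepthIterator`.
    #[derive(Debug)]
    enum Node {
        Loop(Vec<Node>),             // a nested loop: raises the depth
        Break(Option<&'static str>), // `break` / `break 'label`
        Other(Vec<Node>),            // anything else, traversed transparently
    }

    fn collect_breaks<'a>(node: &'a Node, label: Option<&str>, depth: u32, out: &mut Vec<&'a Node>) {
        match node {
            Node::Break(lt) => {
                // Same filter as the real helper: unlabelled breaks count only at
                // depth 0, labelled ones whenever the label matches.
                if (depth == 0 && lt.is_none()) || (lt.is_some() && lt.as_deref() == label) {
                    out.push(node);
                }
            }
            Node::Loop(children) => {
                // Entering the nested loop bumps the depth; returning from the
                // recursion is the `WalkEvent::Leave` decrement.
                for c in children {
                    collect_breaks(c, label, depth + 1, out);
                }
            }
            Node::Other(children) => {
                for c in children {
                    collect_breaks(c, label, depth, out);
                }
            }
        }
    }

    fn main() {
        // Body of: { break; loop { break; break 'outer; } }
        let body = Node::Other(vec![
            Node::Break(None),
            Node::Loop(vec![Node::Break(None), Node::Break(Some("'outer"))]),
        ]);
        let mut hits = Vec::new();
        collect_breaks(&body, Some("'outer"), 0, &mut hits);
        // Reports the top-level `break` and the labelled `break 'outer`,
        // but not the unlabelled `break` inside the nested loop.
        println!("{:?}", hits);
    }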
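
`lint_eq_or_in_group` answers whether `lint` is named by `lint_is` directly or through a lint group; the group tables come from the `generated_lints` module. A reduced sketch with a hand-written table; the `Lint`/`LintGroup` shapes below are simplified assumptions that mirror how the function uses the generated data, not the real definitions:

    // Simplified stand-ins for the generated lint tables.
    struct Lint {
        label: &'static str,
    }

    struct LintGroup {
        lint: Lint,
        children: &'static [&'static str],
    }

    static GROUPS: &[LintGroup] = &[LintGroup {
        lint: Lint { label: "unused" },
        children: &["unused_imports", "unused_variables", "dead_code"],
    }];

    // Mirrors the logic of `lint_eq_or_in_group`: exact match first, otherwise
    // look `lint_is` up as a group and check its children.
    fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
        if lint == lint_is {
            return true;
        }
        GROUPS
            .iter()
            .find(|group| group.lint.label == lint_is)
            .map_or(false, |group| group.children.contains(&lint))
    }

    fn main() {
        assert!(lint_eq_or_in_group("dead_code", "dead_code")); // identical lint
        assert!(lint_eq_or_in_group("dead_code", "unused")); // member of the group
        assert!(!lint_eq_or_in_group("dead_code", "unused_imports")); // sibling, not a group
        println!("ok");
    }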
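
`parse_tt_as_comma_sep_paths` leans on `Itertools::group_by` to split the attribute's token stream at commas: consecutive tokens are grouped by "is this a comma?", separator groups are discarded, and each remaining group is re-joined and parsed as a path. The same splitting step in isolation, over plain string tokens; this sketch assumes the `itertools` crate (the adaptor is named `group_by` in the 0.10 line used here, newer releases rename it to `chunk_by`) and leaves out the final parse into `ast::Path`:

    use itertools::Itertools;

    // The grouping trick from `parse_tt_as_comma_sep_paths`, applied to a flat
    // token list: group consecutive tokens by "is it a comma?", drop the comma
    // groups, and join what is left into one string per path.
    fn comma_sep_groups(tokens: &[&str]) -> Vec<String> {
        let groups = tokens.iter().group_by(|tok| **tok == ",");
        groups
            .into_iter()
            // `(is_sep, group)`: keep only the groups that are not separators.
            .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
            // Re-join each group; the real helper feeds this string to
            // `syntax::hacks::parse_expr_from_str` and keeps only path expressions.
            .map(|group| group.copied().collect::<String>())
            .collect()
    }

    fn main() {
        // Token stream of `#[derive(serde::Serialize, Debug)]`'s inner token
        // tree, already stripped of the surrounding parentheses.
        let tokens = ["serde", "::", "Serialize", ",", "Debug"];
        assert_eq!(comma_sep_groups(&tokens), vec!["serde::Serialize", "Debug"]);
        println!("{:?}", comma_sep_groups(&tokens));
    }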