pub mod import_assets;
pub mod insert_use;
pub mod merge_imports;
+pub mod insert_whitespace_into_node;
pub mod node_ext;
pub mod rust_doc;
+pub mod format_string;
use std::collections::VecDeque;
use base_db::FileId;
-use either::Either;
use hir::{ItemInNs, MacroDef, ModuleDef, Name, Semantics};
+use itertools::Itertools;
use syntax::{
ast::{self, make, HasLoopBody},
- AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent, T,
+ AstNode, AstToken, Preorder, RustLanguage, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent,
+ T,
};
-use crate::RootDatabase;
+use crate::{defs::Definition, RootDatabase};
pub use self::famous_defs::FamousDefs;
}
}
-/// Resolves the path at the cursor token as a derive macro if it inside a token tree of a derive attribute.
-pub fn try_resolve_derive_input_at(
- sema: &hir::Semantics<RootDatabase>,
- derive_attr: &ast::Attr,
- cursor: &SyntaxToken,
-) -> Option<MacroDef> {
- use itertools::Itertools;
- if cursor.kind() != T![ident] {
- return None;
- }
- let tt = match derive_attr.as_simple_call() {
- Some((name, tt))
- if name == "derive" && tt.syntax().text_range().contains_range(cursor.text_range()) =>
- {
- tt
- }
- _ => return None,
- };
- let tokens: Vec<_> = cursor
- .siblings_with_tokens(Direction::Prev)
- .flat_map(SyntaxElement::into_token)
- .take_while(|tok| tok.kind() != T!['('] && tok.kind() != T![,])
- .collect();
- let path = ast::Path::parse(&tokens.into_iter().rev().join("")).ok()?;
- match sema.scope(tt.syntax()).speculative_resolve(&path) {
- Some(hir::PathResolution::Macro(makro)) if makro.kind() == hir::MacroKind::Derive => {
- Some(makro)
- }
- _ => None,
- }
-}
-
/// Picks the token with the highest rank returned by the passed in function.
pub fn pick_best_token(
tokens: TokenAtOffset<SyntaxToken>,
) -> Option<SyntaxToken> {
tokens.max_by_key(move |t| f(t.kind()))
}
+/// Returns the first token at the offset that can be cast to the `AstToken` type `T`.
+pub fn pick_token<T: AstToken>(tokens: TokenAtOffset<SyntaxToken>) -> Option<T> {
+    tokens.filter_map(T::cast).next()
+}
/// Converts the mod path struct into its ast representation.
pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path {
segments.extend(
path.segments()
.iter()
- .map(|segment| make::path_segment(make::name_ref(&segment.to_string()))),
+ .map(|segment| make::path_segment(make::name_ref(&segment.to_smol_str()))),
);
make::path_from_segments(segments, is_abs)
}
pub fn visit_file_defs(
sema: &Semantics<RootDatabase>,
file_id: FileId,
- cb: &mut dyn FnMut(Either<hir::ModuleDef, hir::Impl>),
+ cb: &mut dyn FnMut(Definition),
) {
let db = sema.db;
let module = match sema.to_module_def(file_id) {
if let ModuleDef::Module(submodule) = def {
if let hir::ModuleSource::Module(_) = submodule.definition_source(db).value {
defs.extend(submodule.declarations(db));
- submodule.impl_defs(db).into_iter().for_each(|impl_| cb(Either::Right(impl_)));
+ submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into()));
}
}
- cb(Either::Left(def));
+ cb(def.into());
}
- module.impl_defs(db).into_iter().for_each(|impl_| cb(Either::Right(impl_)));
+ module.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into()));
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
| ast::Expr::TryExpr(_)
| ast::Expr::TupleExpr(_)
| ast::Expr::WhileExpr(_)
+ | ast::Expr::LetExpr(_)
| ast::Expr::YieldExpr(_) => cb(expr),
}
}
-/// Calls `cb` on each break expr inside of `body` that is applicable for the given label.
-pub fn for_each_break_expr(
+/// Calls `cb` on every `break` and `continue` expression inside of `body` that
+/// applies to the given `label` — either it carries a matching lifetime, or it
+/// is unlabeled and occurs at nesting depth 0 (i.e. not inside an inner loop or
+/// labeled block).
pub fn for_each_break_and_continue_expr(
+ label: Option<ast::Label>,
+ body: Option<ast::StmtList>,
+ cb: &mut dyn FnMut(ast::Expr),
+) {
+ let label = label.and_then(|lbl| lbl.lifetime());
+ if let Some(b) = body {
+ let tree_depth_iterator = TreeWithDepthIterator::new(b);
+ for (expr, depth) in tree_depth_iterator {
+ match expr {
+ // Unlabeled breaks match only at depth 0; labeled ones must
+ // textually match the label's lifetime.
+ ast::Expr::BreakExpr(b)
+ if (depth == 0 && b.lifetime().is_none())
+ || eq_label_lt(&label, &b.lifetime()) =>
+ {
+ cb(ast::Expr::BreakExpr(b));
+ }
+ // Same matching rule for `continue`.
+ ast::Expr::ContinueExpr(c)
+ if (depth == 0 && c.lifetime().is_none())
+ || eq_label_lt(&label, &c.lifetime()) =>
+ {
+ cb(ast::Expr::ContinueExpr(c));
+ }
+ _ => (),
+ }
+ }
+ }
+}
+
+/// Calls `cb` on each break expr inside of `body` that is applicable for the
+/// given label: a matching labeled break, or an unlabeled break at depth 0.
+fn for_each_break_expr(
label: Option<ast::Label>,
body: Option<ast::StmtList>,
cb: &mut dyn FnMut(ast::BreakExpr),
) {
let label = label.and_then(|lbl| lbl.lifetime());
- let mut depth = 0;
if let Some(b) = body {
- let preorder = &mut b.syntax().preorder();
- let ev_as_expr = |ev| match ev {
- WalkEvent::Enter(it) => Some(WalkEvent::Enter(ast::Expr::cast(it)?)),
- WalkEvent::Leave(it) => Some(WalkEvent::Leave(ast::Expr::cast(it)?)),
- };
- let eq_label = |lt: Option<ast::Lifetime>| {
- lt.zip(label.as_ref()).map_or(false, |(lt, lbl)| lt.text() == lbl.text())
- };
- while let Some(node) = preorder.find_map(ev_as_expr) {
- match node {
- WalkEvent::Enter(expr) => match expr {
- ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_) => {
- depth += 1
- }
- ast::Expr::BlockExpr(e) if e.label().is_some() => depth += 1,
- ast::Expr::BreakExpr(b)
- if (depth == 0 && b.lifetime().is_none()) || eq_label(b.lifetime()) =>
- {
- cb(b);
- }
- _ => (),
- },
- WalkEvent::Leave(expr) => match expr {
- ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_) => {
- depth -= 1
- }
- ast::Expr::BlockExpr(e) if e.label().is_some() => depth -= 1,
- _ => (),
- },
+ // Depth tracking is now delegated to `TreeWithDepthIterator`, which
+ // yields each expression together with its loop/labeled-block depth.
+ let tree_depth_iterator = TreeWithDepthIterator::new(b);
+ for (expr, depth) in tree_depth_iterator {
+ match expr {
+ // Unlabeled breaks match only at depth 0; labeled ones must match the label.
+ ast::Expr::BreakExpr(b)
+ if (depth == 0 && b.lifetime().is_none())
+ || eq_label_lt(&label, &b.lifetime()) =>
+ {
+ cb(b);
+ }
+ _ => (),
+ }
}
}
}
+
+/// Returns `true` iff both lifetimes are present and spell the same text;
+/// `None` on either side yields `false`.
+fn eq_label_lt(lt1: &Option<ast::Lifetime>, lt2: &Option<ast::Lifetime>) -> bool {
+    match (lt1, lt2) {
+        (Some(lt), Some(lbl)) => lt.text() == lbl.text(),
+        _ => false,
+    }
+}
+
+/// Preorder walk over a statement list that tracks how many loop or labeled
+/// block scopes have been entered but not yet left (see the `Iterator` impl).
+struct TreeWithDepthIterator {
+ // Underlying syntax-tree preorder traversal over the statement list.
+ preorder: Preorder<RustLanguage>,
+ // Current loop/labeled-block nesting depth relative to the starting node.
+ depth: i32,
+}
+
+impl TreeWithDepthIterator {
+ /// Starts a walk rooted at `body` with depth 0.
+ fn new(body: ast::StmtList) -> Self {
+ let preorder = body.syntax().preorder();
+ Self { preorder, depth: 0 }
+ }
+}
+
+/// Yields every expression entered during the preorder walk together with its
+/// current loop-nesting depth. Entering a `loop`/`while`/`for` expression or a
+/// labeled block increments the depth; leaving one decrements it. Those
+/// depth-introducing expressions are consumed for bookkeeping and are not
+/// themselves yielded.
+//
+// NOTE: the unused lifetime parameter `'a` on the impl header was removed —
+// nothing in the impl referenced it.
+impl Iterator for TreeWithDepthIterator {
+    type Item = (ast::Expr, i32);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        // Advance the walk to the next enter/leave event that is an expression.
+        while let Some(event) = self.preorder.find_map(|ev| match ev {
+            WalkEvent::Enter(it) => ast::Expr::cast(it).map(WalkEvent::Enter),
+            WalkEvent::Leave(it) => ast::Expr::cast(it).map(WalkEvent::Leave),
+        }) {
+            match event {
+                WalkEvent::Enter(
+                    ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_),
+                ) => self.depth += 1,
+                WalkEvent::Leave(
+                    ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_),
+                ) => self.depth -= 1,
+                // Labeled blocks are also break/continue targets, so they count
+                // towards the nesting depth as well.
+                WalkEvent::Enter(ast::Expr::BlockExpr(e)) if e.label().is_some() => {
+                    self.depth += 1
+                }
+                WalkEvent::Leave(ast::Expr::BlockExpr(e)) if e.label().is_some() => {
+                    self.depth -= 1
+                }
+                // Any other expression is yielded on enter with the depth at
+                // that point; leave events for them are ignored.
+                WalkEvent::Enter(expr) => return Some((expr, self.depth)),
+                WalkEvent::Leave(_) => (),
+            }
+        }
+        None
+    }
+}
+
+/// Checks if the given lint is equal or is contained by the other lint which may or may not be a group.
+/// Checks if the given lint is equal or is contained by the other lint which may or may not be a group.
+pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
+    if lint == lint_is {
+        return true;
+    }
+
+    // Otherwise `lint_is` may name a lint group: search every known group
+    // table for it and, if found, test whether `lint` is one of its children.
+    generated_lints::DEFAULT_LINT_GROUPS
+        .iter()
+        .chain(generated_lints::CLIPPY_LINT_GROUPS.iter())
+        .chain(generated_lints::RUSTDOC_LINT_GROUPS.iter())
+        .find(|&check| check.lint.label == lint_is)
+        .map_or(false, |group| group.children.contains(&lint))
+}
+
+/// Parses the input token tree as comma separated plain paths.
+pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
+ let r_paren = input.r_paren_token();
+ let tokens =
+ input.syntax().children_with_tokens().skip(1).map_while(|it| match it.into_token() {
+ // seeing a keyword means the attribute is unclosed so stop parsing here
+ Some(tok) if tok.kind().is_keyword() => None,
+ // don't include the right token tree parenthesis if it exists
+ tok @ Some(_) if tok == r_paren => None,
+ // only nodes that we can find are other TokenTrees, those are unexpected in this parse though
+ None => None,
+ Some(tok) => Some(tok),
+ });
+ let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
+ let paths = input_expressions
+ .into_iter()
+ .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+ .filter_map(|mut tokens| {
+ syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
+ ast::Expr::PathExpr(it) => it.path(),
+ _ => None,
+ })
+ })
+ .collect();
+ Some(paths)
+}