pub mod import_assets;
pub mod insert_use;
pub mod merge_imports;
pub mod insert_whitespace_into_node;
pub mod node_ext;
pub mod rust_doc;
pub mod format_string;

use std::{collections::VecDeque, iter};

use base_db::FileId;
use either::Either;
use hir::{ItemInNs, MacroDef, ModuleDef, Name, PathResolution, Semantics};
use itertools::Itertools;
use syntax::{
    ast::{self, make, HasLoopBody},
    AstNode, AstToken, Direction, SyntaxElement, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent,
    T,
};

use crate::{defs::Definition, RootDatabase};

pub use self::famous_defs::FamousDefs;
pub fn get_path_in_derive_attr(
sema: &hir::Semantics<RootDatabase>,
attr: &ast::Attr,
- cursor: &Ident,
+ cursor: &ast::Ident,
) -> Option<ast::Path> {
- let cursor = cursor.syntax();
let path = attr.path()?;
let tt = attr.token_tree()?;
- if !tt.syntax().text_range().contains_range(cursor.text_range()) {
+ if !tt.syntax().text_range().contains_range(cursor.syntax().text_range()) {
return None;
}
let scope = sema.scope(attr.syntax());
if PathResolution::Macro(derive) != resolved_attr {
return None;
}
+ get_path_at_cursor_in_tt(cursor)
+}
+/// Parses the path the identifier is part of inside a token tree.
+pub fn get_path_at_cursor_in_tt(cursor: &ast::Ident) -> Option<ast::Path> {
+ let cursor = cursor.syntax();
let first = cursor
.siblings_with_tokens(Direction::Prev)
.filter_map(SyntaxElement::into_token)
.filter_map(SyntaxElement::into_token)
.take_while(|tok| tok != cursor);
- ast::Path::parse(&path_tokens.chain(iter::once(cursor.clone())).join("")).ok()
-}
-
-/// Parses and resolves the path at the cursor position in the given attribute, if it is a derive.
-/// This special case is required because the derive macro is a compiler builtin that discards the input derives.
-pub fn try_resolve_derive_input(
- sema: &hir::Semantics<RootDatabase>,
- attr: &ast::Attr,
- cursor: &Ident,
-) -> Option<PathResolution> {
- let path = get_path_in_derive_attr(sema, attr, cursor)?;
- let scope = sema.scope(attr.syntax());
- // FIXME: This double resolve shouldn't be necessary
- // It's only here so we prefer macros over other namespaces
- match scope.speculative_resolve_as_mac(&path) {
- Some(mac) if mac.kind() == hir::MacroKind::Derive => Some(PathResolution::Macro(mac)),
- Some(_) => return None,
- None => scope
- .speculative_resolve(&path)
- .filter(|res| matches!(res, PathResolution::Def(ModuleDef::Module(_)))),
- }
+ syntax::hacks::parse_expr_from_str(&path_tokens.chain(iter::once(cursor.clone())).join(""))
+ .and_then(|expr| match expr {
+ ast::Expr::PathExpr(it) => it.path(),
+ _ => None,
+ })
}
/// Picks the token with the highest rank returned by the passed in function.
) -> Option<SyntaxToken> {
tokens.max_by_key(move |t| f(t.kind()))
}
+pub fn pick_token<T: AstToken>(mut tokens: TokenAtOffset<SyntaxToken>) -> Option<T> {
+ tokens.find_map(T::cast)
+}
/// Converts the mod path struct into its ast representation.
pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path {
pub fn visit_file_defs(
sema: &Semantics<RootDatabase>,
file_id: FileId,
- cb: &mut dyn FnMut(Either<hir::ModuleDef, hir::Impl>),
+ cb: &mut dyn FnMut(Definition),
) {
let db = sema.db;
let module = match sema.to_module_def(file_id) {
if let ModuleDef::Module(submodule) = def {
if let hir::ModuleSource::Module(_) = submodule.definition_source(db).value {
defs.extend(submodule.declarations(db));
- submodule.impl_defs(db).into_iter().for_each(|impl_| cb(Either::Right(impl_)));
+ submodule.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into()));
}
}
- cb(Either::Left(def));
+ cb(def.into());
}
- module.impl_defs(db).into_iter().for_each(|impl_| cb(Either::Right(impl_)));
+ module.impl_defs(db).into_iter().for_each(|impl_| cb(impl_.into()));
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
| ast::Expr::TryExpr(_)
| ast::Expr::TupleExpr(_)
| ast::Expr::WhileExpr(_)
+ | ast::Expr::LetExpr(_)
| ast::Expr::YieldExpr(_) => cb(expr),
}
}
}
}
}
+
+/// Checks if the given lint is equal or is contained by the other lint which may or may not be a group.
+pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
+ if lint == lint_is {
+ return true;
+ }
+
+ if let Some(group) = generated_lints::DEFAULT_LINT_GROUPS
+ .iter()
+ .chain(generated_lints::CLIPPY_LINT_GROUPS.iter())
+ .chain(generated_lints::RUSTDOC_LINT_GROUPS.iter())
+ .find(|&check| check.lint.label == lint_is)
+ {
+ group.children.contains(&lint)
+ } else {
+ false
+ }
+}
+
+/// Parses the input token tree as comma separated plain paths.
+pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
+ let r_paren = input.r_paren_token();
+ let tokens =
+ input.syntax().children_with_tokens().skip(1).map_while(|it| match it.into_token() {
+ // seeing a keyword means the attribute is unclosed so stop parsing here
+ Some(tok) if tok.kind().is_keyword() => None,
+ // don't include the right token tree parenthesis if it exists
+ tok @ Some(_) if tok == r_paren => None,
+ // only nodes that we can find are other TokenTrees, those are unexpected in this parse though
+ None => None,
+ Some(tok) => Some(tok),
+ });
+ let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
+ let paths = input_expressions
+ .into_iter()
+ .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+ .filter_map(|mut tokens| {
+ syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
+ ast::Expr::PathExpr(it) => it.path(),
+ _ => None,
+ })
+ })
+ .collect();
+ Some(paths)
+}