use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use clippy_utils::ty::match_type;
use clippy_utils::visitors::is_local_used;
use clippy_utils::{path_to_local_id, paths, peel_blocks, peel_ref_operators, strip_pat_refs};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{BinOpKind, Expr, ExprKind, PatKind};
use rustc_span::sym;
declare_clippy_lint! {
- /// **What it does:** Checks for naive byte counts
+ /// ### What it does
+ /// Checks for naive byte counts
///
- /// **Why is this bad?** The [`bytecount`](https://crates.io/crates/bytecount)
+ /// ### Why is this bad?
+ /// The [`bytecount`](https://crates.io/crates/bytecount)
/// crate has methods to count your bytes faster, especially for large slices.
///
- /// **Known problems:** If you have predominantly small slices, the
+ /// ### Known problems
+ /// If you have predominantly small slices, the
/// `bytecount::count(..)` method may actually be slower. However, if you can
/// ensure that less than 2³²-1 matches arise, the `naive_count_32(..)` can be
/// faster in those cases.
///
- /// **Example:**
- ///
+ /// ### Example
/// ```rust
/// # let vec = vec![1_u8];
/// &vec.iter().filter(|x| **x == 0u8).count(); // use bytecount::count instead
/// ```
+ #[clippy::version = "pre 1.29.0"]
pub NAIVE_BYTECOUNT,
pedantic,
"use of naive `<slice>.filter(|&x| x == y).count()` to count byte values"
// NOTE(review): this block is the tail of a partially applied unified diff:
// lines prefixed with `-` are pre-patch text and lines prefixed with `+` are
// their post-patch replacements, so the region is not valid Rust as shown.
// Resolving it means keeping each `+` line (marker stripped) and dropping the
// matching `-` line. The block is also truncated in this view — the
// `span_lint_and_sugg(` call on the last line is cut off mid-argument-list.
impl<'tcx> LateLintPass<'tcx> for ByteCount {
// Matches `<recv>.filter(|arg| arg == needle).count()` chains in the HIR and
// (in the truncated tail) suggests `bytecount::count(..)` instead.
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
if_chain! {
// `+` side drops the second `MethodCall` tuple field and swaps
// `sym!(count)` for the preinterned `sym::count` — both are rustc/clippy
// API migrations, not behavior changes.
- if let ExprKind::MethodCall(count, _, [count_recv], _) = expr.kind;
- if count.ident.name == sym!(count);
- if let ExprKind::MethodCall(filter, _, [filter_recv, filter_arg], _) = count_recv.kind;
+ if let ExprKind::MethodCall(count, [count_recv], _) = expr.kind;
+ if count.ident.name == sym::count;
+ if let ExprKind::MethodCall(filter, [filter_recv, filter_arg], _) = count_recv.kind;
if filter.ident.name == sym!(filter);
if let ExprKind::Closure(_, _, body_id, _, _) = filter_arg.kind;
let body = cx.tcx.hir().body(body_id);
// NOTE(review): lines appear to be missing between diff hunks here —
// `arg_id` and `l` used below are never bound in the visible text
// (presumably a `strip_pat_refs`/`PatKind::Binding` extraction of the
// closure parameter and the opening of a `match_type(cx,` call were
// dropped with the hunk context — confirm against the full file).
cx.typeck_results().expr_ty(filter_recv).peel_refs(),
&paths::SLICE_ITER);
// True when `expr`, after peeling wrapping blocks and `&`/`*` operators,
// is exactly the closure-argument local (`arg_id`).
let operand_is_arg = |expr| {
// `+` side: `remove_blocks` was renamed to `peel_blocks` in clippy_utils.
- let expr = peel_ref_operators(cx, remove_blocks(expr));
+ let expr = peel_ref_operators(cx, peel_blocks(expr));
path_to_local_id(expr, arg_id)
};
// NOTE(review): the arms binding `needle` are incomplete in this view —
// more dropped hunk context; `needle` should end up as whichever operand
// of the closure's comparison is NOT the closure argument.
let needle = if operand_is_arg(l) {
return;
};
// Only lint when the counted values are `u8` (byte counts).
if ty::Uint(UintTy::U8) == *cx.typeck_results().expr_ty(needle).peel_refs().kind();
// `+` side: free function `is_local_used` replaces the removed
// `LocalUsedVisitor` type; needle must not capture the closure argument.
- if !LocalUsedVisitor::new(cx, arg_id).check_expr(needle);
+ if !is_local_used(cx, needle, arg_id);
then {
// Peel a `.iter()`/`.iter_mut()` receiver so the suggestion names the
// underlying slice rather than the iterator.
- let haystack = if let ExprKind::MethodCall(path, _, args, _) =
+ let haystack = if let ExprKind::MethodCall(path, args, _) =
filter_recv.kind {
let p = path.ident.name;
if (p == sym::iter || p == sym!(iter_mut)) && args.len() == 1 {
&args[0]
} else {
// `+` side: `filter_recv` is already a `&Expr`; the extra `&` goes away
// with the MethodCall representation change.
- &filter_recv
+ filter_recv
}
} else {
- &filter_recv
+ filter_recv
};
let mut applicability = Applicability::MaybeIncorrect;
// (truncated here) emits the `bytecount::count(..)` replacement suggestion.
span_lint_and_sugg(