1 #![allow(clippy::default_hash_types)]
3 use crate::reexport::*;
5 use crate::rustc::hir::*;
6 use crate::rustc::hir::intravisit::{walk_body, walk_expr, walk_ty, FnKind, NestedVisitorMap, Visitor};
7 use crate::rustc::lint::{LateContext, LateLintPass, LintArray, LintPass, in_external_macro, LintContext};
8 use crate::rustc::{declare_tool_lint, lint_array};
9 use if_chain::if_chain;
10 use crate::rustc::ty::{self, Ty, TyCtxt, TypeckTables};
11 use crate::rustc::ty::layout::LayoutOf;
12 use crate::rustc_typeck::hir_ty_to_ty;
13 use std::cmp::Ordering;
14 use std::collections::BTreeMap;
16 use crate::syntax::ast::{FloatTy, IntTy, UintTy};
17 use crate::syntax::source_map::Span;
18 use crate::syntax::errors::DiagnosticBuilder;
19 use crate::rustc_target::spec::abi::Abi;
20 use crate::utils::{comparisons, differing_macro_contexts, higher, in_constant, in_macro, last_path_segment, match_def_path, match_path,
21 match_type, multispan_sugg, opt_def_id, same_tys, snippet, snippet_opt, span_help_and_lint, span_lint,
22 span_lint_and_sugg, span_lint_and_then, clip, unsext, sext, int_bits};
23 use crate::utils::paths;
24 use crate::consts::{constant, Constant};
26 /// Handles all the linting of funky types
27 #[allow(missing_copy_implementations)]
30 /// **What it does:** Checks for use of `Box<Vec<_>>` anywhere in the code.
32 /// **Why is this bad?** `Vec` already keeps its contents in a separate area on
33 /// the heap. So if you `Box` it, you just add another level of indirection
34 /// without any benefit whatsoever.
36 /// **Known problems:** None.
41 /// values: Box<Vec<Foo>>,
52 declare_clippy_lint! {
55 "usage of `Box<Vec<T>>`, vector elements are already on the heap"
58 /// **What it does:** Checks for use of `Option<Option<_>>` in function signatures and type
61 /// **Why is this bad?** `Option<_>` represents an optional value. `Option<Option<_>>`
62 /// represents an optional optional value which is logically the same thing as an optional
63 /// value but has an unneeded extra level of wrapping.
65 /// **Known problems:** None.
69 /// fn x() -> Option<Option<u32>> {
72 declare_clippy_lint! {
75 "usage of `Option<Option<T>>`"
78 /// **What it does:** Checks for usage of any `LinkedList`, suggesting to use a
79 /// `Vec` or a `VecDeque` (formerly called `RingBuf`).
81 /// **Why is this bad?** Gankro says:
83 /// > The TL;DR of `LinkedList` is that it's built on a massive amount of
84 /// pointers and indirection.
85 /// > It wastes memory, it has terrible cache locality, and is all-around slow.
87 /// > "only" amortized for push/pop, should be faster in the general case for
88 /// almost every possible
89 /// > workload, and isn't even amortized at all if you can predict the capacity
92 /// > `LinkedList`s are only really good if you're doing a lot of merging or
93 /// splitting of lists.
94 /// > This is because they can just mangle some pointers instead of actually
95 /// copying the data. Even
96 /// > if you're doing a lot of insertion in the middle of the list, `RingBuf`
97 /// can still be better
98 /// > because of how expensive it is to seek to the middle of a `LinkedList`.
100 /// **Known problems:** False positives – the instances where using a
101 /// `LinkedList` makes sense are few and far between, but they can still happen.
105 /// let x = LinkedList::new();
107 declare_clippy_lint! {
110 "usage of LinkedList, usually a vector is faster, or a more specialized data \
111 structure like a VecDeque"
114 /// **What it does:** Checks for use of `&Box<T>` anywhere in the code.
116 /// **Why is this bad?** Any `&Box<T>` can also be a `&T`, which is more
119 /// **Known problems:** None.
123 /// fn foo(bar: &Box<T>) { ... }
129 /// fn foo(bar: &T) { ... }
131 declare_clippy_lint! {
134 "a borrow of a boxed type"
// Boilerplate: registers the four lints that `TypePass` may emit.
137 impl LintPass for TypePass {
138 fn get_lints(&self) -> LintArray {
139 lint_array!(BOX_VEC, OPTION_OPTION, LINKEDLIST, BORROWED_BOX)
// Runs the type lints on function signatures, struct fields, trait items and
// `let` bindings. NOTE(review): this numbered listing omits some original
// lines (gaps in the embedded numbering), so bodies below appear truncated.
143 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypePass {
144 fn check_fn(&mut self, cx: &LateContext<'_, '_>, _: FnKind<'_>, decl: &FnDecl, _: &Body, _: Span, id: NodeId) {
145 // skip trait implementations, see #605
146 if let Some(hir::Node::Item(item)) = cx.tcx.hir.find(cx.tcx.hir.get_parent(id)) {
147 if let ItemKind::Impl(_, _, _, _, Some(..), _, _) = item.node {
152 check_fn_decl(cx, decl);
155 fn check_struct_field(&mut self, cx: &LateContext<'_, '_>, field: &StructField) {
// `is_local = false`: fields get the full lint set, not just BORROWED_BOX.
156 check_ty(cx, &field.ty, false);
159 fn check_trait_item(&mut self, cx: &LateContext<'_, '_>, item: &TraitItem) {
161 TraitItemKind::Const(ref ty, _) | TraitItemKind::Type(_, Some(ref ty)) => check_ty(cx, ty, false),
162 TraitItemKind::Method(ref sig, _) => check_fn_decl(cx, &sig.decl),
167 fn check_local(&mut self, cx: &LateContext<'_, '_>, local: &Local) {
168 if let Some(ref ty) = local.ty {
// `is_local = true`: local bindings are only checked for BORROWED_BOX
// (see the doc comment on `check_ty`).
169 check_ty(cx, ty, true);
/// Lints every argument type and the (explicit) return type of a function
/// declaration. `is_local = false`, so all `TypePass` lints apply.
174 fn check_fn_decl(cx: &LateContext<'_, '_>, decl: &FnDecl) {
175 for input in &decl.inputs {
176 check_ty(cx, input, false);
// A defaulted (unit) return has no written type to lint.
179 if let FunctionRetTy::Return(ref ty) = decl.output {
180 check_ty(cx, ty, false);
184 /// Check if `qpath` has last segment with type parameter matching `path`
// e.g. for `Box<Vec<T>>` with `path = paths::VEC` this returns true.
// NOTE(review): some original lines are missing from this listing (if_chain
// wrapper / closing braces not visible).
185 fn match_type_parameter(cx: &LateContext<'_, '_>, qpath: &QPath, path: &[&str]) -> bool {
// Only the final path segment carries the generic arguments of interest.
186 let last = last_path_segment(qpath);
188 if let Some(ref params) = last.args;
// `parenthesized` args are `Fn(A) -> B` sugar, which is not a type parameter.
189 if !params.parenthesized;
// Take the first *type* argument, skipping lifetimes.
190 if let Some(ty) = params.args.iter().find_map(|arg| match arg {
191 GenericArg::Type(ty) => Some(ty),
192 GenericArg::Lifetime(_) => None,
194 if let TyKind::Path(ref qpath) = ty.node;
// Resolve the parameter's path to a DefId and compare against `path`.
195 if let Some(did) = opt_def_id(cx.tables.qpath_def(qpath, cx.tcx.hir.node_to_hir_id(ty.id)));
196 if match_def_path(cx.tcx, did, path);
204 /// Recursively check for `TypePass` lints in the given type. Stop at the first
207 /// The parameter `is_local` distinguishes the context of the type; types from
208 /// local bindings should only be checked for the `BORROWED_BOX` lint.
// NOTE(review): the embedded line numbering has gaps, so several arms below
// are missing lines (e.g. the `match` headers and span_help_and_lint calls).
209 fn check_ty(cx: &LateContext<'_, '_>, ast_ty: &hir::Ty, is_local: bool) {
// Never lint types produced by macro expansion.
210 if in_macro(ast_ty.span) {
// Plain path types (`Box<..>`, `Option<..>`, `LinkedList<..>`) are only
// linted in non-local positions; locals fall through to recursion only.
214 TyKind::Path(ref qpath) if !is_local => {
215 let hir_id = cx.tcx.hir.node_to_hir_id(ast_ty.id);
216 let def = cx.tables.qpath_def(qpath, hir_id);
217 if let Some(def_id) = opt_def_id(def) {
// BOX_VEC: `Box<Vec<T>>` adds an extra indirection over `Vec<T>`.
218 if Some(def_id) == cx.tcx.lang_items().owned_box() {
219 if match_type_parameter(cx, qpath, &paths::VEC) {
224 "you seem to be trying to use `Box<Vec<T>>`. Consider using just `Vec<T>`",
225 "`Vec<T>` is already on the heap, `Box<Vec<T>>` makes an extra allocation.",
227 return; // don't recurse into the type
// OPTION_OPTION: `Option<Option<T>>`.
229 } else if match_def_path(cx.tcx, def_id, &paths::OPTION) {
230 if match_type_parameter(cx, qpath, &paths::OPTION) {
235 "consider using `Option<T>` instead of `Option<Option<T>>` or a custom \
236 enum if you need to distinguish all 3 cases",
238 return; // don't recurse into the type
// LINKEDLIST: almost always better served by Vec/VecDeque.
240 } else if match_def_path(cx.tcx, def_id, &paths::LINKED_LIST) {
245 "I see you're using a LinkedList! Perhaps you meant some other data structure?",
246 "a VecDeque might work",
248 return; // don't recurse into the type
// No lint fired: recurse into the self type and every generic type
// argument of the path (lifetimes are skipped).
252 QPath::Resolved(Some(ref ty), ref p) => {
253 check_ty(cx, ty, is_local);
254 for ty in p.segments.iter().flat_map(|seg| {
257 .map_or_else(|| [].iter(), |params| params.args.iter())
258 .filter_map(|arg| match arg {
259 GenericArg::Type(ty) => Some(ty),
260 GenericArg::Lifetime(_) => None,
263 check_ty(cx, ty, is_local);
266 QPath::Resolved(None, ref p) => for ty in p.segments.iter().flat_map(|seg| {
269 .map_or_else(|| [].iter(), |params| params.args.iter())
270 .filter_map(|arg| match arg {
271 GenericArg::Type(ty) => Some(ty),
272 GenericArg::Lifetime(_) => None,
275 check_ty(cx, ty, is_local);
277 QPath::TypeRelative(ref ty, ref seg) => {
278 check_ty(cx, ty, is_local);
279 if let Some(ref params) = seg.args {
280 for ty in params.args.iter().filter_map(|arg| match arg {
281 GenericArg::Type(ty) => Some(ty),
282 GenericArg::Lifetime(_) => None,
284 check_ty(cx, ty, is_local);
// References are handled by a helper that also implements BORROWED_BOX.
290 TyKind::Rptr(ref lt, ref mut_ty) => check_ty_rptr(cx, ast_ty, is_local, lt, mut_ty),
// Recurse into component types of slices, arrays, raw pointers and tuples.
292 TyKind::Slice(ref ty) | TyKind::Array(ref ty, _) | TyKind::Ptr(MutTy { ref ty, .. }) => check_ty(cx, ty, is_local),
293 TyKind::Tup(ref tys) => for ty in tys {
294 check_ty(cx, ty, is_local);
/// BORROWED_BOX: lints `&Box<T>` (in any context, including locals) and
/// suggests `&T` with the original lifetime/mutability preserved.
/// NOTE(review): listing has numbering gaps; the if_chain body is truncated.
300 fn check_ty_rptr(cx: &LateContext<'_, '_>, ast_ty: &hir::Ty, is_local: bool, lt: &Lifetime, mut_ty: &MutTy) {
301 match mut_ty.ty.node {
302 TyKind::Path(ref qpath) => {
303 let hir_id = cx.tcx.hir.node_to_hir_id(mut_ty.ty.id);
304 let def = cx.tables.qpath_def(qpath, hir_id);
// Only fire when the referent is exactly the `Box` lang item with a
// single, non-parenthesized type parameter.
306 if let Some(def_id) = opt_def_id(def);
307 if Some(def_id) == cx.tcx.lang_items().owned_box();
308 if let QPath::Resolved(None, ref path) = *qpath;
309 if let [ref bx] = *path.segments;
310 if let Some(ref params) = bx.args;
311 if !params.parenthesized;
312 if let Some(inner) = params.args.iter().find_map(|arg| match arg {
313 GenericArg::Type(ty) => Some(ty),
314 GenericArg::Lifetime(_) => None,
317 if is_any_trait(inner) {
318 // Ignore `Box<Any>` types, see #1884 for details.
// Reconstruct the `&'lt mut` prefix for the suggestion text.
322 let ltopt = if lt.is_elided() {
325 format!("{} ", lt.name.ident().name.as_str())
327 let mutopt = if mut_ty.mutbl == Mutability::MutMutable {
332 span_lint_and_sugg(cx,
335 "you seem to be trying to use `&Box<T>`. Consider using just `&T`",
337 format!("&{}{}{}", ltopt, mutopt, &snippet(cx, inner.span, ".."))
339 return; // don't recurse into the type
342 check_ty(cx, &mut_ty.ty, is_local);
// Non-path referents: just recurse.
344 _ => check_ty(cx, &mut_ty.ty, is_local),
348 // Returns true if given type is `Any` trait.
349 fn is_any_trait(t: &hir::Ty) -> bool {
351 if let TyKind::TraitObject(ref traits, _) = t.node;
352 if traits.len() >= 1;
353 // Only Send/Sync can be used as additional traits, so it is enough to
354 // check only the first trait.
355 if match_path(&traits[0].trait_ref.path, &paths::ANY_TRAIT);
364 #[allow(missing_copy_implementations)]
367 /// **What it does:** Checks for binding a unit value.
369 /// **Why is this bad?** A unit value cannot usefully be used anywhere. So
370 /// binding one is kind of pointless.
372 /// **Known problems:** None.
378 declare_clippy_lint! {
381 "creating a let binding to a value of unit type, which usually can't be used afterwards"
/// LET_UNIT_VALUE: warns on `let x = <unit-typed expr>;` bindings, skipping
/// macro-generated code and `for`-loop desugaring.
384 fn check_let_unit(cx: &LateContext<'_, '_>, decl: &Decl) {
385 if let DeclKind::Local(ref local) = decl.node {
386 if is_unit(cx.tables.pat_ty(&local.pat)) {
// Don't lint code that originates from macros.
387 if in_external_macro(cx.sess(), decl.span) || in_macro(local.pat.span) {
// `for` loops desugar into a unit-valued `let`; that's not user code.
390 if higher::is_from_for_desugar(decl) {
398 "this let-binding has unit value. Consider omitting `let {} =`",
399 snippet(cx, local.pat.span, "..")
// Boilerplate: registers LET_UNIT_VALUE for `LetPass`.
406 impl LintPass for LetPass {
407 fn get_lints(&self) -> LintArray {
408 lint_array!(LET_UNIT_VALUE)
// Delegates every declaration to the free function `check_let_unit`.
412 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for LetPass {
413 fn check_decl(&mut self, cx: &LateContext<'a, 'tcx>, decl: &'tcx Decl) {
414 check_let_unit(cx, decl)
418 /// **What it does:** Checks for comparisons to unit.
420 /// **Why is this bad?** Unit is always equal to itself, and thus is just a
421 /// clumsily written constant. Mostly this happens when someone accidentally
422 /// adds semicolons at the end of the operands.
424 /// **Known problems:** None.
428 /// if { foo(); } == { bar(); } { baz(); }
432 /// { foo(); bar(); baz(); }
434 declare_clippy_lint! {
437 "comparing unit values"
440 #[allow(missing_copy_implementations)]
// Boilerplate: registers UNIT_CMP.
443 impl LintPass for UnitCmp {
444 fn get_lints(&self) -> LintArray {
445 lint_array!(UNIT_CMP)
// UNIT_CMP: a comparison whose left operand is unit-typed is a tautology.
// NOTE(review): the listing is missing lines here (e.g. the binding of `op`
// from `cmp` and the remaining match arms).
449 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnitCmp {
450 fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr) {
451 if in_macro(expr.span) {
454 if let ExprKind::Binary(ref cmp, ref left, _) = expr.node {
456 if op.is_comparison() && is_unit(cx.tables.expr_ty(left)) {
// `() == ()`, `() <= ()`, `() >= ()` are always true; presumably the
// missing arm maps the remaining comparison ops to "false".
457 let result = match op {
458 BinOpKind::Eq | BinOpKind::Le | BinOpKind::Ge => "true",
466 "{}-comparison of unit values detected. This will always be {}",
476 /// **What it does:** Checks for passing a unit value as an argument to a function without using a unit literal (`()`).
478 /// **Why is this bad?** This is likely the result of an accidental semicolon.
480 /// **Known problems:** None.
489 declare_clippy_lint! {
492 "passing unit to a function"
// Boilerplate: registers UNIT_ARG.
497 impl LintPass for UnitArg {
498 fn get_lints(&self) -> LintArray {
499 lint_array!(UNIT_ARG)
// UNIT_ARG: flags passing a unit value (other than a literal `()`) as a call
// argument — usually the result of a stray semicolon in the argument expr.
503 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnitArg {
504 fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr) {
505 if in_macro(expr.span) {
509 ExprKind::Call(_, ref args) | ExprKind::MethodCall(_, _, ref args) => {
511 if is_unit(cx.tables.expr_ty(arg)) && !is_unit_literal(arg) {
512 let map = &cx.tcx.hir;
513 // apparently stuff in the desugaring of `?` can trigger this
514 // so check for that here
515 // only the calls to `Try::from_error` is marked as desugared,
516 // so we need to check both the current Expr and its parent.
517 if !is_questionmark_desugar_marked_call(expr) {
519 let opt_parent_node = map.find(map.get_parent_node(expr.id));
520 if let Some(hir::Node::Expr(parent_expr)) = opt_parent_node;
521 if is_questionmark_desugar_marked_call(parent_expr);
524 // `expr` and `parent_expr` were _both_ not from
525 // desugaring `?`, so lint
530 "passing a unit value to a function",
531 "if you intended to pass a unit value, use a unit literal instead",
/// Returns true if `expr` is a call whose callee span is marked as coming
/// from the compiler's `?`-operator desugaring.
545 fn is_questionmark_desugar_marked_call(expr: &Expr) -> bool {
546 use crate::syntax_pos::hygiene::CompilerDesugaringKind;
547 if let ExprKind::Call(ref callee, _) = expr.node {
548 callee.span.is_compiler_desugaring(CompilerDesugaringKind::QuestionMark)
/// Returns true if the semantic type is the unit type `()` (an empty tuple).
554 fn is_unit(ty: Ty<'_>) -> bool {
556 ty::Tuple(slice) if slice.is_empty() => true,
/// Returns true if the expression is a literal `()` (syntactically an empty
/// tuple), as opposed to any other unit-typed expression.
561 fn is_unit_literal(expr: &Expr) -> bool {
563 ExprKind::Tup(ref slice) if slice.is_empty() => true,
570 /// **What it does:** Checks for casts from any numerical to a float type where
571 /// the receiving type cannot store all values from the original type without
572 /// rounding errors. This possible rounding is to be expected, so this lint is
573 /// `Allow` by default.
575 /// Basically, this warns on casting any integer with 32 or more bits to `f32`
576 /// or any 64-bit integer to `f64`.
578 /// **Why is this bad?** It's not bad at all. But in some applications it can be
579 /// helpful to know where precision loss can take place. This lint can help find
580 /// those places in the code.
582 /// **Known problems:** None.
586 /// let x = u64::MAX; x as f64
588 declare_clippy_lint! {
589 pub CAST_PRECISION_LOSS,
591 "casts that cause loss of precision, e.g. `x as f32` where `x: u64`"
594 /// **What it does:** Checks for casts from a signed to an unsigned numerical
595 /// type. In this case, negative values wrap around to large positive values,
596 /// which can be quite surprising in practice. However, as the cast works as
597 /// defined, this lint is `Allow` by default.
599 /// **Why is this bad?** Possibly surprising results. You can activate this lint
600 /// as a one-time check to see where numerical wrapping can arise.
602 /// **Known problems:** None.
607 /// y as u128 // will return 18446744073709551615
609 declare_clippy_lint! {
612 "casts from signed types to unsigned types, e.g. `x as u32` where `x: i32`"
615 /// **What it does:** Checks for on casts between numerical types that may
616 /// truncate large values. This is expected behavior, so the cast is `Allow` by
619 /// **Why is this bad?** In some problem domains, it is good practice to avoid
620 /// truncation. This lint can be activated to help assess where additional
621 /// checks could be beneficial.
623 /// **Known problems:** None.
627 /// fn as_u8(x: u64) -> u8 { x as u8 }
629 declare_clippy_lint! {
630 pub CAST_POSSIBLE_TRUNCATION,
632 "casts that may cause truncation of the value, e.g. `x as u8` where `x: u32`, \
633 or `x as i32` where `x: f32`"
636 /// **What it does:** Checks for casts from an unsigned type to a signed type of
637 /// the same size. Performing such a cast is a 'no-op' for the compiler,
638 /// i.e. nothing is changed at the bit level, and the binary representation of
639 /// the value is reinterpreted. This can cause wrapping if the value is too big
640 /// for the target signed type. However, the cast works as defined, so this lint
641 /// is `Allow` by default.
643 /// **Why is this bad?** While such a cast is not bad in itself, the results can
644 /// be surprising when this is not the intended behavior, as demonstrated by the
647 /// **Known problems:** None.
651 /// u32::MAX as i32 // will yield a value of `-1`
653 declare_clippy_lint! {
654 pub CAST_POSSIBLE_WRAP,
656 "casts that may cause wrapping around the value, e.g. `x as i32` where `x: u32` \
660 /// **What it does:** Checks for on casts between numerical types that may
661 /// be replaced by safe conversion functions.
663 /// **Why is this bad?** Rust's `as` keyword will perform many kinds of
664 /// conversions, including silently lossy conversions. Conversion functions such
665 /// as `i32::from` will only perform lossless conversions. Using the conversion
666 /// functions prevents conversions from turning into silent lossy conversions if
667 /// the types of the input expressions ever change, and make it easier for
668 /// people reading the code to know that the conversion is lossless.
670 /// **Known problems:** None.
674 /// fn as_u64(x: u8) -> u64 { x as u64 }
677 /// Using `::from` would look like this:
680 /// fn as_u64(x: u8) -> u64 { u64::from(x) }
682 declare_clippy_lint! {
685 "casts using `as` that are known to be lossless, e.g. `x as u64` where `x: u8`"
688 /// **What it does:** Checks for casts to the same type.
690 /// **Why is this bad?** It's just unnecessary.
692 /// **Known problems:** None.
696 /// let _ = 2i32 as i32
698 declare_clippy_lint! {
699 pub UNNECESSARY_CAST,
701 "cast to the same type, e.g. `x as i32` where `x: i32`"
704 /// **What it does:** Checks for casts from a less-strictly-aligned pointer to a
705 /// more-strictly-aligned pointer
707 /// **Why is this bad?** Dereferencing the resulting pointer may be undefined
710 /// **Known problems:** None.
714 /// let _ = (&1u8 as *const u8) as *const u16;
715 /// let _ = (&mut 1u8 as *mut u8) as *mut u16;
717 declare_clippy_lint! {
718 pub CAST_PTR_ALIGNMENT,
720 "cast from a pointer to a more-strictly-aligned pointer"
723 /// **What it does:** Checks for casts of function pointers to something other than usize
725 /// **Why is this bad?**
726 /// Casting a function pointer to anything other than usize/isize is not portable across
727 /// architectures, because you end up losing bits if the target type is too small or end up with a
728 /// bunch of extra bits that waste space and add more instructions to the final binary than
729 /// strictly necessary for the problem
731 /// Casting to isize also doesn't make sense since there are no signed addresses.
737 /// fn fun() -> i32 {}
738 /// let a = fun as i64;
741 /// fn fun2() -> i32 {}
742 /// let a = fun2 as usize;
744 declare_clippy_lint! {
745 pub FN_TO_NUMERIC_CAST,
747 "casting a function pointer to a numeric type other than usize"
750 /// **What it does:** Checks for casts of a function pointer to a numeric type not wide enough to
753 /// **Why is this bad?**
754 /// Such a cast discards some bits of the function's address. If this is intended, it would be more
755 /// clearly expressed by casting to usize first, then casting the usize to the intended type (with
756 /// a comment) to perform the truncation.
762 /// fn fn1() -> i16 { 1 };
763 /// let _ = fn1 as i32;
765 /// // Better: Cast to usize first, then comment with the reason for the truncation
766 /// fn fn2() -> i16 { 1 };
767 /// let fn_ptr = fn2 as usize;
768 /// let fn_ptr_truncated = fn_ptr as i32;
770 declare_clippy_lint! {
771 pub FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
773 "casting a function pointer to a numeric type not wide enough to store the address"
776 /// Returns the size in bits of an integral type.
777 /// Will return 0 if the type is not an int or uint variant
// `isize`/`usize` are resolved against the target's pointer width; fixed
// widths (missing from this listing due to numbering gaps) presumably map to
// their literal bit counts — TODO confirm against the full source.
778 fn int_ty_to_nbits(typ: Ty<'_>, tcx: TyCtxt<'_, '_, '_>) -> u64 {
780 ty::Int(i) => match i {
781 IntTy::Isize => tcx.data_layout.pointer_size.bits(),
788 ty::Uint(i) => match i {
789 UintTy::Usize => tcx.data_layout.pointer_size.bits(),
/// Returns true for the pointer-sized integer types `isize` and `usize`,
/// whose width is target-dependent.
800 fn is_isize_or_usize(typ: Ty<'_>) -> bool {
802 ty::Int(IntTy::Isize) | ty::Uint(UintTy::Usize) => true,
/// Emits CAST_PRECISION_LOSS for an integer-to-float cast where the float's
/// mantissa cannot represent every value of the source integer type.
807 fn span_precision_loss_lint(cx: &LateContext<'_, '_>, expr: &Expr, cast_from: Ty<'_>, cast_to_f64: bool) {
// IEEE-754 mantissa widths: f64 has 52 explicit bits, f32 has 23.
808 let mantissa_nbits = if cast_to_f64 { 52 } else { 23 };
// `usize/isize as f64` only loses precision on 64-bit targets.
809 let arch_dependent = is_isize_or_usize(cast_from) && cast_to_f64;
810 let arch_dependent_str = "on targets with 64-bit wide pointers ";
811 let from_nbits_str = if arch_dependent {
813 } else if is_isize_or_usize(cast_from) {
// Pointer-sized but cast to f32: width depends on target, report both.
814 "32 or 64".to_owned()
816 int_ty_to_nbits(cast_from, cx.tcx).to_string()
823 "casting {0} to {1} causes a loss of precision {2}({0} is {3} bits wide, but {1}'s mantissa \
824 is only {4} bits wide)",
826 if cast_to_f64 { "f64" } else { "f32" },
/// Returns true when `snip` is a parenthesized binary expression — the
/// `T::from(...)` suggestion makes the outer parentheses redundant.
838 fn should_strip_parens(op: &Expr, snip: &str) -> bool {
839 if let ExprKind::Binary(_, _, _) = op.node {
840 if snip.starts_with('(') && snip.ends_with(')') {
/// Emits CAST_LOSSLESS, suggesting `To::from(x)` instead of `x as To` for a
/// cast that is provably lossless.
847 fn span_lossless_lint(cx: &LateContext<'_, '_>, expr: &Expr, op: &Expr, cast_from: Ty<'_>, cast_to: Ty<'_>) {
848 // Do not suggest using From in consts/statics until it is valid to do so (see #2267).
849 if in_constant(cx, expr.id) { return }
850 // The suggestion is to use a function call, so if the original expression
851 // has parens on the outside, they are no longer needed.
852 let opt = snippet_opt(cx, op.span);
853 let sugg = if let Some(ref snip) = opt {
854 if should_strip_parens(op, snip) {
855 &snip[1..snip.len() - 1]
867 &format!("casting {} to {} may become silently lossy if types change", cast_from, cast_to),
869 format!("{}::from({})", cast_to, sugg),
/// Decides whether an int-to-int cast can truncate (CAST_POSSIBLE_TRUNCATION)
/// or wrap (CAST_POSSIBLE_WRAP), accounting for target-dependent widths of
/// `isize`/`usize`. NOTE(review): large numbering gaps here — the mixed
/// fixed/pointer-size match arms and the lint emission scaffolding are
/// truncated in this listing.
879 fn check_truncation_and_wrapping(cx: &LateContext<'_, '_>, expr: &Expr, cast_from: Ty<'_>, cast_to: Ty<'_>) {
880 let arch_64_suffix = " on targets with 64-bit wide pointers";
881 let arch_32_suffix = " on targets with 32-bit wide pointers";
882 let cast_unsigned_to_signed = !cast_from.is_signed() && cast_to.is_signed();
883 let from_nbits = int_ty_to_nbits(cast_from, cx.tcx);
884 let to_nbits = int_ty_to_nbits(cast_to, cx.tcx);
// For each (from-is-pointer-sized, to-is-pointer-sized) combination, compute
// whether to lint and which arch-dependence suffix to attach.
885 let (span_truncation, suffix_truncation, span_wrap, suffix_wrap) =
886 match (is_isize_or_usize(cast_from), is_isize_or_usize(cast_to)) {
// Both fixed-width or both pointer-sized: widths are directly comparable.
887 (true, true) | (false, false) => (
888 to_nbits < from_nbits,
890 to_nbits == from_nbits && cast_unsigned_to_signed,
900 to_nbits <= 32 && cast_unsigned_to_signed,
906 cast_unsigned_to_signed,
907 if from_nbits == 64 {
917 CAST_POSSIBLE_TRUNCATION,
920 "casting {} to {} may truncate the value{}",
923 match suffix_truncation {
924 ArchSuffix::_32 => arch_32_suffix,
925 ArchSuffix::_64 => arch_64_suffix,
926 ArchSuffix::None => "",
937 "casting {} to {} may wrap around the value{}",
941 ArchSuffix::_32 => arch_32_suffix,
942 ArchSuffix::_64 => arch_64_suffix,
943 ArchSuffix::None => "",
/// Fires the lossless-cast lint when both types are fixed-width, the target
/// is strictly wider, and the cast does not change signedness unsafely —
/// i.e. `From` is guaranteed to exist and be equivalent.
950 fn check_lossless(cx: &LateContext<'_, '_>, expr: &Expr, op: &Expr, cast_from: Ty<'_>, cast_to: Ty<'_>) {
951 let cast_signed_to_unsigned = cast_from.is_signed() && !cast_to.is_signed();
952 let from_nbits = int_ty_to_nbits(cast_from, cx.tcx);
953 let to_nbits = int_ty_to_nbits(cast_to, cx.tcx);
// Pointer-sized types are excluded: their width is target-dependent, so no
// `From` impl covers every target.
954 if !is_isize_or_usize(cast_from) && !is_isize_or_usize(cast_to) && from_nbits < to_nbits && !cast_signed_to_unsigned
956 span_lossless_lint(cx, expr, op, cast_from, cast_to);
// Boilerplate: registers the cast lints (list truncated in this listing).
960 impl LintPass for CastPass {
961 fn get_lints(&self) -> LintArray {
965 CAST_POSSIBLE_TRUNCATION,
// Main dispatcher for all cast lints: inspects every `x as T` expression.
// NOTE(review): heavy numbering gaps — match headers, lint emissions and
// if_chain scaffolding are missing from this listing.
975 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for CastPass {
976 fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr) {
977 if let ExprKind::Cast(ref ex, _) = expr.node {
978 let (cast_from, cast_to) = (cx.tables.expr_ty(ex), cx.tables.expr_ty(expr));
979 lint_fn_to_numeric_cast(cx, expr, ex, cast_from, cast_to);
// UNNECESSARY_CAST: literal cast to its own type. Unsuffixed literals are
// exempt — the cast is what gives them their type.
980 if let ExprKind::Lit(ref lit) = ex.node {
981 use crate::syntax::ast::{LitIntType, LitKind};
983 LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::FloatUnsuffixed(_) => {},
984 _ => if cast_from.sty == cast_to.sty && !in_external_macro(cx.sess(), expr.span) {
989 &format!("casting to the same type is unnecessary (`{}` -> `{}`)", cast_from, cast_to),
// Numeric-to-numeric casts: dispatch on (integral?, integral?).
994 if cast_from.is_numeric() && cast_to.is_numeric() && !in_external_macro(cx.sess(), expr.span) {
995 match (cast_from.is_integral(), cast_to.is_integral()) {
// int -> float: precision loss if the mantissa is narrower than the int.
997 let from_nbits = int_ty_to_nbits(cast_from, cx.tcx);
998 let to_nbits = if let ty::Float(FloatTy::F32) = cast_to.sty {
1003 if is_isize_or_usize(cast_from) || from_nbits >= to_nbits {
1004 span_precision_loss_lint(cx, expr, cast_from, to_nbits == 64);
1006 if from_nbits < to_nbits {
1007 span_lossless_lint(cx, expr, ex, cast_from, cast_to);
// float -> int: always a possible truncation; also sign loss when the
// target is unsigned.
1013 CAST_POSSIBLE_TRUNCATION,
1015 &format!("casting {} to {} may truncate the value", cast_from, cast_to),
1017 if !cast_to.is_signed() {
1022 &format!("casting {} to {} may lose the sign of the value", cast_from, cast_to),
// int -> int: sign loss, truncation/wrapping, and lossless-cast checks.
1027 if cast_from.is_signed() && !cast_to.is_signed() {
1032 &format!("casting {} to {} may lose the sign of the value", cast_from, cast_to),
1035 check_truncation_and_wrapping(cx, expr, cast_from, cast_to);
1036 check_lossless(cx, expr, ex, cast_from, cast_to);
// float -> float: f64 -> f32 truncates, f32 -> f64 is lossless.
1039 if let (&ty::Float(FloatTy::F64), &ty::Float(FloatTy::F32)) = (&cast_from.sty, &cast_to.sty)
1043 CAST_POSSIBLE_TRUNCATION,
1045 "casting f64 to f32 may truncate the value",
1048 if let (&ty::Float(FloatTy::F32), &ty::Float(FloatTy::F64)) = (&cast_from.sty, &cast_to.sty)
1050 span_lossless_lint(cx, expr, ex, cast_from, cast_to);
// CAST_PTR_ALIGNMENT: raw-pointer cast to a more strictly aligned pointee.
1057 if let ty::RawPtr(from_ptr_ty) = &cast_from.sty;
1058 if let ty::RawPtr(to_ptr_ty) = &cast_to.sty;
1059 if let Some(from_align) = cx.layout_of(from_ptr_ty.ty).ok().map(|a| a.align.abi());
1060 if let Some(to_align) = cx.layout_of(to_ptr_ty.ty).ok().map(|a| a.align.abi());
1061 if from_align < to_align;
1062 // with c_void, we inherently need to trust the user
1064 match_type(cx, from_ptr_ty.ty, &paths::C_VOID)
1065 || match_type(cx, from_ptr_ty.ty, &paths::C_VOID_LIBC)
1072 &format!("casting from `{}` to a more-strictly-aligned pointer (`{}`)", cast_from, cast_to)
/// Lints casts of function pointers to numeric types: truncating casts
/// (target narrower than a pointer) get FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
/// any other non-`usize` target gets FN_TO_NUMERIC_CAST. Both suggest
/// `as usize`.
1080 fn lint_fn_to_numeric_cast(cx: &LateContext<'_, '_>, expr: &Expr, cast_expr: &Expr, cast_from: Ty<'_>, cast_to: Ty<'_>) {
1081 match cast_from.sty {
1082 ty::FnDef(..) | ty::FnPtr(_) => {
1083 let from_snippet = snippet(cx, cast_expr.span, "x");
1085 let to_nbits = int_ty_to_nbits(cast_to, cx.tcx);
// Narrower than the target's pointer width => address bits are discarded.
1086 if to_nbits < cx.tcx.data_layout.pointer_size.bits() {
1089 FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
1091 &format!("casting function pointer `{}` to `{}`, which truncates the value", from_snippet, cast_to),
1093 format!("{} as usize", from_snippet)
// Wide enough, but not `usize`: still non-portable / non-idiomatic.
1096 } else if cast_to.sty != ty::Uint(UintTy::Usize) {
1101 &format!("casting function pointer `{}` to `{}`", from_snippet, cast_to),
1103 format!("{} as usize", from_snippet)
1111 /// **What it does:** Checks for types used in structs, parameters and `let`
1112 /// declarations above a certain complexity threshold.
1114 /// **Why is this bad?** Too complex types make the code less readable. Consider
1115 /// using a `type` definition to simplify them.
1117 /// **Known problems:** None.
1121 /// struct Foo { inner: Rc<Vec<Vec<Box<(u32, u32, u32, u32)>>>> }
1123 declare_clippy_lint! {
1124 pub TYPE_COMPLEXITY,
1126 "usage of very complex types that might be better factored into `type` definitions"
1129 #[allow(missing_copy_implementations)]
1130 pub struct TypeComplexityPass {
// Constructor: `threshold` is the complexity score above which TYPE_COMPLEXITY
// fires (body truncated in this listing).
1134 impl TypeComplexityPass {
1135 pub fn new(threshold: u64) -> Self {
// Boilerplate: registers TYPE_COMPLEXITY.
1142 impl LintPass for TypeComplexityPass {
1143 fn get_lints(&self) -> LintArray {
1144 lint_array!(TYPE_COMPLEXITY)
// Scores every written type in fns, fields, consts/statics, trait/impl items
// and locals; delegation targets are the inherent helpers below.
1148 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypeComplexityPass {
1151 cx: &LateContext<'a, 'tcx>,
1158 self.check_fndecl(cx, decl);
1161 fn check_struct_field(&mut self, cx: &LateContext<'a, 'tcx>, field: &'tcx StructField) {
1162 // enum variants are also struct fields now
1163 self.check_type(cx, &field.ty);
1166 fn check_item(&mut self, cx: &LateContext<'a, 'tcx>, item: &'tcx Item) {
1168 ItemKind::Static(ref ty, _, _) | ItemKind::Const(ref ty, _) => self.check_type(cx, ty),
1169 // functions, enums, structs, impls and traits are covered
1174 fn check_trait_item(&mut self, cx: &LateContext<'a, 'tcx>, item: &'tcx TraitItem) {
1176 TraitItemKind::Const(ref ty, _) | TraitItemKind::Type(_, Some(ref ty)) => self.check_type(cx, ty),
1177 TraitItemKind::Method(MethodSig { ref decl, .. }, TraitMethod::Required(_)) => self.check_fndecl(cx, decl),
1178 // methods with default impl are covered by check_fn
1183 fn check_impl_item(&mut self, cx: &LateContext<'a, 'tcx>, item: &'tcx ImplItem) {
1185 ImplItemKind::Const(ref ty, _) | ImplItemKind::Type(ref ty) => self.check_type(cx, ty),
1186 // methods are covered by check_fn
1191 fn check_local(&mut self, cx: &LateContext<'a, 'tcx>, local: &'tcx Local) {
1192 if let Some(ref ty) = local.ty {
1193 self.check_type(cx, ty);
1198 impl<'a, 'tcx> TypeComplexityPass {
// Scores every argument type and the explicit return type of a declaration.
1199 fn check_fndecl(&self, cx: &LateContext<'a, 'tcx>, decl: &'tcx FnDecl) {
1200 for arg in &decl.inputs {
1201 self.check_type(cx, arg);
1203 if let Return(ref ty) = decl.output {
1204 self.check_type(cx, ty);
// Walks a single type with `TypeComplexityVisitor` and lints when the
// accumulated score exceeds the configured threshold.
1208 fn check_type(&self, cx: &LateContext<'_, '_>, ty: &hir::Ty) {
// Macro-generated types are never linted.
1209 if in_macro(ty.span) {
// Start at nesting level 1 so top-level components score 10 each.
1213 let mut visitor = TypeComplexityVisitor { score: 0, nest: 1 };
1214 visitor.visit_ty(ty);
1218 if score > self.threshold {
1223 "very complex type used. Consider factoring parts into `type` definitions",
1229 /// Walks a type and assigns a complexity score to it.
1230 struct TypeComplexityVisitor {
1231 /// total complexity score of the type
1233 /// current nesting level
// Scores a type bottom-up: each syntactic component adds `10 * nest` (more
// for fn types), and nesting deepens for compound components, so deeply
// nested types score superlinearly.
1237 impl<'tcx> Visitor<'tcx> for TypeComplexityVisitor {
1238 fn visit_ty(&mut self, ty: &'tcx hir::Ty) {
1239 let (add_score, sub_nest) = match ty.node {
1240 // _, &x and *x have only small overhead; don't mess with nesting level
1241 TyKind::Infer | TyKind::Ptr(..) | TyKind::Rptr(..) => (1, 0),
1243 // the "normal" components of a type: named types, arrays/tuples
1244 TyKind::Path(..) | TyKind::Slice(..) | TyKind::Tup(..) | TyKind::Array(..) => (10 * self.nest, 1),
1246 // function types bring a lot of overhead
1247 TyKind::BareFn(ref bare) if bare.abi == Abi::Rust => (50 * self.nest, 1),
1249 TyKind::TraitObject(ref param_bounds, _) => {
1250 let has_lifetime_parameters = param_bounds
1252 .any(|bound| bound.bound_generic_params.iter().any(|gen| match gen.kind {
1253 GenericParamKind::Lifetime { .. } => true,
1256 if has_lifetime_parameters {
1257 // complex trait bounds like A<'a, 'b>
1260 // simple trait bounds like A + B
// Apply this node's score, then recurse (walk_ty, elided in this listing)
// at the deeper nesting level before restoring it.
1267 self.score += add_score;
1268 self.nest += sub_nest;
1270 self.nest -= sub_nest;
// Intra-item walk only; no nested bodies are visited.
1272 fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
1273 NestedVisitorMap::None
1277 /// **What it does:** Checks for expressions where a character literal is cast
1278 /// to `u8` and suggests using a byte literal instead.
1280 /// **Why is this bad?** In general, casting values to smaller types is
1281 /// error-prone and should be avoided where possible. In the particular case of
1282 /// converting a character literal to u8, it is easy to avoid by just using a
1283 /// byte literal instead. As an added bonus, `b'a'` is even slightly shorter
1284 /// than `'a' as u8`.
1286 /// **Known problems:** None.
1293 /// A better version, using the byte literal:
1298 declare_clippy_lint! {
1301 "casting a character literal to u8"
/// Lint pass for CHAR_LIT_AS_U8 (stateless unit struct).
1304 pub struct CharLitAsU8;
// Boilerplate: registers CHAR_LIT_AS_U8.
1306 impl LintPass for CharLitAsU8 {
1307 fn get_lints(&self) -> LintArray {
1308 lint_array!(CHAR_LIT_AS_U8)
// CHAR_LIT_AS_U8: flags `'a' as u8` and suggests the byte literal `b'a'`.
1312 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for CharLitAsU8 {
1313 fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr) {
1314 use crate::syntax::ast::{LitKind, UintTy};
// Match: cast expression, whose operand is a char literal, whose result
// type is u8, outside of macros.
1316 if let ExprKind::Cast(ref e, _) = expr.node {
1317 if let ExprKind::Lit(ref l) = e.node {
1318 if let LitKind::Char(_) = l.node {
1319 if ty::Uint(UintTy::U8) == cx.tables.expr_ty(expr).sty && !in_macro(expr.span) {
1320 let msg = "casting character literal to u8. `char`s \
1321 are 4 bytes wide in rust, so casting to u8 \
// Reuse the original char literal's snippet to build the `b'x'` suggestion.
1323 let help = format!("Consider using a byte literal instead:\nb{}", snippet(cx, e.span, "'x'"));
1324 span_help_and_lint(cx, CHAR_LIT_AS_U8, expr.span, msg, &help);
1332 /// **What it does:** Checks for comparisons where one side of the relation is
1333 /// either the minimum or maximum value for its type and warns if it involves a
1334 /// case that is always true or always false. Only integer and boolean types are
1337 /// **Why is this bad?** An expression like `min <= x` may misleadingly imply
1338 /// that it is possible for `x` to be less than the minimum. Expressions like
1339 /// `max < x` are probably mistakes.
1341 /// **Known problems:** For `usize` the size of the current compile target will
1342 /// be assumed (e.g. 64 bits on 64 bit systems). This means code that uses such
1343 /// a comparison to detect target pointer width will trigger this lint. One can
1344 /// use `mem::size_of` and compare its value or conditional compilation
1346 /// like `#[cfg(target_pointer_width = "64")] ..` instead.
1351 /// 100 > std::i32::MAX
1353 declare_clippy_lint! {
1354 pub ABSURD_EXTREME_COMPARISONS,
1356 "a comparison with a maximum or minimum value that is always true or false"
/// Lint pass for `ABSURD_EXTREME_COMPARISONS`: flags comparisons against a
/// type's minimum or maximum value that are always true or always false.
1359 pub struct AbsurdExtremeComparisons;
// Boilerplate: registers the single lint this pass can emit.
1361 impl LintPass for AbsurdExtremeComparisons {
1362 fn get_lints(&self) -> LintArray {
1363 lint_array!(ABSURD_EXTREME_COMPARISONS)
// An expression that was determined to be the extreme (minimum or maximum)
// constant of its type, together with which extreme it is.
1372 struct ExtremeExpr<'a> {
// How an absurd comparison resolves: always false, always true, or an
// inequality whose "strictly unequal" case can never occur.
1377 enum AbsurdComparisonResult {
1380 InequalityImpossible,
// Returns whether `expr` is a cast where exactly one side of the cast is a
// target-dependent integer type (`isize`/`usize`). Such casts make the
// comparison depend on the compilation target, so the caller treats them
// as unanalyzable.
1384 fn is_cast_between_fixed_and_target<'a, 'tcx>(
1385 cx: &LateContext<'a, 'tcx>,
1389 if let ExprKind::Cast(ref cast_exp, _) = expr.node {
1390 let precast_ty = cx.tables.expr_ty(cast_exp);
1391 let cast_ty = cx.tables.expr_ty(expr);
// True iff exactly one of pre-cast / post-cast types is isize/usize.
1393 return is_isize_or_usize(precast_ty) != is_isize_or_usize(cast_ty)
// Decides whether the binary comparison `lhs <op> rhs` is "absurd": one side
// is the extreme (min/max) constant of its type, making the comparison either
// always true, always false, or an inequality that can never be strict.
// Returns the culprit extreme expression plus the classification, or `None`
// when the comparison cannot be analyzed.
1399 fn detect_absurd_comparison<'a, 'tcx>(
1400 cx: &LateContext<'a, 'tcx>,
1404 ) -> Option<(ExtremeExpr<'tcx>, AbsurdComparisonResult)> {
1405 use crate::types::ExtremeType::*;
1406 use crate::types::AbsurdComparisonResult::*;
1407 use crate::utils::comparisons::*;
1409 // absurd comparison only makes sense on primitive types
1410 // primitive types don't implement comparison operators with each other
1411 if cx.tables.expr_ty(lhs) != cx.tables.expr_ty(rhs) {
1415 // comparisons between fixed-size types and target-dependent sized types are considered unanalyzable
1416 if is_cast_between_fixed_and_target(cx, lhs) || is_cast_between_fixed_and_target(cx, rhs) {
// Normalize the operator to a canonical relation (e.g. `>` becomes `<` with
// operands swapped) so the match below only has to handle `<` and `<=`.
1420 let normalized = normalize_comparison(op, lhs, rhs);
1421 let (rel, normalized_lhs, normalized_rhs) = if let Some(val) = normalized {
// Check each side for being the extreme constant of its type.
1427 let lx = detect_extreme_expr(cx, normalized_lhs);
1428 let rx = detect_extreme_expr(cx, normalized_rhs);
1433 (Some(l @ ExtremeExpr { which: Maximum, .. }), _) => (l, AlwaysFalse), // max < x
1434 (_, Some(r @ ExtremeExpr { which: Minimum, .. })) => (r, AlwaysFalse), // x < min
1440 (Some(l @ ExtremeExpr { which: Minimum, .. }), _) => (l, AlwaysTrue), // min <= x
1441 (Some(l @ ExtremeExpr { which: Maximum, .. }), _) => (l, InequalityImpossible), // max <= x
1442 (_, Some(r @ ExtremeExpr { which: Minimum, .. })) => (r, InequalityImpossible), // x <= min
1443 (_, Some(r @ ExtremeExpr { which: Maximum, .. })) => (r, AlwaysTrue), // x <= max
// Equality relations are never absurd in this sense.
1447 Rel::Ne | Rel::Eq => return None,
// Constant-evaluates `expr`; returns `Some` when its value is exactly the
// minimum or maximum representable value of its type (bool, signed or
// unsigned integer), `None` otherwise or when it is not a constant.
1451 fn detect_extreme_expr<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr) -> Option<ExtremeExpr<'tcx>> {
1452 use crate::types::ExtremeType::*;
1454 let ty = cx.tables.expr_ty(expr);
// `?` bails out when the expression is not a compile-time constant.
1456 let cv = constant(cx, cx.tables, expr)?.0;
1458 let which = match (&ty.sty, cv) {
// Minimum cases: `false`, unsigned zero, and the sign-extended i<N>::MIN.
1459 (&ty::Bool, Constant::Bool(false)) |
1460 (&ty::Uint(_), Constant::Int(0)) => Minimum,
1461 (&ty::Int(ity), Constant::Int(i)) if i == unsext(cx.tcx, i128::min_value() >> (128 - int_bits(cx.tcx, ity)), ity) => Minimum,
// Maximum cases: `true`, i<N>::MAX, and u<N>::MAX clipped to the type width.
1463 (&ty::Bool, Constant::Bool(true)) => Maximum,
1464 (&ty::Int(ity), Constant::Int(i)) if i == unsext(cx.tcx, i128::max_value() >> (128 - int_bits(cx.tcx, ity)), ity) => Maximum,
1465 (&ty::Uint(uty), Constant::Int(i)) if clip(cx.tcx, u128::max_value(), uty) == i => Maximum,
1475 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for AbsurdExtremeComparisons {
// Entry point: inspect every binary expression; when `detect_absurd_comparison`
// classifies it, emit the lint with an explanation tailored to the case.
1476 fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr) {
1477 use crate::types::ExtremeType::*;
1478 use crate::types::AbsurdComparisonResult::*;
1480 if let ExprKind::Binary(ref cmp, ref lhs, ref rhs) = expr.node {
1481 if let Some((culprit, result)) = detect_absurd_comparison(cx, cmp.node, lhs, rhs) {
// Suppress the lint inside macro expansions.
1482 if !in_macro(expr.span) {
1483 let msg = "this comparison involving the minimum or maximum element for this \
1484 type contains a case that is always true or always false";
// Build a per-case conclusion string; the InequalityImpossible case also
// suggests the equivalent `==` comparison using the original source snippets.
1486 let conclusion = match result {
1487 AlwaysFalse => "this comparison is always false".to_owned(),
1488 AlwaysTrue => "this comparison is always true".to_owned(),
1489 InequalityImpossible => format!(
1490 "the case where the two sides are not equal never occurs, consider using {} == {} \
1492 snippet(cx, lhs.span, "lhs"),
1493 snippet(cx, rhs.span, "rhs")
1498 "because {} is the {} value for this type, {}",
1499 snippet(cx, culprit.expr.span, "x"),
1500 match culprit.which {
1501 Minimum => "minimum",
1502 Maximum => "maximum",
1507 span_help_and_lint(cx, ABSURD_EXTREME_COMPARISONS, expr.span, msg, &help);
1514 /// **What it does:** Checks for comparisons where the relation is always either
1515 /// true or false, but where one side has been upcast so that the comparison is
1516 /// necessary. Only integer types are checked.
1518 /// **Why is this bad?** An expression like `let x : u8 = ...; (x as u32) > 300`
1519 /// will mistakenly imply that it is possible for `x` to be outside the range of
1522 /// **Known problems:**
1523 /// https://github.com/rust-lang-nursery/rust-clippy/issues/886
1527 /// let x : u8 = ...; (x as u32) > 300
1529 declare_clippy_lint! {
1530 pub INVALID_UPCAST_COMPARISONS,
1532 "a comparison involving an upcast which is always true or false"
/// Lint pass for `INVALID_UPCAST_COMPARISONS`: flags comparisons that are
/// made vacuous by an upcast (e.g. `(x as u32) > 300` for `x: u8`).
1535 pub struct InvalidUpcastComparisons;
// Boilerplate: registers the single lint this pass can emit.
1537 impl LintPass for InvalidUpcastComparisons {
1538 fn get_lints(&self) -> LintArray {
1539 lint_array!(INVALID_UPCAST_COMPARISONS)
// Derive for the `FullInt` helper type declared below (definition not fully
// visible here); presumably a sign-tagged 128-bit integer — confirm against
// the surrounding lines.
1543 #[derive(Copy, Clone, Debug, Eq)]
// Compares a signed `i128` against an unsigned `u128` without a lossy cast:
// the sign-loss allow is safe because the cast only happens on the branch
// where the value is known to fit.
1550 #[allow(clippy::cast_sign_loss)]
1551 fn cmp_s_u(s: i128, u: u128) -> Ordering {
// If `u` exceeds i128::MAX it is necessarily greater than any signed value.
1554 } else if u > (i128::max_value() as u128) {
// Equality is defined in terms of `partial_cmp`, which (see below) always
// returns `Some`, so the `expect` can never fire.
1562 impl PartialEq for FullInt {
1563 fn eq(&self, other: &Self) -> bool {
1564 self.partial_cmp(other)
1565 .expect("partial_cmp only returns Some(_)") == Ordering::Equal
// Total ordering across the signed/unsigned variants: same-variant pairs use
// the primitive `cmp`, mixed pairs go through `cmp_s_u` (reversed when the
// unsigned value is on the left).
1569 impl PartialOrd for FullInt {
1570 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
1571 Some(match (self, other) {
1572 (&FullInt::S(s), &FullInt::S(o)) => s.cmp(&o),
1573 (&FullInt::U(s), &FullInt::U(o)) => s.cmp(&o),
1574 (&FullInt::S(s), &FullInt::U(o)) => Self::cmp_s_u(s, o),
1575 (&FullInt::U(s), &FullInt::S(o)) => Self::cmp_s_u(o, s).reverse(),
// `Ord` delegates to `partial_cmp`; the expect documents the invariant that
// `partial_cmp` is total for this type.
1579 impl Ord for FullInt {
1580 fn cmp(&self, other: &Self) -> Ordering {
1581 self.partial_cmp(other)
1582 .expect("partial_cmp for FullInt can never return None")
// For a cast expression, returns the `(min, max)` value bounds of the
// *pre-cast* integer type as `FullInt`s. Returns `None` when `expr` is not a
// cast, or when the pre- and post-cast types have the same layout size (a
// same-size cast like i32 -> u32 can wrap, which would invalidate the bounds
// analysis, per the comment below).
1587 fn numeric_cast_precast_bounds<'a>(cx: &LateContext<'_, '_>, expr: &'a Expr) -> Option<(FullInt, FullInt)> {
1588 use crate::syntax::ast::{IntTy, UintTy};
1591 if let ExprKind::Cast(ref cast_exp, _) = expr.node {
1592 let pre_cast_ty = cx.tables.expr_ty(cast_exp);
1593 let cast_ty = cx.tables.expr_ty(expr);
1594 // if it's a cast from i32 to u32 wrapping will invalidate all these checks
1595 if cx.layout_of(pre_cast_ty).ok().map(|l| l.size) == cx.layout_of(cast_ty).ok().map(|l| l.size) {
// Map each fixed-width integer type to its representable range.
1598 match pre_cast_ty.sty {
1599 ty::Int(int_ty) => Some(match int_ty {
1600 IntTy::I8 => (FullInt::S(i128::from(i8::min_value())), FullInt::S(i128::from(i8::max_value()))),
1602 FullInt::S(i128::from(i16::min_value())),
1603 FullInt::S(i128::from(i16::max_value())),
1606 FullInt::S(i128::from(i32::min_value())),
1607 FullInt::S(i128::from(i32::max_value())),
1610 FullInt::S(i128::from(i64::min_value())),
1611 FullInt::S(i128::from(i64::max_value())),
1613 IntTy::I128 => (FullInt::S(i128::min_value()), FullInt::S(i128::max_value())),
1614 IntTy::Isize => (FullInt::S(isize::min_value() as i128), FullInt::S(isize::max_value() as i128)),
1616 ty::Uint(uint_ty) => Some(match uint_ty {
1617 UintTy::U8 => (FullInt::U(u128::from(u8::min_value())), FullInt::U(u128::from(u8::max_value()))),
1619 FullInt::U(u128::from(u16::min_value())),
1620 FullInt::U(u128::from(u16::max_value())),
1623 FullInt::U(u128::from(u32::min_value())),
1624 FullInt::U(u128::from(u32::max_value())),
1627 FullInt::U(u128::from(u64::min_value())),
1628 FullInt::U(u128::from(u64::max_value())),
1630 UintTy::U128 => (FullInt::U(u128::min_value()), FullInt::U(u128::max_value())),
1631 UintTy::Usize => (FullInt::U(usize::min_value() as u128), FullInt::U(usize::max_value() as u128)),
// Constant-evaluates `expr` to a `FullInt`: signed integers are
// sign-extended via `sext`, unsigned values are taken as-is. Returns `None`
// for non-constant or non-integer expressions.
1640 fn node_as_const_fullint<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr) -> Option<FullInt> {
1641 let val = constant(cx, cx.tables, expr)?.0;
1642 if let Constant::Int(const_int) = val {
1643 match cx.tables.expr_ty(expr).sty {
1644 ty::Int(ity) => Some(FullInt::S(sext(cx.tcx, const_int, ity))),
1645 ty::Uint(_) => Some(FullInt::U(const_int)),
// Emits the INVALID_UPCAST_COMPARISONS lint for the cast expression `expr`,
// reporting whether the comparison is always `true` or always `false`.
1653 fn err_upcast_comparison(cx: &LateContext<'_, '_>, span: Span, expr: &Expr, always: bool) {
1654 if let ExprKind::Cast(ref cast_val, _) = expr.node {
1657 INVALID_UPCAST_COMPARISONS,
1660 "because of the numeric bounds on `{}` prior to casting, this expression is always {}",
1661 snippet(cx, cast_val.span, "the expression"),
1662 if always { "true" } else { "false" },
// Given the pre-cast bounds `[lb, ub]` of the (upcast) `lhs` and a constant
// `rhs`, decides whether the relation `rel` is vacuously true or false and
// reports it. `invert` indicates the operands were swapped during
// normalization, flipping the direction of the relational checks.
1668 fn upcast_comparison_bounds_err<'a, 'tcx>(
1669 cx: &LateContext<'a, 'tcx>,
1671 rel: comparisons::Rel,
1672 lhs_bounds: Option<(FullInt, FullInt)>,
1677 use crate::utils::comparisons::*;
1679 if let Some((lb, ub)) = lhs_bounds {
1680 if let Some(norm_rhs_val) = node_as_const_fullint(cx, rhs) {
// Eq/Ne: absurd iff the constant lies entirely outside the lhs bounds.
1681 if rel == Rel::Eq || rel == Rel::Ne {
1682 if norm_rhs_val < lb || norm_rhs_val > ub {
1683 err_upcast_comparison(cx, span, lhs, rel == Rel::Ne);
// Lt/Le: first branch detects always-true, second detects always-false;
// Eq/Ne were fully handled above, hence the unreachable!() arms.
1685 } else if match rel {
1686 Rel::Lt => if invert {
1691 Rel::Le => if invert {
1696 Rel::Eq | Rel::Ne => unreachable!(),
1698 err_upcast_comparison(cx, span, lhs, true)
1699 } else if match rel {
1700 Rel::Lt => if invert {
1705 Rel::Le => if invert {
1710 Rel::Eq | Rel::Ne => unreachable!(),
1712 err_upcast_comparison(cx, span, lhs, false)
1718 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for InvalidUpcastComparisons {
// Entry point: normalize the comparison, compute pre-cast bounds for each
// side, then check both orientations (lhs-vs-rhs and rhs-vs-lhs, the latter
// with `invert = true`).
1719 fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr) {
1720 if let ExprKind::Binary(ref cmp, ref lhs, ref rhs) = expr.node {
1721 let normalized = comparisons::normalize_comparison(cmp.node, lhs, rhs);
1722 let (rel, normalized_lhs, normalized_rhs) = if let Some(val) = normalized {
1728 let lhs_bounds = numeric_cast_precast_bounds(cx, normalized_lhs);
1729 let rhs_bounds = numeric_cast_precast_bounds(cx, normalized_rhs);
1731 upcast_comparison_bounds_err(cx, expr.span, rel, lhs_bounds, normalized_lhs, normalized_rhs, false);
1732 upcast_comparison_bounds_err(cx, expr.span, rel, rhs_bounds, normalized_rhs, normalized_lhs, true);
1737 /// **What it does:** Checks for public `impl` or `fn` missing generalization
1738 /// over different hashers and implicitly defaulting to the default hashing
1739 /// algorithm (SipHash).
1741 /// **Why is this bad?** `HashMap` or `HashSet` with custom hashers cannot be
1744 /// **Known problems:** Suggestions for replacing constructors can contain
1745 /// false-positives. Also applying suggestions can require modification of other
1746 /// pieces of code, possibly including external crates.
1750 /// impl<K: Hash + Eq, V> Serialize for HashMap<K, V> { ... }
1752 /// pub foo(map: &mut HashMap<i32, i32>) { .. }
1754 declare_clippy_lint! {
1755 pub IMPLICIT_HASHER,
1757 "missing generalization over different hashers"
/// Lint pass for `IMPLICIT_HASHER`: flags public items using `HashMap`/`HashSet`
/// without being generic over the `BuildHasher`.
1760 pub struct ImplicitHasher;
// Boilerplate: registers the single lint this pass can emit.
1762 impl LintPass for ImplicitHasher {
1763 fn get_lints(&self) -> LintArray {
1764 lint_array!(IMPLICIT_HASHER)
1768 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for ImplicitHasher {
1769 #[allow(clippy::cast_possible_truncation)]
// Entry point: for exported `impl` blocks and `fn` items, find
// `HashMap`/`HashSet` types lacking a hasher parameter and suggest adding a
// generic `S: BuildHasher` plus hasher-aware constructor calls.
1770 fn check_item(&mut self, cx: &LateContext<'a, 'tcx>, item: &'tcx Item) {
1771 use crate::syntax_pos::BytePos;
// Local helper: attaches the "add a type parameter S" suggestion (and, when
// constructor calls were found, the matching constructor rewrites) to the
// diagnostic being built.
1773 fn suggestion<'a, 'tcx>(
1774 cx: &LateContext<'a, 'tcx>,
1775 db: &mut DiagnosticBuilder<'_>,
1776 generics_span: Span,
1777 generics_suggestion_span: Span,
1778 target: &ImplicitHasherType<'_>,
1779 vis: ImplicitHasherConstructorVisitor<'_, '_, '_>,
1781 let generics_snip = snippet(cx, generics_span, "");
// Strip the surrounding `<`…`>` from the existing generics snippet, if any.
1783 let generics_snip = if generics_snip.is_empty() {
1786 &generics_snip[1..generics_snip.len() - 1]
1791 "consider adding a type parameter".to_string(),
1794 generics_suggestion_span,
1796 "<{}{}S: ::std::hash::BuildHasher{}>",
1798 if generics_snip.is_empty() { "" } else { ", " },
1799 if vis.suggestions.is_empty() {
1802 // request users to add `Default` bound so that generic constructors can be used
1809 format!("{}<{}, S>", target.type_name(), target.type_arguments(),),
1814 if !vis.suggestions.is_empty() {
1815 multispan_sugg(db, "...and use generic constructor".into(), vis.suggestions);
// Only exported (publicly reachable) items are linted.
1819 if !cx.access_levels.is_exported(item.id) {
// Case 1: `impl … for Target` — lint hasher-less target types of the impl.
1824 ItemKind::Impl(_, _, _, ref generics, _, ref ty, ref items) => {
1825 let mut vis = ImplicitHasherTypeVisitor::new(cx);
1828 for target in &vis.found {
// Skip targets whose span comes from a different macro context.
1829 if differing_macro_contexts(item.span, target.span()) {
// Compute where to insert the generics if the impl has a dummy generics span:
// just after the `impl` keyword.
1833 let generics_suggestion_span = generics.span.substitute_dummy({
1834 let pos = snippet_opt(cx, item.span.until(target.span()))
1835 .and_then(|snip| Some(item.span.lo() + BytePos(snip.find("impl")? as u32 + 4)));
1836 if let Some(pos) = pos {
1837 Span::new(pos, pos, item.span.data().ctxt)
// Collect default-hasher constructor calls inside the impl's items.
1843 let mut ctr_vis = ImplicitHasherConstructorVisitor::new(cx, target);
1844 for item in items.iter().map(|item| cx.tcx.hir.impl_item(item.id)) {
1845 ctr_vis.visit_impl_item(item);
1852 &format!("impl for `{}` should be generalized over different hashers", target.type_name()),
1854 suggestion(cx, db, generics.span, generics_suggestion_span, target, ctr_vis);
// Case 2: `fn` items — lint hasher-less map/set types in the parameter list.
1859 ItemKind::Fn(ref decl, .., ref generics, body_id) => {
1860 let body = cx.tcx.hir.body(body_id);
1862 for ty in &decl.inputs {
1863 let mut vis = ImplicitHasherTypeVisitor::new(cx);
1866 for target in &vis.found {
// Insertion point for generics on a fn: just before the parameter list `(`.
1867 let generics_suggestion_span = generics.span.substitute_dummy({
1868 let pos = snippet_opt(cx, item.span.until(body.arguments[0].pat.span))
1870 let i = snip.find("fn")?;
1871 Some(item.span.lo() + BytePos((i + (&snip[i..]).find('(')?) as u32))
1873 .expect("failed to create span for type parameters");
1874 Span::new(pos, pos, item.span.data().ctxt)
1877 let mut ctr_vis = ImplicitHasherConstructorVisitor::new(cx, target);
1878 ctr_vis.visit_body(body);
1885 "parameter of type `{}` should be generalized over different hashers",
1889 suggestion(cx, db, generics.span, generics_suggestion_span, target, ctr_vis);
// A `HashMap` or `HashSet` occurrence without an explicit hasher: its span,
// resolved type, and the source snippets of its type arguments.
1900 enum ImplicitHasherType<'tcx> {
1901 HashMap(Span, Ty<'tcx>, Cow<'static, str>, Cow<'static, str>),
1902 HashSet(Span, Ty<'tcx>, Cow<'static, str>),
1905 impl<'tcx> ImplicitHasherType<'tcx> {
1906 /// Checks that `ty` is a target type without a BuildHasher.
1907 fn new<'a>(cx: &LateContext<'a, 'tcx>, hir_ty: &hir::Ty) -> Option<Self> {
1908 if let TyKind::Path(QPath::Resolved(None, ref path)) = hir_ty.node {
// Collect only the *type* arguments of the last path segment; a hasher-less
// HashMap has exactly 2, a hasher-less HashSet exactly 1.
1909 let params: Vec<_> = path.segments.last().as_ref()?.args.as_ref()?
1910 .args.iter().filter_map(|arg| match arg {
1911 GenericArg::Type(ty) => Some(ty),
1912 GenericArg::Lifetime(_) => None,
1914 let params_len = params.len();
1916 let ty = hir_ty_to_ty(cx.tcx, hir_ty);
1918 if match_path(path, &paths::HASHMAP) && params_len == 2 {
1919 Some(ImplicitHasherType::HashMap(
1922 snippet(cx, params[0].span, "K"),
1923 snippet(cx, params[1].span, "V"),
1925 } else if match_path(path, &paths::HASHSET) && params_len == 1 {
1926 Some(ImplicitHasherType::HashSet(hir_ty.span, ty, snippet(cx, params[0].span, "T")))
// Display name of the collection, used in lint messages and suggestions.
1935 fn type_name(&self) -> &'static str {
1937 ImplicitHasherType::HashMap(..) => "HashMap",
1938 ImplicitHasherType::HashSet(..) => "HashSet",
// The stored type-argument snippets, rendered as they appear in source.
1942 fn type_arguments(&self) -> String {
1944 ImplicitHasherType::HashMap(.., ref k, ref v) => format!("{}, {}", k, v),
1945 ImplicitHasherType::HashSet(.., ref t) => format!("{}", t),
// Accessor for the resolved middle-end type.
1949 fn ty(&self) -> Ty<'tcx> {
1951 ImplicitHasherType::HashMap(_, ty, ..) | ImplicitHasherType::HashSet(_, ty, ..) => ty,
// Accessor for the source span of the occurrence.
1955 fn span(&self) -> Span {
1957 ImplicitHasherType::HashMap(span, ..) | ImplicitHasherType::HashSet(span, ..) => span,
// Type visitor that records every hasher-less `HashMap`/`HashSet` it sees.
1962 struct ImplicitHasherTypeVisitor<'a, 'tcx: 'a> {
1963 cx: &'a LateContext<'a, 'tcx>,
1964 found: Vec<ImplicitHasherType<'tcx>>,
1967 impl<'a, 'tcx: 'a> ImplicitHasherTypeVisitor<'a, 'tcx> {
1968 fn new(cx: &'a LateContext<'a, 'tcx>) -> Self {
1969 Self { cx, found: vec![] }
1973 impl<'a, 'tcx: 'a> Visitor<'tcx> for ImplicitHasherTypeVisitor<'a, 'tcx> {
// Record every type node that `ImplicitHasherType::new` classifies as a
// hasher-less target.
1974 fn visit_ty(&mut self, t: &'tcx hir::Ty) {
1975 if let Some(target) = ImplicitHasherType::new(self.cx, t) {
1976 self.found.push(target);
// No nested-body traversal is needed for plain type syntax.
1982 fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
1983 NestedVisitorMap::None
1987 /// Looks for default-hasher-dependent constructors like `HashMap::new`.
// `body` holds the typeck tables of the body currently being visited;
// `suggestions` maps each constructor-call span to its hasher-aware rewrite.
1988 struct ImplicitHasherConstructorVisitor<'a, 'b, 'tcx: 'a + 'b> {
1989 cx: &'a LateContext<'a, 'tcx>,
1990 body: &'a TypeckTables<'tcx>,
1991 target: &'b ImplicitHasherType<'tcx>,
1992 suggestions: BTreeMap<Span, String>,
1995 impl<'a, 'b, 'tcx: 'a + 'b> ImplicitHasherConstructorVisitor<'a, 'b, 'tcx> {
// Constructor: starts with an empty suggestion set.
1996 fn new(cx: &'a LateContext<'a, 'tcx>, target: &'b ImplicitHasherType<'tcx>) -> Self {
2001 suggestions: BTreeMap::new(),
2006 impl<'a, 'b, 'tcx: 'a + 'b> Visitor<'tcx> for ImplicitHasherConstructorVisitor<'a, 'b, 'tcx> {
2007 fn visit_body(&mut self, body: &'tcx Body) {
2008 self.body = self.cx.tcx.body_tables(body.id());
2009 walk_body(self, body);
2012 fn visit_expr(&mut self, e: &'tcx Expr) {
2014 if let ExprKind::Call(ref fun, ref args) = e.node;
2015 if let ExprKind::Path(QPath::TypeRelative(ref ty, ref method)) = fun.node;
2016 if let TyKind::Path(QPath::Resolved(None, ref ty_path)) = ty.node;
2018 if !same_tys(self.cx, self.target.ty(), self.body.expr_ty(e)) {
2022 if match_path(ty_path, &paths::HASHMAP) {
2023 if method.ident.name == "new" {
2025 .insert(e.span, "HashMap::default()".to_string());
2026 } else if method.ident.name == "with_capacity" {
2027 self.suggestions.insert(
2030 "HashMap::with_capacity_and_hasher({}, Default::default())",
2031 snippet(self.cx, args[0].span, "capacity"),
2035 } else if match_path(ty_path, &paths::HASHSET) {
2036 if method.ident.name == "new" {
2038 .insert(e.span, "HashSet::default()".to_string());
2039 } else if method.ident.name == "with_capacity" {
2040 self.suggestions.insert(
2043 "HashSet::with_capacity_and_hasher({}, Default::default())",
2044 snippet(self.cx, args[0].span, "capacity"),
2055 fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
2056 NestedVisitorMap::OnlyBodies(&self.cx.tcx.hir)