1 #![allow(rustc::default_hash_types)]
4 use std::cmp::Ordering;
5 use std::collections::BTreeMap;
7 use if_chain::if_chain;
8 use rustc_ast::ast::{FloatTy, IntTy, LitFloatType, LitIntType, LitKind, UintTy};
9 use rustc_errors::{Applicability, DiagnosticBuilder};
11 use rustc_hir::intravisit::{walk_body, walk_expr, walk_ty, FnKind, NestedVisitorMap, Visitor};
13 BinOpKind, Body, Expr, ExprKind, FnDecl, FnRetTy, FnSig, GenericArg, GenericParamKind, HirId, ImplItem,
14 ImplItemKind, Item, ItemKind, Lifetime, Local, MatchSource, MutTy, Mutability, QPath, Stmt, StmtKind, TraitFn,
15 TraitItem, TraitItemKind, TyKind, UnOp,
17 use rustc_lint::{LateContext, LateLintPass, LintContext};
18 use rustc_middle::hir::map::Map;
19 use rustc_middle::lint::in_external_macro;
20 use rustc_middle::ty::{self, InferTy, Ty, TyCtxt, TypeckTables};
21 use rustc_session::{declare_lint_pass, declare_tool_lint, impl_lint_pass};
22 use rustc_span::hygiene::{ExpnKind, MacroKind};
23 use rustc_span::source_map::Span;
24 use rustc_span::symbol::{sym, Symbol};
25 use rustc_target::abi::LayoutOf;
26 use rustc_target::spec::abi::Abi;
27 use rustc_typeck::hir_ty_to_ty;
29 use crate::consts::{constant, Constant};
30 use crate::utils::paths;
32 clip, comparisons, differing_macro_contexts, higher, in_constant, int_bits, last_path_segment, match_def_path,
33 match_path, method_chain_args, multispan_sugg, numeric_literal::NumericLiteral, qpath_res, same_tys, sext, snippet,
34 snippet_opt, snippet_with_applicability, snippet_with_macro_callsite, span_lint, span_lint_and_help,
35 span_lint_and_sugg, span_lint_and_then, unsext,
// --- Lint declarations for the `Types` pass ---------------------------------
// NOTE(review): this excerpt is a sampled dump — the leading numbers are the
// original file's line numbers and they jump, so interior lines of each macro
// (notably most `pub LINT_NAME,` / category lines) are elided. Do not assume
// the text below is contiguous.
// BOX_VEC: `Box<Vec<T>>` is a useless extra indirection — `Vec` is heap-backed.
38 declare_clippy_lint! {
39 /// **What it does:** Checks for use of `Box<Vec<_>>` anywhere in the code.
41 /// **Why is this bad?** `Vec` already keeps its contents in a separate area on
42 /// the heap. So if you `Box` it, you just add another level of indirection
43 /// without any benefit whatsoever.
45 /// **Known problems:** None.
50 /// values: Box<Vec<Foo>>,
63 "usage of `Box<Vec<T>>`, vector elements are already on the heap"
// VEC_BOX: boxing small sized elements of a `Vec` adds a per-element allocation;
// the size cutoff is `Types::vec_box_size_threshold` (applied in `check_ty`).
66 declare_clippy_lint! {
67 /// **What it does:** Checks for use of `Vec<Box<T>>` where T: Sized anywhere in the code.
69 /// **Why is this bad?** `Vec` already keeps its contents in a separate area on
70 /// the heap. So if you `Box` its contents, you just add another level of indirection.
72 /// **Known problems:** Vec<Box<T: Sized>> makes sense if T is a large type (see #3530,
78 /// values: Vec<Box<i32>>,
91 "usage of `Vec<Box<T>>` where T: Sized, vector elements are already on the heap"
// OPTION_OPTION: nested `Option<Option<T>>` — a three-state value is usually
// clearer as a purpose-built enum.
94 declare_clippy_lint! {
95 /// **What it does:** Checks for use of `Option<Option<_>>` in function signatures and type
98 /// **Why is this bad?** `Option<_>` represents an optional value. `Option<Option<_>>`
99 /// represents an optional optional value which is logically the same thing as an optional
100 /// value but has an unneeded extra level of wrapping.
102 /// If you have a case where `Some(Some(_))`, `Some(None)` and `None` are distinct cases,
103 /// consider a custom `enum` instead, with clear names for each case.
105 /// **Known problems:** None.
109 /// fn get_data() -> Option<Option<u32>> {
117 /// pub enum Contents {
118 /// Data(Vec<u8>), // Was Some(Some(Vec<u8>))
119 /// NotYetFetched, // Was Some(None)
120 /// None, // Was None
123 /// fn get_data() -> Contents {
129 "usage of `Option<Option<T>>`"
// LINKEDLIST: `std::collections::LinkedList` is almost always slower than
// `Vec`/`VecDeque` due to pointer chasing and poor cache locality.
132 declare_clippy_lint! {
133 /// **What it does:** Checks for usage of any `LinkedList`, suggesting to use a
134 /// `Vec` or a `VecDeque` (formerly called `RingBuf`).
136 /// **Why is this bad?** Gankro says:
138 /// > The TL;DR of `LinkedList` is that it's built on a massive amount of
139 /// pointers and indirection.
140 /// > It wastes memory, it has terrible cache locality, and is all-around slow.
142 /// > "only" amortized for push/pop, should be faster in the general case for
143 /// almost every possible
144 /// > workload, and isn't even amortized at all if you can predict the capacity
147 /// > `LinkedList`s are only really good if you're doing a lot of merging or
148 /// splitting of lists.
149 /// > This is because they can just mangle some pointers instead of actually
150 /// copying the data. Even
151 /// > if you're doing a lot of insertion in the middle of the list, `RingBuf`
152 /// can still be better
153 /// > because of how expensive it is to seek to the middle of a `LinkedList`.
155 /// **Known problems:** False positives – the instances where using a
156 /// `LinkedList` makes sense are few and far between, but they can still happen.
160 /// # use std::collections::LinkedList;
161 /// let x: LinkedList<usize> = LinkedList::new();
165 "usage of LinkedList, usually a vector is faster, or a more specialized data structure like a `VecDeque`"
// BORROWED_BOX: `&Box<T>` in a signature should be `&T` (auto-deref makes the
// box layer pointless for the callee).
168 declare_clippy_lint! {
169 /// **What it does:** Checks for use of `&Box<T>` anywhere in the code.
171 /// **Why is this bad?** Any `&Box<T>` can also be a `&T`, which is more
174 /// **Known problems:** None.
178 /// fn foo(bar: &Box<T>) { ... }
184 /// fn foo(bar: &T) { ... }
188 "a borrow of a boxed type"
// REDUNDANT_ALLOCATION: stacked smart pointers (`Rc<Rc<T>>`, `Rc<Box<T>>`,
// `Box<&T>`, `Rc<&T>`) — one indirection layer is dead weight.
191 declare_clippy_lint! {
192 /// **What it does:** Checks for use of redundant allocations anywhere in the code.
194 /// **Why is this bad?** Expressions such as `Rc<&T>`, `Rc<Rc<T>>`, `Rc<Box<T>>`, `Box<&T>`
195 /// add an unnecessary level of indirection.
197 /// **Known problems:** None.
201 /// # use std::rc::Rc;
202 /// fn foo(bar: Rc<&usize>) {}
208 /// fn foo(bar: &usize) {}
210 pub REDUNDANT_ALLOCATION,
212 "redundant allocation"
// Field of `pub struct Types` (the struct header on the original line 215 is
// elided from this excerpt). Size cutoff in BYTES for the VEC_BOX lint:
// `Vec<Box<T>>` is only linted when `size_of::<T>() <= vec_box_size_threshold`.
// Presumably driven by the `vec-box-size-threshold` clippy.toml option — TODO confirm.
216 vec_box_size_threshold: u64,
// Registers all six type lints under the single `Types` pass.
219 impl_lint_pass!(Types => [BOX_VEC, VEC_BOX, OPTION_OPTION, LINKEDLIST, BORROWED_BOX, REDUNDANT_ALLOCATION]);
// LateLintPass hooks for `Types`: every syntactic type position (fn signatures,
// struct fields, trait items, `let` bindings) is funneled into `check_ty`.
// NOTE(review): interior lines are elided here (e.g. the `check_fn` signature
// around original line 223 and several closing braces).
221 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for Types {
224 cx: &LateContext<'_, '_>,
// Trait-impl methods must match the trait's signature, so linting them would
// be unactionable noise — bail out early.
231 // Skip trait implementations; see issue #605.
232 if let Some(hir::Node::Item(item)) = cx.tcx.hir().find(cx.tcx.hir().get_parent_item(id)) {
233 if let ItemKind::Impl { of_trait: Some(_), .. } = item.kind {
238 self.check_fn_decl(cx, decl);
241 fn check_struct_field(&mut self, cx: &LateContext<'_, '_>, field: &hir::StructField<'_>) {
242 self.check_ty(cx, &field.ty, false);
245 fn check_trait_item(&mut self, cx: &LateContext<'_, '_>, item: &TraitItem<'_>) {
// Associated consts/types and trait fn signatures all contain type positions.
247 TraitItemKind::Const(ref ty, _) | TraitItemKind::Type(_, Some(ref ty)) => self.check_ty(cx, ty, false),
248 TraitItemKind::Fn(ref sig, _) => self.check_fn_decl(cx, &sig.decl),
253 fn check_local(&mut self, cx: &LateContext<'_, '_>, local: &Local<'_>) {
254 if let Some(ref ty) = local.ty {
// `is_local = true`: per `check_ty`'s docs, local-binding types are only
// checked for BORROWED_BOX, not the container lints.
255 self.check_ty(cx, ty, true);
260 /// Checks if `qpath` has last segment with type parameter matching `path`
// Returns the span of the matching inner type argument (e.g. the `Vec<u8>` in
// `Box<Vec<u8>>`) so callers can splice it into a suggestion.
// NOTE(review): the `if_chain!` opening line (original 263) and the fallback
// `None` return are elided from this excerpt.
261 fn match_type_parameter(cx: &LateContext<'_, '_>, qpath: &QPath<'_>, path: &[&str]) -> Option<Span> {
262 let last = last_path_segment(qpath);
264 if let Some(ref params) = last.args;
// `parenthesized` args are `Fn(A) -> B` sugar, not angle-bracket generics.
265 if !params.parenthesized;
// Take the first *type* generic argument, skipping lifetimes/consts.
266 if let Some(ty) = params.args.iter().find_map(|arg| match arg {
267 GenericArg::Type(ty) => Some(ty),
270 if let TyKind::Path(ref qpath) = ty.kind;
271 if let Some(did) = qpath_res(cx, qpath, ty.hir_id).opt_def_id();
272 if match_def_path(cx, did, path);
274 return Some(ty.span);
// Like `match_type_parameter`, but matches a *reference* type argument
// (`&T` inside e.g. `Box<&T>` / `Rc<&T>`), returning its span.
// `_cx` is unused in the visible body — kept for signature symmetry with
// `match_type_parameter`, presumably.
280 fn match_borrows_parameter(_cx: &LateContext<'_, '_>, qpath: &QPath<'_>) -> Option<Span> {
281 let last = last_path_segment(qpath);
283 if let Some(ref params) = last.args;
284 if !params.parenthesized;
285 if let Some(ty) = params.args.iter().find_map(|arg| match arg {
286 GenericArg::Type(ty) => Some(ty),
// `Rptr` = a `&`/`&mut` reference type in HIR.
289 if let TyKind::Rptr(..) = ty.kind;
291 return Some(ty.span);
// Inherent impl of `Types` (the `impl Types {` header is elided from this
// excerpt). `new` wires in the configurable VEC_BOX size threshold (bytes).
298 pub fn new(vec_box_size_threshold: u64) -> Self {
299 Self { vec_box_size_threshold }
// Check every parameter type and the return type of a function declaration.
// `is_local = false`: signature types get the full set of container lints.
302 fn check_fn_decl(&mut self, cx: &LateContext<'_, '_>, decl: &FnDecl<'_>) {
303 for input in decl.inputs {
304 self.check_ty(cx, input, false);
// `FnRetTy::DefaultReturn` (implicit `()`) has no written type to check.
307 if let FnRetTy::Return(ref ty) = decl.output {
308 self.check_ty(cx, ty, false);
312 /// Recursively check for `TypePass` lints in the given type. Stop at the first
315 /// The parameter `is_local` distinguishes the context of the type; types from
316 /// local bindings should only be checked for the `BORROWED_BOX` lint.
// NOTE(review): many interior lines (the `match hir_ty.kind` opener, most
// `span_lint*` call heads, closing braces) are elided from this excerpt —
// read the structure, not the exact nesting.
317 #[allow(clippy::too_many_lines)]
318 #[allow(clippy::cognitive_complexity)]
319 fn check_ty(&mut self, cx: &LateContext<'_, '_>, hir_ty: &hir::Ty<'_>, is_local: bool) {
// Never lint macro-generated types — the user can't act on them.
320 if hir_ty.span.from_expansion() {
// Container lints only apply outside local bindings (`!is_local`).
324 TyKind::Path(ref qpath) if !is_local => {
325 let hir_id = hir_ty.hir_id;
326 let res = qpath_res(cx, qpath, hir_id);
327 if let Some(def_id) = res.opt_def_id() {
// --- Box<...> ---------------------------------------------------------
328 if Some(def_id) == cx.tcx.lang_items().owned_box() {
// Box<&T>: REDUNDANT_ALLOCATION, suggest just the `&T`.
329 if let Some(span) = match_borrows_parameter(cx, qpath) {
332 REDUNDANT_ALLOCATION,
334 "usage of `Box<&T>`",
336 snippet(cx, span, "..").to_string(),
337 Applicability::MachineApplicable,
// Each arm returns after linting so nested types aren't double-reported.
339 return; // don't recurse into the type
// Box<Vec<T>>: BOX_VEC.
341 if match_type_parameter(cx, qpath, &paths::VEC).is_some() {
346 "you seem to be trying to use `Box<Vec<T>>`. Consider using just `Vec<T>`",
347 "`Vec<T>` is already on the heap, `Box<Vec<T>>` makes an extra allocation.",
349 return; // don't recurse into the type
// --- Rc<...> ----------------------------------------------------------
351 } else if Some(def_id) == cx.tcx.lang_items().rc() {
// Rc<Rc<T>>: REDUNDANT_ALLOCATION.
352 if let Some(span) = match_type_parameter(cx, qpath, &paths::RC) {
355 REDUNDANT_ALLOCATION,
357 "usage of `Rc<Rc<T>>`",
359 snippet(cx, span, "..").to_string(),
360 Applicability::MachineApplicable,
362 return; // don't recurse into the type
// Rc<Box<T>>: REDUNDANT_ALLOCATION.
364 if let Some(span) = match_type_parameter(cx, qpath, &paths::BOX) {
367 REDUNDANT_ALLOCATION,
369 "usage of `Rc<Box<T>>`",
371 snippet(cx, span, "..").to_string(),
372 Applicability::MachineApplicable,
374 return; // don't recurse into the type
// Rc<&T>: REDUNDANT_ALLOCATION (message head elided here).
376 if let Some(span) = match_borrows_parameter(cx, qpath) {
379 REDUNDANT_ALLOCATION,
383 snippet(cx, span, "..").to_string(),
384 Applicability::MachineApplicable,
386 return; // don't recurse into the type
// --- Vec<Box<T>> (VEC_BOX) -------------------------------------------
388 } else if cx.tcx.is_diagnostic_item(Symbol::intern("vec_type"), def_id) {
390 // Get the _ part of Vec<_>
391 if let Some(ref last) = last_path_segment(qpath).args;
392 if let Some(ty) = last.args.iter().find_map(|arg| match arg {
393 GenericArg::Type(ty) => Some(ty),
396 // ty is now _ at this point
397 if let TyKind::Path(ref ty_qpath) = ty.kind;
398 let res = qpath_res(cx, ty_qpath, ty.hir_id);
399 if let Some(def_id) = res.opt_def_id();
400 if Some(def_id) == cx.tcx.lang_items().owned_box();
401 // At this point, we know ty is Box<T>, now get T
402 if let Some(ref last) = last_path_segment(ty_qpath).args;
403 if let Some(boxed_ty) = last.args.iter().find_map(|arg| match arg {
404 GenericArg::Type(ty) => Some(ty),
// Lower HIR type to a semantic `ty::Ty` so we can ask for its layout.
407 let ty_ty = hir_ty_to_ty(cx.tcx, boxed_ty);
// Only lint sized `T` whose size is at or below the configured threshold;
// boxing a *large* T inside a Vec can be deliberate (see #3530 note above).
408 if ty_ty.is_sized(cx.tcx.at(ty.span), cx.param_env);
409 if let Ok(ty_ty_size) = cx.layout_of(ty_ty).map(|l| l.size.bytes());
410 if ty_ty_size <= self.vec_box_size_threshold;
416 "`Vec<T>` is already on the heap, the boxing is unnecessary.",
418 format!("Vec<{}>", ty_ty),
419 Applicability::MachineApplicable,
421 return; // don't recurse into the type
// --- Option<Option<T>> (OPTION_OPTION) -------------------------------
424 } else if match_def_path(cx, def_id, &paths::OPTION) {
425 if match_type_parameter(cx, qpath, &paths::OPTION).is_some() {
430 "consider using `Option<T>` instead of `Option<Option<T>>` or a custom \
431 enum if you need to distinguish all 3 cases",
433 return; // don't recurse into the type
// --- LinkedList (LINKEDLIST) ------------------------------------------
435 } else if match_def_path(cx, def_id, &paths::LINKED_LIST) {
440 "I see you're using a LinkedList! Perhaps you meant some other data structure?",
441 "a `VecDeque` might work",
443 return; // don't recurse into the type
// No container lint fired: recurse into the path's generic arguments so
// nested offenders (e.g. `Foo<Box<Vec<T>>>`) are still found.
447 QPath::Resolved(Some(ref ty), ref p) => {
448 self.check_ty(cx, ty, is_local);
449 for ty in p.segments.iter().flat_map(|seg| {
452 .map_or_else(|| [].iter(), |params| params.args.iter())
453 .filter_map(|arg| match arg {
454 GenericArg::Type(ty) => Some(ty),
458 self.check_ty(cx, ty, is_local);
461 QPath::Resolved(None, ref p) => {
462 for ty in p.segments.iter().flat_map(|seg| {
465 .map_or_else(|| [].iter(), |params| params.args.iter())
466 .filter_map(|arg| match arg {
467 GenericArg::Type(ty) => Some(ty),
471 self.check_ty(cx, ty, is_local);
474 QPath::TypeRelative(ref ty, ref seg) => {
475 self.check_ty(cx, ty, is_local);
476 if let Some(ref params) = seg.args {
477 for ty in params.args.iter().filter_map(|arg| match arg {
478 GenericArg::Type(ty) => Some(ty),
481 self.check_ty(cx, ty, is_local);
// References are handled separately (BORROWED_BOX lives there).
487 TyKind::Rptr(ref lt, ref mut_ty) => self.check_ty_rptr(cx, hir_ty, is_local, lt, mut_ty),
// For composite types, recurse into the element/pointee types.
489 TyKind::Slice(ref ty) | TyKind::Array(ref ty, _) | TyKind::Ptr(MutTy { ref ty, .. }) => {
490 self.check_ty(cx, ty, is_local)
492 TyKind::Tup(tys) => {
494 self.check_ty(cx, ty, is_local);
// Continuation of `check_ty_rptr`'s parameter list — the `fn check_ty_rptr(`
// line and the `lt: &Lifetime, mut_ty: &MutTy` parameters are elided from this
// excerpt. Handles `&T` / `&mut T` types: fires BORROWED_BOX on `&Box<T>`,
// otherwise recurses into the pointee.
503 cx: &LateContext<'_, '_>,
504 hir_ty: &hir::Ty<'_>,
509 match mut_ty.ty.kind {
510 TyKind::Path(ref qpath) => {
511 let hir_id = mut_ty.ty.hir_id;
512 let def = qpath_res(cx, qpath, hir_id);
514 if let Some(def_id) = def.opt_def_id();
// Pointee must be the `Box` lang item with exactly one path segment and
// non-parenthesized generic args — i.e. a literal `Box<...>` spelling.
515 if Some(def_id) == cx.tcx.lang_items().owned_box();
516 if let QPath::Resolved(None, ref path) = *qpath;
517 if let [ref bx] = *path.segments;
518 if let Some(ref params) = bx.args;
519 if !params.parenthesized;
520 if let Some(inner) = params.args.iter().find_map(|arg| match arg {
521 GenericArg::Type(ty) => Some(ty),
525 if is_any_trait(inner) {
// `&Box<dyn Any>` -> `&dyn Any` would change downcasting behavior.
526 // Ignore `Box<Any>` types; see issue #1884 for details.
// Preserve an explicit lifetime and `mut` in the suggestion text.
530 let ltopt = if lt.is_elided() {
533 format!("{} ", lt.name.ident().as_str())
535 let mutopt = if mut_ty.mutbl == Mutability::Mut {
540 let mut applicability = Applicability::MachineApplicable;
545 "you seem to be trying to use `&Box<T>`. Consider using just `&T`",
551 &snippet_with_applicability(cx, inner.span, "..", &mut applicability)
// NOTE(review): the final applicability passed is `Unspecified` even though a
// `MachineApplicable` local was prepared above — the elided lines presumably
// explain/justify this (replacing `Box<T>` in callers isn't always automatic).
553 Applicability::Unspecified,
555 return; // don't recurse into the type
// Not a `&Box<T>`: keep walking the pointee.
558 self.check_ty(cx, &mut_ty.ty, is_local);
560 _ => self.check_ty(cx, &mut_ty.ty, is_local),
565 // Returns true if given type is `Any` trait.
// Used by `check_ty_rptr` to exempt `&Box<dyn Any>` from BORROWED_BOX.
566 fn is_any_trait(t: &hir::Ty<'_>) -> bool {
568 if let TyKind::TraitObject(ref traits, _) = t.kind;
569 if !traits.is_empty();
570 // Only Send/Sync can be used as additional traits, so it is enough to
571 // check only the first trait.
572 if match_path(&traits[0].trait_ref.path, &paths::ANY_TRAIT);
// LET_UNIT_VALUE: a `let x = <unit-valued expr>;` binding is pointless —
// suggest dropping the `let`. (The `pub LET_UNIT_VALUE,` line is elided.)
581 declare_clippy_lint! {
582 /// **What it does:** Checks for binding a unit value.
584 /// **Why is this bad?** A unit value cannot usefully be used anywhere. So
585 /// binding one is kind of pointless.
587 /// **Known problems:** None.
597 "creating a `let` binding to a value of unit type, which usually can't be used afterwards"
600 declare_lint_pass!(LetUnitValue => [LET_UNIT_VALUE]);
// Fires LET_UNIT_VALUE on `let`-statements whose pattern has unit type,
// suggesting `expr;` instead of `let _pat = expr;`.
602 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for LetUnitValue {
603 fn check_stmt(&mut self, cx: &LateContext<'a, 'tcx>, stmt: &'tcx Stmt<'_>) {
604 if let StmtKind::Local(ref local) = stmt.kind {
605 if is_unit(cx.tables.pat_ty(&local.pat)) {
// Skip macro-expanded code — the binding isn't user-written.
606 if in_external_macro(cx.sess(), stmt.span) || local.pat.span.from_expansion() {
// `for` loops desugar to a unit-typed `let`; don't lint the desugaring.
609 if higher::is_from_for_desugar(local) {
612 span_lint_and_then(cx, LET_UNIT_VALUE, stmt.span, "this let-binding has unit value", |db| {
613 if let Some(expr) = &local.init {
// Use the macro callsite snippet so the suggestion shows `vec![..]`-style
// source rather than its expansion.
614 let snip = snippet_with_macro_callsite(cx, expr.span, "()");
617 "omit the `let` binding",
618 format!("{};", snip),
619 Applicability::MachineApplicable, // snippet
// UNIT_CMP: comparing unit values (`==`, `<`, assert_eq! on blocks ending in
// `;`) is constant-valued and usually signals accidental semicolons.
// (The `pub UNIT_CMP,` line is elided from this excerpt.)
628 declare_clippy_lint! {
629 /// **What it does:** Checks for comparisons to unit. This includes all binary
630 /// comparisons (like `==` and `<`) and asserts.
632 /// **Why is this bad?** Unit is always equal to itself, and thus is just a
633 /// clumsily written constant. Mostly this happens when someone accidentally
634 /// adds semicolons at the end of the operands.
636 /// **Known problems:** None.
667 /// assert_eq!({ foo(); }, { bar(); });
669 /// will always succeed
672 "comparing unit values"
675 declare_lint_pass!(UnitCmp => [UNIT_CMP]);
// UNIT_CMP has two paths: inside `assert_eq!`-family macro expansions
// (reported as "always succeed/fail") and plain binary comparisons
// (reported as "always true/false").
677 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnitCmp {
678 fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr<'tcx>) {
679 if expr.span.from_expansion() {
680 if let Some(callee) = expr.span.source_callee() {
// Only bang-macros; identify the assert flavor by macro name.
681 if let ExpnKind::Macro(MacroKind::Bang, symbol) = callee.kind {
682 if let ExprKind::Binary(ref cmp, ref left, _) = expr.kind {
// Unit has a single value, so checking only the LHS type suffices.
684 if op.is_comparison() && is_unit(cx.tables.expr_ty(left)) {
685 let result = match &*symbol.as_str() {
686 "assert_eq" | "debug_assert_eq" => "succeed",
687 "assert_ne" | "debug_assert_ne" => "fail",
695 "`{}` of unit values detected. This will always {}",
// Non-macro path: a hand-written comparison of unit values.
706 if let ExprKind::Binary(ref cmp, ref left, _) = expr.kind {
708 if op.is_comparison() && is_unit(cx.tables.expr_ty(left)) {
// `() == ()`, `() <= ()`, `() >= ()` are true; `!=`, `<`, `>` are false.
709 let result = match op {
710 BinOpKind::Eq | BinOpKind::Le | BinOpKind::Ge => "true",
718 "{}-comparison of unit values detected. This will always be {}",
// UNIT_ARG: passing a non-literal unit value (e.g. a block ending in `;`) as a
// function argument — usually an accidental semicolon.
// (The `pub UNIT_ARG,` line is elided from this excerpt.)
728 declare_clippy_lint! {
729 /// **What it does:** Checks for passing a unit value as an argument to a function without using a
730 /// unit literal (`()`).
732 /// **Why is this bad?** This is likely the result of an accidental semicolon.
734 /// **Known problems:** None.
745 "passing unit to a function"
748 declare_lint_pass!(UnitArg => [UNIT_ARG]);
// Fires UNIT_ARG on call/method-call arguments of unit type that are not the
// literal `()`, excluding `?`-desugaring and `try!`-style matches.
750 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnitArg {
751 fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr<'_>) {
752 if expr.span.from_expansion() {
756 // apparently stuff in the desugaring of `?` can trigger this
757 // so check for that here
758 // only the calls to `Try::from_error` are marked as desugared,
759 // so we need to check both the current Expr and its parent.
760 if is_questionmark_desugar_marked_call(expr) {
764 let map = &cx.tcx.hir();
765 let opt_parent_node = map.find(map.get_parent_node(expr.hir_id));
766 if let Some(hir::Node::Expr(parent_expr)) = opt_parent_node;
767 if is_questionmark_desugar_marked_call(parent_expr);
// Both plain calls and method calls are checked; elided lines presumably
// iterate `args` binding each to `arg` — TODO confirm against full source.
774 ExprKind::Call(_, args) | ExprKind::MethodCall(_, _, args) => {
776 if is_unit(cx.tables.expr_ty(arg)) && !is_unit_literal(arg) {
// A `match` produced by `?` desugaring is unit-typed but not user-written.
777 if let ExprKind::Match(.., match_source) = &arg.kind {
778 if *match_source == MatchSource::TryDesugar {
787 "passing a unit value to a function",
788 "if you intended to pass a unit value, use a unit literal instead",
790 Applicability::MachineApplicable,
// True if `expr` is a call whose callee span carries the `?`-operator
// desugaring mark (i.e. the `Try::from_error` call generated by `?`).
800 fn is_questionmark_desugar_marked_call(expr: &Expr<'_>) -> bool {
801 use rustc_span::hygiene::DesugaringKind;
802 if let ExprKind::Call(ref callee, _) = expr.kind {
803 callee.span.is_desugaring(DesugaringKind::QuestionMark)
// True if the semantic type is the unit type `()` (an empty tuple).
// (The surrounding `match` arms are elided from this excerpt.)
809 fn is_unit(ty: Ty<'_>) -> bool {
811 ty::Tuple(slice) if slice.is_empty() => true,
// True if the expression is syntactically the literal `()` (an empty tuple
// expression). Used so UNIT_ARG does not fire on intentional `f(())`.
816 fn is_unit_literal(expr: &Expr<'_>) -> bool {
818 ExprKind::Tup(ref slice) if slice.is_empty() => true,
// --- Lint declarations for the `Casts` pass ---------------------------------
// NOTE(review): interior lines (most `pub LINT_NAME,` / category lines) are
// elided from this excerpt.
// CAST_PRECISION_LOSS: int -> float casts that can't be represented exactly.
823 declare_clippy_lint! {
824 /// **What it does:** Checks for casts from any numerical to a float type where
825 /// the receiving type cannot store all values from the original type without
826 /// rounding errors. This possible rounding is to be expected, so this lint is
827 /// `Allow` by default.
829 /// Basically, this warns on casting any integer with 32 or more bits to `f32`
830 /// or any 64-bit integer to `f64`.
832 /// **Why is this bad?** It's not bad at all. But in some applications it can be
833 /// helpful to know where precision loss can take place. This lint can help find
834 /// those places in the code.
836 /// **Known problems:** None.
840 /// let x = std::u64::MAX;
843 pub CAST_PRECISION_LOSS,
845 "casts that cause loss of precision, e.g., `x as f32` where `x: u64`"
// CAST_SIGN_LOSS: signed -> unsigned casts wrap negatives to large values.
848 declare_clippy_lint! {
849 /// **What it does:** Checks for casts from a signed to an unsigned numerical
850 /// type. In this case, negative values wrap around to large positive values,
851 /// which can be quite surprising in practice. However, as the cast works as
852 /// defined, this lint is `Allow` by default.
854 /// **Why is this bad?** Possibly surprising results. You can activate this lint
855 /// as a one-time check to see where numerical wrapping can arise.
857 /// **Known problems:** None.
862 /// y as u128; // will return 18446744073709551615
866 "casts from signed types to unsigned types, e.g., `x as u32` where `x: i32`"
// CAST_POSSIBLE_TRUNCATION: narrowing casts that can drop high bits.
869 declare_clippy_lint! {
870 /// **What it does:** Checks for casts between numerical types that may
871 /// truncate large values. This is expected behavior, so the cast is `Allow` by
874 /// **Why is this bad?** In some problem domains, it is good practice to avoid
875 /// truncation. This lint can be activated to help assess where additional
876 /// checks could be beneficial.
878 /// **Known problems:** None.
882 /// fn as_u8(x: u64) -> u8 {
886 pub CAST_POSSIBLE_TRUNCATION,
888 "casts that may cause truncation of the value, e.g., `x as u8` where `x: u32`, or `x as i32` where `x: f32`"
// CAST_POSSIBLE_WRAP: same-width unsigned -> signed casts can go negative.
891 declare_clippy_lint! {
892 /// **What it does:** Checks for casts from an unsigned type to a signed type of
893 /// the same size. Performing such a cast is a 'no-op' for the compiler,
894 /// i.e., nothing is changed at the bit level, and the binary representation of
895 /// the value is reinterpreted. This can cause wrapping if the value is too big
896 /// for the target signed type. However, the cast works as defined, so this lint
897 /// is `Allow` by default.
899 /// **Why is this bad?** While such a cast is not bad in itself, the results can
900 /// be surprising when this is not the intended behavior, as demonstrated by the
903 /// **Known problems:** None.
907 /// std::u32::MAX as i32; // will yield a value of `-1`
909 pub CAST_POSSIBLE_WRAP,
911 "casts that may cause wrapping around the value, e.g., `x as i32` where `x: u32` and `x > i32::MAX`"
// CAST_LOSSLESS: widening `as` casts better written with `From`/`Into`.
914 declare_clippy_lint! {
915 /// **What it does:** Checks for casts between numerical types that may
916 /// be replaced by safe conversion functions.
918 /// **Why is this bad?** Rust's `as` keyword will perform many kinds of
919 /// conversions, including silently lossy conversions. Conversion functions such
920 /// as `i32::from` will only perform lossless conversions. Using the conversion
921 /// functions prevents conversions from turning into silent lossy conversions if
922 /// the types of the input expressions ever change, and make it easier for
923 /// people reading the code to know that the conversion is lossless.
925 /// **Known problems:** None.
929 /// fn as_u64(x: u8) -> u64 {
934 /// Using `::from` would look like this:
937 /// fn as_u64(x: u8) -> u64 {
943 "casts using `as` that are known to be lossless, e.g., `x as u64` where `x: u8`"
// UNNECESSARY_CAST: `x as T` where `x: T` already.
946 declare_clippy_lint! {
947 /// **What it does:** Checks for casts to the same type.
949 /// **Why is this bad?** It's just unnecessary.
951 /// **Known problems:** None.
955 /// let _ = 2i32 as i32;
957 pub UNNECESSARY_CAST,
959 "cast to the same type, e.g., `x as i32` where `x: i32`"
// CAST_PTR_ALIGNMENT: raw-pointer casts to a more strictly aligned pointee.
962 declare_clippy_lint! {
963 /// **What it does:** Checks for casts from a less-strictly-aligned pointer to a
964 /// more-strictly-aligned pointer
966 /// **Why is this bad?** Dereferencing the resulting pointer may be undefined
969 /// **Known problems:** Using `std::ptr::read_unaligned` and `std::ptr::write_unaligned` or similar
970 /// on the resulting pointer is fine.
974 /// let _ = (&1u8 as *const u8) as *const u16;
975 /// let _ = (&mut 1u8 as *mut u8) as *mut u16;
977 pub CAST_PTR_ALIGNMENT,
979 "cast from a pointer to a more-strictly-aligned pointer"
// FN_TO_NUMERIC_CAST: fn pointer cast to any numeric type other than usize.
982 declare_clippy_lint! {
983 /// **What it does:** Checks for casts of function pointers to something other than usize
985 /// **Why is this bad?**
986 /// Casting a function pointer to anything other than usize/isize is not portable across
987 /// architectures, because you end up losing bits if the target type is too small or end up with a
988 /// bunch of extra bits that waste space and add more instructions to the final binary than
989 /// strictly necessary for the problem
991 /// Casting to isize also doesn't make sense since there are no signed addresses.
997 /// fn fun() -> i32 { 1 }
998 /// let a = fun as i64;
1001 /// fn fun2() -> i32 { 1 }
1002 /// let a = fun2 as usize;
1004 pub FN_TO_NUMERIC_CAST,
1006 "casting a function pointer to a numeric type other than usize"
// FN_TO_NUMERIC_CAST_WITH_TRUNCATION: fn pointer cast narrower than an address.
1009 declare_clippy_lint! {
1010 /// **What it does:** Checks for casts of a function pointer to a numeric type not wide enough to
1013 /// **Why is this bad?**
1014 /// Such a cast discards some bits of the function's address. If this is intended, it would be more
1015 /// clearly expressed by casting to usize first, then casting the usize to the intended type (with
1016 /// a comment) to perform the truncation.
1022 /// fn fn1() -> i16 {
1025 /// let _ = fn1 as i32;
1027 /// // Better: Cast to usize first, then comment with the reason for the truncation
1028 /// fn fn2() -> i16 {
1031 /// let fn_ptr = fn2 as usize;
1032 /// let fn_ptr_truncated = fn_ptr as i32;
1034 pub FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
1036 "casting a function pointer to a numeric type not wide enough to store the address"
1039 /// Returns the size in bits of an integral type.
1040 /// Will return 0 if the type is not an int or uint variant
// `isize`/`usize` resolve to the target's pointer width; the fixed-width arms
// (I8..I128 / U8..U64) are elided from this excerpt.
1041 fn int_ty_to_nbits(typ: Ty<'_>, tcx: TyCtxt<'_>) -> u64 {
1043 ty::Int(i) => match i {
1044 IntTy::Isize => tcx.data_layout.pointer_size.bits(),
1051 ty::Uint(i) => match i {
1052 UintTy::Usize => tcx.data_layout.pointer_size.bits(),
1057 UintTy::U128 => 128,
// True for the two pointer-width integer types, whose bit width is
// target-dependent — the cast lints report those ranges conditionally.
1063 fn is_isize_or_usize(typ: Ty<'_>) -> bool {
1065 ty::Int(IntTy::Isize) | ty::Uint(UintTy::Usize) => true,
// Emits CAST_PRECISION_LOSS for an int -> float cast, wording the message by
// float target (f32/f64 mantissa width) and by whether the source width is
// target-dependent (isize/usize).
1070 fn span_precision_loss_lint(cx: &LateContext<'_, '_>, expr: &Expr<'_>, cast_from: Ty<'_>, cast_to_f64: bool) {
// IEEE-754 explicit mantissa bits: 52 for f64, 23 for f32.
1071 let mantissa_nbits = if cast_to_f64 { 52 } else { 23 };
// isize/usize -> f64 only loses precision on 64-bit targets.
1072 let arch_dependent = is_isize_or_usize(cast_from) && cast_to_f64;
1073 let arch_dependent_str = "on targets with 64-bit wide pointers ";
1074 let from_nbits_str = if arch_dependent {
1076 } else if is_isize_or_usize(cast_from) {
1077 "32 or 64".to_owned()
1079 int_ty_to_nbits(cast_from, cx.tcx).to_string()
1083 CAST_PRECISION_LOSS,
1086 "casting `{0}` to `{1}` causes a loss of precision {2}(`{0}` is {3} bits wide, \
1087 but `{1}`'s mantissa is only {4} bits wide)",
1089 if cast_to_f64 { "f64" } else { "f32" },
1090 if arch_dependent { arch_dependent_str } else { "" },
// True if `snip` (the source text of binary expression `op`) is wrapped in
// parentheses that become redundant inside a `T::from(...)` suggestion.
1097 fn should_strip_parens(op: &Expr<'_>, snip: &str) -> bool {
1098 if let ExprKind::Binary(_, _, _) = op.kind {
1099 if snip.starts_with('(') && snip.ends_with(')') {
// Emits CAST_LOSSLESS, suggesting `Target::from(op)` in place of `op as Target`.
1106 fn span_lossless_lint(cx: &LateContext<'_, '_>, expr: &Expr<'_>, op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
1107 // Do not suggest using From in consts/statics until it is valid to do so (see #2267).
1108 if in_constant(cx, expr.hir_id) {
1111 // The suggestion is to use a function call, so if the original expression
1112 // has parens on the outside, they are no longer needed.
1113 let mut applicability = Applicability::MachineApplicable;
1114 let opt = snippet_opt(cx, op.span);
1115 let sugg = if let Some(ref snip) = opt {
1116 if should_strip_parens(op, snip) {
1117 &snip[1..snip.len() - 1]
// No snippet available: fall back to a placeholder, downgrading applicability
// so the suggestion is not auto-applied.
1122 applicability = Applicability::HasPlaceholders;
1131 "casting `{}` to `{}` may become silently lossy if you later change the type",
1135 format!("{}::from({})", cast_to, sugg),
// Emits CAST_SIGN_LOSS for signed -> unsigned casts, except for values that are
// provably non-negative (constants, or a small allow-list of methods).
1146 fn check_loss_of_sign(cx: &LateContext<'_, '_>, expr: &Expr<'_>, op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
// Sign can only be lost going signed -> unsigned.
1147 if !cast_from.is_signed() || cast_to.is_signed() {
1151 // don't lint for positive constants
1152 let const_val = constant(cx, &cx.tables, op);
1154 if let Some((const_val, _)) = const_val;
1155 if let Constant::Int(n) = const_val;
1156 if let ty::Int(ity) = cast_from.kind;
// Sign-extend the raw constant bits to check >= 0.
1157 if sext(cx.tcx, n, ity) >= 0;
1163 // don't lint for the result of methods that always return non-negative values
1164 if let ExprKind::MethodCall(ref path, _, _) = op.kind {
1165 let mut method_name = path.ident.name.as_str();
1166 let whitelisted_methods = ["abs", "checked_abs", "rem_euclid", "checked_rem_euclid"];
// `x.checked_abs().unwrap()` etc.: look through a trailing `.unwrap()` to the
// method whose non-negativity matters.
1169 if method_name == "unwrap";
1170 if let Some(arglist) = method_chain_args(op, &["unwrap"]);
1171 if let ExprKind::MethodCall(ref inner_path, _, _) = &arglist[0][0].kind;
1173 method_name = inner_path.ident.name.as_str();
1177 if whitelisted_methods.iter().any(|&name| method_name == name) {
1187 "casting `{}` to `{}` may lose the sign of the value",
// Emits CAST_POSSIBLE_TRUNCATION and/or CAST_POSSIBLE_WRAP for int -> int
// casts, with target-pointer-width-dependent messaging when isize/usize is
// involved. NOTE(review): several tuple-arm lines of the big match are elided
// from this excerpt, so the exact (from, to) case analysis can't be fully
// reconstructed here.
1193 fn check_truncation_and_wrapping(cx: &LateContext<'_, '_>, expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
1194 let arch_64_suffix = " on targets with 64-bit wide pointers";
1195 let arch_32_suffix = " on targets with 32-bit wide pointers";
1196 let cast_unsigned_to_signed = !cast_from.is_signed() && cast_to.is_signed();
1197 let from_nbits = int_ty_to_nbits(cast_from, cx.tcx);
1198 let to_nbits = int_ty_to_nbits(cast_to, cx.tcx);
// Decide, per combination of pointer-width-dependence, whether truncation
// and/or wrap can occur and which arch suffix (if any) each message needs.
1199 let (span_truncation, suffix_truncation, span_wrap, suffix_wrap) =
1200 match (is_isize_or_usize(cast_from), is_isize_or_usize(cast_to)) {
1201 (true, true) | (false, false) => (
1202 to_nbits < from_nbits,
1204 to_nbits == from_nbits && cast_unsigned_to_signed,
1214 to_nbits <= 32 && cast_unsigned_to_signed,
1220 cast_unsigned_to_signed,
1221 if from_nbits == 64 {
1228 if span_truncation {
1231 CAST_POSSIBLE_TRUNCATION,
1234 "casting `{}` to `{}` may truncate the value{}",
1237 match suffix_truncation {
1238 ArchSuffix::_32 => arch_32_suffix,
1239 ArchSuffix::_64 => arch_64_suffix,
1240 ArchSuffix::None => "",
// Second emission: CAST_POSSIBLE_WRAP (its `if span_wrap {` head is elided).
1251 "casting `{}` to `{}` may wrap around the value{}",
1255 ArchSuffix::_32 => arch_32_suffix,
1256 ArchSuffix::_64 => arch_64_suffix,
1257 ArchSuffix::None => "",
// Emits CAST_LOSSLESS when the int -> int cast is strictly widening, not
// pointer-width dependent, and does not change signedness — i.e. exactly the
// cases where `Target::from` exists and is lossless.
1264 fn check_lossless(cx: &LateContext<'_, '_>, expr: &Expr<'_>, op: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
1265 let cast_signed_to_unsigned = cast_from.is_signed() && !cast_to.is_signed();
1266 let from_nbits = int_ty_to_nbits(cast_from, cx.tcx);
1267 let to_nbits = int_ty_to_nbits(cast_to, cx.tcx);
1268 if !is_isize_or_usize(cast_from) && !is_isize_or_usize(cast_to) && from_nbits < to_nbits && !cast_signed_to_unsigned
1270 span_lossless_lint(cx, expr, op, cast_from, cast_to);
// Registers the cast lints under the `Casts` pass. NOTE(review): several lint
// names in this list (CAST_SIGN_LOSS, CAST_POSSIBLE_WRAP, CAST_LOSSLESS,
// UNNECESSARY_CAST, CAST_PTR_ALIGNMENT, FN_TO_NUMERIC_CAST) are on lines
// elided from this excerpt.
1274 declare_lint_pass!(Casts => [
1275 CAST_PRECISION_LOSS,
1277 CAST_POSSIBLE_TRUNCATION,
1283 FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
1286 // Check if the given type is either `core::ffi::c_void` or
1287 // one of the platform specific `libc::<platform>::c_void` of libc.
// Used by `lint_cast_ptr_alignment` to skip casts through `c_void`, where the
// user must be trusted about alignment.
1288 fn is_c_void(cx: &LateContext<'_, '_>, ty: Ty<'_>) -> bool {
1289 if let ty::Adt(adt, _) = ty.kind {
1290 let names = cx.get_def_path(adt.did);
1292 if names.is_empty() {
// NOTE(review): `&&` binds tighter than `||`, so this reads as
// `names[0] == libc || (names[0] == core && last == c_void)` — any ADT whose
// path is rooted at `libc` matches regardless of its final segment. If only
// `c_void` was intended, this wants parentheses:
// `(names[0] == libc || names[0] == core) && last == c_void` — confirm intent.
1295 if names[0] == sym!(libc) || names[0] == sym::core && *names.last().unwrap() == sym!(c_void) {
1302 /// Returns the mantissa bits wide of a fp type.
1303 /// Will return 0 if the type is not a fp
// Explicit IEEE-754 mantissa bits: f32 = 23, f64 = 52. Unconstrained float
// literals (`FloatVar`) default to f64, hence the 52 in the second arm.
1304 fn fp_ty_mantissa_nbits(typ: Ty<'_>) -> u32 {
1306 ty::Float(FloatTy::F32) => 23,
1307 ty::Float(FloatTy::F64) | ty::Infer(InferTy::FloatVar(_)) => 52,
// Entry point of the `Casts` pass: inspects every `expr as T` cast and
// dispatches to the specific checkers (fn-pointer casts, unnecessary literal
// casts, same-type casts, numeric casts, pointer-alignment casts).
1312 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for Casts {
1313 fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr<'_>) {
// Never lint casts produced by macro expansion.
1314 if expr.span.from_expansion() {
1317 if let ExprKind::Cast(ref ex, _) = expr.kind {
1318 let (cast_from, cast_to) = (cx.tables.expr_ty(ex), cx.tables.expr_ty(expr));
1319 lint_fn_to_numeric_cast(cx, expr, ex, cast_from, cast_to);
// Special case: an integer literal cast to a float that can represent it
// exactly is UNNECESSARY_CAST — suggest a suffixed literal instead.
1320 if let ExprKind::Lit(ref lit) = ex.kind {
1322 if let LitKind::Int(n, _) = lit.node;
1323 if let Some(src) = snippet_opt(cx, lit.span);
1324 if cast_to.is_floating_point();
1325 if let Some(num_lit) = NumericLiteral::from_lit_kind(&src, &lit.node);
// Bits needed to represent the literal's value.
1326 let from_nbits = 128 - n.leading_zeros();
1327 let to_nbits = fp_ty_mantissa_nbits(cast_to);
1328 if from_nbits != 0 && to_nbits != 0 && from_nbits <= to_nbits && num_lit.is_decimal();
1334 &format!("casting integer literal to `{}` is unnecessary", cast_to),
1336 format!("{}_{}", n, cast_to),
1337 Applicability::MachineApplicable,
// Unsuffixed literals fall through (their type is inferred by the cast).
1343 LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::Float(_, LitFloatType::Unsuffixed) => {},
// `x as T` where `x: T` already: UNNECESSARY_CAST.
1345 if cast_from.kind == cast_to.kind && !in_external_macro(cx.sess(), expr.span) {
1351 "casting to the same type is unnecessary (`{}` -> `{}`)",
1359 if cast_from.is_numeric() && cast_to.is_numeric() && !in_external_macro(cx.sess(), expr.span) {
1360 lint_numeric_casts(cx, expr, ex, cast_from, cast_to);
1363 lint_cast_ptr_alignment(cx, expr, cast_from, cast_to);
// Dispatches the numeric-cast lints by (integral?, integral?) of the
// source/target types: int->float (precision loss / lossless), float->int
// (truncation + sign loss), int->int (sign, truncation/wrap, lossless), and
// float->float (f64->f32 truncation, f32->f64 lossless).
// NOTE(review): the `(true, false)` / `(false, true)` / `(true, true)` match
// arm heads and the `expr` parameter line are elided from this excerpt.
1368 fn lint_numeric_casts<'tcx>(
1369 cx: &LateContext<'_, 'tcx>,
1371 cast_expr: &Expr<'_>,
1372 cast_from: Ty<'tcx>,
1375 match (cast_from.is_integral(), cast_to.is_integral()) {
// int -> float arm:
1377 let from_nbits = int_ty_to_nbits(cast_from, cx.tcx);
// Compare against the target float's mantissa width (f32 -> 23-ish via the
// elided branch, else 64 presumably) — TODO confirm elided values.
1378 let to_nbits = if let ty::Float(FloatTy::F32) = cast_to.kind {
1383 if is_isize_or_usize(cast_from) || from_nbits >= to_nbits {
1384 span_precision_loss_lint(cx, expr, cast_from, to_nbits == 64);
1386 if from_nbits < to_nbits {
1387 span_lossless_lint(cx, expr, cast_expr, cast_from, cast_to);
// float -> int arm: always possibly truncating; also sign loss if target
// unsigned.
1393 CAST_POSSIBLE_TRUNCATION,
1395 &format!("casting `{}` to `{}` may truncate the value", cast_from, cast_to),
1397 if !cast_to.is_signed() {
1403 "casting `{}` to `{}` may lose the sign of the value",
// int -> int arm: run all three integer checks.
1410 check_loss_of_sign(cx, expr, cast_expr, cast_from, cast_to);
1411 check_truncation_and_wrapping(cx, expr, cast_from, cast_to);
1412 check_lossless(cx, expr, cast_expr, cast_from, cast_to);
// float -> float arm:
1415 if let (&ty::Float(FloatTy::F64), &ty::Float(FloatTy::F32)) = (&cast_from.kind, &cast_to.kind) {
1418 CAST_POSSIBLE_TRUNCATION,
1420 "casting `f64` to `f32` may truncate the value",
1423 if let (&ty::Float(FloatTy::F32), &ty::Float(FloatTy::F64)) = (&cast_from.kind, &cast_to.kind) {
1424 span_lossless_lint(cx, expr, cast_expr, cast_from, cast_to);
// Lints raw-pointer casts that increase the alignment requirement of the
// pointee (`*const u8 as *const u32`), which can produce misaligned loads.
// NOTE(review): elided listing — the `if_chain!` wrapper and the lint
// emission tail (lint name, span, message closing) are not visible.
1430 fn lint_cast_ptr_alignment<'tcx>(cx: &LateContext<'_, 'tcx>, expr: &Expr<'_>, cast_from: Ty<'tcx>, cast_to: Ty<'tcx>) {
1432 if let ty::RawPtr(from_ptr_ty) = &cast_from.kind;
1433 if let ty::RawPtr(to_ptr_ty) = &cast_to.kind;
1434 if let Ok(from_layout) = cx.layout_of(from_ptr_ty.ty);
1435 if let Ok(to_layout) = cx.layout_of(to_ptr_ty.ty);
// only lint when the target alignment is strictly stricter
1436 if from_layout.align.abi < to_layout.align.abi;
1437 // with c_void, we inherently need to trust the user
1438 if !is_c_void(cx, from_ptr_ty.ty);
1439 // when casting from a ZST, we don't know enough to properly lint
1440 if !from_layout.is_zst();
1447 "casting from `{}` to a more-strictly-aligned pointer (`{}`) ({} < {} bytes)",
1450 from_layout.align.abi.bytes(),
1451 to_layout.align.abi.bytes(),
// Lints casting a function item/pointer to an integer type: warns about
// truncation when the target is narrower than a pointer, and suggests
// `as usize` for other non-`usize` targets.
// NOTE(review): elided listing — parameter lines (`expr`, `cast_from`,
// `cast_to`), the `_ => return` arms, and lint emission scaffolding are
// not visible between these lines.
1458 fn lint_fn_to_numeric_cast(
1459 cx: &LateContext<'_, '_>,
1461 cast_expr: &Expr<'_>,
1465 // We only want to check casts to `ty::Uint` or `ty::Int`
1466 match cast_to.kind {
1467 ty::Uint(_) | ty::Int(..) => { /* continue on */ },
1470 match cast_from.kind {
1471 ty::FnDef(..) | ty::FnPtr(_) => {
1472 let mut applicability = Applicability::MaybeIncorrect;
1473 let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability);
1475 let to_nbits = int_ty_to_nbits(cast_to, cx.tcx);
// narrower than a pointer: the cast can drop address bits
1476 if to_nbits < cx.tcx.data_layout.pointer_size.bits() {
1479 FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
1482 "casting function pointer `{}` to `{}`, which truncates the value",
1483 from_snippet, cast_to
1486 format!("{} as usize", from_snippet),
// wide enough but not `usize`: still prefer the canonical `as usize`
1489 } else if cast_to.kind != ty::Uint(UintTy::Usize) {
1494 &format!("casting function pointer `{}` to `{}`", from_snippet, cast_to),
1496 format!("{} as usize", from_snippet),
1505 declare_clippy_lint! {
1506 /// **What it does:** Checks for types used in structs, parameters and `let`
1507 /// declarations above a certain complexity threshold.
1509 /// **Why is this bad?** Too complex types make the code less readable. Consider
1510 /// using a `type` definition to simplify them.
1512 /// **Known problems:** None.
1516 /// # use std::rc::Rc;
1518 /// inner: Rc<Vec<Vec<Box<(u32, u32, u32, u32)>>>>,
1521 pub TYPE_COMPLEXITY,
1523 "usage of very complex types that might be better factored into `type` definitions"
// Lint pass state: carries the configurable complexity threshold above
// which a type is reported (see `check_type` below).
// NOTE(review): elided listing — the `threshold` field and the body of
// `new` are not visible between these lines.
1526 pub struct TypeComplexity {
1530 impl TypeComplexity {
1532 pub fn new(threshold: u64) -> Self {
1537 impl_lint_pass!(TypeComplexity => [TYPE_COMPLEXITY]);
// Hooks every place a type can syntactically appear (fn signatures, struct
// fields, consts/statics, trait/impl items, locals) and funnels each type
// into `check_type`/`check_fndecl`.
// NOTE(review): elided listing — `check_fn`'s remaining parameters, the
// `match` headers, and default arms are not visible between these lines.
1539 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypeComplexity {
1542 cx: &LateContext<'a, 'tcx>,
1544 decl: &'tcx FnDecl<'_>,
1549 self.check_fndecl(cx, decl);
1552 fn check_struct_field(&mut self, cx: &LateContext<'a, 'tcx>, field: &'tcx hir::StructField<'_>) {
1553 // enum variants are also struct fields now
1554 self.check_type(cx, &field.ty);
1557 fn check_item(&mut self, cx: &LateContext<'a, 'tcx>, item: &'tcx Item<'_>) {
1559 ItemKind::Static(ref ty, _, _) | ItemKind::Const(ref ty, _) => self.check_type(cx, ty),
1560 // functions, enums, structs, impls and traits are covered
1565 fn check_trait_item(&mut self, cx: &LateContext<'a, 'tcx>, item: &'tcx TraitItem<'_>) {
1567 TraitItemKind::Const(ref ty, _) | TraitItemKind::Type(_, Some(ref ty)) => self.check_type(cx, ty),
1568 TraitItemKind::Fn(FnSig { ref decl, .. }, TraitFn::Required(_)) => self.check_fndecl(cx, decl),
1569 // methods with default impl are covered by check_fn
1574 fn check_impl_item(&mut self, cx: &LateContext<'a, 'tcx>, item: &'tcx ImplItem<'_>) {
1576 ImplItemKind::Const(ref ty, _) | ImplItemKind::TyAlias(ref ty) => self.check_type(cx, ty),
1577 // methods are covered by check_fn
1582 fn check_local(&mut self, cx: &LateContext<'a, 'tcx>, local: &'tcx Local<'_>) {
1583 if let Some(ref ty) = local.ty {
1584 self.check_type(cx, ty);
// Private helpers: walk a fn declaration's parameter/return types, and
// score a single type with `TypeComplexityVisitor`, linting when the score
// exceeds the configured threshold.
// NOTE(review): elided listing — the early `return` for expansions, the
// `let score = visitor.score;` binding, and the lint emission tail are not
// visible between these lines.
1589 impl<'a, 'tcx> TypeComplexity {
1590 fn check_fndecl(&self, cx: &LateContext<'a, 'tcx>, decl: &'tcx FnDecl<'_>) {
1591 for arg in decl.inputs {
1592 self.check_type(cx, arg);
1594 if let FnRetTy::Return(ref ty) = decl.output {
1595 self.check_type(cx, ty);
1599 fn check_type(&self, cx: &LateContext<'_, '_>, ty: &hir::Ty<'_>) {
// skip macro-generated types
1600 if ty.span.from_expansion() {
1604 let mut visitor = TypeComplexityVisitor { score: 0, nest: 1 };
1605 visitor.visit_ty(ty);
1609 if score > self.threshold {
1614 "very complex type used. Consider factoring parts into `type` definitions",
1620 /// Walks a type and assigns a complexity score to it.
// Scoring scheme: cheap wrappers (references, pointers, `_`) cost 1 and do
// not deepen nesting; "normal" type components cost 10×nesting and deepen
// it by 1; bare fns and trait objects with lifetime params cost 50×nesting.
// NOTE(review): elided listing — the `score`/`nest` field declarations and
// the trait-object score arms are not visible between these lines.
1621 struct TypeComplexityVisitor {
1622 /// total complexity score of the type
1624 /// current nesting level
1628 impl<'tcx> Visitor<'tcx> for TypeComplexityVisitor {
1629 type Map = Map<'tcx>;
1631 fn visit_ty(&mut self, ty: &'tcx hir::Ty<'_>) {
1632 let (add_score, sub_nest) = match ty.kind {
1633 // _, &x and *x have only small overhead; don't mess with nesting level
1634 TyKind::Infer | TyKind::Ptr(..) | TyKind::Rptr(..) => (1, 0),
1636 // the "normal" components of a type: named types, arrays/tuples
1637 TyKind::Path(..) | TyKind::Slice(..) | TyKind::Tup(..) | TyKind::Array(..) => (10 * self.nest, 1),
1639 // function types bring a lot of overhead
1640 TyKind::BareFn(ref bare) if bare.abi == Abi::Rust => (50 * self.nest, 1),
1642 TyKind::TraitObject(ref param_bounds, _) => {
1643 let has_lifetime_parameters = param_bounds.iter().any(|bound| {
1644 bound.bound_generic_params.iter().any(|gen| match gen.kind {
1645 GenericParamKind::Lifetime { .. } => true,
1649 if has_lifetime_parameters {
1650 // complex trait bounds like A<'a, 'b>
1653 // simple trait bounds like A + B
// accumulate the score, then recurse with the deeper nesting level and
// restore it afterwards (the `walk_ty` recursion line is elided here)
1660 self.score += add_score;
1661 self.nest += sub_nest;
1663 self.nest -= sub_nest;
1665 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
1666 NestedVisitorMap::None
1670 declare_clippy_lint! {
1671 /// **What it does:** Checks for expressions where a character literal is cast
1672 /// to `u8` and suggests using a byte literal instead.
1674 /// **Why is this bad?** In general, casting values to smaller types is
1675 /// error-prone and should be avoided where possible. In the particular case of
1676 /// converting a character literal to u8, it is easy to avoid by just using a
1677 /// byte literal instead. As an added bonus, `b'a'` is even slightly shorter
1678 /// than `'a' as u8`.
1680 /// **Known problems:** None.
1687 /// A better version, using the byte literal:
1694 "casting a character literal to `u8` truncates"
// stateless pass: a single lint, no configuration
1697 declare_lint_pass!(CharLitAsU8 => [CHAR_LIT_AS_U8]);
// Matches `'c' as u8` and suggests the byte literal `b'c'` instead; the
// suggestion is only machine-applicable for ASCII characters (the
// applicability downgrade for non-ASCII is in the elided lines).
// NOTE(review): elided listing — the `if_chain!` wrapper, the
// `span_lint_and_then` call head, and closing braces are not visible.
1699 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for CharLitAsU8 {
1700 fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr<'_>) {
1702 if !expr.span.from_expansion();
1703 if let ExprKind::Cast(e, _) = &expr.kind;
1704 if let ExprKind::Lit(l) = &e.kind;
1705 if let LitKind::Char(c) = l.node;
1706 if ty::Uint(UintTy::U8) == cx.tables.expr_ty(expr).kind;
1708 let mut applicability = Applicability::MachineApplicable;
1709 let snippet = snippet_with_applicability(cx, e.span, "'x'", &mut applicability);
1715 "casting a character literal to `u8` truncates",
1717 db.note("`char` is four bytes wide, but `u8` is a single byte");
1722 "use a byte literal instead",
1723 format!("b{}", snippet),
1733 declare_clippy_lint! {
1734 /// **What it does:** Checks for comparisons where one side of the relation is
1735 /// either the minimum or maximum value for its type and warns if it involves a
1736 /// case that is always true or always false. Only integer and boolean types are
1739 /// **Why is this bad?** An expression like `min <= x` may misleadingly imply
1740 /// that it is possible for `x` to be less than the minimum. Expressions like
1741 /// `max < x` are probably mistakes.
1743 /// **Known problems:** For `usize` the size of the current compile target will
1744 /// be assumed (e.g., 64 bits on 64 bit systems). This means code that uses such
1745 /// a comparison to detect target pointer width will trigger this lint. One can
1746 /// use `mem::sizeof` and compare its value or conditional compilation
1748 /// like `#[cfg(target_pointer_width = "64")] ..` instead.
1753 /// let vec: Vec<isize> = Vec::new();
1754 /// if vec.len() <= 0 {}
1755 /// if 100 > std::i32::MAX {}
1757 pub ABSURD_EXTREME_COMPARISONS,
1759 "a comparison with a maximum or minimum value that is always true or false"
// stateless pass: a single lint, no configuration
1762 declare_lint_pass!(AbsurdExtremeComparisons => [ABSURD_EXTREME_COMPARISONS]);
// An expression known to be the extreme (min/max) value of its type.
// NOTE(review): elided listing — the struct fields (presumably `which` and
// `expr`, see their use in `detect_extreme_expr`), the `ExtremeType` enum,
// and the remaining `AbsurdComparisonResult` variants are not visible.
1769 struct ExtremeExpr<'a> {
1774 enum AbsurdComparisonResult {
1777 InequalityImpossible,
// Comparisons mixing target-sized (isize/usize) and fixed-size integers via
// a cast are unanalyzable here, since the answer depends on the target.
1780 fn is_cast_between_fixed_and_target<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr<'tcx>) -> bool {
1781 if let ExprKind::Cast(ref cast_exp, _) = expr.kind {
1782 let precast_ty = cx.tables.expr_ty(cast_exp);
1783 let cast_ty = cx.tables.expr_ty(expr);
1785 return is_isize_or_usize(precast_ty) != is_isize_or_usize(cast_ty);
1791 fn detect_absurd_comparison<'a, 'tcx>(
1792 cx: &LateContext<'a, 'tcx>,
1794 lhs: &'tcx Expr<'_>,
1795 rhs: &'tcx Expr<'_>,
1796 ) -> Option<(ExtremeExpr<'tcx>, AbsurdComparisonResult)> {
1797 use crate::types::AbsurdComparisonResult::{AlwaysFalse, AlwaysTrue, InequalityImpossible};
1798 use crate::types::ExtremeType::{Maximum, Minimum};
1799 use crate::utils::comparisons::{normalize_comparison, Rel};
1801 // absurd comparison only makes sense on primitive types
1802 // primitive types don't implement comparison operators with each other
1803 if cx.tables.expr_ty(lhs) != cx.tables.expr_ty(rhs) {
1807 // comparisons between fix sized types and target sized types are considered unanalyzable
1808 if is_cast_between_fixed_and_target(cx, lhs) || is_cast_between_fixed_and_target(cx, rhs) {
1812 let (rel, normalized_lhs, normalized_rhs) = normalize_comparison(op, lhs, rhs)?;
1814 let lx = detect_extreme_expr(cx, normalized_lhs);
1815 let rx = detect_extreme_expr(cx, normalized_rhs);
1820 (Some(l @ ExtremeExpr { which: Maximum, .. }), _) => (l, AlwaysFalse), // max < x
1821 (_, Some(r @ ExtremeExpr { which: Minimum, .. })) => (r, AlwaysFalse), // x < min
1827 (Some(l @ ExtremeExpr { which: Minimum, .. }), _) => (l, AlwaysTrue), // min <= x
1828 (Some(l @ ExtremeExpr { which: Maximum, .. }), _) => (l, InequalityImpossible), // max <= x
1829 (_, Some(r @ ExtremeExpr { which: Minimum, .. })) => (r, InequalityImpossible), // x <= min
1830 (_, Some(r @ ExtremeExpr { which: Maximum, .. })) => (r, AlwaysTrue), // x <= max
1834 Rel::Ne | Rel::Eq => return None,
1838 fn detect_extreme_expr<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr<'_>) -> Option<ExtremeExpr<'tcx>> {
1839 use crate::types::ExtremeType::{Maximum, Minimum};
1841 let ty = cx.tables.expr_ty(expr);
1843 let cv = constant(cx, cx.tables, expr)?.0;
1845 let which = match (&ty.kind, cv) {
1846 (&ty::Bool, Constant::Bool(false)) | (&ty::Uint(_), Constant::Int(0)) => Minimum,
1847 (&ty::Int(ity), Constant::Int(i))
1848 if i == unsext(cx.tcx, i128::min_value() >> (128 - int_bits(cx.tcx, ity)), ity) =>
1853 (&ty::Bool, Constant::Bool(true)) => Maximum,
1854 (&ty::Int(ity), Constant::Int(i))
1855 if i == unsext(cx.tcx, i128::max_value() >> (128 - int_bits(cx.tcx, ity)), ity) =>
1859 (&ty::Uint(uty), Constant::Int(i)) if clip(cx.tcx, u128::max_value(), uty) == i => Maximum,
1863 Some(ExtremeExpr { which, expr })
// Lint driver: on each binary comparison, runs `detect_absurd_comparison`
// and reports with a tailored help message naming the culprit extreme.
// NOTE(review): elided listing — the `let help = format!(` head and
// closing braces/parens are not visible between these lines.
1866 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for AbsurdExtremeComparisons {
1867 fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr<'_>) {
1868 use crate::types::AbsurdComparisonResult::{AlwaysFalse, AlwaysTrue, InequalityImpossible};
1869 use crate::types::ExtremeType::{Maximum, Minimum};
1871 if let ExprKind::Binary(ref cmp, ref lhs, ref rhs) = expr.kind {
1872 if let Some((culprit, result)) = detect_absurd_comparison(cx, cmp.node, lhs, rhs) {
1873 if !expr.span.from_expansion() {
1874 let msg = "this comparison involving the minimum or maximum element for this \
1875 type contains a case that is always true or always false";
1877 let conclusion = match result {
1878 AlwaysFalse => "this comparison is always false".to_owned(),
1879 AlwaysTrue => "this comparison is always true".to_owned(),
1880 InequalityImpossible => format!(
1881 "the case where the two sides are not equal never occurs, consider using `{} == {}` \
1883 snippet(cx, lhs.span, "lhs"),
1884 snippet(cx, rhs.span, "rhs")
1889 "because `{}` is the {} value for this type, {}",
1890 snippet(cx, culprit.expr.span, "x"),
1891 match culprit.which {
1892 Minimum => "minimum",
1893 Maximum => "maximum",
1898 span_lint_and_help(cx, ABSURD_EXTREME_COMPARISONS, expr.span, msg, &help);
1905 declare_clippy_lint! {
1906 /// **What it does:** Checks for comparisons where the relation is always either
1907 /// true or false, but where one side has been upcast so that the comparison is
1908 /// necessary. Only integer types are checked.
1910 /// **Why is this bad?** An expression like `let x : u8 = ...; (x as u32) > 300`
1911 /// will mistakenly imply that it is possible for `x` to be outside the range of
1914 /// **Known problems:**
1915 /// https://github.com/rust-lang/rust-clippy/issues/886
1920 /// (x as u32) > 300;
1922 pub INVALID_UPCAST_COMPARISONS,
1924 "a comparison involving an upcast which is always true or false"
// stateless pass: a single lint, no configuration
1927 declare_lint_pass!(InvalidUpcastComparisons => [INVALID_UPCAST_COMPARISONS]);
// `FullInt` holds any Rust integer value losslessly as either i128 (`S`) or
// u128 (`U`), with a total order spanning both representations.
// NOTE(review): elided listing — the `enum FullInt { S(i128), U(u128) }`
// declaration, the `impl FullInt` header for `cmp_s_u`, and its negative /
// equal branches are not visible between these lines.
1929 #[derive(Copy, Clone, Debug, Eq)]
// sign loss is intentional: `s` is only compared after the range checks
1936 #[allow(clippy::cast_sign_loss)]
1938 fn cmp_s_u(s: i128, u: u128) -> Ordering {
1941 } else if u > (i128::max_value() as u128) {
// `PartialEq`/`PartialOrd` are defined in terms of the total `cmp_s_u`
// order, so `partial_cmp` never returns `None` (hence the `expect`s).
1949 impl PartialEq for FullInt {
1951 fn eq(&self, other: &Self) -> bool {
1952 self.partial_cmp(other).expect("`partial_cmp` only returns `Some(_)`") == Ordering::Equal
1956 impl PartialOrd for FullInt {
1958 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
1959 Some(match (self, other) {
1960 (&Self::S(s), &Self::S(o)) => s.cmp(&o),
1961 (&Self::U(s), &Self::U(o)) => s.cmp(&o),
1962 (&Self::S(s), &Self::U(o)) => Self::cmp_s_u(s, o),
// mixed order is antisymmetric: reverse the signed-vs-unsigned result
1963 (&Self::U(s), &Self::S(o)) => Self::cmp_s_u(o, s).reverse(),
1967 impl Ord for FullInt {
1969 fn cmp(&self, other: &Self) -> Ordering {
1970 self.partial_cmp(other)
1971 .expect("`partial_cmp` for FullInt can never return `None`")
1975 fn numeric_cast_precast_bounds<'a>(cx: &LateContext<'_, '_>, expr: &'a Expr<'_>) -> Option<(FullInt, FullInt)> {
1976 use std::{i128, i16, i32, i64, i8, isize, u128, u16, u32, u64, u8, usize};
1978 if let ExprKind::Cast(ref cast_exp, _) = expr.kind {
1979 let pre_cast_ty = cx.tables.expr_ty(cast_exp);
1980 let cast_ty = cx.tables.expr_ty(expr);
1981 // if it's a cast from i32 to u32 wrapping will invalidate all these checks
1982 if cx.layout_of(pre_cast_ty).ok().map(|l| l.size) == cx.layout_of(cast_ty).ok().map(|l| l.size) {
1985 match pre_cast_ty.kind {
1986 ty::Int(int_ty) => Some(match int_ty {
1988 FullInt::S(i128::from(i8::min_value())),
1989 FullInt::S(i128::from(i8::max_value())),
1992 FullInt::S(i128::from(i16::min_value())),
1993 FullInt::S(i128::from(i16::max_value())),
1996 FullInt::S(i128::from(i32::min_value())),
1997 FullInt::S(i128::from(i32::max_value())),
2000 FullInt::S(i128::from(i64::min_value())),
2001 FullInt::S(i128::from(i64::max_value())),
2003 IntTy::I128 => (FullInt::S(i128::min_value()), FullInt::S(i128::max_value())),
2005 FullInt::S(isize::min_value() as i128),
2006 FullInt::S(isize::max_value() as i128),
2009 ty::Uint(uint_ty) => Some(match uint_ty {
2011 FullInt::U(u128::from(u8::min_value())),
2012 FullInt::U(u128::from(u8::max_value())),
2015 FullInt::U(u128::from(u16::min_value())),
2016 FullInt::U(u128::from(u16::max_value())),
2019 FullInt::U(u128::from(u32::min_value())),
2020 FullInt::U(u128::from(u32::max_value())),
2023 FullInt::U(u128::from(u64::min_value())),
2024 FullInt::U(u128::from(u64::max_value())),
2026 UintTy::U128 => (FullInt::U(u128::min_value()), FullInt::U(u128::max_value())),
2028 FullInt::U(usize::min_value() as u128),
2029 FullInt::U(usize::max_value() as u128),
2039 fn node_as_const_fullint<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr<'_>) -> Option<FullInt> {
2040 let val = constant(cx, cx.tables, expr)?.0;
2041 if let Constant::Int(const_int) = val {
2042 match cx.tables.expr_ty(expr).kind {
2043 ty::Int(ity) => Some(FullInt::S(sext(cx.tcx, const_int, ity))),
2044 ty::Uint(_) => Some(FullInt::U(const_int)),
2052 fn err_upcast_comparison(cx: &LateContext<'_, '_>, span: Span, expr: &Expr<'_>, always: bool) {
2053 if let ExprKind::Cast(ref cast_val, _) = expr.kind {
2056 INVALID_UPCAST_COMPARISONS,
2059 "because of the numeric bounds on `{}` prior to casting, this expression is always {}",
2060 snippet(cx, cast_val.span, "the expression"),
2061 if always { "true" } else { "false" },
2067 fn upcast_comparison_bounds_err<'a, 'tcx>(
2068 cx: &LateContext<'a, 'tcx>,
2070 rel: comparisons::Rel,
2071 lhs_bounds: Option<(FullInt, FullInt)>,
2072 lhs: &'tcx Expr<'_>,
2073 rhs: &'tcx Expr<'_>,
2076 use crate::utils::comparisons::Rel;
2078 if let Some((lb, ub)) = lhs_bounds {
2079 if let Some(norm_rhs_val) = node_as_const_fullint(cx, rhs) {
2080 if rel == Rel::Eq || rel == Rel::Ne {
2081 if norm_rhs_val < lb || norm_rhs_val > ub {
2082 err_upcast_comparison(cx, span, lhs, rel == Rel::Ne);
2084 } else if match rel {
2099 Rel::Eq | Rel::Ne => unreachable!(),
2101 err_upcast_comparison(cx, span, lhs, true)
2102 } else if match rel {
2117 Rel::Eq | Rel::Ne => unreachable!(),
2119 err_upcast_comparison(cx, span, lhs, false)
// Lint driver: normalizes each binary comparison, computes pre-cast bounds
// for both sides, and checks each side against the other (the second call
// swaps lhs/rhs and passes the inversion flag).
// NOTE(review): elided listing — the `else { return }` of the normalize
// binding and closing braces are not visible between these lines.
2125 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for InvalidUpcastComparisons {
2126 fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr<'_>) {
2127 if let ExprKind::Binary(ref cmp, ref lhs, ref rhs) = expr.kind {
2128 let normalized = comparisons::normalize_comparison(cmp.node, lhs, rhs);
2129 let (rel, normalized_lhs, normalized_rhs) = if let Some(val) = normalized {
2135 let lhs_bounds = numeric_cast_precast_bounds(cx, normalized_lhs);
2136 let rhs_bounds = numeric_cast_precast_bounds(cx, normalized_rhs);
2138 upcast_comparison_bounds_err(cx, expr.span, rel, lhs_bounds, normalized_lhs, normalized_rhs, false);
2139 upcast_comparison_bounds_err(cx, expr.span, rel, rhs_bounds, normalized_rhs, normalized_lhs, true);
2144 declare_clippy_lint! {
2145 /// **What it does:** Checks for public `impl` or `fn` missing generalization
2146 /// over different hashers and implicitly defaulting to the default hashing
2147 /// algorithm (`SipHash`).
2149 /// **Why is this bad?** `HashMap` or `HashSet` with custom hashers cannot be
2152 /// **Known problems:** Suggestions for replacing constructors can contain
2153 /// false-positives. Also applying suggestions can require modification of other
2154 /// pieces of code, possibly including external crates.
2158 /// # use std::collections::HashMap;
2159 /// # use std::hash::{Hash, BuildHasher};
2160 /// # trait Serialize {};
2161 /// impl<K: Hash + Eq, V> Serialize for HashMap<K, V> { }
2163 /// pub fn foo(map: &mut HashMap<i32, i32>) { }
2165 /// could be rewritten as
2167 /// # use std::collections::HashMap;
2168 /// # use std::hash::{Hash, BuildHasher};
2169 /// # trait Serialize {};
2170 /// impl<K: Hash + Eq, V, S: BuildHasher> Serialize for HashMap<K, V, S> { }
2172 /// pub fn foo<S: BuildHasher>(map: &mut HashMap<i32, i32, S>) { }
2174 pub IMPLICIT_HASHER,
2176 "missing generalization over different hashers"
// stateless pass: a single lint, no configuration
2179 declare_lint_pass!(ImplicitHasher => [IMPLICIT_HASHER]);
// Lint driver for IMPLICIT_HASHER: for exported `impl`s and `fn`s that
// mention `HashMap`/`HashSet` without a hasher parameter, suggests adding
// `S: BuildHasher` to the generics and rewriting default-hasher
// constructors found by `ImplicitHasherConstructorVisitor`.
// NOTE(review): heavily elided listing — the `ItemKind::Impl` match arm,
// several closing braces, the `span_lint_and_then` heads, and fallback
// span computations are not visible between these lines.
2181 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for ImplicitHasher {
2182 #[allow(clippy::cast_possible_truncation, clippy::too_many_lines)]
2183 fn check_item(&mut self, cx: &LateContext<'a, 'tcx>, item: &'tcx Item<'_>) {
2184 use rustc_span::BytePos;
// Builds the two-part suggestion: add `S: ::std::hash::BuildHasher` to the
// generics span and generalize the target type / constructors.
2186 fn suggestion<'a, 'tcx>(
2187 cx: &LateContext<'a, 'tcx>,
2188 db: &mut DiagnosticBuilder<'_>,
2189 generics_span: Span,
2190 generics_suggestion_span: Span,
2191 target: &ImplicitHasherType<'_>,
2192 vis: ImplicitHasherConstructorVisitor<'_, '_, '_>,
2194 let generics_snip = snippet(cx, generics_span, "");
2196 let generics_snip = if generics_snip.is_empty() {
// strip the surrounding `<` and `>` from the existing generics snippet
2199 &generics_snip[1..generics_snip.len() - 1]
2204 "consider adding a type parameter".to_string(),
2207 generics_suggestion_span,
2209 "<{}{}S: ::std::hash::BuildHasher{}>",
2211 if generics_snip.is_empty() { "" } else { ", " },
2212 if vis.suggestions.is_empty() {
2215 // request users to add `Default` bound so that generic constructors can be used
2222 format!("{}<{}, S>", target.type_name(), target.type_arguments(),),
2227 if !vis.suggestions.is_empty() {
2228 multispan_sugg(db, "...and use generic constructor".into(), vis.suggestions);
// only exported items are linted; private items can freely use SipHash
2232 if !cx.access_levels.is_exported(item.hir_id) {
2243 let mut vis = ImplicitHasherTypeVisitor::new(cx);
2246 for target in &vis.found {
2247 if differing_macro_contexts(item.span, target.span()) {
// fall back to locating the insertion point right after `impl` in the
// source text when the generics span is a dummy
2251 let generics_suggestion_span = generics.span.substitute_dummy({
2252 let pos = snippet_opt(cx, item.span.until(target.span()))
2253 .and_then(|snip| Some(item.span.lo() + BytePos(snip.find("impl")? as u32 + 4)));
2254 if let Some(pos) = pos {
2255 Span::new(pos, pos, item.span.data().ctxt)
2261 let mut ctr_vis = ImplicitHasherConstructorVisitor::new(cx, target);
2262 for item in items.iter().map(|item| cx.tcx.hir().impl_item(item.id)) {
2263 ctr_vis.visit_impl_item(item);
2271 "impl for `{}` should be generalized over different hashers",
2275 suggestion(cx, db, generics.span, generics_suggestion_span, target, ctr_vis);
// free functions: check each parameter type for implicit-hasher targets
2280 ItemKind::Fn(ref sig, ref generics, body_id) => {
2281 let body = cx.tcx.hir().body(body_id);
2283 for ty in sig.decl.inputs {
2284 let mut vis = ImplicitHasherTypeVisitor::new(cx);
2287 for target in &vis.found {
2288 if in_external_macro(cx.sess(), generics.span) {
2291 let generics_suggestion_span = generics.span.substitute_dummy({
2292 let pos = snippet_opt(cx, item.span.until(body.params[0].pat.span))
2294 let i = snip.find("fn")?;
// insertion point: just before the `(` that follows `fn`
2295 Some(item.span.lo() + BytePos((i + (&snip[i..]).find('(')?) as u32))
2297 .expect("failed to create span for type parameters");
2298 Span::new(pos, pos, item.span.data().ctxt)
2301 let mut ctr_vis = ImplicitHasherConstructorVisitor::new(cx, target);
2302 ctr_vis.visit_body(body);
2309 "parameter of type `{}` should be generalized over different hashers",
2313 suggestion(cx, db, generics.span, generics_suggestion_span, target, ctr_vis);
// A `HashMap`/`HashSet` type written without an explicit hasher parameter:
// stores its span, resolved `Ty`, and snippets of its type arguments.
// NOTE(review): elided listing — the path-segment iteration feeding
// `filter_map`, closing braces, and the trailing `None` returns are not
// visible between these lines.
2324 enum ImplicitHasherType<'tcx> {
2325 HashMap(Span, Ty<'tcx>, Cow<'static, str>, Cow<'static, str>),
2326 HashSet(Span, Ty<'tcx>, Cow<'static, str>),
2329 impl<'tcx> ImplicitHasherType<'tcx> {
2330 /// Checks that `ty` is a target type without a `BuildHasher`.
2331 fn new<'a>(cx: &LateContext<'a, 'tcx>, hir_ty: &hir::Ty<'_>) -> Option<Self> {
2332 if let TyKind::Path(QPath::Resolved(None, ref path)) = hir_ty.kind {
2333 let params: Vec<_> = path
2341 .filter_map(|arg| match arg {
2342 GenericArg::Type(ty) => Some(ty),
2346 let params_len = params.len();
2348 let ty = hir_ty_to_ty(cx.tcx, hir_ty);
// exactly 2 type args (K, V) means the hasher was left implicit
2350 if match_path(path, &paths::HASHMAP) && params_len == 2 {
2351 Some(ImplicitHasherType::HashMap(
2354 snippet(cx, params[0].span, "K"),
2355 snippet(cx, params[1].span, "V"),
2357 } else if match_path(path, &paths::HASHSET) && params_len == 1 {
2358 Some(ImplicitHasherType::HashSet(
2361 snippet(cx, params[0].span, "T"),
2371 fn type_name(&self) -> &'static str {
2373 ImplicitHasherType::HashMap(..) => "HashMap",
2374 ImplicitHasherType::HashSet(..) => "HashSet",
2378 fn type_arguments(&self) -> String {
2380 ImplicitHasherType::HashMap(.., ref k, ref v) => format!("{}, {}", k, v),
2381 ImplicitHasherType::HashSet(.., ref t) => format!("{}", t),
2385 fn ty(&self) -> Ty<'tcx> {
2387 ImplicitHasherType::HashMap(_, ty, ..) | ImplicitHasherType::HashSet(_, ty, ..) => ty,
2391 fn span(&self) -> Span {
2393 ImplicitHasherType::HashMap(span, ..) | ImplicitHasherType::HashSet(span, ..) => span,
// Collects every implicit-hasher `HashMap`/`HashSet` type appearing inside
// a type tree (the `walk_ty` recursion line is elided in this listing).
2398 struct ImplicitHasherTypeVisitor<'a, 'tcx> {
2399 cx: &'a LateContext<'a, 'tcx>,
2400 found: Vec<ImplicitHasherType<'tcx>>,
2403 impl<'a, 'tcx> ImplicitHasherTypeVisitor<'a, 'tcx> {
2404 fn new(cx: &'a LateContext<'a, 'tcx>) -> Self {
2405 Self { cx, found: vec![] }
2409 impl<'a, 'tcx> Visitor<'tcx> for ImplicitHasherTypeVisitor<'a, 'tcx> {
2410 type Map = Map<'tcx>;
2412 fn visit_ty(&mut self, t: &'tcx hir::Ty<'_>) {
2413 if let Some(target) = ImplicitHasherType::new(self.cx, t) {
2414 self.found.push(target);
// no nested bodies need visiting for pure type collection
2420 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
2421 NestedVisitorMap::None
2425 /// Looks for default-hasher-dependent constructors like `HashMap::new`.
// Accumulates span -> replacement-text suggestions (e.g. `HashMap::new()`
// -> `HashMap::default()`) for constructors whose result type matches the
// visited `target`.
// NOTE(review): elided listing — the `Self { ... }` fields of `new`, the
// `if_chain!` tail, inner closing braces, and the `walk_expr` recursion
// are not visible between these lines.
2426 struct ImplicitHasherConstructorVisitor<'a, 'b, 'tcx> {
2427 cx: &'a LateContext<'a, 'tcx>,
2428 body: &'a TypeckTables<'tcx>,
2429 target: &'b ImplicitHasherType<'tcx>,
2430 suggestions: BTreeMap<Span, String>,
2433 impl<'a, 'b, 'tcx> ImplicitHasherConstructorVisitor<'a, 'b, 'tcx> {
2434 fn new(cx: &'a LateContext<'a, 'tcx>, target: &'b ImplicitHasherType<'tcx>) -> Self {
2439 suggestions: BTreeMap::new(),
2444 impl<'a, 'b, 'tcx> Visitor<'tcx> for ImplicitHasherConstructorVisitor<'a, 'b, 'tcx> {
2445 type Map = Map<'tcx>;
// swap in the typeck tables of the visited body, restoring on exit so
// nested bodies resolve expression types correctly
2447 fn visit_body(&mut self, body: &'tcx Body<'_>) {
2448 let prev_body = self.body;
2449 self.body = self.cx.tcx.body_tables(body.id());
2450 walk_body(self, body);
2451 self.body = prev_body;
2454 fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
2456 if let ExprKind::Call(ref fun, ref args) = e.kind;
2457 if let ExprKind::Path(QPath::TypeRelative(ref ty, ref method)) = fun.kind;
2458 if let TyKind::Path(QPath::Resolved(None, ref ty_path)) = ty.kind;
// only rewrite constructors whose result is exactly the target type
2460 if !same_tys(self.cx, self.target.ty(), self.body.expr_ty(e)) {
2464 if match_path(ty_path, &paths::HASHMAP) {
2465 if method.ident.name == sym!(new) {
2467 .insert(e.span, "HashMap::default()".to_string());
2468 } else if method.ident.name == sym!(with_capacity) {
2469 self.suggestions.insert(
2472 "HashMap::with_capacity_and_hasher({}, Default::default())",
2473 snippet(self.cx, args[0].span, "capacity"),
2477 } else if match_path(ty_path, &paths::HASHSET) {
2478 if method.ident.name == sym!(new) {
2480 .insert(e.span, "HashSet::default()".to_string());
2481 } else if method.ident.name == sym!(with_capacity) {
2482 self.suggestions.insert(
2485 "HashSet::with_capacity_and_hasher({}, Default::default())",
2486 snippet(self.cx, args[0].span, "capacity"),
// visit nested bodies so constructors inside closures are found too
2497 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
2498 NestedVisitorMap::OnlyBodies(self.cx.tcx.hir())
2502 declare_clippy_lint! {
2503 /// **What it does:** Checks for casts of `&T` to `&mut T` anywhere in the code.
2505 /// **Why is this bad?** It’s basically guaranteed to be undefined behaviour.
2506 /// `UnsafeCell` is the only way to obtain aliasable data that is considered
2509 /// **Known problems:** None.
2515 /// *(r as *const _ as *mut _) += 1;
2520 /// Instead consider using interior mutability types.
2523 /// use std::cell::UnsafeCell;
2525 /// fn x(r: &UnsafeCell<i32>) {
2531 pub CAST_REF_TO_MUT,
2533 "a cast of reference to a mutable pointer"
// stateless pass: a single lint, no configuration
2536 declare_lint_pass!(RefToMut => [CAST_REF_TO_MUT]);
2538 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for RefToMut {
2539 fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr<'_>) {
2541 if let ExprKind::Unary(UnOp::UnDeref, e) = &expr.kind;
2542 if let ExprKind::Cast(e, t) = &e.kind;
2543 if let TyKind::Ptr(MutTy { mutbl: Mutability::Mut, .. }) = t.kind;
2544 if let ExprKind::Cast(e, t) = &e.kind;
2545 if let TyKind::Ptr(MutTy { mutbl: Mutability::Not, .. }) = t.kind;
2546 if let ty::Ref(..) = cx.tables.node_type(e.hir_id).kind;
2552 "casting `&T` to `&mut T` may cause undefined behavior, consider instead using an `UnsafeCell`",