clippy_lints/src/atomic_ordering.rs

use crate::utils::{match_def_path, span_lint_and_help};
use if_chain::if_chain;
use rustc_hir::def_id::DefId;
use rustc_hir::{Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty;
use rustc_session::{declare_lint_pass, declare_tool_lint};

declare_clippy_lint! {
    /// **What it does:** Checks for usage of invalid atomic
    /// ordering in atomic loads/stores and memory fences.
    ///
    /// **Why is this bad?** Using an invalid atomic ordering
    /// will cause a panic at run-time.
    ///
    /// **Known problems:** None.
    ///
    /// **Example:**
    /// ```rust,no_run
    /// # use std::sync::atomic::{self, AtomicBool, Ordering};
    ///
    /// let x = AtomicBool::new(true);
    ///
    /// let _ = x.load(Ordering::Release);
    /// let _ = x.load(Ordering::AcqRel);
    ///
    /// x.store(false, Ordering::Acquire);
    /// x.store(false, Ordering::AcqRel);
    ///
    /// atomic::fence(Ordering::Relaxed);
    /// atomic::compiler_fence(Ordering::Relaxed);
    /// ```
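    ///
    /// By contrast, an illustrative counterpart using orderings the operations
    /// accept, matching the lint's suggestions (`Acquire`/`SeqCst`/`Relaxed` for
    /// loads, `Release`/`SeqCst`/`Relaxed` for stores, anything but `Relaxed`
    /// for fences):
    /// ```rust
    /// # use std::sync::atomic::{self, AtomicBool, Ordering};
    /// let x = AtomicBool::new(true);
    ///
    /// let _ = x.load(Ordering::Acquire);
    /// x.store(false, Ordering::Release);
    ///
    /// atomic::fence(Ordering::SeqCst);
    /// atomic::compiler_fence(Ordering::AcqRel);
    /// ```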
    pub INVALID_ATOMIC_ORDERING,
    correctness,
    "usage of invalid atomic ordering in atomic loads/stores and memory fences"
}

declare_lint_pass!(AtomicOrdering => [INVALID_ATOMIC_ORDERING]);

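// The `core::sync::atomic` types whose `load`/`store` calls this lint inspects.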
const ATOMIC_TYPES: [&str; 12] = [
    "AtomicBool",
    "AtomicI8",
    "AtomicI16",
    "AtomicI32",
    "AtomicI64",
    "AtomicIsize",
    "AtomicPtr",
    "AtomicU8",
    "AtomicU16",
    "AtomicU32",
    "AtomicU64",
    "AtomicUsize",
];

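// Returns `true` if the type of `expr` is one of the atomic types listed in `ATOMIC_TYPES`.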
fn type_is_atomic(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
    if let ty::Adt(&ty::AdtDef { did, .. }, _) = cx.typeck_results().expr_ty(expr).kind {
        ATOMIC_TYPES
            .iter()
            .any(|ty| match_def_path(cx, did, &["core", "sync", "atomic", ty]))
    } else {
        false
    }
}

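// Returns `true` if `did` resolves to one of the named variants of `core::sync::atomic::Ordering`.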
fn match_ordering_def_path(cx: &LateContext<'_>, did: DefId, orderings: &[&str]) -> bool {
    orderings
        .iter()
        .any(|ordering| match_def_path(cx, did, &["core", "sync", "atomic", "Ordering", ordering]))
}

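// Lints `load`/`store` calls on atomic types that pass an ordering the operation
// does not support: `Release`/`AcqRel` for loads, `Acquire`/`AcqRel` for stores.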
fn check_atomic_load_store(cx: &LateContext<'_>, expr: &Expr<'_>) {
    if_chain! {
        if let ExprKind::MethodCall(ref method_path, _, args, _) = &expr.kind;
        let method = method_path.ident.name.as_str();
        if type_is_atomic(cx, &args[0]);
        if method == "load" || method == "store";
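        // `args[0]` is the receiver; the ordering is the second argument for `load`
        // and the third for `store`, which also takes the value being stored.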
        let ordering_arg = if method == "load" { &args[1] } else { &args[2] };
        if let ExprKind::Path(ref ordering_qpath) = ordering_arg.kind;
        if let Some(ordering_def_id) = cx.qpath_res(ordering_qpath, ordering_arg.hir_id).opt_def_id();
        then {
            if method == "load" &&
                match_ordering_def_path(cx, ordering_def_id, &["Release", "AcqRel"]) {
                span_lint_and_help(
                    cx,
                    INVALID_ATOMIC_ORDERING,
                    ordering_arg.span,
                    "atomic loads cannot have `Release` and `AcqRel` ordering",
                    None,
                    "consider using ordering modes `Acquire`, `SeqCst` or `Relaxed`"
                );
            } else if method == "store" &&
                match_ordering_def_path(cx, ordering_def_id, &["Acquire", "AcqRel"]) {
                span_lint_and_help(
                    cx,
                    INVALID_ATOMIC_ORDERING,
                    ordering_arg.span,
                    "atomic stores cannot have `Acquire` and `AcqRel` ordering",
                    None,
                    "consider using ordering modes `Release`, `SeqCst` or `Relaxed`"
                );
            }
        }
    }
}

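// Lints calls to `core::sync::atomic::{fence, compiler_fence}` that pass
// `Ordering::Relaxed`, which both functions reject with a panic at run-time.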
fn check_memory_fence(cx: &LateContext<'_>, expr: &Expr<'_>) {
    if_chain! {
        if let ExprKind::Call(ref func, ref args) = expr.kind;
        if let ExprKind::Path(ref func_qpath) = func.kind;
        if let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id();
        if ["fence", "compiler_fence"]
            .iter()
            .any(|func| match_def_path(cx, def_id, &["core", "sync", "atomic", func]));
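        // The ordering is the sole argument to both `fence` and `compiler_fence`.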
        if let ExprKind::Path(ref ordering_qpath) = &args[0].kind;
        if let Some(ordering_def_id) = cx.qpath_res(ordering_qpath, args[0].hir_id).opt_def_id();
        if match_ordering_def_path(cx, ordering_def_id, &["Relaxed"]);
        then {
            span_lint_and_help(
                cx,
                INVALID_ATOMIC_ORDERING,
                args[0].span,
                "memory fences cannot have `Relaxed` ordering",
                None,
                "consider using ordering modes `Acquire`, `Release`, `AcqRel` or `SeqCst`"
            );
        }
    }
}

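// Run both checks on every expression visited by the late lint pass.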
impl<'tcx> LateLintPass<'tcx> for AtomicOrdering {
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
        check_atomic_load_store(cx, expr);
        check_memory_fence(cx, expr);
    }
}