4 use clippy_utils::diagnostics::span_lint_and_sugg;
5 use clippy_utils::is_self_ty;
6 use clippy_utils::source::snippet;
7 use clippy_utils::ty::is_copy;
8 use if_chain::if_chain;
10 use rustc_errors::Applicability;
12 use rustc_hir::intravisit::FnKind;
13 use rustc_hir::{BindingAnnotation, Body, FnDecl, HirId, Impl, ItemKind, MutTy, Mutability, Node, PatKind};
14 use rustc_lint::{LateContext, LateLintPass};
16 use rustc_session::{declare_tool_lint, impl_lint_pass};
17 use rustc_span::{sym, Span};
18 use rustc_target::abi::LayoutOf;
19 use rustc_target::spec::abi::Abi;
20 use rustc_target::spec::Target;
// NOTE(review): this chunk looks like a partial extraction of clippy's
// `pass_by_ref_or_value` lint source. Each line carries a stray leading
// number (apparently the original file's line index) and several original
// lines are missing, so this macro invocation is incomplete as shown:
// the example block, the lint severity/category line, and the closing
// brace of `declare_clippy_lint!` are not visible here. Restore from the
// upstream file before compiling -- TODO confirm against rust-clippy.
22 declare_clippy_lint! {
23 /// **What it does:** Checks for functions taking arguments by reference, where
24 /// the argument type is `Copy` and small enough to be more efficient to always
27 /// **Why is this bad?** In many calling conventions instances of structs will
28 /// be passed through registers if they fit into two or less general purpose
31 /// **Known problems:** This lint is target register size dependent, it is
32 /// limited to 32-bit to try and reduce portability problems between 32 and
33 /// 64-bit, but if you are compiling for 8 or 16-bit targets then the limit
34 /// will be different.
36 /// The configuration option `trivial_copy_size_limit` can be set to override
37 /// this limit for a project.
39 /// This lint attempts to allow passing arguments by reference if a reference
40 /// to that argument is returned. This is implemented by comparing the lifetime
41 /// of the argument and return value for equality. However, this can cause
42 /// false positives in cases involving multiple lifetimes that are bounded by
45 /// Also, it does not take account of other similar cases where getting memory addresses
46 /// matters; namely, returning the pointer to the argument in question,
47 /// and passing the argument, as both references and pointers,
48 /// to a function that needs the memory address. For further details, refer to
49 /// [this issue](https://github.com/rust-lang/rust-clippy/issues/5953)
50 /// that explains a real case in which this false positive
51 /// led to an **undefined behaviour** introduced with unsafe code.
57 /// fn foo(v: &u32) {}
64 pub TRIVIALLY_COPY_PASS_BY_REF,
66 "functions taking small copyable arguments by reference"
// NOTE(review): companion lint to TRIVIALLY_COPY_PASS_BY_REF -- flags the
// opposite direction (large `Copy` types passed by value). Like the block
// above, this `declare_clippy_lint!` invocation is truncated in this
// extraction: the severity/category line and closing brace are missing,
// and the embedded leading numbers are extraction artifacts, not code.
// The two doc examples below are presumably the "bad" (by value) and
// "good" (by reference) variants -- the separating prose lines are elided.
69 declare_clippy_lint! {
70 /// **What it does:** Checks for functions taking arguments by value, where
71 /// the argument type is `Copy` and large enough to be worth considering
72 /// passing by reference. Does not trigger if the function is being exported,
73 /// because that might induce API breakage, if the parameter is declared as mutable,
74 /// or if the argument is a `self`.
76 /// **Why is this bad?** Arguments passed by value might result in an unnecessary
77 /// shallow copy, taking up more space in the stack and requiring a call to
78 /// `memcpy`, which can be expensive.
83 /// #[derive(Clone, Copy)]
84 /// struct TooLarge([u8; 2048]);
87 /// fn foo(v: TooLarge) {}
90 /// #[derive(Clone, Copy)]
91 /// struct TooLarge([u8; 2048]);
94 /// fn foo(v: &TooLarge) {}
96 pub LARGE_TYPES_PASSED_BY_VALUE,
98 "functions taking large arguments by value"
// Lint pass state shared by both lints above. The field list is elided in
// this extraction; based on the uses below (`self.ref_min_size`,
// `self.value_max_size` in `check_poly_fn`), it presumably holds the two
// configured byte-size thresholds -- TODO confirm against the full file.
101 #[derive(Copy, Clone)]
102 pub struct PassByRefOrValue {
107 impl<'tcx> PassByRefOrValue {
// Constructor: takes an optional override for the pass-by-ref size limit
// (`trivial_copy_size_limit` config), the configured max size for
// pass-by-value, and the compilation target (used to derive a default
// limit from the pointer width). The tail of this function -- where the
// computed `ref_min_size` and `value_max_size` are packed into `Self` --
// is elided in this extraction.
108 pub fn new(ref_min_size: Option<u64>, value_max_size: u64, target: &Target) -> Self {
// Default limit: derived from the target's register (pointer) width
// when no explicit `trivial_copy_size_limit` is configured.
109 let ref_min_size = ref_min_size.unwrap_or_else(|| {
110 let bit_width = u64::from(target.pointer_width);
111 // Cap the calculated bit width at 32-bits to reduce
112 // portability problems between 32 and 64-bit targets
113 let bit_width = cmp::min(bit_width, 32);
114 #[allow(clippy::integer_division)]
115 let byte_width = bit_width / 8;
116 // Use a limit of 2 times the register byte width
// Core check shared by both lints: walks every parameter of a function,
// pairing the HIR declaration (`decl.inputs`) with the semantic signature
// (`fn_sig.inputs()`), and emits one of the two lints depending on whether
// the parameter is a small `Copy` type taken by `&` (TRIVIALLY_COPY_PASS_BY_REF)
// or a large `Copy`-ish aggregate taken by value (LARGE_TYPES_PASSED_BY_VALUE).
// NOTE(review): many lines of this function (the surrounding `match`,
// `if_chain!` openers/closers, and the span-lint call sites' span arguments)
// are elided in this extraction; the comments below annotate only the
// fragments that are visible.
126 fn check_poly_fn(&mut self, cx: &LateContext<'tcx>, hir_id: HirId, decl: &FnDecl<'_>, span: Option<Span>) {
127 let fn_def_id = cx.tcx.hir().local_def_id(hir_id);
129 let fn_sig = cx.tcx.fn_sig(fn_def_id);
// Erase late-bound regions so input/output lifetimes can be compared directly.
130 let fn_sig = cx.tcx.erase_late_bound_regions(fn_sig);
// Body (if any) is needed to inspect parameter patterns (e.g. `mut` bindings).
132 let fn_body = cx.enclosing_body.map(|id| cx.tcx.hir().body(id));
134 for (index, (input, &ty)) in iter::zip(decl.inputs, fn_sig.inputs()).enumerate() {
135 // All spans generated from a proc-macro invocation are the same...
// Bail out entirely on proc-macro-generated functions (detected by a
// parameter span equal to some other span -- the match head is elided here).
137 Some(s) if s == input.span => return,
// --- Case 1: shared reference parameter -> candidate for pass-by-value ---
142 ty::Ref(input_lt, ty, Mutability::Not) => {
143 // Use lifetimes to determine if we're returning a reference to the
144 // argument. In that case we can't switch to pass-by-value as the
145 // argument will not live long enough.
146 let output_lts = match *fn_sig.output().kind() {
147 ty::Ref(output_lt, _, _) => vec![output_lt],
// An ADT return may carry the argument's lifetime in its substs.
148 ty::Adt(_, substs) => substs.regions().collect(),
// (elided: default arm and the opening of the `if_chain!` below)
153 if !output_lts.contains(input_lt);
// Only suggest by-value when the pointee's layout is known and small enough.
155 if let Some(size) = cx.layout_of(ty).ok().map(|l| l.size.bytes());
156 if size <= self.ref_min_size;
157 if let hir::TyKind::Rptr(_, MutTy { ty: decl_ty, .. }) = input.kind;
// Suggestion text: keep `self` as-is, otherwise reuse the declared pointee type.
159 let value_type = if is_self_ty(decl_ty) {
162 snippet(cx, decl_ty.span, "_").into()
166 TRIVIALLY_COPY_PASS_BY_REF,
168 &format!("this argument ({} byte) is passed by reference, but would be more efficient if passed by value (limit: {} byte)", size, self.ref_min_size),
169 "consider passing by value instead",
// Not machine-applicable: the by-value switch can change semantics (addresses).
171 Applicability::Unspecified,
// --- Case 2: by-value aggregate -> candidate for pass-by-reference ---
177 ty::Adt(_, _) | ty::Array(_, _) | ty::Tuple(_) => {
178 // if function has a body and parameter is annotated with mut, ignore
179 if let Some(param) = fn_body.and_then(|body| body.params.get(index)) {
180 match param.pat.kind {
// Only a plain (unannotated) binding is eligible; other patterns
// (e.g. `mut x`, `ref x`) are skipped -- the skip arm is elided here.
181 PatKind::Binding(BindingAnnotation::Unannotated, _, _, _) => {},
// Exported items are excluded to avoid suggesting API breakage.
187 if !cx.access_levels.is_exported(hir_id);
189 if !is_self_ty(input);
190 if let Some(size) = cx.layout_of(ty).ok().map(|l| l.size.bytes());
191 if size > self.value_max_size;
195 LARGE_TYPES_PASSED_BY_VALUE,
197 &format!("this argument ({} byte) is passed by value, but might be more efficient if passed by reference (limit: {} byte)", size, self.value_max_size),
198 "consider passing by reference instead",
199 format!("&{}", snippet(cx, input.span, "_")),
// MaybeIncorrect: callers would also need `&` added at call sites.
200 Applicability::MaybeIncorrect,
// Register both lints under the single `PassByRefOrValue` pass.
212 impl_lint_pass!(PassByRefOrValue => [TRIVIALLY_COPY_PASS_BY_REF, LARGE_TYPES_PASSED_BY_VALUE]);
// NOTE(review): this impl is truncated in the extraction -- `check_fn` ends
// mid-body at the final visible line, and several statements/braces between
// the visible lines are elided. Comments below annotate only what is shown.
214 impl<'tcx> LateLintPass<'tcx> for PassByRefOrValue {
// Trait items: run the shared check on trait method signatures.
215 fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) {
// Skip macro-expanded items -- spans would point into the macro.
216 if item.span.from_expansion() {
220 if let hir::TraitItemKind::Fn(method_sig, _) = &item.kind {
221 self.check_poly_fn(cx, item.hir_id(), &*method_sig.decl, None);
// Free functions, methods, and closures (signature elided around these lines).
227 cx: &LateContext<'tcx>,
229 decl: &'tcx FnDecl<'_>,
230 _body: &'tcx Body<'_>,
234 if span.from_expansion() {
239 FnKind::ItemFn(.., header, _) => {
// Non-Rust ABIs have their own calling conventions; don't second-guess them.
240 if header.abi != Abi::Rust {
243 let attrs = cx.tcx.hir().attrs(hir_id);
// Skip proc-macro entry points and `#[inline(always)]` functions -- the
// attribute loop head around these lines is elided in this extraction.
245 if let Some(meta_items) = a.meta_item_list() {
246 if a.has_name(sym::proc_macro_derive)
247 || (a.has_name(sym::inline) && attr::list_contains_name(&meta_items, sym::always))
254 FnKind::Method(..) => (),
// Closures are never linted.
255 FnKind::Closure => return,
258 // Exclude non-inherent impls
259 if let Some(Node::Item(item)) = cx.tcx.hir().find(cx.tcx.hir().get_parent_node(hir_id)) {
// Trait impls and trait declarations can't change signatures freely.
262 ItemKind::Impl(Impl { of_trait: Some(_), .. }) | ItemKind::Trait(..)
268 self.check_poly_fn(cx, hir_id, decl, Some(span));