4 use clippy_utils::diagnostics::span_lint_and_sugg;
5 use clippy_utils::source::snippet;
6 use clippy_utils::ty::{for_each_top_level_late_bound_region, is_copy};
7 use clippy_utils::{is_self, is_self_ty};
8 use core::ops::ControlFlow;
9 use if_chain::if_chain;
11 use rustc_data_structures::fx::FxHashSet;
12 use rustc_errors::Applicability;
14 use rustc_hir::intravisit::FnKind;
15 use rustc_hir::{BindingAnnotation, Body, FnDecl, HirId, Impl, ItemKind, MutTy, Mutability, Node, PatKind};
16 use rustc_lint::{LateContext, LateLintPass};
17 use rustc_middle::ty::layout::LayoutOf;
18 use rustc_middle::ty::{self, RegionKind};
19 use rustc_session::{declare_tool_lint, impl_lint_pass};
20 use rustc_span::def_id::LocalDefId;
21 use rustc_span::{sym, Span};
22 use rustc_target::spec::abi::Abi;
23 use rustc_target::spec::Target;
// Declares `TRIVIALLY_COPY_PASS_BY_REF`: flags `&T` parameters where `T` is
// `Copy` and small enough (<= the configured `trivial_copy_size_limit`) that
// passing by value would be at least as efficient.
// NOTE(review): this view elides several original lines (the embedded line
// numbers skip, e.g. 43 -> 45, 57 -> 62, 63 -> 69), so parts of the doc text,
// the "Example" section, the lint group, and the macro's closing brace are not
// visible here — do not treat this span as the complete macro invocation.
25 declare_clippy_lint! {
27 /// Checks for functions taking arguments by reference, where
28 /// the argument type is `Copy` and small enough to be more efficient to always
31 /// ### Why is this bad?
32 /// In many calling conventions instances of structs will
33 /// be passed through registers if they fit into two or less general purpose
36 /// ### Known problems
37 /// This lint is target register size dependent, it is
38 /// limited to 32-bit to try and reduce portability problems between 32 and
39 /// 64-bit, but if you are compiling for 8 or 16-bit targets then the limit
40 /// will be different.
42 /// The configuration option `trivial_copy_size_limit` can be set to override
43 /// this limit for a project.
45 /// This lint attempts to allow passing arguments by reference if a reference
46 /// to that argument is returned. This is implemented by comparing the lifetime
47 /// of the argument and return value for equality. However, this can cause
48 /// false positives in cases involving multiple lifetimes that are bounded by
51 /// Also, it does not take account of other similar cases where getting memory addresses
52 /// matters; namely, returning the pointer to the argument in question,
53 /// and passing the argument, as both references and pointers,
54 /// to a function that needs the memory address. For further details, refer to
55 /// [this issue](https://github.com/rust-lang/rust-clippy/issues/5953)
56 /// that explains a real case in which this false positive
57 /// led to an **undefined behavior** introduced with unsafe code.
62 /// fn foo(v: &u32) {}
69 #[clippy::version = "pre 1.29.0"]
70 pub TRIVIALLY_COPY_PASS_BY_REF,
72 "functions taking small copyable arguments by reference"
// Declares `LARGE_TYPES_PASSED_BY_VALUE`: flags `Copy` parameters passed by
// value whose size exceeds the configured `value_max_size` threshold (see
// `check_poly_fn` below), suggesting `&T` instead. Deliberately skips exported
// items (API breakage), `mut` bindings, and `self` — per the doc text here.
// NOTE(review): elided lines (76, 82, 87-89, 92, 94-97, 101, 104, 106-107)
// mean the "Example"/"Use instead" fencing, the lint group, and the macro's
// closing brace are not visible in this view.
75 declare_clippy_lint! {
77 /// Checks for functions taking arguments by value, where
78 /// the argument type is `Copy` and large enough to be worth considering
79 /// passing by reference. Does not trigger if the function is being exported,
80 /// because that might induce API breakage, if the parameter is declared as mutable,
81 /// or if the argument is a `self`.
83 /// ### Why is this bad?
84 /// Arguments passed by value might result in an unnecessary
85 /// shallow copy, taking up more space in the stack and requiring a call to
86 /// `memcpy`, which can be expensive.
90 /// #[derive(Clone, Copy)]
91 /// struct TooLarge([u8; 2048]);
93 /// fn foo(v: TooLarge) {}
98 /// # #[derive(Clone, Copy)]
99 /// # struct TooLarge([u8; 2048]);
100 /// fn foo(v: &TooLarge) {}
102 #[clippy::version = "1.49.0"]
103 pub LARGE_TYPES_PASSED_BY_VALUE,
105 "functions taking large arguments by value"
// Shared state/configuration for both lints above.
// NOTE(review): the struct body is truncated in this view (embedded numbers
// skip 109 -> 112 and 112 -> 115) — `check_poly_fn` below reads
// `self.ref_min_size` and `self.value_max_size`, so those size-threshold
// fields and the closing brace live in the elided lines.
108 #[derive(Copy, Clone)]
109 pub struct PassByRefOrValue {
// When true, skip items reachable from outside the crate, since changing a
// parameter from `&T` to `T` (or vice versa) is an API break — enforced by
// the `cx.access_levels.is_exported(def_id)` early-out in `check_poly_fn`.
112 avoid_breaking_exported_api: bool,
// NOTE(review): this impl block is heavily elided in this view (embedded line
// numbers skip throughout, e.g. 129 -> 136, 153 -> 156, 172 -> 176,
// 228 -> 240). Comments below describe only what the visible lines establish;
// the constructor's full parameter list, the `value_max_size` computation,
// several match arms, and all closing braces are in the elided lines.
115 impl<'tcx> PassByRefOrValue {
// Constructor: `None` limits are replaced by target-derived defaults.
117 ref_min_size: Option<u64>,
119 avoid_breaking_exported_api: bool,
// Default `ref_min_size`: twice the register byte width, with the register
// width capped at 32 bits for portability (per the lint's "Known problems").
122 let ref_min_size = ref_min_size.unwrap_or_else(|| {
123 let bit_width = u64::from(target.pointer_width);
124 // Cap the calculated bit width at 32-bits to reduce
125 // portability problems between 32 and 64-bit targets
126 let bit_width = cmp::min(bit_width, 32);
127 #[expect(clippy::integer_division)]
128 let byte_width = bit_width / 8;
129 // Use a limit of 2 times the register byte width
136 avoid_breaking_exported_api,
// Core check shared by trait methods and free/impl fns: inspects every input
// of the (possibly polymorphic) signature and emits one of the two lints.
// `span`: the fn's span for proc-macro detection, `None` for trait items.
140 fn check_poly_fn(&mut self, cx: &LateContext<'tcx>, def_id: LocalDefId, decl: &FnDecl<'_>, span: Option<Span>) {
// Exported items are skipped when configured, to avoid API breakage.
141 if self.avoid_breaking_exported_api && cx.access_levels.is_exported(def_id) {
145 let fn_sig = cx.tcx.fn_sig(def_id);
146 let fn_body = cx.enclosing_body.map(|id| cx.tcx.hir().body(id));
148 // Gather all the lifetimes found in the output type which may affect whether
149 // `TRIVIALLY_COPY_PASS_BY_REF` should be linted.
150 let mut output_regions = FxHashSet::default();
151 for_each_top_level_late_bound_region(fn_sig.skip_binder().output(), |region| -> ControlFlow<!> {
152 output_regions.insert(region);
153 ControlFlow::Continue(())
// Walk HIR inputs zipped with the signature's input types (rebound so late-
// bound regions stay attached). NOTE(review): the second zip operand is in an
// elided line — presumably `decl.inputs`; confirm against the full file.
156 for (index, (input, ty)) in iter::zip(
158 fn_sig.skip_binder().inputs().iter().map(|&ty| fn_sig.rebind(ty)),
162 // All spans generated from a proc-macro invocation are the same...
164 Some(s) if s == input.span => continue,
168 match *ty.skip_binder().kind() {
// Case 1: immutable reference to a small `Copy` type ->
// `TRIVIALLY_COPY_PASS_BY_REF`, unless the reference's lifetime can
// escape through the return type (checked against `output_regions`).
169 ty::Ref(lt, ty, Mutability::Not) => {
171 RegionKind::ReLateBound(index, region)
// NOTE(review): "®ion" on the next line is mojibake — `&r` was mangled into
// the HTML entity `&reg;` (U+00AE). It must read `&region` to compile; fix at
// the encoding level, not here.
172 if index.as_u32() == 0 && output_regions.contains(®ion) =>
176 // Early bound regions on functions are either from the containing item, are bounded by another
177 // lifetime, or are used as a bound for a type or lifetime.
178 RegionKind::ReEarlyBound(..) => continue,
// Only lint if the pointee is `Copy` (the `is_copy` check is in an elided
// line — TODO confirm), fits the layout, and is at or under the threshold.
182 let ty = cx.tcx.erase_late_bound_regions(fn_sig.rebind(ty));
184 && let Some(size) = cx.layout_of(ty).ok().map(|l| l.size.bytes())
185 && size <= self.ref_min_size
186 && let hir::TyKind::Rptr(_, MutTy { ty: decl_ty, .. }) = input.kind
// Suggestion text: for `self` params the replacement differs (elided branch);
// otherwise the pointee's source snippet is used verbatim.
188 let value_type = if fn_body.and_then(|body| body.params.get(index)).map_or(false, is_self) {
191 snippet(cx, decl_ty.span, "_").into()
195 TRIVIALLY_COPY_PASS_BY_REF,
197 &format!("this argument ({} byte) is passed by reference, but would be more efficient if passed by value (limit: {} byte)", size, self.ref_min_size),
198 "consider passing by value instead",
// Unspecified: changing `&T` -> `T` may need call-site edits too.
200 Applicability::Unspecified,
// Case 2: by-value aggregate -> `LARGE_TYPES_PASSED_BY_VALUE` when it
// exceeds `value_max_size` and is not `self` / `mut`-bound.
205 ty::Adt(_, _) | ty::Array(_, _) | ty::Tuple(_) => {
206 // if function has a body and parameter is annotated with mut, ignore
207 if let Some(param) = fn_body.and_then(|body| body.params.get(index)) {
208 match param.pat.kind {
209 PatKind::Binding(BindingAnnotation::Unannotated, _, _, _) => {},
213 let ty = cx.tcx.erase_late_bound_regions(ty);
217 if !is_self_ty(input);
218 if let Some(size) = cx.layout_of(ty).ok().map(|l| l.size.bytes());
219 if size > self.value_max_size;
223 LARGE_TYPES_PASSED_BY_VALUE,
225 &format!("this argument ({} byte) is passed by value, but might be more efficient if passed by reference (limit: {} byte)", size, self.value_max_size),
226 "consider passing by reference instead",
// Suggested fix simply prefixes the parameter's type snippet with `&`.
227 format!("&{}", snippet(cx, input.span, "_")),
228 Applicability::MaybeIncorrect,
// Registers both lints under this single pass.
240 impl_lint_pass!(PassByRefOrValue => [TRIVIALLY_COPY_PASS_BY_REF, LARGE_TYPES_PASSED_BY_VALUE]);
// NOTE(review): this impl is elided in this view (embedded numbers skip, e.g.
// 249 -> 255, 275 -> 282, 290 -> 296): `check_fn`'s full signature, several
// early-return bodies, match arms, and closing braces are not visible.
242 impl<'tcx> LateLintPass<'tcx> for PassByRefOrValue {
// Trait-item hook: run the shared check on trait method declarations
// (no span -> proc-macro span comparison in `check_poly_fn` is skipped).
243 fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) {
// Never lint code produced by macro expansion.
244 if item.span.from_expansion() {
248 if let hir::TraitItemKind::Fn(method_sig, _) = &item.kind {
249 self.check_poly_fn(cx, item.def_id, method_sig.decl, None);
// Fn hook (`check_fn` — its `fn` line and `FnKind`/`span`/`hir_id` params are
// elided): filters out cases where the ABI or attributes make the suggestion
// wrong, then delegates to `check_poly_fn`.
255 cx: &LateContext<'tcx>,
257 decl: &'tcx FnDecl<'_>,
258 _body: &'tcx Body<'_>,
262 if span.from_expansion() {
267 FnKind::ItemFn(.., header) => {
// Non-Rust ABIs (e.g. `extern "C"`) have fixed parameter-passing
// contracts — changing ref/value would alter the foreign interface.
268 if header.abi != Abi::Rust {
271 let attrs = cx.tcx.hir().attrs(hir_id);
// Skip proc-macro derive entry points and `#[inline(always)]` fns,
// where the signature is constrained or the copy is likely elided.
273 if let Some(meta_items) = a.meta_item_list() {
274 if a.has_name(sym::proc_macro_derive)
275 || (a.has_name(sym::inline) && attr::list_contains_name(&meta_items, sym::always))
282 FnKind::Method(..) => (),
// Closures have no stable declared signature to suggest against.
283 FnKind::Closure => return,
286 // Exclude non-inherent impls
287 if let Some(Node::Item(item)) = cx.tcx.hir().find(cx.tcx.hir().get_parent_node(hir_id)) {
// Trait impls / trait definitions must keep the trait's signature.
290 ItemKind::Impl(Impl { of_trait: Some(_), .. }) | ItemKind::Trait(..)
296 self.check_poly_fn(cx, cx.tcx.hir().local_def_id(hir_id), decl, Some(span));