1 use rustc::hir::intravisit as visit;
3 use rustc::lint::{LateContext, LateLintPass, LintArray, LintPass};
4 use rustc::middle::expr_use_visitor::*;
5 use rustc::middle::mem_categorization::{cmt_, Categorization};
6 use rustc::ty::layout::LayoutOf;
7 use rustc::ty::{self, Ty};
8 use rustc::util::nodemap::HirIdSet;
9 use rustc::{declare_tool_lint, impl_lint_pass};
10 use syntax::source_map::Span;
12 use crate::utils::span_lint;
// Lint pass state for `BOXED_LOCAL`. Carries the user-configurable size
// threshold that decides when a boxed value is *legitimately* boxed.
14 #[derive(Copy, Clone)]
15 pub struct BoxedLocal {
// Size threshold in bytes: boxed types whose layout exceeds this are left
// alone (see `EscapeDelegate::is_large_box` below), since large values are
// boxed deliberately to avoid stack overflows.
16 pub too_large_for_stack: u64,
// Lint declaration for BOXED_LOCAL: fires on `Box<T>` locals that never
// actually need the heap allocation.
19 declare_clippy_lint! {
20 /// **What it does:** Checks for usage of `Box<T>` where an unboxed `T` would
23 /// **Why is this bad?** This is an unnecessary allocation, and bad for
24 /// performance. It is only necessary to allocate if you wish to move the box
27 /// **Known problems:** None.
32 /// let x = Box::new(1);
34 /// println!("{}", *x);
39 "using `Box<T>` where unnecessary"
// Returns `true` for `Box<T>` where `T` is a concrete (non-trait-object)
// type. Only such boxes are candidates for unboxing: a `Box<dyn Trait>` is
// unsized and cannot live on the stack. Note the short-circuit order —
// `boxed_ty()` is only meaningful after `is_box()` has confirmed the type.
42 fn is_non_trait_box(ty: Ty<'_>) -> bool {
43 ty.is_box() && !ty.boxed_ty().is_trait()
// Expression-use visitor delegate that tracks which boxed locals never
// "escape" (are never moved, captured, or borrowed in a way that needs the
// box). Survivors of the walk are reported by `check_fn`.
// NOTE(review): a `set: HirIdSet` field is used elsewhere in this file
// (`set.insert`/`set.remove`) but its declaration is not visible in this
// view — presumably declared between these fields; verify.
46 struct EscapeDelegate<'a, 'tcx: 'a> {
47 cx: &'a LateContext<'a, 'tcx>,
// Same threshold as `BoxedLocal::too_large_for_stack`, copied in at
// construction time (see `check_fn`).
49 too_large_for_stack: u64,
52 impl_lint_pass!(BoxedLocal => [BOXED_LOCAL]);
54 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for BoxedLocal {
// Entry point: runs once per function body. Walks the body with an
// `ExprUseVisitor` + `EscapeDelegate`, then lints every boxed local that
// survived the walk (i.e. was never moved/captured/borrowed in a way
// requiring the box).
57 cx: &LateContext<'a, 'tcx>,
58 _: visit::FnKind<'tcx>,
// Skip methods that implement a trait: the trait dictates the signature,
// so the author may not be free to drop the `Box`.
64 // If the method is an impl for a trait, don't warn.
65 let parent_id = cx.tcx.hir().get_parent_item(hir_id);
66 let parent_node = cx.tcx.hir().find_by_hir_id(parent_id);
68 if let Some(Node::Item(item)) = parent_node {
// `Some(..)` in the trait-ref position means this impl is `impl Trait for T`
// (a trait impl), not an inherent impl — presumably an early return follows
// here; that line is not visible in this view.
69 if let ItemKind::Impl(_, _, _, _, Some(..), _, _) = item.node {
// Fresh delegate with an empty candidate set; the visitor below populates
// and prunes it.
74 let mut v = EscapeDelegate {
76 set: HirIdSet::default(),
77 too_large_for_stack: self.too_large_for_stack,
// The use-visitor needs the fn's def-id, param-env, region scope tree and
// type tables to categorize every consume/borrow/mutate in the body.
80 let fn_def_id = cx.tcx.hir().local_def_id_from_hir_id(hir_id);
81 let region_scope_tree = &cx.tcx.region_scope_tree(fn_def_id);
82 ExprUseVisitor::new(&mut v, cx.tcx, fn_def_id, cx.param_env, region_scope_tree, cx.tables, None).consume_body(body);
// Everything still in `v.set` never needed its box — report each one.
// (The span_lint call head and the loop over `v.set` are elided from this
// view; `node` is presumably the surviving HirId being iterated.)
88 cx.tcx.hir().span_by_hir_id(node),
89 "local variable doesn't need to be boxed here",
// ExprUseVisitor callbacks. Strategy: `consume_pat` *adds* boxed locals as
// candidates at their binding site; every other callback *removes* a local
// from the set as soon as it is used in a way that genuinely needs the box.
95 impl<'a, 'tcx> Delegate<'tcx> for EscapeDelegate<'a, 'tcx> {
96 fn consume(&mut self, _: HirId, _: Span, cmt: &cmt_<'tcx>, mode: ConsumeMode) {
97 if let Categorization::Local(lid) = cmt.cat {
// A direct move or a move into a closure capture means the value leaves
// this function's control — the box may be required, so drop the candidate.
98 if let Move(DirectRefMove) | Move(CaptureMove) = mode {
99 // moved out or in. clearly can't be localized
100 self.set.remove(&lid);
// Intentionally empty: pattern matches alone don't disqualify a candidate.
104 fn matched_pat(&mut self, _: &Pat, _: &cmt_<'tcx>, _: MatchMode) {}
// Called at binding sites (fn arguments and `let` patterns). This is where
// candidates are *inserted*.
105 fn consume_pat(&mut self, consume_pat: &Pat, cmt: &cmt_<'tcx>, _: ConsumeMode) {
106 let map = &self.cx.tcx.hir();
// Case 1: the pattern is a fn argument of type `Box<T>`.
107 if map.is_argument(map.hir_to_node_id(consume_pat.hir_id)) {
// Closure arguments are skipped: an argument whose parent node is an
// expression belongs to a closure, not to the fn being checked —
// presumably an early return follows here (elided from this view).
108 // Skip closure arguments
109 if let Some(Node::Expr(..)) = map.find_by_hir_id(map.get_parent_node_by_hir_id(consume_pat.hir_id)) {
// Candidate only if it's a concrete box and not large enough to justify
// heap allocation.
112 if is_non_trait_box(cmt.ty) && !self.is_large_box(cmt.ty) {
113 self.set.insert(consume_pat.hir_id);
// Case 2: `let x = box ...;` — an rvalue freshly boxed in a local
// initializer is a candidate under the same type/size conditions.
117 if let Categorization::Rvalue(..) = cmt.cat {
118 if let Some(Node::Stmt(st)) = map.find_by_hir_id(map.get_parent_node_by_hir_id(cmt.hir_id)) {
119 if let StmtKind::Local(ref loc) = st.node {
120 if let Some(ref ex) = loc.init {
121 if let ExprKind::Box(..) = ex.node {
122 if is_non_trait_box(cmt.ty) && !self.is_large_box(cmt.ty) {
124 self.set.insert(consume_pat.hir_id);
// Only literal `box`/`Box::new` initializers are recognized today;
// conversions that also allocate (e.g. `to_owned`) are not yet handled.
128 // TODO "foo".to_owned() and friends
// Case 3: rebinding one candidate local to another (`let y = x;`) —
// transfer membership from the old binding to the new one.
134 if let Categorization::Local(lid) = cmt.cat {
135 if self.set.contains(&lid) {
136 // let y = x where x is known
137 // remove x, insert y
138 self.set.insert(consume_pat.hir_id);
139 self.set.remove(&lid);
// Borrow callback (signature partially elided from this view): only a
// `match` discriminant borrow disqualifies a candidate, because matching
// can move out of the scrutinee; other borrow causes are harmless here.
150 loan_cause: LoanCause,
152 if let Categorization::Local(lid) = cmt.cat {
155 // Used without autoderef-ing (i.e., `x.clone()`).
159 // `foo(&x)` where no extra autoref-ing is happening.
162 // `match x` can move.
163 LoanCause::MatchDiscriminant => {
164 self.set.remove(&lid);
167 // Do nothing for matches, etc. These can't escape.
// Declarations without initializers and plain mutations never require the
// box, so these callbacks are deliberate no-ops.
172 fn decl_without_init(&mut self, _: HirId, _: Span) {}
173 fn mutate(&mut self, _: HirId, _: Span, _: &cmt_<'tcx>, _: MutateMode) {}
176 impl<'a, 'tcx> EscapeDelegate<'a, 'tcx> {
// Returns `true` when the boxed payload exceeds the configured stack-size
// threshold, i.e. the box is a justified heap allocation and must not be
// linted. Caller must ensure `ty` is a box (`ty.boxed_ty()` is only valid
// then) — both call sites above guard with `is_non_trait_box` first.
177 fn is_large_box(&self, ty: Ty<'tcx>) -> bool {
178 // Large types need to be boxed to avoid stack overflows.
// If layout computation fails, fall back to size 0 — i.e. treat the type
// as small and keep it eligible for the lint.
180 self.cx.layout_of(ty.boxed_ty()).ok().map_or(0, |l| l.size.bytes()) > self.too_large_for_stack