1 use crate::utils::span_lint;
2 use rustc::hir::intravisit as visit;
4 use rustc::lint::{LateContext, LateLintPass, LintArray, LintPass};
5 use rustc::middle::expr_use_visitor::*;
6 use rustc::middle::mem_categorization::{cmt_, Categorization};
7 use rustc::ty::layout::LayoutOf;
8 use rustc::ty::{self, Ty};
9 use rustc::util::nodemap::HirIdSet;
10 use rustc::{declare_tool_lint, lint_array};
11 use syntax::source_map::Span;
14 pub too_large_for_stack: u64,
17 /// **What it does:** Checks for usage of `Box<T>` where an unboxed `T` would
20 /// **Why is this bad?** This is an unnecessary allocation, and bad for
21 /// performance. It is only necessary to allocate if you wish to move the box
24 /// **Known problems:** None.
29 /// let x = Box::new(1);
31 /// println!("{}", *x);
34 declare_clippy_lint! {
37 "using `Box<T>` where unnecessary"
// Returns `true` for `Box<T>` where the pointee `T` is not a trait object.
// Only such boxes are candidates for this lint: `Box<dyn Trait>` cannot be
// replaced by an unboxed (sized) local, so it is excluded up front.
40 fn is_non_trait_box(ty: Ty<'_>) -> bool {
41 ty.is_box() && !ty.boxed_ty().is_trait()
// Per-function visitor state for the escape analysis: collects boxed locals
// that, so far, look safe to unbox. (The `set` field holding those ids is
// elided from this view.)
44 struct EscapeDelegate<'a, 'tcx: 'a> {
// Lint context, used for layout queries (`is_large_box`) and reporting.
45 cx: &'a LateContext<'a, 'tcx>,
// Size threshold in bytes: boxed payloads larger than this are assumed to be
// boxed deliberately to keep them off the stack, so they are not linted.
47 too_large_for_stack: u64,
50 impl LintPass for Pass {
// Registers BOXED_LOCAL as the single lint provided by this pass.
51 fn get_lints(&self) -> LintArray {
52 lint_array!(BOXED_LOCAL)
// NOTE(review): the body of `name` is elided from this view; presumably it
// returns this pass's name string — confirm against the full file.
55 fn name(&self) -> &'static str {
60 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for Pass {
// check_fn (signature partially elided in this view): analyzes one function
// body for boxed locals that never escape it, and reports each survivor.
63 cx: &LateContext<'a, 'tcx>,
64 _: visit::FnKind<'tcx>,
70 // If the method is an impl for a trait, don't warn
71 let parent_id = cx.tcx.hir().get_parent_item(hir_id);
72 let parent_node = cx.tcx.hir().find_by_hir_id(parent_id);
74 if let Some(Node::Item(item)) = parent_node {
// A `Some(..)` in the fifth `Impl` field marks an `impl Trait for ...`;
// trait impls are skipped because the trait dictates the signature.
75 if let ItemKind::Impl(_, _, _, _, Some(..), _, _) = item.node {
80 let mut v = EscapeDelegate {
82 set: HirIdSet::default(),
83 too_large_for_stack: self.too_large_for_stack,
// Walk the body with ExprUseVisitor; the Delegate callbacks on
// EscapeDelegate populate and prune `v.set` as uses are observed.
86 let fn_def_id = cx.tcx.hir().local_def_id_from_hir_id(hir_id);
87 let region_scope_tree = &cx.tcx.region_scope_tree(fn_def_id);
88 ExprUseVisitor::new(&mut v, cx.tcx, cx.param_env, region_scope_tree, cx.tables, None).consume_body(body);
// Every id still in the set after the walk is a boxed local that never
// escaped: lint it. (The span_lint call around these args is elided here.)
94 cx.tcx.hir().span_by_hir_id(node),
95 "local variable doesn't need to be boxed here",
101 impl<'a, 'tcx> Delegate<'tcx> for EscapeDelegate<'a, 'tcx> {
// A direct move of a tracked local means the value is consumed or escapes,
// so it can no longer be proven safe to unbox — drop it from the set.
102 fn consume(&mut self, _: HirId, _: Span, cmt: &cmt_<'tcx>, mode: ConsumeMode) {
103 if let Categorization::Local(lid) = cmt.cat {
104 if let Move(DirectRefMove) = mode {
105 // moved out or in. clearly can't be localized
106 self.set.remove(&lid);
110 fn matched_pat(&mut self, _: &Pat, _: &cmt_<'tcx>, _: MatchMode) {}
// Pattern-binding consumption: this is where candidate boxed locals are
// first inserted into the set (and rebinding transfers membership).
111 fn consume_pat(&mut self, consume_pat: &Pat, cmt: &cmt_<'tcx>, _: ConsumeMode) {
112 let map = &self.cx.tcx.hir();
113 if map.is_argument(map.hir_to_node_id(consume_pat.hir_id)) {
114 // Skip closure arguments
115 if let Some(Node::Expr(..)) = map.find_by_hir_id(
116 map.get_parent_node_by_hir_id(consume_pat.hir_id))
// A boxed function argument qualifies if its payload is a concrete type
// and not over the size threshold.
120 if is_non_trait_box(cmt.ty) && !self.is_large_box(cmt.ty) {
121 self.set.insert(consume_pat.hir_id);
// `let x = box ...;`-style locals: track the binding when the initializer
// is a box expression and the boxed type qualifies.
125 if let Categorization::Rvalue(..) = cmt.cat {
126 let id = map.hir_to_node_id(cmt.hir_id);
127 if let Some(Node::Stmt(st)) = map.find(map.get_parent_node(id)) {
128 if let StmtKind::Local(ref loc) = st.node {
129 if let Some(ref ex) = loc.init {
130 if let ExprKind::Box(..) = ex.node {
131 if is_non_trait_box(cmt.ty) && !self.is_large_box(cmt.ty) {
133 self.set.insert(consume_pat.hir_id);
137 // TODO "foo".to_owned() and friends
143 if let Categorization::Local(lid) = cmt.cat {
144 if self.set.contains(&lid) {
145 // let y = x where x is known
146 // remove x, insert y
147 self.set.insert(consume_pat.hir_id);
148 self.set.remove(&lid);
// borrow() callback (signature partially elided in this view): decides,
// from the LoanCause, whether a borrow lets the tracked box escape.
159 loan_cause: LoanCause,
161 if let Categorization::Local(lid) = cmt.cat {
164 // Used without autodereffing (i.e. x.clone())
168 // foo(&x) where no extra autoreffing is happening
171 // `match x` can move
// Matching on the discriminant can move the value out, so stop tracking.
172 LoanCause::MatchDiscriminant => {
173 self.set.remove(&lid);
176 // do nothing for matches, etc. These can't escape
// Uninitialized declarations and plain mutations don't affect whether a
// boxed local escapes, so both callbacks are intentionally no-ops.
181 fn decl_without_init(&mut self, _: HirId, _: Span) {}
182 fn mutate(&mut self, _: HirId, _: Span, _: &cmt_<'tcx>, _: MutateMode) {}
185 impl<'a, 'tcx> EscapeDelegate<'a, 'tcx> {
// Returns `true` when the boxed payload's computed layout size (in bytes)
// exceeds the configured `too_large_for_stack` threshold. A failed layout
// query maps to 0 — i.e. "not large" — so the lint stays conservative only
// in the sense of still firing; NOTE(review): confirm that is intended.
186 fn is_large_box(&self, ty: Ty<'tcx>) -> bool {
187 // Large types need to be boxed to avoid stack
190 self.cx.layout_of(ty.boxed_ty()).ok().map_or(0, |l| l.size.bytes()) > self.too_large_for_stack