1 use rustc::hir::intravisit as visit;
3 use rustc::lint::{LateContext, LateLintPass, LintArray, LintPass};
4 use rustc::middle::expr_use_visitor::*;
5 use rustc::middle::mem_categorization::{cmt_, Categorization};
6 use rustc::ty::layout::LayoutOf;
7 use rustc::ty::{self, Ty};
8 use rustc::util::nodemap::HirIdSet;
9 use rustc::{declare_tool_lint, lint_array};
10 use syntax::source_map::Span;
12 use crate::utils::span_lint;
pub struct Pass {
    // Threshold (in bytes): a boxed `T` whose layout size exceeds this is
    // considered legitimately boxed (to keep large values off the stack) and
    // is not linted. Compared against in `EscapeDelegate::is_large_box`.
    // NOTE(review): the `pub struct Pass {` header was missing from the
    // extracted source and has been reconstructed — confirm against the
    // original file.
    pub too_large_for_stack: u64,
}
18 declare_clippy_lint! {
19 /// **What it does:** Checks for usage of `Box<T>` where an unboxed `T` would
22 /// **Why is this bad?** This is an unnecessary allocation, and bad for
23 /// performance. It is only necessary to allocate if you wish to move the box
26 /// **Known problems:** None.
31 /// let x = Box::new(1);
33 /// println!("{}", *x);
38 "using `Box<T>` where unnecessary"
41 fn is_non_trait_box(ty: Ty<'_>) -> bool {
42 ty.is_box() && !ty.boxed_ty().is_trait()
45 struct EscapeDelegate<'a, 'tcx: 'a> {
46 cx: &'a LateContext<'a, 'tcx>,
48 too_large_for_stack: u64,
51 impl LintPass for Pass {
52 fn get_lints(&self) -> LintArray {
53 lint_array!(BOXED_LOCAL)
56 fn name(&self) -> &'static str {
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for Pass {
    // NOTE(review): several lines of this impl are missing from the extract
    // (the rest of the `check_fn` signature, closing braces, and parts of the
    // lint-emission loop). Comments below annotate only the visible code.
    fn check_fn(
        &mut self,
        cx: &LateContext<'a, 'tcx>,
        _: visit::FnKind<'tcx>,
        // If the method is an impl for a trait, don't warn.
        let parent_id = cx.tcx.hir().get_parent_item(hir_id);
        let parent_node = cx.tcx.hir().find_by_hir_id(parent_id);

        if let Some(Node::Item(item)) = parent_node {
            // `Some(..)` in the trait-ref position of `ItemKind::Impl` means
            // this is a trait impl: the signature is dictated by the trait, so
            // the author may not be free to un-box, and we bail out.
            if let ItemKind::Impl(_, _, _, _, Some(..), _, _) = item.node {

        // Walk the function body with an `ExprUseVisitor`; the delegate below
        // records boxed locals that never actually escape.
        let mut v = EscapeDelegate {
            set: HirIdSet::default(),
            too_large_for_stack: self.too_large_for_stack,

        let fn_def_id = cx.tcx.hir().local_def_id_from_hir_id(hir_id);
        let region_scope_tree = &cx.tcx.region_scope_tree(fn_def_id);
        ExprUseVisitor::new(&mut v, cx.tcx, cx.param_env, region_scope_tree, cx.tables, None).consume_body(body);

        // Every id still in the set survived the walk without escaping:
        // report each as an unnecessary box.
            cx.tcx.hir().span_by_hir_id(node),
            "local variable doesn't need to be boxed here",
impl<'a, 'tcx> Delegate<'tcx> for EscapeDelegate<'a, 'tcx> {
    // NOTE(review): lines are missing throughout this impl in the extract
    // (closing braces, the head of `borrow`, parts of its match). Comments
    // annotate only the visible code.

    /// A value was consumed. A tracked local that is moved (in or out) cannot
    /// safely be un-boxed, so it is dropped from the candidate set.
    fn consume(&mut self, _: HirId, _: Span, cmt: &cmt_<'tcx>, mode: ConsumeMode) {
        if let Categorization::Local(lid) = cmt.cat {
            if let Move(DirectRefMove) = mode {
                // Moved out or in. Clearly can't be localized.
                self.set.remove(&lid);

    fn matched_pat(&mut self, _: &Pat, _: &cmt_<'tcx>, _: MatchMode) {}

    /// A pattern binding consumed a value — this is where candidates ENTER the
    /// set (function arguments and `let x = box ...` initializers).
    fn consume_pat(&mut self, consume_pat: &Pat, cmt: &cmt_<'tcx>, _: ConsumeMode) {
        let map = &self.cx.tcx.hir();
        if map.is_argument(map.hir_to_node_id(consume_pat.hir_id)) {
            // Skip closure arguments
            if let Some(Node::Expr(..)) = map.find_by_hir_id(map.get_parent_node_by_hir_id(consume_pat.hir_id)) {

            // Non-closure argument of type `Box<T>` (T concrete, not too
            // large): candidate for un-boxing.
            if is_non_trait_box(cmt.ty) && !self.is_large_box(cmt.ty) {
                self.set.insert(consume_pat.hir_id);

        // `let x = box ...;` — an rvalue box initializer makes `x` a candidate.
        if let Categorization::Rvalue(..) = cmt.cat {
            let id = map.hir_to_node_id(cmt.hir_id);
            if let Some(Node::Stmt(st)) = map.find(map.get_parent_node(id)) {
                if let StmtKind::Local(ref loc) = st.node {
                    if let Some(ref ex) = loc.init {
                        if let ExprKind::Box(..) = ex.node {
                            if is_non_trait_box(cmt.ty) && !self.is_large_box(cmt.ty) {
                                self.set.insert(consume_pat.hir_id);
        // TODO "foo".to_owned() and friends
        // `let y = x;` where `x` is already tracked: candidacy transfers
        // from `x` to `y`.
        if let Categorization::Local(lid) = cmt.cat {
            if self.set.contains(&lid) {
                // let y = x where x is known
                // remove x, insert y
                self.set.insert(consume_pat.hir_id);
                self.set.remove(&lid);

        // (fragment of `fn borrow`'s parameter list)
        loan_cause: LoanCause,
        if let Categorization::Local(lid) = cmt.cat {
            // Used without autoderef-ing (i.e., `x.clone()`).
            // `foo(&x)` where no extra autoref-ing is happening.
            // `match x` can move.
            LoanCause::MatchDiscriminant => {
                self.set.remove(&lid);
            // Do nothing for matches, etc. These can't escape.

    fn decl_without_init(&mut self, _: HirId, _: Span) {}
    fn mutate(&mut self, _: HirId, _: Span, _: &cmt_<'tcx>, _: MutateMode) {}
184 impl<'a, 'tcx> EscapeDelegate<'a, 'tcx> {
185 fn is_large_box(&self, ty: Ty<'tcx>) -> bool {
186 // Large types need to be boxed to avoid stack overflows.
188 self.cx.layout_of(ty.boxed_ty()).ok().map_or(0, |l| l.size.bytes()) > self.too_large_for_stack