2 use rustc::hir::intravisit as visit;
3 use rustc::hir::map::Node::{NodeExpr, NodeStmt};
5 use rustc::middle::expr_use_visitor::*;
6 use rustc::middle::mem_categorization::{cmt, Categorization};
8 use rustc::ty::layout::TargetDataLayout;
9 use rustc::traits::Reveal;
10 use rustc::util::nodemap::NodeSet;
11 use syntax::ast::NodeId;
12 use syntax::codemap::Span;
16 pub too_large_for_stack: u64,
19 /// **What it does:** Checks for usage of `Box<T>` where an unboxed `T` would
22 /// **Why is this bad?** This is an unnecessary allocation, and bad for
23 /// performance. It is only necessary to allocate if you wish to move the box
26 /// **Known problems:** None.
31 /// let x = Box::new(1);
33 /// println!("{}", *x);
39 "using `Box<T>` where unnecessary"
/// Returns `true` for a `Box<T>` whose pointee `T` is not a trait object.
/// Trait objects are unsized and must stay boxed, so they are excluded
/// from the lint up front.
// NOTE(review): the closing brace of this fn is not visible in this excerpt.
42 fn is_non_trait_box(ty: ty::Ty) -> bool {
43 ty.is_box() && !ty.boxed_ty().is_trait()
/// State for the expression-use walk: records which boxed locals are
/// candidates for the "doesn't need to be boxed" lint while the body is
/// visited, removing any candidate that is observed to escape.
46 struct EscapeDelegate<'a, 'tcx: 'a> {
// NOTE(review): a `set: NodeSet` field is used as `self.set` throughout the
// `Delegate` impl below but its declaration is missing from this excerpt —
// lines appear to have been dropped by the extraction.
48 tcx: ty::TyCtxt<'a, 'tcx, 'tcx>,
// Typeck results for the body being checked (adjustments are read in `borrow`).
49 tables: &'a ty::TypeckTables<'tcx>,
// Data layout of the compilation target; needed to compute concrete type sizes.
50 target: TargetDataLayout,
// Size threshold in bytes: boxed payloads larger than this are deliberately
// left alone, since boxing them is a legitimate way to keep them off the stack.
51 too_large_for_stack: u64,
54 impl LintPass for Pass {
// Registers the single lint emitted by this pass.
55 fn get_lints(&self) -> LintArray {
56 lint_array!(BOXED_LOCAL)
60 impl<'a, 'tcx> LateLintPass<'a, 'tcx> for Pass {
// NOTE(review): the method header (presumably `fn check_fn`) and several body
// lines are missing from this excerpt; `node_id`, `body`, and `node` below are
// bound by lines not visible here.
63 cx: &LateContext<'a, 'tcx>,
64 _: visit::FnKind<'tcx>,
70 // we store the infcx because it is expensive to recreate
71 // the context each time.
// Build the delegate that tracks non-escaping boxed locals. The `set`, `tcx`,
// and `tables` initializers are missing from this excerpt.
72 let mut v = EscapeDelegate {
76 target: TargetDataLayout::parse(cx.sess()),
77 too_large_for_stack: self.too_large_for_stack,
// `ExprUseVisitor` needs an inference context and the region maps for this fn.
80 let infcx = cx.tcx.borrowck_fake_infer_ctxt(body.id());
81 let fn_def_id = cx.tcx.hir.local_def_id(node_id);
82 let region_maps = &cx.tcx.region_maps(fn_def_id);
// Walk the whole body, feeding consume/borrow/mutate events into `v`.
84 let mut vis = ExprUseVisitor::new(&mut v, region_maps, &infcx);
85 vis.consume_body(body);
// Presumably iterating the ids that survived in the delegate's set — each one
// is a boxed local that never escaped, so it is reported. (The loop header is
// missing from this excerpt; confirm against the full file.)
91 cx.tcx.hir.span(node),
92 "local variable doesn't need to be boxed here");
// Expression-use callbacks. The invariant maintained across all methods:
// `self.set` holds node ids of boxed locals that, so far, could live on the
// stack; any observed escape (move, reference without autoderef, match
// discriminant) removes the id. NOTE(review): many lines are missing from
// this excerpt — closing braces and parts of `borrow` in particular.
97 impl<'a, 'tcx: 'a> Delegate<'tcx> for EscapeDelegate<'a, 'tcx> {
// A tracked local that is moved by value escapes the fn's control — drop it.
98 fn consume(&mut self, _: NodeId, _: Span, cmt: cmt<'tcx>, mode: ConsumeMode) {
99 if let Categorization::Local(lid) = cmt.cat {
100 if self.set.contains(&lid) {
101 if let Move(DirectRefMove) = mode {
102 // moved out or in. clearly can't be localized
103 self.set.remove(&lid);
108 fn matched_pat(&mut self, _: &Pat, _: cmt<'tcx>, _: MatchMode) {}
// Seeds the candidate set: fn arguments (but not closure arguments) and
// `let x = box ...;` initializers whose pointee is a small, non-trait type.
109 fn consume_pat(&mut self, consume_pat: &Pat, cmt: cmt<'tcx>, _: ConsumeMode) {
110 let map = &self.tcx.hir;
111 if map.is_argument(consume_pat.id) {
112 // Skip closure arguments
113 if let Some(NodeExpr(..)) = map.find(map.get_parent_node(consume_pat.id)) {
116 if is_non_trait_box(cmt.ty) && !self.is_large_box(cmt.ty) {
117 self.set.insert(consume_pat.id);
// Rvalue case: only a literal `box` expression initializing a `let` binding
// qualifies — walk pattern -> stmt -> decl -> local -> init expression.
121 if let Categorization::Rvalue(..) = cmt.cat {
122 if let Some(NodeStmt(st)) = map.find(map.get_parent_node(cmt.id)) {
123 if let StmtDecl(ref decl, _) = st.node {
124 if let DeclLocal(ref loc) = decl.node {
125 if let Some(ref ex) = loc.init {
126 if let ExprBox(..) = ex.node {
127 if is_non_trait_box(cmt.ty) && !self.is_large_box(cmt.ty) {
129 self.set.insert(consume_pat.id);
133 // TODO "foo".to_owned() and friends
// Rebinding transfers candidacy from the old name to the new one.
140 if let Categorization::Local(lid) = cmt.cat {
141 if self.set.contains(&lid) {
142 // let y = x where x is known
143 // remove x, insert y
144 self.set.insert(consume_pat.id);
145 self.set.remove(&lid);
// NOTE(review): the `fn borrow(...)` signature is incomplete in this excerpt;
// only its last parameter is visible. The body inspects the adjustment on the
// borrowing expression to decide whether the borrow dereferenced the box.
157 loan_cause: LoanCause
159 use rustc::ty::adjustment::Adjust;
161 if let Categorization::Local(lid) = cmt.cat {
162 if self.set.contains(&lid) {
163 if let Some(&Adjust::DerefRef { autoderefs, .. }) =
// Autoref without any autoderef means the `Box` itself was borrowed
// (e.g. `x.clone()` on the box) — the box is really used, so un-track it.
168 if LoanCause::AutoRef == loan_cause {
171 self.set.remove(&lid); // Used without autodereffing (i.e. x.clone())
174 span_bug!(cmt.span, "Unknown adjusted AutoRef");
176 } else if LoanCause::AddrOf == loan_cause {
178 if let Some(&Adjust::DerefRef { autoderefs, .. }) =
183 .get_parent_node(borrow_id))
186 // foo(&x) where no extra autoreffing is happening
187 self.set.remove(&lid);
191 } else if LoanCause::MatchDiscriminant == loan_cause {
192 self.set.remove(&lid); // `match x` can move
194 // do nothing for matches, etc. These can't escape
198 fn decl_without_init(&mut self, _: NodeId, _: Span) {}
// Mutation of a tracked local does not by itself make the box escape.
199 fn mutate(&mut self, _: NodeId, _: Span, _: cmt<'tcx>, _: MutateMode) {}
202 impl<'a, 'tcx: 'a> EscapeDelegate<'a, 'tcx> {
/// Whether the payload of the box `ty` exceeds `too_large_for_stack` bytes.
/// Such boxes are deliberately not linted: boxing is a legitimate way to
/// keep a large value off the stack.
// NOTE(review): callers must have verified `ty.is_box()` first — both call
// sites above do, via `is_non_trait_box` — otherwise `boxed_ty()` is wrong.
203 fn is_large_box(&self, ty: ty::Ty<'tcx>) -> bool {
204 // Large types need to be boxed to avoid stack
207 let inner = ty.boxed_ty();
// The layout query needs an inference context; `Reveal::All` is presumably
// used so the query sees fully-resolved types — confirm against rustc docs.
// NOTE(review): the `Err` arm of this `if let` and the closing braces are
// missing from this excerpt.
208 self.tcx.infer_ctxt((), Reveal::All).enter(|infcx| if let Ok(layout) = inner.layout(&infcx) {
209 let size = layout.size(&self.target);
210 size.bytes() > self.too_large_for_stack