1 //! A classic liveness analysis based on dataflow over the AST. Computes,
2 //! for each local variable in a function, whether that variable is live
3 //! at a given point. Program execution points are identified by their
8 //! The basic model is that each local variable is assigned an index. We
9 //! represent sets of local variables using a vector indexed by this
10 //! index. The value in the vector is either 0, indicating the variable
11 //! is dead, or the ID of an expression that uses the variable.
13 //! We conceptually walk over the AST in reverse execution order. If we
14 //! find a use of a variable, we add it to the set of live variables. If
15 //! we find an assignment to a variable, we remove it from the set of live
16 //! variables. When we have to merge two flows, we take the union of
17 //! those two flows -- if the variable is live on both paths, we simply
18 //! pick one ID. In the event of loops, we continue doing this until a
19 //! fixed point is reached.
21 //! ## Checking initialization
23 //! At the function entry point, all variables must be dead. If this is
24 //! not the case, we can report an error using the ID found in the set of
25 //! live variables, which identifies a use of the variable which is not
26 //! dominated by an assignment.
30 //! After each explicit move, the variable must be dead.
32 //! ## Computing last uses
34 //! Any use of the variable where the variable is dead afterwards is a
37 //! # Implementation details
39 //! The actual implementation contains two (nested) walks over the AST.
40 //! The outer walk has the job of building up the ir_maps instance for the
41 //! enclosing function. On the way down the tree, it identifies those AST
42 //! nodes and variable IDs that will be needed for the liveness analysis
43 //! and assigns them contiguous IDs. The liveness ID for an AST node is
44 //! called a `live_node` (it's a newtype'd `u32`) and the ID for a variable
45 //! is called a `variable` (another newtype'd `u32`).
47 //! On the way back up the tree, as we are about to exit from a function
48 //! declaration we allocate a `liveness` instance. Now that we know
49 //! precisely how many nodes and variables we need, we can allocate all
50 //! the various arrays that we will need to precisely the right size. We then
51 //! perform the actual propagation on the `liveness` instance.
53 //! This propagation is encoded in the various `propagate_through_*()`
54 //! methods. It effectively does a reverse walk of the AST; whenever we
55 //! reach a loop node, we iterate until a fixed point is reached.
57 //! ## The `RWU` struct
59 //! At each live node `N`, we track three pieces of information for each
60 //! variable `V` (these are encapsulated in the `RWU` struct):
62 //! - `reader`: the `LiveNode` ID of some node which will read the value
63 //! that `V` holds on entry to `N`. Formally: a node `M` such
64 //! that there exists a path `P` from `N` to `M` where `P` does not
65 //! write `V`. If the `reader` is `None`, then the current
66 //! value will never be read (the variable is dead, essentially).
68 //! - `writer`: the `LiveNode` ID of some node which will write the
69 //! variable `V` and which is reachable from `N`. Formally: a node `M`
70 //! such that there exists a path `P` from `N` to `M` and `M` writes
71 //! `V`. If the `writer` is `None`, then there is no writer
72 //! of `V` that follows `N`.
74 //! - `used`: a boolean value indicating whether `V` is *used*. We
75 //! distinguish a *read* from a *use* in that a *use* is some read that
76 //! is not just used to generate a new value. For example, `x += 1` is
77 //! a read but not a use. This is used to generate better warnings.
79 //! ## Special nodes and variables
81 //! We generate various special nodes for various, well, special purposes.
82 //! These are described in the `Liveness` struct.
84 use self::LiveNodeKind::*;
87 use rustc_ast::InlineAsmOptions;
88 use rustc_data_structures::fx::FxIndexMap;
89 use rustc_errors::Applicability;
91 use rustc_hir::def::*;
92 use rustc_hir::def_id::LocalDefId;
93 use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
94 use rustc_hir::{Expr, HirId, HirIdMap, HirIdSet};
95 use rustc_index::vec::IndexVec;
96 use rustc_middle::hir::map::Map;
97 use rustc_middle::ty::query::Providers;
98 use rustc_middle::ty::{self, DefIdTree, RootVariableMinCaptureList, Ty, TyCtxt};
99 use rustc_session::lint;
100 use rustc_span::symbol::{kw, sym, Symbol};
101 use rustc_span::Span;
103 use std::collections::VecDeque;
105 use std::io::prelude::*;
// Dense index types used throughout the analysis: `Variable` numbers each
// local/param/upvar, `LiveNode` numbers each interesting program point.
// Both are newtype'd `u32`s (see the module docs above).
// NOTE(review): listing is elided — the macros' closing braces (original
// lines 114-116 and 120-122) are not visible here.
111 rustc_index::newtype_index! {
112 pub struct Variable {
113 DEBUG_FORMAT = "v({})",
117 rustc_index::newtype_index! {
118 pub struct LiveNode {
119 DEBUG_FORMAT = "ln({})",
// What kind of program point a `LiveNode` stands for; used only for debug
// logging (see `live_node_kind_to_string` below).
// NOTE(review): the enum header (original lines 124-125) and the remaining
// variants (`UpvarNode`, `ClosureNode`, `ExitNode` — all matched on below)
// are elided from this listing.
123 #[derive(Copy, Clone, PartialEq, Debug)]
126 ExprNode(Span, HirId),
127 VarDefNode(Span, HirId),
// Renders a `LiveNodeKind` (plus the span it covers, via the session's
// source map) for `debug!` output. Purely diagnostic; never user-facing.
132 fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_>) -> String {
133 let sm = tcx.sess.source_map();
// NOTE(review): the `match lnk {` header (original line 134) is elided here.
135 UpvarNode(s) => format!("Upvar node [{}]", sm.span_to_diagnostic_string(s)),
136 ExprNode(s, _) => format!("Expr node [{}]", sm.span_to_diagnostic_string(s)),
137 VarDefNode(s, _) => format!("Var def node [{}]", sm.span_to_diagnostic_string(s)),
138 ClosureNode => "Closure node".to_owned(),
139 ExitNode => "Exit node".to_owned(),
// Query entry point: runs the liveness pass over every item-like in the
// given module by driving a fresh `IrMaps` visitor (which does the rest in
// its `visit_body`).
143 fn check_mod_liveness(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
144 tcx.hir().visit_item_likes_in_module(module_def_id, &mut IrMaps::new(tcx).as_deep_visitor());
// Registers `check_mod_liveness` with the query system; all other providers
// are left untouched via the struct-update syntax.
147 pub fn provide(providers: &mut Providers) {
148 *providers = Providers { check_mod_liveness, ..*providers };
151 // ______________________________________________________________________
154 // This is the first pass and the one that drives the main
155 // computation. It walks up and down the IR once. On the way down,
156 // we count for each function the number of variables as well as
157 // liveness nodes. A liveness node is basically an expression or
158 // capture clause that does something of interest: either it has
159 // interesting control flow or it uses/defines a local variable.
161 // On the way back up, at each function node we create liveness sets
162 // (we now know precisely how big to make our various vectors and so
163 // forth) and then do the data-flow propagation to compute the set
164 // of live variables at each program point.
166 // Finally, we run back over the IR one last time and, using the
167 // computed liveness, check various safety conditions. For example,
168 // there must be no live nodes at the definition site for a variable
169 // unless it has an initializer. Similarly, each non-mutable local
170 // variable must not be assigned if there is some successor
171 // assignment. And so forth.
// NOTE(review): heavily elided region. The first derive presumably belongs
// to the `LocalInfo` struct (original lines ~179-183, not shown), whose
// fields `id`, `name`, `is_shorthand` are used elsewhere in this file —
// confirm against the full source. The second derive belongs to `VarKind`;
// its `Local(LocalInfo)` variant (matched below in `add_variable`) is also
// elided from this listing.
178 #[derive(Copy, Clone, Debug)]
185 #[derive(Copy, Clone, Debug)]
187 Param(HirId, Symbol),
189 Upvar(HirId, Symbol),
// First-pass state: maps HIR ids to the dense `LiveNode`/`Variable` indices
// allocated on the way down the tree, plus the reverse tables.
// NOTE(review): the `tcx` field (original line 193) is elided from this
// listing — the methods below clearly read `self.tcx`.
192 struct IrMaps<'tcx> {
// HIR id of an interesting node -> its allocated live node.
194 live_node_map: HirIdMap<LiveNode>,
// HIR id of a binding -> its allocated variable index.
195 variable_map: HirIdMap<Variable>,
// Closure expression id -> the captures recorded by `set_captures`.
196 capture_info_map: HirIdMap<Rc<Vec<CaptureInfo>>>,
// Reverse tables: variable index -> kind, live node index -> kind.
197 var_kinds: IndexVec<Variable, VarKind>,
198 lnks: IndexVec<LiveNode, LiveNodeKind>,
// Constructs an empty `IrMaps` for one body.
202 fn new(tcx: TyCtxt<'tcx>) -> IrMaps<'tcx> {
205 live_node_map: HirIdMap::default(),
206 variable_map: HirIdMap::default(),
207 capture_info_map: Default::default(),
208 var_kinds: IndexVec::new(),
209 lnks: IndexVec::new(),
// Allocates the next `LiveNode` index and records its kind.
213 fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
214 let ln = self.lnks.push(lnk);
216 debug!("{:?} is of kind {}", ln, live_node_kind_to_string(lnk, self.tcx));
// Allocates a live node and associates it with the given HIR node, so the
// propagation pass can look it up later via `Liveness::live_node`.
221 fn add_live_node_for_node(&mut self, hir_id: HirId, lnk: LiveNodeKind) {
222 let ln = self.add_live_node(lnk);
223 self.live_node_map.insert(hir_id, ln);
225 debug!("{:?} is node {:?}", ln, hir_id);
// Allocates the next `Variable` index for a local/param/upvar and records
// the HIR-id -> variable mapping for each kind.
228 fn add_variable(&mut self, vk: VarKind) -> Variable {
229 let v = self.var_kinds.push(vk);
232 Local(LocalInfo { id: node_id, .. }) | Param(node_id, _) | Upvar(node_id, _) => {
233 self.variable_map.insert(node_id, v);
237 debug!("{:?} is {:?}", v, vk);
// Looks up the variable for a HIR id; an unregistered id is a bug in the
// first pass, hence `span_bug!` rather than a user error.
242 fn variable(&self, hir_id: HirId, span: Span) -> Variable {
243 match self.variable_map.get(&hir_id) {
246 span_bug!(span, "no variable registered for id {:?}", hir_id);
// The user-visible name of a variable, for diagnostics.
251 fn variable_name(&self, var: Variable) -> Symbol {
252 match self.var_kinds[var] {
253 Local(LocalInfo { name, .. }) | Param(_, name) | Upvar(_, name) => name,
// Whether the binding came from struct-pattern field shorthand (`S { x }`);
// affects how unused-variable suggestions are phrased.
257 fn variable_is_shorthand(&self, var: Variable) -> bool {
258 match self.var_kinds[var] {
259 Local(LocalInfo { is_shorthand, .. }) => is_shorthand,
260 Param(..) | Upvar(..) => false,
// Records the capture list for a closure expression (shared via `Rc` so the
// propagation pass can read it cheaply).
264 fn set_captures(&mut self, hir_id: HirId, cs: Vec<CaptureInfo>) {
265 self.capture_info_map.insert(hir_id, Rc::new(cs));
// Walks a pattern iteratively (worklist in a `VecDeque`) and collects the
// HIR ids of all bindings introduced via struct-field shorthand.
268 fn collect_shorthand_field_ids(&self, pat: &hir::Pat<'tcx>) -> HirIdSet {
269 // For struct patterns, take note of which fields used shorthand
270 // (`x` rather than `x: x`).
271 let mut shorthand_field_ids = HirIdSet::default();
272 let mut pats = VecDeque::new();
// NOTE(review): the initial `pats.push_back(pat)` and the `match` header
// inside the loop (elided from this listing) are not visible here.
275 while let Some(pat) = pats.pop_front() {
276 use rustc_hir::PatKind::*;
278 Binding(.., inner_pat) => {
279 pats.extend(inner_pat.iter());
281 Struct(_, fields, _) => {
282 let (short, not_short): (Vec<&_>, Vec<&_>) =
283 fields.iter().partition(|f| f.is_shorthand);
284 shorthand_field_ids.extend(short.iter().map(|f| f.pat.hir_id));
285 pats.extend(not_short.iter().map(|f| f.pat));
287 Ref(inner_pat, _) | Box(inner_pat) => {
288 pats.push_back(inner_pat);
290 TupleStruct(_, inner_pats, _) | Tuple(inner_pats, _) | Or(inner_pats) => {
291 pats.extend(inner_pats.iter());
293 Slice(pre_pats, inner_pat, post_pats) => {
294 pats.extend(pre_pats.iter());
295 pats.extend(inner_pat.iter());
296 pats.extend(post_pats.iter());
302 return shorthand_field_ids;
305 fn add_from_pat(&mut self, pat: &hir::Pat<'tcx>) {
306 let shorthand_field_ids = self.collect_shorthand_field_ids(pat);
308 pat.each_binding(|_, hir_id, _, ident| {
309 self.add_live_node_for_node(hir_id, VarDefNode(ident.span, hir_id));
310 self.add_variable(Local(LocalInfo {
313 is_shorthand: shorthand_field_ids.contains(&hir_id),
// The outer (first-pass) walk: builds the `IrMaps` tables and, in
// `visit_body`, hands off to `Liveness` for propagation and checking.
319 impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
320 type Map = Map<'tcx>;
322 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
323 NestedVisitorMap::OnlyBodies(self.tcx.hir())
// Per-body driver: builds a fresh `IrMaps`, skips bodies that should not
// be linted, runs the dataflow (`Liveness::compute`), then the checks.
326 fn visit_body(&mut self, body: &'tcx hir::Body<'tcx>) {
327 debug!("visit_body {:?}", body.id());
329 // swap in a new set of IR maps for this body
330 let mut maps = IrMaps::new(self.tcx);
331 let hir_id = maps.tcx.hir().body_owner(body.id());
332 let local_def_id = maps.tcx.hir().local_def_id(hir_id);
333 let def_id = local_def_id.to_def_id();
335 // Don't run unused pass for #[derive()]
336 if let Some(parent) = self.tcx.parent(def_id) {
337 if let DefKind::Impl = self.tcx.def_kind(parent.expect_local()) {
338 if self.tcx.has_attr(parent, sym::automatically_derived) {
// NOTE(review): the early `return`s for the derive/naked cases (elided
// from this listing) sit inside these guards in the full source.
344 // Don't run unused pass for #[naked]
345 if self.tcx.has_attr(def_id, sym::naked) {
// Upvars mentioned by this body become variables too, so captures can be
// tracked like locals.
349 if let Some(upvars) = maps.tcx.upvars_mentioned(def_id) {
350 for &var_hir_id in upvars.keys() {
351 let var_name = maps.tcx.hir().name(var_hir_id);
352 maps.add_variable(Upvar(var_hir_id, var_name));
356 // gather up the various local variables, significant expressions,
358 intravisit::walk_body(&mut maps, body);
// Second phase: allocate the liveness sets (sizes are now known) and run
// the fixed-point propagation.
361 let mut lsets = Liveness::new(&mut maps, local_def_id);
362 let entry_ln = lsets.compute(&body, hir_id);
363 lsets.log_liveness(entry_ln, body.id().hir_id);
365 // check for various error conditions
366 lsets.visit_body(body);
367 lsets.warn_about_unused_upvars(entry_ln);
368 lsets.warn_about_unused_args(body, entry_ln);
// `let` statements introduce bindings.
371 fn visit_local(&mut self, local: &'tcx hir::Local<'tcx>) {
372 self.add_from_pat(&local.pat);
373 intravisit::walk_local(self, local);
// Match arms introduce bindings; an `if let` guard introduces more.
376 fn visit_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) {
377 self.add_from_pat(&arm.pat);
378 if let Some(hir::Guard::IfLet(ref pat, _)) = arm.guard {
379 self.add_from_pat(pat);
381 intravisit::walk_arm(self, arm);
// Parameters: bindings from a struct pattern are treated as locals (so the
// shorthand information is preserved); everything else is a `Param`.
384 fn visit_param(&mut self, param: &'tcx hir::Param<'tcx>) {
385 let shorthand_field_ids = self.collect_shorthand_field_ids(param.pat);
386 param.pat.each_binding(|_bm, hir_id, _x, ident| {
387 let var = match param.pat.kind {
388 rustc_hir::PatKind::Struct(..) => Local(LocalInfo {
391 is_shorthand: shorthand_field_ids.contains(&hir_id),
393 _ => Param(hir_id, ident.name),
395 self.add_variable(var);
397 intravisit::walk_param(self, param);
// Decides, per expression kind, whether a live node is needed.
// NOTE(review): the `match expr.kind {` header (elided from this listing)
// precedes the arms below in the full source.
400 fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
402 // live nodes required for uses or definitions of variables:
403 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
404 debug!("expr {}: path that leads to {:?}", expr.hir_id, path.res);
405 if let Res::Local(_var_hir_id) = path.res {
406 self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span, expr.hir_id));
408 intravisit::walk_expr(self, expr);
410 hir::ExprKind::Closure(..) => {
411 // Interesting control flow (for loops can contain labeled
412 // breaks or continues)
413 self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span, expr.hir_id));
415 // Make a live_node for each mentioned variable, with the span
416 // being the location that the variable is used. This results
417 // in better error messages than just pointing at the closure
418 // construction site.
419 let mut call_caps = Vec::new();
420 let closure_def_id = self.tcx.hir().local_def_id(expr.hir_id);
421 if let Some(upvars) = self.tcx.upvars_mentioned(closure_def_id) {
422 call_caps.extend(upvars.keys().map(|var_id| {
423 let upvar = upvars[var_id];
424 let upvar_ln = self.add_live_node(UpvarNode(upvar.span));
425 CaptureInfo { ln: upvar_ln, var_hid: *var_id }
428 self.set_captures(expr.hir_id, call_caps);
429 intravisit::walk_expr(self, expr);
432 hir::ExprKind::Let(ref pat, ..) => {
433 self.add_from_pat(pat);
434 intravisit::walk_expr(self, expr);
437 // live nodes required for interesting control flow:
438 hir::ExprKind::If(..)
439 | hir::ExprKind::Match(..)
440 | hir::ExprKind::Loop(..)
441 | hir::ExprKind::Yield(..) => {
442 self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span, expr.hir_id));
443 intravisit::walk_expr(self, expr);
// `&&`/`||` short-circuit, so they are control flow too.
445 hir::ExprKind::Binary(op, ..) if op.node.is_lazy() => {
446 self.add_live_node_for_node(expr.hir_id, ExprNode(expr.span, expr.hir_id));
447 intravisit::walk_expr(self, expr);
450 // otherwise, live nodes are not required:
451 hir::ExprKind::Index(..)
452 | hir::ExprKind::Field(..)
453 | hir::ExprKind::Array(..)
454 | hir::ExprKind::Call(..)
455 | hir::ExprKind::MethodCall(..)
456 | hir::ExprKind::Tup(..)
457 | hir::ExprKind::Binary(..)
458 | hir::ExprKind::AddrOf(..)
459 | hir::ExprKind::Cast(..)
460 | hir::ExprKind::DropTemps(..)
461 | hir::ExprKind::Unary(..)
462 | hir::ExprKind::Break(..)
463 | hir::ExprKind::Continue(_)
464 | hir::ExprKind::Lit(_)
465 | hir::ExprKind::ConstBlock(..)
466 | hir::ExprKind::Ret(..)
467 | hir::ExprKind::Block(..)
468 | hir::ExprKind::Assign(..)
469 | hir::ExprKind::AssignOp(..)
470 | hir::ExprKind::Struct(..)
471 | hir::ExprKind::Repeat(..)
472 | hir::ExprKind::InlineAsm(..)
473 | hir::ExprKind::LlvmInlineAsm(..)
474 | hir::ExprKind::Box(..)
475 | hir::ExprKind::Type(..)
477 | hir::ExprKind::Path(hir::QPath::TypeRelative(..))
478 | hir::ExprKind::Path(hir::QPath::LangItem(..)) => {
479 intravisit::walk_expr(self, expr);
485 // ______________________________________________________________________
486 // Computing liveness sets
488 // Actually we compute just a bit more than just liveness, but we use
489 // the same basic propagation framework in all cases.
// Bitflags passed to `Liveness::acc` describing how a variable is touched
// at a node: read, written, and/or "used" (a real use, not just a
// read-to-compute, e.g. the read in `x += 1` is not a use).
491 const ACC_READ: u32 = 1;
492 const ACC_WRITE: u32 = 2;
493 const ACC_USE: u32 = 4;
// Second-pass state: the dataflow tables (one RWU per (live node, variable)
// pair), the successor map, and the special closure/exit nodes described in
// the module docs.
495 struct Liveness<'a, 'tcx> {
496 ir: &'a mut IrMaps<'tcx>,
497 typeck_results: &'a ty::TypeckResults<'tcx>,
498 param_env: ty::ParamEnv<'tcx>,
// `Some` only for closures that actually capture something.
499 closure_min_captures: Option<&'tcx RootVariableMinCaptureList<'tcx>>,
// Each node's (single) successor in reverse-execution order; filled in by
// init_empty/init_from_succ during propagation.
500 successors: IndexVec<LiveNode, Option<LiveNode>>,
501 rwu_table: rwu_table::RWUTable,
503 /// A live node representing a point of execution before closure entry &
504 /// after closure exit. Used to calculate liveness of captured variables
505 /// through calls to the same closure. Used for Fn & FnMut closures only.
506 closure_ln: LiveNode,
507 /// A live node representing every 'exit' from the function, whether it be
508 /// by explicit return, panic, or other means.
// NOTE(review): the `exit_ln: LiveNode` field (elided from this listing)
// follows here — it is read throughout the propagation code below.
511 // mappings from loop node ID to LiveNode
512 // ("break" label should map to loop node ID,
513 // it probably doesn't now)
514 break_ln: HirIdMap<LiveNode>,
515 cont_ln: HirIdMap<LiveNode>,
518 impl<'a, 'tcx> Liveness<'a, 'tcx> {
// Allocates the two special nodes (closure/exit) and sizes the successor
// and RWU tables from the counts gathered by the first pass.
519 fn new(ir: &'a mut IrMaps<'tcx>, body_owner: LocalDefId) -> Liveness<'a, 'tcx> {
520 let typeck_results = ir.tcx.typeck(body_owner);
521 let param_env = ir.tcx.param_env(body_owner);
522 let closure_min_captures = typeck_results.closure_min_captures.get(&body_owner.to_def_id());
523 let closure_ln = ir.add_live_node(ClosureNode);
524 let exit_ln = ir.add_live_node(ExitNode);
526 let num_live_nodes = ir.lnks.len();
527 let num_vars = ir.var_kinds.len();
533 closure_min_captures,
534 successors: IndexVec::from_elem_n(None, num_live_nodes),
535 rwu_table: rwu_table::RWUTable::new(num_live_nodes, num_vars),
538 break_ln: Default::default(),
539 cont_ln: Default::default(),
// Looks up the live node allocated by the first pass for this HIR node;
// a miss means the two passes disagree, which is a compiler bug.
543 fn live_node(&self, hir_id: HirId, span: Span) -> LiveNode {
544 match self.ir.live_node_map.get(&hir_id) {
547 // This must be a mismatch between the ir_map construction
548 // above and the propagation code below; the two sets of
549 // code have to agree about which AST nodes are worth
550 // creating liveness nodes for.
551 span_bug!(span, "no live node registered for node {:?}", hir_id);
// Thin forwarding wrapper over `IrMaps::variable`.
556 fn variable(&self, hir_id: HirId, span: Span) -> Variable {
557 self.ir.variable(hir_id, span)
// Kills every binding introduced by `pat` (working from `succ` backwards),
// returning the new predecessor node for whatever comes before the pattern.
560 fn define_bindings_in_pat(&mut self, pat: &hir::Pat<'_>, mut succ: LiveNode) -> LiveNode {
561 // In an or-pattern, only consider the first pattern; any later patterns
562 // must have the same bindings, and we also consider the first pattern
563 // to be the "authoritative" set of ids.
564 pat.each_binding_or_first(&mut |_, hir_id, pat_sp, ident| {
565 let ln = self.live_node(hir_id, pat_sp);
566 let var = self.variable(hir_id, ident.span);
567 self.init_from_succ(ln, succ);
568 self.define(ln, var);
// NOTE(review): the `succ = ln;` update and the final `succ` return value
// (elided from this listing) complete this method in the full source.
// --- RWU-table queries used by the checking pass ---
// Live on entry = some later node reads the value held here.
574 fn live_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
575 self.rwu_table.get_reader(ln, var)
578 // Is this variable live on entry to any of its successor nodes?
579 fn live_on_exit(&self, ln: LiveNode, var: Variable) -> bool {
580 let successor = self.successors[ln].unwrap();
581 self.live_on_entry(successor, var)
// "Used" is stricter than "read"; see the `RWU` notes in the module docs.
584 fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
585 self.rwu_table.get_used(ln, var)
// Assigned on entry = some reachable later node writes the variable.
588 fn assigned_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
589 self.rwu_table.get_writer(ln, var)
592 fn assigned_on_exit(&self, ln: LiveNode, var: Variable) -> bool {
593 let successor = self.successors[ln].unwrap();
594 self.assigned_on_entry(successor, var)
// Debug helper: writes the index of every variable satisfying `test`.
597 fn write_vars<F>(&self, wr: &mut dyn Write, mut test: F) -> io::Result<()>
599 F: FnMut(Variable) -> bool,
601 for var_idx in 0..self.ir.var_kinds.len() {
602 let var = Variable::from(var_idx);
// NOTE(review): the `if test(var) {` guard (elided from this listing)
// wraps this write in the full source.
604 write!(wr, " {:?}", var)?;
// Renders one live node's reads/writes/uses and successor for debug logs.
// Write errors into the Vec are impossible, hence the allow.
610 #[allow(unused_must_use)]
611 fn ln_str(&self, ln: LiveNode) -> String {
612 let mut wr = Vec::new();
614 let wr = &mut wr as &mut dyn Write;
615 write!(wr, "[{:?} of kind {:?} reads", ln, self.ir.lnks[ln]);
616 self.write_vars(wr, |var| self.rwu_table.get_reader(ln, var));
617 write!(wr, " writes");
618 self.write_vars(wr, |var| self.rwu_table.get_writer(ln, var));
620 self.write_vars(wr, |var| self.rwu_table.get_used(ln, var));
622 write!(wr, " precedes {:?}]", self.successors[ln]);
624 String::from_utf8(wr).unwrap()
// Dumps the whole table via `debug!`; a no-op unless debug logging is on.
627 fn log_liveness(&self, entry_ln: LiveNode, hir_id: hir::HirId) {
628 // hack to skip the loop unless debug! is enabled:
630 "^^ liveness computation results for body {} (entry={:?})",
632 for ln_idx in 0..self.ir.lnks.len() {
633 debug!("{:?}", self.ln_str(LiveNode::from(ln_idx)));
// Records the successor edge without copying any RWU data.
641 fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) {
642 self.successors[ln] = Some(succ_ln);
644 // It is not necessary to initialize the RWUs here because they are all
645 // empty when created, and the sets only grow during iterations.
// Records the successor edge AND copies its RWU row wholesale.
648 fn init_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode) {
649 // more efficient version of init_empty() / merge_from_succ()
650 self.successors[ln] = Some(succ_ln);
651 self.rwu_table.copy(ln, succ_ln);
652 debug!("init_from_succ(ln={}, succ={})", self.ln_str(ln), self.ln_str(succ_ln));
// Unions the successor's row into `ln`'s; the returned `changed` flag
// drives the fixed-point iteration for loops.
655 fn merge_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode) -> bool {
660 let changed = self.rwu_table.union(ln, succ_ln);
661 debug!("merge_from_succ(ln={:?}, succ={}, changed={})", ln, self.ln_str(succ_ln), changed);
665 // Indicates that a local variable was *defined*; we know that no
666 // uses of the variable can precede the definition (resolve checks
667 // this) so we just clear out all the data.
668 fn define(&mut self, writer: LiveNode, var: Variable) {
// `used` survives the kill so unused-variable warnings still fire.
669 let used = self.rwu_table.get_used(writer, var);
670 self.rwu_table.set(writer, var, rwu_table::RWU { reader: false, writer: false, used });
671 debug!("{:?} defines {:?}: {}", writer, var, self.ln_str(writer));
674 // Either read, write, or both depending on the acc bitset
675 fn acc(&mut self, ln: LiveNode, var: Variable, acc: u32) {
676 debug!("{:?} accesses[{:x}] {:?}: {}", ln, acc, var, self.ln_str(ln))
678 let mut rwu = self.rwu_table.get(ln, var);
// NOTE(review): the field assignments inside these guards (elided from
// this listing) set rwu.writer / rwu.reader / rwu.used in the full source.
680 if (acc & ACC_WRITE) != 0 {
685 // Important: if we both read/write, must do read second
686 // or else the write will override.
687 if (acc & ACC_READ) != 0 {
691 if (acc & ACC_USE) != 0 {
695 self.rwu_table.set(ln, var, rwu);
// Top-level propagation: seeds the exit node with by-ref captures, walks
// the body in reverse execution order, and (for Fn/FnMut closures and
// generators) models re-entry through the special `closure_ln` node.
// Returns the body's entry live node.
698 fn compute(&mut self, body: &hir::Body<'_>, hir_id: HirId) -> LiveNode {
699 debug!("compute: for body {:?}", body.id().hir_id);
701 // # Liveness of captured variables
703 // When computing the liveness for captured variables we take into
704 // account how variable is captured (ByRef vs ByValue) and what is the
705 // closure kind (Generator / FnOnce vs Fn / FnMut).
707 // Variables captured by reference are assumed to be used on the exit
710 // In FnOnce closures, variables captured by value are known to be dead
711 // on exit since it is impossible to call the closure again.
713 // In Fn / FnMut closures, variables captured by value are live on exit
714 // if they are live on the entry to the closure, since only the closure
715 // itself can access them on subsequent calls.
717 if let Some(closure_min_captures) = self.closure_min_captures {
718 // Mark upvars captured by reference as used after closure exits.
719 for (&var_hir_id, min_capture_list) in closure_min_captures {
720 for captured_place in min_capture_list {
721 match captured_place.info.capture_kind {
722 ty::UpvarCapture::ByRef(_) => {
723 let var = self.variable(
725 captured_place.get_capture_kind_span(self.ir.tcx),
727 self.acc(self.exit_ln, var, ACC_READ | ACC_USE);
729 ty::UpvarCapture::ByValue(_) => {}
// Reverse walk of the whole body, ending at the entry node.
735 let succ = self.propagate_through_expr(&body.value, self.exit_ln);
737 if self.closure_min_captures.is_none() {
738 // Either not a closure, or closure without any captured variables.
739 // No need to determine liveness of captured variables, since there
// Only Fn/FnMut closures fall through to the re-entry modelling below;
// FnOnce closures and generators cannot be called/resumed again.
744 let ty = self.typeck_results.node_type(hir_id);
746 ty::Closure(_def_id, substs) => match substs.as_closure().kind() {
747 ty::ClosureKind::Fn => {}
748 ty::ClosureKind::FnMut => {}
749 ty::ClosureKind::FnOnce => return succ,
751 ty::Generator(..) => return succ,
755 "{} has upvars so it should have a closure type: {:?}",
762 // Propagate through calls to the closure.
// NOTE(review): the surrounding `loop { .. }` that iterates this to a
// fixed point is elided from this listing; the `!merge_from_succ` break
// condition below is its exit test.
764 self.init_from_succ(self.closure_ln, succ);
765 for param in body.params {
766 param.pat.each_binding(|_bm, hir_id, _x, ident| {
767 let var = self.variable(hir_id, ident.span);
768 self.define(self.closure_ln, var);
772 if !self.merge_from_succ(self.exit_ln, self.closure_ln) {
// Sanity check: re-propagating must be stable once the fixed point is hit.
775 assert_eq!(succ, self.propagate_through_expr(&body.value, self.exit_ln));
// Blocks: register a break target if labeled, then fold over the tail
// expression and statements in reverse order.
781 fn propagate_through_block(&mut self, blk: &hir::Block<'_>, succ: LiveNode) -> LiveNode {
782 if blk.targeted_by_break {
783 self.break_ln.insert(blk.hir_id, succ);
785 let succ = self.propagate_through_opt_expr(blk.expr, succ);
786 blk.stmts.iter().rev().fold(succ, |succ, stmt| self.propagate_through_stmt(stmt, succ))
// Statements. NOTE(review): the `match stmt.kind {` header (elided from
// this listing) precedes the arms below in the full source.
789 fn propagate_through_stmt(&mut self, stmt: &hir::Stmt<'_>, succ: LiveNode) -> LiveNode {
791 hir::StmtKind::Local(ref local) => {
792 // Note: we mark the variable as defined regardless of whether
793 // there is an initializer. Initially I had thought to only mark
794 // the live variable as defined if it was initialized, and then we
795 // could check for uninit variables just by scanning what is live
796 // at the start of the function. But that doesn't work so well for
797 // immutable variables defined in a loop:
798 // loop { let x; x = 5; }
799 // because the "assignment" loops back around and generates an error.
801 // So now we just check that variables defined w/o an
802 // initializer are not live at the point of their
803 // initialization, which is mildly more complex than checking
804 // once at the func header but otherwise equivalent.
806 let succ = self.propagate_through_opt_expr(local.init, succ);
807 self.define_bindings_in_pat(&local.pat, succ)
809 hir::StmtKind::Item(..) => succ,
810 hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => {
811 self.propagate_through_expr(&expr, succ)
// Folds a slice of expressions in reverse execution order.
816 fn propagate_through_exprs(&mut self, exprs: &[Expr<'_>], succ: LiveNode) -> LiveNode {
817 exprs.iter().rev().fold(succ, |succ, expr| self.propagate_through_expr(&expr, succ))
// `None` simply passes `succ` through unchanged.
820 fn propagate_through_opt_expr(
822 opt_expr: Option<&Expr<'_>>,
825 opt_expr.map_or(succ, |expr| self.propagate_through_expr(expr, succ))
828 fn propagate_through_expr(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
829 debug!("propagate_through_expr: {:?}", expr);
832 // Interesting cases with control flow or which gen/kill
833 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
834 self.access_path(expr.hir_id, path, succ, ACC_READ | ACC_USE)
837 hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
839 hir::ExprKind::Closure(..) => {
840 debug!("{:?} is an ExprKind::Closure", expr);
842 // the construction of a closure itself is not important,
843 // but we have to consider the closed over variables.
849 .unwrap_or_else(|| span_bug!(expr.span, "no registered caps"));
851 caps.iter().rev().fold(succ, |succ, cap| {
852 self.init_from_succ(cap.ln, succ);
853 let var = self.variable(cap.var_hid, expr.span);
854 self.acc(cap.ln, var, ACC_READ | ACC_USE);
859 hir::ExprKind::Let(ref pat, ref scrutinee, _) => {
860 let succ = self.propagate_through_expr(scrutinee, succ);
861 self.define_bindings_in_pat(pat, succ)
864 // Note that labels have been resolved, so we don't need to look
865 // at the label ident
866 hir::ExprKind::Loop(ref blk, ..) => self.propagate_through_loop(expr, &blk, succ),
868 hir::ExprKind::Yield(ref e, ..) => {
869 let yield_ln = self.live_node(expr.hir_id, expr.span);
870 self.init_from_succ(yield_ln, succ);
871 self.merge_from_succ(yield_ln, self.exit_ln);
872 self.propagate_through_expr(e, yield_ln)
875 hir::ExprKind::If(ref cond, ref then, ref else_opt) => {
890 self.propagate_through_opt_expr(else_opt.as_ref().map(|e| &**e), succ);
891 let then_ln = self.propagate_through_expr(&then, succ);
892 let ln = self.live_node(expr.hir_id, expr.span);
893 self.init_from_succ(ln, else_ln);
894 self.merge_from_succ(ln, then_ln);
895 self.propagate_through_expr(&cond, ln)
898 hir::ExprKind::Match(ref e, arms, _) => {
913 let ln = self.live_node(expr.hir_id, expr.span);
914 self.init_empty(ln, succ);
916 let body_succ = self.propagate_through_expr(&arm.body, succ);
918 let guard_succ = arm.guard.as_ref().map_or(body_succ, |g| match g {
919 hir::Guard::If(e) => self.propagate_through_expr(e, body_succ),
920 hir::Guard::IfLet(pat, e) => {
921 let let_bind = self.define_bindings_in_pat(pat, body_succ);
922 self.propagate_through_expr(e, let_bind)
925 let arm_succ = self.define_bindings_in_pat(&arm.pat, guard_succ);
926 self.merge_from_succ(ln, arm_succ);
928 self.propagate_through_expr(&e, ln)
931 hir::ExprKind::Ret(ref o_e) => {
932 // Ignore succ and subst exit_ln.
933 self.propagate_through_opt_expr(o_e.as_ref().map(|e| &**e), self.exit_ln)
936 hir::ExprKind::Break(label, ref opt_expr) => {
937 // Find which label this break jumps to
938 let target = match label.target_id {
939 Ok(hir_id) => self.break_ln.get(&hir_id),
940 Err(err) => span_bug!(expr.span, "loop scope error: {}", err),
944 // Now that we know the label we're going to,
945 // look it up in the break loop nodes table
948 Some(b) => self.propagate_through_opt_expr(opt_expr.as_ref().map(|e| &**e), b),
949 None => span_bug!(expr.span, "`break` to unknown label"),
953 hir::ExprKind::Continue(label) => {
954 // Find which label this expr continues to
957 .unwrap_or_else(|err| span_bug!(expr.span, "loop scope error: {}", err));
959 // Now that we know the label we're going to,
960 // look it up in the continue loop nodes table
964 .unwrap_or_else(|| span_bug!(expr.span, "continue to unknown label"))
967 hir::ExprKind::Assign(ref l, ref r, _) => {
968 // see comment on places in
969 // propagate_through_place_components()
970 let succ = self.write_place(&l, succ, ACC_WRITE);
971 let succ = self.propagate_through_place_components(&l, succ);
972 self.propagate_through_expr(&r, succ)
975 hir::ExprKind::AssignOp(_, ref l, ref r) => {
976 // an overloaded assign op is like a method call
977 if self.typeck_results.is_method_call(expr) {
978 let succ = self.propagate_through_expr(&l, succ);
979 self.propagate_through_expr(&r, succ)
981 // see comment on places in
982 // propagate_through_place_components()
983 let succ = self.write_place(&l, succ, ACC_WRITE | ACC_READ);
984 let succ = self.propagate_through_expr(&r, succ);
985 self.propagate_through_place_components(&l, succ)
989 // Uninteresting cases: just propagate in rev exec order
990 hir::ExprKind::Array(ref exprs) => self.propagate_through_exprs(exprs, succ),
992 hir::ExprKind::Struct(_, ref fields, ref with_expr) => {
993 let succ = self.propagate_through_opt_expr(with_expr.as_ref().map(|e| &**e), succ);
997 .fold(succ, |succ, field| self.propagate_through_expr(&field.expr, succ))
1000 hir::ExprKind::Call(ref f, ref args) => {
1001 let succ = self.check_is_ty_uninhabited(expr, succ);
1002 let succ = self.propagate_through_exprs(args, succ);
1003 self.propagate_through_expr(&f, succ)
1006 hir::ExprKind::MethodCall(.., ref args, _) => {
1007 let succ = self.check_is_ty_uninhabited(expr, succ);
1008 self.propagate_through_exprs(args, succ)
1011 hir::ExprKind::Tup(ref exprs) => self.propagate_through_exprs(exprs, succ),
1013 hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => {
1014 let r_succ = self.propagate_through_expr(&r, succ);
1016 let ln = self.live_node(expr.hir_id, expr.span);
1017 self.init_from_succ(ln, succ);
1018 self.merge_from_succ(ln, r_succ);
1020 self.propagate_through_expr(&l, ln)
1023 hir::ExprKind::Index(ref l, ref r) | hir::ExprKind::Binary(_, ref l, ref r) => {
1024 let r_succ = self.propagate_through_expr(&r, succ);
1025 self.propagate_through_expr(&l, r_succ)
1028 hir::ExprKind::Box(ref e)
1029 | hir::ExprKind::AddrOf(_, _, ref e)
1030 | hir::ExprKind::Cast(ref e, _)
1031 | hir::ExprKind::Type(ref e, _)
1032 | hir::ExprKind::DropTemps(ref e)
1033 | hir::ExprKind::Unary(_, ref e)
1034 | hir::ExprKind::Repeat(ref e, _) => self.propagate_through_expr(&e, succ),
1036 hir::ExprKind::InlineAsm(ref asm) => {
1037 // Handle non-returning asm
1038 let mut succ = if asm.options.contains(InlineAsmOptions::NORETURN) {
1044 // Do a first pass for writing outputs only
1045 for (op, _op_sp) in asm.operands.iter().rev() {
1047 hir::InlineAsmOperand::In { .. }
1048 | hir::InlineAsmOperand::Const { .. }
1049 | hir::InlineAsmOperand::Sym { .. } => {}
1050 hir::InlineAsmOperand::Out { expr, .. } => {
1051 if let Some(expr) = expr {
1052 succ = self.write_place(expr, succ, ACC_WRITE);
1055 hir::InlineAsmOperand::InOut { expr, .. } => {
1056 succ = self.write_place(expr, succ, ACC_READ | ACC_WRITE | ACC_USE);
1058 hir::InlineAsmOperand::SplitInOut { out_expr, .. } => {
1059 if let Some(expr) = out_expr {
1060 succ = self.write_place(expr, succ, ACC_WRITE);
1066 // Then do a second pass for inputs
1067 let mut succ = succ;
1068 for (op, _op_sp) in asm.operands.iter().rev() {
1070 hir::InlineAsmOperand::In { expr, .. }
1071 | hir::InlineAsmOperand::Sym { expr, .. } => {
1072 succ = self.propagate_through_expr(expr, succ)
1074 hir::InlineAsmOperand::Out { expr, .. } => {
1075 if let Some(expr) = expr {
1076 succ = self.propagate_through_place_components(expr, succ);
1079 hir::InlineAsmOperand::InOut { expr, .. } => {
1080 succ = self.propagate_through_place_components(expr, succ);
1082 hir::InlineAsmOperand::SplitInOut { in_expr, out_expr, .. } => {
1083 if let Some(expr) = out_expr {
1084 succ = self.propagate_through_place_components(expr, succ);
1086 succ = self.propagate_through_expr(in_expr, succ);
1088 hir::InlineAsmOperand::Const { .. } => {}
1094 hir::ExprKind::LlvmInlineAsm(ref asm) => {
1095 let ia = &asm.inner;
1096 let outputs = asm.outputs_exprs;
1097 let inputs = asm.inputs_exprs;
1098 let succ = iter::zip(&ia.outputs, outputs).rev().fold(succ, |succ, (o, output)| {
1099 // see comment on places
1100 // in propagate_through_place_components()
1102 self.propagate_through_expr(output, succ)
1104 let acc = if o.is_rw { ACC_WRITE | ACC_READ } else { ACC_WRITE };
1105 let succ = self.write_place(output, succ, acc);
1106 self.propagate_through_place_components(output, succ)
1110 // Inputs are executed first. Propagate last because of rev order
1111 self.propagate_through_exprs(inputs, succ)
1114 hir::ExprKind::Lit(..)
1115 | hir::ExprKind::ConstBlock(..)
1116 | hir::ExprKind::Err
1117 | hir::ExprKind::Path(hir::QPath::TypeRelative(..))
1118 | hir::ExprKind::Path(hir::QPath::LangItem(..)) => succ,
1120 // Note that labels have been resolved, so we don't need to look
1121 // at the label ident
1122 hir::ExprKind::Block(ref blk, _) => self.propagate_through_block(&blk, succ),
// Computes liveness reads for the *components* of a place expression that
// appears in write position (the `x` in `x[5]` or `x.f`). Tracked places
// (bare local paths) contribute nothing here — their write node is built by
// `write_place()`; untracked places generate reads of their base expression.
// NOTE(review): several interior lines (the `match expr.kind` opener and
// closing braces) are elided in this excerpt — confirm against the full file.
1126 fn propagate_through_place_components(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
1129 // In general, the full flow graph structure for an
1130 // assignment/move/etc can be handled in one of two ways,
1131 // depending on whether what is being assigned is a "tracked
1132 // value" or not. A tracked value is basically a local
1133 // variable or argument.
1135 // The two kinds of graphs are:
1137 // Tracked place Untracked place
1138 // ----------------------++-----------------------
1142 // (rvalue) || (rvalue)
1145 // (write of place) || (place components)
1150 // ----------------------++-----------------------
1152 // I will cover the two cases in turn:
1156 // A tracked place is a local variable/argument `x`. In
1157 // these cases, the link_node where the write occurs is linked
1158 // to node id of `x`. The `write_place()` routine generates
1159 // the contents of this node. There are no subcomponents to
1162 // # Non-tracked places
1164 // These are places like `x[5]` or `x.f`. In that case, we
1165 // basically ignore the value which is written to but generate
1166 // reads for the components---`x` in these two examples. The
1167 // components reads are generated by
1168 // `propagate_through_place_components()` (this fn).
1172 // It is still possible to observe assignments to non-places;
1173 // these errors are detected in the later pass borrowck. We
1174 // just ignore such cases and treat them as reads.
// A bare path is a tracked place: no component reads needed, so the
// successor node flows through unchanged.
1177 hir::ExprKind::Path(_) => succ,
// `e.f`: the written-to field itself is ignored, but the base `e` is read.
1178 hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
// Everything else (including non-places, rejected later by borrowck) is
// treated as an ordinary rvalue read.
1179 _ => self.propagate_through_expr(expr, succ),
1183 // see comment on propagate_through_place()
// Builds the liveness node for a write to `expr`. Only resolved paths are
// tracked (handled via `access_path` below); `acc` is a bitmask of the
// ACC_READ / ACC_WRITE / ACC_USE access flags used elsewhere in this pass.
1184 fn write_place(&mut self, expr: &Expr<'_>, succ: LiveNode, acc: u32) -> LiveNode {
// Writes through a resolved path are the tracked-place case.
1186 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
1187 self.access_path(expr.hir_id, path, succ, acc)
1190 // We do not track other places, so just propagate through
1191 // to their subcomponents. Also, it may happen that
1192 // non-places occur here, because those are detected in the
1193 // later pass borrowck.
// NOTE(review): the lines below appear to be the body of an `access_var`
// helper whose signature (original lines ~1212-1217) is elided from this
// excerpt — confirm against the full file. It allocates a live node for the
// access site, seeds it from the successor, and records the access bits for
// the variable.
1206 let ln = self.live_node(hir_id, span);
1208 self.init_from_succ(ln, succ);
1209 let var = self.variable(var_hid, span);
1210 self.acc(ln, var, acc);
// NOTE(review): the next two lines belong to `access_path` (header elided):
// only `Res::Local` resolutions are tracked variables and are forwarded to
// `access_var`.
1218 path: &hir::Path<'_>,
1223 Res::Local(hid) => self.access_var(hir_id, hid, succ, acc, path.span),
// Propagates liveness through a `loop`. Per the module docs, loops require a
// fixed-point iteration: the loop-head node is merged with the body's flow
// repeatedly until nothing changes.
// NOTE(review): some parameter lines of the signature are elided in this
// excerpt (only `body` is visible) — confirm the full signature in the file.
1228 fn propagate_through_loop(
1231 body: &hir::Block<'_>,
1235 We model control flow like this:
1242 Note that a `continue` expression targeting the `loop` will have a successor of `expr`.
1243 Meanwhile, a `break` expression will have a successor of `succ`.
// Loop-head node starts empty; it will absorb the body's liveness below.
1247 let ln = self.live_node(expr.hir_id, expr.span);
1248 self.init_empty(ln, succ);
1249 debug!("propagate_through_loop: using id for loop body {} {:?}", expr.hir_id, body);
// `break` exits to the loop's successor; `continue` re-enters at the head.
1251 self.break_ln.insert(expr.hir_id, succ);
1253 self.cont_ln.insert(expr.hir_id, ln);
1255 let body_ln = self.propagate_through_block(body, ln);
1257 // repeat until fixed point is reached:
1258 while self.merge_from_succ(ln, body_ln) {
// Once the head stops changing, re-running the body must be a no-op;
// this assert checks the fixed point really was reached.
1259 assert_eq!(body_ln, self.propagate_through_block(body, ln));
// If `expr`'s type is uninhabited (as seen from the enclosing module), lint
// the successor node as unreachable code. Returns the live node to continue
// propagation from.
// NOTE(review): the function's tail (remaining match arms / return value) is
// elided in this excerpt — confirm what is returned in the uninhabited case.
1265 fn check_is_ty_uninhabited(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
1266 let ty = self.typeck_results.expr_ty(expr);
// Inhabitedness is module-relative (fields may be private), hence the
// parent-module lookup.
1267 let m = self.ir.tcx.parent_module(expr.hir_id).to_def_id();
1268 if self.ir.tcx.is_ty_uninhabited_from(m, ty, self.param_env) {
// Pick a description based on what kind of node the successor is.
1269 match self.ir.lnks[succ] {
1270 LiveNodeKind::ExprNode(succ_span, succ_id) => {
1271 self.warn_about_unreachable(expr.span, ty, succ_span, succ_id, "expression");
1273 LiveNodeKind::VarDefNode(succ_span, succ_id) => {
1274 self.warn_about_unreachable(expr.span, ty, succ_span, succ_id, "definition");
// Emits the UNREACHABLE_CODE lint for code following an expression of an
// uninhabited type. Skipped when the type is literally `!`, because type
// checking already warned in that case (see comment in the body).
// NOTE(review): the parameter list (original lines ~1285-1291) is elided in
// this excerpt — presumably (expr_span, orig_ty, orig_span, succ_id, descr)
// based on the uses below; confirm against the full file.
1284 fn warn_about_unreachable(
1292 if !orig_ty.is_never() {
1293 // Unreachable code warnings are already emitted during type checking.
1294 // However, during type checking, full type information is being
1295 // calculated but not yet available, so the check for diverging
1296 // expressions due to uninhabited result types is pretty crude and
1297 // only checks whether ty.is_never(). Here, we have full type
1298 // information available and can issue warnings for less obviously
1299 // uninhabited types (e.g. empty enums). The check above is used so
1300 // that we do not emit the same warning twice if the uninhabited type
1303 self.ir.tcx.struct_span_lint_hir(
1304 lint::builtin::UNREACHABLE_CODE,
1308 let msg = format!("unreachable {}", descr);
// Primary label on the unreachable code; secondary label on the
// diverging expression that makes it unreachable.
1310 .span_label(expr_span, &msg)
1311 .span_label(orig_span, "any code following this expression is unreachable")
1315 "this expression has type `{}`, which is uninhabited",
1326 // _______________________________________________________________________
1327 // Checking for error conditions
// Error-checking walk: now that liveness has been computed, visit the AST
// again and report dead assignments / unused variables.
1329 impl<'a, 'tcx> Visitor<'tcx> for Liveness<'a, 'tcx> {
1330 type Map = intravisit::ErasedMap<'tcx>;
// No nested-item traversal: this visitor only walks the current body.
1332 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
1333 NestedVisitorMap::None
// For `let` bindings: if the binding is used later but the initializer's
// value is dead, warn about the dead assignment.
1336 fn visit_local(&mut self, local: &'tcx hir::Local<'tcx>) {
1337 self.check_unused_vars_in_pat(&local.pat, None, |spans, hir_id, ln, var| {
// Only warn when there actually is an initializer to be dead.
1338 if local.init.is_some() {
1339 self.warn_about_dead_assign(spans, hir_id, ln, var);
1343 intravisit::walk_local(self, local);
1346 fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
1347 check_expr(self, ex);
1348 intravisit::walk_expr(self, ex);
// Match-arm bindings get the unused-variable check, but no dead-assign
// callback (there is no initializer to report).
1351 fn visit_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) {
1352 self.check_unused_vars_in_pat(&arm.pat, None, |_, _, _, _| {});
1353 intravisit::walk_arm(self, arm);
// Per-expression error checks: places written to by assignments, `+=`-style
// ops, and asm output operands must be checkable places; pattern bindings in
// `let` expressions get the unused-variable check.
// NOTE(review): the `match expr.kind {` opener and several closing braces are
// elided in this excerpt — code is otherwise as in the original.
1357 fn check_expr<'tcx>(this: &mut Liveness<'_, 'tcx>, expr: &'tcx Expr<'tcx>) {
1359 hir::ExprKind::Assign(ref l, ..) => {
1360 this.check_place(&l);
// `a += b` desugared as a method call is not a direct place write, so
// only check the LHS when it is the built-in operator.
1363 hir::ExprKind::AssignOp(_, ref l, _) => {
1364 if !this.typeck_results.is_method_call(expr) {
1365 this.check_place(&l);
// Output operands of `asm!` are writes and must be places.
1369 hir::ExprKind::InlineAsm(ref asm) => {
1370 for (op, _op_sp) in asm.operands {
1372 hir::InlineAsmOperand::Out { expr, .. } => {
1373 if let Some(expr) = expr {
1374 this.check_place(expr);
1377 hir::InlineAsmOperand::InOut { expr, .. } => {
1378 this.check_place(expr);
// For split in/out operands only the *out* half is a write.
1380 hir::InlineAsmOperand::SplitInOut { out_expr, .. } => {
1381 if let Some(out_expr) = out_expr {
1382 this.check_place(out_expr);
1390 hir::ExprKind::LlvmInlineAsm(ref asm) => {
1391 for input in asm.inputs_exprs {
1392 this.visit_expr(input);
1395 // Output operands must be places
1396 for (o, output) in iter::zip(&asm.inner.outputs, asm.outputs_exprs) {
1398 this.check_place(output);
1400 this.visit_expr(output);
1404 hir::ExprKind::Let(ref pat, ..) => {
1405 this.check_unused_vars_in_pat(pat, None, |_, _, _, _| {});
1408 // no correctness conditions related to liveness
// Deliberately exhaustive (no `_` arm) so that adding a new ExprKind
// variant forces this list to be revisited.
1409 hir::ExprKind::Call(..)
1410 | hir::ExprKind::MethodCall(..)
1411 | hir::ExprKind::Match(..)
1412 | hir::ExprKind::Loop(..)
1413 | hir::ExprKind::Index(..)
1414 | hir::ExprKind::Field(..)
1415 | hir::ExprKind::Array(..)
1416 | hir::ExprKind::Tup(..)
1417 | hir::ExprKind::Binary(..)
1418 | hir::ExprKind::Cast(..)
1419 | hir::ExprKind::If(..)
1420 | hir::ExprKind::DropTemps(..)
1421 | hir::ExprKind::Unary(..)
1422 | hir::ExprKind::Ret(..)
1423 | hir::ExprKind::Break(..)
1424 | hir::ExprKind::Continue(..)
1425 | hir::ExprKind::Lit(_)
1426 | hir::ExprKind::ConstBlock(..)
1427 | hir::ExprKind::Block(..)
1428 | hir::ExprKind::AddrOf(..)
1429 | hir::ExprKind::Struct(..)
1430 | hir::ExprKind::Repeat(..)
1431 | hir::ExprKind::Closure(..)
1432 | hir::ExprKind::Path(_)
1433 | hir::ExprKind::Yield(..)
1434 | hir::ExprKind::Box(..)
1435 | hir::ExprKind::Type(..)
1436 | hir::ExprKind::Err => {}
1440 impl<'tcx> Liveness<'_, 'tcx> {
// Checks a place appearing in write position: for a local variable, warn
// if the assigned value is never subsequently read; for compound places,
// just walk the embedded rvalue subexpressions.
1441 fn check_place(&mut self, expr: &'tcx Expr<'tcx>) {
1443 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
1444 if let Res::Local(var_hid) = path.res {
1445 // Assignment to an immutable variable or argument: only legal
1446 // if there is no later assignment. If this local is actually
1447 // mutable, then check for a reassignment to flag the mutability
1449 let ln = self.live_node(expr.hir_id, expr.span);
1450 let var = self.variable(var_hid, expr.span);
1451 self.warn_about_dead_assign(vec![expr.span], expr.hir_id, ln, var);
1455 // For other kinds of places, no checks are required,
1456 // and any embedded expressions are actually rvalues
1457 intravisit::walk_expr(self, expr);
// Returns the variable's name if an unused-variable/assignment warning should
// be emitted for it; returns None (suppressing the warning) for unnamed
// variables and names starting with `_`.
// NOTE(review): the early-return lines between these checks are elided in
// this excerpt.
1462 fn should_warn(&self, var: Variable) -> Option<String> {
1463 let name = self.ir.variable_name(var);
1464 if name == kw::Empty {
1467 let name: &str = &name.as_str();
// Safe indexing: the kw::Empty check above guarantees a non-empty name.
1468 if name.as_bytes()[0] == b'_' {
1471 Some(name.to_owned())
// Warns about closure captures that are never used: by-value captures that
// are captured but never read get UNUSED_ASSIGNMENTS; captures never used at
// all get UNUSED_VARIABLES. By-ref captures are skipped entirely.
1474 fn warn_about_unused_upvars(&self, entry_ln: LiveNode) {
// Nothing to do for non-closures (no capture information).
1475 let closure_min_captures = match self.closure_min_captures {
1477 Some(closure_min_captures) => closure_min_captures,
1480 // If closure_min_captures is Some(), upvars must be Some() too.
1481 for (&var_hir_id, min_capture_list) in closure_min_captures {
1482 for captured_place in min_capture_list {
1483 match captured_place.info.capture_kind {
1484 ty::UpvarCapture::ByValue(_) => {}
// By-ref captures exist to observe later writes; don't warn.
1485 ty::UpvarCapture::ByRef(..) => continue,
1487 let span = captured_place.get_capture_kind_span(self.ir.tcx);
1488 let var = self.variable(var_hir_id, span);
// Used somewhere but the captured *value* is dead on entry:
// the capture is written, never read.
1489 if self.used_on_entry(entry_ln, var) {
1490 if !self.live_on_entry(entry_ln, var) {
1491 if let Some(name) = self.should_warn(var) {
1492 self.ir.tcx.struct_span_lint_hir(
1493 lint::builtin::UNUSED_ASSIGNMENTS,
1497 lint.build(&format!(
1498 "value captured by `{}` is never read",
1501 .help("did you mean to capture by reference instead?")
// Not used at all: plain unused-variable warning.
1508 if let Some(name) = self.should_warn(var) {
1509 self.ir.tcx.struct_span_lint_hir(
1510 lint::builtin::UNUSED_VARIABLES,
1514 lint.build(&format!("unused variable: `{}`", name))
1515 .help("did you mean to capture by reference instead?")
// Warns for each function parameter whose value is dead on entry, i.e. the
// argument passed by the caller is never read before being overwritten.
1525 fn warn_about_unused_args(&self, body: &hir::Body<'_>, entry_ln: LiveNode) {
1526 for p in body.params {
// Use the body's entry node for all parameter bindings.
1527 self.check_unused_vars_in_pat(&p.pat, Some(entry_ln), |spans, hir_id, ln, var| {
1528 if !self.live_on_entry(ln, var) {
1529 self.report_unused_assign(hir_id, spans, var, |name| {
1530 format!("value passed to `{}` is never read", name)
// Walks all bindings in a pattern, grouping or-pattern duplicates by name.
// Variables used on entry are handed to `on_used_on_entry` (e.g. for dead-
// assignment checks); completely unused ones are reported via
// `report_unused`.
// NOTE(review): the `pat` parameter line of the signature is elided in this
// excerpt.
1537 fn check_unused_vars_in_pat(
1540 entry_ln: Option<LiveNode>,
1541 on_used_on_entry: impl Fn(Vec<Span>, HirId, LiveNode, Variable),
1543 // In an or-pattern, only consider the variable; any later patterns must have the same
1544 // bindings, and we also consider the first pattern to be the "authoritative" set of ids.
1545 // However, we should take the ids and spans of variables with the same name from the later
1546 // patterns so the suggestions to prefix with underscores will apply to those too.
// Map: name -> (live node, variable, all (hir_id, pat span, ident span)
// occurrences across the or-pattern alternatives).
1547 let mut vars: FxIndexMap<Symbol, (LiveNode, Variable, Vec<(HirId, Span, Span)>)> =
1550 pat.each_binding(|_, hir_id, pat_sp, ident| {
// Parameters pass an explicit entry node; other patterns use the
// binding's own live node.
1551 let ln = entry_ln.unwrap_or_else(|| self.live_node(hir_id, pat_sp));
1552 let var = self.variable(hir_id, ident.span);
1553 let id_and_sp = (hir_id, pat_sp, ident.span);
// First occurrence is authoritative; later same-name bindings only
// contribute their spans.
1554 vars.entry(self.ir.variable_name(var))
1555 .and_modify(|(.., hir_ids_and_spans)| hir_ids_and_spans.push(id_and_sp))
1556 .or_insert_with(|| (ln, var, vec![id_and_sp]));
1559 for (_, (ln, var, hir_ids_and_spans)) in vars {
1560 if self.used_on_entry(ln, var) {
1561 let id = hir_ids_and_spans[0].0;
1563 hir_ids_and_spans.into_iter().map(|(_, _, ident_span)| ident_span).collect();
1564 on_used_on_entry(spans, id, ln, var);
1566 self.report_unused(hir_ids_and_spans, ln, var);
// Body of `report_unused`: emits UNUSED_VARIABLES for a never-used binding.
// Three shapes of diagnostic: "assigned to, but never used" when the variable
// is written; "try ignoring the field" for struct-pattern shorthands; and the
// "prefix with an underscore" suggestion otherwise.
// NOTE(review): the `fn report_unused(` line and other parameters of the
// signature are elided in this excerpt — only this parameter line is visible.
1573 hir_ids_and_spans: Vec<(HirId, Span, Span)>,
1577 let first_hir_id = hir_ids_and_spans[0].0;
// `self` is deliberately never reported as unused.
1579 if let Some(name) = self.should_warn(var).filter(|name| name != "self") {
1580 // annoying: for parameters in funcs like `fn(x: i32)
1581 // {ret}`, there is only one node, so asking about
1582 // assigned_on_exit() is not meaningful.
1584 if ln == self.exit_ln { false } else { self.assigned_on_exit(ln, var) };
1587 self.ir.tcx.struct_span_lint_hir(
1588 lint::builtin::UNUSED_VARIABLES,
1592 .map(|(_, _, ident_span)| ident_span)
1593 .collect::<Vec<_>>(),
1595 lint.build(&format!("variable `{}` is assigned to, but never used", name))
1596 .note(&format!("consider using `_{}` instead", name))
// Split occurrences into field-shorthand bindings (`Foo { x }`) and
// ordinary bindings — they get different suggestions.
1601 let (shorthands, non_shorthands): (Vec<_>, Vec<_>) =
1602 hir_ids_and_spans.iter().copied().partition(|(hir_id, _, ident_span)| {
1603 let var = self.variable(*hir_id, *ident_span);
1604 self.ir.variable_is_shorthand(var)
1607 // If we have both shorthand and non-shorthand, prefer the "try ignoring
1608 // the field" message, and suggest `_` for the non-shorthands. If we only
1609 // have non-shorthand, then prefix with an underscore instead.
1610 if !shorthands.is_empty() {
// Shorthand `x` becomes `x: _` (pattern span); plain bindings become `_`.
1611 let shorthands = shorthands
1613 .map(|(_, pat_span, _)| (pat_span, format!("{}: _", name)))
1617 .map(|(_, pat_span, _)| (pat_span, "_".to_string())),
1619 .collect::<Vec<_>>();
1621 self.ir.tcx.struct_span_lint_hir(
1622 lint::builtin::UNUSED_VARIABLES,
1626 .map(|(_, pat_span, _)| *pat_span)
1627 .collect::<Vec<_>>(),
1629 let mut err = lint.build(&format!("unused variable: `{}`", name));
1630 err.multipart_suggestion(
1631 "try ignoring the field",
1633 Applicability::MachineApplicable,
// Only ordinary bindings: suggest the `_name` prefix at each ident span.
1639 let non_shorthands = non_shorthands
1641 .map(|(_, _, ident_span)| (ident_span, format!("_{}", name)))
1642 .collect::<Vec<_>>();
1644 self.ir.tcx.struct_span_lint_hir(
1645 lint::builtin::UNUSED_VARIABLES,
1649 .map(|(_, _, ident_span)| *ident_span)
1650 .collect::<Vec<_>>(),
1652 let mut err = lint.build(&format!("unused variable: `{}`", name));
1653 err.multipart_suggestion(
1654 "if this is intentional, prefix it with an underscore",
1656 Applicability::MachineApplicable,
// Reports an UNUSED_ASSIGNMENTS warning when the value written to `var` at
// node `ln` is never read afterwards (i.e. the variable is dead on exit from
// the assignment's live node).
1666 fn warn_about_dead_assign(&self, spans: Vec<Span>, hir_id: HirId, ln: LiveNode, var: Variable) {
1667 if !self.live_on_exit(ln, var) {
1668 self.report_unused_assign(hir_id, spans, var, |name| {
1669 format!("value assigned to `{}` is never read", name)
1674 fn report_unused_assign(
1679 message: impl Fn(&str) -> String,
1681 if let Some(name) = self.should_warn(var) {
1682 self.ir.tcx.struct_span_lint_hir(
1683 lint::builtin::UNUSED_ASSIGNMENTS,
1687 lint.build(&message(&name))
1688 .help("maybe it is overwritten before being read?")