1 //! A classic liveness analysis based on dataflow over the AST. Computes,
2 //! for each local variable in a function, whether that variable is live
3 //! at a given point. Program execution points are identified by their
8 //! The basic model is that each local variable is assigned an index. We
9 //! represent sets of local variables using a vector indexed by this
10 //! index. The value in the vector is either 0, indicating the variable
11 //! is dead, or the ID of an expression that uses the variable.
13 //! We conceptually walk over the AST in reverse execution order. If we
14 //! find a use of a variable, we add it to the set of live variables. If
15 //! we find an assignment to a variable, we remove it from the set of live
16 //! variables. When we have to merge two flows, we take the union of
17 //! those two flows -- if the variable is live on both paths, we simply
18 //! pick one ID. In the event of loops, we continue doing this until a
19 //! fixed point is reached.
21 //! ## Checking initialization
23 //! At the function entry point, all variables must be dead. If this is
24 //! not the case, we can report an error using the ID found in the set of
25 //! live variables, which identifies a use of the variable which is not
26 //! dominated by an assignment.
30 //! After each explicit move, the variable must be dead.
32 //! ## Computing last uses
34 //! Any use of the variable where the variable is dead afterwards is a
37 //! # Implementation details
39 //! The actual implementation contains two (nested) walks over the AST.
40 //! The outer walk has the job of building up the ir_maps instance for the
41 //! enclosing function. On the way down the tree, it identifies those AST
42 //! nodes and variable IDs that will be needed for the liveness analysis
43 //! and assigns them contiguous IDs. The liveness ID for an AST node is
44 //! called a `live_node` (it's a newtype'd `u32`) and the ID for a variable
45 //! is called a `variable` (another newtype'd `u32`).
47 //! On the way back up the tree, as we are about to exit from a function
48 //! declaration we allocate a `liveness` instance. Now that we know
49 //! precisely how many nodes and variables we need, we can allocate all
50 //! the various arrays that we will need to precisely the right size. We then
51 //! perform the actual propagation on the `liveness` instance.
53 //! This propagation is encoded in the various `propagate_through_*()`
54 //! methods. It effectively does a reverse walk of the AST; whenever we
55 //! reach a loop node, we iterate until a fixed point is reached.
57 //! ## The `RWU` struct
59 //! At each live node `N`, we track three pieces of information for each
60 //! variable `V` (these are encapsulated in the `RWU` struct):
62 //! - `reader`: the `LiveNode` ID of some node which will read the value
63 //! that `V` holds on entry to `N`. Formally: a node `M` such
64 //! that there exists a path `P` from `N` to `M` where `P` does not
65 //! write `V`. If the `reader` is `invalid_node()`, then the current
66 //! value will never be read (the variable is dead, essentially).
68 //! - `writer`: the `LiveNode` ID of some node which will write the
69 //! variable `V` and which is reachable from `N`. Formally: a node `M`
70 //! such that there exists a path `P` from `N` to `M` and `M` writes
71 //! `V`. If the `writer` is `invalid_node()`, then there is no writer
72 //! of `V` that follows `N`.
74 //! - `used`: a boolean value indicating whether `V` is *used*. We
75 //! distinguish a *read* from a *use* in that a *use* is some read that
76 //! is not just used to generate a new value. For example, `x += 1` is
77 //! a read but not a use. This is used to generate better warnings.
79 //! ## Special Variables
81 //! We generate various special variables for various, well, special purposes.
82 //! These are described in the `specials` struct:
84 //! - `exit_ln`: a live node that is generated to represent every 'exit' from
85 //! the function, whether it be by explicit return, panic, or other means.
87 //! - `fallthrough_ln`: a live node that represents a fallthrough
89 //! - `clean_exit_var`: a synthetic variable that is only 'read' from the
90 //! fallthrough node. It is only live if the function could converge
91 //! via means other than an explicit `return` expression. That is, it is
92 //! only dead if the end of the function's block can never be reached.
93 //! It is the responsibility of typeck to ensure that there are no
94 //! `return` expressions in a function declared as diverging.
96 use self::LiveNodeKind::*;
99 use errors::Applicability;
100 use rustc::hir::map::Map;
102 use rustc::ty::query::Providers;
103 use rustc::ty::{self, TyCtxt};
104 use rustc_data_structures::fx::FxIndexMap;
105 use rustc_hir as hir;
106 use rustc_hir::def::*;
107 use rustc_hir::def_id::DefId;
108 use rustc_hir::intravisit::{self, FnKind, NestedVisitorMap, Visitor};
109 use rustc_hir::{Expr, HirId, HirIdMap, HirIdSet, Node};
110 use rustc_span::symbol::sym;
111 use rustc_span::Span;
114 use std::collections::VecDeque;
116 use std::io::prelude::*;
120 #[derive(Copy, Clone, PartialEq)]
121 struct Variable(u32);
123 #[derive(Copy, Clone, PartialEq)]
124 struct LiveNode(u32);
127 fn get(&self) -> usize {
133 fn get(&self) -> usize {
138 #[derive(Copy, Clone, PartialEq, Debug)]
146 fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_>) -> String {
147 let cm = tcx.sess.source_map();
149 UpvarNode(s) => format!("Upvar node [{}]", cm.span_to_string(s)),
150 ExprNode(s) => format!("Expr node [{}]", cm.span_to_string(s)),
151 VarDefNode(s) => format!("Var def node [{}]", cm.span_to_string(s)),
152 ExitNode => "Exit node".to_owned(),
156 impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
157 type Map = Map<'tcx>;
159 fn nested_visit_map(&mut self) -> NestedVisitorMap<'_, Self::Map> {
160 NestedVisitorMap::OnlyBodies(&self.tcx.hir())
166 fd: &'tcx hir::FnDecl<'tcx>,
171 visit_fn(self, fk, fd, b, s, id);
174 fn visit_local(&mut self, l: &'tcx hir::Local<'tcx>) {
175 visit_local(self, l);
177 fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
178 visit_expr(self, ex);
180 fn visit_arm(&mut self, a: &'tcx hir::Arm<'tcx>) {
185 fn check_mod_liveness(tcx: TyCtxt<'_>, module_def_id: DefId) {
186 tcx.hir().visit_item_likes_in_module(
188 &mut IrMaps::new(tcx, module_def_id).as_deep_visitor(),
192 pub fn provide(providers: &mut Providers<'_>) {
193 *providers = Providers { check_mod_liveness, ..*providers };
196 impl fmt::Debug for LiveNode {
// Debug-formats a live node as `ln(<index>)`, matching the `{:?}` output
// used throughout the pass's `debug!` traces.
197 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
198 write!(f, "ln({})", self.get())
202 impl fmt::Debug for Variable {
// Debug-formats a variable as `v(<index>)`, the counterpart of the
// `ln(..)` form used for `LiveNode`.
203 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
204 write!(f, "v({})", self.get())
208 // ______________________________________________________________________
211 // This is the first pass and the one that drives the main
212 // computation. It walks up and down the IR once. On the way down,
213 // we count for each function the number of variables as well as
214 // liveness nodes. A liveness node is basically an expression or
215 // capture clause that does something of interest: either it has
216 // interesting control flow or it uses/defines a local variable.
218 // On the way back up, at each function node we create liveness sets
219 // (we now know precisely how big to make our various vectors and so
220 // forth) and then do the data-flow propagation to compute the set
221 // of live variables at each program point.
223 // Finally, we run back over the IR one last time and, using the
224 // computed liveness, check various safety conditions. For example,
225 // there must be no live nodes at the definition site for a variable
226 // unless it has an initializer. Similarly, each non-mutable local
227 // variable must not be assigned if there is some successor
228 // assignment. And so forth.
231 fn is_valid(&self) -> bool {
236 fn invalid_node() -> LiveNode {
245 #[derive(Copy, Clone, Debug)]
252 #[derive(Copy, Clone, Debug)]
254 Param(HirId, ast::Name),
259 struct IrMaps<'tcx> {
262 num_live_nodes: usize,
264 live_node_map: HirIdMap<LiveNode>,
265 variable_map: HirIdMap<Variable>,
266 capture_info_map: HirIdMap<Rc<Vec<CaptureInfo>>>,
267 var_kinds: Vec<VarKind>,
268 lnks: Vec<LiveNodeKind>,
272 fn new(tcx: TyCtxt<'tcx>, body_owner: DefId) -> IrMaps<'tcx> {
278 live_node_map: HirIdMap::default(),
279 variable_map: HirIdMap::default(),
280 capture_info_map: Default::default(),
281 var_kinds: Vec::new(),
286 fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
287 let ln = LiveNode(self.num_live_nodes as u32);
289 self.num_live_nodes += 1;
291 debug!("{:?} is of kind {}", ln, live_node_kind_to_string(lnk, self.tcx));
296 fn add_live_node_for_node(&mut self, hir_id: HirId, lnk: LiveNodeKind) {
297 let ln = self.add_live_node(lnk);
298 self.live_node_map.insert(hir_id, ln);
300 debug!("{:?} is node {:?}", ln, hir_id);
303 fn add_variable(&mut self, vk: VarKind) -> Variable {
304 let v = Variable(self.num_vars as u32);
305 self.var_kinds.push(vk);
309 Local(LocalInfo { id: node_id, .. }) | Param(node_id, _) => {
310 self.variable_map.insert(node_id, v);
315 debug!("{:?} is {:?}", v, vk);
320 fn variable(&self, hir_id: HirId, span: Span) -> Variable {
321 match self.variable_map.get(&hir_id) {
324 span_bug!(span, "no variable registered for id {:?}", hir_id);
329 fn variable_name(&self, var: Variable) -> String {
330 match self.var_kinds[var.get()] {
331 Local(LocalInfo { name, .. }) | Param(_, name) => name.to_string(),
332 CleanExit => "<clean-exit>".to_owned(),
336 fn variable_is_shorthand(&self, var: Variable) -> bool {
337 match self.var_kinds[var.get()] {
338 Local(LocalInfo { is_shorthand, .. }) => is_shorthand,
339 Param(..) | CleanExit => false,
343 fn set_captures(&mut self, hir_id: HirId, cs: Vec<CaptureInfo>) {
344 self.capture_info_map.insert(hir_id, Rc::new(cs));
347 fn lnk(&self, ln: LiveNode) -> LiveNodeKind {
353 ir: &mut IrMaps<'tcx>,
355 decl: &'tcx hir::FnDecl<'tcx>,
356 body_id: hir::BodyId,
362 // swap in a new set of IR maps for this function body:
363 let def_id = ir.tcx.hir().local_def_id(id);
364 let mut fn_maps = IrMaps::new(ir.tcx, def_id);
366 // Don't run unused pass for #[derive()]
367 if let FnKind::Method(..) = fk {
368 let parent = ir.tcx.hir().get_parent_item(id);
369 if let Some(Node::Item(i)) = ir.tcx.hir().find(parent) {
370 if i.attrs.iter().any(|a| a.check_name(sym::automatically_derived)) {
376 debug!("creating fn_maps: {:p}", &fn_maps);
378 let body = ir.tcx.hir().body(body_id);
380 for param in body.params {
381 let is_shorthand = match param.pat.kind {
382 rustc_hir::PatKind::Struct(..) => true,
385 param.pat.each_binding(|_bm, hir_id, _x, ident| {
386 debug!("adding parameters {:?}", hir_id);
387 let var = if is_shorthand {
388 Local(LocalInfo { id: hir_id, name: ident.name, is_shorthand: true })
390 Param(hir_id, ident.name)
392 fn_maps.add_variable(var);
396 // gather up the various local variables, significant expressions,
398 intravisit::walk_fn(&mut fn_maps, fk, decl, body_id, sp, id);
401 let mut lsets = Liveness::new(&mut fn_maps, body_id);
402 let entry_ln = lsets.compute(&body.value);
404 // check for various error conditions
405 lsets.visit_body(body);
406 lsets.warn_about_unused_args(body, entry_ln);
409 fn add_from_pat(ir: &mut IrMaps<'_>, pat: &hir::Pat<'_>) {
410 // For struct patterns, take note of which fields used shorthand
411 // (`x` rather than `x: x`).
412 let mut shorthand_field_ids = HirIdSet::default();
413 let mut pats = VecDeque::new();
415 while let Some(pat) = pats.pop_front() {
416 use rustc_hir::PatKind::*;
418 Binding(.., inner_pat) => {
419 pats.extend(inner_pat.iter());
421 Struct(_, fields, _) => {
422 let ids = fields.iter().filter(|f| f.is_shorthand).map(|f| f.pat.hir_id);
423 shorthand_field_ids.extend(ids);
425 Ref(inner_pat, _) | Box(inner_pat) => {
426 pats.push_back(inner_pat);
428 TupleStruct(_, inner_pats, _) | Tuple(inner_pats, _) | Or(inner_pats) => {
429 pats.extend(inner_pats.iter());
431 Slice(pre_pats, inner_pat, post_pats) => {
432 pats.extend(pre_pats.iter());
433 pats.extend(inner_pat.iter());
434 pats.extend(post_pats.iter());
440 pat.each_binding(|_, hir_id, _, ident| {
441 ir.add_live_node_for_node(hir_id, VarDefNode(ident.span));
442 ir.add_variable(Local(LocalInfo {
445 is_shorthand: shorthand_field_ids.contains(&hir_id),
450 fn visit_local<'tcx>(ir: &mut IrMaps<'tcx>, local: &'tcx hir::Local<'tcx>) {
451 add_from_pat(ir, &local.pat);
452 intravisit::walk_local(ir, local);
455 fn visit_arm<'tcx>(ir: &mut IrMaps<'tcx>, arm: &'tcx hir::Arm<'tcx>) {
456 add_from_pat(ir, &arm.pat);
457 intravisit::walk_arm(ir, arm);
460 fn visit_expr<'tcx>(ir: &mut IrMaps<'tcx>, expr: &'tcx Expr<'tcx>) {
462 // live nodes required for uses or definitions of variables:
463 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
464 debug!("expr {}: path that leads to {:?}", expr.hir_id, path.res);
465 if let Res::Local(var_hir_id) = path.res {
466 let upvars = ir.tcx.upvars(ir.body_owner);
467 if !upvars.map_or(false, |upvars| upvars.contains_key(&var_hir_id)) {
468 ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
471 intravisit::walk_expr(ir, expr);
473 hir::ExprKind::Closure(..) => {
474 // Interesting control flow (for loops can contain labeled
475 // breaks or continues)
476 ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
478 // Make a live_node for each captured variable, with the span
479 // being the location that the variable is used. This results
480 // in better error messages than just pointing at the closure
481 // construction site.
482 let mut call_caps = Vec::new();
483 let closure_def_id = ir.tcx.hir().local_def_id(expr.hir_id);
484 if let Some(upvars) = ir.tcx.upvars(closure_def_id) {
485 let parent_upvars = ir.tcx.upvars(ir.body_owner);
486 call_caps.extend(upvars.iter().filter_map(|(&var_id, upvar)| {
488 parent_upvars.map_or(false, |upvars| upvars.contains_key(&var_id));
490 let upvar_ln = ir.add_live_node(UpvarNode(upvar.span));
491 Some(CaptureInfo { ln: upvar_ln, var_hid: var_id })
497 ir.set_captures(expr.hir_id, call_caps);
498 let old_body_owner = ir.body_owner;
499 ir.body_owner = closure_def_id;
500 intravisit::walk_expr(ir, expr);
501 ir.body_owner = old_body_owner;
504 // live nodes required for interesting control flow:
505 hir::ExprKind::Match(..) | hir::ExprKind::Loop(..) => {
506 ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
507 intravisit::walk_expr(ir, expr);
509 hir::ExprKind::Binary(op, ..) if op.node.is_lazy() => {
510 ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
511 intravisit::walk_expr(ir, expr);
514 // otherwise, live nodes are not required:
515 hir::ExprKind::Index(..)
516 | hir::ExprKind::Field(..)
517 | hir::ExprKind::Array(..)
518 | hir::ExprKind::Call(..)
519 | hir::ExprKind::MethodCall(..)
520 | hir::ExprKind::Tup(..)
521 | hir::ExprKind::Binary(..)
522 | hir::ExprKind::AddrOf(..)
523 | hir::ExprKind::Cast(..)
524 | hir::ExprKind::DropTemps(..)
525 | hir::ExprKind::Unary(..)
526 | hir::ExprKind::Break(..)
527 | hir::ExprKind::Continue(_)
528 | hir::ExprKind::Lit(_)
529 | hir::ExprKind::Ret(..)
530 | hir::ExprKind::Block(..)
531 | hir::ExprKind::Assign(..)
532 | hir::ExprKind::AssignOp(..)
533 | hir::ExprKind::Struct(..)
534 | hir::ExprKind::Repeat(..)
535 | hir::ExprKind::InlineAsm(..)
536 | hir::ExprKind::Box(..)
537 | hir::ExprKind::Yield(..)
538 | hir::ExprKind::Type(..)
540 | hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => {
541 intravisit::walk_expr(ir, expr);
546 // ______________________________________________________________________
547 // Computing liveness sets
549 // Actually we compute just a bit more than just liveness, but we use
550 // the same basic propagation framework in all cases.
552 #[derive(Clone, Copy)]
559 /// Conceptually, this is like a `Vec<RWU>`. But the number of `RWU`s can get
560 /// very large, so it uses a more compact representation that takes advantage
561 /// of the fact that when the number of `RWU`s is large, most of them have an
562 /// invalid reader and an invalid writer.
564 /// Each entry in `packed_rwus` is either INV_INV_FALSE, INV_INV_TRUE, or
565 /// an index into `unpacked_rwus`. In the common cases, this compacts the
566 /// 65 bits of data into 32; in the uncommon cases, it expands the 65 bits
569 /// More compact representations are possible -- e.g., use only 2 bits per
570 /// packed `RWU` and make the secondary table a HashMap that maps from
571 /// indices to `RWU`s -- but this one strikes a good balance between size
573 packed_rwus: Vec<u32>,
574 unpacked_rwus: Vec<RWU>,
577 // A constant representing `RWU { reader: invalid_node(); writer: invalid_node(); used: false }`.
578 const INV_INV_FALSE: u32 = u32::MAX;
580 // A constant representing `RWU { reader: invalid_node(); writer: invalid_node(); used: true }`.
581 const INV_INV_TRUE: u32 = u32::MAX - 1;
// Creates a table of `num_rwus` entries, all starting as the packed
// sentinel `INV_INV_FALSE` (invalid reader, invalid writer, not used).
// The overflow table `unpacked_rwus` starts empty and only grows when an
// entry can no longer be represented by a sentinel.
584 fn new(num_rwus: usize) -> RWUTable {
585 Self { packed_rwus: vec![INV_INV_FALSE; num_rwus], unpacked_rwus: vec![] }
// Decodes the packed entry at `idx` into a full `RWU`: the two sentinels
// expand to an invalid reader/writer (differing only in `used`), and any
// other value is an index into `unpacked_rwus`.
588 fn get(&self, idx: usize) -> RWU {
589 let packed_rwu = self.packed_rwus[idx];
591 INV_INV_FALSE => RWU { reader: invalid_node(), writer: invalid_node(), used: false },
592 INV_INV_TRUE => RWU { reader: invalid_node(), writer: invalid_node(), used: true },
593 _ => self.unpacked_rwus[packed_rwu as usize],
// Like `get`, but extracts only the `reader` field, avoiding construction
// of a whole `RWU`. Both sentinels mean "no reader" -> `invalid_node()`.
597 fn get_reader(&self, idx: usize) -> LiveNode {
598 let packed_rwu = self.packed_rwus[idx];
600 INV_INV_FALSE | INV_INV_TRUE => invalid_node(),
601 _ => self.unpacked_rwus[packed_rwu as usize].reader,
// Like `get`, but extracts only the `writer` field. Both sentinels mean
// "no writer" -> `invalid_node()`.
605 fn get_writer(&self, idx: usize) -> LiveNode {
606 let packed_rwu = self.packed_rwus[idx];
608 INV_INV_FALSE | INV_INV_TRUE => invalid_node(),
609 _ => self.unpacked_rwus[packed_rwu as usize].writer,
// Like `get`, but extracts only the `used` bit. Note this is the one
// piece of information the two sentinels actually distinguish.
613 fn get_used(&self, idx: usize) -> bool {
614 let packed_rwu = self.packed_rwus[idx];
616 INV_INV_FALSE => false,
617 INV_INV_TRUE => true,
618 _ => self.unpacked_rwus[packed_rwu as usize].used,
// Copies only the 32-bit packed word from `src_idx` to `dst_idx`; if that
// word is an index into `unpacked_rwus`, both entries then refer to the
// same unpacked `RWU` rather than each holding a copy.
623 fn copy_packed(&mut self, dst_idx: usize, src_idx: usize) {
624 self.packed_rwus[dst_idx] = self.packed_rwus[src_idx];
// Stores `rwu` at `idx`. When both reader and writer are invalid the
// value re-packs into a sentinel; otherwise a fresh entry is appended to
// `unpacked_rwus` and its index recorded in `packed_rwus[idx]`.
627 fn assign_unpacked(&mut self, idx: usize, rwu: RWU) {
628 if rwu.reader == invalid_node() && rwu.writer == invalid_node() {
629 // When we overwrite an indexing entry in `self.packed_rwus` with
630 // `INV_INV_{TRUE,FALSE}` we don't remove the corresponding entry
631 // from `self.unpacked_rwus`; it's not worth the effort, and we
632 // can't have entries shifting around anyway.
633 self.packed_rwus[idx] = if rwu.used { INV_INV_TRUE } else { INV_INV_FALSE }
635 // Add a new RWU to `unpacked_rwus` and make `packed_rwus[idx]`
637 self.packed_rwus[idx] = self.unpacked_rwus.len() as u32;
638 self.unpacked_rwus.push(rwu);
// Resets the entry at `idx` to "invalid reader, invalid writer" while
// preserving its current `used` bit (queried via `get_used` first).
642 fn assign_inv_inv(&mut self, idx: usize) {
643 self.packed_rwus[idx] = if self.get_used(idx) { INV_INV_TRUE } else { INV_INV_FALSE };
647 #[derive(Copy, Clone)]
650 fallthrough_ln: LiveNode,
651 clean_exit_var: Variable,
654 const ACC_READ: u32 = 1;
655 const ACC_WRITE: u32 = 2;
656 const ACC_USE: u32 = 4;
658 struct Liveness<'a, 'tcx> {
659 ir: &'a mut IrMaps<'tcx>,
660 tables: &'a ty::TypeckTables<'tcx>,
662 successors: Vec<LiveNode>,
665 // mappings from loop node ID to LiveNode
666 // ("break" label should map to loop node ID,
667 // it probably doesn't now)
668 break_ln: HirIdMap<LiveNode>,
669 cont_ln: HirIdMap<LiveNode>,
672 impl<'a, 'tcx> Liveness<'a, 'tcx> {
673 fn new(ir: &'a mut IrMaps<'tcx>, body: hir::BodyId) -> Liveness<'a, 'tcx> {
674 // Special nodes and variables:
675 // - exit_ln represents the end of the fn, either by return or panic
676 // - implicit_ret_var is a pseudo-variable that represents
677 // an implicit return
678 let specials = Specials {
679 exit_ln: ir.add_live_node(ExitNode),
680 fallthrough_ln: ir.add_live_node(ExitNode),
681 clean_exit_var: ir.add_variable(CleanExit),
684 let tables = ir.tcx.body_tables(body);
686 let num_live_nodes = ir.num_live_nodes;
687 let num_vars = ir.num_vars;
693 successors: vec![invalid_node(); num_live_nodes],
694 rwu_table: RWUTable::new(num_live_nodes * num_vars),
695 break_ln: Default::default(),
696 cont_ln: Default::default(),
700 fn live_node(&self, hir_id: HirId, span: Span) -> LiveNode {
701 match self.ir.live_node_map.get(&hir_id) {
704 // This must be a mismatch between the ir_map construction
705 // above and the propagation code below; the two sets of
706 // code have to agree about which AST nodes are worth
707 // creating liveness nodes for.
708 span_bug!(span, "no live node registered for node {:?}", hir_id);
// Thin delegation to `IrMaps::variable`, which bug!s out (with `span`)
// if no variable was registered for `hir_id` during the first pass.
713 fn variable(&self, hir_id: HirId, span: Span) -> Variable {
714 self.ir.variable(hir_id, span)
717 fn define_bindings_in_pat(&mut self, pat: &hir::Pat<'_>, mut succ: LiveNode) -> LiveNode {
718 // In an or-pattern, only consider the first pattern; any later patterns
719 // must have the same bindings, and we also consider the first pattern
720 // to be the "authoritative" set of ids.
721 pat.each_binding_or_first(&mut |_, hir_id, pat_sp, ident| {
722 let ln = self.live_node(hir_id, pat_sp);
723 let var = self.variable(hir_id, ident.span);
724 self.init_from_succ(ln, succ);
725 self.define(ln, var);
// Flattens the (live node, variable) pair into an index for the
// row-major `rwu_table`: row `ln`, `num_vars` columns, column `var`.
731 fn idx(&self, ln: LiveNode, var: Variable) -> usize {
732 ln.get() * self.ir.num_vars + var.get()
// If `var` is live on entry to `ln` (i.e. some node may still read the
// current value), returns the kind of one such reader node; else `None`.
735 fn live_on_entry(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
736 assert!(ln.is_valid());
737 let reader = self.rwu_table.get_reader(self.idx(ln, var));
738 if reader.is_valid() { Some(self.ir.lnk(reader)) } else { None }
741 // Is this variable live on entry to any of its successor nodes?
// Implemented by looking up the recorded successor of `ln` and asking
// the entry-liveness question there.
742 fn live_on_exit(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
743 let successor = self.successors[ln.get()];
744 self.live_on_entry(successor, var)
// Whether `var` is *used* (not merely read; see the `RWU` docs above) at
// or after entry to `ln`.
747 fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
748 assert!(ln.is_valid());
749 self.rwu_table.get_used(self.idx(ln, var))
// If some node reachable from `ln` writes `var`, returns the kind of one
// such writer node; `None` if no write follows.
752 fn assigned_on_entry(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
753 assert!(ln.is_valid());
754 let writer = self.rwu_table.get_writer(self.idx(ln, var));
755 if writer.is_valid() { Some(self.ir.lnk(writer)) } else { None }
// Same as `assigned_on_entry`, but asked of `ln`'s recorded successor —
// i.e. is `var` assigned somewhere after `ln` executes?
758 fn assigned_on_exit(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
759 let successor = self.successors[ln.get()];
760 self.assigned_on_entry(successor, var)
763 fn indices2<F>(&mut self, ln: LiveNode, succ_ln: LiveNode, mut op: F)
765 F: FnMut(&mut Liveness<'a, 'tcx>, usize, usize),
767 let node_base_idx = self.idx(ln, Variable(0));
768 let succ_base_idx = self.idx(succ_ln, Variable(0));
769 for var_idx in 0..self.ir.num_vars {
770 op(self, node_base_idx + var_idx, succ_base_idx + var_idx);
774 fn write_vars<F>(&self, wr: &mut dyn Write, ln: LiveNode, mut test: F) -> io::Result<()>
776 F: FnMut(usize) -> LiveNode,
778 let node_base_idx = self.idx(ln, Variable(0));
779 for var_idx in 0..self.ir.num_vars {
780 let idx = node_base_idx + var_idx;
781 if test(idx).is_valid() {
782 write!(wr, " {:?}", Variable(var_idx as u32))?;
788 #[allow(unused_must_use)]
789 fn ln_str(&self, ln: LiveNode) -> String {
790 let mut wr = Vec::new();
792 let wr = &mut wr as &mut dyn Write;
793 write!(wr, "[ln({:?}) of kind {:?} reads", ln.get(), self.ir.lnk(ln));
794 self.write_vars(wr, ln, |idx| self.rwu_table.get_reader(idx));
795 write!(wr, " writes");
796 self.write_vars(wr, ln, |idx| self.rwu_table.get_writer(idx));
797 write!(wr, " precedes {:?}]", self.successors[ln.get()]);
799 String::from_utf8(wr).unwrap()
// Records `succ_ln` as the successor of `ln` without touching the RWU
// entries, leaving `ln`'s row in its freshly-created state.
802 fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) {
803 self.successors[ln.get()] = succ_ln;
805 // It is not necessary to initialize the RWUs here because they are all
806 // set to INV_INV_FALSE when they are created, and the sets only grow
807 // during iterations.
810 fn init_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode) {
811 // more efficient version of init_empty() / merge_from_succ()
812 self.successors[ln.get()] = succ_ln;
814 self.indices2(ln, succ_ln, |this, idx, succ_idx| {
815 this.rwu_table.copy_packed(idx, succ_idx);
817 debug!("init_from_succ(ln={}, succ={})", self.ln_str(ln), self.ln_str(succ_ln));
820 fn merge_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode, first_merge: bool) -> bool {
825 let mut changed = false;
826 self.indices2(ln, succ_ln, |this, idx, succ_idx| {
827 let mut rwu = this.rwu_table.get(idx);
828 let succ_rwu = this.rwu_table.get(succ_idx);
829 if succ_rwu.reader.is_valid() && !rwu.reader.is_valid() {
830 rwu.reader = succ_rwu.reader;
834 if succ_rwu.writer.is_valid() && !rwu.writer.is_valid() {
835 rwu.writer = succ_rwu.writer;
839 if succ_rwu.used && !rwu.used {
845 this.rwu_table.assign_unpacked(idx, rwu);
850 "merge_from_succ(ln={:?}, succ={}, first_merge={}, changed={})",
852 self.ln_str(succ_ln),
859 // Indicates that a local variable was *defined*; we know that no
860 // uses of the variable can precede the definition (resolve checks
861 // this) so we just clear out all the data.
862 fn define(&mut self, writer: LiveNode, var: Variable) {
863 let idx = self.idx(writer, var);
// `assign_inv_inv` clears reader/writer but keeps the `used` bit, so a
// prior "was ever used" fact survives the definition.
864 self.rwu_table.assign_inv_inv(idx);
866 debug!("{:?} defines {:?} (idx={}): {}", writer, var, idx, self.ln_str(writer));
869 // Either read, write, or both depending on the acc bitset
870 fn acc(&mut self, ln: LiveNode, var: Variable, acc: u32) {
871 debug!("{:?} accesses[{:x}] {:?}: {}", ln, acc, var, self.ln_str(ln));
873 let idx = self.idx(ln, var);
874 let mut rwu = self.rwu_table.get(idx);
876 if (acc & ACC_WRITE) != 0 {
877 rwu.reader = invalid_node();
881 // Important: if we both read/write, must do read second
882 // or else the write will override.
883 if (acc & ACC_READ) != 0 {
887 if (acc & ACC_USE) != 0 {
891 self.rwu_table.assign_unpacked(idx, rwu);
894 fn compute(&mut self, body: &hir::Expr<'_>) -> LiveNode {
896 "compute: using id for body, {}",
897 self.ir.tcx.hir().hir_to_pretty_string(body.hir_id)
900 // the fallthrough exit is only for those cases where we do not
901 // explicitly return:
903 self.init_from_succ(s.fallthrough_ln, s.exit_ln);
904 self.acc(s.fallthrough_ln, s.clean_exit_var, ACC_READ);
906 let entry_ln = self.propagate_through_expr(body, s.fallthrough_ln);
908 // hack to skip the loop unless debug! is enabled:
910 "^^ liveness computation results for body {} (entry={:?})",
912 for ln_idx in 0..self.ir.num_live_nodes {
913 debug!("{:?}", self.ln_str(LiveNode(ln_idx as u32)));
923 fn propagate_through_block(&mut self, blk: &hir::Block<'_>, succ: LiveNode) -> LiveNode {
924 if blk.targeted_by_break {
925 self.break_ln.insert(blk.hir_id, succ);
927 let succ = self.propagate_through_opt_expr(blk.expr.as_ref().map(|e| &**e), succ);
928 blk.stmts.iter().rev().fold(succ, |succ, stmt| self.propagate_through_stmt(stmt, succ))
931 fn propagate_through_stmt(&mut self, stmt: &hir::Stmt<'_>, succ: LiveNode) -> LiveNode {
933 hir::StmtKind::Local(ref local) => {
934 // Note: we mark the variable as defined regardless of whether
935 // there is an initializer. Initially I had thought to only mark
936 // the live variable as defined if it was initialized, and then we
937 // could check for uninit variables just by scanning what is live
938 // at the start of the function. But that doesn't work so well for
939 // immutable variables defined in a loop:
940 // loop { let x; x = 5; }
941 // because the "assignment" loops back around and generates an error.
943 // So now we just check that variables defined w/o an
944 // initializer are not live at the point of their
945 // initialization, which is mildly more complex than checking
946 // once at the func header but otherwise equivalent.
948 let succ = self.propagate_through_opt_expr(local.init.as_ref().map(|e| &**e), succ);
949 self.define_bindings_in_pat(&local.pat, succ)
951 hir::StmtKind::Item(..) => succ,
952 hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => {
953 self.propagate_through_expr(&expr, succ)
958 fn propagate_through_exprs(&mut self, exprs: &[Expr<'_>], succ: LiveNode) -> LiveNode {
959 exprs.iter().rev().fold(succ, |succ, expr| self.propagate_through_expr(&expr, succ))
962 fn propagate_through_opt_expr(
964 opt_expr: Option<&Expr<'_>>,
967 opt_expr.map_or(succ, |expr| self.propagate_through_expr(expr, succ))
970 fn propagate_through_expr(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
971 debug!("propagate_through_expr: {}", self.ir.tcx.hir().hir_to_pretty_string(expr.hir_id));
974 // Interesting cases with control flow or which gen/kill
975 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
976 self.access_path(expr.hir_id, path, succ, ACC_READ | ACC_USE)
979 hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
981 hir::ExprKind::Closure(..) => {
983 "{} is an ExprKind::Closure",
984 self.ir.tcx.hir().hir_to_pretty_string(expr.hir_id)
987 // the construction of a closure itself is not important,
988 // but we have to consider the closed over variables.
994 .unwrap_or_else(|| span_bug!(expr.span, "no registered caps"));
996 caps.iter().rev().fold(succ, |succ, cap| {
997 self.init_from_succ(cap.ln, succ);
998 let var = self.variable(cap.var_hid, expr.span);
999 self.acc(cap.ln, var, ACC_READ | ACC_USE);
1004 // Note that labels have been resolved, so we don't need to look
1005 // at the label ident
1006 hir::ExprKind::Loop(ref blk, _, _) => self.propagate_through_loop(expr, &blk, succ),
1008 hir::ExprKind::Match(ref e, arms, _) => {
1023 let ln = self.live_node(expr.hir_id, expr.span);
1024 self.init_empty(ln, succ);
1025 let mut first_merge = true;
1027 let body_succ = self.propagate_through_expr(&arm.body, succ);
1029 let guard_succ = self.propagate_through_opt_expr(
1030 arm.guard.as_ref().map(|hir::Guard::If(e)| *e),
1033 let arm_succ = self.define_bindings_in_pat(&arm.pat, guard_succ);
1034 self.merge_from_succ(ln, arm_succ, first_merge);
1035 first_merge = false;
1037 self.propagate_through_expr(&e, ln)
1040 hir::ExprKind::Ret(ref o_e) => {
1041 // ignore succ and subst exit_ln:
1042 let exit_ln = self.s.exit_ln;
// NOTE(review): this excerpt is non-contiguous (interior lines are elided),
// so only comments are added; the visible code is left byte-for-byte intact.
//
// Tail of `propagate_through_expr`: each match arm computes, in reverse
// execution order, the live node that precedes `succ` for one `ExprKind`.
1043 self.propagate_through_opt_expr(o_e.as_ref().map(|e| &**e), exit_ln)
1046 hir::ExprKind::Break(label, ref opt_expr) => {
1047 // Find which label this break jumps to
1048 let target = match label.target_id {
1049 Ok(hir_id) => self.break_ln.get(&hir_id),
1050 Err(err) => span_bug!(expr.span, "loop scope error: {}", err),
1054 // Now that we know the label we're going to,
1055 // look it up in the break loop nodes table
1058 Some(b) => self.propagate_through_opt_expr(opt_expr.as_ref().map(|e| &**e), b),
1060 // FIXME: This should have been checked earlier. Once this is fixed,
1061 // replace with `delay_span_bug`. (#62480)
1065 .struct_span_err(expr.span, "`break` to unknown label")
// Unresolvable break target: emit the error and abort compilation.
1067 errors::FatalError.raise()
1072 hir::ExprKind::Continue(label) => {
1073 // Find which label this expr continues to
1076 .unwrap_or_else(|err| span_bug!(expr.span, "loop scope error: {}", err));
1078 // Now that we know the label we're going to,
1079 // look it up in the continue loop nodes table
1083 .unwrap_or_else(|| span_bug!(expr.span, "continue to unknown label"))
1086 hir::ExprKind::Assign(ref l, ref r, _) => {
1087 // see comment on places in
1088 // propagate_through_place_components()
// Reverse order: record the write to the lhs, then the reads of the
// lhs's components, then propagate through the rhs.
1089 let succ = self.write_place(&l, succ, ACC_WRITE);
1090 let succ = self.propagate_through_place_components(&l, succ);
1091 self.propagate_through_expr(&r, succ)
1094 hir::ExprKind::AssignOp(_, ref l, ref r) => {
1095 // an overloaded assign op is like a method call
1096 if self.tables.is_method_call(expr) {
// Overloaded op: both operands are plain reads, no write tracking.
1097 let succ = self.propagate_through_expr(&l, succ);
1098 self.propagate_through_expr(&r, succ)
1100 // see comment on places in
1101 // propagate_through_place_components()
// Built-in `+=` etc. both reads and writes the lhs place.
1102 let succ = self.write_place(&l, succ, ACC_WRITE | ACC_READ);
1103 let succ = self.propagate_through_expr(&r, succ);
1104 self.propagate_through_place_components(&l, succ)
1108 // Uninteresting cases: just propagate in rev exec order
1109 hir::ExprKind::Array(ref exprs) => self.propagate_through_exprs(exprs, succ),
1111 hir::ExprKind::Struct(_, ref fields, ref with_expr) => {
// The functional-update base (`..with_expr`) is evaluated last, so it
// is propagated first in this reverse walk; fields follow.
1112 let succ = self.propagate_through_opt_expr(with_expr.as_ref().map(|e| &**e), succ);
1116 .fold(succ, |succ, field| self.propagate_through_expr(&field.expr, succ))
1119 hir::ExprKind::Call(ref f, ref args) => {
// If the call's type is uninhabited, the call diverges; the successor
// computed here (on an elided line) reflects that instead of `succ`.
1120 let m = self.ir.tcx.hir().get_module_parent(expr.hir_id);
1121 let succ = if self.ir.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) {
1126 let succ = self.propagate_through_exprs(args, succ);
1127 self.propagate_through_expr(&f, succ)
1130 hir::ExprKind::MethodCall(.., ref args) => {
// Same divergence treatment as `Call` above.
1131 let m = self.ir.tcx.hir().get_module_parent(expr.hir_id);
1132 let succ = if self.ir.tcx.is_ty_uninhabited_from(m, self.tables.expr_ty(expr)) {
1138 self.propagate_through_exprs(args, succ)
1141 hir::ExprKind::Tup(ref exprs) => self.propagate_through_exprs(exprs, succ),
1143 hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => {
// Short-circuiting `&&`/`||`: after evaluating the lhs, control either
// falls through to `succ` (rhs skipped) or enters the rhs. Model this
// by merging both successors into a fresh node `ln` for the lhs.
1144 let r_succ = self.propagate_through_expr(&r, succ);
1146 let ln = self.live_node(expr.hir_id, expr.span);
1147 self.init_from_succ(ln, succ);
1148 self.merge_from_succ(ln, r_succ, false);
1150 self.propagate_through_expr(&l, ln)
1153 hir::ExprKind::Index(ref l, ref r) | hir::ExprKind::Binary(_, ref l, ref r) => {
// Non-lazy binary ops and indexing: plain left-to-right evaluation,
// so propagate right then left in this reverse walk.
1154 let r_succ = self.propagate_through_expr(&r, succ);
1155 self.propagate_through_expr(&l, r_succ)
// Single-subexpression wrappers: liveness flows straight through.
1158 hir::ExprKind::Box(ref e)
1159 | hir::ExprKind::AddrOf(_, _, ref e)
1160 | hir::ExprKind::Cast(ref e, _)
1161 | hir::ExprKind::Type(ref e, _)
1162 | hir::ExprKind::DropTemps(ref e)
1163 | hir::ExprKind::Unary(_, ref e)
1164 | hir::ExprKind::Yield(ref e, _)
1165 | hir::ExprKind::Repeat(ref e, _) => self.propagate_through_expr(&e, succ),
1167 hir::ExprKind::InlineAsm(ref asm) => {
1168 let ia = &asm.inner;
1169 let outputs = asm.outputs_exprs;
1170 let inputs = asm.inputs_exprs;
// Outputs are written last, so handle them first (rev order). A
// read-write (`is_rw`) output also counts as a read of the place.
1171 let succ = ia.outputs.iter().zip(outputs).rev().fold(succ, |succ, (o, output)| {
1172 // see comment on places
1173 // in propagate_through_place_components()
1175 self.propagate_through_expr(output, succ)
1177 let acc = if o.is_rw { ACC_WRITE | ACC_READ } else { ACC_WRITE };
1178 let succ = self.write_place(output, succ, acc);
1179 self.propagate_through_place_components(output, succ)
1183 // Inputs are executed first. Propagate last because of rev order
1184 self.propagate_through_exprs(inputs, succ)
// Leaf expressions with no operands: liveness is unchanged.
1187 hir::ExprKind::Lit(..)
1188 | hir::ExprKind::Err
1189 | hir::ExprKind::Path(hir::QPath::TypeRelative(..)) => succ,
1191 // Note that labels have been resolved, so we don't need to look
1192 // at the label ident
1193 hir::ExprKind::Block(ref blk, _) => self.propagate_through_block(&blk, succ),
// Generates the reads for the *components* of a place expression (e.g. the
// `x` in `x[5]` or `x.f`), as opposed to the write of the place itself,
// which is handled by `write_place()`. (Excerpt note: interior lines of
// this function are elided; code below is unchanged.)
1197 fn propagate_through_place_components(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
1200 // In general, the full flow graph structure for an
1201 // assignment/move/etc can be handled in one of two ways,
1202 // depending on whether what is being assigned is a "tracked
1203 // value" or not. A tracked value is basically a local
1204 // variable or argument.
1206 // The two kinds of graphs are:
1208 // Tracked place Untracked place
1209 // ----------------------++-----------------------
1213 // (rvalue) || (rvalue)
1216 // (write of place) || (place components)
1221 // ----------------------++-----------------------
1223 // I will cover the two cases in turn:
1227 // A tracked place is a local variable/argument `x`. In
1228 // these cases, the link_node where the write occurs is linked
1229 // to node id of `x`. The `write_place()` routine generates
1230 // the contents of this node. There are no subcomponents to
1233 // # Non-tracked places
1235 // These are places like `x[5]` or `x.f`. In that case, we
1236 // basically ignore the value which is written to but generate
1237 // reads for the components---`x` in these two examples. The
1238 // components reads are generated by
1239 // `propagate_through_place_components()` (this fn).
1243 // It is still possible to observe assignments to non-places;
1244 // these errors are detected in the later pass borrowck. We
1245 // just ignore such cases and treat them as reads.
// A bare path is a tracked place: the write node covers it, so there
// are no component reads to generate here.
1248 hir::ExprKind::Path(_) => succ,
// `x.f`: generate a read of the base `x`.
1249 hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
// Anything else (including non-places): treat as an ordinary read.
1250 _ => self.propagate_through_expr(expr, succ),
1254 // see comment on propagate_through_place()
// Records the write access (`acc`) performed on `expr`, returning the
// node preceding `succ`. Only resolved paths (locals) are tracked.
1255 fn write_place(&mut self, expr: &Expr<'_>, succ: LiveNode, acc: u32) -> LiveNode {
1257 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
1258 self.access_path(expr.hir_id, path, succ, acc)
1261 // We do not track other places, so just propagate through
1262 // to their subcomponents. Also, it may happen that
1263 // non-places occur here, because those are detected in the
1264 // later pass borrowck.
// NOTE(review): original lines 1265-1276 are elided here; the lines
// below appear to belong to a separate helper (presumably `access_var`,
// given `var_hid`/`acc` in scope) — confirm against the full file.
1277 let ln = self.live_node(hir_id, span);
1279 self.init_from_succ(ln, succ);
1280 let var = self.variable(var_hid, span);
1281 self.acc(ln, var, acc);
// Fragment of `access_path` (signature and surrounding lines elided):
// resolves the path and, for a local that is not a closure upvar,
// records the access via `access_var`.
1289 path: &hir::Path<'_>,
1294 Res::Local(hid) => {
// Upvars (captured variables) are not tracked by this analysis;
// only record accesses to plain locals of the current body.
1295 let upvars = self.ir.tcx.upvars(self.ir.body_owner);
1296 if !upvars.map_or(false, |upvars| upvars.contains_key(&hid)) {
1297 self.access_var(hir_id, hid, succ, acc, path.span)
// Dataflow through a `loop`: iterates propagation over the body until a
// fixed point is reached (see module docs on merging flows in loops).
// (Excerpt note: parts of the signature/body are elided.)
1306 fn propagate_through_loop(
1309 body: &hir::Block<'_>,
1313 We model control flow like this:
1320 Note that a `continue` expression targeting the `loop` will have a successor of `expr`.
1321 Meanwhile, a `break` expression will have a successor of `succ`.
1325 let mut first_merge = true;
// `ln` is the loop-head node; start it empty and grow it by merging
// the body's flow until nothing changes.
1326 let ln = self.live_node(expr.hir_id, expr.span);
1327 self.init_empty(ln, succ);
1329 "propagate_through_loop: using id for loop body {} {}",
1331 self.ir.tcx.hir().hir_to_pretty_string(body.hir_id)
// `break` jumps out to `succ`; `continue` jumps back to the head `ln`.
1334 self.break_ln.insert(expr.hir_id, succ);
1336 self.cont_ln.insert(expr.hir_id, ln);
1338 let body_ln = self.propagate_through_block(body, ln);
1340 // repeat until fixed point is reached:
1341 while self.merge_from_succ(ln, body_ln, first_merge) {
1342 first_merge = false;
// Re-propagating the body must yield the same node (the node id is
// stable); only the liveness data inside it changes.
1343 assert_eq!(body_ln, self.propagate_through_block(body, ln));
1350 // _______________________________________________________________________
1351 // Checking for error conditions
// Second walk over the HIR: now that liveness has been computed, visit
// each node and report unused-variable / dead-assignment conditions.
1353 impl<'a, 'tcx> Visitor<'tcx> for Liveness<'a, 'tcx> {
1354 type Map = Map<'tcx>;
// No nested visiting: nested bodies are checked separately.
1356 fn nested_visit_map(&mut self) -> NestedVisitorMap<'_, Self::Map> {
1357 NestedVisitorMap::None
1360 fn visit_local(&mut self, local: &'tcx hir::Local<'tcx>) {
// A binding that is used-on-entry but has an initializer may still
// warrant a dead-assignment warning for that initializer.
1361 self.check_unused_vars_in_pat(&local.pat, None, |spans, hir_id, ln, var| {
1362 if local.init.is_some() {
1363 self.warn_about_dead_assign(spans, hir_id, ln, var);
1367 intravisit::walk_local(self, local);
1370 fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
1371 check_expr(self, ex);
1374 fn visit_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) {
// Arm bindings get the unused-variable check but no extra action
// when used on entry (no initializer to warn about).
1375 self.check_unused_vars_in_pat(&arm.pat, None, |_, _, _, _| {});
1376 intravisit::walk_arm(self, arm);
// Per-expression error checking: flags problematic assignments to places.
// (Excerpt note: interior lines are elided; code below is unchanged.)
1380 fn check_expr<'tcx>(this: &mut Liveness<'_, 'tcx>, expr: &'tcx Expr<'tcx>) {
1382 hir::ExprKind::Assign(ref l, ..) => {
1383 this.check_place(&l);
1386 hir::ExprKind::AssignOp(_, ref l, _) => {
// An overloaded assign op is a method call, not a place write,
// so only check the lhs for the built-in case.
1387 if !this.tables.is_method_call(expr) {
1388 this.check_place(&l);
1392 hir::ExprKind::InlineAsm(ref asm) => {
1393 for input in asm.inputs_exprs {
1394 this.visit_expr(input);
1397 // Output operands must be places
1398 for (o, output) in asm.inner.outputs.iter().zip(asm.outputs_exprs) {
1400 this.check_place(output);
1402 this.visit_expr(output);
1406 // no correctness conditions related to liveness
1407 hir::ExprKind::Call(..)
1408 | hir::ExprKind::MethodCall(..)
1409 | hir::ExprKind::Match(..)
1410 | hir::ExprKind::Loop(..)
1411 | hir::ExprKind::Index(..)
1412 | hir::ExprKind::Field(..)
1413 | hir::ExprKind::Array(..)
1414 | hir::ExprKind::Tup(..)
1415 | hir::ExprKind::Binary(..)
1416 | hir::ExprKind::Cast(..)
1417 | hir::ExprKind::DropTemps(..)
1418 | hir::ExprKind::Unary(..)
1419 | hir::ExprKind::Ret(..)
1420 | hir::ExprKind::Break(..)
1421 | hir::ExprKind::Continue(..)
1422 | hir::ExprKind::Lit(_)
1423 | hir::ExprKind::Block(..)
1424 | hir::ExprKind::AddrOf(..)
1425 | hir::ExprKind::Struct(..)
1426 | hir::ExprKind::Repeat(..)
1427 | hir::ExprKind::Closure(..)
1428 | hir::ExprKind::Path(_)
1429 | hir::ExprKind::Yield(..)
1430 | hir::ExprKind::Box(..)
1431 | hir::ExprKind::Type(..)
1432 | hir::ExprKind::Err => {}
// Always recurse into subexpressions after the kind-specific checks.
1435 intravisit::walk_expr(this, expr);
1438 impl<'tcx> Liveness<'_, 'tcx> {
// Checks an assigned-to place: for a plain (non-upvar) local, emit a
// dead-assignment warning when the written value is never read.
1439 fn check_place(&mut self, expr: &'tcx Expr<'tcx>) {
1441 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
1442 if let Res::Local(var_hid) = path.res {
// Captured upvars are excluded, matching `access_path`.
1443 let upvars = self.ir.tcx.upvars(self.ir.body_owner);
1444 if !upvars.map_or(false, |upvars| upvars.contains_key(&var_hid)) {
1445 // Assignment to an immutable variable or argument: only legal
1446 // if there is no later assignment. If this local is actually
1447 // mutable, then check for a reassignment to flag the mutability
1449 let ln = self.live_node(expr.hir_id, expr.span);
1450 let var = self.variable(var_hid, expr.span);
1451 self.warn_about_dead_assign(vec![expr.span], expr.hir_id, ln, var);
1456 // For other kinds of places, no checks are required,
1457 // and any embedded expressions are actually rvalues
1458 intravisit::walk_expr(self, expr);
// Returns the variable's name if it should be warned about; variables
// whose names start with `_` (and unnamed ones) are exempt.
1463 fn should_warn(&self, var: Variable) -> Option<String> {
1464 let name = self.ir.variable_name(var);
1465 if name.is_empty() || name.as_bytes()[0] == b'_' { None } else { Some(name) }
// Warns for function parameters whose values are never read: each
// parameter binding is checked at the body entry node `entry_ln`.
1468 fn warn_about_unused_args(&self, body: &hir::Body<'_>, entry_ln: LiveNode) {
1469 for p in body.params {
1470 self.check_unused_vars_in_pat(&p.pat, Some(entry_ln), |spans, hir_id, ln, var| {
// Used-on-entry but not live-on-entry: the passed-in value is
// overwritten before any read — report as a dead assignment.
1471 if self.live_on_entry(ln, var).is_none() {
1472 self.report_dead_assign(hir_id, spans, var, true);
// Walks a pattern's bindings, groups same-named bindings (for or-patterns),
// and either reports unused variables or invokes `on_used_on_entry`.
// (Excerpt note: part of the signature is elided.)
1478 fn check_unused_vars_in_pat(
1481 entry_ln: Option<LiveNode>,
1482 on_used_on_entry: impl Fn(Vec<Span>, HirId, LiveNode, Variable),
1484 // In an or-pattern, only consider the variable; any later patterns must have the same
1485 // bindings, and we also consider the first pattern to be the "authoritative" set of ids.
1486 // However, we should take the spans of variables with the same name from the later
1487 // patterns so the suggestions to prefix with underscores will apply to those too.
// FxIndexMap keeps insertion order, so diagnostics are deterministic.
1488 let mut vars: FxIndexMap<String, (LiveNode, Variable, HirId, Vec<Span>)> = <_>::default();
1490 pat.each_binding(|_, hir_id, pat_sp, ident| {
// `entry_ln` (when given, e.g. for params) overrides the per-binding
// live node lookup.
1491 let ln = entry_ln.unwrap_or_else(|| self.live_node(hir_id, pat_sp));
1492 let var = self.variable(hir_id, ident.span);
1493 vars.entry(self.ir.variable_name(var))
1494 .and_modify(|(.., spans)| spans.push(ident.span))
1495 .or_insert_with(|| (ln, var, hir_id, vec![ident.span]));
1498 for (_, (ln, var, id, spans)) in vars {
1499 if self.used_on_entry(ln, var) {
1500 on_used_on_entry(spans, id, ln, var);
1502 self.report_unused(spans, id, ln, var);
// Emits the `unused_variables` lint for `var`, choosing between
// "assigned to, but never used" and "unused variable" messages and
// attaching machine-applicable suggestions. `self` is never warned.
// (Excerpt note: interior lines are elided; code below is unchanged.)
1507 fn report_unused(&self, spans: Vec<Span>, hir_id: HirId, ln: LiveNode, var: Variable) {
1508 if let Some(name) = self.should_warn(var).filter(|name| name != "self") {
1509 // annoying: for parameters in funcs like `fn(x: i32)
1510 // {ret}`, there is only one node, so asking about
1511 // assigned_on_exit() is not meaningful.
1513 if ln == self.s.exit_ln { false } else { self.assigned_on_exit(ln, var).is_some() };
1516 self.ir.tcx.lint_hir_note(
1517 lint::builtin::UNUSED_VARIABLES,
1520 &format!("variable `{}` is assigned to, but never used", name),
1521 &format!("consider using `_{}` instead", name),
1524 let mut err = self.ir.tcx.struct_span_lint_hir(
1525 lint::builtin::UNUSED_VARIABLES,
1528 &format!("unused variable: `{}`", name),
// Shorthand field bindings (`Foo { x }`) get a "`x: _`" suggestion
// rather than an underscore prefix, to keep the pattern valid.
1531 if self.ir.variable_is_shorthand(var) {
1532 if let Node::Binding(pat) = self.ir.tcx.hir().get(hir_id) {
1533 // Handle `ref` and `ref mut`.
1535 spans.iter().map(|_span| (pat.span, format!("{}: _", name))).collect();
1537 err.multipart_suggestion(
1538 "try ignoring the field",
1540 Applicability::MachineApplicable,
1544 err.multipart_suggestion(
1545 "consider prefixing with an underscore",
1546 spans.iter().map(|span| (*span, format!("_{}", name))).collect(),
1547 Applicability::MachineApplicable,
// Reports a dead assignment when the variable is not live after `ln`,
// i.e. the assigned value is never subsequently read.
1556 fn warn_about_dead_assign(&self, spans: Vec<Span>, hir_id: HirId, ln: LiveNode, var: Variable) {
1557 if self.live_on_exit(ln, var).is_none() {
1558 self.report_dead_assign(hir_id, spans, var, false);
// Emits the `unused_assignments` lint; `is_param` selects the
// "value passed to" wording for parameters versus "value assigned to"
// for ordinary assignments. (Excerpt note: interior lines are elided
// and the function continues past this view.)
1562 fn report_dead_assign(&self, hir_id: HirId, spans: Vec<Span>, var: Variable, is_param: bool) {
1563 if let Some(name) = self.should_warn(var) {
1567 .struct_span_lint_hir(
1568 lint::builtin::UNUSED_ASSIGNMENTS,
1571 &format!("value passed to `{}` is never read", name),
1573 .help("maybe it is overwritten before being read?")
1578 .struct_span_lint_hir(
1579 lint::builtin::UNUSED_ASSIGNMENTS,
1582 &format!("value assigned to `{}` is never read", name),
1584 .help("maybe it is overwritten before being read?")