1 //! A classic liveness analysis based on dataflow over the AST. Computes,
2 //! for each local variable in a function, whether that variable is live
3 //! at a given point. Program execution points are identified by their
8 //! The basic model is that each local variable is assigned an index. We
9 //! represent sets of local variables using a vector indexed by this
10 //! index. The value in the vector is either 0, indicating the variable
11 //! is dead, or the ID of an expression that uses the variable.
13 //! We conceptually walk over the AST in reverse execution order. If we
14 //! find a use of a variable, we add it to the set of live variables. If
15 //! we find an assignment to a variable, we remove it from the set of live
16 //! variables. When we have to merge two flows, we take the union of
17 //! those two flows -- if the variable is live on both paths, we simply
18 //! pick one ID. In the event of loops, we continue doing this until a
19 //! fixed point is reached.
21 //! ## Checking initialization
23 //! At the function entry point, all variables must be dead. If this is
24 //! not the case, we can report an error using the ID found in the set of
25 //! live variables, which identifies a use of the variable which is not
26 //! dominated by an assignment.
30 //! After each explicit move, the variable must be dead.
32 //! ## Computing last uses
34 //! Any use of the variable where the variable is dead afterwards is a
37 //! # Implementation details
39 //! The actual implementation contains two (nested) walks over the AST.
40 //! The outer walk has the job of building up the ir_maps instance for the
41 //! enclosing function. On the way down the tree, it identifies those AST
42 //! nodes and variable IDs that will be needed for the liveness analysis
43 //! and assigns them contiguous IDs. The liveness ID for an AST node is
44 //! called a `live_node` (it's a newtype'd `u32`) and the ID for a variable
45 //! is called a `variable` (another newtype'd `u32`).
47 //! On the way back up the tree, as we are about to exit from a function
48 //! declaration we allocate a `liveness` instance. Now that we know
49 //! precisely how many nodes and variables we need, we can allocate all
50 //! the various arrays that we will need to precisely the right size. We then
51 //! perform the actual propagation on the `liveness` instance.
53 //! This propagation is encoded in the various `propagate_through_*()`
54 //! methods. It effectively does a reverse walk of the AST; whenever we
55 //! reach a loop node, we iterate until a fixed point is reached.
57 //! ## The `RWU` struct
59 //! At each live node `N`, we track three pieces of information for each
60 //! variable `V` (these are encapsulated in the `RWU` struct):
62 //! - `reader`: the `LiveNode` ID of some node which will read the value
63 //! that `V` holds on entry to `N`. Formally: a node `M` such
64 //! that there exists a path `P` from `N` to `M` where `P` does not
65 //! write `V`. If the `reader` is `invalid_node()`, then the current
66 //! value will never be read (the variable is dead, essentially).
68 //! - `writer`: the `LiveNode` ID of some node which will write the
69 //! variable `V` and which is reachable from `N`. Formally: a node `M`
70 //! such that there exists a path `P` from `N` to `M` and `M` writes
71 //! `V`. If the `writer` is `invalid_node()`, then there is no writer
72 //! of `V` that follows `N`.
74 //! - `used`: a boolean value indicating whether `V` is *used*. We
75 //! distinguish a *read* from a *use* in that a *use* is some read that
76 //! is not just used to generate a new value. For example, `x += 1` is
77 //! a read but not a use. This is used to generate better warnings.
79 //! ## Special nodes and variables
81 //! We generate various special nodes for various, well, special purposes.
82 //! These are described in the `Liveness` struct.
84 use self::LiveNodeKind::*;
87 use rustc_ast::InlineAsmOptions;
88 use rustc_data_structures::fx::FxIndexMap;
89 use rustc_errors::Applicability;
91 use rustc_hir::def::*;
92 use rustc_hir::def_id::LocalDefId;
93 use rustc_hir::intravisit::{self, FnKind, NestedVisitorMap, Visitor};
94 use rustc_hir::{Expr, HirId, HirIdMap, HirIdSet, Node};
95 use rustc_middle::hir::map::Map;
96 use rustc_middle::ty::query::Providers;
97 use rustc_middle::ty::{self, TyCtxt};
98 use rustc_session::lint;
99 use rustc_span::symbol::{kw, sym, Symbol};
100 use rustc_span::Span;
102 use std::collections::VecDeque;
105 use std::io::prelude::*;
/// A local variable's index within the enclosing function body. Indices are
/// assigned contiguously during `IrMaps` construction (see the module docs:
/// "the ID for a variable is called a `variable` (another newtype'd `u32`)").
#[derive(Copy, Clone, PartialEq)]
struct Variable(u32);

/// The index of a liveness node — an AST point of interest for the dataflow
/// analysis. Assigned contiguously during `IrMaps` construction (see the
/// module docs: "the liveness ID for an AST node is called a `live_node`").
#[derive(Copy, Clone, PartialEq)]
struct LiveNode(u32);
115 fn get(&self) -> usize {
121 fn get(&self) -> usize {
126 #[derive(Copy, Clone, PartialEq, Debug)]
135 fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt<'_>) -> String {
136 let sm = tcx.sess.source_map();
138 UpvarNode(s) => format!("Upvar node [{}]", sm.span_to_string(s)),
139 ExprNode(s) => format!("Expr node [{}]", sm.span_to_string(s)),
140 VarDefNode(s) => format!("Var def node [{}]", sm.span_to_string(s)),
141 ClosureNode => "Closure node".to_owned(),
142 ExitNode => "Exit node".to_owned(),
146 impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
147 type Map = Map<'tcx>;
149 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
150 NestedVisitorMap::OnlyBodies(self.tcx.hir())
156 fd: &'tcx hir::FnDecl<'tcx>,
161 visit_fn(self, fk, fd, b, s, id);
164 fn visit_local(&mut self, l: &'tcx hir::Local<'tcx>) {
165 visit_local(self, l);
167 fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
168 visit_expr(self, ex);
170 fn visit_arm(&mut self, a: &'tcx hir::Arm<'tcx>) {
175 fn check_mod_liveness(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
176 tcx.hir().visit_item_likes_in_module(
178 &mut IrMaps::new(tcx, module_def_id).as_deep_visitor(),
182 pub fn provide(providers: &mut Providers) {
183 *providers = Providers { check_mod_liveness, ..*providers };
186 impl fmt::Debug for LiveNode {
187 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
188 write!(f, "ln({})", self.get())
192 impl fmt::Debug for Variable {
193 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
194 write!(f, "v({})", self.get())
198 // ______________________________________________________________________
201 // This is the first pass and the one that drives the main
202 // computation. It walks up and down the IR once. On the way down,
203 // we count for each function the number of variables as well as
204 // liveness nodes. A liveness node is basically an expression or
205 // capture clause that does something of interest: either it has
206 // interesting control flow or it uses/defines a local variable.
208 // On the way back up, at each function node we create liveness sets
209 // (we now know precisely how big to make our various vectors and so
210 // forth) and then do the data-flow propagation to compute the set
211 // of live variables at each program point.
213 // Finally, we run back over the IR one last time and, using the
214 // computed liveness, check various safety conditions. For example,
215 // there must be no live nodes at the definition site for a variable
216 // unless it has an initializer. Similarly, each non-mutable local
217 // variable must not be assigned if there is some successor
218 // assignment. And so forth.
221 fn is_valid(&self) -> bool {
226 fn invalid_node() -> LiveNode {
235 #[derive(Copy, Clone, Debug)]
242 #[derive(Copy, Clone, Debug)]
244 Param(HirId, Symbol),
246 Upvar(HirId, Symbol),
249 struct IrMaps<'tcx> {
251 body_owner: LocalDefId,
252 live_node_map: HirIdMap<LiveNode>,
253 variable_map: HirIdMap<Variable>,
254 capture_info_map: HirIdMap<Rc<Vec<CaptureInfo>>>,
255 var_kinds: Vec<VarKind>,
256 lnks: Vec<LiveNodeKind>,
260 fn new(tcx: TyCtxt<'tcx>, body_owner: LocalDefId) -> IrMaps<'tcx> {
264 live_node_map: HirIdMap::default(),
265 variable_map: HirIdMap::default(),
266 capture_info_map: Default::default(),
267 var_kinds: Vec::new(),
272 fn add_live_node(&mut self, lnk: LiveNodeKind) -> LiveNode {
273 let ln = LiveNode(self.lnks.len() as u32);
276 debug!("{:?} is of kind {}", ln, live_node_kind_to_string(lnk, self.tcx));
281 fn add_live_node_for_node(&mut self, hir_id: HirId, lnk: LiveNodeKind) {
282 let ln = self.add_live_node(lnk);
283 self.live_node_map.insert(hir_id, ln);
285 debug!("{:?} is node {:?}", ln, hir_id);
288 fn add_variable(&mut self, vk: VarKind) -> Variable {
289 let v = Variable(self.var_kinds.len() as u32);
290 self.var_kinds.push(vk);
293 Local(LocalInfo { id: node_id, .. }) | Param(node_id, _) | Upvar(node_id, _) => {
294 self.variable_map.insert(node_id, v);
298 debug!("{:?} is {:?}", v, vk);
303 fn variable(&self, hir_id: HirId, span: Span) -> Variable {
304 match self.variable_map.get(&hir_id) {
307 span_bug!(span, "no variable registered for id {:?}", hir_id);
312 fn variable_name(&self, var: Variable) -> Symbol {
313 match self.var_kinds[var.get()] {
314 Local(LocalInfo { name, .. }) | Param(_, name) | Upvar(_, name) => name,
318 fn variable_is_shorthand(&self, var: Variable) -> bool {
319 match self.var_kinds[var.get()] {
320 Local(LocalInfo { is_shorthand, .. }) => is_shorthand,
321 Param(..) | Upvar(..) => false,
325 fn set_captures(&mut self, hir_id: HirId, cs: Vec<CaptureInfo>) {
326 self.capture_info_map.insert(hir_id, Rc::new(cs));
329 fn lnk(&self, ln: LiveNode) -> LiveNodeKind {
335 ir: &mut IrMaps<'tcx>,
337 decl: &'tcx hir::FnDecl<'tcx>,
338 body_id: hir::BodyId,
342 debug!("visit_fn {:?}", id);
344 // swap in a new set of IR maps for this function body:
345 let def_id = ir.tcx.hir().local_def_id(id);
346 let mut fn_maps = IrMaps::new(ir.tcx, def_id);
348 // Don't run unused pass for #[derive()]
349 if let FnKind::Method(..) = fk {
350 let parent = ir.tcx.hir().get_parent_item(id);
351 if let Some(Node::Item(i)) = ir.tcx.hir().find(parent) {
352 if i.attrs.iter().any(|a| ir.tcx.sess.check_name(a, sym::automatically_derived)) {
358 debug!("creating fn_maps: {:p}", &fn_maps);
360 let body = ir.tcx.hir().body(body_id);
362 if let Some(upvars) = ir.tcx.upvars_mentioned(def_id) {
363 for (&var_hir_id, _upvar) in upvars {
364 let var_name = ir.tcx.hir().name(var_hir_id);
365 fn_maps.add_variable(Upvar(var_hir_id, var_name));
369 for param in body.params {
370 let is_shorthand = match param.pat.kind {
371 rustc_hir::PatKind::Struct(..) => true,
374 param.pat.each_binding(|_bm, hir_id, _x, ident| {
375 let var = if is_shorthand {
376 Local(LocalInfo { id: hir_id, name: ident.name, is_shorthand: true })
378 Param(hir_id, ident.name)
380 fn_maps.add_variable(var);
384 // gather up the various local variables, significant expressions,
386 intravisit::walk_fn(&mut fn_maps, fk, decl, body_id, sp, id);
389 let mut lsets = Liveness::new(&mut fn_maps, def_id);
390 let entry_ln = lsets.compute(fk, &body, sp, id);
391 lsets.log_liveness(entry_ln, id);
393 // check for various error conditions
394 lsets.visit_body(body);
395 lsets.warn_about_unused_upvars(entry_ln);
396 lsets.warn_about_unused_args(body, entry_ln);
399 fn add_from_pat(ir: &mut IrMaps<'_>, pat: &hir::Pat<'_>) {
400 // For struct patterns, take note of which fields used shorthand
401 // (`x` rather than `x: x`).
402 let mut shorthand_field_ids = HirIdSet::default();
403 let mut pats = VecDeque::new();
405 while let Some(pat) = pats.pop_front() {
406 use rustc_hir::PatKind::*;
408 Binding(.., inner_pat) => {
409 pats.extend(inner_pat.iter());
411 Struct(_, fields, _) => {
412 let ids = fields.iter().filter(|f| f.is_shorthand).map(|f| f.pat.hir_id);
413 shorthand_field_ids.extend(ids);
415 Ref(inner_pat, _) | Box(inner_pat) => {
416 pats.push_back(inner_pat);
418 TupleStruct(_, inner_pats, _) | Tuple(inner_pats, _) | Or(inner_pats) => {
419 pats.extend(inner_pats.iter());
421 Slice(pre_pats, inner_pat, post_pats) => {
422 pats.extend(pre_pats.iter());
423 pats.extend(inner_pat.iter());
424 pats.extend(post_pats.iter());
430 pat.each_binding(|_, hir_id, _, ident| {
431 ir.add_live_node_for_node(hir_id, VarDefNode(ident.span));
432 ir.add_variable(Local(LocalInfo {
435 is_shorthand: shorthand_field_ids.contains(&hir_id),
440 fn visit_local<'tcx>(ir: &mut IrMaps<'tcx>, local: &'tcx hir::Local<'tcx>) {
441 add_from_pat(ir, &local.pat);
442 intravisit::walk_local(ir, local);
445 fn visit_arm<'tcx>(ir: &mut IrMaps<'tcx>, arm: &'tcx hir::Arm<'tcx>) {
446 add_from_pat(ir, &arm.pat);
447 intravisit::walk_arm(ir, arm);
450 fn visit_expr<'tcx>(ir: &mut IrMaps<'tcx>, expr: &'tcx Expr<'tcx>) {
452 // live nodes required for uses or definitions of variables:
453 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
454 debug!("expr {}: path that leads to {:?}", expr.hir_id, path.res);
455 if let Res::Local(_var_hir_id) = path.res {
456 ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
458 intravisit::walk_expr(ir, expr);
460 hir::ExprKind::Closure(..) => {
461 // Interesting control flow (for loops can contain labeled
462 // breaks or continues)
463 ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
465 // Make a live_node for each captured variable, with the span
466 // being the location that the variable is used. This results
467 // in better error messages than just pointing at the closure
468 // construction site.
469 let mut call_caps = Vec::new();
470 let closure_def_id = ir.tcx.hir().local_def_id(expr.hir_id);
471 if let Some(upvars) = ir.tcx.upvars_mentioned(closure_def_id) {
472 call_caps.extend(upvars.iter().map(|(&var_id, upvar)| {
473 let upvar_ln = ir.add_live_node(UpvarNode(upvar.span));
474 CaptureInfo { ln: upvar_ln, var_hid: var_id }
477 ir.set_captures(expr.hir_id, call_caps);
478 let old_body_owner = ir.body_owner;
479 ir.body_owner = closure_def_id;
480 intravisit::walk_expr(ir, expr);
481 ir.body_owner = old_body_owner;
484 // live nodes required for interesting control flow:
485 hir::ExprKind::Match(..) | hir::ExprKind::Loop(..) => {
486 ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
487 intravisit::walk_expr(ir, expr);
489 hir::ExprKind::Binary(op, ..) if op.node.is_lazy() => {
490 ir.add_live_node_for_node(expr.hir_id, ExprNode(expr.span));
491 intravisit::walk_expr(ir, expr);
494 // otherwise, live nodes are not required:
495 hir::ExprKind::Index(..)
496 | hir::ExprKind::Field(..)
497 | hir::ExprKind::Array(..)
498 | hir::ExprKind::Call(..)
499 | hir::ExprKind::MethodCall(..)
500 | hir::ExprKind::Tup(..)
501 | hir::ExprKind::Binary(..)
502 | hir::ExprKind::AddrOf(..)
503 | hir::ExprKind::Cast(..)
504 | hir::ExprKind::DropTemps(..)
505 | hir::ExprKind::Unary(..)
506 | hir::ExprKind::Break(..)
507 | hir::ExprKind::Continue(_)
508 | hir::ExprKind::Lit(_)
509 | hir::ExprKind::Ret(..)
510 | hir::ExprKind::Block(..)
511 | hir::ExprKind::Assign(..)
512 | hir::ExprKind::AssignOp(..)
513 | hir::ExprKind::Struct(..)
514 | hir::ExprKind::Repeat(..)
515 | hir::ExprKind::InlineAsm(..)
516 | hir::ExprKind::LlvmInlineAsm(..)
517 | hir::ExprKind::Box(..)
518 | hir::ExprKind::Yield(..)
519 | hir::ExprKind::Type(..)
521 | hir::ExprKind::Path(hir::QPath::TypeRelative(..))
522 | hir::ExprKind::Path(hir::QPath::LangItem(..)) => {
523 intravisit::walk_expr(ir, expr);
528 // ______________________________________________________________________
529 // Computing liveness sets
531 // Actually we compute just a bit more than just liveness, but we use
532 // the same basic propagation framework in all cases.
534 #[derive(Clone, Copy)]
541 /// Conceptually, this is like a `Vec<RWU>`. But the number of `RWU`s can get
542 /// very large, so it uses a more compact representation that takes advantage
543 /// of the fact that when the number of `RWU`s is large, most of them have an
544 /// invalid reader and an invalid writer.
546 /// Each entry in `packed_rwus` is either INV_INV_FALSE, INV_INV_TRUE, or
547 /// an index into `unpacked_rwus`. In the common cases, this compacts the
548 /// 65 bits of data into 32; in the uncommon cases, it expands the 65 bits
551 /// More compact representations are possible -- e.g., use only 2 bits per
552 /// packed `RWU` and make the secondary table a HashMap that maps from
553 /// indices to `RWU`s -- but this one strikes a good balance between size
555 packed_rwus: Vec<u32>,
556 unpacked_rwus: Vec<RWU>,
// A constant representing `RWU { reader: invalid_node(), writer: invalid_node(), used: false }`.
const INV_INV_FALSE: u32 = u32::MAX;

// A constant representing `RWU { reader: invalid_node(), writer: invalid_node(), used: true }`.
const INV_INV_TRUE: u32 = u32::MAX - 1;
566 fn new(num_rwus: usize) -> RWUTable {
567 Self { packed_rwus: vec![INV_INV_FALSE; num_rwus], unpacked_rwus: vec![] }
570 fn get(&self, idx: usize) -> RWU {
571 let packed_rwu = self.packed_rwus[idx];
573 INV_INV_FALSE => RWU { reader: invalid_node(), writer: invalid_node(), used: false },
574 INV_INV_TRUE => RWU { reader: invalid_node(), writer: invalid_node(), used: true },
575 _ => self.unpacked_rwus[packed_rwu as usize],
579 fn get_reader(&self, idx: usize) -> LiveNode {
580 let packed_rwu = self.packed_rwus[idx];
582 INV_INV_FALSE | INV_INV_TRUE => invalid_node(),
583 _ => self.unpacked_rwus[packed_rwu as usize].reader,
587 fn get_writer(&self, idx: usize) -> LiveNode {
588 let packed_rwu = self.packed_rwus[idx];
590 INV_INV_FALSE | INV_INV_TRUE => invalid_node(),
591 _ => self.unpacked_rwus[packed_rwu as usize].writer,
595 fn get_used(&self, idx: usize) -> bool {
596 let packed_rwu = self.packed_rwus[idx];
598 INV_INV_FALSE => false,
599 INV_INV_TRUE => true,
600 _ => self.unpacked_rwus[packed_rwu as usize].used,
605 fn copy_packed(&mut self, dst_idx: usize, src_idx: usize) {
606 self.packed_rwus[dst_idx] = self.packed_rwus[src_idx];
609 fn assign_unpacked(&mut self, idx: usize, rwu: RWU) {
610 if rwu.reader == invalid_node() && rwu.writer == invalid_node() {
611 // When we overwrite an indexing entry in `self.packed_rwus` with
612 // `INV_INV_{TRUE,FALSE}` we don't remove the corresponding entry
613 // from `self.unpacked_rwus`; it's not worth the effort, and we
614 // can't have entries shifting around anyway.
615 self.packed_rwus[idx] = if rwu.used { INV_INV_TRUE } else { INV_INV_FALSE }
617 // Add a new RWU to `unpacked_rwus` and make `packed_rwus[idx]`
619 self.packed_rwus[idx] = self.unpacked_rwus.len() as u32;
620 self.unpacked_rwus.push(rwu);
624 fn assign_inv_inv(&mut self, idx: usize) {
625 self.packed_rwus[idx] = if self.get_used(idx) { INV_INV_TRUE } else { INV_INV_FALSE };
// Bit flags describing how a variable is accessed at a node; they are OR'd
// together and passed to `Liveness::acc` (e.g. `ACC_READ | ACC_USE`).
// A *use* is a read that counts toward warnings, as opposed to a read that
// only feeds a new value (e.g. `x += 1` reads but does not "use" `x`);
// see the module docs on the `RWU` struct.
const ACC_READ: u32 = 1;
const ACC_WRITE: u32 = 2;
const ACC_USE: u32 = 4;
633 struct Liveness<'a, 'tcx> {
634 ir: &'a mut IrMaps<'tcx>,
635 typeck_results: &'a ty::TypeckResults<'tcx>,
636 param_env: ty::ParamEnv<'tcx>,
637 successors: Vec<LiveNode>,
640 /// A live node representing a point of execution before closure entry &
641 /// after closure exit. Used to calculate liveness of captured variables
642 /// through calls to the same closure. Used for Fn & FnMut closures only.
643 closure_ln: LiveNode,
644 /// A live node representing every 'exit' from the function, whether it be
645 /// by explicit return, panic, or other means.
648 // mappings from loop node ID to LiveNode
649 // ("break" label should map to loop node ID,
650 // it probably doesn't now)
651 break_ln: HirIdMap<LiveNode>,
652 cont_ln: HirIdMap<LiveNode>,
655 impl<'a, 'tcx> Liveness<'a, 'tcx> {
656 fn new(ir: &'a mut IrMaps<'tcx>, def_id: LocalDefId) -> Liveness<'a, 'tcx> {
657 let typeck_results = ir.tcx.typeck(def_id);
658 let param_env = ir.tcx.param_env(def_id);
660 let closure_ln = ir.add_live_node(ClosureNode);
661 let exit_ln = ir.add_live_node(ExitNode);
663 let num_live_nodes = ir.lnks.len();
664 let num_vars = ir.var_kinds.len();
670 successors: vec![invalid_node(); num_live_nodes],
671 rwu_table: RWUTable::new(num_live_nodes * num_vars),
674 break_ln: Default::default(),
675 cont_ln: Default::default(),
679 fn live_node(&self, hir_id: HirId, span: Span) -> LiveNode {
680 match self.ir.live_node_map.get(&hir_id) {
683 // This must be a mismatch between the ir_map construction
684 // above and the propagation code below; the two sets of
685 // code have to agree about which AST nodes are worth
686 // creating liveness nodes for.
687 span_bug!(span, "no live node registered for node {:?}", hir_id);
692 fn variable(&self, hir_id: HirId, span: Span) -> Variable {
693 self.ir.variable(hir_id, span)
696 fn define_bindings_in_pat(&mut self, pat: &hir::Pat<'_>, mut succ: LiveNode) -> LiveNode {
697 // In an or-pattern, only consider the first pattern; any later patterns
698 // must have the same bindings, and we also consider the first pattern
699 // to be the "authoritative" set of ids.
700 pat.each_binding_or_first(&mut |_, hir_id, pat_sp, ident| {
701 let ln = self.live_node(hir_id, pat_sp);
702 let var = self.variable(hir_id, ident.span);
703 self.init_from_succ(ln, succ);
704 self.define(ln, var);
710 fn idx(&self, ln: LiveNode, var: Variable) -> usize {
711 ln.get() * self.ir.var_kinds.len() + var.get()
714 fn live_on_entry(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
715 assert!(ln.is_valid());
716 let reader = self.rwu_table.get_reader(self.idx(ln, var));
717 if reader.is_valid() { Some(self.ir.lnk(reader)) } else { None }
720 // Is this variable live on entry to any of its successor nodes?
721 fn live_on_exit(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
722 let successor = self.successors[ln.get()];
723 self.live_on_entry(successor, var)
726 fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool {
727 assert!(ln.is_valid());
728 self.rwu_table.get_used(self.idx(ln, var))
731 fn assigned_on_entry(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
732 assert!(ln.is_valid());
733 let writer = self.rwu_table.get_writer(self.idx(ln, var));
734 if writer.is_valid() { Some(self.ir.lnk(writer)) } else { None }
737 fn assigned_on_exit(&self, ln: LiveNode, var: Variable) -> Option<LiveNodeKind> {
738 let successor = self.successors[ln.get()];
739 self.assigned_on_entry(successor, var)
742 fn indices2<F>(&mut self, ln: LiveNode, succ_ln: LiveNode, mut op: F)
744 F: FnMut(&mut Liveness<'a, 'tcx>, usize, usize),
746 let node_base_idx = self.idx(ln, Variable(0));
747 let succ_base_idx = self.idx(succ_ln, Variable(0));
748 for var_idx in 0..self.ir.var_kinds.len() {
749 op(self, node_base_idx + var_idx, succ_base_idx + var_idx);
753 fn write_vars<F>(&self, wr: &mut dyn Write, ln: LiveNode, mut test: F) -> io::Result<()>
755 F: FnMut(usize) -> bool,
757 let node_base_idx = self.idx(ln, Variable(0));
758 for var_idx in 0..self.ir.var_kinds.len() {
759 let idx = node_base_idx + var_idx;
761 write!(wr, " {:?}", Variable(var_idx as u32))?;
767 #[allow(unused_must_use)]
768 fn ln_str(&self, ln: LiveNode) -> String {
769 let mut wr = Vec::new();
771 let wr = &mut wr as &mut dyn Write;
772 write!(wr, "[ln({:?}) of kind {:?} reads", ln.get(), self.ir.lnk(ln));
773 self.write_vars(wr, ln, |idx| self.rwu_table.get_reader(idx).is_valid());
774 write!(wr, " writes");
775 self.write_vars(wr, ln, |idx| self.rwu_table.get_writer(idx).is_valid());
777 self.write_vars(wr, ln, |idx| self.rwu_table.get_used(idx));
779 write!(wr, " precedes {:?}]", self.successors[ln.get()]);
781 String::from_utf8(wr).unwrap()
784 fn log_liveness(&self, entry_ln: LiveNode, hir_id: hir::HirId) {
785 // hack to skip the loop unless debug! is enabled:
787 "^^ liveness computation results for body {} (entry={:?})",
789 for ln_idx in 0..self.ir.lnks.len() {
790 debug!("{:?}", self.ln_str(LiveNode(ln_idx as u32)));
798 fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) {
799 self.successors[ln.get()] = succ_ln;
801 // It is not necessary to initialize the RWUs here because they are all
802 // set to INV_INV_FALSE when they are created, and the sets only grow
803 // during iterations.
806 fn init_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode) {
807 // more efficient version of init_empty() / merge_from_succ()
808 self.successors[ln.get()] = succ_ln;
810 self.indices2(ln, succ_ln, |this, idx, succ_idx| {
811 this.rwu_table.copy_packed(idx, succ_idx);
813 debug!("init_from_succ(ln={}, succ={})", self.ln_str(ln), self.ln_str(succ_ln));
816 fn merge_from_succ(&mut self, ln: LiveNode, succ_ln: LiveNode, first_merge: bool) -> bool {
821 let mut any_changed = false;
822 self.indices2(ln, succ_ln, |this, idx, succ_idx| {
823 // This is a special case, pulled out from the code below, where we
824 // don't have to do anything. It occurs about 60-70% of the time.
825 if this.rwu_table.packed_rwus[succ_idx] == INV_INV_FALSE {
829 let mut changed = false;
830 let mut rwu = this.rwu_table.get(idx);
831 let succ_rwu = this.rwu_table.get(succ_idx);
832 if succ_rwu.reader.is_valid() && !rwu.reader.is_valid() {
833 rwu.reader = succ_rwu.reader;
837 if succ_rwu.writer.is_valid() && !rwu.writer.is_valid() {
838 rwu.writer = succ_rwu.writer;
842 if succ_rwu.used && !rwu.used {
848 this.rwu_table.assign_unpacked(idx, rwu);
854 "merge_from_succ(ln={:?}, succ={}, first_merge={}, changed={})",
856 self.ln_str(succ_ln),
863 // Indicates that a local variable was *defined*; we know that no
864 // uses of the variable can precede the definition (resolve checks
865 // this) so we just clear out all the data.
866 fn define(&mut self, writer: LiveNode, var: Variable) {
867 let idx = self.idx(writer, var);
868 self.rwu_table.assign_inv_inv(idx);
870 debug!("{:?} defines {:?} (idx={}): {}", writer, var, idx, self.ln_str(writer));
873 // Either read, write, or both depending on the acc bitset
874 fn acc(&mut self, ln: LiveNode, var: Variable, acc: u32) {
875 debug!("{:?} accesses[{:x}] {:?}: {}", ln, acc, var, self.ln_str(ln));
877 let idx = self.idx(ln, var);
878 let mut rwu = self.rwu_table.get(idx);
880 if (acc & ACC_WRITE) != 0 {
881 rwu.reader = invalid_node();
885 // Important: if we both read/write, must do read second
886 // or else the write will override.
887 if (acc & ACC_READ) != 0 {
891 if (acc & ACC_USE) != 0 {
895 self.rwu_table.assign_unpacked(idx, rwu);
901 body: &hir::Body<'_>,
905 debug!("compute: using id for body, {:?}", body.value);
907 // # Liveness of captured variables
909 // When computing the liveness for captured variables we take into
910 // account how variable is captured (ByRef vs ByValue) and what is the
911 // closure kind (Generator / FnOnce vs Fn / FnMut).
913 // Variables captured by reference are assumed to be used on the exit
916 // In FnOnce closures, variables captured by value are known to be dead
917 // on exit since it is impossible to call the closure again.
919 // In Fn / FnMut closures, variables captured by value are live on exit
920 // if they are live on the entry to the closure, since only the closure
921 // itself can access them on subsequent calls.
923 if let Some(upvars) = self.ir.tcx.upvars_mentioned(self.ir.body_owner) {
924 // Mark upvars captured by reference as used after closure exits.
925 for (&var_hir_id, upvar) in upvars.iter().rev() {
926 let upvar_id = ty::UpvarId {
927 var_path: ty::UpvarPath { hir_id: var_hir_id },
928 closure_expr_id: self.ir.body_owner,
930 match self.typeck_results.upvar_capture(upvar_id) {
931 ty::UpvarCapture::ByRef(_) => {
932 let var = self.variable(var_hir_id, upvar.span);
933 self.acc(self.exit_ln, var, ACC_READ | ACC_USE);
935 ty::UpvarCapture::ByValue(_) => {}
940 let succ = self.propagate_through_expr(&body.value, self.exit_ln);
943 FnKind::Method(..) | FnKind::ItemFn(..) => return succ,
944 FnKind::Closure(..) => {}
947 let ty = self.typeck_results.node_type(id);
949 ty::Closure(_def_id, substs) => match substs.as_closure().kind() {
950 ty::ClosureKind::Fn => {}
951 ty::ClosureKind::FnMut => {}
952 ty::ClosureKind::FnOnce => return succ,
954 ty::Generator(..) => return succ,
956 span_bug!(span, "type of closure expr {:?} is not a closure {:?}", id, ty,);
960 // Propagate through calls to the closure.
961 let mut first_merge = true;
963 self.init_from_succ(self.closure_ln, succ);
964 for param in body.params {
965 param.pat.each_binding(|_bm, hir_id, _x, ident| {
966 let var = self.variable(hir_id, ident.span);
967 self.define(self.closure_ln, var);
971 if !self.merge_from_succ(self.exit_ln, self.closure_ln, first_merge) {
975 assert_eq!(succ, self.propagate_through_expr(&body.value, self.exit_ln));
981 fn propagate_through_block(&mut self, blk: &hir::Block<'_>, succ: LiveNode) -> LiveNode {
982 if blk.targeted_by_break {
983 self.break_ln.insert(blk.hir_id, succ);
985 let succ = self.propagate_through_opt_expr(blk.expr.as_deref(), succ);
986 blk.stmts.iter().rev().fold(succ, |succ, stmt| self.propagate_through_stmt(stmt, succ))
989 fn propagate_through_stmt(&mut self, stmt: &hir::Stmt<'_>, succ: LiveNode) -> LiveNode {
991 hir::StmtKind::Local(ref local) => {
992 // Note: we mark the variable as defined regardless of whether
993 // there is an initializer. Initially I had thought to only mark
994 // the live variable as defined if it was initialized, and then we
995 // could check for uninit variables just by scanning what is live
996 // at the start of the function. But that doesn't work so well for
997 // immutable variables defined in a loop:
998 // loop { let x; x = 5; }
999 // because the "assignment" loops back around and generates an error.
1001 // So now we just check that variables defined w/o an
1002 // initializer are not live at the point of their
1003 // initialization, which is mildly more complex than checking
1004 // once at the func header but otherwise equivalent.
1006 let succ = self.propagate_through_opt_expr(local.init.as_deref(), succ);
1007 self.define_bindings_in_pat(&local.pat, succ)
1009 hir::StmtKind::Item(..) => succ,
1010 hir::StmtKind::Expr(ref expr) | hir::StmtKind::Semi(ref expr) => {
1011 self.propagate_through_expr(&expr, succ)
1016 fn propagate_through_exprs(&mut self, exprs: &[Expr<'_>], succ: LiveNode) -> LiveNode {
1017 exprs.iter().rev().fold(succ, |succ, expr| self.propagate_through_expr(&expr, succ))
1020 fn propagate_through_opt_expr(
1022 opt_expr: Option<&Expr<'_>>,
1025 opt_expr.map_or(succ, |expr| self.propagate_through_expr(expr, succ))
// Core of the backward dataflow walk: given `succ`, the live node that
// *follows* `expr` in forward execution order, compute and return the live
// node holding the variables live just *before* `expr` executes.
// Dispatches on every `hir::ExprKind`; control-flow expressions create and
// merge their own live nodes, simple expressions just thread `succ` through
// their subexpressions in reverse execution order.
1028 fn propagate_through_expr(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
1029 debug!("propagate_through_expr: {:?}", expr);
1032 // Interesting cases with control flow or which gen/kill
1033 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
// A resolved path names a variable: record a read+use at this point.
1034 self.access_path(expr.hir_id, path, succ, ACC_READ | ACC_USE)
1037 hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
1039 hir::ExprKind::Closure(..) => {
1040 debug!("{:?} is an ExprKind::Closure", expr);
1042 // the construction of a closure itself is not important,
1043 // but we have to consider the closed over variables.
1049 .unwrap_or_else(|| span_bug!(expr.span, "no registered caps"));
// Each captured variable counts as a read+use at the closure
// expression; fold in reverse so the chain mirrors reverse
// execution order, like everything else in this walk.
1051 caps.iter().rev().fold(succ, |succ, cap| {
1052 self.init_from_succ(cap.ln, succ);
1053 let var = self.variable(cap.var_hid, expr.span);
1054 self.acc(cap.ln, var, ACC_READ | ACC_USE);
1059 // Note that labels have been resolved, so we don't need to look
1060 // at the label ident
1061 hir::ExprKind::Loop(ref blk, _, _) => self.propagate_through_loop(expr, &blk, succ),
1063 hir::ExprKind::Match(ref e, arms, _) => {
// Each arm is a separate flow into `ln`; merge them all, then
// propagate through the scrutinee `e` with `ln` as successor.
1078 let ln = self.live_node(expr.hir_id, expr.span);
1079 self.init_empty(ln, succ);
1080 let mut first_merge = true;
// Per arm: body first, then guard, then the arm's pattern
// bindings (reverse execution order within the arm).
1082 let body_succ = self.propagate_through_expr(&arm.body, succ);
1084 let guard_succ = self.propagate_through_opt_expr(
1085 arm.guard.as_ref().map(|hir::Guard::If(e)| *e),
1088 let arm_succ = self.define_bindings_in_pat(&arm.pat, guard_succ);
1089 self.merge_from_succ(ln, arm_succ, first_merge);
1090 first_merge = false;
1092 self.propagate_through_expr(&e, ln)
1095 hir::ExprKind::Ret(ref o_e) => {
1096 // Ignore succ and subst exit_ln.
1097 self.propagate_through_opt_expr(o_e.as_ref().map(|e| &**e), self.exit_ln)
1100 hir::ExprKind::Break(label, ref opt_expr) => {
1101 // Find which label this break jumps to
1102 let target = match label.target_id {
1103 Ok(hir_id) => self.break_ln.get(&hir_id),
1104 Err(err) => span_bug!(expr.span, "loop scope error: {}", err),
1108 // Now that we know the label we're going to,
1109 // look it up in the break loop nodes table
1112 Some(b) => self.propagate_through_opt_expr(opt_expr.as_ref().map(|e| &**e), b),
1113 None => span_bug!(expr.span, "`break` to unknown label"),
1117 hir::ExprKind::Continue(label) => {
1118 // Find which label this expr continues to
1121 .unwrap_or_else(|err| span_bug!(expr.span, "loop scope error: {}", err));
1123 // Now that we know the label we're going to,
1124 // look it up in the continue loop nodes table
1128 .unwrap_or_else(|| span_bug!(expr.span, "continue to unknown label"))
1131 hir::ExprKind::Assign(ref l, ref r, _) => {
1132 // see comment on places in
1133 // propagate_through_place_components()
// Plain assignment: pure write of the lhs place, then reads of
// the lhs place components, then the rhs (reverse exec order).
1134 let succ = self.write_place(&l, succ, ACC_WRITE);
1135 let succ = self.propagate_through_place_components(&l, succ);
1136 self.propagate_through_expr(&r, succ)
1139 hir::ExprKind::AssignOp(_, ref l, ref r) => {
1140 // an overloaded assign op is like a method call
1141 if self.typeck_results.is_method_call(expr) {
// Overloaded `+=` etc.: lhs is just an ordinary read operand.
1142 let succ = self.propagate_through_expr(&l, succ);
1143 self.propagate_through_expr(&r, succ)
1145 // see comment on places in
1146 // propagate_through_place_components()
// Built-in compound assignment both reads and writes the lhs.
1147 let succ = self.write_place(&l, succ, ACC_WRITE | ACC_READ);
1148 let succ = self.propagate_through_expr(&r, succ);
1149 self.propagate_through_place_components(&l, succ)
1153 // Uninteresting cases: just propagate in rev exec order
1154 hir::ExprKind::Array(ref exprs) => self.propagate_through_exprs(exprs, succ),
1156 hir::ExprKind::Struct(_, ref fields, ref with_expr) => {
1157 let succ = self.propagate_through_opt_expr(with_expr.as_ref().map(|e| &**e), succ);
1161 .fold(succ, |succ, field| self.propagate_through_expr(&field.expr, succ))
1164 hir::ExprKind::Call(ref f, ref args) => {
// If the call's result type is uninhabited (diverges), the
// successor is effectively unreachable from here.
1165 let m = self.ir.tcx.parent_module(expr.hir_id).to_def_id();
1166 let succ = if self.ir.tcx.is_ty_uninhabited_from(
1168 self.typeck_results.expr_ty(expr),
1175 let succ = self.propagate_through_exprs(args, succ);
1176 self.propagate_through_expr(&f, succ)
1179 hir::ExprKind::MethodCall(.., ref args, _) => {
// Same uninhabited-result treatment as `Call` above.
1180 let m = self.ir.tcx.parent_module(expr.hir_id).to_def_id();
1181 let succ = if self.ir.tcx.is_ty_uninhabited_from(
1183 self.typeck_results.expr_ty(expr),
1191 self.propagate_through_exprs(args, succ)
1194 hir::ExprKind::Tup(ref exprs) => self.propagate_through_exprs(exprs, succ),
1196 hir::ExprKind::Binary(op, ref l, ref r) if op.node.is_lazy() => {
// Short-circuiting `&&`/`||`: the rhs may be skipped, so merge
// the "rhs executed" flow with the "rhs skipped" flow (`succ`).
1197 let r_succ = self.propagate_through_expr(&r, succ);
1199 let ln = self.live_node(expr.hir_id, expr.span);
1200 self.init_from_succ(ln, succ);
1201 self.merge_from_succ(ln, r_succ, false);
1203 self.propagate_through_expr(&l, ln)
1206 hir::ExprKind::Index(ref l, ref r) | hir::ExprKind::Binary(_, ref l, ref r) => {
// Strict binary ops evaluate both operands: rhs then lhs in
// reverse order.
1207 let r_succ = self.propagate_through_expr(&r, succ);
1208 self.propagate_through_expr(&l, r_succ)
1211 hir::ExprKind::Box(ref e)
1212 | hir::ExprKind::AddrOf(_, _, ref e)
1213 | hir::ExprKind::Cast(ref e, _)
1214 | hir::ExprKind::Type(ref e, _)
1215 | hir::ExprKind::DropTemps(ref e)
1216 | hir::ExprKind::Unary(_, ref e)
1217 | hir::ExprKind::Yield(ref e, _)
1218 | hir::ExprKind::Repeat(ref e, _) => self.propagate_through_expr(&e, succ),
1220 hir::ExprKind::InlineAsm(ref asm) => {
1221 // Handle non-returning asm
1222 let mut succ = if asm.options.contains(InlineAsmOptions::NORETURN) {
1228 // Do a first pass for writing outputs only
1229 for op in asm.operands.iter().rev() {
1231 hir::InlineAsmOperand::In { .. }
1232 | hir::InlineAsmOperand::Const { .. }
1233 | hir::InlineAsmOperand::Sym { .. } => {}
1234 hir::InlineAsmOperand::Out { expr, .. } => {
1235 if let Some(expr) = expr {
1236 succ = self.write_place(expr, succ, ACC_WRITE);
1239 hir::InlineAsmOperand::InOut { expr, .. } => {
1240 succ = self.write_place(expr, succ, ACC_READ | ACC_WRITE);
1242 hir::InlineAsmOperand::SplitInOut { out_expr, .. } => {
1243 if let Some(expr) = out_expr {
1244 succ = self.write_place(expr, succ, ACC_WRITE);
1250 // Then do a second pass for inputs
1251 let mut succ = succ;
1252 for op in asm.operands.iter().rev() {
1254 hir::InlineAsmOperand::In { expr, .. }
1255 | hir::InlineAsmOperand::Const { expr, .. }
1256 | hir::InlineAsmOperand::Sym { expr, .. } => {
1257 succ = self.propagate_through_expr(expr, succ)
1259 hir::InlineAsmOperand::Out { expr, .. } => {
1260 if let Some(expr) = expr {
1261 succ = self.propagate_through_place_components(expr, succ);
1264 hir::InlineAsmOperand::InOut { expr, .. } => {
1265 succ = self.propagate_through_place_components(expr, succ);
1267 hir::InlineAsmOperand::SplitInOut { in_expr, out_expr, .. } => {
1268 if let Some(expr) = out_expr {
1269 succ = self.propagate_through_place_components(expr, succ);
1271 succ = self.propagate_through_expr(in_expr, succ);
// Legacy LLVM-style asm: outputs are writes (read+write if `is_rw`),
// inputs are ordinary reads propagated last (reverse exec order).
1278 hir::ExprKind::LlvmInlineAsm(ref asm) => {
1279 let ia = &asm.inner;
1280 let outputs = asm.outputs_exprs;
1281 let inputs = asm.inputs_exprs;
1282 let succ = ia.outputs.iter().zip(outputs).rev().fold(succ, |succ, (o, output)| {
1283 // see comment on places
1284 // in propagate_through_place_components()
1286 self.propagate_through_expr(output, succ)
1288 let acc = if o.is_rw { ACC_WRITE | ACC_READ } else { ACC_WRITE };
1289 let succ = self.write_place(output, succ, acc);
1290 self.propagate_through_place_components(output, succ)
1294 // Inputs are executed first. Propagate last because of rev order
1295 self.propagate_through_exprs(inputs, succ)
// Leaf expressions with no operands and no control flow: liveness
// is unchanged, so just return `succ`.
1298 hir::ExprKind::Lit(..)
1299 | hir::ExprKind::Err
1300 | hir::ExprKind::Path(hir::QPath::TypeRelative(..))
1301 | hir::ExprKind::Path(hir::QPath::LangItem(..)) => succ,
1303 // Note that labels have been resolved, so we don't need to look
1304 // at the label ident
1305 hir::ExprKind::Block(ref blk, _) => self.propagate_through_block(&blk, succ),
// Generates the *reads* implied by evaluating the components of an
// assignment target (place), without registering a write. The write itself
// is produced separately by `write_place()`; see the long comment below for
// the tracked vs. untracked place distinction.
1309 fn propagate_through_place_components(&mut self, expr: &Expr<'_>, succ: LiveNode) -> LiveNode {
1312 // In general, the full flow graph structure for an
1313 // assignment/move/etc can be handled in one of two ways,
1314 // depending on whether what is being assigned is a "tracked
1315 // value" or not. A tracked value is basically a local
1316 // variable or argument.
1318 // The two kinds of graphs are:
1320 // Tracked place Untracked place
1321 // ----------------------++-----------------------
1325 // (rvalue) || (rvalue)
1328 // (write of place) || (place components)
1333 // ----------------------++-----------------------
1335 // I will cover the two cases in turn:
1339 // A tracked place is a local variable/argument `x`. In
1340 // these cases, the link_node where the write occurs is linked
1341 // to node id of `x`. The `write_place()` routine generates
1342 // the contents of this node. There are no subcomponents to
1345 // # Non-tracked places
1347 // These are places like `x[5]` or `x.f`. In that case, we
1348 // basically ignore the value which is written to but generate
1349 // reads for the components---`x` in these two examples. The
1350 // components reads are generated by
1351 // `propagate_through_place_components()` (this fn).
1355 // It is still possible to observe assignments to non-places;
1356 // these errors are detected in the later pass borrowck. We
1357 // just ignore such cases and treat them as reads.
// Bare path = tracked local: no component reads; the write node is
// generated by write_place(), so liveness passes straight through.
1360 hir::ExprKind::Path(_) => succ,
// `x.f`: the base `x` is read as a component of the place.
1361 hir::ExprKind::Field(ref e, _) => self.propagate_through_expr(&e, succ),
// Anything else (including non-places, rejected later by borrowck)
// is treated as an ordinary read.
1362 _ => self.propagate_through_expr(expr, succ),
1366 // see comment on propagate_through_place()
// Registers the *write* half of an assignment target. `acc` is a bitmask
// of the ACC_* flags (e.g. ACC_WRITE, or ACC_WRITE | ACC_READ for
// compound assignment). Only tracked places (resolved local paths) get a
// write node; everything else just propagates through.
1367 fn write_place(&mut self, expr: &Expr<'_>, succ: LiveNode, acc: u32) -> LiveNode {
1369 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
1370 self.access_path(expr.hir_id, path, succ, acc)
1373 // We do not track other places, so just propagate through
1374 // to their subcomponents. Also, it may happen that
1375 // non-places occur here, because those are detected in the
1376 // later pass borrowck.
// NOTE(review): the following lines belong to the variable-access helpers
// (`access_var` and `access_path`); their signatures are not fully visible
// in this excerpt. `access_var` creates the live node for an access and
// records the ACC_* flags; presumably the `acc != 0` fast path is elided —
// confirm against the full source.
1389 let ln = self.live_node(hir_id, span);
1391 self.init_from_succ(ln, succ);
1392 let var = self.variable(var_hid, span);
1393 self.acc(ln, var, acc);
// `access_path`: only accesses that resolve to a local variable are
// tracked; the access is delegated to `access_var`.
1401 path: &hir::Path<'_>,
1406 Res::Local(hid) => self.access_var(hir_id, hid, succ, acc, path.span),
// Propagates liveness backward through a `loop`. The loop head node `ln`
// is the successor of both the loop body's end and any `continue`
// targeting this loop; `break` flows to `succ` instead. Iterates the body
// propagation until the merge reaches a fixed point.
1411 fn propagate_through_loop(
1414 body: &hir::Block<'_>,
1418 We model control flow like this:
1425 Note that a `continue` expression targeting the `loop` will have a successor of `expr`.
1426 Meanwhile, a `break` expression will have a successor of `succ`.
1430 let mut first_merge = true;
1431 let ln = self.live_node(expr.hir_id, expr.span);
1432 self.init_empty(ln, succ);
1433 debug!("propagate_through_loop: using id for loop body {} {:?}", expr.hir_id, body);
// Register the break/continue targets for this loop so that
// `Break`/`Continue` arms in propagate_through_expr can find them.
1435 self.break_ln.insert(expr.hir_id, succ);
1437 self.cont_ln.insert(expr.hir_id, ln);
1439 let body_ln = self.propagate_through_block(body, ln);
1441 // repeat until fixed point is reached:
1442 while self.merge_from_succ(ln, body_ln, first_merge) {
1443 first_merge = false;
// Re-propagation must be stable: the body's entry node may not
// change between iterations, only the bits merged into `ln`.
1444 assert_eq!(body_ln, self.propagate_through_block(body, ln));
1451 // _______________________________________________________________________
1452 // Checking for error conditions
// Second walk over the AST: now that liveness has been computed, visit
// each node and emit lints (unused variables, dead assignments).
1454 impl<'a, 'tcx> Visitor<'tcx> for Liveness<'a, 'tcx> {
1455 type Map = intravisit::ErasedMap<'tcx>;
// No nested-item visiting: this pass checks a single body at a time.
1457 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
1458 NestedVisitorMap::None
// A `let` binding: a binding that is used-on-entry but has an
// initializer whose value is never read gets a dead-assign warning.
1461 fn visit_local(&mut self, local: &'tcx hir::Local<'tcx>) {
1462 self.check_unused_vars_in_pat(&local.pat, None, |spans, hir_id, ln, var| {
1463 if local.init.is_some() {
1464 self.warn_about_dead_assign(spans, hir_id, ln, var);
1468 intravisit::walk_local(self, local);
1471 fn visit_expr(&mut self, ex: &'tcx Expr<'tcx>) {
1472 check_expr(self, ex);
// Match-arm bindings only get the "unused" check, never the
// dead-assign one (hence the no-op closure).
1475 fn visit_arm(&mut self, arm: &'tcx hir::Arm<'tcx>) {
1476 self.check_unused_vars_in_pat(&arm.pat, None, |_, _, _, _| {});
1477 intravisit::walk_arm(self, arm);
// Per-expression liveness checks: the only correctness condition verified
// here is that assignment targets (including asm outputs) are checked via
// `check_place`, which can emit dead-assignment warnings. All other
// expression kinds need no check and fall through to the ordinary walk.
1481 fn check_expr<'tcx>(this: &mut Liveness<'_, 'tcx>, expr: &'tcx Expr<'tcx>) {
1483 hir::ExprKind::Assign(ref l, ..) => {
1484 this.check_place(&l);
1487 hir::ExprKind::AssignOp(_, ref l, _) => {
// Overloaded assign-ops are method calls, so the lhs is an
// ordinary operand, not a written-to place.
1488 if !this.typeck_results.is_method_call(expr) {
1489 this.check_place(&l);
1493 hir::ExprKind::InlineAsm(ref asm) => {
// Every output-like asm operand is an assignment target.
1494 for op in asm.operands {
1496 hir::InlineAsmOperand::Out { expr, .. } => {
1497 if let Some(expr) = expr {
1498 this.check_place(expr);
1501 hir::InlineAsmOperand::InOut { expr, .. } => {
1502 this.check_place(expr);
1504 hir::InlineAsmOperand::SplitInOut { out_expr, .. } => {
1505 if let Some(out_expr) = out_expr {
1506 this.check_place(out_expr);
1514 hir::ExprKind::LlvmInlineAsm(ref asm) => {
1515 for input in asm.inputs_exprs {
1516 this.visit_expr(input);
1519 // Output operands must be places
1520 for (o, output) in asm.inner.outputs.iter().zip(asm.outputs_exprs) {
1522 this.check_place(output);
1524 this.visit_expr(output);
1528 // no correctness conditions related to liveness
1529 hir::ExprKind::Call(..)
1530 | hir::ExprKind::MethodCall(..)
1531 | hir::ExprKind::Match(..)
1532 | hir::ExprKind::Loop(..)
1533 | hir::ExprKind::Index(..)
1534 | hir::ExprKind::Field(..)
1535 | hir::ExprKind::Array(..)
1536 | hir::ExprKind::Tup(..)
1537 | hir::ExprKind::Binary(..)
1538 | hir::ExprKind::Cast(..)
1539 | hir::ExprKind::DropTemps(..)
1540 | hir::ExprKind::Unary(..)
1541 | hir::ExprKind::Ret(..)
1542 | hir::ExprKind::Break(..)
1543 | hir::ExprKind::Continue(..)
1544 | hir::ExprKind::Lit(_)
1545 | hir::ExprKind::Block(..)
1546 | hir::ExprKind::AddrOf(..)
1547 | hir::ExprKind::Struct(..)
1548 | hir::ExprKind::Repeat(..)
1549 | hir::ExprKind::Closure(..)
1550 | hir::ExprKind::Path(_)
1551 | hir::ExprKind::Yield(..)
1552 | hir::ExprKind::Box(..)
1553 | hir::ExprKind::Type(..)
1554 | hir::ExprKind::Err => {}
1557 intravisit::walk_expr(this, expr);
// Lint-emission helpers on the computed liveness results.
1560 impl<'tcx> Liveness<'_, 'tcx> {
// Checks an assignment-target expression: a direct assignment to a
// local whose value is never subsequently read triggers the
// dead-assignment warning. Non-local places are walked as rvalues.
1561 fn check_place(&mut self, expr: &'tcx Expr<'tcx>) {
1563 hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) => {
1564 if let Res::Local(var_hid) = path.res {
1565 // Assignment to an immutable variable or argument: only legal
1566 // if there is no later assignment. If this local is actually
1567 // mutable, then check for a reassignment to flag the mutability
1569 let ln = self.live_node(expr.hir_id, expr.span);
1570 let var = self.variable(var_hid, expr.span);
1571 self.warn_about_dead_assign(vec![expr.span], expr.hir_id, ln, var);
1575 // For other kinds of places, no checks are required,
1576 // and any embedded expressions are actually rvalues
1577 intravisit::walk_expr(self, expr);
// Decides whether a variable deserves an unused/dead-assign lint.
// Returns `Some(name)` to warn, `None` to stay silent. Invalid names and
// names already prefixed with `_` are exempt (the elided branch bodies
// presumably return None — confirm against the full source).
1582 fn should_warn(&self, var: Variable) -> Option<String> {
1583 let name = self.ir.variable_name(var);
1584 if name == kw::Invalid {
1587 let name: &str = &name.as_str();
1588 if name.as_bytes()[0] == b'_' {
1591 Some(name.to_owned())
// Lints closure captures: a by-value capture that is written-to at entry
// but never read gets UNUSED_ASSIGNMENTS; one never used at all gets
// UNUSED_VARIABLES. By-ref captures are skipped entirely.
1594 fn warn_about_unused_upvars(&self, entry_ln: LiveNode) {
1595 let upvars = match self.ir.tcx.upvars_mentioned(self.ir.body_owner) {
1597 Some(upvars) => upvars,
1599 for (&var_hir_id, upvar) in upvars.iter() {
1600 let var = self.variable(var_hir_id, upvar.span);
1601 let upvar_id = ty::UpvarId {
1602 var_path: ty::UpvarPath { hir_id: var_hir_id },
1603 closure_expr_id: self.ir.body_owner,
// Only by-value captures are interesting: a by-ref capture's
// use is visible outside the closure.
1605 match self.typeck_results.upvar_capture(upvar_id) {
1606 ty::UpvarCapture::ByValue(_) => {}
1607 ty::UpvarCapture::ByRef(..) => continue,
// Used somewhere but the captured value itself is never read:
// the capture is effectively a dead assignment.
1609 if self.used_on_entry(entry_ln, var) {
1610 if self.live_on_entry(entry_ln, var).is_none() {
1611 if let Some(name) = self.should_warn(var) {
1612 self.ir.tcx.struct_span_lint_hir(
1613 lint::builtin::UNUSED_ASSIGNMENTS,
1617 lint.build(&format!("value captured by `{}` is never read", name))
1618 .help("did you mean to capture by reference instead?")
// Not used at all: plain unused-variable lint.
1625 if let Some(name) = self.should_warn(var) {
1626 self.ir.tcx.struct_span_lint_hir(
1627 lint::builtin::UNUSED_VARIABLES,
1631 lint.build(&format!("unused variable: `{}`", name))
1632 .help("did you mean to capture by reference instead?")
// Lints function parameters: a parameter whose bindings are used but
// whose passed-in value is never read is reported as a dead assignment
// ("value passed to ... is never read"). Unused parameters are handled
// inside check_unused_vars_in_pat itself.
1641 fn warn_about_unused_args(&self, body: &hir::Body<'_>, entry_ln: LiveNode) {
1642 for p in body.params {
1643 self.check_unused_vars_in_pat(&p.pat, Some(entry_ln), |spans, hir_id, ln, var| {
1644 if self.live_on_entry(ln, var).is_none() {
1645 self.report_unsed_assign(hir_id, spans, var, |name| {
1646 format!("value passed to `{}` is never read", name)
// Walks all bindings in a pattern and groups them by variable name (one
// variable can be bound at several spans in an or-pattern). For each
// group: if the variable is used on entry, the caller-provided
// `on_used_on_entry` callback runs (e.g. to emit a dead-assign warning);
// otherwise the variable is reported as unused. `entry_ln` overrides the
// per-binding live node when checking function parameters.
1653 fn check_unused_vars_in_pat(
1656 entry_ln: Option<LiveNode>,
1657 on_used_on_entry: impl Fn(Vec<Span>, HirId, LiveNode, Variable),
1659 // In an or-pattern, only consider the variable; any later patterns must have the same
1660 // bindings, and we also consider the first pattern to be the "authoritative" set of ids.
1661 // However, we should take the ids and spans of variables with the same name from the later
1662 // patterns so the suggestions to prefix with underscores will apply to those too.
1663 let mut vars: FxIndexMap<Symbol, (LiveNode, Variable, Vec<(HirId, Span)>)> = <_>::default();
1665 pat.each_binding(|_, hir_id, pat_sp, ident| {
1666 let ln = entry_ln.unwrap_or_else(|| self.live_node(hir_id, pat_sp));
1667 let var = self.variable(hir_id, ident.span);
1668 let id_and_sp = (hir_id, pat_sp);
// First binding of a name wins the (ln, var) pair; later
// same-named bindings only contribute their ids/spans.
1669 vars.entry(self.ir.variable_name(var))
1670 .and_modify(|(.., hir_ids_and_spans)| hir_ids_and_spans.push(id_and_sp))
1671 .or_insert_with(|| (ln, var, vec![id_and_sp]));
1674 for (_, (ln, var, hir_ids_and_spans)) in vars {
1675 if self.used_on_entry(ln, var) {
1676 let id = hir_ids_and_spans[0].0;
1677 let spans = hir_ids_and_spans.into_iter().map(|(_, sp)| sp).collect();
1678 on_used_on_entry(spans, id, ln, var);
1680 self.report_unused(hir_ids_and_spans, ln, var);
// Emits the UNUSED_VARIABLES lint for a variable that is never used.
// Distinguishes "assigned to, but never used" (suggest `_name`) from a
// plain unused variable, where field-shorthand bindings get a "try
// ignoring the field" `name: _` suggestion instead.
1685 fn report_unused(&self, hir_ids_and_spans: Vec<(HirId, Span)>, ln: LiveNode, var: Variable) {
1686 let first_hir_id = hir_ids_and_spans[0].0;
// `self` is never warned about, even if unused.
1688 if let Some(name) = self.should_warn(var).filter(|name| name != "self") {
1689 // annoying: for parameters in funcs like `fn(x: i32)
1690 // {ret}`, there is only one node, so asking about
1691 // assigned_on_exit() is not meaningful.
1693 if ln == self.exit_ln { false } else { self.assigned_on_exit(ln, var).is_some() };
1696 self.ir.tcx.struct_span_lint_hir(
1697 lint::builtin::UNUSED_VARIABLES,
1699 hir_ids_and_spans.into_iter().map(|(_, sp)| sp).collect::<Vec<_>>(),
1701 lint.build(&format!("variable `{}` is assigned to, but never used", name))
1702 .note(&format!("consider using `_{}` instead", name))
1707 self.ir.tcx.struct_span_lint_hir(
1708 lint::builtin::UNUSED_VARIABLES,
1710 hir_ids_and_spans.iter().map(|(_, sp)| *sp).collect::<Vec<_>>(),
1712 let mut err = lint.build(&format!("unused variable: `{}`", name));
// Split the bindings: shorthand struct-pattern fields get the
// `name: _` form, everything else gets an underscore prefix.
1714 let (shorthands, non_shorthands): (Vec<_>, Vec<_>) =
1715 hir_ids_and_spans.into_iter().partition(|(hir_id, span)| {
1716 let var = self.variable(*hir_id, *span);
1717 self.ir.variable_is_shorthand(var)
1720 let mut shorthands = shorthands
1722 .map(|(_, span)| (span, format!("{}: _", name)))
1723 .collect::<Vec<_>>();
1725 // If we have both shorthand and non-shorthand, prefer the "try ignoring
1726 // the field" message, and suggest `_` for the non-shorthands. If we only
1727 // have non-shorthand, then prefix with an underscore instead.
1728 if !shorthands.is_empty() {
1732 .map(|(_, span)| (span, "_".to_string()))
1733 .collect::<Vec<_>>(),
1736 err.multipart_suggestion(
1737 "try ignoring the field",
1739 Applicability::MachineApplicable,
1742 err.multipart_suggestion(
1743 "if this is intentional, prefix it with an underscore",
1746 .map(|(_, span)| (span, format!("_{}", name)))
1747 .collect::<Vec<_>>(),
1748 Applicability::MachineApplicable,
// Emits UNUSED_ASSIGNMENTS when the value written at this node is dead
// (never read before the next write or the end of the variable's life).
// NOTE(review): the helper name `report_unsed_assign` has a typo
// ("unsed" -> "unused"); renaming it requires updating all call sites.
1759 fn warn_about_dead_assign(&self, spans: Vec<Span>, hir_id: HirId, ln: LiveNode, var: Variable) {
1760 if self.live_on_exit(ln, var).is_none() {
1761 self.report_unsed_assign(hir_id, spans, var, |name| {
1762 format!("value assigned to `{}` is never read", name)
1767 fn report_unsed_assign(
1772 message: impl Fn(&str) -> String,
1774 if let Some(name) = self.should_warn(var) {
1775 self.ir.tcx.struct_span_lint_hir(
1776 lint::builtin::UNUSED_ASSIGNMENTS,
1780 lint.build(&message(&name))
1781 .help("maybe it is overwritten before being read?")