1 use super::debug::term_type;
2 use super::graph::{BasicCoverageBlock, BasicCoverageBlockData, CoverageGraph, START_BCB};
4 use itertools::Itertools;
5 use rustc_data_structures::graph::WithNumNodes;
6 use rustc_middle::mir::spanview::source_range_no_file;
7 use rustc_middle::mir::{
8 self, AggregateKind, BasicBlock, FakeReadCause, Rvalue, Statement, StatementKind, Terminator,
11 use rustc_middle::ty::TyCtxt;
12 use rustc_span::source_map::original_sp;
13 use rustc_span::{BytePos, ExpnKind, MacroKind, Span, Symbol};
15 use std::cell::RefCell;
16 use std::cmp::Ordering;
/// A single MIR element (statement or terminator) contributing to a `CoverageSpan`.
/// Each variant records the owning `BasicBlock` and the `Span` it covers; the
/// `Statement` variant also records the statement's index within that block.
#[derive(Debug, Copy, Clone)]
pub(super) enum CoverageStatement {
    /// (owning `BasicBlock`, span of the statement, index into the block's `statements`)
    Statement(BasicBlock, Span, usize),
    /// (owning `BasicBlock`, span of the block's terminator)
    Terminator(BasicBlock, Span),
impl CoverageStatement {
    /// Debug helper: renders the source range (without the file name) plus a
    /// description of the underlying MIR statement or terminator kind.
    pub fn format<'tcx>(&self, tcx: TyCtxt<'tcx>, mir_body: &mir::Body<'tcx>) -> String {
        // Look up the actual MIR statement so its kind can be shown.
        Self::Statement(bb, span, stmt_index) => {
            let stmt = &mir_body[bb].statements[stmt_index];
            source_range_no_file(tcx, &span),
        // Terminators are summarized via `term_type` (kind name only).
        Self::Terminator(bb, span) => {
            let term = mir_body[bb].terminator();
            source_range_no_file(tcx, &span),
            term_type(&term.kind),

    /// Returns the source `Span` recorded for this statement or terminator.
    pub fn span(&self) -> &Span {
        Self::Statement(_, span, _) | Self::Terminator(_, span) => span,
/// A BCB is deconstructed into one or more `Span`s. Each `Span` maps to a `CoverageSpan` that
/// references the originating BCB and one or more MIR `Statement`s and/or `Terminator`s.
/// Initially, the `Span`s come from the `Statement`s and `Terminator`s, but subsequent
/// transforms can combine adjacent `Span`s and `CoverageSpan` from the same BCB, merging the
/// `CoverageStatement` vectors, and the `Span`s to cover the extent of the combined `Span`s.
/// Note: A `CoverageStatement` merged into another CoverageSpan may come from a `BasicBlock` that
/// is not part of the `CoverageSpan` bcb if the statement was included because its `Span` matches
/// or is subsumed by the `Span` associated with this `CoverageSpan`, and its `BasicBlock`
/// `is_dominated_by()` the `BasicBlock`s in this `CoverageSpan`.
#[derive(Debug, Clone)]
pub(super) struct CoverageSpan {
    /// Lazily-initialized cache used by `current_macro()`: outer `None` = not yet
    /// computed; `Some(None)` = computed, not from a bang-macro; `Some(Some(name))`
    /// = the invoking macro's name.
    pub current_macro_or_none: RefCell<Option<Option<Symbol>>>,
    /// The `BasicCoverageBlock` this span was extracted from.
    pub bcb: BasicCoverageBlock,
    /// The MIR `Statement`s/`Terminator`s merged into this span so far.
    pub coverage_statements: Vec<CoverageStatement>,
/// Constructs the synthetic `CoverageSpan` for the function signature, so the
/// signature itself is counted together with the first executed code.
pub fn for_fn_sig(fn_sig_span: Span) -> Self {
    // The signature is not itself a macro expansion site; the macro cache starts unset.
    expn_span: fn_sig_span,
    current_macro_or_none: Default::default(),
    // No MIR statements back this synthetic span.
    coverage_statements: vec![],
// (Parameters of `for_statement`, the constructor for a span extracted from a
// single MIR `Statement`.)
statement: &Statement<'_>,
bcb: BasicCoverageBlock,
// A span is flagged `is_closure` when the statement assigns a closure or
// generator aggregate; such spans are later carved out so the closure's own MIR
// supplies its coverage counters.
let is_closure = match statement.kind {
    StatementKind::Assign(box (_, Rvalue::Aggregate(box ref kind, _))) => {
        matches!(kind, AggregateKind::Closure(_, _) | AggregateKind::Generator(_, _, _))
// The macro-name cache is computed on demand.
current_macro_or_none: Default::default(),
// Initially backed by exactly one MIR statement.
coverage_statements: vec![CoverageStatement::Statement(bb, span, stmt_index)],
/// Constructs a `CoverageSpan` for a single MIR `Terminator`.
pub fn for_terminator(
    bcb: BasicCoverageBlock,
    // Terminator-based spans lazily compute their macro name, like statement spans.
    current_macro_or_none: Default::default(),
    // Initially backed by exactly one MIR terminator.
    coverage_statements: vec![CoverageStatement::Terminator(bb, span)],
/// Merges `other` into `self`: widens `self.span` to the union of both spans
/// (`Span::to`) and takes ownership of `other`'s `CoverageStatement`s. Only
/// valid when `is_mergeable(&other)` holds (asserted in debug builds).
pub fn merge_from(&mut self, mut other: CoverageSpan) {
    debug_assert!(self.is_mergeable(&other));
    self.span = self.span.to(other.span);
    self.coverage_statements.append(&mut other.coverage_statements);
/// Drops every `CoverageStatement` whose span ends at or after `cutoff_pos`,
/// then shrinks `self.span` so it ends at the latest remaining statement's end
/// (if any statements remain).
pub fn cutoff_statements_at(&mut self, cutoff_pos: BytePos) {
    self.coverage_statements.retain(|covstmt| covstmt.span().hi() <= cutoff_pos);
    if let Some(highest_covstmt) =
        self.coverage_statements.iter().max_by_key(|covstmt| covstmt.span().hi())
        self.span = self.span.with_hi(highest_covstmt.span().hi());
146 pub fn is_mergeable(&self, other: &Self) -> bool {
147 self.is_in_same_bcb(other) && !(self.is_closure || other.is_closure)
151 pub fn is_in_same_bcb(&self, other: &Self) -> bool {
152 self.bcb == other.bcb
/// Debug helper: renders this span's source range followed by its (indented)
/// per-statement breakdown from `format_coverage_statements`.
pub fn format<'tcx>(&self, tcx: TyCtxt<'tcx>, mir_body: &mir::Body<'tcx>) -> String {
    source_range_no_file(tcx, &self.span),
    // Indent the per-statement lines beneath the span's own line.
    self.format_coverage_statements(tcx, mir_body).replace('\n', "\n "),
/// Debug helper: formats one line per contributing `CoverageStatement`, in a
/// deterministic order — by `BasicBlock`, then statement index, with a block's
/// terminator sorted after all of its statements.
pub fn format_coverage_statements<'tcx>(
    mir_body: &mir::Body<'tcx>,
    // Sort a clone so the debug output is stable without mutating `self`.
    let mut sorted_coverage_statements = self.coverage_statements.clone();
    sorted_coverage_statements.sort_unstable_by_key(|covstmt| match *covstmt {
        CoverageStatement::Statement(bb, _, index) => (bb, index),
        // `usize::MAX` forces the terminator after every statement of the same block.
        CoverageStatement::Terminator(bb, _) => (bb, usize::MAX),
    sorted_coverage_statements.iter().map(|covstmt| covstmt.format(tcx, mir_body)).join("\n")
/// If the span is part of a macro, returns the macro name symbol.
pub fn current_macro(&self) -> Option<Symbol> {
    // The answer is computed at most once and cached in `current_macro_or_none`
    // (`RefCell<Option<Option<Symbol>>>`); subsequent calls return the cached value.
    self.current_macro_or_none
        .get_or_insert_with(|| {
            // Only bang-macro expansions (`foo!(...)`) count; derives/attribute
            // macros do not.
            if let ExpnKind::Macro(MacroKind::Bang, current_macro) =
                self.expn_span.ctxt().outer_expn_data().kind
                return Some(current_macro);
        // Identity `map` copies the `Option<Symbol>` out of the borrowed cache.
        .map(|symbol| symbol)
/// If the span is part of a macro, and the macro is visible (expands directly to the given
/// body_span), returns the macro name symbol.
pub fn visible_macro(&self, body_span: Span) -> Option<Symbol> {
    if let Some(current_macro) = self.current_macro() {
        // Walking up the expansion chain must find a parent callsite; anything
        // else is a compiler invariant violation (ICE via `bug!`).
        .unwrap_or_else(|| bug!("macro must have a parent"))
        // Only report the macro when its callsite lies directly in `body_span`.
        return Some(current_macro);
/// Returns `true` if this span's `expn_span` originates from a bang-macro expansion
/// (i.e., `current_macro()` resolves to a macro name).
pub fn is_macro_expansion(&self) -> bool {
    self.current_macro().is_some()
/// Converts the initial set of `CoverageSpan`s (one per MIR `Statement` or `Terminator`) into a
/// minimal set of `CoverageSpan`s, using the BCB CFG to determine where it is safe and useful to:
/// * Remove duplicate source code coverage regions
/// * Merge spans that represent continuous (both in source code and control flow), non-branching
/// * Carve out (leave uncovered) any span that will be counted by another MIR (notably, closures)
pub struct CoverageSpans<'a, 'tcx> {
    /// The MIR, used to look up `BasicBlockData`.
    mir_body: &'a mir::Body<'tcx>,

    /// A `Span` covering the signature of function for the MIR.

    /// A `Span` covering the function body of the MIR (typically from left curly brace to right

    /// The BasicCoverageBlock Control Flow Graph (BCB CFG).
    basic_coverage_blocks: &'a CoverageGraph,

    /// The initial set of `CoverageSpan`s, sorted by `Span` (`lo` and `hi`) and by relative
    /// dominance between the `BasicCoverageBlock`s of equal `Span`s.
    /// (`None` until `generate_coverage_spans` installs the sorted iterator.)
    sorted_spans_iter: Option<std::vec::IntoIter<CoverageSpan>>,

    /// The current `CoverageSpan` to compare to its `prev`, to possibly merge, discard, force the
    /// discard of the `prev` (and or `pending_dups`), or keep both (with `prev` moved to
    /// `pending_dups`). If `curr` is not discarded or merged, it becomes `prev` for the next
    some_curr: Option<CoverageSpan>,

    /// The original `span` for `curr`, in case `curr.span()` is modified. The `curr_original_span`
    /// **must not be mutated** (except when advancing to the next `curr`), even if `curr.span()`
    curr_original_span: Span,

    /// The CoverageSpan from a prior iteration; typically assigned from that iteration's `curr`.
    /// If that `curr` was discarded, `prev` retains its value from the previous iteration.
    some_prev: Option<CoverageSpan>,

    /// Assigned from `curr_original_span` from the previous iteration. The `prev_original_span`
    /// **must not be mutated** (except when advancing to the next `prev`), even if `prev.span()`
    prev_original_span: Span,

    /// A copy of the expn_span from the prior iteration.
    prev_expn_span: Option<Span>,

    /// One or more `CoverageSpan`s with the same `Span` but different `BasicCoverageBlock`s, and
    /// no `BasicCoverageBlock` in this list dominates another `BasicCoverageBlock` in the list.
    /// If a new `curr` span also fits this criteria (compared to an existing list of
    /// `pending_dups`), that `curr` `CoverageSpan` moves to `prev` before possibly being added to
    /// the `pending_dups` list, on the next iteration. As a result, if `prev` and `pending_dups`
    /// have the same `Span`, the criteria for `pending_dups` holds for `prev` as well: a `prev`
    /// with a matching `Span` does not dominate any `pending_dup` and no `pending_dup` dominates a
    /// `prev` with a matching `Span`)
    pending_dups: Vec<CoverageSpan>,

    /// The final `CoverageSpan`s to add to the coverage map. A `Counter` or `Expression`
    /// will also be injected into the MIR for each `CoverageSpan`.
    refined_spans: Vec<CoverageSpan>,
impl<'a, 'tcx> CoverageSpans<'a, 'tcx> {
    /// Generate a minimal set of `CoverageSpan`s, each representing a contiguous code region to be

    /// The basic steps are:

    /// 1. Extract an initial set of spans from the `Statement`s and `Terminator`s of each
    ///    `BasicCoverageBlockData`.
    /// 2. Sort the spans by span.lo() (starting position). Spans that start at the same position
    ///    are sorted with longer spans before shorter spans; and equal spans are sorted
    ///    (deterministically) based on "dominator" relationship (if any).
    /// 3. Traverse the spans in sorted order to identify spans that can be dropped (for instance,
    ///    if another span or spans are already counting the same code region), or should be merged
    ///    into a broader combined span (because it represents a contiguous, non-branching, and
    ///    uninterrupted region of source code).

    /// Closures are exposed in their enclosing functions as `Assign` `Rvalue`s, and since
    /// closures have their own MIR, their `Span` in their enclosing function should be left

    /// Note the resulting vector of `CoverageSpan`s may not be fully sorted (and does not need
    pub(super) fn generate_coverage_spans(
        mir_body: &'a mir::Body<'tcx>,
        fn_sig_span: Span, // Ensured to be same SourceFile and SyntaxContext as `body_span`
        basic_coverage_blocks: &'a CoverageGraph,
    ) -> Vec<CoverageSpan> {
        // Build the (one-shot) refinement state machine. Iteration state starts
        // empty/zeroed; the sorted iterator is installed just below.
        let mut coverage_spans = CoverageSpans {
            basic_coverage_blocks,
            sorted_spans_iter: None,
            // Rough capacity guess: ~2 refined spans per BCB.
            refined_spans: Vec::with_capacity(basic_coverage_blocks.num_nodes() * 2),
            // Placeholder zero-length spans, overwritten on the first iteration.
            curr_original_span: Span::with_root_ctxt(BytePos(0), BytePos(0)),
            prev_original_span: Span::with_root_ctxt(BytePos(0), BytePos(0)),
            prev_expn_span: None,
            pending_dups: Vec::new(),
        // Extract + sort the initial spans, then run the refinement loop.
        let sorted_spans = coverage_spans.mir_to_initial_sorted_coverage_spans();
        coverage_spans.sorted_spans_iter = Some(sorted_spans.into_iter());
        coverage_spans.to_refined_spans()
/// Extracts one `CoverageSpan` per MIR statement/terminator across all BCBs,
/// appends the synthetic function-signature span, and sorts the result by
/// `lo()` (ties: longer span first; equal spans: dominated before dominators).
fn mir_to_initial_sorted_coverage_spans(&self) -> Vec<CoverageSpan> {
    // `num_nodes()` here is the MIR basic-block count; ~2 spans per block is a guess.
    let mut initial_spans = Vec::<CoverageSpan>::with_capacity(self.mir_body.num_nodes() * 2);
    for (bcb, bcb_data) in self.basic_coverage_blocks.iter_enumerated() {
        initial_spans.extend(self.bcb_to_initial_coverage_spans(bcb, bcb_data));
    if initial_spans.is_empty() {
        // This can happen if, for example, the function is unreachable (contains only a
        // `BasicBlock`(s) with an `Unreachable` terminator).
        return initial_spans;
    initial_spans.push(CoverageSpan::for_fn_sig(self.fn_sig_span));
    initial_spans.sort_unstable_by(|a, b| {
        if a.span.lo() == b.span.lo() {
            if a.span.hi() == b.span.hi() {
                if a.is_in_same_bcb(b) {
                    Some(Ordering::Equal)
                    // Sort equal spans by dominator relationship, in reverse order (so
                    // dominators always come after the dominated equal spans). When later
                    // comparing two spans in order, the first will either dominate the second,
                    // or they will have no dominator relationship.
                    self.basic_coverage_blocks.dominators().rank_partial_cmp(b.bcb, a.bcb)
            // Sort hi() in reverse order so shorter spans are attempted after longer spans.
            // This guarantees that, if a `prev` span overlaps, and is not equal to, a
            // `curr` span, the prev span either extends further left of the curr span, or
            // they start at the same position and the prev span extends further right of
            // the end of the curr span.
            b.span.hi().partial_cmp(&a.span.hi())
        a.span.lo().partial_cmp(&b.span.lo())
/// Iterate through the sorted `CoverageSpan`s, and return the refined list of merged and
/// de-duplicated `CoverageSpan`s.
fn to_refined_spans(mut self) -> Vec<CoverageSpan> {
    // Main refinement loop: compare each `curr` span against `prev` and decide to
    // merge, drop, carve out (closures), hold as a duplicate, or cut off overlap.
    while self.next_coverage_span() {
        if self.some_prev.is_none() {
            debug!(" initial span");
            self.check_invoked_macro_name_span();
        } else if self.curr().is_mergeable(self.prev()) {
            debug!(" same bcb (and neither is a closure), merge with prev={:?}", self.prev());
            let prev = self.take_prev();
            self.curr_mut().merge_from(prev);
            self.check_invoked_macro_name_span();
        // Note that curr.span may now differ from curr_original_span
        } else if self.prev_ends_before_curr() {
            " different bcbs and disjoint spans, so keep curr for next iter, and add \
            let prev = self.take_prev();
            self.push_refined_span(prev);
            self.check_invoked_macro_name_span();
        } else if self.prev().is_closure {
            // drop any equal or overlapping span (`curr`) and keep `prev` to test again in the
            " curr overlaps a closure (prev). Drop curr and keep prev for next iter. \
        } else if self.curr().is_closure {
            self.carve_out_span_for_closure();
        } else if self.prev_original_span == self.curr().span {
            // Note that this compares the new (`curr`) span to `prev_original_span`.
            // In this branch, the actual span byte range of `prev_original_span` is not
            // important. What is important is knowing whether the new `curr` span was
            // **originally** the same as the original span of `prev()`. The original spans
            // reflect their original sort order, and for equal spans, conveys a partial
            // ordering based on CFG dominator priority.
            if self.prev().is_macro_expansion() && self.curr().is_macro_expansion() {
                // Macros that expand to include branching (such as
                // `assert_eq!()`, `assert_ne!()`, `info!()`, `debug!()`, or
                // `trace!()) typically generate callee spans with identical
                // ranges (typically the full span of the macro) for all
                // `BasicBlocks`. This makes it impossible to distinguish
                // the condition (`if val1 != val2`) from the optional
                // branched statements (such as the call to `panic!()` on
                // assert failure). In this case it is better (or less
                // worse) to drop the optional branch bcbs and keep the
                // non-conditional statements, to count when reached.
                " curr and prev are part of a macro expansion, and curr has the same span \
                as prev, but is in a different bcb. Drop curr and keep prev for next iter. \
            self.hold_pending_dups_unless_dominated();
            self.cutoff_prev_at_overlapping_curr();
            self.check_invoked_macro_name_span();
    // The loop is done: flush the final `prev` and any still-pending duplicates.
    debug!(" AT END, adding last prev={:?}", self.prev());
    let prev = self.take_prev();
    let pending_dups = self.pending_dups.split_off(0);
    for dup in pending_dups {
        debug!(" ...adding at least one pending dup={:?}", dup);
        self.push_refined_span(dup);
    // Async functions wrap a closure that implements the body to be executed. The enclosing
    // function is called and returns an `impl Future` without initially executing any of the
    // body. To avoid showing the return from the enclosing function as a "covered" return from
    // the closure, the enclosing function's `TerminatorKind::Return`s `CoverageSpan` is
    // excluded. The closure's `Return` is the only one that will be counted. This provides
    // adequate coverage, and more intuitive counts. (Avoids double-counting the closing brace
    // of the function body.)
    let body_ends_with_closure = if let Some(last_covspan) = self.refined_spans.last() {
        last_covspan.is_closure && last_covspan.span.hi() == self.body_span.hi()
    if !body_ends_with_closure {
        self.push_refined_span(prev);
    // Remove `CoverageSpan`s derived from closures, originally added to ensure the coverage
    // regions for the current function leave room for the closure's own coverage regions
    // (injected separately, from the closure's own MIR).
    self.refined_spans.retain(|covspan| !covspan.is_closure);
/// Appends `covspan` to `refined_spans`, first merging it into the most recent
/// refined span when the two are mergeable (same BCB, neither a closure).
fn push_refined_span(&mut self, covspan: CoverageSpan) {
    let len = self.refined_spans.len();
    let last = &mut self.refined_spans[len - 1];
    if last.is_mergeable(&covspan) {
        "merging new refined span with last refined span, last={:?}, covspan={:?}",
        last.merge_from(covspan);
    // Not mergeable (or no prior refined span): push as a new entry.
    self.refined_spans.push(covspan)
/// When `curr` starts a new visible macro expansion, splits off a small extra
/// `CoverageSpan` covering just the macro name and `!` (e.g. `assert_eq!`), and
/// trims `curr` to start after it, so the invocation site gets its own region.
fn check_invoked_macro_name_span(&mut self) {
    if let Some(visible_macro) = self.curr().visible_macro(self.body_span) {
        // Only do this when `curr` is the first span of this expansion (its
        // context differs from the previous span's context).
        if self.prev_expn_span.map_or(true, |prev_expn_span| {
            self.curr().expn_span.ctxt() != prev_expn_span.ctxt()
            // Account for any prefix already merged into `curr` before its original start.
            let merged_prefix_len = self.curr_original_span.lo() - self.curr().span.lo();
            // Length of `name!` = macro name bytes + 1 for the `!`.
            let after_macro_bang =
                merged_prefix_len + BytePos(visible_macro.as_str().bytes().count() as u32 + 1);
            let mut macro_name_cov = self.curr().clone();
            // Shrink `curr` to begin after the macro name; the clone keeps only the name.
            self.curr_mut().span =
                self.curr().span.with_lo(self.curr().span.lo() + after_macro_bang);
            macro_name_cov.span =
                macro_name_cov.span.with_hi(macro_name_cov.span.lo() + after_macro_bang);
            " and curr starts a new macro expansion, so add a new span just for \
            the macro `{}!`, new span={:?}",
            visible_macro, macro_name_cov
            self.push_refined_span(macro_name_cov);
// Generate a set of `CoverageSpan`s from the filtered set of `Statement`s and `Terminator`s of
// the `BasicBlock`(s) in the given `BasicCoverageBlockData`. One `CoverageSpan` is generated
// for each `Statement` and `Terminator`. (Note that subsequent stages of coverage analysis will
// merge some `CoverageSpan`s, at which point a `CoverageSpan` may represent multiple
// `Statement`s and/or `Terminator`s.)
fn bcb_to_initial_coverage_spans(
    bcb: BasicCoverageBlock,
    bcb_data: &'a BasicCoverageBlockData,
) -> Vec<CoverageSpan> {
    let data = &self.mir_body[bb];
    // Statements whose spans are irrelevant to coverage are filtered out by
    // `filtered_statement_span` (returns `None` for them).
    .filter_map(move |(index, statement)| {
        filtered_statement_span(statement).map(|span| {
            CoverageSpan::for_statement(
                // Clamp the (possibly expansion-derived) span into the body span.
                function_source_span(span, self.body_span),
    // The block's terminator contributes a span too, when relevant.
    .chain(filtered_terminator_span(data.terminator()).map(|span| {
        CoverageSpan::for_terminator(
            function_source_span(span, self.body_span),
/// Returns a shared reference to `curr`; ICEs if `some_curr` is `None`.
fn curr(&self) -> &CoverageSpan {
    .unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_curr"))
/// Returns a mutable reference to `curr`; ICEs if `some_curr` is `None`.
fn curr_mut(&mut self) -> &mut CoverageSpan {
    .unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_curr"))
/// Returns a shared reference to `prev`; ICEs if `some_prev` is `None`.
fn prev(&self) -> &CoverageSpan {
    .unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_prev"))
/// Returns a mutable reference to `prev`; ICEs if `some_prev` is `None`.
fn prev_mut(&mut self) -> &mut CoverageSpan {
    .unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_prev"))
573 fn take_prev(&mut self) -> CoverageSpan {
574 self.some_prev.take().unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_prev"))
/// If there are `pending_dups` but `prev` is not a matching dup (`prev.span` doesn't match the
/// `pending_dups` spans), then one of the following two things happened during the previous
/// * the previous `curr` span (which is now `prev`) was not a duplicate of the pending_dups
///   (in which case there should be at least two spans in `pending_dups`); or
/// * the `span` of `prev` was modified by `curr_mut().merge_from(prev)` (in which case
///   `pending_dups` could have as few as one span)
/// In either case, no more spans will match the span of `pending_dups`, so
/// add the `pending_dups` if they don't overlap `curr`, and clear the list.
fn check_pending_dups(&mut self) {
    if let Some(dup) = self.pending_dups.last() {
        if dup.span != self.prev().span {
            " SAME spans, but pending_dups are NOT THE SAME, so BCBs matched on \
            previous iteration, or prev started a new disjoint span"
            // Dups that end before `curr` starts cannot overlap anything upcoming,
            // so they are safe to flush into the refined list.
            if dup.span.hi() <= self.curr().span.lo() {
                let pending_dups = self.pending_dups.split_off(0);
                for dup in pending_dups.into_iter() {
                    debug!(" ...adding at least one pending={:?}", dup);
                    self.push_refined_span(dup);
            // Otherwise the dups overlap `curr` and are dropped.
            self.pending_dups.clear();
/// Advance `prev` to `curr` (if any), and `curr` to the next `CoverageSpan` in sorted order.
fn next_coverage_span(&mut self) -> bool {
    // Promote the current span (if one survived the last iteration) to `prev`.
    if let Some(curr) = self.some_curr.take() {
        self.prev_expn_span = Some(curr.expn_span);
        self.some_prev = Some(curr);
        self.prev_original_span = self.curr_original_span;
    // Pull spans until one is usable (not already skipped past by `prev`).
    while let Some(curr) = self.sorted_spans_iter.as_mut().unwrap().next() {
        debug!("FOR curr={:?}", curr);
        if self.some_prev.is_some() && self.prev_starts_after_next(&curr) {
            " prev.span starts after curr.span, so curr will be dropped (skipping past \
            closure?); prev={:?}",
            // Save a copy of the original span for `curr` in case the `CoverageSpan` is changed
            // by `self.curr_mut().merge_from(prev)`.
            self.curr_original_span = curr.span;
            self.some_curr.replace(curr);
            self.check_pending_dups();
633 /// If called, then the next call to `next_coverage_span()` will *not* update `prev` with the
634 /// `curr` coverage span.
635 fn take_curr(&mut self) -> CoverageSpan {
636 self.some_curr.take().unwrap_or_else(|| bug!("invalid attempt to unwrap a None some_curr"))
639 /// Returns true if the curr span should be skipped because prev has already advanced beyond the
640 /// end of curr. This can only happen if a prior iteration updated `prev` to skip past a region
641 /// of code, such as skipping past a closure.
642 fn prev_starts_after_next(&self, next_curr: &CoverageSpan) -> bool {
643 self.prev().span.lo() > next_curr.span.lo()
646 /// Returns true if the curr span starts past the end of the prev span, which means they don't
647 /// overlap, so we now know the prev can be added to the refined coverage spans.
648 fn prev_ends_before_curr(&self) -> bool {
649 self.prev().span.hi() <= self.curr().span.lo()
/// If `prev`s span extends left of the closure (`curr`), carve out the closure's span from
/// `prev`'s span. (The closure's coverage counters will be injected when processing the
/// closure's own MIR.) Add the portion of the span to the left of the closure; and if the span
/// extends to the right of the closure, update `prev` to that portion of the span. For any
/// `pending_dups`, repeat the same process.
fn carve_out_span_for_closure(&mut self) {
    // `curr` is the closure span; its start/end are the carve boundaries.
    let curr_span = self.curr().span;
    let left_cutoff = curr_span.lo();
    let right_cutoff = curr_span.hi();
    let has_pre_closure_span = self.prev().span.lo() < right_cutoff;
    let has_post_closure_span = self.prev().span.hi() > right_cutoff;
    // Temporarily take ownership of the dups so they can be trimmed alongside `prev`.
    let mut pending_dups = self.pending_dups.split_off(0);
    if has_pre_closure_span {
        // Emit the part of `prev` (and each dup) to the *left* of the closure.
        let mut pre_closure = self.prev().clone();
        pre_closure.span = pre_closure.span.with_hi(left_cutoff);
        debug!(" prev overlaps a closure. Adding span for pre_closure={:?}", pre_closure);
        if !pending_dups.is_empty() {
            for mut dup in pending_dups.iter().cloned() {
                dup.span = dup.span.with_hi(left_cutoff);
                debug!(" ...and at least one pre_closure dup={:?}", dup);
                self.push_refined_span(dup);
        self.push_refined_span(pre_closure);
    if has_post_closure_span {
        // Mutate `prev.span()` to start after the closure (and discard curr).
        // (**NEVER** update `prev_original_span` because it affects the assumptions
        // about how the `CoverageSpan`s are ordered.)
        self.prev_mut().span = self.prev().span.with_lo(right_cutoff);
        debug!(" Mutated prev.span to start after the closure. prev={:?}", self.prev());
        for dup in pending_dups.iter_mut() {
            debug!(" ...and at least one overlapping dup={:?}", dup);
            dup.span = dup.span.with_lo(right_cutoff);
        // Restore the (now-trimmed) dups for later processing.
        self.pending_dups.append(&mut pending_dups);
        let closure_covspan = self.take_curr();
        self.push_refined_span(closure_covspan); // since self.prev() was already updated
        // No post-closure portion: the trimmed dups are no longer needed.
        pending_dups.clear();
/// Called if `curr.span` equals `prev_original_span` (and potentially equal to all
/// `pending_dups` spans, if any). Keep in mind, `prev.span()` may have been changed.
/// If prev.span() was merged into other spans (with matching BCB, for instance),
/// `prev.span.hi()` will be greater than (further right of) `prev_original_span.hi()`.
/// If prev.span() was split off to the right of a closure, prev.span().lo() will be
/// greater than prev_original_span.lo(). The actual span of `prev_original_span` is
/// not as important as knowing that `prev()` **used to have the same span** as `curr(),
/// which means their sort order is still meaningful for determining the dominator
///
/// When two `CoverageSpan`s have the same `Span`, dominated spans can be discarded; but if
/// neither `CoverageSpan` dominates the other, both (or possibly more than two) are held,
/// until their disposition is determined. In this latter case, the `prev` dup is moved into
/// `pending_dups` so the new `curr` dup can be moved to `prev` for the next iteration.
fn hold_pending_dups_unless_dominated(&mut self) {
    // Equal coverage spans are ordered by dominators before dominated (if any), so it should be
    // impossible for `curr` to dominate any previous `CoverageSpan`.
    debug_assert!(!self.span_bcb_is_dominated_by(self.prev(), self.curr()));

    // Drop any held dup that dominates `curr` — the dominated block wins.
    let initial_pending_count = self.pending_dups.len();
    if initial_pending_count > 0 {
        let mut pending_dups = self.pending_dups.split_off(0);
        pending_dups.retain(|dup| !self.span_bcb_is_dominated_by(self.curr(), dup));
        self.pending_dups.append(&mut pending_dups);
        if self.pending_dups.len() < initial_pending_count {
            " discarded {} of {} pending_dups that dominated curr",
            initial_pending_count - self.pending_dups.len(),
            initial_pending_count

    if self.span_bcb_is_dominated_by(self.curr(), self.prev()) {
        " different bcbs but SAME spans, and prev dominates curr. Discard prev={:?}",
        self.cutoff_prev_at_overlapping_curr();
        // If one span dominates the other, associate the span with the code from the dominated
        // block only (`curr`), and discard the overlapping portion of the `prev` span. (Note
        // that if `prev.span` is wider than `prev_original_span`, a `CoverageSpan` will still
        // be created for `prev`s block, for the non-overlapping portion, left of `curr.span`.)

        // x if x < 1 => { ... }

        // The span for the first `x` is referenced by both the pattern block (every time it is
        // evaluated) and the arm code (only when matched). The counter will be applied only to
        // the dominated block. This allows coverage to track and highlight things like the
        // assignment of `x` above, if the branch is matched, making `x` available to the arm
        // code; and to track and highlight the question mark `?` "try" operator at the end of
        // a function call returning a `Result`, so the `?` is covered when the function returns
        // an `Err`, and not counted as covered if the function always returns `Ok`.

        // Save `prev` in `pending_dups`. (`curr` will become `prev` in the next iteration.)
        // If the `curr` CoverageSpan is later discarded, `pending_dups` can be discarded as
        // well; but if `curr` is added to refined_spans, the `pending_dups` will also be added.
        " different bcbs but SAME spans, and neither dominates, so keep curr for \
        next iter, and, pending upcoming spans (unless overlapping) add prev={:?}",
        let prev = self.take_prev();
        self.pending_dups.push(prev);
/// `curr` overlaps `prev`. If `prev`s span extends left of `curr`s span, keep _only_
/// statements that end before `curr.lo()` (if any), and add the portion of the
/// combined span for those statements. Any other statements have overlapping spans
/// that can be ignored because `curr` and/or other upcoming statements/spans inside
/// the overlap area will produce their own counters. This disambiguation process
/// avoids injecting multiple counters for overlapping spans, and the potential for
fn cutoff_prev_at_overlapping_curr(&mut self) {
    " different bcbs, overlapping spans, so ignore/drop pending and only add prev \
    if it has statements that end before curr; prev={:?}",
    if self.pending_dups.is_empty() {
        // Trim `prev` to the statements that end strictly before `curr` starts.
        let curr_span = self.curr().span;
        self.prev_mut().cutoff_statements_at(curr_span.lo());
        if self.prev().coverage_statements.is_empty() {
            debug!(" ... no non-overlapping statements to add");
            debug!(" ... adding modified prev={:?}", self.prev());
            let prev = self.take_prev();
            self.push_refined_span(prev);
        // with `pending_dups`, `prev` cannot have any statements that don't overlap
        self.pending_dups.clear();
794 fn span_bcb_is_dominated_by(&self, covspan: &CoverageSpan, dom_covspan: &CoverageSpan) -> bool {
795 self.basic_coverage_blocks.is_dominated_by(covspan.bcb, dom_covspan.bcb)
/// If the MIR `Statement` has a span contributive to computing coverage spans,
/// return it; otherwise return `None`.
pub(super) fn filtered_statement_span(statement: &Statement<'_>) -> Option<Span> {
    match statement.kind {
        // These statements have spans that are often outside the scope of the executed source code
        // for their parent `BasicBlock`.
        StatementKind::StorageLive(_)
        | StatementKind::StorageDead(_)
        // Coverage should not be encountered, but don't inject more coverage here
        | StatementKind::Coverage(_)
        | StatementKind::Nop => None,

        // FIXME(#78546): MIR InstrumentCoverage - Can the source_info.span for `FakeRead`
        // statements be more consistent?
        // FakeReadCause::ForGuardBinding, in this example:
        // x if x < 1 => { ... }
        // The BasicBlock within the match arm code included one of these statements, but the span
        // for it covered the `1` in this source. The actual statements have nothing to do with that
        // FakeRead(ForGuardBinding, _4);
        // _4 = &_1; (at the span for the first `x`)
        // and `_1` is the `Place` for `somenum`.
        // If and when the Issue is resolved, remove this special case match pattern:
        StatementKind::FakeRead(box (cause, _)) if cause == FakeReadCause::ForGuardBinding => None,

        // Retain spans from all other statements
        StatementKind::FakeRead(box (_, _)) // Not including `ForGuardBinding`
        | StatementKind::CopyNonOverlapping(..)
        | StatementKind::Assign(_)
        | StatementKind::SetDiscriminant { .. }
        | StatementKind::Retag(_, _)
        | StatementKind::AscribeUserType(_, _) => {
            Some(statement.source_info.span)
/// If the MIR `Terminator` has a span contributive to computing coverage spans,
/// return it; otherwise return `None`.
pub(super) fn filtered_terminator_span(terminator: &Terminator<'_>) -> Option<Span> {
    match terminator.kind {
        // These terminators have spans that don't positively contribute to computing a reasonable
        // span of actually executed source code. (For example, SwitchInt terminators extracted from
        // an `if condition { block }` has a span that includes the executed block, if true,
        // but for coverage, the code region executed, up to *and* through the SwitchInt,
        // actually stops before the if's block.)
        TerminatorKind::Unreachable // Unreachable blocks are not connected to the MIR CFG
        | TerminatorKind::Assert { .. }
        | TerminatorKind::Drop { .. }
        | TerminatorKind::DropAndReplace { .. }
        | TerminatorKind::SwitchInt { .. }
        // For `FalseEdge`, only the `real` branch is taken, so it is similar to a `Goto`.
        | TerminatorKind::FalseEdge { .. }
        | TerminatorKind::Goto { .. } => None,

        // Call `func` operand can have a more specific span when part of a chain of calls
        | TerminatorKind::Call { ref func, .. } => {
            let mut span = terminator.source_info.span;
            // Narrow the span's start to the callee constant, when it starts later
            // (e.g. `a.b.c()` in a method chain).
            if let mir::Operand::Constant(box constant) = func {
                if constant.span.lo() > span.lo() {
                    span = span.with_lo(constant.span.lo());

        // Retain spans from all other terminators
        TerminatorKind::Resume
        | TerminatorKind::Abort
        | TerminatorKind::Return
        | TerminatorKind::Yield { .. }
        | TerminatorKind::GeneratorDrop
        | TerminatorKind::FalseUnwind { .. }
        | TerminatorKind::InlineAsm { .. } => {
            Some(terminator.source_info.span)
884 /// Returns an extrapolated span (pre-expansion[^1]) corresponding to a range
885 /// within the function's body source. This span is guaranteed to be contained
886 /// within, or equal to, the `body_span`. If the extrapolated span is not
887 /// contained within the `body_span`, the `body_span` is returned.
889 /// [^1]Expansions result from Rust syntax including macros, syntactic sugar,
892 pub(super) fn function_source_span(span: Span, body_span: Span) -> Span {
893 let original_span = original_sp(span, body_span).with_ctxt(body_span.ctxt());
894 if body_span.contains(original_span) { original_span } else { body_span }