1 use crate::transform::{MirPass, MirSource};
2 use crate::util::patch::MirPatch;
3 use rustc_data_structures::fingerprint::Fingerprint;
4 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
5 use rustc_hir::lang_items;
7 use rustc_middle::ich::StableHashingContext;
9 use rustc_middle::mir::coverage::*;
10 use rustc_middle::mir::interpret::Scalar;
11 use rustc_middle::mir::traversal;
12 use rustc_middle::mir::{
13 BasicBlock, BasicBlockData, CoverageInfo, Operand, Place, SourceInfo, SourceScope,
14 StatementKind, Terminator, TerminatorKind,
17 use rustc_middle::ty::query::Providers;
18 use rustc_middle::ty::{FnDef, TyCtxt};
19 use rustc_span::def_id::DefId;
20 use rustc_span::{FileName, Pos, RealFileName, Span};
/// Inserts call to count_code_region() as a placeholder to be replaced during code generation with
/// the intrinsic llvm.instrprof.increment.
///
/// Zero-sized marker type; the pass logic lives in its `MirPass::run_pass` impl.
pub struct InstrumentCoverage;
26 /// The `query` provider for `CoverageInfo`, requested by `codegen_intrinsic_call()` when
27 /// constructing the arguments for `llvm.instrprof.increment`.
28 pub(crate) fn provide(providers: &mut Providers) {
29 providers.coverageinfo = |tcx, def_id| coverageinfo_from_mir(tcx, def_id);
32 fn coverageinfo_from_mir<'tcx>(tcx: TyCtxt<'tcx>, mir_def_id: DefId) -> CoverageInfo {
33 let mir_body = tcx.optimized_mir(mir_def_id);
34 // FIXME(richkadel): The current implementation assumes the MIR for the given DefId
35 // represents a single function. Validate and/or correct if inlining (which should be disabled
36 // if -Zinstrument-coverage is enabled) and/or monomorphization invalidates these assumptions.
37 let count_code_region_fn = tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None);
38 let coverage_counter_add_fn =
39 tcx.require_lang_item(lang_items::CoverageCounterAddFnLangItem, None);
40 let coverage_counter_subtract_fn =
41 tcx.require_lang_item(lang_items::CoverageCounterSubtractFnLangItem, None);
43 // The `num_counters` argument to `llvm.instrprof.increment` is the number of injected
44 // counters, with each counter having a counter ID from `0..num_counters-1`. MIR optimization
45 // may split and duplicate some BasicBlock sequences. Simply counting the calls may not
46 // work; but computing the num_counters by adding `1` to the highest counter_id (for a given
47 // instrumented function) is valid.
49 // `num_expressions` is the number of counter expressions added to the MIR body. Both
50 // `num_counters` and `num_expressions` are used to initialize new vectors, during backend
51 // code generate, to lookup counters and expressions by simple u32 indexes.
52 let mut num_counters: u32 = 0;
53 let mut num_expressions: u32 = 0;
55 traversal::preorder(mir_body).map(|(_, data)| data).filter_map(call_terminators)
57 if let TerminatorKind::Call { func: Operand::Constant(func), args, .. } = &terminator.kind {
58 match func.literal.ty.kind {
59 FnDef(id, _) if id == count_code_region_fn => {
61 args.get(count_code_region_args::COUNTER_ID).expect("arg found");
62 let counter_id = mir::Operand::scalar_from_const(counter_id_arg)
64 .expect("counter_id arg is u32");
65 num_counters = std::cmp::max(num_counters, counter_id + 1);
68 if id == coverage_counter_add_fn || id == coverage_counter_subtract_fn =>
70 let expression_id_arg = args
71 .get(coverage_counter_expression_args::EXPRESSION_ID)
73 let id_descending_from_max = mir::Operand::scalar_from_const(expression_id_arg)
75 .expect("expression_id arg is u32");
76 // Counter expressions are initially assigned IDs descending from `u32::MAX`, so
77 // the range of expression IDs is disjoint from the range of counter IDs. This
78 // way, both counters and expressions can be operands in other expressions.
79 let expression_index = u32::MAX - id_descending_from_max;
80 num_expressions = std::cmp::max(num_expressions, expression_index + 1);
86 CoverageInfo { num_counters, num_expressions }
89 fn call_terminators(data: &'tcx BasicBlockData<'tcx>) -> Option<&'tcx Terminator<'tcx>> {
90 let terminator = data.terminator();
91 match terminator.kind {
92 TerminatorKind::Call { .. } => Some(terminator),
97 impl<'tcx> MirPass<'tcx> for InstrumentCoverage {
98 fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, mir_body: &mut mir::Body<'tcx>) {
99 // If the InstrumentCoverage pass is called on promoted MIRs, skip them.
100 // See: https://github.com/rust-lang/rust/pull/73011#discussion_r438317601
101 if src.promoted.is_none() {
102 Instrumentor::new(tcx, src, mir_body).inject_counters();
107 /// Distinguishes the expression operators.
113 struct InjectedCall<'tcx> {
115 args: Vec<Operand<'tcx>>,
120 struct Instrumentor<'a, 'tcx> {
123 mir_body: &'a mut mir::Body<'tcx>,
124 hir_body: &'tcx rustc_hir::Body<'tcx>,
125 function_source_hash: Option<u64>,
127 num_expressions: u32,
impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
    /// Creates an `Instrumentor` for the function identified by `src`, resolving its HIR body.
    fn new(tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, mir_body: &'a mut mir::Body<'tcx>) -> Self {
        let mir_def_id = src.def_id();
        let hir_body = hir_body(tcx, mir_def_id);
        Self {
            tcx,
            mir_def_id,
            mir_body,
            hir_body,
            function_source_hash: None,
            num_counters: 0,
            num_expressions: 0,
        }
    }
145 /// Counter IDs start from zero and go up.
146 fn next_counter(&mut self) -> u32 {
147 assert!(self.num_counters < u32::MAX - self.num_expressions);
148 let next = self.num_counters;
149 self.num_counters += 1;
153 /// Expression IDs start from u32::MAX and go down because a CounterExpression can reference
154 /// (add or subtract counts) of both Counter regions and CounterExpression regions. The counter
155 /// expression operand IDs must be unique across both types.
156 fn next_expression(&mut self) -> u32 {
157 assert!(self.num_counters < u32::MAX - self.num_expressions);
158 let next = u32::MAX - self.num_expressions;
159 self.num_expressions += 1;
163 fn function_source_hash(&mut self) -> u64 {
164 match self.function_source_hash {
167 let hash = hash_mir_source(self.tcx, self.hir_body);
168 self.function_source_hash.replace(hash);
174 fn inject_counters(&mut self) {
175 let mir_body = &self.mir_body;
176 let body_span = self.hir_body.value.span;
177 debug!("instrumenting {:?}, span: {:?}", self.mir_def_id, body_span);
179 // FIXME(richkadel): As a first step, counters are only injected at the top of each
180 // function. The complete solution will inject counters at each conditional code branch.
181 let _ignore = mir_body;
182 let id = self.next_counter();
183 let function_source_hash = self.function_source_hash();
184 let scope = rustc_middle::mir::OUTERMOST_SOURCE_SCOPE;
185 let is_cleanup = false;
186 let next_block = rustc_middle::mir::START_BLOCK;
188 self.make_counter(id, function_source_hash, body_span),
194 // FIXME(richkadel): The next step to implement source based coverage analysis will be
195 // instrumenting branches within functions, and some regions will be counted by "counter
196 // expression". The function to inject counter expression is implemented. Replace this
197 // "fake use" with real use.
198 let fake_use = false;
202 let op = if add { Op::Add } else { Op::Subtract };
205 let scope = rustc_middle::mir::OUTERMOST_SOURCE_SCOPE;
206 let is_cleanup = false;
207 let next_block = rustc_middle::mir::START_BLOCK;
209 let id = self.next_expression();
211 self.make_expression(id, body_span, lhs, op, rhs),
219 fn make_counter(&self, id: u32, function_source_hash: u64, span: Span) -> InjectedCall<'tcx> {
220 let inject_at = span.shrink_to_lo();
222 let func = function_handle(
224 self.tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None),
228 let mut args = Vec::new();
230 use count_code_region_args::*;
231 debug_assert_eq!(FUNCTION_SOURCE_HASH, args.len());
232 args.push(self.const_u64(function_source_hash, inject_at));
234 debug_assert_eq!(COUNTER_ID, args.len());
235 args.push(self.const_u32(id, inject_at));
237 InjectedCall { func, args, span, inject_at }
247 ) -> InjectedCall<'tcx> {
248 let inject_at = span.shrink_to_lo();
250 let func = function_handle(
252 self.tcx.require_lang_item(
254 Op::Add => lang_items::CoverageCounterAddFnLangItem,
255 Op::Subtract => lang_items::CoverageCounterSubtractFnLangItem,
262 let mut args = Vec::new();
264 use coverage_counter_expression_args::*;
265 debug_assert_eq!(EXPRESSION_ID, args.len());
266 args.push(self.const_u32(id, inject_at));
268 debug_assert_eq!(LEFT_ID, args.len());
269 args.push(self.const_u32(lhs, inject_at));
271 debug_assert_eq!(RIGHT_ID, args.len());
272 args.push(self.const_u32(rhs, inject_at));
274 InjectedCall { func, args, span, inject_at }
279 call: InjectedCall<'tcx>,
282 next_block: BasicBlock,
284 let InjectedCall { func, mut args, span, inject_at } = call;
286 " injecting {}call to {:?}({:?}) at: {:?}, scope: {:?}",
287 if is_cleanup { "cleanup " } else { "" },
294 let mut patch = MirPatch::new(self.mir_body);
296 let (file_name, start_line, start_col, end_line, end_col) = self.code_region(&span);
298 // FIXME(richkadel): Note that `const_str()` results in the creation of an `Allocation` to
299 // hold one copy of each unique filename. It looks like that `Allocation` may translate into
300 // the creation of an `@alloc` in LLVM IR that is never actually used by runtime code.
304 // @alloc4 = private unnamed_addr constant <{ [43 x i8] }> \
305 // <{ [43 x i8] c"C:\\msys64\\home\\richkadel\\rust\\rust_basic.rs" }>, align 1
307 // Can I flag the alloc as something not to be added to codegen? Or somehow remove it before
308 // it gets added to the LLVM IR? Do we need some kind of reference counting to know it's
309 // not used by any runtime code?
311 // This question is moot if I convert the Call Terminators to Statements, I believe:
312 // https://rust-lang.zulipchat.com/#narrow/stream/233931-t-compiler.2Fmajor-changes/topic/Implement.20LLVM-compatible.20source-based.20cod.20compiler-team.23278/near/206731748
313 args.push(self.const_str(&file_name, inject_at));
314 args.push(self.const_u32(start_line, inject_at));
315 args.push(self.const_u32(start_col, inject_at));
316 args.push(self.const_u32(end_line, inject_at));
317 args.push(self.const_u32(end_col, inject_at));
319 let temp = patch.new_temp(self.tcx.mk_unit(), inject_at);
320 let new_block = patch.new_block(placeholder_block(inject_at, scope, is_cleanup));
321 patch.patch_terminator(
323 TerminatorKind::Call {
326 // new_block will swapped with the next_block, after applying patch
327 destination: Some((Place::from(temp), new_block)),
329 from_hir_call: false,
334 patch.add_statement(new_block.start_location(), StatementKind::StorageLive(temp));
335 patch.add_statement(next_block.start_location(), StatementKind::StorageDead(temp));
337 patch.apply(self.mir_body);
339 // To insert the `new_block` in front of the first block in the counted branch (the
340 // `next_block`), just swap the indexes, leaving the rest of the graph unchanged.
341 self.mir_body.basic_blocks_mut().swap(next_block, new_block);
344 /// Convert the Span into its file name, start line and column, and end line and column
345 fn code_region(&self, span: &Span) -> (String, u32, u32, u32, u32) {
346 let source_map = self.tcx.sess.source_map();
347 let start = source_map.lookup_char_pos(span.lo());
348 let end = if span.hi() == span.lo() {
351 let end = source_map.lookup_char_pos(span.hi());
355 "Region start ({:?} -> {:?}) and end ({:?} -> {:?}) don't come from the same source file!",
363 match &start.file.name {
364 FileName::Real(RealFileName::Named(path)) => (
365 path.to_string_lossy().to_string(),
367 start.col.to_u32() + 1,
369 end.col.to_u32() + 1,
372 bug!("start.file.name should be a RealFileName, but it was: {:?}", start.file.name)
377 fn const_str(&self, value: &str, span: Span) -> Operand<'tcx> {
378 Operand::const_from_str(self.tcx, value, span)
381 fn const_u32(&self, value: u32, span: Span) -> Operand<'tcx> {
382 Operand::const_from_scalar(self.tcx, self.tcx.types.u32, Scalar::from_u32(value), span)
385 fn const_u64(&self, value: u64, span: Span) -> Operand<'tcx> {
386 Operand::const_from_scalar(self.tcx, self.tcx.types.u64, Scalar::from_u64(value), span)
390 fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, span: Span) -> Operand<'tcx> {
391 let ret_ty = tcx.fn_sig(fn_def_id).output();
392 let ret_ty = ret_ty.no_bound_vars().unwrap();
393 let substs = tcx.mk_substs(::std::iter::once(ty::subst::GenericArg::from(ret_ty)));
394 Operand::function_handle(tcx, fn_def_id, substs, span)
397 fn placeholder_block(span: Span, scope: SourceScope, is_cleanup: bool) -> BasicBlockData<'tcx> {
400 terminator: Some(Terminator {
401 source_info: SourceInfo { span, scope },
402 // this gets overwritten by the counter Call
403 kind: TerminatorKind::Unreachable,
409 fn hir_body<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx rustc_hir::Body<'tcx> {
410 let hir_node = tcx.hir().get_if_local(def_id).expect("DefId is local");
411 let fn_body_id = hir::map::associated_body(hir_node).expect("HIR node is a function with body");
412 tcx.hir().body(fn_body_id)
415 fn hash_mir_source<'tcx>(tcx: TyCtxt<'tcx>, hir_body: &'tcx rustc_hir::Body<'tcx>) -> u64 {
416 let mut hcx = tcx.create_no_span_stable_hashing_context();
417 hash(&mut hcx, &hir_body.value).to_smaller_hash()
421 hcx: &mut StableHashingContext<'tcx>,
422 node: &impl HashStable<StableHashingContext<'tcx>>,
424 let mut stable_hasher = StableHasher::new();
425 node.hash_stable(hcx, &mut stable_hasher);
426 stable_hasher.finish()