#[rustc_const_unstable(feature = "const_ptr_offset_from", issue = "41079")]
pub fn ptr_offset_from<T>(ptr: *const T, base: *const T) -> isize;
- /// Internal placeholder for injecting code coverage counters when the "instrument-coverage"
- /// option is enabled. The source code region information is extracted prior to code generation,
- /// and added to the "coverage map", which is injected into the generated code as additional
- /// data. This intrinsic then triggers the generation of LLVM intrinsic call
- /// `instrprof.increment`, using the remaining args (`function_source_hash` and `index`).
- #[cfg(not(bootstrap))]
- #[lang = "count_code_region"]
- pub fn count_code_region(
- function_source_hash: u64,
- index: u32,
- file_name: &'static str,
- start_line: u32,
- start_col: u32,
- end_line: u32,
- end_col: u32,
- );
-
- /// Internal marker for code coverage expressions, injected into the MIR when the
- /// "instrument-coverage" option is enabled. This intrinsic is not converted into a
- /// backend intrinsic call, but its arguments are extracted during the production of a
- /// "coverage map", which is injected into the generated code, as additional data.
- /// This marker identifies a code region and two other counters or counter expressions
- /// whose sum is the number of times the code region was executed.
- #[cfg(not(bootstrap))]
- #[lang = "coverage_counter_add"]
- pub fn coverage_counter_add(
- index: u32,
- left_index: u32,
- right_index: u32,
- file_name: &'static str,
- start_line: u32,
- start_col: u32,
- end_line: u32,
- end_col: u32,
- );
-
- /// This marker identifies a code region and two other counters or counter expressions
- /// whose difference is the number of times the code region was executed.
- /// (See `coverage_counter_add` for more information.)
- #[cfg(not(bootstrap))]
- #[lang = "coverage_counter_subtract"]
- pub fn coverage_counter_subtract(
- index: u32,
- left_index: u32,
- right_index: u32,
- file_name: &'static str,
- start_line: u32,
- start_col: u32,
- end_line: u32,
- end_col: u32,
- );
-
- /// This marker identifies a code region to be added to the "coverage map" to indicate source
- /// code that can never be reached.
- /// (See `coverage_counter_add` for more information.)
- #[cfg(not(bootstrap))]
- pub fn coverage_unreachable(
- file_name: &'static str,
- start_line: u32,
- start_col: u32,
- end_line: u32,
- end_col: u32,
- );
-
/// See documentation of `<*const T>::guaranteed_eq` for details.
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
pub fn ptr_guaranteed_eq<T>(ptr: *const T, other: *const T) -> bool;
hash: &'ll Value,
num_counters: &'ll Value,
index: &'ll Value,
- ) -> &'ll Value {
+ ) {
debug!(
"instrprof_increment() with args ({:?}, {:?}, {:?}, {:?})",
fn_name, hash, num_counters, index
let args = self.check_call("call", llfn, args);
unsafe {
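+            // `llvm.instrprof.increment` returns void, so the result of the call is
+            // intentionally discarded.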
- llvm::LLVMRustBuildCall(
+ let _ = llvm::LLVMRustBuildCall(
self.llbuilder,
llfn,
args.as_ptr() as *const &llvm::Value,
args.len() as c_uint,
None,
- )
+ );
}
}
use crate::llvm;
use llvm::coverageinfo::CounterMappingRegion;
-use rustc_codegen_ssa::coverageinfo::map::{Counter, CounterExpression, Region};
+use rustc_codegen_ssa::coverageinfo::map::{Counter, CounterExpression};
use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods};
use rustc_data_structures::fx::FxIndexSet;
use rustc_llvm::RustString;
-use tracing::debug;
+use rustc_middle::mir::coverage::CodeRegion;
use std::ffi::CString;
+use tracing::debug;
+
/// Generates and exports the Coverage Map.
///
/// This Coverage Map complies with Coverage Mapping Format version 3 (zero-based encoded as 2),
fn write_coverage_mappings(
&mut self,
expressions: Vec<CounterExpression>,
- counter_regions: impl Iterator<Item = (Counter, &'tcx Region<'tcx>)>,
+ counter_regions: impl Iterator<Item = (Counter, &'a CodeRegion)>,
coverage_mappings_buffer: &RustString,
) {
let mut counter_regions = counter_regions.collect::<Vec<_>>();
let mut current_file_name = None;
let mut current_file_id = 0;
- // Convert the list of (Counter, Region) pairs to an array of `CounterMappingRegion`, sorted
+ // Convert the list of (Counter, CodeRegion) pairs to an array of `CounterMappingRegion`, sorted
// by filename and position. Capture any new files to compute the `CounterMappingRegion`s
// `file_id` (indexing files referenced by the current function), and construct the
// function-specific `virtual_file_mapping` from `file_id` to its index in the module's
// `filenames` array.
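+        // For example, regions spanning files ["a.rs", "a.rs", "b.rs"] get local `file_id`s
+        // [0, 0, 1], and `virtual_file_mapping` maps each local `file_id` to that file's
+        // index in the module-wide `filenames` array.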
counter_regions.sort_unstable_by_key(|(_counter, region)| *region);
for (counter, region) in counter_regions {
- let Region { file_name, start_line, start_col, end_line, end_col } = *region;
- let same_file = current_file_name.as_ref().map_or(false, |p| p == file_name);
+ let CodeRegion { file_name, start_line, start_col, end_line, end_col } = *region;
+ let same_file = current_file_name.as_ref().map_or(false, |p| *p == file_name);
if !same_file {
if current_file_name.is_some() {
current_file_id += 1;
}
- current_file_name = Some(file_name.to_string());
- let c_filename =
- CString::new(file_name).expect("null error converting filename to C string");
+ current_file_name = Some(file_name);
+ let c_filename = CString::new(file_name.to_string())
+ .expect("null error converting filename to C string");
debug!(" file_id: {} = '{:?}'", current_file_id, c_filename);
let (filenames_index, _) = self.filenames.insert_full(c_filename);
virtual_file_mapping.push(filenames_index as u32);
use libc::c_uint;
use llvm::coverageinfo::CounterMappingRegion;
-use rustc_codegen_ssa::coverageinfo::map::{CounterExpression, ExprKind, FunctionCoverage, Region};
+use rustc_codegen_ssa::coverageinfo::map::{CounterExpression, FunctionCoverage};
use rustc_codegen_ssa::traits::{
BaseTypeMethods, CoverageInfoBuilderMethods, CoverageInfoMethods, MiscMethods, StaticMethods,
};
use rustc_data_structures::fx::FxHashMap;
use rustc_llvm::RustString;
+use rustc_middle::mir::coverage::{
+ CodeRegion, CounterValueReference, ExpressionOperandId, InjectedExpressionIndex, Op,
+};
use rustc_middle::ty::Instance;
-use tracing::debug;
use std::cell::RefCell;
use std::ffi::CString;
+use tracing::debug;
+
pub mod mapgen;
const COVMAP_VAR_ALIGN_BYTES: usize = 8;
/// A context object for maintaining all state needed by the coverageinfo module.
pub struct CrateCoverageContext<'tcx> {
// Coverage region data for each instrumented function identified by DefId.
- pub(crate) function_coverage_map: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverage<'tcx>>>,
+ pub(crate) function_coverage_map: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverage>>,
}
impl<'tcx> CrateCoverageContext<'tcx> {
Self { function_coverage_map: Default::default() }
}
- pub fn take_function_coverage_map(&self) -> FxHashMap<Instance<'tcx>, FunctionCoverage<'tcx>> {
+ pub fn take_function_coverage_map(&self) -> FxHashMap<Instance<'tcx>, FunctionCoverage> {
self.function_coverage_map.replace(FxHashMap::default())
}
}
&mut self,
instance: Instance<'tcx>,
function_source_hash: u64,
- id: u32,
- region: Region<'tcx>,
+ id: CounterValueReference,
+ region: CodeRegion,
) {
debug!(
- "adding counter to coverage_regions: instance={:?}, function_source_hash={}, id={}, \
+ "adding counter to coverage_regions: instance={:?}, function_source_hash={}, id={:?}, \
at {:?}",
instance, function_source_hash, id, region,
);
fn add_counter_expression_region(
&mut self,
instance: Instance<'tcx>,
- id_descending_from_max: u32,
- lhs: u32,
- op: ExprKind,
- rhs: u32,
- region: Region<'tcx>,
+ id: InjectedExpressionIndex,
+ lhs: ExpressionOperandId,
+ op: Op,
+ rhs: ExpressionOperandId,
+ region: CodeRegion,
) {
debug!(
- "adding counter expression to coverage_regions: instance={:?}, id={}, {} {:?} {}, \
+ "adding counter expression to coverage_regions: instance={:?}, id={:?}, {:?} {:?} {:?}, \
at {:?}",
- instance, id_descending_from_max, lhs, op, rhs, region,
+ instance, id, lhs, op, rhs, region,
);
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
coverage_regions
.entry(instance)
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
- .add_counter_expression(id_descending_from_max, lhs, op, rhs, region);
+ .add_counter_expression(id, lhs, op, rhs, region);
}
- fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: Region<'tcx>) {
+ fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: CodeRegion) {
debug!(
"adding unreachable code to coverage_regions: instance={:?}, at {:?}",
instance, region,
use rustc_codegen_ssa::base::{compare_simd_types, to_immediate, wants_msvc_seh};
use rustc_codegen_ssa::common::span_invalid_monomorphization_error;
use rustc_codegen_ssa::common::{IntPredicate, TypeKind};
-use rustc_codegen_ssa::coverageinfo;
use rustc_codegen_ssa::glue;
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
use rustc_codegen_ssa::mir::place::PlaceRef;
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::MemFlags;
use rustc_hir as hir;
-use rustc_middle::mir::coverage;
-use rustc_middle::mir::Operand;
use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt};
use rustc_middle::ty::{self, Ty};
use rustc_middle::{bug, span_bug};
use rustc_span::{sym, symbol::kw, Span, Symbol};
use rustc_target::abi::{self, HasDataLayout, LayoutOf, Primitive};
use rustc_target::spec::PanicStrategy;
-use tracing::debug;
use std::cmp::Ordering;
use std::iter;
}
impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
- fn is_codegen_intrinsic(
- &mut self,
- intrinsic: Symbol,
- args: &Vec<Operand<'tcx>>,
- caller_instance: ty::Instance<'tcx>,
- ) -> bool {
- let mut is_codegen_intrinsic = true;
- // Set `is_codegen_intrinsic` to `false` to bypass `codegen_intrinsic_call()`.
-
- // FIXME(richkadel): Make sure to add coverage analysis tests on a crate with
- // external crate dependencies, where:
- // 1. Both binary and dependent crates are compiled with `-Zinstrument-coverage`
- // 2. Only binary is compiled with `-Zinstrument-coverage`
- // 3. Only dependent crates are compiled with `-Zinstrument-coverage`
- match intrinsic {
- sym::count_code_region => {
- use coverage::count_code_region_args::*;
- self.add_counter_region(
- caller_instance,
- op_to_u64(&args[FUNCTION_SOURCE_HASH]),
- op_to_u32(&args[COUNTER_ID]),
- coverageinfo::Region::new(
- op_to_str_slice(&args[FILE_NAME]),
- op_to_u32(&args[START_LINE]),
- op_to_u32(&args[START_COL]),
- op_to_u32(&args[END_LINE]),
- op_to_u32(&args[END_COL]),
- ),
- );
- }
- sym::coverage_counter_add | sym::coverage_counter_subtract => {
- is_codegen_intrinsic = false;
- use coverage::coverage_counter_expression_args::*;
- self.add_counter_expression_region(
- caller_instance,
- op_to_u32(&args[EXPRESSION_ID]),
- op_to_u32(&args[LEFT_ID]),
- if intrinsic == sym::coverage_counter_add {
- coverageinfo::ExprKind::Add
- } else {
- coverageinfo::ExprKind::Subtract
- },
- op_to_u32(&args[RIGHT_ID]),
- coverageinfo::Region::new(
- op_to_str_slice(&args[FILE_NAME]),
- op_to_u32(&args[START_LINE]),
- op_to_u32(&args[START_COL]),
- op_to_u32(&args[END_LINE]),
- op_to_u32(&args[END_COL]),
- ),
- );
- }
- sym::coverage_unreachable => {
- is_codegen_intrinsic = false;
- use coverage::coverage_unreachable_args::*;
- self.add_unreachable_region(
- caller_instance,
- coverageinfo::Region::new(
- op_to_str_slice(&args[FILE_NAME]),
- op_to_u32(&args[START_LINE]),
- op_to_u32(&args[START_COL]),
- op_to_u32(&args[END_LINE]),
- op_to_u32(&args[END_COL]),
- ),
- );
- }
- _ => {}
- }
- is_codegen_intrinsic
- }
-
fn codegen_intrinsic_call(
&mut self,
instance: ty::Instance<'tcx>,
args: &[OperandRef<'tcx, &'ll Value>],
llresult: &'ll Value,
span: Span,
- caller_instance: ty::Instance<'tcx>,
) {
let tcx = self.tcx;
let callee_ty = instance.ty(tcx, ty::ParamEnv::reveal_all());
let llfn = self.get_intrinsic(&("llvm.debugtrap"));
self.call(llfn, &[], None)
}
- sym::count_code_region => {
- use coverage::count_code_region_args::*;
- let coverageinfo = tcx.coverageinfo(caller_instance.def_id());
-
- let fn_name = self.create_pgo_func_name_var(caller_instance);
- let hash = args[FUNCTION_SOURCE_HASH].immediate();
- let num_counters = self.const_u32(coverageinfo.num_counters);
- let index = args[COUNTER_ID].immediate();
- debug!(
- "translating Rust intrinsic `count_code_region()` to LLVM intrinsic: \
- instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})",
- fn_name, hash, num_counters, index,
- );
- self.instrprof_increment(fn_name, hash, num_counters, index)
- }
sym::va_start => self.va_start(args[0].immediate()),
sym::va_end => self.va_end(args[0].immediate()),
sym::va_copy => {
_ => None,
}
}
-
-fn op_to_str_slice<'tcx>(op: &Operand<'tcx>) -> &'tcx str {
- Operand::value_from_const(op).try_to_str_slice().expect("Value is &str")
-}
-
-fn op_to_u32<'tcx>(op: &Operand<'tcx>) -> u32 {
- Operand::scalar_from_const(op).to_u32().expect("Scalar is u32")
-}
-
-fn op_to_u64<'tcx>(op: &Operand<'tcx>) -> u64 {
- Operand::scalar_from_const(op).to_u64().expect("Scalar is u64")
-}
-use super::map::{CounterValueReference, MappedExpressionIndex};
+use rustc_middle::mir::coverage::{CounterValueReference, MappedExpressionIndex};
/// Aligns with [llvm::coverage::Counter::CounterKind](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L91)
#[derive(Copy, Clone, Debug)]
pub use super::ffi::*;
use rustc_index::vec::IndexVec;
+use rustc_middle::mir::coverage::{
+ CodeRegion, CounterValueReference, ExpressionOperandId, InjectedExpressionIndex,
+ MappedExpressionIndex, Op,
+};
use rustc_middle::ty::Instance;
use rustc_middle::ty::TyCtxt;
-use std::cmp::Ord;
-
-rustc_index::newtype_index! {
- pub struct ExpressionOperandId {
- DEBUG_FORMAT = "ExpressionOperandId({})",
- MAX = 0xFFFF_FFFF,
- }
-}
-
-rustc_index::newtype_index! {
- pub struct CounterValueReference {
- DEBUG_FORMAT = "CounterValueReference({})",
- MAX = 0xFFFF_FFFF,
- }
-}
-
-rustc_index::newtype_index! {
- pub struct InjectedExpressionIndex {
- DEBUG_FORMAT = "InjectedExpressionIndex({})",
- MAX = 0xFFFF_FFFF,
- }
-}
-
-rustc_index::newtype_index! {
- pub struct MappedExpressionIndex {
- DEBUG_FORMAT = "MappedExpressionIndex({})",
- MAX = 0xFFFF_FFFF,
- }
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
-pub struct Region<'tcx> {
- pub file_name: &'tcx str,
- pub start_line: u32,
- pub start_col: u32,
- pub end_line: u32,
- pub end_col: u32,
-}
-
-impl<'tcx> Region<'tcx> {
- pub fn new(
- file_name: &'tcx str,
- start_line: u32,
- start_col: u32,
- end_line: u32,
- end_col: u32,
- ) -> Self {
- Self { file_name, start_line, start_col, end_line, end_col }
- }
-}
-
#[derive(Clone, Debug)]
-pub struct ExpressionRegion<'tcx> {
+pub struct ExpressionRegion {
lhs: ExpressionOperandId,
- op: ExprKind,
+ op: Op,
rhs: ExpressionOperandId,
- region: Region<'tcx>,
+ region: CodeRegion,
}
/// Collects all of the coverage regions associated with (a) injected counters, (b) counter
/// only whitespace or comments). According to LLVM Code Coverage Mapping documentation, "A count
/// for a gap area is only used as the line execution count if there are no other regions on a
/// line."
-pub struct FunctionCoverage<'tcx> {
+pub struct FunctionCoverage {
source_hash: u64,
- counters: IndexVec<CounterValueReference, Option<Region<'tcx>>>,
- expressions: IndexVec<InjectedExpressionIndex, Option<ExpressionRegion<'tcx>>>,
- unreachable_regions: Vec<Region<'tcx>>,
+ counters: IndexVec<CounterValueReference, Option<CodeRegion>>,
+ expressions: IndexVec<InjectedExpressionIndex, Option<ExpressionRegion>>,
+ unreachable_regions: Vec<CodeRegion>,
}
-impl<'tcx> FunctionCoverage<'tcx> {
- pub fn new(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self {
+impl FunctionCoverage {
+ pub fn new<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self {
let coverageinfo = tcx.coverageinfo(instance.def_id());
Self {
source_hash: 0, // will be set with the first `add_counter()`
/// Adds a code region to be counted by an injected counter intrinsic.
/// The source_hash (computed during coverage instrumentation) should also be provided, and
/// should be the same for all counters in a given function.
- pub fn add_counter(&mut self, source_hash: u64, id: u32, region: Region<'tcx>) {
+ pub fn add_counter(&mut self, source_hash: u64, id: CounterValueReference, region: CodeRegion) {
if self.source_hash == 0 {
self.source_hash = source_hash;
} else {
debug_assert_eq!(source_hash, self.source_hash);
}
- self.counters[CounterValueReference::from(id)]
- .replace(region)
- .expect_none("add_counter called with duplicate `id`");
+ self.counters[id].replace(region).expect_none("add_counter called with duplicate `id`");
}
/// Both counters and "counter expressions" (or simply, "expressions") can be operands in other
/// counters and expressions have been added.
pub fn add_counter_expression(
&mut self,
- id_descending_from_max: u32,
- lhs: u32,
- op: ExprKind,
- rhs: u32,
- region: Region<'tcx>,
+ expression_id: InjectedExpressionIndex,
+ lhs: ExpressionOperandId,
+ op: Op,
+ rhs: ExpressionOperandId,
+ region: CodeRegion,
) {
- let expression_id = ExpressionOperandId::from(id_descending_from_max);
- let lhs = ExpressionOperandId::from(lhs);
- let rhs = ExpressionOperandId::from(rhs);
-
- let expression_index = self.expression_index(expression_id);
+ let expression_index = self.expression_index(u32::from(expression_id));
self.expressions[expression_index]
.replace(ExpressionRegion { lhs, op, rhs, region })
            .expect_none("add_counter_expression called with duplicate `expression_id`");
}
/// Add a region that will be marked as "unreachable", with a constant "zero counter".
- pub fn add_unreachable_region(&mut self, region: Region<'tcx>) {
+ pub fn add_unreachable_region(&mut self, region: CodeRegion) {
self.unreachable_regions.push(region)
}
    /// Generate an array of CounterExpressions, and an iterator over all `Counter`s and their
    /// associated `CodeRegion`s (from which the LLVM-specific `CoverageMapGenerator` will create
    /// `CounterMappingRegion`s).
- pub fn get_expressions_and_counter_regions(
- &'tcx self,
- ) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'tcx Region<'tcx>)>) {
+ pub fn get_expressions_and_counter_regions<'a>(
+ &'a self,
+ ) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'a CodeRegion)>) {
assert!(self.source_hash != 0);
let counter_regions = self.counter_regions();
(counter_expressions, counter_regions)
}
- fn counter_regions(&'tcx self) -> impl Iterator<Item = (Counter, &'tcx Region<'tcx>)> {
+ fn counter_regions<'a>(&'a self) -> impl Iterator<Item = (Counter, &'a CodeRegion)> {
self.counters.iter_enumerated().filter_map(|(index, entry)| {
// Option::map() will return None to filter out missing counters. This may happen
// if, for example, a MIR-instrumented counter is removed during an optimization.
}
    fn expressions_with_regions<'a>(
- &'tcx self,
- ) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'tcx Region<'tcx>)>) {
+ &'a self,
+ ) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'a CodeRegion)>) {
let mut counter_expressions = Vec::with_capacity(self.expressions.len());
let mut expression_regions = Vec::with_capacity(self.expressions.len());
let mut new_indexes =
.as_ref()
.map(|_| Counter::counter_value_reference(index))
} else {
- let index = self.expression_index(id);
+ let index = self.expression_index(u32::from(id));
self.expressions
.get(index)
.expect("expression id is out of range")
// been assigned a `new_index`.
let mapped_expression_index =
MappedExpressionIndex::from(counter_expressions.len());
- counter_expressions.push(CounterExpression::new(lhs_counter, op, rhs_counter));
+ counter_expressions.push(CounterExpression::new(
+ lhs_counter,
+ match op {
+ Op::Add => ExprKind::Add,
+ Op::Subtract => ExprKind::Subtract,
+ },
+ rhs_counter,
+ ));
new_indexes[original_index] = mapped_expression_index;
expression_regions.push((Counter::expression(mapped_expression_index), region));
}
(counter_expressions, expression_regions.into_iter())
}
- fn unreachable_regions(&'tcx self) -> impl Iterator<Item = (Counter, &'tcx Region<'tcx>)> {
+ fn unreachable_regions<'a>(&'a self) -> impl Iterator<Item = (Counter, &'a CodeRegion)> {
self.unreachable_regions.iter().map(|region| (Counter::zero(), region))
}
- fn expression_index(
- &self,
- id_descending_from_max: ExpressionOperandId,
- ) -> InjectedExpressionIndex {
- debug_assert!(id_descending_from_max.index() >= self.counters.len());
- InjectedExpressionIndex::from(u32::MAX - u32::from(id_descending_from_max))
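+    // Expression IDs are assigned descending from `u32::MAX`, keeping them disjoint from
+    // counter IDs (which ascend from 0), so `u32::MAX - id` recovers the dense, zero-based
+    // expression index (e.g., the first injected expression, with id `u32::MAX`, maps to 0).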
+ fn expression_index(&self, id_descending_from_max: u32) -> InjectedExpressionIndex {
+ debug_assert!(id_descending_from_max >= self.counters.len() as u32);
+ InjectedExpressionIndex::from(u32::MAX - id_descending_from_max)
}
}
pub mod ffi;
pub mod map;
-
-pub use map::ExprKind;
-pub use map::Region;
if intrinsic.is_some() && intrinsic != Some(sym::drop_in_place) {
let intrinsic = intrinsic.unwrap();
-
- // `is_codegen_intrinsic()` allows the backend implementation to perform compile-time
- // operations before converting the `args` to backend values.
- if !bx.is_codegen_intrinsic(intrinsic, &args, self.instance) {
- // If the intrinsic call was fully addressed by the `is_codegen_intrinsic()` call
- // (as a compile-time operation), return immediately. This avoids the need to
- // convert the arguments, the call to `codegen_intrinsic_call()`, and the return
- // value handling.
- return;
- }
-
let dest = match ret_dest {
_ if fn_abi.ret.is_indirect() => llargs[0],
ReturnDest::Nothing => {
&args,
dest,
terminator.source_info.span,
- self.instance,
);
if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
--- /dev/null
+use crate::traits::*;
+
+use rustc_middle::mir::coverage::*;
+use rustc_middle::mir::Coverage;
+
+use super::FunctionCx;
+
+impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
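+    /// Lowers a MIR `Coverage` statement: every kind is recorded in the backend's coverage
+    /// map for the instrumented function, and the `Counter` kind additionally emits a call
+    /// to the `llvm.instrprof.increment` intrinsic at the current codegen position.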
+ pub fn codegen_coverage(&self, bx: &mut Bx, coverage: Coverage) {
+ let Coverage { kind, code_region } = coverage;
+ match kind {
+ CoverageKind::Counter { function_source_hash, id } => {
+ bx.add_counter_region(self.instance, function_source_hash, id, code_region);
+
+ let coverageinfo = bx.tcx().coverageinfo(self.instance.def_id());
+
+ let fn_name = bx.create_pgo_func_name_var(self.instance);
+ let hash = bx.const_u64(function_source_hash);
+ let num_counters = bx.const_u32(coverageinfo.num_counters);
+ let id = bx.const_u32(u32::from(id));
+ debug!(
+ "codegen intrinsic instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})",
+ fn_name, hash, num_counters, id,
+ );
+ bx.instrprof_increment(fn_name, hash, num_counters, id);
+ }
+ CoverageKind::Expression { id, lhs, op, rhs } => {
+ bx.add_counter_expression_region(self.instance, id, lhs, op, rhs, code_region);
+ }
+ CoverageKind::Unreachable => {
+ bx.add_unreachable_region(self.instance, code_region);
+ }
+ }
+ }
+}
mod analyze;
mod block;
pub mod constant;
+pub mod coverageinfo;
pub mod debuginfo;
pub mod operand;
pub mod place;
}
bx
}
+ mir::StatementKind::Coverage(box ref coverage) => {
+ self.codegen_coverage(&mut bx, coverage.clone());
+ bx
+ }
mir::StatementKind::FakeRead(..)
| mir::StatementKind::Retag { .. }
| mir::StatementKind::AscribeUserType(..)
hash: Self::Value,
num_counters: Self::Value,
index: Self::Value,
- ) -> Self::Value;
+ );
fn call(
&mut self,
use super::BackendTypes;
-use crate::coverageinfo::{ExprKind, Region};
+use rustc_middle::mir::coverage::*;
use rustc_middle::ty::Instance;
pub trait CoverageInfoMethods: BackendTypes {
&mut self,
instance: Instance<'tcx>,
function_source_hash: u64,
- index: u32,
- region: Region<'tcx>,
+ id: CounterValueReference,
+ region: CodeRegion,
);
fn add_counter_expression_region(
&mut self,
instance: Instance<'tcx>,
- index: u32,
- lhs: u32,
- op: ExprKind,
- rhs: u32,
- region: Region<'tcx>,
+ id: InjectedExpressionIndex,
+ lhs: ExpressionOperandId,
+ op: Op,
+ rhs: ExpressionOperandId,
+ region: CodeRegion,
);
- fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: Region<'tcx>);
+ fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: CodeRegion);
}
use super::BackendTypes;
use crate::mir::operand::OperandRef;
-use rustc_middle::mir::Operand;
use rustc_middle::ty::{self, Ty};
-use rustc_span::{Span, Symbol};
+use rustc_span::Span;
use rustc_target::abi::call::FnAbi;
pub trait IntrinsicCallMethods<'tcx>: BackendTypes {
args: &[OperandRef<'tcx, Self::Value>],
llresult: Self::Value,
span: Span,
- caller_instance: ty::Instance<'tcx>,
);
- /// Intrinsic-specific pre-codegen processing, if any is required. Some intrinsics are handled
- /// at compile time and do not generate code. Returns true if codegen is required or false if
- /// the intrinsic does not need code generation.
- fn is_codegen_intrinsic(
- &mut self,
- intrinsic: Symbol,
- args: &Vec<Operand<'tcx>>,
- caller_instance: ty::Instance<'tcx>,
- ) -> bool;
-
fn abort(&mut self);
fn assume(&mut self, val: Self::Value);
fn expect(&mut self, cond: Self::Value, expected: bool) -> Self::Value;
//! This API is completely unstable and subject to change.
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
+#![allow(incomplete_features)]
#![feature(in_band_lifetimes)]
#![feature(unboxed_closures)]
#![feature(generators)]
#![feature(associated_type_bounds)]
#![feature(thread_id_value)]
#![feature(extend_one)]
+#![feature(const_panic)]
+#![feature(const_generics)]
#![allow(rustc::default_hash_types)]
#[macro_use]
pub mod work_queue;
pub use atomic_ref::AtomicRef;
pub mod frozen;
+pub mod tagged_ptr;
pub mod temp_dir;
pub struct OnDrop<F: Fn()>(pub F);
--- /dev/null
+//! This module implements tagged pointers.
+//!
+//! In order to utilize the pointer packing, you must have two types: a pointer,
+//! and a tag.
+//!
+//! The pointer must implement the `Pointer` trait, with the primary requirement
+//! being conversion to and from a usize. Note that the pointer must be
+//! dereferenceable, so raw pointers generally cannot implement the `Pointer`
+//! trait. This implies that the pointer must also be nonzero.
+//!
+//! Many common pointer types already implement the `Pointer` trait.
+//!
+//! The tag must implement the `Tag` trait. We assert that the tag and `Pointer`
+//! are compatible at compile time.
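+//!
+//! As a rough usage sketch (the `TwoBitTag` type and the values below are illustrative,
+//! not part of this module):
+//!
+//! ```ignore (illustrative-sketch)
+//! use rustc_data_structures::tagged_ptr::{Tag, TaggedPtr};
+//!
+//! #[derive(Copy, Clone, Debug, PartialEq)]
+//! enum TwoBitTag {
+//!     A = 0,
+//!     B = 1,
+//!     C = 2,
+//! }
+//!
+//! // SAFETY: `BITS` is 2 and `into_usize` never sets more than the 2 low bits.
+//! unsafe impl Tag for TwoBitTag {
+//!     const BITS: usize = 2;
+//!     fn into_usize(self) -> usize {
+//!         self as usize
+//!     }
+//!     unsafe fn from_usize(tag: usize) -> Self {
+//!         match tag {
+//!             0 => TwoBitTag::A,
+//!             1 => TwoBitTag::B,
+//!             2 => TwoBitTag::C,
+//!             _ => unreachable!(),
+//!         }
+//!     }
+//! }
+//!
+//! // `Box<u64>` is 8-byte aligned, so its `Pointer::BITS` is 3: room for a 2-bit tag.
+//! let p = TaggedPtr::<_, _, true>::new(Box::new(41u64), TwoBitTag::B);
+//! assert_eq!(*p, 41);
+//! assert_eq!(p.tag(), TwoBitTag::B);
+//! ```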
+
+use std::mem::ManuallyDrop;
+use std::ops::Deref;
+use std::rc::Rc;
+use std::sync::Arc;
+
+mod copy;
+mod drop;
+
+pub use copy::CopyTaggedPtr;
+pub use drop::TaggedPtr;
+
+/// This describes the pointer type encapsulated by `TaggedPtr`.
+///
+/// # Safety
+///
+/// The usize returned from `into_usize` must be a valid, dereferenceable,
+/// pointer to `<Self as Deref>::Target`. Note that pointers to `Pointee` must
+/// be thin, even though `Pointee` may not be sized.
+///
+/// Note that the pointer returned from `into_usize` should be castable to `&mut
+/// <Self as Deref>::Target` if `Self: DerefMut`.
+///
+/// The `BITS` constant must be correct: at least the `BITS` least-significant bits must
+/// be zero on all pointers returned from `into_usize`.
+///
+/// For example, if the alignment of `Pointee` is 2, then `BITS` should be 1.
+pub unsafe trait Pointer: Deref {
+    /// Most likely the value you want to use here is the following, unless
+    /// your Pointee type is unsized (e.g., `ty::List<T>` in rustc), in which
+    /// case you'll need to manually figure out the right type to pass to
+    /// `align_of`.
+ ///
+ /// ```rust
+ /// std::mem::align_of::<<Self as Deref>::Target>().trailing_zeros() as usize;
+ /// ```
+ const BITS: usize;
+ fn into_usize(self) -> usize;
+
+ /// # Safety
+ ///
+ /// The passed `ptr` must be returned from `into_usize`.
+ ///
+    /// This acts like `ptr::read` semantically; it should not be called more
+    /// than once on non-`Copy` `Pointer`s.
+ unsafe fn from_usize(ptr: usize) -> Self;
+
+ /// This provides a reference to the `Pointer` itself, rather than the
+ /// `Deref::Target`. It is used for cases where we want to call methods that
+    /// may be implemented differently for the Pointer than for the Pointee (e.g.,
+ /// `Rc::clone` vs cloning the inner value).
+ ///
+ /// # Safety
+ ///
+ /// The passed `ptr` must be returned from `into_usize`.
+ unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R;
+}
+
+/// This describes tags that the `TaggedPtr` struct can hold.
+///
+/// # Safety
+///
+/// The BITS constant must be correct.
+///
+/// No more than `BITS` least significant bits may be set in the returned usize.
+pub unsafe trait Tag: Copy {
+ const BITS: usize;
+
+ fn into_usize(self) -> usize;
+
+ /// # Safety
+ ///
+ /// The passed `tag` must be returned from `into_usize`.
+ unsafe fn from_usize(tag: usize) -> Self;
+}
+
+unsafe impl<T> Pointer for Box<T> {
+ const BITS: usize = std::mem::align_of::<T>().trailing_zeros() as usize;
+ fn into_usize(self) -> usize {
+ Box::into_raw(self) as usize
+ }
+ unsafe fn from_usize(ptr: usize) -> Self {
+ Box::from_raw(ptr as *mut T)
+ }
+ unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
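+        // `from_usize` acts like `ptr::read`, so the reconstructed `Box` is wrapped in
+        // `ManuallyDrop` to keep this borrow-only helper from dropping (and freeing) it.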
+ let raw = ManuallyDrop::new(Self::from_usize(ptr));
+ f(&raw)
+ }
+}
+
+unsafe impl<T> Pointer for Rc<T> {
+ const BITS: usize = std::mem::align_of::<T>().trailing_zeros() as usize;
+ fn into_usize(self) -> usize {
+ Rc::into_raw(self) as usize
+ }
+ unsafe fn from_usize(ptr: usize) -> Self {
+ Rc::from_raw(ptr as *const T)
+ }
+ unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
+ let raw = ManuallyDrop::new(Self::from_usize(ptr));
+ f(&raw)
+ }
+}
+
+unsafe impl<T> Pointer for Arc<T> {
+ const BITS: usize = std::mem::align_of::<T>().trailing_zeros() as usize;
+ fn into_usize(self) -> usize {
+ Arc::into_raw(self) as usize
+ }
+ unsafe fn from_usize(ptr: usize) -> Self {
+ Arc::from_raw(ptr as *const T)
+ }
+ unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
+ let raw = ManuallyDrop::new(Self::from_usize(ptr));
+ f(&raw)
+ }
+}
+
+unsafe impl<'a, T: 'a> Pointer for &'a T {
+ const BITS: usize = std::mem::align_of::<T>().trailing_zeros() as usize;
+ fn into_usize(self) -> usize {
+ self as *const T as usize
+ }
+ unsafe fn from_usize(ptr: usize) -> Self {
+ &*(ptr as *const T)
+ }
+ unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
+ f(&*(&ptr as *const usize as *const Self))
+ }
+}
+
+unsafe impl<'a, T: 'a> Pointer for &'a mut T {
+ const BITS: usize = std::mem::align_of::<T>().trailing_zeros() as usize;
+ fn into_usize(self) -> usize {
+ self as *mut T as usize
+ }
+ unsafe fn from_usize(ptr: usize) -> Self {
+ &mut *(ptr as *mut T)
+ }
+ unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
+ f(&*(&ptr as *const usize as *const Self))
+ }
+}
--- /dev/null
+use super::{Pointer, Tag};
+use crate::stable_hasher::{HashStable, StableHasher};
+use std::fmt;
+use std::marker::PhantomData;
+use std::num::NonZeroUsize;
+
+/// A `Copy` TaggedPtr.
+///
+/// You should use this instead of the `TaggedPtr` type in all cases where
+/// `P: Copy`.
+///
+/// If `COMPARE_PACKED` is true, then the pointers will be compared and hashed without
+/// unpacking. Otherwise we don't implement PartialEq/Eq/Hash; if you want that,
+/// wrap the TaggedPtr.
+pub struct CopyTaggedPtr<P, T, const COMPARE_PACKED: bool>
+where
+ P: Pointer,
+ T: Tag,
+{
+ packed: NonZeroUsize,
+ data: PhantomData<(P, T)>,
+}
+
+impl<P, T, const COMPARE_PACKED: bool> Copy for CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer,
+ T: Tag,
+ P: Copy,
+{
+}
+
+impl<P, T, const COMPARE_PACKED: bool> Clone for CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer,
+ T: Tag,
+ P: Copy,
+{
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+// We pack the tag into the *upper* bits of the pointer to ease retrieval of the
+// value; a left shift is a multiplication and those are embeddable in
+// instruction encoding.
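+//
+// For example, on a 64-bit target with `T::BITS == 2`, the packed word is
+// `(ptr >> 2) | (tag << 62)`: the pointer's two always-zero low bits are shifted out,
+// and the tag occupies the top two bits. `pointer_raw` then recovers the pointer with a
+// single left shift (`packed << 2`), which simultaneously shifts the tag bits off the top.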
+impl<P, T, const COMPARE_PACKED: bool> CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer,
+ T: Tag,
+{
+ const TAG_BIT_SHIFT: usize = (8 * std::mem::size_of::<usize>()) - T::BITS;
+ const ASSERTION: () = {
+ assert!(T::BITS <= P::BITS);
+        // Used by the `transmute_copy` calls below
+ assert!(std::mem::size_of::<&P::Target>() == std::mem::size_of::<usize>());
+ };
+
+ pub fn new(pointer: P, tag: T) -> Self {
+ // Trigger assert!
+ let () = Self::ASSERTION;
+ let packed_tag = tag.into_usize() << Self::TAG_BIT_SHIFT;
+
+ Self {
+ // SAFETY: We know that the pointer is non-null, as it must be
+            // dereferenceable per the `Pointer` safety contract.
+ packed: unsafe {
+ NonZeroUsize::new_unchecked((P::into_usize(pointer) >> T::BITS) | packed_tag)
+ },
+ data: PhantomData,
+ }
+ }
+
+ pub(super) fn pointer_raw(&self) -> usize {
+ self.packed.get() << T::BITS
+ }
+ pub fn pointer(self) -> P
+ where
+ P: Copy,
+ {
+ // SAFETY: pointer_raw returns the original pointer
+ //
+ // Note that this isn't going to double-drop or anything because we have
+ // P: Copy
+ unsafe { P::from_usize(self.pointer_raw()) }
+ }
+ pub fn pointer_ref(&self) -> &P::Target {
+ // SAFETY: pointer_raw returns the original pointer
+ unsafe { std::mem::transmute_copy(&self.pointer_raw()) }
+ }
+ pub fn pointer_mut(&mut self) -> &mut P::Target
+ where
+ P: std::ops::DerefMut,
+ {
+ // SAFETY: pointer_raw returns the original pointer
+ unsafe { std::mem::transmute_copy(&self.pointer_raw()) }
+ }
+ pub fn tag(&self) -> T {
+ unsafe { T::from_usize(self.packed.get() >> Self::TAG_BIT_SHIFT) }
+ }
+ pub fn set_tag(&mut self, tag: T) {
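+        // Clear the old tag from the top `T::BITS` bits of the word, then OR in the new
+        // tag, shifted into position.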
+ let mut packed = self.packed.get();
+ let new_tag = T::into_usize(tag) << Self::TAG_BIT_SHIFT;
+ let tag_mask = (1 << T::BITS) - 1;
+ packed &= !(tag_mask << Self::TAG_BIT_SHIFT);
+ packed |= new_tag;
+ self.packed = unsafe { NonZeroUsize::new_unchecked(packed) };
+ }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> std::ops::Deref for CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer,
+ T: Tag,
+{
+ type Target = P::Target;
+ fn deref(&self) -> &Self::Target {
+ self.pointer_ref()
+ }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> std::ops::DerefMut for CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer + std::ops::DerefMut,
+ T: Tag,
+{
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.pointer_mut()
+ }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> fmt::Debug for CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer,
+ P::Target: fmt::Debug,
+ T: Tag + fmt::Debug,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("CopyTaggedPtr")
+ .field("pointer", &self.pointer_ref())
+ .field("tag", &self.tag())
+ .finish()
+ }
+}
+
+impl<P, T> PartialEq for CopyTaggedPtr<P, T, true>
+where
+ P: Pointer,
+ T: Tag,
+{
+ fn eq(&self, other: &Self) -> bool {
+ self.packed == other.packed
+ }
+}
+
+impl<P, T> Eq for CopyTaggedPtr<P, T, true>
+where
+ P: Pointer,
+ T: Tag,
+{
+}
+
+impl<P, T> std::hash::Hash for CopyTaggedPtr<P, T, true>
+where
+ P: Pointer,
+ T: Tag,
+{
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ self.packed.hash(state);
+ }
+}
+
+impl<P, T, HCX, const COMPARE_PACKED: bool> HashStable<HCX> for CopyTaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer + HashStable<HCX>,
+ T: Tag + HashStable<HCX>,
+{
+ fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) {
+ unsafe {
+ Pointer::with_ref(self.pointer_raw(), |p: &P| p.hash_stable(hcx, hasher));
+ }
+ self.tag().hash_stable(hcx, hasher);
+ }
+}
--- /dev/null
+use super::{Pointer, Tag};
+use crate::stable_hasher::{HashStable, StableHasher};
+use std::fmt;
+
+use super::CopyTaggedPtr;
+
+/// A tagged pointer that owns its pointee and implements `Drop`.
+///
+/// If `COMPARE_PACKED` is true, then the pointers will be compared and hashed without
+/// unpacking. Otherwise we don't implement PartialEq/Eq/Hash; if you want that,
+/// wrap the TaggedPtr.
+pub struct TaggedPtr<P, T, const COMPARE_PACKED: bool>
+where
+ P: Pointer,
+ T: Tag,
+{
+ raw: CopyTaggedPtr<P, T, COMPARE_PACKED>,
+}
+
+impl<P, T, const COMPARE_PACKED: bool> Clone for TaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer + Clone,
+ T: Tag,
+{
+ fn clone(&self) -> Self {
+ unsafe { Self::new(P::with_ref(self.raw.pointer_raw(), |p| p.clone()), self.raw.tag()) }
+ }
+}
+
+// We pack the tag into the *upper* bits of the pointer to ease retrieval of the
+// value; a left shift is a multiplication and those are embeddable in
+// instruction encoding.
+impl<P, T, const COMPARE_PACKED: bool> TaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer,
+ T: Tag,
+{
+ pub fn new(pointer: P, tag: T) -> Self {
+ TaggedPtr { raw: CopyTaggedPtr::new(pointer, tag) }
+ }
+
+ pub fn pointer_ref(&self) -> &P::Target {
+ self.raw.pointer_ref()
+ }
+ pub fn pointer_mut(&mut self) -> &mut P::Target
+ where
+ P: std::ops::DerefMut,
+ {
+ self.raw.pointer_mut()
+ }
+ pub fn tag(&self) -> T {
+ self.raw.tag()
+ }
+ pub fn set_tag(&mut self, tag: T) {
+ self.raw.set_tag(tag);
+ }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> std::ops::Deref for TaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer,
+ T: Tag,
+{
+ type Target = P::Target;
+ fn deref(&self) -> &Self::Target {
+ self.raw.pointer_ref()
+ }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> std::ops::DerefMut for TaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer + std::ops::DerefMut,
+ T: Tag,
+{
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ self.raw.pointer_mut()
+ }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> Drop for TaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer,
+ T: Tag,
+{
+ fn drop(&mut self) {
+ // No need to drop the tag, as it's Copy
+ unsafe {
+ std::mem::drop(P::from_usize(self.raw.pointer_raw()));
+ }
+ }
+}
+
+impl<P, T, const COMPARE_PACKED: bool> fmt::Debug for TaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer,
+ P::Target: fmt::Debug,
+ T: Tag + fmt::Debug,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("TaggedPtr")
+ .field("pointer", &self.pointer_ref())
+ .field("tag", &self.tag())
+ .finish()
+ }
+}
+
+impl<P, T> PartialEq for TaggedPtr<P, T, true>
+where
+ P: Pointer,
+ T: Tag,
+{
+ fn eq(&self, other: &Self) -> bool {
+ self.raw.eq(&other.raw)
+ }
+}
+
+impl<P, T> Eq for TaggedPtr<P, T, true>
+where
+ P: Pointer,
+ T: Tag,
+{
+}
+
+impl<P, T> std::hash::Hash for TaggedPtr<P, T, true>
+where
+ P: Pointer,
+ T: Tag,
+{
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ self.raw.hash(state);
+ }
+}
+
+impl<P, T, HCX, const COMPARE_PACKED: bool> HashStable<HCX> for TaggedPtr<P, T, COMPARE_PACKED>
+where
+ P: Pointer + HashStable<HCX>,
+ T: Tag + HashStable<HCX>,
+{
+ fn hash_stable(&self, hcx: &mut HCX, hasher: &mut StableHasher) {
+ self.raw.hash_stable(hcx, hasher);
+ }
+}
+++ /dev/null
-//! Validity checking for fake lang items
-
-use crate::def_id::DefId;
-use crate::{lang_items, LangItem, LanguageItems};
-
-use rustc_data_structures::fx::FxHashMap;
-use rustc_span::symbol::{sym, Symbol};
-
-use lazy_static::lazy_static;
-
-macro_rules! fake_lang_items {
- ($($item:ident, $name:ident, $method:ident;)*) => (
-
-lazy_static! {
- pub static ref FAKE_ITEMS_REFS: FxHashMap<Symbol, LangItem> = {
- let mut map = FxHashMap::default();
- $(map.insert(sym::$name, lang_items::$item);)*
- map
- };
-}
-
-impl LanguageItems {
- pub fn is_fake_lang_item(&self, item_def_id: DefId) -> bool {
- let did = Some(item_def_id);
-
- $(self.$method() == did)||*
- }
-}
-
-) }
-
-fake_lang_items! {
-// Variant name, Symbol, Method name,
- CountCodeRegionFnLangItem, count_code_region, count_code_region_fn;
- CoverageCounterAddFnLangItem, coverage_counter_add, coverage_counter_add_fn;
- CoverageCounterSubtractFnLangItem, coverage_counter_subtract, coverage_counter_subtract_fn;
-}
TryTraitLangItem, kw::Try, try_trait, Target::Trait;
- // language items related to source code coverage instrumentation (-Zinstrument-coverage)
- CountCodeRegionFnLangItem, sym::count_code_region, count_code_region_fn, Target::Fn;
- CoverageCounterAddFnLangItem, sym::coverage_counter_add, coverage_counter_add_fn, Target::Fn;
- CoverageCounterSubtractFnLangItem, sym::coverage_counter_subtract, coverage_counter_subtract_fn, Target::Fn;
-
// Language items from AST lowering
TryFromError, sym::from_error, from_error_fn, Target::Method(MethodKind::Trait { body: false });
TryFromOk, sym::from_ok, from_ok_fn, Target::Method(MethodKind::Trait { body: false });
pub mod def;
pub mod definitions;
pub use rustc_span::def_id;
-pub mod fake_lang_items;
mod hir;
pub mod hir_id;
pub mod intravisit;
//! Metadata from source code coverage analysis and instrumentation.
-/// Positional arguments to `libcore::count_code_region()`
-pub mod count_code_region_args {
- pub const FUNCTION_SOURCE_HASH: usize = 0;
- pub const COUNTER_ID: usize = 1;
- pub const FILE_NAME: usize = 2;
- pub const START_LINE: usize = 3;
- pub const START_COL: usize = 4;
- pub const END_LINE: usize = 5;
- pub const END_COL: usize = 6;
-}
-
-/// Positional arguments to `libcore::coverage_counter_add()` and
-/// `libcore::coverage_counter_subtract()`
-pub mod coverage_counter_expression_args {
- pub const EXPRESSION_ID: usize = 0;
- pub const LEFT_ID: usize = 1;
- pub const RIGHT_ID: usize = 2;
- pub const FILE_NAME: usize = 3;
- pub const START_LINE: usize = 4;
- pub const START_COL: usize = 5;
- pub const END_LINE: usize = 6;
- pub const END_COL: usize = 7;
-}
-
-/// Positional arguments to `libcore::coverage_unreachable()`
-pub mod coverage_unreachable_args {
- pub const FILE_NAME: usize = 0;
- pub const START_LINE: usize = 1;
- pub const START_COL: usize = 2;
- pub const END_LINE: usize = 3;
- pub const END_COL: usize = 4;
+use rustc_macros::HashStable;
+use rustc_span::Symbol;
+
+use std::cmp::Ord;
+use std::fmt::{self, Debug, Formatter};
+
+rustc_index::newtype_index! {
+ pub struct ExpressionOperandId {
+ derive [HashStable]
+ DEBUG_FORMAT = "ExpressionOperandId({})",
+ MAX = 0xFFFF_FFFF,
+ }
+}
+
+rustc_index::newtype_index! {
+ pub struct CounterValueReference {
+ derive [HashStable]
+ DEBUG_FORMAT = "CounterValueReference({})",
+ MAX = 0xFFFF_FFFF,
+ }
+}
+
+rustc_index::newtype_index! {
+ pub struct InjectedExpressionIndex {
+ derive [HashStable]
+ DEBUG_FORMAT = "InjectedExpressionIndex({})",
+ MAX = 0xFFFF_FFFF,
+ }
+}
+
+rustc_index::newtype_index! {
+ pub struct MappedExpressionIndex {
+ derive [HashStable]
+ DEBUG_FORMAT = "MappedExpressionIndex({})",
+ MAX = 0xFFFF_FFFF,
+ }
+}
+
+impl From<CounterValueReference> for ExpressionOperandId {
+ #[inline]
+ fn from(v: CounterValueReference) -> ExpressionOperandId {
+ ExpressionOperandId::from(v.as_u32())
+ }
+}
+
+impl From<InjectedExpressionIndex> for ExpressionOperandId {
+ #[inline]
+ fn from(v: InjectedExpressionIndex) -> ExpressionOperandId {
+ ExpressionOperandId::from(v.as_u32())
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, TyEncodable, TyDecodable, HashStable, TypeFoldable)]
+pub enum CoverageKind {
+ Counter {
+ function_source_hash: u64,
+ id: CounterValueReference,
+ },
+ Expression {
+ id: InjectedExpressionIndex,
+ lhs: ExpressionOperandId,
+ op: Op,
+ rhs: ExpressionOperandId,
+ },
+ Unreachable,
+}
+
+impl CoverageKind {
+ pub fn as_operand_id(&self) -> ExpressionOperandId {
+ match *self {
+ CoverageKind::Counter { id, .. } => ExpressionOperandId::from(id),
+ CoverageKind::Expression { id, .. } => ExpressionOperandId::from(id),
+ CoverageKind::Unreachable => {
+ bug!("Unreachable coverage cannot be part of an expression")
+ }
+ }
+ }
+}
+
+#[derive(Clone, TyEncodable, TyDecodable, HashStable, TypeFoldable, PartialEq, Eq, PartialOrd, Ord)]
+pub struct CodeRegion {
+ pub file_name: Symbol,
+ pub start_line: u32,
+ pub start_col: u32,
+ pub end_line: u32,
+ pub end_col: u32,
+}
+
+impl Debug for CodeRegion {
+ fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
+ write!(
+ fmt,
+ "{}:{}:{} - {}:{}",
+ self.file_name, self.start_line, self.start_col, self.end_line, self.end_col
+ )
+ }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, TyEncodable, TyDecodable, HashStable, TypeFoldable)]
+pub enum Op {
+ Subtract,
+ Add,
}
//!
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/mir/index.html
+use crate::mir::coverage::{CodeRegion, CoverageKind};
use crate::mir::interpret::{Allocation, ConstValue, GlobalAlloc, Scalar};
use crate::mir::visit::MirVisitable;
use crate::ty::adjustment::PointerCast;
use rustc_data_structures::graph::{self, GraphSuccessors};
use rustc_index::bit_set::BitMatrix;
use rustc_index::vec::{Idx, IndexVec};
-use rustc_macros::HashStable;
use rustc_serialize::{Decodable, Encodable};
use rustc_span::symbol::Symbol;
use rustc_span::{Span, DUMMY_SP};
/// - `Bivariant` -- no effect
AscribeUserType(Box<(Place<'tcx>, UserTypeProjection)>, ty::Variance),
+    /// Marks the start of a "coverage region", injected with `-Zinstrument-coverage`. A
+    /// `Coverage` statement carries metadata about the covered code region, used to inject
+    /// a coverage map into the binary. The `Counter` kind also generates executable code,
+    /// to increment a counter variable at runtime each time the code region is executed.
+ Coverage(Box<Coverage>),
+
/// No-op. Useful for deleting instructions without affecting statement indices.
Nop,
}
AscribeUserType(box (ref place, ref c_ty), ref variance) => {
write!(fmt, "AscribeUserType({:?}, {:?}, {:?})", place, variance, c_ty)
}
+ Coverage(box ref coverage) => write!(fmt, "{:?}", coverage),
Nop => write!(fmt, "nop"),
}
}
}
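+/// The payload of a `StatementKind::Coverage` statement: a `CoverageKind` paired with the
+/// source `CodeRegion` it applies to.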
+#[derive(Clone, Debug, PartialEq, TyEncodable, TyDecodable, HashStable, TypeFoldable)]
+pub struct Coverage {
+ pub kind: CoverageKind,
+ pub code_region: CodeRegion,
+}
+
///////////////////////////////////////////////////////////////////////////
// Places
self.super_ascribe_user_ty(place, variance, user_ty, location);
}
+ fn visit_coverage(&mut self,
+ coverage: & $($mutability)? Coverage,
+ location: Location) {
+ self.super_coverage(coverage, location);
+ }
+
fn visit_retag(&mut self,
kind: & $($mutability)? RetagKind,
place: & $($mutability)? Place<'tcx>,
) => {
self.visit_ascribe_user_ty(place, variance, user_ty, location);
}
+ StatementKind::Coverage(coverage) => {
+ self.visit_coverage(
+ coverage,
+ location
+ )
+ }
StatementKind::Nop => {}
}
}
self.visit_user_type_projection(user_ty);
}
+ fn super_coverage(&mut self,
+ _kind: & $($mutability)? Coverage,
+ _location: Location) {
+ }
+
fn super_retag(&mut self,
_kind: & $($mutability)? RetagKind,
place: & $($mutability)? Place<'tcx>,
StorageDead,
/// User type annotation assertions for NLL.
AscribeUserTy,
+ /// Coverage code region and counter metadata.
+ Coverage,
/// The data of an user variable, for debug info.
VarDebugInfo,
}
opaque: OpaqueListContents,
}
+unsafe impl<'a, T: 'a> rustc_data_structures::tagged_ptr::Pointer for &'a List<T> {
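+    // A `List<T>` is `repr(C)` and begins with a `usize` length field, so references to it
+    // are aligned to at least `align_of::<usize>()`; `align_of` also cannot be applied to
+    // the unsized `List<T>` itself, hence `usize` here.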
+ const BITS: usize = std::mem::align_of::<usize>().trailing_zeros() as usize;
+ fn into_usize(self) -> usize {
+ self as *const List<T> as usize
+ }
+ unsafe fn from_usize(ptr: usize) -> Self {
+ &*(ptr as *const List<T>)
+ }
+ unsafe fn with_ref<R, F: FnOnce(&Self) -> R>(ptr: usize, f: F) -> R {
+ // Self: Copy so this is fine
+ let ptr = Self::from_usize(ptr);
+ f(&ptr)
+ }
+}
+
unsafe impl<T: Sync> Sync for List<T> {}
impl<T: Copy> List<T> {
use rustc_data_structures::sorted_map::SortedIndexMultiMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{self, par_iter, ParallelIterator};
+use rustc_data_structures::tagged_ptr::CopyTaggedPtr;
use rustc_errors::ErrorReported;
use rustc_hir as hir;
use rustc_hir::def::{CtorKind, CtorOf, DefKind, Namespace, Res};
use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
-use std::marker::PhantomData;
use std::ops::Range;
use std::ptr;
use std::str;
/// When type checking, we use the `ParamEnv` to track
/// details about the set of where-clauses that are in scope at this
/// particular point.
-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub struct ParamEnv<'tcx> {
- // We pack the caller_bounds List pointer and a Reveal enum into this usize.
- // Specifically, the low bit represents Reveal, with 0 meaning `UserFacing`
- // and 1 meaning `All`. The rest is the pointer.
- //
- // This relies on the List<Predicate<'tcx>> type having at least 2-byte
- // alignment. Lists start with a usize and are repr(C) so this should be
- // fine; there is a debug_assert in the constructor as well.
- //
- // Note that the choice of 0 for UserFacing is intentional -- since it is the
- // first variant in Reveal this means that joining the pointer is a simple `or`.
- packed_data: usize,
-
- /// `Obligation`s that the caller must satisfy. This is basically
- /// the set of bounds on the in-scope type parameters, translated
+ /// This packs both caller bounds and the reveal enum into one pointer.
+ ///
+ /// Caller bounds are `Obligation`s that the caller must satisfy. This is
+ /// basically the set of bounds on the in-scope type parameters, translated
/// into `Obligation`s, and elaborated and normalized.
///
- /// Note: This is packed into the `packed_data` usize above, use the
- /// `caller_bounds()` method to access it.
- caller_bounds: PhantomData<&'tcx List<Predicate<'tcx>>>,
-
+ /// Use the `caller_bounds()` method to access.
+ ///
    /// The reveal mode is typically `Reveal::UserFacing`, but during codegen we
/// want `Reveal::All`.
///
- /// Note: This is packed into the caller_bounds usize above, use the reveal()
- /// method to access it.
- reveal: PhantomData<traits::Reveal>,
+    /// Note: This is packed; use the `reveal()` method to access it.
+ packed: CopyTaggedPtr<&'tcx List<Predicate<'tcx>>, traits::Reveal, true>,
/// If this `ParamEnv` comes from a call to `tcx.param_env(def_id)`,
/// register that `def_id` (useful for transitioning to the chalk trait
pub def_id: Option<DefId>,
}
+unsafe impl rustc_data_structures::tagged_ptr::Tag for traits::Reveal {
+ const BITS: usize = 1;
+ fn into_usize(self) -> usize {
+ match self {
+ traits::Reveal::UserFacing => 0,
+ traits::Reveal::All => 1,
+ }
+ }
+ unsafe fn from_usize(ptr: usize) -> Self {
+ match ptr {
+ 0 => traits::Reveal::UserFacing,
+ 1 => traits::Reveal::All,
+ _ => std::hint::unreachable_unchecked(),
+ }
+ }
+}
+
impl<'tcx> fmt::Debug for ParamEnv<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("ParamEnv")
}
}
-impl<'tcx> Hash for ParamEnv<'tcx> {
- fn hash<H: Hasher>(&self, state: &mut H) {
- // List hashes as the raw pointer, so we can skip splitting into the
- // pointer and the enum.
- self.packed_data.hash(state);
- self.def_id.hash(state);
- }
-}
-
-impl<'tcx> PartialEq for ParamEnv<'tcx> {
- fn eq(&self, other: &Self) -> bool {
- self.caller_bounds() == other.caller_bounds()
- && self.reveal() == other.reveal()
- && self.def_id == other.def_id
- }
-}
-impl<'tcx> Eq for ParamEnv<'tcx> {}
-
impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for ParamEnv<'tcx> {
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
self.caller_bounds().hash_stable(hcx, hasher);
#[inline]
pub fn caller_bounds(self) -> &'tcx List<Predicate<'tcx>> {
- // mask out bottom bit
- unsafe { &*((self.packed_data & (!1)) as *const _) }
+ self.packed.pointer()
}
#[inline]
pub fn reveal(self) -> traits::Reveal {
- if self.packed_data & 1 == 0 { traits::Reveal::UserFacing } else { traits::Reveal::All }
+ self.packed.tag()
}
/// Construct a trait environment with no where-clauses in scope
reveal: Reveal,
def_id: Option<DefId>,
) -> Self {
- let packed_data = caller_bounds as *const _ as usize;
- // Check that we can pack the reveal data into the pointer.
- debug_assert!(packed_data & 1 == 0);
- ty::ParamEnv {
- packed_data: packed_data
- | match reveal {
- Reveal::UserFacing => 0,
- Reveal::All => 1,
- },
- caller_bounds: PhantomData,
- reveal: PhantomData,
- def_id,
- }
+ ty::ParamEnv { packed: CopyTaggedPtr::new(caller_bounds, reveal), def_id }
}
pub fn with_user_facing(mut self) -> Self {
- // clear bottom bit
- self.packed_data &= !1;
+ self.packed.set_tag(Reveal::UserFacing);
self
}
/// will be normalized to their underlying types.
/// See PR #65989 and issue #65918 for more details
pub fn with_reveal_all_normalized(self, tcx: TyCtxt<'tcx>) -> Self {
- if self.packed_data & 1 == 1 {
+ if self.packed.tag() == traits::Reveal::All {
return self;
}
bool,
usize,
::rustc_target::abi::VariantIdx,
+ u32,
u64,
String,
crate::middle::region::Scope,
::rustc_hir::Unsafety,
::rustc_target::asm::InlineAsmRegOrRegClass,
::rustc_target::spec::abi::Abi,
+ crate::mir::coverage::ExpressionOperandId,
+ crate::mir::coverage::CounterValueReference,
+ crate::mir::coverage::InjectedExpressionIndex,
+ crate::mir::coverage::MappedExpressionIndex,
crate::mir::Local,
crate::mir::Promoted,
crate::traits::Reveal,
PlaceContext::MutatingUse(MutatingUseContext::Drop) =>
Some(DefUse::Drop),
- // Debug info is neither def nor use.
+ // Coverage and debug info are neither def nor use.
+ PlaceContext::NonUse(NonUseContext::Coverage) |
PlaceContext::NonUse(NonUseContext::VarDebugInfo) => None,
}
}
}
}
StatementKind::Nop
+ | StatementKind::Coverage(..)
| StatementKind::AscribeUserType(..)
| StatementKind::Retag { .. }
| StatementKind::StorageLive(..) => {
}
}
StatementKind::Nop
+ | StatementKind::Coverage(..)
| StatementKind::AscribeUserType(..)
| StatementKind::Retag { .. }
| StatementKind::StorageLive(..) => {
| StatementKind::StorageDead(..)
| StatementKind::LlvmInlineAsm { .. }
| StatementKind::Retag { .. }
+ | StatementKind::Coverage(..)
| StatementKind::Nop => {}
}
}
| mir::StatementKind::StorageLive(..)
| mir::StatementKind::Retag { .. }
| mir::StatementKind::AscribeUserType(..)
+ | mir::StatementKind::Coverage(..)
| mir::StatementKind::Nop => {}
}
}
PlaceContext::NonUse(
NonUseContext::StorageLive
| NonUseContext::AscribeUserTy
+ | NonUseContext::Coverage
| NonUseContext::VarDebugInfo,
)
| PlaceContext::NonMutatingUse(
// Nothing to do for these. Match exhaustively so this fails to compile when new
// variants are added.
StatementKind::AscribeUserType(..)
+ | StatementKind::Coverage(..)
| StatementKind::FakeRead(..)
| StatementKind::Nop
| StatementKind::Retag(..)
}
StatementKind::Retag { .. }
| StatementKind::AscribeUserType(..)
+ | StatementKind::Coverage(..)
| StatementKind::Nop => {}
}
}
// These just return their argument
self.copy_op(args[0], dest)?;
}
- // FIXME(#73156): Handle source code coverage in const eval
- sym::count_code_region
- | sym::coverage_counter_add
- | sym::coverage_counter_subtract
- | sym::coverage_unreachable => (),
_ => return Ok(false),
}
// Statements we do not track.
AscribeUserType(..) => {}
+ // Currently, Miri discards Coverage statements. Coverage statements are only injected
+ // via an optional compile time MIR pass and have no side effects. Since Coverage
+ // statements don't exist at the source level, it is safe for Miri to ignore them, even
+ // for undefined behavior (UB) checks.
+ //
+ // A coverage counter inside a const expression (for example, a counter injected in a
+ // const function) is discarded when the const is evaluated at compile time. Whether
+    // this should change, and/or how to implement a const eval counter, is the subject of the
+ // following issue:
+ //
+ // FIXME(#73156): Handle source code coverage in const eval
+ Coverage(..) => {}
+
// Defined to do nothing. These are added by optimization passes, to avoid changing the
// size of MIR constantly.
Nop => {}
| StatementKind::StorageDead(_)
| StatementKind::Retag { .. }
| StatementKind::AscribeUserType(..)
+ | StatementKind::Coverage(..)
| StatementKind::Nop => {}
}
}
| StatementKind::StorageDead(..)
| StatementKind::Retag { .. }
| StatementKind::AscribeUserType(..)
+ | StatementKind::Coverage(..)
| StatementKind::Nop => {
// safe (at least as emitted during MIR construction)
}
| StatementKind::StorageDead(_)
| StatementKind::Retag(..)
| StatementKind::AscribeUserType(..)
+ | StatementKind::Coverage(..)
| StatementKind::Nop => {}
}
}
use crate::transform::{MirPass, MirSource};
-use crate::util::patch::MirPatch;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
-use rustc_hir::lang_items;
use rustc_middle::hir;
use rustc_middle::ich::StableHashingContext;
use rustc_middle::mir;
use rustc_middle::mir::coverage::*;
-use rustc_middle::mir::interpret::Scalar;
-use rustc_middle::mir::traversal;
-use rustc_middle::mir::{
- BasicBlock, BasicBlockData, CoverageInfo, Operand, Place, SourceInfo, SourceScope,
- StatementKind, Terminator, TerminatorKind,
-};
-use rustc_middle::ty;
+use rustc_middle::mir::visit::Visitor;
+use rustc_middle::mir::{BasicBlock, Coverage, CoverageInfo, Location, Statement, StatementKind};
use rustc_middle::ty::query::Providers;
-use rustc_middle::ty::{FnDef, TyCtxt};
+use rustc_middle::ty::TyCtxt;
use rustc_span::def_id::DefId;
-use rustc_span::{FileName, Pos, RealFileName, Span};
+use rustc_span::{FileName, Pos, RealFileName, Span, Symbol};
/// Injects MIR `Coverage` statements as placeholders, to be replaced during code generation
/// with the LLVM intrinsic `llvm.instrprof.increment`.
providers.coverageinfo = |tcx, def_id| coverageinfo_from_mir(tcx, def_id);
}
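+/// Walks the MIR body's `Coverage` statements and computes `num_counters` and
+/// `num_expressions` as one more than the highest counter ID and expression index found.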
+struct CoverageVisitor {
+ info: CoverageInfo,
+}
+
+impl Visitor<'_> for CoverageVisitor {
+ fn visit_coverage(&mut self, coverage: &Coverage, _location: Location) {
+ match coverage.kind {
+ CoverageKind::Counter { id, .. } => {
+ let counter_id = u32::from(id);
+ self.info.num_counters = std::cmp::max(self.info.num_counters, counter_id + 1);
+ }
+ CoverageKind::Expression { id, .. } => {
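+                // Expression IDs are assigned descending from `u32::MAX`, so they are disjoint
+                // from counter IDs. Convert the ID back into a zero-based index, which the
+                // backend uses to size and index its expression vectors.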
+ let expression_index = u32::MAX - u32::from(id);
+ self.info.num_expressions =
+ std::cmp::max(self.info.num_expressions, expression_index + 1);
+ }
+ _ => {}
+ }
+ }
+}
+
fn coverageinfo_from_mir<'tcx>(tcx: TyCtxt<'tcx>, mir_def_id: DefId) -> CoverageInfo {
let mir_body = tcx.optimized_mir(mir_def_id);
- // FIXME(richkadel): The current implementation assumes the MIR for the given DefId
- // represents a single function. Validate and/or correct if inlining (which should be disabled
- // if -Zinstrument-coverage is enabled) and/or monomorphization invalidates these assumptions.
- let count_code_region_fn = tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None);
- let coverage_counter_add_fn =
- tcx.require_lang_item(lang_items::CoverageCounterAddFnLangItem, None);
- let coverage_counter_subtract_fn =
- tcx.require_lang_item(lang_items::CoverageCounterSubtractFnLangItem, None);
    // The `num_counters` argument to `llvm.instrprof.increment` is the number of injected
    // counters, with each counter having a counter ID from `0..num_counters-1`. MIR optimization
    // may split and duplicate some BasicBlock sequences, so simply counting injection sites is
    // not reliable; instead, `num_counters` is one more than the highest counter ID found.
    // `num_expressions` is the number of counter expressions added to the MIR body. Both
    // `num_counters` and `num_expressions` are used to initialize new vectors, during backend
    // code generation, to look up counters and expressions by simple u32 indexes.
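    // For example, a body with counter IDs 0, 1, and 2 yields `num_counters == 3`; a body with
    // expression IDs `u32::MAX` and `u32::MAX - 1` yields `num_expressions == 2`.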
- let mut num_counters: u32 = 0;
- let mut num_expressions: u32 = 0;
- for terminator in
- traversal::preorder(mir_body).map(|(_, data)| data).filter_map(call_terminators)
- {
- if let TerminatorKind::Call { func: Operand::Constant(func), args, .. } = &terminator.kind {
- match func.literal.ty.kind {
- FnDef(id, _) if id == count_code_region_fn => {
- let counter_id_arg =
- args.get(count_code_region_args::COUNTER_ID).expect("arg found");
- let counter_id = mir::Operand::scalar_from_const(counter_id_arg)
- .to_u32()
- .expect("counter_id arg is u32");
- num_counters = std::cmp::max(num_counters, counter_id + 1);
- }
- FnDef(id, _)
- if id == coverage_counter_add_fn || id == coverage_counter_subtract_fn =>
- {
- let expression_id_arg = args
- .get(coverage_counter_expression_args::EXPRESSION_ID)
- .expect("arg found");
- let id_descending_from_max = mir::Operand::scalar_from_const(expression_id_arg)
- .to_u32()
- .expect("expression_id arg is u32");
- // Counter expressions are initially assigned IDs descending from `u32::MAX`, so
- // the range of expression IDs is disjoint from the range of counter IDs. This
- // way, both counters and expressions can be operands in other expressions.
- let expression_index = u32::MAX - id_descending_from_max;
- num_expressions = std::cmp::max(num_expressions, expression_index + 1);
- }
- _ => {}
- }
- }
- }
- CoverageInfo { num_counters, num_expressions }
-}
+ let mut coverage_visitor =
+ CoverageVisitor { info: CoverageInfo { num_counters: 0, num_expressions: 0 } };
-fn call_terminators(data: &'tcx BasicBlockData<'tcx>) -> Option<&'tcx Terminator<'tcx>> {
- let terminator = data.terminator();
- match terminator.kind {
- TerminatorKind::Call { .. } => Some(terminator),
- _ => None,
- }
+ coverage_visitor.visit_body(mir_body);
+ coverage_visitor.info
}
impl<'tcx> MirPass<'tcx> for InstrumentCoverage {
}
}
-/// Distinguishes the expression operators.
-enum Op {
- Add,
- Subtract,
-}
-
-struct InjectedCall<'tcx> {
- func: Operand<'tcx>,
- args: Vec<Operand<'tcx>>,
- span: Span,
- inject_at: Span,
-}
-
struct Instrumentor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
mir_def_id: DefId,
}
/// Counter IDs start from zero and go up.
- fn next_counter(&mut self) -> u32 {
+ fn next_counter(&mut self) -> CounterValueReference {
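+        // Counter IDs ascend from 0 and expression IDs descend from `u32::MAX`; the assert
+        // guarantees the two ranges never collide.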
assert!(self.num_counters < u32::MAX - self.num_expressions);
let next = self.num_counters;
self.num_counters += 1;
- next
+ CounterValueReference::from(next)
}
    /// Expression IDs start from `u32::MAX` and go down because a `CounterExpression` can
    /// reference (to add or subtract counts) both `Counter` regions and `CounterExpression`
    /// regions. The expression operand IDs must therefore be unique across both types.
- fn next_expression(&mut self) -> u32 {
+ fn next_expression(&mut self) -> InjectedExpressionIndex {
assert!(self.num_counters < u32::MAX - self.num_expressions);
let next = u32::MAX - self.num_expressions;
self.num_expressions += 1;
- next
+ InjectedExpressionIndex::from(next)
}
fn function_source_hash(&mut self) -> u64 {
}
fn inject_counters(&mut self) {
- let mir_body = &self.mir_body;
let body_span = self.hir_body.value.span;
debug!("instrumenting {:?}, span: {:?}", self.mir_def_id, body_span);
// FIXME(richkadel): As a first step, counters are only injected at the top of each
// function. The complete solution will inject counters at each conditional code branch.
- let _ignore = mir_body;
- let id = self.next_counter();
- let function_source_hash = self.function_source_hash();
- let scope = rustc_middle::mir::OUTERMOST_SOURCE_SCOPE;
- let is_cleanup = false;
- let next_block = rustc_middle::mir::START_BLOCK;
- self.inject_call(
- self.make_counter(id, function_source_hash, body_span),
- scope,
- is_cleanup,
- next_block,
- );
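+        // Inject the counter into `START_BLOCK`, so it is incremented exactly once per
+        // function call.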
+ let block = rustc_middle::mir::START_BLOCK;
+ let counter = self.make_counter();
+ self.inject_statement(counter, body_span, block);
        // FIXME(richkadel): The next step to implement source-based coverage analysis will be
        // instrumenting branches within functions, and some regions will be counted by "counter
        // expression". The function to inject counter expressions is implemented; replace this
        // "fake use" with real use.
let fake_use = false;
if fake_use {
let add = false;
- let lhs = 1;
+ let fake_counter = CoverageKind::Counter {
+ function_source_hash: self.function_source_hash(),
+ id: CounterValueReference::from_u32(1),
+ };
+ let fake_expression = CoverageKind::Expression {
+ id: InjectedExpressionIndex::from(u32::MAX - 1),
+ lhs: ExpressionOperandId::from_u32(1),
+ op: Op::Add,
+ rhs: ExpressionOperandId::from_u32(2),
+ };
+
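+            // An expression operand can be either a counter ID or another expression's ID;
+            // because the two ID ranges are disjoint, the backend can tell which kind each
+            // operand names.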
+ let lhs = fake_counter.as_operand_id();
let op = if add { Op::Add } else { Op::Subtract };
- let rhs = 2;
+ let rhs = fake_expression.as_operand_id();
- let scope = rustc_middle::mir::OUTERMOST_SOURCE_SCOPE;
- let is_cleanup = false;
- let next_block = rustc_middle::mir::START_BLOCK;
+ let block = rustc_middle::mir::START_BLOCK;
- let id = self.next_expression();
- self.inject_call(
- self.make_expression(id, body_span, lhs, op, rhs),
- scope,
- is_cleanup,
- next_block,
- );
+ let expression = self.make_expression(lhs, op, rhs);
+ self.inject_statement(expression, body_span, block);
}
}
- fn make_counter(&self, id: u32, function_source_hash: u64, span: Span) -> InjectedCall<'tcx> {
- let inject_at = span.shrink_to_lo();
-
- let func = function_handle(
- self.tcx,
- self.tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None),
- inject_at,
- );
-
- let mut args = Vec::new();
-
- use count_code_region_args::*;
- debug_assert_eq!(FUNCTION_SOURCE_HASH, args.len());
- args.push(self.const_u64(function_source_hash, inject_at));
-
- debug_assert_eq!(COUNTER_ID, args.len());
- args.push(self.const_u32(id, inject_at));
-
- InjectedCall { func, args, span, inject_at }
+ fn make_counter(&mut self) -> CoverageKind {
+ CoverageKind::Counter {
+ function_source_hash: self.function_source_hash(),
+ id: self.next_counter(),
+ }
}
fn make_expression(
- &self,
- id: u32,
- span: Span,
- lhs: u32,
+ &mut self,
+ lhs: ExpressionOperandId,
op: Op,
- rhs: u32,
- ) -> InjectedCall<'tcx> {
- let inject_at = span.shrink_to_lo();
-
- let func = function_handle(
- self.tcx,
- self.tcx.require_lang_item(
- match op {
- Op::Add => lang_items::CoverageCounterAddFnLangItem,
- Op::Subtract => lang_items::CoverageCounterSubtractFnLangItem,
- },
- None,
- ),
- inject_at,
- );
-
- let mut args = Vec::new();
-
- use coverage_counter_expression_args::*;
- debug_assert_eq!(EXPRESSION_ID, args.len());
- args.push(self.const_u32(id, inject_at));
-
- debug_assert_eq!(LEFT_ID, args.len());
- args.push(self.const_u32(lhs, inject_at));
-
- debug_assert_eq!(RIGHT_ID, args.len());
- args.push(self.const_u32(rhs, inject_at));
-
- InjectedCall { func, args, span, inject_at }
+ rhs: ExpressionOperandId,
+ ) -> CoverageKind {
+ CoverageKind::Expression { id: self.next_expression(), lhs, op, rhs }
}
- fn inject_call(
- &mut self,
- call: InjectedCall<'tcx>,
- scope: SourceScope,
- is_cleanup: bool,
- next_block: BasicBlock,
- ) {
- let InjectedCall { func, mut args, span, inject_at } = call;
- debug!(
- " injecting {}call to {:?}({:?}) at: {:?}, scope: {:?}",
- if is_cleanup { "cleanup " } else { "" },
- func,
- args,
- inject_at,
- scope,
- );
-
- let mut patch = MirPatch::new(self.mir_body);
-
- let (file_name, start_line, start_col, end_line, end_col) = self.code_region(&span);
-
- // FIXME(richkadel): Note that `const_str()` results in the creation of an `Allocation` to
- // hold one copy of each unique filename. It looks like that `Allocation` may translate into
- // the creation of an `@alloc` in LLVM IR that is never actually used by runtime code.
- //
- // Example LLVM IR:
- //
- // @alloc4 = private unnamed_addr constant <{ [43 x i8] }> \
- // <{ [43 x i8] c"C:\\msys64\\home\\richkadel\\rust\\rust_basic.rs" }>, align 1
- //
- // Can I flag the alloc as something not to be added to codegen? Or somehow remove it before
- // it gets added to the LLVM IR? Do we need some kind of reference counting to know it's
- // not used by any runtime code?
- //
- // This question is moot if I convert the Call Terminators to Statements, I believe:
- // https://rust-lang.zulipchat.com/#narrow/stream/233931-t-compiler.2Fmajor-changes/topic/Implement.20LLVM-compatible.20source-based.20cod.20compiler-team.23278/near/206731748
- args.push(self.const_str(&file_name, inject_at));
- args.push(self.const_u32(start_line, inject_at));
- args.push(self.const_u32(start_col, inject_at));
- args.push(self.const_u32(end_line, inject_at));
- args.push(self.const_u32(end_col, inject_at));
-
- let temp = patch.new_temp(self.tcx.mk_unit(), inject_at);
- let new_block = patch.new_block(placeholder_block(inject_at, scope, is_cleanup));
- patch.patch_terminator(
- new_block,
- TerminatorKind::Call {
- func,
- args,
- // new_block will swapped with the next_block, after applying patch
- destination: Some((Place::from(temp), new_block)),
- cleanup: None,
- from_hir_call: false,
- fn_span: inject_at,
- },
- );
-
- patch.add_statement(new_block.start_location(), StatementKind::StorageLive(temp));
- patch.add_statement(next_block.start_location(), StatementKind::StorageDead(temp));
-
- patch.apply(self.mir_body);
-
- // To insert the `new_block` in front of the first block in the counted branch (the
- // `next_block`), just swap the indexes, leaving the rest of the graph unchanged.
- self.mir_body.basic_blocks_mut().swap(next_block, new_block);
- }
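+    /// Injects a new `StatementKind::Coverage` statement at the end of the given `BasicBlock`.
+    /// In a MIR dump, an injected counter statement renders along these lines (hash and file
+    /// name are illustrative):
+    ///
+    ///     Coverage { kind: Counter { function_source_hash: <hash>, id: CounterValueReference(0) },
+    ///         code_region: /the/src/example.rs:19:18 - 21:2 };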
+ fn inject_statement(&mut self, coverage_kind: CoverageKind, span: Span, block: BasicBlock) {
+ let code_region = make_code_region(self.tcx, &span);
+ debug!(" injecting statement {:?} covering {:?}", coverage_kind, code_region);
- /// Convert the Span into its file name, start line and column, and end line and column
- fn code_region(&self, span: &Span) -> (String, u32, u32, u32, u32) {
- let source_map = self.tcx.sess.source_map();
- let start = source_map.lookup_char_pos(span.lo());
- let end = if span.hi() == span.lo() {
- start.clone()
- } else {
- let end = source_map.lookup_char_pos(span.hi());
- debug_assert_eq!(
- start.file.name,
- end.file.name,
- "Region start ({:?} -> {:?}) and end ({:?} -> {:?}) don't come from the same source file!",
- span.lo(),
- start,
- span.hi(),
- end
- );
- end
+ let data = &mut self.mir_body[block];
+ let source_info = data.terminator().source_info;
+ let statement = Statement {
+ source_info,
+ kind: StatementKind::Coverage(box Coverage { kind: coverage_kind, code_region }),
};
- match &start.file.name {
- FileName::Real(RealFileName::Named(path)) => (
- path.to_string_lossy().to_string(),
- start.line as u32,
- start.col.to_u32() + 1,
- end.line as u32,
- end.col.to_u32() + 1,
- ),
- _ => {
- bug!("start.file.name should be a RealFileName, but it was: {:?}", start.file.name)
- }
- }
- }
-
- fn const_str(&self, value: &str, span: Span) -> Operand<'tcx> {
- Operand::const_from_str(self.tcx, value, span)
- }
-
- fn const_u32(&self, value: u32, span: Span) -> Operand<'tcx> {
- Operand::const_from_scalar(self.tcx, self.tcx.types.u32, Scalar::from_u32(value), span)
- }
-
- fn const_u64(&self, value: u64, span: Span) -> Operand<'tcx> {
- Operand::const_from_scalar(self.tcx, self.tcx.types.u64, Scalar::from_u64(value), span)
+ data.statements.push(statement);
}
}
-fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, span: Span) -> Operand<'tcx> {
- let ret_ty = tcx.fn_sig(fn_def_id).output();
- let ret_ty = ret_ty.no_bound_vars().unwrap();
- let substs = tcx.mk_substs(::std::iter::once(ty::subst::GenericArg::from(ret_ty)));
- Operand::function_handle(tcx, fn_def_id, substs, span)
-}
-
-fn placeholder_block(span: Span, scope: SourceScope, is_cleanup: bool) -> BasicBlockData<'tcx> {
- BasicBlockData {
- statements: vec![],
- terminator: Some(Terminator {
- source_info: SourceInfo { span, scope },
- // this gets overwritten by the counter Call
- kind: TerminatorKind::Unreachable,
- }),
- is_cleanup,
+/// Converts the `Span` into its file name, start line and column, and end line and column.
+fn make_code_region<'tcx>(tcx: TyCtxt<'tcx>, span: &Span) -> CodeRegion {
+ let source_map = tcx.sess.source_map();
+ let start = source_map.lookup_char_pos(span.lo());
+ let end = if span.hi() == span.lo() {
+ start.clone()
+ } else {
+ let end = source_map.lookup_char_pos(span.hi());
+ debug_assert_eq!(
+ start.file.name,
+ end.file.name,
+ "Region start ({:?} -> {:?}) and end ({:?} -> {:?}) don't come from the same source file!",
+ span.lo(),
+ start,
+ span.hi(),
+ end
+ );
+ end
+ };
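+    // `lookup_char_pos()` returns zero-based columns (`CharPos`), but `CodeRegion` columns are
+    // one-based, hence the `+ 1` adjustments below. Line numbers are already one-based.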
+ match &start.file.name {
+ FileName::Real(RealFileName::Named(path)) => CodeRegion {
+ file_name: Symbol::intern(&path.to_string_lossy()),
+ start_line: start.line as u32,
+ start_col: start.col.to_u32() + 1,
+ end_line: end.line as u32,
+ end_col: end.col.to_u32() + 1,
+ },
+ _ => bug!("start.file.name should be a RealFileName, but it was: {:?}", start.file.name),
}
}
| StatementKind::StorageDead(_)
| StatementKind::Retag { .. }
| StatementKind::AscribeUserType(..)
+ | StatementKind::Coverage(..)
| StatementKind::Nop => Ok(()),
}
}
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::AscribeUserType(..)
+ | StatementKind::Coverage(..)
| StatementKind::Nop => {
// These are all nops in a landing pad
}
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::struct_span_err;
use rustc_hir as hir;
-use rustc_hir::fake_lang_items::FAKE_ITEMS_REFS;
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_hir::lang_items;
use rustc_hir::weak_lang_items::WEAK_ITEMS_REFS;
}
impl<'a, 'tcx> Context<'a, 'tcx> {
- fn register(&mut self, name: Symbol, span: Span, hir_id: hir::HirId) {
+ fn register(&mut self, name: Symbol, span: Span) {
if let Some(&item) = WEAK_ITEMS_REFS.get(&name) {
if self.items.require(item).is_err() {
self.items.missing.push(item);
}
- } else if let Some(&item) = FAKE_ITEMS_REFS.get(&name) {
- // Ensure "fake lang items" are registered. These are `extern` lang items that are
- // injected into the MIR automatically (such as source code coverage counters), but are
- // never actually linked; therefore, unlike "weak lang items", they cannot by registered
- // when used, because they never appear to be used.
- if self.items.items[item as usize].is_none() {
- let item_def_id = self.tcx.hir().local_def_id(hir_id).to_def_id();
- self.items.items[item as usize] = Some(item_def_id);
- }
} else {
struct_span_err!(self.tcx.sess, span, E0264, "unknown external lang item: `{}`", name)
.emit();
fn visit_foreign_item(&mut self, i: &hir::ForeignItem<'_>) {
let check_name = |attr, sym| self.tcx.sess.check_name(attr, sym);
if let Some((lang_item, _)) = hir::lang_items::extract(check_name, &i.attrs) {
- self.register(lang_item, i.span, i.hir_id);
+ self.register(lang_item, i.span);
}
intravisit::walk_foreign_item(self, i)
}
core_intrinsics,
cosf32,
cosf64,
- count_code_region,
- coverage_counter_add,
- coverage_counter_subtract,
- coverage_unreachable,
crate_id,
crate_in_paths,
crate_local,
sym::nontemporal_store => (1, vec![tcx.mk_mut_ptr(param(0)), param(0)], tcx.mk_unit()),
- sym::count_code_region => (
- 0,
- vec![
- tcx.types.u64,
- tcx.types.u32,
- tcx.mk_static_str(),
- tcx.types.u32,
- tcx.types.u32,
- tcx.types.u32,
- tcx.types.u32,
- ],
- tcx.mk_unit(),
- ),
-
- sym::coverage_counter_add | sym::coverage_counter_subtract => (
- 0,
- vec![
- tcx.types.u32,
- tcx.types.u32,
- tcx.types.u32,
- tcx.mk_static_str(),
- tcx.types.u32,
- tcx.types.u32,
- tcx.types.u32,
- tcx.types.u32,
- ],
- tcx.mk_unit(),
- ),
-
- sym::coverage_unreachable => (
- 0,
- vec![
- tcx.mk_static_str(),
- tcx.types.u32,
- tcx.types.u32,
- tcx.types.u32,
- tcx.types.u32,
- ],
- tcx.mk_unit(),
- ),
-
other => {
struct_span_err!(
tcx.sess,
fn bar() -> bool {
let mut _0: bool; // return place in scope 0 at /the/src/instrument_coverage.rs:19:13: 19:17
-+ let mut _1: (); // in scope 0 at /the/src/instrument_coverage.rs:19:18: 19:18
bb0: {
-+ StorageLive(_1); // scope 0 at /the/src/instrument_coverage.rs:19:18: 19:18
-+ _1 = const std::intrinsics::count_code_region(const 10208505205182607101_u64, const 0_u32, const "/the/src/instrument_coverage.rs", const 19_u32, const 18_u32, const 21_u32, const 2_u32) -> bb2; // scope 0 at /the/src/instrument_coverage.rs:19:18: 19:18
-+ // ty::Const
-+ // + ty: unsafe extern "rust-intrinsic" fn(u64, u32, &'static str, u32, u32, u32, u32) {std::intrinsics::count_code_region}
-+ // + val: Value(Scalar(<ZST>))
-+ // mir::Constant
-+ // + span: /the/src/instrument_coverage.rs:19:18: 19:18
-+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u64, u32, &'static str, u32, u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
-+ // ty::Const
-+ // + ty: &str
-+ // + val: Value(Slice { data: Allocation { bytes: [47, 116, 104, 101, 47, 115, 114, 99, 47, 105, 110, 115, 116, 114, 117, 109, 101, 110, 116, 95, 99, 111, 118, 101, 114, 97, 103, 101, 46, 114, 115], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [2147483647], len: Size { raw: 31 } }, size: Size { raw: 31 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 31 })
-+ // mir::Constant
-+ // + span: /the/src/instrument_coverage.rs:19:18: 19:18
-+ // + literal: Const { ty: &str, val: Value(Slice { data: Allocation { bytes: [47, 116, 104, 101, 47, 115, 114, 99, 47, 105, 110, 115, 116, 114, 117, 109, 101, 110, 116, 95, 99, 111, 118, 101, 114, 97, 103, 101, 46, 114, 115], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [2147483647], len: Size { raw: 31 } }, size: Size { raw: 31 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 31 }) }
-+ }
-+
-+ bb1 (cleanup): {
-+ resume; // scope 0 at /the/src/instrument_coverage.rs:19:1: 21:2
-+ }
-+
-+ bb2: {
-+ StorageDead(_1); // scope 0 at /the/src/instrument_coverage.rs:20:5: 20:9
_0 = const true; // scope 0 at /the/src/instrument_coverage.rs:20:5: 20:9
++ Coverage { kind: Counter { function_source_hash: 10208505205182607101, id: CounterValueReference(0) }, code_region: /the/src/instrument_coverage.rs:19:18 - 21:2 }; // scope 0 at /the/src/instrument_coverage.rs:21:2: 21:2
return; // scope 0 at /the/src/instrument_coverage.rs:21:2: 21:2
}
}
let mut _1: (); // in scope 0 at /the/src/instrument_coverage.rs:10:1: 16:2
let mut _2: bool; // in scope 0 at /the/src/instrument_coverage.rs:12:12: 12:17
let mut _3: !; // in scope 0 at /the/src/instrument_coverage.rs:12:18: 14:10
-+ let mut _4: (); // in scope 0 at /the/src/instrument_coverage.rs:10:11: 10:11
bb0: {
-- falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
-+ StorageLive(_4); // scope 0 at /the/src/instrument_coverage.rs:10:11: 10:11
-+ _4 = const std::intrinsics::count_code_region(const 16004455475339839479_u64, const 0_u32, const "/the/src/instrument_coverage.rs", const 10_u32, const 11_u32, const 16_u32, const 2_u32) -> bb7; // scope 0 at /the/src/instrument_coverage.rs:10:11: 10:11
-+ // ty::Const
-+ // + ty: unsafe extern "rust-intrinsic" fn(u64, u32, &'static str, u32, u32, u32, u32) {std::intrinsics::count_code_region}
-+ // + val: Value(Scalar(<ZST>))
-+ // mir::Constant
-+ // + span: /the/src/instrument_coverage.rs:10:11: 10:11
-+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u64, u32, &'static str, u32, u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
-+ // ty::Const
-+ // + ty: &str
-+ // + val: Value(Slice { data: Allocation { bytes: [47, 116, 104, 101, 47, 115, 114, 99, 47, 105, 110, 115, 116, 114, 117, 109, 101, 110, 116, 95, 99, 111, 118, 101, 114, 97, 103, 101, 46, 114, 115], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [2147483647], len: Size { raw: 31 } }, size: Size { raw: 31 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 31 })
-+ // mir::Constant
-+ // + span: /the/src/instrument_coverage.rs:10:11: 10:11
-+ // + literal: Const { ty: &str, val: Value(Slice { data: Allocation { bytes: [47, 116, 104, 101, 47, 115, 114, 99, 47, 105, 110, 115, 116, 114, 117, 109, 101, 110, 116, 95, 99, 111, 118, 101, 114, 97, 103, 101, 46, 114, 115], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [2147483647], len: Size { raw: 31 } }, size: Size { raw: 31 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 31 }) }
++ Coverage { kind: Counter { function_source_hash: 16004455475339839479, id: CounterValueReference(0) }, code_region: /the/src/instrument_coverage.rs:10:11 - 16:2 }; // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
+ falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
}
bb1: {
// + literal: Const { ty: (), val: Value(Scalar(<ZST>)) }
StorageDead(_2); // scope 0 at /the/src/instrument_coverage.rs:15:5: 15:6
return; // scope 0 at /the/src/instrument_coverage.rs:16:2: 16:2
-+ }
-+
-+ bb7: {
-+ StorageDead(_4); // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
-+ falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
}
}
-// Test that the initial version of Rust coverage injects count_code_region() placeholder calls,
-// at the top of each function. The placeholders are later converted into LLVM instrprof.increment
+// Test that the initial version of Rust coverage injects Coverage statements at the top of each
+// function. The Coverage Counter statements are later converted into LLVM instrprof.increment
// intrinsics, during codegen.
// needs-profiler-support