use std::mem;

use rand::RngCore;

use rustc::ty::{self, layout::{self, Size, Align}};
use rustc::hir::def_id::{DefId, CRATE_DEF_INDEX};

// Pull in Miri's common types (`InterpResult`, `Scalar`, `Tag`, `MiriEvalContext`, ...).
use crate::*;

impl<'mir, 'tcx> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}

pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
    /// Gets an instance for a path.
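    ///
    /// A sketch of a hypothetical caller (the path segments here are
    /// illustrative, not taken from real shim code):
    ///
    /// ```ignore
    /// let instance = this.resolve_path(&["std", "process", "abort"])?;
    /// ```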
    fn resolve_path(&self, path: &[&str]) -> InterpResult<'tcx, ty::Instance<'tcx>> {
        let this = self.eval_context_ref();
        this.tcx
            .crates()
            .iter()
            .find(|&&krate| this.tcx.original_crate_name(krate).as_str() == path[0])
            .and_then(|krate| {
                // Start at the root of the matching crate and walk down the
                // remaining path segments one module at a time.
                let krate = DefId {
                    krate: *krate,
                    index: CRATE_DEF_INDEX,
                };
                let mut items = this.tcx.item_children(krate);
                let mut path_it = path.iter().skip(1).peekable();

                while let Some(segment) = path_it.next() {
                    for item in mem::replace(&mut items, Default::default()).iter() {
                        if item.ident.name.as_str() == *segment {
                            if path_it.peek().is_none() {
                                return Some(ty::Instance::mono(this.tcx.tcx, item.res.def_id()));
                            }

                            items = this.tcx.item_children(item.res.def_id());
                            break;
                        }
                    }
                }
                None
            })
            .ok_or_else(|| {
                let path = path.iter().map(|&s| s.to_owned()).collect();
                InterpError::PathNotFound(path).into()
            })
    }

    /// Write a 0 of the appropriate size to `dest`.
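    ///
    /// Shims commonly use this for "success" return values; a sketch
    /// (assuming a `dest` place from an intercepted call):
    ///
    /// ```ignore
    /// this.write_null(dest)?;
    /// ```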
    fn write_null(&mut self, dest: PlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
        self.eval_context_mut().write_scalar(Scalar::from_int(0, dest.layout.size), dest)
    }

    /// Test if this immediate equals 0.
    fn is_null(&self, val: Scalar<Tag>) -> InterpResult<'tcx, bool> {
        let this = self.eval_context_ref();
        let null = Scalar::from_int(0, this.memory().pointer_size());
        this.ptr_eq(val, null)
    }

    /// Turn a `Scalar` into an `Option<Scalar>`, mapping null to `None`.
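    ///
    /// A sketch of the intended use (hypothetical `ptr` scalar):
    ///
    /// ```ignore
    /// match this.test_null(ptr)? {
    ///     Some(ptr) => { /* non-null, proceed */ }
    ///     None => { /* caller passed a null pointer */ }
    /// }
    /// ```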
    fn test_null(&self, val: Scalar<Tag>) -> InterpResult<'tcx, Option<Scalar<Tag>>> {
        let this = self.eval_context_ref();
        Ok(if this.is_null(val)? {
            None
        } else {
            Some(val)
        })
    }

    /// Generate `len` bytes of random data, and write them to the memory at `ptr`.
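    ///
    /// Fails unless the interpreter was given an RNG seed (`-Zmiri-seed`);
    /// Miri deliberately does not gather real system entropy.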
    fn gen_random(
        &mut self,
        len: usize,
        ptr: Scalar<Tag>,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();

        let ptr = match this.memory().check_ptr_access(
            ptr,
            Size::from_bytes(len as u64),
            Align::from_bytes(1).unwrap(),
        )? {
            Some(ptr) => ptr,
            None => return Ok(()), // zero-sized access
        };

        let data = match &mut this.memory_mut().extra.rng {
            Some(rng) => {
                let mut rng = rng.borrow_mut();
                let mut data = vec![0; len];
                rng.fill_bytes(&mut data);
                data
            }
            None => {
                return err!(Unimplemented(
                    "miri does not support gathering system entropy in deterministic mode!
                    Use '-Zmiri-seed=<seed>' to enable random number generation.
                    WARNING: Miri does *not* generate cryptographically secure entropy -
                    do not use Miri to run any program that needs secure random number generation".to_owned(),
                ));
            }
        };
        // The raw `TyCtxt` is needed for the allocation write below.
        let tcx = &{ this.tcx.tcx };
        this.memory_mut().get_mut(ptr.alloc_id)?
            .write_bytes(tcx, ptr, &data)
    }

    /// Visits the memory covered by `place`, sensitive to freezing: the 3rd
    /// parameter of `action` will be true if this is frozen, false if this
    /// is in an `UnsafeCell`.
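    ///
    /// A sketch of a hypothetical caller that collects the unfrozen ranges:
    ///
    /// ```ignore
    /// let mut unfrozen = Vec::new();
    /// this.visit_freeze_sensitive(place, size, |ptr, range_size, frozen| {
    ///     if !frozen {
    ///         unfrozen.push((ptr, range_size));
    ///     }
    ///     Ok(())
    /// })?;
    /// ```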
    fn visit_freeze_sensitive(
        &self,
        place: MPlaceTy<'tcx, Tag>,
        size: Size,
        mut action: impl FnMut(Pointer<Tag>, Size, bool) -> InterpResult<'tcx>,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_ref();
        trace!("visit_frozen(place={:?}, size={:?})", *place, size);
        debug_assert_eq!(size,
            this.size_and_align_of_mplace(place)?
                .map(|(size, _)| size)
                .unwrap_or_else(|| place.layout.size)
        );
        // Store how far we proceeded into the place so far. Everything to the left of
        // this offset has already been handled, in the sense that the frozen parts
        // have had `action` called on them.
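        //
        // For example (sketch, hypothetical layout): for a `(u8, Cell<u8>, u8)`
        // laid out in that order, `action` would see three ranges: byte 0
        // frozen, byte 1 unfrozen (the `Cell`), byte 2 frozen.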
        let mut end_ptr = place.ptr.assert_ptr();
        // Called when we detected an `UnsafeCell` at the given offset and size.
        // Calls `action` and advances `end_ptr`.
        let mut unsafe_cell_action = |unsafe_cell_ptr: Scalar<Tag>, unsafe_cell_size: Size| {
            let unsafe_cell_ptr = unsafe_cell_ptr.assert_ptr();
            debug_assert_eq!(unsafe_cell_ptr.alloc_id, end_ptr.alloc_id);
            debug_assert_eq!(unsafe_cell_ptr.tag, end_ptr.tag);
            // We assume that we are given the fields in increasing offset order,
            // and nothing else changes.
            let unsafe_cell_offset = unsafe_cell_ptr.offset;
            let end_offset = end_ptr.offset;
            assert!(unsafe_cell_offset >= end_offset);
            let frozen_size = unsafe_cell_offset - end_offset;
            // Everything between the end_ptr and this `UnsafeCell` is frozen.
            if frozen_size != Size::ZERO {
                action(end_ptr, frozen_size, /*frozen*/ true)?;
            }
            // This `UnsafeCell` is NOT frozen.
            if unsafe_cell_size != Size::ZERO {
                action(unsafe_cell_ptr, unsafe_cell_size, /*frozen*/ false)?;
            }
            // Update `end_ptr`.
            end_ptr = unsafe_cell_ptr.wrapping_offset(unsafe_cell_size, this);
            Ok(())
        };
        // Run a visitor.
        {
            let mut visitor = UnsafeCellVisitor {
                ecx: this,
                unsafe_cell_action: |place| {
                    trace!("unsafe_cell_action on {:?}", place.ptr);
                    // We need a size to go on.
                    let unsafe_cell_size = this.size_and_align_of_mplace(place)?
                        .map(|(size, _)| size)
                        // For extern types, just cover what we can.
                        .unwrap_or_else(|| place.layout.size);
                    // Now handle this `UnsafeCell`, unless it is empty.
                    if unsafe_cell_size != Size::ZERO {
                        unsafe_cell_action(place.ptr, unsafe_cell_size)
                    } else {
                        Ok(())
                    }
                },
            };
            visitor.visit_value(place)?;
        }
        // The part between the end_ptr and the end of the place is also frozen.
        // So pretend there is a 0-sized `UnsafeCell` at the end.
        unsafe_cell_action(place.ptr.ptr_wrapping_offset(size, this), Size::ZERO)?;
        // Done!
        Ok(())
    }
}

/// Visiting the memory covered by a `MemPlace`, being aware of
/// whether we are inside an `UnsafeCell` or not.
struct UnsafeCellVisitor<'ecx, 'mir, 'tcx, F>
where
    F: FnMut(MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx>,
{
    ecx: &'ecx MiriEvalContext<'mir, 'tcx>,
    unsafe_cell_action: F,
}

impl<'ecx, 'mir, 'tcx, F> ValueVisitor<'mir, 'tcx, Evaluator<'tcx>>
    for UnsafeCellVisitor<'ecx, 'mir, 'tcx, F>
where
    F: FnMut(MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx>,
{
    type V = MPlaceTy<'tcx, Tag>;

    #[inline(always)]
    fn ecx(&self) -> &MiriEvalContext<'mir, 'tcx> {
        &self.ecx
    }

    // Hook to detect `UnsafeCell`.
    fn visit_value(&mut self, v: MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
        trace!("UnsafeCellVisitor: {:?} {:?}", *v, v.layout.ty);
        let is_unsafe_cell = match v.layout.ty.sty {
            ty::Adt(adt, _) => Some(adt.did) == self.ecx.tcx.lang_items().unsafe_cell_type(),
            _ => false,
        };
        if is_unsafe_cell {
            // We do not have to recurse further, this is an `UnsafeCell`.
            (self.unsafe_cell_action)(v)
        } else if self.ecx.type_is_freeze(v.layout.ty) {
            // This is `Freeze`, there cannot be an `UnsafeCell`.
            Ok(())
        } else {
            // Not `Freeze`, so an `UnsafeCell` may be hiding somewhere inside; recurse.
            self.walk_value(v)
        }
    }

    // Make sure we visit aggregates in increasing offset order.
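    // (`unsafe_cell_action` in `visit_freeze_sensitive` asserts this ordering.)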
    fn visit_aggregate(
        &mut self,
        place: MPlaceTy<'tcx, Tag>,
        fields: impl Iterator<Item = InterpResult<'tcx, MPlaceTy<'tcx, Tag>>>,
    ) -> InterpResult<'tcx> {
        match place.layout.fields {
            layout::FieldPlacement::Array { .. } => {
                // For the array layout, we know the iterator will yield sorted elements, so
                // we can avoid the allocation.
                self.walk_aggregate(place, fields)
            }
            layout::FieldPlacement::Arbitrary { .. } => {
                // Gather the subplaces and sort them before visiting.
                let mut places = fields.collect::<InterpResult<'tcx, Vec<MPlaceTy<'tcx, Tag>>>>()?;
                places.sort_by_key(|place| place.ptr.assert_ptr().offset);
                self.walk_aggregate(place, places.into_iter().map(Ok))
            }
            layout::FieldPlacement::Union { .. } => {
                // Unions are handled separately in `visit_union` below.
                bug!("a union is not an aggregate we should ever visit")
            }
        }
    }

    // We have to do *something* for unions.
    fn visit_union(&mut self, v: MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
        // With unions, we fall back to whatever the type says, to hopefully be consistent
        // with LLVM transformations.
        // FIXME: are we consistent, and is this really the behavior we want?
        let frozen = self.ecx.type_is_freeze(v.layout.ty);
        if frozen {
            Ok(())
        } else {
            (self.unsafe_cell_action)(v)
        }
    }

    // We should never get to a primitive, but always short-circuit somewhere above.
    fn visit_primitive(&mut self, _v: MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
        bug!("we should always short-circuit before coming to a primitive")
    }
}