use std::borrow::Cow;

use rustc::{mir, ty};
use rustc::ty::layout::{self, TyLayout, LayoutOf};
use syntax::source_map::Span;
use rustc_target::spec::abi::Abi;

use rustc::mir::interpret::{EvalResult, PointerArithmetic, InterpError, Scalar};
use super::{
    InterpretCx, Machine, Immediate, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup
};
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
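    /// Jump to the given basic block, or fail with `Unreachable` if there is no target.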
    pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> EvalResult<'tcx> {
        if let Some(target) = target {
            self.frame_mut().block = target;
            self.frame_mut().stmt = 0;
            Ok(())
        } else {
            err!(Unreachable)
        }
    }
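
    /// Evaluates a single MIR terminator: transfers control flow to the next block,
    /// performs a call or a drop, or checks an assertion.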
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> EvalResult<'tcx> {
        use rustc::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.frame().return_place.map(|r| self.dump_place(*r));
                self.pop_stack_frame()?
            }

            Goto { target } => self.goto_block(Some(target))?,

            SwitchInt {
                ref discr,
                ref values,
                ref targets,
                ..
            } => {
                let discr = self.read_immediate(self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` case by default, if no match is found.
                let mut target_block = targets[targets.len() - 1];

                for (index, &const_int) in values.iter().enumerate() {
                    // Compare using binary_op, to also support pointer values
                    let const_int = Scalar::from_uint(const_int, discr.layout.size);
                    let (res, _) = self.binary_op(mir::BinOp::Eq,
                        discr,
                        ImmTy::from_scalar(const_int, discr.layout),
                    )?;
                    if res.to_bool()? {
                        target_block = targets[index];
                        break;
                    }
                }

                self.goto_block(Some(target_block))?;
            }
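
            // A function call: evaluate the destination and the callee, gather the
            // argument operands, and hand everything off to `eval_fn_call`.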
            Call {
                ref func,
                ref args,
                ref destination,
                ..
            } => {
                let (dest, ret) = match *destination {
                    Some((ref lv, target)) => (Some(self.eval_place(lv)?), Some(target)),
                    None => (None, None),
                };

                let func = self.eval_operand(func, None)?;
                let (fn_def, abi) = match func.layout.ty.sty {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(func)?.to_ptr()?;
                        let instance = self.memory.get_fn(fn_ptr)?;
                        (instance, caller_abi)
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (self.resolve(def_id, substs)?, sig.abi())
                    },
                    _ => {
                        let msg = format!("can't handle callee of type {:?}", func.layout.ty);
                        return err!(Unimplemented(msg));
                    }
                };
                let args = self.eval_operands(args)?;
                self.eval_fn_call(
                    fn_def,
                    terminator.source_info.span,
                    abi,
                    &args[..],
                    dest,
                    ret,
                )?;
            }

            Drop {
                ref location,
                target,
                ..
            } => {
                // FIXME(CTFE): forbid drop in const eval
                let place = self.eval_place(location)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", location, ty);

                let instance = crate::monomorphize::resolve_drop_in_place(*self.tcx, ty);
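                // `resolve_drop_in_place` yields the monomorphized drop glue for `ty`;
                // it is called below like an ordinary function.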
                self.drop_in_place(
                    place,
                    instance,
                    terminator.source_info.span,
                    target,
                )?;
            }

            Assert { ref cond, expected, ref msg, target, .. } => {
                let cond_val = self.read_immediate(self.eval_operand(cond, None)?)?
                    .to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.goto_block(Some(target))?;
                } else {
                    // Compute error message
                    use rustc::mir::interpret::InterpError::*;
                    return match *msg {
                        BoundsCheck { ref len, ref index } => {
                            let len = self.read_immediate(self.eval_operand(len, None)?)
                                .expect("can't eval len").to_scalar()?
                                .to_bits(self.memory().pointer_size())? as u64;
                            let index = self.read_immediate(self.eval_operand(index, None)?)
                                .expect("can't eval index").to_scalar()?
                                .to_bits(self.memory().pointer_size())? as u64;
                            err!(BoundsCheck { len, index })
                        }
                        Overflow(op) => Err(Overflow(op).into()),
                        OverflowNeg => Err(OverflowNeg.into()),
                        DivisionByZero => Err(DivisionByZero.into()),
                        RemainderByZero => Err(RemainderByZero.into()),
                        GeneratorResumedAfterReturn |
                        GeneratorResumedAfterPanic => unimplemented!(),
                        _ => bug!(),
                    };
                }
            }

            Yield { .. } |
            GeneratorDrop |
            DropAndReplace { .. } |
            Resume |
            Abort => unimplemented!("{:#?}", terminator.kind),
            FalseEdges { .. } => bug!("should have been eliminated by \
                                      `simplify_branches` mir pass"),
            FalseUnwind { .. } => bug!("should have been eliminated by \
                                       `simplify_branches` mir pass"),
            Unreachable => return err!(Unreachable),
        }

        Ok(())
    }
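
    /// Returns whether an argument with layout `caller` may be passed where the callee
    /// expects layout `callee` (differences in valid ranges are deliberately ignored).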
    fn check_argument_compat(
        rust_abi: bool,
        caller: TyLayout<'tcx>,
        callee: TyLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question
            return true;
        }
        if !rust_abi {
            // Don't risk anything
            return false;
        }
        // Compare layout
        match (&caller.abi, &callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
            (layout::Abi::Scalar(ref caller), layout::Abi::Scalar(ref callee)) =>
                caller.value == callee.value,
            (layout::Abi::ScalarPair(ref caller1, ref caller2),
             layout::Abi::ScalarPair(ref callee1, ref callee2)) =>
                caller1.value == callee1.value && caller2.value == callee2.value,
            // Be conservative
            _ => false
        }
    }

    /// Pass a single argument, checking the types for compatibility.
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item=OpTy<'tcx, M::PointerTag>>,
        callee_arg: PlaceTy<'tcx, M::PointerTag>,
    ) -> EvalResult<'tcx> {
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next()
            .ok_or_else(|| InterpError::FunctionArgCountMismatch)?;
        if rust_abi {
            debug_assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            return err!(FunctionArgMismatch(caller_arg.layout.ty, callee_arg.layout.ty));
        }
        // We allow some transmutes here
        self.copy_op_transmute(caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    fn eval_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        span: Span,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        dest: Option<PlaceTy<'tcx, M::PointerTag>>,
        ret: Option<mir::BasicBlock>,
    ) -> EvalResult<'tcx> {
        trace!("eval_fn_call: {:#?}", instance);

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                if caller_abi != Abi::RustIntrinsic {
                    return err!(FunctionAbiMismatch(caller_abi, Abi::RustIntrinsic));
                }
                // The intrinsic itself cannot diverge, so if we got here without a return
                // place... (can happen e.g., for transmute returning `!`)
                let dest = match dest {
                    Some(dest) => dest,
                    None => return err!(Unreachable)
                };
                M::call_intrinsic(self, instance, args, dest)?;
                // No stack frame gets pushed, the main loop will just act as if the
                // call completed.
                self.goto_block(ret)?;
                self.dump_place(*dest);
                Ok(())
            }
            ty::InstanceDef::VtableShim(..) |
            ty::InstanceDef::ClosureOnceShim { .. } |
            ty::InstanceDef::FnPtrShim(..) |
            ty::InstanceDef::DropGlue(..) |
            ty::InstanceDef::CloneShim(..) |
            ty::InstanceDef::Item(_) => {
                // ABI check
                let callee_abi = {
                    let instance_ty = instance.ty(*self.tcx);
                    match instance_ty.sty {
                        ty::FnDef(..) =>
                            instance_ty.fn_sig(*self.tcx).abi(),
                        ty::Closure(..) => Abi::RustCall,
                        ty::Generator(..) => Abi::Rust,
                        _ => bug!("unexpected callee ty: {:?}", instance_ty),
                    }
                };
                // Rust and RustCall are compatible
                let normalize_abi = |abi| if abi == Abi::RustCall { Abi::Rust } else { abi };
                if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                    return err!(FunctionAbiMismatch(caller_abi, callee_abi));
                }

                // We need MIR for this fn
                let mir = match M::find_fn(self, instance, args, dest, ret)? {
                    Some(mir) => mir,
                    None => return Ok(()),
                };

                self.push_stack_frame(
                    instance,
                    span,
                    mir,
                    dest,
                    StackPopCleanup::Goto(ret),
                )?;

                // We want to pop this frame again in case there was an error, to put
                // the blame in the right location. Until the 2018 edition is used in
                // the compiler, we have to do this with an immediately invoked function.
                let res = (||{
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        mir.spread_arg,
                        mir.args_iter()
                            .map(|local|
                                (local, self.layout_of_local(self.frame(), local, None).unwrap().ty)
                            )
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // We have two iterators: Where the arguments come from,
                    // and where they go to.
                    let rust_abi = match caller_abi {
                        Abi::Rust | Abi::RustCall => true,
                        _ => false
                    };

                    // For where they come from: If the ABI is RustCall, we untuple the
                    // last incoming argument. These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for RustCall ABI only).
                    let caller_args : Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (&untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(args.iter().map(|&a| Ok(a))
                                .chain((0..untuple_arg.layout.fields.count()).into_iter()
                                    .map(|i| self.operand_field(untuple_arg, i as u64))
                                )
                                .collect::<EvalResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?)
                        } else {
                            // Plain arg passing
                            Cow::from(args)
                        };
                    // Skip ZSTs
                    let mut caller_iter = caller_args.iter()
                        .filter(|op| !rust_abi || !op.layout.is_zst())
                        .map(|op| *op);
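                    // On Rust ABIs, zero-sized arguments are not passed at all; filtering
                    // them out here mirrors `pass_argument` skipping ZST callee locals.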

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`. If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
                    let mut locals_iter = mir.args_iter();
                    while let Some(local) = locals_iter.next() {
                        let dest = self.eval_place(
                            &mir::Place::Base(mir::PlaceBase::Local(local))
                        )?;
                        if Some(local) == mir.spread_arg {
                            // Must be a tuple
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(dest, i as u64)?;
                                self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                            }
                        } else {
                            // Normal argument
                            self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                        }
                    }
                    // Now we should have no more caller args
                    if caller_iter.next().is_some() {
                        trace!("Caller has too many args over");
                        return err!(FunctionArgCountMismatch);
                    }
                    // Don't forget to check the return type!
                    if let Some(caller_ret) = dest {
                        let callee_ret = self.eval_place(
                            &mir::Place::RETURN_PLACE
                        )?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            return err!(FunctionRetMismatch(
                                caller_ret.layout.ty, callee_ret.layout.ty
                            ));
                        }
                    } else {
                        let callee_layout =
                            self.layout_of_local(self.frame(), mir::RETURN_PLACE, None)?;
                        if !callee_layout.abi.is_uninhabited() {
                            return err!(FunctionRetMismatch(
                                self.tcx.types.never, callee_layout.ty
                            ));
                        }
                    }
                    Ok(())
                })();
                match res {
                    Err(err) => {
                        self.stack.pop();
                        Err(err)
                    }
                    Ok(v) => Ok(v)
                }
            }
            // cannot use the shim here, because that will only result in infinite recursion
            ty::InstanceDef::Virtual(_, idx) => {
                let ptr_size = self.pointer_size();
                let ptr = self.deref_operand(args[0])?;
                let vtable = ptr.vtable()?;
                self.memory.check_align(vtable.into(), self.tcx.data_layout.pointer_align.abi)?;
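                // The first three pointer-sized vtable slots hold the drop-in-place
                // pointer, size, and alignment; method pointers start at slot 3.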
                let fn_ptr = self.memory.get(vtable.alloc_id)?.read_ptr_sized(
                    self,
                    vtable.offset(ptr_size * (idx as u64 + 3), self)?,
                )?.to_ptr()?;
                let instance = self.memory.get_fn(fn_ptr)?;

                // We have to patch the self argument, in particular get the layout
                // expected by the actual function. Cannot just use "field 0" due to
                // `Box<Self>` receivers.
                let mut args = args.to_vec();
                let pointee = args[0].layout.ty.builtin_deref(true).unwrap().ty;
                let fake_fat_ptr_ty = self.tcx.mk_mut_ptr(pointee);
                args[0] = OpTy::from(ImmTy { // strip vtable
                    layout: self.layout_of(fake_fat_ptr_ty)?.field(self, 0)?,
                    imm: Immediate::Scalar(ptr.ptr.into())
                });
                trace!("Patched self operand to {:#?}", args[0]);
                // recurse with concrete function
                self.eval_fn_call(instance, span, caller_abi, &args, dest, ret)
            }
        }
    }
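
    /// Runs the drop glue `instance` on the value at `place`, then continues
    /// execution at `target`.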
    fn drop_in_place(
        &mut self,
        place: PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        span: Span,
        target: mir::BasicBlock,
    ) -> EvalResult<'tcx> {
        trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.sty {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(place)?
            }
            _ => (instance, place),
        };
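
        // The drop glue takes the value by mutable reference, so pass the place's
        // address as the single argument.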
        let arg = ImmTy {
            imm: place.to_ref(),
            layout: self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        };

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);

        self.eval_fn_call(
            instance,
            span,
            Abi::Rust,
            &[arg.into()],
            Some(dest.into()),
            Some(target),
        )
    }
}