use std::borrow::Cow;

use rustc::{mir, ty};
use rustc::ty::Instance;
use rustc::ty::layout::{self, TyLayout, LayoutOf};
use syntax::source_map::Span;
use rustc_target::spec::abi::Abi;

use super::{
    InterpResult, PointerArithmetic, InterpError, Scalar,
    InterpCx, Machine, Immediate, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup, FnVal,
};

impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
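    /// Directs control flow to `target`: makes it the current basic block and
    /// resets the statement counter. A `None` target means the terminator has
    /// no continuation, so reaching it is treated like `Unreachable` below.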
    pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
        if let Some(target) = target {
            self.frame_mut().block = target;
            self.frame_mut().stmt = 0;
            Ok(())
        } else {
            err_ub!(Unreachable)
        }
    }

    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> InterpResult<'tcx> {
        use rustc::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.frame().return_place.map(|r| self.dump_place(*r));
                self.pop_stack_frame()?
            }

            Goto { target } => self.goto_block(Some(target))?,

            SwitchInt {
                ref discr,
                ref values,
                ref targets,
                ..
            } => {
                let discr = self.read_immediate(self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` case by default, if no match is found.
                let mut target_block = targets[targets.len() - 1];
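                // NB: `targets` has one more entry than `values`; the trailing
                // entry is the `otherwise` edge indexed above. E.g. for
                // `match x { 0 => a, 3 => b, _ => c }`, `values` is `[0, 3]`
                // and `targets` is `[a, b, c]`.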

                for (index, &const_int) in values.iter().enumerate() {
                    // Compare using binary_op, to also support pointer values
                    let const_int = Scalar::from_uint(const_int, discr.layout.size);
                    let (res, _) = self.binary_op(mir::BinOp::Eq,
                        discr,
                        ImmTy::from_scalar(const_int, discr.layout),
                    )?;
                    if res.to_bool()? {
                        target_block = targets[index];
                        break;
                    }
                }

                self.goto_block(Some(target_block))?;
            }

            Call {
                ref func,
                ref args,
                ref destination,
                ..
            } => {
                let (dest, ret) = match *destination {
                    Some((ref lv, target)) => (Some(self.eval_place(lv)?), Some(target)),
                    None => (None, None),
                };

                let func = self.eval_operand(func, None)?;
                let (fn_val, abi) = match func.layout.ty.sty {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(func)?.not_undef()?;
                        let fn_val = self.memory.get_fn(fn_ptr)?;
                        (fn_val, caller_abi)
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (FnVal::Instance(self.resolve(def_id, substs)?), sig.abi())
                    }
                    _ => {
                        let msg = format!("can't handle callee of type {:?}", func.layout.ty);
                        return err!(Unimplemented(msg));
                    }
                };
                let args = self.eval_operands(args)?;
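                // The operands are evaluated in the *caller's* frame; only the
                // `eval_fn_call` below pushes the callee's frame and copies them over.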
                self.eval_fn_call(
                    fn_val,
                    terminator.source_info.span,
                    abi,
                    &args[..],
                    dest,
                    ret,
                )?;
            }

            Drop {
                ref location,
                target,
                ..
            } => {
                // FIXME(CTFE): forbid drop in const eval
                let place = self.eval_place(location)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", location, ty);

                let instance = Instance::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(
                    place,
                    instance,
                    terminator.source_info.span,
                    target,
                )?;
            }

            Assert {
                ref cond,
                expected,
                ref msg,
                target,
                ..
            } => {
                let cond_val = self.read_immediate(self.eval_operand(cond, None)?)?
                    .to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.goto_block(Some(target))?;
                } else {
                    // Compute error message
                    use rustc::mir::interpret::PanicMessage::*;
                    return match msg {
                        BoundsCheck { ref len, ref index } => {
                            let len = self.read_immediate(self.eval_operand(len, None)?)
                                .expect("can't eval len").to_scalar()?
                                .to_bits(self.memory().pointer_size())? as u64;
                            let index = self.read_immediate(self.eval_operand(index, None)?)
                                .expect("can't eval index").to_scalar()?
                                .to_bits(self.memory().pointer_size())? as u64;
                            err_panic!(BoundsCheck { len, index })
                        }
                        Overflow(op) =>
                            err_panic!(Overflow(*op)),
                        OverflowNeg =>
                            err_panic!(OverflowNeg),
                        DivisionByZero =>
                            err_panic!(DivisionByZero),
                        RemainderByZero =>
                            err_panic!(RemainderByZero),
                        GeneratorResumedAfterReturn =>
                            err_panic!(GeneratorResumedAfterReturn),
                        GeneratorResumedAfterPanic =>
                            err_panic!(GeneratorResumedAfterPanic),
                        Panic { .. } =>
                            bug!("`Panic` variant cannot occur in MIR"),
                    };
                }
            }

            Yield { .. } |
            GeneratorDrop |
            DropAndReplace { .. } |
            Resume |
            Abort => unimplemented!("{:#?}", terminator.kind),
            FalseEdges { .. } =>
                bug!("should have been eliminated by `simplify_branches` mir pass"),
            FalseUnwind { .. } =>
                bug!("should have been eliminated by `simplify_branches` mir pass"),
            Unreachable => return err_ub!(Unreachable),
        }

        Ok(())
    }
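
    /// Checks whether a value of layout `caller` may be passed where layout
    /// `callee` is expected. Under non-Rust ABIs only exact type equality is
    /// accepted; under the Rust ABI, layouts that agree in their scalar (pair)
    /// representation are compatible, like a transmute. E.g. `&T` may be passed
    /// where `*const T` is expected (both are one `Pointer` scalar), and
    /// `NonZeroU32` where `u32` is expected (same scalar, different valid range).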
    fn check_argument_compat(
        rust_abi: bool,
        caller: TyLayout<'tcx>,
        callee: TyLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question
            return true;
        }
        if !rust_abi {
            // Don't risk anything
            return false;
        }
        // Compare layout
        match (&caller.abi, &callee.abi) {
            // Different valid ranges are okay (once we enforce validity,
            // that will take care to make it UB to leave the range, just
            // like for transmute).
            (layout::Abi::Scalar(ref caller), layout::Abi::Scalar(ref callee)) =>
                caller.value == callee.value,
            (layout::Abi::ScalarPair(ref caller1, ref caller2),
             layout::Abi::ScalarPair(ref callee1, ref callee2)) =>
                caller1.value == callee1.value && caller2.value == callee2.value,
            // Be conservative.
            _ => false,
        }
    }

    /// Pass a single argument, checking the types for compatibility.
    fn pass_argument(
        &mut self,
        rust_abi: bool,
        caller_arg: &mut impl Iterator<Item=OpTy<'tcx, M::PointerTag>>,
        callee_arg: PlaceTy<'tcx, M::PointerTag>,
    ) -> InterpResult<'tcx> {
        if rust_abi && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next()
            .ok_or_else(|| InterpError::Unsupported(FunctionArgCountMismatch))?;
        if rust_abi {
            debug_assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check
        if !Self::check_argument_compat(rust_abi, caller_arg.layout, callee_arg.layout) {
            return err!(FunctionArgMismatch(caller_arg.layout.ty, callee_arg.layout.ty));
        }
        // We allow some transmutes here
        self.copy_op_transmute(caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    fn eval_fn_call(
        &mut self,
        fn_val: FnVal<'tcx, M::ExtraFnVal>,
        span: Span,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        dest: Option<PlaceTy<'tcx, M::PointerTag>>,
        ret: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        trace!("eval_fn_call: {:#?}", fn_val);

        let instance = match fn_val {
            FnVal::Instance(instance) => instance,
            FnVal::Other(extra) => {
                // Machine-specific function values are handled entirely by the machine.
                return M::call_extra_fn(self, extra, args, dest, ret);
            }
        };

        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                if caller_abi != Abi::RustIntrinsic {
                    return err!(FunctionAbiMismatch(caller_abi, Abi::RustIntrinsic));
                }
                // The intrinsic itself cannot diverge, so if we got here without a
                // return place, the callsite is unreachable (can happen e.g., for
                // transmute returning `!`).
                let dest = match dest {
                    Some(dest) => dest,
                    None => return err_ub!(Unreachable),
                };
                M::call_intrinsic(self, instance, args, dest)?;
                // No stack frame gets pushed, the main loop will just act as if the
                // call completed.
                self.goto_block(ret)?;
                self.dump_place(*dest);
                Ok(())
            }
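
            // All of the following have MIR available (possibly as a
            // compiler-generated shim), so they are handled uniformly below:
            // `M::find_fn` yields the body and a fresh stack frame is pushed for it.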
            ty::InstanceDef::VtableShim(..) |
            ty::InstanceDef::ClosureOnceShim { .. } |
            ty::InstanceDef::FnPtrShim(..) |
            ty::InstanceDef::DropGlue(..) |
            ty::InstanceDef::CloneShim(..) |
            ty::InstanceDef::Item(_) => {
                // ABI check
                {
                    let callee_abi = {
                        let instance_ty = instance.ty(*self.tcx);
                        match instance_ty.sty {
                            ty::FnDef(..) =>
                                instance_ty.fn_sig(*self.tcx).abi(),
                            ty::Closure(..) => Abi::RustCall,
                            ty::Generator(..) => Abi::Rust,
                            _ => bug!("unexpected callee ty: {:?}", instance_ty),
                        }
                    };
                    let normalize_abi = |abi| match abi {
                        Abi::Rust | Abi::RustCall | Abi::RustIntrinsic | Abi::PlatformIntrinsic =>
                            // These are all the same ABI, really.
                            Abi::Rust,
                        abi =>
                            abi,
                    };
                    if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                        return err!(FunctionAbiMismatch(caller_abi, callee_abi));
                    }
                }
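
                // From here on, caller and callee agree on the (normalized) ABI;
                // what remains is checking that the individual argument and
                // return layouts line up.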

                // We need MIR for this fn
                let body = match M::find_fn(self, instance, args, dest, ret)? {
                    Some(body) => body,
                    None => return Ok(()),
                };

                self.push_stack_frame(
                    instance,
                    span,
                    body,
                    dest,
                    StackPopCleanup::Goto(ret),
                )?;

                // We want to pop this frame again in case there was an error, to put
                // the blame in the right location. Until the 2018 edition is used in
                // the compiler, we have to do this with an immediately invoked function.
                let res = (|| {
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        body.spread_arg,
                        body.args_iter()
                            .map(|local|
                                (local, self.layout_of_local(self.frame(), local, None).unwrap().ty)
                            )
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // The Rust ABI is special: ZSTs get skipped.
                    let rust_abi = match caller_abi {
                        Abi::Rust | Abi::RustCall => true,
                        _ => false,
                    };
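                    // E.g. a `()` argument is simply not passed under the Rust
                    // ABI, while a non-Rust (e.g. `extern "C"`) callee still
                    // consumes a caller operand for it.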

                    // We have two iterators: where the arguments come from,
                    // and where they go to.

                    // For where they come from: if the ABI is RustCall, we untuple the
                    // last incoming argument. These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for RustCall ABI only).
                    let caller_args: Cow<'_, [OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (&untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(args.iter().map(|&a| Ok(a))
                                .chain((0..untuple_arg.layout.fields.count()).into_iter()
                                    .map(|i| self.operand_field(untuple_arg, i as u64))
                                )
                                .collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?)
                        } else {
                            // Plain argument passing.
                            Cow::from(args)
                        };
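                    // E.g. calling a closure as `FnMut(u32, u32)` arrives here as
                    // `[closure_ref, (a, b)]`; after untupling, the flat argument
                    // list is `[closure_ref, a, b]`.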
                    // Skip ZSTs
                    let mut caller_iter = caller_args.iter()
                        .filter(|op| !rust_abi || !op.layout.is_zst())
                        .map(|op| *op);

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`. If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
                    let mut locals_iter = body.args_iter();
                    while let Some(local) = locals_iter.next() {
                        let dest = self.eval_place(
                            &mir::Place::from(local)
                        )?;
                        if Some(local) == body.spread_arg {
                            // Must be a tuple: `spread_arg` marks a tuple-typed local
                            // whose fields each receive one flat caller argument --
                            // the callee-side counterpart of the untupling above.
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(dest, i as u64)?;
                                self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                            }
                        } else {
                            // Normal argument.
                            self.pass_argument(rust_abi, &mut caller_iter, dest)?;
                        }
                    }
                    // Now we should have no more caller args
                    if caller_iter.next().is_some() {
                        trace!("Caller has passed too many args");
                        return err!(FunctionArgCountMismatch);
                    }
                    // Don't forget to check the return type!
                    if let Some(caller_ret) = dest {
                        let callee_ret = self.eval_place(
                            &mir::Place::RETURN_PLACE
                        )?;
                        if !Self::check_argument_compat(
                            rust_abi,
                            caller_ret.layout,
                            callee_ret.layout,
                        ) {
                            return err!(
                                FunctionRetMismatch(caller_ret.layout.ty, callee_ret.layout.ty)
                            );
                        }
                    } else {
                        // The caller has no return place, so the callee must never
                        // return: its return type has to be uninhabited.
                        let local = mir::RETURN_PLACE;
                        let ty = self.frame().body.local_decls[local].ty;
                        if !self.tcx.is_ty_uninhabited_from_any_module(ty) {
                            return err!(FunctionRetMismatch(self.tcx.types.never, ty));
                        }
                    }
                    Ok(())
                })();
                match res {
                    Err(err) => {
                        self.stack.pop();
                        Err(err)
                    }
                    Ok(v) => Ok(v),
                }
            }

            // Cannot use the shim here, because that will only result in infinite recursion.
            ty::InstanceDef::Virtual(_, idx) => {
                let mut args = args.to_vec();
                let ptr_size = self.pointer_size();
                // We have to implement all "object safe receivers". Currently we
                // support built-in pointers (&, &mut, Box) as well as unsized-self. We do
                // not yet support custom self types.
                // Also see librustc_codegen_llvm/abi.rs and librustc_codegen_llvm/mir/block.rs.
                let receiver_place = match args[0].layout.ty.builtin_deref(true) {
                    Some(_) => {
                        // Built-in pointer.
                        self.deref_operand(args[0])?
                    }
                    None => {
                        // Unsized self.
                        args[0].assert_mem_place()
                    }
                };
                // Find and consult vtable
                let vtable = receiver_place.vtable();
                let vtable_slot = vtable.ptr_offset(ptr_size * (idx as u64 + 3), self)?;
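                // NB: the first three vtable slots hold the drop-in-place
                // function, size, and align, so method `idx` lives at offset
                // `(idx + 3) * ptr_size`.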
                let vtable_slot = self.memory.check_ptr_access(
                    vtable_slot,
                    ptr_size,
                    self.tcx.data_layout.pointer_align.abi,
                )?.expect("cannot be a ZST");
                let fn_ptr = self.memory.get(vtable_slot.alloc_id)?
                    .read_ptr_sized(self, vtable_slot)?.not_undef()?;
                let drop_fn = self.memory.get_fn(fn_ptr)?;

                // `*mut receiver_place.layout.ty` is almost the layout that we
                // want for args[0]: We have to project to field 0 because we want
                // a thin pointer.
                assert!(receiver_place.layout.is_unsized());
                let receiver_ptr_ty = self.tcx.mk_mut_ptr(receiver_place.layout.ty);
                let this_receiver_ptr = self.layout_of(receiver_ptr_ty)?.field(self, 0)?;
                // Adjust receiver argument.
                args[0] = OpTy::from(ImmTy {
                    layout: this_receiver_ptr,
                    imm: Immediate::Scalar(receiver_place.ptr.into())
                });
                trace!("Patched self operand to {:#?}", args[0]);
                // Recurse with the concrete function.
                self.eval_fn_call(drop_fn, span, caller_abi, &args, dest, ret)
            }
        }
    }
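
    /// Evaluates the drop glue of `instance` on `place` and arranges for
    /// execution to continue at `target` once the drop call returns.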
    fn drop_in_place(
        &mut self,
        place: PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        span: Span,
        target: mir::BasicBlock,
    ) -> InterpResult<'tcx> {
        trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.sty {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(place)?
            }
            _ => (instance, place),
        };

        let arg = ImmTy {
            imm: place.to_ref(),
            layout: self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        };

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);

        self.eval_fn_call(
            FnVal::Instance(instance),
            span,
            Abi::Rust,
            &[arg.into()],
            Some(dest.into()),
            Some(target),
        )
    }
}