// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::borrow::Cow;

use rustc::{mir, ty};
use rustc::ty::layout::{self, TyLayout, LayoutOf};
use syntax::source_map::Span;
use rustc_target::spec::abi::Abi;

use rustc::mir::interpret::{EvalResult, PointerArithmetic, EvalErrorKind, Scalar};
use super::{
    EvalContext, Machine, Immediate, OpTy, PlaceTy, MPlaceTy, Operand, StackPopCleanup
};
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
    #[inline]
    pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> EvalResult<'tcx> {
        if let Some(target) = target {
            self.frame_mut().block = target;
            self.frame_mut().stmt = 0;
            Ok(())
        } else {
            err!(Unreachable)
        }
    }
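
    /// Evaluate a single MIR terminator: adjust the current frame's position,
    /// or push/pop stack frames for calls and returns.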
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> EvalResult<'tcx> {
        use rustc::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.frame().return_place.map(|r| self.dump_place(*r));
                self.pop_stack_frame()?
            }

            Goto { target } => self.goto_block(Some(target))?,

            SwitchInt { ref discr, ref values, ref targets, .. } => {
                let discr = self.read_immediate(self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` case by default, if no match is found.
                let mut target_block = targets[targets.len() - 1];

                for (index, &const_int) in values.iter().enumerate() {
                    // Compare using binary_op, to also support pointer values
                    let const_int = Scalar::from_uint(const_int, discr.layout.size);
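                    // `values` stores the raw bits as `u128`; re-creating the
                    // scalar at the discriminant's own size gives `binary_op`
                    // two operands of identical layout.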
                    let (res, _) = self.binary_op(mir::BinOp::Eq,
                        discr.to_scalar()?, discr.layout,
                        const_int, discr.layout,
                    )?;
                    if res.to_bool()? {
                        target_block = targets[index];
                        break;
                    }
                }

                self.goto_block(Some(target_block))?;
            }

            Call { ref func, ref args, ref destination, .. } => {
                let (dest, ret) = match *destination {
                    Some((ref lv, target)) => (Some(self.eval_place(lv)?), Some(target)),
                    None => (None, None),
                };

                let func = self.eval_operand(func, None)?;
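                // The callee is either a function pointer (read the pointer
                // from memory and resolve it back to an instance) or a
                // directly named `FnDef` item.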
                let (fn_def, abi) = match func.layout.ty.sty {
                    ty::FnPtr(sig) => {
                        let caller_abi = sig.abi();
                        let fn_ptr = self.read_scalar(func)?.to_ptr()?;
                        let instance = self.memory.get_fn(fn_ptr)?;
                        (instance, caller_abi)
                    }
                    ty::FnDef(def_id, substs) => {
                        let sig = func.layout.ty.fn_sig(*self.tcx);
                        (self.resolve(def_id, substs)?, sig.abi())
                    }
                    _ => {
                        let msg = format!("can't handle callee of type {:?}", func.layout.ty);
                        return err!(Unimplemented(msg));
                    }
                };
                let args = self.eval_operands(args)?;
                self.eval_fn_call(
                    fn_def,
                    terminator.source_info.span,
                    abi,
                    &args[..],
                    dest,
                    ret,
                )?;
            }

            Drop { ref location, target, .. } => {
                // FIXME(CTFE): forbid drop in const eval
                let place = self.eval_place(location)?;
                let ty = place.layout.ty;
                trace!("TerminatorKind::drop: {:?}, type {}", location, ty);

                let instance = ::monomorphize::resolve_drop_in_place(*self.tcx, ty);
                self.drop_in_place(
                    place,
                    instance,
                    terminator.source_info.span,
                    target,
                )?;
            }

            Assert { ref cond, expected, ref msg, target, .. } => {
                let cond_val = self.read_immediate(self.eval_operand(cond, None)?)?
                    .to_scalar()?.to_bool()?;
                if expected == cond_val {
                    self.goto_block(Some(target))?;
                } else {
                    // Compute error message
                    use rustc::mir::interpret::EvalErrorKind::*;
                    return match *msg {
                        BoundsCheck { ref len, ref index } => {
                            let len = self.read_immediate(self.eval_operand(len, None)?)
                                .expect("can't eval len").to_scalar()?
                                .to_bits(self.memory().pointer_size())? as u64;
                            let index = self.read_immediate(self.eval_operand(index, None)?)
                                .expect("can't eval index").to_scalar()?
                                .to_bits(self.memory().pointer_size())? as u64;
                            err!(BoundsCheck { len, index })
                        }
                        Overflow(op) => Err(Overflow(op).into()),
                        OverflowNeg => Err(OverflowNeg.into()),
                        DivisionByZero => Err(DivisionByZero.into()),
                        RemainderByZero => Err(RemainderByZero.into()),
                        GeneratorResumedAfterReturn |
                        GeneratorResumedAfterPanic => unimplemented!(),
                        _ => bug!(),
                    };
                }
            }

            Yield { .. } |
            GeneratorDrop |
            DropAndReplace { .. } |
            Resume |
            Abort => unimplemented!("{:#?}", terminator.kind),
            FalseEdges { .. } => bug!("should have been eliminated by \
                                      `simplify_branches` mir pass"),
            FalseUnwind { .. } => bug!("should have been eliminated by \
                                      `simplify_branches` mir pass"),
            Unreachable => return err!(Unreachable),
        }

        Ok(())
    }

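    /// Two layouts are "compatible" for argument passing if they are the same
    /// type, or if both are scalars of the same primitive kind (size and
    /// int/float/pointer class); their validity ranges may differ.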
    fn check_argument_compat(
        caller: TyLayout<'tcx>,
        callee: TyLayout<'tcx>,
    ) -> bool {
        if caller.ty == callee.ty {
            // No question
            return true;
        }
        // Compare layout
        match (&caller.abi, &callee.abi) {
            (layout::Abi::Scalar(ref caller), layout::Abi::Scalar(ref callee)) =>
                // Different valid ranges are okay (once we enforce validity,
                // that will take care to make it UB to leave the range, just
                // like for transmute).
                caller.value == callee.value,
            _ => false,
        }
    }

    /// Pass a single argument, checking the types for compatibility.
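    /// With `skip_zst` set (Rust/RustCall ABIs), a zero-sized callee argument
    /// consumes nothing from the caller's iterator, since ZSTs are not passed.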
    fn pass_argument(
        &mut self,
        skip_zst: bool,
        caller_arg: &mut impl Iterator<Item=OpTy<'tcx, M::PointerTag>>,
        callee_arg: PlaceTy<'tcx, M::PointerTag>,
    ) -> EvalResult<'tcx> {
        if skip_zst && callee_arg.layout.is_zst() {
            // Nothing to do.
            trace!("Skipping callee ZST");
            return Ok(());
        }
        let caller_arg = caller_arg.next()
            .ok_or_else(|| EvalErrorKind::FunctionArgCountMismatch)?;
        if skip_zst {
            debug_assert!(!caller_arg.layout.is_zst(), "ZSTs must have been already filtered out");
        }
        // Now, check
        if !Self::check_argument_compat(caller_arg.layout, callee_arg.layout) {
            return err!(FunctionArgMismatch(caller_arg.layout.ty, callee_arg.layout.ty));
        }
        // We allow some transmutes here
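        // (e.g. a caller passing `&mut T` where the callee expects `*mut T`:
        // same scalar kind, different Rust type).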
        self.copy_op_transmute(caller_arg, callee_arg)
    }

    /// Call this function -- pushing the stack frame and initializing the arguments.
    fn eval_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        span: Span,
        caller_abi: Abi,
        args: &[OpTy<'tcx, M::PointerTag>],
        dest: Option<PlaceTy<'tcx, M::PointerTag>>,
        ret: Option<mir::BasicBlock>,
    ) -> EvalResult<'tcx> {
        trace!("eval_fn_call: {:#?}", instance);
        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                if caller_abi != Abi::RustIntrinsic {
                    return err!(FunctionAbiMismatch(caller_abi, Abi::RustIntrinsic));
                }
                // The intrinsic itself cannot diverge, so if we got here without a return
                // place, the call is unreachable (can happen e.g. for transmute returning `!`).
                let dest = match dest {
                    Some(dest) => dest,
                    None => return err!(Unreachable)
                };
                M::call_intrinsic(self, instance, args, dest)?;
                // No stack frame gets pushed, the main loop will just act as if the
                // call completed and is returning to the current frame.
                self.goto_block(ret)?;
                self.dump_place(*dest);
                Ok(())
            }
            ty::InstanceDef::VtableShim(..) |
            ty::InstanceDef::ClosureOnceShim { .. } |
            ty::InstanceDef::FnPtrShim(..) |
            ty::InstanceDef::DropGlue(..) |
            ty::InstanceDef::CloneShim(..) |
            ty::InstanceDef::Item(_) => {
                // ABI check
                {
                    let callee_abi = {
                        let instance_ty = instance.ty(*self.tcx);
                        match instance_ty.sty {
                            ty::FnDef(..) =>
                                instance_ty.fn_sig(*self.tcx).abi(),
                            ty::Closure(..) => Abi::RustCall,
                            ty::Generator(..) => Abi::Rust,
                            _ => bug!("unexpected callee ty: {:?}", instance_ty),
                        }
                    };
                    // Rust and RustCall are compatible
                    let normalize_abi = |abi| if abi == Abi::RustCall { Abi::Rust } else { abi };
                    if normalize_abi(caller_abi) != normalize_abi(callee_abi) {
                        return err!(FunctionAbiMismatch(caller_abi, callee_abi));
                    }
                }

                // We need MIR for this fn
                let mir = match M::find_fn(self, instance, args, dest, ret)? {
                    Some(mir) => mir,
                    None => return Ok(()),
                };

                self.push_stack_frame(
                    instance,
                    span,
                    mir,
                    dest,
                    StackPopCleanup::Goto(ret),
                )?;
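                // When this frame is popped again, `StackPopCleanup::Goto`
                // transfers control to `ret` in the caller (`None` meaning the
                // call was not expected to return).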

                // We want to pop this frame again in case there was an error, to put
                // the blame in the right location. Until the 2018 edition is used in
                // the compiler, we have to do this with an immediately invoked function.
                let res = (|| {
                    trace!(
                        "caller ABI: {:?}, args: {:#?}",
                        caller_abi,
                        args.iter()
                            .map(|arg| (arg.layout.ty, format!("{:?}", **arg)))
                            .collect::<Vec<_>>()
                    );
                    trace!(
                        "spread_arg: {:?}, locals: {:#?}",
                        mir.spread_arg,
                        mir.args_iter()
                            .map(|local|
                                (local, self.layout_of_local(self.frame(), local).unwrap().ty)
                            )
                            .collect::<Vec<_>>()
                    );

                    // Figure out how to pass which arguments.
                    // We have two iterators: Where the arguments come from,
                    // and where they go to.
                    let skip_zst = match caller_abi {
                        Abi::Rust | Abi::RustCall => true,
                        _ => false,
                    };

                    // For where they come from: If the ABI is RustCall, we untuple the
                    // last incoming argument. These two iterators do not have the same type,
                    // so to keep the code paths uniform we accept an allocation
                    // (for RustCall ABI only).
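                    // E.g. a "rust-call" invocation `f(a, (b, c))` arrives here
                    // as the two operands `a` and `(b, c)`; after untupling, the
                    // callee sees the flat argument stream `a`, `b`, `c`.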
                    let caller_args: Cow<[OpTy<'tcx, M::PointerTag>]> =
                        if caller_abi == Abi::RustCall && !args.is_empty() {
                            // Untuple
                            let (&untuple_arg, args) = args.split_last().unwrap();
                            trace!("eval_fn_call: Will pass last argument by untupling");
                            Cow::from(args.iter().map(|&a| Ok(a))
                                .chain((0..untuple_arg.layout.fields.count()).into_iter()
                                    .map(|i| self.operand_field(untuple_arg, i as u64))
                                )
                                .collect::<EvalResult<Vec<OpTy<'tcx, M::PointerTag>>>>()?)
                        } else {
                            // Plain arg passing
                            Cow::from(args)
                        };
                    // Skip ZSTs
                    let mut caller_iter = caller_args.iter()
                        .filter(|op| !skip_zst || !op.layout.is_zst())
                        .map(|op| *op);

                    // Now we have to spread them out across the callee's locals,
                    // taking into account the `spread_arg`. If we could write
                    // this as a single iterator (that handles `spread_arg`), then
                    // `pass_argument` would be the loop body. It takes care to
                    // not advance `caller_iter` for ZSTs.
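                    // E.g. a closure body whose MIR declares a `spread_arg` has
                    // that tuple-typed local filled field by field from the
                    // (already untupled) caller stream.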
                    let mut locals_iter = mir.args_iter();
                    while let Some(local) = locals_iter.next() {
                        let dest = self.eval_place(&mir::Place::Local(local))?;
                        if Some(local) == mir.spread_arg {
                            // Must be a tuple
                            for i in 0..dest.layout.fields.count() {
                                let dest = self.place_field(dest, i as u64)?;
                                self.pass_argument(skip_zst, &mut caller_iter, dest)?;
                            }
                        } else {
                            // Normal argument
                            self.pass_argument(skip_zst, &mut caller_iter, dest)?;
                        }
                    }
                    // Now we should have no more caller args
                    if caller_iter.next().is_some() {
                        trace!("Caller has too many args over");
                        return err!(FunctionArgCountMismatch);
                    }
                    // Don't forget to check the return type!
                    if let Some(caller_ret) = dest {
                        let callee_ret = self.eval_place(&mir::Place::Local(mir::RETURN_PLACE))?;
                        if !Self::check_argument_compat(caller_ret.layout, callee_ret.layout) {
                            return err!(FunctionRetMismatch(
                                caller_ret.layout.ty, callee_ret.layout.ty
                            ));
                        }
                    } else {
                        // No return place: the callee must not return either,
                        // i.e. its declared return type must be uninhabited.
                        let callee_layout =
                            self.layout_of_local(self.frame(), mir::RETURN_PLACE)?;
                        if !callee_layout.abi.is_uninhabited() {
                            return err!(FunctionRetMismatch(
                                self.tcx.types.never, callee_layout.ty
                            ));
                        }
                    }
                    Ok(())
                })();
                match res {
                    Err(err) => {
                        // Pop the frame we just pushed so the error gets blamed
                        // on the call site.
                        self.stack.pop();
                        Err(err)
                    }
                    Ok(v) => Ok(v),
                }
            }
            // Cannot use the shim here, because that will only result in infinite recursion.
            ty::InstanceDef::Virtual(_, idx) => {
                let ptr_size = self.pointer_size();
                let ptr_align = self.tcx.data_layout.pointer_align.abi;
                let ptr = self.deref_operand(args[0])?;
                let vtable = ptr.vtable()?;
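                // The first three vtable slots hold the drop-in-place pointer,
                // size, and alignment; method pointers start at index 3, hence
                // the `idx + 3` offset below.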
                let fn_ptr = self.memory.read_ptr_sized(
                    vtable.offset(ptr_size * (idx as u64 + 3), self)?,
                    ptr_align
                )?.to_ptr()?;
                let instance = self.memory.get_fn(fn_ptr)?;

                // We have to patch the self argument, in particular get the layout
                // expected by the actual function. Cannot just use "field 0" due to
                // fat pointers.
                let mut args = args.to_vec();
                let pointee = args[0].layout.ty.builtin_deref(true).unwrap().ty;
                let fake_fat_ptr_ty = self.tcx.mk_mut_ptr(pointee);
                args[0].layout = self.layout_of(fake_fat_ptr_ty)?.field(self, 0)?;
                args[0].op = Operand::Immediate(Immediate::Scalar(ptr.ptr.into())); // strip vtable
                trace!("Patched self operand to {:#?}", args[0]);
                // Recurse with the concrete function.
                self.eval_fn_call(instance, span, caller_abi, &args, dest, ret)
            }
        }
    }
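
    /// Drop the value at `place`: `instance` is the `drop_in_place` instance
    /// for the place's static type; for trait objects the concrete drop
    /// instance is re-resolved from the vtable below.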
    fn drop_in_place(
        &mut self,
        place: PlaceTy<'tcx, M::PointerTag>,
        instance: ty::Instance<'tcx>,
        span: Span,
        target: mir::BasicBlock,
    ) -> EvalResult<'tcx> {
        trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance);
        // We take the address of the object. This may well be unaligned, which is fine
        // for us here. However, unaligned accesses will probably make the actual drop
        // implementation fail -- a problem shared by rustc.
        let place = self.force_allocation(place)?;

        let (instance, place) = match place.layout.ty.sty {
            ty::Dynamic(..) => {
                // Dropping a trait object.
                self.unpack_dyn_trait(place)?
            }
            _ => (instance, place),
        };

        // Drop glue takes `*mut T` as its single argument.
        let arg = OpTy {
            op: Operand::Immediate(place.to_ref()),
            layout: self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
        };

        let ty = self.tcx.mk_unit(); // return type is ()
        let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);

        self.eval_fn_call(
            instance,
            span,
            Abi::Rust,
            &[arg],
            Some(dest.into()),
            Some(target),
        )
    }
}