use rustc::mir;
use rustc::ty::{self, Ty};
use rustc::ty::layout::{LayoutOf, Size};
use syntax::codemap::Span;
use rustc_target::spec::abi::Abi;

use rustc::mir::interpret::{EvalResult, Scalar, Value};
use super::{EvalContext, Place, Machine, ValTy};

use rustc_data_structures::indexed_vec::Idx;
use interpret::memory::HasMemory;

impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
    pub fn goto_block(&mut self, target: mir::BasicBlock) {
        self.frame_mut().block = target;
        self.frame_mut().stmt = 0;
    }
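
    /// Evaluates the given MIR terminator, transferring control to the target
    /// block or popping the current stack frame.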
    pub(super) fn eval_terminator(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
    ) -> EvalResult<'tcx> {
        use rustc::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.dump_local(self.frame().return_place);
                self.pop_stack_frame()?
            }

            Goto { target } => self.goto_block(target),
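
            // Pick the branch target whose value matches the discriminant.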
            SwitchInt {
                ref discr,
                ref values,
                ref targets,
                ..
            } => {
                let discr_val = self.eval_operand(discr)?;
                let discr_prim = self.value_to_scalar(discr_val)?;
                let discr_layout = self.layout_of(discr_val.ty)?;
                trace!("SwitchInt({:?}, {:#?})", discr_prim, discr_layout);

                // Branch to the `otherwise` case by default, if no match is found.
                let mut target_block = targets[targets.len() - 1];

                for (index, &const_int) in values.iter().enumerate() {
                    // Compare using binary_op
                    let const_int = Scalar::Bits {
                        bits: const_int,
                        size: discr_layout.size.bytes() as u8,
                    };
                    let res = self.binary_op(
                        mir::BinOp::Eq,
                        discr_prim, discr_val.ty,
                        const_int, discr_val.ty,
                    )?;
                    if res.0.to_bits(Size::from_bytes(1))? != 0 {
                        target_block = targets[index];
                        break;
                    }
                }

                self.goto_block(target_block);
            }

            Call {
                ref func,
                ref args,
                ref destination,
                ..
            } => {
                let destination = match *destination {
                    Some((ref lv, target)) => Some((self.eval_place(lv)?, target)),
                    None => None,
                };

                let func = self.eval_operand(func)?;
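                // The callee is either a function pointer (`TyFnPtr`) or a
                // statically known function (`TyFnDef`).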
                let (fn_def, sig) = match func.ty.sty {
                    ty::TyFnPtr(sig) => {
                        let fn_ptr = self.value_to_scalar(func)?.to_ptr()?;
                        let instance = self.memory.get_fn(fn_ptr)?;
                        let instance_ty = instance.ty(*self.tcx);
                        match instance_ty.sty {
                            ty::TyFnDef(..) => {
                                let real_sig = instance_ty.fn_sig(*self.tcx);
                                let sig = self.tcx.normalize_erasing_late_bound_regions(
                                    ty::ParamEnv::reveal_all(),
                                    &sig,
                                );
                                let real_sig = self.tcx.normalize_erasing_late_bound_regions(
                                    ty::ParamEnv::reveal_all(),
                                    &real_sig,
                                );
                                if !self.check_sig_compat(sig, real_sig)? {
                                    return err!(FunctionPointerTyMismatch(real_sig, sig));
                                }
                            }
                            ref other => bug!("instance def ty: {:?}", other),
                        }
                        (instance, sig)
                    }
                    ty::TyFnDef(def_id, substs) => (
                        self.resolve(def_id, substs)?,
                        func.ty.fn_sig(*self.tcx),
                    ),
                    _ => {
                        let msg = format!("can't handle callee of type {:?}", func.ty);
                        return err!(Unimplemented(msg));
                    }
                };
                let args = self.operands_to_args(args)?;
                let sig = self.tcx.normalize_erasing_late_bound_regions(
                    ty::ParamEnv::reveal_all(),
                    &sig,
                );
                self.eval_fn_call(
                    fn_def,
                    destination,
                    &args,
                    terminator.source_info.span,
                    sig,
                )?;
            }

            Drop {
                ref location,
                target,
                ..
            } => {
                // FIXME(CTFE): forbid drop in const eval
                let place = self.eval_place(location)?;
                let ty = self.place_ty(location);
                let ty = self.tcx.subst_and_normalize_erasing_regions(
                    self.substs(),
                    ty::ParamEnv::reveal_all(),
                    &ty,
                );
                trace!("TerminatorKind::drop: {:?}, type {}", location, ty);

                let instance = ::monomorphize::resolve_drop_in_place(*self.tcx, ty);
                self.drop_place(
                    place,
                    instance,
                    ty,
                    terminator.source_info.span,
                    target,
                )?;
            }

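            // On failure, turn the MIR assert message into the matching `EvalErrorKind`.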
            Assert {
                ref cond,
                expected,
                ref msg,
                target,
                ..
            } => {
                let cond_val = self.eval_operand_to_scalar(cond)?.to_bool()?;
                if expected == cond_val {
                    self.goto_block(target);
                } else {
                    use rustc::mir::interpret::EvalErrorKind::*;
                    return match *msg {
                        BoundsCheck { ref len, ref index } => {
                            let len = self.eval_operand_to_scalar(len)
                                .expect("can't eval len")
                                .to_bits(self.memory().pointer_size())? as u64;
                            let index = self.eval_operand_to_scalar(index)
                                .expect("can't eval index")
                                .to_bits(self.memory().pointer_size())? as u64;
                            err!(BoundsCheck { len, index })
                        }
                        Overflow(op) => Err(Overflow(op).into()),
                        OverflowNeg => Err(OverflowNeg.into()),
                        DivisionByZero => Err(DivisionByZero.into()),
                        RemainderByZero => Err(RemainderByZero.into()),
                        GeneratorResumedAfterReturn |
                        GeneratorResumedAfterPanic => unimplemented!(),
                    };
                }
            }

            Yield { .. } => unimplemented!("{:#?}", terminator.kind),
            GeneratorDrop => unimplemented!(),
            DropAndReplace { .. } => unimplemented!(),
            Resume => unimplemented!(),
            Abort => unimplemented!(),
            FalseEdges { .. } => bug!("should have been eliminated by `simplify_branches` mir pass"),
            FalseUnwind { .. } => bug!("should have been eliminated by `simplify_branches` mir pass"),
            Unreachable => return err!(Unreachable),
        }

        Ok(())
    }

    /// Decides whether it is okay to call the method with signature `real_sig` using signature `sig`.
    /// FIXME: This should take into account the platform-dependent ABI description.
    fn check_sig_compat(
        &self,
        sig: ty::FnSig<'tcx>,
        real_sig: ty::FnSig<'tcx>,
    ) -> EvalResult<'tcx, bool> {
        fn check_ty_compat<'tcx>(ty: Ty<'tcx>, real_ty: Ty<'tcx>) -> bool {
            if ty == real_ty {
                return true;
            } // This is actually a fast pointer comparison
            match (&ty.sty, &real_ty.sty) {
                // Permit changing the pointer type of raw pointers and references as well as
                // mutability of raw pointers.
                // TODO: Should not be allowed when fat pointers are involved.
                (&ty::TyRawPtr(_), &ty::TyRawPtr(_)) => true,
                (&ty::TyRef(_, _, _), &ty::TyRef(_, _, _)) => {
                    ty.is_mutable_pointer() == real_ty.is_mutable_pointer()
                }
                // rule out everything else
                _ => false,
            }
        }
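
        // Fast path: identical ABI and arity, with pairwise-compatible types.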
        if sig.abi == real_sig.abi &&
            sig.variadic == real_sig.variadic &&
            sig.inputs_and_output.len() == real_sig.inputs_and_output.len() &&
            sig.inputs_and_output
                .iter()
                .zip(real_sig.inputs_and_output)
                .all(|(ty, real_ty)| check_ty_compat(ty, real_ty))
        {
            // Definitely good.
            return Ok(true);
        }

        if sig.variadic || real_sig.variadic {
            // We're not touching this
            return Ok(false);
        }

        // We need to allow what comes up when a non-capturing closure is cast to a fn().
        match (sig.abi, real_sig.abi) {
            // Check the ABIs. This makes the test here non-symmetric.
            (Abi::Rust, Abi::RustCall)
                if check_ty_compat(sig.output(), real_sig.output()) &&
                    real_sig.inputs_and_output.len() == 3 =>
            {
                // First argument of real_sig must be a ZST
                let fst_ty = real_sig.inputs_and_output[0];
                if self.layout_of(fst_ty)?.is_zst() {
                    // Second argument must be a tuple matching the argument list of sig
                    let snd_ty = real_sig.inputs_and_output[1];
                    match snd_ty.sty {
                        ty::TyTuple(tys) if sig.inputs().len() == tys.len() => {
                            if sig.inputs()
                                .iter()
                                .zip(tys)
                                .all(|(&ty, real_ty)| check_ty_compat(ty, real_ty))
                            {
                                return Ok(true);
                            }
                        }
                        _ => {}
                    }
                }
            }
            _ => {}
        }

        // Nope, this doesn't work.
        Ok(false)
    }

    fn eval_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        destination: Option<(Place, mir::BasicBlock)>,
        args: &[ValTy<'tcx>],
        span: Span,
        sig: ty::FnSig<'tcx>,
    ) -> EvalResult<'tcx> {
        trace!("eval_fn_call: {:#?}", instance);
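        // Dispatch on the kind of instance: intrinsics, shims, regular items,
        // and virtual (trait object) calls all pass arguments differently.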
        match instance.def {
            ty::InstanceDef::Intrinsic(..) => {
                let (ret, target) = match destination {
                    Some(dest) => dest,
                    _ => return err!(Unreachable),
                };
                let ty = sig.output();
                let layout = self.layout_of(ty)?;
                M::call_intrinsic(self, instance, args, ret, layout, target)?;
                self.dump_local(ret);
                Ok(())
            }
            // FIXME: figure out why we can't just go through the shim
            ty::InstanceDef::ClosureOnceShim { .. } => {
                // Push the stack frame, and potentially be entirely done if the call got hooked
                if M::eval_fn_call(self, instance, destination, args, span, sig)? {
                    return Ok(());
                }

                let mut arg_locals = self.frame().mir.args_iter();
                match sig.abi {
                    // Closure called as `FnOnce`: arguments map directly onto the shim's locals.
                    Abi::RustCall => {
                        for (arg_local, &valty) in arg_locals.zip(args) {
                            let dest = self.eval_place(&mir::Place::Local(arg_local))?;
                            self.write_value(valty, dest)?;
                        }
                    }
                    // Non-capturing closure called as a fn pointer: inject a ZST
                    // for the closure object (i.e. do nothing) and pack the
                    // arguments into the tuple local.
                    Abi::Rust => {
                        trace!(
                            "arg_locals: {:#?}",
                            self.frame().mir.args_iter().collect::<Vec<_>>()
                        );
                        trace!("args: {:#?}", args);
                        let local = arg_locals.nth(1).unwrap();
                        for (i, &valty) in args.iter().enumerate() {
                            let dest = self.eval_place(&mir::Place::Local(local).field(
                                mir::Field::new(i),
                                valty.ty,
                            ))?;
                            self.write_value(valty, dest)?;
                        }
                    }
                    _ => bug!("bad ABI for ClosureOnceShim: {:?}", sig.abi),
                }
                Ok(())
            }
            ty::InstanceDef::FnPtrShim(..) |
            ty::InstanceDef::DropGlue(..) |
            ty::InstanceDef::CloneShim(..) |
            ty::InstanceDef::Item(_) => {
                // Push the stack frame, and potentially be entirely done if the call got hooked
                if M::eval_fn_call(self, instance, destination, args, span, sig)? {
                    return Ok(());
                }

                // Pass the arguments
                let mut arg_locals = self.frame().mir.args_iter();
                trace!("ABI: {:?}", sig.abi);
                trace!(
                    "arg_locals: {:#?}",
                    self.frame().mir.args_iter().collect::<Vec<_>>()
                );
                trace!("args: {:#?}", args);
                match sig.abi {
                    Abi::RustCall => {
                        // rust-call bundles all user arguments into one tuple,
                        // so there are exactly two operands here.
                        assert_eq!(args.len(), 2);

                        {
                            // write first argument
                            let first_local = arg_locals.next().unwrap();
                            let dest = self.eval_place(&mir::Place::Local(first_local))?;
                            self.write_value(args[0], dest)?;
                        }

                        // unpack and write all other args
                        let layout = self.layout_of(args[1].ty)?;
                        if let ty::TyTuple(_) = args[1].ty.sty {
                            if layout.is_zst() {
                                // Nothing to do, no need to unpack zsts
                                return Ok(());
                            }
                            if self.frame().mir.args_iter().count() == layout.fields.count() + 1 {
                                for (i, arg_local) in arg_locals.enumerate() {
                                    let field = mir::Field::new(i);
                                    let (value, layout) = self.read_field(args[1].value, None, field, layout)?;
                                    let dest = self.eval_place(&mir::Place::Local(arg_local))?;
                                    let valty = ValTy {
                                        value,
                                        ty: layout.ty,
                                    };
                                    self.write_value(valty, dest)?;
                                }
                            } else {
                                trace!("manual impl of rust-call ABI");
                                // called a manual impl of a rust-call function,
                                // so the tuple is passed on as-is
                                let dest = self.eval_place(
                                    &mir::Place::Local(arg_locals.next().unwrap()),
                                )?;
                                self.write_value(args[1], dest)?;
                            }
372 "rust-call ABI tuple argument was {:#?}, {:#?}",
                        for (arg_local, &valty) in arg_locals.zip(args) {
                            let dest = self.eval_place(&mir::Place::Local(arg_local))?;
                            self.write_value(valty, dest)?;
                        }
                    }
                }
                Ok(())
            }
            // cannot use the shim here, because that will only result in infinite recursion
            ty::InstanceDef::Virtual(_, idx) => {
                let ptr_size = self.memory.pointer_size();
                let ptr_align = self.tcx.data_layout.pointer_align;
                let (ptr, vtable) = self.into_ptr_vtable_pair(args[0].value)?;
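                // The first three vtable slots hold drop glue, size, and align;
                // method pointers start at slot 3, hence `idx + 3`.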
                let fn_ptr = self.memory.read_ptr_sized(
                    vtable.offset(ptr_size * (idx as u64 + 3), &self)?,
                    ptr_align,
                )?.read()?.to_ptr()?;
                let instance = self.memory.get_fn(fn_ptr)?;
                let mut args = args.to_vec();
                let ty = self.layout_of(args[0].ty)?.field(&self, 0)?.ty;
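                // Peel off the vtable: the callee expects a thin receiver pointer.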
                args[0].ty = ty;
                args[0].value = Value::Scalar(ptr);
                // recurse with concrete function
                self.eval_fn_call(instance, destination, &args, span, sig)
            }
        }
    }
}