1 use rustc::ty::{self, Ty};
2 use rustc::ty::layout::{self, Align, LayoutOf, Size};
3 use rustc::hir::def_id::{DefId, CRATE_DEF_INDEX};
5 use rustc_data_structures::indexed_vec::Idx;
6 use rustc_target::spec::abi::Abi;
8 use syntax::codemap::Span;
16 use super::memory::MemoryKind;
// Writes the discriminant for `variant_index` into the enum value at `dest`
// (NOTE(review): some parameter lines are elided in this view — presumably
// `dest_ty: Ty<'tcx>`, `dest: Place`, `variant_index: usize`; confirm against
// the full file). The encoding depends on the layout of `dest_ty`.
18 fn write_discriminant_value<'a, 'mir, 'tcx: 'a + 'mir>(
19 ecx: &mut EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>>,
23 ) -> EvalResult<'tcx> {
24 let layout = ecx.layout_of(dest_ty)?;
26 match layout.variants {
// Univariant layout: nothing to write; any other requested variant
// must be uninhabited (checked by the assertion below).
27 layout::Variants::Single { index } => {
28 if index != variant_index {
29 // If the layout of an enum is `Single`, all
30 // other variants are necessarily uninhabited.
31 assert_eq!(layout.for_variant(&ecx, variant_index).abi,
32 layout::Abi::Uninhabited);
// Tagged layout: store the variant's declared discriminant into
// field 0 (the tag field) of the destination.
35 layout::Variants::Tagged { .. } => {
36 let discr_val = dest_ty.ty_adt_def().unwrap()
37 .discriminant_for_variant(*ecx.tcx, variant_index)
40 let (discr_dest, discr) = ecx.place_field(dest, mir::Field::new(0), layout)?;
41 ecx.write_scalar(discr_dest, Scalar::from_u128(discr_val), discr.ty)?;
// Niche-filling layout: non-dataful variants are encoded as otherwise
// invalid values ("niches") of field 0; the dataful variant needs no
// explicit tag write, hence the `!=` guard below.
43 layout::Variants::NicheFilling {
49 if variant_index != dataful_variant {
50 let (niche_dest, niche) =
51 ecx.place_field(dest, mir::Field::new(0), layout)?;
// Map the variant index into the niche value range, offset by the
// layout's chosen `niche_start` (wrapping arithmetic on purpose).
52 let niche_value = ((variant_index - niche_variants.start()) as u128)
53 .wrapping_add(niche_start);
54 ecx.write_scalar(niche_dest, Scalar::from_u128(niche_value), niche.ty)?;
// Extension trait adding miri-specific call-handling helpers to
// `EvalContext`. NOTE(review): several method headers are elided in this
// view; the parameter fragments below belong to those elided signatures.
62 pub trait EvalContextExt<'tcx> {
// (elided fn header — this method receives a basic block to jump to when done)
69 dest_block: mir::BasicBlock,
70 ) -> EvalResult<'tcx>;
// Resolves an absolute item path (e.g. `["libc", "_SC_PAGESIZE"]`) to a
// monomorphic instance; see the impl below for the lookup strategy.
72 fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>>;
// (elided fn header — takes an instance plus an optional return destination)
76 instance: ty::Instance<'tcx>,
77 destination: Option<(Place, mir::BasicBlock)>,
81 ) -> EvalResult<'tcx>;
// (elided fn header returning `bool` — matches `eval_fn_call` in the impl)
85 instance: ty::Instance<'tcx>,
86 destination: Option<(Place, mir::BasicBlock)>,
90 ) -> EvalResult<'tcx, bool>;
// Writes a zero ("null") scalar of type `dest_ty` into `dest`.
92 fn write_null(&mut self, dest: Place, dest_ty: Ty<'tcx>) -> EvalResult<'tcx>;
95 impl<'a, 'mir, 'tcx: 'mir + 'a> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>> {
// eval_fn_call: intercepts a function call before normal MIR evaluation.
// NOTE(review): the `fn` line itself is elided in this view; the parameter
// fragments below belong to its signature.
98 instance: ty::Instance<'tcx>,
99 destination: Option<(Place, mir::BasicBlock)>,
100 args: &[ValTy<'tcx>],
102 sig: ty::FnSig<'tcx>,
103 ) -> EvalResult<'tcx, bool> {
104 trace!("eval_fn_call: {:#?}, {:#?}", instance, destination);
106 let def_id = instance.def_id();
107 let item_path = self.tcx.absolute_item_path_str(def_id);
// Hook selected libstd functions by their absolute item path.
108 if item_path.starts_with("std::") {
109 //println!("{}", item_path);
112 "std::sys::unix::thread::guard::init" | "std::sys::unix::thread::guard::current" => {
113 // Return None, as it doesn't make sense to return Some, because miri detects stack overflow itself.
114 let ret_ty = sig.output();
116 ty::TyAdt(ref adt_def, _) => {
117 assert!(adt_def.is_enum(), "Unexpected return type for {}", item_path);
// Locate the `None` variant by name and write its discriminant into
// the return place, i.e. make the call return `None`.
118 let none_variant_index = adt_def.variants.iter().position(|def| {
119 def.name.as_str() == "None"
120 }).expect("No None variant");
121 let (return_place, return_to_block) = destination.unwrap();
122 write_discriminant_value(self, ret_ty, return_place, none_variant_index)?;
123 self.goto_block(return_to_block);
126 _ => panic!("Unexpected return type for {}", item_path)
129 "std::sys::unix::fast_thread_local::register_dtor" => {
130 // TODO: register the dtor
131 let (_return_place, return_to_block) = destination.unwrap();
132 self.goto_block(return_to_block);
// Hook the `align_offset` lang item: conservatively report "cannot be
// aligned" by returning usize::MAX truncated to the pointer width.
138 if self.tcx.lang_items().align_offset_fn() == Some(instance.def.def_id()) {
139 // FIXME: return a real value in case the target allocation has an
140 // alignment bigger than the one requested
141 let n = u128::max_value();
// Shift left then right to truncate u128::MAX to the target's usize width.
142 let amt = 128 - self.memory.pointer_size().bytes() * 8;
143 let (dest, return_to_block) = destination.unwrap();
144 let ty = self.tcx.types.usize;
145 self.write_scalar(dest, Scalar::from_u128((n << amt) >> amt), ty)?;
146 self.goto_block(return_to_block);
// Load the callee's MIR; if none is available, fall back to
// `call_missing_fn`, which emulates known library functions.
150 let mir = match self.load_mir(instance.def) {
152 Err(EvalError { kind: EvalErrorKind::NoMirFor(path), .. }) => {
153 self.call_missing_fn(
162 Err(other) => return Err(other),
// Decide what happens when the callee's frame is popped: jump to the
// caller's block, or nothing if there is no destination.
165 let (return_place, return_to_block) = match destination {
166 Some((place, block)) => (place, StackPopCleanup::Goto(block)),
167 None => (Place::undef(), StackPopCleanup::None),
170 self.push_stack_frame(
// C-ABI emulation: handles calls to external C functions by `link_name`,
// writing the result into `dest` and finally jumping to `dest_block`.
// NOTE(review): the `fn` header is elided in this view; the parameter
// fragments below belong to its signature.
184 args: &[ValTy<'tcx>],
187 dest_block: mir::BasicBlock,
188 ) -> EvalResult<'tcx> {
// Prefer an explicit `#[link_name = "..."]` attribute over the item name.
189 let attrs = self.tcx.get_attrs(def_id);
190 let link_name = match attr::first_attr_value_str_by_name(&attrs, "link_name") {
191 Some(name) => name.as_str(),
192 None => self.tcx.item_name(def_id).as_str(),
195 match &link_name[..] {
// malloc: size 0 yields null (via write_null); otherwise allocate
// C-kind memory at the platform's pointer alignment.
197 let size = self.value_to_scalar(args[0])?.to_u64()?;
199 self.write_null(dest, dest_ty)?;
201 let align = self.tcx.data_layout.pointer_align;
202 let ptr = self.memory.allocate(Size::from_bytes(size), align, Some(MemoryKind::C.into()))?;
203 self.write_scalar(dest, Scalar::Ptr(ptr), dest_ty)?;
// free: deallocate C-kind memory.
208 let ptr = self.into_ptr(args[0].value)?;
210 self.memory.deallocate(
213 MemoryKind::C.into(),
219 // TODO: read `syscall` ids like `sysconf` ids and
220 // figure out some way to actually process some of them
222 // libc::syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), GRND_NONBLOCK)
223 // is called if a `HashMap` is created the regular way.
224 match self.value_to_scalar(args[0])?.to_u64()? {
226 return err!(Unimplemented(
227 "miri does not support random number generators".to_owned(),
231 return err!(Unimplemented(
232 format!("miri does not support syscall id {}", id),
// dlsym: always unsupported; the symbol name is read only to produce a
// helpful error message.
239 let _handle = self.into_ptr(args[0].value)?;
240 let symbol = self.into_ptr(args[1].value)?.to_ptr()?;
241 let symbol_name = self.memory.read_c_str(symbol)?;
242 let err = format!("bad c unicode symbol: {:?}", symbol_name);
243 let symbol_name = ::std::str::from_utf8(symbol_name).unwrap_or(&err);
244 return err!(Unimplemented(format!(
245 "miri does not support dynamically loading libraries (requested symbol: {})",
250 "__rust_maybe_catch_panic" => {
251 // fn __rust_maybe_catch_panic(f: fn(*mut u8), data: *mut u8, data_ptr: *mut usize, vtable_ptr: *mut usize) -> u32
252 // We abort on panic, so not much is going on here, but we still have to call the closure
253 let u8_ptr_ty = self.tcx.mk_mut_ptr(self.tcx.types.u8);
254 let f = self.into_ptr(args[0].value)?.to_ptr()?;
255 let data = self.into_ptr(args[1].value)?;
256 let f_instance = self.memory.get_fn(f)?;
257 self.write_null(dest, dest_ty)?;
259 // Now we make a function call. TODO: Consider making this re-usable? EvalContext::step does sth. similar for the TLS dtors,
260 // and of course eval_main.
261 let mir = self.load_mir(f_instance.def)?;
262 self.push_stack_frame(
267 StackPopCleanup::Goto(dest_block),
// Pass `data` as the single argument to the closure `f`.
269 let mut args = self.frame().mir.args_iter();
271 let arg_local = args.next().ok_or(
272 EvalErrorKind::AbiViolation(
273 "Argument to __rust_maybe_catch_panic does not take enough arguments."
277 let arg_dest = self.eval_place(&mir::Place::Local(arg_local))?;
278 self.write_ptr(arg_dest, data, u8_ptr_ty)?;
280 assert!(args.next().is_none(), "__rust_maybe_catch_panic argument has more arguments than expected");
282 // We ourselves return 0
283 self.write_null(dest, dest_ty)?;
285 // Don't fall through
289 "__rust_start_panic" => {
// memcmp-style comparison: read both buffers and compare byte-wise.
294 let left = self.into_ptr(args[0].value)?;
295 let right = self.into_ptr(args[1].value)?;
296 let n = Size::from_bytes(self.value_to_scalar(args[2])?.to_u64()?);
299 let left_bytes = self.memory.read_bytes(left, n)?;
300 let right_bytes = self.memory.read_bytes(right, n)?;
302 use std::cmp::Ordering::*;
303 match left_bytes.cmp(right_bytes) {
312 Scalar::from_i32(result),
// memrchr-style search: scan the buffer from the end for `val`.
318 let ptr = self.into_ptr(args[0].value)?;
319 let val = self.value_to_scalar(args[1])?.to_u64()? as u8;
320 let num = self.value_to_scalar(args[2])?.to_u64()?;
321 if let Some(idx) = self.memory.read_bytes(ptr, Size::from_bytes(num))?.iter().rev().position(
// `idx` counts from the end of the buffer; translate to a forward offset.
325 let new_ptr = ptr.ptr_offset(Size::from_bytes(num - idx as u64 - 1), &self)?;
326 self.write_ptr(dest, new_ptr, dest_ty)?;
328 self.write_null(dest, dest_ty)?;
// memchr-style search: scan the buffer from the start for `val`.
333 let ptr = self.into_ptr(args[0].value)?;
334 let val = self.value_to_scalar(args[1])?.to_u64()? as u8;
335 let num = self.value_to_scalar(args[2])?.to_u64()?;
336 if let Some(idx) = self.memory.read_bytes(ptr, Size::from_bytes(num))?.iter().position(
340 let new_ptr = ptr.ptr_offset(Size::from_bytes(idx as u64), &self)?;
341 self.write_ptr(dest, new_ptr, dest_ty)?;
343 self.write_null(dest, dest_ty)?;
// getenv: look the name up in miri's own emulated environment; absent
// variables yield a null pointer.
349 let name_ptr = self.into_ptr(args[0].value)?.to_ptr()?;
350 let name = self.memory.read_c_str(name_ptr)?;
351 match self.machine.env_vars.get(name) {
352 Some(&var) => Scalar::Ptr(var),
353 None => Scalar::null(),
356 self.write_scalar(dest, result, dest_ty)?;
// unsetenv: remove from the emulated environment. A name that is null,
// empty, or contains '=' is treated as invalid and yields -1.
360 let mut success = None;
362 let name_ptr = self.into_ptr(args[0].value)?;
363 if !name_ptr.is_null()? {
364 let name = self.memory.read_c_str(name_ptr.to_ptr()?)?;
365 if !name.is_empty() && !name.contains(&b'=') {
366 success = Some(self.machine.env_vars.remove(name));
370 if let Some(old) = success {
// Free the memory backing the removed variable's value, if any.
371 if let Some(var) = old {
372 self.memory.deallocate(var, None, MemoryKind::Env.into())?;
374 self.write_null(dest, dest_ty)?;
376 self.write_scalar(dest, Scalar::from_i128(-1), dest_ty)?;
// setenv: validate the name, then copy the value (plus trailing NUL)
// into fresh Env-kind memory and store it in the emulated environment.
383 let name_ptr = self.into_ptr(args[0].value)?;
384 let value_ptr = self.into_ptr(args[1].value)?.to_ptr()?;
385 let value = self.memory.read_c_str(value_ptr)?;
386 if !name_ptr.is_null()? {
387 let name = self.memory.read_c_str(name_ptr.to_ptr()?)?;
388 if !name.is_empty() && !name.contains(&b'=') {
389 new = Some((name.to_owned(), value.to_owned()));
393 if let Some((name, value)) = new {
394 // +1 for the null terminator
395 let value_copy = self.memory.allocate(
396 Size::from_bytes((value.len() + 1) as u64),
397 Align::from_bytes(1, 1).unwrap(),
398 Some(MemoryKind::Env.into()),
400 self.memory.write_bytes(value_copy.into(), &value)?;
401 let trailing_zero_ptr = value_copy.offset(Size::from_bytes(value.len() as u64), &self)?.into();
402 self.memory.write_bytes(trailing_zero_ptr, &[0])?;
// Replacing an existing variable frees its old backing memory.
403 if let Some(var) = self.machine.env_vars.insert(
408 self.memory.deallocate(var, None, MemoryKind::Env.into())?;
410 self.write_null(dest, dest_ty)?;
412 self.write_scalar(dest, Scalar::from_i128(-1), dest_ty)?;
// write: forward fd 1/2 to the host's stdout/stderr; other fds are
// ignored with a warning but pretend to succeed.
417 let fd = self.value_to_scalar(args[0])?.to_u64()?;
418 let buf = self.into_ptr(args[1].value)?;
419 let n = self.value_to_scalar(args[2])?.to_u64()?;
420 trace!("Called write({:?}, {:?}, {:?})", fd, buf, n);
421 let result = if fd == 1 || fd == 2 {
423 use std::io::{self, Write};
425 let buf_cont = self.memory.read_bytes(buf, Size::from_bytes(n))?;
426 let res = if fd == 1 {
427 io::stdout().write(buf_cont)
429 io::stderr().write(buf_cont)
436 warn!("Ignored output to FD {}", fd);
437 n as i64 // pretend it all went well
438 }; // now result is the value we return back to the program
439 let ptr_size = self.memory.pointer_size();
442 Scalar::from_isize(result, ptr_size),
// strlen: length of the C string, excluding the NUL terminator.
448 let ptr = self.into_ptr(args[0].value)?.to_ptr()?;
449 let n = self.memory.read_c_str(ptr)?.len();
450 let ptr_size = self.memory.pointer_size();
451 self.write_scalar(dest, Scalar::from_usize(n as u64, ptr_size), dest_ty)?;
454 // Some things needed for sys::thread initialization to go through
455 "signal" | "sigaction" | "sigaltstack" => {
456 self.write_scalar(dest, Scalar::null(), dest_ty)?;
// sysconf: answer a fixed table of names by const-evaluating the
// corresponding `libc` constants and comparing against `name`.
460 let name = self.value_to_scalar(args[0])?.to_u64()?;
462 trace!("sysconf() called with name {}", name);
463 // cache the sysconf integers via miri's global cache
465 (&["libc", "_SC_PAGESIZE"], Scalar::from_i128(4096)),
466 (&["libc", "_SC_GETPW_R_SIZE_MAX"], Scalar::from_i128(-1)),
468 let mut result = None;
469 for &(path, path_value) in paths {
470 if let Ok(instance) = self.resolve_path(path) {
475 let const_val = self.const_eval(cid)?;
476 let value = const_val.unwrap_usize(self.tcx.tcx);
478 result = Some(path_value);
483 if let Some(result) = result {
484 self.write_scalar(dest, result, dest_ty)?;
486 return err!(Unimplemented(
487 format!("Unimplemented sysconf name: {}", name),
492 // Hook pthread calls that go to the thread-local storage memory subsystem
493 "pthread_key_create" => {
494 let key_ptr = self.into_ptr(args[0].value)?;
495 let key_align = self.layout_of(args[0].ty)?.align;
497 // Extract the function type out of the signature (that seems easier than constructing it ourselves...)
// The destructor argument may be a real fn pointer or null; partially
// undefined bits or a non-null/non-pointer value are errors.
498 let dtor = match self.into_ptr(args[1].value)? {
499 Scalar::Ptr(dtor_ptr) => Some(self.memory.get_fn(dtor_ptr)?),
500 Scalar::Bits { defined: 0, .. } => return err!(ReadUndefBytes),
501 Scalar::Bits { bits: 0, .. } => None,
502 Scalar::Bits { .. } => return err!(ReadBytesAsPointer),
505 // Figure out how large a pthread TLS key actually is. This is libc::pthread_key_t.
506 let key_type = args[0].ty.builtin_deref(true)
507 .ok_or(EvalErrorKind::AbiViolation("Wrong signature used for pthread_key_create: First argument must be a raw pointer.".to_owned()))?.ty;
508 let key_size = self.layout_of(key_type)?.size;
510 // Create key and write it into the memory where key_ptr wants it
511 let key = self.memory.create_tls_key(dtor) as u128;
// The freshly created key must fit into the target's pthread_key_t width.
512 if key_size.bits() < 128 && key >= (1u128 << key_size.bits() as u128) {
513 return err!(OutOfTls);
515 self.memory.write_scalar(
518 Scalar::from_u128(key),
523 // Return success (0)
524 self.write_null(dest, dest_ty)?;
526 "pthread_key_delete" => {
527 let key = self.value_to_scalar(args[0])?.to_bytes()?;
528 self.memory.delete_tls_key(key)?;
529 // Return success (0)
530 self.write_null(dest, dest_ty)?;
532 "pthread_getspecific" => {
533 let key = self.value_to_scalar(args[0])?.to_bytes()?;
534 let ptr = self.memory.load_tls(key)?;
535 self.write_ptr(dest, ptr, dest_ty)?;
537 "pthread_setspecific" => {
538 let key = self.value_to_scalar(args[0])?.to_bytes()?;
539 let new_ptr = self.into_ptr(args[1].value)?;
540 self.memory.store_tls(key, new_ptr)?;
542 // Return success (0)
543 self.write_null(dest, dest_ty)?;
547 return err!(Unimplemented("can't interpret with full mir for osx target".to_owned()));
550 // Stub out all the other pthread calls to just return 0
551 link_name if link_name.starts_with("pthread_") => {
552 info!("ignoring C ABI call: {}", link_name);
553 self.write_null(dest, dest_ty)?;
557 // This is a horrible hack, but well... the guard page mechanism calls mmap and expects a particular return value, so we give it that value
558 let addr = self.into_ptr(args[0].value)?;
559 self.write_ptr(dest, addr, dest_ty)?;
// Any link_name not handled above is a hard error.
563 return err!(Unimplemented(
564 format!("can't call C ABI function: {}", link_name),
569 // Since we pushed no stack frame, the main loop will act
570 // as if the call just completed and it's returning to the
572 self.dump_local(dest);
573 self.goto_block(dest_block);
577 /// Get an instance for a path.
// Resolves `path` (crate name followed by module/item segments) to a
// monomorphic instance by walking the crate's item tree; errors with
// `PathNotFound` if any segment fails to match.
578 fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>> {
// Find the crate whose original name matches the first path segment.
582 .find(|&&krate| self.tcx.original_crate_name(krate) == path[0])
586 index: CRATE_DEF_INDEX,
// Walk the remaining segments through each module's children.
588 let mut items = self.tcx.item_children(krate);
589 let mut path_it = path.iter().skip(1).peekable();
591 while let Some(segment) = path_it.next() {
592 for item in mem::replace(&mut items, Default::default()).iter() {
593 if item.ident.name == *segment {
// Last segment: this is the item we were looking for.
594 if path_it.peek().is_none() {
595 return Some(ty::Instance::mono(self.tcx.tcx, item.def.def_id()));
598 items = self.tcx.item_children(item.def.def_id());
// No match anywhere: report the failed lookup with the full owned path.
606 let path = path.iter().map(|&s| s.to_owned()).collect();
607 EvalErrorKind::PathNotFound(path).into()
// call_missing_fn: emulates functions whose MIR is unavailable (e.g. when
// running against a libstd without full MIR): allocator shims, C-ABI
// delegation, and a handful of libstd functions. NOTE(review): the `fn`
// header is elided in this view; the fragments below are its parameters.
613 instance: ty::Instance<'tcx>,
614 destination: Option<(Place, mir::BasicBlock)>,
615 args: &[ValTy<'tcx>],
616 sig: ty::FnSig<'tcx>,
618 ) -> EvalResult<'tcx> {
619 // In some cases in non-MIR libstd-mode, not having a destination is legit. Handle these early.
621 "std::panicking::rust_panic_with_hook" |
622 "core::panicking::panic_fmt::::panic_impl" |
623 "std::rt::begin_panic_fmt" => return err!(Panic),
// All remaining emulations need a destination; failing that, report the
// missing MIR for this path.
627 let dest_ty = sig.output();
628 let (dest, dest_block) = destination.ok_or_else(
629 || EvalErrorKind::NoMirFor(path.clone()),
632 if sig.abi == Abi::C {
633 // An external C function
634 // TODO: That functions actually has a similar preamble to what follows here. May make sense to
635 // unify these two mechanisms for "hooking into missing functions".
647 // Allocators are magic. They have no MIR, even when the rest of libstd does.
// __rust_alloc: validate size/alignment, then allocate Rust-kind memory.
648 "alloc::alloc::::__rust_alloc" => {
649 let size = self.value_to_scalar(args[0])?.to_u64()?;
650 let align = self.value_to_scalar(args[1])?.to_u64()?;
652 return err!(HeapAllocZeroBytes);
654 if !align.is_power_of_two() {
655 return err!(HeapAllocNonPowerOfTwoAlignment(align));
657 let ptr = self.memory.allocate(Size::from_bytes(size),
658 Align::from_bytes(align, align).unwrap(),
659 Some(MemoryKind::Rust.into()))?;
660 self.write_scalar(dest, Scalar::Ptr(ptr), dest_ty)?;
// __rust_alloc_zeroed: same as __rust_alloc, plus zero-filling the bytes.
662 "alloc::alloc::::__rust_alloc_zeroed" => {
663 let size = self.value_to_scalar(args[0])?.to_u64()?;
664 let align = self.value_to_scalar(args[1])?.to_u64()?;
666 return err!(HeapAllocZeroBytes);
668 if !align.is_power_of_two() {
669 return err!(HeapAllocNonPowerOfTwoAlignment(align));
671 let ptr = self.memory.allocate(Size::from_bytes(size),
672 Align::from_bytes(align, align).unwrap(),
673 Some(MemoryKind::Rust.into()))?;
674 self.memory.write_repeat(ptr.into(), 0, Size::from_bytes(size))?;
675 self.write_scalar(dest, Scalar::Ptr(ptr), dest_ty)?;
// __rust_dealloc: validate, then free with the declared size/alignment
// so miri can check they match the allocation.
677 "alloc::alloc::::__rust_dealloc" => {
678 let ptr = self.into_ptr(args[0].value)?.to_ptr()?;
679 let old_size = self.value_to_scalar(args[1])?.to_u64()?;
680 let align = self.value_to_scalar(args[2])?.to_u64()?;
682 return err!(HeapAllocZeroBytes);
684 if !align.is_power_of_two() {
685 return err!(HeapAllocNonPowerOfTwoAlignment(align));
687 self.memory.deallocate(
689 Some((Size::from_bytes(old_size), Align::from_bytes(align, align).unwrap())),
690 MemoryKind::Rust.into(),
// __rust_realloc: validate both sizes and the alignment, then delegate
// to the memory subsystem's reallocate.
693 "alloc::alloc::::__rust_realloc" => {
694 let ptr = self.into_ptr(args[0].value)?.to_ptr()?;
695 let old_size = self.value_to_scalar(args[1])?.to_u64()?;
696 let align = self.value_to_scalar(args[2])?.to_u64()?;
697 let new_size = self.value_to_scalar(args[3])?.to_u64()?;
698 if old_size == 0 || new_size == 0 {
699 return err!(HeapAllocZeroBytes);
701 if !align.is_power_of_two() {
702 return err!(HeapAllocNonPowerOfTwoAlignment(align));
704 let new_ptr = self.memory.reallocate(
706 Size::from_bytes(old_size),
707 Align::from_bytes(align, align).unwrap(),
708 Size::from_bytes(new_size),
709 Align::from_bytes(align, align).unwrap(),
710 MemoryKind::Rust.into(),
712 self.write_scalar(dest, Scalar::Ptr(new_ptr), dest_ty)?;
715 // A Rust function is missing, which means we are running with MIR missing for libstd (or other dependencies).
716 // Still, we can make many things mostly work by "emulating" or ignoring some functions.
717 "std::io::_print" => {
719 "Ignoring output. To run programs that print, make sure you have a libstd with full MIR."
722 "std::thread::Builder::new" => {
723 return err!(Unimplemented("miri does not support threading".to_owned()))
725 "std::env::args" => {
726 return err!(Unimplemented(
727 "miri does not support program arguments".to_owned(),
730 "std::panicking::panicking" |
731 "std::rt::panicking" => {
732 // we abort on panic -> `std::rt::panicking` always returns false
733 let bool = self.tcx.types.bool;
734 self.write_scalar(dest, Scalar::from_bool(false), bool)?;
736 "std::sys::imp::c::::AddVectoredExceptionHandler" |
737 "std::sys::imp::c::::SetThreadStackGuarantee" => {
738 let usize = self.tcx.types.usize;
739 // any non zero value works for the stdlib. This is just used for stackoverflows anyway
740 self.write_scalar(dest, Scalar::from_u128(1), usize)?;
742 _ => return err!(NoMirFor(path)),
745 // Since we pushed no stack frame, the main loop will act
746 // as if the call just completed and it's returning to the
748 self.dump_local(dest);
749 self.goto_block(dest_block);
// Writes a zero ("null") scalar of type `dest_ty` into `dest`; used as the
// conventional success/null return for the emulated C functions above.
753 fn write_null(&mut self, dest: Place, dest_ty: Ty<'tcx>) -> EvalResult<'tcx> {
754 self.write_scalar(dest, Scalar::null(), dest_ty)