2 use rustc::ty::layout::{Align, LayoutOf, Size};
3 use rustc::hir::def_id::{DefId, CRATE_DEF_INDEX};
// Extension trait adding miri-specific call-emulation hooks to `EvalContext`
// (foreign-item shims, missing-MIR emulation, path resolution).
// NOTE(review): this excerpt is elided — several signature lines and the
// trait's closing brace fall outside the visible lines.
11 pub trait EvalContextExt<'tcx, 'mir> {
12 /// Emulate calling a foreign item, fail if the item is not supported.
13 /// This function will handle `goto_block` if needed.
14 fn emulate_foreign_item(
17 args: &[OpTy<'tcx, Borrow>],
18 dest: PlaceTy<'tcx, Borrow>,
20 ) -> EvalResult<'tcx>;
// Resolve a symbolic path such as `["libc", "_SC_PAGESIZE"]` to a monomorphic
// instance, or fail with `PathNotFound` (see the impl below).
22 fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>>;
24 /// Emulate a function that should have MIR but does not.
25 /// This is solely to support execution without full MIR.
26 /// Fail if emulating this function is not supported.
27 /// This function will handle `goto_block` if needed.
28 fn emulate_missing_fn(
31 args: &[OpTy<'tcx, Borrow>],
32 dest: Option<PlaceTy<'tcx, Borrow>>,
33 ret: Option<mir::BasicBlock>,
34 ) -> EvalResult<'tcx>;
// NOTE(review): the `fn` header line for the following signature is elided
// from this excerpt; judging by the return type (`Option<&'mir mir::Mir>`)
// and the impl below, this is the `eval_fn_call`-style hook — confirm
// against the full file.
38 instance: ty::Instance<'tcx>,
39 args: &[OpTy<'tcx, Borrow>],
40 dest: Option<PlaceTy<'tcx, Borrow>>,
41 ret: Option<mir::BasicBlock>,
42 ) -> EvalResult<'tcx, Option<&'mir mir::Mir<'tcx>>>;
// Write an all-zero scalar of `dest`'s size into `dest` (shared helper for
// the many shims that "return 0 for success").
44 fn write_null(&mut self, dest: PlaceTy<'tcx, Borrow>) -> EvalResult<'tcx>;
47 impl<'a, 'mir, 'tcx: 'mir + 'a> EvalContextExt<'tcx, 'mir> for EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>> {
// Top-level call hook: tries (in order) the CTFE hooks, a hand-rolled
// `align_offset` lang-item shim, foreign-item emulation, and finally loading
// real MIR with a missing-MIR fallback. Returns `Ok(None)` when the call was
// fully handled by a shim, `Ok(Some(mir))` when the caller should execute MIR.
// NOTE(review): the `fn` header line (orig line 49) is elided from this excerpt.
50 instance: ty::Instance<'tcx>,
51 args: &[OpTy<'tcx, Borrow>],
52 dest: Option<PlaceTy<'tcx, Borrow>>,
53 ret: Option<mir::BasicBlock>,
54 ) -> EvalResult<'tcx, Option<&'mir mir::Mir<'tcx>>> {
55 trace!("eval_fn_call: {:#?}, {:?}", instance, dest.map(|place| *place));
57 // first run the common hooks also supported by CTFE
58 if self.hook_fn(instance, args, dest)? {
// Hook handled the call; jump to the return block ourselves.
59 self.goto_block(ret)?;
62 // there are some more lang items we want to hook that CTFE does not hook (yet)
63 if self.tcx.lang_items().align_offset_fn() == Some(instance.def.def_id()) {
64 // FIXME: return a real value in case the target allocation has an
65 // alignment bigger than the one requested
// u128::MAX, truncated to the destination size, is `align_offset`'s
// documented "cannot align" sentinel (usize::MAX after truncation).
66 let n = u128::max_value();
67 let dest = dest.unwrap();
68 let n = self.truncate(n, dest.layout);
69 self.write_scalar(Scalar::from_uint(n, dest.layout.size), dest)?;
70 self.goto_block(ret)?;
74 // Try to see if we can do something about foreign items
75 if self.tcx.is_foreign_item(instance.def_id()) {
76 // An external function that we cannot find MIR for, but we can still run enough
77 // of them to make miri viable.
78 self.emulate_foreign_item(
84 // `goto_block` already handled
88 // Otherwise we really want to see the MIR -- but if we do not have it, maybe we can
89 // emulate something. This is a HACK to support running without a full-MIR libstd.
90 let mir = match self.load_mir(instance.def) {
// Only `NoMirFor` is recoverable via emulation; everything else propagates.
92 Err(EvalError { kind: EvalErrorKind::NoMirFor(path), .. }) => {
93 self.emulate_missing_fn(
99 // `goto_block` already handled
102 Err(other) => return Err(other),
// Emulate a foreign (extern) function by name. Dispatches on the symbol's
// `#[link_name]` (falling back to the item name) and implements just enough of
// libc / the Rust allocator ABI / pthreads / a few Windows APIs to run real
// programs. On success, jumps to `ret`; unknown symbols produce `Unimplemented`.
// NOTE(review): this excerpt is elided — several match-arm label lines and
// closing braces are missing below; annotations on those arms are inferred
// from their bodies and should be confirmed against the full file.
108 fn emulate_foreign_item(
111 args: &[OpTy<'tcx, Borrow>],
112 dest: PlaceTy<'tcx, Borrow>,
113 ret: mir::BasicBlock,
114 ) -> EvalResult<'tcx> {
// Prefer the explicit `#[link_name = "..."]` attribute; otherwise the
// item's own name is the linked symbol.
115 let attrs = self.tcx.get_attrs(def_id);
116 let link_name = match attr::first_attr_value_str_by_name(&attrs, "link_name") {
117 Some(name) => name.as_str(),
118 None => self.tcx.item_name(def_id).as_str(),
121 match &link_name[..] {
// presumably the "malloc" arm (label line elided): size 0 → null,
// otherwise allocate pointer-aligned C memory — TODO confirm.
123 let size = self.read_scalar(args[0])?.to_usize(&self)?;
125 self.write_null(dest)?;
127 let align = self.tcx.data_layout.pointer_align;
128 let ptr = self.memory.allocate(Size::from_bytes(size), align, MiriMemoryKind::C.into())?;
129 self.write_scalar(Scalar::Ptr(ptr), dest)?;
// presumably the "free" arm (label line elided) — TODO confirm.
134 let ptr = self.read_scalar(args[0])?.not_undef()?;
136 self.memory.deallocate(
139 MiriMemoryKind::C.into(),
// presumably the "__rust_alloc" arm (label line elided); same
// validation as "__rust_alloc_zeroed" below — TODO confirm.
145 let size = self.read_scalar(args[0])?.to_usize(&self)?;
146 let align = self.read_scalar(args[1])?.to_usize(&self)?;
148 return err!(HeapAllocZeroBytes);
150 if !align.is_power_of_two() {
151 return err!(HeapAllocNonPowerOfTwoAlignment(align));
153 let ptr = self.memory.allocate(Size::from_bytes(size),
154 Align::from_bytes(align, align).unwrap(),
155 MiriMemoryKind::Rust.into())?;
156 self.write_scalar(Scalar::Ptr(ptr), dest)?;
// Rust allocator ABI: allocate + zero-fill. Rejects size 0 and
// non-power-of-two alignment, mirroring the Layout invariants.
158 "__rust_alloc_zeroed" => {
159 let size = self.read_scalar(args[0])?.to_usize(&self)?;
160 let align = self.read_scalar(args[1])?.to_usize(&self)?;
162 return err!(HeapAllocZeroBytes);
164 if !align.is_power_of_two() {
165 return err!(HeapAllocNonPowerOfTwoAlignment(align));
167 let ptr = self.memory.allocate(Size::from_bytes(size),
168 Align::from_bytes(align, align).unwrap(),
169 MiriMemoryKind::Rust.into())?;
170 self.memory.write_repeat(ptr.into(), 0, Size::from_bytes(size))?;
171 self.write_scalar(Scalar::Ptr(ptr), dest)?;
// Rust allocator ABI: deallocate, checking the caller-supplied
// old size/alignment against the allocation.
173 "__rust_dealloc" => {
174 let ptr = self.read_scalar(args[0])?.to_ptr()?;
175 let old_size = self.read_scalar(args[1])?.to_usize(&self)?;
176 let align = self.read_scalar(args[2])?.to_usize(&self)?;
178 return err!(HeapAllocZeroBytes);
180 if !align.is_power_of_two() {
181 return err!(HeapAllocNonPowerOfTwoAlignment(align));
183 self.memory.deallocate(
185 Some((Size::from_bytes(old_size), Align::from_bytes(align, align).unwrap())),
186 MiriMemoryKind::Rust.into(),
// Rust allocator ABI: reallocate with same alignment for old and new.
189 "__rust_realloc" => {
190 let ptr = self.read_scalar(args[0])?.to_ptr()?;
191 let old_size = self.read_scalar(args[1])?.to_usize(&self)?;
192 let align = self.read_scalar(args[2])?.to_usize(&self)?;
193 let new_size = self.read_scalar(args[3])?.to_usize(&self)?;
194 if old_size == 0 || new_size == 0 {
195 return err!(HeapAllocZeroBytes);
197 if !align.is_power_of_two() {
198 return err!(HeapAllocNonPowerOfTwoAlignment(align));
200 let new_ptr = self.memory.reallocate(
202 Size::from_bytes(old_size),
203 Align::from_bytes(align, align).unwrap(),
204 Size::from_bytes(new_size),
205 Align::from_bytes(align, align).unwrap(),
206 MiriMemoryKind::Rust.into(),
208 self.write_scalar(Scalar::Ptr(new_ptr), dest)?;
// "syscall" arm (label line elided): only the getrandom-style id is
// recognized, and even that is rejected — see the errors below.
212 // TODO: read `syscall` ids like `sysconf` ids and
213 // figure out some way to actually process some of them
215 // libc::syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), GRND_NONBLOCK)
216 // is called if a `HashMap` is created the regular way.
217 match self.read_scalar(args[0])?.to_usize(&self)? {
219 return err!(Unimplemented(
220 "miri does not support random number generators".to_owned(),
224 return err!(Unimplemented(
225 format!("miri does not support syscall id {}", id),
// presumably the "dlsym" arm (label line elided): read the requested
// symbol name only to produce a useful error — TODO confirm.
232 let _handle = self.read_scalar(args[0])?;
233 let symbol = self.read_scalar(args[1])?.to_ptr()?;
234 let symbol_name = self.memory.read_c_str(symbol)?;
235 let err = format!("bad c unicode symbol: {:?}", symbol_name);
236 let symbol_name = ::std::str::from_utf8(symbol_name).unwrap_or(&err);
237 return err!(Unimplemented(format!(
238 "miri does not support dynamically loading libraries (requested symbol: {})",
243 "__rust_maybe_catch_panic" => {
244 // fn __rust_maybe_catch_panic(f: fn(*mut u8), data: *mut u8, data_ptr: *mut usize, vtable_ptr: *mut usize) -> u32
245 // We abort on panic, so not much is going on here, but we still have to call the closure
246 let f = self.read_scalar(args[0])?.to_ptr()?;
247 let data = self.read_scalar(args[1])?.not_undef()?;
248 let f_instance = self.memory.get_fn(f)?;
249 self.write_null(dest)?;
250 trace!("__rust_maybe_catch_panic: {:?}", f_instance);
252 // Now we make a function call. TODO: Consider making this re-usable? EvalContext::step does sth. similar for the TLS dtors,
253 // and of course eval_main.
254 let mir = self.load_mir(f_instance.def)?;
// The closure returns `()`; a dangling place is fine for a ZST return.
255 let ret_place = MPlaceTy::dangling(self.layout_of(self.tcx.mk_unit())?, &self).into();
256 self.push_stack_frame(
261 StackPopCleanup::Goto(Some(ret)), // directly return to caller
// Pass `data` as the closure's single argument, taken from the new
// frame's argument locals.
263 let mut args = self.frame().mir.args_iter();
265 let arg_local = args.next().ok_or_else(||
266 EvalErrorKind::AbiViolation(
267 "Argument to __rust_maybe_catch_panic does not take enough arguments."
271 let arg_dest = self.eval_place(&mir::Place::Local(arg_local))?;
272 self.write_scalar(data, arg_dest)?;
274 assert!(args.next().is_none(), "__rust_maybe_catch_panic argument has more arguments than expected");
276 // We ourselves will return 0, eventually (because we will not return if we paniced)
277 self.write_null(dest)?;
279 // Don't fall through, we do NOT want to `goto_block`!
283 "__rust_start_panic" =>
284 return err!(MachineError("the evaluated program panicked".to_string())),
// presumably the "memcmp" arm (label line elided): byte-compare two
// regions of length `n` and return <0 / 0 / >0 as an i32 — TODO confirm.
287 let left = self.read_scalar(args[0])?.not_undef()?;
288 let right = self.read_scalar(args[1])?.not_undef()?;
289 let n = Size::from_bytes(self.read_scalar(args[2])?.to_usize(&self)?);
292 let left_bytes = self.memory.read_bytes(left, n)?;
293 let right_bytes = self.memory.read_bytes(right, n)?;
295 use std::cmp::Ordering::*;
296 match left_bytes.cmp(right_bytes) {
304 Scalar::from_int(result, Size::from_bits(32)),
// presumably the "memrchr" arm (label line elided): search backwards
// for byte `val`, return pointer to last occurrence or null — TODO confirm.
310 let ptr = self.read_scalar(args[0])?.not_undef()?;
311 let val = self.read_scalar(args[1])?.to_bytes()? as u8;
312 let num = self.read_scalar(args[2])?.to_usize(&self)?;
313 if let Some(idx) = self.memory.read_bytes(ptr, Size::from_bytes(num))?.iter().rev().position(
// `idx` counts from the end (reversed iterator), hence `num - idx - 1`.
317 let new_ptr = ptr.ptr_offset(Size::from_bytes(num - idx as u64 - 1), &self)?;
318 self.write_scalar(new_ptr, dest)?;
320 self.write_null(dest)?;
// presumably the "memchr" arm (label line elided): forward search,
// pointer to first occurrence or null — TODO confirm.
325 let ptr = self.read_scalar(args[0])?.not_undef()?;
326 let val = self.read_scalar(args[1])?.to_bytes()? as u8;
327 let num = self.read_scalar(args[2])?.to_usize(&self)?;
328 if let Some(idx) = self.memory.read_bytes(ptr, Size::from_bytes(num))?.iter().position(
332 let new_ptr = ptr.ptr_offset(Size::from_bytes(idx as u64), &self)?;
333 self.write_scalar(new_ptr, dest)?;
335 self.write_null(dest)?;
// presumably the "getenv" arm (label line elided): look the name up in
// miri's own env-var map; null when unset — TODO confirm.
341 let name_ptr = self.read_scalar(args[0])?.to_ptr()?;
342 let name = self.memory.read_c_str(name_ptr)?;
343 match self.machine.env_vars.get(name) {
344 Some(&var) => Scalar::Ptr(var),
345 None => Scalar::ptr_null(*self.tcx),
348 self.write_scalar(result, dest)?;
// presumably the "unsetenv" arm (label line elided): remove from the
// env map, free the old value, return 0 on success / -1 on bad name.
352 let mut success = None;
354 let name_ptr = self.read_scalar(args[0])?.not_undef()?;
355 if !name_ptr.is_null() {
356 let name = self.memory.read_c_str(name_ptr.to_ptr()?)?;
// POSIX: empty names and names containing '=' are invalid.
357 if !name.is_empty() && !name.contains(&b'=') {
358 success = Some(self.machine.env_vars.remove(name));
362 if let Some(old) = success {
363 if let Some(var) = old {
364 self.memory.deallocate(var, None, MiriMemoryKind::Env.into())?;
366 self.write_null(dest)?;
368 self.write_scalar(Scalar::from_int(-1, dest.layout.size), dest)?;
// presumably the "setenv" arm (label line elided): copy the value into
// miri-owned memory (NUL-terminated), replace any previous entry.
375 let name_ptr = self.read_scalar(args[0])?.not_undef()?;
376 let value_ptr = self.read_scalar(args[1])?.to_ptr()?;
377 let value = self.memory.read_c_str(value_ptr)?;
378 if !name_ptr.is_null() {
379 let name = self.memory.read_c_str(name_ptr.to_ptr()?)?;
380 if !name.is_empty() && !name.contains(&b'=') {
381 new = Some((name.to_owned(), value.to_owned()));
385 if let Some((name, value)) = new {
386 // +1 for the null terminator
387 let value_copy = self.memory.allocate(
388 Size::from_bytes((value.len() + 1) as u64),
389 Align::from_bytes(1, 1).unwrap(),
390 MiriMemoryKind::Env.into(),
392 self.memory.write_bytes(value_copy.into(), &value)?;
393 let trailing_zero_ptr = value_copy.offset(Size::from_bytes(value.len() as u64), &self)?.into();
394 self.memory.write_bytes(trailing_zero_ptr, &[0])?;
// `insert` returns the displaced allocation, which must be freed.
395 if let Some(var) = self.machine.env_vars.insert(
400 self.memory.deallocate(var, None, MiriMemoryKind::Env.into())?;
402 self.write_null(dest)?;
404 self.write_scalar(Scalar::from_int(-1, dest.layout.size), dest)?;
// presumably the "write" arm (label line elided): forward fds 1/2 to the
// host's stdout/stderr, pretend success for any other fd — TODO confirm.
409 let fd = self.read_scalar(args[0])?.to_bytes()?;
410 let buf = self.read_scalar(args[1])?.not_undef()?;
411 let n = self.read_scalar(args[2])?.to_bytes()? as u64;
412 trace!("Called write({:?}, {:?}, {:?})", fd, buf, n);
413 let result = if fd == 1 || fd == 2 {
415 use std::io::{self, Write};
417 let buf_cont = self.memory.read_bytes(buf, Size::from_bytes(n))?;
418 let res = if fd == 1 {
419 io::stdout().write(buf_cont)
421 io::stderr().write(buf_cont)
428 warn!("Ignored output to FD {}", fd);
429 n as i64 // pretend it all went well
430 }; // now result is the value we return back to the program
432 Scalar::from_int(result, dest.layout.size),
// presumably the "strlen" arm (label line elided) — TODO confirm.
438 let ptr = self.read_scalar(args[0])?.to_ptr()?;
439 let n = self.memory.read_c_str(ptr)?.len();
440 self.write_scalar(Scalar::from_uint(n as u64, dest.layout.size), dest)?;
443 // Some things needed for sys::thread initialization to go through
444 "signal" | "sigaction" | "sigaltstack" => {
// Pretend success; miri has no signal handling.
445 self.write_scalar(Scalar::from_int(0, dest.layout.size), dest)?;
// "sysconf" arm (label line elided): resolve the libc constant the
// caller passed by const-evaluating known `libc::_SC_*` items.
449 let name = self.read_scalar(args[0])?.to_i32()?;
451 trace!("sysconf() called with name {}", name);
452 // cache the sysconf integers via miri's global cache
454 (&["libc", "_SC_PAGESIZE"], Scalar::from_int(4096, dest.layout.size)),
455 (&["libc", "_SC_GETPW_R_SIZE_MAX"], Scalar::from_int(-1, dest.layout.size)),
457 let mut result = None;
458 for &(path, path_value) in paths {
459 if let Ok(instance) = self.resolve_path(path) {
464 let const_val = self.const_eval(cid)?;
465 let value = const_val.unwrap_bits(
467 ty::ParamEnv::empty().and(self.tcx.types.i32)) as i32;
469 result = Some(path_value);
474 if let Some(result) = result {
475 self.write_scalar(result, dest)?;
477 return err!(Unimplemented(
478 format!("Unimplemented sysconf name: {}", name),
483 // Hook pthread calls that go to the thread-local storage memory subsystem
484 "pthread_key_create" => {
485 let key_ptr = self.read_scalar(args[0])?.to_ptr()?;
487 // Extract the function type out of the signature (that seems easier than constructing it ourselves...)
488 let dtor = match self.read_scalar(args[1])?.not_undef()? {
489 Scalar::Ptr(dtor_ptr) => Some(self.memory.get_fn(dtor_ptr)?),
// A 0 of pointer size is a NULL dtor; any other raw bits are invalid.
490 Scalar::Bits { bits: 0, size } => {
491 assert_eq!(size as u64, self.memory.pointer_size().bytes());
494 Scalar::Bits { .. } => return err!(ReadBytesAsPointer),
497 // Figure out how large a pthread TLS key actually is. This is libc::pthread_key_t.
498 let key_type = args[0].layout.ty.builtin_deref(true)
499 .ok_or_else(|| EvalErrorKind::AbiViolation("Wrong signature used for pthread_key_create: First argument must be a raw pointer.".to_owned()))?.ty;
500 let key_layout = self.layout_of(key_type)?;
502 // Create key and write it into the memory where key_ptr wants it
503 let key = self.machine.tls.create_tls_key(dtor, *self.tcx) as u128;
// Reject keys that do not fit into the target's pthread_key_t.
504 if key_layout.size.bits() < 128 && key >= (1u128 << key_layout.size.bits() as u128) {
505 return err!(OutOfTls);
507 self.memory.write_scalar(
510 Scalar::from_uint(key, key_layout.size).into(),
514 // Return success (0)
515 self.write_null(dest)?;
517 "pthread_key_delete" => {
518 let key = self.read_scalar(args[0])?.to_bytes()?;
519 self.machine.tls.delete_tls_key(key)?;
520 // Return success (0)
521 self.write_null(dest)?;
523 "pthread_getspecific" => {
524 let key = self.read_scalar(args[0])?.to_bytes()?;
525 let ptr = self.machine.tls.load_tls(key)?;
526 self.write_scalar(ptr, dest)?;
528 "pthread_setspecific" => {
529 let key = self.read_scalar(args[0])?.to_bytes()?;
530 let new_ptr = self.read_scalar(args[1])?.not_undef()?;
531 self.machine.tls.store_tls(key, new_ptr)?;
533 // Return success (0)
534 self.write_null(dest)?;
// presumably an "_tlv_atexit"-style arm (label line elided) — the dtor
// is currently ignored — TODO confirm.
538 // FIXME: Register the dtor
541 // Determining stack base address
542 "pthread_attr_init" | "pthread_attr_destroy" | "pthread_attr_get_np" |
543 "pthread_getattr_np" | "pthread_self" | "pthread_get_stacksize_np" => {
// Stub: report success, compute nothing.
544 self.write_null(dest)?;
546 "pthread_attr_getstack" => {
547 // second argument is where we are supposed to write the stack size
548 let ptr = self.ref_to_mplace(self.read_value(args[1])?)?;
549 let stackaddr = Scalar::from_int(0x80000, args[1].layout.size); // just any address
550 self.write_scalar(stackaddr, ptr.into())?;
552 self.write_null(dest)?;
554 "pthread_get_stackaddr_np" => {
555 let stackaddr = Scalar::from_int(0x80000, dest.layout.size); // just any address
556 self.write_scalar(stackaddr, dest)?;
559 // Stub out calls for condvar, mutex and rwlock to just return 0
560 "pthread_mutexattr_init" | "pthread_mutexattr_settype" | "pthread_mutex_init" |
561 "pthread_mutexattr_destroy" | "pthread_mutex_lock" | "pthread_mutex_unlock" |
562 "pthread_mutex_destroy" | "pthread_rwlock_rdlock" | "pthread_rwlock_unlock" |
563 "pthread_rwlock_wrlock" | "pthread_rwlock_destroy" | "pthread_condattr_init" |
564 "pthread_condattr_setclock" | "pthread_cond_init" | "pthread_condattr_destroy" |
565 "pthread_cond_destroy" => {
566 self.write_null(dest)?;
// "mmap" arm (label line elided): echo the requested address back.
570 // This is a horrible hack, but well... the guard page mechanism calls mmap and expects a particular return value, so we give it that value
571 let addr = self.read_scalar(args[0])?.not_undef()?;
572 self.write_scalar(addr, dest)?;
// another success-stub arm (label line elided) — TODO confirm which.
575 self.write_null(dest)?;
// Windows API shims start here.
579 "AddVectoredExceptionHandler" => {
580 // any non zero value works for the stdlib. This is just used for stackoverflows anyway
581 self.write_scalar(Scalar::from_int(1, dest.layout.size), dest)?;
583 "InitializeCriticalSection" |
584 "EnterCriticalSection" |
585 "LeaveCriticalSection" |
586 "DeleteCriticalSection" |
588 // Function does not return anything, nothing to do
592 "TryEnterCriticalSection" => {
593 // pretend these do not exist/nothing happened, by returning zero
594 self.write_null(dest)?;
// error-stub arm (label line elided), e.g. GetLastError-style — the
// value 120 is Windows' ERROR_CALL_NOT_IMPLEMENTED.
597 // this is c::ERROR_CALL_NOT_IMPLEMENTED
598 self.write_scalar(Scalar::from_int(120, dest.layout.size), dest)?;
// "TlsAlloc" arm (label line elided by the look of the body).
603 // This just creates a key; Windows does not natively support TLS dtors.
605 // Create key and return it
606 let key = self.machine.tls.create_tls_key(None, *self.tcx) as u128;
608 // Figure out how large a TLS key actually is. This is c::DWORD.
609 if dest.layout.size.bits() < 128 && key >= (1u128 << dest.layout.size.bits() as u128) {
610 return err!(OutOfTls);
612 self.write_scalar(Scalar::from_uint(key, dest.layout.size), dest)?;
// presumably "TlsGetValue" (label line elided) — TODO confirm.
615 let key = self.read_scalar(args[0])?.to_bytes()?;
616 let ptr = self.machine.tls.load_tls(key)?;
617 self.write_scalar(ptr, dest)?;
// presumably "TlsSetValue" (label line elided) — TODO confirm.
620 let key = self.read_scalar(args[0])?.to_bytes()?;
621 let new_ptr = self.read_scalar(args[1])?.not_undef()?;
622 self.machine.tls.store_tls(key, new_ptr)?;
624 // Return success (1)
625 self.write_scalar(Scalar::from_int(1, dest.layout.size), dest)?;
628 // We can't execute anything else
630 return err!(Unimplemented(
631 format!("can't call foreign function: {}", link_name),
// All arms that fall through here have written their result; finish the
// call by jumping to the return block and tracing the destination.
636 self.goto_block(Some(ret))?;
637 self.dump_place(*dest);
641 /// Get an instance for a path.
// Walks the crate graph: finds the crate whose original name matches
// `path[0]`, then descends through `item_children` one segment at a time.
// Returns `Instance::mono` for the final item, or `PathNotFound`.
642 fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>> {
646 .find(|&&krate| self.tcx.original_crate_name(krate) == path[0])
650 index: CRATE_DEF_INDEX,
652 let mut items = self.tcx.item_children(krate);
// `path[0]` was consumed by the crate lookup; iterate the rest.
653 let mut path_it = path.iter().skip(1).peekable();
655 while let Some(segment) = path_it.next() {
// Take ownership of the current children list before replacing it,
// so `items` can be reassigned inside the loop.
656 for item in mem::replace(&mut items, Default::default()).iter() {
657 if item.ident.name == *segment {
// Last segment → this is the item we were looking for.
658 if path_it.peek().is_none() {
659 return Some(ty::Instance::mono(self.tcx.tcx, item.def.def_id()));
662 items = self.tcx.item_children(item.def.def_id());
// Fall-through: no crate or segment matched (error path; some glue lines
// are elided from this excerpt).
670 let path = path.iter().map(|&s| s.to_owned()).collect();
671 EvalErrorKind::PathNotFound(path).into()
// Emulate a Rust function whose MIR is unavailable (running against a libstd
// without full MIR). Dispatches on the function's symbolic `path`; handles
// `goto_block` itself on success.
675 fn emulate_missing_fn(
678 _args: &[OpTy<'tcx, Borrow>],
679 dest: Option<PlaceTy<'tcx, Borrow>>,
680 ret: Option<mir::BasicBlock>,
681 ) -> EvalResult<'tcx> {
682 // In some cases in non-MIR libstd-mode, not having a destination is legit. Handle these early.
// Panic entry points: diverging, so no `dest` is needed; we abort.
684 "std::panicking::rust_panic_with_hook" |
685 "core::panicking::panic_fmt::::panic_impl" |
686 "std::rt::begin_panic_fmt" =>
687 return err!(MachineError("the evaluated program panicked".to_string())),
// Everything below writes a return value, so a destination is required.
691 let dest = dest.ok_or_else(
692 // Must be some function we do not support
693 || EvalErrorKind::NoMirFor(path.clone()),
697 // A Rust function is missing, which means we are running with MIR missing for libstd (or other dependencies).
698 // Still, we can make many things mostly work by "emulating" or ignoring some functions.
// Printing: silently dropped (with a one-time-style info message).
700 "std::io::_eprint" => {
702 "Ignoring output. To run programs that print, make sure you have a libstd with full MIR."
705 "std::thread::Builder::new" => {
706 return err!(Unimplemented("miri does not support threading".to_owned()))
708 "std::env::args" => {
709 return err!(Unimplemented(
710 "miri does not support program arguments".to_owned(),
713 "std::panicking::panicking" |
714 "std::rt::panicking" => {
715 // we abort on panic -> `std::rt::panicking` always returns false
716 self.write_scalar(Scalar::from_bool(false), dest)?;
719 _ => return err!(NoMirFor(path)),
// Emulation succeeded: return to the caller and trace the result.
722 self.goto_block(ret)?;
723 self.dump_place(*dest);
// Write an all-zero scalar, sized to `dest`'s layout, into `dest`. Shared
// helper for the many shims above that return 0 / NULL for success.
727 fn write_null(&mut self, dest: PlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> {
728 self.write_scalar(Scalar::from_int(0, dest.layout.size), dest)