// src/fn_call.rs
// Commit: `align_offset` intrinsic is now a lang item
use rustc::ty::{self, Ty};
use rustc::ty::layout::{self, Align, LayoutOf, Size};
use rustc::hir::def_id::{DefId, CRATE_DEF_INDEX};
use rustc::mir;
use rustc_data_structures::indexed_vec::Idx;
use rustc_target::spec::abi::Abi;
use syntax::attr;
use syntax::codemap::Span;

use std::mem;

use super::*;

use tls::MemoryExt;

use super::memory::MemoryKind;

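/// Writes the discriminant of `variant_index` into `dest` according to the enum's layout:
/// a no-op for `Single` layouts (any other variant must be uninhabited), a tag write for
/// `Tagged` layouts, and a niche write for `NicheFilling` layouts.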
fn write_discriminant_value<'a, 'mir, 'tcx: 'a + 'mir>(
    ecx: &mut EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>>,
    dest_ty: Ty<'tcx>,
    dest: Place,
    variant_index: usize,
) -> EvalResult<'tcx> {
    let layout = ecx.layout_of(dest_ty)?;

    match layout.variants {
        layout::Variants::Single { index } => {
            if index != variant_index {
                // If the layout of an enum is `Single`, all
                // other variants are necessarily uninhabited.
                assert_eq!(layout.for_variant(&ecx, variant_index).abi,
                           layout::Abi::Uninhabited);
            }
        }
        layout::Variants::Tagged { .. } => {
            let discr_val = dest_ty.ty_adt_def().unwrap()
                .discriminant_for_variant(*ecx.tcx, variant_index)
                .val;

            let (discr_dest, discr) = ecx.place_field(dest, mir::Field::new(0), layout)?;
            ecx.write_scalar(discr_dest, Scalar::from_u128(discr_val), discr.ty)?;
        }
        layout::Variants::NicheFilling {
            dataful_variant,
            ref niche_variants,
            niche_start,
            ..
        } => {
            if variant_index != dataful_variant {
                let (niche_dest, niche) =
                    ecx.place_field(dest, mir::Field::new(0), layout)?;
                let niche_value = ((variant_index - niche_variants.start()) as u128)
                    .wrapping_add(niche_start);
                ecx.write_scalar(niche_dest, Scalar::from_u128(niche_value), niche.ty)?;
            }
        }
    }

    Ok(())
}

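/// Miri-specific extensions to `EvalContext`: handling of calls the generic interpreter
/// cannot execute on its own, i.e. foreign (C ABI) functions, functions without MIR, and
/// a few specially hooked paths.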
pub trait EvalContextExt<'tcx> {
    fn call_c_abi(
        &mut self,
        def_id: DefId,
        args: &[ValTy<'tcx>],
        dest: Place,
        dest_ty: Ty<'tcx>,
        dest_block: mir::BasicBlock,
    ) -> EvalResult<'tcx>;

    fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>>;

    fn call_missing_fn(
        &mut self,
        instance: ty::Instance<'tcx>,
        destination: Option<(Place, mir::BasicBlock)>,
        args: &[ValTy<'tcx>],
        sig: ty::FnSig<'tcx>,
        path: String,
    ) -> EvalResult<'tcx>;

    fn eval_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        destination: Option<(Place, mir::BasicBlock)>,
        args: &[ValTy<'tcx>],
        span: Span,
        sig: ty::FnSig<'tcx>,
    ) -> EvalResult<'tcx, bool>;

    fn write_null(&mut self, dest: Place, dest_ty: Ty<'tcx>) -> EvalResult<'tcx>;
}

impl<'a, 'mir, 'tcx: 'mir + 'a> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>> {
    fn eval_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        destination: Option<(Place, mir::BasicBlock)>,
        args: &[ValTy<'tcx>],
        span: Span,
        sig: ty::FnSig<'tcx>,
    ) -> EvalResult<'tcx, bool> {
        trace!("eval_fn_call: {:#?}, {:#?}", instance, destination);

        let def_id = instance.def_id();
        let item_path = self.tcx.absolute_item_path_str(def_id);
        if item_path.starts_with("std::") {
            //println!("{}", item_path);
        }
        match &*item_path {
            "std::sys::unix::thread::guard::init" | "std::sys::unix::thread::guard::current" => {
                // Return None, as it doesn't make sense to return Some, because miri detects stack overflow itself.
                let ret_ty = sig.output();
                match ret_ty.sty {
                    ty::TyAdt(ref adt_def, _) => {
                        assert!(adt_def.is_enum(), "Unexpected return type for {}", item_path);
                        let none_variant_index = adt_def.variants.iter().position(|def| {
                            def.name.as_str() == "None"
                        }).expect("No None variant");
                        let (return_place, return_to_block) = destination.unwrap();
                        write_discriminant_value(self, ret_ty, return_place, none_variant_index)?;
                        self.goto_block(return_to_block);
                        return Ok(true);
                    }
                    _ => panic!("Unexpected return type for {}", item_path)
                }
            }
            "std::sys::unix::fast_thread_local::register_dtor" => {
                // TODO: register the dtor
                let (_return_place, return_to_block) = destination.unwrap();
                self.goto_block(return_to_block);
                return Ok(true);
            }
            _ => {}
        }

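        // Intercept `align_offset` (recognized via its lang item): Miri does not expose real
        // pointer addresses, so it conservatively answers `usize::MAX` (all ones, truncated to
        // pointer width below), the value `align_offset` uses to signal "cannot align".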
        if self.tcx.lang_items().align_offset_fn() == Some(instance.def.def_id()) {
            // FIXME: return a real value in case the target allocation has an
            // alignment bigger than the one requested
            let n = u128::max_value();
            let amt = 128 - self.memory.pointer_size().bytes() * 8;
            let (dest, return_to_block) = destination.unwrap();
            let ty = self.tcx.types.usize;
            self.write_scalar(dest, Scalar::from_u128((n << amt) >> amt), ty)?;
            self.goto_block(return_to_block);
            return Ok(true);
        }

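        // Load the callee's MIR; if there is none (e.g. a foreign function, or libstd built
        // without full MIR), fall back to the shims in `call_missing_fn`.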
        let mir = match self.load_mir(instance.def) {
            Ok(mir) => mir,
            Err(EvalError { kind: EvalErrorKind::NoMirFor(path), .. }) => {
                self.call_missing_fn(
                    instance,
                    destination,
                    args,
                    sig,
                    path,
                )?;
                return Ok(true);
            }
            Err(other) => return Err(other),
        };

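        // With no destination, the call is not expected to return: the new frame gets an
        // undefined return place and no block to jump back to.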
        let (return_place, return_to_block) = match destination {
            Some((place, block)) => (place, StackPopCleanup::Goto(block)),
            None => (Place::undef(), StackPopCleanup::None),
        };

        self.push_stack_frame(
            instance,
            span,
            mir,
            return_place,
            return_to_block,
        )?;

        Ok(false)
    }

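    /// Emulates a call to a foreign (C ABI) function by dispatching on its link name.
    /// Only the small set of shims below is supported; everything else is reported as
    /// unimplemented.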
    fn call_c_abi(
        &mut self,
        def_id: DefId,
        args: &[ValTy<'tcx>],
        dest: Place,
        dest_ty: Ty<'tcx>,
        dest_block: mir::BasicBlock,
    ) -> EvalResult<'tcx> {
        let attrs = self.tcx.get_attrs(def_id);
        let link_name = match attr::first_attr_value_str_by_name(&attrs, "link_name") {
            Some(name) => name.as_str(),
            None => self.tcx.item_name(def_id).as_str(),
        };

        match &link_name[..] {
            "malloc" => {
                let size = self.value_to_scalar(args[0])?.to_u64()?;
                if size == 0 {
                    self.write_null(dest, dest_ty)?;
                } else {
                    let align = self.tcx.data_layout.pointer_align;
                    let ptr = self.memory.allocate(Size::from_bytes(size), align, Some(MemoryKind::C.into()))?;
                    self.write_scalar(dest, Scalar::Ptr(ptr), dest_ty)?;
                }
            }

            "free" => {
                let ptr = self.into_ptr(args[0].value)?;
                if !ptr.is_null()? {
                    self.memory.deallocate(
                        ptr.to_ptr()?,
                        None,
                        MemoryKind::C.into(),
                    )?;
                }
            }

            "syscall" => {
                // TODO: read `syscall` ids like `sysconf` ids and
                // figure out some way to actually process some of them
                //
                // libc::syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), GRND_NONBLOCK)
                // is called if a `HashMap` is created the regular way.
                match self.value_to_scalar(args[0])?.to_u64()? {
                    318 | 511 => {
                        return err!(Unimplemented(
                            "miri does not support random number generators".to_owned(),
                        ))
                    }
                    id => {
                        return err!(Unimplemented(
                            format!("miri does not support syscall id {}", id),
                        ))
                    }
                }
            }

            "dlsym" => {
                let _handle = self.into_ptr(args[0].value)?;
                let symbol = self.into_ptr(args[1].value)?.to_ptr()?;
                let symbol_name = self.memory.read_c_str(symbol)?;
                let err = format!("bad c unicode symbol: {:?}", symbol_name);
                let symbol_name = ::std::str::from_utf8(symbol_name).unwrap_or(&err);
                return err!(Unimplemented(format!(
                    "miri does not support dynamically loading libraries (requested symbol: {})",
                    symbol_name
                )));
            }

            "__rust_maybe_catch_panic" => {
                // fn __rust_maybe_catch_panic(f: fn(*mut u8), data: *mut u8, data_ptr: *mut usize, vtable_ptr: *mut usize) -> u32
                // We abort on panic, so not much is going on here, but we still have to call the closure
                let u8_ptr_ty = self.tcx.mk_mut_ptr(self.tcx.types.u8);
                let f = self.into_ptr(args[0].value)?.to_ptr()?;
                let data = self.into_ptr(args[1].value)?;
                let f_instance = self.memory.get_fn(f)?;
                self.write_null(dest, dest_ty)?;

                // Now we make a function call.  TODO: Consider making this re-usable?  EvalContext::step does something similar for the TLS dtors,
                // and of course eval_main.
                let mir = self.load_mir(f_instance.def)?;
                self.push_stack_frame(
                    f_instance,
                    mir.span,
                    mir,
                    Place::undef(),
                    StackPopCleanup::Goto(dest_block),
                )?;
                let mut args = self.frame().mir.args_iter();

                let arg_local = args.next().ok_or(
                    EvalErrorKind::AbiViolation(
                        "The function passed to __rust_maybe_catch_panic does not take enough arguments."
                            .to_owned(),
                    ),
                )?;
                let arg_dest = self.eval_place(&mir::Place::Local(arg_local))?;
                self.write_ptr(arg_dest, data, u8_ptr_ty)?;

                assert!(args.next().is_none(), "the function passed to __rust_maybe_catch_panic takes more arguments than expected");

                // We ourselves return 0
                self.write_null(dest, dest_ty)?;

                // Don't fall through
                return Ok(());
            }

            "__rust_start_panic" => {
                return err!(Panic);
            }

            "memcmp" => {
                let left = self.into_ptr(args[0].value)?;
                let right = self.into_ptr(args[1].value)?;
                let n = Size::from_bytes(self.value_to_scalar(args[2])?.to_u64()?);

                let result = {
                    let left_bytes = self.memory.read_bytes(left, n)?;
                    let right_bytes = self.memory.read_bytes(right, n)?;

                    use std::cmp::Ordering::*;
                    match left_bytes.cmp(right_bytes) {
                        Less => -1i32,
                        Equal => 0,
                        Greater => 1,
                    }
                };

                self.write_scalar(
                    dest,
                    Scalar::from_i32(result),
                    dest_ty,
                )?;
            }

            "memrchr" => {
                let ptr = self.into_ptr(args[0].value)?;
                let val = self.value_to_scalar(args[1])?.to_u64()? as u8;
                let num = self.value_to_scalar(args[2])?.to_u64()?;
                if let Some(idx) = self.memory.read_bytes(ptr, Size::from_bytes(num))?.iter().rev().position(
                    |&c| c == val,
                )
                {
                    let new_ptr = ptr.ptr_offset(Size::from_bytes(num - idx as u64 - 1), &self)?;
                    self.write_ptr(dest, new_ptr, dest_ty)?;
                } else {
                    self.write_null(dest, dest_ty)?;
                }
            }

            "memchr" => {
                let ptr = self.into_ptr(args[0].value)?;
                let val = self.value_to_scalar(args[1])?.to_u64()? as u8;
                let num = self.value_to_scalar(args[2])?.to_u64()?;
                if let Some(idx) = self.memory.read_bytes(ptr, Size::from_bytes(num))?.iter().position(
                    |&c| c == val,
                )
                {
                    let new_ptr = ptr.ptr_offset(Size::from_bytes(idx as u64), &self)?;
                    self.write_ptr(dest, new_ptr, dest_ty)?;
                } else {
                    self.write_null(dest, dest_ty)?;
                }
            }

            "getenv" => {
                let result = {
                    let name_ptr = self.into_ptr(args[0].value)?.to_ptr()?;
                    let name = self.memory.read_c_str(name_ptr)?;
                    match self.machine.env_vars.get(name) {
                        Some(&var) => Scalar::Ptr(var),
                        None => Scalar::null(),
                    }
                };
                self.write_scalar(dest, result, dest_ty)?;
            }

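            // Like `getenv` above, `unsetenv` and `setenv` operate on Miri's own environment
            // map (`self.machine.env_vars`, backed by interpreter memory); they never touch
            // the host process environment.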
            "unsetenv" => {
                let mut success = None;
                {
                    let name_ptr = self.into_ptr(args[0].value)?;
                    if !name_ptr.is_null()? {
                        let name = self.memory.read_c_str(name_ptr.to_ptr()?)?;
                        if !name.is_empty() && !name.contains(&b'=') {
                            success = Some(self.machine.env_vars.remove(name));
                        }
                    }
                }
                if let Some(old) = success {
                    if let Some(var) = old {
                        self.memory.deallocate(var, None, MemoryKind::Env.into())?;
                    }
                    self.write_null(dest, dest_ty)?;
                } else {
                    self.write_scalar(dest, Scalar::from_i128(-1), dest_ty)?;
                }
            }

            "setenv" => {
                let mut new = None;
                {
                    let name_ptr = self.into_ptr(args[0].value)?;
                    let value_ptr = self.into_ptr(args[1].value)?.to_ptr()?;
                    let value = self.memory.read_c_str(value_ptr)?;
                    if !name_ptr.is_null()? {
                        let name = self.memory.read_c_str(name_ptr.to_ptr()?)?;
                        if !name.is_empty() && !name.contains(&b'=') {
                            new = Some((name.to_owned(), value.to_owned()));
                        }
                    }
                }
                if let Some((name, value)) = new {
                    // +1 for the null terminator
                    let value_copy = self.memory.allocate(
                        Size::from_bytes((value.len() + 1) as u64),
                        Align::from_bytes(1, 1).unwrap(),
                        Some(MemoryKind::Env.into()),
                    )?;
                    self.memory.write_bytes(value_copy.into(), &value)?;
                    let trailing_zero_ptr = value_copy.offset(Size::from_bytes(value.len() as u64), &self)?.into();
                    self.memory.write_bytes(trailing_zero_ptr, &[0])?;
                    if let Some(var) = self.machine.env_vars.insert(
                        name.to_owned(),
                        value_copy,
                    )
                    {
                        self.memory.deallocate(var, None, MemoryKind::Env.into())?;
                    }
                    self.write_null(dest, dest_ty)?;
                } else {
                    self.write_scalar(dest, Scalar::from_i128(-1), dest_ty)?;
                }
            }

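            // `write` forwards fds 1 and 2 to the host's stdout/stderr; writes to any other
            // fd are ignored but reported as fully successful.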
            "write" => {
                let fd = self.value_to_scalar(args[0])?.to_u64()?;
                let buf = self.into_ptr(args[1].value)?;
                let n = self.value_to_scalar(args[2])?.to_u64()?;
                trace!("Called write({:?}, {:?}, {:?})", fd, buf, n);
                let result = if fd == 1 || fd == 2 {
                    // stdout/stderr
                    use std::io::{self, Write};

                    let buf_cont = self.memory.read_bytes(buf, Size::from_bytes(n))?;
                    let res = if fd == 1 {
                        io::stdout().write(buf_cont)
                    } else {
                        io::stderr().write(buf_cont)
                    };
                    match res {
                        Ok(n) => n as i64,
                        Err(_) => -1,
                    }
                } else {
                    warn!("Ignored output to FD {}", fd);
                    n as i64 // pretend it all went well
                }; // now result is the value we return back to the program
                let ptr_size = self.memory.pointer_size();
                self.write_scalar(
                    dest,
                    Scalar::from_isize(result, ptr_size),
                    dest_ty,
                )?;
            }

            "strlen" => {
                let ptr = self.into_ptr(args[0].value)?.to_ptr()?;
                let n = self.memory.read_c_str(ptr)?.len();
                let ptr_size = self.memory.pointer_size();
                self.write_scalar(dest, Scalar::from_usize(n as u64, ptr_size), dest_ty)?;
            }

            // Some things needed for sys::thread initialization to go through
            "signal" | "sigaction" | "sigaltstack" => {
                self.write_scalar(dest, Scalar::null(), dest_ty)?;
            }

            "sysconf" => {
                let name = self.value_to_scalar(args[0])?.to_u64()?;

                trace!("sysconf() called with name {}", name);
                // cache the sysconf integers via miri's global cache
                let paths = &[
                    (&["libc", "_SC_PAGESIZE"], Scalar::from_i128(4096)),
                    (&["libc", "_SC_GETPW_R_SIZE_MAX"], Scalar::from_i128(-1)),
                ];
                let mut result = None;
                for &(path, path_value) in paths {
                    if let Ok(instance) = self.resolve_path(path) {
                        let cid = GlobalId {
                            instance,
                            promoted: None,
                        };
                        let const_val = self.const_eval(cid)?;
                        let value = const_val.unwrap_usize(self.tcx.tcx);
                        if value == name {
                            result = Some(path_value);
                            break;
                        }
                    }
                }
                if let Some(result) = result {
                    self.write_scalar(dest, result, dest_ty)?;
                } else {
                    return err!(Unimplemented(
                        format!("Unimplemented sysconf name: {}", name),
                    ));
                }
            }

            // Hook pthread calls that go to the thread-local storage memory subsystem
            "pthread_key_create" => {
                let key_ptr = self.into_ptr(args[0].value)?;
                let key_align = self.layout_of(args[0].ty)?.align;

                // Extract the function type out of the signature (that seems easier than constructing it ourselves...)
                let dtor = match self.into_ptr(args[1].value)? {
                    Scalar::Ptr(dtor_ptr) => Some(self.memory.get_fn(dtor_ptr)?),
                    Scalar::Bits { defined: 0, .. } => return err!(ReadUndefBytes),
                    Scalar::Bits { bits: 0, .. } => None,
                    Scalar::Bits { .. } => return err!(ReadBytesAsPointer),
                };

                // Figure out how large a pthread TLS key actually is. This is libc::pthread_key_t.
                let key_type = args[0].ty.builtin_deref(true)
                                   .ok_or(EvalErrorKind::AbiViolation("Wrong signature used for pthread_key_create: First argument must be a raw pointer.".to_owned()))?.ty;
                let key_size = self.layout_of(key_type)?.size;

                // Create key and write it into the memory where key_ptr wants it
                let key = self.memory.create_tls_key(dtor) as u128;
                if key_size.bits() < 128 && key >= (1u128 << key_size.bits() as u128) {
                    return err!(OutOfTls);
                }
                self.memory.write_scalar(
                    key_ptr,
                    key_align,
                    Scalar::from_u128(key),
                    key_size,
                    false,
                )?;

                // Return success (0)
                self.write_null(dest, dest_ty)?;
            }
            "pthread_key_delete" => {
                let key = self.value_to_scalar(args[0])?.to_bytes()?;
                self.memory.delete_tls_key(key)?;
                // Return success (0)
                self.write_null(dest, dest_ty)?;
            }
            "pthread_getspecific" => {
                let key = self.value_to_scalar(args[0])?.to_bytes()?;
                let ptr = self.memory.load_tls(key)?;
                self.write_ptr(dest, ptr, dest_ty)?;
            }
            "pthread_setspecific" => {
                let key = self.value_to_scalar(args[0])?.to_bytes()?;
                let new_ptr = self.into_ptr(args[1].value)?;
                self.memory.store_tls(key, new_ptr)?;

                // Return success (0)
                self.write_null(dest, dest_ty)?;
            }

            "_tlv_atexit" => {
                return err!(Unimplemented("can't interpret with full mir for osx target".to_owned()));
            },

            // Stub out all the other pthread calls to just return 0
            link_name if link_name.starts_with("pthread_") => {
                info!("ignoring C ABI call: {}", link_name);
                self.write_null(dest, dest_ty)?;
            }

            "mmap" => {
                // This is a horrible hack, but well... the guard page mechanism calls mmap and expects a particular return value, so we give it that value
                let addr = self.into_ptr(args[0].value)?;
                self.write_ptr(dest, addr, dest_ty)?;
            }

            _ => {
                return err!(Unimplemented(
                    format!("can't call C ABI function: {}", link_name),
                ));
            }
        }

        // Since we pushed no stack frame, the main loop will act
        // as if the call just completed and it's returning to the
        // current frame.
        self.dump_local(dest);
        self.goto_block(dest_block);
        Ok(())
    }

    /// Get an instance for a path.
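    /// The path is given as crate-relative segments, e.g. `&["libc", "_SC_PAGESIZE"]`:
    /// the first segment names the crate, the remaining segments are looked up among that
    /// crate's items.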
    fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>> {
        self.tcx
            .crates()
            .iter()
            .find(|&&krate| self.tcx.original_crate_name(krate) == path[0])
            .and_then(|krate| {
                let krate = DefId {
                    krate: *krate,
                    index: CRATE_DEF_INDEX,
                };
                let mut items = self.tcx.item_children(krate);
                let mut path_it = path.iter().skip(1).peekable();

                while let Some(segment) = path_it.next() {
                    for item in mem::replace(&mut items, Default::default()).iter() {
                        if item.ident.name == *segment {
                            if path_it.peek().is_none() {
                                return Some(ty::Instance::mono(self.tcx.tcx, item.def.def_id()));
                            }

                            items = self.tcx.item_children(item.def.def_id());
                            break;
                        }
                    }
                }
                None
            })
            .ok_or_else(|| {
                let path = path.iter().map(|&s| s.to_owned()).collect();
                EvalErrorKind::PathNotFound(path).into()
            })
    }

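    /// Called when a function has no MIR available (e.g. a foreign function, or libstd built
    /// without full MIR). C ABI functions are forwarded to `call_c_abi`; allocator shims,
    /// panic entry points and a few libstd items are emulated here; everything else is a
    /// `NoMirFor` error.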
    fn call_missing_fn(
        &mut self,
        instance: ty::Instance<'tcx>,
        destination: Option<(Place, mir::BasicBlock)>,
        args: &[ValTy<'tcx>],
        sig: ty::FnSig<'tcx>,
        path: String,
    ) -> EvalResult<'tcx> {
        // In some cases in non-MIR libstd-mode, not having a destination is legit.  Handle these early.
        match &path[..] {
            "std::panicking::rust_panic_with_hook" |
            "core::panicking::panic_fmt::::panic_impl" |
            "std::rt::begin_panic_fmt" => return err!(Panic),
            _ => {}
        }

        let dest_ty = sig.output();
        let (dest, dest_block) = destination.ok_or_else(
            || EvalErrorKind::NoMirFor(path.clone()),
        )?;

        if sig.abi == Abi::C {
            // An external C function
            // TODO: That function actually has a similar preamble to what follows here.  May make sense to
            // unify these two mechanisms for "hooking into missing functions".
            self.call_c_abi(
                instance.def_id(),
                args,
                dest,
                dest_ty,
                dest_block,
            )?;
            return Ok(());
        }

        match &path[..] {
            // Allocators are magic.  They have no MIR, even when the rest of libstd does.
            "alloc::alloc::::__rust_alloc" => {
                let size = self.value_to_scalar(args[0])?.to_u64()?;
                let align = self.value_to_scalar(args[1])?.to_u64()?;
                if size == 0 {
                    return err!(HeapAllocZeroBytes);
                }
                if !align.is_power_of_two() {
                    return err!(HeapAllocNonPowerOfTwoAlignment(align));
                }
                let ptr = self.memory.allocate(Size::from_bytes(size),
                                               Align::from_bytes(align, align).unwrap(),
                                               Some(MemoryKind::Rust.into()))?;
                self.write_scalar(dest, Scalar::Ptr(ptr), dest_ty)?;
            }
            "alloc::alloc::::__rust_alloc_zeroed" => {
                let size = self.value_to_scalar(args[0])?.to_u64()?;
                let align = self.value_to_scalar(args[1])?.to_u64()?;
                if size == 0 {
                    return err!(HeapAllocZeroBytes);
                }
                if !align.is_power_of_two() {
                    return err!(HeapAllocNonPowerOfTwoAlignment(align));
                }
                let ptr = self.memory.allocate(Size::from_bytes(size),
                                               Align::from_bytes(align, align).unwrap(),
                                               Some(MemoryKind::Rust.into()))?;
                self.memory.write_repeat(ptr.into(), 0, Size::from_bytes(size))?;
                self.write_scalar(dest, Scalar::Ptr(ptr), dest_ty)?;
            }
            "alloc::alloc::::__rust_dealloc" => {
                let ptr = self.into_ptr(args[0].value)?.to_ptr()?;
                let old_size = self.value_to_scalar(args[1])?.to_u64()?;
                let align = self.value_to_scalar(args[2])?.to_u64()?;
                if old_size == 0 {
                    return err!(HeapAllocZeroBytes);
                }
                if !align.is_power_of_two() {
                    return err!(HeapAllocNonPowerOfTwoAlignment(align));
                }
                self.memory.deallocate(
                    ptr,
                    Some((Size::from_bytes(old_size), Align::from_bytes(align, align).unwrap())),
                    MemoryKind::Rust.into(),
                )?;
            }
            "alloc::alloc::::__rust_realloc" => {
                let ptr = self.into_ptr(args[0].value)?.to_ptr()?;
                let old_size = self.value_to_scalar(args[1])?.to_u64()?;
                let align = self.value_to_scalar(args[2])?.to_u64()?;
                let new_size = self.value_to_scalar(args[3])?.to_u64()?;
                if old_size == 0 || new_size == 0 {
                    return err!(HeapAllocZeroBytes);
                }
                if !align.is_power_of_two() {
                    return err!(HeapAllocNonPowerOfTwoAlignment(align));
                }
                let new_ptr = self.memory.reallocate(
                    ptr,
                    Size::from_bytes(old_size),
                    Align::from_bytes(align, align).unwrap(),
                    Size::from_bytes(new_size),
                    Align::from_bytes(align, align).unwrap(),
                    MemoryKind::Rust.into(),
                )?;
                self.write_scalar(dest, Scalar::Ptr(new_ptr), dest_ty)?;
            }

            // A Rust function is missing, which means we are running with MIR missing for libstd (or other dependencies).
            // Still, we can make many things mostly work by "emulating" or ignoring some functions.
            "std::io::_print" => {
                warn!(
                    "Ignoring output.  To run programs that print, make sure you have a libstd with full MIR."
                );
            }
            "std::thread::Builder::new" => {
                return err!(Unimplemented("miri does not support threading".to_owned()))
            }
            "std::env::args" => {
                return err!(Unimplemented(
                    "miri does not support program arguments".to_owned(),
                ))
            }
            "std::panicking::panicking" |
            "std::rt::panicking" => {
                // we abort on panic -> `std::rt::panicking` always returns false
                let bool = self.tcx.types.bool;
                self.write_scalar(dest, Scalar::from_bool(false), bool)?;
            }
            "std::sys::imp::c::::AddVectoredExceptionHandler" |
            "std::sys::imp::c::::SetThreadStackGuarantee" => {
                let usize = self.tcx.types.usize;
                // any non-zero value works for the stdlib; this is just used for stack overflows anyway
                self.write_scalar(dest, Scalar::from_u128(1), usize)?;
            },
            _ => return err!(NoMirFor(path)),
        }

        // Since we pushed no stack frame, the main loop will act
        // as if the call just completed and it's returning to the
        // current frame.
        self.dump_local(dest);
        self.goto_block(dest_block);
        return Ok(());
    }

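    /// Convenience helper: writes a zero scalar (0 / null pointer) of type `dest_ty` into `dest`.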
    fn write_null(&mut self, dest: Place, dest_ty: Ty<'tcx>) -> EvalResult<'tcx> {
        self.write_scalar(dest, Scalar::null(), dest_ty)
    }
}