]> git.lizzy.rs Git - rust.git/blob - src/fn_call.rs
509119beb35c43f894205f2d38355eb679050c4c
[rust.git] / src / fn_call.rs
1 use rustc::ty::{self, Ty};
2 use rustc::ty::layout::{self, Align, LayoutOf, Size};
3 use rustc::hir::def_id::{DefId, CRATE_DEF_INDEX};
4 use rustc::mir;
5 use rustc_data_structures::indexed_vec::Idx;
6 use syntax::attr;
7 use syntax::codemap::Span;
8
9 use std::mem;
10
11 use super::*;
12
13 use tls::MemoryExt;
14
15 use super::memory::MemoryKind;
16
17 fn write_discriminant_value<'a, 'mir, 'tcx: 'a + 'mir>(
18         ecx: &mut EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>>,
19         dest_ty: Ty<'tcx>,
20         dest: Place,
21         variant_index: usize,
22     ) -> EvalResult<'tcx> {
23         let layout = ecx.layout_of(dest_ty)?;
24
25         match layout.variants {
26             layout::Variants::Single { index } => {
27                 if index != variant_index {
28                     // If the layout of an enum is `Single`, all
29                     // other variants are necessarily uninhabited.
30                     assert_eq!(layout.for_variant(&ecx, variant_index).abi,
31                                layout::Abi::Uninhabited);
32                 }
33             }
34             layout::Variants::Tagged { .. } => {
35                 let discr_val = dest_ty.ty_adt_def().unwrap()
36                     .discriminant_for_variant(*ecx.tcx, variant_index)
37                     .val;
38
39                 let (discr_dest, discr) = ecx.place_field(dest, mir::Field::new(0), layout)?;
40                 ecx.write_scalar(discr_dest, Scalar::from_uint(discr_val, discr.size), discr.ty)?;
41             }
42             layout::Variants::NicheFilling {
43                 dataful_variant,
44                 ref niche_variants,
45                 niche_start,
46                 ..
47             } => {
48                 if variant_index != dataful_variant {
49                     let (niche_dest, niche) =
50                         ecx.place_field(dest, mir::Field::new(0), layout)?;
51                     let niche_value = ((variant_index - niche_variants.start()) as u128)
52                         .wrapping_add(niche_start);
53                     ecx.write_scalar(niche_dest, Scalar::from_uint(niche_value, niche.size), niche.ty)?;
54                 }
55             }
56         }
57
58         Ok(())
59     }
60
/// Miri-specific extension methods for `EvalContext`: hooks that intercept
/// calls so they can be emulated instead of executed as MIR (foreign
/// functions, functions with no available MIR, and a few special-cased
/// std internals).
pub trait EvalContextExt<'tcx> {
    /// Emulates a call to the foreign function `def_id` with the given
    /// `args`, writing any result into `dest` (of type `dest_ty`) and then
    /// continuing execution at `dest_block`.
    fn call_foreign_item(
        &mut self,
        def_id: DefId,
        args: &[ValTy<'tcx>],
        dest: Place,
        dest_ty: Ty<'tcx>,
        dest_block: mir::BasicBlock,
    ) -> EvalResult<'tcx>;

    /// Resolves an absolute item path (e.g. `["libc", "_SC_PAGESIZE"]`) to a
    /// monomorphic instance by walking the crate graph and item children.
    fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>>;

    /// Fallback used when `load_mir` reports no MIR for `instance`; `path`
    /// is the item path from the `NoMirFor` error.
    fn call_missing_fn(
        &mut self,
        instance: ty::Instance<'tcx>,
        destination: Option<(Place, mir::BasicBlock)>,
        args: &[ValTy<'tcx>],
        sig: ty::FnSig<'tcx>,
        path: String,
    ) -> EvalResult<'tcx>;

    /// Main call hook. Returns `Ok(true)` when the call was fully handled
    /// (emulated) here, and `Ok(false)` when a stack frame was pushed and
    /// the callee's MIR should now be executed by the main loop.
    fn eval_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        destination: Option<(Place, mir::BasicBlock)>,
        args: &[ValTy<'tcx>],
        span: Span,
        sig: ty::FnSig<'tcx>,
    ) -> EvalResult<'tcx, bool>;

    /// Writes a zero ("null") value of `dest_layout`'s size into `dest`.
    fn write_null(&mut self, dest: Place, dest_layout: TyLayout<'tcx>) -> EvalResult<'tcx>;
}
93
94 impl<'a, 'mir, 'tcx: 'mir + 'a> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super::Evaluator<'tcx>> {
95     fn eval_fn_call(
96         &mut self,
97         instance: ty::Instance<'tcx>,
98         destination: Option<(Place, mir::BasicBlock)>,
99         args: &[ValTy<'tcx>],
100         span: Span,
101         sig: ty::FnSig<'tcx>,
102     ) -> EvalResult<'tcx, bool> {
103         trace!("eval_fn_call: {:#?}, {:#?}", instance, destination);
104
105         let def_id = instance.def_id();
106         let item_path = self.tcx.absolute_item_path_str(def_id);
107         match &*item_path {
108             "std::sys::unix::thread::guard::init" | "std::sys::unix::thread::guard::current" => {
109                 // Return None, as it doesn't make sense to return Some, because miri detects stack overflow itself.
110                 let ret_ty = sig.output();
111                 match ret_ty.sty {
112                     ty::TyAdt(ref adt_def, _) => {
113                         assert!(adt_def.is_enum(), "Unexpected return type for {}", item_path);
114                         let none_variant_index = adt_def.variants.iter().position(|def| {
115                             def.name.as_str() == "None"
116                         }).expect("No None variant");
117                         let (return_place, return_to_block) = destination.unwrap();
118                         write_discriminant_value(self, ret_ty, return_place, none_variant_index)?;
119                         self.goto_block(return_to_block);
120                         return Ok(true);
121                     }
122                     _ => panic!("Unexpected return type for {}", item_path)
123                 }
124             }
125             "std::sys::unix::fast_thread_local::register_dtor" => {
126                 // TODO: register the dtor
127                 let (_return_place, return_to_block) = destination.unwrap();
128                 self.goto_block(return_to_block);
129                 return Ok(true);
130             }
131             _ => {}
132         }
133
134         if self.tcx.lang_items().align_offset_fn() == Some(instance.def.def_id()) {
135             // FIXME: return a real value in case the target allocation has an
136             // alignment bigger than the one requested
137             let n = u128::max_value();
138             let amt = 128 - self.memory.pointer_size().bytes() * 8;
139             let (dest, return_to_block) = destination.unwrap();
140             let ty = self.tcx.types.usize;
141             let ptr_size = self.memory.pointer_size();
142             self.write_scalar(dest, Scalar::from_uint((n << amt) >> amt, ptr_size), ty)?;
143             self.goto_block(return_to_block);
144             return Ok(true);
145         }
146
147         let mir = match self.load_mir(instance.def) {
148             Ok(mir) => mir,
149             Err(EvalError { kind: EvalErrorKind::NoMirFor(path), .. }) => {
150                 self.call_missing_fn(
151                     instance,
152                     destination,
153                     args,
154                     sig,
155                     path,
156                 )?;
157                 return Ok(true);
158             }
159             Err(other) => return Err(other),
160         };
161
162         let (return_place, return_to_block) = match destination {
163             Some((place, block)) => (place, StackPopCleanup::Goto(block)),
164             None => (Place::undef(), StackPopCleanup::None),
165         };
166
167         self.push_stack_frame(
168             instance,
169             span,
170             mir,
171             return_place,
172             return_to_block,
173         )?;
174
175         Ok(false)
176     }
177
    /// Emulates a call to the foreign function `def_id`, dispatching on its
    /// `#[link_name]` (falling back to the item name).
    ///
    /// Arguments are read from `args`; the emulated return value (if any) is
    /// written to `dest` (of type `dest_ty`), and control then continues at
    /// `dest_block`. Functions with no shim produce an `Unimplemented` error
    /// rather than being silently ignored.
    fn call_foreign_item(
        &mut self,
        def_id: DefId,
        args: &[ValTy<'tcx>],
        dest: Place,
        dest_ty: Ty<'tcx>,
        dest_block: mir::BasicBlock,
    ) -> EvalResult<'tcx> {
        // Match by `#[link_name]` when present, otherwise the item name.
        let attrs = self.tcx.get_attrs(def_id);
        let link_name = match attr::first_attr_value_str_by_name(&attrs, "link_name") {
            Some(name) => name.as_str(),
            None => self.tcx.item_name(def_id).as_str(),
        };
        let dest_layout = self.layout_of(dest_ty)?;

        match &link_name[..] {
            "malloc" => {
                // Zero-size requests return NULL (allowed by C); anything
                // else gets a fresh pointer-aligned C allocation.
                let size = self.value_to_scalar(args[0])?.to_usize(self)?;
                if size == 0 {
                    self.write_null(dest, dest_layout)?;
                } else {
                    let align = self.tcx.data_layout.pointer_align;
                    let ptr = self.memory.allocate(Size::from_bytes(size), align, MemoryKind::C.into())?;
                    self.write_scalar(dest, Scalar::Ptr(ptr), dest_ty)?;
                }
            }

            "free" => {
                // free(NULL) is a no-op, as in C.
                let ptr = self.into_ptr(args[0].value)?.unwrap_or_err()?;
                if !ptr.is_null() {
                    self.memory.deallocate(
                        ptr.to_ptr()?,
                        None,
                        MemoryKind::C.into(),
                    )?;
                }
            }

            // Rust's global allocator entry points. Unlike malloc, these
            // treat zero-size and non-power-of-two-alignment requests as
            // hard errors.
            "__rust_alloc" => {
                let size = self.value_to_scalar(args[0])?.to_usize(self)?;
                let align = self.value_to_scalar(args[1])?.to_usize(self)?;
                if size == 0 {
                    return err!(HeapAllocZeroBytes);
                }
                if !align.is_power_of_two() {
                    return err!(HeapAllocNonPowerOfTwoAlignment(align));
                }
                let ptr = self.memory.allocate(Size::from_bytes(size),
                                               Align::from_bytes(align, align).unwrap(),
                                               MemoryKind::Rust.into())?;
                self.write_scalar(dest, Scalar::Ptr(ptr), dest_ty)?;
            }
            "__rust_alloc_zeroed" => {
                let size = self.value_to_scalar(args[0])?.to_usize(self)?;
                let align = self.value_to_scalar(args[1])?.to_usize(self)?;
                if size == 0 {
                    return err!(HeapAllocZeroBytes);
                }
                if !align.is_power_of_two() {
                    return err!(HeapAllocNonPowerOfTwoAlignment(align));
                }
                let ptr = self.memory.allocate(Size::from_bytes(size),
                                               Align::from_bytes(align, align).unwrap(),
                                               MemoryKind::Rust.into())?;
                // Same as __rust_alloc, plus zero-filling the new block.
                self.memory.write_repeat(ptr.into(), 0, Size::from_bytes(size))?;
                self.write_scalar(dest, Scalar::Ptr(ptr), dest_ty)?;
            }
            "__rust_dealloc" => {
                let ptr = self.into_ptr(args[0].value)?.unwrap_or_err()?.to_ptr()?;
                let old_size = self.value_to_scalar(args[1])?.to_usize(self)?;
                let align = self.value_to_scalar(args[2])?.to_usize(self)?;
                if old_size == 0 {
                    return err!(HeapAllocZeroBytes);
                }
                if !align.is_power_of_two() {
                    return err!(HeapAllocNonPowerOfTwoAlignment(align));
                }
                // Passing the caller-declared size/align lets the memory
                // subsystem check them against the actual allocation.
                self.memory.deallocate(
                    ptr,
                    Some((Size::from_bytes(old_size), Align::from_bytes(align, align).unwrap())),
                    MemoryKind::Rust.into(),
                )?;
            }
            "__rust_realloc" => {
                let ptr = self.into_ptr(args[0].value)?.unwrap_or_err()?.to_ptr()?;
                let old_size = self.value_to_scalar(args[1])?.to_usize(self)?;
                let align = self.value_to_scalar(args[2])?.to_usize(self)?;
                let new_size = self.value_to_scalar(args[3])?.to_usize(self)?;
                if old_size == 0 || new_size == 0 {
                    return err!(HeapAllocZeroBytes);
                }
                if !align.is_power_of_two() {
                    return err!(HeapAllocNonPowerOfTwoAlignment(align));
                }
                let new_ptr = self.memory.reallocate(
                    ptr,
                    Size::from_bytes(old_size),
                    Align::from_bytes(align, align).unwrap(),
                    Size::from_bytes(new_size),
                    Align::from_bytes(align, align).unwrap(),
                    MemoryKind::Rust.into(),
                )?;
                self.write_scalar(dest, Scalar::Ptr(new_ptr), dest_ty)?;
            }

            "syscall" => {
                // TODO: read `syscall` ids like `sysconf` ids and
                // figure out some way to actually process some of them
                //
                // libc::syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), GRND_NONBLOCK)
                // is called if a `HashMap` is created the regular way.
                //
                // NOTE(review): 318/511 are presumably getrandom syscall
                // numbers for specific targets -- confirm before extending.
                match self.value_to_scalar(args[0])?.to_usize(self)? {
                    318 | 511 => {
                        return err!(Unimplemented(
                            "miri does not support random number generators".to_owned(),
                        ))
                    }
                    id => {
                        return err!(Unimplemented(
                            format!("miri does not support syscall id {}", id),
                        ))
                    }
                }
            }

            "dlsym" => {
                // Always unsupported; we only decode the symbol name to
                // produce a helpful error message.
                let _handle = self.into_ptr(args[0].value)?;
                let symbol = self.into_ptr(args[1].value)?.unwrap_or_err()?.to_ptr()?;
                let symbol_name = self.memory.read_c_str(symbol)?;
                let err = format!("bad c unicode symbol: {:?}", symbol_name);
                let symbol_name = ::std::str::from_utf8(symbol_name).unwrap_or(&err);
                return err!(Unimplemented(format!(
                    "miri does not support dynamically loading libraries (requested symbol: {})",
                    symbol_name
                )));
            }

            "__rust_maybe_catch_panic" => {
                // fn __rust_maybe_catch_panic(f: fn(*mut u8), data: *mut u8, data_ptr: *mut usize, vtable_ptr: *mut usize) -> u32
                // We abort on panic, so not much is going on here, but we still have to call the closure
                let u8_ptr_ty = self.tcx.mk_mut_ptr(self.tcx.types.u8);
                let f = self.into_ptr(args[0].value)?.unwrap_or_err()?.to_ptr()?;
                let data = self.into_ptr(args[1].value)?.unwrap_or_err()?;
                let f_instance = self.memory.get_fn(f)?;
                self.write_null(dest, dest_layout)?;

                // Now we make a function call.  TODO: Consider making this re-usable?  EvalContext::step does sth. similar for the TLS dtors,
                // and of course eval_main.
                let mir = self.load_mir(f_instance.def)?;
                self.push_stack_frame(
                    f_instance,
                    mir.span,
                    mir,
                    Place::undef(),
                    StackPopCleanup::Goto(dest_block),
                )?;
                // Pass `data` as the single argument of the just-pushed
                // frame for `f`.
                let mut args = self.frame().mir.args_iter();

                let arg_local = args.next().ok_or_else(||
                    EvalErrorKind::AbiViolation(
                        "Argument to __rust_maybe_catch_panic does not take enough arguments."
                            .to_owned(),
                    ),
                )?;
                let arg_dest = self.eval_place(&mir::Place::Local(arg_local))?;
                self.write_ptr(arg_dest, data, u8_ptr_ty)?;

                assert!(args.next().is_none(), "__rust_maybe_catch_panic argument has more arguments than expected");

                // We ourselves return 0
                self.write_null(dest, dest_layout)?;

                // Don't fall through
                return Ok(());
            }

            "__rust_start_panic" => {
                return err!(Panic);
            }

            "memcmp" => {
                let left = self.into_ptr(args[0].value)?.unwrap_or_err()?;
                let right = self.into_ptr(args[1].value)?.unwrap_or_err()?;
                let n = Size::from_bytes(self.value_to_scalar(args[2])?.to_usize(self)?);

                // Lexicographic byte comparison, mapped to C's -1/0/1.
                let result = {
                    let left_bytes = self.memory.read_bytes(left, n)?;
                    let right_bytes = self.memory.read_bytes(right, n)?;

                    use std::cmp::Ordering::*;
                    match left_bytes.cmp(right_bytes) {
                        Less => -1i32,
                        Equal => 0,
                        Greater => 1,
                    }
                };

                self.write_scalar(
                    dest,
                    Scalar::from_i32(result),
                    dest_ty,
                )?;
            }

            "memrchr" => {
                // Find the *last* occurrence of `val`; returns a pointer to
                // it, or NULL if absent.
                let ptr = self.into_ptr(args[0].value)?.unwrap_or_err()?;
                let val = self.value_to_scalar(args[1])?.to_bytes()? as u8;
                let num = self.value_to_scalar(args[2])?.to_usize(self)?;
                if let Some(idx) = self.memory.read_bytes(ptr, Size::from_bytes(num))?.iter().rev().position(
                    |&c| c == val,
                )
                {
                    // `idx` counts from the end, so convert back to a
                    // forward offset.
                    let new_ptr = ptr.ptr_offset(Size::from_bytes(num - idx as u64 - 1), &self)?;
                    self.write_ptr(dest, new_ptr, dest_ty)?;
                } else {
                    self.write_null(dest, dest_layout)?;
                }
            }

            "memchr" => {
                // Find the *first* occurrence of `val`; returns a pointer to
                // it, or NULL if absent.
                let ptr = self.into_ptr(args[0].value)?.unwrap_or_err()?;
                let val = self.value_to_scalar(args[1])?.to_bytes()? as u8;
                let num = self.value_to_scalar(args[2])?.to_usize(self)?;
                if let Some(idx) = self.memory.read_bytes(ptr, Size::from_bytes(num))?.iter().position(
                    |&c| c == val,
                )
                {
                    let new_ptr = ptr.ptr_offset(Size::from_bytes(idx as u64), &self)?;
                    self.write_ptr(dest, new_ptr, dest_ty)?;
                } else {
                    self.write_null(dest, dest_layout)?;
                }
            }

            "getenv" => {
                // Environment variables are emulated via miri's own
                // `env_vars` map, not the host environment.
                let result = {
                    let name_ptr = self.into_ptr(args[0].value)?.unwrap_or_err()?.to_ptr()?;
                    let name = self.memory.read_c_str(name_ptr)?;
                    match self.machine.env_vars.get(name) {
                        Some(&var) => Scalar::Ptr(var),
                        None => Scalar::null(self.memory.pointer_size()),
                    }
                };
                self.write_scalar(dest, result, dest_ty)?;
            }

            "unsetenv" => {
                // `success` stays None for invalid names (NULL, empty, or
                // containing '='), which yields the -1 error return below.
                let mut success = None;
                {
                    let name_ptr = self.into_ptr(args[0].value)?.unwrap_or_err()?;
                    if !name_ptr.is_null() {
                        let name = self.memory.read_c_str(name_ptr.to_ptr()?)?;
                        if !name.is_empty() && !name.contains(&b'=') {
                            success = Some(self.machine.env_vars.remove(name));
                        }
                    }
                }
                if let Some(old) = success {
                    if let Some(var) = old {
                        // Free the stored copy of the old value.
                        self.memory.deallocate(var, None, MemoryKind::Env.into())?;
                    }
                    self.write_null(dest, dest_layout)?;
                } else {
                    self.write_scalar(dest, Scalar::from_int(-1, dest_layout.size), dest_ty)?;
                }
            }

            "setenv" => {
                // As with unsetenv: invalid names leave `new` as None and
                // return -1; otherwise the value is copied into Env memory.
                let mut new = None;
                {
                    let name_ptr = self.into_ptr(args[0].value)?.unwrap_or_err()?;
                    let value_ptr = self.into_ptr(args[1].value)?.unwrap_or_err()?.to_ptr()?;
                    let value = self.memory.read_c_str(value_ptr)?;
                    if !name_ptr.is_null() {
                        let name = self.memory.read_c_str(name_ptr.to_ptr()?)?;
                        if !name.is_empty() && !name.contains(&b'=') {
                            new = Some((name.to_owned(), value.to_owned()));
                        }
                    }
                }
                if let Some((name, value)) = new {
                    // +1 for the null terminator
                    let value_copy = self.memory.allocate(
                        Size::from_bytes((value.len() + 1) as u64),
                        Align::from_bytes(1, 1).unwrap(),
                        MemoryKind::Env.into(),
                    )?;
                    self.memory.write_bytes(value_copy.into(), &value)?;
                    let trailing_zero_ptr = value_copy.offset(Size::from_bytes(value.len() as u64), &self)?.into();
                    self.memory.write_bytes(trailing_zero_ptr, &[0])?;
                    if let Some(var) = self.machine.env_vars.insert(
                        name.to_owned(),
                        value_copy,
                    )
                    {
                        // Free the previous value's allocation.
                        self.memory.deallocate(var, None, MemoryKind::Env.into())?;
                    }
                    self.write_null(dest, dest_layout)?;
                } else {
                    self.write_scalar(dest, Scalar::from_int(-1, dest_layout.size), dest_ty)?;
                }
            }

            "write" => {
                let fd = self.value_to_scalar(args[0])?.to_bytes()?;
                let buf = self.into_ptr(args[1].value)?.unwrap_or_err()?;
                let n = self.value_to_scalar(args[2])?.to_bytes()? as u64;
                trace!("Called write({:?}, {:?}, {:?})", fd, buf, n);
                let result = if fd == 1 || fd == 2 {
                    // stdout/stderr
                    use std::io::{self, Write};

                    let buf_cont = self.memory.read_bytes(buf, Size::from_bytes(n))?;
                    let res = if fd == 1 {
                        io::stdout().write(buf_cont)
                    } else {
                        io::stderr().write(buf_cont)
                    };
                    match res {
                        Ok(n) => n as i64,
                        Err(_) => -1,
                    }
                } else {
                    warn!("Ignored output to FD {}", fd);
                    n as i64 // pretend it all went well
                }; // now result is the value we return back to the program
                let ptr_size = self.memory.pointer_size();
                self.write_scalar(
                    dest,
                    Scalar::from_int(result, ptr_size),
                    dest_ty,
                )?;
            }

            "strlen" => {
                let ptr = self.into_ptr(args[0].value)?.unwrap_or_err()?.to_ptr()?;
                let n = self.memory.read_c_str(ptr)?.len();
                let ptr_size = self.memory.pointer_size();
                self.write_scalar(dest, Scalar::from_uint(n as u64, ptr_size), dest_ty)?;
            }

            // Some things needed for sys::thread initialization to go through
            "signal" | "sigaction" | "sigaltstack" => {
                // Pretend success (0) without installing anything.
                let ptr_size = self.memory.pointer_size();
                self.write_scalar(dest, Scalar::null(ptr_size), dest_ty)?;
            }

            "sysconf" => {
                let name = self.value_to_scalar(args[0])?.to_usize(self)?;
                let ptr_size = self.memory.pointer_size();

                trace!("sysconf() called with name {}", name);
                // cache the sysconf integers via miri's global cache
                let paths = &[
                    (&["libc", "_SC_PAGESIZE"], Scalar::from_int(4096, ptr_size)),
                    (&["libc", "_SC_GETPW_R_SIZE_MAX"], Scalar::from_int(-1, ptr_size)),
                ];
                let mut result = None;
                for &(path, path_value) in paths {
                    // Const-evaluate the libc constant to learn which
                    // numeric id corresponds to which sysconf name.
                    if let Ok(instance) = self.resolve_path(path) {
                        let cid = GlobalId {
                            instance,
                            promoted: None,
                        };
                        let const_val = self.const_eval(cid)?;
                        let value = const_val.unwrap_usize(self.tcx.tcx);
                        if value == name {
                            result = Some(path_value);
                            break;
                        }
                    }
                }
                if let Some(result) = result {
                    self.write_scalar(dest, result, dest_ty)?;
                } else {
                    return err!(Unimplemented(
                        format!("Unimplemented sysconf name: {}", name),
                    ));
                }
            }

            // Hook pthread calls that go to the thread-local storage memory subsystem
            "pthread_key_create" => {
                let key_ptr = self.into_ptr(args[0].value)?.unwrap_or_err()?;

                // Extract the function type out of the signature (that seems easier than constructing it ourselves...)
                let dtor = match self.into_ptr(args[1].value)?.unwrap_or_err()? {
                    Scalar::Ptr(dtor_ptr) => Some(self.memory.get_fn(dtor_ptr)?),
                    Scalar::Bits { bits: 0, size } => {
                        // A null dtor pointer means "no destructor".
                        assert_eq!(size as u64, self.memory.pointer_size().bytes());
                        None
                    },
                    Scalar::Bits { .. } => return err!(ReadBytesAsPointer),
                };

                // Figure out how large a pthread TLS key actually is. This is libc::pthread_key_t.
                let key_type = args[0].ty.builtin_deref(true)
                                   .ok_or_else(|| EvalErrorKind::AbiViolation("Wrong signature used for pthread_key_create: First argument must be a raw pointer.".to_owned()))?.ty;
                let key_layout = self.layout_of(key_type)?;

                // Create key and write it into the memory where key_ptr wants it
                let key = self.memory.create_tls_key(dtor) as u128;
                if key_layout.size.bits() < 128 && key >= (1u128 << key_layout.size.bits() as u128) {
                    // The new key does not fit in pthread_key_t.
                    return err!(OutOfTls);
                }
                self.memory.write_scalar(
                    key_ptr,
                    key_layout.align,
                    Scalar::from_uint(key, key_layout.size).into(),
                    key_layout.size,
                    key_layout.align,
                    false,
                )?;

                // Return success (0)
                self.write_null(dest, dest_layout)?;
            }
            "pthread_key_delete" => {
                let key = self.value_to_scalar(args[0])?.to_bytes()?;
                self.memory.delete_tls_key(key)?;
                // Return success (0)
                self.write_null(dest, dest_layout)?;
            }
            "pthread_getspecific" => {
                let key = self.value_to_scalar(args[0])?.to_bytes()?;
                let ptr = self.memory.load_tls(key)?;
                self.write_ptr(dest, ptr, dest_ty)?;
            }
            "pthread_setspecific" => {
                let key = self.value_to_scalar(args[0])?.to_bytes()?;
                let new_ptr = self.into_ptr(args[1].value)?.unwrap_or_err()?;
                self.memory.store_tls(key, new_ptr)?;

                // Return success (0)
                self.write_null(dest, dest_layout)?;
            }

            "_tlv_atexit" => {
                return err!(Unimplemented("Thread-local store is not fully supported on macOS".to_owned()));
            },

            // Stub out all the other pthread calls to just return 0
            link_name if link_name.starts_with("pthread_") => {
                debug!("ignoring C ABI call: {}", link_name);
                self.write_null(dest, dest_layout)?;
            }

            "mmap" => {
                // This is a horrible hack, but well... the guard page mechanism calls mmap and expects a particular return value, so we give it that value
                let addr = self.into_ptr(args[0].value)?.unwrap_or_err()?;
                self.write_ptr(dest, addr, dest_ty)?;
            }

            // Windows API subs
            "AddVectoredExceptionHandler" => {
                // any non zero value works for the stdlib. This is just used for stackoverflows anyway
                let ptr_size = self.memory.pointer_size();
                self.write_scalar(dest, Scalar::from_int(1, ptr_size), dest_ty)?;
            },
            "InitializeCriticalSection" |
            "EnterCriticalSection" |
            "LeaveCriticalSection" |
            "DeleteCriticalSection" |
            "SetLastError" => {
                // Function does not return anything, nothing to do
            },
            "GetModuleHandleW" |
            "GetProcAddress" |
            "TryEnterCriticalSection" => {
                // pretend these do not exist/nothing happened, by returning zero
                self.write_scalar(dest, Scalar::from_int(0, dest_layout.size), dest_ty)?;
            },
            "GetLastError" => {
                // this is c::ERROR_CALL_NOT_IMPLEMENTED
                self.write_scalar(dest, Scalar::from_int(120, dest_layout.size), dest_ty)?;
            },

            // Windows TLS
            "TlsAlloc" => {
                // This just creates a key; Windows does not natively support TLS dtors.

                // Create key and return it
                let key = self.memory.create_tls_key(None) as u128;

                // Figure out how large a TLS key actually is. This is c::DWORD.
                if dest_layout.size.bits() < 128 && key >= (1u128 << dest_layout.size.bits() as u128) {
                    return err!(OutOfTls);
                }
                self.write_scalar(dest, Scalar::from_uint(key, dest_layout.size), dest_layout.ty)?;
            }
            "TlsGetValue" => {
                let key = self.value_to_scalar(args[0])?.to_bytes()?;
                let ptr = self.memory.load_tls(key)?;
                self.write_ptr(dest, ptr, dest_ty)?;
            }
            "TlsSetValue" => {
                let key = self.value_to_scalar(args[0])?.to_bytes()?;
                let new_ptr = self.into_ptr(args[1].value)?.unwrap_or_err()?;
                self.memory.store_tls(key, new_ptr)?;

                // Return success (1)
                self.write_scalar(dest, Scalar::from_int(1, dest_layout.size), dest_ty)?;
            }

            // We can't execute anything else
            _ => {
                return err!(Unimplemented(
                    format!("can't call foreign function: {}", link_name),
                ));
            }
        }

        // Since we pushed no stack frame, the main loop will act
        // as if the call just completed and it's returning to the
        // current frame.
        self.dump_local(dest);
        self.goto_block(dest_block);
        Ok(())
    }
697
698     /// Get an instance for a path.
699     fn resolve_path(&self, path: &[&str]) -> EvalResult<'tcx, ty::Instance<'tcx>> {
700         self.tcx
701             .crates()
702             .iter()
703             .find(|&&krate| self.tcx.original_crate_name(krate) == path[0])
704             .and_then(|krate| {
705                 let krate = DefId {
706                     krate: *krate,
707                     index: CRATE_DEF_INDEX,
708                 };
709                 let mut items = self.tcx.item_children(krate);
710                 let mut path_it = path.iter().skip(1).peekable();
711
712                 while let Some(segment) = path_it.next() {
713                     for item in mem::replace(&mut items, Default::default()).iter() {
714                         if item.ident.name == *segment {
715                             if path_it.peek().is_none() {
716                                 return Some(ty::Instance::mono(self.tcx.tcx, item.def.def_id()));
717                             }
718
719                             items = self.tcx.item_children(item.def.def_id());
720                             break;
721                         }
722                     }
723                 }
724                 None
725             })
726             .ok_or_else(|| {
727                 let path = path.iter().map(|&s| s.to_owned()).collect();
728                 EvalErrorKind::PathNotFound(path).into()
729             })
730     }
731
732     fn call_missing_fn(
733         &mut self,
734         instance: ty::Instance<'tcx>,
735         destination: Option<(Place, mir::BasicBlock)>,
736         args: &[ValTy<'tcx>],
737         sig: ty::FnSig<'tcx>,
738         path: String,
739     ) -> EvalResult<'tcx> {
740         // In some cases in non-MIR libstd-mode, not having a destination is legit.  Handle these early.
741         match &path[..] {
742             "std::panicking::rust_panic_with_hook" |
743             "core::panicking::panic_fmt::::panic_impl" |
744             "std::rt::begin_panic_fmt" => return err!(Panic),
745             _ => {}
746         }
747
748         let dest_ty = sig.output();
749         let (dest, dest_block) = destination.ok_or_else(
750             || EvalErrorKind::NoMirFor(path.clone()),
751         )?;
752
753         if self.tcx.is_foreign_item(instance.def_id()) {
754             // An external function
755             // TODO: That functions actually has a similar preamble to what follows here.  May make sense to
756             // unify these two mechanisms for "hooking into missing functions".
757             self.call_foreign_item(
758                 instance.def_id(),
759                 args,
760                 dest,
761                 dest_ty,
762                 dest_block,
763             )?;
764             return Ok(());
765         }
766
767         match &path[..] {
768             // A Rust function is missing, which means we are running with MIR missing for libstd (or other dependencies).
769             // Still, we can make many things mostly work by "emulating" or ignoring some functions.
770             "std::io::_print" |
771             "std::io::_eprint" => {
772                 warn!(
773                     "Ignoring output.  To run programs that print, make sure you have a libstd with full MIR."
774                 );
775             }
776             "std::thread::Builder::new" => {
777                 return err!(Unimplemented("miri does not support threading".to_owned()))
778             }
779             "std::env::args" => {
780                 return err!(Unimplemented(
781                     "miri does not support program arguments".to_owned(),
782                 ))
783             }
784             "std::panicking::panicking" |
785             "std::rt::panicking" => {
786                 // we abort on panic -> `std::rt::panicking` always returns false
787                 let bool = self.tcx.types.bool;
788                 self.write_scalar(dest, Scalar::from_bool(false), bool)?;
789             }
790
791             _ => return err!(NoMirFor(path)),
792         }
793
794         // Since we pushed no stack frame, the main loop will act
795         // as if the call just completed and it's returning to the
796         // current frame.
797         self.dump_local(dest);
798         self.goto_block(dest_block);
799         Ok(())
800     }
801
802     fn write_null(&mut self, dest: Place, dest_layout: TyLayout<'tcx>) -> EvalResult<'tcx> {
803         self.write_scalar(dest, Scalar::null(dest_layout.size), dest_layout.ty)
804     }
805 }