use byteorder::{ByteOrder, NativeEndian, ReadBytesExt, WriteBytesExt};
use std::collections::Bound::{Included, Excluded};
use std::collections::{btree_map, BTreeMap, HashMap, HashSet, VecDeque};
use std::{iter, mem, ptr};

use error::{EvalError, EvalResult};
use primval::PrimVal;

////////////////////////////////////////////////////////////////////////////////
// Value representations
////////////////////////////////////////////////////////////////////////////////

#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Repr {
    /// Representation for a non-aggregate type such as a boolean, integer, character or pointer.
    Primitive {
        size: usize,
    },

    /// The representation for aggregate types including structs, enums, and tuples.
    Aggregate {
        /// The size of the discriminant (an integer). Should be between 0 and 8. Always 0 for
        /// structs and tuples.
        discr_size: usize,

        /// The size of the entire aggregate, including the discriminant.
        size: usize,

        /// The representations of the contents of each variant.
        variants: Vec<Vec<FieldRepr>>,
    },

    Array {
        /// The size of a single element.
        elem_size: usize,

        /// Number of elements.
        length: usize,
    },
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct FieldRepr {
    pub offset: usize,
    pub size: usize,
}

impl Repr {
    pub fn size(&self) -> usize {
        match *self {
            Repr::Primitive { size } => size,
            Repr::Aggregate { size, .. } => size,
            Repr::Array { elem_size, length } => elem_size * length,
        }
    }
}

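// A minimal illustration (not from the original source): the `Repr` a layout
// pass might produce for a hypothetical struct `{ a: u8, b: u32 }` with
// 4-byte alignment. The offsets and total size here are assumptions.
#[allow(dead_code)]
fn example_struct_repr() -> Repr {
    Repr::Aggregate {
        // Structs carry no discriminant.
        discr_size: 0,
        // `a` at offset 0, three bytes of padding, `b` at offset 4.
        size: 8,
        // A struct is an aggregate with exactly one variant.
        variants: vec![vec![
            FieldRepr { offset: 0, size: 1 },
            FieldRepr { offset: 4, size: 4 },
        ]],
    }
}
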
////////////////////////////////////////////////////////////////////////////////
// Allocations and pointers
////////////////////////////////////////////////////////////////////////////////

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct AllocId(u64);

#[derive(Debug)]
pub struct Allocation {
    /// The raw bytes of the allocation.
    pub bytes: Box<[u8]>,

    /// Maps byte offsets to the allocation a pointer stored there points into.
    pub relocations: BTreeMap<usize, AllocId>,

    /// Tracks which bytes of the allocation have been initialized.
    pub undef_mask: UndefMask,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Pointer {
    pub alloc_id: AllocId,
    pub offset: usize,
}

impl Pointer {
    pub fn offset(self, i: isize) -> Self {
        Pointer { offset: (self.offset as isize + i) as usize, ..self }
    }
}

////////////////////////////////////////////////////////////////////////////////
// Top-level interpreter memory
////////////////////////////////////////////////////////////////////////////////

pub struct Memory {
    alloc_map: HashMap<u64, Allocation>,
    next_id: u64,
    pub pointer_size: usize,
}

impl Memory {
    pub fn new() -> Self {
        Memory {
            alloc_map: HashMap::new(),
            next_id: 0,

            // TODO(tsion): Should this be the host's or the target's `usize`?
            pointer_size: mem::size_of::<usize>(),
        }
    }

    pub fn allocate(&mut self, size: usize) -> Pointer {
        let id = AllocId(self.next_id);
        let alloc = Allocation {
            bytes: vec![0; size].into_boxed_slice(),
            relocations: BTreeMap::new(),
            undef_mask: UndefMask::new(size),
        };
        self.alloc_map.insert(self.next_id, alloc);
        self.next_id += 1;
        Pointer {
            alloc_id: id,
            offset: 0,
        }
    }

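    // Illustrative sketch (not in the original source): allocate four bytes,
    // write an integer through the accessors defined below, and read it back.
    // Kept private and unused; it only documents the intended call pattern.
    #[allow(dead_code)]
    fn allocate_demo(&mut self) -> EvalResult<i64> {
        let ptr = self.allocate(4);
        try!(self.write_int(ptr, -42, 4));
        self.read_int(ptr, 4)
    }
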
    ////////////////////////////////////////////////////////////////////////////////
    // Allocation accessors
    ////////////////////////////////////////////////////////////////////////////////

    pub fn get(&self, id: AllocId) -> EvalResult<&Allocation> {
        self.alloc_map.get(&id.0).ok_or(EvalError::DanglingPointerDeref)
    }

    pub fn get_mut(&mut self, id: AllocId) -> EvalResult<&mut Allocation> {
        self.alloc_map.get_mut(&id.0).ok_or(EvalError::DanglingPointerDeref)
    }

    /// Print an allocation and all allocations it points to, recursively.
    pub fn dump(&self, id: AllocId) {
        let mut allocs_seen = HashSet::new();
        let mut allocs_to_print = VecDeque::new();
        allocs_to_print.push_back(id);

        while let Some(id) = allocs_to_print.pop_front() {
            allocs_seen.insert(id.0);
            let alloc = self.get(id).unwrap();
            let prefix = format!("Alloc {:<5} ", format!("{}:", id.0));
            print!("{}", prefix);
            let mut relocations = vec![];

            for i in 0..alloc.bytes.len() {
                if let Some(&target_id) = alloc.relocations.get(&i) {
                    if !allocs_seen.contains(&target_id.0) {
                        allocs_to_print.push_back(target_id);
                    }
                    relocations.push((i, target_id.0));
                }
                if alloc.undef_mask.is_range_defined(i, i + 1) {
                    print!("{:02x} ", alloc.bytes[i]);
                } else {
                    // Print a placeholder for undefined bytes.
                    print!("__ ");
                }
            }
            println!("");

            if !relocations.is_empty() {
                print!("{:1$}", "", prefix.len()); // Print spaces.
                let mut pos = 0;
                let relocation_width = (self.pointer_size - 1) * 3;
                for (i, target_id) in relocations {
                    print!("{:1$}", "", (i - pos) * 3);
                    print!("└{0:─^1$}┘ ", format!("({})", target_id), relocation_width);
                    pos = i + self.pointer_size;
                }
                println!("");
            }
        }
    }

    ////////////////////////////////////////////////////////////////////////////////
    // Byte accessors
    ////////////////////////////////////////////////////////////////////////////////

    fn get_bytes_unchecked(&self, ptr: Pointer, size: usize) -> EvalResult<&[u8]> {
        let alloc = try!(self.get(ptr.alloc_id));
        if ptr.offset + size > alloc.bytes.len() {
            return Err(EvalError::PointerOutOfBounds);
        }
        Ok(&alloc.bytes[ptr.offset..ptr.offset + size])
    }

    fn get_bytes_unchecked_mut(&mut self, ptr: Pointer, size: usize) -> EvalResult<&mut [u8]> {
        let alloc = try!(self.get_mut(ptr.alloc_id));
        if ptr.offset + size > alloc.bytes.len() {
            return Err(EvalError::PointerOutOfBounds);
        }
        Ok(&mut alloc.bytes[ptr.offset..ptr.offset + size])
    }

    fn get_bytes(&self, ptr: Pointer, size: usize) -> EvalResult<&[u8]> {
        // Reading bytes that overlap a relocation would misinterpret pointer data.
        if try!(self.relocations(ptr, size)).count() != 0 {
            return Err(EvalError::ReadPointerAsBytes);
        }
        try!(self.check_defined(ptr, size));
        self.get_bytes_unchecked(ptr, size)
    }

    fn get_bytes_mut(&mut self, ptr: Pointer, size: usize) -> EvalResult<&mut [u8]> {
        // Writing discards any overlapping relocations and marks the range defined.
        try!(self.clear_relocations(ptr, size));
        try!(self.mark_definedness(ptr, size, true));
        self.get_bytes_unchecked_mut(ptr, size)
    }

    ////////////////////////////////////////////////////////////////////////////////
    // Reading and writing
    ////////////////////////////////////////////////////////////////////////////////

    pub fn copy(&mut self, src: Pointer, dest: Pointer, size: usize) -> EvalResult<()> {
        try!(self.check_relocation_edges(src, size));

        let src_bytes = try!(self.get_bytes_unchecked_mut(src, size)).as_mut_ptr();
        let dest_bytes = try!(self.get_bytes_mut(dest, size)).as_mut_ptr();

        // SAFE: The above indexing would have panicked if there weren't at least `size` bytes
        // behind `src` and `dest`. Also, we use the overlapping-safe `ptr::copy` if `src` and
        // `dest` could possibly overlap.
        unsafe {
            if src.alloc_id == dest.alloc_id {
                ptr::copy(src_bytes, dest_bytes, size);
            } else {
                ptr::copy_nonoverlapping(src_bytes, dest_bytes, size);
            }
        }

        try!(self.copy_undef_mask(src, dest, size));
        try!(self.copy_relocations(src, dest, size));

        Ok(())
    }

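    // Illustrative sketch (not in the original source): an overlapping copy
    // within one allocation exercises the `ptr::copy` branch above.
    #[allow(dead_code)]
    fn overlapping_copy_demo(&mut self) -> EvalResult<()> {
        let ptr = self.allocate(8);
        try!(self.write_int(ptr, 0x11223344, 4));
        // Source bytes 0..4 and destination bytes 2..6 overlap in 2..4.
        self.copy(ptr, ptr.offset(2), 4)
    }
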
    pub fn write_bytes(&mut self, ptr: Pointer, src: &[u8]) -> EvalResult<()> {
        self.get_bytes_mut(ptr, src.len()).map(|dest| dest.clone_from_slice(src))
    }

    pub fn read_ptr(&self, ptr: Pointer) -> EvalResult<Pointer> {
        let size = self.pointer_size;
        try!(self.check_defined(ptr, size));
        let offset = try!(self.get_bytes_unchecked(ptr, size))
            .read_uint::<NativeEndian>(size).unwrap() as usize;
        let alloc = try!(self.get(ptr.alloc_id));
        match alloc.relocations.get(&ptr.offset) {
            Some(&alloc_id) => Ok(Pointer { alloc_id: alloc_id, offset: offset }),
            None => Err(EvalError::ReadBytesAsPointer),
        }
    }

    pub fn write_ptr(&mut self, dest: Pointer, ptr: Pointer) -> EvalResult<()> {
        {
            let size = self.pointer_size;
            let mut bytes = try!(self.get_bytes_mut(dest, size));
            bytes.write_uint::<NativeEndian>(ptr.offset as u64, size).unwrap();
        }
        try!(self.get_mut(dest.alloc_id)).relocations.insert(dest.offset, ptr.alloc_id);
        Ok(())
    }

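    // Illustrative sketch (not in the original source): a stored pointer reads
    // back with the same target allocation and offset, via the relocation
    // recorded above.
    #[allow(dead_code)]
    fn ptr_roundtrip_demo(&mut self) -> EvalResult<Pointer> {
        let size = self.pointer_size;
        let target = self.allocate(1);
        let slot = self.allocate(size);
        try!(self.write_ptr(slot, target));
        self.read_ptr(slot)
    }
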
    pub fn write_primval(&mut self, ptr: Pointer, val: PrimVal) -> EvalResult<()> {
        let pointer_size = self.pointer_size;
        match val {
            PrimVal::Bool(b) => self.write_bool(ptr, b),
            PrimVal::I8(n) => self.write_int(ptr, n as i64, 1),
            PrimVal::I16(n) => self.write_int(ptr, n as i64, 2),
            PrimVal::I32(n) => self.write_int(ptr, n as i64, 4),
            PrimVal::I64(n) => self.write_int(ptr, n as i64, 8),
            PrimVal::U8(n) => self.write_uint(ptr, n as u64, 1),
            PrimVal::U16(n) => self.write_uint(ptr, n as u64, 2),
            PrimVal::U32(n) => self.write_uint(ptr, n as u64, 4),
            PrimVal::U64(n) => self.write_uint(ptr, n as u64, 8),
            PrimVal::IntegerPtr(n) => self.write_uint(ptr, n as u64, pointer_size),
            PrimVal::AbstractPtr(_p) => unimplemented!(),
        }
    }

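    // Illustrative sketch (not in the original source): `write_primval`
    // dispatches to the sized writer, so the value reads back through
    // `read_uint` with the matching size.
    #[allow(dead_code)]
    fn primval_demo(&mut self) -> EvalResult<u64> {
        let ptr = self.allocate(2);
        try!(self.write_primval(ptr, PrimVal::U16(0xabcd)));
        self.read_uint(ptr, 2)
    }
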
    pub fn read_bool(&self, ptr: Pointer) -> EvalResult<bool> {
        let bytes = try!(self.get_bytes(ptr, 1));
        match bytes[0] {
            0 => Ok(false),
            1 => Ok(true),
            _ => Err(EvalError::InvalidBool),
        }
    }

    pub fn write_bool(&mut self, ptr: Pointer, b: bool) -> EvalResult<()> {
        self.get_bytes_mut(ptr, 1).map(|bytes| bytes[0] = b as u8)
    }

    pub fn read_int(&self, ptr: Pointer, size: usize) -> EvalResult<i64> {
        self.get_bytes(ptr, size).map(|mut b| b.read_int::<NativeEndian>(size).unwrap())
    }

    pub fn write_int(&mut self, ptr: Pointer, n: i64, size: usize) -> EvalResult<()> {
        self.get_bytes_mut(ptr, size).map(|mut b| b.write_int::<NativeEndian>(n, size).unwrap())
    }

    pub fn read_uint(&self, ptr: Pointer, size: usize) -> EvalResult<u64> {
        self.get_bytes(ptr, size).map(|mut b| b.read_uint::<NativeEndian>(size).unwrap())
    }

    pub fn write_uint(&mut self, ptr: Pointer, n: u64, size: usize) -> EvalResult<()> {
        self.get_bytes_mut(ptr, size).map(|mut b| b.write_uint::<NativeEndian>(n, size).unwrap())
    }

    pub fn read_isize(&self, ptr: Pointer) -> EvalResult<i64> {
        self.read_int(ptr, self.pointer_size)
    }

    pub fn write_isize(&mut self, ptr: Pointer, n: i64) -> EvalResult<()> {
        let size = self.pointer_size;
        self.write_int(ptr, n, size)
    }

    pub fn read_usize(&self, ptr: Pointer) -> EvalResult<u64> {
        self.read_uint(ptr, self.pointer_size)
    }

    pub fn write_usize(&mut self, ptr: Pointer, n: u64) -> EvalResult<()> {
        let size = self.pointer_size;
        self.write_uint(ptr, n, size)
    }

    ////////////////////////////////////////////////////////////////////////////////
    // Relocations
    ////////////////////////////////////////////////////////////////////////////////

    fn relocations(&self, ptr: Pointer, size: usize)
        -> EvalResult<btree_map::Range<usize, AllocId>>
    {
        // A relocation covers `pointer_size` bytes starting at its key, so a relocation
        // whose key lies up to `pointer_size - 1` bytes before `ptr.offset` still overlaps
        // the start of the range, and any key below `ptr.offset + size` starts inside it.
        let start = ptr.offset.saturating_sub(self.pointer_size - 1);
        let end = ptr.offset + size;
        Ok(try!(self.get(ptr.alloc_id)).relocations.range(Included(&start), Excluded(&end)))
    }

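    // Illustrative sketch (not in the original source): a byte-level read
    // anywhere under a stored pointer fails with `ReadPointerAsBytes`, because
    // the query above also catches relocations that merely straddle the range.
    #[allow(dead_code)]
    fn relocation_overlap_demo(&mut self) -> bool {
        let size = self.pointer_size;
        let target = self.allocate(1);
        let slot = self.allocate(size);
        if self.write_ptr(slot, target).is_err() { return false; }
        // Offset 1 is in the middle of the stored pointer.
        self.get_bytes(slot.offset(1), 1).is_err()
    }
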
    fn clear_relocations(&mut self, ptr: Pointer, size: usize) -> EvalResult<()> {
        // Find all relocations overlapping the given range.
        let keys: Vec<_> = try!(self.relocations(ptr, size)).map(|(&k, _)| k).collect();
        if keys.is_empty() { return Ok(()); }

        // Find the start and end of the given range and its outermost relocations.
        let start = ptr.offset;
        let end = start + size;
        let first = *keys.first().unwrap();
        let last = *keys.last().unwrap() + self.pointer_size;

        let alloc = try!(self.get_mut(ptr.alloc_id));

        // Mark parts of the outermost relocations as undefined if they partially fall outside
        // the given range.
        if first < start { alloc.undef_mask.set_range(first, start, false); }
        if last > end { alloc.undef_mask.set_range(end, last, false); }

        // Forget all the relocations.
        for k in keys { alloc.relocations.remove(&k); }

        Ok(())
    }

    fn check_relocation_edges(&self, ptr: Pointer, size: usize) -> EvalResult<()> {
        // A relocation straddling either edge of the range would be half-copied, so treat
        // that as reading a pointer as bytes.
        let overlapping_start = try!(self.relocations(ptr, 0)).count();
        let overlapping_end = try!(self.relocations(ptr.offset(size as isize), 0)).count();
        if overlapping_start + overlapping_end != 0 {
            return Err(EvalError::ReadPointerAsBytes);
        }
        Ok(())
    }

    fn copy_relocations(&mut self, src: Pointer, dest: Pointer, size: usize) -> EvalResult<()> {
        let relocations: Vec<_> = try!(self.relocations(src, size))
            .map(|(&offset, &alloc_id)| {
                // Update relocation offsets for the new positions in the destination allocation.
                (offset + dest.offset - src.offset, alloc_id)
            })
            .collect();
        try!(self.get_mut(dest.alloc_id)).relocations.extend(relocations);
        Ok(())
    }

    ////////////////////////////////////////////////////////////////////////////////
    // Undefined bytes
    ////////////////////////////////////////////////////////////////////////////////

    // FIXME(tsion): This is a very naive, slow version.
    fn copy_undef_mask(&mut self, src: Pointer, dest: Pointer, size: usize) -> EvalResult<()> {
        // The bits have to be saved locally before writing to dest in case src and dest overlap.
        let mut v = Vec::with_capacity(size);
        for i in 0..size {
            let defined = try!(self.get(src.alloc_id)).undef_mask.get(src.offset + i);
            v.push(defined);
        }
        for (i, defined) in v.into_iter().enumerate() {
            try!(self.get_mut(dest.alloc_id)).undef_mask.set(dest.offset + i, defined);
        }
        Ok(())
    }

    fn check_defined(&self, ptr: Pointer, size: usize) -> EvalResult<()> {
        let alloc = try!(self.get(ptr.alloc_id));
        if !alloc.undef_mask.is_range_defined(ptr.offset, ptr.offset + size) {
            return Err(EvalError::ReadUndefBytes);
        }
        Ok(())
    }

    pub fn mark_definedness(&mut self, ptr: Pointer, size: usize, new_state: bool)
        -> EvalResult<()>
    {
        let alloc = try!(self.get_mut(ptr.alloc_id));
        alloc.undef_mask.set_range(ptr.offset, ptr.offset + size, new_state);
        Ok(())
    }
}

////////////////////////////////////////////////////////////////////////////////
// Undefined byte tracking
////////////////////////////////////////////////////////////////////////////////

type Block = u64;
const BLOCK_SIZE: usize = 64;

#[derive(Clone, Debug)]
pub struct UndefMask {
    blocks: Vec<Block>,
    len: usize,
}

impl UndefMask {
    fn new(size: usize) -> Self {
        let mut m = UndefMask {
            blocks: vec![],
            len: 0,
        };
        m.grow(size, false);
        m
    }

    /// Check whether the range `start..end` (end-exclusive) is entirely defined.
    fn is_range_defined(&self, start: usize, end: usize) -> bool {
        if end > self.len { return false; }
        for i in start..end {
            if !self.get(i) { return false; }
        }
        true
    }

    fn set_range(&mut self, start: usize, end: usize, new_state: bool) {
        let len = self.len;
        if end > len { self.grow(end - len, new_state); }
        self.set_range_inbounds(start, end, new_state);
    }

    fn set_range_inbounds(&mut self, start: usize, end: usize, new_state: bool) {
        for i in start..end { self.set(i, new_state); }
    }

    fn get(&self, i: usize) -> bool {
        let (block, bit) = bit_index(i);
        (self.blocks[block] & 1 << bit) != 0
    }

    fn set(&mut self, i: usize, new_state: bool) {
        let (block, bit) = bit_index(i);
        if new_state {
            self.blocks[block] |= 1 << bit;
        } else {
            self.blocks[block] &= !(1 << bit);
        }
    }

    fn grow(&mut self, amount: usize, new_state: bool) {
        let unused_trailing_bits = self.blocks.len() * BLOCK_SIZE - self.len;
        if amount > unused_trailing_bits {
            let additional_blocks = amount / BLOCK_SIZE + 1;
            self.blocks.extend(iter::repeat(0).take(additional_blocks));
        }
        let start = self.len;
        self.len += amount;
        self.set_range_inbounds(start, start + amount, new_state);
    }
}

// fn uniform_block(state: bool) -> Block {
//     if state { !0 } else { 0 }
// }

fn bit_index(bits: usize) -> (usize, usize) {
    (bits / BLOCK_SIZE, bits % BLOCK_SIZE)
}

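// A minimal test sketch (not part of the original source) exercising the API
// above: fresh allocations start undefined, writes make bytes defined, and
// `copy` carries both the bytes and the definedness mask along.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn bit_indexing() {
        // Bit 70 lives in block 1 (bits 64..128) at position 6.
        assert_eq!(bit_index(70), (1, 6));
    }

    #[test]
    fn write_then_copy() {
        let mut mem = Memory::new();
        let a = mem.allocate(8);
        let b = mem.allocate(8);

        // Reading uninitialized bytes must fail.
        assert!(mem.read_int(a, 8).is_err());

        // After a write, the same value reads back.
        assert!(mem.write_int(a, -1234, 8).is_ok());
        assert_eq!(mem.read_int(a, 8).ok(), Some(-1234));

        // Copying carries the bytes and the definedness mask to `b`.
        assert!(mem.copy(a, b, 8).is_ok());
        assert_eq!(mem.read_int(b, 8).ok(), Some(-1234));
    }
}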