2 use rustc::ty::layout::{Size, Align};
3 use rustc::ty::{self, Ty};
4 use rustc_data_structures::indexed_vec::Idx;
5 use syntax::ast::Mutability;
7 use error::{EvalError, EvalResult};
8 use eval_context::EvalContext;
9 use memory::MemoryPointer;
10 use value::{PrimVal, Value, Pointer};
// The three places an evaluated value can live: behind a pointer into the
// `Memory` system, in a stack frame's locals, or in the global
// (constant/static) cache.
// NOTE(review): the `Ptr { ptr, extra, aligned }` and `Local { frame, local }`
// variant declarations that the doc comments below describe (and that the
// impls below construct and match) are not visible in this extract -- confirm
// against the full file.
12 #[derive(Copy, Clone, Debug)]
13 pub enum Lvalue<'tcx> {
14 /// An lvalue referring to a value allocated in the `Memory` system.
16 /// An lvalue may have an invalid (integral or undef) pointer,
17 /// since it might be turned back into a reference
18 /// before ever being dereferenced.
21 /// Remember whether this lvalue is *supposed* to be aligned.
25 /// An lvalue referring to a value on the stack. Represented by a stack frame index paired with
26 /// a Mir local index.
32 /// An lvalue referring to a global
33 Global(GlobalId<'tcx>),
// Metadata carried alongside an `Lvalue::Ptr`: the vtable pointer of a trait
// object, or the variant selected by an enum downcast.
// NOTE(review): the `None` and `Length(u64)` variants used throughout the
// impls below (e.g. `LvalueExtra::Length(len)` in `elem_ty_and_len`) are not
// visible in this extract -- confirm against the full file.
36 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
37 pub enum LvalueExtra {
40 Vtable(MemoryPointer),
41 DowncastVariant(usize),
44 /// Uniquely identifies a specific constant or static.
// Serves as the key into `EvalContext::globals` (see `try_read_lvalue` /
// `read_lvalue` below); hence `Copy + Eq + Hash`.
45 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
46 pub struct GlobalId<'tcx> {
47 /// For a constant or static, the `Instance` of the item itself.
48 /// For a promoted global, the `Instance` of the function they belong to.
49 pub(super) instance: ty::Instance<'tcx>,
51 /// The index for promoted globals within their function's `Mir`.
// `None` for ordinary constants/statics, `Some` only for promoteds.
52 pub(super) promoted: Option<mir::Promoted>,
// Cached evaluation state of a constant or static: its current value, whether
// initialization has finished, its mutability, and its type.
55 #[derive(Clone, Debug)]
56 pub struct Global<'tcx> {
// The current value; starts as `Value::ByVal(PrimVal::Undef)` (see
// `Global::uninitialized` below) until initialization writes the real value.
57 pub(super) value: Value,
58 /// Only used in `force_allocation` to ensure we don't mark the memory
59 /// before the static is initialized. It is possible to convert a
60 /// global which initially is `Value::ByVal(PrimVal::Undef)` and gets
61 /// lifted to an allocation before the static is fully initialized
62 pub(super) initialized: bool,
// Starts out `Mutability::Mutable` (see `uninitialized`) so the initializer
// can write to it.
63 pub(super) mutable: Mutability,
64 pub(super) ty: Ty<'tcx>,
// Constructors and accessors for `Lvalue`.
// NOTE(review): several match headers and closing braces are missing from this
// extract (original line numbers jump); code lines are reproduced verbatim.
67 impl<'tcx> Lvalue<'tcx> {
68 /// Produces an Lvalue that will error if attempted to be read from
69 pub fn undef() -> Self {
70 Self::from_primval_ptr(PrimVal::Undef.into())
// Wraps an arbitrary (possibly integral/undef) `Pointer` as an aligned
// `Lvalue::Ptr` with no fat-pointer extra.
73 pub(crate) fn from_primval_ptr(ptr: Pointer) -> Self {
74 Lvalue::Ptr { ptr, extra: LvalueExtra::None, aligned: true }
// Convenience wrapper: a concrete `MemoryPointer` converts into a `Pointer`.
77 pub(crate) fn from_ptr(ptr: MemoryPointer) -> Self {
78 Self::from_primval_ptr(ptr.into())
// Destructures an `Lvalue::Ptr` into (pointer, extra, aligned-flag);
// any other variant is an interpreter bug.
81 pub(super) fn to_ptr_extra_aligned(self) -> (Pointer, LvalueExtra, bool) {
83 Lvalue::Ptr { ptr, extra, aligned } => (ptr, extra, aligned),
84 _ => bug!("to_ptr_and_extra: expected Lvalue::Ptr, got {:?}", self),
// Extracts a thin `MemoryPointer`, asserting there is no fat-pointer extra.
89 pub(super) fn to_ptr(self) -> EvalResult<'tcx, MemoryPointer> {
90 let (ptr, extra, _aligned) = self.to_ptr_extra_aligned();
91 // At this point, we forget about the alignment information -- the lvalue has been turned into a reference,
92 // and no matter where it came from, it now must be aligned.
93 assert_eq!(extra, LvalueExtra::None);
// For an array type, returns (element type, statically-known length); for a
// slice, the length is taken from this lvalue's `LvalueExtra::Length`.
97 pub(super) fn elem_ty_and_len(self, ty: Ty<'tcx>) -> (Ty<'tcx>, u64) {
99 ty::TyArray(elem, n) => (elem, n as u64),
101 ty::TySlice(elem) => {
103 Lvalue::Ptr { extra: LvalueExtra::Length(len), .. } => (elem, len),
104 _ => bug!("elem_ty_and_len of a TySlice given non-slice lvalue: {:?}", self),
108 _ => bug!("elem_ty_and_len expected array or slice, got {:?}", ty),
113 impl<'tcx> Global<'tcx> {
// A fresh, not-yet-initialized global: value is `Undef` and it is left
// mutable so the initializer can write into it.
114 pub(super) fn uninitialized(ty: Ty<'tcx>) -> Self {
116 value: Value::ByVal(PrimVal::Undef),
117 mutable: Mutability::Mutable,
// Builds an already-initialized global with the given value and mutability.
// NOTE(review): the body of `initialized` is not visible in this extract.
123 pub(super) fn initialized(ty: Ty<'tcx>, value: Value, mutable: Mutability) -> Self {
133 impl<'a, 'tcx> EvalContext<'a, 'tcx> {
134 /// Reads a value from the lvalue without going through the intermediate step of obtaining
// Fast path: `Ok(Some(value))` when the lvalue can be read directly (a local,
// a cached global, or a supported projection); `Ok(None)` tells the caller to
// fall back to the full `eval_lvalue` + `read_lvalue` path.
136 pub fn try_read_lvalue(&mut self, lvalue: &mir::Lvalue<'tcx>) -> EvalResult<'tcx, Option<Value>> {
137 use rustc::mir::Lvalue::*;
139 // Might allow this in the future, right now there's no way to do this from Rust code anyway
140 Local(mir::RETURN_POINTER) => Err(EvalError::ReadFromReturnPointer),
141 // Directly reading a local will always succeed
142 Local(local) => self.frame().get_local(local).map(Some),
143 // Directly reading a static will always succeed
144 Static(ref static_) => {
145 let instance = ty::Instance::mono(self.tcx, static_.def_id);
146 let cid = GlobalId { instance, promoted: None };
// A missing cache entry is an interpreter bug ("global not cached").
147 Ok(Some(self.globals.get(&cid).expect("global not cached").value))
149 Projection(ref proj) => self.try_read_lvalue_projection(proj),
// Tries to read a projection without materializing the base in memory.
// Only a few `ByVal`/`ByValPair` field reads are handled here; every other
// projection element falls through to the normal lvalue path (returns `None`).
153 fn try_read_lvalue_projection(&mut self, proj: &mir::LvalueProjection<'tcx>) -> EvalResult<'tcx, Option<Value>> {
154 use rustc::mir::ProjectionElem::*;
// If the base itself needs the slow path, so does the projection.
155 let base = match self.try_read_lvalue(&proj.base)? {
157 None => return Ok(None),
159 let base_ty = self.lvalue_ty(&proj.base);
161 Field(field, _) => match (field.index(), base) {
162 // the only field of a struct
163 (0, Value::ByVal(val)) => Ok(Some(Value::ByVal(val))),
164 // split fat pointers, 2 element tuples, ...
165 (0...1, Value::ByValPair(a, b)) if self.get_field_count(base_ty)? == 2 => {
166 let val = [a, b][field.index()];
167 Ok(Some(Value::ByVal(val)))
169 // the only field of a struct is a fat pointer
170 (0, Value::ByValPair(..)) => Ok(Some(base)),
173 // The NullablePointer cases should work fine, need to take care for normal enums
176 // reading index 0 or index 1 from a ByVal or ByVal pair could be optimized
177 ConstantIndex { .. } | Index(_) |
178 // No way to optimize this projection any better than the normal lvalue path
183 /// Returns a value and (in case of a ByRef) if we are supposed to use aligned accesses.
184 pub(super) fn eval_and_read_lvalue(&mut self, lvalue: &mir::Lvalue<'tcx>) -> EvalResult<'tcx, Value> {
185 // Shortcut for things like accessing a fat pointer's field,
186 // which would otherwise (in the `eval_lvalue` path) require moving a `ByValPair` to memory
187 // and returning an `Lvalue::Ptr` to it
188 if let Some(val) = self.try_read_lvalue(lvalue)? {
// Slow path: fully evaluate the lvalue, then read from the result.
191 let lvalue = self.eval_lvalue(lvalue)?;
192 self.read_lvalue(lvalue)
// Turns an already-evaluated `Lvalue` into a `Value`: pointers become
// `Value::ByRef` (preserving the alignment flag), locals and globals are
// fetched from the stack frame and the global cache respectively.
195 pub fn read_lvalue(&self, lvalue: Lvalue<'tcx>) -> EvalResult<'tcx, Value> {
197 Lvalue::Ptr { ptr, extra, aligned } => {
// Only thin pointers may be read this way; fat-pointer extras must have
// been handled before reaching here.
198 assert_eq!(extra, LvalueExtra::None);
199 Ok(Value::ByRef(ptr, aligned))
201 Lvalue::Local { frame, local } => {
202 self.stack[frame].get_local(local)
204 Lvalue::Global(cid) => {
205 Ok(self.globals.get(&cid).expect("global not cached").value)
// Evaluates a MIR lvalue to an interpreter `Lvalue`, recursing through
// projections via `eval_lvalue_projection`.
210 pub(super) fn eval_lvalue(&mut self, mir_lvalue: &mir::Lvalue<'tcx>) -> EvalResult<'tcx, Lvalue<'tcx>> {
211 use rustc::mir::Lvalue::*;
212 let lvalue = match *mir_lvalue {
// The return place was recorded on the frame when it was pushed.
213 Local(mir::RETURN_POINTER) => self.frame().return_lvalue,
214 Local(local) => Lvalue::Local { frame: self.cur_frame(), local },
216 Static(ref static_) => {
217 let instance = ty::Instance::mono(self.tcx, static_.def_id);
218 Lvalue::Global(GlobalId { instance, promoted: None })
221 Projection(ref proj) => {
222 let ty = self.lvalue_ty(&proj.base);
223 let lvalue = self.eval_lvalue(&proj.base)?;
// Projections return early; the trace-dump below only runs for the
// non-projection cases.
224 return self.eval_lvalue_projection(lvalue, ty, &proj.elem);
// Guarded so the debug dump only happens when trace logging is enabled.
228 if log_enabled!(::log::LogLevel::Trace) {
229 self.dump_local(lvalue);
// Projects a field out of a base lvalue: computes the field's byte offset
// from the base type's layout, avoids allocating for trivial single-field
// `ByVal` cases, then offsets the base pointer and propagates fat-pointer
// extras for unsized fields. Packed layouts clear the `aligned` flag.
// NOTE(review): the signature of this method (`lvalue_field`, per the bug!
// messages below) is not visible in this extract; it evidently takes the base
// lvalue, a `field_index`, `base_ty`, and `field_ty`.
241 ) -> EvalResult<'tcx, Lvalue<'tcx>> {
242 let base_layout = self.type_layout(base_ty)?;
243 use rustc::ty::layout::Layout::*;
// Determine the field's offset and whether the enclosing layout is packed.
244 let (offset, packed) = match *base_layout {
245 Univariant { ref variant, .. } => {
246 (variant.offsets[field_index], variant.packed)
249 General { ref variants, .. } => {
// Multi-variant enums can only be field-projected after a Downcast,
// which stored the chosen variant in `LvalueExtra::DowncastVariant`.
250 let (_, base_extra, _) = base.to_ptr_extra_aligned();
251 if let LvalueExtra::DowncastVariant(variant_idx) = base_extra {
252 // +1 for the discriminant, which is field 0
253 (variants[variant_idx].offsets[field_index + 1], variants[variant_idx].packed)
255 bug!("field access on enum had no variant index");
259 RawNullablePointer { .. } => {
260 assert_eq!(field_index, 0);
264 StructWrappedNullablePointer { ref nonnull, .. } => {
265 (nonnull.offsets[field_index], nonnull.packed)
// Every union field lives at offset 0, so the base lvalue already is the field.
268 UntaggedUnion { .. } => return Ok(base),
270 Vector { element, count } => {
271 let field = field_index as u64;
272 assert!(field < count);
273 let elem_size = element.size(&self.tcx.data_layout).bytes();
274 (Size::from_bytes(field * elem_size), false)
277 // We treat arrays + fixed sized indexing like field accesses
279 let field = field_index as u64;
280 let elem_size = match base_ty.sty {
281 ty::TyArray(elem_ty, n) => {
282 assert!(field < n as u64);
283 self.type_size(elem_ty)?.expect("array elements are sized") as u64
285 _ => bug!("lvalue_field: got Array layout but non-array type {:?}", base_ty),
287 (Size::from_bytes(field * elem_size), false)
290 FatPointer { .. } => {
// A fat pointer's fields are its pointer-sized components.
291 let bytes = field_index as u64 * self.memory.pointer_size();
292 let offset = Size::from_bytes(bytes);
296 _ => bug!("field access on non-product type: {:?}", base_layout),
299 // Do not allocate in trivial cases
300 let (base_ptr, base_extra, aligned) = match base {
301 Lvalue::Ptr { ptr, extra, aligned } => (ptr, extra, aligned),
302 Lvalue::Local { frame, local } => match self.stack[frame].get_local(local)? {
303 // in case the type has a single field, just return the value
304 Value::ByVal(_) if self.get_field_count(base_ty).map(|c| c == 1).unwrap_or(false) => {
305 assert_eq!(offset.bytes(), 0, "ByVal can only have 1 non zst field with offset 0");
309 Value::ByValPair(..) |
// Otherwise spill the local to memory so a pointer into it can be taken.
310 Value::ByVal(_) => self.force_allocation(base)?.to_ptr_extra_aligned(),
312 Lvalue::Global(cid) => match self.globals.get(&cid).expect("uncached global").value {
313 // in case the type has a single field, just return the value
314 Value::ByVal(_) if self.get_field_count(base_ty).map(|c| c == 1).unwrap_or(false) => {
315 assert_eq!(offset.bytes(), 0, "ByVal can only have 1 non zst field with offset 0");
319 Value::ByValPair(..) |
320 Value::ByVal(_) => self.force_allocation(base)?.to_ptr_extra_aligned(),
// For dynamically sized bases, align the statically computed offset to the
// runtime alignment obtained via the vtable.
324 let offset = match base_extra {
325 LvalueExtra::Vtable(tab) => {
326 let (_, align) = self.size_and_align_of_dst(base_ty, base_ptr.to_value_with_vtable(tab))?;
327 offset.abi_align(Align::from_bytes(align, align).unwrap()).bytes()
332 let ptr = base_ptr.offset(offset, &self)?;
334 let field_ty = self.monomorphize(field_ty, self.substs());
// Sized fields are thin; an unsized (last) field inherits the base's extra
// (vtable or length), which the checks below enforce.
336 let extra = if self.type_is_sized(field_ty) {
340 LvalueExtra::None => bug!("expected fat pointer"),
341 LvalueExtra::DowncastVariant(..) =>
342 bug!("Rust doesn't support unsized fields in enum variants"),
343 LvalueExtra::Vtable(_) |
344 LvalueExtra::Length(_) => {},
// Accessing through a packed layout is never considered aligned.
349 Ok(Lvalue::Ptr { ptr, extra, aligned: aligned && !packed })
// Applies a single MIR projection element (field, downcast, deref, index,
// constant-index, subslice) to an already-evaluated base lvalue and returns
// the resulting `Lvalue::Ptr`.
352 fn eval_lvalue_projection(
356 proj_elem: &mir::ProjectionElem<'tcx, mir::Operand<'tcx>>,
357 ) -> EvalResult<'tcx, Lvalue<'tcx>> {
358 use rustc::mir::ProjectionElem::*;
359 let (ptr, extra, aligned) = match *proj_elem {
360 Field(field, field_ty) => {
// Field projection has its own dedicated logic; return early.
361 return self.lvalue_field(base, field.index(), base_ty, field_ty);
364 Downcast(_, variant) => {
365 let base_layout = self.type_layout(base_ty)?;
367 let base = self.force_allocation(base)?;
368 let (base_ptr, base_extra, aligned) = base.to_ptr_extra_aligned();
370 use rustc::ty::layout::Layout::*;
// Record the selected variant for tagged enums; nullable-pointer
// layouts keep whatever extra the base already carried.
371 let extra = match *base_layout {
372 General { .. } => LvalueExtra::DowncastVariant(variant),
373 RawNullablePointer { .. } | StructWrappedNullablePointer { .. } => base_extra,
374 _ => bug!("variant downcast on non-aggregate: {:?}", base_layout),
376 (base_ptr, extra, aligned)
// Deref: read the pointer value, then classify thin vs. fat pointees.
380 let val = self.read_lvalue(base)?;
382 let pointee_type = match base_ty.sty {
383 ty::TyRawPtr(ref tam) |
384 ty::TyRef(_, ref tam) => tam.ty,
385 ty::TyAdt(def, _) if def.is_box() => base_ty.boxed_ty(),
386 _ => bug!("can only deref pointer types"),
389 trace!("deref to {} on {:?}", pointee_type, val);
// The pointee's struct tail decides what metadata the pointer carries:
// trait objects carry a vtable, str/slices carry a length.
391 match self.tcx.struct_tail(pointee_type).sty {
392 ty::TyDynamic(..) => {
393 let (ptr, vtable) = val.into_ptr_vtable_pair(&mut self.memory)?;
394 (ptr, LvalueExtra::Vtable(vtable), true)
396 ty::TyStr | ty::TySlice(_) => {
397 let (ptr, len) = val.into_slice(&mut self.memory)?;
398 (ptr, LvalueExtra::Length(len), true)
400 _ => (val.into_ptr(&mut self.memory)?, LvalueExtra::None, true),
404 Index(ref operand) => {
// Indexing needs the base in memory so pointer arithmetic can be done.
406 let base = self.force_allocation(base)?;
407 let (base_ptr, _, aligned) = base.to_ptr_extra_aligned();
409 let (elem_ty, len) = base.elem_ty_and_len(base_ty);
410 let elem_size = self.type_size(elem_ty)?.expect("slice element must be sized");
411 let n_ptr = self.eval_operand(operand)?;
412 let usize = self.tcx.types.usize;
413 let n = self.value_to_primval(n_ptr, usize)?.to_u64()?;
// Out-of-bounds indices here violate an interpreter invariant.
414 assert!(n < len, "Tried to access element {} of array/slice with length {}", n, len);
415 let ptr = base_ptr.offset(n * elem_size, &self)?;
416 (ptr, LvalueExtra::None, aligned)
419 ConstantIndex { offset, min_length, from_end } => {
421 let base = self.force_allocation(base)?;
422 let (base_ptr, _, aligned) = base.to_ptr_extra_aligned();
424 let (elem_ty, n) = base.elem_ty_and_len(base_ty);
425 let elem_size = self.type_size(elem_ty)?.expect("sequence element must be sized");
426 assert!(n >= min_length as u64);
// `from_end` means the constant offset counts back from the sequence end.
428 let index = if from_end {
429 n - u64::from(offset)
434 let ptr = base_ptr.offset(index * elem_size, &self)?;
435 (ptr, LvalueExtra::None, aligned)
438 Subslice { from, to } => {
440 let base = self.force_allocation(base)?;
441 let (base_ptr, _, aligned) = base.to_ptr_extra_aligned();
443 let (elem_ty, n) = base.elem_ty_and_len(base_ty);
444 let elem_size = self.type_size(elem_ty)?.expect("slice element must be sized");
445 assert!(u64::from(from) <= n - u64::from(to));
446 let ptr = base_ptr.offset(u64::from(from) * elem_size, &self)?;
// The new slice drops `from` elements at the front and `to` at the back.
447 let extra = LvalueExtra::Length(n - u64::from(to) - u64::from(from));
448 (ptr, extra, aligned)
452 Ok(Lvalue::Ptr { ptr, extra, aligned })
// Computes the monomorphized type of a MIR lvalue in the current frame's
// substitution environment.
455 pub(super) fn lvalue_ty(&self, lvalue: &mir::Lvalue<'tcx>) -> Ty<'tcx> {
456 self.monomorphize(lvalue.ty(self.mir(), self.tcx).to_ty(self.tcx), self.substs())