1 use std::convert::TryFrom;
5 ieee::{Double, Single},
8 use rustc_macros::HashStable;
9 use rustc_target::abi::{HasDataLayout, Size, TargetDataLayout};
11 use crate::ty::{ParamEnv, Ty, TyCtxt};
13 use super::{sign_extend, truncate, AllocId, Allocation, InterpResult, Pointer, PointerArithmetic};
15 /// Represents the result of a raw const operation, pre-validation.
16 #[derive(Clone, HashStable)]
17 pub struct RawConst<'tcx> {
18 // the value lives here, at offset 0, and that allocation definitely is a `AllocKind::Memory`
19 // (so you can use `AllocMap::unwrap_memory`).
20 pub alloc_id: AllocId,
// NOTE(review): the embedded original line numbers jump from 20 to 24 here, so the
// rest of this struct (at least one more field — presumably the value's type, given
// the `'tcx` parameter — and the closing brace) is elided from this extract.
// Confirm against the full source before editing.
24 /// Represents a constant value in Rust. `Scalar` and `Slice` are optimizations for
25 /// array length computations, enum discriminants and the pattern matching logic.
26 #[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
28 pub enum ConstValue<'tcx> {
29 /// Used only for types with `layout::abi::Scalar` ABI and ZSTs.
31 /// Not using the enum `Value` to encode that this must not be `Undef`.
// NOTE(review): the `Scalar(..)` variant declaration itself (original line ~32) is
// elided from this extract; the impl below matches on `ConstValue::Scalar(val)`.
34 /// Used only for `&[u8]` and `&str`
// `start`/`end` are byte offsets into `data`, so the slice is `data[start..end]`
// (see `get_slice_bytes` at the bottom of this file).
35 Slice { data: &'tcx Allocation, start: usize, end: usize },
37 /// A value not represented/representable by `Scalar` or `Slice`
// NOTE(review): the `ByRef {` variant header (original line ~38) is elided here;
// the impl below matches on `ConstValue::ByRef { .. }`.
39 /// The backing memory of the value, may contain more memory than needed for just the value
40 /// in order to share `Allocation`s between values
41 alloc: &'tcx Allocation,
42 /// Offset into `alloc`
// NOTE(review): the offset field itself and the enum's closing braces
// (original lines ~43-45) are elided from this extract.
// Keep this type small: the interpreter copies `ConstValue`s freely.
47 #[cfg(target_arch = "x86_64")]
48 static_assert_size!(ConstValue<'_>, 32);
50 impl<'tcx> ConstValue<'tcx> {
// NOTE(review): embedded original line numbers are non-contiguous throughout this
// impl; `match` headers and closing braces between methods are elided in this extract.
// Returns the scalar if this value has scalar representation; `None` for
// `ByRef` and `Slice` values.
52 pub fn try_to_scalar(&self) -> Option<Scalar> {
54 ConstValue::ByRef { .. } | ConstValue::Slice { .. } => None,
55 ConstValue::Scalar(val) => Some(val),
// Returns the raw bits of a scalar of exactly `size` bytes; `None` if this is
// not a scalar, is a pointer, or the size does not match.
59 pub fn try_to_bits(&self, size: Size) -> Option<u128> {
60 self.try_to_scalar()?.to_bits(size).ok()
// Like `try_to_bits`, but computes the size from the layout of `ty` (with all
// opaque types revealed via `with_reveal_all`); `None` if layout computation fails.
63 pub fn try_to_bits_for_ty(
66 param_env: ParamEnv<'tcx>,
69 let size = tcx.layout_of(param_env.with_reveal_all().and(ty)).ok()?.size;
70 self.try_to_bits(size)
// Convenience constructors delegating to the corresponding `Scalar` constructors.
73 pub fn from_bool(b: bool) -> Self {
74 ConstValue::Scalar(Scalar::from_bool(b))
77 pub fn from_u64(i: u64) -> Self {
78 ConstValue::Scalar(Scalar::from_u64(i))
// Pointer-width unsigned integer constant for the target described by `cx`.
81 pub fn from_machine_usize(i: u64, cx: &impl HasDataLayout) -> Self {
82 ConstValue::Scalar(Scalar::from_machine_usize(i, cx))
86 /// A `Scalar` represents an immediate, primitive value existing outside of a
87 /// `memory::Allocation`. It is in many ways like a small chunk of a `Allocation`, up to 8 bytes in
88 /// size. Like a range of bytes in an `Allocation`, a `Scalar` can either represent the raw bytes
89 /// of a simple value or a pointer into another `Allocation`
90 #[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
92 pub enum Scalar<Tag = (), Id = AllocId> {
93 /// The raw bytes of a simple value.
95 /// The first `size` bytes of `data` are the value.
96 /// Do not try to read less or more bytes than that. The remaining bytes must be 0.
// NOTE(review): the `Raw { data: u128, size: u8 }` variant declaration itself
// (original lines ~97-99) is elided from this extract; the impls below construct
// `Scalar::Raw { data, size }` with exactly those field names.
101 /// A pointer into an `Allocation`. An `Allocation` in the `memory` module has a list of
102 /// relocations, but a `Scalar` is only large enough to contain one, so we just represent the
103 /// relocation and its associated offset together as a `Pointer` here.
104 Ptr(Pointer<Tag, Id>),
// Keep this type small: scalars are the interpreter's basic currency.
107 #[cfg(target_arch = "x86_64")]
108 static_assert_size!(Scalar, 24);
110 impl<Tag: fmt::Debug, Id: fmt::Debug> fmt::Debug for Scalar<Tag, Id> {
111 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
113 Scalar::Ptr(ptr) => write!(f, "{:?}", ptr),
114 &Scalar::Raw { data, size } => {
// Debug-checks the "unused high bytes are zero" invariant before printing.
115 Scalar::check_data(data, size);
// NOTE(review): original lines ~116-118 are elided here; presumably they
// special-case size == 0 (ZST) output — confirm against the full source.
119 // Format as hex number wide enough to fit any value of the given `size`.
120 // So data=20, size=1 will be "0x14", but with size=4 it'll be "0x00000014".
121 write!(f, "0x{:>0width$x}", data, width = (size * 2) as usize)
// Human-readable form: raw values print as plain decimal; pointers are opaque.
128 impl<Tag> fmt::Display for Scalar<Tag> {
129 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
131 Scalar::Ptr(_) => write!(f, "a pointer"),
132 Scalar::Raw { data, .. } => write!(f, "{}", data),
// Conversions from apfloat soft-floats. NOTE(review): the bodies (original lines
// ~140/147) are elided; presumably they delegate to `Scalar::from_f32` /
// `Scalar::from_f64` defined below — confirm against the full source.
137 impl<Tag> From<Single> for Scalar<Tag> {
139 fn from(f: Single) -> Self {
144 impl<Tag> From<Double> for Scalar<Tag> {
146 fn from(f: Double) -> Self {
// NOTE(review): the `impl` header enclosing `check_data` and `with_tag`
// (original line ~150/156 region) is elided from this extract.
152 /// Make sure the `data` fits in `size`.
153 /// This is guaranteed by all constructors here, but since the enum variants are public,
154 /// it could still be violated (even though no code outside this file should
155 /// construct `Scalar`s).
157 fn check_data(data: u128, size: u8) {
// Asserts that truncating `data` to `size` bytes is a no-op, i.e. all bytes
// beyond `size` are zero (the assertion head on line ~158 is elided here).
159 truncate(data, Size::from_bytes(u64::from(size))),
161 "Scalar value {:#x} exceeds size of {} bytes",
167 /// Tag this scalar with `new_tag` if it is a pointer, leave it unchanged otherwise.
169 /// Used by `MemPlace::replace_tag`.
171 pub fn with_tag<Tag>(self, new_tag: Tag) -> Scalar<Tag> {
173 Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_tag(new_tag)),
// Re-construct rather than pass through: the `Tag` type parameter changes.
174 Scalar::Raw { data, size } => Scalar::Raw { data, size },
179 impl<'tcx, Tag> Scalar<Tag> {
180 /// Erase the tag from the scalar, if any.
182 /// Used by error reporting code to avoid having the error type depend on `Tag`.
184 pub fn erase_tag(self) -> Scalar {
186 Scalar::Ptr(ptr) => Scalar::Ptr(ptr.erase_tag()),
187 Scalar::Raw { data, size } => Scalar::Raw { data, size },
// A null pointer: all-zero bits at the target's pointer width.
192 pub fn null_ptr(cx: &impl HasDataLayout) -> Self {
193 Scalar::Raw { data: 0, size: cx.data_layout().pointer_size.bytes() as u8 }
// The canonical zero-sized scalar: size 0, data 0.
197 pub fn zst() -> Self {
198 Scalar::Raw { data: 0, size: 0 }
// NOTE(review): the signature line naming this helper (original line ~203) is
// elided; the callers below invoke it as `self.ptr_op(...)`. It dispatches
// pointer arithmetic: `f_int` for raw pointer-sized integers, `f_ptr` for
// actual pointers.
204 dl: &TargetDataLayout,
205 f_int: impl FnOnce(u64) -> InterpResult<'tcx, u64>,
206 f_ptr: impl FnOnce(Pointer<Tag>) -> InterpResult<'tcx, Pointer<Tag>>,
207 ) -> InterpResult<'tcx, Self> {
209 Scalar::Raw { data, size } => {
// Only pointer-sized raw values may participate in pointer arithmetic.
210 assert_eq!(u64::from(size), dl.pointer_size.bytes());
211 Ok(Scalar::Raw { data: u128::from(f_int(u64::try_from(data).unwrap())?), size })
213 Scalar::Ptr(ptr) => Ok(Scalar::Ptr(f_ptr(ptr)?)),
// Unsigned offset with overflow checking (errors instead of wrapping).
218 pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
219 let dl = cx.data_layout();
220 self.ptr_op(dl, |int| dl.offset(int, i.bytes()), |ptr| ptr.offset(i, dl))
// Unsigned offset with wrapping semantics (never fails).
224 pub fn ptr_wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
225 let dl = cx.data_layout();
228 |int| Ok(dl.overflowing_offset(int, i.bytes()).0),
229 |ptr| Ok(ptr.wrapping_offset(i, dl)),
// Signed offset with overflow checking.
235 pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> InterpResult<'tcx, Self> {
236 let dl = cx.data_layout();
237 self.ptr_op(dl, |int| dl.signed_offset(int, i), |ptr| ptr.signed_offset(i, dl))
// Signed offset with wrapping semantics.
241 pub fn ptr_wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
242 let dl = cx.data_layout();
245 |int| Ok(dl.overflowing_signed_offset(int, i).0),
246 |ptr| Ok(ptr.wrapping_signed_offset(i, dl)),
// NOTE(review): closing braces between the constructors below are elided
// (embedded numbering is non-contiguous); each body is otherwise self-contained.
252 pub fn from_bool(b: bool) -> Self {
253 // Guaranteed to be truncated and does not need sign extension.
254 Scalar::Raw { data: b as u128, size: 1 }
258 pub fn from_char(c: char) -> Self {
259 // Guaranteed to be truncated and does not need sign extension.
260 Scalar::Raw { data: c as u128, size: 4 }
// Returns `None` if `i` does not fit unsigned in `size` bytes
// (checked by truncating and comparing against the original).
264 pub fn try_from_uint(i: impl Into<u128>, size: Size) -> Option<Self> {
266 if truncate(i, size) == i {
267 Some(Scalar::Raw { data: i, size: size.bytes() as u8 })
// Infallible variant: ICEs (via `bug!`) if `i` does not fit in `size` bytes.
274 pub fn from_uint(i: impl Into<u128>, size: Size) -> Self {
276 Self::try_from_uint(i, size)
277 .unwrap_or_else(|| bug!("Unsigned value {:#x} does not fit in {} bits", i, size.bits()))
281 pub fn from_u8(i: u8) -> Self {
282 // Guaranteed to be truncated and does not need sign extension.
283 Scalar::Raw { data: i.into(), size: 1 }
287 pub fn from_u16(i: u16) -> Self {
288 // Guaranteed to be truncated and does not need sign extension.
289 Scalar::Raw { data: i.into(), size: 2 }
293 pub fn from_u32(i: u32) -> Self {
294 // Guaranteed to be truncated and does not need sign extension.
295 Scalar::Raw { data: i.into(), size: 4 }
299 pub fn from_u64(i: u64) -> Self {
300 // Guaranteed to be truncated and does not need sign extension.
301 Scalar::Raw { data: i.into(), size: 8 }
// Pointer-width unsigned integer for the target described by `cx`.
305 pub fn from_machine_usize(i: u64, cx: &impl HasDataLayout) -> Self {
306 Self::from_uint(i, cx.data_layout().pointer_size)
// Returns `None` if `i` does not fit signed in `size` bytes
// (round-trips through truncate + sign_extend to check).
310 pub fn try_from_int(i: impl Into<i128>, size: Size) -> Option<Self> {
312 // `into` performed sign extension, we have to truncate
313 let truncated = truncate(i as u128, size);
314 if sign_extend(truncated, size) as i128 == i {
315 Some(Scalar::Raw { data: truncated, size: size.bytes() as u8 })
// Infallible variant: ICEs (via `bug!`) if `i` does not fit in `size` bytes.
322 pub fn from_int(i: impl Into<i128>, size: Size) -> Self {
324 Self::try_from_int(i, size)
325 .unwrap_or_else(|| bug!("Signed value {:#x} does not fit in {} bits", i, size.bits()))
329 pub fn from_i8(i: i8) -> Self {
330 Self::from_int(i, Size::from_bits(8))
334 pub fn from_i16(i: i16) -> Self {
335 Self::from_int(i, Size::from_bits(16))
339 pub fn from_i32(i: i32) -> Self {
340 Self::from_int(i, Size::from_bits(32))
344 pub fn from_i64(i: i64) -> Self {
345 Self::from_int(i, Size::from_bits(64))
// Pointer-width signed integer for the target described by `cx`.
349 pub fn from_machine_isize(i: i64, cx: &impl HasDataLayout) -> Self {
350 Self::from_int(i, cx.data_layout().pointer_size)
354 pub fn from_f32(f: Single) -> Self {
355 // We trust apfloat to give us properly truncated data.
356 Scalar::Raw { data: f.to_bits(), size: 4 }
360 pub fn from_f64(f: Double) -> Self {
361 // We trust apfloat to give us properly truncated data.
362 Scalar::Raw { data: f.to_bits(), size: 8 }
365 /// This is very rarely the method you want! You should dispatch on the type
366 /// and use `force_bits`/`assert_bits`/`force_ptr`/`assert_ptr`.
367 /// This method only exists for the benefit of low-level memory operations
368 /// as well as the implementation of the `force_*` methods.
// Returns `Ok(bits)` for a raw value of exactly `target_size`, or `Err(ptr)`
// when this scalar is a pointer (which must be pointer-sized on the target).
// Size mismatches here are bugs, hence `assert` rather than an interp error.
370 pub fn to_bits_or_ptr(
373 cx: &impl HasDataLayout,
374 ) -> Result<u128, Pointer<Tag>> {
375 assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
377 Scalar::Raw { data, size } => {
378 assert_eq!(target_size.bytes(), u64::from(size));
379 Scalar::check_data(data, size);
// NOTE(review): the success result line (original line ~380, presumably
// `Ok(data)`) is elided from this extract.
382 Scalar::Ptr(ptr) => {
383 assert_eq!(target_size, cx.data_layout().pointer_size);
389 /// This method is intentionally private!
390 /// It is just a helper for other methods in this file.
// Unlike `to_bits_or_ptr`, a size mismatch here is reported as UB in the
// interpreted program, and pointers are rejected as an unsupported read.
392 fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
393 assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
395 Scalar::Raw { data, size } => {
396 if target_size.bytes() != u64::from(size) {
397 throw_ub!(ArgumentSizeMismatch {
398 target_size: target_size.bytes(),
399 data_size: u64::from(size)
402 Scalar::check_data(data, size);
405 Scalar::Ptr(_) => throw_unsup!(ReadPointerAsBytes),
// Panics when the scalar is a pointer; use only when the type guarantees bits.
410 pub fn assert_bits(self, target_size: Size) -> u128 {
411 self.to_bits(target_size).expect("expected Raw bits but got a Pointer")
// Counterpart of `assert_bits`: ICEs (via `bug!`) when the scalar is raw bits.
415 pub fn assert_ptr(self) -> Pointer<Tag> {
418 Scalar::Raw { .. } => bug!("expected a Pointer but got Raw bits"),
422 /// Do not call this method! Dispatch based on the type instead.
424 pub fn is_bits(self) -> bool {
426 Scalar::Raw { .. } => true,
431 /// Do not call this method! Dispatch based on the type instead.
433 pub fn is_ptr(self) -> bool {
435 Scalar::Ptr(_) => true,
// Only 0 => false and 1 => true are valid booleans; anything else is UB.
440 pub fn to_bool(self) -> InterpResult<'tcx, bool> {
441 let val = self.to_u8()?;
445 _ => throw_ub!(InvalidBool(val)),
// Rejects u32 values that are not valid Unicode scalar values.
449 pub fn to_char(self) -> InterpResult<'tcx, char> {
450 let val = self.to_u32()?;
451 match ::std::char::from_u32(val) {
453 None => throw_ub!(InvalidChar(val)),
// Shared helper for the unsigned `to_uN` accessors below.
458 fn to_unsigned_with_bit_width(self, bits: u64) -> InterpResult<'static, u128> {
459 let sz = Size::from_bits(bits);
463 /// Converts the scalar to produce an `u8`. Fails if the scalar is a pointer.
464 pub fn to_u8(self) -> InterpResult<'static, u8> {
465 self.to_unsigned_with_bit_width(8).map(|v| u8::try_from(v).unwrap())
468 /// Converts the scalar to produce an `u16`. Fails if the scalar is a pointer.
469 pub fn to_u16(self) -> InterpResult<'static, u16> {
470 self.to_unsigned_with_bit_width(16).map(|v| u16::try_from(v).unwrap())
473 /// Converts the scalar to produce an `u32`. Fails if the scalar is a pointer.
474 pub fn to_u32(self) -> InterpResult<'static, u32> {
475 self.to_unsigned_with_bit_width(32).map(|v| u32::try_from(v).unwrap())
478 /// Converts the scalar to produce an `u64`. Fails if the scalar is a pointer.
479 pub fn to_u64(self) -> InterpResult<'static, u64> {
480 self.to_unsigned_with_bit_width(64).map(|v| u64::try_from(v).unwrap())
// Pointer-width unsigned integer; fails if the scalar is a pointer.
483 pub fn to_machine_usize(self, cx: &impl HasDataLayout) -> InterpResult<'static, u64> {
484 let b = self.to_bits(cx.data_layout().pointer_size)?;
485 Ok(u64::try_from(b).unwrap())
// Shared helper for the signed `to_iN` accessors below; sign-extends the bits.
489 fn to_signed_with_bit_width(self, bits: u64) -> InterpResult<'static, i128> {
490 let sz = Size::from_bits(bits);
491 let b = self.to_bits(sz)?;
492 Ok(sign_extend(b, sz) as i128)
495 /// Converts the scalar to produce an `i8`. Fails if the scalar is a pointer.
496 pub fn to_i8(self) -> InterpResult<'static, i8> {
497 self.to_signed_with_bit_width(8).map(|v| i8::try_from(v).unwrap())
500 /// Converts the scalar to produce an `i16`. Fails if the scalar is a pointer.
501 pub fn to_i16(self) -> InterpResult<'static, i16> {
502 self.to_signed_with_bit_width(16).map(|v| i16::try_from(v).unwrap())
505 /// Converts the scalar to produce an `i32`. Fails if the scalar is a pointer.
506 pub fn to_i32(self) -> InterpResult<'static, i32> {
507 self.to_signed_with_bit_width(32).map(|v| i32::try_from(v).unwrap())
510 /// Converts the scalar to produce an `i64`. Fails if the scalar is a pointer.
511 pub fn to_i64(self) -> InterpResult<'static, i64> {
512 self.to_signed_with_bit_width(64).map(|v| i64::try_from(v).unwrap())
// Pointer-width signed integer; fails if the scalar is a pointer.
515 pub fn to_machine_isize(self, cx: &impl HasDataLayout) -> InterpResult<'static, i64> {
516 let sz = cx.data_layout().pointer_size;
517 let b = self.to_bits(sz)?;
518 let b = sign_extend(b, sz) as i128;
519 Ok(i64::try_from(b).unwrap())
523 pub fn to_f32(self) -> InterpResult<'static, Single> {
524 // Going through `u32` to check size and truncation.
525 Ok(Single::from_bits(self.to_u32()?.into()))
529 pub fn to_f64(self) -> InterpResult<'static, Double> {
530 // Going through `u64` to check size and truncation.
531 Ok(Double::from_bits(self.to_u64()?.into()))
// Wraps a pointer as a scalar. NOTE(review): the body (original line ~538) is
// elided; presumably it returns `Scalar::Ptr(ptr)` — confirm against the full source.
535 impl<Tag> From<Pointer<Tag>> for Scalar<Tag> {
537 fn from(ptr: Pointer<Tag>) -> Self {
// A scalar that may instead be (wholly) uninitialized memory.
542 #[derive(Clone, Copy, Eq, PartialEq, RustcEncodable, RustcDecodable, HashStable, Hash)]
543 pub enum ScalarMaybeUndef<Tag = (), Id = AllocId> {
544 Scalar(Scalar<Tag, Id>),
// NOTE(review): the `Undef` variant declaration and the enum's closing brace
// (original lines ~545-546) are elided from this extract; the impls below
// match on `ScalarMaybeUndef::Undef`.
548 impl<Tag> From<Scalar<Tag>> for ScalarMaybeUndef<Tag> {
550 fn from(s: Scalar<Tag>) -> Self {
551 ScalarMaybeUndef::Scalar(s)
555 impl<Tag> From<Pointer<Tag>> for ScalarMaybeUndef<Tag> {
557 fn from(s: Pointer<Tag>) -> Self {
// Goes through `Scalar::from(Pointer)` above.
558 ScalarMaybeUndef::Scalar(s.into())
562 impl<Tag: fmt::Debug, Id: fmt::Debug> fmt::Debug for ScalarMaybeUndef<Tag, Id> {
563 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
565 ScalarMaybeUndef::Undef => write!(f, "Undef"),
566 ScalarMaybeUndef::Scalar(s) => write!(f, "{:?}", s),
// Human-readable form, delegating to `Scalar`'s `Display` for initialized values.
571 impl<Tag> fmt::Display for ScalarMaybeUndef<Tag> {
572 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
574 ScalarMaybeUndef::Undef => write!(f, "uninitialized bytes"),
575 ScalarMaybeUndef::Scalar(s) => write!(f, "{}", s),
580 impl<'tcx, Tag> ScalarMaybeUndef<Tag> {
581 /// Erase the tag from the scalar, if any.
583 /// Used by error reporting code to avoid having the error type depend on `Tag`.
585 pub fn erase_tag(self) -> ScalarMaybeUndef {
587 ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.erase_tag()),
588 ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
// Unwraps the scalar, reporting a read of uninitialized bytes as UB.
593 pub fn not_undef(self) -> InterpResult<'static, Scalar<Tag>> {
595 ScalarMaybeUndef::Scalar(scalar) => Ok(scalar),
596 ScalarMaybeUndef::Undef => throw_ub!(InvalidUndefBytes(None)),
// Every accessor below first rejects `Undef` via `not_undef`, then defers to
// the corresponding method on `Scalar`.
601 pub fn to_bool(self) -> InterpResult<'tcx, bool> {
602 self.not_undef()?.to_bool()
606 pub fn to_char(self) -> InterpResult<'tcx, char> {
607 self.not_undef()?.to_char()
611 pub fn to_f32(self) -> InterpResult<'tcx, Single> {
612 self.not_undef()?.to_f32()
616 pub fn to_f64(self) -> InterpResult<'tcx, Double> {
617 self.not_undef()?.to_f64()
621 pub fn to_u8(self) -> InterpResult<'tcx, u8> {
622 self.not_undef()?.to_u8()
626 pub fn to_u16(self) -> InterpResult<'tcx, u16> {
627 self.not_undef()?.to_u16()
631 pub fn to_u32(self) -> InterpResult<'tcx, u32> {
632 self.not_undef()?.to_u32()
636 pub fn to_u64(self) -> InterpResult<'tcx, u64> {
637 self.not_undef()?.to_u64()
641 pub fn to_machine_usize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, u64> {
642 self.not_undef()?.to_machine_usize(cx)
646 pub fn to_i8(self) -> InterpResult<'tcx, i8> {
647 self.not_undef()?.to_i8()
651 pub fn to_i16(self) -> InterpResult<'tcx, i16> {
652 self.not_undef()?.to_i16()
656 pub fn to_i32(self) -> InterpResult<'tcx, i32> {
657 self.not_undef()?.to_i32()
661 pub fn to_i64(self) -> InterpResult<'tcx, i64> {
662 self.not_undef()?.to_i64()
666 pub fn to_machine_isize(self, cx: &impl HasDataLayout) -> InterpResult<'tcx, i64> {
667 self.not_undef()?.to_machine_isize(cx)
671 /// Gets the bytes of a constant slice value.
// ICEs (via `bug!`) if `val` is not `ConstValue::Slice`, or if the slice's byte
// range is not valid within its backing allocation.
672 pub fn get_slice_bytes<'tcx>(cx: &impl HasDataLayout, val: ConstValue<'tcx>) -> &'tcx [u8] {
673 if let ConstValue::Slice { data, start, end } = val {
674 let len = end - start;
// NOTE(review): the receiver/method of the byte read (original lines ~675-676,
// presumably a `data.get_bytes(cx, ...)` call taking the pointer and length
// below) is elided from this extract — confirm against the full source.
677 // invent a pointer, only the offset is relevant anyway
678 Pointer::new(AllocId(0), Size::from_bytes(start)),
679 Size::from_bytes(len),
681 .unwrap_or_else(|err| bug!("const slice is invalid: {:?}", err))
683 bug!("expected const slice, but found another const value");