2 use rustc_macros::HashStable;
4 use crate::ty::{Ty, InferConst, ParamConst, layout::{HasDataLayout, Size}, subst::SubstsRef};
5 use crate::ty::PlaceholderConst;
6 use crate::hir::def_id::DefId;
8 use super::{EvalResult, Pointer, PointerArithmetic, Allocation, AllocId, sign_extend, truncate};
10 /// Represents the result of a raw const operation, pre-validation.
11 #[derive(Copy, Clone, Debug, Eq, PartialEq, RustcEncodable, RustcDecodable, Hash, HashStable)]
12 pub struct RawConst<'tcx> {
13 // the value lives here, at offset 0, and that allocation definitely is a `AllocKind::Memory`
14 // (so you can use `AllocMap::unwrap_memory`).
15 pub alloc_id: AllocId,
// NOTE(review): `'tcx` is unused by the visible field; presumably a `Ty<'tcx>` field (and the
// closing brace) are missing from this excerpt — confirm against the full file.
19 /// Represents a constant value in Rust. `Scalar` and `ScalarPair` are optimizations that
20 /// match the `LocalState` optimizations for easy conversions between `Value` and `ConstValue`.
21 #[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord,
22 RustcEncodable, RustcDecodable, Hash, HashStable)]
23 pub enum ConstValue<'tcx> {
24 /// A const generic parameter.
// NOTE(review): the `Param(ParamConst)` variant declaration appears to be missing here
// (`try_to_scalar` below matches on `ConstValue::Param(_)`).
27 /// Infer the value of the const.
28 Infer(InferConst<'tcx>),
30 /// A placeholder const - universally quantified higher-ranked const.
31 Placeholder(PlaceholderConst),
33 /// Used only for types with `layout::abi::Scalar` ABI and ZSTs.
35 /// Not using the enum `Value` to encode that this must not be `Undef`.
// NOTE(review): the `Scalar(Scalar)` variant declaration appears to be missing here.
38 /// Used only for `&[u8]` and `&str`
// NOTE(review): the `Slice { .. }` struct-variant header appears to be missing; only the
// `data` field of that variant is visible below.
40 data: &'tcx Allocation,
45 /// An allocation together with a pointer into the allocation.
46 /// Invariant: the pointer's `AllocId` resolves to the allocation.
47 ByRef(Pointer, &'tcx Allocation),
49 /// Used in the HIR by using `Unevaluated` everywhere and later normalizing to one of the other
50 /// variants when the code is monomorphic enough for that.
51 Unevaluated(DefId, SubstsRef<'tcx>),
// Pin the in-memory size of `ConstValue` so accidental growth is caught at compile time.
// Checked only on x86_64, where the layout is known and stable enough to assert on.
54 #[cfg(target_arch = "x86_64")]
55 static_assert_size!(ConstValue<'_>, 32);
57 impl<'tcx> ConstValue<'tcx> {
/// Returns the `Scalar` payload, or `None` for every non-`Scalar` variant.
59 pub fn try_to_scalar(&self) -> Option<Scalar> {
// NOTE(review): the `match *self {` header appears to be missing from this excerpt.
61 ConstValue::Param(_) |
62 ConstValue::Infer(_) |
63 ConstValue::Placeholder(_) |
64 ConstValue::ByRef(..) |
65 ConstValue::Unevaluated(..) |
66 ConstValue::Slice { .. } => None,
67 ConstValue::Scalar(val) => Some(val),
/// Like `try_to_scalar`, then reads the scalar as raw bits of exactly `size` bytes
/// (pointers yield `None`, via `to_bits` erroring).
72 pub fn try_to_bits(&self, size: Size) -> Option<u128> {
73 self.try_to_scalar()?.to_bits(size).ok()
/// Like `try_to_scalar`, then requires the scalar to be a `Pointer`
/// (raw bits yield `None`, via `to_ptr` erroring).
77 pub fn try_to_ptr(&self) -> Option<Pointer> {
78 self.try_to_scalar()?.to_ptr().ok()
82 /// A `Scalar` represents an immediate, primitive value existing outside of a
83 /// `memory::Allocation`. It is in many ways like a small chunk of a `Allocation`, up to 8 bytes in
84 /// size. Like a range of bytes in an `Allocation`, a `Scalar` can either represent the raw bytes
85 /// of a simple value or a pointer into another `Allocation`
86 #[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd,
87 RustcEncodable, RustcDecodable, Hash, HashStable)]
88 pub enum Scalar<Tag=(), Id=AllocId> {
89 /// The raw bytes of a simple value.
91 /// The first `size` bytes of `data` are the value.
92 /// Do not try to read less or more bytes than that. The remaining bytes must be 0.
// NOTE(review): the `Raw { data: u128, size: u8 }` variant declaration appears to be missing
// from this excerpt (the impls below pattern-match `Scalar::Raw { data, size }`).
97 /// A pointer into an `Allocation`. An `Allocation` in the `memory` module has a list of
98 /// relocations, but a `Scalar` is only large enough to contain one, so we just represent the
99 /// relocation and its associated offset together as a `Pointer` here.
100 Ptr(Pointer<Tag, Id>),
// Pin the in-memory size of `Scalar` (with the default `Tag`/`Id` parameters) so accidental
// growth is caught at compile time; checked on x86_64 only.
103 #[cfg(target_arch = "x86_64")]
104 static_assert_size!(Scalar, 24);
106 impl<Tag: fmt::Debug, Id: fmt::Debug> fmt::Debug for Scalar<Tag, Id> {
107 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Pointers defer to the pointer's own `Debug` impl.
110 write!(f, "{:?}", ptr),
111 &Scalar::Raw { data, size } => {
// Re-check the "high bytes are zero" invariant before printing.
112 Scalar::check_data(data, size);
116 // Format as hex number wide enough to fit any value of the given `size`.
117 // So data=20, size=1 will be "0x14", but with size=4 it'll be "0x00000014".
118 write!(f, "0x{:>0width$x}", data, width=(size*2) as usize)
125 impl<Tag> fmt::Display for Scalar<Tag> {
126 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// User-facing output: never print pointer internals, just say "a pointer".
128 Scalar::Ptr(_) => write!(f, "a pointer"),
129 Scalar::Raw { data, .. } => write!(f, "{}", data),
134 impl<'tcx> Scalar<()> {
/// Debug-asserts that `data` fits in `size` bytes, i.e. that truncating to `size` is a no-op
/// (the `Raw` invariant: all bytes beyond `size` must be zero).
136 fn check_data(data: u128, size: u8) {
137 debug_assert_eq!(truncate(data, Size::from_bytes(size as u64)), data,
138 "Scalar value {:#x} exceeds size of {} bytes", data, size);
/// Attaches a tag to an untagged scalar; the raw bits/pointer payload are unchanged.
142 pub fn with_tag<Tag>(self, new_tag: Tag) -> Scalar<Tag> {
143 // Used by `MemPlace::replace_tag`
145 Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_tag(new_tag)),
146 Scalar::Raw { data, size } => Scalar::Raw { data, size },
151 impl<'tcx, Tag> Scalar<Tag> {
/// Strips the tag, producing a `Scalar<()>`; the payload is otherwise unchanged.
153 pub fn erase_tag(self) -> Scalar {
154 // Used by error reporting code to avoid having the error type depend on `Tag`
156 Scalar::Ptr(ptr) => Scalar::Ptr(ptr.erase_tag()),
157 Scalar::Raw { data, size } => Scalar::Raw { data, size },
/// The NULL "pointer": an all-zero raw value at the *target*'s pointer width.
162 pub fn ptr_null(cx: &impl HasDataLayout) -> Self {
165 size: cx.data_layout().pointer_size.bytes() as u8,
/// The canonical zero-sized value: zero bits, zero bytes.
170 pub fn zst() -> Self {
171 Scalar::Raw { data: 0, size: 0 }
/// Offsets the value by `i` bytes, propagating an error on overflow (via `dl.offset`'s `?`);
/// works on both pointer-sized raw integers and real pointers.
175 pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
176 let dl = cx.data_layout();
178 Scalar::Raw { data, size } => {
// Only pointer-sized raw values may participate in pointer arithmetic.
179 assert_eq!(size as u64, dl.pointer_size.bytes());
181 data: dl.offset(data as u64, i.bytes())? as u128,
185 Scalar::Ptr(ptr) => ptr.offset(i, dl).map(Scalar::Ptr),
/// Like `ptr_offset`, but wraps around on overflow instead of erroring.
190 pub fn ptr_wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
191 let dl = cx.data_layout();
193 Scalar::Raw { data, size } => {
194 assert_eq!(size as u64, dl.pointer_size.bytes());
// `overflowing_offset` returns (value, overflowed); the overflow flag is deliberately ignored.
196 data: dl.overflowing_offset(data as u64, i.bytes()).0 as u128,
200 Scalar::Ptr(ptr) => Scalar::Ptr(ptr.wrapping_offset(i, dl)),
/// Offsets the value by a *signed* byte amount `i`, erroring on overflow.
205 pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
206 let dl = cx.data_layout();
208 Scalar::Raw { data, size } => {
209 assert_eq!(size as u64, dl.pointer_size().bytes());
211 data: dl.signed_offset(data as u64, i)? as u128,
215 Scalar::Ptr(ptr) => ptr.signed_offset(i, dl).map(Scalar::Ptr),
/// Offsets the value by a signed byte amount, wrapping around on overflow.
220 pub fn ptr_wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
221 let dl = cx.data_layout();
223 Scalar::Raw { data, size } => {
224 assert_eq!(size as u64, dl.pointer_size.bytes());
// Widen to i128 for the data-layout helper, then drop the overflow flag.
226 data: dl.overflowing_signed_offset(data as u64, i128::from(i)).0 as u128,
230 Scalar::Ptr(ptr) => Scalar::Ptr(ptr.wrapping_signed_offset(i, dl)),
234 /// Returns this pointer's offset from the allocation base, or from NULL (for
235 /// integer pointers).
237 pub fn get_ptr_offset(self, cx: &impl HasDataLayout) -> Size {
239 Scalar::Raw { data, size } => {
240 assert_eq!(size as u64, cx.pointer_size().bytes());
241 Size::from_bytes(data as u64)
243 Scalar::Ptr(ptr) => ptr.offset,
/// Whether this is the all-zero, pointer-sized raw value (i.e. NULL).
/// A real `Ptr` into an allocation is never NULL.
248 pub fn is_null_ptr(self, cx: &impl HasDataLayout) -> bool {
250 Scalar::Raw { data, size } => {
251 assert_eq!(size as u64, cx.data_layout().pointer_size.bytes());
// NOTE(review): the result expression (presumably `data == 0`) appears to be missing
// from this excerpt.
254 Scalar::Ptr(_) => false,
/// Encodes a `bool` as a 1-byte scalar (0 or 1).
259 pub fn from_bool(b: bool) -> Self {
260 Scalar::Raw { data: b as u128, size: 1 }
/// Encodes a `char` as its 4-byte Unicode scalar value.
264 pub fn from_char(c: char) -> Self {
265 Scalar::Raw { data: c as u128, size: 4 }
/// Builds a scalar from an unsigned integer, debug-asserting it fits in `size` bytes.
269 pub fn from_uint(i: impl Into<u128>, size: Size) -> Self {
272 truncate(i, size), i,
273 "Unsigned value {:#x} does not fit in {} bits", i, size.bits()
275 Scalar::Raw { data: i, size: size.bytes() as u8 }
/// Builds a scalar from a signed integer, storing the truncated two's-complement bits and
/// debug-asserting that sign-extending them back recovers the original value.
279 pub fn from_int(i: impl Into<i128>, size: Size) -> Self {
281 // `into` performed sign extension, we have to truncate
282 let truncated = truncate(i as u128, size);
284 sign_extend(truncated, size) as i128, i,
285 "Signed value {:#x} does not fit in {} bits", i, size.bits()
287 Scalar::Raw { data: truncated, size: size.bytes() as u8 }
/// Stores an `f32` as its 4 raw IEEE-754 bytes (bit-level, no numeric conversion).
291 pub fn from_f32(f: f32) -> Self {
292 Scalar::Raw { data: f.to_bits() as u128, size: 4 }
/// Stores an `f64` as its 8 raw IEEE-754 bytes (bit-level, no numeric conversion).
296 pub fn from_f64(f: f64) -> Self {
297 Scalar::Raw { data: f.to_bits() as u128, size: 8 }
/// Returns the raw bits (`Ok`) or the pointer (`Err`), asserting that the value's size
/// matches `target_size` in either case.
301 pub fn to_bits_or_ptr(
// NOTE(review): the `self` and `target_size: Size` parameter lines appear to be missing
// from this excerpt.
304 cx: &impl HasDataLayout,
305 ) -> Result<u128, Pointer<Tag>> {
307 Scalar::Raw { data, size } => {
308 assert_eq!(target_size.bytes(), size as u64);
309 assert_ne!(size, 0, "you should never look at the bits of a ZST");
310 Scalar::check_data(data, size);
// A pointer's only meaningful "size" is the target's pointer width.
313 Scalar::Ptr(ptr) => {
314 assert_eq!(target_size, cx.data_layout().pointer_size);
/// Returns the raw bits, or a `ReadPointerAsBytes` error for pointers.
321 pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
323 Scalar::Raw { data, size } => {
324 assert_eq!(target_size.bytes(), size as u64);
325 assert_ne!(size, 0, "you should never look at the bits of a ZST");
326 Scalar::check_data(data, size);
329 Scalar::Ptr(_) => err!(ReadPointerAsBytes),
/// Returns the pointer, erroring for raw bits; all-zero bits get the dedicated
/// `InvalidNullPointerUsage` error instead of `ReadBytesAsPointer`.
334 pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
336 Scalar::Raw { data: 0, .. } => err!(InvalidNullPointerUsage),
337 Scalar::Raw { .. } => err!(ReadBytesAsPointer),
338 Scalar::Ptr(p) => Ok(p),
/// Whether this is a `Raw` (non-pointer) value.
343 pub fn is_bits(self) -> bool {
345 Scalar::Raw { .. } => true,
/// Whether this is a `Ptr` value.
351 pub fn is_ptr(self) -> bool {
353 Scalar::Ptr(_) => true,
/// Decodes a 1-byte scalar as `bool`; anything other than a 1-byte 0/1 is `InvalidBool`.
358 pub fn to_bool(self) -> EvalResult<'tcx, bool> {
360 Scalar::Raw { data: 0, size: 1 } => Ok(false),
361 Scalar::Raw { data: 1, size: 1 } => Ok(true),
362 _ => err!(InvalidBool),
/// Decodes a 4-byte scalar as `char`, rejecting values that are not valid Unicode
/// scalar values with `InvalidChar`.
366 pub fn to_char(self) -> EvalResult<'tcx, char> {
367 let val = self.to_u32()?;
368 match ::std::char::from_u32(val) {
370 None => err!(InvalidChar(val as u128)),
// The `to_uN` family: read the value as raw bits of exactly the requested width
// (errors if the scalar is a pointer or has a different size, via `to_bits`).
/// Reads the scalar as a `u8` (must be a 1-byte raw value).
374 pub fn to_u8(self) -> EvalResult<'static, u8> {
375 let sz = Size::from_bits(8);
376 let b = self.to_bits(sz)?;
/// Reads the scalar as a `u32` (must be a 4-byte raw value).
380 pub fn to_u32(self) -> EvalResult<'static, u32> {
381 let sz = Size::from_bits(32);
382 let b = self.to_bits(sz)?;
/// Reads the scalar as a `u64` (must be an 8-byte raw value).
386 pub fn to_u64(self) -> EvalResult<'static, u64> {
387 let sz = Size::from_bits(64);
388 let b = self.to_bits(sz)?;
/// Reads the scalar as an unsigned integer of the *target*'s pointer width.
392 pub fn to_usize(self, cx: &impl HasDataLayout) -> EvalResult<'static, u64> {
393 let b = self.to_bits(cx.data_layout().pointer_size)?;
// The `to_iN` family: read the raw bits, then sign-extend to recover the signed value.
/// Reads the scalar as an `i8` (must be a 1-byte raw value).
397 pub fn to_i8(self) -> EvalResult<'static, i8> {
398 let sz = Size::from_bits(8);
399 let b = self.to_bits(sz)?;
400 let b = sign_extend(b, sz) as i128;
/// Reads the scalar as an `i32` (must be a 4-byte raw value).
404 pub fn to_i32(self) -> EvalResult<'static, i32> {
405 let sz = Size::from_bits(32);
406 let b = self.to_bits(sz)?;
407 let b = sign_extend(b, sz) as i128;
/// Reads the scalar as an `i64` (must be an 8-byte raw value).
411 pub fn to_i64(self) -> EvalResult<'static, i64> {
412 let sz = Size::from_bits(64);
413 let b = self.to_bits(sz)?;
414 let b = sign_extend(b, sz) as i128;
/// Reads the scalar as a signed integer of the *target*'s pointer width.
418 pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'static, i64> {
419 let sz = cx.data_layout().pointer_size;
420 let b = self.to_bits(sz)?;
421 let b = sign_extend(b, sz) as i128;
/// Reinterprets a 4-byte scalar as an `f32` (bit-level, no numeric conversion).
426 pub fn to_f32(self) -> EvalResult<'static, f32> {
427 Ok(f32::from_bits(self.to_u32()?))
/// Reinterprets an 8-byte scalar as an `f64` (bit-level, no numeric conversion).
431 pub fn to_f64(self) -> EvalResult<'static, f64> {
432 Ok(f64::from_bits(self.to_u64()?))
// Allow a plain `Pointer` to be used wherever a `Scalar` is expected.
436 impl<Tag> From<Pointer<Tag>> for Scalar<Tag> {
438 fn from(ptr: Pointer<Tag>) -> Self {
/// A `Scalar` that may alternatively be entirely uninitialized.
443 #[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
444 pub enum ScalarMaybeUndef<Tag=(), Id=AllocId> {
445 Scalar(Scalar<Tag, Id>),
// NOTE(review): the `Undef` variant declaration appears to be missing from this excerpt
// (it is matched as `ScalarMaybeUndef::Undef` in the impls below).
// Every defined `Scalar` trivially lifts into the "maybe undef" wrapper.
449 impl<Tag> From<Scalar<Tag>> for ScalarMaybeUndef<Tag> {
451 fn from(s: Scalar<Tag>) -> Self {
452 ScalarMaybeUndef::Scalar(s)
456 impl<Tag: fmt::Debug, Id: fmt::Debug> fmt::Debug for ScalarMaybeUndef<Tag, Id> {
457 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// `Undef` prints literally; a defined value defers to `Scalar`'s `Debug`.
459 ScalarMaybeUndef::Undef => write!(f, "Undef"),
460 ScalarMaybeUndef::Scalar(s) => write!(f, "{:?}", s),
465 impl<Tag> fmt::Display for ScalarMaybeUndef<Tag> {
466 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// User-facing wording, matching the style of `Scalar`'s `Display` impl.
468 ScalarMaybeUndef::Undef => write!(f, "uninitialized bytes"),
469 ScalarMaybeUndef::Scalar(s) => write!(f, "{}", s),
474 impl<'tcx, Tag> ScalarMaybeUndef<Tag> {
/// Strips the tag from the inner scalar; `Undef` is preserved unchanged.
476 pub fn erase_tag(self) -> ScalarMaybeUndef
478 // Used by error reporting code to avoid having the error type depend on `Tag`
480 ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.erase_tag()),
481 ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
/// Unwraps the scalar, turning `Undef` into a `ReadUndefBytes` error (reported at offset 0).
486 pub fn not_undef(self) -> EvalResult<'static, Scalar<Tag>> {
488 ScalarMaybeUndef::Scalar(scalar) => Ok(scalar),
489 ScalarMaybeUndef::Undef => err!(ReadUndefBytes(Size::from_bytes(0))),
// All remaining methods reject `Undef` via `not_undef` and then defer to the
// `Scalar` conversion of the same name.
494 pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
495 self.not_undef()?.to_ptr()
499 pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
500 self.not_undef()?.to_bits(target_size)
504 pub fn to_bool(self) -> EvalResult<'tcx, bool> {
505 self.not_undef()?.to_bool()
509 pub fn to_char(self) -> EvalResult<'tcx, char> {
510 self.not_undef()?.to_char()
514 pub fn to_f32(self) -> EvalResult<'tcx, f32> {
515 self.not_undef()?.to_f32()
519 pub fn to_f64(self) -> EvalResult<'tcx, f64> {
520 self.not_undef()?.to_f64()
524 pub fn to_u8(self) -> EvalResult<'tcx, u8> {
525 self.not_undef()?.to_u8()
529 pub fn to_u32(self) -> EvalResult<'tcx, u32> {
530 self.not_undef()?.to_u32()
534 pub fn to_u64(self) -> EvalResult<'tcx, u64> {
535 self.not_undef()?.to_u64()
539 pub fn to_usize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, u64> {
540 self.not_undef()?.to_usize(cx)
544 pub fn to_i8(self) -> EvalResult<'tcx, i8> {
545 self.not_undef()?.to_i8()
549 pub fn to_i32(self) -> EvalResult<'tcx, i32> {
550 self.not_undef()?.to_i32()
554 pub fn to_i64(self) -> EvalResult<'tcx, i64> {
555 self.not_undef()?.to_i64()
559 pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, i64> {
560 self.not_undef()?.to_isize(cx)
564 impl_stable_hash_for!(enum crate::mir::interpret::ScalarMaybeUndef {