2 use crate::cmp::Ordering::{self, Equal, Greater, Less};
4 use crate::slice::{self, SliceIndex};
7 impl<T: ?Sized> *mut T {
8 /// Returns `true` if the pointer is null.
10 /// Note that unsized types have many possible null pointers, as only the
11 /// raw data pointer is considered, not their length, vtable, etc.
12 /// Therefore, two pointers that are null may still not compare equal to
15 /// ## Behavior during const evaluation
17 /// When this function is used during const evaluation, it may return `false` for pointers
18 /// that turn out to be null at runtime. Specifically, when a pointer to some memory
19 /// is offset beyond its bounds in such a way that the resulting pointer is null,
20 /// the function will still return `false`. There is no way for CTFE to know
21 /// the absolute position of that memory, so we cannot tell if the pointer is
29 /// let mut s = [1, 2, 3];
30 /// let ptr: *mut u32 = s.as_mut_ptr();
31 /// assert!(!ptr.is_null());
33 #[stable(feature = "rust1", since = "1.0.0")]
34 #[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
36 pub const fn is_null(self) -> bool {
37 // Compare via a cast to a thin pointer, so fat pointers are compared
38 // only by their "data" part for null-ness (length/vtable are ignored).
// `guaranteed_eq` never spuriously returns `true`, so a `true` here really means
// null; it may spuriously return `false` during const evaluation, which matches
// the "Behavior during const evaluation" caveat documented above.
39 (self as *mut u8).guaranteed_eq(null_mut())
42 /// Casts to a pointer of another type.
///
/// `ptr.cast::<U>()` is equivalent to `ptr as *mut U`, but is usable in method
/// chains and lets type inference supply `U`.
43 #[stable(feature = "ptr_cast", since = "1.38.0")]
44 #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
46 pub const fn cast<U>(self) -> *mut U {
50 /// Decompose a (possibly wide) pointer into its address and metadata components.
52 /// The pointer can be later reconstructed with [`from_raw_parts_mut`].
53 #[unstable(feature = "ptr_metadata", issue = "81513")]
54 #[rustc_const_unstable(feature = "ptr_metadata", issue = "81513")]
56 pub const fn to_raw_parts(self) -> (*mut (), <T as super::Pointee>::Metadata) {
57 (self.cast(), super::metadata(self))
60 /// Returns `None` if the pointer is null, or else returns a shared reference to
61 /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
62 /// must be used instead.
64 /// For the mutable counterpart see [`as_mut`].
66 /// [`as_uninit_ref`]: #method.as_uninit_ref-1
67 /// [`as_mut`]: #method.as_mut
71 /// When calling this method, you have to ensure that *either* the pointer is NULL *or*
72 /// all of the following is true:
74 /// * The pointer must be properly aligned.
76 /// * It must be "dereferenceable" in the sense defined in [the module documentation].
78 /// * The pointer must point to an initialized instance of `T`.
80 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
81 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
82 /// In particular, for the duration of this lifetime, the memory the pointer points to must
83 /// not get mutated (except inside `UnsafeCell`).
85 /// This applies even if the result of this method is unused!
86 /// (The part about being initialized is not yet fully decided, but until
87 /// it is, the only safe approach is to ensure that they are indeed initialized.)
89 /// [the module documentation]: crate::ptr#safety
96 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
99 /// if let Some(val_back) = ptr.as_ref() {
100 /// println!("We got back the value: {}!", val_back);
105 /// # Null-unchecked version
107 /// If you are sure the pointer can never be null and are looking for some kind of
108 /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
109 /// dereference the pointer directly.
112 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
115 /// let val_back = &*ptr;
116 /// println!("We got back the value: {}!", val_back);
119 #[stable(feature = "ptr_as_ref", since = "1.9.0")]
121 pub unsafe fn as_ref<'a>(self) -> Option<&'a T> {
122 // SAFETY: the caller must guarantee that `self` is valid for a
123 // reference if it isn't null.
124 if self.is_null() { None } else { unsafe { Some(&*self) } }
127 /// Returns `None` if the pointer is null, or else returns a shared reference to
128 /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
129 /// that the value has to be initialized.
131 /// For the mutable counterpart see [`as_uninit_mut`].
133 /// [`as_ref`]: #method.as_ref-1
134 /// [`as_uninit_mut`]: #method.as_uninit_mut
138 /// When calling this method, you have to ensure that *either* the pointer is NULL *or*
139 /// all of the following is true:
141 /// * The pointer must be properly aligned.
143 /// * It must be "dereferenceable" in the sense defined in [the module documentation].
145 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
146 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
147 /// In particular, for the duration of this lifetime, the memory the pointer points to must
148 /// not get mutated (except inside `UnsafeCell`).
150 /// This applies even if the result of this method is unused!
152 /// [the module documentation]: crate::ptr#safety
159 /// #![feature(ptr_as_uninit)]
161 /// let ptr: *mut u8 = &mut 10u8 as *mut u8;
164 /// if let Some(val_back) = ptr.as_uninit_ref() {
165 /// println!("We got back the value: {}!", val_back.assume_init());
170 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
171 pub unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
175 // SAFETY: the caller must guarantee that `self` meets all the
176 // requirements for a reference.
177 if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
180 /// Calculates the offset from a pointer.
182 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
183 /// offset of `3 * size_of::<T>()` bytes.
187 /// If any of the following conditions are violated, the result is Undefined
190 /// * Both the starting and resulting pointer must be either in bounds or one
191 /// byte past the end of the same [allocated object].
193 /// * The computed offset, **in bytes**, cannot overflow an `isize`.
195 /// * The offset being in bounds cannot rely on "wrapping around" the address
196 /// space. That is, the infinite-precision sum, **in bytes** must fit in a `usize`.
198 /// The compiler and standard library generally tries to ensure allocations
199 /// never reach a size where an offset is a concern. For instance, `Vec`
200 /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
201 /// `vec.as_ptr().add(vec.len())` is always safe.
203 /// Most platforms fundamentally can't even construct such an allocation.
204 /// For instance, no known 64-bit platform can ever serve a request
205 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
206 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
207 /// more than `isize::MAX` bytes with things like Physical Address
208 /// Extension. As such, memory acquired directly from allocators or memory
209 /// mapped files *may* be too large to handle with this function.
211 /// Consider using [`wrapping_offset`] instead if these constraints are
212 /// difficult to satisfy. The only advantage of this method is that it
213 /// enables more aggressive compiler optimizations.
215 /// [`wrapping_offset`]: #method.wrapping_offset
216 /// [allocated object]: crate::ptr#allocated-object
223 /// let mut s = [1, 2, 3];
224 /// let ptr: *mut u32 = s.as_mut_ptr();
227 /// println!("{}", *ptr.offset(1));
228 /// println!("{}", *ptr.offset(2));
231 #[stable(feature = "rust1", since = "1.0.0")]
232 #[must_use = "returns a new pointer rather than modifying its argument"]
233 #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
235 pub const unsafe fn offset(self, count: isize) -> *mut T
239 // SAFETY: the caller must uphold the safety contract for `offset`.
240 // The obtained pointer is valid for writes since the caller must
241 // guarantee that it points to the same allocated object as `self`.
242 unsafe { intrinsics::offset(self, count) as *mut T }
245 /// Calculates the offset from a pointer using wrapping arithmetic.
246 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
247 /// offset of `3 * size_of::<T>()` bytes.
251 /// This operation itself is always safe, but using the resulting pointer is not.
253 /// The resulting pointer "remembers" the [allocated object] that `self` points to; it may not
254 /// be used to read or write other allocated objects.
256 /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
257 /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
258 /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
259 /// `x` and `y` point into the same allocated object.
261 /// Compared to [`offset`], this method basically delays the requirement of staying within the
262 /// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
263 /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
264 /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
265 /// can be optimized better and is thus preferable in performance-sensitive code.
267 /// The delayed check only considers the value of the pointer that was dereferenced, not the
268 /// intermediate values used during the computation of the final result. For example,
269 /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
270 /// words, leaving the allocated object and then re-entering it later is permitted.
272 /// [`offset`]: #method.offset
273 /// [allocated object]: crate::ptr#allocated-object
280 /// // Iterate using a raw pointer in increments of two elements
281 /// let mut data = [1u8, 2, 3, 4, 5];
282 /// let mut ptr: *mut u8 = data.as_mut_ptr();
284 /// let end_rounded_up = ptr.wrapping_offset(6);
286 /// while ptr != end_rounded_up {
290 /// ptr = ptr.wrapping_offset(step);
292 /// assert_eq!(&data, &[0, 2, 0, 4, 0]);
294 #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
295 #[must_use = "returns a new pointer rather than modifying its argument"]
296 #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
298 pub const fn wrapping_offset(self, count: isize) -> *mut T
302 // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
// The result may be out-of-bounds or even wrapped; that is explicitly
// allowed here, as documented above ("This operation itself is always safe").
303 unsafe { intrinsics::arith_offset(self, count) as *mut T }
306 /// Returns `None` if the pointer is null, or else returns a unique reference to
307 /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_mut`]
308 /// must be used instead.
310 /// For the shared counterpart see [`as_ref`].
312 /// [`as_uninit_mut`]: #method.as_uninit_mut
313 /// [`as_ref`]: #method.as_ref-1
317 /// When calling this method, you have to ensure that *either* the pointer is NULL *or*
318 /// all of the following is true:
320 /// * The pointer must be properly aligned.
322 /// * It must be "dereferenceable" in the sense defined in [the module documentation].
324 /// * The pointer must point to an initialized instance of `T`.
326 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
327 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
328 /// In particular, for the duration of this lifetime, the memory the pointer points to must
329 /// not get accessed (read or written) through any other pointer.
331 /// This applies even if the result of this method is unused!
332 /// (The part about being initialized is not yet fully decided, but until
333 /// it is, the only safe approach is to ensure that they are indeed initialized.)
335 /// [the module documentation]: crate::ptr#safety
342 /// let mut s = [1, 2, 3];
343 /// let ptr: *mut u32 = s.as_mut_ptr();
344 /// let first_value = unsafe { ptr.as_mut().unwrap() };
345 /// *first_value = 4;
346 /// # assert_eq!(s, [4, 2, 3]);
347 /// println!("{:?}", s); // It'll print: "[4, 2, 3]".
350 /// # Null-unchecked version
352 /// If you are sure the pointer can never be null and are looking for some kind of
353 /// `as_mut_unchecked` that returns the `&mut T` instead of `Option<&mut T>`, know that
354 /// you can dereference the pointer directly.
357 /// let mut s = [1, 2, 3];
358 /// let ptr: *mut u32 = s.as_mut_ptr();
359 /// let first_value = unsafe { &mut *ptr };
360 /// *first_value = 4;
361 /// # assert_eq!(s, [4, 2, 3]);
362 /// println!("{:?}", s); // It'll print: "[4, 2, 3]".
364 #[stable(feature = "ptr_as_ref", since = "1.9.0")]
366 pub unsafe fn as_mut<'a>(self) -> Option<&'a mut T> {
367 // SAFETY: the caller must guarantee that `self` is valid for
368 // a mutable reference if it isn't null.
369 if self.is_null() { None } else { unsafe { Some(&mut *self) } }
372 /// Returns `None` if the pointer is null, or else returns a unique reference to
373 /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
374 /// that the value has to be initialized.
376 /// For the shared counterpart see [`as_uninit_ref`].
378 /// [`as_mut`]: #method.as_mut
379 /// [`as_uninit_ref`]: #method.as_uninit_ref-1
383 /// When calling this method, you have to ensure that *either* the pointer is NULL *or*
384 /// all of the following is true:
386 /// * The pointer must be properly aligned.
388 /// * It must be "dereferenceable" in the sense defined in [the module documentation].
390 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
391 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
392 /// In particular, for the duration of this lifetime, the memory the pointer points to must
393 /// not get accessed (read or written) through any other pointer.
395 /// This applies even if the result of this method is unused!
397 /// [the module documentation]: crate::ptr#safety
399 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
400 pub unsafe fn as_uninit_mut<'a>(self) -> Option<&'a mut MaybeUninit<T>>
404 // SAFETY: the caller must guarantee that `self` meets all the
405 // requirements for a reference.
406 if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit<T>) }) }
409 /// Returns whether two pointers are guaranteed to be equal.
411 /// At runtime this function behaves like `self == other`.
412 /// However, in some contexts (e.g., compile-time evaluation),
413 /// it is not always possible to determine equality of two pointers, so this function may
414 /// spuriously return `false` for pointers that later actually turn out to be equal.
415 /// But when it returns `true`, the pointers are guaranteed to be equal.
417 /// This function is the mirror of [`guaranteed_ne`], but not its inverse. There are pointer
418 /// comparisons for which both functions return `false`.
420 /// [`guaranteed_ne`]: #method.guaranteed_ne
422 /// The return value may change depending on the compiler version and unsafe code may not
423 /// rely on the result of this function for soundness. It is suggested to only use this function
424 /// for performance optimizations where spurious `false` return values by this function do not
425 /// affect the outcome, but just the performance.
426 /// The consequences of using this method to make runtime and compile-time code behave
427 /// differently have not been explored. This method should not be used to introduce such
428 /// differences, and it should also not be stabilized before we have a better understanding
430 #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
431 #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
433 pub const fn guaranteed_eq(self, other: *mut T) -> bool
// Mutability is irrelevant for address comparison, so forward both
// operands to the `*const T` intrinsic via `as *const _` casts.
437 intrinsics::ptr_guaranteed_eq(self as *const _, other as *const _)
440 /// Returns whether two pointers are guaranteed to be unequal.
442 /// At runtime this function behaves like `self != other`.
443 /// However, in some contexts (e.g., compile-time evaluation),
444 /// it is not always possible to determine the inequality of two pointers, so this function may
445 /// spuriously return `false` for pointers that later actually turn out to be unequal.
446 /// But when it returns `true`, the pointers are guaranteed to be unequal.
448 /// This function is the mirror of [`guaranteed_eq`], but not its inverse. There are pointer
449 /// comparisons for which both functions return `false`.
451 /// [`guaranteed_eq`]: #method.guaranteed_eq
453 /// The return value may change depending on the compiler version and unsafe code may not
454 /// rely on the result of this function for soundness. It is suggested to only use this function
455 /// for performance optimizations where spurious `false` return values by this function do not
456 /// affect the outcome, but just the performance.
457 /// The consequences of using this method to make runtime and compile-time code behave
458 /// differently have not been explored. This method should not be used to introduce such
459 /// differences, and it should also not be stabilized before we have a better understanding
461 #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
462 #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
// NOTE: like `guaranteed_eq` above, comparing pointer addresses is a safe
// operation (the body calls the intrinsic without an `unsafe` block), so this
// must not be an `unsafe fn`; the previous `unsafe` qualifier was spurious.
464 pub const fn guaranteed_ne(self, other: *mut T) -> bool
// Mutability is irrelevant for address comparison, so forward both
// operands to the `*const T` intrinsic via `as *const _` casts.
468 intrinsics::ptr_guaranteed_ne(self as *const _, other as *const _)
471 /// Calculates the distance between two pointers. The returned value is in
472 /// units of T: the distance in bytes is divided by `mem::size_of::<T>()`.
474 /// This function is the inverse of [`offset`].
476 /// [`offset`]: #method.offset-1
480 /// If any of the following conditions are violated, the result is Undefined
483 /// * Both the starting and other pointer must be either in bounds or one
484 /// byte past the end of the same [allocated object].
486 /// * Both pointers must be *derived from* a pointer to the same object.
487 /// (See below for an example.)
489 /// * The distance between the pointers, in bytes, must be an exact multiple
490 /// of the size of `T`.
492 /// * The distance between the pointers, **in bytes**, cannot overflow an `isize`.
494 /// * The distance being in bounds cannot rely on "wrapping around" the address space.
496 /// Rust types are never larger than `isize::MAX` and Rust allocations never wrap around the
497 /// address space, so two pointers within some value of any Rust type `T` will always satisfy
498 /// the last two conditions. The standard library also generally ensures that allocations
499 /// never reach a size where an offset is a concern. For instance, `Vec` and `Box` ensure they
500 /// never allocate more than `isize::MAX` bytes, so `ptr_into_vec.offset_from(vec.as_ptr())`
501 /// always satisfies the last two conditions.
503 /// Most platforms fundamentally can't even construct such a large allocation.
504 /// For instance, no known 64-bit platform can ever serve a request
505 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
506 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
507 /// more than `isize::MAX` bytes with things like Physical Address
508 /// Extension. As such, memory acquired directly from allocators or memory
509 /// mapped files *may* be too large to handle with this function.
510 /// (Note that [`offset`] and [`add`] also have a similar limitation and hence cannot be used on
511 /// such large allocations either.)
513 /// [`add`]: #method.add
514 /// [allocated object]: crate::ptr#allocated-object
518 /// This function panics if `T` is a Zero-Sized Type ("ZST").
525 /// let mut a = [0; 5];
526 /// let ptr1: *mut i32 = &mut a[1];
527 /// let ptr2: *mut i32 = &mut a[3];
529 /// assert_eq!(ptr2.offset_from(ptr1), 2);
530 /// assert_eq!(ptr1.offset_from(ptr2), -2);
531 /// assert_eq!(ptr1.offset(2), ptr2);
532 /// assert_eq!(ptr2.offset(-2), ptr1);
536 /// *Incorrect* usage:
539 /// let ptr1 = Box::into_raw(Box::new(0u8));
540 /// let ptr2 = Box::into_raw(Box::new(1u8));
541 /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
542 /// // Make ptr2_other an "alias" of ptr2, but derived from ptr1.
543 /// let ptr2_other = (ptr1 as *mut u8).wrapping_offset(diff);
544 /// assert_eq!(ptr2 as usize, ptr2_other as usize);
545 /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
546 /// // computing their offset is undefined behavior, even though
547 /// // they point to the same address!
549 /// let zero = ptr2_other.offset_from(ptr2); // Undefined Behavior
552 #[stable(feature = "ptr_offset_from", since = "1.47.0")]
553 #[rustc_const_unstable(feature = "const_ptr_offset_from", issue = "41079")]
555 pub const unsafe fn offset_from(self, origin: *const T) -> isize
559 // SAFETY: the caller must uphold the safety contract for `offset_from`;
// the actual computation (and the ZST panic) lives in the `*const T`
// implementation, which this simply delegates to.
560 unsafe { (self as *const T).offset_from(origin) }
563 /// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
565 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
566 /// offset of `3 * size_of::<T>()` bytes.
570 /// If any of the following conditions are violated, the result is Undefined
573 /// * Both the starting and resulting pointer must be either in bounds or one
574 /// byte past the end of the same [allocated object].
576 /// * The computed offset, **in bytes**, cannot overflow an `isize`.
578 /// * The offset being in bounds cannot rely on "wrapping around" the address
579 /// space. That is, the infinite-precision sum must fit in a `usize`.
581 /// The compiler and standard library generally tries to ensure allocations
582 /// never reach a size where an offset is a concern. For instance, `Vec`
583 /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
584 /// `vec.as_ptr().add(vec.len())` is always safe.
586 /// Most platforms fundamentally can't even construct such an allocation.
587 /// For instance, no known 64-bit platform can ever serve a request
588 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
589 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
590 /// more than `isize::MAX` bytes with things like Physical Address
591 /// Extension. As such, memory acquired directly from allocators or memory
592 /// mapped files *may* be too large to handle with this function.
594 /// Consider using [`wrapping_add`] instead if these constraints are
595 /// difficult to satisfy. The only advantage of this method is that it
596 /// enables more aggressive compiler optimizations.
598 /// [`wrapping_add`]: #method.wrapping_add
605 /// let s: &str = "123";
606 /// let ptr: *const u8 = s.as_ptr();
609 /// println!("{}", *ptr.add(1) as char);
610 /// println!("{}", *ptr.add(2) as char);
613 #[stable(feature = "pointer_methods", since = "1.26.0")]
614 #[must_use = "returns a new pointer rather than modifying its argument"]
615 #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
617 pub const unsafe fn add(self, count: usize) -> Self
621 // SAFETY: the caller must uphold the safety contract for `offset`
// (`add` is documented above as a convenience for `.offset(count as isize)`).
622 unsafe { self.offset(count as isize) }
625 /// Calculates the offset from a pointer (convenience for
626 /// `.offset((count as isize).wrapping_neg())`).
628 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
629 /// offset of `3 * size_of::<T>()` bytes.
633 /// If any of the following conditions are violated, the result is Undefined
636 /// * Both the starting and resulting pointer must be either in bounds or one
637 /// byte past the end of the same [allocated object].
639 /// * The computed offset cannot exceed `isize::MAX` **bytes**.
641 /// * The offset being in bounds cannot rely on "wrapping around" the address
642 /// space. That is, the infinite-precision sum must fit in a `usize`.
644 /// The compiler and standard library generally tries to ensure allocations
645 /// never reach a size where an offset is a concern. For instance, `Vec`
646 /// and `Box` ensure they never allocate more than `isize::MAX` bytes, so
647 /// `vec.as_ptr().add(vec.len()).sub(vec.len())` is always safe.
649 /// Most platforms fundamentally can't even construct such an allocation.
650 /// For instance, no known 64-bit platform can ever serve a request
651 /// for 2<sup>63</sup> bytes due to page-table limitations or splitting the address space.
652 /// However, some 32-bit and 16-bit platforms may successfully serve a request for
653 /// more than `isize::MAX` bytes with things like Physical Address
654 /// Extension. As such, memory acquired directly from allocators or memory
655 /// mapped files *may* be too large to handle with this function.
657 /// Consider using [`wrapping_sub`] instead if these constraints are
658 /// difficult to satisfy. The only advantage of this method is that it
659 /// enables more aggressive compiler optimizations.
661 /// [`wrapping_sub`]: #method.wrapping_sub
662 /// [allocated object]: crate::ptr#allocated-object
669 /// let s: &str = "123";
672 /// let end: *const u8 = s.as_ptr().add(3);
673 /// println!("{}", *end.sub(1) as char);
674 /// println!("{}", *end.sub(2) as char);
677 #[stable(feature = "pointer_methods", since = "1.26.0")]
678 #[must_use = "returns a new pointer rather than modifying its argument"]
679 #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
681 pub const unsafe fn sub(self, count: usize) -> Self
685 // SAFETY: the caller must uphold the safety contract for `offset`.
686 unsafe { self.offset((count as isize).wrapping_neg()) }
689 /// Calculates the offset from a pointer using wrapping arithmetic.
690 /// (convenience for `.wrapping_offset(count as isize)`)
692 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
693 /// offset of `3 * size_of::<T>()` bytes.
697 /// This operation itself is always safe, but using the resulting pointer is not.
699 /// The resulting pointer "remembers" the [allocated object] that `self` points to; it may not
700 /// be used to read or write other allocated objects.
702 /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
703 /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
704 /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
705 /// `x` and `y` point into the same allocated object.
707 /// Compared to [`add`], this method basically delays the requirement of staying within the
708 /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
709 /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
710 /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
711 /// can be optimized better and is thus preferable in performance-sensitive code.
713 /// The delayed check only considers the value of the pointer that was dereferenced, not the
714 /// intermediate values used during the computation of the final result. For example,
715 /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
716 /// allocated object and then re-entering it later is permitted.
718 /// [`add`]: #method.add
719 /// [allocated object]: crate::ptr#allocated-object
726 /// // Iterate using a raw pointer in increments of two elements
727 /// let data = [1u8, 2, 3, 4, 5];
728 /// let mut ptr: *const u8 = data.as_ptr();
730 /// let end_rounded_up = ptr.wrapping_add(6);
732 /// // This loop prints "1, 3, 5, "
733 /// while ptr != end_rounded_up {
735 /// print!("{}, ", *ptr);
737 /// ptr = ptr.wrapping_add(step);
740 #[stable(feature = "pointer_methods", since = "1.26.0")]
741 #[must_use = "returns a new pointer rather than modifying its argument"]
742 #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
744 pub const fn wrapping_add(self, count: usize) -> Self
// Delegates to `wrapping_offset`, which is always safe to call
// (no `unsafe` needed here).
748 self.wrapping_offset(count as isize)
751 /// Calculates the offset from a pointer using wrapping arithmetic.
752 /// (convenience for `.wrapping_offset((count as isize).wrapping_neg())`)
754 /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
755 /// offset of `3 * size_of::<T>()` bytes.
759 /// This operation itself is always safe, but using the resulting pointer is not.
761 /// The resulting pointer "remembers" the [allocated object] that `self` points to; it may not
762 /// be used to read or write other allocated objects.
764 /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
765 /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
766 /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
767 /// `x` and `y` point into the same allocated object.
769 /// Compared to [`sub`], this method basically delays the requirement of staying within the
770 /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
771 /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
772 /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
773 /// can be optimized better and is thus preferable in performance-sensitive code.
775 /// The delayed check only considers the value of the pointer that was dereferenced, not the
776 /// intermediate values used during the computation of the final result. For example,
777 /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
778 /// allocated object and then re-entering it later is permitted.
780 /// [`sub`]: #method.sub
781 /// [allocated object]: crate::ptr#allocated-object
788 /// // Iterate using a raw pointer in increments of two elements (backwards)
789 /// let data = [1u8, 2, 3, 4, 5];
790 /// let mut ptr: *const u8 = data.as_ptr();
791 /// let start_rounded_down = ptr.wrapping_sub(2);
792 /// ptr = ptr.wrapping_add(4);
794 /// // This loop prints "5, 3, 1, "
795 /// while ptr != start_rounded_down {
797 /// print!("{}, ", *ptr);
799 /// ptr = ptr.wrapping_sub(step);
802 #[stable(feature = "pointer_methods", since = "1.26.0")]
803 #[must_use = "returns a new pointer rather than modifying its argument"]
804 #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
806 pub const fn wrapping_sub(self, count: usize) -> Self
// Delegates to `wrapping_offset` with a negated count; always safe to call
// (no `unsafe` needed here).
810 self.wrapping_offset((count as isize).wrapping_neg())
813 /// Sets the pointer value to `ptr`.
815 /// In case `self` is a (fat) pointer to an unsized type, this operation
816 /// will only affect the pointer part, whereas for (thin) pointers to
817 /// sized types, this has the same effect as a simple assignment.
819 /// The resulting pointer will have provenance of `val`, i.e., for a fat
820 /// pointer, this operation is semantically the same as creating a new
821 /// fat pointer with the data pointer value of `val` but the metadata of
826 /// This function is primarily useful for allowing byte-wise pointer
827 /// arithmetic on potentially fat pointers:
830 /// #![feature(set_ptr_value)]
831 /// # use core::fmt::Debug;
832 /// let mut arr: [i32; 3] = [1, 2, 3];
833 /// let mut ptr = arr.as_mut_ptr() as *mut dyn Debug;
834 /// let thin = ptr as *mut u8;
836 /// ptr = ptr.set_ptr_value(thin.add(8));
837 /// # assert_eq!(*(ptr as *mut i32), 3);
838 /// println!("{:?}", &*ptr); // will print "3"
841 #[unstable(feature = "set_ptr_value", issue = "75091")]
842 #[must_use = "returns a new pointer rather than modifying its argument"]
844 pub fn set_ptr_value(mut self, val: *mut u8) -> Self {
845 let thin = &mut self as *mut *mut T as *mut *mut u8;
846 // SAFETY: In case of a thin pointer, this operation is identical
847 // to a simple assignment. In case of a fat pointer, with the current
848 // fat pointer layout implementation, the first field of such a
849 // pointer is always the data pointer, which is likewise assigned.
850 unsafe { *thin = val };
854 /// Reads the value from `self` without moving it. This leaves the
855 /// memory in `self` unchanged.
857 /// See [`ptr::read`] for safety concerns and examples.
859 /// [`ptr::read`]: crate::ptr::read()
860 #[stable(feature = "pointer_methods", since = "1.26.0")]
861 #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
863 pub const unsafe fn read(self) -> T
867 // SAFETY: the caller must uphold the safety contract for `read`.
868 unsafe { read(self) }
871 /// Performs a volatile read of the value from `self` without moving it. This
872 /// leaves the memory in `self` unchanged.
874 /// Volatile operations are intended to act on I/O memory, and are guaranteed
875 /// to not be elided or reordered by the compiler across other volatile
878 /// See [`ptr::read_volatile`] for safety concerns and examples.
880 /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
881 #[stable(feature = "pointer_methods", since = "1.26.0")]
883 pub unsafe fn read_volatile(self) -> T
887 // SAFETY: the caller must uphold the safety contract for `read_volatile`.
// `self` is the source address; the pointed-to memory is left untouched.
888 unsafe { read_volatile(self) }
891 /// Reads the value from `self` without moving it. This leaves the
892 /// memory in `self` unchanged.
894 /// Unlike `read`, the pointer may be unaligned.
896 /// See [`ptr::read_unaligned`] for safety concerns and examples.
898 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
899 #[stable(feature = "pointer_methods", since = "1.26.0")]
900 #[rustc_const_unstable(feature = "const_ptr_read", issue = "80377")]
902 pub const unsafe fn read_unaligned(self) -> T
906 // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
// Alignment of `self` is NOT required here; the remaining requirements
// (validity for reads, initialized value) still apply.
907 unsafe { read_unaligned(self) }
910 /// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
911 /// and destination may overlap.
913 /// NOTE: this has the *same* argument order as [`ptr::copy`].
915 /// See [`ptr::copy`] for safety concerns and examples.
917 /// [`ptr::copy`]: crate::ptr::copy()
918 #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
919 #[stable(feature = "pointer_methods", since = "1.26.0")]
921 pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
925 // SAFETY: the caller must uphold the safety contract for `copy`.
// `self` is the source and `dest` the destination — same order as `ptr::copy`.
926 unsafe { copy(self, dest, count) }
929 /// Copies `count * size_of<T>` bytes from `self` to `dest`. The source
930 /// and destination may *not* overlap.
932 /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
934 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
936 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
937 #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
938 #[stable(feature = "pointer_methods", since = "1.26.0")]
940 pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
944 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
// `self` is the source, `dest` the destination; the ranges must not overlap.
945 unsafe { copy_nonoverlapping(self, dest, count) }
948 /// Copies `count * size_of<T>` bytes from `src` to `self`. The source
949 /// and destination may overlap.
951 /// NOTE: this has the *opposite* argument order of [`ptr::copy`].
953 /// See [`ptr::copy`] for safety concerns and examples.
955 /// [`ptr::copy`]: crate::ptr::copy()
956 #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
957 #[stable(feature = "pointer_methods", since = "1.26.0")]
959 pub const unsafe fn copy_from(self, src: *const T, count: usize)
963 // SAFETY: the caller must uphold the safety contract for `copy`.
// Arguments are swapped relative to `copy_to`: here `self` is the destination.
964 unsafe { copy(src, self, count) }
967 /// Copies `count * size_of<T>` bytes from `src` to `self`. The source
968 /// and destination may *not* overlap.
970 /// NOTE: this has the *opposite* argument order of [`ptr::copy_nonoverlapping`].
972 /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
974 /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
975 #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
976 #[stable(feature = "pointer_methods", since = "1.26.0")]
978 pub const unsafe fn copy_from_nonoverlapping(self, src: *const T, count: usize)
982 // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
// Arguments are swapped relative to `copy_to_nonoverlapping`: `self` is the
// destination, and the two ranges must not overlap.
983 unsafe { copy_nonoverlapping(src, self, count) }
986 /// Executes the destructor (if any) of the pointed-to value.
988 /// See [`ptr::drop_in_place`] for safety concerns and examples.
990 /// [`ptr::drop_in_place`]: crate::ptr::drop_in_place()
991 #[stable(feature = "pointer_methods", since = "1.26.0")]
993 pub unsafe fn drop_in_place(self) {
994 // SAFETY: the caller must uphold the safety contract for `drop_in_place`.
// Only the destructor runs; the memory itself is not deallocated.
995 unsafe { drop_in_place(self) }
998 /// Overwrites a memory location with the given value without reading or
999 /// dropping the old value.
1001 /// See [`ptr::write`] for safety concerns and examples.
1003 /// [`ptr::write`]: crate::ptr::write()
1004 #[stable(feature = "pointer_methods", since = "1.26.0")]
1005 #[rustc_const_unstable(feature = "const_ptr_write", issue = "none")]
1007 pub const unsafe fn write(self, val: T)
1011 // SAFETY: the caller must uphold the safety contract for `write`.
// `val` is moved into the location; the previous contents are neither read
// nor dropped.
1012 unsafe { write(self, val) }
1015 /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
1016 /// bytes of memory starting at `self` to `val`.
1018 /// See [`ptr::write_bytes`] for safety concerns and examples.
1020 /// [`ptr::write_bytes`]: crate::ptr::write_bytes()
1021 #[stable(feature = "pointer_methods", since = "1.26.0")]
1023 pub unsafe fn write_bytes(self, val: u8, count: usize)
1027 // SAFETY: the caller must uphold the safety contract for `write_bytes`.
// memset-style fill: every byte of `count` `T`-sized elements is set to `val`.
1028 unsafe { write_bytes(self, val, count) }
1031 /// Performs a volatile write of a memory location with the given value without
1032 /// reading or dropping the old value.
1034 /// Volatile operations are intended to act on I/O memory, and are guaranteed
1035 /// to not be elided or reordered by the compiler across other volatile
1038 /// See [`ptr::write_volatile`] for safety concerns and examples.
1040 /// [`ptr::write_volatile`]: crate::ptr::write_volatile()
1041 #[stable(feature = "pointer_methods", since = "1.26.0")]
1043 pub unsafe fn write_volatile(self, val: T)
1047 // SAFETY: the caller must uphold the safety contract for `write_volatile`.
// Volatile store: not elided or reordered across other volatile operations.
1048 unsafe { write_volatile(self, val) }
1051 /// Overwrites a memory location with the given value without reading or
1052 /// dropping the old value.
1054 /// Unlike `write`, the pointer may be unaligned.
1056 /// See [`ptr::write_unaligned`] for safety concerns and examples.
1058 /// [`ptr::write_unaligned`]: crate::ptr::write_unaligned()
1059 #[stable(feature = "pointer_methods", since = "1.26.0")]
1060 #[rustc_const_unstable(feature = "const_ptr_write", issue = "none")]
1062 pub const unsafe fn write_unaligned(self, val: T)
1066 // SAFETY: the caller must uphold the safety contract for `write_unaligned`.
// Alignment of `self` is NOT required; the other `write` requirements remain.
1067 unsafe { write_unaligned(self, val) }
1070 /// Replaces the value at `self` with `src`, returning the old
1071 /// value, without dropping either.
1073 /// See [`ptr::replace`] for safety concerns and examples.
1075 /// [`ptr::replace`]: crate::ptr::replace()
1076 #[stable(feature = "pointer_methods", since = "1.26.0")]
1078 pub unsafe fn replace(self, src: T) -> T
1082 // SAFETY: the caller must uphold the safety contract for `replace`.
// Moves `src` in and returns the previous value; neither value is dropped.
1083 unsafe { replace(self, src) }
1086 /// Swaps the values at two mutable locations of the same type, without
1087 /// deinitializing either. They may overlap, unlike `mem::swap` which is
1088 /// otherwise equivalent.
1090 /// See [`ptr::swap`] for safety concerns and examples.
1092 /// [`ptr::swap`]: crate::ptr::swap()
1093 #[stable(feature = "pointer_methods", since = "1.26.0")]
1095 pub unsafe fn swap(self, with: *mut T)
1099 // SAFETY: the caller must uphold the safety contract for `swap`.
// Unlike `mem::swap`, the two locations are allowed to overlap.
1100 unsafe { swap(self, with) }
1103 /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
1106 /// If it is not possible to align the pointer, the implementation returns
1107 /// `usize::MAX`. It is permissible for the implementation to *always*
1108 /// return `usize::MAX`. Only your algorithm's performance can depend
1109 /// on getting a usable offset here, not its correctness.
1111 /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
1112 /// used with the `wrapping_add` method.
1114 /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
1115 /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
1116 /// the returned offset is correct in all terms other than alignment.
1120 /// The function panics if `align` is not a power-of-two.
1124 /// Accessing adjacent `u8` as `u16`
1127 /// # fn foo(n: usize) {
1128 /// # use std::mem::align_of;
1130 /// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
1131 /// let ptr = x.as_ptr().add(n) as *const u8;
1132 /// let offset = ptr.align_offset(align_of::<u16>());
1133 /// if offset < x.len() - n - 1 {
1134 /// let u16_ptr = ptr.add(offset) as *const u16;
1135 /// assert_ne!(*u16_ptr, 500);
1137 /// // while the pointer can be aligned via `offset`, it would point
1138 /// // outside the allocation
1142 #[stable(feature = "align_offset", since = "1.36.0")]
1143 pub fn align_offset(self, align: usize) -> usize
// Non-power-of-two alignments are invalid; this is the documented panic above.
1147 if !align.is_power_of_two() {
1148 panic!("align_offset: align is not a power-of-two");
1150 // SAFETY: `align` has been checked to be a power of 2 above
1151 unsafe { align_offset(self, align) }
1155 #[lang = "mut_slice_ptr"]
1157 /// Returns the length of a raw slice.
1159 /// The returned value is the number of **elements**, not the number of bytes.
1161 /// This function is safe, even when the raw slice cannot be cast to a slice
1162 /// reference because the pointer is null or unaligned.
1167 /// #![feature(slice_ptr_len)]
1170 /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1171 /// assert_eq!(slice.len(), 3);
1174 #[unstable(feature = "slice_ptr_len", issue = "71146")]
1175 #[rustc_const_unstable(feature = "const_slice_ptr_len", issue = "71146")]
1176 pub const fn len(self) -> usize {
// Only the length metadata of the fat slice pointer is needed, which is why
// this is safe even for a null data pointer (see the example above).
1180 /// Returns a raw pointer to the slice's buffer.
1182 /// This is equivalent to casting `self` to `*mut T`, but more type-safe.
1187 /// #![feature(slice_ptr_get)]
1190 /// let slice: *mut [i8] = ptr::slice_from_raw_parts_mut(ptr::null_mut(), 3);
1191 /// assert_eq!(slice.as_mut_ptr(), 0 as *mut i8);
1194 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1195 #[rustc_const_unstable(feature = "slice_ptr_get", issue = "74265")]
1196 pub const fn as_mut_ptr(self) -> *mut T {
// Yields the data-pointer half of the fat pointer (equivalent to `self as *mut T`).
1200 /// Returns a raw pointer to an element or subslice, without doing bounds
1203 /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
1204 /// is *[undefined behavior]* even if the resulting pointer is not used.
1206 /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
1211 /// #![feature(slice_ptr_get)]
1213 /// let x = &mut [1, 2, 4] as *mut [i32];
1216 /// assert_eq!(x.get_unchecked_mut(1), x.as_mut_ptr().add(1));
1219 #[unstable(feature = "slice_ptr_get", issue = "74265")]
1221 pub unsafe fn get_unchecked_mut<I>(self, index: I) -> *mut I::Output
1225 // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
1226 unsafe { index.get_unchecked_mut(self) }
1229 /// Returns `None` if the pointer is null, or else returns a shared slice to
1230 /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
1231 /// that the value has to be initialized.
1233 /// For the mutable counterpart see [`as_uninit_slice_mut`].
1235 /// [`as_ref`]: #method.as_ref-1
1236 /// [`as_uninit_slice_mut`]: #method.as_uninit_slice_mut
1240 /// When calling this method, you have to ensure that *either* the pointer is NULL *or*
1241 /// all of the following is true:
1243 /// * The pointer must be [valid] for reads for `ptr.len() * mem::size_of::<T>()` many bytes,
1244 /// and it must be properly aligned. This means in particular:
1246 /// * The entire memory range of this slice must be contained within a single [allocated object]!
1247 /// Slices can never span across multiple allocated objects.
1249 /// * The pointer must be aligned even for zero-length slices. One
1250 /// reason for this is that enum layout optimizations may rely on references
1251 /// (including slices of any length) being aligned and non-null to distinguish
1252 /// them from other data. You can obtain a pointer that is usable as `data`
1253 /// for zero-length slices using [`NonNull::dangling()`].
1255 /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1256 /// See the safety documentation of [`pointer::offset`].
1258 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1259 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1260 /// In particular, for the duration of this lifetime, the memory the pointer points to must
1261 /// not get mutated (except inside `UnsafeCell`).
1263 /// This applies even if the result of this method is unused!
1265 /// See also [`slice::from_raw_parts`][].
1267 /// [valid]: crate::ptr#safety
1268 /// [allocated object]: crate::ptr#allocated-object
1270 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1271 pub unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
1275 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
1276 // `MaybeUninit<T>` is layout-compatible with `T`, so the cast plus the
1276 // existing length metadata form a valid view of the same memory.
1276 Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
1280 /// Returns `None` if the pointer is null, or else returns a unique slice to
1281 /// the value wrapped in `Some`. In contrast to [`as_mut`], this does not require
1282 /// that the value has to be initialized.
1284 /// For the shared counterpart see [`as_uninit_slice`].
1286 /// [`as_mut`]: #method.as_mut
1287 /// [`as_uninit_slice`]: #method.as_uninit_slice-1
1291 /// When calling this method, you have to ensure that *either* the pointer is NULL *or*
1292 /// all of the following is true:
1294 /// * The pointer must be [valid] for reads and writes for `ptr.len() * mem::size_of::<T>()`
1295 /// many bytes, and it must be properly aligned. This means in particular:
1297 /// * The entire memory range of this slice must be contained within a single [allocated object]!
1298 /// Slices can never span across multiple allocated objects.
1300 /// * The pointer must be aligned even for zero-length slices. One
1301 /// reason for this is that enum layout optimizations may rely on references
1302 /// (including slices of any length) being aligned and non-null to distinguish
1303 /// them from other data. You can obtain a pointer that is usable as `data`
1304 /// for zero-length slices using [`NonNull::dangling()`].
1306 /// * The total size `ptr.len() * mem::size_of::<T>()` of the slice must be no larger than `isize::MAX`.
1307 /// See the safety documentation of [`pointer::offset`].
1309 /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
1310 /// arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
1311 /// In particular, for the duration of this lifetime, the memory the pointer points to must
1312 /// not get accessed (read or written) through any other pointer.
1314 /// This applies even if the result of this method is unused!
1316 /// See also [`slice::from_raw_parts_mut`][].
1318 /// [valid]: crate::ptr#safety
1319 /// [allocated object]: crate::ptr#allocated-object
1321 #[unstable(feature = "ptr_as_uninit", issue = "75402")]
1322 pub unsafe fn as_uninit_slice_mut<'a>(self) -> Option<&'a mut [MaybeUninit<T>]> {
1326 // SAFETY: the caller must uphold the safety contract for `as_uninit_slice_mut`.
1327 // `MaybeUninit<T>` is layout-compatible with `T`, so the cast plus the
1327 // existing length metadata form a valid mutable view of the same memory.
1327 Some(unsafe { slice::from_raw_parts_mut(self as *mut MaybeUninit<T>, self.len()) })
1332 // Equality for pointers
1332 // Raw pointers compare by pointer value (for fat pointers this includes the
1332 // metadata), never by the pointed-to contents.
1333 #[stable(feature = "rust1", since = "1.0.0")]
1334 impl<T: ?Sized> PartialEq for *mut T {
1336 fn eq(&self, other: &*mut T) -> bool {
// Marker impl: raw-pointer `==` is a full equivalence relation, so `Eq` has
// no methods to provide.
1341 #[stable(feature = "rust1", since = "1.0.0")]
1342 impl<T: ?Sized> Eq for *mut T {}
// Total order over raw pointer values, built from the primitive pointer
// comparison operators.
1344 #[stable(feature = "rust1", since = "1.0.0")]
1345 impl<T: ?Sized> Ord for *mut T {
1347 fn cmp(&self, other: &*mut T) -> Ordering {
1350 } else if self == other {
1358 #[stable(feature = "rust1", since = "1.0.0")]
1359 impl<T: ?Sized> PartialOrd for *mut T {
1361 fn partial_cmp(&self, other: &*mut T) -> Option<Ordering> {
1362 // Pointer ordering is total (see the `Ord` impl), so this is always `Some`.
1362 Some(self.cmp(other))
1366 fn lt(&self, other: &*mut T) -> bool {
1371 fn le(&self, other: &*mut T) -> bool {
1376 fn gt(&self, other: &*mut T) -> bool {
1381 fn ge(&self, other: &*mut T) -> bool {