From 1b77f8e6ead5f717e9f7889885c6e84e05f4cef2 Mon Sep 17 00:00:00 2001
From: Albin Hedman
Date: Sat, 26 Dec 2020 02:22:29 +0100
Subject: [PATCH] Constify intrinsics::copy[_nonoverlapping]

---
 .../rustc_mir/src/interpret/intrinsics.rs | 23 +++++++++++++++++++
 library/core/src/intrinsics.rs            | 17 +++++++++-----
 2 files changed, 34 insertions(+), 6 deletions(-)

diff --git a/compiler/rustc_mir/src/interpret/intrinsics.rs b/compiler/rustc_mir/src/interpret/intrinsics.rs
index 474e1f8e577..c182fa35ee2 100644
--- a/compiler/rustc_mir/src/interpret/intrinsics.rs
+++ b/compiler/rustc_mir/src/interpret/intrinsics.rs
@@ -322,6 +322,29 @@ pub fn emulate_intrinsic(
                 let result = Scalar::from_uint(truncated_bits, layout.size);
                 self.write_scalar(result, dest)?;
             }
+            sym::copy | sym::copy_nonoverlapping => {
+                let elem_ty = instance.substs.type_at(0);
+                let elem_layout = self.layout_of(elem_ty)?;
+                let count = self.read_scalar(args[2])?.to_machine_usize(self)?;
+                let elem_align = elem_layout.align.abi;
+
+                let size = elem_layout.size.checked_mul(count, self).ok_or_else(|| {
+                    err_ub_format!("overflow computing total size of `{}`", intrinsic_name)
+                })?;
+                let src = self.read_scalar(args[0])?.check_init()?;
+                let src = self.memory.check_ptr_access(src, size, elem_align)?;
+                let dest = self.read_scalar(args[1])?.check_init()?;
+                let dest = self.memory.check_ptr_access(dest, size, elem_align)?;
+
+                if let (Some(src), Some(dest)) = (src, dest) {
+                    self.memory.copy(
+                        src,
+                        dest,
+                        size,
+                        intrinsic_name == sym::copy_nonoverlapping,
+                    )?;
+                }
+            }
             sym::offset => {
                 let ptr = self.read_scalar(args[0])?.check_init()?;
                 let offset_count = self.read_scalar(args[1])?.to_machine_isize(self)?;
diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs
index 52921822693..87863ab5c68 100644
--- a/library/core/src/intrinsics.rs
+++ b/library/core/src/intrinsics.rs
@@ -1846,20 +1846,22 @@ pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
 /// [`Vec::append`]: ../../std/vec/struct.Vec.html#method.append
 #[doc(alias = "memcpy")]
 #[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "none")]
 #[inline]
-pub unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
+pub const unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
     extern "rust-intrinsic" {
         fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
     }
 
-    if cfg!(debug_assertions)
+    // FIXME: Perform these checks only at run time
+    /*if cfg!(debug_assertions)
         && !(is_aligned_and_not_null(src)
             && is_aligned_and_not_null(dst)
             && is_nonoverlapping(src, dst, count))
     {
         // Not panicking to keep codegen impact smaller.
         abort();
-    }
+    }*/
 
     // SAFETY: the safety contract for `copy_nonoverlapping` must be
     // upheld by the caller.
@@ -1928,16 +1930,19 @@ pub unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
 /// ```
 #[doc(alias = "memmove")]
 #[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "none")]
 #[inline]
-pub unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
+pub const unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
     extern "rust-intrinsic" {
+        #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "none")]
         fn copy<T>(src: *const T, dst: *mut T, count: usize);
     }
 
-    if cfg!(debug_assertions) && !(is_aligned_and_not_null(src) && is_aligned_and_not_null(dst)) {
+    // FIXME: Perform these checks only at run time
+    /*if cfg!(debug_assertions) && !(is_aligned_and_not_null(src) && is_aligned_and_not_null(dst)) {
         // Not panicking to keep codegen impact smaller.
         abort();
-    }
+    }*/
 
     // SAFETY: the safety contract for `copy` must be upheld by the caller.
     unsafe { copy(src, dst, count) }
-- 
2.44.0
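
Note (not part of the patch): a rough sketch of the kind of code this change is
meant to unlock once the `const_intrinsic_copy` gate can be enabled on a nightly
toolchain. The `copy_four` helper below is purely illustrative, and the exact
set of feature gates required depends on the toolchain in use:

    #![feature(core_intrinsics, const_intrinsic_copy, const_mut_refs)]

    use core::intrinsics::copy_nonoverlapping;

    // Copy four bytes out of `src` into a fresh array, evaluated at compile time.
    const fn copy_four(src: &[u8; 4]) -> [u8; 4] {
        let mut dst = [0u8; 4];
        // SAFETY: both pointers are valid for 4 bytes, properly aligned,
        // and the two buffers do not overlap.
        unsafe {
            copy_nonoverlapping(
                src as *const [u8; 4] as *const u8,
                &mut dst as *mut [u8; 4] as *mut u8,
                4,
            );
        }
        dst
    }

    const BYTES: [u8; 4] = copy_four(b"rust");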