4 use crate::cell::UnsafeCell;
6 use crate::convert::TryInto;
8 use crate::ops::{CoerceUnsized, Deref, DerefMut, Index, IndexMut};
9 use crate::ptr::{self, NonNull};
11 use crate::slice::SliceIndex;
13 use super::super::mem::{is_enclave_range, is_user_range};
14 use fortanix_sgx_abi::*;
16 /// A type that can be safely read from or written to userspace.
18 /// Non-exhaustive list of specific requirements for reading and writing:
19 /// * **Type is `Copy`** (and therefore also not `Drop`). Copies will be
20 /// created when copying from/to userspace. Destructors will not be called.
21 /// * **No references or Rust-style owned pointers** (`Vec`, `Arc`, etc.). When
22 /// reading from userspace, references into enclave memory must not be
23 /// created. Also, only enclave memory is considered managed by the Rust
24 /// compiler's static analysis. When reading from userspace, there can be no
25 /// guarantee that the value correctly adheres to the expectations of the
26 /// type. When writing to userspace, memory addresses of data in enclave
27 /// memory must not be leaked for confidentiality reasons. `User` and
28 /// `UserRef` are also not allowed for the same reasons.
29 /// * **No fat pointers.** When reading from userspace, the size or vtable
30 /// pointer could be automatically interpreted and used by the code. When
31 /// writing to userspace, memory addresses of data in enclave memory (such
32 /// as vtable pointers) must not be leaked for confidentiality reasons.
34 /// Non-exhaustive list of specific requirements for reading from userspace:
35 /// * **Any bit pattern is valid** for this type (no `enum`s). There can be no
36 /// guarantee that the value correctly adheres to the expectations of the
37 /// type, so any value must be valid for this type.
39 /// Non-exhaustive list of specific requirements for writing to userspace:
40 /// * **No pointers to enclave memory.** Memory addresses of data in enclave
41 /// memory must not be leaked for confidentiality reasons.
42 /// * **No internal padding.** Padding might contain previously-initialized
43 /// secret data stored at that memory location and must not be leaked for
44 /// confidentiality reasons.
// Marker trait: an implementation is a promise that the type satisfies the
// read/write requirements documented above (Copy, no pointers, no internal
// padding, any bit pattern valid).
#[unstable(feature = "sgx_platform", issue = "56975")]
pub unsafe trait UserSafeSized: Copy + Sized {}
// SAFETY: `u8` is `Copy`, has no padding or pointers, and every bit pattern is valid.
#[unstable(feature = "sgx_platform", issue = "56975")]
unsafe impl UserSafeSized for u8 {}
// SAFETY(review): `FifoDescriptor<T>` and the ABI types below come from
// `fortanix_sgx_abi`; they are assumed to satisfy the `UserSafeSized`
// requirements (no Rust-managed pointers, no padding, any bit pattern
// valid) — confirm against their definitions in that crate.
#[unstable(feature = "sgx_platform", issue = "56975")]
unsafe impl<T> UserSafeSized for FifoDescriptor<T> {}
#[unstable(feature = "sgx_platform", issue = "56975")]
unsafe impl UserSafeSized for ByteBuffer {}
#[unstable(feature = "sgx_platform", issue = "56975")]
unsafe impl UserSafeSized for Usercall {}
#[unstable(feature = "sgx_platform", issue = "56975")]
unsafe impl UserSafeSized for Return {}
#[unstable(feature = "sgx_platform", issue = "56975")]
unsafe impl UserSafeSized for Cancel {}
// SAFETY: arrays are laid out element-contiguously with no extra padding,
// so a pair of `UserSafeSized` elements inherits the property.
#[unstable(feature = "sgx_platform", issue = "56975")]
unsafe impl<T: UserSafeSized> UserSafeSized for [T; 2] {}
/// A type that can be represented in memory as one or more `UserSafeSized`s.
#[unstable(feature = "sgx_platform", issue = "56975")]
pub unsafe trait UserSafe {
/// Equivalent to `mem::align_of::<Self>`.
fn align_of() -> usize;
/// Construct a pointer to `Self` given a memory range in user space.
/// N.B., this takes a size, not a length!
/// The caller must ensure the memory range is in user memory, is the
/// correct size and is correctly aligned and points to the right type.
unsafe fn from_raw_sized_unchecked(ptr: *mut u8, size: usize) -> *mut Self;
/// Construct a pointer to `Self` given a memory range.
/// N.B., this takes a size, not a length!
/// The caller must ensure the memory range points to the correct type.
/// This function panics if:
/// * the pointer is not aligned.
/// * the pointer is null.
/// * the pointed-to range does not fit in the address space.
/// * the pointed-to range is not in user memory.
unsafe fn from_raw_sized(ptr: *mut u8, size: usize) -> NonNull<Self> {
// Reject ranges that would wrap around the end of the address space.
assert!(ptr.wrapping_add(size) >= ptr);
// SAFETY: The caller has guaranteed the pointer is valid
let ret = unsafe { Self::from_raw_sized_unchecked(ptr, size) };
// Panics if `ret` is misaligned, null, or not entirely in user memory.
Self::check_ptr(ret);
NonNull::new_unchecked(ret as _)
/// Checks if a pointer may point to `Self` in user memory.
/// The caller must ensure the memory range points to the correct type and
/// length (if this is a slice).
/// This function panics if:
/// * the pointer is not aligned.
/// * the pointer is null.
/// * the pointed-to range is not in user memory.
unsafe fn check_ptr(ptr: *const Self) {
let is_aligned = |p: *const u8| -> bool { p.is_aligned_to(Self::align_of()) };
assert!(is_aligned(ptr as *const u8));
// NOTE(review): `&*ptr` here is evaluated before the null check on the
// following line — confirm this ordering is intentional.
assert!(is_user_range(ptr as _, mem::size_of_val(unsafe { &*ptr })));
assert!(!ptr.is_null());
// Any `UserSafeSized` (sized, fixed-layout) type is `UserSafe`: the whole
// range must correspond to exactly one `T`.
#[unstable(feature = "sgx_platform", issue = "56975")]
unsafe impl<T: UserSafeSized> UserSafe for T {
fn align_of() -> usize {
unsafe fn from_raw_sized_unchecked(ptr: *mut u8, size: usize) -> *mut Self {
// A sized value occupies exactly `size_of::<T>()` bytes; anything else
// is a caller bug.
assert_eq!(size, mem::size_of::<T>());
// Slices of `UserSafeSized` elements are `UserSafe`: the range is interpreted
// as `size / size_of::<T>()` contiguous elements.
#[unstable(feature = "sgx_platform", issue = "56975")]
unsafe impl<T: UserSafeSized> UserSafe for [T] {
fn align_of() -> usize {
/// Behavior is undefined if any of these conditions are violated:
/// * `ptr` must be [valid] for writes of `size` many bytes, and it must be
/// properly aligned.
/// [valid]: core::ptr#safety
/// This function panics if:
/// * the element size is not a factor of the size
unsafe fn from_raw_sized_unchecked(ptr: *mut u8, size: usize) -> *mut Self {
let elem_size = mem::size_of::<T>();
// NOTE(review): if `T` were a ZST, `% 0` would panic here — presumably no
// ZST implements `UserSafeSized`; confirm.
assert_eq!(size % elem_size, 0);
let len = size / elem_size;
// SAFETY: The caller must uphold the safety contract for `from_raw_sized_unchecked`
unsafe { slice::from_raw_parts_mut(ptr as _, len) }
/// A reference to some type in userspace memory. `&UserRef<T>` is equivalent
/// to `&T` in enclave memory. Access to the memory is only allowed by copying
/// to avoid TOCTTOU issues. After copying, code should make sure to completely
/// check the value before use.
/// It is also possible to obtain a mutable reference `&mut UserRef<T>`. Unlike
/// regular mutable references, these are not exclusive. Userspace may always
/// write to the backing memory at any time, so it can't be assumed that
/// the pointed-to memory is uniquely borrowed. The two different reference types
/// are used solely to indicate intent: a mutable reference is for writing to
/// user memory, an immutable reference for reading from user memory.
#[unstable(feature = "sgx_platform", issue = "56975")]
// `UnsafeCell`: userspace can mutate the backing memory at any time, so all
// access must go through raw pointers (`.get()`), never a plain `&T`.
pub struct UserRef<T: ?Sized>(UnsafeCell<T>);
/// An owned type in userspace memory. `User<T>` is equivalent to `Box<T>` in
/// enclave memory. Access to the memory is only allowed by copying to avoid
/// TOCTTOU issues. The user memory will be freed when the value is dropped.
/// After copying, code should make sure to completely check the value before
/// use.
#[unstable(feature = "sgx_platform", issue = "56975")]
pub struct User<T: UserSafe + ?Sized>(NonNull<UserRef<T>>);
/// Internal helper: conversion of a raw-pointer-like value into a
/// `NonNull<UserRef<T>>`.
///
/// # Safety
/// The caller must pass a valid, non-null pointer to user memory.
trait NewUserRef<T: ?Sized> {
unsafe fn new_userref(v: T) -> Self;
impl<T: ?Sized> NewUserRef<*mut T> for NonNull<UserRef<T>> {
unsafe fn new_userref(v: *mut T) -> Self {
// SAFETY: The caller has guaranteed the pointer is valid
unsafe { NonNull::new_unchecked(v as _) }
impl<T: ?Sized> NewUserRef<NonNull<T>> for NonNull<UserRef<T>> {
unsafe fn new_userref(v: NonNull<T>) -> Self {
// SAFETY: The caller has guaranteed the pointer is valid
unsafe { NonNull::new_userref(v.as_ptr()) }
#[unstable(feature = "sgx_platform", issue = "56975")]
impl<T: ?Sized> User<T>
// This function returns memory that is practically uninitialized, but is
// not considered "unspecified" or "undefined" for purposes of an
// optimizing compiler. This is achieved by returning a pointer obtained
// from outside via `super::alloc`.
fn new_uninit_bytes(size: usize) -> Self {
// Mustn't call alloc with size 0.
let ptr = if size > 0 {
// `copy_to_userspace` is more efficient when data is 8-byte aligned
let alignment = cmp::max(T::align_of(), 8);
rtunwrap!(Ok, super::alloc(size, alignment)) as _
T::align_of() as _ // dangling pointer ok for size 0
// `from_raw_sized` panics on a bad pointer; turn that panic into a hard
// abort since it means the alloc() usercall returned garbage.
if let Ok(v) = crate::panic::catch_unwind(|| T::from_raw_sized(ptr, size)) {
User(NonNull::new_userref(v))
rtabort!("Got invalid pointer from alloc() usercall")
/// Copies `val` into freshly allocated space in user memory.
pub fn new_from_enclave(val: &T) -> Self {
// Allocate exactly `size_of_val(val)` bytes, then copy the value out.
let mut user = Self::new_uninit_bytes(mem::size_of_val(val));
user.copy_from_enclave(val);
/// Creates an owned `User<T>` from a raw pointer.
/// The caller must ensure `ptr` points to `T`, is freeable with the `free`
/// usercall and the alignment of `T`, and is uniquely owned.
/// This function panics if:
/// * The pointer is not aligned
/// * The pointer is null
/// * The pointed-to range is not in user memory
pub unsafe fn from_raw(ptr: *mut T) -> Self {
// SAFETY: the caller must uphold the safety contract for `from_raw`.
// `check_ptr` enforces the alignment / null / user-range panics above.
unsafe { T::check_ptr(ptr) };
User(unsafe { NonNull::new_userref(ptr) })
/// Converts this value into a raw pointer. The value will no longer be
/// automatically freed.
///
/// The caller becomes responsible for the memory; presumably it should
/// eventually be released via the `free` usercall or re-wrapped with
/// `User::from_raw` — confirm with callers.
pub fn into_raw(self) -> *mut T {
#[unstable(feature = "sgx_platform", issue = "56975")]
/// Allocate space for `T` in user memory.
///
/// The contents are not initialized (see `new_uninit_bytes`); copy data in
/// before reading.
pub fn uninitialized() -> Self {
Self::new_uninit_bytes(mem::size_of::<T>())
279 #[unstable(feature = "sgx_platform", issue = "56975")]
284 /// Allocate space for a `[T]` of `n` elements in user memory.
285 pub fn uninitialized(n: usize) -> Self {
286 Self::new_uninit_bytes(n * mem::size_of::<T>())
/// Creates an owned `User<[T]>` from a raw thin pointer and a slice length.
/// The caller must ensure `ptr` points to `len` elements of `T`, is
/// freeable with the `free` usercall and the alignment of `T`, and is
/// uniquely owned.
/// This function panics if:
/// * The pointer is not aligned
/// * The pointer is null
/// * The pointed-to range does not fit in the address space
/// * The pointed-to range is not in user memory
pub unsafe fn from_raw_parts(ptr: *mut T, len: usize) -> Self {
// NOTE(review): `len * size_of::<T>()` is an unchecked multiply; the
// wrap-around assert inside `from_raw_sized` runs only afterwards — confirm.
NonNull::new_userref(<[T]>::from_raw_sized(ptr as _, len * mem::size_of::<T>()))
// Split a memory region ptr..ptr + len into three parts:
//   +--------+ <-- ptr
//   | small0 | Chunk smaller than 8 bytes, up to the next 8-byte boundary
//   +--------+
//   |   big  | Chunk 8-byte aligned, and size a multiple of 8 bytes
//   +--------+
//   | small1 | Chunk smaller than 8 bytes
//   +--------+ <-- ptr + len
fn region_as_aligned_chunks(ptr: *const u8, len: usize) -> (usize, usize, usize) {
    // Bytes needed to reach the next 8-byte boundary (0 if already aligned).
    let misalignment = ptr as usize % 8;
    let small0_size = match misalignment {
        0 => 0,
        m => 8 - m,
    };
    // Whatever is left over after the aligned middle forms the sub-8-byte tail.
    let small1_size = (len - small0_size) % 8;
    let big_size = len - small0_size - small1_size;

    (small0_size, big_size, small1_size)
}
/// Copies `len / 8` quadwords from `src` to `dst` with `rep movsq`.
///
/// `rcx` is loaded with `len / 8`, so any trailing `len % 8` bytes are NOT
/// copied — callers pass sizes that are multiples of 8.
///
/// # Safety
/// `src`/`dst` must be valid for reads/writes of `len / 8 * 8` bytes.
unsafe fn copy_quadwords(src: *const u8, dst: *mut u8, len: usize) {
"rep movsq (%rsi), (%rdi)",
inout("rcx") len / 8 => _,
inout("rdi") dst => _,
inout("rsi") src => _,
options(att_syntax, nostack, preserves_flags)
/// Copies `len` bytes of data from enclave pointer `src` to userspace `dst`
/// This function mitigates stale data vulnerabilities by ensuring all writes to untrusted memory are either:
/// - preceded by the VERW instruction and followed by the MFENCE; LFENCE instruction sequence
/// - or are in multiples of 8 bytes, aligned to an 8-byte boundary
/// This function panics if:
/// * The `src` pointer is null
/// * The `dst` pointer is null
/// * The `src` memory range is not in enclave memory
/// * The `dst` memory range is not in user memory
/// - https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00615.html
/// - https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/technical-documentation/processor-mmio-stale-data-vulnerabilities.html#inpage-nav-3-2-2
pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
// Byte-per-byte fallback: each store is bracketed by the VERW / MFENCE;
// LFENCE sequence described in the doc comment above.
unsafe fn copy_bytewise_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
let mut seg_sel: u16 = 0;
val = in(reg_byte) *src.add(off),
dst = in(reg) dst.add(off),
seg_sel = in(reg) &mut seg_sel,
options(nostack, att_syntax)
assert!(!src.is_null());
assert!(!dst.is_null());
assert!(is_enclave_range(src, len));
assert!(is_user_range(dst, len));
assert!(len < isize::MAX as usize);
assert!(!src.addr().overflowing_add(len).1);
assert!(!dst.addr().overflowing_add(len).1);
// Can't align on 8 byte boundary: copy safely byte per byte
copy_bytewise_to_userspace(src, dst, len);
} else if len % 8 == 0 && dst.is_aligned_to(8) {
// Copying 8-byte aligned quadwords: copy quad word per quad word
copy_quadwords(src, dst, len);
// Split copies into three parts:
// | small0 | Chunk smaller than 8 bytes
// | big | Chunk 8-byte aligned, and size a multiple of 8 bytes
// | small1 | Chunk smaller than 8 bytes
// Split on `dst`: the mitigation constrains the *writes to user memory*,
// so the aligned middle chunk must be 8-byte aligned on the `dst` side.
let (small0_size, big_size, small1_size) = region_as_aligned_chunks(dst, len);
copy_bytewise_to_userspace(src, dst, small0_size);
let big_src = src.add(small0_size);
let big_dst = dst.add(small0_size);
copy_quadwords(big_src, big_dst, big_size);
let small1_src = src.add(big_size + small0_size);
let small1_dst = dst.add(big_size + small0_size);
copy_bytewise_to_userspace(small1_src, small1_dst, small1_size);
422 /// Copies `len` bytes of data from userspace pointer `src` to enclave pointer `dst`
424 /// This function mitigates AEPIC leak vulnerabilities by ensuring all reads from untrusted memory are 8-byte aligned
427 /// This function panics if:
429 /// * The `src` pointer is null
430 /// * The `dst` pointer is null
431 /// * The `src` memory range is not in user memory
432 /// * The `dst` memory range is not in enclave memory
435 /// - https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00657.html
436 /// - https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/advisory-guidance/stale-data-read-from-xapic.html
437 pub(crate) unsafe fn copy_from_userspace(src: *const u8, dst: *mut u8, len: usize) {
438 // Copies memory region `src..src + len` to the enclave at `dst`. The source memory region
440 // - strictly less than 8 bytes in size and may be
441 // - located at a misaligned memory location
442 fn copy_misaligned_chunk_to_enclave(src: *const u8, dst: *mut u8, len: usize) {
443 let mut tmp_buff = [0u8; 16];
446 // Compute an aligned memory region to read from
447 // +--------+ <-- aligned_src + aligned_len (8B-aligned)
449 // +--------+ <-- src + len (misaligned)
453 // +--------+ <-- src (misaligned)
455 // +--------+ <-- aligned_src (8B-aligned)
456 let pad0_size = src as usize % 8;
457 let aligned_src = src.sub(pad0_size);
459 let pad1_size = 8 - (src.add(len) as usize % 8);
460 let aligned_len = pad0_size + len + pad1_size;
462 debug_assert!(len < 8);
463 debug_assert_eq!(aligned_src as usize % 8, 0);
464 debug_assert_eq!(aligned_len % 8, 0);
465 debug_assert!(aligned_len <= 16);
467 // Copy the aligned buffer to a temporary buffer
468 // Note: copying from a slightly different memory location is a bit odd. In this case it
469 // can't lead to page faults or inadvertent copying from the enclave as we only ensured
470 // that the `src` pointer is aligned at an 8 byte boundary. As pages are 4096 bytes
471 // aligned, `aligned_src` must be on the same page as `src`. A similar argument can be made
473 copy_quadwords(aligned_src as _, tmp_buff.as_mut_ptr(), aligned_len);
475 // Copy the correct parts of the temporary buffer to the destination
476 ptr::copy(tmp_buff.as_ptr().add(pad0_size), dst, len);
480 assert!(!src.is_null());
481 assert!(!dst.is_null());
482 assert!(is_user_range(src, len));
483 assert!(is_enclave_range(dst, len));
484 assert!(!(src as usize).overflowing_add(len + 8).1);
485 assert!(!(dst as usize).overflowing_add(len + 8).1);
488 copy_misaligned_chunk_to_enclave(src, dst, len);
489 } else if len % 8 == 0 && src as usize % 8 == 0 {
490 // Copying 8-byte aligned quadwords: copy quad word per quad word
492 copy_quadwords(src, dst, len);
495 // Split copies into three parts:
497 // | small0 | Chunk smaller than 8 bytes
499 // | big | Chunk 8-byte aligned, and size a multiple of 8 bytes
501 // | small1 | Chunk smaller than 8 bytes
503 let (small0_size, big_size, small1_size) = region_as_aligned_chunks(dst, len);
507 copy_misaligned_chunk_to_enclave(src, dst, small0_size);
510 let big_src = src.add(small0_size);
511 let big_dst = dst.add(small0_size);
512 copy_quadwords(big_src, big_dst, big_size);
515 let small1_src = src.add(big_size + small0_size);
516 let small1_dst = dst.add(big_size + small0_size);
517 copy_misaligned_chunk_to_enclave(small1_src, small1_dst, small1_size);
#[unstable(feature = "sgx_platform", issue = "56975")]
impl<T: ?Sized> UserRef<T>
/// Creates a `&UserRef<T>` from a raw pointer.
/// The caller must ensure `ptr` points to `T`.
/// This function panics if:
/// * The pointer is not aligned
/// * The pointer is null
/// * The pointed-to range is not in user memory
pub unsafe fn from_ptr<'a>(ptr: *const T) -> &'a Self {
// SAFETY: The caller must uphold the safety contract for `from_ptr`.
unsafe { T::check_ptr(ptr) };
// `UserRef<T>` is a transparent-style wrapper over the pointee, so the
// pointer cast reinterprets the same memory.
unsafe { &*(ptr as *const Self) }
/// Creates a `&mut UserRef<T>` from a raw pointer. See the struct
/// documentation for the nuances regarding a `&mut UserRef<T>`.
/// The caller must ensure `ptr` points to `T`.
/// This function panics if:
/// * The pointer is not aligned
/// * The pointer is null
/// * The pointed-to range is not in user memory
pub unsafe fn from_mut_ptr<'a>(ptr: *mut T) -> &'a mut Self {
// SAFETY: The caller must uphold the safety contract for `from_mut_ptr`.
unsafe { T::check_ptr(ptr) };
unsafe { &mut *(ptr as *mut Self) }
/// Copies `val` into user memory.
/// This function panics if the destination doesn't have the same size as
/// the source. This can happen for dynamically-sized types such as slices.
pub fn copy_from_enclave(&mut self, val: &T) {
assert_eq!(mem::size_of_val(val), mem::size_of_val(&*self.0.get()));
// Arguments: src (enclave), dst (user), byte count.
val as *const T as *const u8,
self.0.get() as *mut T as *mut u8,
mem::size_of_val(val),
/// Copies the value from user memory and place it into `dest`.
/// This function panics if the destination doesn't have the same size as
/// the source. This can happen for dynamically-sized types such as slices.
pub fn copy_to_enclave(&self, dest: &mut T) {
assert_eq!(mem::size_of_val(dest), mem::size_of_val(&*self.0.get()));
// Arguments: src (user), dst (enclave), byte count.
self.0.get() as *const T as *const u8,
dest as *mut T as *mut u8,
mem::size_of_val(dest),
/// Obtain a raw pointer from this reference.
pub fn as_raw_ptr(&self) -> *const T {
self as *const _ as _
/// Obtain a raw pointer from this reference.
pub fn as_raw_mut_ptr(&mut self) -> *mut T {
605 #[unstable(feature = "sgx_platform", issue = "56975")]
610 /// Copies the value from user memory into enclave memory.
611 pub fn to_enclave(&self) -> T {
613 let mut data: T = mem::MaybeUninit::uninit().assume_init();
614 copy_from_userspace(self.0.get() as _, &mut data as *mut T as _, mem::size_of::<T>());
#[unstable(feature = "sgx_platform", issue = "56975")]
/// Creates a `&UserRef<[T]>` from a raw thin pointer and a slice length.
/// The caller must ensure `ptr` points to `n` elements of `T`.
/// This function panics if:
/// * The pointer is not aligned
/// * The pointer is null
/// * The pointed-to range does not fit in the address space
/// * The pointed-to range is not in user memory
pub unsafe fn from_raw_parts<'a>(ptr: *const T, len: usize) -> &'a Self {
// SAFETY: The caller must uphold the safety contract for `from_raw_parts`.
// NOTE(review): `len * size_of::<T>()` is an unchecked multiply — confirm
// callers cannot overflow it.
&*(<[T]>::from_raw_sized(ptr as _, len * mem::size_of::<T>()).as_ptr() as *const Self)
/// Creates a `&mut UserRef<[T]>` from a raw thin pointer and a slice length.
/// See the struct documentation for the nuances regarding a
/// `&mut UserRef<T>`.
/// The caller must ensure `ptr` points to `n` elements of `T`.
/// This function panics if:
/// * The pointer is not aligned
/// * The pointer is null
/// * The pointed-to range does not fit in the address space
/// * The pointed-to range is not in user memory
pub unsafe fn from_raw_parts_mut<'a>(ptr: *mut T, len: usize) -> &'a mut Self {
// SAFETY: The caller must uphold the safety contract for `from_raw_parts_mut`.
&mut *(<[T]>::from_raw_sized(ptr as _, len * mem::size_of::<T>()).as_ptr() as *mut Self)
/// Obtain a raw pointer to the first element of this user slice.
pub fn as_ptr(&self) -> *const T {
/// Obtain a raw pointer to the first element of this user slice.
pub fn as_mut_ptr(&mut self) -> *mut T {
/// Obtain the number of elements in this user slice.
pub fn len(&self) -> usize {
// Reads only the fat-pointer metadata; the user memory itself is not accessed.
unsafe { (*self.0.get()).len() }
680 /// Copies the value from user memory and place it into `dest`. Afterwards,
681 /// `dest` will contain exactly `self.len()` elements.
684 /// This function panics if the destination doesn't have the same size as
685 /// the source. This can happen for dynamically-sized types such as slices.
686 pub fn copy_to_enclave_vec(&self, dest: &mut Vec<T>) {
687 if let Some(missing) = self.len().checked_sub(dest.capacity()) {
688 dest.reserve(missing)
690 // SAFETY: We reserve enough space above.
691 unsafe { dest.set_len(self.len()) };
692 self.copy_to_enclave(&mut dest[..]);
/// Copies the value from user memory into a vector in enclave memory.
pub fn to_enclave(&self) -> Vec<T> {
// Pre-size the vector so `copy_to_enclave_vec` need not reallocate.
let mut ret = Vec::with_capacity(self.len());
self.copy_to_enclave_vec(&mut ret);
/// Returns an iterator over the slice.
pub fn iter(&self) -> Iter<'_, T>
T: UserSafe, // FIXME: should be implied by [T]: UserSafe?
unsafe { Iter((&*self.as_raw_ptr()).iter()) }
/// Returns an iterator that allows modifying each value.
pub fn iter_mut(&mut self) -> IterMut<'_, T>
T: UserSafe, // FIXME: should be implied by [T]: UserSafe?
unsafe { IterMut((&mut *self.as_raw_mut_ptr()).iter_mut()) }
/// Immutable user slice iterator
/// This struct is created by the `iter` method on `UserRef<[T]>`.
#[unstable(feature = "sgx_platform", issue = "56975")]
// Wraps a plain slice iterator; each yielded element is re-wrapped as
// `&UserRef<T>` so access still goes through the copying API.
pub struct Iter<'a, T: 'a + UserSafe>(slice::Iter<'a, T>);
#[unstable(feature = "sgx_platform", issue = "56975")]
impl<'a, T: UserSafe> Iterator for Iter<'a, T> {
type Item = &'a UserRef<T>;
fn next(&mut self) -> Option<Self::Item> {
unsafe { self.0.next().map(|e| UserRef::from_ptr(e)) }
/// Mutable user slice iterator
/// This struct is created by the `iter_mut` method on `UserRef<[T]>`.
#[unstable(feature = "sgx_platform", issue = "56975")]
pub struct IterMut<'a, T: 'a + UserSafe>(slice::IterMut<'a, T>);
#[unstable(feature = "sgx_platform", issue = "56975")]
impl<'a, T: UserSafe> Iterator for IterMut<'a, T> {
type Item = &'a mut UserRef<T>;
fn next(&mut self) -> Option<Self::Item> {
unsafe { self.0.next().map(|e| UserRef::from_mut_ptr(e)) }
#[unstable(feature = "sgx_platform", issue = "56975")]
// `User<T>` derefs to `UserRef<T>`, mirroring how `Box<T>` derefs to `T`.
impl<T: ?Sized> Deref for User<T>
type Target = UserRef<T>;
fn deref(&self) -> &Self::Target {
unsafe { &*self.0.as_ptr() }
#[unstable(feature = "sgx_platform", issue = "56975")]
impl<T: ?Sized> DerefMut for User<T>
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { &mut *self.0.as_ptr() }
#[unstable(feature = "sgx_platform", issue = "56975")]
// Owned user memory is released through the `free` usercall on drop; no
// destructor runs for the contents (`UserSafeSized` types are `Copy`).
impl<T: ?Sized> Drop for User<T>
let ptr = (*self.0.as_ptr()).0.get();
super::free(ptr as _, mem::size_of_val(&mut *ptr), T::align_of());
#[unstable(feature = "sgx_platform", issue = "56975")]
// Allows e.g. `&UserRef<[T; N]>` to coerce to `&UserRef<[T]>`.
impl<T: CoerceUnsized<U>, U> CoerceUnsized<UserRef<U>> for UserRef<T> {}
#[unstable(feature = "sgx_platform", issue = "56975")]
impl<T, I> Index<I> for UserRef<[T]>
type Output = UserRef<I::Output>;
fn index(&self, index: I) -> &UserRef<I::Output> {
if let Some(slice) = index.get(&*self.as_raw_ptr()) {
UserRef::from_ptr(slice)
// NOTE(review): uses `rtabort!` rather than a normal panic — presumably
// deliberate for the SGX runtime; confirm before changing.
rtabort!("index out of range for user slice");
#[unstable(feature = "sgx_platform", issue = "56975")]
impl<T, I> IndexMut<I> for UserRef<[T]>
fn index_mut(&mut self, index: I) -> &mut UserRef<I::Output> {
if let Some(slice) = index.get_mut(&mut *self.as_raw_mut_ptr()) {
UserRef::from_mut_ptr(slice)
rtabort!("index out of range for user slice");
#[unstable(feature = "sgx_platform", issue = "56975")]
impl UserRef<super::raw::ByteBuffer> {
/// Copies the user memory range pointed to by the user `ByteBuffer` to
/// enclave memory.
/// This function panics if, in the user `ByteBuffer`:
/// * The pointer is null
/// * The pointed-to range does not fit in the address space
/// * The pointed-to range is not in user memory
pub fn copy_user_buffer(&self) -> Vec<u8> {
// First copy the (untrusted) descriptor itself into the enclave, then
// copy the buffer it points at.
let buf = self.to_enclave();
User::from_raw_parts(buf.data as _, buf.len).to_enclave()
// Mustn't look at `data` or call `free` if `len` is `0`.
Vec::with_capacity(0)