From 2ff14448863ac1a1dd9533461708e29aae170c2d Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Wed, 17 Apr 2024 14:06:31 +0200
Subject: Adding debian version 1.65.0+dfsg1-2.

Signed-off-by: Daniel Baumann
---
 library/std/src/sys/sgx/abi/usercalls/alloc.rs | 165 +++++++++++++++++++++----
 1 file changed, 142 insertions(+), 23 deletions(-)

diff --git a/library/std/src/sys/sgx/abi/usercalls/alloc.rs b/library/std/src/sys/sgx/abi/usercalls/alloc.rs
index ea24fedd0..5409bd177 100644
--- a/library/std/src/sys/sgx/abi/usercalls/alloc.rs
+++ b/library/std/src/sys/sgx/abi/usercalls/alloc.rs
@@ -56,6 +56,8 @@ unsafe impl UserSafeSized for Usercall {}
 #[unstable(feature = "sgx_platform", issue = "56975")]
 unsafe impl UserSafeSized for Return {}
 #[unstable(feature = "sgx_platform", issue = "56975")]
+unsafe impl UserSafeSized for Cancel {}
+#[unstable(feature = "sgx_platform", issue = "56975")]
 unsafe impl<T: UserSafeSized> UserSafeSized for [T; 2] {}
 
 /// A type that can be represented in memory as one or more `UserSafeSized`s.
@@ -115,7 +117,7 @@ pub unsafe trait UserSafe {
     /// * the pointer is null.
     /// * the pointed-to range is not in user memory.
     unsafe fn check_ptr(ptr: *const Self) {
-        let is_aligned = |p| -> bool { 0 == (p as usize) & (Self::align_of() - 1) };
+        let is_aligned = |p: *const u8| -> bool { p.is_aligned_to(Self::align_of()) };
 
         assert!(is_aligned(ptr as *const u8));
         assert!(is_user_range(ptr as _, mem::size_of_val(unsafe { &*ptr })));
@@ -305,6 +307,34 @@ where
     }
 }
 
+// Split a memory region ptr..ptr + len into three parts:
+//   +--------+
+//   | small0 | Chunk smaller than 8 bytes
+//   +--------+
+//   |  big   | Chunk 8-byte aligned, and size a multiple of 8 bytes
+//   +--------+
+//   | small1 | Chunk smaller than 8 bytes
+//   +--------+
+fn region_as_aligned_chunks(ptr: *const u8, len: usize) -> (usize, usize, usize) {
+    let small0_size = if ptr as usize % 8 == 0 { 0 } else { 8 - ptr as usize % 8 };
+    let small1_size = (len - small0_size as usize) % 8;
+    let big_size = len - small0_size as usize - small1_size as usize;
+
+    (small0_size, big_size, small1_size)
+}
+
+unsafe fn copy_quadwords(src: *const u8, dst: *mut u8, len: usize) {
+    unsafe {
+        asm!(
+            "rep movsq (%rsi), (%rdi)",
+            inout("rcx") len / 8 => _,
+            inout("rdi") dst => _,
+            inout("rsi") src => _,
+            options(att_syntax, nostack, preserves_flags)
+        );
+    }
+}
+
 /// Copies `len` bytes of data from enclave pointer `src` to userspace `dst`
 ///
 /// This function mitigates stale data vulnerabilities by ensuring all writes to untrusted memory are either:
@@ -343,17 +373,6 @@ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize)
         }
     }
 
-    unsafe fn copy_aligned_quadwords_to_userspace(src: *const u8, dst: *mut u8, len: usize) {
-        unsafe {
-            asm!(
-                "rep movsq (%rsi), (%rdi)",
-                inout("rcx") len / 8 => _,
-                inout("rdi") dst => _,
-                inout("rsi") src => _,
-                options(att_syntax, nostack, preserves_flags)
-            );
-        }
-    }
     assert!(!src.is_null());
     assert!(!dst.is_null());
     assert!(is_enclave_range(src, len));
@@ -367,10 +386,10 @@ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize)
         unsafe {
             copy_bytewise_to_userspace(src, dst, len);
         }
-    } else if len % 8 == 0 && dst as usize % 8 == 0 {
+    } else if len % 8 == 0 && dst.is_aligned_to(8) {
         // Copying 8-byte aligned quadwords: copy quad word per quad word
         unsafe {
-            copy_aligned_quadwords_to_userspace(src, dst, len);
+            copy_quadwords(src, dst, len);
         }
     } else {
         // Split copies into three parts:
@@ -381,20 +400,16 @@ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize)
         //   +--------+
         //   | small1 | Chunk smaller than 8 bytes
         //   +--------+
+        let (small0_size, big_size, small1_size) = region_as_aligned_chunks(dst, len);
 
         unsafe {
             // Copy small0
-            let small0_size = (8 - dst as usize % 8) as u8;
-            let small0_src = src;
-            let small0_dst = dst;
-            copy_bytewise_to_userspace(small0_src as _, small0_dst, small0_size as _);
+            copy_bytewise_to_userspace(src, dst, small0_size as _);
 
             // Copy big
-            let small1_size = ((len - small0_size as usize) % 8) as u8;
-            let big_size = len - small0_size as usize - small1_size as usize;
             let big_src = src.offset(small0_size as _);
             let big_dst = dst.offset(small0_size as _);
-            copy_aligned_quadwords_to_userspace(big_src as _, big_dst, big_size);
+            copy_quadwords(big_src as _, big_dst, big_size);
 
             // Copy small1
             let small1_src = src.offset(big_size as isize + small0_size as isize);
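Two remarks on the refactor above: `copy_quadwords` becomes a free function so the same `rep movsq` loop can serve both copy directions, and the chunking arithmetic moves into the shared helper `region_as_aligned_chunks`. Note that `len - small0_size` would underflow for a region shorter than its misaligned head, but both callers only reach the split path with `len >= 8`, and the head is at most 7 bytes. As a quick sanity check, the arithmetic can be reproduced in a standalone sketch (the function name and addresses below are illustrative, not part of the patch):

    // Standalone sketch of the arithmetic in `region_as_aligned_chunks`:
    // split `addr .. addr + len` into a head shorter than 8 bytes, an
    // 8-byte-aligned middle whose size is a multiple of 8, and a tail
    // shorter than 8 bytes.
    fn as_aligned_chunks(addr: usize, len: usize) -> (usize, usize, usize) {
        let small0 = if addr % 8 == 0 { 0 } else { 8 - addr % 8 };
        let small1 = (len - small0) % 8;
        let big = len - small0 - small1;
        (small0, big, small1)
    }

    fn main() {
        // Region starting 3 bytes past an 8-byte boundary, 21 bytes long:
        // 5 head bytes reach the next boundary, then 16 aligned bytes, no tail.
        assert_eq!(as_aligned_chunks(0x1003, 21), (5, 16, 0));
        // Aligned start with a ragged end: no head, 16 aligned bytes, 4 tail bytes.
        assert_eq!(as_aligned_chunks(0x1000, 20), (0, 16, 4));
    }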
@@ -404,6 +419,106 @@ pub(crate) unsafe fn copy_to_userspace(src: *const u8, dst: *mut u8, len: usize)
     }
 }
 
+/// Copies `len` bytes of data from userspace pointer `src` to enclave pointer `dst`
+///
+/// This function mitigates AEPIC leak vulnerabilities by ensuring all reads from untrusted memory are 8-byte aligned
+///
+/// # Panics
+/// This function panics if:
+///
+/// * The `src` pointer is null
+/// * The `dst` pointer is null
+/// * The `src` memory range is not in user memory
+/// * The `dst` memory range is not in enclave memory
+///
+/// # References
+///  - https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00657.html
+///  - https://www.intel.com/content/www/us/en/developer/articles/technical/software-security-guidance/advisory-guidance/stale-data-read-from-xapic.html
+pub(crate) unsafe fn copy_from_userspace(src: *const u8, dst: *mut u8, len: usize) {
+    // Copies memory region `src..src + len` to the enclave at `dst`. The source memory region
+    // is:
+    //  - strictly less than 8 bytes in size and may be
+    //  - located at a misaligned memory location
+    fn copy_misaligned_chunk_to_enclave(src: *const u8, dst: *mut u8, len: usize) {
+        let mut tmp_buff = [0u8; 16];
+
+        unsafe {
+            // Compute an aligned memory region to read from
+            // +--------+ <-- aligned_src + aligned_len (8B-aligned)
+            // |  pad1  |
+            // +--------+ <-- src + len (misaligned)
+            // |        |
+            // |        |
+            // |        |
+            // +--------+ <-- src (misaligned)
+            // |  pad0  |
+            // +--------+ <-- aligned_src (8B-aligned)
+            let pad0_size = src as usize % 8;
+            let aligned_src = src.sub(pad0_size);
+
+            let pad1_size = 8 - (src.add(len) as usize % 8);
+            let aligned_len = pad0_size + len + pad1_size;
+
+            debug_assert!(len < 8);
+            debug_assert_eq!(aligned_src as usize % 8, 0);
+            debug_assert_eq!(aligned_len % 8, 0);
+            debug_assert!(aligned_len <= 16);
+
+            // Copy the aligned buffer to a temporary buffer
+            // Note: copying from a slightly different memory location is a bit odd. In this case it
+            // can't lead to page faults or inadvertent copying from the enclave as we only ensured
+            // that the `src` pointer is aligned at an 8 byte boundary. As pages are 4096 bytes
+            // aligned, `aligned_src` must be on the same page as `src`. A similar argument can be made
+            // for `src + len`
+            copy_quadwords(aligned_src as _, tmp_buff.as_mut_ptr(), aligned_len);
+
+            // Copy the correct parts of the temporary buffer to the destination
+            ptr::copy(tmp_buff.as_ptr().add(pad0_size), dst, len);
+        }
+    }
+
+    assert!(!src.is_null());
+    assert!(!dst.is_null());
+    assert!(is_user_range(src, len));
+    assert!(is_enclave_range(dst, len));
+    assert!(!(src as usize).overflowing_add(len + 8).1);
+    assert!(!(dst as usize).overflowing_add(len + 8).1);
+
+    if len < 8 {
+        copy_misaligned_chunk_to_enclave(src, dst, len);
+    } else if len % 8 == 0 && src as usize % 8 == 0 {
+        // Copying 8-byte aligned quadwords: copy quad word per quad word
+        unsafe {
+            copy_quadwords(src, dst, len);
+        }
+    } else {
+        // Split copies into three parts:
+        //   +--------+
+        //   | small0 | Chunk smaller than 8 bytes
+        //   +--------+
+        //   |  big   | Chunk 8-byte aligned, and size a multiple of 8 bytes
+        //   +--------+
+        //   | small1 | Chunk smaller than 8 bytes
+        //   +--------+
+        let (small0_size, big_size, small1_size) = region_as_aligned_chunks(dst, len);
+
+        unsafe {
+            // Copy small0
+            copy_misaligned_chunk_to_enclave(src, dst, small0_size);
+
+            // Copy big
+            let big_src = src.add(small0_size);
+            let big_dst = dst.add(small0_size);
+            copy_quadwords(big_src, big_dst, big_size);
+
+            // Copy small1
+            let small1_src = src.add(big_size + small0_size);
+            let small1_dst = dst.add(big_size + small0_size);
+            copy_misaligned_chunk_to_enclave(small1_src, small1_dst, small1_size);
+        }
+    }
+}
+
 #[unstable(feature = "sgx_platform", issue = "56975")]
 impl<T: ?Sized> UserRef<T>
 where
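The `copy_misaligned_chunk_to_enclave` helper widens a misaligned read of fewer than 8 bytes to an aligned window: `pad0` steps back to the previous 8-byte boundary and `pad1` steps forward to the next one. Since `pad0 <= 7` and `len <= 7`, the widened length never exceeds 16, which is why the 16-byte `tmp_buff` always suffices (when `src + len` is already aligned, `pad1` is a full 8 bytes, but `pad0 + len` is then a multiple of 8 and hence at most 8). The padding arithmetic can be checked with a standalone sketch (illustrative name and addresses, not part of the patch):

    // Standalone sketch of the padding arithmetic in
    // `copy_misaligned_chunk_to_enclave`: returns the aligned window
    // (start, length) covering `src .. src + len` for len < 8.
    fn aligned_window(src: usize, len: usize) -> (usize, usize) {
        assert!(len < 8);
        let pad0 = src % 8;             // distance back to the previous 8-byte boundary
        let pad1 = 8 - (src + len) % 8; // distance forward to the next boundary
        (src - pad0, pad0 + len + pad1)
    }

    fn main() {
        // 4 bytes starting 3 past a boundary: one quadword covers the range.
        assert_eq!(aligned_window(0x1003, 4), (0x1000, 8));
        // 7 bytes starting 6 past a boundary: the window spans two quadwords.
        assert_eq!(aligned_window(0x1006, 7), (0x1000, 16));
        // End already aligned: pad1 is a full 8 bytes, still within 16.
        assert_eq!(aligned_window(0x1003, 5), (0x1000, 16));
    }

One detail worth noting in the three-way split of `copy_from_userspace` itself: the chunk sizes are computed from `dst`, so the `copy_quadwords` call on `big_src` only performs 8-byte-aligned reads when `src` and `dst` happen to share alignment modulo 8; the misaligned head and tail are always handled safely by the helper above.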
@@ -468,7 +583,7 @@ where
     pub fn copy_to_enclave(&self, dest: &mut T) {
         unsafe {
             assert_eq!(mem::size_of_val(dest), mem::size_of_val(&*self.0.get()));
-            ptr::copy(
+            copy_from_userspace(
                 self.0.get() as *const T as *const u8,
                 dest as *mut T as *mut u8,
                 mem::size_of_val(dest),
@@ -494,7 +609,11 @@ where
 {
     /// Copies the value from user memory into enclave memory.
     pub fn to_enclave(&self) -> T {
-        unsafe { ptr::read(self.0.get()) }
+        unsafe {
+            let mut data: T = mem::MaybeUninit::uninit().assume_init();
+            copy_from_userspace(self.0.get() as _, &mut data as *mut T as _, mem::size_of::<T>());
+            data
+        }
     }
 }
-- 
cgit v1.2.3
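One subtlety in the final hunk: `mem::MaybeUninit::uninit().assume_init()` materializes an uninitialized `T` before the copy has run, which the `MaybeUninit` documentation treats as undefined behavior for most types. A shape that keeps the value inside `MaybeUninit<T>` until `copy_from_userspace` has fully initialized it would look roughly like the sketch below (hypothetical rewrite, not what this patch ships):

    // Hypothetical alternative body for `UserRef::to_enclave`: copy into a
    // MaybeUninit<T> and only call `assume_init` once the bytes are written.
    pub fn to_enclave(&self) -> T {
        unsafe {
            let mut data = mem::MaybeUninit::<T>::uninit();
            copy_from_userspace(self.0.get() as _, data.as_mut_ptr() as _, mem::size_of::<T>());
            data.assume_init()
        }
    }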