author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-17 12:02:58 +0000
commit    698f8c2f01ea549d77d7dc3338a12e04c11057b9 (patch)
tree      173a775858bd501c378080a10dca74132f05bc50 /library/alloc/tests/boxed.rs
parent    Initial commit. (diff)
Adding upstream version 1.64.0+dfsg1. (tag: upstream/1.64.0+dfsg1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'library/alloc/tests/boxed.rs')
 -rw-r--r--  library/alloc/tests/boxed.rs | 167
 1 file changed, 167 insertions(+), 0 deletions(-)
diff --git a/library/alloc/tests/boxed.rs b/library/alloc/tests/boxed.rs
new file mode 100644
index 000000000..9e5123be9
--- /dev/null
+++ b/library/alloc/tests/boxed.rs
@@ -0,0 +1,167 @@
+use core::alloc::{AllocError, Allocator, Layout};
+use core::cell::Cell;
+use core::mem::MaybeUninit;
+use core::ptr::NonNull;
+
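+// Zero-sized values and zero-length slices must not allocate: every such box
+// should sit at the type's dangling, well-aligned pointer.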
+#[test]
+fn uninitialized_zero_size_box() {
+    assert_eq!(
+        &*Box::<()>::new_uninit() as *const _,
+        NonNull::<MaybeUninit<()>>::dangling().as_ptr(),
+    );
+    assert_eq!(
+        Box::<[()]>::new_uninit_slice(4).as_ptr(),
+        NonNull::<MaybeUninit<()>>::dangling().as_ptr(),
+    );
+    assert_eq!(
+        Box::<[String]>::new_uninit_slice(0).as_ptr(),
+        NonNull::<MaybeUninit<String>>::dangling().as_ptr(),
+    );
+}
+
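+// Non-`Copy` one-byte payload used by the clone tests below.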
+#[derive(Clone, PartialEq, Eq, Debug)]
+struct Dummy {
+    _data: u8,
+}
+
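+// `clone` and `clone_from` must be observably equivalent: after
+// `copy.clone_from(&control)`, `copy` must equal a fresh `control.clone()`.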
+#[test]
+fn box_clone_and_clone_from_equivalence() {
+    for size in (0..8).map(|i| 2usize.pow(i)) {
+        let control = vec![Dummy { _data: 42 }; size].into_boxed_slice();
+        let clone = control.clone();
+        let mut copy = vec![Dummy { _data: 84 }; size].into_boxed_slice();
+        copy.clone_from(&control);
+        assert_eq!(control, clone);
+        assert_eq!(control, copy);
+    }
+}
+
+/// This test might give a false positive if the box reallocates but the
+/// allocator hands back the original pointer anyway.
+///
+/// On the other hand, it won't give a false negative: if it fails, then the
+/// memory was definitely not reused.
+#[test]
+fn box_clone_from_ptr_stability() {
+    for size in (0..8).map(|i| 2usize.pow(i)) {
+        let control = vec![Dummy { _data: 42 }; size].into_boxed_slice();
+        let mut copy = vec![Dummy { _data: 84 }; size].into_boxed_slice();
+        let copy_raw = copy.as_ptr() as usize;
+        copy.clone_from(&control);
+        assert_eq!(copy.as_ptr() as usize, copy_raw);
+    }
+}
+
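+// Calling `Cell::set` through the box exercises auto-deref of a boxed place
+// expression.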
+#[test]
+fn box_deref_lval() {
+    let x = Box::new(Cell::new(5));
+    x.set(1000);
+    assert_eq!(x.get(), 1000);
+}
+
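+/// A toy `Allocator` that works during const evaluation, backed by the
+/// unstable `core::intrinsics::const_allocate` intrinsic.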
+pub struct ConstAllocator;
+
+unsafe impl const Allocator for ConstAllocator {
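+    // Zero-size requests return the layout's dangling pointer without
+    // allocating; anything larger is served from the compile-time heap via
+    // `const_allocate`.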
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        match layout.size() {
+            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
+            _ => unsafe {
+                let ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
+                Ok(NonNull::new_unchecked(ptr as *mut [u8; 0] as *mut [u8]))
+            },
+        }
+    }
+
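+    // Deallocation is deliberately a no-op: this test allocator simply leaks
+    // its compile-time allocations.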
+    unsafe fn deallocate(&self, _ptr: NonNull<u8>, layout: Layout) {
+        match layout.size() {
+            0 => { /* nothing was allocated */ }
+            _ => { /* nothing to do here either */ }
+        }
+    }
+
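+    // Allocate, then zero the whole block with `write_bytes`.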
+    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        let ptr = self.allocate(layout)?;
+        if layout.size() > 0 {
+            unsafe {
+                ptr.as_mut_ptr().write_bytes(0, layout.size());
+            }
+        }
+        Ok(ptr)
+    }
+
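+    // Grow = allocate a fresh, larger block, copy the old contents across,
+    // then release the old block.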
+    unsafe fn grow(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        debug_assert!(
+            new_layout.size() >= old_layout.size(),
+            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
+        );
+
+        let new_ptr = self.allocate(new_layout)?;
+        if new_layout.size() > 0 {
+            new_ptr.as_mut_ptr().copy_from_nonoverlapping(ptr.as_ptr(), old_layout.size());
+            self.deallocate(ptr, old_layout);
+        }
+        Ok(new_ptr)
+    }
+
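+    // Like `grow`, but also zeroes the newly added tail of the block.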
+    unsafe fn grow_zeroed(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        let new_ptr = self.grow(ptr, old_layout, new_layout)?;
+        if new_layout.size() > 0 {
+            let old_size = old_layout.size();
+            let new_size = new_layout.size();
+            let raw_ptr = new_ptr.as_mut_ptr();
+            raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
+        }
+        Ok(new_ptr)
+    }
+
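+    // Shrink = allocate a smaller block and copy over only the prefix that
+    // still fits.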
+    unsafe fn shrink(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        debug_assert!(
+            new_layout.size() <= old_layout.size(),
+            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
+        );
+
+        let new_ptr = self.allocate(new_layout)?;
+        if new_layout.size() > 0 {
+            new_ptr.as_mut_ptr().copy_from_nonoverlapping(ptr.as_ptr(), new_layout.size());
+            self.deallocate(ptr, old_layout);
+        }
+        Ok(new_ptr)
+    }
+
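+    // Trivial override of the defaulted `by_ref`; it just returns `self`.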
+    fn by_ref(&self) -> &Self
+    where
+        Self: Sized,
+    {
+        self
+    }
+}
+
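+// End-to-end: a `Box` can be created, mutated, and leaked entirely inside a
+// const initializer when backed by `ConstAllocator`.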
+#[test]
+fn const_box() {
+    const VALUE: u32 = {
+        let mut boxed = Box::new_in(1u32, ConstAllocator);
+        assert!(*boxed == 1);
+
+        *boxed = 42;
+        assert!(*boxed == 42);
+
+        *Box::leak(boxed)
+    };
+
+    assert!(VALUE == 42);
+}