author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 00:47:55 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 00:47:55 +0000
commit    26a029d407be480d791972afb5975cf62c9360a6 (patch)
tree      f435a8308119effd964b339f76abb83a57c29483 /third_party/rust/typed-arena-nomut/src
parent    Initial commit. (diff)
Adding upstream version 124.0.1. (upstream/124.0.1)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/rust/typed-arena-nomut/src')
-rw-r--r--  third_party/rust/typed-arena-nomut/src/lib.rs   633
-rw-r--r--  third_party/rust/typed-arena-nomut/src/test.rs  373
2 files changed, 1006 insertions, 0 deletions
diff --git a/third_party/rust/typed-arena-nomut/src/lib.rs b/third_party/rust/typed-arena-nomut/src/lib.rs
new file mode 100644
index 0000000000..be8e3248ed
--- /dev/null
+++ b/third_party/rust/typed-arena-nomut/src/lib.rs
@@ -0,0 +1,633 @@
+//! The arena, a fast but limited type of allocator.
+//!
+//! **A fast (but limited) allocation arena for values of a single type.**
+//!
+//! Allocated objects are destroyed all at once, when the arena itself is
+//! destroyed. There is no deallocation of individual objects while the arena
+//! itself is still alive. The flipside is that allocation is fast: typically
+//! just a vector push.
+//!
+//! There is also a method `into_vec()` to recover ownership of allocated
+//! objects when the arena is no longer required, instead of destroying
+//! everything.
+//!
+//! ## Example
+//!
+//! ```
+//! use typed_arena_nomut::Arena;
+//!
+//! struct Monster {
+//! level: u32,
+//! }
+//!
+//! let monsters = Arena::new();
+//!
+//! let goku = monsters.alloc(Monster { level: 9001 });
+//! assert!(goku.level > 9000);
+//! ```
+//!
+//! ## Safe Cycles
+//!
+//! All allocated objects get the same lifetime, so you can safely create cycles
+//! between them. This can be useful for certain data structures, such as graphs
+//! and trees with parent pointers.
+//!
+//! ```
+//! use std::cell::Cell;
+//! use typed_arena_nomut::Arena;
+//!
+//! struct CycleParticipant<'a> {
+//! other: Cell<Option<&'a CycleParticipant<'a>>>,
+//! }
+//!
+//! let arena = Arena::new();
+//!
+//! let a = arena.alloc(CycleParticipant { other: Cell::new(None) });
+//! let b = arena.alloc(CycleParticipant { other: Cell::new(None) });
+//!
+//! a.other.set(Some(b));
+//! b.other.set(Some(a));
+//! ```
+
+// Potential optimizations:
+// 1) add and stabilize a method for in-place reallocation of vecs.
+// 2) add and stabilize placement new.
+// 3) use an iterator. This may add far too much unsafe code.
+
+#![deny(missing_docs)]
+#![cfg_attr(not(any(feature = "std", test)), no_std)]
+
+#[cfg(not(feature = "std"))]
+extern crate alloc;
+
+#[cfg(any(feature = "std", test))]
+extern crate core;
+
+#[cfg(not(feature = "std"))]
+use alloc::vec::Vec;
+
+use core::cell::{Ref, RefCell};
+use core::cmp;
+use core::iter;
+use core::mem;
+use core::slice;
+use core::str;
+
+use mem::MaybeUninit;
+
+#[cfg(test)]
+mod test;
+
+// Initial size in bytes.
+const INITIAL_SIZE: usize = 1024;
+// Minimum capacity. Must be larger than 0.
+const MIN_CAPACITY: usize = 1;
+
+/// An arena of objects of type `T`.
+///
+/// ## Example
+///
+/// ```
+/// use typed_arena_nomut::Arena;
+///
+/// struct Monster {
+/// level: u32,
+/// }
+///
+/// let monsters = Arena::new();
+///
+/// let vegeta = monsters.alloc(Monster { level: 9001 });
+/// assert!(vegeta.level > 9000);
+/// ```
+pub struct Arena<T> {
+ chunks: RefCell<ChunkList<T>>,
+}
+
+struct ChunkList<T> {
+ current: Vec<T>,
+ rest: Vec<Vec<T>>,
+}
+
+impl<T> Arena<T> {
+ /// Construct a new arena.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// use typed_arena_nomut::Arena;
+ ///
+ /// let arena = Arena::new();
+ /// # arena.alloc(1);
+ /// ```
+ pub fn new() -> Arena<T> {
+ let size = cmp::max(1, mem::size_of::<T>());
+ Arena::with_capacity(INITIAL_SIZE / size)
+ }
+
+ /// Construct a new arena with capacity for `n` values pre-allocated.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// use typed_arena_nomut::Arena;
+ ///
+ /// let arena = Arena::with_capacity(1337);
+ /// # arena.alloc(1);
+ /// ```
+ pub fn with_capacity(n: usize) -> Arena<T> {
+ let n = cmp::max(MIN_CAPACITY, n);
+ Arena {
+ chunks: RefCell::new(ChunkList {
+ current: Vec::with_capacity(n),
+ rest: Vec::new(),
+ }),
+ }
+ }
+
+    /// Returns the number of values allocated in the arena.
+    ///
+    /// This is useful, for example, for sizing a new arena based on how many values a previous arena ended up holding.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// use typed_arena_nomut::Arena;
+ ///
+ /// let arena = Arena::with_capacity(0);
+ /// let a = arena.alloc(1);
+ /// let b = arena.alloc(2);
+ ///
+ /// assert_eq!(arena.len(), 2);
+ /// ```
+ pub fn len(&self) -> usize {
+ let chunks = self.chunks.borrow();
+
+ let mut res = 0;
+ for vec in chunks.rest.iter() {
+ res += vec.len()
+ }
+
+ res + chunks.current.len()
+ }
+
+    /// Allocates a value in the arena, and returns a shared reference
+    /// to that value.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// use typed_arena_nomut::Arena;
+ ///
+ /// let arena = Arena::new();
+ /// let x = arena.alloc(42);
+ /// assert_eq!(*x, 42);
+ /// ```
+ #[inline]
+ pub fn alloc(&self, value: T) -> &T {
+ self.alloc_fast_path(value)
+ .unwrap_or_else(|value| self.alloc_slow_path(value))
+ }
+
+ #[inline]
+ fn alloc_fast_path(&self, value: T) -> Result<&T, T> {
+ let mut chunks = self.chunks.borrow_mut();
+ let len = chunks.current.len();
+ if len < chunks.current.capacity() {
+ chunks.current.push(value);
+ // Avoid going through `Vec::deref_mut`, which overlaps
+ // other references we have already handed out!
+ debug_assert!(len < chunks.current.len()); // bounds check
+ Ok(unsafe { &mut *chunks.current.as_mut_ptr().add(len) })
+ } else {
+ Err(value)
+ }
+ }
+
+ fn alloc_slow_path(&self, value: T) -> &T {
+ &self.alloc_extend(iter::once(value))[0]
+ }
+
+    /// Uses the contents of an iterator to allocate values in the arena.
+    /// Returns a shared slice that contains these values.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// use typed_arena_nomut::Arena;
+ ///
+ /// let arena = Arena::new();
+ /// let abc = arena.alloc_extend("abcdefg".chars().take(3));
+ /// assert_eq!(abc, ['a', 'b', 'c']);
+ /// ```
+ pub fn alloc_extend<I>(&self, iterable: I) -> &[T]
+ where
+ I: IntoIterator<Item = T>,
+ {
+ let mut iter = iterable.into_iter();
+
+ let mut chunks = self.chunks.borrow_mut();
+
+ let iter_min_len = iter.size_hint().0;
+ let mut next_item_index;
+ debug_assert!(
+ chunks.current.capacity() >= chunks.current.len(),
+ "capacity is always greater than or equal to len, so we don't need to worry about underflow"
+ );
+ if iter_min_len > chunks.current.capacity() - chunks.current.len() {
+ chunks.reserve(iter_min_len);
+ chunks.current.extend(iter);
+ next_item_index = 0;
+ } else {
+ next_item_index = chunks.current.len();
+ let mut i = 0;
+ while let Some(elem) = iter.next() {
+ if chunks.current.len() == chunks.current.capacity() {
+ // The iterator was larger than we could fit into the current chunk.
+ let chunks = &mut *chunks;
+ // Create a new chunk into which we can freely push the entire iterator into
+ chunks.reserve(i + 1);
+ let previous_chunk = chunks.rest.last_mut().unwrap();
+ let previous_chunk_len = previous_chunk.len();
+ // Move any elements we put into the previous chunk into this new chunk
+ chunks
+ .current
+ .extend(previous_chunk.drain(previous_chunk_len - i..));
+ chunks.current.push(elem);
+ // And the remaining elements in the iterator
+ chunks.current.extend(iter);
+ next_item_index = 0;
+ break;
+ } else {
+ chunks.current.push(elem);
+ }
+ i += 1;
+ }
+ }
+ let new_slice_ref = &mut chunks.current[next_item_index..];
+
+        // Extend the lifetime from that of the `chunks` borrow to that of `self`.
+        // This is OK because we're careful to never move items:
+        // we never push to an inner `Vec` beyond its initial capacity.
+        // The returned slice covers only the freshly pushed items,
+        // to which no other references have been handed out yet.
+ unsafe { mem::transmute::<&mut [T], &mut [T]>(new_slice_ref) }
+ }
+
+ /// Allocates space for a given number of values, but doesn't initialize it.
+ ///
+ /// ## Safety
+ ///
+    /// After calling this method, the arena considers the elements initialized. If you fail to
+    /// initialize them (including by panicking partway through initialization), the arena
+    /// will run destructors on uninitialized memory. Therefore, you must initialize all of them.
+ ///
+ /// Considering how easy it is to cause undefined behaviour using this, you're advised to
+ /// prefer the other (safe) methods, like [`alloc_extend`][Arena::alloc_extend].
+ ///
+ /// ## Example
+ ///
+ /// ```rust
+ /// use std::mem::{self, MaybeUninit};
+ /// use std::ptr;
+ /// use typed_arena_nomut::Arena;
+ ///
+ /// // Transmute from MaybeUninit slice to slice of initialized T.
+ /// // It is a separate function to preserve the lifetime of the reference.
+ /// unsafe fn transmute_uninit<A>(r: &mut [MaybeUninit<A>]) -> &mut [A] {
+ /// mem::transmute(r)
+ /// }
+ ///
+ /// let arena: Arena<bool> = Arena::new();
+ /// let slice: &mut [bool];
+ /// unsafe {
+ /// let uninitialized = arena.alloc_uninitialized(10);
+ /// for elem in uninitialized.iter_mut() {
+ /// ptr::write(elem.as_mut_ptr(), true);
+ /// }
+ /// slice = transmute_uninit(uninitialized);
+ /// }
+ /// ```
+ ///
+ /// ## Alternative allocation pattern
+ ///
+    /// To avoid dropping elements that are merely assumed to be initialized when a panic occurs,
+    /// it is also possible to combine [`reserve_extend`][Arena::reserve_extend] with
+    /// [`uninitialized_array`][Arena::uninitialized_array]: initialize the elements and then
+    /// confirm them with this method. In that case, a panic during initialization leaks the
+    /// already initialized elements but does not cause UB.
+ ///
+ /// ```rust
+ /// use std::mem::{self, MaybeUninit};
+ /// use std::ptr;
+ /// use typed_arena_nomut::Arena;
+ ///
+ /// unsafe fn transmute_uninit<A>(r: &mut [MaybeUninit<A>]) -> &mut [A] {
+ /// mem::transmute(r)
+ /// }
+ ///
+ /// const COUNT: usize = 2;
+ ///
+ /// let arena: Arena<String> = Arena::new();
+ ///
+ /// arena.reserve_extend(COUNT);
+ /// let slice: &mut [String];
+ /// unsafe {
+ /// // Perform initialization before we claim the memory.
+ /// let uninitialized = arena.uninitialized_array();
+ /// assert!((*uninitialized).len() >= COUNT); // Ensured by the reserve_extend
+ /// for elem in &mut (*uninitialized)[..COUNT] {
+ /// ptr::write(elem.as_mut_ptr(), "Hello".to_owned());
+ /// }
+ /// let addr = (*uninitialized).as_ptr() as usize;
+ ///
+ /// // The alloc_uninitialized returns the same memory, but "confirms" its allocation.
+ /// slice = transmute_uninit(arena.alloc_uninitialized(COUNT));
+ /// assert_eq!(addr, slice.as_ptr() as usize);
+ /// assert_eq!(slice, &["Hello".to_owned(), "Hello".to_owned()]);
+ /// }
+ /// ```
+ pub unsafe fn alloc_uninitialized(&self, num: usize) -> &mut [MaybeUninit<T>] {
+ let mut chunks = self.chunks.borrow_mut();
+
+ debug_assert!(
+ chunks.current.capacity() >= chunks.current.len(),
+ "capacity is always greater than or equal to len, so we don't need to worry about underflow"
+ );
+ if num > chunks.current.capacity() - chunks.current.len() {
+ chunks.reserve(num);
+ }
+
+ // At this point, the current chunk must have free capacity.
+ let next_item_index = chunks.current.len();
+ chunks.current.set_len(next_item_index + num);
+
+ // Go through pointers, to make sure we never create a reference to uninitialized T.
+ let start = chunks.current.as_mut_ptr().offset(next_item_index as isize);
+ let start_uninit = start as *mut MaybeUninit<T>;
+ slice::from_raw_parts_mut(start_uninit, num)
+ }
+
+    /// Makes sure there's enough contiguous space for at least `num` elements.
+    ///
+    /// This may save some work if called before [`alloc_extend`][Arena::alloc_extend]. It also
+    /// allows a somewhat safer use pattern of [`alloc_uninitialized`][Arena::alloc_uninitialized].
+    /// On the other hand, this might waste up to `num - 1` elements' worth of space: if a new
+    /// chunk has to be allocated, the unused capacity at the end of the current chunk is never used.
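+    ///
+    /// ## Example
+    ///
+    /// A minimal sketch of the intended call pattern (illustrative only; it shows the calls,
+    /// not a measured performance benefit):
+    ///
+    /// ```
+    /// use typed_arena_nomut::Arena;
+    ///
+    /// let arena: Arena<u32> = Arena::new();
+    /// arena.reserve_extend(100);
+    /// // The 100 values now fit into the already-reserved chunk.
+    /// let slice = arena.alloc_extend(0..100);
+    /// assert_eq!(slice.len(), 100);
+    /// ```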
+ pub fn reserve_extend(&self, num: usize) {
+ let mut chunks = self.chunks.borrow_mut();
+
+ debug_assert!(
+ chunks.current.capacity() >= chunks.current.len(),
+ "capacity is always greater than or equal to len, so we don't need to worry about underflow"
+ );
+ if num > chunks.current.capacity() - chunks.current.len() {
+ chunks.reserve(num);
+ }
+ }
+
+ /// Returns unused space.
+ ///
+ /// *This unused space is still not considered "allocated".* Therefore, it
+ /// won't be dropped unless there are further calls to `alloc`,
+ /// [`alloc_uninitialized`][Arena::alloc_uninitialized], or
+    /// [`alloc_extend`][Arena::alloc_extend], which is why the method is safe.
+ ///
+ /// It returns a raw pointer to avoid creating multiple mutable references to the same place.
+ /// It is up to the caller not to dereference it after any of the `alloc_` methods are called.
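+    ///
+    /// ## Example
+    ///
+    /// A small sketch (modelled on this crate's own tests) that only inspects the unused
+    /// capacity without writing through the pointer:
+    ///
+    /// ```
+    /// use typed_arena_nomut::Arena;
+    ///
+    /// let arena: Arena<u32> = Arena::with_capacity(8);
+    /// arena.alloc(1);
+    /// let unused = arena.uninitialized_array();
+    /// // Eight slots were reserved and one is occupied, so at least seven remain unused.
+    /// assert!(unsafe { (*unused).len() } >= 7);
+    /// ```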
+ pub fn uninitialized_array(&self) -> *mut [MaybeUninit<T>] {
+ let mut chunks = self.chunks.borrow_mut();
+ let len = chunks.current.capacity() - chunks.current.len();
+ let next_item_index = chunks.current.len();
+
+ unsafe {
+ // Go through pointers, to make sure we never create a reference to uninitialized T.
+ let start = chunks.current.as_mut_ptr().offset(next_item_index as isize);
+ let start_uninit = start as *mut MaybeUninit<T>;
+ slice::from_raw_parts_mut(start_uninit, len) as *mut _
+ }
+ }
+
+ /// Convert this `Arena` into a `Vec<T>`.
+ ///
+ /// Items in the resulting `Vec<T>` appear in the order that they were
+ /// allocated in.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// use typed_arena_nomut::Arena;
+ ///
+ /// let arena = Arena::new();
+ ///
+ /// arena.alloc("a");
+ /// arena.alloc("b");
+ /// arena.alloc("c");
+ ///
+ /// let easy_as_123 = arena.into_vec();
+ ///
+ /// assert_eq!(easy_as_123, vec!["a", "b", "c"]);
+ /// ```
+ pub fn into_vec(self) -> Vec<T> {
+ let mut chunks = self.chunks.into_inner();
+ // keep order of allocation in the resulting Vec
+ let n = chunks
+ .rest
+ .iter()
+ .fold(chunks.current.len(), |a, v| a + v.len());
+ let mut result = Vec::with_capacity(n);
+ for mut vec in chunks.rest {
+ result.append(&mut vec);
+ }
+ result.append(&mut chunks.current);
+ result
+ }
+
+    /// Returns an iterator over shared references to the values allocated in this arena.
+ ///
+ /// Items are yielded in the order that they were allocated.
+ ///
+ /// ## Example
+ ///
+ /// ```
+ /// use typed_arena_nomut::Arena;
+ /// use std::cell::Cell;
+ ///
+ /// #[derive(Debug, PartialEq, Eq)]
+    /// struct Point { x: Cell<i32>, y: i32 }
+ ///
+ /// let mut arena = Arena::new();
+ ///
+ /// arena.alloc(Point { x: Cell::new(0), y: 0 });
+ /// arena.alloc(Point { x: Cell::new(1), y: 1 });
+ ///
+ /// for point in arena.iter() {
+ /// point.x.set(point.x.get() + 10);
+ /// }
+ ///
+ /// let points = arena.into_vec();
+ ///
+ /// assert_eq!(points, vec![Point { x: Cell::new(10), y: 0 }, Point { x: Cell::new(11), y: 1 }]);
+ ///
+ /// ```
+ ///
+    /// ## Immutable Iteration
+    ///
+    /// Note that `iter` takes only a shared borrow of the arena and yields shared references.
+    /// References returned from earlier calls to `alloc` remain usable during iteration;
+    /// mutating an item requires interior mutability such as `Cell`.
+ ///
+ /// ```
+ /// use typed_arena_nomut::Arena;
+ /// use std::cell::Cell;
+ ///
+ /// let mut arena = Arena::new();
+ /// let x = arena.alloc(Cell::new(1));
+ ///
+ /// for i in arena.iter() {
+ /// println!("i: {}", i.get());
+ /// }
+ ///
+ /// x.set(x.get() * 2);
+ /// ```
+ #[inline]
+ pub fn iter(&self) -> Iter<T> {
+ let chunks = self.chunks.borrow();
+ let position = if !chunks.rest.is_empty() {
+ let index = 0;
+ let inner_iter = chunks.rest[index].iter();
+            // Extend the lifetime of the individual elements to that of the arena.
+            // This is OK because the iterator holds a `Ref` borrow of the chunk list,
+            // so new allocations (which need a mutable borrow) cannot happen, and items
+            // are never moved inside the arena while the iterator is alive.
+ let inner_iter = unsafe { mem::transmute(inner_iter) };
+ IterState::ChunkListRest { index, inner_iter }
+ } else {
+ // Extend the lifetime of the individual elements to that of the arena.
+ let iter = unsafe { mem::transmute(chunks.current.iter()) };
+ IterState::ChunkListCurrent { iter }
+ };
+ Iter {
+ chunks,
+ state: position,
+ }
+ }
+}
+
+impl Arena<u8> {
+    /// Allocates a string slice and returns a shared reference to it.
+ ///
+ /// This is on `Arena<u8>`, because string slices use byte slices (`[u8]`) as their backing
+ /// storage.
+ ///
+ /// # Example
+ ///
+ /// ```
+ /// use typed_arena_nomut::Arena;
+ ///
+ /// let arena: Arena<u8> = Arena::new();
+ /// let hello = arena.alloc_str("Hello world");
+ /// assert_eq!("Hello world", hello);
+ /// ```
+ #[inline]
+    pub fn alloc_str(&self, s: &str) -> &str {
+ let buffer = self.alloc_extend(s.bytes());
+ // Can't fail the utf8 validation, it already came in as utf8
+ unsafe { str::from_utf8_unchecked(buffer) }
+ }
+}
+
+impl<T> Default for Arena<T> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl<T> ChunkList<T> {
+ #[inline(never)]
+ #[cold]
+ fn reserve(&mut self, additional: usize) {
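+        // Growth strategy: the replacement chunk's capacity is the larger of double
+        // the current chunk's capacity and the next power of two >= `additional`;
+        // the filled chunk is then retired into `rest`. Old chunks are never
+        // reallocated, so references handed out earlier stay valid.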
+ let double_cap = self
+ .current
+ .capacity()
+ .checked_mul(2)
+ .expect("capacity overflow");
+ let required_cap = additional
+ .checked_next_power_of_two()
+ .expect("capacity overflow");
+ let new_capacity = cmp::max(double_cap, required_cap);
+ let chunk = mem::replace(&mut self.current, Vec::with_capacity(new_capacity));
+ self.rest.push(chunk);
+ }
+}
+
+enum IterState<'a, T> {
+ ChunkListRest {
+ index: usize,
+ inner_iter: slice::Iter<'a, T>,
+ },
+ ChunkListCurrent {
+ iter: slice::Iter<'a, T>,
+ },
+}
+
+/// Arena iterator, yielding shared references to the allocated values.
+///
+/// This struct is created by the [`iter`](struct.Arena.html#method.iter) method on [Arenas](struct.Arena.html).
+pub struct Iter<'a, T: 'a> {
+ chunks: Ref<'a, ChunkList<T>>,
+ state: IterState<'a, T>,
+}
+
+impl<'a, T> Iterator for Iter<'a, T> {
+ type Item = &'a T;
+ fn next(&mut self) -> Option<&'a T> {
+ loop {
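+            // Walk the retired chunks in allocation order first; when one is
+            // exhausted, advance to the next, finishing with the current chunk.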
+ self.state = match self.state {
+ IterState::ChunkListRest {
+ mut index,
+ ref mut inner_iter,
+ } => {
+ match inner_iter.next() {
+ Some(item) => return Some(item),
+ None => {
+ index += 1;
+ if index < self.chunks.rest.len() {
+ let inner_iter = self.chunks.rest[index].iter();
+ // Extend the lifetime of the individual elements to that of the arena.
+ let inner_iter = unsafe { mem::transmute(inner_iter) };
+ IterState::ChunkListRest { index, inner_iter }
+ } else {
+ let iter = self.chunks.current.iter();
+ // Extend the lifetime of the individual elements to that of the arena.
+ let iter = unsafe { mem::transmute(iter) };
+ IterState::ChunkListCurrent { iter }
+ }
+ }
+ }
+ }
+ IterState::ChunkListCurrent { ref mut iter } => return iter.next(),
+ };
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
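+        // These bounds are estimates. With a single chunk the hint is exact; with
+        // multiple chunks only the current chunk and the most recently retired one
+        // feed the lower bound, and the upper bound pads that by one current-chunk
+        // capacity of slack.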
+ let current_len = self.chunks.current.len();
+ let current_cap = self.chunks.current.capacity();
+ if self.chunks.rest.is_empty() {
+ (current_len, Some(current_len))
+ } else {
+ let rest_len = self.chunks.rest.len();
+ let last_chunk_len = self
+ .chunks
+ .rest
+ .last()
+ .map(|chunk| chunk.len())
+ .unwrap_or(0);
+
+ let min = current_len + last_chunk_len;
+ let max = min + (rest_len * current_cap / rest_len);
+
+ (min, Some(max))
+ }
+ }
+}
diff --git a/third_party/rust/typed-arena-nomut/src/test.rs b/third_party/rust/typed-arena-nomut/src/test.rs
new file mode 100644
index 0000000000..6f666128a0
--- /dev/null
+++ b/third_party/rust/typed-arena-nomut/src/test.rs
@@ -0,0 +1,373 @@
+use super::*;
+use std::cell::Cell;
+use std::mem;
+use std::panic::{self, AssertUnwindSafe};
+use std::ptr;
+
+struct DropTracker<'a>(&'a Cell<u32>);
+impl<'a> Drop for DropTracker<'a> {
+ fn drop(&mut self) {
+ self.0.set(self.0.get() + 1);
+ }
+}
+
+struct Node<'a, 'b: 'a>(Option<&'a Node<'a, 'b>>, u32, DropTracker<'b>);
+
+#[test]
+fn arena_as_intended() {
+ let drop_counter = Cell::new(0);
+ {
+ let arena = Arena::with_capacity(2);
+
+ let mut node: &Node = arena.alloc(Node(None, 1, DropTracker(&drop_counter)));
+ assert_eq!(arena.chunks.borrow().rest.len(), 0);
+
+ node = arena.alloc(Node(Some(node), 2, DropTracker(&drop_counter)));
+ assert_eq!(arena.chunks.borrow().rest.len(), 0);
+
+ node = arena.alloc(Node(Some(node), 3, DropTracker(&drop_counter)));
+ assert_eq!(arena.chunks.borrow().rest.len(), 1);
+
+ node = arena.alloc(Node(Some(node), 4, DropTracker(&drop_counter)));
+ assert_eq!(arena.chunks.borrow().rest.len(), 1);
+
+ assert_eq!(node.1, 4);
+ assert_eq!(node.0.unwrap().1, 3);
+ assert_eq!(node.0.unwrap().0.unwrap().1, 2);
+ assert_eq!(node.0.unwrap().0.unwrap().0.unwrap().1, 1);
+ assert!(node.0.unwrap().0.unwrap().0.unwrap().0.is_none());
+
+ assert_eq!(arena.len(), 4);
+
+ mem::drop(node);
+ assert_eq!(drop_counter.get(), 0);
+
+ let mut node: &Node = arena.alloc(Node(None, 5, DropTracker(&drop_counter)));
+ assert_eq!(arena.chunks.borrow().rest.len(), 1);
+
+ node = arena.alloc(Node(Some(node), 6, DropTracker(&drop_counter)));
+ assert_eq!(arena.chunks.borrow().rest.len(), 1);
+
+ node = arena.alloc(Node(Some(node), 7, DropTracker(&drop_counter)));
+ assert_eq!(arena.chunks.borrow().rest.len(), 2);
+
+ assert_eq!(drop_counter.get(), 0);
+
+ assert_eq!(node.1, 7);
+ assert_eq!(node.0.unwrap().1, 6);
+ assert_eq!(node.0.unwrap().0.unwrap().1, 5);
+ assert!(node.0.unwrap().0.unwrap().0.is_none());
+
+ assert_eq!(drop_counter.get(), 0);
+ }
+ assert_eq!(drop_counter.get(), 7);
+}
+
+#[test]
+fn ensure_into_vec_maintains_order_of_allocation() {
+ let arena = Arena::with_capacity(1); // force multiple inner vecs
+ for &s in &["t", "e", "s", "t"] {
+ arena.alloc(String::from(s));
+ }
+ let vec = arena.into_vec();
+ assert_eq!(vec, vec!["t", "e", "s", "t"]);
+}
+
+#[test]
+fn test_zero_cap() {
+ let arena = Arena::with_capacity(0);
+ let a = arena.alloc(1);
+ let b = arena.alloc(2);
+ assert_eq!(*a, 1);
+ assert_eq!(*b, 2);
+ assert_eq!(arena.len(), 2);
+}
+
+#[test]
+fn test_alloc_extend() {
+ let arena = Arena::with_capacity(2);
+ for i in 0..15 {
+ let slice = arena.alloc_extend(0..i);
+ for (j, &elem) in slice.iter().enumerate() {
+ assert_eq!(j, elem);
+ }
+ }
+}
+
+#[test]
+fn test_alloc_uninitialized() {
+ const LIMIT: usize = 15;
+ let drop_counter = Cell::new(0);
+ unsafe {
+ let arena: Arena<Node> = Arena::with_capacity(4);
+ for i in 0..LIMIT {
+ let slice = arena.alloc_uninitialized(i);
+ for (j, elem) in slice.iter_mut().enumerate() {
+ ptr::write(elem.as_mut_ptr(), Node(None, j as u32, DropTracker(&drop_counter)));
+ }
+ assert_eq!(drop_counter.get(), 0);
+ }
+ }
+ assert_eq!(drop_counter.get(), (0..LIMIT).fold(0, |a, e| a + e) as u32);
+}
+
+#[test]
+fn test_alloc_extend_with_drop_counter() {
+ let drop_counter = Cell::new(0);
+ {
+ let arena = Arena::with_capacity(2);
+ let iter = (0..100).map(|j| Node(None, j as u32, DropTracker(&drop_counter)));
+ let older_ref = Some(&arena.alloc_extend(iter)[0]);
+ assert_eq!(drop_counter.get(), 0);
+ let iter = (0..100).map(|j| Node(older_ref, j as u32, DropTracker(&drop_counter)));
+ arena.alloc_extend(iter);
+ assert_eq!(drop_counter.get(), 0);
+ }
+ assert_eq!(drop_counter.get(), 200);
+}
+
+/// Test with bools.
+///
+/// Bools, unlike integers, have invalid bit patterns. Therefore, ever having an uninitialized bool
+/// is insta-UB. Make sure miri doesn't find any such thing.
+#[test]
+fn test_alloc_uninitialized_bools() {
+ const LEN: usize = 20;
+ unsafe {
+ let arena: Arena<bool> = Arena::with_capacity(2);
+ let slice = arena.alloc_uninitialized(LEN);
+ for elem in slice.iter_mut() {
+ ptr::write(elem.as_mut_ptr(), true);
+ }
+ // Now it is fully initialized, we can safely transmute the slice.
+ let slice: &mut [bool] = mem::transmute(slice);
+ assert_eq!(&[true; LEN], slice);
+ }
+}
+
+/// Check that nothing bad happens when we panic during initialization of the borrowed slice.
+#[test]
+fn alloc_uninitialized_with_panic() {
+ struct Dropper(bool);
+
+ impl Drop for Dropper {
+ fn drop(&mut self) {
+            // Just make sure we touch the value, so that miri would bite if it was
+            // uninitialized
+ if self.0 {
+ panic!();
+ }
+ }
+ }
+ let mut reached_first_init = false;
+ panic::catch_unwind(AssertUnwindSafe(|| unsafe {
+ let arena: Arena<Dropper> = Arena::new();
+ arena.reserve_extend(2);
+ let uninitialized = arena.uninitialized_array();
+ assert!((*uninitialized).len() >= 2);
+ ptr::write((*uninitialized)[0].as_mut_ptr(), Dropper(false));
+ reached_first_init = true;
+ panic!("To drop the arena");
+ // If it didn't panic, we would continue by initializing the second one and confirming by
+ // .alloc_uninitialized();
+ })).unwrap_err();
+ assert!(reached_first_init);
+}
+
+#[test]
+fn test_uninitialized_array() {
+ let arena = Arena::with_capacity(2);
+ let uninit = arena.uninitialized_array();
+ arena.alloc_extend(0..2);
+ unsafe {
+ for (&a, b) in (&*uninit).iter().zip(0..2) {
+ assert_eq!(a.assume_init(), b);
+ }
+ assert!((&*arena.uninitialized_array()).as_ptr() != (&*uninit).as_ptr());
+ arena.alloc(0);
+ let uninit = arena.uninitialized_array();
+ assert_eq!((&*uninit).len(), 3);
+ }
+}
+
+#[test]
+fn dont_trust_the_iterator_size() {
+ use std::iter::repeat;
+
+ struct WrongSizeIter<I>(I);
+ impl<I> Iterator for WrongSizeIter<I>
+ where
+ I: Iterator,
+ {
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, Some(0))
+ }
+ }
+
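+    // This ExactSizeIterator impl is deliberately wrong: `size_hint` above reports
+    // zero items even though the iterator yields 1_000, so `alloc_extend` must not
+    // rely on the hint when reserving space.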
+ impl<I> ExactSizeIterator for WrongSizeIter<I> where I: Iterator {}
+
+ let arena = Arena::with_capacity(2);
+ arena.alloc(0);
+ let slice = arena.alloc_extend(WrongSizeIter(repeat(1).take(1_000)));
+ // Allocation of 1000 elements should have created a new chunk
+ assert_eq!(arena.chunks.borrow().rest.len(), 1);
+ assert_eq!(slice.len(), 1000);
+}
+
+#[test]
+fn arena_is_send() {
+ fn assert_is_send<T: Send>(_: T) {}
+
+ // If `T` is `Send`, ...
+ assert_is_send(42_u32);
+
+ // Then `Arena<T>` is also `Send`.
+ let arena: Arena<u32> = Arena::new();
+ assert_is_send(arena);
+}
+
+#[test]
+fn iter_low_capacity() {
+ #[derive(Debug, PartialEq, Eq)]
+ struct NonCopy(usize);
+
+ const MAX: usize = 1_000;
+ const CAP: usize = 16;
+
+ let arena = Arena::with_capacity(CAP);
+ for i in 1..MAX {
+ arena.alloc(NonCopy(i));
+ }
+
+ assert!(
+ arena.chunks.borrow().rest.len() > 1,
+ "expected multiple chunks"
+ );
+
+ let mut iter = arena.iter();
+ for i in 1..MAX {
+ assert_eq!(Some(&NonCopy(i)), iter.next());
+ }
+
+ assert_eq!(None, iter.next());
+}
+
+#[test]
+fn iter_high_capacity() {
+ #[derive(Debug, PartialEq, Eq)]
+ struct NonCopy(usize);
+
+ const MAX: usize = 1_000;
+ const CAP: usize = 8192;
+
+ let arena = Arena::with_capacity(CAP);
+ for i in 1..MAX {
+ arena.alloc(NonCopy(i));
+ }
+
+ assert!(
+ arena.chunks.borrow().rest.is_empty(),
+ "expected single chunk"
+ );
+
+ let mut iter = arena.iter();
+ for i in 1..MAX {
+ assert_eq!(Some(&NonCopy(i)), iter.next());
+ }
+
+ assert_eq!(None, iter.next());
+}
+
+fn assert_size_hint<T>(arena_len: usize, iter: Iter<'_, T>) {
+ let (min, max) = iter.size_hint();
+
+ assert!(max.is_some());
+ let max = max.unwrap();
+
+ // Check that the actual arena length lies between the estimated min and max
+ assert!(min <= arena_len);
+ assert!(max >= arena_len);
+
+ // Check that the min and max estimates are within a factor of 3
+ assert!(min >= arena_len / 3);
+ assert!(max <= arena_len * 3);
+}
+
+#[test]
+fn size_hint() {
+ #[derive(Debug, PartialEq, Eq)]
+ struct NonCopy(usize);
+
+ const MAX: usize = 32;
+ const CAP: usize = 0;
+
+ for cap in CAP..(CAP + 16/* check some non-power-of-two capacities */) {
+ let arena = Arena::with_capacity(cap);
+ for i in 1..MAX {
+ arena.alloc(NonCopy(i));
+ let iter = arena.iter();
+ assert_size_hint(i, iter);
+ }
+ }
+}
+
+#[test]
+#[cfg_attr(miri, ignore)]
+fn size_hint_low_initial_capacities() {
+ #[derive(Debug, PartialEq, Eq)]
+ struct NonCopy(usize);
+
+ const MAX: usize = 25_000;
+ const CAP: usize = 0;
+
+ for cap in CAP..(CAP + 128/* check some non-power-of-two capacities */) {
+ let arena = Arena::with_capacity(cap);
+ for i in 1..MAX {
+ arena.alloc(NonCopy(i));
+ let iter = arena.iter();
+ assert_size_hint(i, iter);
+ }
+ }
+}
+
+#[test]
+#[cfg_attr(miri, ignore)]
+fn size_hint_high_initial_capacities() {
+ #[derive(Debug, PartialEq, Eq)]
+ struct NonCopy(usize);
+
+ const MAX: usize = 25_000;
+ const CAP: usize = 8164;
+
+ for cap in CAP..(CAP + 128/* check some non-power-of-two capacities */) {
+ let arena = Arena::with_capacity(cap);
+ for i in 1..MAX {
+ arena.alloc(NonCopy(i));
+ let iter = arena.iter();
+ assert_size_hint(i, iter);
+ }
+ }
+}
+
+#[test]
+#[cfg_attr(miri, ignore)]
+fn size_hint_many_items() {
+ #[derive(Debug, PartialEq, Eq)]
+ struct NonCopy(usize);
+
+ const MAX: usize = 5_000_000;
+ const CAP: usize = 16;
+
+ let arena = Arena::with_capacity(CAP);
+ for i in 1..MAX {
+ arena.alloc(NonCopy(i));
+ let iter = arena.iter();
+ assert_size_hint(i, iter);
+ }
+}