Diffstat
-rw-r--r--  third_party/rust/bumpalo/src/collections/raw_vec.rs   86
-rw-r--r--  third_party/rust/bumpalo/src/collections/string.rs    17
-rw-r--r--  third_party/rust/bumpalo/src/collections/vec.rs      148
3 files changed, 230 insertions, 21 deletions
diff --git a/third_party/rust/bumpalo/src/collections/raw_vec.rs b/third_party/rust/bumpalo/src/collections/raw_vec.rs
index ac3bd0758c..456829d447 100644
--- a/third_party/rust/bumpalo/src/collections/raw_vec.rs
+++ b/third_party/rust/bumpalo/src/collections/raw_vec.rs
@@ -319,7 +319,7 @@ impl<'a, T> RawVec<'a, T> {
used_cap: usize,
needed_extra_cap: usize,
) -> Result<(), CollectionAllocErr> {
- self.reserve_internal(used_cap, needed_extra_cap, Fallible, Exact)
+ self.fallible_reserve_internal(used_cap, needed_extra_cap, Exact)
}
/// Ensures that the buffer contains at least enough space to hold
@@ -343,11 +343,7 @@ impl<'a, T> RawVec<'a, T> {
///
/// Aborts on OOM
pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
- match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Exact) {
- Err(CapacityOverflow) => capacity_overflow(),
- Err(AllocErr) => unreachable!(),
- Ok(()) => { /* yay */ }
- }
+ self.infallible_reserve_internal(used_cap, needed_extra_cap, Exact)
}
/// Calculates the buffer's new size given that it'll hold `used_cap +
@@ -374,7 +370,7 @@ impl<'a, T> RawVec<'a, T> {
used_cap: usize,
needed_extra_cap: usize,
) -> Result<(), CollectionAllocErr> {
- self.reserve_internal(used_cap, needed_extra_cap, Fallible, Amortized)
+ self.fallible_reserve_internal(used_cap, needed_extra_cap, Amortized)
}
/// Ensures that the buffer contains at least enough space to hold
@@ -429,13 +425,11 @@ impl<'a, T> RawVec<'a, T> {
/// # vector.push_all(&[1, 3, 5, 7, 9]);
/// # }
/// ```
+ #[inline(always)]
pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
- match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Amortized) {
- Err(CapacityOverflow) => capacity_overflow(),
- Err(AllocErr) => unreachable!(),
- Ok(()) => { /* yay */ }
- }
+ self.infallible_reserve_internal(used_cap, needed_extra_cap, Amortized)
}
+
/// Attempts to ensure that the buffer contains at least enough space to hold
/// `used_cap + needed_extra_cap` elements. If it doesn't already have
/// enough capacity, will reallocate in place enough space plus comfortable slack
@@ -593,6 +587,68 @@ enum ReserveStrategy {
use self::ReserveStrategy::*;
impl<'a, T> RawVec<'a, T> {
+ #[inline(always)]
+ fn fallible_reserve_internal(
+ &mut self,
+ used_cap: usize,
+ needed_extra_cap: usize,
+ strategy: ReserveStrategy,
+ ) -> Result<(), CollectionAllocErr> {
+ // This portion of the method should always be inlined.
+ if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
+ return Ok(());
+ }
+ // This portion of the method should never be inlined, and will only be called when
+ // the check above has confirmed that it is necessary.
+ self.reserve_internal_or_error(used_cap, needed_extra_cap, Fallible, strategy)
+ }
+
+ #[inline(always)]
+ fn infallible_reserve_internal(
+ &mut self,
+ used_cap: usize,
+ needed_extra_cap: usize,
+ strategy: ReserveStrategy,
+ ) {
+ // This portion of the method should always be inlined.
+ if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
+ return;
+ }
+ // This portion of the method should never be inlined, and will only be called when
+ // the check above has confirmed that it is necessary.
+ self.reserve_internal_or_panic(used_cap, needed_extra_cap, strategy)
+ }
+
+ #[inline(never)]
+ fn reserve_internal_or_panic(
+ &mut self,
+ used_cap: usize,
+ needed_extra_cap: usize,
+ strategy: ReserveStrategy,
+ ) {
+ // Delegates the call to `reserve_internal` and panics in the event of an error.
+ // This allows the method to have a return type of `()`, simplifying the assembly at the
+ // call site.
+ match self.reserve_internal(used_cap, needed_extra_cap, Infallible, strategy) {
+ Err(CapacityOverflow) => capacity_overflow(),
+ Err(AllocErr) => unreachable!(),
+ Ok(()) => { /* yay */ }
+ }
+ }
+
+ #[inline(never)]
+ fn reserve_internal_or_error(
+ &mut self,
+ used_cap: usize,
+ needed_extra_cap: usize,
+ fallibility: Fallibility,
+ strategy: ReserveStrategy,
+ ) -> Result<(), CollectionAllocErr> {
+ // Delegates the call to `reserve_internal`, which can be inlined.
+ self.reserve_internal(used_cap, needed_extra_cap, fallibility, strategy)
+ }
+
+ /// Helper method to reserve additional space, reallocating the backing memory.
+ /// The caller is responsible for confirming that there is not already enough space available.
fn reserve_internal(
&mut self,
used_cap: usize,
@@ -608,12 +664,6 @@ impl<'a, T> RawVec<'a, T> {
// If we make it past the first branch then we are guaranteed to
// panic.
- // Don't actually need any more capacity.
- // Wrapping in case they gave a bad `used_cap`.
- if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
- return Ok(());
- }
-
// Nothing we can really do about these checks :(
let new_cap = match strategy {
Exact => used_cap
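
The raw_vec.rs change above splits every reserve into an always-inlined capacity check plus a never-inlined grow path, so the common "already enough space" case costs only a compare at the call site. A minimal standalone sketch of that pattern, with a hypothetical `Buffer` type standing in for `RawVec`:

```rust
// Hot/cold split: the cheap capacity check is forced inline at every call
// site, while the rare grow path is kept out of line so it does not bloat
// callers. `Buffer` is a hypothetical stand-in for `RawVec`.
struct Buffer {
    cap: usize,
}

impl Buffer {
    #[inline(always)]
    fn reserve(&mut self, used: usize, extra: usize) {
        // Fast path: `wrapping_sub` keeps the comparison well-defined even
        // if the caller passes a `used` larger than the current capacity.
        if self.cap.wrapping_sub(used) >= extra {
            return;
        }
        self.grow(used, extra);
    }

    #[inline(never)]
    fn grow(&mut self, used: usize, extra: usize) {
        // Cold path: only reached when more space is actually needed.
        // A real implementation reallocates; here we only model the
        // amortized capacity bump.
        let needed = used.checked_add(extra).expect("capacity overflow");
        self.cap = needed.next_power_of_two();
    }
}

fn main() {
    let mut buf = Buffer { cap: 0 };
    buf.reserve(0, 5);
    assert!(buf.cap >= 5);
}
```
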
diff --git a/third_party/rust/bumpalo/src/collections/string.rs b/third_party/rust/bumpalo/src/collections/string.rs
index ffd1db92de..e9fafbf204 100644
--- a/third_party/rust/bumpalo/src/collections/string.rs
+++ b/third_party/rust/bumpalo/src/collections/string.rs
@@ -680,8 +680,19 @@ impl<'bump> String<'bump> {
/// assert_eq!(s, "hello");
/// ```
pub fn from_str_in(s: &str, bump: &'bump Bump) -> String<'bump> {
- let mut t = String::with_capacity_in(s.len(), bump);
- t.push_str(s);
+ let len = s.len();
+ let mut t = String::with_capacity_in(len, bump);
+ // SAFETY:
+ // * `src` is valid for reads of `s.len()` bytes by virtue of being an allocated `&str`.
+ // * `dst` is valid for writes of `s.len()` bytes as `String::with_capacity_in(s.len(), bump)`
+ // above guarantees that.
+ // * Alignment is not relevant as `u8` has no alignment requirements.
+ // * Source and destination ranges cannot overlap as we just reserved the destination
+ // range from the bump.
+ unsafe { ptr::copy_nonoverlapping(s.as_ptr(), t.vec.as_mut_ptr(), len) };
+ // SAFETY: We reserved sufficient capacity for the string above.
+ // The elements at `0..len` were initialized by `copy_nonoverlapping` above.
+ unsafe { t.vec.set_len(len) };
t
}
@@ -925,7 +936,7 @@ impl<'bump> String<'bump> {
/// ```
#[inline]
pub fn push_str(&mut self, string: &str) {
- self.vec.extend_from_slice(string.as_bytes())
+ self.vec.extend_from_slice_copy(string.as_bytes())
}
/// Returns this `String`'s capacity, in bytes.
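
The new `from_str_in` body is the usual reserve/copy/`set_len` initialization idiom. A self-contained sketch of the same idiom on std's `Vec<u8>` (the `bytes_from_str` helper is hypothetical, not part of the patch):

```rust
use core::ptr;

// Copy a &str into a freshly allocated byte buffer without zero-filling it
// first, then mark the copied prefix as initialized.
fn bytes_from_str(s: &str) -> Vec<u8> {
    let len = s.len();
    let mut v: Vec<u8> = Vec::with_capacity(len);
    // SAFETY: `v` has capacity for `len` bytes, the source and the freshly
    // allocated destination cannot overlap, and `u8` is trivially aligned.
    unsafe {
        ptr::copy_nonoverlapping(s.as_ptr(), v.as_mut_ptr(), len);
        // The first `len` bytes are now initialized.
        v.set_len(len);
    }
    v
}

fn main() {
    assert_eq!(bytes_from_str("hello"), b"hello");
}
```
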
diff --git a/third_party/rust/bumpalo/src/collections/vec.rs b/third_party/rust/bumpalo/src/collections/vec.rs
index 312aa055b9..0dab700727 100644
--- a/third_party/rust/bumpalo/src/collections/vec.rs
+++ b/third_party/rust/bumpalo/src/collections/vec.rs
@@ -104,6 +104,8 @@ use core::ops::{Index, IndexMut, RangeBounds};
use core::ptr;
use core::ptr::NonNull;
use core::slice;
+#[cfg(feature = "std")]
+use std::io;
unsafe fn arith_offset<T>(p: *const T, offset: isize) -> *const T {
p.offset(offset)
@@ -1775,6 +1777,132 @@ impl<'bump, T: 'bump + Clone> Vec<'bump, T> {
}
}
+impl<'bump, T: 'bump + Copy> Vec<'bump, T> {
+ /// Helper method to copy all of the items in `other` and append them to the end of `self`.
+ ///
+ /// SAFETY:
+ /// * The caller is responsible for:
+ /// * calling [`reserve`](Self::reserve) beforehand to guarantee that there is enough
+ /// capacity to store `other.len()` more items.
+ /// * guaranteeing that `self` and `other` do not overlap.
+ unsafe fn extend_from_slice_copy_unchecked(&mut self, other: &[T]) {
+ let old_len = self.len();
+ debug_assert!(old_len + other.len() <= self.capacity());
+
+ // SAFETY:
+ // * `src` is valid for reads of `other.len()` values by virtue of being a `&[T]`.
+ // * `dst` is valid for writes of `other.len()` items because the caller of this
+ // method is required to `reserve` capacity to store at least `other.len()` items
+ // beforehand.
+ // * Because `src` comes from a `&[T]` and `dst` points into the `Vec<T>`'s buffer,
+ // `copy_nonoverlapping`'s alignment requirements are met.
+ // * The caller is required to guarantee that the source and destination ranges do not overlap.
+ unsafe {
+ let src = other.as_ptr();
+ let dst = self.as_mut_ptr().add(old_len);
+ ptr::copy_nonoverlapping(src, dst, other.len());
+ self.set_len(old_len + other.len());
+ }
+ }
+
+ /// Copies all elements in the slice `other` and appends them to the `Vec`.
+ ///
+ /// Note that this function is the same as [`extend_from_slice`] except that it is optimized for
+ /// slices of types that implement the `Copy` trait. If and when Rust gets specialization
+ /// this function will likely be deprecated (but still available).
+ ///
+ /// To copy and append the data from multiple source slices at once, see
+ /// [`extend_from_slices_copy`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1];
+ /// vec.extend_from_slice_copy(&[2, 3, 4]);
+ /// assert_eq!(vec, [1, 2, 3, 4]);
+ /// ```
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 'H' as u8];
+ /// vec.extend_from_slice_copy("ello, world!".as_bytes());
+ /// assert_eq!(vec, "Hello, world!".as_bytes());
+ /// ```
+ ///
+ /// [`extend_from_slice`]: #method.extend_from_slice
+ /// [`extend_from_slices_copy`]: #method.extend_from_slices_copy
+ pub fn extend_from_slice_copy(&mut self, other: &[T]) {
+ // Reserve space in the Vec for the values to be added
+ self.reserve(other.len());
+
+ // Copy values into the space that was just reserved
+ // SAFETY:
+ // * `self` has enough capacity to store `other.len()` more items as `self.reserve(other.len())`
+ // above guarantees that.
+ // * Source and destination data ranges cannot overlap as we just reserved the destination
+ // range from the bump.
+ unsafe {
+ self.extend_from_slice_copy_unchecked(other);
+ }
+ }
+
+ /// For each slice in `slices`, copies all elements in the slice and appends them to the `Vec`.
+ ///
+ /// This method is equivalent to calling [`extend_from_slice_copy`] in a loop, but is able
+ /// to precompute the total amount of space to reserve in advance. This reduces the potential
+ /// maximum number of reallocations needed from one-per-slice to just one.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 1];
+ /// vec.extend_from_slices_copy(&[&[2, 3], &[], &[4]]);
+ /// assert_eq!(vec, [1, 2, 3, 4]);
+ /// ```
+ ///
+ /// ```
+ /// use bumpalo::{Bump, collections::Vec};
+ ///
+ /// let b = Bump::new();
+ ///
+ /// let mut vec = bumpalo::vec![in &b; 'H' as u8];
+ /// vec.extend_from_slices_copy(&["ello,".as_bytes(), &[], " world!".as_bytes()]);
+ /// assert_eq!(vec, "Hello, world!".as_bytes());
+ /// ```
+ ///
+ /// [`extend_from_slice_copy`]: #method.extend_from_slice_copy
+ pub fn extend_from_slices_copy(&mut self, slices: &[&[T]]) {
+ // Reserve the total amount of capacity we'll need to safely append the aggregated contents
+ // of each slice in `slices`.
+ let capacity_to_reserve: usize = slices.iter().map(|slice| slice.len()).sum();
+ self.reserve(capacity_to_reserve);
+
+ // SAFETY:
+ // * `dst` is valid for writes of `capacity_to_reserve` items as
+ // `self.reserve(capacity_to_reserve)` above guarantees that.
+ // * Source and destination ranges cannot overlap as we just reserved the destination
+ // range from the bump.
+ unsafe {
+ // Copy the contents of each slice onto the end of `self`
+ slices.iter().for_each(|slice| {
+ self.extend_from_slice_copy_unchecked(slice);
+ });
+ }
+ }
+}
+
// This code generalises `extend_with_{element,default}`.
trait ExtendWith<T> {
fn next(&mut self) -> T;
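
The point of `extend_from_slices_copy` is that summing the slice lengths up front turns one potential reallocation per slice into at most one overall. A sketch of that reasoning on std's `Vec` (the `extend_from_slices` helper is hypothetical; the bumpalo version does the same against bump-allocated storage):

```rust
// Reserve once for the aggregate length, then append each slice; the loop
// can no longer trigger a reallocation of its own.
fn extend_from_slices<T: Copy>(dst: &mut Vec<T>, slices: &[&[T]]) {
    let total: usize = slices.iter().map(|s| s.len()).sum();
    dst.reserve(total);
    for s in slices {
        dst.extend_from_slice(s);
    }
}

fn main() {
    let mut v = vec![1u8];
    extend_from_slices(&mut v, &[&[2, 3], &[], &[4]]);
    assert_eq!(v, [1, 2, 3, 4]);
}
```
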
@@ -2612,3 +2740,23 @@ where
}
}
}
+
+#[cfg(feature = "std")]
+impl<'bump> io::Write for Vec<'bump, u8> {
+ #[inline]
+ fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+ self.extend_from_slice_copy(buf);
+ Ok(buf.len())
+ }
+
+ #[inline]
+ fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
+ self.extend_from_slice_copy(buf);
+ Ok(())
+ }
+
+ #[inline]
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+}
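
With the `io::Write` impl in place (behind the crate's "std" feature), a bump-backed byte vector can be used anywhere a writer is expected. A usage sketch, assuming bumpalo's `collections` feature is also enabled:

```rust
use std::io::Write;

use bumpalo::{collections::Vec, Bump};

fn main() -> std::io::Result<()> {
    let b = Bump::new();
    let mut out: Vec<u8> = Vec::new_in(&b);
    // `write!` goes through `io::Write::write_fmt`, which lands in the
    // `extend_from_slice_copy`-based `write_all` added by this patch.
    write!(out, "{} + {} = {}", 1, 2, 1 + 2)?;
    assert_eq!(&out[..], b"1 + 2 = 3");
    Ok(())
}
```
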